gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server.coordination;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.client.cache.CacheConfig;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.client.cache.ForegroundCachePopulator;
import org.apache.druid.client.cache.LocalCacheProvider;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.MapUtils;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.YieldingAccumulator;
import org.apache.druid.java.util.common.guava.YieldingSequenceBase;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.query.ConcatQueryRunner;
import org.apache.druid.query.DefaultQueryMetrics;
import org.apache.druid.query.Druids;
import org.apache.druid.query.NoopQueryRunner;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryMetrics;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.QueryToolChest;
import org.apache.druid.query.Result;
import org.apache.druid.query.aggregation.MetricManipulationFn;
import org.apache.druid.query.search.SearchQuery;
import org.apache.druid.query.search.SearchResultValue;
import org.apache.druid.segment.AbstractSegment;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.ReferenceCountingSegment;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.loading.SegmentLoader;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.druid.server.SegmentManager;
import org.apache.druid.server.initialization.ServerConfig;
import org.apache.druid.server.metrics.NoopServiceEmitter;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
 * Tests for {@link ServerManager}: loading, querying and dropping segments,
 * and verifying that reference counting keeps a segment open (not closed)
 * until every in-flight query over it has finished.
 *
 * Thread coordination uses three latches:
 * - queryNotifyLatch: counted down by the query once it has started executing;
 * - queryWaitYieldLatch: counted down by the test to let the query build its yielder;
 * - queryWaitLatch: counted down by the test to let the query produce its results.
 */
public class ServerManagerTest
{
  private ServerManager serverManager;
  private MyQueryRunnerFactory factory;
  // Released by the test thread to unblock result production (see BlockingSequence).
  private CountDownLatch queryWaitLatch;
  // Released by the test thread to unblock yielder construction.
  private CountDownLatch queryWaitYieldLatch;
  // Signalled by the query thread once execution has begun.
  private CountDownLatch queryNotifyLatch;
  private ExecutorService serverManagerExec;
  private SegmentManager segmentManager;

  @Before
  public void setUp()
  {
    EmittingLogger.registerEmitter(new NoopServiceEmitter());

    queryWaitLatch = new CountDownLatch(1);
    queryWaitYieldLatch = new CountDownLatch(1);
    queryNotifyLatch = new CountDownLatch(1);
    factory = new MyQueryRunnerFactory(queryWaitLatch, queryWaitYieldLatch, queryNotifyLatch);

    serverManagerExec = Executors.newFixedThreadPool(2);
    // Loader that fabricates in-memory SegmentForTesting instances from the
    // "version"/"interval" entries placed into the load spec by loadQueryable().
    segmentManager = new SegmentManager(
        new SegmentLoader()
        {
          @Override
          public boolean isSegmentLoaded(DataSegment segment)
          {
            return false;
          }

          @Override
          public Segment getSegment(final DataSegment segment)
          {
            return new SegmentForTesting(
                MapUtils.getString(segment.getLoadSpec(), "version"),
                (Interval) segment.getLoadSpec().get("interval")
            );
          }

          @Override
          public File getSegmentFiles(DataSegment segment)
          {
            throw new UnsupportedOperationException();
          }

          @Override
          public void cleanup(DataSegment segment)
          {
          }
        }
    );
    serverManager = new ServerManager(
        new QueryRunnerFactoryConglomerate()
        {
          // Every query type is routed to the blocking test factory.
          @Override
          public <T, QueryType extends Query<T>> QueryRunnerFactory<T, QueryType> findFactory(QueryType query)
          {
            return (QueryRunnerFactory) factory;
          }
        },
        new NoopServiceEmitter(),
        serverManagerExec,
        new ForegroundCachePopulator(new DefaultObjectMapper(), new CachePopulatorStats(), -1),
        new DefaultObjectMapper(),
        new LocalCacheProvider().get(),
        new CacheConfig(),
        segmentManager,
        new ServerConfig()
    );

    // Baseline timeline: daily segments for "test" (version "2" overshadows "1"
    // on 2011-04-02), hourly version-"2" segments on 2011-04-04, and two daily
    // segments for datasource "test2".
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-01"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-02"));
    loadQueryable("test", "2", Intervals.of("P1d/2011-04-02"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-03"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-04"));
    loadQueryable("test", "1", Intervals.of("P1d/2011-04-05"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T01"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T02"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T03"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T05"));
    loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T06"));
    loadQueryable("test2", "1", Intervals.of("P1d/2011-04-01"));
    loadQueryable("test2", "1", Intervals.of("P1d/2011-04-02"));
  }

  /**
   * Querying a single day returns only the segment for that day; a two-day
   * query returns the highest-version segment for each day.
   */
  @Test
  public void testSimpleGet()
  {
    Future future = assertQueryable(
        Granularities.DAY,
        "test",
        Intervals.of("P1d/2011-04-01"),
        ImmutableList.of(
            new Pair<String, Interval>("1", Intervals.of("P1d/2011-04-01"))
        )
    );
    waitForTestVerificationAndCleanup(future);

    future = assertQueryable(
        Granularities.DAY,
        "test", Intervals.of("P2d/2011-04-02"),
        ImmutableList.of(
            new Pair<String, Interval>("1", Intervals.of("P1d/2011-04-01")),
            new Pair<String, Interval>("2", Intervals.of("P1d/2011-04-02"))
        )
    );
    waitForTestVerificationAndCleanup(future);
  }

  /**
   * Dropping the overshadowing version-"2" segment exposes the underlying
   * version-"1" segment for the same interval.
   */
  @Test
  public void testDelete1()
  {
    final String dataSouce = "test";
    final Interval interval = Intervals.of("2011-04-01/2011-04-02");

    Future future = assertQueryable(
        Granularities.DAY,
        dataSouce, interval,
        ImmutableList.of(
            new Pair<String, Interval>("2", interval)
        )
    );
    waitForTestVerificationAndCleanup(future);

    dropQueryable(dataSouce, "2", interval);
    future = assertQueryable(
        Granularities.DAY,
        dataSouce, interval,
        ImmutableList.of(
            new Pair<String, Interval>("1", interval)
        )
    );
    waitForTestVerificationAndCleanup(future);
  }

  /**
   * After dropping both daily segments covering 2011-04-04, queries fall back
   * to the remaining hourly version-"2" segments (note the 03-04 hour gap).
   */
  @Test
  public void testDelete2()
  {
    loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));

    Future future = assertQueryable(
        Granularities.DAY,
        "test", Intervals.of("2011-04-04/2011-04-06"),
        ImmutableList.of(
            new Pair<String, Interval>("3", Intervals.of("2011-04-04/2011-04-05"))
        )
    );
    waitForTestVerificationAndCleanup(future);

    dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));
    dropQueryable("test", "1", Intervals.of("2011-04-04/2011-04-05"));

    future = assertQueryable(
        Granularities.HOUR,
        "test", Intervals.of("2011-04-04/2011-04-04T06"),
        ImmutableList.of(
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T00/2011-04-04T01")),
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T01/2011-04-04T02")),
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T02/2011-04-04T03")),
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T04/2011-04-04T05")),
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T05/2011-04-04T06"))
        )
    );
    waitForTestVerificationAndCleanup(future);

    future = assertQueryable(
        Granularities.HOUR,
        "test", Intervals.of("2011-04-04/2011-04-04T03"),
        ImmutableList.of(
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T00/2011-04-04T01")),
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T01/2011-04-04T02")),
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T02/2011-04-04T03"))
        )
    );
    waitForTestVerificationAndCleanup(future);

    future = assertQueryable(
        Granularities.HOUR,
        "test", Intervals.of("2011-04-04T04/2011-04-04T06"),
        ImmutableList.of(
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T04/2011-04-04T05")),
            new Pair<String, Interval>("2", Intervals.of("2011-04-04T05/2011-04-04T06"))
        )
    );
    waitForTestVerificationAndCleanup(future);
  }

  /**
   * A segment dropped after its query completes is closed.
   */
  @Test
  public void testReferenceCounting() throws Exception
  {
    loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));

    Future future = assertQueryable(
        Granularities.DAY,
        "test", Intervals.of("2011-04-04/2011-04-06"),
        ImmutableList.of(
            new Pair<String, Interval>("3", Intervals.of("2011-04-04/2011-04-05"))
        )
    );

    // Wait until the query is running; it then holds one reference to the segment.
    queryNotifyLatch.await(1000, TimeUnit.MILLISECONDS);

    Assert.assertEquals(1, factory.getSegmentReferences().size());

    for (ReferenceCountingSegment referenceCountingSegment : factory.getSegmentReferences()) {
      Assert.assertEquals(1, referenceCountingSegment.getNumReferences());
    }

    queryWaitYieldLatch.countDown();

    Assert.assertTrue(factory.getAdapters().size() == 1);

    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertFalse(segmentForTesting.isClosed());
    }

    queryWaitLatch.countDown();
    future.get();

    // Query finished before the drop, so the drop closes the segment immediately.
    dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));

    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertTrue(segmentForTesting.isClosed());
    }
  }

  /**
   * A segment dropped while a query is still executing stays open until the
   * query finishes, and only then is closed.
   */
  @Test
  public void testReferenceCountingWhileQueryExecuting() throws Exception
  {
    loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));

    Future future = assertQueryable(
        Granularities.DAY,
        "test", Intervals.of("2011-04-04/2011-04-06"),
        ImmutableList.of(
            new Pair<String, Interval>("3", Intervals.of("2011-04-04/2011-04-05"))
        )
    );

    queryNotifyLatch.await(1000, TimeUnit.MILLISECONDS);

    Assert.assertEquals(1, factory.getSegmentReferences().size());

    for (ReferenceCountingSegment referenceCountingSegment : factory.getSegmentReferences()) {
      Assert.assertEquals(1, referenceCountingSegment.getNumReferences());
    }

    queryWaitYieldLatch.countDown();

    Assert.assertEquals(1, factory.getAdapters().size());

    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertFalse(segmentForTesting.isClosed());
    }

    // Drop while the query is blocked: the segment must remain open.
    dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));

    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertFalse(segmentForTesting.isClosed());
    }

    queryWaitLatch.countDown();
    future.get();

    // Once the query completes, the deferred close takes effect.
    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertTrue(segmentForTesting.isClosed());
    }
  }

  /**
   * Dropping the same segment twice while a query holds it must not close it
   * early (or double-close it); the close still happens after the query ends.
   */
  @Test
  public void testMultipleDrops() throws Exception
  {
    loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));

    Future future = assertQueryable(
        Granularities.DAY,
        "test", Intervals.of("2011-04-04/2011-04-06"),
        ImmutableList.of(
            new Pair<String, Interval>("3", Intervals.of("2011-04-04/2011-04-05"))
        )
    );

    queryNotifyLatch.await(1000, TimeUnit.MILLISECONDS);

    Assert.assertEquals(1, factory.getSegmentReferences().size());

    for (ReferenceCountingSegment referenceCountingSegment : factory.getSegmentReferences()) {
      Assert.assertEquals(1, referenceCountingSegment.getNumReferences());
    }

    queryWaitYieldLatch.countDown();

    Assert.assertEquals(1, factory.getAdapters().size());

    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertFalse(segmentForTesting.isClosed());
    }

    // Double drop while the query is still running.
    dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));
    dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05"));

    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertFalse(segmentForTesting.isClosed());
    }

    queryWaitLatch.countDown();
    future.get();

    for (SegmentForTesting segmentForTesting : factory.getAdapters()) {
      Assert.assertTrue(segmentForTesting.isClosed());
    }
  }

  /**
   * Releases both blocking latches, waits for the query future (re-throwing
   * any assertion failure from the query thread) and clears recorded adapters.
   */
  private void waitForTestVerificationAndCleanup(Future future)
  {
    try {
      queryNotifyLatch.await(1000, TimeUnit.MILLISECONDS);
      queryWaitYieldLatch.countDown();
      queryWaitLatch.countDown();
      future.get();
      factory.clearAdapters();
    }
    catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }

  /**
   * Submits a SearchQuery over {@code interval} and returns a Future that,
   * when the latches are released, asserts the segments actually queried
   * match {@code expected} (version, interval) pairs in order.
   */
  private Future assertQueryable(
      Granularity granularity,
      String dataSource,
      Interval interval,
      List<Pair<String, Interval>> expected
  )
  {
    final Iterator<Pair<String, Interval>> expectedIter = expected.iterator();
    final List<Interval> intervals = Collections.singletonList(interval);
    final SearchQuery query = Druids.newSearchQueryBuilder()
                                    .dataSource(dataSource)
                                    .intervals(intervals)
                                    .granularity(granularity)
                                    .limit(10000)
                                    .query("wow")
                                    .build();
    final QueryRunner<Result<SearchResultValue>> runner = serverManager.getQueryRunnerForIntervals(
        query,
        intervals
    );
    return serverManagerExec.submit(
        new Runnable()
        {
          @Override
          public void run()
          {
            Map<String, Object> context = new HashMap<String, Object>();
            Sequence<Result<SearchResultValue>> seq = runner.run(QueryPlus.wrap(query), context);
            // Materialize the sequence so the factory records every segment touched.
            seq.toList();
            Iterator<SegmentForTesting> adaptersIter = factory.getAdapters().iterator();

            while (expectedIter.hasNext() && adaptersIter.hasNext()) {
              Pair<String, Interval> expectedVals = expectedIter.next();
              SegmentForTesting value = adaptersIter.next();

              Assert.assertEquals(expectedVals.lhs, value.getVersion());
              Assert.assertEquals(expectedVals.rhs, value.getInterval());
            }

            // Both iterators must be exhausted: no missing and no extra segments.
            Assert.assertFalse(expectedIter.hasNext());
            Assert.assertFalse(adaptersIter.hasNext());
          }
        }
    );
  }

  /**
   * Announces a segment whose load spec carries the version/interval that the
   * test SegmentLoader uses to fabricate a SegmentForTesting.
   */
  public void loadQueryable(String dataSource, String version, Interval interval)
  {
    try {
      segmentManager.loadSegment(
          new DataSegment(
              dataSource,
              interval,
              version,
              ImmutableMap.of("version", version, "interval", interval),
              Arrays.asList("dim1", "dim2", "dim3"),
              Arrays.asList("metric1", "metric2"),
              NoneShardSpec.instance(),
              IndexIO.CURRENT_VERSION_ID,
              123L
          )
      );
    }
    catch (SegmentLoadingException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Drops the segment with the given identity (must mirror loadQueryable()).
   */
  public void dropQueryable(String dataSource, String version, Interval interval)
  {
    segmentManager.dropSegment(
        new DataSegment(
            dataSource,
            interval,
            version,
            ImmutableMap.of("version", version, "interval", interval),
            Arrays.asList("dim1", "dim2", "dim3"),
            Arrays.asList("metric1", "metric2"),
            NoneShardSpec.instance(),
            IndexIO.CURRENT_VERSION_ID,
            123L
        )
    );
  }

  /**
   * Factory that records every segment it is asked to run over and returns
   * runners that block on the shared latches.
   */
  public static class MyQueryRunnerFactory implements QueryRunnerFactory<Result<SearchResultValue>, SearchQuery>
  {
    private final CountDownLatch waitLatch;
    private final CountDownLatch waitYieldLatch;
    private final CountDownLatch notifyLatch;
    // Base segments seen by createRunner(), in call order.
    private List<SegmentForTesting> adapters = new ArrayList<>();
    private List<ReferenceCountingSegment> segmentReferences = new ArrayList<>();

    public MyQueryRunnerFactory(
        CountDownLatch waitLatch,
        CountDownLatch waitYieldLatch,
        CountDownLatch notifyLatch
    )
    {
      this.waitLatch = waitLatch;
      this.waitYieldLatch = waitYieldLatch;
      this.notifyLatch = notifyLatch;
    }

    @Override
    public QueryRunner<Result<SearchResultValue>> createRunner(Segment adapter)
    {
      // ServerManager must hand us reference-counted segments with a live reference.
      if (!(adapter instanceof ReferenceCountingSegment)) {
        throw new IAE("Expected instance of ReferenceCountingSegment, got %s", adapter.getClass());
      }
      final ReferenceCountingSegment segment = (ReferenceCountingSegment) adapter;

      Assert.assertTrue(segment.getNumReferences() > 0);
      segmentReferences.add(segment);
      adapters.add((SegmentForTesting) segment.getBaseSegment());
      return new BlockingQueryRunner<>(new NoopQueryRunner<>(), waitLatch, waitYieldLatch, notifyLatch);
    }

    @Override
    public QueryRunner<Result<SearchResultValue>> mergeRunners(
        ExecutorService queryExecutor,
        Iterable<QueryRunner<Result<SearchResultValue>>> queryRunners
    )
    {
      return new ConcatQueryRunner<>(Sequences.simple(queryRunners));
    }

    @Override
    public QueryToolChest<Result<SearchResultValue>, SearchQuery> getToolchest()
    {
      return new NoopQueryToolChest<>();
    }

    public List<SegmentForTesting> getAdapters()
    {
      return adapters;
    }

    public List<ReferenceCountingSegment> getSegmentReferences()
    {
      return segmentReferences;
    }

    public void clearAdapters()
    {
      adapters.clear();
    }
  }

  /**
   * Minimal toolchest: merges are identity and metrics are defaults.
   */
  public static class NoopQueryToolChest<T, QueryType extends Query<T>> extends QueryToolChest<T, QueryType>
  {
    @Override
    public QueryRunner<T> mergeResults(QueryRunner<T> runner)
    {
      return runner;
    }

    @Override
    public QueryMetrics<Query<?>> makeMetrics(QueryType query)
    {
      return new DefaultQueryMetrics<>(new DefaultObjectMapper());
    }

    @Override
    public Function<T, T> makePreComputeManipulatorFn(QueryType query, MetricManipulationFn fn)
    {
      return Functions.identity();
    }

    @Override
    public TypeReference<T> getResultTypeReference()
    {
      return new TypeReference<T>()
      {
      };
    }
  }

  /**
   * In-memory segment that only tracks its identity and whether close() ran.
   */
  private static class SegmentForTesting extends AbstractSegment
  {
    private final String version;
    private final Interval interval;
    private final Object lock = new Object();
    private volatile boolean closed = false;

    SegmentForTesting(
        String version,
        Interval interval
    )
    {
      this.version = version;
      this.interval = interval;
    }

    public String getVersion()
    {
      return version;
    }

    public Interval getInterval()
    {
      return interval;
    }

    @Override
    public SegmentId getId()
    {
      return SegmentId.dummy(version);
    }

    public boolean isClosed()
    {
      return closed;
    }

    @Override
    public Interval getDataInterval()
    {
      return interval;
    }

    @Override
    public QueryableIndex asQueryableIndex()
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public StorageAdapter asStorageAdapter()
    {
      throw new UnsupportedOperationException();
    }

    @Override
    public void close()
    {
      synchronized (lock) {
        closed = true;
      }
    }
  }

  /**
   * QueryRunner decorator that wraps its delegate's results in a BlockingSequence.
   */
  private static class BlockingQueryRunner<T> implements QueryRunner<T>
  {
    private final QueryRunner<T> runner;
    private final CountDownLatch waitLatch;
    private final CountDownLatch waitYieldLatch;
    private final CountDownLatch notifyLatch;

    public BlockingQueryRunner(
        QueryRunner<T> runner,
        CountDownLatch waitLatch,
        CountDownLatch waitYieldLatch,
        CountDownLatch notifyLatch
    )
    {
      this.runner = runner;
      this.waitLatch = waitLatch;
      this.waitYieldLatch = waitYieldLatch;
      this.notifyLatch = notifyLatch;
    }

    @Override
    public Sequence<T> run(QueryPlus<T> queryPlus, Map<String, Object> responseContext)
    {
      return new BlockingSequence<>(runner.run(queryPlus, responseContext), waitLatch, waitYieldLatch, notifyLatch);
    }
  }

  /**
   * Sequence that signals notifyLatch on start, blocks on waitYieldLatch
   * before yielding, and blocks on waitLatch before producing each value.
   * All waits are bounded (1s) so a test failure can't hang the suite.
   */
  private static class BlockingSequence<T> extends YieldingSequenceBase<T>
  {
    private final Sequence<T> baseSequence;
    private final CountDownLatch waitLatch;
    private final CountDownLatch waitYieldLatch;
    private final CountDownLatch notifyLatch;

    private BlockingSequence(
        Sequence<T> baseSequence,
        CountDownLatch waitLatch,
        CountDownLatch waitYieldLatch,
        CountDownLatch notifyLatch
    )
    {
      this.baseSequence = baseSequence;
      this.waitLatch = waitLatch;
      this.waitYieldLatch = waitYieldLatch;
      this.notifyLatch = notifyLatch;
    }

    @Override
    public <OutType> Yielder<OutType> toYielder(
        final OutType initValue,
        final YieldingAccumulator<OutType, T> accumulator
    )
    {
      // Tell the test thread that query execution has started.
      notifyLatch.countDown();

      try {
        waitYieldLatch.await(1000, TimeUnit.MILLISECONDS);
      }
      catch (Exception e) {
        throw Throwables.propagate(e);
      }

      final Yielder<OutType> baseYielder = baseSequence.toYielder(initValue, accumulator);
      return new Yielder<OutType>()
      {
        @Override
        public OutType get()
        {
          try {
            waitLatch.await(1000, TimeUnit.MILLISECONDS);
          }
          catch (Exception e) {
            throw Throwables.propagate(e);
          }
          return baseYielder.get();
        }

        @Override
        public Yielder<OutType> next(OutType initValue)
        {
          return baseYielder.next(initValue);
        }

        @Override
        public boolean isDone()
        {
          return baseYielder.isDone();
        }

        @Override
        public void close() throws IOException
        {
          baseYielder.close();
        }
      };
    }
  }
}
| |
package interfaz;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Rectangle;
import dominio.Personaje;
import juego.Pantalla;
import recursos.Recursos;
/**
 * In-game battle menu. Draws the six skill buttons (two race skills, three
 * caste skills, plus "Ser Energizado") with their captions and the turn
 * banner, and maps mouse clicks back to button indices.
 */
public class MenuBatalla {

    /** Index 4 into {@code BOTONES} / habilidad arrays. */
    private static final int BOTONESCUATRO = 4;
    /** Index 3. */
    private static final int BOTONESTRES = 3;
    /** Index 2. */
    private static final int BOTONESDOS = 2;
    /** Index 1. */
    private static final int BOTONESUNO = 1;
    /** Index 0. */
    private static final int BOTONESCERO = 0;
    /** Index 5. */
    private static final int BOTONESCINCO = 5;
    /** Y offset (relative to {@code Y}) of the second caption row. */
    private static final int SUMAYGETHABILIDADES2 = 168;
    /** Y offset (relative to {@code Y}) of the first caption row. */
    private static final int SUMAYGETHABILIDADES = 94;
    /** X offset of the third caption column (caste skill 3 / energize). */
    private static final int SUMAXHABILIDADESCASTA2 = 442;
    /** X offset of the second caption column (caste skills). */
    private static final int SUMAXHABILIDADESCASTA = 268;
    /** X offset of the first caption column (race skills). */
    private static final int SUMAXHABILIDADESRAZA = 95;
    /** Caption font size in points. */
    private static final int TAMANIOCATORCE = 14;
    /** Y offset of the turn banner rectangle. */
    private static final int SUMAYRECTANGULO = 5;
    /** Height of the turn banner rectangle. */
    private static final int ANCHURA = 20;
    /** Screen X of the menu's top-left corner. */
    private static final int X = 100;
    /** Screen Y of the menu's top-left corner. */
    private static final int Y = 380;
    /** Width and height, in pixels, of each (square) skill button. */
    private static final int ANCHOBOTON = 40;
    /** Top-left {x, y} screen position of each of the six buttons. */
    private static final int[][] BOTONES = {{X + 48, Y + 72 },
            {X + 48, Y + 146 }, {X + 221, Y + 72 }, {X + 221, Y + 146 },
            {X + 394, Y + 72 }, {X + 394, Y + 146 } };

    /** Whether it is this player's turn (menu accepts clicks). */
    private boolean habilitado;
    /** Character whose skills are displayed. */
    private Personaje personaje;

    /**
     * Instantiates a new battle menu.
     *
     * @param habilitadoParam whether the menu starts enabled (player's turn)
     * @param personajeParam  character whose skills populate the buttons
     */
    public MenuBatalla(final boolean habilitadoParam,
            final Personaje personajeParam) {
        this.habilitado = habilitadoParam;
        this.personaje = personajeParam;
    }

    /**
     * Renders the menu background, the six skill buttons, their captions and
     * the turn banner.
     *
     * @param g graphics context to draw into
     */
    public void graficar(final Graphics g) {
        // Background depends on whose turn it is.
        if (habilitado) {
            g.drawImage(Recursos.getMenuBatalla(), X, Y, null);
        } else {
            g.drawImage(Recursos.getMenuBatallaDeshabilitado(), X, Y, null);
        }

        // Draw the skill buttons: race skills 0-1, caste skills 0-2, energize.
        g.drawImage(
                Recursos.getHabilidades()
                        .get(personaje.getHabilidadesRaza()[0]),
                BOTONES[BOTONESCERO][BOTONESCERO],
                BOTONES[BOTONESCERO][BOTONESUNO], ANCHOBOTON, ANCHOBOTON, null);
        g.drawImage(
                Recursos.getHabilidades()
                        .get(personaje.getHabilidadesRaza()[1]),
                BOTONES[BOTONESUNO][BOTONESCERO],
                BOTONES[BOTONESUNO][BOTONESUNO], ANCHOBOTON, ANCHOBOTON, null);
        g.drawImage(
                Recursos.getHabilidades()
                        .get(personaje.getHabilidadesCasta()[0]),
                BOTONES[BOTONESDOS][BOTONESCERO],
                BOTONES[BOTONESDOS][BOTONESUNO], ANCHOBOTON, ANCHOBOTON, null);
        g.drawImage(
                Recursos.getHabilidades()
                        .get(personaje.getHabilidadesCasta()[1]),
                BOTONES[BOTONESTRES][BOTONESCERO],
                BOTONES[BOTONESTRES][BOTONESUNO], ANCHOBOTON, ANCHOBOTON, null);
        g.drawImage(
                Recursos.getHabilidades()
                        .get(personaje.getHabilidadesCasta()[2]),
                BOTONES[BOTONESCUATRO][BOTONESCERO],
                BOTONES[BOTONESCUATRO][BOTONESUNO], ANCHOBOTON, ANCHOBOTON,
                null);
        g.drawImage(Recursos.getHabilidades().get("Ser Energizado"),
                BOTONES[BOTONESCINCO][BOTONESCERO],
                BOTONES[BOTONESCINCO][BOTONESUNO], ANCHOBOTON, ANCHOBOTON,
                null);

        // Draw the captions (Font.BOLD instead of the magic constant 1).
        g.setFont(new Font("Book Antiqua", Font.BOLD, TAMANIOCATORCE));
        g.drawString(personaje.getHabilidadesRaza()[0],
                X + SUMAXHABILIDADESRAZA, Y + SUMAYGETHABILIDADES);
        g.drawString(personaje.getHabilidadesRaza()[1],
                X + SUMAXHABILIDADESRAZA, Y + SUMAYGETHABILIDADES2);
        g.drawString(personaje.getHabilidadesCasta()[0],
                X + SUMAXHABILIDADESCASTA, Y + SUMAYGETHABILIDADES);
        g.drawString(personaje.getHabilidadesCasta()[1],
                X + SUMAXHABILIDADESCASTA, Y + SUMAYGETHABILIDADES2);
        g.drawString(personaje.getHabilidadesCasta()[2],
                X + SUMAXHABILIDADESCASTA2, Y + SUMAYGETHABILIDADES);
        g.drawString("Ser energizado", X + SUMAXHABILIDADESCASTA2,
                Y + SUMAYGETHABILIDADES2);

        // Draw whose turn it is, centered over the menu.
        g.setColor(Color.WHITE);
        if (habilitado) {
            Pantalla.centerString(g,
                    new Rectangle(X, Y + SUMAYRECTANGULO,
                            Recursos.getMenuBatalla().getWidth(), ANCHURA),
                    "Mi Turno");
        } else {
            Pantalla.centerString(g,
                    new Rectangle(X, Y + SUMAYRECTANGULO,
                            Recursos.getMenuBatalla().getWidth(), ANCHURA),
                    "Turno Rival");
        }
    }

    /**
     * Resolves a mouse click to a button number.
     *
     * @param mouseX mouse X in screen coordinates
     * @param mouseY mouse Y in screen coordinates
     * @return 1-6 for the clicked button, or 0 when the menu is disabled or
     *         no button contains the point
     */
    public int getBotonClickeado(final int mouseX, final int mouseY) {
        if (!habilitado) {
            return 0;
        }
        for (int i = 0; i < BOTONES.length; i++) {
            if (mouseX >= BOTONES[i][0] && mouseX <= BOTONES[i][0] + ANCHOBOTON
                    && mouseY >= BOTONES[i][1]
                    && mouseY <= BOTONES[i][1] + ANCHOBOTON) {
                // Buttons are reported 1-based; 0 is reserved for "no button".
                return i + 1;
            }
        }
        return 0;
    }

    /**
     * Tests whether a click landed anywhere inside the menu image.
     *
     * @param mouseX mouse X in screen coordinates
     * @param mouseY mouse Y in screen coordinates
     * @return true only if the point is inside the menu AND the menu is enabled
     */
    public boolean clickEnMenu(final int mouseX, final int mouseY) {
        if (mouseX >= X && mouseX <= X + Recursos.getMenuBatalla().getWidth()
                && mouseY >= Y
                && mouseY <= Y + Recursos.getMenuBatalla().getHeight()) {
            return habilitado;
        }
        return false;
    }

    /**
     * Enables or disables the menu (turn change).
     *
     * @param b the new enabled state
     */
    public void setHabilitado(final boolean b) {
        habilitado = b;
    }
}
| |
/*
* (C) 2007-2012 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Authors:
* wuhua <wq163@163.com> , boyan <killme2008@gmail.com>
*/
package com.taobao.metamorphosis.client.consumer;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.zip.CRC32;

import com.taobao.metamorphosis.network.ByteUtils;
/**
 * Known hashing algorithms for locating a server for a key. Note that all hash
 * algorithms return 64-bits of hash, but only the lower 32-bits are
 * significant. This allows a positive 32-bit number to be returned for all
 * cases.
 */
public enum HashAlgorithm {
    /**
     * Native hash (String.hashCode()).
     */
    NATIVE_HASH,
    /**
     * CRC32_HASH as used by the perl API. This will be more consistent both
     * across multiple API users as well as java versions, but is mostly likely
     * significantly slower.
     */
    CRC32_HASH,
    /**
     * FNV hashes are designed to be fast while maintaining a low collision
     * rate. The FNV speed allows one to quickly hash lots of data while
     * maintaining a reasonable collision rate.
     *
     * @see http://www.isthe.com/chongo/tech/comp/fnv/
     * @see http://en.wikipedia.org/wiki/Fowler_Noll_Vo_hash
     */
    FNV1_64_HASH,
    /**
     * Variation of FNV.
     */
    FNV1A_64_HASH,
    /**
     * 32-bit FNV1.
     */
    FNV1_32_HASH,
    /**
     * 32-bit FNV1a.
     */
    FNV1A_32_HASH,
    /**
     * MD5-based hash algorithm used by ketama.
     */
    KETAMA_HASH,
    /**
     * From mysql source
     */
    MYSQL_HASH,
    ELF_HASH,
    RS_HASH,
    /**
     * From lua source,it is used for long key
     */
    LUA_HASH,
    ELECTION_HASH,
    /**
     * The Jenkins One-at-a-time hash ,please see
     * http://www.burtleburtle.net/bob/hash/doobs.html
     */
    ONE_AT_A_TIME;

    private static final long FNV_64_INIT = 0xcbf29ce484222325L;
    private static final long FNV_64_PRIME = 0x100000001b3L;

    private static final long FNV_32_INIT = 2166136261L;
    private static final long FNV_32_PRIME = 16777619;

    /**
     * Compute the hash for the given key.
     *
     * @param k key to hash; must not be null
     * @return a positive integer hash (lower 32 bits of the 64-bit value)
     */
    public long hash(final String k) {
        long rv = 0;
        switch (this) {
        case NATIVE_HASH:
            rv = k.hashCode();
            break;
        case CRC32_HASH:
            // return (crc32(shift) >> 16) & 0x7fff;
            final CRC32 crc32 = new CRC32();
            crc32.update(ByteUtils.getBytes(k));
            rv = crc32.getValue() >> 16 & 0x7fff;
            break;
        case FNV1_64_HASH: {
            // Thanks to pierre@demartines.com for the pointer
            rv = FNV_64_INIT;
            final int len = k.length();
            for (int i = 0; i < len; i++) {
                rv *= FNV_64_PRIME;
                rv ^= k.charAt(i);
            }
        }
            break;
        case FNV1A_64_HASH: {
            rv = FNV_64_INIT;
            final int len = k.length();
            for (int i = 0; i < len; i++) {
                rv ^= k.charAt(i);
                rv *= FNV_64_PRIME;
            }
        }
            break;
        case FNV1_32_HASH: {
            rv = FNV_32_INIT;
            final int len = k.length();
            for (int i = 0; i < len; i++) {
                rv *= FNV_32_PRIME;
                rv ^= k.charAt(i);
            }
        }
            break;
        case FNV1A_32_HASH: {
            rv = FNV_32_INIT;
            final int len = k.length();
            for (int i = 0; i < len; i++) {
                rv ^= k.charAt(i);
                rv *= FNV_32_PRIME;
            }
        }
            break;
        case ELECTION_HASH:
        case KETAMA_HASH:
            // Use the low 4 bytes of the MD5 digest, little-endian.
            final byte[] bKey = computeMd5(k);
            rv =
                    (long) (bKey[3] & 0xFF) << 24 | (long) (bKey[2] & 0xFF) << 16 | (long) (bKey[1] & 0xFF) << 8
                    | bKey[0] & 0xFF;
            break;
        case MYSQL_HASH:
            int nr2 = 4;
            for (int i = 0; i < k.length(); i++) {
                rv ^= ((rv & 63) + nr2) * k.charAt(i) + (rv << 8);
                nr2 += 3;
            }
            break;
        case ELF_HASH:
            long x = 0;
            for (int i = 0; i < k.length(); i++) {
                rv = (rv << 4) + k.charAt(i);
                if ((x = rv & 0xF0000000L) != 0) {
                    rv ^= x >> 24;
                    rv &= ~x;
                }
            }
            rv = rv & 0x7FFFFFFF;
            break;
        case RS_HASH:
            final long b = 378551;
            long a = 63689;
            for (int i = 0; i < k.length(); i++) {
                rv = rv * a + k.charAt(i);
                a *= b;
            }
            rv = rv & 0x7FFFFFFF;
            break;
        case LUA_HASH:
            // Samples roughly 32 characters for long keys (lua string hash).
            final int step = (k.length() >> 5) + 1;
            rv = k.length();
            for (int len = k.length(); len >= step; len -= step) {
                rv = rv ^ (rv << 5) + (rv >> 2) + k.charAt(len - 1);
            }
            break;
        case ONE_AT_A_TIME: {
            // StandardCharsets.UTF_8 is guaranteed present on every JVM, so the
            // previous try/catch around getBytes("utf-8") was dead code.
            int hash = 0;
            for (final byte bt : k.getBytes(StandardCharsets.UTF_8)) {
                hash += bt & 0xFF;
                hash += hash << 10;
                hash ^= hash >>> 6;
            }
            hash += hash << 3;
            hash ^= hash >>> 11;
            hash += hash << 15;
            // NOTE(review): returns the raw int (possibly negative) and skips
            // the 32-bit mask below, unlike every other algorithm. Preserved
            // for hash-placement compatibility — confirm before changing.
            return hash;
        }
        default:
            assert false;
        }
        return rv & 0xffffffffL; /* Truncate to 32-bits */
    }

    /**
     * Get the MD5 digest of the given key.
     *
     * @param k key to digest
     * @return 16-byte MD5 digest
     * @throws RuntimeException if the JVM lacks an MD5 provider (never in practice)
     */
    public static byte[] computeMd5(final String k) {
        MessageDigest md5;
        try {
            md5 = MessageDigest.getInstance("MD5");
        }
        catch (final NoSuchAlgorithmException e) {
            throw new RuntimeException("MD5 not supported", e);
        }
        md5.reset();
        md5.update(ByteUtils.getBytes(k));
        return md5.digest();
    }
}
| |
package org.robolectric;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.view.Display;
import android.view.View;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.res.builder.RobolectricPackageManager;
import org.robolectric.shadows.ShadowApplication;
import org.robolectric.shadows.ShadowDisplay;
import org.robolectric.shadows.ShadowLooper;
import org.robolectric.shadows.ShadowView;
import org.robolectric.shadows.StubViewRoot;
import org.robolectric.internal.Shadow;
import org.robolectric.internal.ShadowProvider;
import org.robolectric.util.ReflectionHelpers;
import org.robolectric.util.TestOnClickListener;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;
import static org.robolectric.Shadows.shadowOf;
@RunWith(TestRunners.WithDefaults.class)
public class RobolectricTest {

  // Captured in setUp() so System.out can be restored in tearDown().
  private PrintStream originalSystemOut;
  private ByteArrayOutputStream buff;
  private String defaultLineSeparator;

  @Before
  public void setUp() {
    // Redirect System.out into an in-memory buffer and pin the line separator
    // so output-based assertions behave the same on every platform.
    originalSystemOut = System.out;
    defaultLineSeparator = System.getProperty("line.separator");
    System.setProperty("line.separator", "\n");
    buff = new ByteArrayOutputStream();
    PrintStream testOut = new PrintStream(buff);
    System.setOut(testOut);
  }

  @After
  public void tearDown() throws Exception {
    // Restore the global state mutated in setUp().
    System.setProperty("line.separator", defaultLineSeparator);
    System.setOut(originalSystemOut);
  }

  @Test(expected = RuntimeException.class)
  public void clickOn_shouldThrowIfViewIsDisabled() throws Exception {
    View view = new View(RuntimeEnvironment.application);
    view.setEnabled(false);
    ShadowView.clickOn(view);
  }

  // These two tests intentionally leave the scheduler paused; each asserts the
  // previous test's pause did not leak, proving per-test reset works.
  @Test
  public void shouldResetBackgroundSchedulerBeforeTests() throws Exception {
    assertThat(Robolectric.getBackgroundThreadScheduler().isPaused()).isFalse();
    Robolectric.getBackgroundThreadScheduler().pause();
  }

  @Test
  public void shouldResetBackgroundSchedulerAfterTests() throws Exception {
    assertThat(Robolectric.getBackgroundThreadScheduler().isPaused()).isFalse();
    Robolectric.getBackgroundThreadScheduler().pause();
  }

  @Test
  public void idleMainLooper_executesScheduledTasks() {
    final boolean[] wasRun = new boolean[]{false};
    new Handler().postDelayed(new Runnable() {
      @Override
      public void run() {
        wasRun[0] = true;
      }
    }, 2000);

    // Task must fire exactly when the full 2000ms delay has been idled past.
    assertFalse(wasRun[0]);
    ShadowLooper.idleMainLooper(1999);
    assertFalse(wasRun[0]);
    ShadowLooper.idleMainLooper(1);
    assertTrue(wasRun[0]);
  }

  @Test
  public void shouldUseSetDensityForContexts() throws Exception {
    assertThat(new Activity().getResources().getDisplayMetrics().density).isEqualTo(1.0f);
    ShadowApplication.setDisplayMetricsDensity(1.5f);
    assertThat(new Activity().getResources().getDisplayMetrics().density).isEqualTo(1.5f);
  }

  @Test
  public void shouldUseSetDisplayForContexts() throws Exception {
    // Defaults presumably come from the test runner's default qualifiers.
    assertThat(new Activity().getResources().getDisplayMetrics().widthPixels).isEqualTo(480);
    assertThat(new Activity().getResources().getDisplayMetrics().heightPixels).isEqualTo(800);

    Display display = Shadow.newInstanceOf(Display.class);
    ShadowDisplay shadowDisplay = Shadows.shadowOf(display);
    shadowDisplay.setWidth(100);
    shadowDisplay.setHeight(200);
    ShadowApplication.setDefaultDisplay(display);

    assertThat(new Activity().getResources().getDisplayMetrics().widthPixels).isEqualTo(100);
    assertThat(new Activity().getResources().getDisplayMetrics().heightPixels).isEqualTo(200);
  }

  @Test
  public void clickOn_shouldCallClickListener() throws Exception {
    View view = new View(RuntimeEnvironment.application);
    // A parent is required so the view is considered attached/clickable.
    shadowOf(view).setMyParent(new StubViewRoot());
    TestOnClickListener testOnClickListener = new TestOnClickListener();
    view.setOnClickListener(testOnClickListener);
    ShadowView.clickOn(view);

    assertTrue(testOnClickListener.clicked);
  }

  @Test(expected = ActivityNotFoundException.class)
  public void checkActivities_shouldSetValueOnShadowApplication() throws Exception {
    ShadowApplication.getInstance().checkActivities(true);
    RuntimeEnvironment.application.startActivity(new Intent("i.dont.exist.activity"));
  }

  @Test
  public void setupActivity_returnsAVisibleActivity() throws Exception {
    LifeCycleActivity activity = Robolectric.setupActivity(LifeCycleActivity.class);

    assertThat(activity.isCreated()).isTrue();
    assertThat(activity.isStarted()).isTrue();
    assertThat(activity.isResumed()).isTrue();
    assertThat(activity.isVisible()).isTrue();
  }

  // Records the order in which reset hooks run; inspected by the test below.
  private List<String> order = new ArrayList<>();

  private class MockProvider implements ShadowProvider {
    @Override
    public void reset() {
      order.add("shadowProvider");

      // Runtime state must still be available while shadows are resetting.
      assertThat(RuntimeEnvironment.application).as("app during shadow reset").isNotNull();
      assertThat(RuntimeEnvironment.getActivityThread()).as("activityThread during shadow reset").isNotNull();
      assertThat(RuntimeEnvironment.getRobolectricPackageManager()).as("packageManager during shadow reset").isNotNull();
    }

    @Override
    public String[] getProvidedPackageNames() {
      return null;
    }
  }

  @Test
  public void reset_shouldResetShadows_beforeClearingPackageManager() {
    // Fixed: removed a stray second semicolon after this statement.
    Iterable<ShadowProvider> oldProviders = ReflectionHelpers.getStaticField(Robolectric.class, "providers");
    ShadowProvider mockProvider = new MockProvider();
    List<ShadowProvider> mockProviders = Collections.singletonList(mockProvider);

    ReflectionHelpers.setStaticField(Robolectric.class, "providers", mockProviders);

    RobolectricPackageManager mockManager = mock(RobolectricPackageManager.class);
    doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) {
        order.add("packageManager");
        return null;
      }
    }).when(mockManager).reset();

    RuntimeEnvironment.setRobolectricPackageManager(mockManager);

    try {
      Robolectric.reset();
    } finally {
      // Make sure we clean up after ourselves
      ReflectionHelpers.setStaticField(Robolectric.class, "providers", oldProviders);
    }

    assertThat(order).as("reset order").containsExactly("shadowProvider", "packageManager");
    assertThat(RuntimeEnvironment.application).as("app after reset").isNull();
    assertThat(RuntimeEnvironment.getPackageManager()).as("packageManager after reset").isNull();
    assertThat(RuntimeEnvironment.getActivityThread()).as("activityThread after reset").isNull();
  }

  @Implements(View.class)
  public static class TestShadowView {
    @Implementation
    public Context getContext() {
      return null;
    }
  }

  // Minimal Activity that records lifecycle callbacks for setupActivity tests.
  private static class LifeCycleActivity extends Activity {
    private boolean created;
    private boolean started;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
      super.onCreate(savedInstanceState);
      created = true;
    }

    @Override
    protected void onStart() {
      super.onStart();
      started = true;
    }

    public boolean isStarted() {
      return started;
    }

    public boolean isCreated() {
      return created;
    }

    public boolean isVisible() {
      // A non-null window token means the decor view is attached to a window.
      return getWindow().getDecorView().getWindowToken() != null;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.collect.*;
import org.apache.cassandra.index.IndexRegistry;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.cql3.conditions.ColumnCondition;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.filter.*;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.partitions.FilteredPartition;
import org.apache.cassandra.db.partitions.Partition;
import org.apache.cassandra.db.partitions.PartitionUpdate;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.service.CASRequest;
import org.apache.cassandra.utils.Pair;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
/**
 * Processed CAS conditions and update on potentially multiple rows of the same partition.
 */
public class CQL3CasRequest implements CASRequest
{
    public final TableMetadata metadata;
    public final DecoratedKey key;
    private final RegularAndStaticColumns conditionColumns;
    private final boolean updatesRegularRows;
    private final boolean updatesStaticRow;
    private boolean hasExists; // whether we have an IF EXISTS or IF NOT EXISTS condition

    // Conditions on the static row. We keep it separate from 'conditions' as most things related to the static row are
    // special cases anyway.
    private RowCondition staticConditions;

    // We index RowCondition by the clustering of the row they applied to for 2 reasons:
    //   1) this allows to keep things sorted to build the read command below
    //   2) this allows to detect when contradictory conditions are set (not exists with some other conditions on the same row)
    private final TreeMap<Clustering<?>, RowCondition> conditions;

    private final List<RowUpdate> updates = new ArrayList<>();
    private final List<RangeDeletion> rangeDeletions = new ArrayList<>();

    public CQL3CasRequest(TableMetadata metadata,
                          DecoratedKey key,
                          RegularAndStaticColumns conditionColumns,
                          boolean updatesRegularRows,
                          boolean updatesStaticRow)
    {
        this.metadata = metadata;
        this.key = key;
        // Sorted by the table's clustering comparator (see field comment above).
        this.conditions = new TreeMap<>(metadata.comparator);
        this.conditionColumns = conditionColumns;
        this.updatesRegularRows = updatesRegularRows;
        this.updatesStaticRow = updatesStaticRow;
    }

    // Queues the update a statement makes to one row; it is materialized later in makeUpdates().
    void addRowUpdate(Clustering<?> clustering, ModificationStatement stmt, QueryOptions options, long timestamp, int nowInSeconds)
    {
        updates.add(new RowUpdate(clustering, stmt, options, timestamp, nowInSeconds));
    }

    // Queues a range deletion; materialized later in makeUpdates().
    void addRangeDeletion(Slice slice, ModificationStatement stmt, QueryOptions options, long timestamp, int nowInSeconds)
    {
        rangeDeletions.add(new RangeDeletion(slice, stmt, options, timestamp, nowInSeconds));
    }

    // Registers an IF NOT EXISTS condition for the given row.
    public void addNotExist(Clustering<?> clustering) throws InvalidRequestException
    {
        addExistsCondition(clustering, new NotExistCondition(clustering), true);
    }

    // Registers an IF EXISTS condition for the given row.
    public void addExist(Clustering<?> clustering) throws InvalidRequestException
    {
        addExistsCondition(clustering, new ExistCondition(clustering), false);
    }

    private void addExistsCondition(Clustering<?> clustering, RowCondition condition, boolean isNotExist)
    {
        assert condition instanceof ExistCondition || condition instanceof NotExistCondition;
        RowCondition previous = getConditionsForRow(clustering);
        if (previous != null)
        {
            if (previous.getClass().equals(condition.getClass()))
            {
                // We can get here if a BATCH has 2 different statements on the same row with the same "exist" condition.
                // For instance (assuming 'k' is the full PK):
                //   BEGIN BATCH
                //      INSERT INTO t(k, v1) VALUES (0, 'foo') IF NOT EXISTS;
                //      INSERT INTO t(k, v2) VALUES (0, 'bar') IF NOT EXISTS;
                //   APPLY BATCH;
                // Of course, those can be trivially rewritten by the user as a single INSERT statement, but we still don't
                // want this to be a problem (see #12867 in particular), so we simply return (the condition itself has
                // already been set).
                assert hasExists; // We shouldn't have a previous condition unless hasExists has been set already.
                return;
            }
            else
            {
                // these should be prevented by the parser, but it doesn't hurt to check
                throw (previous instanceof NotExistCondition || previous instanceof ExistCondition)
                    ? new InvalidRequestException("Cannot mix IF EXISTS and IF NOT EXISTS conditions for the same row")
                    : new InvalidRequestException("Cannot mix IF conditions and IF " + (isNotExist ? "NOT " : "") + "EXISTS for the same row");
            }
        }
        setConditionsForRow(clustering, condition);
        hasExists = true;
    }

    // Adds column-value conditions (IF col = ...) for the given row, merging with any existing ones.
    public void addConditions(Clustering<?> clustering, Collection<ColumnCondition> conds, QueryOptions options) throws InvalidRequestException
    {
        RowCondition condition = getConditionsForRow(clustering);
        if (condition == null)
        {
            condition = new ColumnsConditions(clustering);
            setConditionsForRow(clustering, condition);
        }
        else if (!(condition instanceof ColumnsConditions))
        {
            throw new InvalidRequestException("Cannot mix IF conditions and IF NOT EXISTS for the same row");
        }
        ((ColumnsConditions)condition).addConditions(conds, options);
    }

    // Static-row conditions live in 'staticConditions'; all others are keyed by clustering.
    // Note: intentional reference comparison against the STATIC_CLUSTERING singleton.
    private RowCondition getConditionsForRow(Clustering<?> clustering)
    {
        return clustering == Clustering.STATIC_CLUSTERING ? staticConditions : conditions.get(clustering);
    }

    private void setConditionsForRow(Clustering<?> clustering, RowCondition condition)
    {
        if (clustering == Clustering.STATIC_CLUSTERING)
        {
            assert staticConditions == null;
            staticConditions = condition;
        }
        else
        {
            RowCondition previous = conditions.put(clustering, condition);
            assert previous == null;
        }
    }

    private RegularAndStaticColumns columnsToRead()
    {
        RegularAndStaticColumns allColumns = metadata.regularAndStaticColumns();

        // If we update static row, we won't have any conditions on regular rows.
        // If we update regular row, we have to fetch all regular rows (which would satisfy column condition) and
        // static rows that take part in column condition.
        // In both cases, we're fetching enough rows to distinguish between "all conditions are nulls" and "row does not exist".
        // We have to do this as we can't rely on row marker for that (see #6623)
        Columns statics = updatesStaticRow ? allColumns.statics : conditionColumns.statics;
        Columns regulars = updatesRegularRows ? allColumns.regulars : conditionColumns.regulars;
        return new RegularAndStaticColumns(statics, regulars);
    }

    /**
     * Builds the read used to fetch the partition state that the conditions are checked against.
     */
    public SinglePartitionReadQuery readCommand(int nowInSec)
    {
        assert staticConditions != null || !conditions.isEmpty();

        // Fetch all columns, but query only the selected ones
        ColumnFilter columnFilter = ColumnFilter.selection(columnsToRead());

        // With only a static condition, we still want to make the distinction between a non-existing partition and one
        // that exists (has some live data) but has no static content. So we query the first live row of the partition.
        if (conditions.isEmpty())
            return SinglePartitionReadQuery.create(metadata,
                                                   nowInSec,
                                                   columnFilter,
                                                   RowFilter.NONE,
                                                   DataLimits.cqlLimits(1),
                                                   key,
                                                   new ClusteringIndexSliceFilter(Slices.ALL, false));

        ClusteringIndexNamesFilter filter = new ClusteringIndexNamesFilter(conditions.navigableKeySet(), false);
        return SinglePartitionReadQuery.create(metadata, nowInSec, key, columnFilter, filter);
    }

    /**
     * Checks whether the conditions represented by this object applies provided the current state of the partition on
     * which those conditions are.
     *
     * @param current the partition with current data corresponding to these conditions. More precisely, this must be
     * the result of executing the command returned by {@link #readCommand}. This can be empty but it should not be
     * {@code null}.
     * @return whether the conditions represented by this object applies or not.
     */
    public boolean appliesTo(FilteredPartition current) throws InvalidRequestException
    {
        if (staticConditions != null && !staticConditions.appliesTo(current))
            return false;

        for (RowCondition condition : conditions.values())
        {
            if (!condition.appliesTo(current))
                return false;
        }
        return true;
    }

    // Union of the columns touched by all queued row updates.
    private RegularAndStaticColumns updatedColumns()
    {
        RegularAndStaticColumns.Builder builder = RegularAndStaticColumns.builder();
        for (RowUpdate upd : updates)
            builder.addAll(upd.stmt.updatedColumns());
        return builder.build();
    }

    /**
     * Materializes the queued updates and range deletions into a single PartitionUpdate,
     * given the partition state read for the CAS (needed e.g. for list operations).
     */
    public PartitionUpdate makeUpdates(FilteredPartition current) throws InvalidRequestException
    {
        PartitionUpdate.Builder updateBuilder = new PartitionUpdate.Builder(metadata, key, updatedColumns(), conditions.size());
        for (RowUpdate upd : updates)
            upd.applyUpdates(current, updateBuilder);
        for (RangeDeletion upd : rangeDeletions)
            upd.applyUpdates(current, updateBuilder);

        PartitionUpdate partitionUpdate = updateBuilder.build();
        IndexRegistry.obtain(metadata).validate(partitionUpdate);

        return partitionUpdate;
    }

    /**
     * Due to some operation on lists, we can't generate the update that a given Modification statement does before
     * we get the values read by the initial read of Paxos. A RowUpdate thus just store the relevant information
     * (include the statement itself) to generate those updates. We'll have multiple RowUpdate for a Batch, otherwise
     * we'll have only one.
     */
    private class RowUpdate
    {
        private final Clustering<?> clustering;
        private final ModificationStatement stmt;
        private final QueryOptions options;
        private final long timestamp;
        private final int nowInSeconds;

        private RowUpdate(Clustering<?> clustering, ModificationStatement stmt, QueryOptions options, long timestamp, int nowInSeconds)
        {
            this.clustering = clustering;
            this.stmt = stmt;
            this.options = options;
            this.timestamp = timestamp;
            this.nowInSeconds = nowInSeconds;
        }

        void applyUpdates(FilteredPartition current, PartitionUpdate.Builder updateBuilder)
        {
            // Only pass the read partition state along when the statement actually needs it.
            Map<DecoratedKey, Partition> map = stmt.requiresRead() ? Collections.singletonMap(key, current) : null;
            UpdateParameters params =
                new UpdateParameters(metadata,
                                     updateBuilder.columns(),
                                     options,
                                     timestamp,
                                     nowInSeconds,
                                     stmt.getTimeToLive(options),
                                     map);
            stmt.addUpdateForKey(updateBuilder, clustering, params);
        }
    }

    // Same idea as RowUpdate, but for a slice (range) deletion instead of a single row.
    private class RangeDeletion
    {
        private final Slice slice;
        private final ModificationStatement stmt;
        private final QueryOptions options;
        private final long timestamp;
        private final int nowInSeconds;

        private RangeDeletion(Slice slice, ModificationStatement stmt, QueryOptions options, long timestamp, int nowInSeconds)
        {
            this.slice = slice;
            this.stmt = stmt;
            this.options = options;
            this.timestamp = timestamp;
            this.nowInSeconds = nowInSeconds;
        }

        void applyUpdates(FilteredPartition current, PartitionUpdate.Builder updateBuilder)
        {
            // No slice statements currently require a read, but this maintains consistency with RowUpdate, and future proofs us
            Map<DecoratedKey, Partition> map = stmt.requiresRead() ? Collections.singletonMap(key, current) : null;
            UpdateParameters params =
                new UpdateParameters(metadata,
                                     updateBuilder.columns(),
                                     options,
                                     timestamp,
                                     nowInSeconds,
                                     stmt.getTimeToLive(options),
                                     map);
            stmt.addUpdateForKey(updateBuilder, slice, params);
        }
    }

    // A condition attached to one row (identified by its clustering).
    private static abstract class RowCondition
    {
        public final Clustering<?> clustering;

        protected RowCondition(Clustering<?> clustering)
        {
            this.clustering = clustering;
        }

        // Whether the condition holds against the partition state read for the CAS.
        public abstract boolean appliesTo(FilteredPartition current) throws InvalidRequestException;
    }

    // IF NOT EXISTS: applies when the row is absent from the read partition.
    private static class NotExistCondition extends RowCondition
    {
        private NotExistCondition(Clustering<?> clustering)
        {
            super(clustering);
        }

        public boolean appliesTo(FilteredPartition current)
        {
            return current.getRow(clustering) == null;
        }
    }

    // IF EXISTS: applies when the row is present in the read partition.
    private static class ExistCondition extends RowCondition
    {
        private ExistCondition(Clustering<?> clustering)
        {
            super(clustering);
        }

        public boolean appliesTo(FilteredPartition current)
        {
            return current.getRow(clustering) != null;
        }
    }

    // A set of column-value conditions (IF col = ...) on a single row.
    private static class ColumnsConditions extends RowCondition
    {
        // Keyed by (column name, collection element) so conditions on distinct
        // collection elements of the same column are tracked separately.
        private final Multimap<Pair<ColumnIdentifier, ByteBuffer>, ColumnCondition.Bound> conditions = HashMultimap.create();

        private ColumnsConditions(Clustering<?> clustering)
        {
            super(clustering);
        }

        public void addConditions(Collection<ColumnCondition> conds, QueryOptions options) throws InvalidRequestException
        {
            for (ColumnCondition condition : conds)
            {
                ColumnCondition.Bound current = condition.bind(options);
                conditions.put(Pair.create(condition.column.name, current.getCollectionElementValue()), current);
            }
        }

        public boolean appliesTo(FilteredPartition current) throws InvalidRequestException
        {
            Row row = current.getRow(clustering);
            // All conditions must hold (logical AND).
            for (ColumnCondition.Bound condition : conditions.values())
            {
                if (!condition.appliesTo(row))
                    return false;
            }
            return true;
        }
    }

    @Override
    public String toString()
    {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
}
| |
package com.github.TKnudsen.timeseries.operations.preprocessing.univariate;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import com.github.TKnudsen.ComplexDataObject.data.entry.EntryWithComparableKey;
import com.github.TKnudsen.ComplexDataObject.data.ranking.Ranking;
import com.github.TKnudsen.ComplexDataObject.model.processors.IDataProcessor;
import com.github.TKnudsen.ComplexDataObject.model.processors.ParameterSupportTools;
import com.github.TKnudsen.ComplexDataObject.model.processors.complexDataObject.DataProcessingCategory;
import com.github.TKnudsen.timeseries.data.univariate.ITimeSeriesUnivariate;
import com.github.TKnudsen.timeseries.operations.preprocessing.TimeSeriesProcessor;
/**
* <p>
* Title: PerceptualImportantPoints
* </p>
*
* <p>
 * Description: Perceptual important points preprocessing routine which can be
* used for data reduction purposes. The timeseries to be processed is
* subsequently reduced to the size of a given pipCount. The criterion of
* reduction is based on the preservation of perceived points (min/max values).
*
* The implementation of the algorithm is in accordance to the publication
*
* Chung, F.L., Fu, T.C., Luk, R., Ng, V., Flexible Time Series Pattern Matching
* Based on Perceptually Important Points. In: Workshop on Learning from
* Temporal and Spatial Data at IJCAI (2001) 1-7
* </p>
*
* <p>
* Copyright: Copyright (c) 2016-2018
* </p>
*
* @author Juergen Bernard
* @version 1.08
*/
public class PerceptuallyImportantPoints extends TimeSeriesProcessor<ITimeSeriesUnivariate> {

    // Target number of perceptually important points the series is reduced to; always >= 2.
    private int pipCount;

    // For serialization/reflection only; leaves pipCount uninitialized.
    @SuppressWarnings("unused")
    private PerceptuallyImportantPoints() {
    }

    public PerceptuallyImportantPoints(int pipCount) {
        if (pipCount < 2)
            throw new IllegalArgumentException("PIP: parameter value <2");
        this.pipCount = pipCount;
    }

    public int getPipCount() {
        return pipCount;
    }

    public void setPipCount(int pipCount) {
        if (pipCount < 2)
            throw new IllegalArgumentException("PIP: parameter value <2");
        this.pipCount = pipCount;
    }

    @Override
    public DataProcessingCategory getPreprocessingCategory() {
        return DataProcessingCategory.DATA_REDUCTION;
    }

    @Override
    public void process(List<ITimeSeriesUnivariate> data) {
        for (ITimeSeriesUnivariate timeSeries : data)
            process(timeSeries);
    }

    /**
     * Reduces the given time series in place to at most pipCount points.
     * Series that are null or already no larger than pipCount are left untouched.
     */
    public void process(ITimeSeriesUnivariate data) {
        if (data == null)
            return;
        if (data.size() < pipCount)
            return;
        SortedSet<Long> pipTimeStamps = new TreeSet<>();
        // First and last time stamps are always kept.
        pipTimeStamps.add(data.getFirstTimestamp());
        pipTimeStamps.add(data.getLastTimestamp());
        // identify additional pips until pipCount is reached
        for (int i = 0; i < getPipCount() - 2; i++) {
            pipTimeStamps.add(calculateNextPip(data, pipTimeStamps));
        }
        // remove all data not matching the pipTmp result
        // (i-- compensates for index shift after each in-place removal)
        for (int i = 0; i < data.getTimestamps().size(); i++)
            if (!pipTimeStamps.contains(data.getTimestamp(i)))
                data.removeTimeValue(i--);
    }

    /**
     * identifies the next pip. For that purpose all intervals between existing pips
     * are investigated.
     *
     * @param data          the time series under reduction
     * @param pipTimeStamps time stamps of the pips selected so far (sorted)
     * @return the time stamp with the largest vertical distance to the line
     *         connecting its two neighboring pips
     */
    public static Long calculateNextPip(ITimeSeriesUnivariate data, SortedSet<Long> pipTimeStamps) {
        if (data.size() <= pipTimeStamps.size())
            throw new IllegalArgumentException(
                    "PIP: impossible to calculate another pip if given time series does not contain more time-value-pairs than current pip count");
        Iterator<Long> timeSeriesIterator = data.getTimestamps().iterator();
        timeSeriesIterator.next(); // first time stamp is always included
        Iterator<Long> pipTimeStampIterator = pipTimeStamps.iterator();
        Long lastPiPTimeStamp = pipTimeStampIterator.next(); // first existing pip
        // Sentinel; replaced as soon as any candidate beats the initial offset.
        Long nextPip = Long.MIN_VALUE + 1;
        double pipYOffsetCurrent = Double.NEGATIVE_INFINITY;
        while (pipTimeStampIterator.hasNext()) {
            // address a subSequence between two existing pips (pipTimeStampsSorted)
            Long nextPiPTimeStamp = pipTimeStampIterator.next();
            // calculate reference gradient and xAxisIntercept
            double gradient = (data.getValue(nextPiPTimeStamp, false) - data.getValue(lastPiPTimeStamp, false))
                    / (nextPiPTimeStamp - lastPiPTimeStamp);
            double xAxisIntercept = data.getValue(nextPiPTimeStamp, false) - (nextPiPTimeStamp * gradient);
            // identify the most applicable point within the particular interval
            // (the shared iterator advances monotonically across all intervals)
            while (timeSeriesIterator.hasNext()) {
                Long timeStamp = timeSeriesIterator.next();
                if (timeStamp >= nextPiPTimeStamp)
                    break;
                else {
                    // vertical distance of the point to the interpolation line
                    double dist = Math.abs(gradient * timeStamp + xAxisIntercept - data.getValue(timeStamp, false));
                    if (dist > pipYOffsetCurrent) {
                        pipYOffsetCurrent = dist;
                        nextPip = timeStamp;
                    }
                }
            }
            lastPiPTimeStamp = nextPiPTimeStamp;
        }
        if (nextPip == Long.MIN_VALUE + 1)
            throw new IllegalArgumentException(
                    "PerceptuallyImportantPoints.calculateNextPip: next pip calculation failed");
        return nextPip;
    }

    /**
     * calculates the interestingness value of every remaining time stamp to be the
     * next pip.
     *
     * @param data          the time series under reduction
     * @param pipTimeStamps time stamps of the pips selected so far (sorted)
     * @param rankCount parameter that limits the length of the ranking. Can be
     *                  used to cope with scalability issues.
     *
     * @return ranking of timestamps
     */
    public static Ranking<EntryWithComparableKey<Double, Long>> calculateNextPipCandidates(ITimeSeriesUnivariate data,
            SortedSet<Long> pipTimeStamps, int rankCount) {
        if (data.size() <= pipTimeStamps.size())
            throw new IllegalArgumentException(
                    "PIP: impossible to calculate another pip if given time series does not contain more time-value-pairs than current pip count");
        Ranking<EntryWithComparableKey<Double, Long>> ranking = new Ranking<>();
        Iterator<Long> timeSeriesIterator = data.getTimestamps().iterator();
        timeSeriesIterator.next(); // first time stamp is always included
        Iterator<Long> pipTimeStampIterator = pipTimeStamps.iterator();
        Long lastPiPTimeStamp = pipTimeStampIterator.next(); // first existing pip
        while (pipTimeStampIterator.hasNext()) {
            // address a subSequence between two existing pips (pipTimeStampsSorted)
            Long nextPiPTimeStamp = pipTimeStampIterator.next();
            // calculate reference gradient and xAxisIntercept
            double gradient = (data.getValue(nextPiPTimeStamp, false) - data.getValue(lastPiPTimeStamp, false))
                    / (nextPiPTimeStamp - lastPiPTimeStamp);
            double xAxisIntercept = data.getValue(nextPiPTimeStamp, false) - (nextPiPTimeStamp * gradient);
            // identify the most applicable point within the particular interval
            while (timeSeriesIterator.hasNext()) {
                Long timeStamp = timeSeriesIterator.next();
                if (timeStamp >= nextPiPTimeStamp)
                    break;
                else {
                    double dist = Math.abs(gradient * timeStamp + xAxisIntercept - data.getValue(timeStamp, false));
                    // avoid insert/delete for weak candidates
                    if (ranking.size() == rankCount && ranking.getFirst().getKey() > dist)
                        continue;
                    ranking.add(new EntryWithComparableKey<Double, Long>(dist, timeStamp));
                    // stick to the maximum length defined with rankCount
                    if (ranking.size() > rankCount)
                        ranking.removeFirst();
                }
            }
            lastPiPTimeStamp = nextPiPTimeStamp;
        }
        return ranking;
    }

    /**
     * calculates the interestingness value of every remaining time stamp to be the
     * next pip.
     *
     * @param data          the time series under reduction
     * @param pipTimeStamps time stamps of the pips selected so far (sorted)
     * @return unbounded ranking (rankCount defaults to the series length)
     */
    public static Ranking<EntryWithComparableKey<Double, Long>> calculateNextPipCandidates(ITimeSeriesUnivariate data,
            SortedSet<Long> pipTimeStamps) {
        return calculateNextPipCandidates(data, pipTimeStamps, data.size());
    }

    @Override
    public List<IDataProcessor<ITimeSeriesUnivariate>> getAlternativeParameterizations(int count) {
        List<Integer> integers = ParameterSupportTools.getAlternativeIntegers(pipCount, count);
        List<IDataProcessor<ITimeSeriesUnivariate>> processors = new ArrayList<>();
        // Constructor rejects values < 2, so filter them out here.
        for (Integer i : integers)
            if (i >= 2)
                processors.add(new PerceptuallyImportantPoints(i));
        return processors;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.metadata.Partition;
import com.facebook.presto.metadata.PartitionResult;
import com.facebook.presto.split.SampledSplitSource;
import com.facebook.presto.split.SplitManager;
import com.facebook.presto.split.SplitSource;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableCommitNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.google.common.collect.ImmutableList;
import javax.inject.Inject;
import java.util.List;
import java.util.Optional;
import static com.google.common.base.Preconditions.checkNotNull;
public class DistributedExecutionPlanner
{
private final SplitManager splitManager;
@Inject
public DistributedExecutionPlanner(SplitManager splitManager)
{
    // Fail fast on misconfiguration rather than failing later during planning.
    this.splitManager = checkNotNull(splitManager, "splitManager is null");
}
/**
 * Recursively turns a SubPlan tree into a StageExecutionPlan tree,
 * attaching the (lazily evaluated) split source of each fragment.
 */
public StageExecutionPlan plan(SubPlan root)
{
    PlanFragment fragment = root.getFragment();

    // Splits for this fragment; this is lazy, so no split assignment happens here.
    Optional<SplitSource> splitSource = fragment.getRoot().accept(new Visitor(), null);

    // Plan every child sub-plan into a dependency stage.
    ImmutableList.Builder<StageExecutionPlan> childStages = ImmutableList.builder();
    for (SubPlan child : root.getChildren()) {
        childStages.add(plan(child));
    }

    return new StageExecutionPlan(fragment, splitSource, childStages.build());
}
private final class Visitor
extends PlanVisitor<Void, Optional<SplitSource>>
{
@Override
public Optional<SplitSource> visitTableScan(TableScanNode node, Void context)
{
    // get dataSource for table
    SplitSource splitSource = splitManager.getPartitionSplits(node.getTable(), getPartitions(node));
    return Optional.of(splitSource);
}
// Returns the partitions to scan: the optimizer-generated set when present,
// otherwise every partition of the table (unfiltered lookup).
private List<Partition> getPartitions(TableScanNode node)
{
    if (!node.getGeneratedPartitions().isPresent()) {
        PartitionResult allPartitions = splitManager.getPartitions(node.getTable(), Optional.empty());
        return allPartitions.getPartitions();
    }
    return node.getGeneratedPartitions().get().getPartitions();
}
@Override
public Optional<SplitSource> visitJoin(JoinNode node, Void context)
{
    Optional<SplitSource> left = node.getLeft().accept(this, context);
    Optional<SplitSource> right = node.getRight().accept(this, context);

    // At most one side of the join may carry splits.
    if (left.isPresent()) {
        if (right.isPresent()) {
            throw new IllegalArgumentException("Both left and right join nodes are partitioned"); // TODO: "partitioned" may not be the right term
        }
        return left;
    }
    return right;
}
@Override
public Optional<SplitSource> visitSemiJoin(SemiJoinNode node, Void context)
{
    // Mirrors visitJoin: only one input of the semi join may carry splits.
    Optional<SplitSource> sourceSplits = node.getSource().accept(this, context);
    Optional<SplitSource> filteringSourceSplits = node.getFilteringSource().accept(this, context);
    if (sourceSplits.isPresent() && filteringSourceSplits.isPresent()) {
        throw new IllegalArgumentException("Both source and filteringSource semi join nodes are partitioned"); // TODO: "partitioned" may not be the right term
    }
    return sourceSplits.isPresent() ? sourceSplits : filteringSourceSplits;
}
@Override
public Optional<SplitSource> visitIndexJoin(IndexJoinNode node, Void context)
{
return node.getProbeSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitRemoteSource(RemoteSourceNode node, Void context)
{
// remote source node does not have splits
return Optional.empty();
}
@Override
public Optional<SplitSource> visitValues(ValuesNode node, Void context)
{
// values node does not have splits
return Optional.empty();
}
@Override
public Optional<SplitSource> visitFilter(FilterNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitSample(SampleNode node, Void context)
{
switch (node.getSampleType()) {
case BERNOULLI:
case POISSONIZED:
return node.getSource().accept(this, context);
case SYSTEM:
Optional<SplitSource> nodeSplits = node.getSource().accept(this, context);
if (nodeSplits.isPresent()) {
SplitSource sampledSplitSource = new SampledSplitSource(nodeSplits.get(), node.getSampleRatio());
return Optional.of(sampledSplitSource);
}
// table sampling on a sub query without splits is meaningless
return nodeSplits;
default:
throw new UnsupportedOperationException("Sampling is not supported for type " + node.getSampleType());
}
}
@Override
public Optional<SplitSource> visitAggregation(AggregationNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitMarkDistinct(MarkDistinctNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitWindow(WindowNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitRowNumber(RowNumberNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitTopNRowNumber(TopNRowNumberNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitProject(ProjectNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitUnnest(UnnestNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitTopN(TopNNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitOutput(OutputNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitLimit(LimitNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitDistinctLimit(DistinctLimitNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitSort(SortNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitTableWriter(TableWriterNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitTableCommit(TableCommitNode node, Void context)
{
return node.getSource().accept(this, context);
}
@Override
public Optional<SplitSource> visitUnion(UnionNode node, Void context)
{
Optional<SplitSource> result = Optional.empty();
for (PlanNode child : node.getSources()) {
Optional<SplitSource> source = child.accept(this, context);
if (result.isPresent() && source.isPresent()) {
throw new IllegalArgumentException("Multiple children are source-distributed");
}
if (source.isPresent()) {
result = source;
}
}
return result;
}
@Override
protected Optional<SplitSource> visitPlan(PlanNode node, Void context)
{
throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName());
}
}
}
| |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.sqoop;
import org.junit.Test;
import org.pentaho.di.job.ArgumentWrapper;
import org.pentaho.di.job.JobEntryMode;
import org.pentaho.di.job.entries.helper.PersistentPropertyChangeListener;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.ui.xul.util.AbstractModelList;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import static org.junit.Assert.*;
/**
* Test the SqoopConfig functionality not exercised by {@link PropertyFiringObjectTests}
*/
/**
 * Tests for {@code SqoopConfig} behavior not covered by the generic
 * property-firing tests: listener registration, argument lists, cloning,
 * connection-info copying, and mode handling.
 */
public class SqoopConfigTest {

  /** Number of entries expected from {@code getAdvancedArgumentsList()}. */
  private static final int EXPECTED_ADVANCED_ARG_COUNT = 33;

  /** Creates a concrete instance of the abstract {@code SqoopConfig}. */
  private SqoopConfig newConfig() {
    return new SqoopConfig() {
    };
  }

  @Test
  public void addRemovePropertyChangeListener() {
    SqoopConfig config = newConfig();
    PersistentPropertyChangeListener listener = new PersistentPropertyChangeListener();

    // while registered, each property change produces one event
    config.addPropertyChangeListener(listener);
    config.setJobEntryName("test");
    assertEquals(1, listener.getReceivedEvents().size());

    // after removal, further changes are not delivered
    config.removePropertyChangeListener(listener);
    config.setJobEntryName("test1");
    assertEquals(1, listener.getReceivedEvents().size());
  }

  @Test
  public void addRemovePropertyChangeListener_propertyName() {
    SqoopConfig config = newConfig();
    PersistentPropertyChangeListener listener = new PersistentPropertyChangeListener();

    // listening on the wrong property name: no events
    config.addPropertyChangeListener("test", listener);
    config.setJobEntryName("test");
    assertEquals(0, listener.getReceivedEvents().size());
    config.removePropertyChangeListener("test", listener);

    // listening on the right property name: one event per change
    config.addPropertyChangeListener(SqoopConfig.JOB_ENTRY_NAME, listener);
    config.setJobEntryName("test1");
    assertEquals(1, listener.getReceivedEvents().size());

    // removed again: no further events
    config.removePropertyChangeListener(SqoopConfig.JOB_ENTRY_NAME, listener);
    config.setJobEntryName("test2");
    assertEquals(1, listener.getReceivedEvents().size());
  }

  @Test
  public void getAdvancedArgumentsList() {
    SqoopConfig config = newConfig();
    AbstractModelList<ArgumentWrapper> args = config.getAdvancedArgumentsList();
    assertEquals(EXPECTED_ADVANCED_ARG_COUNT, args.size());

    PersistentPropertyChangeListener listener = new PersistentPropertyChangeListener();
    config.addPropertyChangeListener(listener);

    // every argument must be settable and readable round-trip
    String value = String.valueOf(System.currentTimeMillis());
    for (ArgumentWrapper arg : args) {
      arg.setValue(value);
      assertEquals(value, arg.getValue());
    }

    // one event per property changed
    assertEquals(EXPECTED_ADVANCED_ARG_COUNT, listener.getReceivedEvents().size());
  }

  @Test
  public void testClone() {
    SqoopConfig config = newConfig();
    config.setConnect(SqoopConfig.CONNECT);
    config.setJobEntryName(SqoopConfig.JOB_ENTRY_NAME);

    SqoopConfig clone = config.clone();
    assertEquals(config.getConnect(), clone.getConnect());
    assertEquals(config.getJobEntryName(), clone.getJobEntryName());
  }

  @Test
  public void setDatabaseConnectionInformation() {
    SqoopConfig config = newConfig();
    PersistentPropertyChangeListener listener = new PersistentPropertyChangeListener();
    config.addPropertyChangeListener(listener);

    String database = "bogus";
    String connect = "jdbc:bogus://bogus";
    String username = "bob";
    String password = "uncle";

    // setConnectionInfo must set everything silently (no events fired)
    config.setConnectionInfo(database, connect, username, password);
    assertEquals(0, listener.getReceivedEvents().size());
    assertEquals(database, config.getDatabase());
    assertEquals(connect, config.getConnect());
    assertEquals(username, config.getUsername());
    assertEquals(password, config.getPassword());
  }

  @Test
  public void numMappers() {
    SqoopConfig config = newConfig();
    String numMappers = "5";
    config.setNumMappers(numMappers);

    // locate the "num-mappers" wrapper among the advanced arguments
    ArgumentWrapper arg = null;
    for (ArgumentWrapper candidate : config.getAdvancedArgumentsList()) {
      if ("num-mappers".equals(candidate.getName())) {
        arg = candidate;
        break;
      }
    }
    assertNotNull(arg);

    // it should render as a "--num-mappers <value>" pair
    List<String> args = new ArrayList<String>();
    SqoopUtils.appendArgument(args, arg, new Variables());
    assertEquals(2, args.size());
    assertEquals("--num-mappers", args.get(0));
    assertEquals(numMappers, args.get(1));
  }

  @Test
  public void copyConnectionInfoFromAdvanced() {
    SqoopConfig config = newConfig();
    PersistentPropertyChangeListener listener = new PersistentPropertyChangeListener();
    config.addPropertyChangeListener(listener);

    String connect = "connect";
    String username = "username";
    String password = "password";

    // populate only the "advanced" side
    config.setConnectFromAdvanced(connect);
    config.setUsernameFromAdvanced(username);
    config.setPasswordFromAdvanced(password);
    assertNull(config.getConnect());
    assertNull(config.getUsername());
    assertNull(config.getPassword());

    // copying back must be silent (no events) but populate the basic side
    config.copyConnectionInfoFromAdvanced();
    assertEquals(connect, config.getConnect());
    assertEquals(username, config.getUsername());
    assertEquals(password, config.getPassword());
    assertEquals(0, listener.getReceivedEvents().size());
  }

  @Test
  public void copyConnectionInfoToAdvanced() {
    SqoopConfig config = newConfig();
    PersistentPropertyChangeListener listener = new PersistentPropertyChangeListener();
    config.addPropertyChangeListener(listener);

    String connect = "connect";
    String username = "username";
    String password = "password";

    // populate only the "basic" side
    config.setConnect(connect);
    config.setUsername(username);
    config.setPassword(password);
    assertNull(config.getConnectFromAdvanced());
    assertNull(config.getUsernameFromAdvanced());
    assertNull(config.getPasswordFromAdvanced());

    // copying forward populates the advanced side AND fires one event each
    config.copyConnectionInfoToAdvanced();
    assertEquals(connect, config.getConnectFromAdvanced());
    assertEquals(username, config.getUsernameFromAdvanced());
    assertEquals(password, config.getPasswordFromAdvanced());
    assertEquals(3, listener.getReceivedEvents().size());
    assertEquals("connect", listener.getReceivedEvents().get(0).getPropertyName());
    assertEquals("username", listener.getReceivedEvents().get(1).getPropertyName());
    assertEquals("password", listener.getReceivedEvents().get(2).getPropertyName());
  }

  @Test
  public void getModeAsEnum() {
    SqoopConfig config = newConfig();

    // unset mode defaults to QUICK_SETUP
    assertNull(config.getMode());
    assertEquals(JobEntryMode.QUICK_SETUP, config.getModeAsEnum());

    config.setMode(JobEntryMode.ADVANCED_COMMAND_LINE.name());
    assertEquals(JobEntryMode.ADVANCED_COMMAND_LINE.name(), config.getMode());
    assertEquals(JobEntryMode.ADVANCED_COMMAND_LINE, config.getModeAsEnum());
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl.engine;
import java.util.Objects;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Exchange;
import org.apache.camel.NamedNode;
import org.apache.camel.NamedRoute;
import org.apache.camel.Route;
import org.apache.camel.spi.ExchangeFormatter;
import org.apache.camel.spi.Tracer;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.PatternHelper;
import org.apache.camel.support.builder.ExpressionBuilder;
import org.apache.camel.support.processor.DefaultExchangeFormatter;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Default {@link Tracer} implementation that will log traced messages
* to the logger named <tt>org.apache.camel.Tracing</tt>.
*/
public class DefaultTracer extends ServiceSupport implements CamelContextAware, Tracer {

    private static final String TRACING_OUTPUT = "%-4.4s [%-12.12s] [%-33.33s]";

    // use a fixed logger name so its easy to spot
    private static final Logger LOG = LoggerFactory.getLogger("org.apache.camel.Tracing");

    private CamelContext camelContext;
    private boolean enabled = true;
    private long traceCounter;
    private ExchangeFormatter exchangeFormatter;
    private String tracePattern;
    // derived from tracePattern; rebuilt on every setTracePattern call
    private transient String[] patterns;
    private boolean traceBeforeAndAfterRoute = true;

    /**
     * Creates the tracer with a compact single-line default exchange format
     * (exchange id shown; pattern, headers and multiline output disabled).
     */
    public DefaultTracer() {
        DefaultExchangeFormatter formatter = new DefaultExchangeFormatter();
        formatter.setShowExchangeId(true);
        formatter.setShowExchangePattern(false);
        formatter.setMultiline(false);
        formatter.setShowHeaders(false);
        formatter.setStyle(DefaultExchangeFormatter.OutputStyle.Default);
        setExchangeFormatter(formatter);
    }

    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void traceBeforeNode(NamedNode node, Exchange exchange) {
        if (shouldTrace(node)) {
            traceCounter++;
            String routeId = ExpressionBuilder.routeIdExpression().evaluate(exchange, String.class);
            // we need to avoid leak the sensible information here
            // the sanitizeUri takes a very long time for very long string and the format cuts this to
            // 33 characters, anyway. Cut this to 50 characters. This will give enough space for removing
            // characters in the sanitizeUri method and will be reasonably fast
            String label = URISupport.sanitizeUri(StringHelper.limitLength(node.getLabel(), 50));

            StringBuilder sb = new StringBuilder();
            sb.append(String.format(TRACING_OUTPUT, " ", routeId, label));
            sb.append(" ");
            String data = exchangeFormatter.format(exchange);
            sb.append(data);
            String out = sb.toString();
            dumpTrace(out);
        }
    }

    @Override
    public void traceAfterNode(NamedNode node, Exchange exchange) {
        // noop
    }

    @Override
    public void traceBeforeRoute(NamedRoute route, Exchange exchange) {
        if (!traceBeforeAndAfterRoute) {
            return;
        }
        // we need to avoid leak the sensible information here
        // the sanitizeUri takes a very long time for very long string and the format cuts this to
        // 33 characters, anyway. Cut this to 50 characters. This will give enough space for removing
        // characters in the sanitizeUri method and will be reasonably fast
        String uri = route.getEndpointUrl();
        // FIX: the closing "]" must be appended AFTER sanitizing, not passed
        // into sanitizeUri as part of the uri
        String label = "from[" + URISupport.sanitizeUri(StringHelper.limitLength(uri, 50)) + "]";

        // the arrow has a * if its a new exchange that is starting
        boolean original = route.getRouteId().equals(exchange.getFromRouteId());
        String arrow = original ? "*-->" : "--->";

        StringBuilder sb = new StringBuilder();
        sb.append(String.format(TRACING_OUTPUT, arrow, route.getRouteId(), label));
        sb.append(" ");
        String data = exchangeFormatter.format(exchange);
        sb.append(data);
        String out = sb.toString();
        // route through dumpTrace (like traceAfterRoute) so subclasses that
        // override dumpTrace also see route-level traces
        dumpTrace(out);
    }

    @Override
    public void traceAfterRoute(Route route, Exchange exchange) {
        if (!traceBeforeAndAfterRoute) {
            return;
        }
        // we need to avoid leak the sensible information here
        // the sanitizeUri takes a very long time for very long string and the format cuts this to
        // 33 characters, anyway. Cut this to 50 characters. This will give enough space for removing
        // characters in the sanitizeUri method and will be reasonably fast
        String uri = route.getConsumer().getEndpoint().getEndpointUri();
        // FIX: append the closing "]" after sanitizing (see traceBeforeRoute)
        String label = "from[" + URISupport.sanitizeUri(StringHelper.limitLength(uri, 50)) + "]";

        // the arrow has a * if its an exchange that is done
        boolean original = route.getId().equals(exchange.getFromRouteId());
        String arrow = original ? "*<--" : "<---";

        StringBuilder sb = new StringBuilder();
        sb.append(String.format(TRACING_OUTPUT, arrow, route.getId(), label));
        sb.append(" ");
        String data = exchangeFormatter.format(exchange);
        sb.append(data);
        String out = sb.toString();
        dumpTrace(out);
    }

    @Override
    public boolean shouldTrace(NamedNode definition) {
        if (!enabled) {
            return false;
        }
        boolean pattern = true;
        if (patterns != null) {
            pattern = shouldTracePattern(definition);
        }
        if (LOG.isTraceEnabled()) {
            LOG.trace("Should trace evaluated {} -> pattern: {}", definition.getId(), pattern);
        }
        return pattern;
    }

    @Override
    public long getTraceCounter() {
        return traceCounter;
    }

    @Override
    public void resetTraceCounter() {
        traceCounter = 0;
    }

    @Override
    public boolean isEnabled() {
        return enabled;
    }

    @Override
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    @Override
    public String getTracePattern() {
        return tracePattern;
    }

    @Override
    public void setTracePattern(String tracePattern) {
        this.tracePattern = tracePattern;
        if (tracePattern != null) {
            // the pattern can have multiple nodes separated by comma
            this.patterns = tracePattern.split(",");
        } else {
            this.patterns = null;
        }
    }

    @Override
    public boolean isTraceBeforeAndAfterRoute() {
        return traceBeforeAndAfterRoute;
    }

    @Override
    public void setTraceBeforeAndAfterRoute(boolean traceBeforeAndAfterRoute) {
        this.traceBeforeAndAfterRoute = traceBeforeAndAfterRoute;
    }

    @Override
    public ExchangeFormatter getExchangeFormatter() {
        return exchangeFormatter;
    }

    @Override
    public void setExchangeFormatter(ExchangeFormatter exchangeFormatter) {
        this.exchangeFormatter = exchangeFormatter;
    }

    /**
     * Emits a formatted trace line. Subclasses may override to redirect
     * trace output elsewhere than the fixed "org.apache.camel.Tracing" logger.
     */
    protected void dumpTrace(String out) {
        LOG.info(out);
    }

    /**
     * Returns whether the node matches any configured trace pattern,
     * matching against both the node id and its enclosing route id.
     */
    protected boolean shouldTracePattern(NamedNode definition) {
        for (String pattern : patterns) {
            // match either route id, or node id
            String id = definition.getId();
            // use matchPattern method from endpoint helper that has a good matcher we use in Camel
            if (PatternHelper.matchPattern(id, pattern)) {
                return true;
            }
            String routeId = CamelContextHelper.getRouteId(definition);
            if (routeId != null && !Objects.equals(routeId, id)) {
                if (PatternHelper.matchPattern(routeId, pattern)) {
                    return true;
                }
            }
        }
        // not matched the pattern
        return false;
    }

    @Override
    protected void doStart() throws Exception {
        // noop
    }

    @Override
    protected void doStop() throws Exception {
        // noop
    }
}
| |
/*******************************************************************************
* (c) Copyright IBM Corporation 2017.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package wasdev.sample.adapter.helloworld;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
/**
 * Simple {@code HelloWorldIndexedRecord} implementation: the record name and
 * short description are plain fields, and every {@link List} operation is
 * delegated to an internal {@link ArrayList}. Cloning is not supported.
 */
public class HelloWorldIndexedRecordImpl implements HelloWorldIndexedRecord {

    private static final long serialVersionUID = -2866469109320308474L;

    // backing store for all List operations (raw type: dictated by the interface)
    private ArrayList delegate = new ArrayList();

    // record metadata, see Record#getRecordName / Record#getRecordShortDescription
    private String name;
    private String description;

    /** Creates an empty record with no name or description. */
    public HelloWorldIndexedRecordImpl() {
        super();
    }

    public String getRecordName() {
        return name;
    }

    public void setRecordName(String name) {
        this.name = name;
    }

    public void setRecordShortDescription(String description) {
        this.description = description;
    }

    public String getRecordShortDescription() {
        return description;
    }

    // ---- List delegation -------------------------------------------------

    public int size() {
        return delegate.size();
    }

    public boolean isEmpty() {
        return delegate.isEmpty();
    }

    public boolean contains(Object o) {
        return delegate.contains(o);
    }

    public Iterator iterator() {
        return delegate.iterator();
    }

    public Object[] toArray() {
        return delegate.toArray();
    }

    public Object[] toArray(Object[] a) {
        return delegate.toArray(a);
    }

    public boolean add(Object o) {
        return delegate.add(o);
    }

    public boolean remove(Object o) {
        return delegate.remove(o);
    }

    public boolean containsAll(Collection c) {
        return delegate.containsAll(c);
    }

    public boolean addAll(Collection c) {
        return delegate.addAll(c);
    }

    public boolean addAll(int index, Collection c) {
        return delegate.addAll(index, c);
    }

    public boolean removeAll(Collection c) {
        return delegate.removeAll(c);
    }

    public boolean retainAll(Collection c) {
        return delegate.retainAll(c);
    }

    public void clear() {
        delegate.clear();
    }

    public Object get(int index) {
        return delegate.get(index);
    }

    public Object set(int index, Object o) {
        return delegate.set(index, o);
    }

    public void add(int index, Object o) {
        delegate.add(index, o);
    }

    public Object remove(int index) {
        return delegate.remove(index);
    }

    public int indexOf(Object o) {
        return delegate.indexOf(o);
    }

    public int lastIndexOf(Object o) {
        return delegate.lastIndexOf(o);
    }

    public ListIterator listIterator() {
        return delegate.listIterator();
    }

    public ListIterator listIterator(int index) {
        return delegate.listIterator(index);
    }

    public List subList(int fromIndex, int toIndex) {
        return delegate.subList(fromIndex, toIndex);
    }

    /**
     * This record type cannot be cloned.
     *
     * @throws CloneNotSupportedException always
     */
    public Object clone() throws CloneNotSupportedException {
        throw new CloneNotSupportedException();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.usecases;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import javax.jms.Connection;
import javax.jms.Destination;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Session;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.JmsTestSupport;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.TransportConnector;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ActiveMQQueue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
@RunWith(BlockJUnit4ClassRunner.class)
public class MessageGroupLateArrivalsTest {
    public static final Logger log = LoggerFactory.getLogger(MessageGroupLateArrivalsTest.class);

    protected Connection connection;
    protected Session session;
    protected MessageProducer producer;
    protected Destination destination;

    BrokerService broker;
    protected TransportConnector connector;

    // per-worker tallies, filled in by the Worker threads
    protected HashMap<String, Integer> messageCount = new HashMap<String, Integer>();
    protected HashMap<String, Set<String>> messageGroups = new HashMap<String, Set<String>>();

    @Before
    public void setUp() throws Exception {
        broker = createBroker();
        broker.start();
        ActiveMQConnectionFactory connFactory = new ActiveMQConnectionFactory(connector.getConnectUri() + "?jms.prefetchPolicy.all=1000");
        connection = connFactory.createConnection();
        session = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
        destination = new ActiveMQQueue("test-queue2");
        producer = session.createProducer(destination);
        connection.start();
    }

    protected BrokerService createBroker() throws Exception {
        BrokerService service = new BrokerService();
        service.setPersistent(false);
        service.setUseJmx(false);

        PolicyMap policyMap = new PolicyMap();
        PolicyEntry policy = new PolicyEntry();
        policy.setUseConsumerPriority(true);
        policyMap.setDefaultEntry(policy);
        service.setDestinationPolicy(policyMap);

        connector = service.addConnector("tcp://localhost:0");
        return service;
    }

    @After
    public void tearDown() throws Exception {
        producer.close();
        session.close();
        connection.close();
        broker.stop();
    }

    @Test(timeout = 30 * 1000)
    public void testConsumersLateToThePartyGetSomeNewGroups() throws Exception {
        final int perBatch = 3;
        int[] counters = {perBatch, perBatch, perBatch};

        CountDownLatch startSignal = new CountDownLatch(0);
        CountDownLatch doneSignal = new CountDownLatch(3);
        CountDownLatch worker1Started = new CountDownLatch(1);
        CountDownLatch worker2Started = new CountDownLatch(1);
        CountDownLatch worker3Started = new CountDownLatch(1);

        messageCount.put("worker1", 0);
        messageGroups.put("worker1", new HashSet<String>());
        Worker worker1 = new Worker(connection, destination, "worker1", startSignal, doneSignal, counters, messageCount, messageGroups, worker1Started);
        messageCount.put("worker2", 0);
        messageGroups.put("worker2", new HashSet<String>());
        Worker worker2 = new Worker(connection, destination, "worker2", startSignal, doneSignal, counters, messageCount, messageGroups, worker2Started);
        messageCount.put("worker3", 0);
        messageGroups.put("worker3", new HashSet<String>());
        Worker worker3 = new Worker(connection, destination, "worker3", startSignal, doneSignal, counters, messageCount, messageGroups, worker3Started);

        new Thread(worker1).start();
        new Thread(worker2).start();
        worker1Started.await();
        worker2Started.await();

        // groups A and B are claimed by the two workers already present
        for (int i = 0; i < perBatch; i++) {
            Message msga = session.createTextMessage("hello a");
            msga.setStringProperty("JMSXGroupID", "A");
            producer.send(msga);

            Message msgb = session.createTextMessage("hello b");
            msgb.setStringProperty("JMSXGroupID", "B");
            producer.send(msgb);
        }

        // ensure this chap, late to the party gets a new group
        new Thread(worker3).start();

        // wait for presence before new group
        worker3Started.await();

        for (int i = 0; i < perBatch; i++) {
            Message msgc = session.createTextMessage("hello c");
            msgc.setStringProperty("JMSXGroupID", "C");
            producer.send(msgc);
        }

        doneSignal.await();

        List<String> workers = new ArrayList<String>(messageCount.keySet());
        Collections.sort(workers);
        for (String worker : workers) {
            log.info("worker " + worker + " received " + messageCount.get(worker) + " messages from groups " + messageGroups.get(worker));
        }

        // each worker ends up owning exactly one group and its batch of messages
        for (String worker : workers) {
            assertEquals("worker " + worker + " received " + messageCount.get(worker) + " messages from groups " + messageGroups.get(worker), perBatch, messageCount.get(worker).intValue());
            assertEquals("worker " + worker + " received " + messageCount.get(worker) + " messages from groups " + messageGroups.get(worker), 1, messageGroups.get(worker).size());
        }
    }

    @Test(timeout = 30 * 1000)
    public void testConsumerLateToBigPartyGetsNewGroup() throws Exception {
        final int perBatch = 2;
        int[] counters = {perBatch, perBatch, perBatch};

        CountDownLatch startSignal = new CountDownLatch(0);
        CountDownLatch doneSignal = new CountDownLatch(2);
        CountDownLatch worker1Started = new CountDownLatch(1);
        CountDownLatch worker2Started = new CountDownLatch(1);

        messageCount.put("worker1", 0);
        messageGroups.put("worker1", new HashSet<String>());
        Worker worker1 = new Worker(connection, destination, "worker1", startSignal, doneSignal, counters, messageCount, messageGroups, worker1Started);
        messageCount.put("worker2", 0);
        messageGroups.put("worker2", new HashSet<String>());
        Worker worker2 = new Worker(connection, destination, "worker2", startSignal, doneSignal, counters, messageCount, messageGroups, worker2Started);

        new Thread(worker1).start();

        // worker1, alone on the queue, claims both groups A and B
        for (int i = 0; i < perBatch; i++) {
            Message msga = session.createTextMessage("hello c");
            msga.setStringProperty("JMSXGroupID", "A");
            producer.send(msga);

            Message msgb = session.createTextMessage("hello b");
            msgb.setStringProperty("JMSXGroupID", "B");
            producer.send(msgb);
        }

        // ensure this chap, late to the party gets a new group
        new Thread(worker2).start();

        // wait for presence before new group
        worker2Started.await();

        for (int i = 0; i < perBatch; i++) {
            Message msgc = session.createTextMessage("hello a");
            msgc.setStringProperty("JMSXGroupID", "C");
            producer.send(msgc);
        }

        doneSignal.await();

        log.info("worker1 received " + messageCount.get("worker1") + " messages from groups " + messageGroups.get("worker1"));
        assertEquals("worker1 received " + messageCount.get("worker1") + " messages from groups " + messageGroups.get("worker1"), 2 * perBatch, messageCount.get("worker1").intValue());
        assertEquals("worker1 received " + messageCount.get("worker1") + " messages from groups " + messageGroups.get("worker1"), 2, messageGroups.get("worker1").size());

        log.info("worker2 received " + messageCount.get("worker2") + " messages from groups " + messageGroups.get("worker2"));
        // FIX: this assertion previously checked worker1's count against
        // 2 * perBatch (a copy-paste slip), which made it a duplicate of the
        // worker1 assertion above. worker2 only receives the perBatch
        // messages of the late group C (it owns exactly 1 group, see below).
        assertEquals("worker2 received " + messageCount.get("worker2") + " messages from groups " + messageGroups.get("worker2"), perBatch, messageCount.get("worker2").intValue());
        assertEquals("worker2 received " + messageCount.get("worker2") + " messages from groups " + messageGroups.get("worker2"), 1, messageGroups.get("worker2").size());
    }

    /**
     * Consumer thread: receives messages, acknowledges each one, tallies the
     * count and the set of group ids it saw, and exits once the shared
     * counters for groups A, B and C all reach zero.
     */
    private static final class Worker implements Runnable {
        private Connection connection = null;
        private Destination queueName = null;
        private String workerName = null;
        private CountDownLatch startSignal = null;
        private CountDownLatch doneSignal = null;
        private CountDownLatch workerStarted = null;
        // shared across workers: remaining messages per group (A, B, C)
        private int[] counters = null;
        private final HashMap<String, Integer> messageCount;
        private final HashMap<String, Set<String>> messageGroups;

        private Worker(Connection connection,
                       Destination queueName,
                       String workerName,
                       CountDownLatch startSignal,
                       CountDownLatch doneSignal,
                       int[] counters,
                       HashMap<String, Integer> messageCount,
                       HashMap<String, Set<String>> messageGroups,
                       CountDownLatch workerStarted) {
            this.connection = connection;
            this.queueName = queueName;
            this.workerName = workerName;
            this.startSignal = startSignal;
            this.doneSignal = doneSignal;
            this.counters = counters;
            this.messageCount = messageCount;
            this.messageGroups = messageGroups;
            this.workerStarted = workerStarted;
        }

        /** Records one received message for this worker under the given group. */
        private void update(String group) {
            int msgCount = messageCount.get(workerName);
            messageCount.put(workerName, msgCount + 1);
            Set<String> groups = messageGroups.get(workerName);
            groups.add(group);
            messageGroups.put(workerName, groups);
        }

        @Override
        public void run() {
            try {
                startSignal.await();
                log.info(workerName);
                Session sess = connection.createSession(false, Session.CLIENT_ACKNOWLEDGE);
                MessageConsumer consumer = sess.createConsumer(queueName);
                workerStarted.countDown();
                while (true) {
                    if (counters[0] == 0 && counters[1] == 0 && counters[2] == 0) {
                        doneSignal.countDown();
                        log.info(workerName + " done...");
                        break;
                    }

                    Message msg = consumer.receive(500);
                    if (msg == null)
                        continue;

                    // every received message is acknowledged here; the former
                    // trailing conditional re-acknowledge was redundant
                    msg.acknowledge();

                    String group = msg.getStringProperty("JMSXGroupID");
                    msg.getBooleanProperty("JMSXGroupFirstForConsumer");

                    if ("A".equals(group)) {
                        --counters[0];
                        update(group);
                    }
                    else if ("B".equals(group)) {
                        --counters[1];
                        update(group);
                    }
                    else if ("C".equals(group)) {
                        --counters[2];
                        update(group);
                    }
                    else {
                        log.warn(workerName + ", unknown group");
                    }
                }
                consumer.close();
                sess.close();
            }
            catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
| |
/*L
* Copyright Northrop Grumman Information Technology.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/nci-report-writer/LICENSE.txt for details.
*/
package gov.nih.nci.evs.reportwriter.bean;
import gov.nih.nci.evs.reportwriter.utils.*;
import gov.nih.nci.evs.reportwriter.webapp.*;
import gov.nih.nci.evs.utils.*;
import java.io.*;
import java.util.*;
import javax.faces.event.*;
import javax.faces.model.*;
import javax.servlet.http.*;
import org.apache.log4j.*;
/**
*
*/
/**
* @author EVS Team (Kim Ong, David Yee)
* @version 1.0
*/
public class UserSessionBean extends Object {
private static Logger _logger = Logger.getLogger(UserSessionBean.class);
private TaskRequest _taskRequest = new TaskRequest();
private StandardReportTemplateManager _srtMgr =
new StandardReportTemplateManager();
private String _selectedPropertyType = null;
private String _rootConceptCode = null;
private String _selectedOntology = null;
private String _selectedReportStatus = null;
private List<SelectItem> _reportStatusList = null;
private Vector<String> _reportStatusListData = null;
private String _selectedReportFormat = null;
private List<SelectItem> _reportFormatList = null;
private Vector<String> _reportFormatListData = null;
public String getSelectedPropertyType() {
return _selectedPropertyType;
}
public void setSelectedPropertyType(String selectedPropertyType) {
_selectedPropertyType = selectedPropertyType;
HttpServletRequest request = HTTPUtils.getRequest();
request.getSession().setAttribute("selectedPropertyType",
selectedPropertyType);
}
public void propertyTypeSelectionChanged(ValueChangeEvent event) {
if (event.getNewValue() == null)
return;
String newValue = (String) event.getNewValue();
setSelectedPropertyType(newValue);
}
public String getSelectedOntology() {
return _selectedOntology;
}
public void setSelectedOntology(String selectedOntology) {
_selectedOntology = selectedOntology;
}
public List<SelectItem> getPropertyTypeList() {
List<SelectItem> list = DataUtils.getPropertyTypeList();
return list;
}
public void reportSelectionChanged(ValueChangeEvent vce) {
String newValue = (String) vce.getNewValue();
setSelectedStandardReportTemplate(newValue);
}
public void reportSelectionChanged_draft(ValueChangeEvent vce) {
String newValue = (String) vce.getNewValue();
setSelectedStandardReportTemplate_draft(newValue);
}
public void reportSelectionChanged_approved(ValueChangeEvent vce) {
String newValue = (String) vce.getNewValue();
setSelectedStandardReportTemplate_approved(newValue);
}
public void taskSelectionChanged(ValueChangeEvent event) {
if (event.getNewValue() == null)
return;
String task = (String) event.getNewValue();
setSelectedTask(task);
}
public String getRootConceptCode() {
return _rootConceptCode;
}
public void setRootConceptCode(String rootConceptCode) {
if (rootConceptCode == null)
return;
_rootConceptCode = rootConceptCode;
}
public List<SelectItem> getReportFormatList() {
_reportFormatListData = DataUtils.getReportFormatListData();
_reportFormatList = new ArrayList<SelectItem>();
for (int i = 0; i < _reportFormatListData.size(); i++) {
String t = _reportFormatListData.elementAt(i);
_reportFormatList.add(new SelectItem(t));
}
if (_reportFormatList != null && _reportFormatList.size() > 0) {
_selectedReportFormat = _reportFormatList.get(0).getLabel();
}
return _reportFormatList;
}
public void setSelectedReportFormat(String selectedReportFormat) {
_selectedReportFormat = selectedReportFormat;
HttpServletRequest request = HTTPUtils.getRequest();
request.getSession().setAttribute("selectedReportFormat",
selectedReportFormat);
}
public String getSelectedReportFormat() {
return _selectedReportFormat;
}
public void ReportFormatSelectionChanged(ValueChangeEvent event) {
if (event.getNewValue() == null)
return;
setSelectedReportFormat(_selectedReportFormat);
}
public List<SelectItem> getReportStatusList() {
_reportStatusListData = DataUtils.getReportStatusListData();
_reportStatusList = new ArrayList<SelectItem>();
for (int i = 0; i < _reportStatusListData.size(); i++) {
String t = _reportStatusListData.elementAt(i);
_reportStatusList.add(new SelectItem(t));
}
if (_reportStatusList != null && _reportStatusList.size() > 0)
_selectedReportStatus = _reportStatusList.get(0).getLabel();
return _reportStatusList;
}
public void setSelectedReportStatus(String selectedReportStatus) {
_selectedReportStatus = selectedReportStatus;
HttpServletRequest request = HTTPUtils.getRequest();
request.getSession().setAttribute("selectedReportStatus",
selectedReportStatus);
}
public String getSelectedReportStatus() {
return _selectedReportStatus;
}
public void reportStatusSelectionChanged(ValueChangeEvent event) {
if (event.getNewValue() == null)
return;
setSelectedReportStatus(_selectedReportStatus);
}
// -------------------------------------------------------------------------
private String _selectedVersion = null;
private List<SelectItem> _versionList = null;
private Vector<String> _versionListData = null;
public List<SelectItem> getVersionList(String codingschemename) {
_versionListData = DataUtils.getVersionListData(codingschemename);
_versionList = new ArrayList<SelectItem>();
for (int i = 0; i < _versionListData.size(); i++) {
String t = _versionListData.elementAt(i);
_logger.debug("version: " + t);
_versionList.add(new SelectItem(t));
}
if (_versionList != null && _versionList.size() > 0) {
_selectedVersion = _versionList.get(0).getLabel();
}
return _versionList;
}
public void setVersionList(List<SelectItem> list) {
_versionList = list;
}
public void setSelectedVersion(String selectedVersion) {
_selectedVersion = selectedVersion;
HttpServletRequest request = HTTPUtils.getRequest();
request.getSession().setAttribute("selectedVersion", selectedVersion);
}
public String getSelectedVersion() {
return _selectedVersion;
}
public void versionSelectionChanged(ValueChangeEvent event) {
if (event.getNewValue() == null)
return;
setSelectedVersion(_selectedVersion);
}
// -------------------------------------------------------------------------
public StandardReportTemplate getStandardReportTemplate(String label) {
return _srtMgr.getStandardReportTemplate(label);
}
public List<SelectItem> getStandardReportTemplateList() {
return _srtMgr.getStandardReportTemplateList();
}
public String getSelectedStandardReportTemplate() {
return _srtMgr.getSelectedStandardReportTemplate();
}
public void setSelectedStandardReportTemplate(
String selectedStandardReportTemplate) {
_srtMgr.setSelectedStandardReportTemplate(
selectedStandardReportTemplate);
}
public String getSelectedStandardReportTemplate_draft() {
return _srtMgr.getSelectedStandardReportTemplate_draft();
}
public void setSelectedStandardReportTemplate_draft(
String selectedStandardReportTemplate_draft) {
_srtMgr.setSelectedStandardReportTemplate_draft(
selectedStandardReportTemplate_draft);
}
public List<SelectItem> getStandardReportTemplateList_draft() {
return _srtMgr.getStandardReportTemplateList_draft();
}
public String getSelectedStandardReportTemplate_approved() {
return _srtMgr.getSelectedStandardReportTemplate_approved();
}
public void setSelectedStandardReportTemplate_approved(
String selectedStandardReportTemplate_draft) {
_srtMgr.setSelectedStandardReportTemplate_approved(
selectedStandardReportTemplate_draft);
}
public List<SelectItem> getStandardReportTemplateList_approved() {
return _srtMgr.getStandardReportTemplateList_approved();
}
// -------------------------------------------------------------------------
public String performTask() {
return _taskRequest.performAction();
}
public String getSelectedTask() {
return _taskRequest.getSelectedTask();
}
public void setSelectedTask(String selectedTask) {
_taskRequest.setSelectedTask(selectedTask);
}
public void changeTaskSelection(ValueChangeEvent vce) {
_taskRequest.changeTaskSelection(vce);
}
public List<SelectItem> getTaskList() {
return _taskRequest.getTaskList();
}
// -------------------------------------------------------------------------
public String addReportTemplateAction() {
return new ReportTemplateRequest().addAction();
}
public String modifyReportTemplateAction() {
return new ReportTemplateRequest().modifyAction();
}
public String saveTemplateAction() {
return new ReportTemplateRequest().saveAction();
}
public String saveModifiedTemplateAction() {
return new ReportTemplateRequest().saveModifiedAction();
}
public String selectGenerateReportOptionAction() {
return "selectGenerateReportOption";
}
public String deleteReportTemplateAction() {
return new ReportTemplateRequest().deleteAction();
}
// -------------------------------------------------------------------------
public String addColumnAction() { // Might not be called.
return new ReportColumnRequest().addAction();
}
public String modifyColumnAction() {
return new ReportColumnRequest().modifyAction();
}
public String insertBeforeColumnAction() {
return new ReportColumnRequest().insertBeforeAction();
}
public String insertAfterColumnAction() { // Might not be called.
return new ReportColumnRequest().insertAfterAction();
}
public String deleteColumnAction() {
return new ReportColumnRequest().deleteAction();
}
public String saveReportColumnAction() {
return new ReportColumnRequest()
.saveAction(_srtMgr.getSelected());
}
public String saveModifiedReportColumnAction() {
return new ReportColumnRequest()
.saveModifiedAction(_srtMgr.getSelected());
}
// -------------------------------------------------------------------------
public String editReportContentAction() {
return new ReportContentRequest()
.editAction(_srtMgr.getSelected());
}
public String generateStandardReportAction() {
return new ReportContentRequest()
.generateAction(_srtMgr.getSelected());
}
public String displayStandardReportTemplateAction() {
return "standard_report_template";
}
// -------------------------------------------------------------------------
public String addStatusAction() {
return new ReportStatusRequest().addAction();
}
public String activateStatusAction () {
return new ReportStatusRequest().activateAction();
}
public String inactivateStatusAction () {
return new ReportStatusRequest().inactivateAction();
}
public String assignStatusAction() {
return new ReportStatusRequest().assignAction();
}
public String saveStatusAction() { // Might not be called.
return new ReportStatusRequest().
saveAction(_srtMgr.getSelected());
}
// -------------------------------------------------------------------------
public String downloadReportAction() {
return new ReportDownloadRequest()
.downloadReportAction(_srtMgr.getSelected());
}
// -------------------------------------------------------------------------
public String submitAccessDenied() {
return new AccessDeniedRequest().submit();
}
public String clearAccessDenied() {
return new AccessDeniedRequest().clear();
}
public String submitContactUs() {
return new ContactUsRequest().submit();
}
public String clearContactUs() {
return new ContactUsRequest().clear();
}
public String submitUnlockAccount() {
return new UserAccountRequest().unlock();
}
public String clearUnlockAccount() {
return new UserAccountRequest().clear();
}
public String enterExcelMetadata() {
String selectedStandardReportTemplate = _srtMgr.getSelected();
System.out.println("selectedStandardReportTemplate: " + selectedStandardReportTemplate);
HttpServletRequest request = HTTPUtils.getRequest();
String author = request.getParameter("author");
System.out.println("author: " + author);
String keywords = request.getParameter("keywords");
System.out.println("keywords: " + keywords);
String title = request.getParameter("title");
System.out.println("title: " + title);
String subject = request.getParameter("subject");
System.out.println("subject: " + subject);
String worksheet = request.getParameter("worksheet");
worksheet = worksheet.trim();
System.out.println("worksheet: " + worksheet);
String frozen_rows = request.getParameter("frozen_rows");
System.out.println("frozen_rows: " + frozen_rows);
frozen_rows = frozen_rows.trim();
title = title.trim();
if (title == null || title.length() == 0) {
title = selectedStandardReportTemplate;
}
request.getSession().setAttribute("author", author);
request.getSession().setAttribute("keywords", keywords);
request.getSession().setAttribute("title", title);
request.getSession().setAttribute("subject", subject);
request.getSession().setAttribute("worksheet", worksheet);
request.getSession().setAttribute("frozen_rows", frozen_rows);
String format_description = "Microsoft Office Excel";
String hibernate_cfg_xml = request.getSession().getServletContext().getRealPath(JDBCUtil.HIBERNATE_CFG_PATH);//"/WEB-INF/classes/hibernate.cfg.xml");
File f = new File(hibernate_cfg_xml);
if (f.exists()) {
JDBCUtil util = new JDBCUtil(hibernate_cfg_xml);
int templateId = util.getTemplateId(selectedStandardReportTemplate);
System.out.println("templateId: " + templateId);
Vector<Integer> reportIds = util.getReportIds(templateId);
if (reportIds != null) {
Vector reports = util.getReportData(reportIds, format_description);
System.out.println("Number of excel files: " + reports.size());
if (reports != null) {
int success_knt = 0;
for (int i=0; i<reports.size(); i++) {
ReportMetadata mrd = (ReportMetadata) reports.elementAt(i);
String sourcefile = mrd.getPathName();
if (sourcefile != null) {
System.out.println("sourcefile: " + sourcefile);
ExcelMetadataUtils.updateMetadata(sourcefile, author, keywords, title, subject);
if (worksheet.length() > 0 && frozen_rows.length() > 0) {
int worksheet_number = Integer.parseInt(worksheet);
int frozen_rows_number = Integer.parseInt(frozen_rows);
boolean success = ExcelMetadataUtils.freezeRow(sourcefile, worksheet_number, frozen_rows_number);
if (success) {
success_knt++;
}
}
}
}
if (success_knt < reports.size()) {
String msg = "Please verify input values and try again.";
request.getSession().setAttribute("warningMsg", msg);
return "failed";
}
}
}
}
return "success";
}
}
| |
// Copyright (c) Committed Software 2018, opensource@committed.io
package uk.gov.dstl.baleen.resources.rabbitmq;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.KeyStore;
import java.util.Map;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import org.apache.uima.fit.descriptor.ConfigurationParameter;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.resource.ResourceSpecifier;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import uk.gov.dstl.baleen.exceptions.BaleenException;
import uk.gov.dstl.baleen.uima.BaleenResource;
/**
* <b>Shared resource for accessing RabbitMQ broker</b>
*
* <p>This resource removes the need for individual annotators to establish their own connections to
* RabbitMQ, instead providing a single instance Channel for Baleen that can be used. This provides
* benefits such as reduced configuration and reduced repeated code.
*
* <p>Creation of Queue, Consumer and Supplier typically require individual request/response
* interactions with the broker so are best created upfront.
*
* @baleen.javadoc
*/
public class SharedRabbitMQResource extends BaleenResource {

  /** A default resource key for this shared resource */
  public static final String RESOURCE_KEY = "rabbitmqResource";

  /**
   * The RabbitMQ broker host to connect to
   *
   * @baleen.config localhost
   */
  public static final String PARAM_HOST = "rabbitmq.host";

  @ConfigurationParameter(name = PARAM_HOST, defaultValue = "localhost")
  private String host;

  /**
   * The RabbitMQ virtual host to connect to
   *
   * @baleen.config /
   */
  public static final String PARAM_VIRTUAL_HOST = "rabbitmq.virtualHost";

  @ConfigurationParameter(name = PARAM_VIRTUAL_HOST, defaultValue = "/")
  private String virtualHost;

  /**
   * The RabbitMQ broker port to connect to
   *
   * @baleen.config 5672
   */
  public static final String PARAM_PORT = "rabbitmq.port";

  @ConfigurationParameter(name = PARAM_PORT, defaultValue = "5672")
  private int port;

  /**
   * The username to use for authentication. If left blank, then authentication will not be used.
   *
   * @baleen.config
   */
  public static final String PARAM_USER = "rabbitmq.user";

  @ConfigurationParameter(name = PARAM_USER, defaultValue = "")
  private String username;

  /**
   * The password to use for authentication. If left blank, then authentication will not be used.
   *
   * @baleen.config
   */
  public static final String PARAM_PASS = "rabbitmq.pass";

  @ConfigurationParameter(name = PARAM_PASS, defaultValue = "")
  private String password;

  /**
   * Set true to use https (tls) communication protocol
   *
   * @baleen.config false
   */
  public static final String PARAM_HTTPS = "rabbitmq.https";

  @ConfigurationParameter(name = PARAM_HTTPS, defaultValue = "false")
  private boolean useHttps;

  /**
   * Set true to use default tls and a trust all certificates. Not recommended for production use.
   *
   * @baleen.config false
   */
  public static final String PARAM_TRUSTALL = "rabbitmq.trustAll";

  @ConfigurationParameter(name = PARAM_TRUSTALL, defaultValue = "false")
  private boolean trustAll;

  /**
   * The ssl communication protocol to use (eg TLSv1.1, TLSv1.2)
   *
   * @baleen.config
   */
  public static final String PARAM_SSLPROTCOL = "rabbitmq.sslprotocol";

  // BUGFIX: this parameter was previously declared with name = PARAM_PASS,
  // so the value of rabbitmq.pass was (mis)used as the SSL protocol.
  @ConfigurationParameter(name = PARAM_SSLPROTCOL, defaultValue = "TLSv1.1")
  private String sslProtocol;

  /**
   * The keystore passphrase
   *
   * @baleen.config
   */
  public static final String PARAM_KEYSTORE_PASS = "rabbitmq.keystorePass";

  @ConfigurationParameter(name = PARAM_KEYSTORE_PASS, mandatory = false)
  private String keystorePass;

  /**
   * The path to the keystore eg "/path/to/client/keycert.p12"
   *
   * @baleen.config
   */
  public static final String PARAM_KEYSTORE_PATH = "rabbitmq.keystorePath";

  @ConfigurationParameter(name = PARAM_KEYSTORE_PATH, mandatory = false)
  private String keystorePath;

  /**
   * The truststore passphrase
   *
   * @baleen.config
   */
  public static final String PARAM_TRUSTSTORE_PASS = "rabbitmq.truststorePass";

  @ConfigurationParameter(name = PARAM_TRUSTSTORE_PASS, mandatory = false)
  private String truststorePass;

  /**
   * The path to the truststore eg "/path/to/trustStore"
   *
   * @baleen.config
   */
  public static final String PARAM_TRUSTSTORE_PATH = "rabbitmq.truststorePath";

  @ConfigurationParameter(name = PARAM_TRUSTSTORE_PATH, mandatory = false)
  private String truststorePath;

  /** Single broker connection shared by all consumers/suppliers created here. */
  private Connection connection;

  @Override
  protected boolean doInitialize(
      final ResourceSpecifier aSpecifier, final Map<String, Object> aAdditionalParams)
      throws ResourceInitializationException {
    try {
      final ConnectionFactory factory = new ConnectionFactory();
      factory.setUsername(username);
      factory.setPassword(password);
      factory.setVirtualHost(virtualHost);
      factory.setHost(host);
      factory.setPort(port);
      if (useHttps) {
        if (trustAll) {
          // Default TLS with a trust-all manager; development use only.
          factory.useSslProtocol();
        } else {
          // Build an SSLContext from the configured PKCS12 keystore and JKS truststore.
          try (FileInputStream keystoreStream = new FileInputStream(keystorePath);
              FileInputStream trustStoreStream = new FileInputStream(truststorePath); ) {
            KeyStore ks = KeyStore.getInstance("PKCS12");
            ks.load(keystoreStream, keystorePass.toCharArray());
            KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
            kmf.init(ks, keystorePass.toCharArray());
            KeyStore tks = KeyStore.getInstance("JKS");
            tks.load(trustStoreStream, truststorePass.toCharArray());
            TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
            tmf.init(tks);
            SSLContext c = SSLContext.getInstance(sslProtocol);
            c.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
            factory.useSslProtocol(c);
          }
        }
      }
      connection = factory.newConnection();
    } catch (final Exception e) {
      throw new ResourceInitializationException(
          new BaleenException("Error connecting to RabbitMQ", e));
    }
    getMonitor().info("Initialised shared RabbitMQ resource");
    return true;
  }

  /** Declares a durable direct exchange and durable queue, and binds them with the routing key. */
  private void declareRoute(
      final String exchangeName,
      final String routingKey,
      final String queueName,
      final Channel channel)
      throws IOException {
    channel.exchangeDeclare(exchangeName, "direct", true);
    channel.queueDeclare(queueName, true, false, false, null);
    channel.queueBind(queueName, exchangeName, routingKey);
  }

  /** Creates the RabbitMQ consumer */
  public RabbitMQConsumer createConsumer(
      final String exchangeName, final String routingKey, final String queueName)
      throws IOException {
    final Channel channel = connection.createChannel();
    declareRoute(exchangeName, routingKey, queueName, channel);
    return new LiveRabbitMQConsumer(getMonitor(), channel, exchangeName, routingKey, queueName);
  }

  /** Creates the RabbitMQ supplier */
  public RabbitMQSupplier createSupplier(
      final String exchangeName, final String routingKey, final String queueName)
      throws IOException {
    final Channel channel = connection.createChannel();
    declareRoute(exchangeName, routingKey, queueName, channel);
    return new LiveRabbitMQSupplier(getMonitor(), channel, queueName);
  }

  @Override
  protected void doDestroy() {
    getMonitor().debug("Disconnecting from RabbitMQ");
    try {
      // BUGFIX: connection may be null if initialisation failed before connecting.
      if (connection != null) {
        connection.close();
      }
    } catch (final Exception e) {
      getMonitor().error("Could not close connection to RabbitMQ", e);
    }
  }
}
| |
/*
* Copyright 2011 - 2013 NTB University of Applied Sciences in Technology
* Buchs, Switzerland, http://www.ntb.ch/inf
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ch.ntb.inf.deep.runtime.mpc555.driver;
import java.io.IOException;
import ch.ntb.inf.deep.runtime.mpc555.Interrupt;
import ch.ntb.inf.deep.runtime.mpc555.Kernel;
import ch.ntb.inf.deep.runtime.util.ByteFifo;
import ch.ntb.inf.deep.unsafe.US;
/**
* <p>Interrupt controlled driver for the <i>Serial Communication Interface 1</i> or
* the <i>Serial Communication Interface 2</i> of the Freescale MPC555.</p>
* <p><b>Remember:</b><br>
* Depending on the baudrate configured, the effective baudrate can be different.
* This may cause miss interpretation of the bytes sent at the receiver! For more
* details, please consider table 14-29 in chapter 14.8.7.3 in the <a href=
* "http://www.ntb.ch/infoportal/_media/embedded_systems:mpc555:mpc555_usermanual.pdf"
* >MPC555 User's manual</a>.
* </p>
*/
/* Changes:
* 3.6.2014 Urs Graf exception handling added
* 13.10.2011 NTB/Martin Zueger reset() implemented, JavaDoc fixed
* 08.03.2011 NTB/Urs Graf ported to deep
*/
public class SCI extends Interrupt {
public static final int pSCI1 = 0;
public static final int pSCI2 = 1;
public static final byte NO_PARITY = 0, ODD_PARITY = 1, EVEN_PARITY = 2;
// Driver states
public static final int PORT_OPEN = 9, TX_EMPTY = 8, TX_COMPLETE = 7,
RX_RDY = 6, RX_ACTIVE = 5;
// Error states
public static final int IDLE_LINE_DET = 4, OVERRUN_ERR = 3, NOISE_ERR = 2,
FRAME_ERR = 1, PARITY_ERR = 0, LENGTH_NEG_ERR = -1,
OFFSET_NEG_ERR = -2, NULL_POINTER_ERR = -3;
public static final int QUEUE_LEN = 2047;
public static final int CLOCK = Kernel.clockFrequency;
/**
* Output stream to write to this <i>Serial Communication Interface</i>..
*/
public SCIOutputStream out;
/**
* Input stream to read from this <i>Serial Communication Interface</i>..
*/
public SCIInputStream in;
private short portStat; // just for saving flag portOpen
private short sccr1; // content of SCCxR1
private int diff; // used to access register interface for SCI1 or SCI2
private int currentBaudRate = 9600;
private short currentParity = NO_PARITY;
private short currentDataBits = 8;
/*
* rxQueue: the receive queue, head points to the front item, tail to tail
* item plus 1: head=tail -> empty q head is moved by the interrupt proc
*/
private ByteFifo rxQueue;
/*
* txQueue: the transmit queue, head points to the front item, tail to tail
* item plus 1: head=tail -> empty q head is moved by the interrupt proc,
* tail is moved by the send primitives called by the application
*/
private ByteFifo txQueue;
private boolean txDone;
private static SCI sci1, sci2;
// @SuppressWarnings("unused")
// private int intCtr; // for debugging purposes
/**
*
* @param sciNr
* @return
*/
public static SCI getInstance(int sciNr) {
if (sciNr == pSCI1) {
if (sci1 == null) sci1 = new SCI(0);
return sci1;
} else if (sciNr == pSCI2) {
if (sci2 == null) {
sci2 = new SCI(SCC2R0 - SCC1R0);
}
return sci2;
} else return null;
}
private SCI(int regDiff) {
diff = regDiff;
out = new SCIOutputStream(this);
in = new SCIInputStream(this);
QSMCM.init();
rxQueue = new ByteFifo(QUEUE_LEN);
txQueue = new ByteFifo(QUEUE_LEN);
enableRegAdr = QSMCM.SCC1R1 + diff;
enBitMask = (1 << QSMCM.scc1r1TIE) | (1 << QSMCM.scc1r1RIE);
flagRegAdr = QSMCM.SC1SR + diff;
flagMask = (1 << QSMCM.sc1srTDRE) | (1 << QSMCM.sc1srRDRF);
Interrupt.install(this, 5, true);
}
/* (non-Javadoc)
* @see ch.ntb.inf.deep.runtime.mpc555.Interrupt#action()
*/
public void action() {
// intCtr++;
if ((US.GET2(flagRegAdr) & (1 << QSMCM.sc1srRDRF)) != 0) {
short word = US.GET2(QSMCM.SC1DR + diff);
rxQueue.enqueue((byte) word);
} else {
if (txQueue.availToRead() > 0) {
int d = 0;
try {
d = txQueue.dequeue();
} catch (IOException e) {}
US.PUT2(QSMCM.SC1DR + diff, d);
} else {
txDone = true;
sccr1 &= ~(1 << QSMCM.scc1r1TIE);
US.PUT2(QSMCM.SCC1R1 + diff, sccr1);
}
}
}
private void startTransmission() {
if (txDone && (txQueue.availToRead() > 0)) {
txDone = false;
try {
US.PUT2(QSMCM.SC1DR + diff, txQueue.dequeue());
} catch (IOException e) {}
sccr1 |= (1 << QSMCM.scc1r1TIE);
US.PUT2(QSMCM.SCC1R1 + diff, sccr1);
}
}
/**
* Clear the receive buffer.
*/
public void clearReceiveBuffer() {
rxQueue.clear();
}
/**
* Clear the transmit buffer.
*/
public void clearTransmitBuffer() {
sccr1 &= ~(1 << QSMCM.scc1r1TIE);
US.PUT2(QSMCM.SCC1R1 + diff, sccr1);
txQueue.clear();
txDone = true;
}
/**
* Clear the receive and transmit buffers.
*/
public void clear() {
clearReceiveBuffer();
clearTransmitBuffer();
}
/**
* Stop the <i>Serial Communication Interface</i>.
*/
public void stop() {
clear();
US.PUT2(QSMCM.SCC1R1 + diff, 0);
portStat = 0;
}
/**
* <p>Initialize and start the <i>Serial Communication Interface</i>.</p>
* <p>This method have to be called before using the SCI! The number of
* stop bits can't be set. There is always one stop bit!<p>
*
* @param baudRate
* The baud rate. Allowed Range: 64 to 500'000 bits/s.
* @param parity
* Parity bits configuration. Possible values: {@link #NO_PARITY},
* {@link #ODD_PARITY} or {@link #EVEN_PARITY}.
* @param data
* Number of data bits. Allowed values are 7 to 9 bits. If you
* choose 9 data bits, than is no parity bit more available!
*/
public void start(int baudRate, short parity, short data) {
stop();
currentBaudRate = baudRate;
currentParity = parity;
currentDataBits = data;
short scbr = (short) ((CLOCK / baudRate + 16) / 32);
if (scbr <= 0)
scbr = 1;
else if (scbr > 8191)
scbr = 8191;
sccr1 |= (1 << QSMCM.scc1r1TE) | (1 << QSMCM.scc1r1RE)
| (1 << QSMCM.scc1r1RIE); // Transmitter and Receiver enable
if (parity == 0) {
if (data >= 9)
sccr1 |= (1 << QSMCM.scc1r1M);
} else {
if (data >= 8)
sccr1 |= (1 << QSMCM.scc1r1M) | (1 << QSMCM.scc1r1PE);
else
sccr1 = (1 << QSMCM.scc1r1PE);
if (parity == 1)
sccr1 |= (1 << QSMCM.scc1r1PT);
}
US.PUT2(QSMCM.SCC1R0 + diff, scbr);
US.PUT2(QSMCM.SCC1R1 + diff, sccr1);
portStat |= (1 << PORT_OPEN);
US.GET2(QSMCM.SC1SR + diff); // Clear status register
}
/**
* Check the port status. Returns the port status bits.<br>
* Every bit is representing a flag (e.g. {@link #PORT_OPEN}).
*
* @return the port status bits.
*/
public short portStatus() {
return (short) (portStat | US.GET2(QSMCM.SC1SR + diff));
}
/**
* Returns the number of bytes available in the receive buffer.
*
* @return number of bytes in the receive buffer.
*/
public int availToRead() {
return rxQueue.availToRead();
}
/**
* Returns the number of free bytes available in the transmit buffer.
* It is possible, to send the returned number of bytes in one
* nonblocking transfer.
*
* @return the available free bytes in the transmit buffer.
*/
public int availToWrite() {
return txQueue.availToWrite();
}
/**
* Reads the given number of bytes from the SCI. A call of
* this method is not blocking!
*
* @param buffer
* Byte aray to write the received data.
* @param off
* Offset in the array to start writing the data.
* @param count
* Length (number of bytes) to read.
* @return the number of bytes read.
* @throws IOException
* if an error occurs while reading from this stream.
* @throws NullPointerException
* if {@code buffer} is null.
* @throws IndexOutOfBoundsException
* if {@code off < 0} or {@code count < 0}, or if
* {@code off + count} is bigger than the length of
* {@code buffer}.
*/
public int read(byte[] buffer, int off, int count) throws IOException {
int len = buffer.length;
if ((off | count) < 0 || off > len || len - off < count) {
throw new ArrayIndexOutOfBoundsException(len, off, count);
}
for (int i = 0; i < count; i++) {
buffer[off + i] = rxQueue.dequeue();
}
return len;
}
/**
* Reads the given number of bytes from the SCI. A call of
* this method is not blocking!
*
* @param buffer
* Byte array to write the received data.
* @return the number of bytes read.
* @throws IOException
* if no data available.
*/
public int read(byte[] buffer) throws IOException {
return read(buffer, 0, buffer.length);
}
/**
* Reads one byte from the SCI. A call of
* this method is not blocking!
*
* @return byte single entry in queue.
* @throws IOException
* if no byte available.
*/
public int read() throws IOException {
return rxQueue.dequeue();
}
/**
* Writes a given number of bytes into the transmit buffer.
* A call of this method is not blocking! There will only as
* many bytes written, which are free in the buffer.
*
* @param buffer
* Array of bytes to send.
* @param off
* Offset to the data which should be sent.
* @param count
* Number of bytes to send.
* @return the number of bytes written.
* @throws IOException
* if an error occurs while writing to this stream.
* @throws NullPointerException
* if {@code buffer} is null.
* @throws IndexOutOfBoundsException
* if {@code off < 0} or {@code count < 0}, or if
* {@code off + count} is bigger than the length of
* {@code buffer}.
*/
public int write(byte[] buffer, int off, int count) throws IOException{
if ((portStat & (1 << PORT_OPEN)) == 0) throw new IOException("IOException");
int len = buffer.length;
if ((off | count) < 0 || off > len || len - off < count) {
throw new ArrayIndexOutOfBoundsException(len, off, count);
}
for (int i = 0; i < count; i++) {
txQueue.enqueue(buffer[off + i]);
}
startTransmission();
return count;
}
/**
 * Writes the whole of {@code buffer} into the transmit queue.
 * A call of this method is not blocking; only as many bytes as fit
 * into the buffer are written.
 *
 * @param buffer bytes to send.
 * @return the number of bytes written.
 * @throws IOException if an error occurs while writing to this stream.
 */
public int write(byte[] buffer) throws IOException {
    // Full-array convenience overload of the ranged write.
    return this.write(buffer, 0, buffer.length);
}
/**
 * Writes a single byte into the transmit buffer. A call of this method
 * is blocking: it busy-waits until space is free in the transmit queue,
 * then enqueues the byte and starts the transmission.
 *
 * @param b
 *            Byte to write.
 * @throws IOException
 *             if the port is not open.
 */
public void write(byte b) throws IOException {
    if ((portStat & (1 << PORT_OPEN)) == 0) {
        // Was thrown with the unhelpful message "IOException".
        throw new IOException("port is not open");
    }
    // Busy-wait until the transmit queue has room; this spin-loop is the
    // documented blocking behavior of this embedded driver.
    while (txQueue.availToWrite() <= 0) {
        // spin
    }
    txQueue.enqueue(b);
    startTransmission();
}
/**
 * Resets the SCI: stops the interface and restarts it with the
 * currently configured baud rate, parity and data-bit settings.
 */
public void reset() {
    stop(); // halt the interface first so start() reinitializes cleanly
    start(currentBaudRate, currentParity, currentDataBits);
}
}
| |
package com.alexstyl.currency.ui.fragment;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import com.alexstyl.currency.Currency;
import com.alexstyl.currency.CurrencyConverter;
import com.alexstyl.currency.CurrencyHelper;
import com.alexstyl.currency.R;
import com.alexstyl.currency.ui.BaseFragment;
import com.alexstyl.currency.ui.dialog.CurrencySelectorDialog;
import com.alexstyl.currency.util.DeLog;
import java.util.Random;
/**
* The ConversionFragment displays the selected currencies and allows the user to convert from one to the other.
* <p>Created by alexstyl on 10/02/15.</p>
*/
/**
 * Displays the two selected currencies and lets the user convert a typed
 * amount from one to the other. Listens to the number pad for input, to the
 * currency-selector dialog for currency changes, and to the converter for
 * asynchronous rate updates.
 */
public class ConversionFragment extends BaseFragment implements CurrencyConverter.CurrencyChangeListener,
        CurrencySelectorDialog.OnCurrencySelectedListener, NumPadFragment.OnNumberClickedListener {
    private static final String TAG = "ConvertionFragment";
    private static final String KEY_CURRENCY_1 = "key_currency_1";
    private static final String KEY_CURRENCY_2 = "key_currency_2";
    private static final String TAG_CURRENCY_SELECT = "alexstyl:cur_select";
    private static final String KEY_SELECTED_CURRENCY_INDEX = "key_cur_btn_index";
    private static final String KEY_CURRENCY_1_VALUE = "key_value1";
    private static final String KEY_CURRENCY_2_VALUE = "key_value2";
    // Number of currencies CurrencyHelper.loadCurrencyFromIndex can serve.
    // NOTE(review): was a magic 12 scattered through onCreateView -- confirm
    // it matches the resources backing CurrencyHelper.
    private static final int CURRENCY_COUNT = 12;
    /*
     * A button that displays the first selected currency.
     * When clicked, the user can select a different currency
     */
    Button mCurrencyBtn1;
    Button mCurrencyBtn2;
    TextView mCurrencyTxt1; // value of the 'from' currency
    TextView mCurrencyTxt2; // value of the 'to' currency
    Currency mCurrency1;
    Currency mCurrency2;
    /**
     * int that stores which button was pressed, in case of configuration change
     */
    private int mCurrencyButtonPressed = -1;

    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_converter, container, false);
        mCurrencyBtn1 = (Button) view.findViewById(R.id.cur1_button_from);
        mCurrencyBtn2 = (Button) view.findViewById(R.id.cur2_button_from);
        mCurrencyTxt1 = (TextView) view.findViewById(R.id.cur1_edit_from);
        mCurrencyTxt2 = (TextView) view.findViewById(R.id.cur2_edit_from);
        if (savedInstanceState == null) {
            // First run: pick two distinct random currencies to convert.
            Random r = new Random();
            int index1 = r.nextInt(CURRENCY_COUNT);
            mCurrency1 = CurrencyHelper.loadCurrencyFromIndex(getResources(), index1);
            int index2 = r.nextInt(CURRENCY_COUNT);
            while (index1 == index2) {
                index2 = r.nextInt(CURRENCY_COUNT);
            }
            mCurrency2 = CurrencyHelper.loadCurrencyFromIndex(getResources(), index2);
            // TODO store the previously selected currency in file, so that we can resume when the app starts again
        } else {
            mCurrency1 = (Currency) savedInstanceState.getSerializable(KEY_CURRENCY_1);
            mCurrency2 = (Currency) savedInstanceState.getSerializable(KEY_CURRENCY_2);
            // Re-attach to a selector dialog that survived the config change.
            CurrencySelectorDialog dialog = (CurrencySelectorDialog) getFragmentManager().findFragmentByTag(TAG_CURRENCY_SELECT);
            if (dialog != null) {
                mCurrencyButtonPressed = savedInstanceState.getInt(KEY_SELECTED_CURRENCY_INDEX);
                dialog.setCurrencySelectedListener(this);
                dialog.setSelectedCurrency(mCurrency1.getISO(), mCurrency2.getISO());
            }
            mCurrencyTxt1.setText(String.valueOf(savedInstanceState.getDouble(KEY_CURRENCY_1_VALUE)));
            mCurrencyTxt2.setText(String.valueOf(savedInstanceState.getDouble(KEY_CURRENCY_2_VALUE)));
        }
        mCurrencyBtn1.setText(mCurrency1.getSymbol());
        mCurrencyBtn1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                CurrencySelectorDialog dialog = new CurrencySelectorDialog();
                mCurrencyButtonPressed = 0;
                dialog.setCurrencySelectedListener(ConversionFragment.this);
                dialog.setSelectedCurrency(mCurrency1.getISO(), mCurrency2.getISO());
                dialog.show(getFragmentManager(), TAG_CURRENCY_SELECT);
            }
        });
        mCurrencyBtn2.setText(mCurrency2.getSymbol());
        mCurrencyBtn2.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                CurrencySelectorDialog dialog = new CurrencySelectorDialog();
                mCurrencyButtonPressed = 1;
                dialog.setCurrencySelectedListener(ConversionFragment.this);
                dialog.setSelectedCurrency(mCurrency1.getISO(), mCurrency2.getISO());
                dialog.show(getFragmentManager(), TAG_CURRENCY_SELECT);
            }
        });
        view.findViewById(R.id.swap).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Swap from/to currencies and immediately re-convert.
                Currency temp = mCurrency1;
                mCurrency1 = mCurrency2;
                mCurrency2 = temp;
                mCurrencyBtn1.setText(mCurrency1.getSymbol());
                mCurrencyBtn2.setText(mCurrency2.getSymbol());
                mConvertButtonClickListener.onClick(v);
            }
        });
        return view;
    }

    @Override
    public void onResume() {
        super.onResume();
        CurrencyConverter.getInstance(getActivity()).addCurrencyChangeListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        // Fix: unregister in onPause to mirror the onResume registration.
        // Previously this was done in onStop, leaving a window (paused but
        // not stopped) where callbacks could still reach a non-foreground UI.
        CurrencyConverter.getInstance(getActivity()).removeCurrencyChangeListener(this);
    }

    /** Converts the 'from' amount and shows the result in the 'to' field. */
    private final View.OnClickListener mConvertButtonClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            Double value =
                    CurrencyConverter.getInstance(getActivity()).convert(
                            mCurrency1.getISO(), mCurrency2.getISO(),
                            mCurrencyTxt1.getText().toString());
            // Fix: guard against a null Double (the old `value == 0` unboxed
            // it and could NPE if the converter has no rate yet).
            if (value == null || value == 0) {
                // if the return value is zero, clear the field so that
                // the user can edit it without having to erase the zero.
                // Fix: clearComposingText() does not clear committed text;
                // setText(null) actually empties the field as intended.
                mCurrencyTxt2.setText(null);
            } else {
                String prettyDouble = CurrencyHelper.getPrettyDecimal(value);
                mCurrencyTxt2.setText(prettyDouble);
            }
        }
    };

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putSerializable(KEY_CURRENCY_1, mCurrency1);
        outState.putSerializable(KEY_CURRENCY_2, mCurrency2);
        outState.putInt(KEY_SELECTED_CURRENCY_INDEX, mCurrencyButtonPressed);
        // Fix: onCreateView restores KEY_CURRENCY_*_VALUE, but they were
        // never saved here, so the typed amounts reset to 0.0 on rotation.
        outState.putDouble(KEY_CURRENCY_1_VALUE, parseDisplayedValue(mCurrencyTxt1));
        outState.putDouble(KEY_CURRENCY_2_VALUE, parseDisplayedValue(mCurrencyTxt2));
    }

    /** Parses the field's text as a double, falling back to 0 for empty/partial input. */
    private static double parseDisplayedValue(TextView field) {
        try {
            return Double.parseDouble(field.getText().toString());
        } catch (NumberFormatException e) {
            return 0d;
        }
    }

    @Override
    public void onCurrencySelected(Currency cur) {
        Button modifiedButton;
        if (mCurrencyButtonPressed == 0) {
            mCurrency1 = cur;
            modifiedButton = mCurrencyBtn1;
        } else if (mCurrencyButtonPressed == 1) {
            mCurrency2 = cur;
            modifiedButton = mCurrencyBtn2;
        } else {
            throw new RuntimeException("Which button was pressed?");
        }
        modifiedButton.setText(cur.getSymbol());
        mConvertButtonClickListener.onClick(null);
        mCurrencyButtonPressed = -1;
    }

    @Override
    public void onCurrencyChanged(String from, String to, Double value) {
        DeLog.v(TAG, "Currency Updated! " + from + " -> " + to);
        // we can refresh the UI, but that would seem weird to the UI, as if the app is glitching
        // since the currency are being updated asynchronously
    }

    @Override
    public boolean onNumberClicked(View view, int value) {
        DeLog.v(TAG, "onNumberClicked: " + value);
        mCurrencyTxt1.append(String.valueOf(value));
        return true;
    }

    @Override
    public boolean onSymbolClicked(View view, int symbol) {
        DeLog.v(TAG, "onSymbolClicked: " + symbol);
        switch (symbol) {
            case NumPadFragment.SYMBOL_COMMA:
                mCurrencyTxt1.append(".");
                return true;
            case NumPadFragment.SYMBOL_EQUALS:
                mConvertButtonClickListener.onClick(view);
                // don't consume the event
                return false;
            case NumPadFragment.SYMBOL_BACKSPACE:
                onBackspacePressed();
                return true;
            case NumPadFragment.SYMBOL_CLEAR_ALL:
                onClearAllPressed();
                return true;
            default:
                //
        }
        return false;
    }

    /** Empties both amount fields. */
    private void onClearAllPressed() {
        mCurrencyTxt1.setText(null);
        mCurrencyTxt2.setText(null);
    }

    /** Removes the last character of the 'from' amount, if any. */
    private void onBackspacePressed() {
        String text = mCurrencyTxt1.getText().toString();
        if (TextUtils.isEmpty(text)) {
            // nothing to do here
            return;
        }
        mCurrencyTxt1.setText(text.substring(0, text.length() - 1));
    }

    /** @return the 'from' amount as text, "0" when the field is empty. */
    public String getFromValue() {
        String from = mCurrencyTxt1.getText().toString();
        if (TextUtils.isEmpty(from)) {
            return "0";
        }
        return from;
    }

    /** @return ISO code of the 'from' currency. */
    public String getFrom() {
        return mCurrency1.getISO();
    }

    /** @return ISO code of the 'to' currency. */
    public String getTo() {
        return mCurrency2.getISO();
    }

    /** @return the 'to' amount as text, "0" when the field is empty. */
    public String getToValue() {
        String from = mCurrencyTxt2.getText().toString();
        if (TextUtils.isEmpty(from)) {
            return "0";
        }
        return from;
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.UpdateSecurityGroupRuleDescriptionsIngressRequestMarshaller;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Request object for the EC2 {@code UpdateSecurityGroupRuleDescriptionsIngress} operation:
 * updates the descriptions of the specified ingress (inbound) security group rules.
 * Identify the group by ID or (EC2-Classic / default VPC only) by name.
 * <p>
 * Code-generator produced class; do not hand-edit logic, regenerate instead.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateSecurityGroupRuleDescriptionsIngressRequest extends AmazonWebServiceRequest implements Serializable, Cloneable,
        DryRunSupportedRequest<UpdateSecurityGroupRuleDescriptionsIngressRequest> {
    /**
     * <p>
     * The ID of the security group. You must specify either the security group ID or the security group name in the
     * request. For security groups in a nondefault VPC, you must specify the security group ID.
     * </p>
     */
    private String groupId;
    /**
     * <p>
     * [EC2-Classic, default VPC] The name of the security group. You must specify either the security group ID or the
     * security group name in the request.
     * </p>
     */
    private String groupName;
    /**
     * <p>
     * The IP permissions for the security group rule.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<IpPermission> ipPermissions;
    /**
     * <p>
     * The ID of the security group. You must specify either the security group ID or the security group name in the
     * request. For security groups in a nondefault VPC, you must specify the security group ID.
     * </p>
     *
     * @param groupId
     *        The ID of the security group. You must specify either the security group ID or the security group name in
     *        the request. For security groups in a nondefault VPC, you must specify the security group ID.
     */
    public void setGroupId(String groupId) {
        this.groupId = groupId;
    }
    /**
     * <p>
     * The ID of the security group. You must specify either the security group ID or the security group name in the
     * request. For security groups in a nondefault VPC, you must specify the security group ID.
     * </p>
     *
     * @return The ID of the security group. You must specify either the security group ID or the security group name in
     *         the request. For security groups in a nondefault VPC, you must specify the security group ID.
     */
    public String getGroupId() {
        return this.groupId;
    }
    /**
     * <p>
     * The ID of the security group. You must specify either the security group ID or the security group name in the
     * request. For security groups in a nondefault VPC, you must specify the security group ID.
     * </p>
     *
     * @param groupId
     *        The ID of the security group. You must specify either the security group ID or the security group name in
     *        the request. For security groups in a nondefault VPC, you must specify the security group ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSecurityGroupRuleDescriptionsIngressRequest withGroupId(String groupId) {
        setGroupId(groupId);
        return this;
    }
    /**
     * <p>
     * [EC2-Classic, default VPC] The name of the security group. You must specify either the security group ID or the
     * security group name in the request.
     * </p>
     *
     * @param groupName
     *        [EC2-Classic, default VPC] The name of the security group. You must specify either the security group ID
     *        or the security group name in the request.
     */
    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }
    /**
     * <p>
     * [EC2-Classic, default VPC] The name of the security group. You must specify either the security group ID or the
     * security group name in the request.
     * </p>
     *
     * @return [EC2-Classic, default VPC] The name of the security group. You must specify either the security group ID
     *         or the security group name in the request.
     */
    public String getGroupName() {
        return this.groupName;
    }
    /**
     * <p>
     * [EC2-Classic, default VPC] The name of the security group. You must specify either the security group ID or the
     * security group name in the request.
     * </p>
     *
     * @param groupName
     *        [EC2-Classic, default VPC] The name of the security group. You must specify either the security group ID
     *        or the security group name in the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSecurityGroupRuleDescriptionsIngressRequest withGroupName(String groupName) {
        setGroupName(groupName);
        return this;
    }
    /**
     * <p>
     * The IP permissions for the security group rule.
     * </p>
     *
     * @return The IP permissions for the security group rule.
     */
    public java.util.List<IpPermission> getIpPermissions() {
        if (ipPermissions == null) {
            ipPermissions = new com.amazonaws.internal.SdkInternalList<IpPermission>();
        }
        return ipPermissions;
    }
    /**
     * <p>
     * The IP permissions for the security group rule.
     * </p>
     *
     * @param ipPermissions
     *        The IP permissions for the security group rule.
     */
    public void setIpPermissions(java.util.Collection<IpPermission> ipPermissions) {
        if (ipPermissions == null) {
            this.ipPermissions = null;
            return;
        }
        this.ipPermissions = new com.amazonaws.internal.SdkInternalList<IpPermission>(ipPermissions);
    }
    /**
     * <p>
     * The IP permissions for the security group rule.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setIpPermissions(java.util.Collection)} or {@link #withIpPermissions(java.util.Collection)} if you want
     * to override the existing values.
     * </p>
     *
     * @param ipPermissions
     *        The IP permissions for the security group rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSecurityGroupRuleDescriptionsIngressRequest withIpPermissions(IpPermission... ipPermissions) {
        if (this.ipPermissions == null) {
            setIpPermissions(new com.amazonaws.internal.SdkInternalList<IpPermission>(ipPermissions.length));
        }
        for (IpPermission ele : ipPermissions) {
            this.ipPermissions.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The IP permissions for the security group rule.
     * </p>
     *
     * @param ipPermissions
     *        The IP permissions for the security group rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSecurityGroupRuleDescriptionsIngressRequest withIpPermissions(java.util.Collection<IpPermission> ipPermissions) {
        setIpPermissions(ipPermissions);
        return this;
    }
    /**
     * This method is intended for internal use only. Returns the marshaled request configured with additional
     * parameters to enable operation dry-run.
     */
    @Override
    public Request<UpdateSecurityGroupRuleDescriptionsIngressRequest> getDryRunRequest() {
        Request<UpdateSecurityGroupRuleDescriptionsIngressRequest> request = new UpdateSecurityGroupRuleDescriptionsIngressRequestMarshaller().marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getGroupId() != null)
            sb.append("GroupId: ").append(getGroupId()).append(",");
        if (getGroupName() != null)
            sb.append("GroupName: ").append(getGroupName()).append(",");
        if (getIpPermissions() != null)
            sb.append("IpPermissions: ").append(getIpPermissions());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof UpdateSecurityGroupRuleDescriptionsIngressRequest == false)
            return false;
        UpdateSecurityGroupRuleDescriptionsIngressRequest other = (UpdateSecurityGroupRuleDescriptionsIngressRequest) obj;
        if (other.getGroupId() == null ^ this.getGroupId() == null)
            return false;
        if (other.getGroupId() != null && other.getGroupId().equals(this.getGroupId()) == false)
            return false;
        if (other.getGroupName() == null ^ this.getGroupName() == null)
            return false;
        if (other.getGroupName() != null && other.getGroupName().equals(this.getGroupName()) == false)
            return false;
        if (other.getIpPermissions() == null ^ this.getIpPermissions() == null)
            return false;
        if (other.getIpPermissions() != null && other.getIpPermissions().equals(this.getIpPermissions()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getGroupId() == null) ? 0 : getGroupId().hashCode());
        hashCode = prime * hashCode + ((getGroupName() == null) ? 0 : getGroupName().hashCode());
        hashCode = prime * hashCode + ((getIpPermissions() == null) ? 0 : getIpPermissions().hashCode());
        return hashCode;
    }
    @Override
    public UpdateSecurityGroupRuleDescriptionsIngressRequest clone() {
        return (UpdateSecurityGroupRuleDescriptionsIngressRequest) super.clone();
    }
}
| |
package org.ovirt.engine.core.sso.utils;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.cert.CertificateFactory;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.net.ssl.TrustManagerFactory;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpHeaders;
import org.codehaus.jackson.map.DeserializationConfig.Feature;
import org.codehaus.jackson.map.ObjectMapper;
import org.ovirt.engine.api.extensions.ExtMap;
import org.ovirt.engine.api.extensions.aaa.Authn;
import org.ovirt.engine.api.extensions.aaa.Authz;
import org.ovirt.engine.core.uutils.crypto.EnvelopeEncryptDecrypt;
import org.ovirt.engine.core.uutils.crypto.EnvelopePBE;
import org.ovirt.engine.core.uutils.net.HttpURLConnectionBuilder;
import org.ovirt.engine.core.uutils.net.URLBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SSOUtils {
private static Logger log = LoggerFactory.getLogger(SSOUtils.class);
/**
 * @return true when the SSO session bound to {@code request} has
 *         reached the authenticated state.
 */
public static boolean isUserAuthenticated(HttpServletRequest request) {
    SSOSession session = getSsoSession(request);
    return SSOSession.Status.authenticated == session.getStatus();
}
/**
 * Sends the browser back to the calling module's redirect URI with the
 * session's authorization code (and, when present, the client-supplied
 * OAuth "state" value) appended, then cleans up the SSO session.
 */
public static void redirectToModule(HttpServletRequest request,
        HttpServletResponse response)
        throws IOException {
    log.debug("Entered redirectToModule");
    try {
        SSOSession session = getSsoSession(request);
        URLBuilder target = new URLBuilder(getRedirectUrl(request))
                .addParameter("code", session.getAuthorizationCode());
        String state = session.getState();
        if (StringUtils.isNotEmpty(state)) {
            target.addParameter("state", state);
        }
        response.sendRedirect(target.build());
        log.debug("Redirecting back to module: {}", target);
    } catch (Exception ex) {
        log.error("Error redirecting back to module: {}", ex.getMessage());
        log.debug("Error redirecting back to module", ex);
        throw new RuntimeException(ex);
    } finally {
        // Always drop session state, even when the redirect failed.
        SSOUtils.getSsoSession(request).cleanup();
    }
}
/**
 * @return the session's stored redirect URI, or the engine's default
 *         {@code /oauth2-callback} URL when none was recorded.
 */
public static String getRedirectUrl(HttpServletRequest request) throws Exception {
    String uri = getSsoSession(request, true).getRedirectUri();
    if (StringUtils.isEmpty(uri)) {
        return new URLBuilder(getSsoContext(request).getEngineUrl(), "/oauth2-callback").build();
    }
    return uri;
}
/**
 * Wraps an arbitrary failure in a server-error {@link OAuthException}
 * and redirects the client to the error page.
 */
public static void redirectToErrorPage(HttpServletRequest request, HttpServletResponse response, Exception ex) {
    log.error(ex.getMessage());
    log.debug("Exception in OAuthAuthorizeServlet:", ex);
    OAuthException wrapped = new OAuthException(SSOConstants.ERR_CODE_SERVER_ERROR, ex.getMessage(), ex);
    redirectToErrorPageImpl(request, response, wrapped);
}
/**
 * Logs an OAuth failure (code and message) and redirects the client to
 * the error page carrying that error.
 */
public static void redirectToErrorPage(HttpServletRequest request, HttpServletResponse response, OAuthException oauthEx) {
    log.error("OAuthException {}: {}", oauthEx.getCode(), oauthEx.getMessage());
    log.debug("OAuthException:", oauthEx);
    redirectToErrorPageImpl(request, response, oauthEx);
}
/**
 * Marks the session unauthenticated (unless already authenticated),
 * appends {@code error_code}/{@code error} parameters to the module's
 * redirect URL and sends the redirect; the session is cleaned up either way.
 */
private static void redirectToErrorPageImpl(HttpServletRequest request, HttpServletResponse response, OAuthException ex) {
    log.debug("Entered redirectToErrorPage");
    SSOSession ssoSession = null;
    try {
        ssoSession = SSOUtils.getSsoSession(request, true);
        if (ssoSession.getStatus() != SSOSession.Status.authenticated) {
            ssoSession.setStatus(SSOSession.Status.unauthenticated);
        }
        String redirectUrl = new URLBuilder(getRedirectUrl(request))
                .addParameter("error_code", ex.getCode())
                .addParameter("error", ex.getMessage()).build();
        response.sendRedirect(redirectUrl);
        log.debug("Redirecting back to module: {}", redirectUrl);
    } catch (Exception e) {
        log.error("Error redirecting to error page: {}", e.getMessage());
        log.debug("Error redirecting to error page", e);
        // Fix: propagate the redirect failure itself as the cause. The
        // original wrapped the pre-existing OAuthException `ex`, silently
        // dropping `e` and hiding why the redirect failed.
        throw new RuntimeException(e);
    } finally {
        if (ssoSession != null) {
            ssoSession.cleanup();
        }
    }
}
/**
 * Generates an opaque token (authorization code / access token): 64
 * cryptographically random bytes, URL-safe base64 encoded without
 * line breaks.
 */
public static String generateAuthorizationToken() {
    // Fix: use the platform-default SecureRandom instead of the hardcoded,
    // non-portable "SHA1PRNG" algorithm; this also removes the impossible
    // NoSuchAlgorithmException path that was wrapped in RuntimeException.
    byte[] randomBytes = new byte[64];
    new SecureRandom().nextBytes(randomBytes);
    // 0 line length + url-safe alphabet matches the original encoding.
    return new Base64(0, new byte[0], true).encodeToString(randomBytes);
}
/**
 * Serializes {@code obj} to JSON with default typing enabled for
 * non-concrete types and the {@link ExtMap} mix-in applied.
 *
 * @throws IOException if serialization fails.
 */
public static String getJson(Object obj) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.OBJECT_AND_NON_CONCRETE);
    mapper.getSerializationConfig().addMixInAnnotations(ExtMap.class, JsonExtMapMixIn.class);
    return mapper.writeValueAsString(obj);
}
/**
 * Extracts the client id and secret, preferring request parameters and
 * falling back to the Basic Authorization header when both params are
 * absent.
 *
 * @return {@code {clientId, clientSecret}}.
 * @throws OAuthException when either credential is missing.
 */
public static String[] getClientIdClientSecret(HttpServletRequest request) throws Exception {
    String[] creds = new String[] {
            getParameter(request, SSOConstants.HTTP_PARAM_CLIENT_ID),
            getParameter(request, SSOConstants.HTTP_PARAM_CLIENT_SECRET)
    };
    if (StringUtils.isEmpty(creds[0]) && StringUtils.isEmpty(creds[1])) {
        creds = getClientIdClientSecretFromHeader(request);
    }
    if (StringUtils.isEmpty(creds[0])) {
        throw new OAuthException(SSOConstants.ERR_CODE_INVALID_REQUEST, String.format(SSOConstants.ERR_CODE_INVALID_REQUEST_MSG, SSOConstants.HTTP_PARAM_CLIENT_ID));
    }
    if (StringUtils.isEmpty(creds[1])) {
        throw new OAuthException(SSOConstants.ERR_CODE_INVALID_REQUEST, String.format(SSOConstants.ERR_CODE_INVALID_REQUEST_MSG, SSOConstants.HTTP_PARAM_CLIENT_SECRET));
    }
    return creds;
}
/**
 * @return the client id from the Basic Authorization header, but only
 *         when it identifies a registered client; otherwise null.
 */
public static String getClientId(HttpServletRequest request) {
    String[] headerCreds = getClientIdClientSecretFromHeader(request);
    if (headerCreds == null || StringUtils.isEmpty(headerCreds[0])) {
        return null;
    }
    return getSsoContext(request).getClienInfo(headerCreds[0]) == null ? null : headerCreds[0];
}
/**
 * Parses a Basic Authorization header into {@code {clientId, clientSecret}}.
 * Returns a two-element array of nulls when the header is missing, not
 * Basic, or malformed.
 */
public static String[] getClientIdClientSecretFromHeader(HttpServletRequest request) {
    String[] result = new String[2];
    String header = request.getHeader(SSOConstants.HEADER_AUTHORIZATION);
    if (StringUtils.isNotEmpty(header) && header.startsWith("Basic")) {
        // decodeBase64 tolerates the leading space left after stripping "Basic".
        byte[] decoded = Base64.decodeBase64(header.substring("Basic".length()));
        String[] parts = new String(decoded, StandardCharsets.UTF_8).split(":", 2);
        if (parts.length == 2) {
            result = parts;
        }
    }
    return result;
}
/**
 * Fetches a request parameter, re-interpreting the container's
 * ISO-8859-1 decoding as UTF-8 and URL-decoding the result.
 *
 * @return the decoded parameter value, or null when absent.
 */
public static String getParameter(HttpServletRequest request, String paramName) throws UnsupportedEncodingException {
    String value = request.getParameter(paramName);
    if (value == null) {
        return null;
    }
    // Fix: the original did `new String(value.getBytes("iso-8859-1"))`,
    // re-decoding the bytes with the *platform default* charset; the
    // re-encode idiom requires an explicit UTF-8 decode to be correct
    // regardless of the JVM's file.encoding.
    String utf8Value = new String(value.getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8);
    return URLDecoder.decode(utf8Value, StandardCharsets.UTF_8.name());
}
/**
 * Fetches a mandatory request parameter.
 *
 * @throws OAuthException when the parameter is absent.
 */
public static String getRequestParameter(HttpServletRequest request, String paramName) throws Exception {
    String value = getParameter(request, paramName);
    if (value == null) {
        throw new OAuthException(SSOConstants.ERR_CODE_INVALID_REQUEST, String.format(SSOConstants.ERR_CODE_INVALID_REQUEST_MSG, paramName));
    }
    // NOTE(review): getParameter() already URL-decodes, so the value is
    // decoded a second time here -- confirm callers rely on this legacy
    // double-decode before changing it.
    return URLDecoder.decode(value, StandardCharsets.UTF_8.name());
}
/**
 * Renders all request parameters as a "name = value, " list for
 * logging; returns whatever was collected before any failure.
 */
public static String getRequestParameters(HttpServletRequest request) {
    StringBuilder formatted = new StringBuilder();
    try {
        Enumeration<String> names = request.getParameterNames();
        while (names.hasMoreElements()) {
            String name = names.nextElement();
            formatted.append(String.format("%s = %s, ", name, getRequestParameter(request, name)));
        }
    } catch (Exception ex) {
        // best effort: used for diagnostics only
        log.debug("Unable to get parameters from request");
    }
    return formatted.toString();
}
/**
 * Fetches a request parameter, returning {@code defaultValue} when the
 * parameter is absent or cannot be decoded.
 */
public static String getRequestParameter(HttpServletRequest request, String paramName, String defaultValue) {
    try {
        return getRequestParameter(request, paramName);
    } catch (Exception ex) {
        log.debug("Parameter {} not found request, using default value", paramName);
        return defaultValue;
    }
}
/**
 * Reads the "scope" request parameter (falling back to
 * {@code defaultValue}) and expands it with its configured scope
 * dependencies.
 */
public static String getScopeRequestParameter(HttpServletRequest request, String defaultValue) {
    String requestedScope = getRequestParameter(request, SSOConstants.HTTP_PARAM_SCOPE, defaultValue);
    return resolveScopeWithDependencies(getSsoContext(request), requestedScope);
}
/**
 * Expands a space-separated scope string with each scope's configured
 * dependencies, returning the union sorted and space-joined.
 */
public static String resolveScopeWithDependencies(SSOContext context, String scopes) {
    // TreeSet gives de-duplication plus a stable, sorted ordering.
    Set<String> expanded = new TreeSet<>();
    for (String scope : scopeAsList(scopes)) {
        expanded.add(scope);
        expanded.addAll(context.getScopeDependencies(scope));
    }
    return StringUtils.join(expanded, " ");
}
/**
 * @return the application-wide {@link SSOContext} stored on the servlet
 *         context of {@code request}.
 */
public static SSOContext getSsoContext(HttpServletRequest request) {
    return getSsoContext(request.getServletContext());
}
/**
 * @return the application-wide {@link SSOContext} stored as a servlet
 *         context attribute.
 */
public static SSOContext getSsoContext(ServletContext ctx) {
    Object context = ctx.getAttribute(SSOConstants.OVIRT_SSO_CONTEXT);
    return (SSOContext) context;
}
/**
 * Looks up the SSO session for {@code token} without requiring it to
 * exist and without a client-id check; may return null.
 */
public static SSOSession getSsoSessionFromRequest(HttpServletRequest request, String token) {
    return getSsoSession(request, null, token, false);
}
/**
 * Looks up the SSO session for {@code token} without a client-id check.
 *
 * @param mustExist when true, a missing session raises an OAuthException.
 */
public static SSOSession getSsoSession(HttpServletRequest request, String token, boolean mustExist) {
    return getSsoSession(request, null, token, mustExist);
}
/**
 * Looks up (and touches) the SSO session for {@code token} after
 * sweeping expired tokens, optionally verifying it belongs to
 * {@code clientId}.
 *
 * @param mustExist when true, a missing/expired session raises an
 *                  invalid-grant OAuthException.
 * @return the session, or null when absent and {@code mustExist} is false.
 */
public static SSOSession getSsoSession(HttpServletRequest request, String clientId, String token, boolean mustExist) {
    TokenCleanupUtility.cleanupExpiredTokens(request.getServletContext());
    SSOSession ssoSession = null;
    if (StringUtils.isNotEmpty(token)) {
        ssoSession = getSsoContext(request).getSsoSession(token);
        if (ssoSession != null) {
            ssoSession.touch();
        }
    }
    if (mustExist && ssoSession == null) {
        throw new OAuthException(SSOConstants.ERR_CODE_INVALID_GRANT, "The provided authorization grant for the auth code has expired");
    }
    // Fix: when mustExist is false the session may legitimately be null
    // here; the original dereferenced ssoSession.getClientId() without a
    // null check and could NPE for a non-empty clientId.
    if (ssoSession != null &&
            StringUtils.isNotEmpty(clientId) &&
            StringUtils.isNotEmpty(ssoSession.getClientId()) &&
            !ssoSession.getClientId().equals(clientId)) {
        throw new OAuthException(SSOConstants.ERR_CODE_UNAUTHORIZED_CLIENT, SSOConstants.ERR_CODE_UNAUTHORIZED_CLIENT);
    }
    return ssoSession;
}
/**
 * @return the SSO session attached to the HTTP session.
 * @throws OAuthException when there is no HTTP session or no SSO
 *         session stored on it.
 */
public static SSOSession getSsoSession(HttpServletRequest request) {
    SSOSession ssoSession = null;
    if (request.getSession(false) != null) {
        ssoSession = (SSOSession) request.getSession().getAttribute(SSOConstants.OVIRT_SSO_SESSION);
    }
    if (ssoSession == null) {
        throw new OAuthException(SSOConstants.ERR_CODE_INVALID_GRANT, "Session expired please try again.");
    }
    return ssoSession;
}
/**
 * Fetches the SSO session from the HTTP session. When {@code mustExist}
 * is true and the session is missing or lacks a client id, a session is
 * created/populated from the current request's OAuth parameters.
 * May return null when {@code mustExist} is false.
 */
public static SSOSession getSsoSession(HttpServletRequest request, boolean mustExist) throws UnsupportedEncodingException {
    SSOSession ssoSession = null;
    if (request.getSession(false) != null) {
        ssoSession = (SSOSession) request.getSession().getAttribute(SSOConstants.OVIRT_SSO_SESSION);
    }
    if (mustExist && (ssoSession == null || StringUtils.isEmpty(ssoSession.getClientId()))) {
        if (ssoSession == null) {
            ssoSession = new SSOSession();
        }
        ssoSession.setClientId(getClientId(request));
        ssoSession.setScope(getScopeRequestParameter(request, ""));
        ssoSession.setState(getRequestParameter(request, SSOConstants.HTTP_PARAM_STATE, ""));
        ssoSession.setRedirectUri(getParameter(request, SSOConstants.HTTP_PARAM_REDIRECT_URI));
    }
    return ssoSession;
}
/**
 * Parses a Basic Authorization header into user {@link Credentials}
 * (user[@profile]:password); returns null when the header is missing,
 * not Basic, or malformed.
 */
public static Credentials getUserCredentialsFromHeader(HttpServletRequest request) {
    String header = request.getHeader(SSOConstants.HEADER_AUTHORIZATION);
    Credentials credentials = null;
    // Fix: require the "Basic" scheme before stripping its prefix, matching
    // getClientIdClientSecretFromHeader. The original called substring() on
    // any non-empty header, throwing StringIndexOutOfBoundsException for
    // headers shorter than "Basic" and decoding garbage for other schemes
    // (e.g. "Bearer ...").
    if (StringUtils.isNotEmpty(header) && header.startsWith("Basic")) {
        String[] creds = new String(
                Base64.decodeBase64(header.substring("Basic".length())),
                StandardCharsets.UTF_8
        ).split(":", 2);
        if (creds.length == 2) {
            credentials = translateUser(creds[0], creds[1], getSsoContext(request));
        }
    }
    return credentials;
}
/**
 * Splits a {@code user[@profile]} login at the LAST '@' into username
 * and authentication profile, validating the profile against the
 * configured SSO profiles, and bundles it with the password.
 */
public static Credentials translateUser(String user, String password, SSOContext ssoContext) {
    Credentials credentials = new Credentials();
    int at = user.lastIndexOf("@");
    if (at == -1) {
        credentials.setUsername(user);
    } else {
        credentials.setUsername(user.substring(0, at));
        String profile = user.substring(at + 1);
        if (StringUtils.isNotEmpty(profile)) {
            credentials.setProfile(profile);
            credentials.setProfileValid(ssoContext.getSsoProfiles().contains(profile));
        }
    }
    credentials.setPassword(password);
    return credentials;
}
/**
 * @return the principal record's PRINCIPAL field, falling back to its
 *         NAME field when PRINCIPAL is null.
 */
public static String getUserId(ExtMap principalRecord) {
    String principal = principalRecord.get(Authz.PrincipalRecord.PRINCIPAL);
    if (principal != null) {
        return principal;
    }
    return principalRecord.<String>get(Authz.PrincipalRecord.NAME);
}
/**
 * Marks the current SSO session as authenticated and registers it in the
 * SSO context: generates a fresh authorization code and access token,
 * records the auth/principal data on the session, optionally stores an
 * encrypted copy of the password for the password-access scope, and
 * touches the session.
 *
 * @param request         the servlet request carrying the session; the new
 *                        access token is also set as a request attribute.
 * @param password        clear-text password; only persisted (encrypted) when
 *                        the session's scope contains ovirt-ext=token:password-access.
 * @param profileName     name of the authn profile that authenticated the user.
 * @param authRecord      extension auth record (VALID_TO is parsed from it).
 * @param principalRecord extension principal record used for the user id.
 * @return the updated, registered session.
 */
public static SSOSession persistAuthInfoInContextWithToken(HttpServletRequest request,
        String password,
        String profileName,
        ExtMap authRecord,
        ExtMap principalRecord) throws Exception {
    String validTo = authRecord.get(Authn.AuthRecord.VALID_TO);
    String authCode = generateAuthorizationToken();
    String accessToken = generateAuthorizationToken();
    SSOSession ssoSession = getSsoSession(request, true);
    ssoSession.setAccessToken(accessToken);
    ssoSession.setAuthorizationCode(authCode);
    request.setAttribute(SSOConstants.HTTP_REQ_ATTR_ACCESS_TOKEN, accessToken);
    ssoSession.setActive(true);
    ssoSession.setAuthRecord(authRecord);
    ssoSession.setAutheticatedCredentials(ssoSession.getTempCredentials());
    getSsoContext(request).registerSsoSession(ssoSession);
    ssoSession.setPrincipalRecord(principalRecord);
    ssoSession.setProfile(profileName);
    ssoSession.setStatus(SSOSession.Status.authenticated);
    ssoSession.setTempCredentials(null);
    ssoSession.setUserId(getUserId(principalRecord));
    try {
        // NOTE(review): getTime() returns epoch millis as a long; the (int)
        // cast truncates it (current epoch millis exceed Integer.MAX_VALUE),
        // and the default is Integer.MAX_VALUE -- confirm the unit/width
        // setValidTo expects; this looks like an overflow bug.
        ssoSession.setValidTo(validTo == null ? Integer.MAX_VALUE : (int) new SimpleDateFormat("yyyyMMddHHmmssZ").parse(validTo).getTime());
    } catch (Exception ex) {
        // best effort: a bad VALID_TO leaves the previous validity in place
        log.error("Unable to parse Auth Record valid_to value: {}", ex.getMessage());
        log.debug("Unable to parse Auth Record valid_to value", ex);
    }
    try {
        if (ssoSession.getScopeAsList().contains("ovirt-ext=token:password-access") &&
                password != null &&
                StringUtils.isNotEmpty(ssoSession.getClientId())) {
            ssoSession.setPassword(encrypt(request.getServletContext(), ssoSession.getClientId(), password));
        }
    } catch (Exception ex) {
        // best effort: failure to encrypt simply omits the stored password
        log.error("Unable to encrypt password: {}", ex.getMessage());
        log.debug("Unable to encrypt password", ex);
    }
    ssoSession.touch();
    return ssoSession;
}
/**
 * Rejects any request that does not explicitly accept JSON responses.
 *
 * @param request the request whose Accept header is validated
 * @throws OAuthException with {@code invalid_request} when the Accept header
 *                        is missing, empty, or not exactly "application/json"
 */
public static void validateClientAcceptHeader(HttpServletRequest request) {
    String acceptHeader = request.getHeader("Accept");
    // Constant-first equals also covers the null/empty header cases.
    if (!"application/json".equals(acceptHeader)) {
        throw new OAuthException(SSOConstants.ERR_CODE_INVALID_REQUEST, String.format(SSOConstants.ERR_CODE_INVALID_REQUEST_MSG, "Accept Header"));
    }
}
/**
 * Validates the identity and request parameters of a registered OAuth client:
 * the client must be known and trusted, its secret (when supplied) must match,
 * the requested scope must be granted, and (when prefix checking is enabled)
 * the redirect URI must start with one of the client's callback prefixes.
 *
 * @throws OAuthException describing the first validation failure; any
 *                        unexpected error is reported as {@code server_error}
 */
public static void validateClientRequest(HttpServletRequest request, String clientId, String clientSecret, String scope, String redirectUri) {
    try {
        SSOContext ssoContext = getSsoContext(request);
        ClientInfo clientInfo = ssoContext.getClienInfo(clientId);
        if (clientInfo == null) {
            // Unknown client id.
            throw new OAuthException(SSOConstants.ERR_CODE_UNAUTHORIZED_CLIENT, SSOConstants.ERR_CODE_UNAUTHORIZED_CLIENT_MSG);
        }
        if (!clientInfo.isTrusted()) {
            throw new OAuthException(SSOConstants.ERR_CODE_ACCESS_DENIED, SSOConstants.ERR_CODE_ACCESS_DENIED_MSG);
        }
        if (StringUtils.isNotEmpty(clientSecret) && !EnvelopePBE.check(clientInfo.getClientSecret(), clientSecret)) {
            throw new OAuthException(SSOConstants.ERR_CODE_INVALID_REQUEST, String.format(SSOConstants.ERR_CODE_INVALID_REQUEST_MSG, SSOConstants.HTTP_PARAM_CLIENT_SECRET));
        }
        if (StringUtils.isNotEmpty(scope)) {
            validateScope(clientInfo.getScope(), scope);
        }
        if (StringUtils.isNotEmpty(redirectUri) && ssoContext.getSsoLocalConfig().getBoolean("SSO_CALLBACK_PREFIX_CHECK")) {
            // The redirect URI must begin with one of the registered callback prefixes.
            boolean redirectUriAllowed = false;
            for (String callbackPrefix : scopeAsList(clientInfo.getCallbackPrefix())) {
                if (redirectUri.startsWith(callbackPrefix)) {
                    redirectUriAllowed = true;
                    break;
                }
            }
            if (!redirectUriAllowed) {
                throw new OAuthException(SSOConstants.ERR_CODE_UNAUTHORIZED_CLIENT, SSOConstants.ERR_CODE_UNAUTHORIZED_CLIENT_MSG);
            }
        }
    } catch (OAuthException ex) {
        // Validation failures pass through untouched.
        throw ex;
    } catch (Exception ex) {
        log.error("Internal Server Error: {}", ex.getMessage());
        log.debug("Internal Server Error", ex);
        throw new OAuthException(SSOConstants.ERR_CODE_SERVER_ERROR, ex.getMessage());
    }
}
/**
 * Validates that the scope requested on this request is covered by the scope
 * already granted to the session identified by {@code token}. A missing
 * session, or a session without a scope, is not an error here.
 */
public static void validateRequestScope(HttpServletRequest req, String token, String scope) {
    if (StringUtils.isEmpty(scope)) {
        return;
    }
    SSOSession ssoSession = getSsoSessionFromRequest(req, token);
    if (ssoSession == null || ssoSession.getScope() == null) {
        return;
    }
    validateScope(ssoSession.getScopeAsList(), scope);
}
/**
 * Ensures every requested scope token (after stripping auth:identity entries)
 * is contained in the granted {@code scope} list.
 *
 * @param scope        the scopes granted to the client/session
 * @param requestScope the space-delimited scope string being requested
 * @throws OAuthException with {@code invalid_scope} when a requested token is not granted
 */
public static void validateScope(List<String> scope, String requestScope) {
    List<String> requested = strippedScopeAsList(scopeAsList(requestScope));
    if (scope.containsAll(requested)) {
        return;
    }
    throw new OAuthException(SSOConstants.ERR_CODE_INVALID_SCOPE, String.format(SSOConstants.ERR_CODE_INVALID_SCOPE_MSG, requested));
}
/**
 * Wraps {@code ex} in an {@link OAuthException} carrying {@code errorCode}
 * and writes it to the response as a JSON error document.
 */
public static void sendJsonDataWithMessage(HttpServletResponse response, String errorCode, Exception ex) throws IOException {
    sendJsonDataWithMessage(response, new OAuthException(errorCode, ex.getMessage(), ex));
}
/**
 * Writes the given OAuth error to the response as JSON, logging it at error
 * level (i.e. not treating it as an expected validate-request failure).
 */
public static void sendJsonDataWithMessage(HttpServletResponse response, OAuthException ex) throws IOException {
    sendJsonDataWithMessage(response, ex, false);
}
/**
 * Writes the OAuth error code and message of {@code ex} to the response as a
 * JSON document. Validate-request failures are expected traffic and are only
 * logged at debug level; everything else is logged as an error.
 */
public static void sendJsonDataWithMessage(HttpServletResponse response, OAuthException ex, boolean isValidateRequest) throws IOException {
    if (isValidateRequest) {
        log.debug("OAuthException {}: {}", ex.getCode(), ex.getMessage());
    } else {
        log.error("OAuthException {}: {}", ex.getCode(), ex.getMessage());
    }
    log.debug("OAuthException:", ex);
    Map<String, Object> payload = new HashMap<>();
    payload.put(SSOConstants.ERROR, ex.getMessage());
    payload.put(SSOConstants.ERROR_CODE, ex.getCode());
    sendJsonData(response, payload);
}
/**
 * Serializes {@code payload} as JSON and writes it to the response with an
 * {@code application/json} content type.
 *
 * @param response the response to write to
 * @param payload  the data to serialize
 * @throws IOException if serialization or writing fails
 */
public static void sendJsonData(HttpServletResponse response, Map<String, Object> payload) throws IOException {
    try (OutputStream os = response.getOutputStream()) {
        String jsonPayload = getJson(payload);
        // Content-Length is a byte count. The previous code used
        // String#length() (a char count), which undercounts whenever the JSON
        // contains non-ASCII characters and truncates the body. Encode once
        // and reuse the bytes so the count and the payload cannot diverge.
        byte[] body = jsonPayload.getBytes(StandardCharsets.UTF_8);
        response.setContentType("application/json");
        response.setContentLength(body.length);
        os.write(body);
        log.trace("Sending json data {}", jsonPayload);
    }
}
/**
 * Normalizes scope entries for comparison: a plain token is kept as-is, a
 * {@code key=value} entry is reduced to its first two segments, and
 * {@code *=auth:identity} entries are dropped entirely.
 *
 * @param scope the raw scope tokens
 * @return the stripped scope tokens, in the original order
 */
public static List<String> strippedScopeAsList(List<String> scope) {
    List<String> stripped = new ArrayList<>();
    for (String entry : scope) {
        String[] parts = entry.split("=", 3);
        if (parts.length == 1) {
            stripped.add(parts[0]);
        } else if (!"auth:identity".equals(parts[1])) {
            stripped.add(parts[0] + "=" + parts[1]);
        }
    }
    return stripped;
}
/**
 * Splits a space-delimited scope string into its individual tokens.
 *
 * @param scope the raw scope string; may be {@code null}, empty or blank
 * @return an empty list when no scope is given, otherwise the
 *         whitespace-separated tokens in order
 */
public static List<String> scopeAsList(String scope) {
    if (scope == null || scope.trim().isEmpty()) {
        // Previously a blank (all-whitespace) scope produced a list containing
        // one empty string; an empty list is the intended "no scope" result.
        return Collections.emptyList();
    }
    // "\\s+" collapses runs of any whitespace. The previous pattern "\\s *"
    // (one whitespace char plus optional literal spaces) emitted empty tokens
    // for consecutive non-space whitespace, e.g. "a \t b" -> ["a", "", "b"].
    return Arrays.asList(scope.trim().split("\\s+"));
}
/**
 * Encrypts {@code rawText} for the given client using envelope encryption:
 * an AES-256 (OFB mode, PKCS5 padding) envelope wrapped with the client's
 * X.509 certificate, loaded from the client's configured certificate
 * location.
 *
 * @param ctx      servlet context used to look up the SSO context
 * @param clientId id of the registered client whose certificate wraps the envelope
 * @param rawText  plaintext to encrypt; encoded as UTF-8 bytes
 * @return the encrypted envelope
 * @throws Exception on certificate-loading or encryption failure
 */
public static String encrypt(ServletContext ctx, String clientId, String rawText) throws Exception {
    ClientInfo clientInfo = getSsoContext(ctx).getClienInfo(clientId);
    try (InputStream in = new FileInputStream(clientInfo.getCertificateLocation())) {
        return EnvelopeEncryptDecrypt.encrypt(
                "AES/OFB/PKCS5Padding",
                256,
                CertificateFactory.getInstance("X.509").generateCertificate(in),
                // NOTE(review): 100 is presumably a chunk/block size used by the
                // envelope codec — confirm against EnvelopeEncryptDecrypt.encrypt.
                100,
                rawText.getBytes(StandardCharsets.UTF_8));
    }
}
/**
 * Sends a logout notification for {@code token} to every client id associated
 * with the token. A {@code null} client set is treated as "nothing to notify".
 */
public static void notifyClientsOfLogoutEvent(SSOContext ssoContext, Set<String> clientIdsForToken, String token) throws Exception {
    if (clientIdsForToken == null) {
        return;
    }
    for (String clientId : clientIdsForToken) {
        notifyClientOfLogoutEvent(ssoContext, clientId, token);
    }
}
/**
 * POSTs a logout notification to a single client's notification callback, if
 * one is configured. The body carries the event name, the token and the token
 * type form-encoded; the response body is read and discarded so the exchange
 * completes cleanly.
 *
 * @param ssoContext context providing client registration and local config
 * @param clientId   id of the client to notify
 * @param token      the token being logged out
 * @throws Exception on connection or I/O failure
 */
private static void notifyClientOfLogoutEvent(SSOContext ssoContext, String clientId, String token) throws Exception {
    ClientInfo clientInfo = ssoContext.getClienInfo(clientId);
    String url = clientInfo.getClientNotificationCallback();
    if (StringUtils.isEmpty(url)) {
        // No callback registered for this client: nothing to do.
        return;
    }
    HttpURLConnection connection = null;
    try {
        connection = createConnection(ssoContext.getSsoLocalConfig(), clientInfo, url);
        String data = new URLBuilder(url).addParameter("event", "logout")
                .addParameter("token", token)
                .addParameter("token_type", "bearer").buildURL().getQuery();
        // Encode once and reuse the bytes: Content-Length must be the byte
        // count (String#length() counts chars), and the previous
        // OutputStreamWriter wrote in the platform default charset.
        byte[] body = data.getBytes(StandardCharsets.UTF_8);
        connection.setRequestProperty("Content-Length", Integer.toString(body.length));
        connection.connect();
        try (OutputStream out = connection.getOutputStream()) {
            out.write(body);
            out.flush();
        }
        // Drain and discard the response so the server sees a completed exchange.
        try (InputStream input = connection.getInputStream()) {
            final byte[] buffer = new byte[8 * 1024];
            while (input.read(buffer) != -1) {
                // discard
            }
        }
    } finally {
        if (connection != null) {
            connection.disconnect();
        }
    }
}
/**
 * Builds a POST connection to a client's logout-notification callback, with
 * TLS settings (trust store, protocol, chain/host verification) taken from
 * the local SSO configuration and the client's registration.
 *
 * @param config     local SSO configuration supplying timeouts and trust store settings
 * @param clientInfo registered client whose callback is being contacted
 * @param url        the callback URL
 * @return a connection configured for a form-encoded POST expecting a JSON response
 * @throws Exception if the connection cannot be created
 */
private static HttpURLConnection createConnection(SSOLocalConfig config, ClientInfo clientInfo, String url) throws Exception {
    HttpURLConnection connection = new HttpURLConnectionBuilder(url).setHttpsProtocol(clientInfo.getNotificationCallbackProtocol())
            .setReadTimeout(config.getInteger("SSO_CALLBACK_READ_TIMEOUT"))
            .setConnectTimeout(config.getInteger("SSO_CALLBACK_CONNECT_TIMEOUT"))
            .setTrustManagerAlgorithm(TrustManagerFactory.getDefaultAlgorithm())
            .setTrustStore(config.getProperty("SSO_PKI_TRUST_STORE"))
            .setTrustStorePassword(config.getProperty("SSO_PKI_TRUST_STORE_PASSWORD"))
            .setTrustStoreType(config.getProperty("SSO_PKI_TRUST_STORE_TYPE"))
            .setURL(url)
            .setVerifyChain(clientInfo.isNotificationCallbackVerifyChain())
            .setVerifyHost(clientInfo.isNotificationCallbackVerifyHost()).create();
    // Two-way exchange: we write a form body and read the JSON response.
    connection.setDoInput(true);
    connection.setDoOutput(true);
    connection.setRequestMethod("POST");
    connection.setRequestProperty(HttpHeaders.ACCEPT, "application/json");
    connection.setRequestProperty(HttpHeaders.CONTENT_TYPE, "application/x-www-form-urlencoded");
    return connection;
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2018 MrBlobman
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package io.github.mrblobman.spigotcommandlib.registry;
import com.google.common.base.Defaults;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import io.github.mrblobman.spigotcommandlib.args.CommandParameter;
import io.github.mrblobman.spigotcommandlib.args.ParseException;
import net.md_5.bungee.api.ChatColor;
import net.md_5.bungee.api.chat.*;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * An {@link Invoker} that dispatches a matched {@link SubCommand} to a
 * reflectively invoked handler method, converting the raw string arguments
 * into the typed values declared by the handler's {@link CommandParameter}
 * list (required, optional, and an optional trailing var-args parameter).
 */
public class HandleInvoker implements Invoker {
    protected SubCommand subCommand;
    protected String cmdDesc;
    protected Object invocationTarget;
    protected Method method;
    protected Class<?> senderType;
    protected List<CommandParameter<?>> commandParameters;
    /** Number of leading parameters that are not optional and must be supplied. */
    protected int minArgsRequired;

    HandleInvoker(SubCommand subCmd, String cmdDesc, Object invocationTarget, Method cmdHandler, Class<?> senderType, List<CommandParameter<?>> commandParameters) {
        this.subCommand = subCmd;
        this.cmdDesc = cmdDesc;
        this.invocationTarget = invocationTarget;
        this.method = cmdHandler;
        // The handler may be declared in a non-public class; make it callable anyway.
        this.method.setAccessible(true);
        this.senderType = senderType;
        this.commandParameters = commandParameters;
        int minArgs = 0;
        for (CommandParameter arg : commandParameters) {
            if (!arg.isOptional()) minArgs++;
        }
        this.minArgsRequired = minArgs;
    }

    /**
     * Invoke this handler with the given arguments. The args are all String args that follow the sub command.<br>
     * Ex: /baseCmd sub1 sub2 arg0 arg1 arg2
     *
     * @param sender the command sender. If this type doesn't match the sender type it will inform the sender.
     * @param args the args in which to invoke the handler with.
     *
     * @throws Exception if the method invocation fails for a non user based error, user based errors will directly be
     *                   messaged to the player.
     */
    @Override
    public boolean invoke(SubCommand command, CommandSender sender, String[] args) throws Exception {
        if (!this.subCommand.equals(command)) return false;
        if (!senderType.isInstance(sender)) {
            // Wrong sender type, cannot invoke
            sendIncorrectSenderMessage(sender);
            return true;
        }
        List<Object> params = buildMethodParams(sender, args);
        // null means parsing failed and the sender has already been messaged.
        if (params == null) return true;
        // The handler's signature is (sender, param0, param1, ...).
        int i = 0;
        Object[] callParams = new Object[params.size() + 1];
        callParams[i++] = sender;
        for (Object param : params) {
            callParams[i++] = param;
        }
        method.invoke(invocationTarget, callParams);
        return true;
    }

    /**
     * Parse the raw string args into the typed values the handler method expects.
     *
     * @param sender the command sender; messaged directly on any user error
     * @param args   the raw arguments following the sub command
     * @return the parsed parameters, or {@code null} if parsing failed (the
     *         sender has already been informed in that case)
     */
    protected List<Object> buildMethodParams(CommandSender sender, String[] args) {
        if (args.length < minArgsRequired) {
            // Not enough args, send usage
            sendUsage(sender);
            return null;
        }
        List<Object> params = new ArrayList<>();
        // Parse all required
        for (int i = 0; i < minArgsRequired; i++) {
            CommandParameter<?> cmdParam = this.commandParameters.get(i);
            if (cmdParam.getFormatter().canBeParsedFrom(args[i])) {
                try {
                    params.add(cmdParam.getFormatter().parse(args[i]));
                } catch (ParseException e) {
                    sender.sendMessage(ChatColor.RED + "Invalid argument value " + args[i] + ". " + e.getLocalizedMessage());
                    return null;
                }
            } else {
                // Invalid type param
                sendUsage(sender);
                return null;
            }
        }
        // Parse all optional
        int argIndex;
        for (argIndex = minArgsRequired; argIndex < commandParameters.size(); argIndex++) {
            CommandParameter cmdParam = this.commandParameters.get(argIndex);
            if (cmdParam.isVarArgs()) break;
            if (argIndex >= args.length) {
                // Param was not given so we insert a null reference
                if (cmdParam.getArgumentType().isPrimitive()) {
                    // We can't forward a null primitive, we need to set the default value
                    params.add(Defaults.defaultValue(cmdParam.getArgumentType()));
                } else {
                    params.add(null);
                }
                continue;
            }
            if (cmdParam.getFormatter().canBeParsedFrom(args[argIndex])) {
                try {
                    params.add(cmdParam.getFormatter().parse(args[argIndex]));
                } catch (ParseException e) {
                    sender.sendMessage(ChatColor.RED + "Invalid argument value " + args[argIndex] + ". " + e.getLocalizedMessage());
                    return null;
                }
            } else {
                // Invalid type param
                sendUsage(sender);
                return null;
            }
        }
        if (!commandParameters.isEmpty()) {
            // We need to handle the last one
            CommandParameter lastArg = commandParameters.get(commandParameters.size() - 1);
            if (!lastArg.isVarArgs()) {
                if (argIndex < args.length) {
                    // They gave too many args
                    sendUsage(sender);
                    return null;
                }
            } else {
                // Build the varargs array
                @SuppressWarnings("unchecked")
                List<Object> varArgs = lastArg.getFormatter().createTypedList();
                // Handle varargs
                for (/*argIndex*/; argIndex < args.length; argIndex++) {
                    if (lastArg.getFormatter().canBeParsedFrom(args[argIndex])) {
                        try {
                            varArgs.add(lastArg.getFormatter().parse(args[argIndex]));
                        } catch (ParseException e) {
                            // BUGFIX: this was the only parse site not wrapped in a
                            // try/catch — a ParseException here leaked to the caller
                            // instead of messaging the sender like every other branch.
                            sender.sendMessage(ChatColor.RED + "Invalid argument value " + args[argIndex] + ". " + e.getLocalizedMessage());
                            return null;
                        }
                    } else {
                        // Invalid type param
                        sendUsage(sender);
                        return null;
                    }
                }
                params.add(varArgs.toArray((Object[]) Array.newInstance(lastArg.getFormatter().getParseType(), varArgs.size())));
            }
        }
        return params;
    }

    /** Tell the sender that their sender type cannot execute this command. */
    protected void sendIncorrectSenderMessage(CommandSender sender) {
        sender.sendMessage(ChatColor.RED + "This can only be executed by a(n) " + senderType.getSimpleName() + ". You are a(n) " + sender.getClass().getSimpleName() + ".");
    }

    /** Send this command's usage/format line to the sender. */
    protected void sendUsage(CommandSender sender) {
        sendCommandFormat(sender, this.subCommand.toString());
    }

    @Override
    public void sendDescription(SubCommand command, CommandSender sender) {
        if (!this.subCommand.equals(command)) return;
        sender.sendMessage(ChatColor.AQUA + this.cmdDesc);
        sendCommandFormat(sender, " \u27A5" + this.subCommand.toString());
    }

    /**
     * Shared body of {@link #sendUsage(CommandSender)} and
     * {@link #sendDescription(SubCommand, CommandSender)}: sends the executable
     * format of the command — a clickable, tooltip'd chat component for players
     * and a plain yellow line for any other sender.
     *
     * @param sender the receiver of the message
     * @param header the text of the leading (red) chat component
     */
    private void sendCommandFormat(CommandSender sender, String header) {
        StringBuilder strBuilder = new StringBuilder(this.subCommand.toExecutableString());
        for (CommandParameter arg : this.commandParameters) strBuilder.append(" ").append(arg.getDescriptiveName());
        if (sender instanceof Player) {
            ComponentBuilder message = new ComponentBuilder(header);
            message.color(ChatColor.RED);
            message.event(new ClickEvent(ClickEvent.Action.SUGGEST_COMMAND, strBuilder.toString()));
            message.event(buildTooltip(ChatColor.YELLOW + this.subCommand.toString(),
                    ChatColor.GRAY + "Click to paste this command's",
                    ChatColor.GRAY + "format in your chat box."));
            for (CommandParameter<?> arg : this.commandParameters) {
                message.append(" " + arg.getDescriptiveName());
                message.event(buildTooltip(arg.getDescription()));
            }
            ((Player) sender).spigot().sendMessage(message.create());
        } else {
            sender.sendMessage(ChatColor.YELLOW + strBuilder.toString());
        }
    }

    protected static HoverEvent buildTooltip(String... lines) {
        return buildTooltip(Arrays.asList(lines));
    }

    /**
     * Build the HoverEvent that would result in the display
     * of lines when you hover over the component the event is
     * for.
     *
     * @param lines the lines in the tooltip
     *
     * @return the constructed hover event
     */
    protected static HoverEvent buildTooltip(List<String> lines) {
        JsonObject item = new JsonObject();
        item.addProperty("id", "minecraft:stone");
        item.addProperty("Count", 1);
        if (lines.isEmpty())
            return new HoverEvent(HoverEvent.Action.SHOW_ITEM, new BaseComponent[]{ new TextComponent(item.toString()) });
        JsonObject tag = new JsonObject();
        item.add("tag", tag);
        JsonObject display = new JsonObject();
        tag.add("display", display);
        // The first line becomes the item name; the remainder becomes its lore.
        display.addProperty("Name", ChatColor.WHITE + lines.get(0));
        if (lines.size() > 1) {
            JsonArray lore = new JsonArray();
            for (int i = 1; i < lines.size(); i++)
                lore.add(new JsonPrimitive(ChatColor.WHITE + lines.get(i)));
            display.add("Lore", lore);
        }
        return new HoverEvent(HoverEvent.Action.SHOW_ITEM, new BaseComponent[]{ new TextComponent(item.toString()) });
    }
}
| |
package test.jycessing;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import jycessing.MixedModeError;
import jycessing.PAppletJythonDriver;
import jycessing.Printer;
import jycessing.PythonSketchError;
import jycessing.Runner;
import jycessing.StreamPrinter;
import org.junit.Test;
/**
 * End-to-end tests for the jycessing runner. Each test executes a small
 * Python sketch (or a bare module import) and compares the sketch's captured
 * standard output with the expected text.
 */
public class JycessingTests {
    /**
     * A {@link Printer} that records everything printed so tests can assert on
     * sketch output. Line endings are normalized to '\n' to keep expectations
     * platform independent.
     */
    private static class CapturingPrinter implements Printer {
        private final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        private final PrintStream out = new PrintStream(baos, true);

        @Override
        public void print(final Object o) {
            out.print(String.valueOf(o));
        }

        public String getText() {
            try {
                // replace(CharSequence, CharSequence) is a literal substitution;
                // the previous replaceAll needlessly compiled the literals as regexes.
                return new String(baos.toByteArray(), "utf-8").replace("\r\n", "\n").replace("\r", "\n");
            } catch (final UnsupportedEncodingException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Run the sketch {@code testing/resources/test_<testResource>.py} to
     * completion and return everything it printed.
     */
    private static String run(final String testResource) throws Exception {
        System.err.println("Running " + testResource + " test.");
        final Path source = Paths.get("testing/resources/test_" + testResource + ".py");
        final String sourceText = new String(Files.readAllBytes(source), "utf-8");
        final TestSketch sketch = new TestSketch(source, sourceText, "test " + testResource);
        final CapturingPrinter out = new CapturingPrinter();
        try {
            Runner.runSketchBlocking(sketch, out, new StreamPrinter(System.err));
        } finally {
            // Echo captured output even when the sketch throws, to aid debugging.
            System.err.println(out.getText());
        }
        return out.getText();
    }

    /**
     * Assert that a generated sketch consisting only of {@code import <module>}
     * runs successfully. The sketch is written to (and removed from) a fresh
     * temporary directory.
     */
    private static void testImport(final String module) throws Exception {
        final Path tmp = Files.createTempDirectory("jycessing");
        final Path src = Paths.get(tmp.toString(), "test_import_" + module + ".pyde");
        try {
            final String testText = "import " + module + "\nprint 'OK'\nexit()";
            Files.copy(new ByteArrayInputStream(testText.getBytes("utf-8")), src,
                StandardCopyOption.REPLACE_EXISTING);
            final CapturingPrinter out = new CapturingPrinter();
            System.err.println("Running import " + module + " test.");
            final TestSketch sketch = new TestSketch(src, testText, "test import " + module);
            Runner.runSketchBlocking(sketch, out, new StreamPrinter(System.err));
            assertEquals("OK\n", out.getText());
        } finally {
            // deleteIfExists: if the copy above failed, a plain delete() would
            // throw NoSuchFileException here and mask the original failure.
            Files.deleteIfExists(src);
            Files.deleteIfExists(tmp);
        }
    }

    /** Assert that the named sketch prints exactly "OK\n". */
    private static void expectOK(final String testName) throws Exception {
        assertEquals("OK\n", run(testName));
    }

    @Test
    public void inherit_str() throws Exception {
        assertEquals("cosmic\n12\n[12, 13]\n", run("inherit_str"));
    }

    @Test
    public void static_size() throws Exception {
        expectOK("static_size");
    }

    @Test
    public void filter_builtins() throws Exception {
        expectOK("filter");
    }

    @Test
    public void set_builtins() throws Exception {
        expectOK("set");
    }

    @Test
    public void map_builtins() throws Exception {
        assertEquals("50\n13\n", run("map"));
    }

    @Test
    public void md5() throws Exception {
        expectOK("md5");
    }

    @Test
    public void urllib2() throws Exception {
        testImport("urllib2");
    }

    @Test
    public void urllib() throws Exception {
        testImport("urllib");
    }

    @Test
    public void load_in_initializer() throws Exception {
        expectOK("load_in_initializer");
    }

    @Test
    public void datetime() throws Exception {
        testImport("datetime");
    }

    @Test
    public void calendar() throws Exception {
        testImport("calendar");
    }

    @Test
    public void processing_core() throws Exception {
        assertEquals("[ 1.0, 2.0, 3.0 ]\n<type 'processing.core.PFont'>\n", run("pcore"));
    }

    @Test
    public void pvector() throws Exception {
        expectOK("pvector");
    }

    @Test
    public void pixels() throws Exception {
        expectOK("pixels");
    }

    @Test
    public void unicode() throws Exception {
        expectOK("unicode");
    }

    @Test
    public void primitives() throws Exception {
        assertEquals("66.7\n", run("primitives"));
    }

    @Test
    public void millis() throws Exception {
        expectOK("millis");
    }

    @Test
    public void imports() throws Exception {
        expectOK("import");
    }

    @Test
    public void pvector_import() throws Exception {
        expectOK("pvector_in_imported_module");
    }

    @Test
    public void exit_builtin() throws Exception {
        expectOK("exit");
    }

    @Test
    public void exit_builtin_twice() throws Exception {
        expectOK("exit");
        expectOK("exit");
    }

    @Test
    public void csv() throws Exception {
        // We do it twice because this exposed a critical bug in the
        // re-initialization code (namely, that Py.None had different
        // values on successive runs, but something was holding on to
        // the old value.
        testImport("csv");
        testImport("csv");
    }

    @Test
    public void hex() throws Exception {
        expectOK("hex");
    }

    @Test
    public void color() throws Exception {
        expectOK("color");
    }

    @Test
    public void loadThings() throws Exception {
        expectOK("loadthings");
    }

    @Test
    public void constrain() throws Exception {
        expectOK("constrain");
    }

    @Test
    public void detectMixedMode() throws Exception {
        try {
            run("mixed_mode_error");
            fail("Expected mixed mode error.");
        } catch (final MixedModeError expected) {
            // noop
        }
    }

    @Test
    public void c_logical_and() throws Exception {
        try {
            run("c_logical_and");
            fail("Expected syntax error.");
        } catch (final PythonSketchError expected) {
            assertEquals(PAppletJythonDriver.C_LIKE_LOGICAL_AND_ERROR_MESSAGE, expected.getMessage());
        }
    }

    @Test
    public void c_logical_or() throws Exception {
        try {
            run("c_logical_or");
            fail("Expected syntax error.");
        } catch (final PythonSketchError expected) {
            assertEquals(PAppletJythonDriver.C_LIKE_LOGICAL_OR_ERROR_MESSAGE, expected.getMessage());
        }
    }

    @Test
    public void lerpColorStaticMode() throws Exception {
        expectOK("lerp_color_static_mode");
    }

    @Test
    public void lerpColorBeforeSetup() throws Exception {
        expectOK("lerp_color_before_setup");
    }

    @Test
    public void keyDefinedBeforeKeyEvent() throws Exception {
        expectOK("key_before_key_event");
    }

    @Test
    public void randintDomainRegression() throws Exception {
        expectOK("randint_domain_regression");
    }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.security.authentication.server;
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.authentication.util.KerberosUtil;
import org.ietf.jgss.GSSException;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.Oid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.kerberos.KeyTab;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
/**
* The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO
* authentication mechanism for HTTP.
* <p>
* The supported configuration properties are:
* <ul>
* <li>kerberos.principal: the Kerberos principal to used by the server. As
* stated by the Kerberos SPNEGO specification, it should be
* <code>HTTP/${HOSTNAME}@{REALM}</code>. The realm can be omitted from the
* principal as the JDK GSS libraries will use the realm name of the configured
* default realm.
* It does not have a default value.</li>
* <li>kerberos.keytab: the keytab file containing the credentials for the
* Kerberos principal.
* It does not have a default value.</li>
* <li>kerberos.name.rules: kerberos names rules to resolve principal names, see
* {@link KerberosName#setRules(String)}</li>
* </ul>
*/
public class KerberosAuthenticationHandler implements AuthenticationHandler {
public static final Logger LOG = LoggerFactory.getLogger(
KerberosAuthenticationHandler.class);
/**
* Constant that identifies the authentication mechanism.
*/
public static final String TYPE = "kerberos";
/**
* Constant for the configuration property that indicates the kerberos
* principal.
*/
public static final String PRINCIPAL = TYPE + ".principal";
/**
* Constant for the configuration property that indicates the keytab
* file path.
*/
public static final String KEYTAB = TYPE + ".keytab";
/**
* Constant for the configuration property that indicates the Kerberos name
* rules for the Kerberos principals.
*/
public static final String NAME_RULES = TYPE + ".name.rules";
/**
* Constant for the configuration property that indicates how auth_to_local
* rules are evaluated.
*/
public static final String RULE_MECHANISM = TYPE + ".name.rules.mechanism";
/**
* Constant for the list of endpoints that skips Kerberos authentication.
*/
@VisibleForTesting
static final String ENDPOINT_WHITELIST = TYPE + ".endpoint.whitelist";
private static final Pattern ENDPOINT_PATTERN = Pattern.compile("^/[\\w]+");
private String type;
private String keytab;
private GSSManager gssManager;
private Subject serverSubject = new Subject();
private final Collection<String> whitelist = new HashSet<>();
/**
 * Creates a Kerberos SPNEGO authentication handler with the default
 * auth-token type, <code>kerberos</code>.
 */
public KerberosAuthenticationHandler() {
    // Delegate to the type-parameterized constructor with the default type.
    this(TYPE);
}
/**
 * Creates a Kerberos SPNEGO authentication handler with a custom auth-token
 * type. Subclasses use this to report their own type via {@link #getType()}.
 *
 * @param type auth-token type.
 */
public KerberosAuthenticationHandler(String type) {
    this.type = type;
}
/**
 * Initializes the authentication handler instance.
 * <p>
 * It creates a Kerberos context using the principal and keytab specified in
 * the configuration.
 * <p>
 * This method is invoked by the {@link AuthenticationFilter#init} method.
 *
 * @param config configuration properties to initialize the handler.
 *
 * @throws ServletException thrown if the handler could not be initialized.
 */
@Override
public void init(Properties config) throws ServletException {
  try {
    String principal = config.getProperty(PRINCIPAL);
    if (principal == null || principal.trim().length() == 0) {
      throw new ServletException("Principal not defined in configuration");
    }
    keytab = config.getProperty(KEYTAB, keytab);
    if (keytab == null || keytab.trim().length() == 0) {
      throw new ServletException("Keytab not defined in configuration");
    }
    File keytabFile = new File(keytab);
    if (!keytabFile.exists()) {
      throw new ServletException("Keytab does not exist: " + keytab);
    }
    // use all SPNEGO principals in the keytab if a principal isn't
    // specifically configured
    final String[] spnegoPrincipals;
    if (principal.equals("*")) {
      spnegoPrincipals = KerberosUtil.getPrincipalNames(
          keytab, Pattern.compile("HTTP/.*"));
      if (spnegoPrincipals.length == 0) {
        throw new ServletException("Principals do not exist in the keytab");
      }
    } else {
      spnegoPrincipals = new String[]{principal};
    }
    // The server subject carries the keytab credentials plus one
    // KerberosPrincipal per SPNEGO principal; GSS operations later run
    // under this subject (see Subject.doAs below and in authenticate()).
    KeyTab keytabInstance = KeyTab.getInstance(keytabFile);
    serverSubject.getPrivateCredentials().add(keytabInstance);
    for (String spnegoPrincipal : spnegoPrincipals) {
      Principal krbPrincipal = new KerberosPrincipal(spnegoPrincipal);
      LOG.info("Using keytab {}, for principal {}",
          keytab, krbPrincipal);
      serverSubject.getPrincipals().add(krbPrincipal);
    }
    // Optional auth_to_local name rules and their evaluation mechanism.
    String nameRules = config.getProperty(NAME_RULES, null);
    if (nameRules != null) {
      KerberosName.setRules(nameRules);
    }
    String ruleMechanism = config.getProperty(RULE_MECHANISM, null);
    if (ruleMechanism != null) {
      KerberosName.setRuleMechanism(ruleMechanism);
    }
    // Endpoints listed here bypass Kerberos entirely (see authenticate()).
    // Entries are comma- or newline-separated and must look like "/path".
    final String whitelistStr = config.getProperty(ENDPOINT_WHITELIST, null);
    if (whitelistStr != null) {
      final String[] strs = whitelistStr.trim().split("\\s*[,\n]\\s*");
      for (String s: strs) {
        if (s.isEmpty()) continue;
        if (ENDPOINT_PATTERN.matcher(s).matches()) {
          whitelist.add(s);
        } else {
          throw new ServletException(
              "The element of the whitelist: " + s + " must start with '/'"
                  + " and must not contain special characters afterwards");
        }
      }
    }
    // Obtain the GSS manager while running as the keytab-backed subject.
    try {
      gssManager = Subject.doAs(serverSubject,
          new PrivilegedExceptionAction<GSSManager>() {
            @Override
            public GSSManager run() throws Exception {
              return GSSManager.getInstance();
            }
          });
    } catch (PrivilegedActionException ex) {
      throw ex.getException();
    }
  } catch (Exception ex) {
    // Any failure above surfaces uniformly as a ServletException.
    throw new ServletException(ex);
  }
}
/**
 * Releases any resources initialized by the authentication handler.
 * <p>
 * It destroys the Kerberos context.
 */
@Override
public void destroy() {
  // Drop references to the keytab path and the credential-bearing subject.
  // NOTE(review): gssManager is not cleared here — presumably it holds no
  // releasable state of its own; confirm if explicit cleanup is needed.
  keytab = null;
  serverSubject = null;
}
/**
 * Returns the authentication type of the authentication handler, 'kerberos'.
 * <p>
 * If a custom type was supplied to the constructor, that value is returned
 * instead of the default 'kerberos'.
 *
 * @return the authentication type of the authentication handler.
 */
@Override
public String getType() {
  return type;
}
/**
 * Returns the Kerberos principals used by the authentication handler,
 * as registered on the server subject during {@link #init}.
 *
 * @return the Kerberos principals used by the authentication handler.
 */
protected Set<KerberosPrincipal> getPrincipals() {
  return serverSubject.getPrincipals(KerberosPrincipal.class);
}
/**
 * Returns the path of the keytab used by the authentication handler
 * ({@code null} after {@link #destroy}).
 *
 * @return the keytab used by the authentication handler.
 */
protected String getKeytab() {
  return keytab;
}
/**
 * This is an empty implementation, it always returns <code>TRUE</code>.
 * <p>
 * No management operations are performed for this mechanism; every request
 * proceeds to {@link #authenticate}.
 *
 * @param token the authentication token if any, otherwise <code>NULL</code>.
 * @param request the HTTP client request.
 * @param response the HTTP client response.
 *
 * @return <code>TRUE</code>
 * @throws IOException it is never thrown.
 * @throws AuthenticationException it is never thrown.
 */
@Override
public boolean managementOperation(AuthenticationToken token,
                                   HttpServletRequest request,
                                   HttpServletResponse response)
    throws IOException, AuthenticationException {
  return true;
}
/**
 * It enforces the Kerberos SPNEGO authentication sequence returning an
 * {@link AuthenticationToken} only after the Kerberos SPNEGO sequence has
 * completed successfully.
 *
 * @param request the HTTP client request.
 * @param response the HTTP client response.
 *
 * @return an authentication token if the Kerberos SPNEGO sequence is complete
 * and valid, <code>null</code> if it is in progress (in this case the handler
 * handles the response to the client).
 *
 * @throws IOException thrown if an IO error occurred.
 * @throws AuthenticationException thrown if Kerberos SPNEGO sequence failed.
 */
@Override
public AuthenticationToken authenticate(HttpServletRequest request,
    final HttpServletResponse response)
    throws IOException, AuthenticationException {
  // If the request servlet path is in the whitelist,
  // skip Kerberos authentication and return anonymous token.
  final String path = request.getServletPath();
  for(final String endpoint: whitelist) {
    if (endpoint.equals(path)) {
      return AuthenticationToken.ANONYMOUS;
    }
  }
  AuthenticationToken token = null;
  String authorization = request.getHeader(
      KerberosAuthenticator.AUTHORIZATION);
  if (authorization == null
      || !authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
    // No Negotiate credentials yet: send the 401 challenge that starts
    // (or restarts) the SPNEGO handshake. Returning null below signals
    // that the response has been handled here.
    response.setHeader(WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
    response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
    if (authorization == null) {
      LOG.trace("SPNEGO starting for url: {}", request.getRequestURL());
    } else {
      LOG.warn("'" + KerberosAuthenticator.AUTHORIZATION +
          "' does not start with '" +
          KerberosAuthenticator.NEGOTIATE + "' : {}", authorization);
    }
  } else {
    // Strip the "Negotiate" scheme prefix and base64-decode the client's
    // GSS token.
    authorization = authorization.substring(
        KerberosAuthenticator.NEGOTIATE.length()).trim();
    final Base64 base64 = new Base64(0);
    final byte[] clientToken = base64.decode(authorization);
    try {
      final String serverPrincipal =
          KerberosUtil.getTokenServerName(clientToken);
      // Only HTTP service principals are acceptable SPNEGO targets.
      if (!serverPrincipal.startsWith("HTTP/")) {
        throw new IllegalArgumentException(
            "Invalid server principal " + serverPrincipal +
            "decoded from client request");
      }
      // Run GSS acceptance under the keytab-backed server subject.
      token = Subject.doAs(serverSubject,
          new PrivilegedExceptionAction<AuthenticationToken>() {
            @Override
            public AuthenticationToken run() throws Exception {
              return runWithPrincipal(serverPrincipal, clientToken,
                  base64, response);
            }
          });
    } catch (PrivilegedActionException ex) {
      // Unwrap: IOExceptions propagate as-is; everything else becomes an
      // authentication failure.
      if (ex.getException() instanceof IOException) {
        throw (IOException) ex.getException();
      } else {
        throw new AuthenticationException(ex.getException());
      }
    } catch (Exception ex) {
      throw new AuthenticationException(ex);
    }
  }
  return token;
}
/**
 * Performs one round of the server side of the GSS-API/SPNEGO handshake
 * for the given client token, using the supplied server principal's
 * accept-only credentials. Intended to be invoked inside
 * {@code Subject.doAs} with the server's login subject (see the caller).
 *
 * @param serverPrincipal the server principal name decoded from the token.
 * @param clientToken the raw (base64-decoded) SPNEGO token from the client.
 * @param base64 codec used to encode the response token for the header.
 * @param response the HTTP response; receives the WWW-Authenticate header
 *        and the status code.
 * @return an {@link AuthenticationToken} once the context is established,
 *         or {@code null} while the handshake is still in progress.
 * @throws IOException on IO errors.
 * @throws GSSException if the GSS-API layer rejects the token.
 */
private AuthenticationToken runWithPrincipal(String serverPrincipal,
    byte[] clientToken, Base64 base64, HttpServletResponse response) throws
    IOException, GSSException {
  GSSContext gssContext = null;
  GSSCredential gssCreds = null;
  AuthenticationToken token = null;
  try {
    LOG.trace("SPNEGO initiated with server principal [{}]", serverPrincipal);
    // Acquire accept-only credentials for the server principal, valid for
    // both the SPNEGO and raw Kerberos v5 mechanisms.
    gssCreds = this.gssManager.createCredential(
        this.gssManager.createName(serverPrincipal,
            KerberosUtil.NT_GSS_KRB5_PRINCIPAL_OID),
        GSSCredential.INDEFINITE_LIFETIME,
        new Oid[]{
            KerberosUtil.GSS_SPNEGO_MECH_OID,
            KerberosUtil.GSS_KRB5_MECH_OID },
        GSSCredential.ACCEPT_ONLY);
    gssContext = this.gssManager.createContext(gssCreds);
    byte[] serverToken = gssContext.acceptSecContext(clientToken, 0,
        clientToken.length);
    if (serverToken != null && serverToken.length > 0) {
      // Echo the server's response token back to the client in the
      // WWW-Authenticate header, base64-encoded.
      String authenticate = base64.encodeToString(serverToken);
      response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
          KerberosAuthenticator.NEGOTIATE + " " +
          authenticate);
    }
    if (!gssContext.isEstablished()) {
      // Handshake not finished: 401 asks the client for another round.
      response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      LOG.trace("SPNEGO in progress");
    } else {
      // Handshake complete: derive the short user name from the client
      // principal and build the authentication token.
      String clientPrincipal = gssContext.getSrcName().toString();
      KerberosName kerberosName = new KerberosName(clientPrincipal);
      String userName = kerberosName.getShortName();
      token = new AuthenticationToken(userName, clientPrincipal, getType());
      response.setStatus(HttpServletResponse.SC_OK);
      LOG.trace("SPNEGO completed for client principal [{}]",
          clientPrincipal);
    }
  } finally {
    // Always release the native GSS resources.
    if (gssContext != null) {
      gssContext.dispose();
    }
    if (gssCreds != null) {
      gssCreds.dispose();
    }
  }
  // null here means "in progress" (another round is required).
  return token;
}
}
| |
/* -*- mode: java; c-basic-offset: 8; indent-tabs-mode: t; tab-width: 8 -*- */
package vib;
import ij.ImagePlus;
import ij.measure.Calibration;
import java.util.StringTokenizer;
import java.util.Vector;
import math3d.FastMatrixN;
import math3d.JacobiDouble;
import math3d.Point3d;
import math3d.Triangle;
public class FastMatrix {
public double x, y, z;
protected double a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23;
public FastMatrix() { }
public FastMatrix(double f) { a00 = a11 = a22 = f; }
public FastMatrix(double[][] m) {
if ((m.length != 3 && m.length != 4)
|| m[0].length != 4)
throw new RuntimeException("Wrong dimensions: "
+ m.length + "x"
+ m[0].length);
a00 = (double)m[0][0];
a01 = (double)m[0][1];
a02 = (double)m[0][2];
a03 = (double)m[0][3];
a10 = (double)m[1][0];
a11 = (double)m[1][1];
a12 = (double)m[1][2];
a13 = (double)m[1][3];
a20 = (double)m[2][0];
a21 = (double)m[2][1];
a22 = (double)m[2][2];
a23 = (double)m[2][3];
}
public FastMatrix(FastMatrix f) {
x = y = z = 0;
a00 = f.a00;
a01 = f.a01;
a02 = f.a02;
a03 = f.a03;
a10 = f.a10;
a11 = f.a11;
a12 = f.a12;
a13 = f.a13;
a20 = f.a20;
a21 = f.a21;
a22 = f.a22;
a23 = f.a23;
}
public FastMatrix copyFrom(FloatMatrix f) {
x = y = z = 0;
a00 = f.a00;
a01 = f.a01;
a02 = f.a02;
a03 = f.a03;
a10 = f.a10;
a11 = f.a11;
a12 = f.a12;
a13 = f.a13;
a20 = f.a20;
a21 = f.a21;
a22 = f.a22;
a23 = f.a23;
return this;
}
public boolean isJustTranslation() {
FastMatrix toTest = new FastMatrix(this);
toTest.a03 -= a03;
toTest.a13 -= a13;
toTest.a23 -= a23;
return toTest.isIdentity();
}
public boolean noTranslation() {
double eps = (double)1e-10;
return ((double)Math.abs(a03) < eps)
&& ((double)Math.abs(a13) < eps)
&& ((double)Math.abs(a23) < eps);
}
public FastMatrix composeWith(FastMatrix followedBy) {
// Alias this and followedBy to A and B, with entries a_ij...
FastMatrix A = this;
FastMatrix B = followedBy;
FastMatrix result = new FastMatrix();
result.a00 = (A.a00 * B.a00) + (A.a10 * B.a01) + (A.a20 * B.a02);
result.a10 = (A.a00 * B.a10) + (A.a10 * B.a11) + (A.a20 * B.a12);
result.a20 = (A.a00 * B.a20) + (A.a10 * B.a21) + (A.a20 * B.a22);
result.a01 = (A.a01 * B.a00) + (A.a11 * B.a01) + (A.a21 * B.a02);
result.a11 = (A.a01 * B.a10) + (A.a11 * B.a11) + (A.a21 * B.a12);
result.a21 = (A.a01 * B.a20) + (A.a11 * B.a21) + (A.a21 * B.a22);
result.a02 = (A.a02 * B.a00) + (A.a12 * B.a01) + (A.a22 * B.a02);
result.a12 = (A.a02 * B.a10) + (A.a12 * B.a11) + (A.a22 * B.a12);
result.a22 = (A.a02 * B.a20) + (A.a12 * B.a21) + (A.a22 * B.a22);
result.a03 = (A.a03 * B.a00) + (A.a13 * B.a01) + (A.a23 * B.a02) + B.a03;
result.a13 = (A.a03 * B.a10) + (A.a13 * B.a11) + (A.a23 * B.a12) + B.a13;
result.a23 = (A.a03 * B.a20) + (A.a13 * B.a21) + (A.a23 * B.a22) + B.a23;
return result;
}
/* This decomposes the transformation into the 3x3 part (the first
FastMatrix in the returned array) and the translation (the
second FastMatrix). (So, applying these two in the order
returned in the array should be the same as applying the
original.)
*/
public FastMatrix[] decompose() {
FastMatrix[] result = new FastMatrix[2];
result[0].a00 = a00;
result[0].a01 = a01;
result[0].a02 = a02;
result[0].a10 = a10;
result[0].a11 = a11;
result[0].a12 = a12;
result[0].a20 = a20;
result[0].a21 = a21;
result[0].a22 = a22;
result[1].a03 = a03;
result[1].a13 = a13;
result[1].a23 = a23;
return result;
}
public FastMatrix plus(FastMatrix other) {
FastMatrix result = new FastMatrix();
result.a00 = other.a00 + this.a00;
result.a01 = other.a01 + this.a01;
result.a02 = other.a02 + this.a02;
result.a03 = other.a03 + this.a03;
result.a10 = other.a10 + this.a10;
result.a11 = other.a11 + this.a11;
result.a12 = other.a12 + this.a12;
result.a13 = other.a13 + this.a13;
result.a20 = other.a20 + this.a20;
result.a21 = other.a21 + this.a21;
result.a22 = other.a22 + this.a22;
result.a23 = other.a23 + this.a23;
return result;
}
/*
public FastMatrix(Jama.Matrix m) {
if ((m.getRowDimension() != 3 && m.getRowDimension() != 4)
|| m.getColumnDimension() != 4)
throw new RuntimeException("Wrong dimensions: "
+ m.getRowDimension() + "x"
+ m.getColumnDimension());
a00 = (double)m.get(0,0);
a01 = (double)m.get(0,1);
a02 = (double)m.get(0,2);
a03 = (double)m.get(0,3);
a10 = (double)m.get(1,0);
a11 = (double)m.get(1,1);
a12 = (double)m.get(1,2);
a13 = (double)m.get(1,3);
a20 = (double)m.get(2,0);
a21 = (double)m.get(2,1);
a22 = (double)m.get(2,2);
a23 = (double)m.get(2,3);
}
*/
public void apply(double x, double y, double z) {
this.x = x * a00 + y * a01 + z * a02 + a03;
this.y = x * a10 + y * a11 + z * a12 + a13;
this.z = x * a20 + y * a21 + z * a22 + a23;
}
public void apply(Point3d p) {
this.x = (double)(p.x * a00 + p.y * a01 + p.z * a02 + a03);
this.y = (double)(p.x * a10 + p.y * a11 + p.z * a12 + a13);
this.z = (double)(p.x * a20 + p.y * a21 + p.z * a22 + a23);
}
public void apply(double[] p) {
this.x = (double)(p[0] * a00 + p[1] * a01 + p[2] * a02 + a03);
this.y = (double)(p[0] * a10 + p[1] * a11 + p[2] * a12 + a13);
this.z = (double)(p[0] * a20 + p[1] * a21 + p[2] * a22 + a23);
}
public void applyWithoutTranslation(double x, double y, double z) {
this.x = x * a00 + y * a01 + z * a02;
this.y = x * a10 + y * a11 + z * a12;
this.z = x * a20 + y * a21 + z * a22;
}
public void applyWithoutTranslation(Point3d p) {
this.x = (double)(p.x * a00 + p.y * a01 + p.z * a02);
this.y = (double)(p.x * a10 + p.y * a11 + p.z * a12);
this.z = (double)(p.x * a20 + p.y * a21 + p.z * a22);
}
public Point3d getResult() {
return new Point3d(x, y, z);
}
public FastMatrix scale(double x, double y, double z) {
FastMatrix result = new FastMatrix();
result.a00 = a00 * x;
result.a01 = a01 * x;
result.a02 = a02 * x;
result.a03 = a03 * x;
result.a10 = a10 * y;
result.a11 = a11 * y;
result.a12 = a12 * y;
result.a13 = a13 * y;
result.a20 = a20 * z;
result.a21 = a21 * z;
result.a22 = a22 * z;
result.a23 = a23 * z;
return result;
}
public FastMatrix times(FastMatrix o) {
FastMatrix result = new FastMatrix();
result.a00 = o.a00 * a00 + o.a10 * a01 + o.a20 * a02;
result.a10 = o.a00 * a10 + o.a10 * a11 + o.a20 * a12;
result.a20 = o.a00 * a20 + o.a10 * a21 + o.a20 * a22;
result.a01 = o.a01 * a00 + o.a11 * a01 + o.a21 * a02;
result.a11 = o.a01 * a10 + o.a11 * a11 + o.a21 * a12;
result.a21 = o.a01 * a20 + o.a11 * a21 + o.a21 * a22;
result.a02 = o.a02 * a00 + o.a12 * a01 + o.a22 * a02;
result.a12 = o.a02 * a10 + o.a12 * a11 + o.a22 * a12;
result.a22 = o.a02 * a20 + o.a12 * a21 + o.a22 * a22;
apply(o.a03, o.a13, o.a23);
result.a03 = x;
result.a13 = y;
result.a23 = z;
return result;
}
public double det( ) {
double sub00 = a11 * a22 - a12 * a21;
double sub01 = a10 * a22 - a12 * a20;
double sub02 = a10 * a21 - a11 * a20;
double sub10 = a01 * a22 - a02 * a21;
double sub11 = a00 * a22 - a02 * a20;
double sub12 = a00 * a21 - a01 * a20;
double sub20 = a01 * a12 - a02 * a11;
double sub21 = a00 * a12 - a02 * a10;
double sub22 = a00 * a11 - a01 * a10;
return a00 * sub00 - a01 * sub01 + a02 * sub02;
}
/* this inverts just the first 3 columns, interpreted as 3x3 matrix */
private FastMatrix invert3x3() {
double sub00 = a11 * a22 - a12 * a21;
double sub01 = a10 * a22 - a12 * a20;
double sub02 = a10 * a21 - a11 * a20;
double sub10 = a01 * a22 - a02 * a21;
double sub11 = a00 * a22 - a02 * a20;
double sub12 = a00 * a21 - a01 * a20;
double sub20 = a01 * a12 - a02 * a11;
double sub21 = a00 * a12 - a02 * a10;
double sub22 = a00 * a11 - a01 * a10;
double det = a00 * sub00 - a01 * sub01 + a02 * sub02;
FastMatrix result = new FastMatrix();
result.a00 = sub00 / det;
result.a01 = -sub10 / det;
result.a02 = sub20 / det;
result.a10 = -sub01 / det;
result.a11 = sub11 / det;
result.a12 = -sub21 / det;
result.a20 = sub02 / det;
result.a21 = -sub12 / det;
result.a22 = sub22 / det;
return result;
}
public FastMatrix inverse() {
FastMatrix result = invert3x3();
result.apply(-a03, -a13, -a23);
result.a03 = result.x;
result.a13 = result.y;
result.a23 = result.z;
return result;
}
public static FastMatrix rotate(double angle, int axis) {
FastMatrix result = new FastMatrix();
double c = (double)Math.cos(angle);
double s = (double)Math.sin(angle);
switch(axis) {
case 0:
result.a11 = result.a22 = c;
result.a12 = -(result.a21 = s);
result.a00 = (double)1.0;
break;
case 1:
result.a00 = result.a22 = c;
result.a02 = -(result.a20 = s);
result.a11 = (double)1.0;
break;
case 2:
result.a00 = result.a11 = c;
result.a01 = -(result.a10 = s);
result.a22 = (double)1.0;
break;
default:
throw new RuntimeException("Illegal axis: "+axis);
}
return result;
}
/*
* This rotates in a "geodesic" fashion: if you imagine an equator
* through a and b, the resulting rotation will have the poles as
* invariants.
*/
public static FastMatrix rotateFromTo(double aX, double aY, double aZ,
double bX, double bY, double bZ) {
double l = (double)Math.sqrt(aX * aX + aY * aY + aZ * aZ);
aX /= l; aY /= l; aZ /= l;
l = (double)Math.sqrt(bX * bX + bY * bY + bZ * bZ);
bX /= l; bY /= l; bZ /= l;
double cX, cY, cZ;
cX = aY * bZ - aZ * bY;
cY = aZ * bX - aX * bZ;
cZ = aX * bY - aY * bX;
double pX, pY, pZ;
pX = cY * aZ - cZ * aY;
pY = cZ * aX - cX * aZ;
pZ = cX * aY - cY * aX;
double qX, qY, qZ;
qX = cY * bZ - cZ * bY;
qY = cZ * bX - cX * bZ;
qZ = cX * bY - cY * bX;
FastMatrix result = new FastMatrix();
result.a00 = aX; result.a01 = aY; result.a02 = aZ;
result.a10 = cX; result.a11 = cY; result.a12 = cZ;
result.a20 = pX; result.a21 = pY; result.a22 = pZ;
FastMatrix transp = new FastMatrix();
transp.a00 = bX; transp.a01 = cX; transp.a02 = qX;
transp.a10 = bY; transp.a11 = cY; transp.a12 = qY;
transp.a20 = bZ; transp.a21 = cZ; transp.a22 = qZ;
return transp.times(result);
}
// ------------------------------------------------------------------------
// FIXME: This probably isn't the best place for these static functions...
public static double dotProduct(double[] a, double[] b) {
double result = 0;
if (a.length != b.length)
throw new IllegalArgumentException(
"In dotProduct, the vectors must be of the same length.");
if (a.length < 1)
throw new IllegalArgumentException(
"Can't dotProduct vectors of zero length.");
for (int i = 0; i < a.length; ++i)
result += (a[i] * b[i]);
return result;
}
public static double sizeSquared(double[] a) {
return dotProduct(a,a);
}
public static double size(double[] a) {
return (double)Math.sqrt(dotProduct(a,a));
}
public static double angleBetween(double[] v1, double[] v2) {
return (double)Math.acos(dotProduct(v1,v2)/(size(v1)*size(v2)));
}
public static double[] crossProduct(double[] a, double[] b) {
double[] result = { a[1] * b[2] - a[2] * b[1],
a[2] * b[0] - a[0] * b[2],
a[0] * b[1] - a[1] * b[0] };
return result;
}
public static double[] normalize( double[] a ) {
double magnitude = size(a);
double[] result = new double[a.length];
for (int i = 0; i < a.length; ++i)
result[i] = a[i]/magnitude;
return result;
}
/* Find a first rotation to map v2_domain to v2_template.
Then project the v1s down onto the plane defined by
v2_template, and find the rotation about v2_domain that
lines up the projected V1s. */
/* FIXME: with the PCA results, then v2 and v1 are always
going to be orthogonal anyway, so in fact the code that
projects onto the plane hasn't been sensibly tested (and is
useless for the moment.). */
public static FastMatrix rotateToAlignVectors(double[] v2_template,
double[] v1_template,
double[] v2_domain,
double[] v1_domain ) {
double angleBetween = angleBetween(v2_domain,
v2_template);
double[] normal = crossProduct(v2_domain,
v2_template);
double[] normalUnit = normalize(normal);
FastMatrix rotation = FastMatrix.rotateAround(normalUnit[0],
normalUnit[1],
normalUnit[2],
angleBetween);
/*
If v2 is the vector with the largest eigenvalue and v1 is
that with the second largest, then the projection of v1
onto the plane defined by v2 as a normal is:
v1 - ( (v1.v2) / |v2||v2| ) v2
*/
double scale_v2_domain = dotProduct(v1_domain,v2_domain)
/ sizeSquared(v2_domain);
double[] v1_orthogonal_domain = new double[3];
v1_orthogonal_domain[0] = v1_domain[0] - scale_v2_domain * v2_domain[0];
v1_orthogonal_domain[1] = v1_domain[1] - scale_v2_domain * v2_domain[1];
v1_orthogonal_domain[2] = v1_domain[2] - scale_v2_domain * v2_domain[2];
// Now for the template as well:
double scale_v2_template = dotProduct(v1_template,v2_template)
/ sizeSquared(v2_template);
double [] v1_orthogonal_template = new double[3];
v1_orthogonal_template[0] = v1_template[0] - scale_v2_template * v2_template[0];
v1_orthogonal_template[1] = v1_template[1] - scale_v2_template * v2_template[1];
v1_orthogonal_template[2] = v1_template[2] - scale_v2_template * v2_template[2];
// Now we should rotate the one in the domain by the same
// rotation as we applied to the most significant eigenvector...
rotation.apply(v1_orthogonal_domain[0],
v1_orthogonal_domain[1],
v1_orthogonal_domain[2]);
double[] v1_orthogonal_domain_rotated = new double[3];
v1_orthogonal_domain_rotated[0] = rotation.x;
v1_orthogonal_domain_rotated[1] = rotation.y;
v1_orthogonal_domain_rotated[2] = rotation.z;
// Now we need to find the rotation around v2 in the template
// that will line up the projected v1s...
double angleBetweenV1sA = angleBetween(v1_orthogonal_domain_rotated,
v1_orthogonal_template );
double[] normalToV1sA = crossProduct(v1_orthogonal_domain_rotated,
v1_orthogonal_template );
double[] normalToV1sAUnit = normalize(normalToV1sA);
FastMatrix secondRotationA = FastMatrix.rotateAround(normalToV1sAUnit[0],
normalToV1sAUnit[1],
normalToV1sAUnit[2],
angleBetweenV1sA);
return rotation.composeWith(secondRotationA);
}
public static FastMatrix rotateAround(double nx, double ny, double nz,
double angle) {
FastMatrix r = new FastMatrix();
double c = (double)Math.cos(angle), s = (double)Math.sin(angle);
r.a00 = -(c-1)*nx*nx + c;
r.a01 = -(c-1)*nx*ny - s*nz;
r.a02 = -(c-1)*nx*nz + s*ny;
r.a03 = 0;
r.a10 = -(c-1)*nx*ny + s*nz;
r.a11 = -(c-1)*ny*ny + c;
r.a12 = -(c-1)*ny*nz - s*nx;
r.a13 = 0;
r.a20 = -(c-1)*nx*nz - s*ny;
r.a21 = -(c-1)*ny*nz + s*nx;
r.a22 = -(c-1)*nz*nz + c;
r.a23 = 0;
return r;
}
/*
* Euler rotation means to rotate around the z axis first, then
* around the rotated x axis, and then around the (twice) rotated
* z axis.
*/
public static FastMatrix rotateEuler(double a1, double a2, double a3) {
FastMatrix r = new FastMatrix();
double c1 = (double)Math.cos(a1), s1 = (double)Math.sin(a1);
double c2 = (double)Math.cos(a2), s2 = (double)Math.sin(a2);
double c3 = (double)Math.cos(a3), s3 = (double)Math.sin(a3);
r.a00 = c3*c1-c2*s1*s3;
r.a01 = -s3*c1-c2*s1*c3;
r.a02 = s2*s1;
r.a03 = 0;
r.a10 = c3*s1+c2*c1*s3;
r.a11 = -s3*s1+c2*c1*c3;
r.a12 = -s2*c1;
r.a13 = 0;
r.a20 = s2*s3;
r.a21 = s2*c3;
r.a22 = c2;
r.a23 = 0;
return r;
}
/*
* same as rotateEuler, but with a center different from the origin
*/
public static FastMatrix rotateEulerAt(double a1, double a2, double a3,
double cx, double cy, double cz) {
FastMatrix r = new FastMatrix();
double c1 = (double)Math.cos(a1), s1 = (double)Math.sin(a1);
double c2 = (double)Math.cos(a2), s2 = (double)Math.sin(a2);
double c3 = (double)Math.cos(a3), s3 = (double)Math.sin(a3);
r.a00 = c3*c1-c2*s1*s3;
r.a01 = -s3*c1-c2*s1*c3;
r.a02 = s2*s1;
r.a03 = 0;
r.a10 = c3*s1+c2*c1*s3;
r.a11 = -s3*s1+c2*c1*c3;
r.a12 = -s2*c1;
r.a13 = 0;
r.a20 = s2*s3;
r.a21 = s2*c3;
r.a22 = c2;
r.a23 = 0;
r.apply(cx, cy, cz);
r.a03 = cx - r.x;
r.a13 = cy - r.y;
r.a23 = cz - r.z;
return r;
}
/*
* Calculate the parameters needed to generate this matrix by
* rotateEulerAt()
*/
public void guessEulerParameters(double[] parameters) {
if (parameters.length != 6)
throw new IllegalArgumentException(
"Need 6 parameters, got "
+ parameters.length);
guessEulerParameters(parameters, null);
}
public void guessEulerParameters(double[] parameters, Point3d center) {
if (center != null && parameters.length != 9)
throw new IllegalArgumentException(
"Need 9 parameters, got "
+ parameters.length);
if (a21 == 0.0 && a20 == 0.0) {
/*
* s2 == 0, therefore a2 == 0, therefore a1 and a3
* are not determined (they are both rotations around
* the z axis. Choose a3 = 0.
*/
parameters[2] = 0;
parameters[1] = 0;
parameters[0] = (double)Math.atan2(a10, a00);
} else {
parameters[2] = (double)Math.atan2(a20, a21);
parameters[1] = (double)Math.atan2(
Math.sqrt(a21 * a21 + a20 * a20), a22);
parameters[0] = (double)Math.atan2(a02, -a12);
}
/*
* If a center of rotation was given, the parameters will
* contain:
* (angleZ, angleX, angleZ2, transX, transY, transZ,
* centerX, centerY, centerZ) where trans is the translation
* _after_ the rotation around center.
*/
if (center != null) {
parameters[6] = (double)center.x;
parameters[7] = (double)center.y;
parameters[8] = (double)center.z;
apply(center);
parameters[3] = x - (double)center.x;
parameters[4] = y - (double)center.y;
parameters[5] = z - (double)center.z;
return;
}
/*
* The center (if none was specified) is ambiguous along
* the rotation axis.
* To find a center, we rotate the origin twice, and
* calculate the circumcenter of the resulting triangle.
* This also happens to be the point on the axis which
* is closest to the origin.
*/
if (a03 == 0.0 && a13 == 0.0 && a23 == 0.0) {
parameters[3] = parameters[4] = parameters[5] = 0;
} else {
apply(a03, a13, a23);
Triangle t = new Triangle(
new Point3d(0, 0, 0),
new Point3d(a03, a13, a23),
new Point3d(x, y, z));
t.calculateCircumcenter2();
parameters[3] = (double)t.center.x;
parameters[4] = (double)t.center.y;
parameters[5] = (double)t.center.z;
}
}
public static FastMatrix translate(double x, double y, double z) {
FastMatrix result = new FastMatrix();
result.a00 = result.a11 = result.a22 = (double)1.0;
result.a03 = x;
result.a13 = y;
result.a23 = z;
return result;
}
/*
* least squares fitting of a linear transformation which maps
* the points x[i] to y[i] as best as possible.
*/
public static FastMatrix bestLinear(Point3d[] x, Point3d[] y) {
if (x.length != y.length)
throw new RuntimeException("different lengths");
if (x.length != 4 )
throw new RuntimeException("The arrays passed to bestLinear must be of length 4");
double[][] a = new double[4][4];
double[][] b = new double[4][4];
for (int i = 0; i < a.length; i++) {
a[0][0] += (double)(x[i].x * x[i].x);
a[0][1] += (double)(x[i].x * x[i].y);
a[0][2] += (double)(x[i].x * x[i].z);
a[0][3] += (double)(x[i].x);
a[1][1] += (double)(x[i].y * x[i].y);
a[1][2] += (double)(x[i].y * x[i].z);
a[1][3] += (double)(x[i].y);
a[2][2] += (double)(x[i].z * x[i].z);
a[2][3] += (double)(x[i].z);
b[0][0] += (double)(x[i].x * y[i].x);
b[0][1] += (double)(x[i].y * y[i].x);
b[0][2] += (double)(x[i].z * y[i].x);
b[0][3] += (double)(y[i].x);
b[1][0] += (double)(x[i].x * y[i].y);
b[1][1] += (double)(x[i].y * y[i].y);
b[1][2] += (double)(x[i].z * y[i].y);
b[1][3] += (double)(y[i].y);
b[2][0] += (double)(x[i].x * y[i].z);
b[2][1] += (double)(x[i].y * y[i].z);
b[2][2] += (double)(x[i].z * y[i].z);
b[2][3] += (double)(y[i].z);
}
a[1][0] = a[0][1];
a[2][0] = a[0][2];
a[2][1] = a[1][2];
a[3][0] = a[0][3];
a[3][1] = a[1][3];
a[3][2] = a[2][3];
a[3][3] = 1;
FastMatrixN.invert(a);
double[][] r = FastMatrixN.times(b, a);
FastMatrix result = new FastMatrix();
result.a00 = r[0][0];
result.a01 = r[0][1];
result.a02 = r[0][2];
result.a03 = r[0][3];
result.a10 = r[1][0];
result.a11 = r[1][1];
result.a12 = r[1][2];
result.a13 = r[1][3];
result.a20 = r[2][0];
result.a21 = r[2][1];
result.a22 = r[2][2];
result.a23 = r[2][3];
return result;
}
/**
* Find the best rigid transformation from set1 to set2.
* This function uses the method by Horn, using quaternions:
* Closed-form solution of absolute orientation using unit quaternions,
* Horn, B. K. P., Journal of the Optical Society of America A,
* Vol. 4, page 629, April 1987
*/
public static FastMatrix bestRigid(Point3d[] set1, Point3d[] set2) {
return bestRigid(set1, set2, true);
}
public static FastMatrix bestRigid(Point3d[] set1, Point3d[] set2,
boolean allowScaling) {
if (set1.length != set2.length)
throw new RuntimeException("different lengths");
double c1x, c1y, c1z, c2x, c2y, c2z;
c1x = c1y = c1z = c2x = c2y = c2z = 0;
for (int i = 0; i < set1.length; i++) {
c1x += (double)set1[i].x;
c1y += (double)set1[i].y;
c1z += (double)set1[i].z;
c2x += (double)set2[i].x;
c2y += (double)set2[i].y;
c2z += (double)set2[i].z;
}
c1x /= set1.length;
c1y /= set1.length;
c1z /= set1.length;
c2x /= set1.length;
c2y /= set1.length;
c2z /= set1.length;
double s = 1;
if (allowScaling) {
double r1, r2;
r1 = r2 = 0;
for (int i = 0; i < set1.length; i++) {
double x1 = (double)set1[i].x - c1x;
double y1 = (double)set1[i].y - c1y;
double z1 = (double)set1[i].z - c1z;
double x2 = (double)set2[i].x - c2x;
double y2 = (double)set2[i].y - c2y;
double z2 = (double)set2[i].z - c2z;
r1 += x1 * x1 + y1 * y1 + z1 * z1;
r2 += x2 * x2 + y2 * y2 + z2 * z2;
}
s = (double)Math.sqrt(r2 / r1);
}
// calculate N
double Sxx, Sxy, Sxz, Syx, Syy, Syz, Szx, Szy, Szz;
Sxx = Sxy = Sxz = Syx = Syy = Syz = Szx = Szy = Szz = 0;
for (int i = 0; i < set1.length; i++) {
double x1 = ((double)set1[i].x - c1x) * s;
double y1 = ((double)set1[i].y - c1y) * s;
double z1 = ((double)set1[i].z - c1z) * s;
double x2 = (double)set2[i].x - c2x;
double y2 = (double)set2[i].y - c2y;
double z2 = (double)set2[i].z - c2z;
Sxx += x1 * x2;
Sxy += x1 * y2;
Sxz += x1 * z2;
Syx += y1 * x2;
Syy += y1 * y2;
Syz += y1 * z2;
Szx += z1 * x2;
Szy += z1 * y2;
Szz += z1 * z2;
}
double[][] N = new double[4][4];
N[0][0] = Sxx + Syy + Szz;
N[0][1] = Syz - Szy;
N[0][2] = Szx - Sxz;
N[0][3] = Sxy - Syx;
N[1][0] = Syz - Szy;
N[1][1] = Sxx - Syy - Szz;
N[1][2] = Sxy + Syx;
N[1][3] = Szx + Sxz;
N[2][0] = Szx - Sxz;
N[2][1] = Sxy + Syx;
N[2][2] = -Sxx + Syy - Szz;
N[2][3] = Syz + Szy;
N[3][0] = Sxy - Syx;
N[3][1] = Szx + Sxz;
N[3][2] = Syz + Szy;
N[3][3] = -Sxx - Syy + Szz;
// calculate eigenvector with maximal eigenvalue
JacobiDouble jacobi = new JacobiDouble(N);
double[][] eigenvectors = jacobi.getEigenVectors();
double[] eigenvalues = jacobi.getEigenValues();
int index = 0;
for (int i = 1; i < 4; i++)
if (eigenvalues[i] > eigenvalues[index])
index = i;
double[] q = eigenvectors[index];
double q0 = q[0], qx = q[1], qy = q[2], qz = q[3];
// turn into matrix
FastMatrix result = new FastMatrix();
// rotational part
result.a00 = s * (q0 * q0 + qx * qx - qy * qy - qz * qz);
result.a01 = s * 2 * (qx * qy - q0 * qz);
result.a02 = s * 2 * (qx * qz + q0 * qy);
result.a10 = s * 2 * (qy * qx + q0 * qz);
result.a11 = s * (q0 * q0 - qx * qx + qy * qy - qz * qz);
result.a12 = s * 2 * (qy * qz - q0 * qx);
result.a20 = s * 2 * (qz * qx - q0 * qy);
result.a21 = s * 2 * (qz * qy + q0 * qx);
result.a22 = s * (q0 * q0 - qx * qx - qy * qy + qz * qz);
// translational part
result.apply(c1x, c1y, c1z);
result.a03 = c2x - result.x;
result.a13 = c2y - result.y;
result.a23 = c2z - result.z;
return result;
}
public static FastMatrix average(FastMatrix[] array) {
FastMatrix result = new FastMatrix();
int n = 0;
for (int i = 0; i < array.length; i++)
if (array[i] != null) {
n++;
result.a00 += array[i].a00;
result.a01 += array[i].a01;
result.a02 += array[i].a02;
result.a03 += array[i].a03;
result.a10 += array[i].a10;
result.a11 += array[i].a11;
result.a12 += array[i].a12;
result.a13 += array[i].a13;
result.a20 += array[i].a20;
result.a21 += array[i].a21;
result.a22 += array[i].a22;
result.a23 += array[i].a23;
}
if (n > 0) {
result.a00 /= (double)n;
result.a01 /= (double)n;
result.a02 /= (double)n;
result.a03 /= (double)n;
result.a10 /= (double)n;
result.a11 /= (double)n;
result.a12 /= (double)n;
result.a13 /= (double)n;
result.a20 /= (double)n;
result.a21 /= (double)n;
result.a22 /= (double)n;
result.a23 /= (double)n;
}
return result;
}
public double[] rowwise16() {
return new double[] {
a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23,
0, 0, 0, 1};
}
/*
* parses both uniform 4x4 matrices (column by column), and
* 3x4 matrices (row by row).
*/
public static FastMatrix parseMatrix(String m) {
FastMatrix matrix = new FastMatrix();
StringTokenizer tokenizer = new StringTokenizer(m);
try {
/*
* Amira notates a uniform matrix in 4x4 notation,
* column by column.
* Common notation is to notate 3x4 notation, row by
* row, since the last row does not bear any
* information (but is always "0 0 0 1").
*/
boolean is4x4Columns = true;
matrix.a00 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a10 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a20 = (double)Double.parseDouble(tokenizer.nextToken());
double dummy = (double)Double.parseDouble(tokenizer.nextToken());
if (dummy != 0.0) {
is4x4Columns = false;
matrix.a03 = dummy;
}
matrix.a01 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a11 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a21 = (double)Double.parseDouble(tokenizer.nextToken());
dummy = (double)Double.parseDouble(tokenizer.nextToken());
if (is4x4Columns && dummy != 0.0)
is4x4Columns = false;
if (!is4x4Columns)
matrix.a13 = dummy;
matrix.a02 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a12 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a22 = (double)Double.parseDouble(tokenizer.nextToken());
dummy = (double)Double.parseDouble(tokenizer.nextToken());
if (is4x4Columns && dummy != 0.0)
is4x4Columns = false;
if (!is4x4Columns)
matrix.a23 = dummy;
if (is4x4Columns) {
if (!tokenizer.hasMoreTokens())
is4x4Columns = false;
} else if (tokenizer.hasMoreTokens())
throw new RuntimeException("Not a uniform matrix: "+m);
if (is4x4Columns) {
matrix.a03 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a13 = (double)Double.parseDouble(tokenizer.nextToken());
matrix.a23 = (double)Double.parseDouble(tokenizer.nextToken());
if (Double.parseDouble(tokenizer.nextToken()) != 1.0)
throw new RuntimeException("Not a uniform matrix: "+m);
} else {
// swap rotation part
dummy = matrix.a01; matrix.a01 = matrix.a10; matrix.a10 = dummy;
dummy = matrix.a02; matrix.a02 = matrix.a20; matrix.a20 = dummy;
dummy = matrix.a12; matrix.a12 = matrix.a21; matrix.a21 = dummy;
}
} catch(Exception e) {
throw new RuntimeException(e);
}
return matrix;
}
public static FastMatrix[] parseMatrices(String m) {
Vector vector = new Vector();
StringTokenizer tokenizer = new StringTokenizer(m, ",");
while (tokenizer.hasMoreTokens()) {
String matrix = tokenizer.nextToken().trim();
if (matrix.equals(""))
vector.add(null);
else
vector.add(parseMatrix(matrix));
}
FastMatrix[] result = new FastMatrix[vector.size()];
for (int i = 0; i < result.length; i++)
result[i] = (FastMatrix)vector.get(i);
return result;
}
public static FastMatrix fromCalibration(ImagePlus image) {
Calibration calib = image.getCalibration();
FastMatrix result = new FastMatrix();
result.a00 = (double)Math.abs(calib.pixelWidth);
result.a11 = (double)Math.abs(calib.pixelHeight);
result.a22 = (double)Math.abs(calib.pixelDepth);
result.a03 = (double)calib.xOrigin;
result.a13 = (double)calib.yOrigin;
result.a23 = (double)calib.zOrigin;
return result;
}
//
public static FastMatrix translateToCenter(ImagePlus image) {
Calibration calib = image.getCalibration();
FastMatrix result = new FastMatrix();
result.a00 = (double)1;
result.a11 = (double)1;
result.a22 = (double)1;
result.a03 = (double)(calib.xOrigin + calib.pixelWidth * image.getWidth() / 2.0);
result.a13 = (double)(calib.yOrigin + calib.pixelHeight * image.getHeight() / 2.0);
result.a23 = (double)(calib.yOrigin + calib.pixelDepth * image.getStack().getSize() / 2.0);
return result;
}
final public boolean isIdentity() {
return isIdentity((double)1e-10);
}
final public boolean equals( FastMatrix other ) {
double eps = (double)1e-10;
return eps > (double)Math.abs( a00 - other.a00 ) &&
eps > (double)Math.abs( a01 - other.a01 ) &&
eps > (double)Math.abs( a02 - other.a02 ) &&
eps > (double)Math.abs( a03 - other.a03 ) &&
eps > (double)Math.abs( a10 - other.a10 ) &&
eps > (double)Math.abs( a11 - other.a11 ) &&
eps > (double)Math.abs( a12 - other.a12 ) &&
eps > (double)Math.abs( a13 - other.a13 ) &&
eps > (double)Math.abs( a20 - other.a20 ) &&
eps > (double)Math.abs( a21 - other.a21 ) &&
eps > (double)Math.abs( a22 - other.a22 ) &&
eps > (double)Math.abs( a23 - other.a23 );
}
final public boolean isIdentity(double eps) {
return eps > (double)Math.abs(a00 - 1) &&
eps > (double)Math.abs(a11 - 1) &&
eps > (double)Math.abs(a22 - 1) &&
eps > (double)Math.abs(a01) &&
eps > (double)Math.abs(a02) &&
eps > (double)Math.abs(a03) &&
eps > (double)Math.abs(a10) &&
eps > (double)Math.abs(a12) &&
eps > (double)Math.abs(a13) &&
eps > (double)Math.abs(a20) &&
eps > (double)Math.abs(a21) &&
eps > (double)Math.abs(a23);
}
public void copyToFlatDoubleArray( double [] result ) {
result[0] = a00;
result[1] = a01;
result[2] = a02;
result[3] = a03;
result[4] = a10;
result[5] = a11;
result[6] = a12;
result[7] = a13;
result[8] = a20;
result[9] = a21;
result[10] = a22;
result[11] = a23;
}
/**
 * Loads the 12 matrix entries from result[0..11] in row-major order
 * (a00..a03, a10..a13, a20..a23) — the inverse of copyToFlatDoubleArray.
 */
public void setFromFlatDoubleArray( double [] result ) {
    int i = 0;
    a00 = result[i++]; a01 = result[i++]; a02 = result[i++]; a03 = result[i++];
    a10 = result[i++]; a11 = result[i++]; a12 = result[i++]; a13 = result[i++];
    a20 = result[i++]; a21 = result[i++]; a22 = result[i++]; a23 = result[i++];
}
/**
 * Formats the last apply() result (the x, y, z fields) as
 * three space-separated values.
 */
public String resultToString() {
    StringBuilder sb = new StringBuilder();
    sb.append(x).append(' ').append(y).append(' ').append(z);
    return sb.toString();
}
/**
 * Renders the matrix as three comma-separated rows, one per line,
 * each prefixed with the given indent string.
 */
public String toStringIndented( String indent ) {
    double[][] rows = {
        { a00, a01, a02, a03 },
        { a10, a11, a12, a13 },
        { a20, a21, a22, a23 }
    };
    StringBuilder sb = new StringBuilder();
    for (double[] row : rows) {
        sb.append(indent);
        for (int c = 0; c < row.length; c++) {
            if (c > 0)
                sb.append(", ");
            sb.append(row[c]);
        }
        sb.append('\n');
    }
    return sb.toString();
}
/**
 * Renders all 12 entries in row-major order, each followed by a
 * single space (including a trailing space after the last entry).
 */
public String toString() {
    double[] entries = {
        a00, a01, a02, a03,
        a10, a11, a12, a13,
        a20, a21, a22, a23
    };
    StringBuilder sb = new StringBuilder();
    for (double e : entries)
        sb.append(e).append(' ');
    return sb.toString();
}
/**
 * Renders the matrix in Amira's 4x4 column-major layout: each of the four
 * columns is emitted as its three entries followed by the homogeneous row
 * value (0 for the rotation/scale columns, 1 terminating the translation
 * column, with no trailing space).
 */
public String toStringForAmira() {
    double[][] columns = {
        { a00, a10, a20 },
        { a01, a11, a21 },
        { a02, a12, a22 },
        { a03, a13, a23 }
    };
    StringBuilder sb = new StringBuilder();
    for (int c = 0; c < columns.length; c++) {
        for (double e : columns[c])
            sb.append(e).append(' ');
        sb.append(c == columns.length - 1 ? "1" : "0 ");
    }
    return sb.toString();
}
/**
 * Ad-hoc smoke test: builds the rotation mapping (1,0,0) onto (0,1,0)
 * and prints the images of the three unit axes next to the expected
 * results on stderr for visual comparison.
 */
public static void main(String[] args) {
    FastMatrix rotation = rotateFromTo(1, 0, 0, 0, 1, 0);
    double[][] probes = { { 0, 0, 1 }, { 1, 0, 0 }, { 0, 1, 0 } };
    String[] expected = { "0 0 1", "0 1 0", "-1 0 0" };
    for (int i = 0; i < probes.length; i++) {
        rotation.apply(probes[i][0], probes[i][1], probes[i][2]);
        System.err.println("expect " + expected[i] + ": " +
                   rotation.x + " " + rotation.y + " " + rotation.z);
    }
}
}
| |
package org.cagrid.dorian.policy;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
import org.jvnet.jaxb2_commons.lang.Equals;
import org.jvnet.jaxb2_commons.lang.EqualsStrategy;
import org.jvnet.jaxb2_commons.lang.HashCode;
import org.jvnet.jaxb2_commons.lang.HashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBHashCodeStrategy;
import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy;
import org.jvnet.jaxb2_commons.lang.ToString;
import org.jvnet.jaxb2_commons.lang.ToStringStrategy;
import org.jvnet.jaxb2_commons.locator.ObjectLocator;
import org.jvnet.jaxb2_commons.locator.util.LocatorUtils;
/**
* <p>Java class for PasswordLockoutPolicy complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="PasswordLockoutPolicy">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="Hours" use="required" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="Minutes" use="required" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="Seconds" use="required" type="{http://www.w3.org/2001/XMLSchema}int" />
* <attribute name="ConsecutiveInvalidLogins" use="required" type="{http://www.w3.org/2001/XMLSchema}int" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "PasswordLockoutPolicy")
public class PasswordLockoutPolicy
    implements Serializable, Equals, HashCode, ToString
{

    // Lockout duration components (hours/minutes/seconds) and the number of
    // consecutive failed logins that triggers the lockout, as declared by the
    // XML schema attributes above.
    @XmlAttribute(name = "Hours", namespace = "http://cagrid.nci.nih.gov/1/dorian-policy", required = true)
    protected int hours;
    @XmlAttribute(name = "Minutes", namespace = "http://cagrid.nci.nih.gov/1/dorian-policy", required = true)
    protected int minutes;
    @XmlAttribute(name = "Seconds", namespace = "http://cagrid.nci.nih.gov/1/dorian-policy", required = true)
    protected int seconds;
    @XmlAttribute(name = "ConsecutiveInvalidLogins", namespace = "http://cagrid.nci.nih.gov/1/dorian-policy", required = true)
    protected int consecutiveInvalidLogins;

    /**
     * Gets the value of the hours property.
     */
    public int getHours() {
        return hours;
    }

    /**
     * Sets the value of the hours property.
     */
    public void setHours(int value) {
        this.hours = value;
    }

    /**
     * Gets the value of the minutes property.
     */
    public int getMinutes() {
        return minutes;
    }

    /**
     * Sets the value of the minutes property.
     */
    public void setMinutes(int value) {
        this.minutes = value;
    }

    /**
     * Gets the value of the seconds property.
     */
    public int getSeconds() {
        return seconds;
    }

    /**
     * Sets the value of the seconds property.
     */
    public void setSeconds(int value) {
        this.seconds = value;
    }

    /**
     * Gets the value of the consecutiveInvalidLogins property.
     */
    public int getConsecutiveInvalidLogins() {
        return consecutiveInvalidLogins;
    }

    /**
     * Sets the value of the consecutiveInvalidLogins property.
     */
    public void setConsecutiveInvalidLogins(int value) {
        this.consecutiveInvalidLogins = value;
    }

    public String toString() {
        final ToStringStrategy strategy = JAXBToStringStrategy.INSTANCE;
        final StringBuilder buffer = new StringBuilder();
        append(null, buffer, strategy);
        return buffer.toString();
    }

    public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
        strategy.appendStart(locator, this, buffer);
        appendFields(locator, buffer, strategy);
        strategy.appendEnd(locator, this, buffer);
        return buffer;
    }

    public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy strategy) {
        // The generated `(true ? getX() : 0)` dead ternaries were removed;
        // the getters are called directly with identical behavior.
        strategy.appendField(locator, this, "hours", buffer, this.getHours());
        strategy.appendField(locator, this, "minutes", buffer, this.getMinutes());
        strategy.appendField(locator, this, "seconds", buffer, this.getSeconds());
        strategy.appendField(locator, this, "consecutiveInvalidLogins", buffer, this.getConsecutiveInvalidLogins());
        return buffer;
    }

    public int hashCode(ObjectLocator locator, HashCodeStrategy strategy) {
        int currentHashCode = 1;
        final int theHours = this.getHours();
        currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "hours", theHours), currentHashCode, theHours);
        final int theMinutes = this.getMinutes();
        currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "minutes", theMinutes), currentHashCode, theMinutes);
        final int theSeconds = this.getSeconds();
        currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "seconds", theSeconds), currentHashCode, theSeconds);
        final int theConsecutiveInvalidLogins = this.getConsecutiveInvalidLogins();
        currentHashCode = strategy.hashCode(LocatorUtils.property(locator, "consecutiveInvalidLogins", theConsecutiveInvalidLogins), currentHashCode, theConsecutiveInvalidLogins);
        return currentHashCode;
    }

    public int hashCode() {
        final HashCodeStrategy strategy = JAXBHashCodeStrategy.INSTANCE;
        return this.hashCode(null, strategy);
    }

    public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy strategy) {
        if (!(object instanceof PasswordLockoutPolicy)) {
            return false;
        }
        if (this == object) {
            return true;
        }
        final PasswordLockoutPolicy that = ((PasswordLockoutPolicy) object);
        {
            final int lhsHours = this.getHours();
            final int rhsHours = that.getHours();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "hours", lhsHours), LocatorUtils.property(thatLocator, "hours", rhsHours), lhsHours, rhsHours)) {
                return false;
            }
        }
        {
            final int lhsMinutes = this.getMinutes();
            final int rhsMinutes = that.getMinutes();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "minutes", lhsMinutes), LocatorUtils.property(thatLocator, "minutes", rhsMinutes), lhsMinutes, rhsMinutes)) {
                return false;
            }
        }
        {
            final int lhsSeconds = this.getSeconds();
            final int rhsSeconds = that.getSeconds();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "seconds", lhsSeconds), LocatorUtils.property(thatLocator, "seconds", rhsSeconds), lhsSeconds, rhsSeconds)) {
                return false;
            }
        }
        {
            final int lhsConsecutiveInvalidLogins = this.getConsecutiveInvalidLogins();
            final int rhsConsecutiveInvalidLogins = that.getConsecutiveInvalidLogins();
            if (!strategy.equals(LocatorUtils.property(thisLocator, "consecutiveInvalidLogins", lhsConsecutiveInvalidLogins), LocatorUtils.property(thatLocator, "consecutiveInvalidLogins", rhsConsecutiveInvalidLogins), lhsConsecutiveInvalidLogins, rhsConsecutiveInvalidLogins)) {
                return false;
            }
        }
        return true;
    }

    public boolean equals(Object object) {
        final EqualsStrategy strategy = JAXBEqualsStrategy.INSTANCE;
        return equals(null, null, object, strategy);
    }

}
| |
/*
* Copyright 2013, TopicQuests
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.topicquests.topicmap.json.model;
import java.util.*;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
//import org.json.simple.JSONObject;
//import org.json.simple.parser.JSONParser;
import org.topicquests.common.ResultPojo;
import org.topicquests.common.api.IResult;
import org.topicquests.common.api.ITopicQuestsOntology;
import org.topicquests.model.api.ITicket;
import org.topicquests.model.api.node.INode;
import org.topicquests.model.api.node.ITuple;
import org.topicquests.model.api.query.ITupleQuery;
import org.topicquests.persist.json.api.IJSONDocStoreModel;
import org.topicquests.topicmap.json.model.api.IJSONTopicDataProvider;
import org.topicquests.topicmap.json.model.api.IJSONTopicMapOntology;
import org.topicquests.util.LoggingPlatform;
import org.topicquests.model.Node;
import net.minidev.json.JSONObject;
import net.minidev.json.parser.JSONParser;
/**
* @author park
*
*/
public class TupleQuery implements ITupleQuery {
    private LoggingPlatform log = LoggingPlatform.getLiveInstance();
    private IJSONTopicDataProvider database;
    private IJSONDocStoreModel jsonModel;
    private CredentialUtility credentialUtil;
    // Index/type names; defined in jsonblobstore-props.xml
    private final String
        TOPIC_INDEX = IJSONTopicMapOntology.TOPIC_INDEX,
        CORE_TYPE = IJSONTopicMapOntology.CORE_TYPE;

    /**
     * @param d provider used to resolve node locators into INode instances
     * @param j document-store model used to run Elasticsearch queries
     */
    public TupleQuery(IJSONTopicDataProvider d, IJSONDocStoreModel j) {
        database = d;
        jsonModel = j;
        credentialUtil = new CredentialUtility(database, jsonModel);
    }

    /** Builds a bool query requiring both property/value term matches. */
    private BoolQueryBuilder mustMatch(String prop1, String val1, String prop2, String val2) {
        BoolQueryBuilder qba = QueryBuilders.boolQuery();
        qba.must(QueryBuilders.termQuery(prop1, val1));
        qba.must(QueryBuilders.termQuery(prop2, val2));
        return qba;
    }

    /**
     * Converts each JSON document that passes the credential check into an
     * INode and appends it to nl; any parse failure is logged and recorded
     * on result (best-effort, matching the original error handling).
     */
    private void nodesFromDocs(List<String> docs, List<INode> nl, ITicket credentials, IResult result) {
        try {
            for (String json : docs) {
                JSONObject jo = jsonToJSON(json);
                if (credentialUtil.checkCredentials(jo, credentials))
                    nl.add(new Node(jo));
            }
        } catch (Exception e) {
            log.logError(e.getMessage(), e);
            result.addErrorString(e.getMessage());
        }
    }

    /**
     * Same as nodesFromDocs but collects the documents as ITuple instances.
     */
    private void tuplesFromDocs(List<String> docs, List<ITuple> tups, ITicket credentials, IResult result) {
        try {
            for (String json : docs) {
                JSONObject jo = jsonToJSON(json);
                if (credentialUtil.checkCredentials(jo, credentials))
                    tups.add((ITuple) new Node(jo));
            }
        } catch (Exception e) {
            log.logError(e.getMessage(), e);
            result.addErrorString(e.getMessage());
        }
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listObjectNodesByRelationAndObjectRole(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listObjectNodesByRelationAndObjectRole(String relationLocator, String objectRoleLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_OBJECT_ROLE_PROPERTY, objectRoleLocator);
        log.logDebug("TupleQuery.listObjectNodesByRelationAndObjectRole- "+qba.toString());
        // NOTE(review): plucks TUPLE_SUBJECT_PROPERTY although the method name
        // says "object nodes" — confirm intent before changing.
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listObjectNodesByRelationAndSubjectRole(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listObjectNodesByRelationAndSubjectRole(String relationLocator, String subjectRoleLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_SUBJECT_ROLE_PROPERTY, subjectRoleLocator);
        log.logDebug("TupleQuery.listObjectNodesByRelationAndSubjectRole- "+qba.toString());
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listObjectNodesBySubjectAndRelation(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listObjectNodesBySubjectAndRelation(String subjectLocator, String relationLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, subjectLocator);
        log.logDebug("TupleQuery.listObjectNodesBySubjectAndRelation- "+qba.toString());
        // NOTE(review): plucks TUPLE_SUBJECT_PROPERTY while the ...AndScope
        // variant below plucks TUPLE_OBJECT_PROPERTY — likely inconsistent;
        // left unchanged to preserve existing behavior.
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listObjectNodesBySubjectAndRelationAndScope(java.lang.String, java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listObjectNodesBySubjectAndRelationAndScope(String subjectLocator, String relationLocator, String scopeLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, subjectLocator);
        qba.must(QueryBuilders.termQuery(ITopicQuestsOntology.SCOPE_LIST_PROPERTY_TYPE, scopeLocator));
        log.logDebug("TupleQuery.listObjectNodesBySubjectAndRelationAndScope- "+qba.toString());
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_OBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listSubjectNodesByObjectAndRelation(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listSubjectNodesByObjectAndRelation(String objectLocator, String relationLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_OBJECT_PROPERTY, objectLocator);
        log.logDebug("TupleQuery.listSubjectNodesByObjectAndRelation- "+qba.toString());
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listSubjectNodesByObjectAndRelationAndScope(java.lang.String, java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listSubjectNodesByObjectAndRelationAndScope(String objectLocator, String relationLocator, String scopeLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_OBJECT_PROPERTY, objectLocator);
        qba.must(QueryBuilders.termQuery(ITopicQuestsOntology.SCOPE_LIST_PROPERTY_TYPE, scopeLocator));
        log.logDebug("TupleQuery.listSubjectNodesByObjectAndRelationAndScope- "+qba.toString());
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listSubjectNodesByRelationAndObjectRole(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listSubjectNodesByRelationAndObjectRole(String relationLocator, String objectRoleLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_OBJECT_ROLE_PROPERTY, objectRoleLocator);
        log.logDebug("TupleQuery.listSubjectNodesByRelationAndObjectRole- "+qba.toString());
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listSubjectNodesByRelationAndSubjectRole(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listSubjectNodesByRelationAndSubjectRole(String relationLocator, String subjectRoleLocator, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, relationLocator,
                ITopicQuestsOntology.TUPLE_SUBJECT_ROLE_PROPERTY, subjectRoleLocator);
        log.logDebug("TupleQuery.listSubjectNodesByRelationAndSubjectRole- "+qba.toString());
        return this.pluckNodes(qba, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, start, credentials, count);
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listTuplesByObjectLocator(java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listTuplesByObjectLocator(String objectLocator, int start, int count,
            ITicket credentials) {
        IResult result = jsonModel.listDocumentsByProperty(TOPIC_INDEX, ITopicQuestsOntology.TUPLE_OBJECT_PROPERTY,
                objectLocator, start, count, CORE_TYPE);
        if (result.getResultObject() != null) {
            List<String> docs = (List<String>) result.getResultObject();
            List<INode> nl = new ArrayList<INode>();
            result.setResultObject(nl);
            nodesFromDocs(docs, nl, credentials, result);
        }
        return result;
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listTuplesByPredTypeAndObject(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listTuplesByPredTypeAndObject(String predType, String obj, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, predType,
                ITopicQuestsOntology.TUPLE_OBJECT_PROPERTY, obj);
        log.logDebug("TupleQuery.listTuplesByPredTypeAndObject- "+qba.toString());
        // NOTE(review): start/count are ignored here; the query runs with (0, -1).
        IResult result = jsonModel.runQuery(TOPIC_INDEX, qba, 0, -1, CORE_TYPE);
        if (result.getResultObject() != null) {
            // BUGFIX: capture the documents BEFORE clearing the result object.
            // The original called setResultObject(null) first and then read the
            // (now null) value back, so this method could never return tuples.
            List<String> docs = (List<String>) result.getResultObject();
            result.setResultObject(null);
            if (docs != null && !docs.isEmpty()) {
                List<ITuple> tups = new ArrayList<ITuple>();
                result.setResultObject(tups);
                tuplesFromDocs(docs, tups, credentials, result);
            }
        }
        return result;
    }

    /**
     * Pluck referenced nodes from the tuples matching qb: for each tuple
     * document, reads the locator stored under <code>type</code> and resolves
     * it through the data provider.
     * @param qb the tuple query
     * @param type subject or object property key
     * @param start TODO
     * @param credentials
     * @param count TODO
     * @return IResult whose object is <code>null</code> or <code>List<INode></code>
     */
    private IResult pluckNodes(QueryBuilder qb, String type, int start, ITicket credentials, int count) {
        // NOTE(review): start/count parameters are ignored; the query always
        // runs with (0, -1) — confirm whether paging was intended.
        IResult result = jsonModel.runQuery(TOPIC_INDEX, qb, 0, -1, CORE_TYPE);
        if (result.getResultObject() != null) {
            // BUGFIX: read the documents before clearing; the original cleared
            // first and then read back null, making the whole method a no-op.
            List<String> tupleDocs = (List<String>) result.getResultObject();
            result.setResultObject(null);
            try {
                if (tupleDocs != null && !tupleDocs.isEmpty()) {
                    List<INode> nl = new ArrayList<INode>();
                    result.setResultObject(nl);
                    for (String json : tupleDocs) {
                        JSONObject jo = jsonToJSON(json);
                        IResult r = database.getNode((String) jo.get(type), credentials);
                        if (r.hasError())
                            result.addErrorString(r.getErrorString());
                        if (r.getResultObject() != null)
                            nl.add((INode) r.getResultObject());
                    }
                }
            } catch (Exception e) {
                log.logError(e.getMessage(), e);
                result.addErrorString(e.getMessage());
            }
        }
        return result;
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listTuplesBySubject(java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listTuplesBySubject(String subjectLocator, int start, int count,
            ITicket credentials) {
        IResult result = jsonModel.listDocumentsByProperty(TOPIC_INDEX, ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY,
                subjectLocator, start, count, CORE_TYPE);
        if (result.getResultObject() != null) {
            List<String> docs = (List<String>) result.getResultObject();
            List<INode> nl = new ArrayList<INode>();
            result.setResultObject(nl);
            nodesFromDocs(docs, nl, credentials, result);
        }
        return result;
    }

    /* (non-Javadoc)
     * @see org.topicquests.model.api.ITupleQuery#listTuplesBySubjectAndPredType(java.lang.String, java.lang.String, int, int, java.util.Set)
     */
    @Override
    public IResult listTuplesBySubjectAndPredType(String subjectLocator, String predType, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = mustMatch(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, predType,
                ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, subjectLocator);
        log.logDebug("TupleQuery.listTuplesBySubjectAndPredType- "+qba.toString());
        // NOTE(review): start/count are ignored here; the query runs with (0, -1).
        IResult result = jsonModel.runQuery(TOPIC_INDEX, qba, 0, -1, CORE_TYPE);
        if (result.getResultObject() != null) {
            // BUGFIX: capture the documents BEFORE clearing the result object
            // (the original read getResultObject() after setResultObject(null)).
            List<String> docs = (List<String>) result.getResultObject();
            result.setResultObject(null);
            if (docs != null && !docs.isEmpty()) {
                List<ITuple> tups = new ArrayList<ITuple>();
                result.setResultObject(tups);
                tuplesFromDocs(docs, tups, credentials, result);
            }
        }
        return result;
    }

    /** Parses a JSON string into a JSONObject (json-smart, SIMPLE mode). */
    private JSONObject jsonToJSON(String json) throws Exception {
        return (JSONObject) new JSONParser(JSONParser.MODE_JSON_SIMPLE).parse(json);
    }

    @Override
    public IResult listTuplesByLabel(String [] labels, int start, int count, ITicket credentials) {
        IResult result = new ResultPojo();
        Set<String> subresult = new HashSet<String>();
        for (String lax : labels) {
            BoolQueryBuilder qba = QueryBuilders.boolQuery();
            qba.must(QueryBuilders.termQuery(ITopicQuestsOntology.LABEL_PROPERTY, lax));
            // Wildcard on the subject property restricts hits to tuple documents.
            qba.must(QueryBuilders.wildcardQuery(ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, "*"));
            log.logDebug("TupleQuery.listTuplesByLabel- "+qba.toString());
            IResult r = jsonModel.runQuery(TOPIC_INDEX, qba, 0, -1, CORE_TYPE);
            if (r.hasError())
                result.addErrorString(r.getErrorString());
            if (r.getResultObject() != null)
                subresult.addAll((List<String>) r.getResultObject());
        }
        // Result object stays null when nothing matched, as before.
        if (!subresult.isEmpty()) {
            result.setResultObject(new ArrayList<String>(subresult));
        }
        return result;
    }

    @Override
    public IResult listTuplesByPredTypeAndObjectOrSubject(String predType,
            String obj, int start, int count, ITicket credentials) {
        BoolQueryBuilder qba = QueryBuilders.boolQuery();
        qba.must(QueryBuilders.termQuery(ITopicQuestsOntology.INSTANCE_OF_PROPERTY_TYPE, predType));
        // "should" clauses: obj may appear as either the tuple's object or subject.
        qba.should(QueryBuilders.termQuery(ITopicQuestsOntology.TUPLE_OBJECT_PROPERTY, obj));
        qba.should(QueryBuilders.termQuery(ITopicQuestsOntology.TUPLE_SUBJECT_PROPERTY, obj));
        log.logDebug("TupleQuery.listTuplesByPredTypeAndObjectOrSubject- "+qba.toString());
        IResult result = jsonModel.runQuery(IJSONTopicMapOntology.TOPIC_INDEX, qba, start, count, IJSONTopicMapOntology.CORE_TYPE);
        // Was System.out.println("AAA "+...) — routed through the platform logger.
        log.logDebug("TupleQuery.listTuplesByPredTypeAndObjectOrSubject+ "+result.getResultObject());
        if (result.getResultObject() != null) {
            List<String> docs = (List<String>) result.getResultObject();
            result.setResultObject(null);
            if (docs != null && !docs.isEmpty()) {
                List<JSONObject> tups = new ArrayList<JSONObject>();
                result.setResultObject(tups);
                try {
                    for (String json : docs) {
                        JSONObject jo = jsonToJSON(json);
                        if (credentialUtil.checkCredentials(jo, credentials))
                            tups.add(jo);
                    }
                } catch (Exception e) {
                    log.logError(e.getMessage(), e);
                    result.addErrorString(e.getMessage());
                }
            }
        }
        return result;
    }

    @Override
    public IResult getTupleBySignature(String signature, ITicket credentials) {
        log.logDebug("TupleQuery.getTupleBySignature- "+signature);
        IResult result = jsonModel.getDocumentByProperty(IJSONTopicMapOntology.TOPIC_INDEX, ITopicQuestsOntology.TUPLE_SIGNATURE_PROPERTY, signature, IJSONTopicMapOntology.CORE_TYPE);
        if (result.getResultObject() != null) {
            List<String> l = (List<String>) result.getResultObject();
            result.setResultObject(null);
            if (!l.isEmpty()) {
                // Signatures are expected to be unique; only the first hit is used.
                try {
                    JSONObject jo = jsonToJSON(l.get(0));
                    INode n = new Node(jo);
                    result.setResultObject(n);
                } catch (Exception e) {
                    log.logError(e.getMessage(), e);
                    result.addErrorString(e.getMessage());
                }
            }
        }
        return result;
    }
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.notifications.impl.formatters;
import com.evolveum.midpoint.notifications.api.events.SimpleObjectRef;
import com.evolveum.midpoint.notifications.impl.NotificationFunctionsImpl;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.prism.delta.ItemDelta;
import com.evolveum.midpoint.prism.delta.ObjectDelta;
import com.evolveum.midpoint.prism.path.IdItemPathSegment;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.path.ItemPathSegment;
import com.evolveum.midpoint.prism.path.NameItemPathSegment;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.xml.XmlTypeConverter;
import com.evolveum.midpoint.repo.api.RepositoryService;
import com.evolveum.midpoint.schema.GetOperationOptions;
import com.evolveum.midpoint.schema.SelectorOptions;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ValueDisplayUtil;
import com.evolveum.midpoint.util.DebugUtil;
import com.evolveum.midpoint.util.PrettyPrinter;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import org.apache.commons.lang.Validate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
import java.util.*;
import static com.evolveum.midpoint.prism.polystring.PolyString.getOrig;
/**
* @author mederly
*/
@Component
public class TextFormatter {
@Autowired @Qualifier("cacheRepositoryService") private transient RepositoryService cacheRepositoryService;
@Autowired protected NotificationFunctionsImpl functions;
private static final ResourceBundle RESOURCE_BUNDLE = ResourceBundle.getBundle(
SchemaConstants.SCHEMA_LOCALIZATION_PROPERTIES_RESOURCE_BASE_PATH);
private static final Trace LOGGER = TraceManager.getTrace(TextFormatter.class);
/**
 * Convenience overload: formats a modification delta without the old/new
 * object snapshots, so no container-value explanations ("Notes" section)
 * can be produced.
 */
public String formatObjectModificationDelta(ObjectDelta<? extends Objectable> objectDelta, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
return formatObjectModificationDelta(objectDelta, hiddenPaths, showOperationalAttributes, null, null);
}
/**
 * Formats a modification delta as a human-readable, line-per-item text.
 * objectOld/objectNew are used only for explaining changed container values,
 * e.g. assignment[1]/tenantRef (see MID-2047); either or both may be null,
 * in which case they are ignored.
 */
public String formatObjectModificationDelta(ObjectDelta<? extends Objectable> objectDelta, List<ItemPath> hiddenPaths, boolean showOperationalAttributes,
        PrismObject objectOld, PrismObject objectNew) {
    Validate.notNull(objectDelta, "objectDelta is null");
    Validate.isTrue(objectDelta.isModify(), "objectDelta is not a modification delta");

    // Prefer the definition from the new object, fall back to the old one.
    PrismObjectDefinition definition = null;
    if (objectNew != null && objectNew.getDefinition() != null) {
        definition = objectNew.getDefinition();
    } else if (objectOld != null && objectOld.getDefinition() != null) {
        definition = objectOld.getDefinition();
    }

    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("formatObjectModificationDelta: objectDelta = " + objectDelta.debugDump() + ", hiddenPaths = " + PrettyPrinter.prettyPrint(hiddenPaths));
    }

    StringBuilder sb = new StringBuilder();
    List<ItemDelta> visibleDeltas = filterAndOrderItemDeltas(objectDelta, hiddenPaths, showOperationalAttributes);
    for (ItemDelta delta : visibleDeltas) {
        sb.append(" - ").append(getItemDeltaLabel(delta, definition)).append(":\n");
        formatItemDeltaContent(sb, delta, hiddenPaths, showOperationalAttributes);
    }
    explainPaths(sb, visibleDeltas, definition, objectOld, objectNew, hiddenPaths, showOperationalAttributes);
    return sb.toString();
}
    /**
     * Appends a "Notes:" section explaining container values referenced by id in the
     * displayed deltas (e.g. assignment[1]), so the reader can see what the numeric
     * ids point to. Looks the values up in objectNew first, then objectOld; does
     * nothing when both are null. Each path is explained at most once.
     */
    private void explainPaths(StringBuilder sb, List<ItemDelta> deltas, PrismObjectDefinition objectDefinition, PrismObject objectOld, PrismObject objectNew, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
        if (objectOld == null && objectNew == null) {
            return; // no data - no point in trying
        }
        boolean first = true;
        List<ItemPath> alreadyExplained = new ArrayList<>();
        for (ItemDelta itemDelta : deltas) {
            ItemPath pathToExplain = getPathToExplain(itemDelta);
            if (pathToExplain == null || ItemPath.containsSubpathOrEquivalent(alreadyExplained, pathToExplain)) {
                continue; // null or already processed
            }
            PrismObject source = null;
            Object item = null;
            // Prefer the new object state; fall back to the old one.
            if (objectNew != null) {
                item = objectNew.find(pathToExplain);
                source = objectNew;
            }
            if (item == null && objectOld != null) {
                item = objectOld.find(pathToExplain);
                source = objectOld;
            }
            if (item == null) {
                LOGGER.warn("Couldn't find {} in {} nor {}, no explanation could be created.", pathToExplain, objectNew, objectOld);
                continue;
            }
            if (first) {
                // Emit the section header only once, and only if something is actually explained.
                sb.append("\nNotes:\n");
                first = false;
            }
            String label = getItemPathLabel(pathToExplain, itemDelta.getDefinition(), objectDefinition);
            // the item should be a PrismContainerValue
            if (item instanceof PrismContainerValue) {
                sb.append(" - ").append(label).append(":\n");
                formatContainerValue(sb, "   ", (PrismContainerValue) item, false, hiddenPaths, showOperationalAttributes);
            } else {
                // Unexpected shape - log it, but still try to render whatever was found.
                LOGGER.warn("{} in {} was expected to be a PrismContainerValue; it is {} instead", pathToExplain, source, item.getClass());
                if (item instanceof PrismContainer) {
                    formatPrismContainer(sb, "   ", (PrismContainer) item, false, hiddenPaths, showOperationalAttributes);
                } else if (item instanceof PrismReference) {
                    formatPrismReference(sb, "   ", (PrismReference) item, false);
                } else if (item instanceof PrismProperty) {
                    formatPrismProperty(sb, "   ", (PrismProperty) item);
                } else {
                    sb.append("Unexpected item: ").append(item).append("\n");
                }
            }
            alreadyExplained.add(pathToExplain);
        }
    }
    // Emits the three value sections of one item delta, in ADD/DELETE/REPLACE order.
    // Only DELETE values may reference objects that no longer exist (mightBeRemoved).
    private void formatItemDeltaContent(StringBuilder sb, ItemDelta itemDelta, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
        formatItemDeltaValues(sb, "ADD", itemDelta.getValuesToAdd(), false, hiddenPaths, showOperationalAttributes);
        formatItemDeltaValues(sb, "DELETE", itemDelta.getValuesToDelete(), true, hiddenPaths, showOperationalAttributes);
        formatItemDeltaValues(sb, "REPLACE", itemDelta.getValuesToReplace(), false, hiddenPaths, showOperationalAttributes);
    }
private void formatItemDeltaValues(StringBuilder sb, String type, Collection<? extends PrismValue> values, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
if (values != null) {
for (PrismValue prismValue : values) {
sb.append(" - " + type + ": ");
String prefix = " ";
formatPrismValue(sb, prefix, prismValue, mightBeRemoved, hiddenPaths, showOperationalAttributes);
if (!(prismValue instanceof PrismContainerValue)) { // container values already end with newline
sb.append("\n");
}
}
}
}
    // todo - should each hiddenAttribute be prefixed with something like F_ATTRIBUTE? Currently it should not be.
    /**
     * Renders a shadow's attributes, credentials, activation and associations as text.
     * NOTE(review): the "first" flag inserts a blank line only before the FIRST
     * association, not between associations - confirm this layout is intended.
     */
    public String formatAccountAttributes(ShadowType shadowType, List<ItemPath> hiddenAttributes, boolean showOperationalAttributes) {
        Validate.notNull(shadowType, "shadowType is null");
        StringBuilder retval = new StringBuilder();
        if (shadowType.getAttributes() != null) {
            formatContainerValue(retval, "", shadowType.getAttributes().asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes);
        }
        if (shadowType.getCredentials() != null) {
            formatContainerValue(retval, "", shadowType.getCredentials().asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes);
        }
        if (shadowType.getActivation() != null) {
            formatContainerValue(retval, "", shadowType.getActivation().asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes);
        }
        if (shadowType.getAssociation() != null) {
            boolean first = true;
            for (ShadowAssociationType shadowAssociationType : shadowType.getAssociation()) {
                if (first) {
                    first = false;
                    retval.append("\n");
                }
                retval.append("Association:\n");
                formatContainerValue(retval, "  ", shadowAssociationType.asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes);
                retval.append("\n");
            }
        }
        return retval.toString();
    }
public String formatObject(PrismObject object, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
Validate.notNull(object, "object is null");
StringBuilder retval = new StringBuilder();
formatContainerValue(retval, "", object.getValue(), false, hiddenPaths, showOperationalAttributes);
return retval.toString();
}
private void formatPrismValue(StringBuilder sb, String prefix, PrismValue prismValue, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
if (prismValue instanceof PrismPropertyValue) {
sb.append(ValueDisplayUtil.toStringValue((PrismPropertyValue) prismValue));
} else if (prismValue instanceof PrismReferenceValue) {
sb.append(formatReferenceValue((PrismReferenceValue) prismValue, mightBeRemoved));
} else if (prismValue instanceof PrismContainerValue) {
sb.append("\n");
formatContainerValue(sb, prefix, (PrismContainerValue) prismValue, mightBeRemoved, hiddenPaths, showOperationalAttributes);
} else {
sb.append("Unexpected PrismValue type: ");
sb.append(prismValue);
LOGGER.error("Unexpected PrismValue type: " + prismValue.getClass() + ": " + prismValue);
}
}
private void formatContainerValue(StringBuilder sb, String prefix, PrismContainerValue containerValue, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
// sb.append("Container of type " + containerValue.getParent().getDefinition().getTypeName());
// sb.append("\n");
List<Item> toBeDisplayed = filterAndOrderItems(containerValue.getItems(), hiddenPaths, showOperationalAttributes);
for (Item item : toBeDisplayed) {
if (item instanceof PrismProperty) {
formatPrismProperty(sb, prefix, item);
} else if (item instanceof PrismReference) {
formatPrismReference(sb, prefix, item, mightBeRemoved);
} else if (item instanceof PrismContainer) {
formatPrismContainer(sb, prefix, item, mightBeRemoved, hiddenPaths, showOperationalAttributes);
} else {
sb.append("Unexpected Item type: ");
sb.append(item);
sb.append("\n");
LOGGER.error("Unexpected Item type: " + item.getClass() + ": " + item);
}
}
}
private void formatPrismContainer(StringBuilder sb, String prefix, Item item, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
for (PrismContainerValue subContainerValue : ((PrismContainer<? extends Containerable>) item).getValues()) {
sb.append(prefix);
sb.append(" - ");
sb.append(getItemLabel(item));
if (subContainerValue.getId() != null) {
sb.append(" #").append(subContainerValue.getId());
}
sb.append(":\n");
String prefixSubContainer = prefix + " ";
formatContainerValue(sb, prefixSubContainer, subContainerValue, mightBeRemoved, hiddenPaths, showOperationalAttributes);
}
}
private void formatPrismReference(StringBuilder sb, String prefix, Item item, boolean mightBeRemoved) {
sb.append(prefix);
sb.append(" - ");
sb.append(getItemLabel(item));
sb.append(": ");
if (item.size() > 1) {
for (PrismReferenceValue referenceValue : ((PrismReference) item).getValues()) {
sb.append("\n");
sb.append(prefix + " - ");
sb.append(formatReferenceValue(referenceValue, mightBeRemoved));
}
} else if (item.size() == 1) {
sb.append(formatReferenceValue(((PrismReference) item).getValue(0), mightBeRemoved));
}
sb.append("\n");
}
private void formatPrismProperty(StringBuilder sb, String prefix, Item item) {
sb.append(prefix);
sb.append(" - ");
sb.append(getItemLabel(item));
sb.append(": ");
if (item.size() > 1) {
for (PrismPropertyValue propertyValue : ((PrismProperty<? extends Object>) item).getValues()) {
sb.append("\n");
sb.append(prefix + " - ");
sb.append(ValueDisplayUtil.toStringValue(propertyValue));
}
} else if (item.size() == 1) {
sb.append(ValueDisplayUtil.toStringValue(((PrismProperty<? extends Object>) item).getValue(0)));
}
sb.append("\n");
}
    /**
     * Formats a reference value as "name (type) [on resource] [relation]". Uses the
     * embedded object if present, otherwise resolves the OID from the repository;
     * for shadows, the owning resource's name (or OID) is appended as a qualifier.
     *
     * @param mightBeRemoved if true, a failed resolution is explained in the output
     *                       instead of shown as a bare type:oid
     */
    private String formatReferenceValue(PrismReferenceValue value, boolean mightBeRemoved) {
        // Throwaway result: the outcome of these lookups is not reported anywhere.
        OperationResult result = new OperationResult("dummy");
        PrismObject<? extends ObjectType> object = value.getObject();
        if (object == null) {
            object = getPrismObject(value.getOid(), mightBeRemoved, result);
        }
        String qualifier = "";
        if (object != null && object.asObjectable() instanceof ShadowType) {
            ShadowType shadowType = (ShadowType) object.asObjectable();
            ResourceType resourceType = shadowType.getResource();
            if (resourceType == null) {
                // NOTE(review): assumes the shadow always carries a resourceRef -
                // this would NPE otherwise; confirm against shadow invariants.
                PrismObject<? extends ObjectType> resource = getPrismObject(shadowType.getResourceRef().getOid(), false, result);
                if (resource != null) {
                    resourceType = (ResourceType) resource.asObjectable();
                }
            }
            if (resourceType != null) {
                qualifier = " on " + resourceType.getName();
            } else {
                qualifier = " on resource " + shadowType.getResourceRef().getOid();
            }
        }
        String referredObjectIdentification;
        if (object != null) {
            referredObjectIdentification = PolyString.getOrig(object.asObjectable().getName()) +
                    " (" + object.toDebugType() + ")" +
                    qualifier;
        } else {
            // Could not resolve: fall back to target name (if cached in the ref) or OID.
            String nameOrOid = value.getTargetName() != null ? value.getTargetName().getOrig() : value.getOid();
            if (mightBeRemoved) {
                referredObjectIdentification = "(cannot display the actual name of " + localPart(value.getTargetType()) + ":" + nameOrOid + ", as it might be already removed)";
            } else {
                referredObjectIdentification = localPart(value.getTargetType()) + ":" + nameOrOid;
            }
        }
        return value.getRelation() != null ?
                referredObjectIdentification + " [" + value.getRelation().getLocalPart() + "]"
                : referredObjectIdentification;
    }
private PrismObject<? extends ObjectType> getPrismObject(String oid, boolean mightBeRemoved, OperationResult result) {
try {
Collection<SelectorOptions<GetOperationOptions>> options = SelectorOptions.createCollection(GetOperationOptions.createReadOnly());
return cacheRepositoryService.getObject(ObjectType.class, oid, options, result);
} catch (ObjectNotFoundException e) {
if (!mightBeRemoved) {
LoggingUtils.logException(LOGGER, "Couldn't resolve reference when displaying object name within a notification (it might be already removed)", e);
} else {
}
} catch (SchemaException e) {
LoggingUtils.logException(LOGGER, "Couldn't resolve reference when displaying object name within a notification", e);
}
return null;
}
private String localPartOfType(Item item) {
if (item.getDefinition() != null) {
return localPart(item.getDefinition().getTypeName());
} else {
return null;
}
}
private String localPart(QName qname) {
return qname == null ? null : qname.getLocalPart();
}
    // we call this on filtered list of item deltas - all of they have definition set
    // Label for one delta: its path rendered via getItemPathLabel.
    private String getItemDeltaLabel(ItemDelta itemDelta, PrismObjectDefinition objectDefinition) {
        return getItemPathLabel(itemDelta.getPath(), itemDelta.getDefinition(), objectDefinition);
    }
    /**
     * Builds a human-readable, "/"-separated label for an item path: localized display
     * names where definitions provide them, local element names otherwise; id segments
     * are rendered as "[id]".
     *
     * @param deltaDefinition definition taken from the delta; used for the last named
     *                        segment when no object definition is available (may be null)
     * @param objectDefinition definition of the whole object, may be null
     */
    private String getItemPathLabel(ItemPath path, Definition deltaDefinition, PrismObjectDefinition objectDefinition) {
        NameItemPathSegment lastNamedSegment = path.lastNamed();
        StringBuilder sb = new StringBuilder();
        for (ItemPathSegment segment : path.getSegments()) {
            if (segment instanceof NameItemPathSegment) {
                if (sb.length() > 0) {
                    sb.append("/");
                }
                Definition itemDefinition;
                if (objectDefinition == null) {
                    if (segment == lastNamedSegment) { // definition for last segment is the definition taken from delta
                        itemDefinition = deltaDefinition; // this may be null but we don't care
                    } else {
                        itemDefinition = null; // definitions for previous segments are unknown
                    }
                } else {
                    // todo we could make this iterative (resolving definitions while walking down the path); but this is definitely simpler to implement and debug :)
                    itemDefinition = objectDefinition.findItemDefinition(path.allUpToIncluding(segment));
                }
                if (itemDefinition != null && itemDefinition.getDisplayName() != null) {
                    sb.append(resolve(itemDefinition.getDisplayName()));
                } else {
                    sb.append(((NameItemPathSegment) segment).getName().getLocalPart());
                }
            } else if (segment instanceof IdItemPathSegment) {
                sb.append("[").append(((IdItemPathSegment) segment).getId()).append("]");
            }
        }
        return sb.toString();
    }
private String resolve(String key) {
if (key != null && RESOURCE_BUNDLE.containsKey(key)) {
return RESOURCE_BUNDLE.getString(key);
} else {
return key;
}
}
    // we call this on filtered list of item deltas - all of they have definition set
    /**
     * Returns the prefix of the delta's path up to and including the first [id]
     * segment - the part worth explaining in the Notes section - or null when the
     * path has no id segment, or the id segment is last and the values are fully
     * shown in the delta itself.
     */
    private ItemPath getPathToExplain(ItemDelta itemDelta) {
        ItemPath path = itemDelta.getPath();
        for (int i = 0; i < path.size(); i++) {
            ItemPathSegment segment = path.getSegments().get(i);
            if (segment instanceof IdItemPathSegment) {
                if (i < path.size()-1 || itemDelta.isDelete()) {
                    return path.allUpToIncluding(i);
                } else {
                    // this means that the path ends with [id] segment *and* the value(s) are
                    // only added and deleted, i.e. they are shown in the delta anyway
                    // (actually it is questionable whether path in delta can end with [id] segment,
                    // but we test for this case just to be sure)
                    return null;
                }
            }
        }
        return null;
    }
private List<ItemDelta> filterAndOrderItemDeltas(ObjectDelta<? extends Objectable> objectDelta, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
List<ItemDelta> toBeDisplayed = new ArrayList<ItemDelta>(objectDelta.getModifications().size());
List<QName> noDefinition = new ArrayList<>();
for (ItemDelta itemDelta: objectDelta.getModifications()) {
if (itemDelta.getDefinition() != null) {
if ((showOperationalAttributes || !itemDelta.getDefinition().isOperational()) && !NotificationFunctionsImpl
.isAmongHiddenPaths(itemDelta.getPath(), hiddenPaths)) {
toBeDisplayed.add(itemDelta);
}
} else {
noDefinition.add(itemDelta.getElementName());
}
}
if (!noDefinition.isEmpty()) {
LOGGER.error("ItemDeltas for {} without definition - WILL NOT BE INCLUDED IN NOTIFICATION. Containing object delta:\n{}",
noDefinition, objectDelta.debugDump());
}
Collections.sort(toBeDisplayed, new Comparator<ItemDelta>() {
@Override
public int compare(ItemDelta delta1, ItemDelta delta2) {
Integer order1 = delta1.getDefinition().getDisplayOrder();
Integer order2 = delta2.getDefinition().getDisplayOrder();
if (order1 != null && order2 != null) {
return order1 - order2;
} else if (order1 == null && order2 == null) {
return 0;
} else if (order1 == null) {
return 1;
} else {
return -1;
}
}
});
return toBeDisplayed;
}
// we call this on filtered list of items - all of them have definition set
private String getItemLabel(Item item) {
return item.getDefinition().getDisplayName() != null ?
resolve(item.getDefinition().getDisplayName()) : item.getElementName().getLocalPart();
}
private List<Item> filterAndOrderItems(List<Item> items, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
if (items == null) {
return new ArrayList<>();
}
List<Item> toBeDisplayed = new ArrayList<Item>(items.size());
List<QName> noDefinition = new ArrayList<>();
for (Item item : items) {
if (item.getDefinition() != null) {
boolean isHidden = NotificationFunctionsImpl.isAmongHiddenPaths(item.getPath(), hiddenPaths);
if (!isHidden && (showOperationalAttributes || !item.getDefinition().isOperational())) {
toBeDisplayed.add(item);
}
} else {
noDefinition.add(item.getElementName());
}
}
if (!noDefinition.isEmpty()) {
LOGGER.error("Items {} without definition - THEY WILL NOT BE INCLUDED IN NOTIFICATION.\nAll items:\n{}",
noDefinition, DebugUtil.debugDump(items));
}
Collections.sort(toBeDisplayed, new Comparator<Item>() {
@Override
public int compare(Item item1, Item item2) {
Integer order1 = item1.getDefinition().getDisplayOrder();
Integer order2 = item2.getDefinition().getDisplayOrder();
if (order1 != null && order2 != null) {
return order1 - order2;
} else if (order1 == null && order2 == null) {
return 0;
} else if (order1 == null) {
return 1;
} else {
return -1;
}
}
});
return toBeDisplayed;
}
public String formatUserName(SimpleObjectRef ref, OperationResult result) {
return formatUserName((UserType) ref.resolveObjectType(result, true), ref.getOid());
}
public String formatUserName(ObjectReferenceType ref, OperationResult result) {
UserType user = (UserType) functions.getObjectType(ref, true, result);
return formatUserName(user, ref.getOid());
}
public String formatUserName(UserType user, String oid) {
if (user == null || (user.getName() == null && user.getFullName() == null)) {
return oid;
}
if (user.getFullName() != null) {
return getOrig(user.getFullName()) + " (" + getOrig(user.getName()) + ")";
} else {
return getOrig(user.getName());
}
}
    // TODO implement seriously
    /**
     * Converts an XML timestamp to text via java.util.Date's default toString
     * (JVM-default time zone/locale; not localized or configurable yet).
     */
    public String formatDateTime(XMLGregorianCalendar timestamp) {
        //DateFormatUtils.format(timestamp.toGregorianCalendar(), DateFormatUtils.SMTP_DATETIME_FORMAT.getPattern());
        return String.valueOf(XmlTypeConverter.toDate(timestamp));
    }
}
| |
package org.quelea.mobileremote;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.widget.SearchView;
import android.text.Editable;
import android.text.Html;
import android.util.Patterns;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.inputmethod.EditorInfo;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.Arrays;
/*
* Class to handle all types of dialogs
* Created by Arvid on 2017-11-29.
*/
class Dialogs {
    // Host activity: serves as Android Context and exposes app state, settings
    // and network helpers to every dialog.
    MobileRemote context;
    // Counts consecutive failed URL prompts; at 2 the help button is animated
    // and the counter resets (see enterURLDialog).
    private int urlNotFound = 0;
    // Injects the host activity; must be called before any dialog is shown.
    void setContext(MobileRemote context) {
        this.context = context;
    }
    // Shows a one-button informational dialog; guarded by context.isShown() so only
    // one dialog is open at a time. Dismissing the auto-connect info message starts
    // the auto-connect scan.
    void infoDialog(final String message) {
        if (!context.isShown()) {
            context.setShown(true);
            final CustomDialog dialog = new CustomDialog(context, message, "", "", "", context.getResources().getString(R.string.ok_label), false, false);
            dialog.getNeutral().setText(context.getResources().getString(R.string.ok_label));
            dialog.getNeutral().setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    dialog.getAlertDialog().dismiss();
                    context.setShown(false);
                    // Special case: acknowledging the auto-connect info starts the scan.
                    if (message.equals(context.getString(R.string.auto_connect_info))) {
                        context.startAutoConnect();
                    }
                }
            });
        }
    }
    /**
     * Prompts for the Mobile Remote server URL. OK (or the keyboard's done/enter key)
     * validates the input via checkUserInput, the negative button exits the app, and
     * the neutral button starts server auto-detection.
     *
     * @param message explanation shown to the user (e.g. why the last URL failed)
     * @param prefill text to pre-populate the input field with
     */
    void enterURLDialog(String message, String prefill) {
        // Each prompt bumps the failure counter; on the second consecutive one the
        // help button is animated to draw attention to troubleshooting.
        urlNotFound++;
        context.setShown(true);
        CustomDialog alert = new CustomDialog(context, message, prefill, context.getResources().getString(R.string.ok_label), context.getResources().getString(R.string.action_exit), context.getResources().getString(R.string.search_server), true, true);
        final EditText input = alert.getInput();
        final AlertDialog dialog = alert.getAlertDialog();
        alert.getYes().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Editable value = input.getText();
                String ip = value.toString();
                checkUserInput(ip);
                context.setShown(false);
                dialog.dismiss();
            }
        });
        alert.getNo().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Exit the app
                closeApp();
            }
        });
        alert.getNeutral().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                dialog.dismiss();
                context.setShown(false);
                context.startAutoConnect();
            }
        });
        if (urlNotFound > 1) {
            Animation scale = AnimationUtils.loadAnimation(context, R.anim.pop);
            alert.getHelp().startAnimation(scale);
            urlNotFound = 0;
        }
        alert.getHelp().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                context.startActivity(new Intent(context, Troubleshooting.class));
            }
        });
        // Listen for enter/return press on input
        TextView.OnEditorActionListener exampleListener = new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView input, int actionId,
                                          KeyEvent event) {
                if (((actionId == EditorInfo.IME_NULL && event.getAction() == KeyEvent.ACTION_DOWN))
                        || (actionId == EditorInfo.IME_ACTION_DONE)) {
                    Editable value = input.getEditableText();
                    String ip = value.toString();
                    checkUserInput(ip);
                    dialog.dismiss();
                }
                return true;
            }
        };
        input.setOnEditorActionListener(exampleListener);
    }
    /**
     * Validates the server address typed by the user, normalizes it (adds "http://",
     * strips spaces), saves it and triggers the connection check. Invalid input
     * re-opens the URL dialog; IPv6 addresses are rejected.
     * NOTE(review): an address of exactly 7 characters never gets the "http://"
     * prefix (the length check is `> 7`), and spaces are stripped only AFTER the
     * prefix check - confirm whether either matters in practice.
     */
    void checkUserInput(String ip) {
        // Check if input is empty or contains less than 7 characters
        // (which both is the smallest potential IP with port
        // number and the amount of characters needed to check if "http://"
        // is added or not)
        if ((ip.equals("")) || (ip.length() < 7)
                || (!(ip.contains(":")))) {
            if (!context.isShown())
                enterURLDialog(
                        context.getResources().getString(
                                R.string.incorrect_url_mr), "");
        } else {
            // Check if http:// needs to be added
            if (ip.length() > 7 && !(ip.substring(0, 7).equals("http://")))
                ip = "http://" + ip;
            ip = ip.replaceAll(" ", "");
            String match = Utils.matchIP(ip);
            switch (match) {
                case "true":
                    context.getSettings().saveSetting("urlMR", ip);
                    break;
                case "ipv6":
                    enterURLDialog(
                            context.getResources().getString(
                                    R.string.cant_use_ipv6), "");
                    return;
                default:
                    enterURLDialog(context.getString(R.string.url_incorrect), ip);
                    return;
            }
            // Check if login is needed
            if (Patterns.WEB_URL.matcher(ip).matches()) {
                context.getSettings().setIp(ip);
                context.checkIP(ip);
            } else {
                enterURLDialog(context.getString(R.string.url_incorrect), ip);
            }
            // Download lyrics if already logged in
            if (context.isLoggedIn())
                context.downloadLyrics();
            context.setShown(false);
        }
    }
    // Shows the password prompt for the Mobile Remote server. OK (or the keyboard's
    // done/enter key) sends the password to the server; the negative button exits
    // the app.
    void loginDialog() {
        context.setShown(true);
        CustomDialog alert = new CustomDialog(context, context.getResources().getString(R.string.enter_password), "", context.getResources().getString(R.string.ok_label), context.getResources().getString(R.string.action_exit), "", false, true);
        final EditText input = alert.getInput();
        final AlertDialog alertText = alert.getAlertDialog();
        alert.getYes().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Editable value = input.getText();
                String password = value.toString();
                alertText.dismiss();
                // Log in
                context.sendPassword(password);
                context.setShown(false);
            }
        });
        alert.getNo().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Exit app
                closeApp();
            }
        });
        // Listen for enter/return press on input
        TextView.OnEditorActionListener exampleListener = new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView input, int actionId,
                                          KeyEvent event) {
                if (((actionId == EditorInfo.IME_NULL && event.getAction() == KeyEvent.ACTION_DOWN))
                        || (actionId == EditorInfo.IME_ACTION_DONE)) {
                    // NOTE(review): performClick() targets the AlertDialog's built-in
                    // positive button, while the OK handler is on the custom getYes()
                    // view; if both paths fire, the password could be sent twice -
                    // confirm which button actually receives this click.
                    alertText.getButton(Dialog.BUTTON_POSITIVE).performClick();
                    if (alertText.isShowing()) {
                        context.setShown(false);
                        Editable value = input.getEditableText();
                        String password = value.toString();
                        // Log in
                        context.sendPassword(password);
                        alertText.dismiss();
                    }
                }
                return true;
            }
        };
        input.setOnEditorActionListener(exampleListener);
    }
void scheduleLongClickDialog(final int i) {
context.setShown(true);
CustomSelectionDialog csd = new CustomSelectionDialog(context, String.format(context.getString(R.string.choose_action), context.getScheduleList().get(i)), "", "", "");
String[] options = {context.getString(R.string.remove_item), context.getString(R.string.move_up), context.getString(R.string.move_down)};
final AlertDialog dialog = csd.getAlertDialog();
dialog.setCancelable(true);
dialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
@Override
public void onCancel(DialogInterface dialogInterface) {
context.setShown(false);
}
});
ListView list = csd.getListView();
ListAdapter adapter = new ArrayAdapter<>(context,
android.R.layout.simple_list_item_1, options);
list.setAdapter(adapter);
list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int which, long l) {
if (which == 0) {
String url = context.getSettings().getIp() + "/remove/" + i;
context.checkSupported(url);
} else if (which == 1) {
String url = context.getSettings().getIp() + "/moveup/" + i;
context.checkSupported(url);
} else if (which == 2) {
String url = context.getSettings().getIp() + "/movedown/" + i;
context.checkSupported(url);
}
context.setShown(false);
dialog.dismiss();
}
});
}
    /**
     * Presents the list of available themes (server response, one theme name per
     * line) and applies the selected one. An HTML response means the server does
     * not support the themes endpoint, so a "not supported" info dialog is shown.
     *
     * @param line raw server response containing the theme names
     */
    void selectThemeDialog(String line) {
        if (!context.isShown()) {
            context.setShown(true);
            if (!line.contains("<!DOCTYPE html>")) {
                CustomSelectionDialog csd = new CustomSelectionDialog(context, context.getString(R.string.select_theme), "", "", context.getString(R.string.cancel_label));
                final String[] options = line.split("\n");
                final AlertDialog dialog = csd.getAlertDialog();
                dialog.setCancelable(true);
                dialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
                    @Override
                    public void onCancel(DialogInterface dialogInterface) {
                        context.setShown(false);
                    }
                });
                ListView list = csd.getListView();
                ThemeAdapter adapter = new ThemeAdapter(context, Arrays.asList(options));
                list.setAdapter(adapter);
                list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                    @Override
                    public void onItemClick(AdapterView<?> adapterView, View view, int which, long l) {
                        Toast.makeText(context, String.format(context.getString(R.string.setting_theme), options[which]), Toast.LENGTH_SHORT).show();
                        // Theme names are sent in a URL, so spaces must be escaped.
                        context.setTheme(options[which].replaceAll(" ", "%20"));
                        dialog.dismiss();
                        context.setShown(false);
                    }
                });
                csd.getNeutral().setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        dialog.dismiss();
                        context.setShown(false);
                    }
                });
            } else {
                context.setShown(false);
                infoDialog(context.getString(R.string.not_supported));
            }
        }
    }
void exitDialog() {
CustomDialog cd = new CustomDialog(context, context.getResources().getString(R.string.want_to_exit), "", context.getResources().getString(R.string.yes_label), context.getResources().getString(R.string.no_label), "", false, false);
final AlertDialog dialog = cd.getAlertDialog();
cd.getNo().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
dialog.dismiss();
}
});
cd.getYes().setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// Exit the app
context.getLyricsAdapter().imageLoader.clearCache();
closeApp();
dialog.dismiss();
}
});
}
// Dialog for selecting what to search for
void searchDialog(boolean newSearch) {
if (newSearch)
newSearchDialog();
else {
bibleSearchDialog();
}
}
    /**
     * Asks whether to search for a song or add a Bible passage, and configures the
     * toolbar SearchView accordingly. The third button cancels and collapses the
     * search UI.
     */
    private void newSearchDialog() {
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        LayoutInflater factory = LayoutInflater.from(context);
        @SuppressLint("InflateParams") final View customDialog = factory.inflate(
                R.layout.custom_search_dialog, null);
        builder.setView(customDialog);
        TextView t = customDialog.findViewById(R.id.bibleSearch);
        t.setText(context.getResources().getString(R.string.add_bible));
        TextView t2 = customDialog.findViewById(R.id.songSearch);
        t2.setText(context.getResources().getString(R.string.search_song));
        builder.setCancelable(false);
        final AlertDialog alertText = builder.show();
        // Song search: just point the SearchView at songs.
        customDialog.findViewById(R.id.songButton).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                final MenuItem searchMenuItem = context.getSearchViewMenu()
                        .findItem(R.id.action_search);
                SearchView searchView = (SearchView) searchMenuItem.getActionView();
                searchView.setQueryHint(context.getResources()
                        .getString(R.string.enter_song));
                context.setBibleSearch(false);
                alertText.dismiss();
            }
        });
        // Bible search: fetch the available translations first.
        customDialog.findViewById(R.id.bibleButton).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                context.setBibleSearch(true);
                context.getTranslations();
                alertText.dismiss();
            }
        });
        // Cancel: collapse the search UI and hide the related menu items.
        customDialog.findViewById(R.id.btn_yes).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                alertText.dismiss();
                final MenuItem searchMenuItem = context.getSearchViewMenu()
                        .findItem(R.id.action_search);
                searchMenuItem.collapseActionView();
                context.getHelp().setVisible(false);
                context.getEdit_book().setVisible(false);
            }
        });
    }
// Dialog for bible searches
private void bibleSearchDialog() {
// Check if no translation is selected (= new search)
if (!context.isSelectedTranslations()) {
bibleTranslationSelectionDialog();
} else {
// If translation already is selected, show the books that are already downloaded
bibleBookSelectionDialog();
}
}
    /**
     * Lets the user pick a Bible book (only when book names have been loaded), then
     * looks up the book's chapter count in the bundled chapter_lengths.txt asset and
     * chains into the chapter selection dialog. The neutral button cancels; the
     * negative button goes back to translation selection.
     */
    private void bibleBookSelectionDialog() {
        if (!(context.getBibleBooks()[0].isEmpty())) {
            CustomSelectionDialog csd = new CustomSelectionDialog(context, context.getResources().getString(R.string.select_book), "", context.getResources().getString(R.string.change_translation), context.getResources().getString(R.string.cancel_label));
            final AlertDialog dialog = csd.getAlertDialog();
            ListView list = csd.getListView();
            ListAdapter adapter = new ArrayAdapter<>(context,
                    android.R.layout.simple_list_item_1, context.getBibleBooks());
            list.setAdapter(adapter);
            list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                @Override
                public void onItemClick(AdapterView<?> adapterView, View view, int which, long l) {
                    // The 'which' argument contains the
                    // index position of the selected item
                    context.setBibleBook(context.getBibleBooks()[which]);
                    final MenuItem searchMenuItem = context.getSearchViewMenu()
                            .findItem(R.id.action_search);
                    final SearchView search = (SearchView) searchMenuItem.getActionView();
                    search.setQueryHint(context.getResources()
                            .getString(
                                    R.string.bible_query_hint)
                            + " "
                            + context.getBibleBook()
                            + " ("
                            + context.getBibleTranslation().replaceAll(
                            "%20", " ") + ")");
                    context.getEdit_book().setVisible(true);
                    StringBuilder sb = Utils.readAssets("chapter_lengths.txt", context);
                    int chapters = 0;
                    // The asset holds one "book,chapter,..." row per chapter, so the
                    // LAST row starting with this book number carries the highest
                    // chapter number = the chapter count. Do not break early here.
                    for (String s : sb.toString().split("\n")) {
                        if (s.startsWith((which + 1) + ",")) {
                            String[] val = s.split(",");
                            chapters = Integer.valueOf(val[1]);
                        }
                    }
                    bibleChapterSelectionDialog(chapters, (which + 1));
                    dialog.dismiss();
                }
            });
            csd.getNeutral().setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    // Close dialog and hide buttons
                    dialog.dismiss();
                    // Hide buttons and search bar
                    final MenuItem searchMenuItem = context.getSearchViewMenu()
                            .findItem(R.id.action_search);
                    searchMenuItem.collapseActionView();
                    context.getHelp().setVisible(false);
                    context.getEdit_book().setVisible(false);
                }
            });
            csd.getNo().setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    context.setSelectedTranslations(false);
                    searchDialog(false);
                    dialog.dismiss();
                }
            });
        }
    }
    /**
     * Lets the user pick a chapter (1..chapters) of the selected book, writes it
     * into the SearchView query, looks up the chapter's verse count in the
     * chapter_lengths.txt asset and chains into the verse selection dialog.
     *
     * @param chapters number of chapters in the selected book
     * @param bookNum  1-based book number, used as the row prefix in the asset
     */
    private void bibleChapterSelectionDialog(int chapters, final int bookNum) {
        // if (!(context.getBibleBook().isEmpty())) {
        final MenuItem searchMenuItem = context.getSearchViewMenu()
                .findItem(R.id.action_search);
        final SearchView search = (SearchView) searchMenuItem.getActionView();
        CustomSelectionDialog csd = new CustomSelectionDialog(context, context.getResources().getString(R.string.select_chapter) + " (" + context.getBibleBook() + ")", "", context.getResources().getString(R.string.change_book), context.getResources().getString(R.string.cancel_label));
        final AlertDialog dialog = csd.getAlertDialog();
        ListView list = csd.getListView();
        final String[] strings = new String[chapters];
        for (int i = 0; i < chapters; i++) {
            strings[i] = "" + (i + 1);
        }
        ListAdapter adapter = new ArrayAdapter<>(context,
                android.R.layout.simple_list_item_1, strings);
        list.setAdapter(adapter);
        list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int which, long l) {
                // The 'which' argument contains the
                // index position of the selected item
                search.setQuery(strings[which], false);
                StringBuilder sb = Utils.readAssets("chapter_lengths.txt", context);
                int verses = 0;
                // Find the "book,chapter,verses" row for this book and chapter.
                for (String s : sb.toString().split("\n")) {
                    if (s.startsWith(bookNum + "," + strings[which] + ",")) {
                        String[] val = s.split(",");
                        verses = Integer.valueOf(val[2]);
                    }
                }
                bibleVerseSelectionDialog(verses);
                dialog.dismiss();
            }
        });
        csd.getNeutral().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Close dialog
                dialog.dismiss();
            }
        });
        csd.getNo().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                context.setSelectedTranslations(false);
                bibleBookSelectionDialog();
                dialog.dismiss();
            }
        });
        // }
    }
private void bibleVerseSelectionDialog(int verses) {
    // Shows a selectable list of verse numbers (1..verses) for the chapter
    // currently in the search box. Picking a verse appends ":<verse>" to the
    // query; if any higher-numbered verses remain they are offered as a
    // possible range ending.
    final MenuItem searchMenuItem = context.getSearchViewMenu()
            .findItem(R.id.action_search);
    final SearchView search = (SearchView) searchMenuItem.getActionView();
    CustomSelectionDialog csd = new CustomSelectionDialog(context, context.getResources().getString(R.string.select_verse) + " (" + context.getBibleBook() + " " + search.getQuery() + ")", "", "", context.getResources().getString(R.string.cancel_label));
    final AlertDialog dialog = csd.getAlertDialog();
    ListView list = csd.getListView();
    final String[] strings = new String[verses];
    for (int i = 0; i < verses; i++) {
        strings[i] = String.valueOf(i + 1);
    }
    ListAdapter adapter = new ArrayAdapter<>(context,
            android.R.layout.simple_list_item_1, strings);
    list.setAdapter(adapter);
    list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> adapterView, View view, int which, long l) {
            // The 'which' argument contains the
            // index position of the selected item
            search.setQuery(search.getQuery() + ":" + strings[which], false);
            // Copy the verses after the chosen one straight into an array.
            // (The original round-tripped through a StringBuilder and
            // split("\n"), which yields exactly the same elements.)
            String[] remaining = new String[strings.length - which - 1];
            System.arraycopy(strings, which + 1, remaining, 0, remaining.length);
            if (remaining.length > 0)
                bibleVerseToSelectionDialog(remaining);
            dialog.dismiss();
        }
    });
    csd.getNeutral().setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            // Close dialog
            dialog.dismiss();
        }
    });
}
private void bibleVerseToSelectionDialog(final String[] remainingVerses) {
    // Offers an optional end verse so the query becomes a range such as
    // "3:4-7". The neutral button just closes the dialog; the "no" button
    // submits the single-verse query unchanged.
    final MenuItem searchMenuItem = context.getSearchViewMenu()
            .findItem(R.id.action_search);
    final SearchView search = (SearchView) searchMenuItem.getActionView();
    String title = context.getResources().getString(R.string.select_verse_ending)
            + " (" + context.getBibleBook() + " " + search.getQuery() + ")?";
    CustomSelectionDialog selectionDialog = new CustomSelectionDialog(context, title, "",
            context.getResources().getString(R.string.add),
            context.getResources().getString(R.string.cancel_label));
    final AlertDialog alert = selectionDialog.getAlertDialog();
    ListView verseList = selectionDialog.getListView();
    verseList.setAdapter(new ArrayAdapter<>(context,
            android.R.layout.simple_list_item_1, remainingVerses));
    verseList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View v, int position, long id) {
            // Append "-<endVerse>" and submit the completed query.
            search.setQuery(search.getQuery() + "-" + remainingVerses[position], true);
            alert.dismiss();
        }
    });
    selectionDialog.getNeutral().setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Close dialog without touching the query.
            alert.dismiss();
        }
    });
    selectionDialog.getNo().setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Keep the single-verse query and submit it as-is.
            search.setQuery(search.getQuery(), true);
            alert.dismiss();
        }
    });
}
private void bibleTranslationSelectionDialog() {
    // Lets the user pick one of the available Bible translations; the chosen
    // index is handed to the download/parse pipeline. Does nothing when the
    // translation list is empty.
    if (context.getBibleTranslations()[0].isEmpty()) {
        return;
    }
    CustomSelectionDialog selectionDialog = new CustomSelectionDialog(context,
            context.getResources().getString(R.string.select_translation), "", "",
            context.getResources().getString(R.string.cancel_label));
    final AlertDialog alert = selectionDialog.getAlertDialog();
    ListView translationList = selectionDialog.getListView();
    translationList.setAdapter(new ArrayAdapter<>(context,
            android.R.layout.simple_list_item_1, context.getBibleTranslations()));
    translationList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View v, int position, long id) {
            // Kick off the book download for the chosen translation.
            context.getParseDownloadedText().downloadBooks(position);
            alert.dismiss();
        }
    });
    selectionDialog.getNeutral().setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Close dialog, collapse the search view and hide the menu actions.
            alert.dismiss();
            final MenuItem searchMenuItem = context.getSearchViewMenu()
                    .findItem(R.id.action_search);
            searchMenuItem.collapseActionView();
            context.getHelp().setVisible(false);
            context.getEdit_book().setVisible(false);
        }
    });
}
void showResultInDialog() {
    // Show the results of the songs found. Choosing an entry extracts the
    // song identifier from the matching raw result line (the text between the
    // last ="  and  "> markers) and loads that song.
    if (!(context.getSearchResult()[0].isEmpty())) {
        CustomSelectionDialog csd = new CustomSelectionDialog(context, context.getResources().getString(R.string.select_song), "", "", context.getResources().getString(R.string.cancel_label));
        final AlertDialog dialog = csd.getAlertDialog();
        ListView list = csd.getListView();
        ListAdapter adapter = new ArrayAdapter<>(context,
                android.R.layout.simple_list_item_1, context.getSearchResult());
        list.setAdapter(adapter);
        list.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int which, long l) {
                // The 'which' argument contains the
                // index position of the selected item.
                // BUG FIX: the original tested contains("=\"") twice; the
                // duplicate test is removed (same observable behavior).
                if (context.getLine().contains("=\"") && context.getLine().contains("\">")) {
                    context.getSong(context.getTempResult()[which].substring(
                            context.getTempResult()[which]
                                    .lastIndexOf("=\"") + 2,
                            context.getTempResult()[which]
                                    .lastIndexOf("\">")));
                    context.setSongSelected(true);
                } else {
                    infoDialog("Something went wrong when trying to add the song.");
                }
                dialog.dismiss();
            }
        });
        csd.getNeutral().setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Close dialog
                dialog.dismiss();
            }
        });
    } else {
        // Show message if no songs were found
        infoDialog(context.getResources().getString(R.string.no_songs_message));
    }
}
// Dialog to ask if the user wants to add the selected song to the schedule.
// 'string' is an HTML fragment describing the song (rendered as plain text);
// 'songNumber' identifies the song to load.
void addSongToScheduleDialog(String string, final String songNumber) {
    CustomDialog alert = new CustomDialog(context, Html.fromHtml(string).toString(), "", context.getResources().getString(R.string.yes_label), context.getResources().getString(R.string.no_label), context.getResources().getString(R.string.add_go_live), false, false);
    alert.setTitle(context.getResources().getString(R.string.add_song));
    final AlertDialog dialog = alert.getAlertDialog();
    final MenuItem searchMenuItem = context.getSearchViewMenu()
            .findItem(R.id.action_search);
    final SearchView sv = (SearchView) searchMenuItem.getActionView();
    context.setResultIsOpen(true);
    alert.getNo().setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            // Declined: re-open the result picker if it is hidden, then close.
            if (context.getResultView().getVisibility() == View.GONE)
                showResultInDialog();
            dialog.dismiss();
            context.setResultIsOpen(false);
        }
    });
    alert.getNeutral().setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            // Neutral button (R.string.add_go_live label): load the song and
            // advance to the end of the schedule so it becomes the live item.
            context.loadSong(songNumber);
            if (!context.isCanJump()) {
                // Direct jumps not allowed: step item-by-item to the end.
                for (int i = context.getActiveItem(); i < (context.getScheduleList().size()); i++)
                    context.nextItem();
            } else {
                context.gotoItem(context.getScheduleList().size(), context.getActiveItem());
            }
            // Animate the transition and refresh the lyrics list, then reset
            // the search UI. NOTE(review): the order of these UI mutations
            // appears intentional — keep dismiss() last.
            final Animation nextAnim = AnimationUtils.loadAnimation(context, R.anim.next);
            context.getLyricsListView().startAnimation(nextAnim);
            context.setSlide(true);
            context.getLyricsListView().setAdapter(context.getLyricsAdapter());
            searchMenuItem.collapseActionView();
            sv.setQuery("", false);
            context.setResultIsOpen(false);
            context.getResultView().setVisibility(View.GONE);
            dialog.dismiss();
        }
    });
    alert.getYes().setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            // Plain add: load the song and reset the search UI.
            context.loadSong(songNumber);
            dialog.dismiss();
            searchMenuItem.collapseActionView();
            sv.setQuery("", false);
            context.setResultIsOpen(false);
            context.getResultView().setVisibility(View.GONE);
        }
    });
}
private void closeApp() {
    // Hard shutdown: finish the activity, stop background sync, then kill
    // the process.
    context.finish();
    context.getSync().stopSync();
    // NOTE(review): System.exit bypasses the normal Android lifecycle; it
    // runs immediately after stopSync() returns, so stopSync must complete
    // its cleanup synchronously — TODO confirm.
    System.exit(0);
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.command.impl;
import com.intellij.configurationStore.StorageManagerFileWriteRequestor;
import com.intellij.history.LocalHistory;
import com.intellij.history.core.LocalHistoryFacade;
import com.intellij.history.core.changes.Change;
import com.intellij.history.core.changes.ContentChange;
import com.intellij.history.core.changes.StructuralChange;
import com.intellij.history.integration.IdeaGateway;
import com.intellij.history.integration.LocalHistoryImpl;
import com.intellij.openapi.command.undo.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.events.*;
import com.intellij.util.FileContentUtilCore;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.List;
/**
 * Bridges VFS file events into the undo system: file creations, moves,
 * renames, copies and deletions performed inside a command are registered as
 * (global) undoable actions backed by local history; events that cannot be
 * undone register non-undoable actions instead.
 */
public final class FileUndoProvider implements UndoProvider, BulkFileListener {
  public static final Logger LOG = Logger.getInstance(FileUndoProvider.class);

  // Per-instance marker stored on a VirtualFile between the before/after
  // deletion events so the 'after' pass knows to register an undoable action.
  private final Key<DocumentReference> DELETION_WAS_UNDOABLE = new Key<>(FileUndoProvider.class.getName() + ".DeletionWasUndoable");

  private final Project myProject;
  // True while a command for myProject is in progress; events outside a
  // command are not processed (see shouldProcess).
  private boolean myIsInsideCommand;

  // Both stay null when local history is unavailable; in that case the
  // constructor returns early and no listeners are registered.
  private LocalHistoryFacade myLocalHistory;
  private IdeaGateway myGateway;
  // Id of the last structural local-history change observed; used as the
  // anchor when creating a ChangeRange for an undoable action.
  private long myLastChangeId;

  @SuppressWarnings("UnusedDeclaration")
  public FileUndoProvider() {
    this(null);
  }

  private FileUndoProvider(Project project) {
    myProject = project;
    if (myProject == null) return;

    @NotNull LocalHistory localHistory = LocalHistory.getInstance();
    if (!(localHistory instanceof LocalHistoryImpl)) return;
    myLocalHistory = ((LocalHistoryImpl)localHistory).getFacade();
    myGateway = ((LocalHistoryImpl)localHistory).getGateway();
    if (myLocalHistory == null || myGateway == null) return; // local history was not initialized (e.g. in headless environment)

    ((LocalHistoryImpl)localHistory).addVFSListenerAfterLocalHistoryOne(this, project);
    myLocalHistory.addListener(new LocalHistoryFacade.Listener() {
      @Override
      public void changeAdded(Change c) {
        // Track only structural, non-content changes.
        if (!(c instanceof StructuralChange) || c instanceof ContentChange) return;
        myLastChangeId = c.getId();
      }
    }, myProject);
  }

  @Override
  public void commandStarted(Project p) {
    if (myProject != p) return;
    myIsInsideCommand = true;
  }

  @Override
  public void commandFinished(Project p) {
    if (myProject != p) return;
    myIsInsideCommand = false;
  }

  // Pre-event pass: content changes and deletions need handling before the
  // VFS state actually changes.
  @Override
  public void before(@NotNull List<? extends @NotNull VFileEvent> events) {
    for (VFileEvent e : events) {
      if (e instanceof VFileContentChangeEvent) {
        beforeContentsChange((VFileContentChangeEvent)e);
      }
      else if (e instanceof VFileDeleteEvent) {
        beforeFileDeletion((VFileDeleteEvent)e);
      }
    }
  }

  // Post-event pass: register undo actions for create/move/rename/copy and
  // finalize deletions marked in beforeFileDeletion.
  @Override
  public void after(@NotNull List<? extends @NotNull VFileEvent> events) {
    for (VFileEvent e : events) {
      if (e instanceof VFileCreateEvent ||
          e instanceof VFileMoveEvent ||
          e instanceof VFilePropertyChangeEvent && ((VFilePropertyChangeEvent)e).isRename()) {
        processEvent(e, e.getFile());
      }
      else if (e instanceof VFileCopyEvent) {
        processEvent(e, ((VFileCopyEvent)e).findCreatedFile());
      }
      else if (e instanceof VFileDeleteEvent) {
        fileDeleted((VFileDeleteEvent)e);
      }
    }
  }

  // Registers either an undoable or a non-undoable action for the file,
  // depending on whether the event can be undone.
  private void processEvent(@NotNull VFileEvent e, @Nullable VirtualFile file) {
    if (file == null || !shouldProcess(e, file)) return;
    if (isUndoable(e, file)) {
      registerUndoableAction(file);
    }
    else {
      registerNonUndoableAction(file);
    }
  }

  private void beforeContentsChange(@NotNull VFileContentChangeEvent e) {
    VirtualFile file = e.getFile();
    if (!shouldProcess(e, file)) return;
    // Undoable content changes are handled elsewhere; only block undo for
    // non-undoable ones.
    if (isUndoable(e, file)) return;
    registerNonUndoableAction(file);
  }

  private void beforeFileDeletion(@NotNull VFileDeleteEvent e) {
    VirtualFile file = e.getFile();
    if (!shouldProcess(e, file)) {
      // The file is going away but we won't track it: drop any actions that
      // reference it so they don't point at a dead file.
      invalidateActionsFor(file);
      return;
    }
    if (isUndoable(e, file)) {
      // Remember the reference now — after deletion the file is gone.
      file.putUserData(DELETION_WAS_UNDOABLE, createDocumentReference(file));
    }
    else {
      registerNonUndoableAction(file);
    }
  }

  private void fileDeleted(@NotNull VFileDeleteEvent e) {
    VirtualFile f = e.getFile();
    if (!shouldProcess(e, f)) return;
    DocumentReference ref = f.getUserData(DELETION_WAS_UNDOABLE);
    if (ref != null) {
      registerUndoableAction(ref);
      f.putUserData(DELETION_WAS_UNDOABLE, null);
    }
  }

  // An event is processed only when it happens inside a command for a live
  // project, was not caused by a forced reload or settings-storage write, and
  // the file is under local-history control.
  private boolean shouldProcess(@NotNull VFileEvent e, VirtualFile file) {
    if (!myIsInsideCommand || myProject.isDisposed()) {
      return false;
    }

    Object requestor = e.getRequestor();
    if (FileContentUtilCore.FORCE_RELOAD_REQUESTOR.equals(requestor) || requestor instanceof StorageManagerFileWriteRequestor) {
      return false;
    }
    return LocalHistory.getInstance().isUnderControl(file);
  }

  // Refresh-originated events are not undoable unless the file carries the
  // force-undo flag.
  private static boolean isUndoable(@NotNull VFileEvent e, @NotNull VirtualFile file) {
    return !e.isFromRefresh() || UndoUtil.isForceUndoFlagSet(file);
  }

  private void registerUndoableAction(@NotNull VirtualFile file) {
    registerUndoableAction(createDocumentReference(file));
  }

  private void registerUndoableAction(DocumentReference ref) {
    getUndoManager().undoableActionPerformed(new MyUndoableAction(ref));
  }

  private void registerNonUndoableAction(@NotNull VirtualFile file) {
    getUndoManager().nonundoableActionPerformed(createDocumentReference(file), true);
  }

  private void invalidateActionsFor(@NotNull VirtualFile file) {
    if (myProject == null || !myProject.isDisposed()) {
      getUndoManager().invalidateActionsFor(createDocumentReference(file));
    }
  }

  private static DocumentReference createDocumentReference(@NotNull VirtualFile file) {
    return DocumentReferenceManager.getInstance().create(file);
  }

  // Project-level undo manager when a project is attached, otherwise the
  // application-global one.
  private UndoManagerImpl getUndoManager() {
    if (myProject != null) {
      return (UndoManagerImpl)UndoManager.getInstance(myProject);
    }
    return (UndoManagerImpl)UndoManager.getGlobalInstance();
  }

  // Undoable action backed by local-history change ranges. Each undo/redo
  // reverts the current range and swaps in the range produced by the revert,
  // so repeated undo/redo ping-pongs between the two states.
  private class MyUndoableAction extends GlobalUndoableAction {
    private ChangeRange myActionChangeRange;
    private ChangeRange myUndoChangeRange;

    MyUndoableAction(DocumentReference r) {
      super(r);
      myActionChangeRange = new ChangeRange(myGateway, myLocalHistory, myLastChangeId);
    }

    @Override
    public void undo() throws UnexpectedUndoException {
      try {
        myUndoChangeRange = myActionChangeRange.revert(myUndoChangeRange);
      }
      catch (IOException e) {
        LOG.warn(e);
        throw new UnexpectedUndoException(e.getMessage());
      }
    }

    @Override
    public void redo() throws UnexpectedUndoException {
      try {
        myActionChangeRange = myUndoChangeRange.revert(myActionChangeRange);
      }
      catch (IOException e) {
        LOG.warn(e);
        throw new UnexpectedUndoException(e.getMessage());
      }
    }
  }
}
| |
package sapphire.query;
import java.util.Map;
import java.util.LinkedHashMap;
import java.util.HashMap;
import java.io.IOException;
import java.util.ArrayList;
import java.util.PriorityQueue;
import java.util.HashSet;
import java.util.Iterator;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ResultSet;
import java.util.LinkedHashSet;
public class RelaxerMain {
//Final
ArrayList<ArrayList<String>> literalsToConnect = new ArrayList<ArrayList<String>>();
ArrayList <String> predicatesToFavour = new ArrayList<String>(); //+1 for these, + 5 otherwise
HashSet<String> PTFHelper = new HashSet<String>();
LinkedHashMap<Triple,Integer> uniqueVisitors = new LinkedHashMap<Triple, Integer>(); //To see if every seed group has been at this tuple
HashMap<Triple,Integer> costSoFar = new HashMap<Triple,Integer>(); //Cost so far to reach said triple, to be weighted by input predicate
ArrayList<HashSet<Triple>> expandedIn = new ArrayList<HashSet<Triple>> (); //To avoid an already expanded triple from the same seed being expanded again
ArrayList <PriorityQueue<Triple>> queues = new ArrayList <PriorityQueue<Triple>> ();
TriplePriority np = new TriplePriority(this);
int numberOfSeeds;
int positionInQueues;
int unencounteredFormatError = 0;
LinkedHashSet<Triple> exploredOnly = new LinkedHashSet<Triple> (); //only what's been taken from the head of the PQ and the literal that made them all connected
ArrayList<HashSet<Integer>> connectionSet = new ArrayList <HashSet<Integer>>(); //This will become a replacement to uniqueVisitors in connectionCheck, elsewhere uniqueVisitors is still neeeded
HashMap<Triple, HashSet<Integer>> connectionSetHelper = new HashMap<Triple, HashSet<Integer>>();
int addedInIHS = 0;
HashMap<String, Integer> makeSet = new HashMap<String, Integer>();
HashMap<Triple, Integer> edgeCost = new HashMap<Triple, Integer>();
EdgePriority ep = new EdgePriority(this); //gets the predicate from each triples and compares based on their integer value in edgeCost
PriorityQueue<Triple> edgesInIncreasingWeight = new PriorityQueue<Triple> (11, ep);
LinkedHashSet<Triple> MST = new LinkedHashSet<Triple>(); //THIS IS THE STEINER TREE
HashMap<String, Integer> degreeOfVertex = new HashMap<String, Integer>();//a vertex is either a subject or object in a triple/tuple
public RelaxerMain(ArrayList<ArrayList<String>> where) {
for(ArrayList<String> manyTriples : where){ //is there where similar literals come in? Assuming so. If not logic will have to be modified TODO clarify
String object = manyTriples.get(2);
String predicate = manyTriples.get(1);
ArrayList<String> objectsTmp = new ArrayList<String>();
ArrayList<String> alternatives =
sapphire.autoComplete.AutoComplete.warehouse.findSimilarStringsLiterals(object, 0.7);
objectsTmp.add(object);
objectsTmp.addAll(alternatives);
literalsToConnect.add(objectsTmp);
if(!PTFHelper.contains(predicate)) {
PTFHelper.add(predicate);
predicatesToFavour.add(predicate);
}
}
numberOfSeeds = literalsToConnect.size();
positionInQueues = 0;
for(int i = 0; i < numberOfSeeds; i++) {
connectionSet.add(new HashSet<Integer>());
connectionSet.get(i).add(i); //all different seed groups start in different elements (sets) in the arraylist, and get grouped together as they connect
}
}
public LinkedHashSet<Triple> runIt() {
long startTime = System.currentTimeMillis();
try {
relaxQuery_v2();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//printResults();
//printExploredOnly();
createMST();
//printMST();
long endTime = System.currentTimeMillis();
System.out.println("Execution time is: " + ((endTime - startTime) / 1000) + " seconds");
return MST;
}
public void relaxQuery_v2() throws IOException {
int iterationCounter = 0;
for(ArrayList<String> subArray : literalsToConnect) {
PriorityQueue<Triple> pq = new PriorityQueue<Triple>(11, np);
HashSet<Triple> hs = new HashSet<Triple>();
int addedCounter = 0;
for(String literal : subArray) { //Initialize the starting triples aka triples to expand from
String queryString =
"SELECT ?s ?p WHERE { " +
" ?s ?p " + literal + " . " +
"}";
Query query = QueryFactory.create(queryString);
QueryExecution qexec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query);
try {
ResultSet results = qexec.execSelect();
while(results.hasNext()) {
QuerySolution solution = results.nextSolution();
Triple triple = new Triple(solution.get("?s").toString(), solution.get("?p").toString(), literal);//HAD to take out qoutes here
System.out.println("Initializing with these tuples: " + triple.toString());
if(!uniqueVisitors.containsKey(triple)) {
uniqueVisitors.put(triple, 1);
costSoFar.put(triple, 0);
HashSet<Integer> temp = new HashSet<Integer>();
temp.add(iterationCounter);
connectionSetHelper.put(triple, temp);
}else {
uniqueVisitors.put(triple, uniqueVisitors.get(triple) + 1);
connectionSetHelper.get(triple).add(iterationCounter);
}
pq.add(triple);
hs.add(triple);
addedCounter++;
}
}catch(Exception e) {
e.printStackTrace();
}
}//end inner for
if(addedCounter == 0) {
System.err.println("!!!ERROR, literal(and like literals) of: " + subArray.get(0) + " not found in dataset!!!, RECCOMEND MISSION ABORT\n");
//break; Should we hard break out of program here?
}else {
queues.add(pq);
expandedIn.add(hs);
iterationCounter++;
}
}//end nested fors, which encompasses my initialization stages
System.out.println("Checking queues");
for(PriorityQueue<Triple> pq : queues) {
System.out.println(pq.toString());
}
System.out.println("Connection set helper / connection set validation");
System.out.println(connectionSetHelper.toString());
System.out.println(connectionSet.toString());
System.out.println("-----End---init-----");
boolean connected = connectionCheck();
if(connected) {
System.out.println("Graph is connected(before while)");
for(Map.Entry<Triple, Integer> entry : uniqueVisitors.entrySet()) {
System.out.println(entry.getKey().toString());
}
return;
}
System.out.println("Queues.size() is " + queues.size());
int expansionNullCounter = 0;
int testCounter = 0;
while(!connected && testCounter < 500) { //begin expansion while ----------------------------
if(positionInQueues == queues.size()) positionInQueues = 0;
PriorityQueue<Triple> pq = queues.get(positionInQueues);
Triple toExpand = pq.poll();
if(toExpand == null) {
expansionNullCounter++;
if(expansionNullCounter == queues.size()) {
System.out.println("Nowhere to expand anywhere!");
break;
}else {
System.out.println("This PQ is empty, trying another");
positionInQueues++;
continue;
}
}
exploredOnly.add(toExpand); //TODO MARK
addToDOV(toExpand);
expansionNullCounter = 0;
String toFixPrefixSubj = toExpand.subject;
String toFixPrefixObj = toExpand.object;
char quoteChar = '"';
if(toFixPrefixSubj.contains("http") && toFixPrefixSubj.charAt(0) != quoteChar) { //considers data in the form of: "O Canada"^^<http://www.w3.org/1999/02/22-rdf-syntax-ns#langString> as well
toFixPrefixSubj = "<" + toFixPrefixSubj + ">";
}
if(toFixPrefixObj.contains("http") && toFixPrefixObj.charAt(0) != quoteChar) {
toFixPrefixObj = "<" + toFixPrefixObj + ">";
}
System.out.println("BAH " + toFixPrefixSubj + "||" + toFixPrefixObj);
String queryStringSubjAsObjSearch =
"SELECT ?s ?p WHERE { " +
" ?s ?p " + toFixPrefixSubj + " . " +
"}";
String queryStringSubjAsSubjSearch =
"SELECT ?p ?o WHERE { " +
toFixPrefixSubj + " ?p ?o . " +
"}";
String queryStringObjAsSubjSearch =
"SELECT ?p ?o WHERE { " +
toFixPrefixObj + " ?p ?o . " +
"}";
String queryStringObjAsObjSearch =
"SELECT ?s ?p WHERE { " +
" ?s ?p " + toFixPrefixObj + " . " +
"}";
String dummyQuery = //prefixes +
"SELECT ?s ?p ?o WHERE { ?s ?p ?o . }";
Query SubjAsObjSearch = QueryFactory.create(dummyQuery);
Query SubjAsSubjSearch = QueryFactory.create(dummyQuery);
Query ObjAsSubjSearch = QueryFactory.create(dummyQuery);
Query ObjAsObjSearch = QueryFactory.create(dummyQuery);
QueryExecution SubjAsObjSearchExec = QueryExecutionFactory.create(SubjAsObjSearch); //Placeholder searches...that's their only purpose
QueryExecution SubjAsSubjSearchExec = QueryExecutionFactory.create(SubjAsObjSearch);
QueryExecution ObjAsSubjSearchExec = QueryExecutionFactory.create(SubjAsObjSearch);
QueryExecution ObjAsObjSearchExec = QueryExecutionFactory.create(SubjAsObjSearch);
try {
SubjAsObjSearch = QueryFactory.create(queryStringSubjAsObjSearch);
SubjAsObjSearchExec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", SubjAsObjSearch);
}catch(Exception e) {
System.out.println("Unaccounter format in " + "SAO" + e.getMessage());
e.printStackTrace();
unencounteredFormatError++;
}
try {
SubjAsSubjSearch = QueryFactory.create(queryStringSubjAsSubjSearch);
SubjAsSubjSearchExec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", SubjAsSubjSearch);
}catch(Exception e) {
System.out.println("Unaccounter format in " + "SAS" + e.getMessage());
e.printStackTrace();
unencounteredFormatError++;
}
try {
ObjAsSubjSearch = QueryFactory.create(queryStringObjAsSubjSearch);
ObjAsSubjSearchExec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", ObjAsSubjSearch);
}catch(Exception e) {
System.out.println("Unaccounter format in " + "OAS" + e.getMessage());
e.printStackTrace();
unencounteredFormatError++;
}
try {
ObjAsObjSearch = QueryFactory.create(queryStringObjAsObjSearch);
ObjAsObjSearchExec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", ObjAsObjSearch);
}catch(Exception e) {
System.out.println("Unaccounter format in " + "OAO" + e.getMessage());
e.printStackTrace();
unencounteredFormatError++;
}
try {
ResultSet SubjAsObjResults = SubjAsObjSearchExec.execSelect();
while(SubjAsObjResults.hasNext()) {
QuerySolution solution = SubjAsObjResults.nextSolution();
Triple triple = new Triple(solution.get("?s").toString(), solution.get("?p").toString(), toExpand.subject);
connected = expander(pq, triple, positionInQueues, toExpand);
if(connected) return;
}
}catch(Exception e) {
//Comes from unecountered format errors
}
try {
ResultSet SubjAsSubjResults = SubjAsSubjSearchExec.execSelect();
while(SubjAsSubjResults.hasNext()) {
QuerySolution solution = SubjAsSubjResults.nextSolution();
Triple triple = new Triple(toExpand.subject, solution.get("?p").toString(), solution.get("?o").toString());
connected = expander(pq, triple, positionInQueues, toExpand);
if(connected) return;
}
}catch(Exception e) {
//Comes from unecountered format errors
}
try {
ResultSet ObjAsSubjResults = ObjAsSubjSearchExec.execSelect();
while(ObjAsSubjResults.hasNext()) {
QuerySolution solution = ObjAsSubjResults.nextSolution();
Triple triple = new Triple(toExpand.object, solution.get("?p").toString(), solution.get("?o").toString());
connected = expander(pq, triple, positionInQueues, toExpand);
if(connected) return;
}
}catch(Exception e) {
//Comes from unecountered format errors
}
try {
ResultSet ObjAsObjResults = ObjAsObjSearchExec.execSelect();
while(ObjAsObjResults.hasNext()) {
QuerySolution solution = ObjAsObjResults.nextSolution();
Triple triple = new Triple(solution.get("?s").toString(), solution.get("?p").toString(), toExpand.object);
connected = expander(pq, triple, positionInQueues, toExpand);
//if(connected) return; no need here
}
}catch(Exception e) {
//Comes from unecountered format errors
}
testCounter++;
positionInQueues++;
}//end while
}
public void printResults() throws IOException {
System.out.println("Graph is connected(inside while)");
for(Map.Entry<Triple, Integer> entry : uniqueVisitors.entrySet()) {
System.out.println(entry.getKey().toString());
}
System.out.println(uniqueVisitors.size() + " is # of triples in graph");
}
public void printExploredOnly () throws IOException {
System.out.println("---Explored only---");
Iterator<Triple> it = exploredOnly.iterator();
while(it.hasNext()) {
Triple triple = it.next();
System.out.println(triple.toString());
}
System.out.println("Explored only size: " + exploredOnly.size());
System.out.println("Unecountered format errors " + unencounteredFormatError);
System.out.println("Added in IHS: " + addedInIHS);
}
public boolean expander(PriorityQueue<Triple> pq, Triple triple, int positionInQueues, Triple parent) {
//System.out.println("Attempting to add..." + triple.toString());
if(!expandedIn.get(positionInQueues).contains(triple)) {
//System.out.println("^ successfully added");
expandedIn.get(positionInQueues).add(triple);
if(!uniqueVisitors.containsKey(triple)) {
uniqueVisitors.put(triple, 1);
HashSet<Integer> temp = new HashSet<Integer>();
temp.add(positionInQueues);
connectionSetHelper.put(triple, temp);
}else {
uniqueVisitors.put(triple, uniqueVisitors.get(triple) + 1);
connectionSetHelper.get(triple).add(positionInQueues);
}
int additionCost = costToAdd(triple);
if(!costSoFar.containsKey(triple) || costSoFar.get(triple) > costSoFar.get(parent) + additionCost) { //Change this and below plus values for predicate weighting after
costSoFar.put(triple, costSoFar.get(parent) + additionCost);
}
pq.add(triple); //need to but in cost so far BEFORE its added to PQ for comparator logic in TriplePriority to work
iterateHashSet(triple);
}
return connectionCheck();
}
public void iterateHashSet(Triple triple) {
HashSet<Integer> tmp = connectionSetHelper.get(triple);
if(tmp.size() < 2) return; //speed optimization
for(int i = connectionSet.size() - 1; i > -1; i--) { //connecting the sets
HashSet<Integer> hs = connectionSet.get(i);
for(int y : tmp) {
if(hs.contains(y)) {
tmp.addAll(hs);
connectionSet.remove(i);
exploredOnly.add(triple); //TODO Mark
addToDOV(triple);
addedInIHS++;
break;
}
}
}
connectionSet.add(tmp);
if(connectionCheck()) {
exploredOnly.add(triple);//TODO MARK not polled from PQ but still part of the spanning tree!
addToDOV(triple);
addedInIHS++;
}
}
public void createMST() {
int counter = 0;
for(Triple triple : exploredOnly) {//setting up the data structures to build the MST and run kruskals
if(triple.predicate.toLowerCase().contains("wikipagewikilink")) {
triple.predicate = predicateReplacer(triple);
}
String subject = triple.subject;
String object = triple.object;
if(!makeSet.containsKey(subject)) {
makeSet.put(subject, counter);
counter++;
}
if(!makeSet.containsKey(object)) {
makeSet.put(object, counter);
counter++;
}
int cost = 20;
if(costSoFar.get(triple) == 0) {
cost = 0;
}else {
for(String predicate : predicatesToFavour) {
if(triple.predicate.toLowerCase().contains(predicate.toLowerCase()) || predicate.toLowerCase().contains(triple.predicate.toLowerCase())) {
cost = 5;
break;
}
}
}
edgeCost.put(triple, cost);
edgesInIncreasingWeight.add(triple);
}
while(!edgesInIncreasingWeight.isEmpty()) { //running the for loop part of kruskals MST algorithm
Triple triple = edgesInIncreasingWeight.poll();
String u = triple.subject;
String v = triple.object;
//Cannot ignore wikidata, CAN lead to missed connections. && is additional pruning for Steiner tree
if(makeSet.get(u) != makeSet.get(v) && Math.min(degreeOfVertex.get(u), degreeOfVertex.get(v)) > 1) {
MST.add(triple);
int min = Math.min(makeSet.get(u), makeSet.get(v));
makeSet.put(u, min);
makeSet.put(v, min);
}
}
}
public String predicateReplacer(Triple triple) { //to get rid of the annoying wikiPageWikiLink
String toReturn = triple.predicate;
String subject = triple.subject;
String object = triple.object;
if(subject.contains("http")) {
subject = "<" + subject + ">";
}
if(object.contains("http")) {
object = "<" + object + ">";
}
String queryString = //prefixes +
"SELECT ?p WHERE { " +
" " + subject + " ?p " + object + " . " +
"}";
Query query = QueryFactory.create(queryString);
QueryExecution qexec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query);
try {
ResultSet results = qexec.execSelect();
while(results.hasNext()) {
QuerySolution solution = results.nextSolution();
toReturn = solution.get("?p").toString();
if(!toReturn.contains("wikiPageWikiLink")) break;
}
}catch(Exception e) {
System.out.println("In predicate replacer " + e.getMessage());
e.printStackTrace();
}
return toReturn;
}
public void printMST() {
System.out.println("Literals to connect:");
for( ArrayList<String> x : literalsToConnect) {
System.out.print("{");
for(String y : x) {
System.out.print("|" + y + "|");
}
System.out.print("}");
System.out.println();
}
System.out.println("Number of triples explored: " + uniqueVisitors.size());
System.out.println("---Begin Steiner Tree---");
for(Triple triple : MST) {
System.out.println(triple.toString());
}
System.out.println("Steiner tree size: " + MST.size());
//System.out.println(degreeOfVertex.toString());
}
/**
 * Records one more incident edge for both endpoints of the given triple in
 * the degree-of-vertex map. A vertex that is one of the starting literals
 * is seeded with a degree of 10 (instead of 1) the first time it is seen,
 * which biases the search toward keeping starting literals connected.
 *
 * @param triple the triple whose subject and object degrees are updated
 */
public void addToDOV(Triple triple) {
    // Subject is updated before object, matching the original ordering
    // (relevant when subject.equals(object)).
    incrementDegree(triple.subject);
    incrementDegree(triple.object);
}

/**
 * Bumps the degree count for a single vertex, seeding starting literals
 * at 10 and all other unseen vertices at 1.
 */
private void incrementDegree(String vertex) {
    if (!degreeOfVertex.containsKey(vertex) && isAStartingLiteral(vertex)) {
        degreeOfVertex.put(vertex, 10);
    } else if (!degreeOfVertex.containsKey(vertex)) {
        degreeOfVertex.put(vertex, 1);
    } else {
        degreeOfVertex.put(vertex, degreeOfVertex.get(vertex) + 1);
    }
}
/**
 * Checks whether the given string matches (case-insensitively) any of the
 * starting literals the Steiner-tree search must connect.
 *
 * @param s candidate vertex label
 * @return true if {@code s} equals one of the starting literals, ignoring case
 */
public boolean isAStartingLiteral(String s) {
    for (ArrayList<String> subArr : literalsToConnect) {
        for (String x : subArr) {
            // equalsIgnoreCase avoids allocating two lowercased copies per
            // comparison and is not sensitive to the default locale the way
            // toLowerCase().equals(...) is.
            if (s.equalsIgnoreCase(x)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Reports whether all required literals have been merged into a single
 * connected component (i.e. the connection set has collapsed to one entry).
 */
public boolean connectionCheck() { //O(1) runtime now, but additional cost in interateHashSet is there...
    return connectionSet.size() == 1;
}
/**
 * Computes the expansion cost of a triple: a favoured predicate (one that
 * overlaps, in either direction, a predicate from the initial query) costs
 * 1, everything else costs 20. Lower cost means higher expansion priority.
 */
public int costToAdd(Triple triple) { //Helps determine priority of a triple to expand based on if its predicates match the one in the initial query
    String triplePredicate = triple.predicate.toLowerCase();
    for (String favoured : predicatesToFavour) {
        //System.out.println("Predicate: " + favoured + "||" + triple.predicate);
        String favouredLower = favoured.toLowerCase();
        if (triplePredicate.contains(favouredLower) || favouredLower.contains(triplePredicate)) {
            //System.out.println("Reduced cost for " + triple.toString());
            return 1;
        }
    }
    return 20;
}
}
| |
/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2007, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* ---------------
* LineBorder.java
* ---------------
* (C) Copyright 2007, by Christo Zietsman and Contributors.
*
* Original Author: Christo Zietsman;
* Contributor(s): David Gilbert (for Object Refinery Limited);
*
* Changes:
* --------
* 16-Mar-2007 : Version 1, contributed by Christo Zietsman with
* modifications by DG (DG);
* 13-Jun-2007 : Don't draw if area doesn't have positive dimensions (DG);
*
*/
package org.jfree.chart.block;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Stroke;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import org.jfree.io.SerialUtilities;
import org.jfree.ui.RectangleInsets;
import org.jfree.util.ObjectUtilities;
import org.jfree.util.PaintUtilities;
/**
* A line border for any {@link AbstractBlock}.
*
* @since 1.0.5
*/
public class LineBorder implements BlockFrame, Serializable {
    /** For serialization. */
    static final long serialVersionUID = 4630356736707233924L;
    /** The line color. */
    private transient Paint paint;
    /** The line stroke. */
    private transient Stroke stroke;
    /** The insets. */
    private RectangleInsets insets;
    /**
     * Creates a default border (black, 1-pixel stroke, 1.0 insets on all
     * sides).
     */
    public LineBorder() {
        this(Color.black, new BasicStroke(1.0f), new RectangleInsets(1.0, 1.0,
                1.0, 1.0));
    }
    /**
     * Creates a new border with the specified color, stroke and insets.
     *
     * @param paint  the color (<code>null</code> not permitted).
     * @param stroke  the border stroke (<code>null</code> not permitted).
     * @param insets  the insets (<code>null</code> not permitted).
     */
    public LineBorder(Paint paint, Stroke stroke, RectangleInsets insets) {
        if (paint == null) {
            throw new IllegalArgumentException("Null 'paint' argument.");
        }
        if (stroke == null) {
            throw new IllegalArgumentException("Null 'stroke' argument.");
        }
        if (insets == null) {
            throw new IllegalArgumentException("Null 'insets' argument.");
        }
        this.paint = paint;
        this.stroke = stroke;
        this.insets = insets;
    }
    /**
     * Returns the paint.
     *
     * @return The paint (never <code>null</code>).
     */
    public Paint getPaint() {
        return this.paint;
    }
    /**
     * Returns the insets.
     *
     * @return The insets (never <code>null</code>).
     */
    public RectangleInsets getInsets() {
        return this.insets;
    }
    /**
     * Returns the stroke.
     *
     * @return The stroke (never <code>null</code>).
     */
    public Stroke getStroke() {
        return this.stroke;
    }
    /**
     * Draws the border by stroking a line along each edge of the reserved
     * space, using this border's paint and stroke. Edges whose inset is
     * zero are skipped.
     *
     * @param g2  the graphics device.
     * @param area  the area.
     */
    public void draw(Graphics2D g2, Rectangle2D area) {
        double w = area.getWidth();
        double h = area.getHeight();
        // if the area has zero height or width, we shouldn't draw anything
        if (w <= 0.0 || h <= 0.0) {
            return;
        }
        double t = this.insets.calculateTopInset(h);
        double b = this.insets.calculateBottomInset(h);
        double l = this.insets.calculateLeftInset(w);
        double r = this.insets.calculateRightInset(w);
        double x = area.getX();
        double y = area.getY();
        // lines are centred within the inset strip on each side
        double x0 = x + l / 2.0;
        double x1 = x + w - r / 2.0;
        double y0 = y + h - b / 2.0;
        double y1 = y + t / 2.0;
        g2.setPaint(getPaint());
        g2.setStroke(getStroke());
        Line2D line = new Line2D.Double();
        if (t > 0.0) {
            line.setLine(x0, y1, x1, y1);
            g2.draw(line);
        }
        if (b > 0.0) {
            line.setLine(x0, y0, x1, y0);
            g2.draw(line);
        }
        if (l > 0.0) {
            line.setLine(x0, y0, x0, y1);
            g2.draw(line);
        }
        if (r > 0.0) {
            line.setLine(x1, y0, x1, y1);
            g2.draw(line);
        }
    }
    /**
     * Tests this border for equality with an arbitrary instance.
     *
     * @param obj  the object (<code>null</code> permitted).
     *
     * @return A boolean.
     */
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof LineBorder)) {
            return false;
        }
        LineBorder that = (LineBorder) obj;
        if (!PaintUtilities.equal(this.paint, that.paint)) {
            return false;
        }
        if (!ObjectUtilities.equal(this.stroke, that.stroke)) {
            return false;
        }
        if (!this.insets.equals(that.insets)) {
            return false;
        }
        return true;
    }
    /**
     * Returns a hash code for this border, consistent with
     * {@link #equals(Object)}. (FIX: previously {@code equals()} was
     * overridden without {@code hashCode()}, violating the
     * {@link Object#hashCode()} contract.)
     *
     * @return A hash code.
     */
    public int hashCode() {
        // The paint is deliberately omitted: PaintUtilities.equal() can
        // report two Paint instances equal even when their hashCode()
        // values differ (e.g. GradientPaint), which would break the
        // equals/hashCode contract. Hashing a subset of the fields used by
        // equals() is always contract-safe.
        int result = 37;
        result = 37 * result + (this.stroke != null ? this.stroke.hashCode() : 0);
        result = 37 * result + this.insets.hashCode();
        return result;
    }
    /**
     * Provides serialization support.
     *
     * @param stream  the output stream.
     *
     * @throws IOException  if there is an I/O error.
     */
    private void writeObject(ObjectOutputStream stream) throws IOException {
        stream.defaultWriteObject();
        // Paint and Stroke are not Serializable in general, hence the
        // transient fields and the SerialUtilities round-trip.
        SerialUtilities.writePaint(this.paint, stream);
        SerialUtilities.writeStroke(this.stroke, stream);
    }
    /**
     * Provides serialization support.
     *
     * @param stream  the input stream.
     *
     * @throws IOException  if there is an I/O error.
     * @throws ClassNotFoundException  if there is a classpath problem.
     */
    private void readObject(ObjectInputStream stream)
            throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
        this.paint = SerialUtilities.readPaint(stream);
        this.stroke = SerialUtilities.readStroke(stream);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.streaming;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.util.ArrayList;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.PriorityQueue;
/**
Eventually will be fed TupleInputFormats.
For now will be fed primitive InputFormats.
@author Michel Tourn
*/
public class MergerInputFormat extends InputFormatBase {
public MergerInputFormat() {
}
// Lazy one-time initialization: captures the FileSystem/JobConf and builds
// one delegate InputFormat per path of the primary compound input spec.
// Safe to call repeatedly; subsequent calls are no-ops.
void checkReady(FileSystem fs, JobConf job) {
if (ready_) {
// could complain if fs / job changes
return;
}
fs_ = fs;
job_ = job;
debug_ = (job.get("stream.debug") != null);
String someInputSpec = job_.get("stream.inputspecs.0");
CompoundDirSpec someSpec = new CompoundDirSpec(someInputSpec, true);
fmts_ = new ArrayList();
int n = someSpec.paths_.length;
inputTagged_ = job.getBoolean("stream.inputtagged", false);
// 0 is primary
// Curr. secondaries are NOT used for getSplits(), only as RecordReader factory
for (int i = 0; i < n; i++) {
// this ignores -inputreader..
// That's why if hasSimpleInputSpecs_=true (n=1) then StreamJob will set
// the top-level format to StreamInputFormat rather than MergeInputFormat.
// So we only support custom -inputformat for n=1.
// Probably OK for now since custom inputformats would be constrained (no \t and \n in payload)
fmts_.add(new StreamInputFormat()); // will be TupleInputFormat
}
primary_ = (InputFormat) fmts_.get(0);
ready_ = true;
}
/** This implementation always returns true. */
public boolean[] areValidInputDirectories(FileSystem fileSys, Path[] inputDirs) throws IOException {
// must do this without JobConf...
boolean[] b = new boolean[inputDirs.length];
for (int i = 0; i < inputDirs.length; ++i) {
b[i] = true;
}
return b;
}
/** Delegate to the primary InputFormat.
Force full-file splits since there's no index to sync secondaries.
(and if there was, this index may need to be created for the first time
full file at a time... )
*/
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
return ((StreamInputFormat) primary_).getSplits(job, numSplits);
}
/**
Builds one RecordReader per input spec path (the primary split plus one
full-file split per secondary path) and merges them by key via
MergedRecordReader.
*/
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
FileSystem fs = ((FileSplit) split).getPath().getFileSystem(job);
checkReady(fs, job);
reporter.setStatus(split.toString());
ArrayList readers = new ArrayList();
String primary = ((FileSplit) split).getPath().toString();
CompoundDirSpec spec = CompoundDirSpec.findInputSpecForPrimary(primary, job);
if (spec == null) {
throw new IOException("Did not find -input spec in JobConf for primary:" + primary);
}
for (int i = 0; i < fmts_.size(); i++) {
InputFormat f = (InputFormat) fmts_.get(i);
Path path = new Path(spec.getPaths()[i][0]);
FileSplit fsplit = makeFullFileSplit(path);
RecordReader r = f.getRecordReader(fsplit, job, reporter);
readers.add(r);
}
return new MergedRecordReader(readers);
}
// Wraps an entire file in a single split (see getSplits(): secondaries have
// no index, so partial splits cannot be synchronized).
private FileSplit makeFullFileSplit(Path path) throws IOException {
long len = fs_.getLength(path);
return new FileSplit(path, 0, len, job_);
}
/*
private FileSplit relatedSplit(FileSplit primarySplit, int i, CompoundDirSpec spec) throws IOException
{
if(i == 0) {
return primarySplit;
}
// TODO based on custom JobConf (or indirectly: InputFormat-s?)
String path = primarySplit.getFile().getAbsolutePath();
Path rpath = new Path(path + "." + i);
long rlength = fs_.getLength(rpath);
FileSplit related = new FileSplit(rpath, 0, rlength);
return related;
}*/
// Merges several sorted RecordReaders into a single key-ordered stream
// using a priority queue keyed on each reader's current record.
class MergedRecordReader implements RecordReader {
MergedRecordReader(ArrayList/*<RecordReader>*/readers) throws IOException {
try {
readers_ = readers;
primaryReader_ = (RecordReader) readers.get(0);
q_ = new MergeQueue(readers.size(), debug_);
// Prime the queue: each stream is advanced to its first record;
// empty streams are simply not enqueued.
for (int i = 0; i < readers_.size(); i++) {
RecordReader reader = (RecordReader) readers.get(i);
WritableComparable k = (WritableComparable) job_.getInputKeyClass().newInstance();
Writable v = (Writable) job_.getInputValueClass().newInstance();
MergeRecordStream si = new MergeRecordStream(i, reader, k, v);
if (si.next()) {
q_.add(si);
}
}
} catch (Exception e) {
e.printStackTrace();
throw new IOException(e.toString());
}
}
// 1. implements RecordReader
// Pops the smallest current record off the queue, copies it into the
// caller's key/value (optionally tagging the key with ">index\t"), then
// advances the source stream or retires it when exhausted.
public boolean next(Writable key, Writable value) throws IOException {
boolean more = (q_.size() > 0);
if (!more) return false;
MergeRecordStream ms = (MergeRecordStream) q_.top();
int keyTag = inputTagged_ ? (ms.index_ + 1) : NOTAG;
assignTaggedWritable(key, ms.k_, keyTag);
assignTaggedWritable(value, ms.v_, NOTAG);
if (ms.next()) { // has another entry
q_.adjustTop();
} else {
q_.pop(); // done with this file
// Remember the primary's final position so getPos()/getProgress()
// keep working after the primary reader is closed.
if (ms.reader_ == primaryReader_) {
primaryClosed_ = true;
primaryLastPos_ = primaryReader_.getPos();
}
ms.reader_.close();
}
return true;
}
// Position/progress are reported for the primary stream only.
public long getPos() throws IOException {
if (primaryClosed_) {
return primaryLastPos_;
} else {
return primaryReader_.getPos();
}
}
public float getProgress() throws IOException {
if (primaryClosed_) {
return 1.0f;
} else {
return primaryReader_.getProgress();
}
}
// Closes every delegate reader; the first IOException encountered is
// rethrown after all readers have been attempted.
public void close() throws IOException {
IOException firstErr = null;
for (int i = 0; i < readers_.size(); i++) {
RecordReader r = (RecordReader) readers_.get(i);
try {
r.close();
} catch (IOException io) {
io.printStackTrace();
if (firstErr == null) {
firstErr = io;
}
}
}
if (firstErr != null) {
throw firstErr;
}
}
public WritableComparable createKey() {
return new Text();
}
public Writable createValue() {
return new Text();
}
// 2. utilities
final static int NOTAG = -1;
// Copies src into dst via a serialize/deserialize round-trip (the two may
// be distinct instances of the same Writable class). When tag != NOTAG the
// key text is prefixed with ">tag\t" — only UTF8/Text keys support this.
private void assignTaggedWritable(Writable dst, Writable src, int tag) {
try {
outBuf.reset();
if (tag != NOTAG) {
if (src instanceof UTF8) {
src = new UTF8(">" + tag + "\t" + src.toString()); // breaks anything?
} else if (src instanceof Text) {
src = new Text(">" + tag + "\t" + src.toString()); // breaks anything?
} else {
throw new UnsupportedOperationException("Cannot use with tags with key class "
+ src.getClass());
}
}
src.write(outBuf);
inBuf.reset(outBuf.getData(), outBuf.getLength());
dst.readFields(inBuf); // throws..
} catch (IOException io) {
// streams are backed by buffers, but buffers can run out
throw new IllegalStateException(io);
}
}
private DataInputBuffer inBuf = new DataInputBuffer();
private DataOutputBuffer outBuf = new DataOutputBuffer();
ArrayList/*<RecordReader>*/readers_;
RecordReader primaryReader_;
boolean primaryClosed_;
long primaryLastPos_;
MergeQueue q_;
}
// Set by checkReady(); guards one-time initialization.
boolean ready_;
FileSystem fs_;
JobConf job_;
boolean debug_;
// we need the JobConf: the other delegated InputFormat-s
// will only be created in the delegator RecordReader
InputFormat primary_;
boolean inputTagged_;
ArrayList/*<InputFormat>*/fmts_;
}
/**
 * Priority queue of {@link MergeRecordStream}s used to merge several sorted
 * record streams. Streams are ordered by their current key; ties are broken
 * by stream index so the merge output is deterministic.
 *
 * (FIX: removed the private field {@code done}, which was never read or
 * written anywhere in the class — dead code.)
 */
class MergeQueue extends PriorityQueue // <MergeRecordStream>
{
    private boolean debug;

    public MergeQueue(int size, boolean debug) throws IOException {
        initialize(size);
        this.debug = debug;
    }

    /** Enqueues a stream positioned at its current record. */
    public void add(MergeRecordStream reader) throws IOException {
        super.put(reader);
    }

    /**
     * Orders streams by their current key; on equal keys the stream with the
     * lower input index wins, keeping the merge deterministic.
     */
    protected boolean lessThan(Object a, Object b) {
        MergeRecordStream ra = (MergeRecordStream) a;
        MergeRecordStream rb = (MergeRecordStream) b;
        int cp = ra.k_.compareTo(rb.k_);
        if (debug) {
            System.err.println("MergerInputFormat:lessThan " + ra.k_ + ", " + rb.k_ + " cp=" + cp);
        }
        if (cp == 0) {
            return (ra.index_ < rb.index_);
        } else {
            return (cp < 0);
        }
    }

    /**
     * Closes every reader still in the queue; the first IOException
     * encountered is rethrown only after all readers have been attempted.
     */
    public void close() throws IOException {
        IOException firstErr = null;
        MergeRecordStream mr;
        while ((mr = (MergeRecordStream) pop()) != null) {
            try {
                mr.reader_.close();
            } catch (IOException io) {
                io.printStackTrace();
                if (firstErr == null) {
                    firstErr = io;
                }
            }
        }
        if (firstErr != null) {
            throw firstErr;
        }
    }
}
/**
 * Pairs a RecordReader with the key/value instances holding its current
 * record, plus the reader's position in the input-spec list (used for
 * tie-breaking in {@link MergeQueue}).
 */
class MergeRecordStream {
    int index_;
    RecordReader reader_;
    WritableComparable k_;
    Writable v_;

    public MergeRecordStream(int index, RecordReader reader, WritableComparable k, Writable v)
        throws IOException {
        this.index_ = index;
        this.reader_ = reader;
        this.k_ = k;
        this.v_ = v;
    }

    /** Advances to the next record, refreshing k_/v_; false at end of stream. */
    public boolean next() throws IOException {
        return reader_.next(k_, v_);
    }
}
| |
package com.dhl.serv.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.web.cors.CorsConfiguration;
/**
* Properties specific to JHipster.
*
* <p>
* Properties are configured in the application.yml file.
* </p>
*/
@ConfigurationProperties(prefix = "jhipster", ignoreUnknownFields = false)
public class JHipsterProperties {
    // Each nested static class below binds one "jhipster.*" section of
    // application.yml; field initializers provide the defaults.
    private final Async async = new Async();
    private final Http http = new Http();
    private final Cache cache = new Cache();
    private final Mail mail = new Mail();
    private final Security security = new Security();
    private final Swagger swagger = new Swagger();
    private final Metrics metrics = new Metrics();
    private final CorsConfiguration cors = new CorsConfiguration();
    private final Ribbon ribbon = new Ribbon();
    public Async getAsync() {
        return async;
    }
    public Http getHttp() {
        return http;
    }
    public Cache getCache() {
        return cache;
    }
    public Mail getMail() {
        return mail;
    }
    public Security getSecurity() {
        return security;
    }
    public Swagger getSwagger() {
        return swagger;
    }
    public Metrics getMetrics() {
        return metrics;
    }
    public CorsConfiguration getCors() {
        return cors;
    }
    public Ribbon getRibbon() {
        return ribbon;
    }
    /** Thread-pool settings for the application's async task executor. */
    public static class Async {
        private int corePoolSize = 2;
        private int maxPoolSize = 50;
        private int queueCapacity = 10000;
        public int getCorePoolSize() {
            return corePoolSize;
        }
        public void setCorePoolSize(int corePoolSize) {
            this.corePoolSize = corePoolSize;
        }
        public int getMaxPoolSize() {
            return maxPoolSize;
        }
        public void setMaxPoolSize(int maxPoolSize) {
            this.maxPoolSize = maxPoolSize;
        }
        public int getQueueCapacity() {
            return queueCapacity;
        }
        public void setQueueCapacity(int queueCapacity) {
            this.queueCapacity = queueCapacity;
        }
    }
    /** HTTP settings (currently only static-resource cache headers). */
    public static class Http {
        private final Cache cache = new Cache();
        public Cache getCache() {
            return cache;
        }
        public static class Cache {
            // default: 4 years (1461 days, including one leap day)
            private int timeToLiveInDays = 1461;
            public int getTimeToLiveInDays() {
                return timeToLiveInDays;
            }
            public void setTimeToLiveInDays(int timeToLiveInDays) {
                this.timeToLiveInDays = timeToLiveInDays;
            }
        }
    }
    /** Application-level (e.g. Ehcache) cache defaults. */
    public static class Cache {
        private int timeToLiveSeconds = 3600;
        public int getTimeToLiveSeconds() {
            return timeToLiveSeconds;
        }
        public void setTimeToLiveSeconds(int timeToLiveSeconds) {
            this.timeToLiveSeconds = timeToLiveSeconds;
        }
    }
    /** Outgoing e-mail defaults. */
    public static class Mail {
        private String from = "ProyService1@localhost";
        public String getFrom() {
            return from;
        }
        public void setFrom(String from) {
            this.from = from;
        }
    }
    /** Security (JWT authentication) settings. */
    public static class Security {
        private final Authentication authentication = new Authentication();
        public Authentication getAuthentication() {
            return authentication;
        }
        public static class Authentication {
            private final Jwt jwt = new Jwt();
            public Jwt getJwt() {
                return jwt;
            }
            public static class Jwt {
                // no default secret on purpose: it must come from configuration
                private String secret;
                private long tokenValidityInSeconds = 1800;
                private long tokenValidityInSecondsForRememberMe = 2592000;
                public String getSecret() {
                    return secret;
                }
                public void setSecret(String secret) {
                    this.secret = secret;
                }
                public long getTokenValidityInSeconds() {
                    return tokenValidityInSeconds;
                }
                public void setTokenValidityInSeconds(long tokenValidityInSeconds) {
                    this.tokenValidityInSeconds = tokenValidityInSeconds;
                }
                public long getTokenValidityInSecondsForRememberMe() {
                    return tokenValidityInSecondsForRememberMe;
                }
                public void setTokenValidityInSecondsForRememberMe(long tokenValidityInSecondsForRememberMe) {
                    this.tokenValidityInSecondsForRememberMe = tokenValidityInSecondsForRememberMe;
                }
            }
        }
    }
    /** Swagger / API documentation metadata. */
    public static class Swagger {
        private String title = "ProyService1 API";
        private String description = "ProyService1 API documentation";
        private String version = "0.0.1";
        private String termsOfServiceUrl;
        private String contactName;
        private String contactUrl;
        private String contactEmail;
        private String license;
        private String licenseUrl;
        public String getTitle() {
            return title;
        }
        public void setTitle(String title) {
            this.title = title;
        }
        public String getDescription() {
            return description;
        }
        public void setDescription(String description) {
            this.description = description;
        }
        public String getVersion() {
            return version;
        }
        public void setVersion(String version) {
            this.version = version;
        }
        public String getTermsOfServiceUrl() {
            return termsOfServiceUrl;
        }
        public void setTermsOfServiceUrl(String termsOfServiceUrl) {
            this.termsOfServiceUrl = termsOfServiceUrl;
        }
        public String getContactName() {
            return contactName;
        }
        public void setContactName(String contactName) {
            this.contactName = contactName;
        }
        public String getContactUrl() {
            return contactUrl;
        }
        public void setContactUrl(String contactUrl) {
            this.contactUrl = contactUrl;
        }
        public String getContactEmail() {
            return contactEmail;
        }
        public void setContactEmail(String contactEmail) {
            this.contactEmail = contactEmail;
        }
        public String getLicense() {
            return license;
        }
        public void setLicense(String license) {
            this.license = license;
        }
        public String getLicenseUrl() {
            return licenseUrl;
        }
        public void setLicenseUrl(String licenseUrl) {
            this.licenseUrl = licenseUrl;
        }
    }
    /** Metrics reporters: JMX, Spark, Graphite and periodic log reporting. */
    public static class Metrics {
        private final Jmx jmx = new Jmx();
        private final Spark spark = new Spark();
        private final Graphite graphite = new Graphite();
        private final Logs logs = new Logs();
        public Jmx getJmx() {
            return jmx;
        }
        public Spark getSpark() {
            return spark;
        }
        public Graphite getGraphite() {
            return graphite;
        }
        public Logs getLogs() {
            return logs;
        }
        public static class Jmx {
            private boolean enabled = true;
            public boolean isEnabled() {
                return enabled;
            }
            public void setEnabled(boolean enabled) {
                this.enabled = enabled;
            }
        }
        public static class Spark {
            private boolean enabled = false;
            private String host = "localhost";
            private int port = 9999;
            public boolean isEnabled() {
                return enabled;
            }
            public void setEnabled(boolean enabled) {
                this.enabled = enabled;
            }
            public String getHost() {
                return host;
            }
            public void setHost(String host) {
                this.host = host;
            }
            public int getPort() {
                return port;
            }
            public void setPort(int port) {
                this.port = port;
            }
        }
        public static class Graphite {
            private boolean enabled = false;
            private String host = "localhost";
            private int port = 2003;
            private String prefix = "ProyService1";
            public boolean isEnabled() {
                return enabled;
            }
            public void setEnabled(boolean enabled) {
                this.enabled = enabled;
            }
            public String getHost() {
                return host;
            }
            public void setHost(String host) {
                this.host = host;
            }
            public int getPort() {
                return port;
            }
            public void setPort(int port) {
                this.port = port;
            }
            public String getPrefix() {
                return prefix;
            }
            public void setPrefix(String prefix) {
                this.prefix = prefix;
            }
        }
        public static class Logs {
            private boolean enabled = false;
            private long reportFrequency = 60;
            public long getReportFrequency() {
                return reportFrequency;
            }
            // FIX: parameter widened from int to long to match the field and
            // getter type (JavaBeans property consistency); source-compatible
            // for existing callers since int widens to long.
            public void setReportFrequency(long reportFrequency) {
                this.reportFrequency = reportFrequency;
            }
            public boolean isEnabled() {
                return enabled;
            }
            public void setEnabled(boolean enabled) {
                this.enabled = enabled;
            }
        }
    }
    private final Logging logging = new Logging();
    public Logging getLogging() { return logging; }
    /** Logging settings (Logstash appender). */
    public static class Logging {
        private final Logstash logstash = new Logstash();
        public Logstash getLogstash() { return logstash; }
        public static class Logstash {
            private boolean enabled = false;
            private String host = "localhost";
            private int port = 5000;
            private int queueSize = 512;
            public boolean isEnabled() { return enabled; }
            public void setEnabled(boolean enabled) { this.enabled = enabled; }
            public String getHost() { return host; }
            public void setHost(String host) { this.host = host; }
            public int getPort() { return port; }
            public void setPort(int port) { this.port = port; }
            public int getQueueSize() { return queueSize; }
            public void setQueueSize(int queueSize) { this.queueSize = queueSize; }
        }
    }
    /** Ribbon display settings (which Spring profiles show the ribbon). */
    public static class Ribbon {
        private String[] displayOnActiveProfiles;
        public String[] getDisplayOnActiveProfiles() {
            return displayOnActiveProfiles;
        }
        public void setDisplayOnActiveProfiles(String[] displayOnActiveProfiles) {
            this.displayOnActiveProfiles = displayOnActiveProfiles;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.store;
import junit.framework.TestCase;
import org.apache.ode.dao.store.ConfStoreDAOConnection;
import org.apache.ode.dao.store.ConfStoreDAOConnectionFactory;
import org.apache.ode.dao.store.DeploymentUnitDAO;
import org.apache.ode.dao.store.ProcessConfDAO;
import org.apache.ode.il.config.OdeConfigProperties;
import org.apache.ode.il.dbutil.Database;
import org.apache.ode.il.txutil.TxManager;
import java.util.Properties;
import javax.transaction.TransactionManager;
import javax.xml.namespace.QName;
// Integration test for the configuration-store DAO layer: exercises
// deployment-unit and process CRUD plus transaction commit/rollback
// semantics against the DAO connection factory configured via system
// properties. Every operation runs inside an explicit JTA transaction.
public class DaoTest extends TestCase {
ConfStoreDAOConnectionFactory _cf;
Database _db;
TransactionManager _txm;
// Builds the database, transaction manager and DAO connection factory from
// OdeConfigProperties; the DAO factory class can be overridden with the
// PROP_DAOCF_STORE system property.
public void setUp() throws Exception {
Properties props = new Properties();
props.setProperty(OdeConfigProperties.PROP_DAOCF_STORE,System.getProperty(OdeConfigProperties.PROP_DAOCF_STORE,OdeConfigProperties.DEFAULT_DAOCF_STORE_CLASS));
OdeConfigProperties odeProps = new OdeConfigProperties(props, "");
TxManager tx = new TxManager(odeProps);
_txm = tx.createTransactionManager();
_db = new Database(odeProps);
_db.setTransactionManager(_txm);
_db.start();
_cf = _db.createDaoStoreCF();
}
public void tearDown() throws Exception {
_cf.shutdown();
_db.shutdown();
}
// A fresh store has no deployment units and lookups return null.
public void testEmpty() throws Exception {
ConfStoreDAOConnection conn = _cf.getConnection();
_txm.begin();
assertEquals(0, conn.getDeploymentUnits().size());
assertNull(conn.getDeploymentUnit("foobar"));
_txm.commit();
conn.close();
}
// A committed deployment unit is visible from a second connection.
public void testCreateDU() throws Exception{
ConfStoreDAOConnection conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.createDeploymentUnit("foo");
assertNotNull(du);
assertEquals("foo", du.getName());
assertNotNull(du.getDeployDate());
} finally {
_txm.commit();
conn.close();
}
conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.getDeploymentUnit("foo");
assertNotNull(du);
assertEquals("foo", du.getName());
} finally {
_txm.commit();
}
}
// A deployment unit created in a rolled-back transaction must not persist.
public void testRollback() throws Exception {
ConfStoreDAOConnection conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.createDeploymentUnit("foo");
assertNotNull(du);
assertEquals("foo", du.getName());
assertNotNull(du.getDeployDate());
} finally {
// rollback instead of commit: the insert above must be discarded
_txm.rollback();
conn.close();
}
conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.getDeploymentUnit("foo");
assertNull(du);
} finally {
_txm.commit();
}
}
// Multiple committed deployment units are all retrievable by name; an
// unknown name still returns null.
public void testGetDeploymentUnits() throws Exception {
ConfStoreDAOConnection conn = _cf.getConnection();
_txm.begin();
try {
conn.createDeploymentUnit("foo1");
conn.createDeploymentUnit("foo2");
conn.createDeploymentUnit("foo3");
conn.createDeploymentUnit("foo4");
} finally {
_txm.commit();
conn.close();
}
conn = _cf.getConnection();
_txm.begin();
try {
assertNotNull(conn.getDeploymentUnit("foo1"));
assertNotNull(conn.getDeploymentUnit("foo2"));
assertNotNull(conn.getDeploymentUnit("foo3"));
assertNotNull(conn.getDeploymentUnit("foo4"));
assertNull(conn.getDeploymentUnit("foo5"));
} finally {
_txm.commit();
}
}
// A process created under a deployment unit keeps its PID/type and links
// back to its owning deployment unit after commit.
public void testCreateProcess() throws Exception {
QName foobar = new QName("foo","bar");
ConfStoreDAOConnection conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.createDeploymentUnit("foo1");
ProcessConfDAO p = du.createProcess(foobar,foobar,1);
assertEquals(foobar,p.getPID());
assertEquals(foobar,p.getType());
assertNotNull(p.getDeploymentUnit());
assertEquals("foo1", p.getDeploymentUnit().getName());
} finally {
_txm.commit();
conn.close();
}
conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.getDeploymentUnit("foo1");
ProcessConfDAO p = du.getProcess(foobar);
assertNotNull(p);
assertNotNull(du.getProcesses());
assertEquals(foobar,p.getPID());
assertEquals(foobar,p.getType());
} finally {
_txm.commit();
conn.close();
}
}
// Properties set on a process are persisted and listed by name.
public void testProcessProperties() throws Exception {
QName foobar = new QName("foo","bar");
ConfStoreDAOConnection conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.createDeploymentUnit("foo1");
ProcessConfDAO p = du.createProcess(foobar,foobar,1);
p.setProperty(foobar,"baz");
} finally {
_txm.commit();
conn.close();
}
conn = _cf.getConnection();
_txm.begin();
try {
DeploymentUnitDAO du = conn.getDeploymentUnit("foo1");
ProcessConfDAO p = du.getProcess(foobar);
assertNotNull(p.getProperty(foobar));
assertEquals("baz", p.getProperty(foobar));
assertNotNull(p.getPropertyNames());
assertTrue(p.getPropertyNames().contains(foobar));
} finally {
_txm.commit();
conn.close();
}
}
}
| |
/*
* Copyright 2013 NGDATA nv
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ngdata.hbaseindexer.indexer;
import static com.ngdata.hbaseindexer.metrics.IndexerMetricsUtil.metricName;
import static com.ngdata.sep.impl.HBaseShims.newResultFromObject;
import static com.ngdata.sep.impl.HBaseShims.isDelete;
import static com.ngdata.sep.impl.HBaseShims.getTypeByte;
import static com.ngdata.sep.impl.HBaseShims.cloneRow;
import static com.ngdata.sep.impl.HBaseShims.cloneFamily;
import static com.ngdata.sep.impl.HBaseShims.castToCellOrKey;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Table;
import com.ngdata.hbaseindexer.ConfigureUtil;
import com.ngdata.hbaseindexer.conf.IndexerConf;
import com.ngdata.hbaseindexer.conf.IndexerConf.RowReadMode;
import com.ngdata.hbaseindexer.metrics.IndexerMetricsUtil;
import com.ngdata.hbaseindexer.parse.ResultToSolrMapper;
import com.ngdata.hbaseindexer.parse.SolrUpdateWriter;
import com.ngdata.hbaseindexer.uniquekey.UniqueKeyFormatter;
import com.ngdata.hbaseindexer.uniquekey.UniqueTableKeyFormatter;
import com.ngdata.sep.util.io.Closer;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.core.TimerContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.SolrInputDocument;
/**
 * The indexing algorithm. It receives an event from the SEP, handles it based on the configuration, and eventually
 * calls Solr.
 *
 * <p>Two concrete strategies exist, chosen by {@link IndexerConf#getMappingType()}:
 * {@link RowBasedIndexer} (one Solr document per HBase row) and
 * {@link ColumnBasedIndexer} (one Solr document per individual KeyValue/cell).
 *
 * <p>Not documented as thread-safe here: mutable protected fields are shared without
 * synchronization — NOTE(review): confirm single-threaded use per indexer instance.
 */
public abstract class Indexer {

    protected Log log = LogFactory.getLog(getClass());

    // Identifies this indexer in metrics registration and shutdown (see stop()).
    private String indexerName;
    protected IndexerConf conf;
    protected final String tableName;
    // Optional; when null, indexRowData() writes everything unsharded with shard id -1.
    private Sharder sharder;
    private SolrInputDocumentWriter solrWriter;
    protected ResultToSolrMapper mapper;
    // Turns HBase row keys / KeyValues into Solr document ids (and back, for the
    // UniqueTableKeyFormatter variant which also encodes the table name).
    protected UniqueKeyFormatter uniqueKeyFormatter;
    // Times only the calculation of index updates, not the Solr writes.
    private Timer indexingTimer;

    /**
     * Instantiate an indexer based on the given {@link IndexerConf}.
     *
     * @param tablePool used only by the ROW mapping type (for re-reading rows in DYNAMIC read mode)
     * @throws IllegalStateException if the mapping type is neither ROW nor COLUMN
     */
    public static Indexer createIndexer(String indexerName, IndexerConf conf, String tableName, ResultToSolrMapper mapper,
            HTablePool tablePool, Sharder sharder, SolrInputDocumentWriter solrWriter) {
        switch (conf.getMappingType()) {
        case COLUMN:
            return new ColumnBasedIndexer(indexerName, conf, tableName, mapper, sharder, solrWriter);
        case ROW:
            return new RowBasedIndexer(indexerName, conf, tableName, mapper, tablePool, sharder, solrWriter);
        default:
            throw new IllegalStateException("Can't determine the type of indexing to use for mapping type "
                    + conf.getMappingType());
        }
    }

    Indexer(String indexerName, IndexerConf conf, String tableName, ResultToSolrMapper mapper, Sharder sharder,
            SolrInputDocumentWriter solrWriter) {
        this.indexerName = indexerName;
        this.conf = conf;
        this.tableName = tableName;
        this.mapper = mapper;
        try {
            // Formatter class is configured per indexer; requires a public no-arg constructor.
            this.uniqueKeyFormatter = conf.getUniqueKeyFormatterClass().newInstance();
        } catch (Exception e) {
            // Wraps InstantiationException/IllegalAccessException from newInstance().
            throw new RuntimeException("Problem instantiating the UniqueKeyFormatter.", e);
        }
        ConfigureUtil.configure(uniqueKeyFormatter, conf.getGlobalParams());
        this.sharder = sharder;
        this.solrWriter = solrWriter;
        // metricName(...) is not defined in this view — presumably a static import from a
        // metrics helper; TODO confirm where it resolves from.
        this.indexingTimer = Metrics.newTimer(metricName(getClass(),
                "Index update calculation timer", indexerName),
                TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
    }

    /**
     * Returns the name of this indexer.
     *
     * @return indexer name
     */
    public String getName() {
        return indexerName;
    }

    /**
     * Build all new documents and ids to delete based on a list of {@code RowData}s.
     *
     * @param rowDataList list of RowData instances to be considered for indexing
     * @param updateCollector collects updates to be written to Solr
     * @throws IOException if reading from HBase fails (ROW mapping may re-read rows)
     */
    abstract void calculateIndexUpdates(List<RowData> rowDataList, SolrUpdateCollector updateCollector) throws IOException;

    /**
     * Create index documents based on a nested list of RowData instances, and push the
     * resulting adds, deletes-by-id and deletes-by-query to the Solr writer.
     *
     * <p>When a {@link Sharder} is configured, adds and deletes are grouped per shard
     * first; otherwise everything is written with shard id -1.
     *
     * @param rowDataList list of RowData instances to be considered for indexing
     * @throws SharderException from the sharded add path; NOTE(review): the sharded
     *         delete path wraps the same failure in a RuntimeException instead (see
     *         shardByValue) — confirm this asymmetry is intended
     */
    public void indexRowData(List<RowData> rowDataList) throws IOException, SolrServerException, SharderException {
        SolrUpdateCollector updateCollector = new SolrUpdateCollector(rowDataList.size());
        TimerContext timerContext = indexingTimer.time();
        try {
            calculateIndexUpdates(rowDataList, updateCollector);
        } finally {
            timerContext.stop();
        }
        if (log.isDebugEnabled()) {
            log.debug(String.format("Indexer %s will send to Solr %s adds and %s deletes", getName(),
                    updateCollector.getDocumentsToAdd().size(), updateCollector.getIdsToDelete().size()));
        }
        if (sharder == null) {
            // don't shard
            if (!updateCollector.getDocumentsToAdd().isEmpty()) {
                solrWriter.add(-1, updateCollector.getDocumentsToAdd());
            }
            if (!updateCollector.getIdsToDelete().isEmpty()) {
                solrWriter.deleteById(-1, updateCollector.getIdsToDelete());
            }
        } else {
            // with sharding
            if (!updateCollector.getDocumentsToAdd().isEmpty()) {
                Map<Integer, Map<String, SolrInputDocument>> addsByShard = shardByMapKey(updateCollector.getDocumentsToAdd());
                for (Map.Entry<Integer, Map<String, SolrInputDocument>> entry : addsByShard.entrySet()) {
                    solrWriter.add(entry.getKey(), entry.getValue());
                }
            }
            if (!updateCollector.getIdsToDelete().isEmpty()) {
                Map<Integer, Collection<String>> idsByShard = shardByValue(updateCollector.getIdsToDelete());
                for (Map.Entry<Integer, Collection<String>> entry : idsByShard.entrySet()) {
                    solrWriter.deleteById(entry.getKey(), Lists.newArrayList(entry.getValue()));
                }
            }
        }
        // Delete-queries are not sharded: each query goes to the writer as-is.
        for (String deleteQuery : updateCollector.getDeleteQueries()) {
            solrWriter.deleteByQuery(deleteQuery);
        }
    }

    /**
     * groups a map of (id->document) pairs by shard
     * (consider moving this to a BaseSharder class)
     *
     * @return shard number -> (document id -> document)
     * @throws SharderException if the shard for a document id cannot be computed
     */
    private Map<Integer, Map<String, SolrInputDocument>> shardByMapKey(Map<String, SolrInputDocument> documentsToAdd)
            throws SharderException {
        Table<Integer, String, SolrInputDocument> table = HashBasedTable.create();
        for (Map.Entry<String, SolrInputDocument> entry : documentsToAdd.entrySet()) {
            table.put(sharder.getShard(entry.getKey()), entry.getKey(), entry.getValue());
        }
        return table.rowMap();
    }

    /**
     * groups a list of ids by shard
     * (consider moving this to a BaseSharder class)
     *
     * <p>NOTE(review): unlike shardByMapKey, a SharderException here surfaces as an
     * unchecked RuntimeException, because it is raised inside a Guava Function.
     */
    private Map<Integer, Collection<String>> shardByValue(List<String> idsToDelete) {
        Multimap<Integer, String> map = Multimaps.index(idsToDelete, new Function<String, Integer>() {
            @Override
            public Integer apply(@Nullable String id) {
                try {
                    return sharder.getShard(id);
                } catch (SharderException e) {
                    throw new RuntimeException("error calculating hash", e);
                }
            }
        });
        return map.asMap();
    }

    /**
     * Releases the mapper and key formatter (if closeable) and unregisters this
     * indexer's metrics.
     */
    public void stop() {
        Closer.close(mapper);
        Closer.close(uniqueKeyFormatter);
        IndexerMetricsUtil.shutdownMetrics(indexerName);
    }

    /**
     * ROW mapping: one Solr document per HBase row. If the SEP event does not carry
     * enough data to build the document (DYNAMIC read mode), the row is re-read from
     * HBase through the table pool.
     */
    static class RowBasedIndexer extends Indexer {

        private HTablePool tablePool;
        // Times the HBase re-reads performed by readRow().
        private Timer rowReadTimer;

        public RowBasedIndexer(String indexerName, IndexerConf conf, String tableName, ResultToSolrMapper mapper,
                HTablePool tablePool,
                Sharder sharder, SolrInputDocumentWriter solrWriter) {
            super(indexerName, conf, tableName, mapper, sharder, solrWriter);
            this.tablePool = tablePool;
            rowReadTimer = Metrics.newTimer(metricName(getClass(), "Row read timer", indexerName), TimeUnit.MILLISECONDS,
                    TimeUnit.SECONDS);
        }

        /**
         * Re-reads the row from HBase using the Get built by the mapper, timing the read.
         * The pooled table handle is always returned via close().
         */
        private Result readRow(RowData rowData) throws IOException {
            TimerContext timerContext = rowReadTimer.time();
            try {
                HTableInterface table = tablePool.getTable(rowData.getTable());
                try {
                    Get get = mapper.getGet(rowData.getRow());
                    return table.get(get);
                } finally {
                    table.close();
                }
            } finally {
                timerContext.stop();
            }
        }

        @Override
        protected void calculateIndexUpdates(List<RowData> rowDataList, SolrUpdateCollector updateCollector) throws IOException {
            // Deduplicate to the most recent event per document id first.
            Map<String, RowData> idToRowData = calculateUniqueEvents(rowDataList);
            for (RowData rowData : idToRowData.values()) {
                String tableName = new String(rowData.getTable(), Charsets.UTF_8);
                Result result = rowData.toResult();
                // DYNAMIC mode: only hit HBase when the event payload is insufficient.
                if (conf.getRowReadMode() == RowReadMode.DYNAMIC) {
                    if (!mapper.containsRequiredData(result)) {
                        result = readRow(rowData);
                    }
                }
                // An empty Result means the row no longer exists in HBase.
                boolean rowDeleted = result.isEmpty();
                String documentId;
                if (uniqueKeyFormatter instanceof UniqueTableKeyFormatter) {
                    documentId = ((UniqueTableKeyFormatter) uniqueKeyFormatter).formatRow(rowData.getRow(),
                            rowData.getTable());
                } else {
                    documentId = uniqueKeyFormatter.formatRow(rowData.getRow());
                }
                if (rowDeleted) {
                    // Delete row from Solr as well
                    updateCollector.deleteById(documentId);
                    if (log.isDebugEnabled()) {
                        log.debug("Row " + Bytes.toString(rowData.getRow()) + ": deleted from Solr");
                    }
                } else {
                    // Wrapper injects the unique-key and table-name fields into every
                    // document the mapper emits.
                    IdAddingSolrUpdateWriter idAddingUpdateWriter = new IdAddingSolrUpdateWriter(
                            conf.getUniqueKeyField(),
                            documentId,
                            conf.getTableNameField(),
                            tableName,
                            updateCollector);
                    mapper.map(result, idAddingUpdateWriter);
                }
            }
        }

        /**
         * Calculate a map of Solr document ids to relevant RowData, only taking the most recent event for each document id.
         * Events with no relevant or delete KeyValues are dropped; later list entries
         * overwrite earlier ones for the same id.
         */
        private Map<String, RowData> calculateUniqueEvents(List<RowData> rowDataList) {
            Map<String, RowData> idToEvent = Maps.newHashMap();
            for (RowData rowData : rowDataList) {
                // Check if the event contains changes to relevant key values
                boolean relevant = false;
                for (Object kv : rowData.getKeyValues()) {
                    // isDelete(...) is not defined in this view — presumably a static
                    // import from the HBase shims; TODO confirm.
                    if (mapper.isRelevantKV(kv) || isDelete(kv)) {
                        relevant = true;
                        break;
                    }
                }
                if (!relevant) {
                    continue;
                }
                if (uniqueKeyFormatter instanceof UniqueTableKeyFormatter) {
                    idToEvent.put(((UniqueTableKeyFormatter) uniqueKeyFormatter).formatRow(rowData.getRow(),
                            rowData.getTable()), rowData);
                } else {
                    idToEvent.put(uniqueKeyFormatter.formatRow(rowData.getRow()), rowData);
                }
            }
            return idToEvent;
        }
    }

    /**
     * COLUMN mapping: one Solr document per individual KeyValue/cell. Deletes are
     * translated per delete-marker type (column, family, row).
     */
    static class ColumnBasedIndexer extends Indexer {

        public ColumnBasedIndexer(String indexerName, IndexerConf conf, String tableName, ResultToSolrMapper mapper,
                Sharder sharder, SolrInputDocumentWriter solrWriter) {
            super(indexerName, conf, tableName, mapper, sharder, solrWriter);
        }

        @Override
        protected void calculateIndexUpdates(List<RowData> rowDataList, SolrUpdateCollector updateCollector) throws IOException {
            // Deduplicate to the most recent KeyValue per document id first.
            Map<String, Object> idToKeyValue = calculateUniqueEvents(rowDataList);
            for (Entry<String, Object> idToKvEntry : idToKeyValue.entrySet()) {
                String documentId = idToKvEntry.getKey();
                Object keyValue = idToKvEntry.getValue();
                if (isDelete(keyValue)) {
                    handleDelete(documentId, keyValue, updateCollector, uniqueKeyFormatter);
                } else {
                    // newResultFromObject(...) is not defined in this view — presumably a
                    // static import from the HBase shims; TODO confirm.
                    Result result = newResultFromObject(Collections.singletonList(keyValue));
                    // Nested writers add row/family fields, then the unique-key and
                    // table-name fields, to each emitted document.
                    SolrUpdateWriter updateWriter = new RowAndFamilyAddingSolrUpdateWriter(
                            conf.getRowField(),
                            conf.getColumnFamilyField(),
                            uniqueKeyFormatter,
                            castToCellOrKey(keyValue),
                            new IdAddingSolrUpdateWriter(
                                    conf.getUniqueKeyField(),
                                    documentId,
                                    conf.getTableNameField(),
                                    tableName,
                                    updateCollector));
                    mapper.map(result, updateWriter);
                }
            }
        }

        /**
         * Dispatches an HBase delete marker to the matching Solr delete:
         * DeleteColumn -> delete by id, DeleteFamily -> delete the family's documents,
         * Delete (row) -> delete the row's documents. Unknown types are logged and ignored.
         */
        private void handleDelete(String documentId, Object deleteKeyValue, SolrUpdateCollector updateCollector,
                UniqueKeyFormatter uniqueKeyFormatter) {
            byte deleteType = getTypeByte(deleteKeyValue);
            if (deleteType == KeyValue.Type.DeleteColumn.getCode()) {
                updateCollector.deleteById(documentId);
            } else if (deleteType == KeyValue.Type.DeleteFamily.getCode()) {
                if (uniqueKeyFormatter instanceof UniqueTableKeyFormatter) {
                    // Table-aware formatter: recover the table name from the document id.
                    deleteFamily(deleteKeyValue, updateCollector, uniqueKeyFormatter,
                            ((UniqueTableKeyFormatter) uniqueKeyFormatter).unformatTable(documentId));
                } else {
                    deleteFamily(deleteKeyValue, updateCollector, uniqueKeyFormatter, null);
                }
            } else if (deleteType == KeyValue.Type.Delete.getCode()) {
                if (uniqueKeyFormatter instanceof UniqueTableKeyFormatter) {
                    deleteRow(deleteKeyValue, updateCollector, uniqueKeyFormatter,
                            ((UniqueTableKeyFormatter) uniqueKeyFormatter).unformatTable(documentId));
                } else {
                    deleteRow(deleteKeyValue, updateCollector, uniqueKeyFormatter, null);
                }
            } else {
                log.error(String.format("Unknown delete type %d for document %s, not doing anything", deleteType, documentId));
            }
        }

        /**
         * Delete all values for a single column family from Solr.
         * Issues a delete-by-query on (rowField AND cfField); warns and does nothing if
         * either field is not configured.
         */
        private void deleteFamily(Object deleteKeyValue, SolrUpdateCollector updateCollector,
                UniqueKeyFormatter uniqueKeyFormatter, byte[] tableName) {
            String rowField = conf.getRowField();
            String cfField = conf.getColumnFamilyField();
            String rowValue;
            String familyValue;
            if (uniqueKeyFormatter instanceof UniqueTableKeyFormatter) {
                UniqueTableKeyFormatter uniqueTableKeyFormatter = (UniqueTableKeyFormatter) uniqueKeyFormatter;
                rowValue = uniqueTableKeyFormatter.formatRow(cloneRow(deleteKeyValue), tableName);
                familyValue = uniqueTableKeyFormatter.formatFamily(cloneFamily(deleteKeyValue), tableName);
            } else {
                rowValue = uniqueKeyFormatter.formatRow(cloneRow(deleteKeyValue));
                familyValue = uniqueKeyFormatter.formatFamily(cloneFamily(deleteKeyValue));
            }
            if (rowField != null && cfField != null) {
                updateCollector.deleteByQuery(String.format("(%s:%s)AND(%s:%s)", rowField, rowValue, cfField, familyValue));
            } else {
                log.warn(String.format(
                        "Can't delete row %s and family %s from Solr because row and/or family fields not included in the indexer configuration",
                        rowValue, familyValue));
            }
        }

        /**
         * Delete all values for a single row from Solr.
         * Issues a delete-by-query on rowField; warns and does nothing if the row field
         * is not configured.
         *
         * <p>NOTE(review): the {@code tableName} parameter is never used here — unlike
         * deleteFamily, the table-aware formatRow(row, table) overload is not called even
         * when a UniqueTableKeyFormatter is in use. Confirm whether that is intentional.
         */
        private void deleteRow(Object deleteKeyValue, SolrUpdateCollector updateCollector,
                UniqueKeyFormatter uniqueKeyFormatter, byte[] tableName) {
            String rowField = conf.getRowField();
            String rowValue = uniqueKeyFormatter.formatRow(cloneRow(deleteKeyValue));
            if (rowField != null) {
                updateCollector.deleteByQuery(String.format("%s:%s", rowField, rowValue));
            } else {
                log.warn(String.format(
                        "Can't delete row %s from Solr because row field not included in indexer configuration",
                        rowValue));
            }
        }

        /**
         * Calculate a map of Solr document ids to KeyValue, only taking the most recent event for each document id.
         * Only KeyValues deemed relevant by the mapper are kept; later list entries
         * overwrite earlier ones for the same id.
         */
        private Map<String, Object> calculateUniqueEvents(List<RowData> rowDataList) {
            Map<String, Object> idToKeyValue = Maps.newHashMap();
            for (RowData rowData : rowDataList) {
                for (Object kv : rowData.getKeyValues()) {
                    if (mapper.isRelevantKV(kv)) {
                        String id;
                        if (uniqueKeyFormatter instanceof UniqueTableKeyFormatter) {
                            id = ((UniqueTableKeyFormatter) uniqueKeyFormatter).formatKeyValue(kv, rowData.getTable());
                        } else {
                            id = uniqueKeyFormatter.formatKeyValue(kv);
                        }
                        idToKeyValue.put(id, kv);
                    }
                }
            }
            return idToKeyValue;
        }
    }
}
| |
/* Generated file, do not modify. See jython/src/templates/gderived.py. */
package org.python.antlr.ast;
import java.io.Serializable;
import org.python.core.*;
import org.python.core.finalization.FinalizeTrigger;
import org.python.core.finalization.FinalizablePyObjectDerived;
public class WhileDerived extends While implements Slotted,FinalizablePyObjectDerived,TraverseprocDerived {
public PyObject getSlot(int index) {
return slots[index];
}
public void setSlot(int index,PyObject value) {
slots[index]=value;
}
private PyObject[]slots;
public void __del_derived__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__del__");
if (impl!=null) {
impl.__get__(this,self_type).__call__();
}
}
public void __ensure_finalizer__() {
FinalizeTrigger.ensureFinalizer(this);
}
/* TraverseprocDerived implementation */
public int traverseDerived(Visitproc visit,Object arg) {
int retVal;
for(int i=0;i<slots.length;++i) {
if (slots[i]!=null) {
retVal=visit.visit(slots[i],arg);
if (retVal!=0) {
return retVal;
}
}
}
retVal=visit.visit(objtype,arg);
return retVal!=0?retVal:traverseDictIfAny(visit,arg);
}
/* end of TraverseprocDerived implementation */
private PyObject dict;
public PyObject fastGetDict() {
return dict;
}
public PyObject getDict() {
return dict;
}
public void setDict(PyObject newDict) {
if (newDict instanceof PyStringMap||newDict instanceof PyDictionary) {
dict=newDict;
if (dict.__finditem__(PyString.fromInterned("__del__"))!=null&&!JyAttribute.hasAttr(this,JyAttribute.FINALIZE_TRIGGER_ATTR)) {
FinalizeTrigger.ensureFinalizer(this);
}
} else {
throw Py.TypeError("__dict__ must be set to a Dictionary "+newDict.getClass().getName());
}
}
public void delDict() {
// deleting an object's instance dict makes it grow a new one
dict=new PyStringMap();
}
public WhileDerived(PyType subtype) {
super(subtype);
slots=new PyObject[subtype.getNumSlots()];
dict=subtype.instDict();
if (subtype.needsFinalizer()) {
FinalizeTrigger.ensureFinalizer(this);
}
}
public int traverseDictIfAny(Visitproc visit,Object arg) {
return visit.visit(dict,arg);
}
public PyString __str__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__str__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__str__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__str__();
}
public PyString __repr__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__repr__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__repr__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__repr__();
}
public PyString __hex__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__hex__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__hex__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__hex__();
}
public PyString __oct__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__oct__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__oct__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__oct__();
}
public PyFloat __float__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__float__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyFloat)
return(PyFloat)res;
throw Py.TypeError("__float__"+" returned non-"+"float"+" (type "+res.getType().fastGetName()+")");
}
return super.__float__();
}
public PyComplex __complex__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__complex__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyComplex)
return(PyComplex)res;
throw Py.TypeError("__complex__"+" returned non-"+"complex"+" (type "+res.getType().fastGetName()+")");
}
return super.__complex__();
}
public PyObject __pos__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__pos__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__pos__();
}
public PyObject __neg__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__neg__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__neg__();
}
public PyObject __abs__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__abs__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__abs__();
}
public PyObject __invert__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__invert__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__invert__();
}
public PyObject __reduce__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__reduce__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__reduce__();
}
public PyObject __dir__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__dir__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__dir__();
}
public PyObject __add__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__add__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__add__(other);
}
public PyObject __radd__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__radd__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__radd__(other);
}
public PyObject __sub__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__sub__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__sub__(other);
}
public PyObject __rsub__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rsub__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rsub__(other);
}
public PyObject __mul__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__mul__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__mul__(other);
}
public PyObject __rmul__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rmul__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rmul__(other);
}
public PyObject __div__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__div__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__div__(other);
}
public PyObject __rdiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rdiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rdiv__(other);
}
public PyObject __floordiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__floordiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__floordiv__(other);
}
public PyObject __rfloordiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rfloordiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rfloordiv__(other);
}
public PyObject __truediv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__truediv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__truediv__(other);
}
public PyObject __rtruediv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rtruediv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rtruediv__(other);
}
public PyObject __mod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__mod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__mod__(other);
}
public PyObject __rmod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rmod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rmod__(other);
}
public PyObject __divmod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__divmod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__divmod__(other);
}
public PyObject __rdivmod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rdivmod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rdivmod__(other);
}
public PyObject __rpow__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rpow__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rpow__(other);
}
public PyObject __lshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__lshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__lshift__(other);
}
public PyObject __rlshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rlshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rlshift__(other);
}
public PyObject __rshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rshift__(other);
}
public PyObject __rrshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rrshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rrshift__(other);
}
public PyObject __and__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__and__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__and__(other);
}
public PyObject __rand__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rand__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rand__(other);
}
public PyObject __or__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__or__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__or__(other);
}
public PyObject __ror__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ror__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ror__(other);
}
public PyObject __xor__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__xor__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__xor__(other);
}
public PyObject __rxor__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rxor__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rxor__(other);
}
public PyObject __lt__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__lt__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__lt__(other);
}
public PyObject __le__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__le__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__le__(other);
}
public PyObject __gt__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__gt__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__gt__(other);
}
public PyObject __ge__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ge__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ge__(other);
}
public PyObject __eq__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__eq__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__eq__(other);
}
public PyObject __ne__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ne__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ne__(other);
}
public PyObject __format__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__format__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__format__(other);
}
public PyObject __iadd__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__iadd__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__iadd__(other);
}
public PyObject __isub__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__isub__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__isub__(other);
}
public PyObject __imul__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__imul__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__imul__(other);
}
public PyObject __idiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__idiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__idiv__(other);
}
public PyObject __ifloordiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ifloordiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ifloordiv__(other);
}
public PyObject __itruediv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__itruediv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__itruediv__(other);
}
public PyObject __imod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__imod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__imod__(other);
}
public PyObject __ipow__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ipow__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ipow__(other);
}
public PyObject __ilshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ilshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ilshift__(other);
}
public PyObject __irshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__irshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__irshift__(other);
}
public PyObject __iand__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__iand__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__iand__(other);
}
public PyObject __ior__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ior__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ior__(other);
}
public PyObject __ixor__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ixor__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ixor__(other);
}
public PyObject __int__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__int__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyInteger||res instanceof PyLong)
return res;
throw Py.TypeError("__int__"+" should return an integer");
}
return super.__int__();
}
public PyObject __long__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__long__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyLong||res instanceof PyInteger)
return res;
throw Py.TypeError("__long__"+" returned non-"+"long"+" (type "+res.getType().fastGetName()+")");
}
return super.__long__();
}
public int hashCode() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__hash__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyInteger) {
return((PyInteger)res).getValue();
} else
if (res instanceof PyLong) {
return((PyLong)res).getValue().intValue();
}
throw Py.TypeError("__hash__ should return a int");
}
if (self_type.lookup("__eq__")!=null||self_type.lookup("__cmp__")!=null) {
throw Py.TypeError(String.format("unhashable type: '%.200s'",getType().fastGetName()));
}
return super.hashCode();
}
// unicode() conversion: a plain str result from the override is widened to unicode.
public PyUnicode __unicode__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__unicode__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyUnicode)
return(PyUnicode)res;
if (res instanceof PyString)
return new PyUnicode((PyString)res);
throw Py.TypeError("__unicode__"+" should return a "+"unicode");
}
return super.__unicode__();
}
// Three-way comparison; -2 signals "not implemented" to the caller, and any
// override result is normalized to -1/0/1.
public int __cmp__(PyObject other) {
PyType self_type=getType();
PyObject[]where_type=new PyObject[1];
PyObject impl=self_type.lookup_where("__cmp__",where_type);
// Full compatibility with CPython __cmp__:
// If the derived type doesn't override __cmp__, the
// *internal* super().__cmp__ should be called, not the
// exposed one. The difference is that the exposed __cmp__
// throws a TypeError if the argument is an instance of the same type.
if (impl==null||where_type[0]==TYPE||Py.isSubClass(TYPE,where_type[0])) {
return super.__cmp__(other);
}
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented) {
return-2;
}
int c=res.asInt();
return c<0?-1:c>0?1:0;
}
// Truth testing: prefer __nonzero__, fall back to __len__ (CPython semantics);
// the override must return a bool or int.
public boolean __nonzero__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__nonzero__");
if (impl==null) {
impl=self_type.lookup("__len__");
if (impl==null)
return super.__nonzero__();
}
PyObject o=impl.__get__(this,self_type).__call__();
Class c=o.getClass();
if (c!=PyInteger.class&&c!=PyBoolean.class) {
throw Py.TypeError(String.format("__nonzero__ should return bool or int, returned %s",self_type.getName()));
}
return o.__nonzero__();
}
// Membership test: the override's result is truth-tested, matching "in" semantics.
public boolean __contains__(PyObject o) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__contains__");
if (impl==null)
return super.__contains__(o);
return impl.__get__(this,self_type).__call__(o).__nonzero__();
}
// len(): delegate to a Python-defined __len__ if present.
public int __len__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__len__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
return res.asInt();
}
return super.__len__();
}
// Iteration: prefer __iter__; a type with only __getitem__ gets the legacy
// index-based sequence iterator, as in CPython.
public PyObject __iter__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__iter__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
impl=self_type.lookup("__getitem__");
if (impl==null)
return super.__iter__();
return new PySequenceIter(this);
}
// Iterator advance: a Python-level next() raising StopIteration becomes a
// null return, the Java-level end-of-iteration signal.
public PyObject __iternext__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("next");
if (impl!=null) {
try {
return impl.__get__(this,self_type).__call__();
} catch (PyException exc) {
if (exc.match(Py.StopIteration))
return null;
throw exc;
}
}
return super.__iternext__(); // ???
}
// Non-throwing item lookup: a LookupError from the Python __getitem__ is
// swallowed and reported as null ("not found").
public PyObject __finditem__(PyObject key) { // ???
PyType self_type=getType();
PyObject impl=self_type.lookup("__getitem__");
if (impl!=null)
try {
return impl.__get__(this,self_type).__call__(key);
} catch (PyException exc) {
if (exc.match(Py.LookupError))
return null;
throw exc;
}
return super.__finditem__(key);
}
// int-key convenience overload of __finditem__; boxes the index for the
// Python-level __getitem__ and likewise maps LookupError to null.
public PyObject __finditem__(int key) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__getitem__");
if (impl!=null)
try {
return impl.__get__(this,self_type).__call__(new PyInteger(key));
} catch (PyException exc) {
if (exc.match(Py.LookupError))
return null;
throw exc;
}
return super.__finditem__(key);
}
public PyObject __getitem__(PyObject key) {
// Same as __finditem__, without swallowing LookupErrors. This allows
// __getitem__ implementations written in Python to raise custom
// exceptions (such as subclasses of KeyError).
//
// We are forced to duplicate the code, instead of defining __finditem__
// in terms of __getitem__. That's because PyObject defines __getitem__
// in terms of __finditem__. Therefore, we would end with an infinite
// loop when self_type.lookup("__getitem__") returns null:
//
// __getitem__ -> super.__getitem__ -> __finditem__ -> __getitem__
//
// By duplicating the (short) lookup and call code, we are safe, because
// the call chains will be:
//
// __finditem__ -> super.__finditem__
//
// __getitem__ -> super.__getitem__ -> __finditem__ -> super.__finditem__
PyType self_type=getType();
PyObject impl=self_type.lookup("__getitem__");
if (impl!=null)
return impl.__get__(this,self_type).__call__(key);
return super.__getitem__(key);
}
// Item assignment: delegate to a Python-defined __setitem__ if present.
public void __setitem__(PyObject key,PyObject value) { // ???
PyType self_type=getType();
PyObject impl=self_type.lookup("__setitem__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(key,value);
return;
}
super.__setitem__(key,value);
}
// Legacy slice read: an extended (stepped) slice is routed through __getitem__
// with a PySlice; otherwise a Python-defined two-index __getslice__ may handle it.
public PyObject __getslice__(PyObject start,PyObject stop,PyObject step) { // ???
if (step!=null) {
return __getitem__(new PySlice(start,stop,step));
}
PyType self_type=getType();
PyObject impl=self_type.lookup("__getslice__");
if (impl!=null) {
PyObject[]indices=PySlice.indices2(this,start,stop);
return impl.__get__(this,self_type).__call__(indices[0],indices[1]);
}
return super.__getslice__(start,stop,step);
}
// Legacy slice write: stepped slices go through __setitem__ with a PySlice;
// otherwise a Python-defined __setslice__ gets normalized start/stop indices.
public void __setslice__(PyObject start,PyObject stop,PyObject step,PyObject value) {
if (step!=null) {
__setitem__(new PySlice(start,stop,step),value);
return;
}
PyType self_type=getType();
PyObject impl=self_type.lookup("__setslice__");
if (impl!=null) {
PyObject[]indices=PySlice.indices2(this,start,stop);
impl.__get__(this,self_type).__call__(indices[0],indices[1],value);
return;
}
super.__setslice__(start,stop,step,value);
}
// Legacy slice delete: stepped slices go through __delitem__ with a PySlice;
// otherwise a Python-defined __delslice__ gets normalized start/stop indices.
public void __delslice__(PyObject start,PyObject stop,PyObject step) {
if (step!=null) {
__delitem__(new PySlice(start,stop,step));
return;
}
PyType self_type=getType();
PyObject impl=self_type.lookup("__delslice__");
if (impl!=null) {
PyObject[]indices=PySlice.indices2(this,start,stop);
impl.__get__(this,self_type).__call__(indices[0],indices[1]);
return;
}
super.__delslice__(start,stop,step);
}
// Item deletion: delegate to a Python-defined __delitem__ if present.
public void __delitem__(PyObject key) { // ???
PyType self_type=getType();
PyObject impl=self_type.lookup("__delitem__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(key);
return;
}
super.__delitem__(key);
}
// Callable protocol: delegate to a Python-defined __call__ if present.
public PyObject __call__(PyObject args[],String keywords[]) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__call__");
if (impl!=null) {
return impl.__get__(this,self_type).__call__(args,keywords);
}
return super.__call__(args,keywords);
}
// Attribute lookup is delegated to the shared Deriveds helper for derived types.
public PyObject __findattr_ex__(String name) {
return Deriveds.__findattr_ex__(this,name);
}
// Attribute assignment: delegate to a Python-defined __setattr__ if present.
public void __setattr__(String name,PyObject value) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__setattr__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(PyString.fromInterned(name),value);
//CPython does not support instance-acquired finalizers.
//So we don't check for __del__ here.
return;
}
super.__setattr__(name,value);
}
// Attribute deletion: delegate to a Python-defined __delattr__ if present.
public void __delattr__(String name) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__delattr__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(PyString.fromInterned(name));
return;
}
super.__delattr__(name);
}
// Descriptor get: null obj/type become Py.None before calling the Python override,
// matching the signature __get__(self, instance, owner).
public PyObject __get__(PyObject obj,PyObject type) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__get__");
if (impl!=null) {
if (obj==null)
obj=Py.None;
if (type==null)
type=Py.None;
return impl.__get__(this,self_type).__call__(obj,type);
}
return super.__get__(obj,type);
}
// Descriptor set: delegate to a Python-defined __set__ if present.
public void __set__(PyObject obj,PyObject value) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__set__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(obj,value);
return;
}
super.__set__(obj,value);
}
// Descriptor delete: delegate to a Python-defined __delete__ if present.
public void __delete__(PyObject obj) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__delete__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(obj);
return;
}
super.__delete__(obj);
}
// pow(): the Python override is called with one or two arguments depending on
// whether a modulo was supplied; Py.NotImplemented maps to null.
public PyObject __pow__(PyObject other,PyObject modulo) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__pow__");
if (impl!=null) {
PyObject res;
if (modulo==null) {
res=impl.__get__(this,self_type).__call__(other);
} else {
res=impl.__get__(this,self_type).__call__(other,modulo);
}
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__pow__(other,modulo);
}
// Constructor dispatch is delegated to the shared Deriveds helper.
public void dispatch__init__(PyObject[]args,String[]keywords) {
Deriveds.dispatch__init__(this,args,keywords);
}
// operator.index(): a Python-level __index__ override must return an int or long.
public PyObject __index__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__index__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyInteger||res instanceof PyLong) {
return res;
}
throw Py.TypeError(String.format("__index__ returned non-(int,long) (type %s)",res.getType().fastGetName()));
}
return super.__index__();
}
public Object __tojava__(Class c) {
// If we are not being asked by the "default" conversion to java, then
// we can provide this as the result, as long as it is a instance of the
// specified class. Without this, derived.__tojava__(PyObject.class)
// would broke. (And that's not pure speculation: PyReflectedFunction's
// ReflectedArgs asks for things like that).
if ((c!=Object.class)&&(c!=Serializable.class)&&(c.isInstance(this))) {
return this;
}
// Otherwise, we call the derived __tojava__, if it exists:
PyType self_type=getType();
PyObject impl=self_type.lookup("__tojava__");
if (impl!=null) {
// A result other than "this" is unwrapped via the default Object conversion.
PyObject delegate=impl.__get__(this,self_type).__call__(Py.java2py(c));
if (delegate!=this)
return delegate.__tojava__(Object.class);
}
return super.__tojava__(c);
}
// Numeric coercion: Py.NotImplemented maps to Py.None; any other override
// result must be a 2-tuple, whose backing array is returned.
public Object __coerce_ex__(PyObject o) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__coerce__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(o);
if (res==Py.NotImplemented)
return Py.None;
if (!(res instanceof PyTuple))
throw Py.TypeError("__coerce__ didn't return a 2-tuple");
return((PyTuple)res).getArray();
}
return super.__coerce_ex__(o);
}
// Java toString bridges to a Python-defined __repr__, which must return a str.
public String toString() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__repr__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (!(res instanceof PyString))
throw Py.TypeError("__repr__ returned non-string (type "+res.getType().fastGetName()+")");
return((PyString)res).toString();
}
return super.toString();
}
}
| |
package org.opencds.cqf.ruler.cpg.r4.provider;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.tuple.Pair;
import org.cqframework.cql.elm.execution.VersionedIdentifier;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CanonicalType;
import org.hl7.fhir.r4.model.DataRequirement;
import org.hl7.fhir.r4.model.Endpoint;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Library;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.Parameters.ParametersParameterComponent;
import org.hl7.fhir.r4.model.PrimitiveType;
import org.hl7.fhir.r4.model.Resource;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.Type;
import org.opencds.cqf.cql.engine.data.CompositeDataProvider;
import org.opencds.cqf.cql.engine.data.DataProvider;
import org.opencds.cqf.cql.engine.debug.DebugMap;
import org.opencds.cqf.cql.engine.execution.CqlEngine;
import org.opencds.cqf.cql.engine.execution.EvaluationResult;
import org.opencds.cqf.cql.engine.execution.LibraryLoader;
import org.opencds.cqf.cql.engine.fhir.retrieve.RestFhirRetrieveProvider;
import org.opencds.cqf.cql.engine.fhir.searchparam.SearchParameterResolver;
import org.opencds.cqf.cql.engine.fhir.terminology.R4FhirTerminologyProvider;
import org.opencds.cqf.cql.engine.model.ModelResolver;
import org.opencds.cqf.cql.engine.retrieve.RetrieveProvider;
import org.opencds.cqf.cql.engine.terminology.TerminologyProvider;
import org.opencds.cqf.cql.evaluator.builder.library.FhirRestLibraryContentProviderFactory;
import org.opencds.cqf.cql.evaluator.cql2elm.content.InMemoryLibraryContentProvider;
import org.opencds.cqf.cql.evaluator.cql2elm.content.LibraryContentProvider;
import org.opencds.cqf.cql.evaluator.engine.retrieve.BundleRetrieveProvider;
import org.opencds.cqf.cql.evaluator.engine.retrieve.PriorityRetrieveProvider;
import org.opencds.cqf.ruler.cpg.r4.util.FhirMeasureBundler;
import org.opencds.cqf.ruler.cql.CqlProperties;
import org.opencds.cqf.ruler.cql.JpaFhirDal;
import org.opencds.cqf.ruler.cql.JpaFhirDalFactory;
import org.opencds.cqf.ruler.cql.JpaFhirRetrieveProvider;
import org.opencds.cqf.ruler.cql.JpaLibraryContentProviderFactory;
import org.opencds.cqf.ruler.cql.JpaTerminologyProviderFactory;
import org.opencds.cqf.ruler.cql.LibraryLoaderFactory;
import org.opencds.cqf.ruler.provider.DaoRegistryOperationProvider;
import org.opencds.cqf.ruler.utility.Canonicals;
import org.opencds.cqf.ruler.utility.Clients;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.rest.annotation.Operation;
import ca.uhn.fhir.rest.annotation.OperationParam;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.client.api.IGenericClient;
/**
* This class is used to provide an {@link DaoRegistryOperationProvider
* OperationProvider}
 * implementation that supports CQL expression evaluation
* Created by Bryn on 1/16/2017.
*/
public class CqlExecutionProvider extends DaoRegistryOperationProvider {
private static final Logger logger = LoggerFactory.getLogger(CqlExecutionProvider.class);
private FhirMeasureBundler bundler = new FhirMeasureBundler();
@Autowired
private LibraryLoaderFactory libraryLoaderFactory;
@Autowired
private JpaLibraryContentProviderFactory jpaLibraryContentProviderFactory;
@Autowired
private FhirRestLibraryContentProviderFactory fhirRestLibraryContentProviderFactory;
@Autowired
private JpaTerminologyProviderFactory jpaTerminologyProviderFactory;
@Autowired
private JpaFhirDalFactory jpaFhirDalFactory;
@Autowired
ModelResolver myModelResolver;
@Autowired
private CqlProperties myCqlProperties;
@Autowired
Map<VersionedIdentifier, org.cqframework.cql.elm.execution.Library> globalLibraryCache;
/**
* A library to be included. The library is resolved by url and made available
* by name within the expression to be evaluated.
*/
class LibraryParameter {
/**
 * The {@link CanonicalType} canonical url (with optional version) of the
 * library to be included
 */
CanonicalType url;
/**
 * The name of the library to be used to reference the library within the CQL
 * expression. If no name is provided, the name of the library will be used
 */
String name;
/** Fluent setter for {@link #url}; returns this instance for chaining. */
public LibraryParameter withUrl(CanonicalType url) {
this.url = url;
return this;
}
/** Fluent setter for {@link #name}; returns this instance for chaining. */
public LibraryParameter withName(String name) {
this.name = name;
return this;
}
}
/**
* Data to be made available to the library evaluation, organized as prefetch
* response bundles. Each prefetchData parameter specifies either the name of
* the prefetchKey it is satisfying, a DataRequirement describing the prefetch,
* or both.
*/
class PrefetchData {
/**
 * The key of the prefetch item. This typically corresponds to the name of a
 * parameter in a library, or the name of a prefetch item in a CDS Hooks
 * discovery response
 */
String key;
/**
 * A {@link DataRequirement} DataRequirement describing the content of the
 * prefetch item.
 */
DataRequirement descriptor;
/**
 * The prefetch data as a {@link Bundle} Bundle. If the prefetchData has no
 * prefetchResult part, it indicates there is no data associated with this
 * prefetch item.
 */
Bundle data;
/** Fluent setter for {@link #key}; returns this instance for chaining. */
public PrefetchData withKey(String key) {
this.key = key;
return this;
}
/** Fluent setter for {@link #descriptor}; returns this instance for chaining. */
public PrefetchData withDescriptor(DataRequirement descriptor) {
this.descriptor = descriptor;
return this;
}
/** Fluent setter for {@link #data}; returns this instance for chaining. */
public PrefetchData withData(Bundle data) {
this.data = data;
return this;
}
}
/**
* Evaluates a CQL expression and returns the results as a Parameters resource.
*
* @param theRequestDetails the {@link RequestDetails RequestDetails}
* @param subject Subject for which the expression will be
* evaluated. This corresponds to the context in
* which the expression will be evaluated and is
* represented as a relative FHIR id (e.g.
* Patient/123), which establishes both the context
* and context value for the evaluation
* @param expression Expression to be evaluated. Note that this is an
* expression of CQL, not the text of a library with
* definition statements.
* @param parameters Any input parameters for the expression.
* {@link Parameters} Parameters defined in this
* input will be made available by name to the CQL
* expression. Parameter types are mapped to CQL as
* specified in the Using CQL section of the CPG
* Implementation guide. If a parameter appears more
* than once in the input Parameters resource, it is
* represented with a List in the input CQL. If a
* parameter has parts, it is represented as a Tuple
* in the input CQL.
* @param library A library to be included. The {@link Library}
* library is resolved by url and made available by
* name within the expression to be evaluated.
* @param useServerData Whether to use data from the server performing the
* evaluation. If this parameter is true (the
* default), then the operation will use data first
* from any bundles provided as parameters (through
* the data and prefetch parameters), second data
* from the server performing the operation, and
* third, data from the dataEndpoint parameter (if
* provided). If this parameter is false, the
* operation will use data first from the bundles
* provided in the data or prefetch parameters, and
* second from the dataEndpoint parameter (if
* provided).
* @param data Data to be made available to the library
* evaluation. This parameter is exclusive with the
* prefetchData parameter (i.e. either provide all
* data as a single bundle, or provide data using
* multiple bundles with prefetch descriptions).
* @param prefetchData ***Not Yet Implemented***
* @param dataEndpoint An {@link Endpoint} endpoint to use to access data
* referenced by retrieve operations in the library.
* If provided, this endpoint is used after the data
* or prefetchData bundles, and the server, if the
* useServerData parameter is true.
* @param contentEndpoint An {@link Endpoint} endpoint to use to access
* content (i.e. libraries) referenced by the
* library. If no content endpoint is supplied, the
* evaluation will attempt to retrieve content from
* the server on which the operation is being
* performed.
* @param terminologyEndpoint An {@link Endpoint} endpoint to use to access
* terminology (i.e. valuesets, codesystems, and
* membership testing) referenced by the library. If
* no terminology endpoint is supplied, the
* evaluation will attempt to use the server on which
* the operation is being performed as the
* terminology server.
* @return The result of evaluating the given expression, returned as a FHIR
* type, either a {@link Resource} resource, or a FHIR-defined type
* corresponding to the CQL return type, as defined in the Using CQL
* section of the CPG Implementation guide. If the result is a List of
* resources, the result will be a {@link Bundle} Bundle . If the result
* is a CQL system-defined or FHIR-defined type, the result is returned
* as a {@link Parameters} Parameters resource
*/
@Operation(name = "$cql")
@Description(shortDefinition = "$cql", value = "Evaluates a CQL expression and returns the results as a Parameters resource. Defined: http://build.fhir.org/ig/HL7/cqf-recommendations/OperationDefinition-cpg-cql.html", example = "$cql?expression=5*5")
public Parameters evaluate(RequestDetails theRequestDetails,
		@OperationParam(name = "subject", max = 1) String subject,
		@OperationParam(name = "expression", min = 1, max = 1) String expression,
		@OperationParam(name = "parameters", max = 1) Parameters parameters,
		@OperationParam(name = "library") List<Parameters> library,
		@OperationParam(name = "useServerData", max = 1) BooleanType useServerData,
		@OperationParam(name = "data", max = 1) Bundle data,
		@OperationParam(name = "prefetchData") List<Parameters> prefetchData,
		@OperationParam(name = "dataEndpoint", max = 1) Endpoint dataEndpoint,
		@OperationParam(name = "contentEndpoint", max = 1) Endpoint contentEndpoint,
		@OperationParam(name = "terminologyEndpoint", max = 1) Endpoint terminologyEndpoint) {
	if (prefetchData != null) {
		throw new NotImplementedException("prefetchData is not yet supported.");
	}
	// Server data is used by default unless the caller explicitly opts out.
	if (useServerData == null) {
		useServerData = new BooleanType(true);
	}
	// Each "library" parameter may carry only "url" (required) and "name" parts.
	List<LibraryParameter> libraryParameters = new ArrayList<>();
	if (library != null) {
		for (Parameters libraryParameter : library) {
			CanonicalType url = null;
			String name = null;
			for (ParametersParameterComponent param : libraryParameter.getParameter()) {
				switch (param.getName()) {
					case "url":
						url = ((CanonicalType) param.getValue());
						break;
					case "name":
						name = ((StringType) param.getValue()).asStringValue();
						break;
					default:
						throw new IllegalArgumentException("Only url and name parts are allowed for Parameter: library");
				}
			}
			if (url == null) {
				// Fixed message: previously read "If library parameter must provide a url parameter part."
				throw new IllegalArgumentException("The library parameter must provide a url parameter part.");
			}
			libraryParameters.add(new LibraryParameter().withUrl(url).withName(name));
		}
	}
	// Evict any previously compiled LocalLibrary so a stale ad-hoc library is
	// never reused between invocations.
	VersionedIdentifier localLibraryIdentifier = new VersionedIdentifier().withId("LocalLibrary")
			.withVersion("1.0.0");
	globalLibraryCache.remove(localLibraryIdentifier);
	CqlEngine engine = setupEngine(localLibraryIdentifier, expression, libraryParameters, subject, parameters,
			contentEndpoint,
			dataEndpoint, terminologyEndpoint, data, useServerData.booleanValue(), theRequestDetails);
	Map<String, Object> resolvedParameters = new HashMap<>();
	if (parameters != null) {
		for (Parameters.ParametersParameterComponent pc : parameters.getParameter()) {
			resolvedParameters.put(pc.getName(), pc.getValue());
		}
	}
	// BUG FIX: both values were previously computed as
	// subject.substring(0, subject.lastIndexOf("/") - 1), which truncated the
	// context type (e.g. "Patien" for "Patient/123") and never extracted the id,
	// and threw StringIndexOutOfBoundsException for a subject without a "/".
	String contextType = null;
	String subjectId = null;
	if (subject != null) {
		int slash = subject.lastIndexOf('/');
		// "Patient/123" -> contextType "Patient", subjectId "123"; a bare id has no type.
		contextType = slash > 0 ? subject.substring(0, slash) : null;
		subjectId = slash >= 0 ? subject.substring(slash + 1) : subject;
	}
	EvaluationResult evalResult = engine.evaluate(localLibraryIdentifier, null,
			Pair.of(contextType != null ? contextType : "Unspecified", subjectId == null ? "null" : subject),
			resolvedParameters, this.getDebugMap());
	if (evalResult != null && evalResult.expressionResults != null) {
		if (evalResult.expressionResults.size() > 1) {
			logger.debug("Evaluation resulted in more than one expression result. ");
		}
		Parameters result = new Parameters();
		resolveResult(theRequestDetails, evalResult, result);
		return result;
	}
	return null;
}
// Builds the ad-hoc "LocalLibrary" CQL source, registers it with an in-memory
// content provider, and then delegates engine wiring to the data/terminology
// overload. Order matters: the temporary loader must exist before the CQL text
// is built so that include dependencies can be resolved.
private CqlEngine setupEngine(VersionedIdentifier localLibraryIdentifier, String expression,
List<LibraryParameter> library, String subject,
Parameters parameters, Endpoint contentEndpoint, Endpoint dataEndpoint, Endpoint terminologyEndpoint,
Bundle data, boolean useServerData,
RequestDetails theRequestDetails) {
JpaFhirDal jpaFhirDal = jpaFhirDalFactory.create(theRequestDetails);
// temporary LibraryLoader to resolve library dependencies when building
// includes
List<LibraryContentProvider> libraryProviders = new ArrayList<>();
libraryProviders.add(jpaLibraryContentProviderFactory.create(theRequestDetails));
if (contentEndpoint != null) {
// Forward any endpoint headers (e.g. auth) to the remote content server.
libraryProviders.add(fhirRestLibraryContentProviderFactory.create(contentEndpoint.getAddress(), contentEndpoint
.getHeader().stream().map(PrimitiveType::asStringValue).collect(Collectors.toList())));
}
LibraryLoader tempLibraryLoader = libraryLoaderFactory.create(
new ArrayList<>(libraryProviders));
String cql = buildCqlLibrary(library, jpaFhirDal, tempLibraryLoader, expression, parameters, theRequestDetails);
libraryProviders.add(new InMemoryLibraryContentProvider(Arrays.asList(cql)));
LibraryLoader libraryLoader = libraryLoaderFactory.create(
new ArrayList<>(libraryProviders));
return setupEngine(subject, parameters, dataEndpoint, terminologyEndpoint, data, useServerData, libraryLoader,
localLibraryIdentifier, theRequestDetails);
}
/**
 * Wires up the CQL engine: chooses a terminology provider (remote endpoint if
 * supplied, else the local JPA provider) and assembles retrieve providers in
 * priority order — server data, then the data endpoint, then the inline bundle.
 */
private CqlEngine setupEngine(String subject, Parameters parameters, Endpoint dataEndpoint,
		Endpoint terminologyEndpoint, Bundle data, boolean useServerData, LibraryLoader libraryLoader,
		VersionedIdentifier libraryIdentifier, RequestDetails theRequestDetails) {
	TerminologyProvider terminologyProvider;
	if (terminologyEndpoint != null) {
		IGenericClient client = Clients.forEndpoint(getFhirContext(), terminologyEndpoint);
		terminologyProvider = new R4FhirTerminologyProvider(client);
	} else {
		terminologyProvider = jpaTerminologyProviderFactory.create(theRequestDetails);
	}
	DataProvider dataProvider;
	List<RetrieveProvider> retrieveProviderList = new ArrayList<>();
	if (useServerData) {
		JpaFhirRetrieveProvider jpaRetriever = new JpaFhirRetrieveProvider(getDaoRegistry(),
				new SearchParameterResolver(getFhirContext()));
		jpaRetriever.setTerminologyProvider(terminologyProvider);
		// Assume it's a different server, therefore need to expand.
		if (terminologyEndpoint != null) {
			jpaRetriever.setExpandValueSets(true);
		}
		retrieveProviderList.add(jpaRetriever);
	}
	if (dataEndpoint != null) {
		IGenericClient client = Clients.forEndpoint(dataEndpoint);
		RestFhirRetrieveProvider restRetriever = new RestFhirRetrieveProvider(
				new SearchParameterResolver(getFhirContext()),
				client);
		restRetriever.setTerminologyProvider(terminologyProvider);
		// Expand value sets unless terminology is served from the same address as
		// the data. (Simplified: the second operand of || previously re-checked
		// "terminologyEndpoint != null", which is always true there.)
		if (terminologyEndpoint == null
				|| !terminologyEndpoint.getAddress().equals(dataEndpoint.getAddress())) {
			restRetriever.setExpandValueSets(true);
		}
		retrieveProviderList.add(restRetriever);
	}
	if (data != null) {
		BundleRetrieveProvider bundleRetriever = new BundleRetrieveProvider(getFhirContext(), data);
		bundleRetriever.setTerminologyProvider(terminologyProvider);
		retrieveProviderList.add(bundleRetriever);
	}
	PriorityRetrieveProvider priorityProvider = new PriorityRetrieveProvider(retrieveProviderList);
	dataProvider = new CompositeDataProvider(myModelResolver, priorityProvider);
	return new CqlEngine(libraryLoader, Collections.singletonMap("http://hl7.org/fhir", dataProvider),
			terminologyProvider);
}
/**
 * Assembles the ad-hoc CQL library text: header, using, includes, parameter
 * declarations, and finally the caller's expression as the "return" define.
 */
private String buildCqlLibrary(List<LibraryParameter> library, JpaFhirDal jpaFhirDal, LibraryLoader libraryLoader,
		String expression,
		Parameters parameters,
		RequestDetails theRequestDetails) {
	logger.debug("Constructing expression for local evaluation");
	StringBuilder source = new StringBuilder();
	constructHeader(source);
	constructUsings(source);
	constructIncludes(source, jpaFhirDal, library, libraryLoader, theRequestDetails);
	constructParameters(source, parameters);
	constructExpression(source, expression);
	String cql = source.toString();
	logger.debug(cql);
	return cql;
}
/** Emits the library declaration; every generated library is LocalLibrary 1.0.0. */
private void constructHeader(StringBuilder sb) {
	sb.append("library LocalLibrary version '1.0.0'\n\n");
}
/** Declares the FHIR data model at the version this server is running. */
private void constructUsings(StringBuilder sb) {
	sb.append("using FHIR version '").append(getFhirVersion()).append("'\n\n");
}
/** Returns the server's FHIR version string (e.g. "4.0.1" for R4). */
private String getFhirVersion() {
	return getFhirContext().getVersion().getVersion().getFhirVersionString();
}
/**
 * Emits one CQL parameter declaration per input Parameters entry, typed by the
 * value's FHIR type.
 */
private void constructParameters(StringBuilder sb, Parameters parameters) {
	if (parameters == null) {
		// Nothing to declare when no input parameters were supplied.
		return;
	}
	// NOTE(review): assumes every parameter carries a simple value; an entry with
	// only "part" components would return null from getValue() — confirm upstream.
	for (ParametersParameterComponent param : parameters.getParameter()) {
		sb.append(String.format("parameter \"%s\" %s\n", param.getName(), param.getValue().fhirType()));
	}
}
/**
 * Emits the include section: FHIRHelpers is always pulled in (so FHIR type
 * conversion works), followed by one include per requested library, each
 * aliased ("called") under the name it will be referenced by.
 */
private void constructIncludes(StringBuilder sb, JpaFhirDal jpaFhirDal, List<LibraryParameter> library,
		LibraryLoader libraryLoader,
		RequestDetails requestDetails) {
	sb.append("include FHIRHelpers version '").append(getFhirVersion()).append("'\n");
	for (LibraryParameter libraryParameter : library) {
		String libraryName = resolveLibraryName(requestDetails, jpaFhirDal, libraryParameter, libraryLoader);
		sb.append("include ").append(libraryName);
		String version = Canonicals.getVersion(libraryParameter.url);
		if (version != null) {
			sb.append(" version '").append(version).append("'");
		}
		sb.append(" called ").append(libraryName).append("\n");
	}
}
// The caller's expression becomes the library's single "return" define, which
// is the expression the engine is later asked to evaluate.
private void constructExpression(StringBuilder sb, String expression) {
sb.append(String.format("\ndefine \"return\":\n %s", expression));
}
/**
 * Determines the include alias for a library: an explicitly supplied name wins;
 * otherwise the name is taken from the compiled (ELM) library if it loads, and
 * failing that from the FHIR Library resource stored on this server.
 */
private String resolveLibraryName(RequestDetails requestDetails, JpaFhirDal jpaFhirDal,
		LibraryParameter libraryParameter,
		LibraryLoader libraryLoader) {
	if (libraryParameter.name != null) {
		return libraryParameter.name;
	}
	VersionedIdentifier libraryIdentifier = new VersionedIdentifier()
			.withId(Canonicals.getIdPart(libraryParameter.url));
	String version = Canonicals.getVersion(libraryParameter.url);
	if (version != null) {
		libraryIdentifier.setVersion(version);
	}
	org.cqframework.cql.elm.execution.Library executionLibrary = null;
	try {
		executionLibrary = libraryLoader.load(libraryIdentifier);
	} catch (Exception e) {
		// Fall through to the FHIR Library resource below.
		logger.debug("Unable to load executable library {}", libraryParameter.name);
	}
	if (executionLibrary != null) {
		return executionLibrary.getIdentifier().getId();
	}
	Library library = (Library) jpaFhirDal.read(new IdType("Library", libraryIdentifier.getId()));
	return library.getName();
}
/**
 * Maps the engine's "return" expression result onto the output Parameters:
 * resources and resource lists are bundled, FHIR types pass through, anything
 * else is stringified. Evaluation failures are reported as an "error" parameter.
 */
@SuppressWarnings("unchecked")
private void resolveResult(RequestDetails theRequestDetails, EvaluationResult evalResult, Parameters result) {
	try {
		Object res = evalResult.forExpression("return");
		if (res == null) {
			result.addParameter().setName("value").setValue(new StringType("null"));
		} else if (res instanceof List<?>) {
			// A non-empty list of resources is bundled; any other list is stringified.
			if (!((List<?>) res).isEmpty() && ((List<?>) res).get(0) instanceof Resource) {
				result.addParameter().setName("value")
						.setResource(bundler.bundle((Iterable<Resource>) res, theRequestDetails.getFhirServerBase()));
			} else {
				result.addParameter().setName("value").setValue(new StringType(res.toString()));
			}
		} else if (res instanceof Iterable) {
			// NOTE(review): assumes a non-List Iterable only ever yields Resources
			// (e.g. a retrieve cursor) — confirm against the engine's result types.
			result.addParameter().setName("value")
					.setResource(bundler.bundle((Iterable<Resource>) res, theRequestDetails.getFhirServerBase()));
		} else if (res instanceof Resource) {
			result.addParameter().setName("value").setResource((Resource) res);
		} else if (res instanceof Type) {
			result.addParameter().setName("value").setValue((Type) res);
		} else {
			result.addParameter().setName("value").setValue(new StringType(res.toString()));
		}
		result.addParameter().setName("resultType").setValue(new StringType(resolveType(res)));
	} catch (RuntimeException re) {
		// Fix: log through SLF4J instead of printStackTrace() so failures reach
		// the configured log appenders rather than raw stderr.
		logger.error("Error resolving CQL evaluation result", re);
		String message = re.getMessage() != null ? re.getMessage() : re.getClass().getName();
		result.addParameter().setName("error").setValue(new StringType(message));
	}
}
/**
 * Translates a Java runtime class name into the CQL result-type label reported
 * to the caller (BigDecimal -> Decimal, ArrayList -> List, FhirBundleCursor ->
 * Retrieve); anything else is reported under its simple class name.
 */
private String resolveType(Object result) {
	if (result == null) {
		return "Null";
	}
	String type = result.getClass().getSimpleName();
	if ("BigDecimal".equals(type)) {
		return "Decimal";
	}
	if ("ArrayList".equals(type)) {
		return "List";
	}
	if ("FhirBundleCursor".equals(type)) {
		return "Retrieve";
	}
	return type;
}
// Engine-level CQL logging is opt-in via configuration; the DebugMap default
// is "disabled".
private DebugMap getDebugMap() {
DebugMap debugMap = new DebugMap();
if (myCqlProperties.getCql_logging_enabled()) {
debugMap.setIsLoggingEnabled(true);
}
return debugMap;
}
}
| |
/*******************************************************************************
* Copyright (c) 2010 Haifeng Li
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package smile.classification;
import java.util.Arrays;
import smile.math.Math;
import smile.math.matrix.EigenValueDecomposition;
import smile.projection.Projection;
/**
* Fisher's linear discriminant. Fisher defined the separation between two
* distributions to be the ratio of the variance between the classes to
* the variance within the classes, which is, in some sense, a measure
* of the signal-to-noise ratio for the class labeling. FLD finds a linear
* combination of features which maximizes the separation after the projection.
* The resulting combination may be used for dimensionality reduction
* before later classification.
* <p>
* The terms Fisher's linear discriminant and LDA are often used
* interchangeably, although FLD actually describes a slightly different
* discriminant, which does not make some of the assumptions of LDA such
* as normally distributed classes or equal class covariances.
* When the assumptions of LDA are satisfied, FLD is equivalent to LDA.
* <p>
* FLD is also closely related to principal component analysis (PCA), which also
* looks for linear combinations of variables which best explain the data.
* As a supervised method, FLD explicitly attempts to model the
* difference between the classes of data. On the other hand, PCA is a
* unsupervised method and does not take into account any difference in class.
* <p>
* One complication in applying FLD (and LDA) to real data
* occurs when the number of variables/features does not exceed
* the number of samples. In this case, the covariance estimates do not have
* full rank, and so cannot be inverted. This is known as small sample size
* problem.
*
* @see LDA
* @see smile.projection.PCA
*
* @author Haifeng Li
*/
public class FLD implements Classifier<double[]>, Projection<double[]> {

    /**
     * The dimensionality of data.
     */
    private final int p;
    /**
     * The number of classes.
     */
    private final int k;
    /**
     * Original common mean vector.
     */
    private final double[] mean;
    /**
     * Original class mean vectors (centered by the common mean).
     */
    private final double[][] mu;
    /**
     * Projection matrix.
     */
    private final double[][] scaling;
    /**
     * Projected common mean vector.
     */
    private final double[] smean;
    /**
     * Projected class mean vectors.
     */
    private final double[][] smu;

    /**
     * Trainer for Fisher's linear discriminant.
     */
    public static class Trainer extends ClassifierTrainer<double[]> {
        /**
         * The dimensionality of mapped space. A non-positive value means
         * the default of k - 1.
         */
        private int L = -1;
        /**
         * A tolerance to decide if a covariance matrix is singular. The trainer
         * will reject variables whose variance is less than tol<sup>2</sup>.
         */
        private double tol = 1E-4;

        /**
         * Constructor. The dimensionality of mapped space will be k - 1,
         * where k is the number of classes of data. The default tolerance
         * to covariance matrix singularity is 1E-4.
         */
        public Trainer() {
        }

        /**
         * Sets the dimensionality of mapped space.
         *
         * @param L the dimensionality of mapped space.
         */
        public void setDimension(int L) {
            if (L < 1) {
                throw new IllegalArgumentException("Invalid mapping space dimension: " + L);
            }
            this.L = L;
        }

        /**
         * Sets covariance matrix singular tolerance.
         *
         * @param tol a tolerance to decide if a covariance matrix is singular.
         * The trainer will reject variables whose variance is less than tol<sup>2</sup>.
         */
        public void setTolerance(double tol) {
            if (tol < 0.0) {
                throw new IllegalArgumentException("Invalid tol: " + tol);
            }
            this.tol = tol;
        }

        @Override
        public FLD train(double[][] x, int[] y) {
            return new FLD(x, y, L, tol);
        }
    }

    /**
     * Constructor. Learn Fisher's linear discriminant.
     * @param x training instances.
     * @param y training labels in [0, k), where k is the number of classes.
     */
    public FLD(double[][] x, int[] y) {
        this(x, y, -1);
    }

    /**
     * Constructor. Learn Fisher's linear discriminant.
     * @param x training instances.
     * @param y training labels in [0, k), where k is the number of classes.
     * @param L the dimensionality of mapped space.
     */
    public FLD(double[][] x, int[] y, int L) {
        this(x, y, L, 1E-4);
    }

    /**
     * Constructor. Learn Fisher's linear discriminant.
     * @param x training instances.
     * @param y training labels in [0, k), where k is the number of classes.
     * @param L the dimensionality of mapped space; a non-positive value means k - 1.
     * @param tol a tolerance to decide if a covariance matrix is singular; it
     * will reject variables whose variance is less than tol<sup>2</sup>.
     * @throws IllegalArgumentException if the input is inconsistent or the
     * covariance matrix is close to singular.
     */
    public FLD(double[][] x, int[] y, int L, double tol) {
        if (x.length != y.length) {
            throw new IllegalArgumentException(String.format("The sizes of X and Y don't match: %d != %d", x.length, y.length));
        }

        // Class labels must be 0, 1, ..., k-1 with no gaps.
        int[] labels = Math.unique(y);
        Arrays.sort(labels);

        for (int i = 0; i < labels.length; i++) {
            if (labels[i] < 0) {
                throw new IllegalArgumentException("Negative class label: " + labels[i]);
            }

            if (i > 0 && labels[i] - labels[i-1] > 1) {
                // BUG FIX: the original message used "... + labels[i]+1", which Java
                // evaluates as string concatenation (label 3 printed as "31").
                // The label missing from a sorted gap is labels[i-1] + 1.
                throw new IllegalArgumentException("Missing class: " + (labels[i-1] + 1));
            }
        }

        k = labels.length;
        if (k < 2) {
            throw new IllegalArgumentException("Only one class.");
        }

        if (tol < 0.0) {
            throw new IllegalArgumentException("Invalid tol: " + tol);
        }

        if (x.length <= k) {
            throw new IllegalArgumentException(String.format("Sample size is too small: %d <= %d", x.length, k));
        }

        if (L >= k) {
            throw new IllegalArgumentException(String.format("The dimensionality of mapped space is too high: %d >= %d", L, k));
        }

        if (L <= 0) {
            L = k - 1;
        }

        final int n = x.length;
        p = x[0].length;

        // The number of instances in each class.
        int[] ni = new int[k];

        // Common mean vector.
        mean = Math.colMean(x);
        // Common (total) covariance.
        double[][] T = new double[p][p];
        // Class mean vectors, centered by the common mean below.
        mu = new double[k][p];

        for (int i = 0; i < n; i++) {
            int c = y[i];
            ni[c]++;
            for (int j = 0; j < p; j++) {
                mu[c][j] += x[i][j];
            }
        }

        for (int i = 0; i < k; i++) {
            for (int j = 0; j < p; j++) {
                mu[i][j] = mu[i][j] / ni[i] - mean[j];
            }
        }

        // Accumulate the lower triangle of the total scatter, then symmetrize.
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < p; j++) {
                for (int l = 0; l <= j; l++) {
                    T[j][l] += (x[i][j] - mean[j]) * (x[i][l] - mean[l]);
                }
            }
        }

        for (int j = 0; j < p; j++) {
            for (int l = 0; l <= j; l++) {
                T[j][l] /= n;
                T[l][j] = T[j][l];
            }
        }

        // Between-class scatter from the centered class means.
        double[][] B = new double[p][p];
        for (int i = 0; i < k; i++) {
            for (int j = 0; j < p; j++) {
                for (int l = 0; l <= j; l++) {
                    B[j][l] += mu[i][j] * mu[i][l];
                }
            }
        }

        for (int j = 0; j < p; j++) {
            for (int l = 0; l <= j; l++) {
                B[j][l] /= k;
                B[l][j] = B[j][l];
            }
        }

        // Whiten with respect to T: reject directions with (near) zero variance,
        // then invert the remaining eigenvalues.
        EigenValueDecomposition eigen = EigenValueDecomposition.decompose(T, true);

        tol = tol * tol;
        double[] s = eigen.getEigenValues();
        for (int i = 0; i < s.length; i++) {
            if (s[i] < tol) {
                throw new IllegalArgumentException("The covariance matrix is close to singular.");
            }

            s[i] = 1.0 / s[i];
        }

        double[][] U = eigen.getEigenVectors();
        double[][] UB = Math.atbmm(U, B);

        // NOTE(review): the row bound here is k, not p — verify against the
        // shape returned by Math.atbmm before changing anything in this loop.
        for (int i = 0; i < k; i++) {
            for (int j = 0; j < p; j++) {
                UB[i][j] *= s[j];
            }
        }

        Math.abmm(U, UB, B);

        // Eigenvectors of the whitened between-class scatter give the
        // discriminant directions; keep the leading L columns.
        eigen = EigenValueDecomposition.decompose(B, true);

        U = eigen.getEigenVectors();
        scaling = new double[p][L];
        for (int i = 0; i < p; i++) {
            System.arraycopy(U[i], 0, scaling[i], 0, L);
        }

        // Pre-project the common mean and the class means for predict().
        smean = new double[L];
        Math.atx(scaling, mean, smean);
        smu = Math.abmm(mu, scaling);
    }

    /**
     * Predicts the class label as the class whose projected mean is nearest
     * to the projection of {@code x}.
     */
    @Override
    public int predict(double[] x) {
        if (x.length != p) {
            throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x.length, p));
        }

        double[] wx = project(x);

        int y = 0;
        double nearest = Double.POSITIVE_INFINITY;
        for (int i = 0; i < k; i++) {
            double d = Math.distance(wx, smu[i]);
            if (d < nearest) {
                nearest = d;
                y = i;
            }
        }

        return y;
    }

    /**
     * Predicts the class label of an instance and also calculate a posteriori
     * probabilities. Not supported.
     */
    @Override
    public int predict(double[] x, double[] posteriori) {
        throw new UnsupportedOperationException("Not supported.");
    }

    @Override
    public double[] project(double[] x) {
        if (x.length != p) {
            throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x.length, p));
        }

        double[] y = new double[scaling[0].length];
        Math.atx(scaling, x, y);
        Math.minus(y, smean);
        return y;
    }

    @Override
    public double[][] project(double[][] x) {
        double[][] y = new double[x.length][scaling[0].length];
        for (int i = 0; i < x.length; i++) {
            if (x[i].length != p) {
                throw new IllegalArgumentException(String.format("Invalid input vector size: %d, expected: %d", x[i].length, p));
            }

            Math.atx(scaling, x[i], y[i]);
            Math.minus(y[i], smean);
        }
        return y;
    }

    /**
     * Returns the projection matrix W. The dimension reduced data can be obtained
     * by y = W' * x.
     */
    public double[][] getProjection() {
        return scaling;
    }
}
| |
/*
Copyright 2006 by Sean Luke and George Mason University
Licensed under the Academic Free License version 3.0
See the file "LICENSE" for more information
*/
package ec.rule;
import ec.*;
import ec.util.*;
import java.io.*;
/*
* Rule.java
*
* Created: Tue Feb 20 13:19:00 2001
* By: Liviu Panait and Sean Luke
*/
/**
* Rule is an abstract class for describing rules. It is abstract
* because it is supposed to be extended by different classes
* modelling different kinds of rules.
* It provides the reset abstract method for randomizing the individual.
* It also provides the mutate function for mutating an individual rule
* It also provides the clone function for cloning the rule.
*
* <p>You will need to implement some kind of artificial ordering between
* rules in a ruleset using the Comparable interface,
* so the ruleset can be sorted in such a way that it can be compared with
* another ruleset for equality. You should also implement hashCode
* and equals
* in such a way that they aren't based on pointer information, but on actual
* internal features.
*
* <p>Every rule points to a RuleConstraints which handles information that
* Rule shares with all the other Rules in a RuleSet.
* <p>In addition to serialization for checkpointing, Rules may read and write themselves to streams in three ways.
*
* <ul>
* <li><b>writeRule(...,DataOutput)/readRule(...,DataInput)</b> This method
* transmits or receives a Rule in binary. It is the most efficient approach to sending
* Rules over networks, etc. The default versions of writeRule/readRule throw errors.
* You don't need to implement them if you don't plan on using read/writeRule.
*
* <li><b>printRule(...,PrintWriter)/readRule(...,LineNumberReader)</b> This
* approach transmits or receives a Rule in text encoded such that the Rule is largely readable
* by humans but can be read back in 100% by ECJ as well. To do this, these methods will typically encode numbers
* using the <tt>ec.util.Code</tt> class. These methods are mostly used to write out populations to
* files for inspection, slight modification, then reading back in later on. <b>readRule</b>
* reads in a line, then calls <b>readRuleFromString</b> on that line.
* You are responsible for implementing readRuleFromString: the Code class is there to help you.
* The default version throws an error if called.
* <b>printRule</b> calls <b>printRuleToString<b>
* and printlns the resultant string. You are responsible for implementing the printRuleToString method in such
* a way that readRuleFromString can read back in the Rule println'd with printRuleToString. The default form
* of printRuleToString() simply calls <b>toString()</b>
* by default. You might override <b>printRuleToString()</b> to provide better information. You are not required to implement these methods, but without
* them you will not be able to write Rules to files in a simultaneously computer- and human-readable fashion.
*
* <li><b>printRuleForHumans(...,PrintWriter)</b> This
* approach prints a Rule in a fashion intended for human consumption only.
* <b>printRuleForHumans</b> calls <b>printRuleToStringForHumans()<b>
* and printlns the resultant string. The default form of this method just returns the value of
* <b>toString()</b>. You may wish to override this to provide more information instead.
* You should handle one of these methods properly
* to ensure Rules can be printed by ECJ.
* </ul>
<p><b>Parameters</b><br>
<table>
<tr><td valign=top><i>base</i>.<tt>constraints</tt><br>
<font size=-1>string</font></td>
<td valign=top>(name of the rule constraint)</td></tr>
</table>
<p><b>Default Base</b><br>
rule.rule
* @author Liviu Panait and Sean luke
* @version 1.0
*/
public abstract class Rule implements Prototype, Comparable
    {
    public static final String P_RULE = "rule";
    public static final String P_CONSTRAINTS = "constraints";

    /**
       An index to a RuleConstraints
    */
    public byte constraints;

    /* Returns the Rule's constraints.  A good JIT compiler should inline this. */
    public final RuleConstraints constraints(final RuleInitializer initializer)
        {
        return initializer.ruleConstraints[constraints];
        }

    /** Generates a hash code for this rule -- the rule for this is that the hash code
        must be the same for two rules that are equal to each other genetically. */
    public abstract int hashCode();

    /** Unlike the standard form for Java, this function should return true if this
        rule is "genetically identical" to the other rule. The default calls compareTo().
        Per the Object.equals contract, null and non-Rule arguments yield false
        (the original forwarded them straight to compareTo, which could throw
        NullPointerException or ClassCastException). */
    public boolean equals( final Object other )
        {
        if (!(other instanceof Rule)) return false;  // also covers other == null
        return compareTo(other) == 0;
        }

    /**
       The reset method randomly reinitializes the rule.
    */
    public abstract void reset(final EvolutionState state, final int thread);

    /**
       Mutate the rule.  The default form just resets the rule.
    */
    public void mutate(final EvolutionState state, final int thread)
        {
        reset(state,thread);
        }

    /**
       Nice printing.  The default form simply calls printRuleToStringForHumans and prints the result,
       but you might want to override this.
    */
    public void printRuleForHumans( final EvolutionState state, final int log )
        { printRuleForHumans(state, log, Output.V_VERBOSE); }

    /**
       Nice printing.  The default form simply calls printRuleToStringForHumans and prints the result,
       but you might want to override this.
       @deprecated Verbosity no longer has an effect
    */
    @Deprecated
    public void printRuleForHumans( final EvolutionState state, final int log, final int verbosity )
        { state.output.println(printRuleToStringForHumans(),log);}

    /** Nice printing to a string. The default form calls toString(). */
    public String printRuleToStringForHumans()
        { return toString(); }

    /** Prints the rule to a string in a fashion readable by readRuleFromString.
        The default form calls printRuleToString().
        @deprecated use printRuleToString() instead */
    @Deprecated
    public String printRuleToString(final EvolutionState state)
        { return printRuleToString(); }

    /** Prints the rule to a string in a fashion readable by readRuleFromString.
        The default form simply calls toString() -- you should just override toString()
        if you don't need the EvolutionState. */
    public String printRuleToString()
        { return toString(); }

    /** Reads a rule from a string, which may contain a final '\n'.
        Override this method.  The default form generates an error. */
    public void readRuleFromString(final String string, final EvolutionState state)
        { state.output.error("readRuleFromString(string,state) unimplemented in " + this.getClass()); }

    /**
       Prints the rule in a way that can be read by readRule().  The default form simply
       calls printRuleToString(state).   Override this rule to do custom writing to the log,
       or just override printRuleToString(...), which is probably easier to do.
    */
    public void printRule( final EvolutionState state, final int log )
        { printRule(state, log, Output.V_VERBOSE); }

    /**
       Prints the rule in a way that can be read by readRule().  The default form simply
       calls printRuleToString(state).   Override this rule to do custom writing to the log,
       or just override printRuleToString(...), which is probably easier to do.
       @deprecated Verbosity no longer has an effect
    */
    @Deprecated
    public void printRule( final EvolutionState state, final int log, final int verbosity )
        { state.output.println(printRuleToString(state),log); }

    /**
       Prints the rule in a way that can be read by readRule().  The default form simply
       calls printRuleToString(state).  Override this rule to do custom writing,
       or just override printRuleToString(...), which is probably easier to do.
    */
    public void printRule( final EvolutionState state, final PrintWriter writer )
        { writer.println(printRuleToString(state)); }

    /**
       Reads a rule printed by printRule(...).  The default form simply reads a line into
       a string, and then calls readRuleFromString() on that line.  Override this rule to do
       custom reading, or just override readRuleFromString(...), which is probably easier to do.
    */
    public void readRule(final EvolutionState state,
        final LineNumberReader reader)
        throws IOException
        { readRuleFromString(reader.readLine(),state); }

    /** Override this if you need to write rules out to a binary stream */
    public void writeRule(final EvolutionState state,
        final DataOutput dataOutput) throws IOException
        {
        state.output.fatal("writeRule(EvolutionState, DataOutput) not implemented in " + this.getClass());
        }

    /** Override this if you need to read rules in from a binary stream */
    public void readRule(final EvolutionState state,
        final DataInput dataInput) throws IOException
        {
        state.output.fatal("readRule(EvolutionState, DataInput) not implemented in " + this.getClass());
        }

    public Parameter defaultBase()
        {
        return RuleDefaults.base().push(P_RULE);
        }

    public Object clone()
        {
        try { return super.clone(); }
        catch (CloneNotSupportedException e)
            { throw new InternalError(); } // never happens -- Prototype implies Cloneable
        }

    /** Loads the RuleConstraints named by the "constraints" parameter and stores
        its index.  Fatal error if no constraints name is given. */
    public void setup(EvolutionState state, Parameter base)
        {
        String constraintname = state.parameters.getString(
            base.push( P_CONSTRAINTS ),defaultBase().push(P_CONSTRAINTS));
        if (constraintname == null)
            state.output.fatal("No RuleConstraints name given",
                base.push( P_CONSTRAINTS ),defaultBase().push(P_CONSTRAINTS));

        constraints = RuleConstraints.constraintsFor(constraintname,state).constraintNumber;
        state.output.exitIfErrors();
        }

    /** This function replaces the old gt and lt functions that Rule used to require
        as it implemented the SortComparator interface.  If you had implemented those
        old functions, you can simply implement this function as:

        <tt><pre>
        public abstract int compareTo(Object o)
        {
        if (gt(this,o)) return 1;
        if (lt(this,o)) return -1;
        return 0;
        }
        </pre></tt>
    */
    public abstract int compareTo(Object o);
    }
| |
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.security.sys;
import com.google.common.base.Strings;
import com.haulmont.cuba.core.EntityManager;
import com.haulmont.cuba.core.Persistence;
import com.haulmont.cuba.core.Transaction;
import com.haulmont.cuba.core.app.ServerConfig;
import com.haulmont.cuba.core.global.EntityStates;
import com.haulmont.cuba.core.global.Metadata;
import com.haulmont.cuba.core.global.UserSessionSource;
import com.haulmont.cuba.core.global.UuidSource;
import com.haulmont.cuba.core.sys.DefaultPermissionValuesConfig;
import com.haulmont.cuba.security.app.UserSessionsAPI;
import com.haulmont.cuba.security.app.group.AccessGroupDefinitionsComposer;
import com.haulmont.cuba.security.app.role.RolesHelper;
import com.haulmont.cuba.security.entity.*;
import com.haulmont.cuba.security.global.NoUserSessionException;
import com.haulmont.cuba.security.global.UserSession;
import com.haulmont.cuba.security.group.AccessGroupDefinition;
import com.haulmont.cuba.security.role.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;
/**
* INTERNAL.
* <p>
* System-level class managing {@link UserSession}s.
*/
@Component(UserSessionManager.NAME)
public class UserSessionManager {

    private final Logger log = LoggerFactory.getLogger(UserSessionManager.class);

    public static final String NAME = "cuba_UserSessionManager";

    @Inject
    protected UuidSource uuidSource;
    @Inject
    protected UserSessionsAPI sessions;
    @Inject
    protected UserSessionSource userSessionSource;
    @Inject
    protected Persistence persistence;
    @Inject
    protected EntityStates entityStates;
    @Inject
    protected Metadata metadata;
    @Inject
    protected DefaultPermissionValuesConfig defaultPermissionValuesConfig;
    @Inject
    protected ServerConfig serverConfig;
    @Inject
    protected RolesHelper rolesHelper;
    @Inject
    protected AccessGroupDefinitionsComposer groupsComposer;

    /**
     * Create a new session and fill it with security data. Must be called inside a transaction.
     *
     * @param user   user instance
     * @param locale user locale
     * @param system create system session
     * @return new session instance
     */
    public UserSession createSession(User user, Locale locale, boolean system) {
        return createSession(uuidSource.createUuid(), user, locale, system, null);
    }

    /**
     * Create a new session and fill it with security data. Must be called inside a transaction.
     *
     * @param user          user instance
     * @param locale        user locale
     * @param system        create system session
     * @param securityScope security scope
     * @return new session instance
     */
    public UserSession createSession(User user, Locale locale, boolean system, String securityScope) {
        return createSession(uuidSource.createUuid(), user, locale, system, securityScope);
    }

    /**
     * Create a new session and fill it with security data. Must be called inside a transaction.
     *
     * @param sessionId target session id
     * @param user      user instance
     * @param locale    user locale
     * @param system    create system session
     * @return new session instance
     */
    public UserSession createSession(UUID sessionId, User user, Locale locale, boolean system) {
        return createSession(sessionId, user, locale, system, null);
    }

    /**
     * Create a new session and fill it with security data. Must be called inside a transaction.
     *
     * @param sessionId     target session id
     * @param user          user instance
     * @param locale        user locale
     * @param system        create system session
     * @param securityScope security scope; {@code null} means the default scope
     * @return new session instance
     */
    public UserSession createSession(UUID sessionId, User user, Locale locale, boolean system, String securityScope) {
        // Normalize once, outside the loop: a null scope means the default scope.
        String expectedScope = securityScope == null ? SecurityScope.DEFAULT_SCOPE_NAME : securityScope;

        // Only roles whose scope matches the requested one participate in the session.
        List<RoleDefinition> roles = new ArrayList<>();
        for (RoleDefinition role : rolesHelper.getRoleDefinitionsForUser(user, false)) {
            if (role != null) {
                String actualScope = role.getSecurityScope() == null ? SecurityScope.DEFAULT_SCOPE_NAME : role.getSecurityScope();
                if (Objects.equals(expectedScope, actualScope)) {
                    roles.add(role);
                }
            }
        }

        UserSession session = new UserSession(sessionId, user, roles, locale, system);
        return applySecurityData(session, roles, user);
    }

    /**
     * Create a new session from existing for another user and fill it with security data for that new user.
     * Must be called inside a transaction.
     *
     * @param src  existing session
     * @param user another user instance
     * @return new session with the same ID as existing
     */
    public UserSession createSession(UserSession src, User user) {
        List<RoleDefinition> roles = collectRoleDefinitions(user);
        UserSession session = new UserSession(src, user, roles, src.getLocale());
        return applySecurityData(session, roles, user);
    }

    /**
     * Collects all non-null role definitions assigned to the user, regardless of security scope.
     */
    protected List<RoleDefinition> collectRoleDefinitions(User user) {
        List<RoleDefinition> roles = new ArrayList<>();
        for (RoleDefinition role : rolesHelper.getRoleDefinitionsForUser(user, false)) {
            if (role != null) {
                roles.add(role);
            }
        }
        return roles;
    }

    /**
     * Fills the session with compiled permissions, access constraints and session attributes.
     * Shared tail of both {@code createSession} variants.
     *
     * @throws IllegalStateException if the user belongs neither to a DB group nor to a design-time group
     */
    protected UserSession applySecurityData(UserSession session, List<RoleDefinition> roles, User user) {
        compilePermissions(session, roles);
        if (user.getGroup() == null && Strings.isNullOrEmpty(user.getGroupNames())) {
            throw new IllegalStateException("User is not in a Group");
        }
        AccessGroupDefinition groupDefinition = compileGroupDefinition(user.getGroup(), user.getGroupNames());
        compileConstraints(session, groupDefinition);
        compileSessionAttributes(session, groupDefinition);
        session.setPermissionUndefinedAccessPolicy(rolesHelper.getPermissionUndefinedAccessPolicy());
        return session;
    }

    protected void compilePermissions(UserSession session, List<RoleDefinition> roles) {
        session.setJoinedRole(buildJoinedRoleDefinition(roles));
    }

    /**
     * Joins all roles into a single effective role and, if enabled, applies
     * default permission values for targets that no role defined explicitly.
     */
    protected RoleDefinition buildJoinedRoleDefinition(List<RoleDefinition> roles) {
        RoleDefinition effectiveRole = BasicRoleDefinition.builder().build();
        for (RoleDefinition role : roles) {
            effectiveRole = RoleDefinitionsJoiner.join(effectiveRole, role);
        }

        if (serverConfig.getDefaultPermissionValuesConfigEnabled()) {
            for (Map.Entry<String, Permission> entry : defaultPermissionValuesConfig.getDefaultPermissionValues()
                    .entrySet()) {
                String target = entry.getKey();
                Permission permission = entry.getValue();
                PermissionsContainer permissionsContainer = PermissionsUtils.getPermissionsByType(effectiveRole,
                        permission.getType());
                // Defaults never override an explicitly granted/denied permission.
                if (permissionsContainer.getExplicitPermissions().get(target) == null) {
                    permissionsContainer.getExplicitPermissions().put(target, permission.getValue());
                    if (permission.getType() == PermissionType.ENTITY_OP ||
                            permission.getType() == PermissionType.ENTITY_ATTR) {
                        // Mirror the default onto the extended-entity target, if one exists.
                        String extendedTarget = PermissionsUtils.evaluateExtendedEntityTarget(target);
                        if (!Strings.isNullOrEmpty(extendedTarget)) {
                            permissionsContainer.getExplicitPermissions().put(extendedTarget, permission.getValue());
                        }
                    }
                }
            }
        }
        return effectiveRole;
    }

    /**
     * Resolves the access group definition either from the DB group entity
     * or, if absent, from the design-time group name(s).
     */
    protected AccessGroupDefinition compileGroupDefinition(Group group, String groupName) {
        AccessGroupDefinition groupDefinition;
        if (group != null) {
            groupDefinition = groupsComposer.composeGroupDefinitionFromDb(group.getId());
        } else {
            groupDefinition = groupsComposer.composeGroupDefinition(groupName);
        }
        return groupDefinition;
    }

    protected void compileConstraints(UserSession session, AccessGroupDefinition groupDefinition) {
        session.setConstraints(groupDefinition.accessConstraints());
    }

    protected void compileSessionAttributes(UserSession session, AccessGroupDefinition groupDefinition) {
        Map<String, Serializable> sessionAttributes = groupDefinition.sessionAttributes();
        for (Map.Entry<String, Serializable> entry : sessionAttributes.entrySet()) {
            if (entry.getValue() != null) {
                session.setAttribute(entry.getKey(), entry.getValue());
            } else {
                // A null value means "clear this attribute".
                session.removeAttribute(entry.getKey());
            }
        }
    }

    /**
     * @deprecated use {@link UserSessionsAPI#add(UserSession)}}
     */
    @Deprecated
    public void storeSession(UserSession session) {
        sessions.add(session);
    }

    /**
     * @deprecated use {@link UserSessionsAPI#remove(UserSession)}}
     */
    @Deprecated
    public void removeSession(UserSession session) {
        sessions.remove(session);
    }

    /**
     * @deprecated use {@link UserSessionsAPI#getNN(UUID)}}
     */
    @Deprecated
    public UserSession getSession(UUID sessionId) {
        UserSession session = findSession(sessionId);
        if (session == null) {
            throw new NoUserSessionException(sessionId);
        }
        return session;
    }

    /**
     * @deprecated use {@link UserSessionsAPI#get(UUID)}
     */
    @Deprecated
    public UserSession findSession(UUID sessionId) {
        return sessions.getAndRefresh(sessionId, false);
    }

    /**
     * Computes the resulting permission value for a user (reloaded from the DB)
     * by joining all of the user's roles.
     */
    public Integer getPermissionValue(User user, PermissionType permissionType, String target) {
        Integer result;

        Transaction tx = persistence.createTransaction();
        try {
            EntityManager em = persistence.getEntityManager();
            user = em.find(User.class, user.getId());

            List<RoleDefinition> roles = collectRoleDefinitions(user);

            RoleDefinition joinedRole = buildJoinedRoleDefinition(roles);

            result = PermissionsUtils.getResultingPermissionValue(joinedRole, permissionType, target,
                    rolesHelper.getPermissionUndefinedAccessPolicy());

            tx.commit();
        } finally {
            tx.end();
        }

        return result;
    }

    /**
     * INTERNAL
     */
    public void clearPermissionsOnUser(UserSession session) {
        List<User> users = new ArrayList<>();
        users.add(session.getUser());
        if (session.getSubstitutedUser() != null) {
            users.add(session.getSubstitutedUser());
        }
        for (User user : users) {
            if (entityStates.isDetached(user) && user.getUserRoles() != null) {
                // Only touch roles that are actually loaded; the filter below makes
                // the second isLoaded check of the original code redundant.
                List<UserRole> userRoles = user.getUserRoles().stream()
                        .filter(ur -> entityStates.isLoaded(ur, "role"))
                        .collect(Collectors.toList());
                for (UserRole ur : userRoles) {
                    if (ur.getRole() != null) {
                        ur.getRole().setPermissions(null);
                    }
                }
            }
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.stats.CounterStat;
import io.airlift.units.Duration;
import io.prestosql.Session;
import io.prestosql.memory.QueryContextVisitor;
import io.prestosql.memory.context.AggregatedMemoryContext;
import io.prestosql.memory.context.LocalMemoryContext;
import io.prestosql.memory.context.MemoryTrackingContext;
import io.prestosql.operator.OperationTimer.OperationTiming;
import io.prestosql.spi.Page;
import io.prestosql.spi.PrestoException;
import io.prestosql.sql.planner.plan.PlanNodeId;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import java.util.Optional;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.units.DataSize.succinctBytes;
import static io.prestosql.operator.BlockedReason.WAITING_FOR_MEMORY;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static java.lang.Math.max;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
/**
* Only {@link #getOperatorStats()} and revocable-memory-related operations are ThreadSafe
*/
public class OperatorContext
{
// Identity of this operator within its driver/plan.
private final int operatorId;
private final PlanNodeId planNodeId;
private final String operatorType;
private final DriverContext driverContext;
private final Executor executor;
// Counters/timings for the addInput path (see recordAddInput).
private final CounterStat rawInputDataSize = new CounterStat();
private final OperationTiming addInputTiming = new OperationTiming();
private final CounterStat inputDataSize = new CounterStat();
private final CounterStat inputPositions = new CounterStat();
// Counters/timings for the getOutput path.
private final OperationTiming getOutputTiming = new OperationTiming();
private final CounterStat outputDataSize = new CounterStat();
private final CounterStat outputPositions = new CounterStat();
private final AtomicLong physicalWrittenDataSize = new AtomicLong();
// Futures signalling availability of (revocable) memory; the constructor
// pre-completes them, i.e. a completed future means "not blocked on memory".
private final AtomicReference<SettableFuture<?>> memoryFuture;
private final AtomicReference<SettableFuture<?>> revocableMemoryFuture;
private final AtomicReference<BlockedMonitor> blockedMonitor = new AtomicReference<>();
private final AtomicLong blockedWallNanos = new AtomicLong();
private final OperationTiming finishTiming = new OperationTiming();
private final SpillContext spillContext;
private final AtomicReference<Supplier<OperatorInfo>> infoSupplier = new AtomicReference<>();
// Peak reservations; maintained as monotonically increasing watermarks.
private final AtomicLong peakUserMemoryReservation = new AtomicLong();
private final AtomicLong peakSystemMemoryReservation = new AtomicLong();
private final AtomicLong peakTotalMemoryReservation = new AtomicLong();
// Memory-revocation request state; guarded by this instance's monitor.
@GuardedBy("this")
private boolean memoryRevokingRequested;
// Invoked when memory revocation is requested; may be null when nobody listens.
@Nullable
@GuardedBy("this")
private Runnable memoryRevocationRequestListener;
private final MemoryTrackingContext operatorMemoryContext;
public OperatorContext(
int operatorId,
PlanNodeId planNodeId,
String operatorType,
DriverContext driverContext,
Executor executor,
MemoryTrackingContext operatorMemoryContext)
{
checkArgument(operatorId >= 0, "operatorId is negative");
this.operatorId = operatorId;
this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
this.operatorType = requireNonNull(operatorType, "operatorType is null");
this.driverContext = requireNonNull(driverContext, "driverContext is null");
this.spillContext = new OperatorSpillContext(this.driverContext);
this.executor = requireNonNull(executor, "executor is null");
this.memoryFuture = new AtomicReference<>(SettableFuture.create());
this.memoryFuture.get().set(null);
this.revocableMemoryFuture = new AtomicReference<>(SettableFuture.create());
this.revocableMemoryFuture.get().set(null);
this.operatorMemoryContext = requireNonNull(operatorMemoryContext, "operatorMemoryContext is null");
operatorMemoryContext.initializeLocalMemoryContexts(operatorType);
}
public int getOperatorId()
{
return operatorId;
}
public String getOperatorType()
{
return operatorType;
}
public DriverContext getDriverContext()
{
return driverContext;
}
public Session getSession()
{
return driverContext.getSession();
}
public boolean isDone()
{
return driverContext.isDone();
}
void recordAddInput(OperationTimer operationTimer, Page page)
{
operationTimer.recordOperationComplete(addInputTiming);
if (page != null) {
inputDataSize.update(page.getSizeInBytes());
inputPositions.update(page.getPositionCount());
}
}
/**
* Record the amount of physical bytes that were read by an operator.
* This metric is valid only for source operators.
*/
public void recordRawInput(long sizeInBytes)
{
rawInputDataSize.update(sizeInBytes);
}
/**
* Record the amount of physical bytes that were read by an operator and
* the time it took to read the data. This metric is valid only for source operators.
*/
public void recordRawInputWithTiming(long sizeInBytes, long readNanos)
{
rawInputDataSize.update(sizeInBytes);
addInputTiming.record(readNanos, 0);
}
/**
* Record the size in bytes of input blocks that were processed by an operator.
* This metric is valid only for source operators.
*/
public void recordProcessedInput(long sizeInBytes, long positions)
{
inputDataSize.update(sizeInBytes);
inputPositions.update(positions);
}
void recordGetOutput(OperationTimer operationTimer, Page page)
{
operationTimer.recordOperationComplete(getOutputTiming);
if (page != null) {
outputDataSize.update(page.getSizeInBytes());
outputPositions.update(page.getPositionCount());
}
}
public void recordOutput(long sizeInBytes, long positions)
{
outputDataSize.update(sizeInBytes);
outputPositions.update(positions);
}
public void recordPhysicalWrittenData(long sizeInBytes)
{
physicalWrittenDataSize.getAndAdd(sizeInBytes);
}
public void recordBlocked(ListenableFuture<?> blocked)
{
requireNonNull(blocked, "blocked is null");
BlockedMonitor monitor = new BlockedMonitor();
BlockedMonitor oldMonitor = blockedMonitor.getAndSet(monitor);
if (oldMonitor != null) {
oldMonitor.run();
}
blocked.addListener(monitor, executor);
// Do not register blocked with driver context. The driver handles this directly.
}
void recordFinish(OperationTimer operationTimer)
{
operationTimer.recordOperationComplete(finishTiming);
}
public ListenableFuture<?> isWaitingForMemory()
{
return memoryFuture.get();
}
public ListenableFuture<?> isWaitingForRevocableMemory()
{
return revocableMemoryFuture.get();
}
// caller should close this context as it's a new context
public LocalMemoryContext newLocalSystemMemoryContext(String allocationTag)
{
return new InternalLocalMemoryContext(operatorMemoryContext.newSystemMemoryContext(allocationTag), memoryFuture, this::updatePeakMemoryReservations, true);
}
// caller shouldn't close this context as it's managed by the OperatorContext
public LocalMemoryContext localUserMemoryContext()
{
return new InternalLocalMemoryContext(operatorMemoryContext.localUserMemoryContext(), memoryFuture, this::updatePeakMemoryReservations, false);
}
// caller shouldn't close this context as it's managed by the OperatorContext
public LocalMemoryContext localSystemMemoryContext()
{
return new InternalLocalMemoryContext(operatorMemoryContext.localSystemMemoryContext(), memoryFuture, this::updatePeakMemoryReservations, false);
}
// caller shouldn't close this context as it's managed by the OperatorContext
public LocalMemoryContext localRevocableMemoryContext()
{
return new InternalLocalMemoryContext(operatorMemoryContext.localRevocableMemoryContext(), revocableMemoryFuture, () -> {}, false);
}
// caller shouldn't close this context as it's managed by the OperatorContext
public AggregatedMemoryContext aggregateUserMemoryContext()
{
return new InternalAggregatedMemoryContext(operatorMemoryContext.aggregateUserMemoryContext(), memoryFuture, this::updatePeakMemoryReservations, false);
}
// caller should close this context as it's a new context
public AggregatedMemoryContext newAggregateSystemMemoryContext()
{
return new InternalAggregatedMemoryContext(operatorMemoryContext.newAggregateSystemMemoryContext(), memoryFuture, this::updatePeakMemoryReservations, true);
}
// listen to all memory allocations and update the peak memory reservations accordingly
private void updatePeakMemoryReservations()
{
long userMemory = operatorMemoryContext.getUserMemory();
long systemMemory = operatorMemoryContext.getSystemMemory();
long totalMemory = userMemory + systemMemory;
peakUserMemoryReservation.accumulateAndGet(userMemory, Math::max);
peakSystemMemoryReservation.accumulateAndGet(systemMemory, Math::max);
peakTotalMemoryReservation.accumulateAndGet(totalMemory, Math::max);
}
public long getReservedRevocableBytes()
{
return operatorMemoryContext.getRevocableMemory();
}
private static void updateMemoryFuture(ListenableFuture<?> memoryPoolFuture, AtomicReference<SettableFuture<?>> targetFutureReference)
{
if (!memoryPoolFuture.isDone()) {
SettableFuture<?> currentMemoryFuture = targetFutureReference.get();
while (currentMemoryFuture.isDone()) {
SettableFuture<?> settableFuture = SettableFuture.create();
// We can't replace one that's not done, because the task may be blocked on that future
if (targetFutureReference.compareAndSet(currentMemoryFuture, settableFuture)) {
currentMemoryFuture = settableFuture;
}
else {
currentMemoryFuture = targetFutureReference.get();
}
}
SettableFuture<?> finalMemoryFuture = currentMemoryFuture;
// Create a new future, so that this operator can un-block before the pool does, if it's moved to a new pool
memoryPoolFuture.addListener(() -> finalMemoryFuture.set(null), directExecutor());
}
}
public void destroy()
{
// reset memory revocation listener so that OperatorContext doesn't hold any references to Driver instance
synchronized (this) {
memoryRevocationRequestListener = null;
}
operatorMemoryContext.close();
if (operatorMemoryContext.getSystemMemory() != 0) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Operator %s has non-zero system memory (%d bytes) after destroy()", this, operatorMemoryContext.getSystemMemory()));
}
if (operatorMemoryContext.getUserMemory() != 0) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Operator %s has non-zero user memory (%d bytes) after destroy()", this, operatorMemoryContext.getUserMemory()));
}
if (operatorMemoryContext.getRevocableMemory() != 0) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Operator %s has non-zero revocable memory (%d bytes) after destroy()", this, operatorMemoryContext.getRevocableMemory()));
}
}
public SpillContext getSpillContext()
{
return spillContext;
}
public void moreMemoryAvailable()
{
memoryFuture.get().set(null);
}
public synchronized boolean isMemoryRevokingRequested()
{
return memoryRevokingRequested;
}
/**
* Returns how much revocable memory will be revoked by the operator
*/
public long requestMemoryRevoking()
{
long revokedMemory = 0L;
Runnable listener = null;
synchronized (this) {
if (!isMemoryRevokingRequested() && operatorMemoryContext.getRevocableMemory() > 0) {
memoryRevokingRequested = true;
revokedMemory = operatorMemoryContext.getRevocableMemory();
listener = memoryRevocationRequestListener;
}
}
if (listener != null) {
runListener(listener);
}
return revokedMemory;
}
public synchronized void resetMemoryRevokingRequested()
{
memoryRevokingRequested = false;
}
public void setMemoryRevocationRequestListener(Runnable listener)
{
requireNonNull(listener, "listener is null");
boolean shouldNotify;
synchronized (this) {
checkState(memoryRevocationRequestListener == null, "listener already set");
memoryRevocationRequestListener = listener;
shouldNotify = memoryRevokingRequested;
}
// if memory revoking is requested immediately run the listener
if (shouldNotify) {
runListener(listener);
}
}
private static void runListener(Runnable listener)
{
requireNonNull(listener, "listener is null");
try {
listener.run();
}
catch (RuntimeException e) {
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Exception while running the listener", e);
}
}
public void setInfoSupplier(Supplier<OperatorInfo> infoSupplier)
{
requireNonNull(infoSupplier, "infoProvider is null");
this.infoSupplier.set(infoSupplier);
}
public CounterStat getInputDataSize()
{
return inputDataSize;
}
public CounterStat getInputPositions()
{
return inputPositions;
}
public CounterStat getOutputDataSize()
{
return outputDataSize;
}
public CounterStat getOutputPositions()
{
return outputPositions;
}
public long getPhysicalWrittenDataSize()
{
return physicalWrittenDataSize.get();
}
@Override
public String toString()
{
return format("%s-%s", operatorType, planNodeId);
}
public OperatorStats getOperatorStats()
{
Supplier<OperatorInfo> infoSupplier = this.infoSupplier.get();
OperatorInfo info = Optional.ofNullable(infoSupplier).map(Supplier::get).orElse(null);
long inputPositionsCount = inputPositions.getTotalCount();
return new OperatorStats(
driverContext.getTaskId().getStageId().getId(),
driverContext.getPipelineContext().getPipelineId(),
operatorId,
planNodeId,
operatorType,
1,
addInputTiming.getCalls(),
new Duration(addInputTiming.getWallNanos(), NANOSECONDS).convertToMostSuccinctTimeUnit(),
new Duration(addInputTiming.getCpuNanos(), NANOSECONDS).convertToMostSuccinctTimeUnit(),
succinctBytes(rawInputDataSize.getTotalCount()),
succinctBytes(inputDataSize.getTotalCount()),
inputPositionsCount,
(double) inputPositionsCount * inputPositionsCount,
getOutputTiming.getCalls(),
new Duration(getOutputTiming.getWallNanos(), NANOSECONDS).convertToMostSuccinctTimeUnit(),
new Duration(getOutputTiming.getCpuNanos(), NANOSECONDS).convertToMostSuccinctTimeUnit(),
succinctBytes(outputDataSize.getTotalCount()),
outputPositions.getTotalCount(),
succinctBytes(physicalWrittenDataSize.get()),
new Duration(blockedWallNanos.get(), NANOSECONDS).convertToMostSuccinctTimeUnit(),
finishTiming.getCalls(),
new Duration(finishTiming.getWallNanos(), NANOSECONDS).convertToMostSuccinctTimeUnit(),
new Duration(finishTiming.getCpuNanos(), NANOSECONDS).convertToMostSuccinctTimeUnit(),
succinctBytes(operatorMemoryContext.getUserMemory()),
succinctBytes(getReservedRevocableBytes()),
succinctBytes(operatorMemoryContext.getSystemMemory()),
succinctBytes(peakUserMemoryReservation.get()),
succinctBytes(peakSystemMemoryReservation.get()),
succinctBytes(peakTotalMemoryReservation.get()),
memoryFuture.get().isDone() ? Optional.empty() : Optional.of(WAITING_FOR_MEMORY),
info);
}
public <C, R> R accept(QueryContextVisitor<C, R> visitor, C context)
{
return visitor.visitOperatorContext(this, context);
}
private static long nanosBetween(long start, long end)
{
return max(0, end - start);
}
private class BlockedMonitor
implements Runnable
{
private final long start = System.nanoTime();
private boolean finished;
@Override
public synchronized void run()
{
if (finished) {
return;
}
finished = true;
blockedMonitor.compareAndSet(this, null);
blockedWallNanos.getAndAdd(getBlockedTime());
}
public long getBlockedTime()
{
return nanosBetween(start, System.nanoTime());
}
}
@ThreadSafe
private static class OperatorSpillContext
implements SpillContext
{
private final DriverContext driverContext;
private final AtomicLong reservedBytes = new AtomicLong();
public OperatorSpillContext(DriverContext driverContext)
{
this.driverContext = driverContext;
}
@Override
public void updateBytes(long bytes)
{
if (bytes >= 0) {
reservedBytes.addAndGet(bytes);
driverContext.reserveSpill(bytes);
}
else {
reservedBytes.accumulateAndGet(-bytes, this::decrementSpilledReservation);
driverContext.freeSpill(-bytes);
}
}
private long decrementSpilledReservation(long reservedBytes, long bytesBeingFreed)
{
checkArgument(bytesBeingFreed >= 0);
checkArgument(bytesBeingFreed <= reservedBytes, "tried to free %s spilled bytes from %s bytes reserved", bytesBeingFreed, reservedBytes);
return reservedBytes - bytesBeingFreed;
}
@Override
public void close()
{
// Only products of SpillContext.newLocalSpillContext() should be closed.
throw new UnsupportedOperationException(format("%s should not be closed directly", getClass()));
}
@Override
public String toString()
{
return toStringHelper(this)
.add("usedBytes", reservedBytes.get())
.toString();
}
}
private static class InternalLocalMemoryContext
implements LocalMemoryContext
{
private final LocalMemoryContext delegate;
private final AtomicReference<SettableFuture<?>> memoryFuture;
private final Runnable allocationListener;
private final boolean closeable;
InternalLocalMemoryContext(LocalMemoryContext delegate, AtomicReference<SettableFuture<?>> memoryFuture, Runnable allocationListener, boolean closeable)
{
this.delegate = requireNonNull(delegate, "delegate is null");
this.memoryFuture = requireNonNull(memoryFuture, "memoryFuture is null");
this.allocationListener = requireNonNull(allocationListener, "allocationListener is null");
this.closeable = closeable;
}
@Override
public long getBytes()
{
return delegate.getBytes();
}
@Override
public ListenableFuture<?> setBytes(long bytes)
{
ListenableFuture<?> blocked = delegate.setBytes(bytes);
updateMemoryFuture(blocked, memoryFuture);
allocationListener.run();
return blocked;
}
@Override
public boolean trySetBytes(long bytes)
{
return delegate.trySetBytes(bytes);
}
@Override
public void close()
{
if (!closeable) {
throw new UnsupportedOperationException("Called close on unclosable local memory context");
}
delegate.close();
}
}
private static class InternalAggregatedMemoryContext
implements AggregatedMemoryContext
{
private final AggregatedMemoryContext delegate;
private final AtomicReference<SettableFuture<?>> memoryFuture;
private final Runnable allocationListener;
private final boolean closeable;
InternalAggregatedMemoryContext(AggregatedMemoryContext delegate, AtomicReference<SettableFuture<?>> memoryFuture, Runnable allocationListener, boolean closeable)
{
this.delegate = requireNonNull(delegate, "delegate is null");
this.memoryFuture = requireNonNull(memoryFuture, "memoryFuture is null");
this.allocationListener = requireNonNull(allocationListener, "allocationListener is null");
this.closeable = closeable;
}
@Override
public AggregatedMemoryContext newAggregatedMemoryContext()
{
return delegate.newAggregatedMemoryContext();
}
@Override
public LocalMemoryContext newLocalMemoryContext(String allocationTag)
{
return new InternalLocalMemoryContext(delegate.newLocalMemoryContext(allocationTag), memoryFuture, allocationListener, true);
}
@Override
public long getBytes()
{
return delegate.getBytes();
}
@Override
public void close()
{
if (!closeable) {
throw new UnsupportedOperationException("Called close on unclosable aggregated memory context");
}
delegate.close();
}
}
@VisibleForTesting
public MemoryTrackingContext getOperatorMemoryContext()
{
return operatorMemoryContext;
}
}
| |
package org.apache.rya.indexing;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.commons.lang.Validate;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.query.BindingSet;
import org.openrdf.query.Dataset;
import org.openrdf.query.algebra.And;
import org.openrdf.query.algebra.Filter;
import org.openrdf.query.algebra.FunctionCall;
import org.openrdf.query.algebra.Join;
import org.openrdf.query.algebra.LeftJoin;
import org.openrdf.query.algebra.QueryModelNode;
import org.openrdf.query.algebra.StatementPattern;
import org.openrdf.query.algebra.TupleExpr;
import org.openrdf.query.algebra.ValueConstant;
import org.openrdf.query.algebra.ValueExpr;
import org.openrdf.query.algebra.Var;
import org.openrdf.query.algebra.evaluation.QueryOptimizer;
import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
import com.google.common.collect.Lists;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.indexing.IndexingFunctionRegistry.FUNCTION_TYPE;
import org.apache.rya.indexing.accumulo.ConfigUtils;
import org.apache.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer;
import org.apache.rya.indexing.accumulo.freetext.FreeTextTupleSet;
import org.apache.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer;
import org.apache.rya.indexing.mongodb.freetext.MongoFreeTextIndexer;
import org.apache.rya.indexing.mongodb.temporal.MongoTemporalIndexer;
/**
 * Query optimizer that rewrites filter-function calls (free-text and temporal
 * indexing functions) into index-backed tuple sets, replacing the matched
 * StatementPattern and the original filter condition in the query tree.
 */
public class FilterFunctionOptimizer implements QueryOptimizer, Configurable {
    private static final Logger LOG = Logger.getLogger(FilterFunctionOptimizer.class);
    private final ValueFactory valueFactory = new ValueFactoryImpl();

    private Configuration conf;
    private FreeTextIndexer freeTextIndexer;
    private TemporalIndexer temporalIndexer;
    // guards one-time indexer initialization; reset by setConf()
    private boolean init = false;

    public FilterFunctionOptimizer() {
    }

    public FilterFunctionOptimizer(final AccumuloRdfConfiguration conf) throws AccumuloException, AccumuloSecurityException,
            TableNotFoundException, IOException, TableExistsException, NumberFormatException, UnknownHostException {
        this.conf = conf;
        init();
    }

    //setConf initializes FilterFunctionOptimizer so reflection can be used
    //to create optimizer in RdfCloudTripleStoreConnection
    @Override
    public void setConf(final Configuration conf) {
        this.conf = conf;
        //reset the init.
        init = false;
        init();
    }

    // lazily creates Mongo- or Accumulo-backed indexers depending on configuration
    private synchronized void init() {
        if (!init) {
            if (ConfigUtils.getUseMongo(conf)) {
                freeTextIndexer = new MongoFreeTextIndexer();
                freeTextIndexer.setConf(conf);
                temporalIndexer = new MongoTemporalIndexer();
                temporalIndexer.setConf(conf);
            } else {
                freeTextIndexer = new AccumuloFreeTextIndexer();
                freeTextIndexer.setConf(conf);
                temporalIndexer = new AccumuloTemporalIndexer();
                temporalIndexer.setConf(conf);
            }
            init = true;
        }
    }

    @Override
    public void optimize(final TupleExpr tupleExpr, final Dataset dataset, final BindingSet bindings) {
        // find variables used in property and resource based searches:
        final SearchVarVisitor searchVars = new SearchVarVisitor();
        tupleExpr.visit(searchVars);
        // rewrites for property searches:
        processPropertySearches(tupleExpr, searchVars.searchProperties);
    }

    /**
     * For every StatementPattern whose object is a search variable, validates
     * the pattern and rewrites it into an index expression.
     */
    private void processPropertySearches(final TupleExpr tupleExpr, final Collection<Var> searchProperties) {
        final MatchStatementVisitor matchStatements = new MatchStatementVisitor(searchProperties);
        tupleExpr.visit(matchStatements);
        for (final StatementPattern matchStatement: matchStatements.matchStatements) {
            final Var subject = matchStatement.getSubjectVar();
            if (subject.hasValue() && !(subject.getValue() instanceof Resource)) {
                throw new IllegalArgumentException("Query error: Found " + subject.getValue() + ", expected an URI or BNode");
            }
            Validate.isTrue(subject.hasValue() || subject.getName() != null);
            Validate.isTrue(!matchStatement.getObjectVar().hasValue() && matchStatement.getObjectVar().getName() != null);
            buildQuery(tupleExpr, matchStatement);
        }
    }

    /**
     * Collects all filter-function calls on the matched object variable and
     * replaces the StatementPattern with the corresponding indexing expressions.
     */
    private void buildQuery(final TupleExpr tupleExpr, final StatementPattern matchStatement) {
        //If our IndexerExpr (to be) is the rhs-child of LeftJoin, we can safely make that a Join:
        //  the IndexerExpr will (currently) not return results that can deliver unbound variables.
        //This optimization should probably be generalized into a LeftJoin -> Join optimizer under certain conditions. Until that
        //  has been done, this code path at least takes care of queries generated by OpenSahara SparqTool that filter on OPTIONAL
        //  projections. E.g. summary~'full text search' (summary is optional). See #379
        if (matchStatement.getParentNode() instanceof LeftJoin) {
            final LeftJoin leftJoin = (LeftJoin)matchStatement.getParentNode();
            if (leftJoin.getRightArg() == matchStatement && leftJoin.getCondition() == null) {
                matchStatement.getParentNode().replaceWith(new Join(leftJoin.getLeftArg(), leftJoin.getRightArg()));
            }
        }
        final FilterFunction fVisitor = new FilterFunction(matchStatement.getObjectVar().getName());
        tupleExpr.visit(fVisitor);
        final List<IndexingExpr> results = Lists.newArrayList();
        for(int i = 0; i < fVisitor.func.size(); i++){
            results.add(new IndexingExpr(fVisitor.func.get(i), matchStatement, fVisitor.args.get(i)));
        }
        removeMatchedPattern(tupleExpr, matchStatement, new IndexerExprReplacer(results));
    }

    //find vars contained in filters
    private static class SearchVarVisitor extends QueryModelVisitorBase<RuntimeException> {
        private final Collection<Var> searchProperties = new ArrayList<Var>();

        @Override
        public void meet(final FunctionCall fn) {
            final URI fun = new URIImpl(fn.getURI());
            final Var result = IndexingFunctionRegistry.getResultVarFromFunctionCall(fun, fn.getArgs());
            // collect each result variable only once
            if (result != null && !searchProperties.contains(result)) {
                searchProperties.add(result);
            }
        }
    }

    //find StatementPatterns containing filter variables
    private static class MatchStatementVisitor extends QueryModelVisitorBase<RuntimeException> {
        private final Collection<Var> propertyVars;
        private final Collection<Var> usedVars = new ArrayList<Var>();
        private final List<StatementPattern> matchStatements = new ArrayList<StatementPattern>();

        public MatchStatementVisitor(final Collection<Var> propertyVars) {
            this.propertyVars = propertyVars;
        }

        @Override
        public void meet(final StatementPattern statement) {
            final Var object = statement.getObjectVar();
            if (propertyVars.contains(object)) {
                // a search variable may appear as object of at most one pattern
                if (usedVars.contains(object)) {
                    throw new IllegalArgumentException("Illegal search, variable is used multiple times as object: " + object.getName());
                } else {
                    usedVars.add(object);
                    matchStatements.add(statement);
                }
            }
        }
    }

    /**
     * Base visitor that accumulates parallel lists of filter-function URIs and
     * their argument arrays for a single match variable.
     */
    private abstract class AbstractEnhanceVisitor extends QueryModelVisitorBase<RuntimeException> {
        final String matchVar;
        List<URI> func = Lists.newArrayList();
        List<Value[]> args = Lists.newArrayList();

        public AbstractEnhanceVisitor(final String matchVar) {
            this.matchVar = matchVar;
        }

        protected void addFilter(final URI uri, final Value[] values) {
            func.add(uri);
            args.add(values);
        }
    }

    //create indexing expression for each filter matching var in filter StatementPattern
    //replace old filter condition with true condition
    private class FilterFunction extends AbstractEnhanceVisitor {
        public FilterFunction(final String matchVar) {
            super(matchVar);
        }

        @Override
        public void meet(final FunctionCall call) {
            final URI fnUri = valueFactory.createURI(call.getURI());
            final Var resultVar = IndexingFunctionRegistry.getResultVarFromFunctionCall(fnUri, call.getArgs());
            if (resultVar != null && resultVar.getName().equals(matchVar)) {
                addFilter(valueFactory.createURI(call.getURI()), extractArguments(matchVar, call));
                // the call is replaced by a constant 'true' so the enclosing filter keeps its shape
                if (call.getParentNode() instanceof Filter || call.getParentNode() instanceof And || call.getParentNode() instanceof LeftJoin) {
                    call.replaceWith(new ValueConstant(valueFactory.createLiteral(true)));
                } else {
                    throw new IllegalArgumentException("Query error: Found " + call + " as part of an expression that is too complex");
                }
            }
        }

        /**
         * Returns the call's constant arguments, skipping the first argument if
         * it is the match variable itself; all remaining arguments must be
         * constants or bound variables.
         */
        private Value[] extractArguments(final String matchName, final FunctionCall call) {
            final Value args[] = new Value[call.getArgs().size() - 1];
            int argI = 0;
            for (int i = 0; i != call.getArgs().size(); ++i) {
                final ValueExpr arg = call.getArgs().get(i);
                // skip the match variable (only when it occupies the next output slot)
                if (argI == i && arg instanceof Var && matchName.equals(((Var)arg).getName())) {
                    continue;
                }
                if (arg instanceof ValueConstant) {
                    args[argI] = ((ValueConstant)arg).getValue();
                } else if (arg instanceof Var && ((Var)arg).hasValue()) {
                    args[argI] = ((Var)arg).getValue();
                } else {
                    throw new IllegalArgumentException("Query error: Found " + arg + ", expected a Literal, BNode or URI");
                }
                ++argI;
            }
            return args;
        }

        @Override
        public void meet(final Filter filter) {
            //First visit children, then condition (reverse of default):
            filter.getArg().visit(this);
            filter.getCondition().visit(this);
        }
    }

    /**
     * Replaces the matched pattern with one index tuple set, or with a Join
     * chain when the replacer produced several.
     */
    private void removeMatchedPattern(final TupleExpr tupleExpr, final StatementPattern pattern, final TupleExprReplacer replacer) {
        final List<TupleExpr> indexTuples = replacer.createReplacement(pattern);
        if (indexTuples.size() > 1) {
            final VarExchangeVisitor vev = new VarExchangeVisitor(pattern);
            tupleExpr.visit(vev);
            Join join = new Join(indexTuples.remove(0), indexTuples.remove(0));
            for (final TupleExpr geo : indexTuples) {
                join = new Join(join, geo);
            }
            pattern.replaceWith(join);
        } else if (indexTuples.size() == 1) {
            pattern.replaceWith(indexTuples.get(0));
            pattern.setParentNode(null);
        } else {
            throw new IllegalStateException("Must have at least one replacement for matched StatementPattern.");
        }
    }

    // strategy for turning one matched pattern into its replacement expressions
    private interface TupleExprReplacer {
        List<TupleExpr> createReplacement(TupleExpr org);
    }

    //replace each filter pertinent StatementPattern with corresponding index expr
    private class IndexerExprReplacer implements TupleExprReplacer {
        private final List<IndexingExpr> indxExpr;
        // function type is taken from the first expression; all are assumed alike
        private final FUNCTION_TYPE type;

        public IndexerExprReplacer(final List<IndexingExpr> indxExpr) {
            this.indxExpr = indxExpr;
            final URI func = indxExpr.get(0).getFunction();
            type = IndexingFunctionRegistry.getFunctionType(func);
        }

        @Override
        public List<TupleExpr> createReplacement(final TupleExpr org) {
            final List<TupleExpr> indexTuples = Lists.newArrayList();
            switch (type) {
            case FREETEXT:
                for (final IndexingExpr indx : indxExpr) {
                    indexTuples.add(new FreeTextTupleSet(indx, freeTextIndexer));
                }
                break;
            case TEMPORAL:
                for (final IndexingExpr indx : indxExpr) {
                    indexTuples.add(new TemporalTupleSet(indx, temporalIndexer));
                }
                break;
            default:
                throw new IllegalArgumentException("Incorrect type!");
            }
            return indexTuples;
        }
    }

    /**
     * Swaps the matched pattern with the left argument of the first Join whose
     * left argument is a StatementPattern, so the pattern is positioned for the
     * Join chain built in removeMatchedPattern.
     */
    private static class VarExchangeVisitor extends QueryModelVisitorBase<RuntimeException> {
        private final StatementPattern exchangeVar;

        public VarExchangeVisitor(final StatementPattern sp) {
            exchangeVar = sp;
        }

        @Override
        public void meet(final Join node) {
            final QueryModelNode lNode = node.getLeftArg();
            if (lNode instanceof StatementPattern) {
                exchangeVar.replaceWith(lNode);
                node.setLeftArg(exchangeVar);
            } else {
                super.meet(node);
            }
        }
    }

    @Override
    public Configuration getConf() {
        return conf;
    }
}
| |
package com.zimbra.qa.selenium.projects.octopus.core;
import java.util.ArrayList;
import com.zimbra.qa.selenium.framework.items.FolderItem;
import com.zimbra.qa.selenium.framework.items.FolderItem.SystemFolder;
import com.zimbra.qa.selenium.framework.util.HarnessException;
import com.zimbra.qa.selenium.framework.util.OctopusAccount;
import com.zimbra.qa.selenium.framework.util.SleepUtil;
import com.zimbra.qa.selenium.framework.util.ZAssert;
import com.zimbra.qa.selenium.framework.util.ZimbraAccount;
import com.zimbra.qa.selenium.framework.util.ZimbraSeleniumProperties;
public class CommonMethods {
public CommonMethods() {}
/**
 * Revokes a grantee's access to the given folder via a SOAP
 * FolderActionRequest with op='!grant'.
 */
protected void revokeShareFolderViaSoap(ZimbraAccount account, ZimbraAccount grantee, FolderItem folder) throws HarnessException {
    final StringBuilder request = new StringBuilder();
    request.append("<FolderActionRequest xmlns='urn:zimbraMail'>");
    request.append("<action id='").append(folder.getId());
    request.append("' op='!grant' zid='").append(grantee.ZimbraId).append("'").append(">");
    request.append("</action>");
    request.append("</FolderActionRequest>");
    account.soapSend(request.toString());
}
/**
 * Grants the given permission on the owner's folder to the grantee, then
 * creates a document-view mountpoint in the grantee's mailbox that points
 * at the shared folder.
 */
protected void mountFolderViaSoap(ZimbraAccount account, ZimbraAccount grantee, FolderItem folder,
        String permission, FolderItem mountPointFolder, String mountPointName) throws HarnessException {
    // first, the owner grants access to the folder
    final StringBuilder grantRequest = new StringBuilder();
    grantRequest.append("<FolderActionRequest xmlns='urn:zimbraMail'>");
    grantRequest.append("<action id='").append(folder.getId()).append("' op='grant'>");
    grantRequest.append("<grant d='").append(grantee.EmailAddress);
    grantRequest.append("' gt='usr' perm='").append(permission).append("'/>");
    grantRequest.append("</action>").append("</FolderActionRequest>");
    account.soapSend(grantRequest.toString());
    // then the grantee mounts the shared folder under mountPointFolder
    final StringBuilder mountRequest = new StringBuilder();
    mountRequest.append("<CreateMountpointRequest xmlns='urn:zimbraMail'>");
    mountRequest.append("<link l='").append(mountPointFolder.getId());
    mountRequest.append("' name='").append(mountPointName);
    mountRequest.append("' view='document' rid='").append(folder.getId());
    mountRequest.append("' zid='").append(account.ZimbraId).append("'/>");
    mountRequest.append("</CreateMountpointRequest>");
    grantee.soapSend(mountRequest.toString());
}
/**
 * Grants the given permission on the folder to the grantee and sends the
 * grantee a share-notification message.
 */
protected void shareFolderViaSoap(ZimbraAccount account, ZimbraAccount grantee, FolderItem folder,
        String permission) throws HarnessException {
    // grant access on the folder
    final StringBuilder grantRequest = new StringBuilder();
    grantRequest.append("<FolderActionRequest xmlns='urn:zimbraMail'>");
    grantRequest.append("<action id='").append(folder.getId()).append("' op='grant'>");
    grantRequest.append("<grant d='").append(grantee.EmailAddress);
    grantRequest.append("' gt='usr' perm='").append(permission).append("'/>");
    grantRequest.append("</action>").append("</FolderActionRequest>");
    account.soapSend(grantRequest.toString());
    // notify the grantee about the share
    final StringBuilder notifyRequest = new StringBuilder();
    notifyRequest.append("<SendShareNotificationRequest xmlns='urn:zimbraMail'>");
    notifyRequest.append("<item id='").append(folder.getId()).append("'/>");
    notifyRequest.append("<e a='").append(grantee.EmailAddress).append("'/>");
    notifyRequest.append("<notes _content='You are invited to view my shared folder ");
    notifyRequest.append(folder.getName()).append(" '/>");
    notifyRequest.append("</SendShareNotificationRequest>");
    account.soapSend(notifyRequest.toString());
}
/**
 * Creates a new document-view folder with a generated unique name via SOAP
 * and returns the created folder as imported from the server.
 * The optional varargs parameter supplies the parent folder; when absent,
 * the Briefcase system folder is used.
 *
 * Delegates to the named overload so the CreateFolderRequest construction
 * lives in exactly one place.
 */
protected FolderItem createFolderViaSoap(ZimbraAccount account, FolderItem ...folderItemArray) throws HarnessException {
    // generate a unique folder name, then reuse the named overload
    String foldername = "Folder " + ZimbraSeleniumProperties.getUniqueString();
    return createFolderViaSoap(account, foldername, folderItemArray);
}
/**
 * Creates a new document-view folder with the given name via SOAP and
 * returns the created folder as imported from the server. The optional
 * varargs parameter supplies the parent folder; when absent, the Briefcase
 * system folder is used.
 */
protected FolderItem createFolderViaSoap(ZimbraAccount account, String folderName, FolderItem ...folderItemArray) throws HarnessException
{
    // default parent is the Briefcase system folder unless one was supplied
    FolderItem parent = FolderItem.importFromSOAP(account, SystemFolder.Briefcase);
    if ((folderItemArray != null) && folderItemArray.length > 0) {
        parent = folderItemArray[0];
    }
    // send soap request
    String request = "<CreateFolderRequest xmlns='urn:zimbraMail'>"
            + "<folder name='" + folderName + "' l='" + parent.getId() + "' view='document'/>"
            + "</CreateFolderRequest>";
    account.soapSend(request);
    // verify folder creation on the server
    return FolderItem.importFromSOAP(account, folderName);
}
// create a new zimbra account
/**
 * Provisions and authenticates a brand-new Octopus account.
 *
 * @return the ready-to-use account
 */
protected OctopusAccount getNewAccount() {
    OctopusAccount account = new OctopusAccount();
    account.provision();
    account.authenticate();
    return account;
}
// return comment id
/**
 * Adds a comment to a briefcase file via SOAP and returns the new
 * comment's id as reported by the server.
 *
 * @param account account that posts the comment
 * @param fileId  id of the file the comment is attached to
 * @param comment comment text
 * @return id attribute of the comment element in the response
 * @throws HarnessException if the SOAP request fails
 */
protected String makeCommentViaSoap(ZimbraAccount account, String fileId, String comment)
        throws HarnessException {
    String request = "<AddCommentRequest xmlns='urn:zimbraMail'> <comment parentId='"
            + fileId + "' text='" + comment + "'/></AddCommentRequest>";
    account.soapSend(request);
    // Brief pause so the server finishes processing before we read back.
    SleepUtil.sleepVerySmall();
    //TODO: verify valid id?
    return account.soapSelectValue("//mail:AddCommentResponse//mail:comment", "id");
}
//Rename a file via Soap
/**
 * Renames a briefcase item via SOAP and asserts that the server
 * acknowledged the rename operation.
 *
 * @param account account performing the rename
 * @param fileId  id of the item to rename
 * @param newName new item name
 * @return the new name, for caller convenience
 * @throws HarnessException if the SOAP request fails
 */
protected String renameViaSoap(ZimbraAccount account, String fileId, String newName)
        throws HarnessException {
    String request = "<ItemActionRequest xmlns='urn:zimbraMail'> <action id='"
            + fileId + "' name='" + newName + "' op='rename' /></ItemActionRequest>";
    account.soapSend(request);
    // Brief pause so the server finishes processing before we verify.
    SleepUtil.sleepVerySmall();
    //verification
    boolean renamed = account.soapMatch(
            "//mail:ItemActionResponse//mail:action", "op", "rename");
    ZAssert.assertTrue(renamed, "Verify file is renamed to " + newName);
    return newName;
}
// Mark file favorite using soap
/**
 * Marks a briefcase file as favorite (op='watch') via SOAP and asserts
 * that the server acknowledged the operation.
 *
 * @param account account performing the action
 * @param fileId  id of the file to mark
 * @throws HarnessException if the SOAP request fails
 */
protected void markFileFavoriteViaSoap(ZimbraAccount account, String fileId)
        throws HarnessException {
    String request = "<DocumentActionRequest xmlns='urn:zimbraMail'>"
            + "<action id='" + fileId + "' op='watch' /></DocumentActionRequest>";
    account.soapSend(request);
    // Brief pause so the server finishes processing before we verify.
    SleepUtil.sleepVerySmall();
    //verification
    boolean marked = account.soapMatch(
            "//mail:DocumentActionResponse//mail:action", "op", "watch");
    ZAssert.assertTrue(marked, "Verify file is marked as favorite");
}
// Unmark file favorite using soap
/**
 * Removes the favorite flag from a briefcase file (op='!watch') via SOAP
 * and asserts that the server acknowledged the operation.
 *
 * @param account account performing the action
 * @param fileId  id of the file to unmark
 * @throws HarnessException if the SOAP request fails
 */
protected void unMarkFileFavoriteViaSoap(ZimbraAccount account, String fileId)
        throws HarnessException {
    account.soapSend("<DocumentActionRequest xmlns='urn:zimbraMail'>"
            + "<action id='" + fileId + "' op='!watch' /></DocumentActionRequest>");
    // Brief pause so the server finishes processing before we verify.
    SleepUtil.sleepVerySmall();
    //verification
    // Fixed assertion message typo: "inmarked" -> "unmarked".
    ZAssert.assertTrue(account.soapMatch(
            "//mail:DocumentActionResponse//mail:action", "op", "!watch"),
            "Verify file is unmarked favorite");
}
// upload file
/**
 * Uploads a data file from the harness' data directory, saves it as a
 * document in the target folder (Briefcase by default), and returns the
 * id of the saved document.
 *
 * @param account         account performing the upload
 * @param fileName        file name under /data/public/other/
 * @param folderItemArray optional; element 0 is used as the target folder
 * @return id attribute of the saved doc element in the response
 * @throws HarnessException if a SOAP request fails
 */
protected String uploadFileViaSoap(ZimbraAccount account, String fileName, FolderItem ...folderItemArray)
        throws HarnessException {
    FolderItem folderItem = FolderItem.importFromSOAP(account, SystemFolder.Briefcase);
    if ((folderItemArray != null) && folderItemArray.length > 0) {
        folderItem = folderItemArray[0];
    }
    // Create file item
    String filePath = ZimbraSeleniumProperties.getBaseDirectory()
            + "/data/public/other/" + fileName;
    // Upload file to server through RestUtil
    String attachmentId = account.uploadFile(filePath);
    if (attachmentId == null) {
        // BUG FIX: the old code sent an ItemActionRequest with the literal
        // id 'null' (the attachment id it had just failed to obtain), which
        // cannot address any item. Simply retry the upload once instead.
        attachmentId = account.uploadFile(filePath);
    }
    // Save uploaded file to the target folder through SOAP
    account.soapSend(
            "<SaveDocumentRequest xmlns='urn:zimbraMail'>" + "<doc l='"
            + folderItem.getId() + "'>" + "<upload id='"
            + attachmentId + "'/>" + "</doc></SaveDocumentRequest>");
    //return id
    return account.soapSelectValue(
            "//mail:SaveDocumentResponse//mail:doc", "id");
}
// save document request
/**
 * Saves a previously uploaded attachment as a document in the given
 * folder via SOAP.
 *
 * @param account      account performing the save
 * @param folder       destination folder
 * @param attachmentId id returned by the earlier upload
 * @throws HarnessException if the SOAP request fails
 */
protected void saveDocumentRequestViaSoap(ZimbraAccount account, FolderItem folder,String attachmentId) throws HarnessException
{
    String request = "<SaveDocumentRequest xmlns='urn:zimbraMail'>"
            + "<doc l='" + folder.getId() + "'><upload id='" + attachmentId + "'/></doc>"
            + "</SaveDocumentRequest>";
    account.soapSend(request);
}
// get activity stream request
/**
 * Requests the first 250 activity-stream entries for the given folder
 * via SOAP.
 *
 * @param account account issuing the request
 * @param folder  folder whose activity stream is fetched
 * @throws HarnessException if the SOAP request fails
 */
protected void getActivityStreamRequest(ZimbraAccount account,FolderItem folder) throws HarnessException
{
    String request = "<GetActivityStreamRequest xmlns='urn:zimbraMail' offset='0' limit='250' id='"
            + folder.getId() + "'/>";
    account.soapSend(request);
}
// delete folder via Soap
/**
 * Deletes the given folder (op='delete') via SOAP.
 *
 * @param account account performing the delete
 * @param folder  folder to delete
 * @throws HarnessException if the SOAP request fails
 */
protected void deleteFolderViaSoap(ZimbraAccount account, FolderItem folder)throws HarnessException
{
    String request = "<ItemActionRequest xmlns='urn:zimbraMail'>"
            + "<action id='" + folder.getId() + "' op='delete'/>"
            + "</ItemActionRequest>";
    account.soapSend(request);
}
//Function returns the array list containing folder Items. folder structure gets created is with hierarchy folder1>folder2>folder3.
/**
 * Builds a linear folder hierarchy (each new folder is the child of the
 * previous one) starting under the named parent folder.
 *
 * @param act            account creating the folders
 * @param ParentFolder   name of the folder the chain starts under
 * @param noOfSubFolders number of nested folders to create
 * @return the parent FolderItems imported before each creation step
 * @throws HarnessException if a SOAP request fails
 */
protected ArrayList<FolderItem> createMultipleSubfolders(ZimbraAccount act,String ParentFolder,int noOfSubFolders) throws HarnessException
{
    ArrayList<FolderItem> createdParents = new ArrayList<FolderItem>();
    String currentParentName = ParentFolder;
    for (int level = 0; level < noOfSubFolders; level++)
    {
        // Resolve the current parent on the server, then create a child in it.
        FolderItem currentParent = FolderItem.importFromSOAP(act, currentParentName);
        createdParents.add(currentParent);
        String childName = "childFolder" + ZimbraSeleniumProperties.getUniqueString();
        // Create sub folder Using SOAP under a folder created
        act.soapSend(
                "<CreateFolderRequest xmlns='urn:zimbraMail'>"
                + "<folder name='" + childName + "' l='" + currentParent.getId() + "' view='document'/>"
                + "</CreateFolderRequest>");
        // The child becomes the parent of the next iteration.
        currentParentName = childName;
    }
    return createdParents;
}
// Function for checking if required document is present in destination folder or not.
/**
 * Searches the named folder for a document with the given file name.
 *
 * @param acount     account issuing the search
 * @param folderName name of the folder to search in
 * @param fileName   document name to look for
 * @return true if a matching doc element is present in the search response
 * @throws HarnessException if the SOAP request fails
 */
public boolean isDocumentPresentInFolder(ZimbraAccount acount,String folderName, String fileName)throws HarnessException
{
    // Resolve the folder id, then restrict the search to that folder.
    FolderItem searchFolder = FolderItem.importFromSOAP(acount, folderName);
    String request = "<SearchRequest xmlns=\"urn:zimbraMail\" types=\"document\">"
            + "<query>inid:" + searchFolder.getId() + "</query>"
            + "</SearchRequest>";
    acount.soapSend(request);
    return acount.soapMatch("//mail:SearchResponse/mail:doc", "name", fileName);
}
// create mountpoint request via soap
/**
 * Creates (as the grantee) a mountpoint onto the owner's shared folder.
 *
 * @param account          owner of the shared folder (supplies the zid)
 * @param grantee          account that creates the mountpoint
 * @param folder           owner's folder being mounted (remote id)
 * @param mountPointFolder grantee-side parent folder for the mountpoint
 * @param mountPointName   display name for the mountpoint
 * @throws HarnessException if the SOAP request fails
 */
protected void mountRequestViaSoap(ZimbraAccount account,ZimbraAccount grantee,FolderItem folder,FolderItem mountPointFolder,
        String mountPointName) throws HarnessException {
    String request = "<CreateMountpointRequest xmlns='urn:zimbraMail'>"
            + "<link l='" + mountPointFolder.getId()
            + "' name='" + mountPointName
            + "' view='document' rid='" + folder.getId()
            + "' zid='" + account.ZimbraId + "'/>"
            + "</CreateMountpointRequest>";
    grantee.soapSend(request);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.noctarius.castmapr;
import static org.junit.Assert.assertEquals;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Semaphore;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.config.Config;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.test.HazelcastJUnit4ClassRunner;
import com.hazelcast.test.annotation.SerialTest;
import com.noctarius.castmapr.MapReduceTask;
import com.noctarius.castmapr.MapReduceTaskFactory;
import com.noctarius.castmapr.spi.Collator;
import com.noctarius.castmapr.spi.Collector;
import com.noctarius.castmapr.spi.Distributable;
import com.noctarius.castmapr.spi.DistributableReducer;
import com.noctarius.castmapr.spi.MapReduceCollatorListener;
import com.noctarius.castmapr.spi.MapReduceListener;
import com.noctarius.castmapr.spi.Mapper;
import com.noctarius.castmapr.spi.Reducer;
/**
 * Integration tests for client-submitted map/reduce jobs whose reducers are
 * distributed to the cluster members (via {@code @Distributable} or the
 * {@code DistributableReducer} interface).
 *
 * <p>Each test boots three embedded Hazelcast members plus one client,
 * fills the "default" map with the integers 0..99 (key == value), runs a
 * map/reduce task through the client, and compares against locally
 * precalculated sums. The {@code CountingManagedContext} (provided by the
 * AbstractMapReduceTaskTest base class — not visible here) records which
 * member instances executed reducer code; asserting 4 recorded names
 * presumably covers the 3 members plus the client — TODO confirm against
 * the base class.
 */
@RunWith( HazelcastJUnit4ClassRunner.class )
@Category( SerialTest.class )
@SuppressWarnings( "unused" )
public class DistributedMapperClientMapReduceTest
    extends AbstractMapReduceTaskTest
{
    /** Name of the IMap used by every test. */
    private static final String MAP_NAME = "default";

    // Request a GC before each test; presumably helps keep the repeated
    // 3-member + client setup from exhausting memory across the suite.
    @Before
    public void gc()
    {
        Runtime.getRuntime().gc();
    }

    // Tear down every client and member instance a test created.
    @After
    public void cleanup()
    {
        HazelcastClient.shutdownAll();
        Hazelcast.shutdownAll();
    }

    /**
     * Synchronous submit with a {@code @Distributable}-annotated reducer:
     * results are grouped by key (i % 4) and each group's sum is checked.
     */
    @Test( timeout = 30000 )
    public void testMapperReducer()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        // Blocking submit: returns the fully reduced key -> sum map.
        Map<String, Integer> result =
            task.mapper( new GroupingTestMapper() ).reducer( new TestReducer( client, context ) ).submit();
        // Precalculate results
        int[] expectedResults = new int[4];
        for ( int i = 0; i < 100; i++ )
        {
            int index = i % 4;
            expectedResults[index] += i;
        }
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResults[i], (int) result.get( String.valueOf( i ) ) );
        }
        // Reducer ran on all expected instances (see class comment).
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /**
     * Same as {@link #testMapperReducer()} but the reducer is distributed
     * by implementing {@code DistributableReducer} instead of the
     * {@code @Distributable} annotation.
     */
    @Test( timeout = 30000 )
    public void testMapperReducerInterface()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        Map<String, Integer> result =
            task.mapper( new GroupingTestMapper() ).reducer( new TestReducer2( client, context ) ).submit();
        // Precalculate results
        int[] expectedResults = new int[4];
        for ( int i = 0; i < 100; i++ )
        {
            int index = i % 4;
            expectedResults[index] += i;
        }
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResults[i], (int) result.get( String.valueOf( i ) ) );
        }
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /**
     * Synchronous submit with a collator: the grouped sums are collated
     * into a single total (0 + 1 + ... + 99).
     */
    @Test( timeout = 30000 )
    public void testMapperReducerCollator()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        int result =
            task.mapper( new GroupingTestMapper() ).reducer( new TestReducer( client, context ) ).submit( new TestCollator() );
        // Precalculate result
        int expectedResult = 0;
        for ( int i = 0; i < 100; i++ )
        {
            expectedResult += i;
        }
        // NOTE(review): this loop repeats the identical assertion 4 times;
        // redundant (a single assertEquals would suffice) but harmless.
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResult, result );
        }
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /**
     * Same as {@link #testMapperReducerCollator()} but with the
     * interface-based distributable reducer.
     */
    @Test( timeout = 30000 )
    public void testMapperReducerCollatorInterface()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        int result =
            task.mapper( new GroupingTestMapper() ).reducer( new TestReducer2( client, context ) ).submit( new TestCollator() );
        // Precalculate result
        int expectedResult = 0;
        for ( int i = 0; i < 100; i++ )
        {
            expectedResult += i;
        }
        // NOTE(review): redundant repeated assertion, see testMapperReducerCollator.
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResult, result );
        }
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /**
     * Asynchronous submit: the listener copies the reduced results and
     * releases a semaphore; the test thread blocks on the semaphore before
     * asserting. The semaphore's single permit is taken up-front so the
     * second acquire() waits for the callback.
     */
    @Test( timeout = 30000 )
    public void testAsyncMapperReducer()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        final Map<String, Integer> listenerResults = new HashMap<String, Integer>();
        final Semaphore semaphore = new Semaphore( 1 );
        // Drain the only permit; the callback's release() unblocks us below.
        semaphore.acquire();
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        task.mapper( new GroupingTestMapper() ).reducer( new TestReducer( client, context ) ) //
        .submitAsync( new MapReduceListener<String, Integer>()
        {
            @Override
            public void onCompletion( Map<String, Integer> reducedResults )
            {
                listenerResults.putAll( reducedResults );
                semaphore.release();
            }
        } );
        // Precalculate results
        int[] expectedResults = new int[4];
        for ( int i = 0; i < 100; i++ )
        {
            int index = i % 4;
            expectedResults[index] += i;
        }
        // Wait until the async job completed (test timeout bounds this).
        semaphore.acquire();
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResults[i], (int) listenerResults.get( String.valueOf( i ) ) );
        }
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /**
     * Same as {@link #testAsyncMapperReducer()} but with the
     * interface-based distributable reducer.
     */
    @Test( timeout = 30000 )
    public void testAsyncMapperReducerInterface()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        final Map<String, Integer> listenerResults = new HashMap<String, Integer>();
        final Semaphore semaphore = new Semaphore( 1 );
        semaphore.acquire();
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        task.mapper( new GroupingTestMapper() ).reducer( new TestReducer2( client, context ) ) //
        .submitAsync( new MapReduceListener<String, Integer>()
        {
            @Override
            public void onCompletion( Map<String, Integer> reducedResults )
            {
                listenerResults.putAll( reducedResults );
                semaphore.release();
            }
        } );
        // Precalculate results
        int[] expectedResults = new int[4];
        for ( int i = 0; i < 100; i++ )
        {
            int index = i % 4;
            expectedResults[index] += i;
        }
        semaphore.acquire();
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResults[i], (int) listenerResults.get( String.valueOf( i ) ) );
        }
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /**
     * Asynchronous submit with a collator; the single collated total is
     * delivered to the listener and handed back via a one-element array.
     */
    @Test( timeout = 30000 )
    public void testAsyncMapperReducerCollator()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        // Single-element array lets the anonymous listener write the result.
        final int[] result = new int[1];
        final Semaphore semaphore = new Semaphore( 1 );
        semaphore.acquire();
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        task.mapper( new GroupingTestMapper() ).reducer( new TestReducer( client, context ) )//
        .submitAsync( new TestCollator(), new MapReduceCollatorListener<Integer>()
        {
            @Override
            public void onCompletion( Integer r )
            {
                result[0] = r.intValue();
                semaphore.release();
            }
        } );
        // Precalculate result
        int expectedResult = 0;
        for ( int i = 0; i < 100; i++ )
        {
            expectedResult += i;
        }
        semaphore.acquire();
        // NOTE(review): redundant repeated assertion, see testMapperReducerCollator.
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResult, result[0] );
        }
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /**
     * Same as {@link #testAsyncMapperReducerCollator()} but with the
     * interface-based distributable reducer.
     */
    @Test( timeout = 30000 )
    public void testAsyncMapperReducerCollatorInterface()
        throws Exception
    {
        Config config = buildConfig();
        CountingManagedContext context = (CountingManagedContext) config.getManagedContext();
        HazelcastInstance h1 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h2 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance h3 = Hazelcast.newHazelcastInstance( config );
        HazelcastInstance client = HazelcastClient.newHazelcastClient( null );
        IMap<Integer, Integer> m1 = client.getMap( MAP_NAME );
        for ( int i = 0; i < 100; i++ )
        {
            m1.put( i, i );
        }
        final int[] result = new int[1];
        final Semaphore semaphore = new Semaphore( 1 );
        semaphore.acquire();
        MapReduceTaskFactory factory = MapReduceTaskFactory.newInstance( client );
        MapReduceTask<Integer, Integer, String, Integer> task = factory.build( m1 );
        task.mapper( new GroupingTestMapper() ).reducer( new TestReducer2( client, context ) )//
        .submitAsync( new TestCollator(), new MapReduceCollatorListener<Integer>()
        {
            @Override
            public void onCompletion( Integer r )
            {
                result[0] = r.intValue();
                semaphore.release();
            }
        } );
        // Precalculate result
        int expectedResult = 0;
        for ( int i = 0; i < 100; i++ )
        {
            expectedResult += i;
        }
        semaphore.acquire();
        // NOTE(review): redundant repeated assertion, see testMapperReducerCollator.
        for ( int i = 0; i < 4; i++ )
        {
            assertEquals( expectedResult, result[0] );
        }
        Set<String> hazelcastNames = context.getHazelcastNames();
        assertEquals( 4, hazelcastNames.size() );
    }

    /** Mapper that groups every value under key (key % 4) as a String. */
    @SuppressWarnings( "serial" )
    public static class GroupingTestMapper
        extends Mapper<Integer, Integer, String, Integer>
    {
        @Override
        public void map( Integer key, Integer value, Collector<String, Integer> collector )
        {
            collector.emit( String.valueOf( key % 4 ), value );
        }
    }

    /**
     * Summing reducer distributed via the {@code @Distributable} annotation.
     * Records the name of the Hazelcast instance it runs on so the tests
     * can verify where reduction happened. The transient fields are
     * re-injected on the remote side via setHazelcastInstance/setCouter
     * (setter names come from the CountingAware contract defined elsewhere).
     */
    @Distributable
    @SuppressWarnings( "serial" )
    public static class TestReducer
        implements Reducer<String, Integer>, CountingAware
    {
        private transient HazelcastInstance hazelcastInstance;
        private transient Set<String> hazelcastNames;

        // No-arg constructor required for deserialization on remote members.
        public TestReducer()
        {
        }

        public TestReducer( HazelcastInstance hazelcastInstance, CountingManagedContext context )
        {
            this.hazelcastInstance = hazelcastInstance;
            this.hazelcastNames = context.getHazelcastNames();
        }

        @Override
        public Integer reduce( String key, Iterator<Integer> values )
        {
            // Record which instance performed this reduction.
            hazelcastNames.add( hazelcastInstance.getName() );
            int sum = 0;
            while ( values.hasNext() )
            {
                sum += values.next();
            }
            return sum;
        }

        @Override
        public void setHazelcastInstance( HazelcastInstance hazelcastInstance )
        {
            this.hazelcastInstance = hazelcastInstance;
        }

        @Override
        public void setCouter( Set<String> hazelcastNames )
        {
            this.hazelcastNames = hazelcastNames;
        }
    }

    /**
     * Identical summing reducer, but distributed by implementing
     * {@code DistributableReducer} instead of using the annotation.
     */
    @SuppressWarnings( "serial" )
    public static class TestReducer2
        implements DistributableReducer<String, Integer>, CountingAware
    {
        private transient HazelcastInstance hazelcastInstance;
        private transient Set<String> hazelcastNames;

        // No-arg constructor required for deserialization on remote members.
        public TestReducer2()
        {
        }

        public TestReducer2( HazelcastInstance hazelcastInstance, CountingManagedContext context )
        {
            this.hazelcastInstance = hazelcastInstance;
            this.hazelcastNames = context.getHazelcastNames();
        }

        @Override
        public Integer reduce( String key, Iterator<Integer> values )
        {
            hazelcastNames.add( hazelcastInstance.getName() );
            int sum = 0;
            while ( values.hasNext() )
            {
                sum += values.next();
            }
            return sum;
        }

        @Override
        public void setHazelcastInstance( HazelcastInstance hazelcastInstance )
        {
            this.hazelcastInstance = hazelcastInstance;
        }

        @Override
        public void setCouter( Set<String> hazelcastNames )
        {
            this.hazelcastNames = hazelcastNames;
        }
    }

    /** Collator that folds the per-key sums into a single grand total. */
    public static class TestCollator
        implements Collator<String, Integer, Integer>
    {
        @Override
        public Integer collate( Map<String, Integer> reducedResults )
        {
            int sum = 0;
            for ( Integer value : reducedResults.values() )
            {
                sum += value;
            }
            return sum;
        }
    }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.container.test.impl.client.deployment;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.logging.Handler;
import java.util.logging.Logger;
import java.util.logging.StreamHandler;
import org.jboss.arquillian.container.spi.client.deployment.DeploymentDescription;
import org.jboss.arquillian.container.spi.client.deployment.TargetDescription;
import org.jboss.arquillian.container.spi.client.deployment.Validate;
import org.jboss.arquillian.container.spi.client.protocol.ProtocolDescription;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.OverProtocol;
import org.jboss.arquillian.container.test.api.ShouldThrowException;
import org.jboss.arquillian.container.test.api.TargetsContainer;
import org.jboss.arquillian.test.spi.TestClass;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* AnnotationDeploymentScenarioGeneratorTestCase
*
* @author <a href="mailto:aslak@redhat.com">Aslak Knutsen</a>
* @version $Revision: $
*/
public class AnnotationDeploymentScenarioGeneratorTestCase
{
private static Logger log = Logger.getLogger(AnnotationDeploymentScenarioGenerator.class.getName());
private static OutputStream logCapturingStream;
private static StreamHandler customLogHandler;
private final static String expectedLogPartForArchiveWithUnexpectedFileExtension = "unexpected file extension";
@Before
public void attachLogCapturer()
{
logCapturingStream = new ByteArrayOutputStream();
Handler[] handlers = log.getParent().getHandlers();
customLogHandler = new StreamHandler(logCapturingStream, handlers[0].getFormatter());
log.addHandler(customLogHandler);
}
@After
public void detachLagCapturer()
{
log.removeHandler(customLogHandler);
customLogHandler = null;
try
{
logCapturingStream.close();
} catch (IOException e)
{
throw new IllegalStateException("Potential memory leak as log capturing stream could not be closed");
}
logCapturingStream = null;
}
public String getTestCapturedLog() throws IOException
{
customLogHandler.flush();
return logCapturingStream.toString();
}
@Test
public void shouldHandleMultipleDeploymentsAllDefault() throws Exception
{
List<DeploymentDescription> scenario = generate(MultiDeploymentsDefault.class);
Assert.assertNotNull(scenario);
Assert.assertEquals(
"Verify all deployments were found",
2, scenario.size());
for(DeploymentDescription deployment : scenario)
{
Assert.assertEquals(
"Verify deployment has default target",
TargetDescription.DEFAULT,
deployment.getTarget());
Assert.assertEquals(
"Verify deployment has default protocol",
ProtocolDescription.DEFAULT,
deployment.getProtocol());
Assert.assertEquals(-1, deployment.getOrder());
Assert.assertEquals(true, deployment.managed());
Assert.assertTrue(Validate.isArchiveOfType(JavaArchive.class, deployment.getArchive()));
}
}
@Test
public void shouldHandleMultipleDeploymentsAllSet() throws Exception
{
List<DeploymentDescription> scenario = generate(MultiDeploymentsSet.class);
Assert.assertNotNull(scenario);
Assert.assertEquals(
"Verify all deployments were found",
2, scenario.size());
for(DeploymentDescription deploymentDesc : scenario) {
if(deploymentDesc.getOrder() == 1) {
Assert.assertEquals(
"Verify deployment has specified target",
new TargetDescription("target-first"),
deploymentDesc.getTarget());
Assert.assertEquals(
"Verify deployment has specified protocol",
new ProtocolDescription("protocol-first"),
deploymentDesc.getProtocol());
Assert.assertEquals(1, deploymentDesc.getOrder());
Assert.assertEquals(false, deploymentDesc.managed());
Assert.assertEquals(false, deploymentDesc.testable());
Assert.assertTrue(Validate.isArchiveOfType(JavaArchive.class, deploymentDesc.getArchive()));
Assert.assertNull(deploymentDesc.getExpectedException());
} else {
Assert.assertEquals(
"Verify deployment has specified target",
new TargetDescription("target-second"),
deploymentDesc.getTarget());
Assert.assertEquals(
"Verify deployment has specified protocol",
new ProtocolDescription("protocol-second"),
deploymentDesc.getProtocol());
Assert.assertEquals(2, deploymentDesc.getOrder());
Assert.assertEquals(false, deploymentDesc.managed());
Assert.assertEquals(true, deploymentDesc.testable());
Assert.assertTrue(Validate.isArchiveOfType(JavaArchive.class, deploymentDesc.getArchive()));
Assert.assertNull(deploymentDesc.getExpectedException());
}
}
}
@Test
public void shouldSortDeploymentsByOrder() throws Exception
{
List<DeploymentDescription> scenario = generate(MultiDeploymentsInReverseOrder.class);
Assert.assertNotNull(scenario);
Assert.assertEquals(
"Verify all deployments were found",
3, scenario.size());
Assert.assertTrue(
"Deployments are not sorted by order",
scenario.get(0).getOrder() < scenario.get(1).getOrder()
);
Assert.assertTrue(
"Deployments are not sorted by order",
scenario.get(1).getOrder() < scenario.get(2).getOrder()
);
}
@Test
public void shouldReadExpectedAndOverrideDeployment()
{
List<DeploymentDescription> scenario = generate(ExpectedDeploymentExceptionSet.class);
Assert.assertNotNull(scenario);
Assert.assertEquals(
"Verify all deployments were found",
1, scenario.size());
DeploymentDescription deploymentOne = scenario.get(0);
Assert.assertEquals(false, deploymentOne.testable());
Assert.assertTrue(Validate.isArchiveOfType(JavaArchive.class, deploymentOne.getArchive()));
Assert.assertEquals(Exception.class, deploymentOne.getExpectedException());
}
@Test
public void shouldAllowNoDeploymentPresent() throws Exception
{
List<DeploymentDescription> descriptors = generate(DeploymentNotPresent.class);
Assert.assertNotNull(descriptors);
Assert.assertEquals(0, descriptors.size());
}
@Test
public void shouldAllowNonPublicDeploymentMethods() throws Exception {
List<DeploymentDescription> descriptors = generate(DeploymentProtectedMethods.class);
Assert.assertNotNull(descriptors);
Assert.assertEquals(3, descriptors.size());
}
@Test
public void shouldAllowNonPublicDeploymentMethodsFromSuperClass() throws Exception {
List<DeploymentDescription> descriptors = generate(DeploymentProtectedMethodsInherited.class);
Assert.assertNotNull(descriptors);
Assert.assertEquals(3, descriptors.size());
}
@Test(expected = IllegalArgumentException.class)
public void shouldThrowExceptionOnDeploymentNotStatic() throws Exception
{
new AnnotationDeploymentScenarioGenerator().generate(
new TestClass(DeploymentNotStatic.class));
}
@Test(expected = IllegalArgumentException.class)
public void shouldThrowExceptionOnDeploymentWrongReturnType() throws Exception
{
new AnnotationDeploymentScenarioGenerator().generate(
new TestClass(DeploymentWrongReturnType.class));
}
@Test
public void shouldLogWarningForMismatchingArchiveTypeAndFileExtension() throws Exception
{
new AnnotationDeploymentScenarioGenerator().generate(
new TestClass(DeploymentWithMismatchingTypeAndFileExtension.class));
String capturedLog = getTestCapturedLog();
Assert.assertTrue(capturedLog.contains(expectedLogPartForArchiveWithUnexpectedFileExtension));
}
@Test
public void shouldNotLogWarningForMatchingArchiveTypeAndFileExtension() throws Exception
{
new AnnotationDeploymentScenarioGenerator().generate(
new TestClass(DeploymentWithSpecifiedFileExtension.class));
String capturedLog = getTestCapturedLog();
Assert.assertFalse(capturedLog.contains(expectedLogPartForArchiveWithUnexpectedFileExtension));
}
@Test
public void shouldLogWarningForDeploymentWithMissingFileExtension() throws Exception
{
new AnnotationDeploymentScenarioGenerator().generate(
new TestClass(DeploymentWithMissingFileExtension.class));
String capturedLog = getTestCapturedLog();
Assert.assertTrue(capturedLog.contains(expectedLogPartForArchiveWithUnexpectedFileExtension));
}
@Test // should not log warning when using the default archive name
public void shouldNotLogWarningForDeploymentWithoutSpecifiedName() throws Exception
{
new AnnotationDeploymentScenarioGenerator().generate(
new TestClass(DeploymentWithoutSpecifiedName.class));
String capturedLog = getTestCapturedLog();
Assert.assertFalse(capturedLog.contains(expectedLogPartForArchiveWithUnexpectedFileExtension));
}
@SuppressWarnings("unused")
private static class MultiDeploymentsDefault
{
@Deployment
public static Archive<?> deploymentOne()
{
return ShrinkWrap.create(JavaArchive.class);
}
@Deployment
public static Archive<?> deploymentTwo()
{
return ShrinkWrap.create(JavaArchive.class);
}
}
@SuppressWarnings("unused")
private static class MultiDeploymentsSet
{
@OverProtocol("protocol-first")
@TargetsContainer("target-first")
@Deployment(name = "first", order = 1, managed = false, testable = false)
public static Archive<?> deploymentOne()
{
return ShrinkWrap.create(JavaArchive.class);
}
@OverProtocol("protocol-second")
@TargetsContainer("target-second")
@Deployment(name = "second", order = 2, managed = false)
public static Archive<?> deploymentTwo()
{
return ShrinkWrap.create(JavaArchive.class);
}
}
@SuppressWarnings("unused")
private static class MultiDeploymentsInReverseOrder
{
@Deployment(name = "second", order = 2)
public static Archive<?> deploymentOne()
{
return ShrinkWrap.create(JavaArchive.class);
}
@Deployment(name = "third", order = 3)
public static Archive<?> deploymentThree()
{
return ShrinkWrap.create(JavaArchive.class);
}
@Deployment(name = "first", order = 1)
public static Archive<?> deploymentTwo()
{
return ShrinkWrap.create(JavaArchive.class);
}
}
@SuppressWarnings("unused")
private static class ExpectedDeploymentExceptionSet
{
@Deployment(name = "second", testable = true) // testable should be overwritten by @Expected
@ShouldThrowException
public static Archive<?> deploymentOne()
{
return ShrinkWrap.create(JavaArchive.class);
}
}
@SuppressWarnings("unused")
// Fixture: @Deployment methods with package-private, private and protected
// visibility — the generator should discover all of them, not just public ones.
private static class DeploymentProtectedMethods {
@Deployment
static JavaArchive one() {
return ShrinkWrap.create(JavaArchive.class);
}
@Deployment
private static JavaArchive two() {
return ShrinkWrap.create(JavaArchive.class);
}
@Deployment
protected static JavaArchive tree() {
return ShrinkWrap.create(JavaArchive.class);
}
}
// Fixture: inherits all @Deployment methods from DeploymentProtectedMethods,
// to verify discovery walks up the class hierarchy.
private static class DeploymentProtectedMethodsInherited extends DeploymentProtectedMethods {
}
// Fixture: no @Deployment method at all.
private static class DeploymentNotPresent
{
}
@SuppressWarnings("unused")
// Fixture: @Deployment on an instance method — invalid, deployment methods must be static.
private static class DeploymentNotStatic
{
@Deployment
public Archive<?> test()
{
return ShrinkWrap.create(JavaArchive.class);
}
}
@SuppressWarnings("unused")
// Fixture: @Deployment method declaring Object as its return type — invalid,
// the generator expects an Archive or Descriptor return type.
private static class DeploymentWrongReturnType
{
@Deployment
public Object test()
{
return ShrinkWrap.create(JavaArchive.class);
}
}
@SuppressWarnings("unused")
// Fixture: a WebArchive named with a ".jar" extension — the archive type and the
// file extension disagree, which should trigger the mismatch warning.
private static class DeploymentWithMismatchingTypeAndFileExtension
{
@Deployment
public static WebArchive test()
{
return ShrinkWrap.create(WebArchive.class, "test.jar");
}
}
@SuppressWarnings("unused")
// Fixture: a WebArchive with the matching ".war" extension — no warning expected.
private static class DeploymentWithSpecifiedFileExtension
{
@Deployment
public static WebArchive test()
{
return ShrinkWrap.create(WebArchive.class, "test.war");
}
}
@SuppressWarnings("unused")
// Fixture: archive name given without any file extension.
private static class DeploymentWithMissingFileExtension
{
@Deployment
public static WebArchive test()
{
return ShrinkWrap.create(WebArchive.class, "test");
}
}
@SuppressWarnings("unused")
// Fixture: archive created with no name at all, so ShrinkWrap assigns its default.
private static class DeploymentWithoutSpecifiedName
{
@Deployment
public static WebArchive test()
{
return ShrinkWrap.create(WebArchive.class);
}
}
// Convenience helper: run the annotation-based scenario generator on the given
// fixture class and return the resulting deployment descriptions.
private List<DeploymentDescription> generate(Class<?> testClass)
{
AnnotationDeploymentScenarioGenerator generator = new AnnotationDeploymentScenarioGenerator();
return generator.generate(new TestClass(testClass));
}
}
| |
/*******************************************************************************
* Copyright (c)
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
******************************************************************************/
/**
*/
package bvr.impl;
import bvr.BvrPackage;
import bvr.CompoundNode;
import bvr.Constraint;
import bvr.MultiplicityInterval;
import bvr.Target;
import bvr.VClassifier;
import bvr.VNode;
import bvr.Variable;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>VClassifier</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link bvr.impl.VClassifierImpl#getGroupMultiplicity <em>Group Multiplicity</em>}</li>
* <li>{@link bvr.impl.VClassifierImpl#getOwnedConstraint <em>Owned Constraint</em>}</li>
* <li>{@link bvr.impl.VClassifierImpl#getVariable <em>Variable</em>}</li>
* <li>{@link bvr.impl.VClassifierImpl#getMember <em>Member</em>}</li>
* <li>{@link bvr.impl.VClassifierImpl#getOwnedTargets <em>Owned Targets</em>}</li>
* <li>{@link bvr.impl.VClassifierImpl#getInstanceMultiplicity <em>Instance Multiplicity</em>}</li>
* </ul>
* </p>
*
* @generated
*/
// NOTE(review): EMF-generated implementation class. Hand edits outside the
// begin-user-doc/end-user-doc regions are lost when the model is regenerated,
// so only comments are added here; all code is left exactly as generated.
public class VClassifierImpl extends VSpecImpl implements VClassifier {
/**
 * The cached value of the '{@link #getGroupMultiplicity() <em>Group Multiplicity</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getGroupMultiplicity()
 * @generated
 * @ordered
 */
protected MultiplicityInterval groupMultiplicity;
/**
 * The cached value of the '{@link #getOwnedConstraint() <em>Owned Constraint</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getOwnedConstraint()
 * @generated
 * @ordered
 */
protected EList<Constraint> ownedConstraint;
/**
 * The cached value of the '{@link #getVariable() <em>Variable</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getVariable()
 * @generated
 * @ordered
 */
protected EList<Variable> variable;
/**
 * The cached value of the '{@link #getMember() <em>Member</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getMember()
 * @generated
 * @ordered
 */
protected EList<VNode> member;
/**
 * The cached value of the '{@link #getOwnedTargets() <em>Owned Targets</em>}' containment reference list.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getOwnedTargets()
 * @generated
 * @ordered
 */
protected EList<Target> ownedTargets;
/**
 * The cached value of the '{@link #getInstanceMultiplicity() <em>Instance Multiplicity</em>}' containment reference.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @see #getInstanceMultiplicity()
 * @generated
 * @ordered
 */
protected MultiplicityInterval instanceMultiplicity;
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected VClassifierImpl() {
super();
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EClass eStaticClass() {
// Identifies this instance's metaclass for the EMF reflective API.
return BvrPackage.Literals.VCLASSIFIER;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public MultiplicityInterval getGroupMultiplicity() {
return groupMultiplicity;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetGroupMultiplicity(MultiplicityInterval newGroupMultiplicity, NotificationChain msgs) {
// Swaps the cached reference and chains a SET notification without
// dispatching it; the caller is responsible for dispatching the chain.
MultiplicityInterval oldGroupMultiplicity = groupMultiplicity;
groupMultiplicity = newGroupMultiplicity;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY, oldGroupMultiplicity, newGroupMultiplicity);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setGroupMultiplicity(MultiplicityInterval newGroupMultiplicity) {
if (newGroupMultiplicity != groupMultiplicity) {
// Containment reference: detach the old child and attach the new one
// before recording the SET notification, then dispatch everything at once.
NotificationChain msgs = null;
if (groupMultiplicity != null)
msgs = ((InternalEObject)groupMultiplicity).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY, null, msgs);
if (newGroupMultiplicity != null)
msgs = ((InternalEObject)newGroupMultiplicity).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY, null, msgs);
msgs = basicSetGroupMultiplicity(newGroupMultiplicity, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
// Touch with the same value: still notify listeners per EMF convention.
eNotify(new ENotificationImpl(this, Notification.SET, BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY, newGroupMultiplicity, newGroupMultiplicity));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public EList<Constraint> getOwnedConstraint() {
// Lazily created containment list, as is standard for generated EMF features.
if (ownedConstraint == null) {
ownedConstraint = new EObjectContainmentEList<Constraint>(Constraint.class, this, BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT);
}
return ownedConstraint;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public EList<Variable> getVariable() {
if (variable == null) {
variable = new EObjectContainmentEList<Variable>(Variable.class, this, BvrPackage.VCLASSIFIER__VARIABLE);
}
return variable;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public EList<VNode> getMember() {
if (member == null) {
member = new EObjectContainmentEList<VNode>(VNode.class, this, BvrPackage.VCLASSIFIER__MEMBER);
}
return member;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public EList<Target> getOwnedTargets() {
if (ownedTargets == null) {
ownedTargets = new EObjectContainmentEList<Target>(Target.class, this, BvrPackage.VCLASSIFIER__OWNED_TARGETS);
}
return ownedTargets;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public MultiplicityInterval getInstanceMultiplicity() {
return instanceMultiplicity;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetInstanceMultiplicity(MultiplicityInterval newInstanceMultiplicity, NotificationChain msgs) {
// Same pattern as basicSetGroupMultiplicity: swap and chain, no dispatch here.
MultiplicityInterval oldInstanceMultiplicity = instanceMultiplicity;
instanceMultiplicity = newInstanceMultiplicity;
if (eNotificationRequired()) {
ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY, oldInstanceMultiplicity, newInstanceMultiplicity);
if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setInstanceMultiplicity(MultiplicityInterval newInstanceMultiplicity) {
if (newInstanceMultiplicity != instanceMultiplicity) {
NotificationChain msgs = null;
if (instanceMultiplicity != null)
msgs = ((InternalEObject)instanceMultiplicity).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY, null, msgs);
if (newInstanceMultiplicity != null)
msgs = ((InternalEObject)newInstanceMultiplicity).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY, null, msgs);
msgs = basicSetInstanceMultiplicity(newInstanceMultiplicity, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY, newInstanceMultiplicity, newInstanceMultiplicity));
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
// Reflective removal of a contained child, dispatched by feature ID.
switch (featureID) {
case BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY:
return basicSetGroupMultiplicity(null, msgs);
case BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT:
return ((InternalEList<?>)getOwnedConstraint()).basicRemove(otherEnd, msgs);
case BvrPackage.VCLASSIFIER__VARIABLE:
return ((InternalEList<?>)getVariable()).basicRemove(otherEnd, msgs);
case BvrPackage.VCLASSIFIER__MEMBER:
return ((InternalEList<?>)getMember()).basicRemove(otherEnd, msgs);
case BvrPackage.VCLASSIFIER__OWNED_TARGETS:
return ((InternalEList<?>)getOwnedTargets()).basicRemove(otherEnd, msgs);
case BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY:
return basicSetInstanceMultiplicity(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
// Reflective read access for all features declared on VClassifier.
switch (featureID) {
case BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY:
return getGroupMultiplicity();
case BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT:
return getOwnedConstraint();
case BvrPackage.VCLASSIFIER__VARIABLE:
return getVariable();
case BvrPackage.VCLASSIFIER__MEMBER:
return getMember();
case BvrPackage.VCLASSIFIER__OWNED_TARGETS:
return getOwnedTargets();
case BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY:
return getInstanceMultiplicity();
}
return super.eGet(featureID, resolve, coreType);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
// Reflective write access; list features are replaced wholesale (clear + addAll).
switch (featureID) {
case BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY:
setGroupMultiplicity((MultiplicityInterval)newValue);
return;
case BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT:
getOwnedConstraint().clear();
getOwnedConstraint().addAll((Collection<? extends Constraint>)newValue);
return;
case BvrPackage.VCLASSIFIER__VARIABLE:
getVariable().clear();
getVariable().addAll((Collection<? extends Variable>)newValue);
return;
case BvrPackage.VCLASSIFIER__MEMBER:
getMember().clear();
getMember().addAll((Collection<? extends VNode>)newValue);
return;
case BvrPackage.VCLASSIFIER__OWNED_TARGETS:
getOwnedTargets().clear();
getOwnedTargets().addAll((Collection<? extends Target>)newValue);
return;
case BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY:
setInstanceMultiplicity((MultiplicityInterval)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
// Reflective reset: references go to null, lists are cleared.
switch (featureID) {
case BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY:
setGroupMultiplicity((MultiplicityInterval)null);
return;
case BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT:
getOwnedConstraint().clear();
return;
case BvrPackage.VCLASSIFIER__VARIABLE:
getVariable().clear();
return;
case BvrPackage.VCLASSIFIER__MEMBER:
getMember().clear();
return;
case BvrPackage.VCLASSIFIER__OWNED_TARGETS:
getOwnedTargets().clear();
return;
case BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY:
setInstanceMultiplicity((MultiplicityInterval)null);
return;
}
super.eUnset(featureID);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
// A feature is "set" when its cached field is non-null (and non-empty for lists).
switch (featureID) {
case BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY:
return groupMultiplicity != null;
case BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT:
return ownedConstraint != null && !ownedConstraint.isEmpty();
case BvrPackage.VCLASSIFIER__VARIABLE:
return variable != null && !variable.isEmpty();
case BvrPackage.VCLASSIFIER__MEMBER:
return member != null && !member.isEmpty();
case BvrPackage.VCLASSIFIER__OWNED_TARGETS:
return ownedTargets != null && !ownedTargets.isEmpty();
case BvrPackage.VCLASSIFIER__INSTANCE_MULTIPLICITY:
return instanceMultiplicity != null;
}
return super.eIsSet(featureID);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) {
// Maps VClassifier feature IDs down to the IDs of the mixin interfaces
// (VNode, CompoundNode) this class inherits features from.
if (baseClass == VNode.class) {
switch (derivedFeatureID) {
case BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY: return BvrPackage.VNODE__GROUP_MULTIPLICITY;
case BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT: return BvrPackage.VNODE__OWNED_CONSTRAINT;
case BvrPackage.VCLASSIFIER__VARIABLE: return BvrPackage.VNODE__VARIABLE;
default: return -1;
}
}
if (baseClass == CompoundNode.class) {
switch (derivedFeatureID) {
case BvrPackage.VCLASSIFIER__MEMBER: return BvrPackage.COMPOUND_NODE__MEMBER;
case BvrPackage.VCLASSIFIER__OWNED_TARGETS: return BvrPackage.COMPOUND_NODE__OWNED_TARGETS;
default: return -1;
}
}
return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass);
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) {
// Inverse of eBaseStructuralFeatureID: maps mixin-interface feature IDs
// back up to this class's own feature IDs.
if (baseClass == VNode.class) {
switch (baseFeatureID) {
case BvrPackage.VNODE__GROUP_MULTIPLICITY: return BvrPackage.VCLASSIFIER__GROUP_MULTIPLICITY;
case BvrPackage.VNODE__OWNED_CONSTRAINT: return BvrPackage.VCLASSIFIER__OWNED_CONSTRAINT;
case BvrPackage.VNODE__VARIABLE: return BvrPackage.VCLASSIFIER__VARIABLE;
default: return -1;
}
}
if (baseClass == CompoundNode.class) {
switch (baseFeatureID) {
case BvrPackage.COMPOUND_NODE__MEMBER: return BvrPackage.VCLASSIFIER__MEMBER;
case BvrPackage.COMPOUND_NODE__OWNED_TARGETS: return BvrPackage.VCLASSIFIER__OWNED_TARGETS;
default: return -1;
}
}
return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass);
}
} //VClassifierImpl
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.repl.util;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hdfs.protocol.SnapshotException;
import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.common.repl.ReplConst;
import org.apache.hadoop.hive.common.repl.ReplScope;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.utils.StringUtils;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.table.misc.properties.AlterTableSetPropertiesDesc;
import org.apache.hadoop.hive.ql.ddl.table.partition.PartitionUtils;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.repl.ReplAck;
import org.apache.hadoop.hive.ql.exec.repl.ReplStateLogWork;
import org.apache.hadoop.hive.ql.exec.util.DAGTraversal;
import org.apache.hadoop.hive.ql.exec.util.Retryable;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.EximUtil;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.repl.ReplLogger;
import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
import org.apache.hadoop.hive.ql.parse.repl.dump.metric.BootstrapDumpMetricCollector;
import org.apache.hadoop.hive.ql.parse.repl.dump.metric.IncrementalDumpMetricCollector;
import org.apache.hadoop.hive.ql.parse.repl.load.metric.BootstrapLoadMetricCollector;
import org.apache.hadoop.hive.ql.parse.repl.load.metric.IncrementalLoadMetricCollector;
import org.apache.hadoop.hive.ql.parse.repl.metric.ReplicationMetricCollector;
import org.apache.hadoop.hive.ql.parse.repl.metric.event.Status;
import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ImportTableDesc;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.ql.parse.repl.load.UpdatedMetaDataTracker;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Base64;
import static org.apache.hadoop.hive.conf.Constants.SCHEDULED_QUERY_EXECUTIONID;
import static org.apache.hadoop.hive.conf.Constants.SCHEDULED_QUERY_SCHEDULENAME;
import static org.apache.hadoop.hive.ql.exec.repl.ReplAck.NON_RECOVERABLE_MARKER;
public class ReplUtils {
// Table/DB property recording the last replicated event id on the target.
public static final String LAST_REPL_ID_KEY = "hive.repl.last.repl.id";
// Checkpoint property written on replicated objects; alias of the shared ReplConst key.
public static final String REPL_CHECKPOINT_KEY = ReplConst.REPL_TARGET_DB_PROPERTY;
// Flag on a DB/table meaning the first incremental load has not completed yet.
public static final String REPL_FIRST_INC_PENDING_FLAG = "hive.repl.first.inc.pending";
// write id allocated in the current execution context which will be passed through config to be used by different
// tasks.
public static final String REPL_CURRENT_TBL_WRITE_ID = "hive.repl.current.table.write.id";
// Flags indicating the target database uses custom (managed) locations.
public static final String REPL_IS_CUSTOM_DB_LOC = "hive.repl.is.custom.db.loc";
public static final String REPL_IS_CUSTOM_DB_MANAGEDLOC = "hive.repl.is.custom.db.managedloc";
// Dump sub-directories for functions and constraints metadata.
public static final String FUNCTIONS_ROOT_DIR_NAME = "_functions";
public static final String CONSTRAINTS_ROOT_DIR_NAME = "_constraints";
// Root directory for dumping bootstrapped tables along with incremental events dump.
public static final String INC_BOOTSTRAP_ROOT_DIR_NAME = "_bootstrap";
// Root base directory name for hive.
public static final String REPL_HIVE_BASE_DIR = "hive";
// Root base directory name for ranger.
public static final String REPL_RANGER_BASE_DIR = "ranger";
// Root base directory name for atlas.
public static final String REPL_ATLAS_BASE_DIR = "atlas";
// Atlas meta data export file.
public static final String REPL_ATLAS_EXPORT_FILE_NAME = "atlas_export.zip";
// Config for hadoop default file system.
public static final String DEFAULT_FS_CONFIG = "fs.defaultFS";
// Name of the directory which stores the list of tables included in the policy in case of table level replication.
// One file per database, named after the db name. The directory is not created for db level replication.
public static final String REPL_TABLE_LIST_DIR_NAME = "_tables";
// Configuration to enable/disable dumping ACID tables. Used only for testing and shouldn't be
// seen in production or in case of tests other than the ones where it's required.
public static final String REPL_DUMP_INCLUDE_ACID_TABLES = "hive.repl.dump.include.acid.tables";
// HDFS Config to define the maximum number of items a directory may contain.
public static final String DFS_MAX_DIR_ITEMS_CONFIG = "dfs.namenode.fs-limits.max-directory-items";
// Reserved number of items to accommodate operational files in the dump root dir.
public static final int RESERVED_DIR_ITEMS_COUNT = 10;
// Ranger authorizer integration: policy file name and plugin configuration keys.
public static final String RANGER_AUTHORIZER = "ranger";
public static final String HIVE_RANGER_POLICIES_FILE_NAME = "ranger_policies.json";
public static final String RANGER_REST_URL = "ranger.plugin.hive.policy.rest.url";
public static final String RANGER_HIVE_SERVICE_NAME = "ranger.plugin.hive.service.name";
public static final String RANGER_CONFIGURATION_RESOURCE_NAME = "ranger-hive-security.xml";
// Service name for hive.
public static final String REPL_HIVE_SERVICE = "hive";
// Service name for ranger.
public static final String REPL_RANGER_SERVICE = "ranger";
// Service name for atlas.
public static final String REPL_ATLAS_SERVICE = "atlas";
/**
 * Bootstrap REPL LOAD operation type on the examined object based on ckpt state.
 */
public enum ReplLoadOpType {
LOAD_NEW, LOAD_SKIP, LOAD_REPLACE
}
/**
 * Replication Metrics.
 */
public enum MetricName {
TABLES, FUNCTIONS, EVENTS, POLICIES, ENTITIES
}
// Config key carrying the DistCp job id, and its value when unavailable.
public static final String DISTCP_JOB_ID_CONF = "distcp.job.id";
public static final String DISTCP_JOB_ID_CONF_DEFAULT = "UNAVAILABLE";
private static transient Logger LOG = LoggerFactory.getLogger(ReplUtils.class);
/**
 * Converts a list of partition key-value maps into partition filter expressions.
 * Each map becomes a single AND-combined equality predicate; all expressions are
 * stored under one key, the number of key-value pairs of the first partition
 * (all listed partitions are expected to have the same number of key-vals).
 */
public static Map<Integer, List<ExprNodeGenericFuncDesc>> genPartSpecs(
    Table table, List<Map<String, String>> partitions) throws SemanticException {
  Map<Integer, List<ExprNodeGenericFuncDesc>> partSpecs = new HashMap<>();
  int partPrefixLength = partitions.isEmpty() ? 0 : partitions.get(0).size();
  List<ExprNodeGenericFuncDesc> partitionDesc = new ArrayList<>();
  for (Map<String, String> partKeyVals : partitions) {
    // Build (col1 = val1) AND (col2 = val2) AND ... for this partition.
    ExprNodeGenericFuncDesc combined = null;
    for (Map.Entry<String, String> entry : partKeyVals.entrySet()) {
      String key = entry.getKey();
      Object val = entry.getValue();
      // Column type comes from the table's partition-column metadata.
      PrimitiveTypeInfo pti = TypeInfoFactory.getPrimitiveTypeInfo(table.getPartColByName(key).getType());
      ExprNodeColumnDesc column = new ExprNodeColumnDesc(pti, key, null, true);
      ExprNodeGenericFuncDesc equality = PartitionUtils.makeBinaryPredicate(
          "=", column, new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, val));
      combined = (combined == null) ? equality : PartitionUtils.makeBinaryPredicate("and", combined, equality);
    }
    if (combined != null) {
      partitionDesc.add(combined);
    }
  }
  if (!partitionDesc.isEmpty()) {
    partSpecs.put(partPrefixLength, partitionDesc);
  }
  return partSpecs;
}
/**
 * Builds a task that logs replication state for the given table.
 * External tables are always logged as EXTERNAL_TABLE regardless of the descriptor's type.
 */
public static Task<?> getTableReplLogTask(ImportTableDesc tableDesc, ReplLogger replLogger, HiveConf conf,
                                          ReplicationMetricCollector metricCollector)
    throws SemanticException {
  TableType loggedType = tableDesc.isExternal() ? TableType.EXTERNAL_TABLE : tableDesc.tableType();
  ReplStateLogWork logWork =
      new ReplStateLogWork(replLogger, metricCollector, tableDesc.getTableName(), loggedType);
  return TaskFactory.get(logWork, conf);
}
/**
 * Variant of {@link #getTableReplLogTask} that also records the dump root directory
 * in the replication state log work.
 */
public static Task<?> getTableReplLogTask(ImportTableDesc tableDesc, ReplLogger replLogger, HiveConf conf,
                                          ReplicationMetricCollector metricCollector,
                                          String dumpRoot)
    throws SemanticException {
  TableType loggedType = tableDesc.isExternal() ? TableType.EXTERNAL_TABLE : tableDesc.tableType();
  ReplStateLogWork logWork =
      new ReplStateLogWork(replLogger, metricCollector, tableDesc.getTableName(), loggedType, dumpRoot);
  return TaskFactory.get(logWork, conf);
}
/**
 * Builds an ALTER TABLE task that stamps the replication checkpoint property
 * (dump root path) on the table or, when partSpec is given, on the partition.
 */
public static Task<?> getTableCheckpointTask(ImportTableDesc tableDesc, HashMap<String, String> partSpec,
                                             String dumpRoot, HiveConf conf) throws SemanticException {
  HashMap<String, String> checkpointProps = new HashMap<>();
  checkpointProps.put(REPL_CHECKPOINT_KEY, dumpRoot);
  final TableName tName = TableName.fromString(tableDesc.getTableName(), null, tableDesc.getDatabaseName());
  AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(tName, partSpec, null, false,
      checkpointProps, false, false, null);
  return TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterTblDesc), conf);
}
/**
 * Metrics-aware variant of {@link #getTableCheckpointTask}: the DDL work is tagged
 * as a replication operation and reports to the given metric collector. The dump
 * root's parent directory is recorded as the replication staging location.
 */
public static Task<?> getTableCheckpointTask(ImportTableDesc tableDesc, HashMap<String, String> partSpec,
                                             String dumpRoot, ReplicationMetricCollector metricCollector,
                                             HiveConf conf) throws SemanticException {
  HashMap<String, String> checkpointProps = new HashMap<>();
  checkpointProps.put(REPL_CHECKPOINT_KEY, dumpRoot);
  final TableName tName = TableName.fromString(tableDesc.getTableName(), null, tableDesc.getDatabaseName());
  AlterTableSetPropertiesDesc alterTblDesc = new AlterTableSetPropertiesDesc(tName, partSpec, null, false,
      checkpointProps, false, false, null);
  DDLWork ddlWork = new DDLWork(new HashSet<>(), new HashSet<>(), alterTblDesc,
      true, (new Path(dumpRoot)).getParent().toString(), metricCollector);
  return TaskFactory.get(ddlWork, conf);
}
/**
 * Returns true when the object's checkpoint property matches the given dump root
 * (bootstrap already done from this dump). Returns false when the property is
 * absent or empty (bootstrap never run). Throws when a different, conflicting
 * dump root was checkpointed.
 */
public static boolean replCkptStatus(String dbName, Map<String, String> props, String dumpRoot)
    throws InvalidOperationException {
  // If ckpt property not set or empty means, bootstrap is not run on this object.
  if (props == null) {
    return false;
  }
  String checkpointed = props.get(REPL_CHECKPOINT_KEY);
  if (checkpointed == null || checkpointed.isEmpty()) {
    return false;
  }
  if (checkpointed.equals(dumpRoot)) {
    return true;
  }
  throw new InvalidOperationException(
      ErrorMsg.REPL_BOOTSTRAP_LOAD_PATH_NOT_VALID.format(dumpRoot, checkpointed));
}
/**
 * Reads a config value that is required to be non-empty; throws a SemanticException
 * built from the given message format when it is missing or empty.
 */
public static String getNonEmpty(String configParam, HiveConf hiveConf, String errorMsgFormat)
    throws SemanticException {
  String value = hiveConf.get(configParam);
  if (!StringUtils.isEmpty(value)) {
    return value;
  }
  throw new SemanticException(ErrorMsg.REPL_INVALID_CONFIG_FOR_SERVICE.format(
      String.format(errorMsgFormat, configParam), ReplUtils.REPL_ATLAS_SERVICE));
}
/**
 * Wraps a single task in a new mutable list (callers may append further tasks).
 */
public static List<Task<?>> addChildTask(Task<?> childTask) {
  List<Task<?>> tasks = new ArrayList<>();
  tasks.add(childTask);
  return tasks;
}
/**
 * Builds the task list for loading column statistics under the given write id.
 * NOTE(review): updatedMetadata and tableObj are unreferenced in this body —
 * presumably kept for signature compatibility with callers; confirm before removing.
 */
public static List<Task<?>> addTasksForLoadingColStats(ColumnStatistics colStats,
                                                       HiveConf conf,
                                                       UpdatedMetaDataTracker updatedMetadata,
                                                       org.apache.hadoop.hive.metastore.api.Table tableObj,
                                                       long writeId)
    throws IOException, TException {
  ColumnStatsUpdateWork statsWork = new ColumnStatsUpdateWork(colStats);
  statsWork.setWriteId(writeId);
  List<Task<?>> tasks = new ArrayList<>();
  tasks.add(TaskFactory.get(statsWork, conf));
  return tasks;
}
/**
 * Metrics-aware variant: the stats-update work carries the non-recoverable marker
 * path and the metric collector so failures can be classified and reported.
 * NOTE(review): updatedMetadata and tableObj are unreferenced in this body —
 * presumably kept for signature compatibility with callers; confirm before removing.
 */
public static List<Task<?>> addTasksForLoadingColStats(ColumnStatistics colStats,
                                                       HiveConf conf,
                                                       UpdatedMetaDataTracker updatedMetadata,
                                                       org.apache.hadoop.hive.metastore.api.Table tableObj,
                                                       long writeId,
                                                       String nonRecoverableMarkPath,
                                                       ReplicationMetricCollector metricCollector)
    throws IOException, TException {
  ColumnStatsUpdateWork statsWork =
      new ColumnStatsUpdateWork(colStats, nonRecoverableMarkPath, metricCollector, true);
  statsWork.setWriteId(writeId);
  List<Task<?>> tasks = new ArrayList<>();
  tasks.add(TaskFactory.get(statsWork, conf));
  return tasks;
}
// Path filters to filter only events (directories) excluding "_bootstrap"
public static PathFilter getEventsDirectoryFilter(final FileSystem fs) {
return p -> {
try {
return fs.isDirectory(p) && !p.getName().equalsIgnoreCase(ReplUtils.INC_BOOTSTRAP_ROOT_DIR_NAME)
&& !p.getName().equalsIgnoreCase(ReplUtils.REPL_TABLE_LIST_DIR_NAME)
&& !p.getName().equalsIgnoreCase(EximUtil.METADATA_PATH_NAME);
} catch (IOException e) {
throw new RuntimeException(e);
}
};
}
// Filter for bootstrap dump directories: directories only, excluding the
// table-list directory and the metadata directory.
public static PathFilter getBootstrapDirectoryFilter(final FileSystem fs) {
  return path -> {
    try {
      if (!fs.isDirectory(path)) {
        return false;
      }
      String dirName = path.getName();
      return !dirName.equalsIgnoreCase(ReplUtils.REPL_TABLE_LIST_DIR_NAME)
          && !dirName.equalsIgnoreCase(EximUtil.METADATA_PATH_NAME);
    } catch (IOException e) {
      // PathFilter cannot declare checked exceptions; wrap and rethrow.
      throw new RuntimeException(e);
    }
  };
}
/**
 * Maps a failure to its error code and, for replication, reports the stage end.
 * Error codes above GENERIC_ERROR are treated as non-recoverable: a marker file
 * with the stack trace is written under nonRecoverablePath and the stage is
 * reported as FAILED_ADMIN; otherwise the stage is reported as FAILED.
 * Metric-reporting failures are logged and never override the returned code.
 */
public static int handleException(boolean isReplication, Throwable e, String nonRecoverablePath,
    ReplicationMetricCollector metricCollector, String stageName, HiveConf conf) {
  int errorCode = (isReplication && e instanceof SnapshotException)
      ? ErrorMsg.getErrorMsg("SNAPSHOT_ERROR").getErrorCode()
      : ErrorMsg.getErrorMsg(e.getMessage()).getErrorCode();
  if (isReplication) {
    try {
      if (nonRecoverablePath != null) {
        final int recoverableLimit = ErrorMsg.GENERIC_ERROR.getErrorCode();
        String metricStage = getMetricStageName(stageName, metricCollector);
        if (errorCode > recoverableLimit) {
          Path nonRecoverableMarker =
              new Path(new Path(nonRecoverablePath), ReplAck.NON_RECOVERABLE_MARKER.toString());
          Utils.writeStackTrace(e, nonRecoverableMarker, conf);
          metricCollector.reportStageEnd(metricStage, Status.FAILED_ADMIN, nonRecoverableMarker.toString());
        } else {
          metricCollector.reportStageEnd(metricStage, Status.FAILED);
        }
      }
    } catch (Exception ex) {
      LOG.error("Failed to collect Metrics ", ex);
    }
  }
  return errorCode;
}
/**
 * Returns the stage name if it is one of the known replication stages; otherwise
 * infers REPL_DUMP or REPL_LOAD from the metric collector's type.
 * Fixed: the original compared strings with {@code ==}, which only works for
 * interned literals; use null-safe {@code equals} instead.
 */
private static String getMetricStageName(String stageName, ReplicationMetricCollector metricCollector) {
  if ("REPL_DUMP".equals(stageName) || "REPL_LOAD".equals(stageName)
      || "ATLAS_DUMP".equals(stageName) || "ATLAS_LOAD".equals(stageName)
      || "RANGER_DUMP".equals(stageName) || "RANGER_LOAD".equals(stageName)) {
    return stageName;
  }
  // Unknown stage: classify by collector type.
  return isDumpMetricCollector(metricCollector) ? "REPL_DUMP" : "REPL_LOAD";
}
// True when the collector belongs to the dump side (bootstrap or incremental).
private static boolean isDumpMetricCollector(ReplicationMetricCollector metricCollector) {
return metricCollector instanceof BootstrapDumpMetricCollector ||
        metricCollector instanceof IncrementalDumpMetricCollector;
}
// True when the collector belongs to the load side (bootstrap or incremental).
// NOTE(review): not referenced within this chunk; presumably used elsewhere in the file.
private static boolean isLoadMetricCollector(ReplicationMetricCollector metricCollector) {
return metricCollector instanceof BootstrapLoadMetricCollector ||
        metricCollector instanceof IncrementalLoadMetricCollector;
}
/**
 * True when the first-incremental-pending flag is explicitly set to "true".
 * If the flag is absent (e.g. the object was created by a user, not by
 * replication), the first incremental load is assumed to be done.
 */
public static boolean isFirstIncPending(Map<String, String> parameters) {
  if (parameters == null) {
    return false;
  }
  // equalsIgnoreCase is null-safe for its argument, so the explicit
  // null/empty checks on the flag value are unnecessary.
  return "true".equalsIgnoreCase(parameters.get(ReplUtils.REPL_FIRST_INC_PENDING_FLAG));
}
/**
 * Sets the "data location changed" replication flag on the given environment
 * context, creating a fresh context when none is supplied.
 */
public static EnvironmentContext setReplDataLocationChangedFlag(EnvironmentContext envContext) {
  EnvironmentContext context = (envContext == null) ? new EnvironmentContext() : envContext;
  context.putToProperties(ReplConst.REPL_DATA_LOCATION_CHANGED, ReplConst.TRUE);
  return context;
}
// Only for testing, we do not include ACID tables in the dump (and replicate) if config says so.
public static boolean includeAcidTableInDump(HiveConf conf) {
if (conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST_REPL)) {
return conf.getBoolean(REPL_DUMP_INCLUDE_ACID_TABLES, true);
}
return true;
}
public static boolean tableIncludedInReplScope(ReplScope replScope, String tableName) {
return ((replScope == null) || replScope.tableIncludedInReplScope(tableName));
}
public static boolean failedWithNonRecoverableError(Path dumpRoot, HiveConf conf) throws SemanticException {
if (dumpRoot == null) {
return false;
}
Retryable retryable = Retryable.builder()
.withHiveConf(conf)
.withRetryOnException(IOException.class).build();
try {
return retryable.executeCallable(() -> {
FileSystem fs = dumpRoot.getFileSystem(conf);
if (fs.exists(new Path(dumpRoot, NON_RECOVERABLE_MARKER.toString()))) {
return true;
}
return false;
});
} catch (Exception e) {
throw new SemanticException(e);
}
}
public static Path getEncodedDumpRootPath(HiveConf conf, String dbname) throws UnsupportedEncodingException {
return new Path(conf.getVar(HiveConf.ConfVars.REPLDIR),
Base64.getEncoder().encodeToString(dbname
.getBytes(StandardCharsets.UTF_8.name())));
}
public static Path getLatestDumpPath(Path dumpRoot, HiveConf conf) throws IOException {
FileSystem fs = dumpRoot.getFileSystem(conf);
if (fs.exists(dumpRoot)) {
FileStatus[] statuses = fs.listStatus(dumpRoot);
if (statuses.length > 0) {
FileStatus latestValidStatus = statuses[0];
for (FileStatus status : statuses) {
LOG.info("Evaluating previous dump dir path:{}", status.getPath());
if (status.getModificationTime() > latestValidStatus.getModificationTime()) {
latestValidStatus = status;
}
}
return latestValidStatus.getPath();
}
}
return null;
}
public static String getDistCpCustomName(HiveConf conf, String dbName) {
String userChosenName = conf.get(JobContext.JOB_NAME);
if (StringUtils.isEmpty(userChosenName)) {
String policyName = conf.get(SCHEDULED_QUERY_SCHEDULENAME, "");
if (policyName.isEmpty()) {
userChosenName = "Repl#" + dbName;
} else {
String executionId = conf.get(SCHEDULED_QUERY_EXECUTIONID, "");
userChosenName = "Repl#" + policyName + "#" + executionId + "#" + dbName;
}
LOG.info("Using {} as job name for map-reduce jobs.", userChosenName);
} else {
LOG.info("Job Name is explicitly configured as {}, not using " + "replication job custom name.", userChosenName);
}
return userChosenName;
}
/**
* Convert to a human time of minutes:seconds.millis.
* @param time time to humanize.
* @return a printable value.
*/
public static String convertToHumanReadableTime(long time) {
long seconds = (time / 1000);
long minutes = (seconds / 60);
return String.format("%d:%02d.%03ds", minutes, seconds % 60, time % 1000);
}
/**
* Adds a logger task at the end of the tasks passed.
*/
public static void addLoggerTask(ReplLogger replLogger, List<Task<?>> tasks, HiveConf conf) {
String message = "Completed all external table copy tasks.";
ReplStateLogWork replStateLogWork = new ReplStateLogWork(replLogger, message);
Task<ReplStateLogWork> task = TaskFactory.get(replStateLogWork, conf);
if (tasks.isEmpty()) {
tasks.add(task);
} else {
DAGTraversal.traverse(tasks, new AddDependencyToLeaves(Collections.singletonList(task)));
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.javadocexport;
import java.io.File;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.StringTokenizer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CLabel;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.viewers.CheckStateChangedEvent;
import org.eclipse.jface.viewers.ICheckStateListener;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.ui.PlatformUI;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.JavaConventions;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.launching.JavaRuntime;
import org.eclipse.jdt.ui.JavaElementComparator;
import org.eclipse.jdt.ui.JavaElementLabelProvider;
import org.eclipse.jdt.internal.ui.IJavaHelpContextIds;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.dialogs.StatusInfo;
import org.eclipse.jdt.internal.ui.dialogs.StatusUtil;
import org.eclipse.jdt.internal.ui.jarpackager.CheckboxTreeAndListGroup;
import org.eclipse.jdt.internal.ui.util.SWTUtil;
/**
 * First page of the Javadoc export wizard. Lets the user choose the Java elements to
 * document (via a checkbox tree/list), the javadoc command to run, the minimum member
 * visibility, and either the standard doclet (with a destination folder) or a custom
 * doclet (class name + doclet path). All selections are read from and written back to a
 * {@link JavadocOptionsManager}.
 */
public class JavadocTreeWizardPage extends JavadocWizardPage {
	// Tree (projects/packages) + list (compilation units) group for the input selection.
	private CheckboxTreeAndListGroup fInputGroup;
	private Text fDestinationText;
	private Combo fJavadocCommandText;
	private Text fDocletText;
	private Text fDocletTypeText;
	private Button fStandardButton;
	private Button fDestinationBrowserButton;
	private Button fCustomButton;
	private Button fPrivateVisibility;
	private Button fProtectedVisibility;
	private Button fPackageVisibility;
	private Button fPublicVisibility;
	private Label fDocletLabel;
	private Label fDocletTypeLabel;
	private Label fDestinationLabel;
	private CLabel fDescriptionLabel;
	// One of fStore.PRIVATE/PACKAGE/PROTECTED/PUBLIC; mirrors the selected radio button.
	private String fVisibilitySelection;
	private JavadocOptionsManager fStore;
	// One status object per validation aspect; the most severe of these becomes the page status.
	private StatusInfo fJavadocStatus;
	private StatusInfo fDestinationStatus;
	private StatusInfo fDocletStatus;
	private StatusInfo fTreeStatus;
	private StatusInfo fPreferenceStatus;
	private StatusInfo fWizardStatus;
	// Selectors for doValidation(): which aspect of the page to re-validate.
	private final int PREFERENCESTATUS= 0;
	private final int CUSTOMSTATUS= 1;
	private final int STANDARDSTATUS= 2;
	private final int TREESTATUS= 3;
	private final int JAVADOCSTATUS= 4;
	/**
	 * Constructor for JavadocTreeWizardPage.
	 * @param pageName
	 * @param store
	 */
	protected JavadocTreeWizardPage(String pageName, JavadocOptionsManager store) {
		super(pageName);
		setDescription(JavadocExportMessages.JavadocTreeWizardPage_javadoctreewizardpage_description);
		fStore= store;
		// Status variables
		fJavadocStatus= new StatusInfo();
		fDestinationStatus= new StatusInfo();
		fDocletStatus= new StatusInfo();
		fTreeStatus= new StatusInfo();
		fPreferenceStatus= new StatusInfo();
		fWizardStatus= store.getWizardStatus();
	}
	/*
	 * @see IDialogPage#createControl(Composite)
	 */
	public void createControl(Composite parent) {
		initializeDialogUnits(parent);
		final Composite composite= new Composite(parent, SWT.NONE);
		final GridLayout layout= new GridLayout();
		layout.numColumns= 6;
		composite.setLayout(layout);
		// Build the four sections of the page top to bottom.
		createJavadocCommandSet(composite);
		createInputGroup(composite);
		createVisibilitySet(composite);
		createOptionsSet(composite);
		setControl(composite);
		Dialog.applyDialogFont(composite);
		PlatformUI.getWorkbench().getHelpSystem().setHelp(composite, IJavaHelpContextIds.JAVADOC_TREE_PAGE);
	}
	/**
	 * Creates the "javadoc command" combo (with history) and its browse button.
	 * Any edit re-validates the command path (JAVADOCSTATUS).
	 */
	protected void createJavadocCommandSet(Composite composite) {
		final int numColumns= 2;
		GridLayout layout= createGridLayout(numColumns);
		layout.marginHeight= 0;
		layout.marginWidth= 0;
		Composite group = new Composite(composite, SWT.NONE);
		group.setLayoutData(createGridData(GridData.FILL_HORIZONTAL, 6, 0));
		group.setLayout(layout);
		createLabel(group, SWT.NONE, JavadocExportMessages.JavadocTreeWizardPage_javadoccommand_label, createGridData(GridData.HORIZONTAL_ALIGN_BEGINNING, numColumns, 0));
		fJavadocCommandText= createCombo(group, SWT.NONE, null, createGridData(GridData.FILL_HORIZONTAL, numColumns - 1, 0));
		fJavadocCommandText.addModifyListener(new ModifyListener() {
			public void modifyText(ModifyEvent e) {
				doValidation(JAVADOCSTATUS);
			}
		});
		final Button javadocCommandBrowserButton= createButton(group, SWT.PUSH, JavadocExportMessages.JavadocTreeWizardPage_javadoccommand_button_label, createGridData(GridData.HORIZONTAL_ALIGN_FILL, 1, 0));
		SWTUtil.setButtonDimensionHint(javadocCommandBrowserButton);
		javadocCommandBrowserButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent event) {
				browseForJavadocCommand();
			}
		});
	}
	/**
	 * Creates the checkbox tree/list group showing the selectable Java elements and
	 * seeds it with the store's initial selection. Check changes re-validate TREESTATUS.
	 */
	protected void createInputGroup(Composite composite) {
		createLabel(composite, SWT.NONE, JavadocExportMessages.JavadocTreeWizardPage_checkboxtreeandlistgroup_label, createGridData(6));
		Composite c= new Composite(composite, SWT.NONE);
		GridLayout layout= new GridLayout();
		layout.numColumns= 1;
		layout.makeColumnsEqualWidth= true;
		layout.marginWidth= 0;
		layout.marginHeight= 0;
		c.setLayout(layout);
		c.setLayoutData(createGridData(GridData.FILL_BOTH, 6, 0));
		ITreeContentProvider treeContentProvider= new JavadocProjectContentProvider();
		ITreeContentProvider listContentProvider= new JavadocMemberContentProvider();
		fInputGroup= new CheckboxTreeAndListGroup(c, this, treeContentProvider, new JavaElementLabelProvider(JavaElementLabelProvider.SHOW_DEFAULT), listContentProvider, new JavaElementLabelProvider(JavaElementLabelProvider.SHOW_DEFAULT), SWT.NONE, convertWidthInCharsToPixels(60), convertHeightInCharsToPixels(7));
		fInputGroup.addCheckStateListener(new ICheckStateListener() {
			public void checkStateChanged(CheckStateChangedEvent e) {
				doValidation(TREESTATUS);
			}
		});
		fInputGroup.setTreeComparator(new JavaElementComparator());
		SWTUtil.setAccessibilityText(fInputGroup.getTree(), JavadocExportMessages.JavadocTreeWizardPage_tree_accessibility_message);
		SWTUtil.setAccessibilityText(fInputGroup.getTable(), JavadocExportMessages.JavadocTreeWizardPage_table_accessibility_message);
		IJavaElement[] elements= fStore.getInitialElements();
		setTreeChecked(elements);
		if (elements.length > 0) {
			// Reveal the project of the first initially selected element.
			fInputGroup.setTreeSelection(new StructuredSelection(elements[0].getJavaProject()));
		}
		fInputGroup.aboutToOpen();
	}
	/**
	 * Creates the four visibility radio buttons (private/package/protected/public) plus
	 * the description label, and wires each button to update fVisibilitySelection.
	 */
	private void createVisibilitySet(Composite composite) {
		GridLayout visibilityLayout= createGridLayout(4);
		visibilityLayout.marginHeight= 0;
		visibilityLayout.marginWidth= 0;
		Composite visibilityGroup= new Composite(composite, SWT.NONE);
		visibilityGroup.setLayoutData(createGridData(GridData.FILL_HORIZONTAL, 6, 0));
		visibilityGroup.setLayout(visibilityLayout);
		createLabel(visibilityGroup, SWT.NONE, JavadocExportMessages.JavadocTreeWizardPage_visibilitygroup_label, createGridData(GridData.FILL_HORIZONTAL, 4, 0));
		fPrivateVisibility= createButton(visibilityGroup, SWT.RADIO, JavadocExportMessages.JavadocTreeWizardPage_privatebutton_label, createGridData(GridData.FILL_HORIZONTAL, 1, 0));
		fPackageVisibility= createButton(visibilityGroup, SWT.RADIO, JavadocExportMessages.JavadocTreeWizardPage_packagebutton_label, createGridData(GridData.FILL_HORIZONTAL, 1, 0));
		fProtectedVisibility= createButton(visibilityGroup, SWT.RADIO, JavadocExportMessages.JavadocTreeWizardPage_protectedbutton_label, createGridData(GridData.FILL_HORIZONTAL, 1, 0));
		fPublicVisibility= createButton(visibilityGroup, SWT.RADIO, JavadocExportMessages.JavadocTreeWizardPage_publicbutton_label, createGridData(GridData.FILL_HORIZONTAL, 1, 0));
		fDescriptionLabel= new CLabel(visibilityGroup, SWT.LEFT);
		fDescriptionLabel.setLayoutData(createGridData(GridData.FILL_HORIZONTAL, 4, convertWidthInCharsToPixels(3) - 3)); // INDENT of CLabel
		fPrivateVisibility.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				if (((Button) e.widget).getSelection()) {
					fVisibilitySelection= fStore.PRIVATE;
					fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_privatevisibilitydescription_label);
				}
			}
		});
		fPackageVisibility.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				if (((Button) e.widget).getSelection()) {
					fVisibilitySelection= fStore.PACKAGE;
					fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_packagevisibledescription_label);
				}
			}
		});
		fProtectedVisibility.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				if (((Button) e.widget).getSelection()) {
					fVisibilitySelection= fStore.PROTECTED;
					fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_protectedvisibilitydescription_label);
				}
			}
		});
		fPublicVisibility.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				if (((Button) e.widget).getSelection()) {
					fVisibilitySelection= fStore.PUBLIC;
					fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_publicvisibilitydescription_label);
				}
			}
		});
		setVisibilitySettings();
	}
	/**
	 * Initializes the visibility radio buttons and the description label from the
	 * access level stored in fStore.
	 */
	protected void setVisibilitySettings() {
		fVisibilitySelection= fStore.getAccess();
		fPrivateVisibility.setSelection(fVisibilitySelection.equals(fStore.PRIVATE));
		if (fPrivateVisibility.getSelection())
			fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_privatevisibilitydescription_label);
		fProtectedVisibility.setSelection(fVisibilitySelection.equals(fStore.PROTECTED));
		if (fProtectedVisibility.getSelection())
			fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_protectedvisibilitydescription_label);
		fPackageVisibility.setSelection(fVisibilitySelection.equals(fStore.PACKAGE));
		if (fPackageVisibility.getSelection())
			fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_packagevisibledescription_label);
		fPublicVisibility.setSelection(fVisibilitySelection.equals(fStore.PUBLIC));
		if (fPublicVisibility.getSelection())
			fDescriptionLabel.setText(JavadocExportMessages.JavadocTreeWizardPage_publicvisibilitydescription_label);
	}
	/**
	 * Creates the mutually exclusive "standard doclet" (destination folder) and
	 * "custom doclet" (doclet name + path) option groups. Selecting one radio enables
	 * its controls and disables the other group's (EnableSelectionAdapter).
	 */
	private void createOptionsSet(Composite composite) {
		final int numColumns= 4;
		final GridLayout layout= createGridLayout(numColumns);
		layout.marginHeight= 0;
		layout.marginWidth= 0;
		Composite group= new Composite(composite, SWT.NONE);
		group.setLayoutData(createGridData(GridData.FILL_HORIZONTAL, 6, 0));
		group.setLayout(layout);
		fStandardButton= createButton(group, SWT.RADIO, JavadocExportMessages.JavadocTreeWizardPage_standarddocletbutton_label, createGridData(GridData.HORIZONTAL_ALIGN_FILL, numColumns, 0));
		fDestinationLabel= createLabel(group, SWT.NONE, JavadocExportMessages.JavadocTreeWizardPage_destinationfield_label, createGridData(GridData.HORIZONTAL_ALIGN_FILL, 1, convertWidthInCharsToPixels(3)));
		fDestinationText= createText(group, SWT.SINGLE | SWT.BORDER, null, createGridData(GridData.FILL_HORIZONTAL, numColumns - 2, 0));
		((GridData) fDestinationText.getLayoutData()).widthHint= 0;
		fDestinationText.addModifyListener(new ModifyListener() {
			public void modifyText(ModifyEvent e) {
				doValidation(STANDARDSTATUS);
			}
		});
		fDestinationBrowserButton= createButton(group, SWT.PUSH, JavadocExportMessages.JavadocTreeWizardPage_destinationbrowse_label, createGridData(GridData.HORIZONTAL_ALIGN_END, 1, 0));
		SWTUtil.setButtonDimensionHint(fDestinationBrowserButton);
		//Option to use custom doclet
		fCustomButton= createButton(group, SWT.RADIO, JavadocExportMessages.JavadocTreeWizardPage_customdocletbutton_label, createGridData(GridData.HORIZONTAL_ALIGN_FILL, numColumns, 0));
		//For Entering location of custom doclet
		fDocletTypeLabel= createLabel(group, SWT.NONE, JavadocExportMessages.JavadocTreeWizardPage_docletnamefield_label, createGridData(GridData.HORIZONTAL_ALIGN_BEGINNING, 1, convertWidthInCharsToPixels(3)));
		fDocletTypeText= createText(group, SWT.SINGLE | SWT.BORDER, null, createGridData(GridData.HORIZONTAL_ALIGN_FILL, numColumns - 1, 0));
		((GridData) fDocletTypeText.getLayoutData()).widthHint= 0;
		fDocletTypeText.addModifyListener(new ModifyListener() {
			public void modifyText(ModifyEvent e) {
				doValidation(CUSTOMSTATUS);
			}
		});
		fDocletLabel= createLabel(group, SWT.NONE, JavadocExportMessages.JavadocTreeWizardPage_docletpathfield_label, createGridData(GridData.HORIZONTAL_ALIGN_BEGINNING, 1, convertWidthInCharsToPixels(3)));
		fDocletText= createText(group, SWT.SINGLE | SWT.BORDER, null, createGridData(GridData.HORIZONTAL_ALIGN_FILL, numColumns - 1, 0));
		((GridData) fDocletText.getLayoutData()).widthHint= 0;
		fDocletText.addModifyListener(new ModifyListener() {
			public void modifyText(ModifyEvent e) {
				doValidation(CUSTOMSTATUS);
			}
		});
		//Add Listeners
		fCustomButton.addSelectionListener(new EnableSelectionAdapter(new Control[] { fDocletLabel, fDocletText, fDocletTypeLabel, fDocletTypeText }, new Control[] { fDestinationLabel, fDestinationText, fDestinationBrowserButton }));
		fCustomButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				doValidation(CUSTOMSTATUS);
			}
		});
		fStandardButton.addSelectionListener(new EnableSelectionAdapter(new Control[] { fDestinationLabel, fDestinationText, fDestinationBrowserButton }, new Control[] { fDocletLabel, fDocletText, fDocletTypeLabel, fDocletTypeText }));
		fStandardButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent e) {
				doValidation(STANDARDSTATUS);
			}
		});
		fDestinationBrowserButton.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent event) {
				String text= handleFolderBrowseButtonPressed(fDestinationText.getText(), JavadocExportMessages.JavadocTreeWizardPage_destinationbrowsedialog_title,
						JavadocExportMessages.JavadocTreeWizardPage_destinationbrowsedialog_label);
				fDestinationText.setText(text);
			}
		});
		setOptionSetSettings();
	}
	/** @return true when the "custom doclet" option is selected. */
	public boolean getCustom() {
		return fCustomButton.getSelection();
	}
	/**
	 * Initializes the doclet/destination option group and the javadoc command history
	 * from fStore, enabling only the controls of the active option.
	 */
	private void setOptionSetSettings() {
		if (!fStore.isFromStandard()) {
			fCustomButton.setSelection(true);
			fDocletText.setText(fStore.getDocletPath());
			fDocletTypeText.setText(fStore.getDocletName());
			fDestinationText.setText(fStore.getDestination());
			fDestinationText.setEnabled(false);
			fDestinationBrowserButton.setEnabled(false);
			fDestinationLabel.setEnabled(false);
		} else {
			fStandardButton.setSelection(true);
			fDestinationText.setText(fStore.getDestination());
			fDocletText.setText(fStore.getDocletPath());
			fDocletTypeText.setText(fStore.getDocletName());
			fDocletText.setEnabled(false);
			fDocletLabel.setEnabled(false);
			fDocletTypeText.setEnabled(false);
			fDocletTypeLabel.setEnabled(false);
		}
		fJavadocCommandText.setItems(fStore.getJavadocCommandHistory());
		fJavadocCommandText.select(0);
	}
	/**
	 * Receives of list of elements selected by the user and passes them
	 * to the CheckedTree. List can contain multiple projects and elements from
	 * different projects. If the list of seletected elements is empty a default
	 * project is selected.
	 * @param sourceElements
	 */
	private void setTreeChecked(IJavaElement[] sourceElements) {
		for (int i= 0; i < sourceElements.length; i++) {
			IJavaElement curr= sourceElements[i];
			if (curr instanceof ICompilationUnit) {
				fInputGroup.initialCheckListItem(curr);
			} else if (curr instanceof IPackageFragment) {
				fInputGroup.initialCheckTreeItem(curr);
			} else if (curr instanceof IJavaProject) {
				fInputGroup.initialCheckTreeItem(curr);
			} else if (curr instanceof IPackageFragmentRoot) {
				IPackageFragmentRoot root= (IPackageFragmentRoot) curr;
				// Archive roots (JARs) cannot be javadoc'ed; only check source roots.
				if (!root.isArchive())
					fInputGroup.initialCheckTreeItem(curr);
			}
		}
	}
	/**
	 * Collects the local filesystem locations of all source folders of the given
	 * projects (duplicates removed via the HashSet).
	 */
	private IPath[] getSourcePath(IJavaProject[] projects) {
		HashSet<IPath> res= new HashSet<IPath>();
		//loops through all projects and gets a list if of their source paths
		for (int k= 0; k < projects.length; k++) {
			IJavaProject iJavaProject= projects[k];
			try {
				IPackageFragmentRoot[] roots= iJavaProject.getPackageFragmentRoots();
				for (int i= 0; i < roots.length; i++) {
					IPackageFragmentRoot curr= roots[i];
					if (curr.getKind() == IPackageFragmentRoot.K_SOURCE) {
						IResource resource= curr.getResource();
						if (resource != null) {
							// Using get location is OK here. If the source folder
							// isn't local we can't create Javadoc for it.
							IPath p= resource.getLocation();
							if (p != null) {
								res.add(p);
							}
						}
					}
				}
			} catch (JavaModelException e) {
				JavaPlugin.log(e);
			}
		}
		return res.toArray(new IPath[res.size()]);
	}
	/**
	 * Computes the runtime classpath entries of the given projects, excluding each
	 * project's own output folder (javadoc needs the dependencies, not the class files
	 * being documented).
	 */
	private IPath[] getClassPath(IJavaProject[] javaProjects) {
		HashSet<IPath> res= new HashSet<IPath>();
		IWorkspaceRoot root= ResourcesPlugin.getWorkspace().getRoot();
		for (int j= 0; j < javaProjects.length; j++) {
			IJavaProject curr= javaProjects[j];
			try {
				IPath outputLocation= null;
				// Not really clear yet what to do here for EFS. See bug
				// https://bugs.eclipse.org/bugs/show_bug.cgi?id=113233.
				// However if the output location is not local it is currently
				// not part of JavaRuntime.computeDefaultRuntimeClassPath either
				// so it will be simply not added to the result which would be
				// correct.
				IResource outputPathFolder= root.findMember(curr.getOutputLocation());
				if (outputPathFolder != null)
					outputLocation= outputPathFolder.getLocation();
				String[] classPath= JavaRuntime.computeDefaultRuntimeClassPath(curr);
				for (int i= 0; i < classPath.length; i++) {
					IPath path= Path.fromOSString(classPath[i]);
					if (!path.equals(outputLocation)) {
						res.add(path);
					}
				}
			} catch (CoreException e) {
				JavaPlugin.log(e);
			}
		}
		return res.toArray(new IPath[res.size()]);
	}
	/**
	 * Gets a list of elements to generated javadoc for from each project.
	 * Javadoc can be generated for either a IPackageFragment or a ICompilationUnit.
	 * @param projects
	 * @return source elements
	 */
	private IJavaElement[] getSourceElements(IJavaProject[] projects) {
		ArrayList<IJavaElement> res= new ArrayList<IJavaElement>();
		try {
			Set<Object> allChecked= fInputGroup.getAllCheckedTreeItems();
			// Packages that cannot be passed as whole packages (partially checked,
			// default package, or not locally accessible); their units are added
			// individually instead.
			Set<String> incompletePackages= new HashSet<String>();
			for (int h= 0; h < projects.length; h++) {
				IJavaProject iJavaProject= projects[h];
				IPackageFragmentRoot[] roots= iJavaProject.getPackageFragmentRoots();
				for (int i= 0; i < roots.length; i++) {
					IPackageFragmentRoot root= roots[i];
					if (root.getKind() == IPackageFragmentRoot.K_SOURCE) {
						IPath rootLocation= root.getResource().getLocation();
						IJavaElement[] packs= root.getChildren();
						for (int k= 0; k < packs.length; k++) {
							IJavaElement curr= packs[k];
							if (curr.getElementType() == IJavaElement.PACKAGE_FRAGMENT) {
								// default packages are always incomplete
								if (curr.getElementName().length() == 0 || !allChecked.contains(curr)
										|| fInputGroup.isTreeItemGreyChecked(curr) || !isAccessibleLocation(curr.getResource().getLocation(), rootLocation)) {
									incompletePackages.add(curr.getElementName());
								}
							}
						}
					}
				}
			}
			// Add the individually checked compilation units of incomplete packages.
			Iterator<Object> checkedElements= fInputGroup.getAllCheckedListItems();
			while (checkedElements.hasNext()) {
				Object element= checkedElements.next();
				if (element instanceof ICompilationUnit) {
					ICompilationUnit unit= (ICompilationUnit) element;
					if (incompletePackages.contains(unit.getParent().getElementName())) {
						res.add(unit);
					}
				}
			}
			// Add fully checked packages once per package name.
			Set<String> addedPackages= new HashSet<String>();
			checkedElements= allChecked.iterator();
			while (checkedElements.hasNext()) {
				Object element= checkedElements.next();
				if (element instanceof IPackageFragment) {
					IPackageFragment fragment= (IPackageFragment) element;
					String name= fragment.getElementName();
					if (!incompletePackages.contains(name) && !addedPackages.contains(name)) {
						res.add(fragment);
						addedPackages.add(name);
					}
				}
			}
		} catch (JavaModelException e) {
			JavaPlugin.log(e);
		}
		return res.toArray(new IJavaElement[res.size()]);
	}
	/** @return true when the package location lies under its source root location. */
	private boolean isAccessibleLocation(IPath packageLocation, IPath rootLocation) {
		return rootLocation != null && packageLocation != null && rootLocation.isPrefixOf(packageLocation);
	}
	/**
	 * Writes the current page state (doclet options, paths, access level, selected
	 * elements, javadoc command history) back into fStore.
	 */
	protected void updateStore() {
		IJavaProject[] checkedProjects= getCheckedProjects();
		if (fCustomButton.getSelection()) {
			fStore.setDocletName(fDocletTypeText.getText());
			fStore.setDocletPath(fDocletText.getText());
			fStore.setFromStandard(false);
		}
		if (fStandardButton.getSelection()) {
			fStore.setFromStandard(true);
			//the destination used in javadoc generation
			fStore.setDestination(fDestinationText.getText());
		}
		fStore.setSourcepath(getSourcePath(checkedProjects));
		fStore.setClasspath(getClassPath(checkedProjects));
		fStore.setAccess(fVisibilitySelection);
		fStore.setSelectedElements(getSourceElements(checkedProjects));
		ArrayList<String> commands= new ArrayList<String>();
		commands.add(fJavadocCommandText.getText()); // must be first
		String[] items= fJavadocCommandText.getItems();
		for (int i= 0; i < items.length; i++) {
			String curr= items[i];
			if (!commands.contains(curr)) {
				commands.add(curr);
			}
		}
		fStore.setJavadocCommandHistory(commands.toArray(new String[commands.size()]));
	}
	/** @return the projects whose tree items are currently checked. */
	public IJavaProject[] getCheckedProjects() {
		ArrayList<Object> res= new ArrayList<Object>();
		TreeItem[] treeItems= fInputGroup.getTree().getItems();
		for (int i= 0; i < treeItems.length; i++) {
			if (treeItems[i].getChecked()) {
				Object curr= treeItems[i].getData();
				if (curr instanceof IJavaProject) {
					res.add(curr);
				}
			}
		}
		return res.toArray(new IJavaProject[res.size()]);
	}
	/**
	 * Re-validates one aspect of the page (selected by the *STATUS constants), updates
	 * the corresponding StatusInfo field and pushes the most severe status to the wizard.
	 */
	protected void doValidation(int validate) {
		switch (validate) {
			case PREFERENCESTATUS :
				fPreferenceStatus= new StatusInfo();
				fDocletStatus= new StatusInfo();
				updateStatus(findMostSevereStatus());
				break;
			case CUSTOMSTATUS :
				if (fCustomButton.getSelection()) {
					fDestinationStatus= new StatusInfo();
					fDocletStatus= new StatusInfo();
					String doclet= fDocletTypeText.getText();
					String docletPath= fDocletText.getText();
					if (doclet.length() == 0) {
						fDocletStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_nodocletname_error);
					} else if (JavaConventions.validateJavaTypeName(doclet, JavaCore.VERSION_1_3, JavaCore.VERSION_1_3).matches(IStatus.ERROR)) {
						fDocletStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_invaliddocletname_error);
					} else if ((docletPath.length() == 0) || !validDocletPath(docletPath)) {
						fDocletStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_invaliddocletpath_error);
					}
					updateStatus(findMostSevereStatus());
				}
				break;
			case STANDARDSTATUS :
				if (fStandardButton.getSelection()) {
					fDestinationStatus= new StatusInfo();
					fDocletStatus= new StatusInfo();
					String dest= fDestinationText.getText();
					if (dest.length() == 0) {
						fDestinationStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_nodestination_error);
					}
					File file= new File(dest);
					if (!Path.ROOT.isValidPath(dest) || file.isFile()) {
						fDestinationStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_invaliddestination_error);
					}
					// Existing javadoc output in the destination: warn, don't block.
					if (new File(dest, "package-list").exists() || new File(dest, "index.html").exists()) //$NON-NLS-1$//$NON-NLS-2$
						fDestinationStatus.setWarning(JavadocExportMessages.JavadocTreeWizardPage_warning_mayoverwritefiles);
					updateStatus(findMostSevereStatus());
				}
				break;
			case TREESTATUS :
				fTreeStatus= new StatusInfo();
				if (!fInputGroup.getAllCheckedListItems().hasNext())
					fTreeStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_invalidtreeselection_error);
				updateStatus(findMostSevereStatus());
				break;
			case JAVADOCSTATUS:
				fJavadocStatus= new StatusInfo();
				String text= fJavadocCommandText.getText();
				if (text.length() == 0) {
					fJavadocStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_javadoccmd_error_enterpath);
				} else {
					File file= new File(text);
					if (!file.isFile()) {
						fJavadocStatus.setError(JavadocExportMessages.JavadocTreeWizardPage_javadoccmd_error_notexists);
					}
				}
				updateStatus(findMostSevereStatus());
				break;
		} //end switch
	}
	/**
	 * Opens a file dialog to pick the javadoc executable and prepends the choice to the
	 * command combo's history (capped at 5 entries, duplicates removed).
	 */
	protected void browseForJavadocCommand() {
		FileDialog dialog= new FileDialog(getShell());
		dialog.setText(JavadocExportMessages.JavadocTreeWizardPage_javadoccmd_dialog_title);
		String dirName= fJavadocCommandText.getText();
		dialog.setFileName(dirName);
		String selectedDirectory= dialog.open();
		if (selectedDirectory != null) {
			ArrayList<String> newItems= new ArrayList<String>();
			String[] items= fJavadocCommandText.getItems();
			newItems.add(selectedDirectory);
			for (int i= 0; i < items.length && newItems.size() < 5; i++) { // only keep the last 5 entries
				String curr= items[i];
				if (!newItems.contains(curr)) {
					newItems.add(curr);
				}
			}
			fJavadocCommandText.setItems(newItems.toArray(new String[newItems.size()]));
			fJavadocCommandText.select(0);
		}
	}
	/**
	 * Validates a ';'-separated doclet classpath: every entry must exist on disk.
	 */
	private boolean validDocletPath(String docletPath) {
		StringTokenizer tokens= new StringTokenizer(docletPath, ";"); //$NON-NLS-1$
		while (tokens.hasMoreTokens()) {
			File file= new File(tokens.nextToken());
			if (!file.exists())
				return false;
		}
		return true;
	}
	/**
	 * @return the most severe error (if there is one)
	 */
	private IStatus findMostSevereStatus() {
		return StatusUtil.getMostSevere(new IStatus[] { fJavadocStatus, fPreferenceStatus, fDestinationStatus, fDocletStatus, fTreeStatus, fWizardStatus });
	}
	/** Resets the page status to OK (used before the first validation pass). */
	public void init() {
		updateStatus(new StatusInfo());
	}
	@Override
	public void setVisible(boolean visible) {
		if (visible) {
			// Entering the page: re-run every validation aspect.
			doValidation(STANDARDSTATUS);
			doValidation(CUSTOMSTATUS);
			doValidation(TREESTATUS);
			doValidation(PREFERENCESTATUS);
			doValidation(JAVADOCSTATUS);
		} else {
			// Leaving the page: persist the current state.
			updateStore();
		}
		super.setVisible(visible);
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.raid;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Level;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.util.Random;
/**
* If a file gets deleted, then verify that the parity file gets deleted too.
*/
/**
 * Verifies that when a source file is deleted, the corresponding parity file
 * gets deleted too, and that deleting a source directory purges the matching
 * directory in the parity (destination) namespace.
 */
public class TestRaidPurge extends TestCase {
  final static String TEST_DIR = new File(System.getProperty("test.build.data",
      "build/contrib/raid/test/data")).getAbsolutePath();
  final static String CONFIG_FILE = new File(TEST_DIR,
      "test-raid.xml").getAbsolutePath();
  // How often (ms) the RaidNode re-reads its policy configuration file.
  final static long RELOAD_INTERVAL = 1000;
  final static Log LOG = LogFactory.getLog("org.apache.hadoop.raid.TestRaidNode");
  final Random rand = new Random();

  // Instance initializer: turn on verbose RaidNode logging so failures are
  // easier to diagnose from the test logs.
  {
    ((Log4JLogger)RaidNode.LOG).getLogger().setLevel(Level.ALL);
  }

  Configuration conf;
  String namenode = null;
  MiniDFSCluster dfs = null;
  FileSystem fileSys = null;

  /**
   * Creates the mini DFS cluster and the configuration used by the RaidNode.
   *
   * @param local whether the RaidNode raids files inline instead of
   *              submitting map/reduce jobs
   */
  private void createClusters(boolean local) throws Exception {
    new File(TEST_DIR).mkdirs(); // Make sure data directory exists
    conf = new Configuration();
    conf.set("raid.config.file", CONFIG_FILE);
    conf.setBoolean("raid.config.reload", true);
    conf.setLong("raid.config.reload.interval", RELOAD_INTERVAL);

    // scan all policies once every 5 second
    conf.setLong("raid.policy.rescan.interval", 5000);

    // make all deletions not go through Trash
    conf.set("fs.shell.delete.classname", "org.apache.hadoop.dfs.DFSClient");

    // the RaidNode does the raiding inline (instead of submitting to map/reduce)
    conf.setBoolean("fs.raidnode.local", local);

    // create a dfs cluster with three datanodes
    dfs = new MiniDFSCluster(conf, 3, true, null);
    dfs.waitActive();
    fileSys = dfs.getFileSystem();
    namenode = fileSys.getUri().toString();
  }

  /**
   * Writes the raid policy file (test-raid.xml) that the RaidNode consumes.
   *
   * @param targetReplication replication factor of a source file after raiding
   * @param metaReplication   replication factor of the parity file
   * @param stripeLength      max number of blocks raided together
   */
  private void mySetup(long targetReplication,
    long metaReplication, long stripeLength) throws Exception {
    FileWriter fileWriter = new FileWriter(CONFIG_FILE);
    try {
      fileWriter.write("<?xml version=\"1.0\"?>\n");
      String str = "<configuration> " +
                     "<srcPath prefix=\"/user/dhruba/raidtest\"> " +
                       "<policy name = \"RaidTest1\"> " +
                          "<destPath> /destraid</destPath> " +
                          "<property> " +
                            "<name>targetReplication</name> " +
                            "<value>" + targetReplication + "</value> " +
                            "<description>after RAIDing, decrease the replication factor of a file to this value." +
                            "</description> " +
                          "</property> " +
                          "<property> " +
                            "<name>metaReplication</name> " +
                            "<value>" + metaReplication + "</value> " +
                            "<description> replication factor of parity file" +
                            "</description> " +
                          "</property> " +
                          "<property> " +
                            "<name>stripeLength</name> " +
                            "<value>" + stripeLength + "</value> " +
                            "<description> the max number of blocks in a file to RAID together " +
                            "</description> " +
                          "</property> " +
                          "<property> " +
                            "<name>modTimePeriod</name> " +
                            "<value>2000</value> " +
                            "<description> time (milliseconds) after a file is modified to make it " +
                                           "a candidate for RAIDing " +
                            "</description> " +
                          "</property> " +
                       "</policy>" +
                     "</srcPath>" +
                   "</configuration>";
      fileWriter.write(str);
    } finally {
      // Always release the file handle, even if a write above fails.
      fileWriter.close();
    }
  }

  /**
   * stop clusters created earlier
   */
  private void stopClusters() throws Exception {
    if (dfs != null) { dfs.shutdown(); }
  }

  /**
   * Test that parity files that do not have an associated master file
   * get deleted.
   */
  public void testPurge() throws Exception {
    LOG.info("Test testPurge started.");
    long blockSizes [] = {1024L};
    long stripeLengths [] = {5};
    long targetReplication = 1;
    long metaReplication = 1;
    int numBlock = 9;
    int iter = 0;
    createClusters(true);
    try {
      // Exercise the purge behaviour for every block-size/stripe-length combo.
      for (long blockSize : blockSizes) {
        for (long stripeLength : stripeLengths) {
          doTestPurge(iter, targetReplication, metaReplication,
              stripeLength, blockSize, numBlock);
          iter++;
        }
      }
    } finally {
      stopClusters();
    }
    LOG.info("Test testPurge completed.");
  }

  /**
   * Create parity file, delete original file and then validate that
   * parity file is automatically deleted.
   */
  private void doTestPurge(int iter, long targetReplication,
                           long metaReplication, long stripeLength,
                           long blockSize, int numBlock) throws Exception {
    LOG.info("doTestPurge started---------------------------:" + " iter " + iter +
             " blockSize=" + blockSize + " stripeLength=" + stripeLength);
    mySetup(targetReplication, metaReplication, stripeLength);
    RaidShell shell = null;
    Path dir = new Path("/user/dhruba/raidtest/");
    Path file1 = new Path(dir + "/file" + iter);
    RaidNode cnode = null;
    try {
      Path userDir = new Path("/destraid/user/dhruba");
      Path destPath = new Path("/destraid/user/dhruba/raidtest");
      // Start from a clean slate in both the source and parity namespaces.
      fileSys.delete(dir, true);
      fileSys.delete(destPath, true);
      long crc1 = TestRaidNode.createOldFile(fileSys, file1, 1, numBlock, blockSize);
      LOG.info("doTestPurge created test files for iteration " + iter);

      // create an instance of the RaidNode
      cnode = RaidNode.createRaidNode(null, conf);
      // Retry connecting the RaidShell; the RaidNode RPC server may not be up yet.
      int times = 10;
      while (times-- > 0) {
        try {
          shell = new RaidShell(conf);
        } catch (Exception e) {
          LOG.info("doTestPurge unable to connect to " + RaidNode.getAddress(conf) +
                   " retrying....");
          Thread.sleep(1000);
          continue;
        }
        break;
      }
      LOG.info("doTestPurge created RaidShell.");
      FileStatus[] listPaths = null;

      // wait till file is raided (a non-.tmp parity file shows up in destPath)
      while (true) {
        try {
          listPaths = fileSys.listStatus(destPath);
          int count = 0;
          if (listPaths != null && listPaths.length == 1) {
            for (FileStatus s : listPaths) {
              LOG.info("doTestPurge found path " + s.getPath());
              if (!s.getPath().toString().endsWith(".tmp")) {
                count++;
              }
            }
          }
          if (count > 0) {
            break;
          }
        } catch (FileNotFoundException e) {
          //ignore
        }
        LOG.info("doTestPurge waiting for files to be raided. Found " +
                 (listPaths == null ? "none" : listPaths.length));
        Thread.sleep(1000);                  // keep waiting
      }
      LOG.info("doTestPurge all files found in Raid.");

      // delete original file
      assertTrue("Unable to delete original file " + file1 ,
                 fileSys.delete(file1, true));
      LOG.info("deleted file " + file1);

      // wait till parity file is automatically deleted by the purge thread
      while (true) {
        listPaths = fileSys.listStatus(destPath);
        int count = 0;
        if (listPaths != null && listPaths.length == 1) {
          for (FileStatus s : listPaths) {
            LOG.info("doTestPurge found path " + s.getPath());
            if (!s.getPath().toString().endsWith(".tmp")) {
              count++;
            }
          }
        }
        if (count == 0) {
          break;
        }
        LOG.info("doTestPurge waiting for parity files to be removed. Found " +
                 (listPaths == null ? "none" : listPaths.length));
        Thread.sleep(1000);                  // keep waiting
      }

      // verify that if we delete the directory itself, then the correspoding
      // directory in the parity space is deleted too.
      assertTrue("The directory " + userDir + " should have one entry",
                 fileSys.listStatus(userDir).length == 1);
      assertTrue("Unable to delete original directory " + dir,
                 fileSys.delete(dir, true));
      LOG.info("deleted dir " + dir);

      // wait till parity directory is automatically deleted
      while (true) {
        listPaths = fileSys.listStatus(userDir);
        int count = 0;
        if (listPaths != null) {
          for (FileStatus s : listPaths) {
            LOG.info("doTestPurge found path " + s.getPath());
            count++;
          }
        }
        if (count == 0) {
          break;
        }
        LOG.info("doTestPurge waiting for parity dir to be removed. Found " +
                 (listPaths == null ? "none" : listPaths.length));
        Thread.sleep(1000);                  // keep waiting
      }
    } catch (Exception e) {
      LOG.info("doTestPurge Exception " + e +
               StringUtils.stringifyException(e));
      throw e;
    } finally {
      // shell stays null when every connection attempt above failed; guard
      // against an NPE here that would mask the original test failure.
      if (shell != null) {
        shell.close();
      }
      if (cnode != null) { cnode.stop(); cnode.join(); }
      LOG.info("doTestPurge delete file " + file1);
      fileSys.delete(file1, true);
    }
    LOG.info("doTestPurge completed:" + " blockSize=" + blockSize +
             " stripeLength=" + stripeLength);
  }
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.cpp;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget;
import com.google.devtools.build.lib.analysis.util.AnalysisMock;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.packages.util.Crosstool.CcToolchainConfig;
import com.google.devtools.build.lib.packages.util.MockPlatformSupport;
import com.google.devtools.build.lib.vfs.ModifiedFileSet;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests that {@code CppCompileAction} is populated with the correct build variables. */
@RunWith(JUnit4.class)
/** Tests that {@code CppCompileAction} is populated with the correct build variables. */
@RunWith(JUnit4.class)
public class CompileBuildVariablesTest extends BuildViewTestCase {

  /**
   * Returns the {@link CppCompileAction} that produced the input whose base name starts with
   * {@code name}, among the inputs of the action generating {@code label}'s primary output.
   */
  private CppCompileAction getCppCompileAction(final String label, final String name) throws
      Exception {
    return (CppCompileAction)
        getGeneratingAction(
            Iterables.find(
                getGeneratingAction(getFilesToBuild(getConfiguredTarget(label)).getSingleton())
                    .getInputs()
                    .toList(),
                (artifact) -> artifact.getExecPath().getBaseName().startsWith(name)));
  }

  /** Returns active build variables for a compile action of given type for given target. */
  protected CcToolchainVariables getCompileBuildVariables(String label, String name)
      throws Exception {
    return getCppCompileAction(label, name).getCompileCommandLine().getVariables();
  }

  /** Writes a minimal {@code cc_binary} target {@code //x:bin} with a single source file. */
  private void writeSimpleCcBinary() throws Exception {
    scratch.file("x/BUILD", "cc_binary(name = 'bin', srcs = ['bin.cc'])");
    scratch.file("x/bin.cc");
  }

  /** Configures a mock toolchain that supports per-object debug info (fission). */
  private void setupPerObjectDebugInfoToolchain() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCcToolchainConfig(
            mockToolsConfig,
            CcToolchainConfig.builder().withFeatures(CppRuleClasses.PER_OBJECT_DEBUG_INFO));
  }

  @Test
  public void testPresenceOfBasicVariables() throws Exception {
    writeSimpleCcBinary();

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    assertThat(variables.getStringVariable(CompileBuildVariables.SOURCE_FILE.getVariableName()))
        .contains("x/bin.cc");
    assertThat(variables.getStringVariable(CompileBuildVariables.OUTPUT_FILE.getVariableName()))
        .contains("_objs/bin/bin");
  }

  @Test
  public void testPresenceOfConfigurationCompileFlags() throws Exception {
    useConfiguration("--copt=-foo");

    scratch.file("x/BUILD", "cc_binary(name = 'bin', srcs = ['bin.cc'], copts = ['-bar'],)");
    scratch.file("x/bin.cc");

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    // Configuration-level --copt flags must precede target-level copts.
    ImmutableList<String> userCopts =
        CcToolchainVariables.toStringList(
            variables, CompileBuildVariables.USER_COMPILE_FLAGS.getVariableName());
    assertThat(userCopts)
        .containsAtLeastElementsIn(ImmutableList.<String>of("-foo", "-bar"))
        .inOrder();
  }

  @Test
  public void testPresenceOfUserCompileFlags() throws Exception {
    useConfiguration();

    scratch.file("x/BUILD", "cc_binary(name = 'bin', srcs = ['bin.cc'], copts = ['-foo'])");
    scratch.file("x/bin.cc");

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    ImmutableList<String> copts =
        CcToolchainVariables.toStringList(
            variables, CompileBuildVariables.USER_COMPILE_FLAGS.getVariableName());
    assertThat(copts).contains("-foo");
  }

  @Test
  public void testPerFileCoptsAreInUserCompileFlags() throws Exception {
    writeSimpleCcBinary();
    // Only the first per-file copt matches //x:bin; the second targets bar.cc.
    useConfiguration("--per_file_copt=//x:bin@-foo", "--per_file_copt=//x:bar\\.cc@-bar");

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    ImmutableList<String> copts =
        CcToolchainVariables.toStringList(
            variables, CompileBuildVariables.USER_COMPILE_FLAGS.getVariableName());
    assertThat(copts).containsExactly("-foo").inOrder();
  }

  @Test
  public void testPresenceOfSysrootBuildVariable() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCcToolchainConfig(
            mockToolsConfig, CcToolchainConfig.builder().withSysroot("/usr/local/custom-sysroot"));
    useConfiguration();

    writeSimpleCcBinary();

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    assertThat(variables.getStringVariable(CcCommon.SYSROOT_VARIABLE_NAME))
        .isEqualTo("/usr/local/custom-sysroot");
  }

  @Test
  public void testTargetSysrootWithoutPlatforms() throws Exception {
    useConfiguration("--grte_top=//target_libc", "--host_grte_top=//host_libc");

    writeSimpleCcBinary();
    scratch.file("target_libc/BUILD", "filegroup(name = 'everything')");
    scratch.file("host_libc/BUILD", "filegroup(name = 'everything')");

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    // The target (not host) grte_top must win for the compile of //x:bin.
    assertThat(variables.getStringVariable(CcCommon.SYSROOT_VARIABLE_NAME))
        .isEqualTo("target_libc");
  }

  @Test
  public void testTargetSysrootWithPlatforms() throws Exception {
    MockPlatformSupport.addMockK8Platform(
        mockToolsConfig, analysisMock.ccSupport().getMockCrosstoolLabel());
    useConfiguration(
        "--experimental_platforms=//mock_platform:mock-k8-platform",
        "--extra_toolchains=//mock_platform:toolchain_cc-compiler-k8",
        "--incompatible_enable_cc_toolchain_resolution",
        "--grte_top=//target_libc",
        "--host_grte_top=//host_libc");

    writeSimpleCcBinary();
    scratch.file("target_libc/BUILD", "filegroup(name = 'everything')");
    scratch.file("host_libc/BUILD", "filegroup(name = 'everything')");

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    assertThat(variables.getStringVariable(CcCommon.SYSROOT_VARIABLE_NAME))
        .isEqualTo("target_libc");
  }

  @Test
  public void testPresenceOfPerObjectDebugFileBuildVariable() throws Exception {
    setupPerObjectDebugInfoToolchain();
    useConfiguration("--fission=yes");

    writeSimpleCcBinary();

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    assertThat(
            variables.getStringVariable(
                CompileBuildVariables.PER_OBJECT_DEBUG_INFO_FILE.getVariableName()))
        .isNotNull();
  }

  @Test
  public void testPresenceOfIsUsingFissionVariable() throws Exception {
    setupPerObjectDebugInfoToolchain();
    useConfiguration("--fission=yes");

    writeSimpleCcBinary();

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    assertThat(
            variables.getStringVariable(CompileBuildVariables.IS_USING_FISSION.getVariableName()))
        .isNotNull();
  }

  @Test
  public void testPresenceOfIsUsingFissionAndPerDebugObjectFileVariablesWithThinlto()
      throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCcToolchainConfig(
            mockToolsConfig,
            CcToolchainConfig.builder()
                .withFeatures(
                    "fission_flags_for_lto_backend",
                    CppRuleClasses.PER_OBJECT_DEBUG_INFO,
                    CppRuleClasses.SUPPORTS_START_END_LIB,
                    CppRuleClasses.THIN_LTO));
    useConfiguration("--fission=yes", "--features=thin_lto");

    writeSimpleCcBinary();

    RuleConfiguredTarget target = (RuleConfiguredTarget) getConfiguredTarget("//x:bin");
    LtoBackendAction backendAction =
        (LtoBackendAction)
            target.getActions().stream()
                .filter(a -> a.getMnemonic().equals("CcLtoBackendCompile"))
                .findFirst()
                .get();
    CppCompileAction bitcodeAction =
        (CppCompileAction)
            target.getActions().stream()
                .filter(a -> a.getMnemonic().equals("CppCompile"))
                .findFirst()
                .get();

    // We don't pass per_object_debug_info_file to bitcode compiles
    assertThat(
            bitcodeAction
                .getCompileCommandLine()
                .getVariables()
                .isAvailable(CompileBuildVariables.IS_USING_FISSION.getVariableName()))
        .isTrue();
    assertThat(
            bitcodeAction
                .getCompileCommandLine()
                .getVariables()
                .isAvailable(CompileBuildVariables.PER_OBJECT_DEBUG_INFO_FILE.getVariableName()))
        .isFalse();

    // We do pass per_object_debug_info_file to backend compiles
    assertThat(backendAction.getArguments()).contains("-<PER_OBJECT_DEBUG_INFO_FILE>");
    assertThat(backendAction.getArguments()).contains("-<IS_USING_FISSION>");
  }

  @Test
  public void testPresenceOfPerObjectDebugFileBuildVariableUsingLegacyFields() throws Exception {
    setupPerObjectDebugInfoToolchain();
    useConfiguration("--fission=yes");

    writeSimpleCcBinary();

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");

    assertThat(
            variables.getStringVariable(
                CompileBuildVariables.PER_OBJECT_DEBUG_INFO_FILE.getVariableName()))
        .isNotNull();
  }

  @Test
  public void testPresenceOfMinOsVersionBuildVariable() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCcToolchainConfig(
            mockToolsConfig, CcToolchainConfig.builder().withFeatures("min_os_version_flag"));
    useConfiguration("--minimum_os_version=6");
    writeSimpleCcBinary();

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");
    assertThat(variables.getStringVariable(CcCommon.MINIMUM_OS_VERSION_VARIABLE_NAME))
        .isEqualTo("6");
  }

  @Test
  public void testExternalIncludePathsVariable() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCcToolchainConfig(
            mockToolsConfig,
            CcToolchainConfig.builder().withFeatures(CppRuleClasses.EXTERNAL_INCLUDE_PATHS));
    useConfiguration("--features=external_include_paths");
    scratch.appendFile("WORKSPACE", "local_repository(", "    name = 'pkg',", "    path = '/foo')");
    // Make Skyframe pick up the WORKSPACE modification before analysis.
    getSkyframeExecutor()
        .invalidateFilesUnderPathForTesting(
            reporter,
            new ModifiedFileSet.Builder().modify(PathFragment.create("WORKSPACE")).build(),
            Root.fromPath(rootDirectory));
    scratch.file("/foo/WORKSPACE", "workspace(name = 'pkg')");
    scratch.file(
        "/foo/BUILD",
        "cc_library(name = 'foo',",
        "           hdrs = ['foo.hpp'])",
        "cc_library(name = 'foo2',",
        "           hdrs = ['foo.hpp'],",
        "           include_prefix = 'prf')");
    scratch.file(
        "x/BUILD",
        "cc_library(name = 'bar',",
        "           hdrs = ['bar.hpp'])",
        "cc_binary(name = 'bin',",
        "          srcs = ['bin.cc'],",
        "          deps = ['bar', '@pkg//:foo', '@pkg//:foo2'])");
    scratch.file("x/bin.cc");

    CcToolchainVariables variables = getCompileBuildVariables("//x:bin", "bin");
    ImmutableList.Builder<String> entries =
        ImmutableList.<String>builder()
            .add(
                "/k8-fastbuild/bin/external/pkg/_virtual_includes/foo2",
                "external/pkg",
                "/k8-fastbuild/bin/external/pkg");
    if (analysisMock.isThisBazel()) {
      entries.add("external/bazel_tools", "/k8-fastbuild/bin/external/bazel_tools");
    }
    assertThat(
            CcToolchainVariables.toStringList(
                    variables, CompileBuildVariables.EXTERNAL_INCLUDE_PATHS.getVariableName())
                .stream()
                .map(x -> removeOutDirectory(x))
                .collect(ImmutableList.toImmutableList()))
        .containsExactlyElementsIn(entries.build());
  }

  /** Strips the output-directory prefix so assertions work in both Blaze and Bazel. */
  private String removeOutDirectory(String s) {
    return s.replace("blaze-out", "").replace("bazel-out", "");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.operator;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.functions.util.FunctionUtils;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.ListSerializer;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.cep.EventComparator;
import org.apache.flink.cep.functions.PatternProcessFunction;
import org.apache.flink.cep.functions.TimedOutPartialMatchHandler;
import org.apache.flink.cep.nfa.NFA;
import org.apache.flink.cep.nfa.NFA.MigratedNFA;
import org.apache.flink.cep.nfa.NFAState;
import org.apache.flink.cep.nfa.NFAStateSerializer;
import org.apache.flink.cep.nfa.aftermatch.AfterMatchSkipStrategy;
import org.apache.flink.cep.nfa.compiler.NFACompiler;
import org.apache.flink.cep.nfa.sharedbuffer.SharedBuffer;
import org.apache.flink.cep.nfa.sharedbuffer.SharedBufferAccessor;
import org.apache.flink.cep.time.TimerService;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.Counter;
import org.apache.flink.runtime.state.KeyedStateFunction;
import org.apache.flink.runtime.state.StateInitializationContext;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator;
import org.apache.flink.streaming.api.operators.InternalTimer;
import org.apache.flink.streaming.api.operators.InternalTimerService;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.api.operators.Output;
import org.apache.flink.streaming.api.operators.TimestampedCollector;
import org.apache.flink.streaming.api.operators.Triggerable;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.StreamTask;
import org.apache.flink.util.OutputTag;
import org.apache.flink.util.Preconditions;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.stream.Stream;
/**
* CEP pattern operator for a keyed input stream. For each key, the operator creates a {@link NFA}
* and a priority queue to buffer out of order elements. Both data structures are stored using the
* managed keyed state.
*
* @param <IN> Type of the input elements
* @param <KEY> Type of the key on which the input stream is keyed
* @param <OUT> Type of the output elements
*/
@Internal
public class CepOperator<IN, KEY, OUT>
extends AbstractUdfStreamOperator<OUT, PatternProcessFunction<IN, OUT>>
implements OneInputStreamOperator<IN, OUT>, Triggerable<KEY, VoidNamespace> {
private static final long serialVersionUID = -4166778210774160757L;

private static final String LATE_ELEMENTS_DROPPED_METRIC_NAME = "numLateRecordsDropped";

// Whether this operator runs on processing time (true) or event time (false).
private final boolean isProcessingTime;

// Serializer for input elements; backs both the buffered-event state and the shared buffer.
private final TypeSerializer<IN> inputSerializer;

///////////////			State			//////////////

private static final String NFA_STATE_NAME = "nfaStateName";
private static final String EVENT_QUEUE_STATE_NAME = "eventQueuesStateName";

// Factory that compiles the user's pattern into an NFA; invoked once in open().
private final NFACompiler.NFAFactory<IN> nfaFactory;

// Keyed state: current NFA computation state for the active key.
private transient ValueState<NFAState> computationStates;

// Keyed state: out-of-order elements buffered per timestamp until processed.
private transient MapState<Long, List<IN>> elementQueueState;

// Keyed state wrapper: events referenced by (partial) matches.
private transient SharedBuffer<IN> partialMatches;

// Timer service for event-/processing-time callbacks; obtained in open().
private transient InternalTimerService<VoidNamespace> timerService;

// NFA instance built from nfaFactory; transient — not part of checkpointed state.
private transient NFA<IN> nfa;

/** Comparator for secondary sorting. Primary sorting is always done on time. */
private final EventComparator<IN> comparator;

/**
 * {@link OutputTag} to use for late arriving events. Elements with timestamp smaller than the
 * current watermark will be emitted to this.
 */
private final OutputTag<IN> lateDataOutputTag;

/** Strategy which element to skip after a match was found. */
private final AfterMatchSkipStrategy afterMatchSkipStrategy;

/** Context passed to user function. */
private transient ContextFunctionImpl context;

/** Main output collector, that sets a proper timestamp to the StreamRecord. */
private transient TimestampedCollector<OUT> collector;

/** Wrapped RuntimeContext that limits the underlying context features. */
private transient CepRuntimeContext cepRuntimeContext;

/** Thin context passed to NFA that gives access to time related characteristics. */
private transient TimerService cepTimerService;

// ------------------------------------------------------------------------
// Metrics
// ------------------------------------------------------------------------

// Counts records dropped for arriving behind the watermark with no late-data output configured.
private transient Counter numLateRecordsDropped;
/**
 * Creates a CEP operator.
 *
 * @param inputSerializer serializer for input elements (required)
 * @param isProcessingTime whether to operate on processing time instead of event time
 * @param nfaFactory factory producing the NFA for the compiled pattern (required)
 * @param comparator optional secondary comparator for elements with equal timestamps
 * @param afterMatchSkipStrategy skip strategy applied after a match; {@code null} means no skip
 * @param function user function invoked for each matched sequence
 * @param lateDataOutputTag optional side-output tag for late elements
 */
public CepOperator(
        final TypeSerializer<IN> inputSerializer,
        final boolean isProcessingTime,
        final NFACompiler.NFAFactory<IN> nfaFactory,
        @Nullable final EventComparator<IN> comparator,
        @Nullable final AfterMatchSkipStrategy afterMatchSkipStrategy,
        final PatternProcessFunction<IN, OUT> function,
        @Nullable final OutputTag<IN> lateDataOutputTag) {
    super(function);

    this.inputSerializer = Preconditions.checkNotNull(inputSerializer);
    this.nfaFactory = Preconditions.checkNotNull(nfaFactory);
    this.isProcessingTime = isProcessingTime;
    this.comparator = comparator;
    this.lateDataOutputTag = lateDataOutputTag;
    // Fall back to the no-skip strategy when none was supplied.
    this.afterMatchSkipStrategy =
            afterMatchSkipStrategy == null
                    ? AfterMatchSkipStrategy.noSkip()
                    : afterMatchSkipStrategy;
}
@Override
public void setup(
        StreamTask<?, ?> containingTask,
        StreamConfig config,
        Output<StreamRecord<OUT>> output) {
    super.setup(containingTask, config, output);
    // Wrap the runtime context so the user function only sees the subset of
    // features that CEP supports, then install the wrapper on the function.
    this.cepRuntimeContext = new CepRuntimeContext(getRuntimeContext());
    FunctionUtils.setFunctionRuntimeContext(getUserFunction(), this.cepRuntimeContext);
}
@Override
public void initializeState(StateInitializationContext context) throws Exception {
    super.initializeState(context);
    // initializeState through the provided context

    // Keyed state: current NFA computation state for each key.
    computationStates =
            context.getKeyedStateStore()
                    .getState(
                            new ValueStateDescriptor<>(
                                    NFA_STATE_NAME, new NFAStateSerializer()));

    // Keyed state: events referenced by partial matches.
    partialMatches = new SharedBuffer<>(context.getKeyedStateStore(), inputSerializer);

    // Keyed state: out-of-order elements buffered per timestamp.
    elementQueueState =
            context.getKeyedStateStore()
                    .getMapState(
                            new MapStateDescriptor<>(
                                    EVENT_QUEUE_STATE_NAME,
                                    LongSerializer.INSTANCE,
                                    new ListSerializer<>(inputSerializer)));

    if (context.isRestored()) {
        // Restoring from a savepoint: convert any legacy-format state.
        migrateOldState();
    }
}
/**
 * Migrates keyed state written by older CEP versions (stored under
 * "nfaOperatorStateName" as a serialized {@link MigratedNFA}) into the current
 * layout: computation states go into {@code computationStates}, the shared
 * buffer contents into {@code partialMatches}. The legacy entry is cleared
 * afterwards so migration runs at most once per key.
 */
private void migrateOldState() throws Exception {
    getKeyedStateBackend()
            .applyToAllKeys(
                    VoidNamespace.INSTANCE,
                    VoidNamespaceSerializer.INSTANCE,
                    new ValueStateDescriptor<>(
                            "nfaOperatorStateName", new NFA.NFASerializer<>(inputSerializer)),
                    new KeyedStateFunction<Object, ValueState<MigratedNFA<IN>>>() {
                        @Override
                        public void process(Object key, ValueState<MigratedNFA<IN>> state)
                                throws Exception {
                            MigratedNFA<IN> oldState = state.value();
                            computationStates.update(
                                    new NFAState(oldState.getComputationStates()));
                            org.apache.flink.cep.nfa.SharedBuffer<IN> sharedBuffer =
                                    oldState.getSharedBuffer();
                            partialMatches.init(
                                    sharedBuffer.getEventsBuffer(), sharedBuffer.getPages());
                            // Drop the legacy entry once its contents are transferred.
                            state.clear();
                        }
                    });
}
@Override
public void open() throws Exception {
    super.open();
    timerService =
            getInternalTimerService(
                    "watermark-callbacks", VoidNamespaceSerializer.INSTANCE, this);

    // Build the NFA from the compiled pattern and hand it the (restricted) runtime context.
    nfa = nfaFactory.createNFA();
    nfa.open(cepRuntimeContext, new Configuration());

    context = new ContextFunctionImpl();
    collector = new TimestampedCollector<>(output);
    cepTimerService = new TimerServiceImpl();

    // metrics
    this.numLateRecordsDropped = metrics.counter(LATE_ELEMENTS_DROPPED_METRIC_NAME);
}
/** Closes the NFA (releasing user-function resources) after the operator shuts down. */
@Override
public void close() throws Exception {
    super.close();
    final NFA<IN> currentNfa = nfa;
    if (currentNfa == null) {
        // open() was never called or failed before the NFA was created.
        return;
    }
    currentNfa.close();
}
/**
 * Routes each incoming element according to the time characteristic:
 *
 * <ul>
 *   <li>Processing time, no comparator: fed to the NFA immediately (no reordering possible).
 *   <li>Processing time with comparator: buffered; a timer one millisecond later sorts and
 *       emits the buffered elements.
 *   <li>Event time: buffered until the watermark passes the element's timestamp; elements at
 *       or behind the watermark go to the late-data side output (if configured) or are
 *       counted as dropped.
 * </ul>
 */
@Override
public void processElement(StreamRecord<IN> element) throws Exception {
    if (isProcessingTime) {
        if (comparator == null) {
            // there can be no out of order elements in processing time
            NFAState nfaState = getNFAState();
            long timestamp = getProcessingTimeService().getCurrentProcessingTime();
            advanceTime(nfaState, timestamp);
            processEvent(nfaState, element.getValue(), timestamp);
            updateNFA(nfaState);
        } else {
            long currentTime = timerService.currentProcessingTime();
            bufferEvent(element.getValue(), currentTime);

            // register a timer for the next millisecond to sort and emit buffered data
            timerService.registerProcessingTimeTimer(VoidNamespace.INSTANCE, currentTime + 1);
        }

    } else {

        long timestamp = element.getTimestamp();
        IN value = element.getValue();

        // In event-time processing we assume correctness of the watermark.
        // Events with timestamp smaller than or equal with the last seen watermark are
        // considered late.
        // Late events are put in a dedicated side output, if the user has specified one.

        if (timestamp > timerService.currentWatermark()) {

            // we have an event with a valid timestamp, so
            // we buffer it until we receive the proper watermark.

            saveRegisterWatermarkTimer();

            bufferEvent(value, timestamp);

        } else if (lateDataOutputTag != null) {
            output.collect(lateDataOutputTag, element);
        } else {
            numLateRecordsDropped.inc();
        }
    }
}
/**
 * Registers an event-time timer for {@code current watermark + 1}, so this operator is
 * triggered again whenever the watermark advances — exactly what is needed to keep draining
 * the queue of buffered elements.
 */
private void saveRegisterWatermarkTimer() {
    final long currentWatermark = timerService.currentWatermark();
    // At Long.MAX_VALUE "watermark + 1" would overflow, so skip registration there.
    if (currentWatermark != Long.MAX_VALUE) {
        timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, currentWatermark + 1);
    }
}
/**
 * Appends {@code event} to the list of elements buffered under {@code currentTime},
 * creating the list on first use, and writes the list back to keyed state.
 */
private void bufferEvent(IN event, long currentTime) throws Exception {
    final List<IN> existing = elementQueueState.get(currentTime);
    final List<IN> bucket = (existing == null) ? new ArrayList<>() : existing;
    bucket.add(event);
    // MapState values are not live views; the updated list must be put back.
    elementQueueState.put(currentTime, bucket);
}
/**
 * Fired when the watermark advances. Processes all buffered elements whose timestamps are at
 * or behind the current watermark, in timestamp order, then prunes expired patterns and
 * re-registers a timer if work remains.
 */
@Override
public void onEventTime(InternalTimer<KEY, VoidNamespace> timer) throws Exception {

    // 1) get the queue of pending elements for the key and the corresponding NFA,
    // 2) process the pending elements in event time order and custom comparator if exists
    //		by feeding them in the NFA
    // 3) advance the time to the current watermark, so that expired patterns are discarded.
    // 4) update the stored state for the key, by only storing the new NFA and MapState iff they
    //		have state to be used later.
    // 5) update the last seen watermark.

    // STEP 1
    PriorityQueue<Long> sortedTimestamps = getSortedTimestamps();
    NFAState nfaState = getNFAState();

    // STEP 2
    while (!sortedTimestamps.isEmpty()
            && sortedTimestamps.peek() <= timerService.currentWatermark()) {
        long timestamp = sortedTimestamps.poll();
        advanceTime(nfaState, timestamp);
        // Stream must be closed to release the sorted spill resources, hence try-with-resources.
        try (Stream<IN> elements = sort(elementQueueState.get(timestamp))) {
            elements.forEachOrdered(
                    event -> {
                        try {
                            processEvent(nfaState, event, timestamp);
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    });
        }
        elementQueueState.remove(timestamp);
    }

    // STEP 3
    advanceTime(nfaState, timerService.currentWatermark());

    // STEP 4
    updateNFA(nfaState);

    if (!sortedTimestamps.isEmpty() || !partialMatches.isEmpty()) {
        // More buffered elements or open partial matches: fire again on the next watermark.
        saveRegisterWatermarkTimer();
    }
}
/**
 * Fired by the processing-time timer registered in {@link #processElement}. Drains the entire
 * queue of buffered elements (sorted by timestamp, then by the user comparator) into the NFA.
 */
@Override
public void onProcessingTime(InternalTimer<KEY, VoidNamespace> timer) throws Exception {
    // 1) get the queue of pending elements for the key and the corresponding NFA,
    // 2) process the pending elements in process time order and custom comparator if exists
    //		by feeding them in the NFA
    // 3) update the stored state for the key, by only storing the new NFA and MapState iff they
    //		have state to be used later.

    // STEP 1
    PriorityQueue<Long> sortedTimestamps = getSortedTimestamps();
    NFAState nfa = getNFAState();

    // STEP 2
    while (!sortedTimestamps.isEmpty()) {
        long timestamp = sortedTimestamps.poll();
        advanceTime(nfa, timestamp);
        try (Stream<IN> elements = sort(elementQueueState.get(timestamp))) {
            elements.forEachOrdered(
                    event -> {
                        try {
                            processEvent(nfa, event, timestamp);
                        } catch (Exception e) {
                            // Lambdas cannot throw checked exceptions; rethrow unchecked.
                            throw new RuntimeException(e);
                        }
                    });
        }
        elementQueueState.remove(timestamp);
    }

    // STEP 3
    updateNFA(nfa);
}
/**
 * Returns a stream over {@code elements}, sorted by the user-supplied comparator when one is
 * configured; otherwise in the collection's own order.
 */
private Stream<IN> sort(Collection<IN> elements) {
    if (comparator == null) {
        return elements.stream();
    }
    return elements.stream().sorted(comparator);
}
/**
 * Fetches the NFA computation state for the current key, lazily creating a fresh initial
 * state for keys seen for the first time.
 */
private NFAState getNFAState() throws IOException {
    final NFAState stored = computationStates.value();
    if (stored != null) {
        return stored;
    }
    return nfa.createInitialNFAState();
}
/**
 * Writes {@code nfaState} back to keyed state, but only when it actually changed —
 * avoiding a state-backend write (and its serialization cost) for no-op elements.
 */
private void updateNFA(NFAState nfaState) throws IOException {
    if (!nfaState.isStateChanged()) {
        return;
    }
    nfaState.resetStateChanged();
    computationStates.update(nfaState);
}
/**
 * Collects the timestamps of all buffered element lists for the current key into a min-heap,
 * so they can be drained in ascending time order.
 */
private PriorityQueue<Long> getSortedTimestamps() throws Exception {
    final PriorityQueue<Long> timestamps = new PriorityQueue<>();
    elementQueueState.keys().forEach(timestamps::offer);
    return timestamps;
}
/**
 * Process the given event by giving it to the NFA and outputting the produced set of matched
 * event sequences.
 *
 * @param nfaState Our NFAState object
 * @param event The current event to be processed
 * @param timestamp The timestamp of the event
 */
private void processEvent(NFAState nfaState, IN event, long timestamp) throws Exception {
    // The accessor batches shared-buffer updates; try-with-resources flushes them on close.
    try (SharedBufferAccessor<IN> sharedBufferAccessor = partialMatches.getAccessor()) {
        Collection<Map<String, List<IN>>> patterns =
                nfa.process(
                        sharedBufferAccessor,
                        nfaState,
                        event,
                        timestamp,
                        afterMatchSkipStrategy,
                        cepTimerService);
        processMatchedSequences(patterns, timestamp);
    }
}
/**
 * Advances the time for the given NFA to the given timestamp. This means that no more events
 * with timestamp <b>lower</b> than the given timestamp should be passed to the nfa, This can
 * lead to pruning and timeouts.
 *
 * @param nfaState the NFA state whose notion of time is advanced
 * @param timestamp the time (processing or event time) to advance to
 * @throws Exception if shared buffer access or timeout handling fails
 */
private void advanceTime(NFAState nfaState, long timestamp) throws Exception {
    try (SharedBufferAccessor<IN> sharedBufferAccessor = partialMatches.getAccessor()) {
        Collection<Tuple2<Map<String, List<IN>>, Long>> timedOut =
                nfa.advanceTime(sharedBufferAccessor, nfaState, timestamp);
        // Partial matches that exceeded their window are handed to the user function
        // (only if it implements the timeout handler interface).
        if (!timedOut.isEmpty()) {
            processTimedOutSequences(timedOut);
        }
    }
}
/**
 * Emits each fully matched sequence through the user's {@link PatternProcessFunction},
 * after pointing the shared context and collector at the match timestamp.
 */
private void processMatchedSequences(
        Iterable<Map<String, List<IN>>> matchingSequences, long timestamp) throws Exception {
    final PatternProcessFunction<IN, OUT> userFunction = getUserFunction();
    setTimestamp(timestamp);
    for (Map<String, List<IN>> match : matchingSequences) {
        userFunction.processMatch(match, context, collector);
    }
}
/**
 * Hands timed-out partial matches to the user function, but only when it opted in by
 * implementing {@link TimedOutPartialMatchHandler}; otherwise they are silently dropped.
 */
private void processTimedOutSequences(
        Collection<Tuple2<Map<String, List<IN>>, Long>> timedOutSequences) throws Exception {
    PatternProcessFunction<IN, OUT> function = getUserFunction();
    if (function instanceof TimedOutPartialMatchHandler) {
        // Safe: the instanceof check above guarantees the raw interface; the element
        // type IN is fixed by this operator.
        @SuppressWarnings("unchecked")
        TimedOutPartialMatchHandler<IN> timeoutHandler =
                (TimedOutPartialMatchHandler<IN>) function;
        for (Tuple2<Map<String, List<IN>>, Long> matchingSequence : timedOutSequences) {
            // f1 is the timeout timestamp, f0 the partial match itself.
            setTimestamp(matchingSequence.f1);
            timeoutHandler.processTimedOutMatch(matchingSequence.f0, context);
        }
    }
}
/**
 * Propagates {@code timestamp} to the user-function context, and — in event time only —
 * to the output collector (processing-time output records carry no timestamp).
 */
private void setTimestamp(long timestamp) {
    final boolean eventTime = !isProcessingTime;
    if (eventTime) {
        collector.setAbsoluteTimestamp(timestamp);
    }
    context.setTimestamp(timestamp);
}
/**
 * Gives {@link NFA} access to {@link InternalTimerService} and tells if {@link CepOperator}
 * works in processing time. Should be instantiated once per operator.
 */
private class TimerServiceImpl implements TimerService {

    // Pure delegation to the operator's internal timer service.
    @Override
    public long currentProcessingTime() {
        return timerService.currentProcessingTime();
    }
}
/**
 * Implementation of {@link PatternProcessFunction.Context}. Design to be instantiated once per
 * operator. It serves three methods:
 *
 * <ul>
 *   <li>gives access to currentProcessingTime through {@link InternalTimerService}
 *   <li>gives access to timestamp of current record (or null if Processing time)
 *   <li>enables side outputs with proper timestamp of StreamRecord handling based on either
 *       Processing or Event time
 * </ul>
 */
private class ContextFunctionImpl implements PatternProcessFunction.Context {

    // Timestamp of the record currently being processed; only assigned via setTimestamp().
    private Long timestamp;

    @Override
    public <X> void output(final OutputTag<X> outputTag, final X value) {
        final StreamRecord<X> record;
        if (isProcessingTime) {
            // Processing time: side-output records carry no timestamp.
            record = new StreamRecord<>(value);
        } else {
            record = new StreamRecord<>(value, timestamp());
        }
        output.collect(outputTag, record);
    }

    void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }

    @Override
    public long timestamp() {
        // NOTE(review): unboxes a Long that is null until setTimestamp() has been called —
        // presumably only invoked in event time after a match sets it; confirm.
        return timestamp;
    }

    @Override
    public long currentProcessingTime() {
        return timerService.currentProcessingTime();
    }
}
////////////////////// Testing Methods //////////////////////
// Test helper: true iff the shared buffer for {@code key} still holds partial matches.
@VisibleForTesting
boolean hasNonEmptySharedBuffer(KEY key) throws Exception {
    setCurrentKey(key);
    return !partialMatches.isEmpty();
}
// Test helper: true iff any elements are still buffered (pending processing) for {@code key}.
@VisibleForTesting
boolean hasNonEmptyPQ(KEY key) throws Exception {
    setCurrentKey(key);
    return !elementQueueState.isEmpty();
}
// Test helper: total number of buffered elements across all timestamps for {@code key}.
@VisibleForTesting
int getPQSize(KEY key) throws Exception {
    setCurrentKey(key);
    int total = 0;
    for (List<IN> bucket : elementQueueState.values()) {
        total += bucket.size();
    }
    return total;
}
// Test helper: number of records dropped because they arrived behind the watermark.
@VisibleForTesting
long getLateRecordsNumber() {
    return numLateRecordsDropped.getCount();
}
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.curve;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.internal.junit.ArrayAsserts.assertArrayEquals;
import org.testng.annotations.Test;
import com.opengamma.analytics.math.interpolation.ExponentialInterpolator1D;
import com.opengamma.analytics.math.interpolation.Interpolator1D;
import com.opengamma.analytics.math.interpolation.LinearInterpolator1D;
import com.opengamma.analytics.math.interpolation.StepInterpolator1D;
import com.opengamma.util.test.TestGroup;
/**
* Test.
*/
@Test(groups = TestGroup.UNIT)
public class InterpolatedDoublesCurveTest extends DoublesCurveTestCase {
  private static final Interpolator1D LINEAR = new LinearInterpolator1D();
  private static final Interpolator1D STEP = new StepInterpolator1D();
  private static final Interpolator1D EXPONENTIAL = new ExponentialInterpolator1D();
  private static final double EPS = 1e-15;

  /**
   * Checks the equals/hashCode contract: curves built from equivalent data (arrays, maps,
   * pair collections, sorted and unsorted) compare equal, while curves differing in any
   * component (name, x, y, interpolator, sorted flag) do not.
   */
  @Test
  public void testEqualsAndHashCode() {
    final InterpolatedDoublesCurve curve = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, false, NAME1);
    InterpolatedDoublesCurve other = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, false, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    // Inequality cases: one component changed at a time.
    other = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, false);
    assertFalse(curve.equals(other));
    other = new InterpolatedDoublesCurve(Y_PRIMITIVE, Y_PRIMITIVE, LINEAR, false, NAME1);
    assertFalse(curve.equals(other));
    other = new InterpolatedDoublesCurve(X_PRIMITIVE, X_PRIMITIVE, LINEAR, false, NAME1);
    assertFalse(curve.equals(other));
    other = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, STEP, false, NAME1);
    assertFalse(curve.equals(other));
    other = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, true, NAME1);
    assertFalse(curve.equals(other));
    // NOTE(review): this repeats the unnamed-curve inequality check above verbatim —
    // possibly a different argument was intended; confirm against the class under test.
    other = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, false);
    assertFalse(curve.equals(other));
    // Equality cases: the same data supplied through every supported input form.
    other = new InterpolatedDoublesCurve(X_PRIMITIVE_SORTED, Y_PRIMITIVE_SORTED, LINEAR, true, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(X_OBJECT, Y_OBJECT, LINEAR, false, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(X_OBJECT_SORTED, Y_OBJECT_SORTED, LINEAR, true, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(MAP, LINEAR, false, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(MAP_SORTED, LINEAR, true, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(PAIR_ARRAY, LINEAR, false, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(PAIR_ARRAY_SORTED, LINEAR, true, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(PAIR_SET, LINEAR, false, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(PAIR_SET_SORTED, LINEAR, true, NAME1);
    assertEquals(curve, other);
    // Fix: a verbatim duplicate of the previous hashCode assertion was removed here.
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(X_LIST, Y_LIST, LINEAR, false, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(X_LIST_SORTED, Y_LIST_SORTED, LINEAR, true, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(PAIR_LIST, LINEAR, false, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
    other = new InterpolatedDoublesCurve(PAIR_LIST_SORTED, LINEAR, true, NAME1);
    assertEquals(curve, other);
    assertEquals(curve.hashCode(), other.hashCode());
  }

  /**
   * Checks that every static factory ({@code from}/{@code fromSorted}) produces the same
   * curve as the corresponding constructor, for each supported input form.
   */
  @Test
  public void testStaticConstruction() {
    InterpolatedDoublesCurve curve = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, false, NAME1);
    InterpolatedDoublesCurve other = InterpolatedDoublesCurve.from(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(X_OBJECT, Y_OBJECT, LINEAR, false, NAME1);
    other = InterpolatedDoublesCurve.from(X_OBJECT, Y_OBJECT, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(MAP, LINEAR, false, NAME1);
    other = InterpolatedDoublesCurve.from(MAP, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(PAIR_ARRAY, LINEAR, false, NAME1);
    other = InterpolatedDoublesCurve.from(PAIR_ARRAY, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(PAIR_SET, LINEAR, false, NAME1);
    other = InterpolatedDoublesCurve.from(PAIR_SET, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(X_PRIMITIVE_SORTED, Y_PRIMITIVE_SORTED, LINEAR, true, NAME1);
    other = InterpolatedDoublesCurve.fromSorted(X_PRIMITIVE_SORTED, Y_PRIMITIVE_SORTED, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(X_OBJECT_SORTED, Y_OBJECT_SORTED, LINEAR, true, NAME1);
    other = InterpolatedDoublesCurve.fromSorted(X_OBJECT_SORTED, Y_OBJECT_SORTED, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(MAP_SORTED, LINEAR, true, NAME1);
    other = InterpolatedDoublesCurve.fromSorted(MAP_SORTED, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(PAIR_ARRAY_SORTED, LINEAR, true, NAME1);
    other = InterpolatedDoublesCurve.fromSorted(PAIR_ARRAY_SORTED, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(PAIR_SET_SORTED, LINEAR, true, NAME1);
    other = InterpolatedDoublesCurve.fromSorted(PAIR_SET_SORTED, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(X_LIST, Y_LIST, LINEAR, false, NAME1);
    other = InterpolatedDoublesCurve.from(X_LIST, Y_LIST, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(X_LIST_SORTED, Y_LIST_SORTED, LINEAR, true, NAME1);
    other = InterpolatedDoublesCurve.fromSorted(X_LIST_SORTED, Y_LIST_SORTED, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(PAIR_LIST, LINEAR, false, NAME1);
    other = InterpolatedDoublesCurve.from(PAIR_LIST, LINEAR, NAME1);
    assertEquals(curve, other);
    curve = new InterpolatedDoublesCurve(PAIR_LIST_SORTED, LINEAR, true, NAME1);
    other = InterpolatedDoublesCurve.fromSorted(PAIR_LIST_SORTED, LINEAR, NAME1);
    assertEquals(curve, other);
    // Auto-named curves: names are generated, so compare the data instead of the curve.
    curve = new InterpolatedDoublesCurve(X_PRIMITIVE, Y_PRIMITIVE, LINEAR, false);
    other = InterpolatedDoublesCurve.from(X_PRIMITIVE, Y_PRIMITIVE, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(X_OBJECT, Y_OBJECT, LINEAR, false);
    other = InterpolatedDoublesCurve.from(X_OBJECT, Y_OBJECT, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(MAP, LINEAR, false);
    other = InterpolatedDoublesCurve.from(MAP, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(PAIR_ARRAY, LINEAR, false);
    other = InterpolatedDoublesCurve.from(PAIR_ARRAY, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(PAIR_SET, LINEAR, false);
    other = InterpolatedDoublesCurve.from(PAIR_SET, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(X_PRIMITIVE_SORTED, Y_PRIMITIVE_SORTED, LINEAR, true);
    other = InterpolatedDoublesCurve.fromSorted(X_PRIMITIVE_SORTED, Y_PRIMITIVE_SORTED, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(X_OBJECT_SORTED, Y_OBJECT_SORTED, LINEAR, true);
    other = InterpolatedDoublesCurve.fromSorted(X_OBJECT_SORTED, Y_OBJECT_SORTED, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(MAP_SORTED, LINEAR, true);
    other = InterpolatedDoublesCurve.fromSorted(MAP_SORTED, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(PAIR_ARRAY_SORTED, LINEAR, true);
    other = InterpolatedDoublesCurve.fromSorted(PAIR_ARRAY_SORTED, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(PAIR_SET_SORTED, LINEAR, true);
    other = InterpolatedDoublesCurve.fromSorted(PAIR_SET_SORTED, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(X_LIST, Y_LIST, LINEAR, false);
    other = InterpolatedDoublesCurve.from(X_LIST, Y_LIST, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(X_LIST_SORTED, Y_LIST_SORTED, LINEAR, true);
    other = InterpolatedDoublesCurve.fromSorted(X_LIST_SORTED, Y_LIST_SORTED, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(PAIR_LIST, LINEAR, false);
    other = InterpolatedDoublesCurve.from(PAIR_LIST, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
    curve = new InterpolatedDoublesCurve(PAIR_LIST_SORTED, LINEAR, true);
    other = InterpolatedDoublesCurve.fromSorted(PAIR_LIST_SORTED, LINEAR);
    assertArrayEquals(curve.getXDataAsPrimitive(), other.getXDataAsPrimitive(), 0);
    assertArrayEquals(curve.getYDataAsPrimitive(), other.getYDataAsPrimitive(), 0);
  }

  /** Checks the accessors return the (sorted) data the curve was built from. */
  @Test
  public void testGetters() {
    final InterpolatedDoublesCurve curve = InterpolatedDoublesCurve.from(PAIR_SET, EXPONENTIAL, NAME1);
    assertEquals(curve.getName(), NAME1);
    assertArrayEquals(curve.getXData(), X_OBJECT_SORTED);
    assertArrayEquals(curve.getXDataAsPrimitive(), X_PRIMITIVE_SORTED, 0);
    assertArrayEquals(curve.getYData(), Y_OBJECT_SORTED);
    assertArrayEquals(curve.getYDataAsPrimitive(), Y_PRIMITIVE_SORTED, 0);
    assertEquals(curve.getInterpolator(), EXPONENTIAL);
  }

  /** A non-extrapolating interpolator must reject queries below the data range. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNonExtrapolatingInterpolator1() {
    final InterpolatedDoublesCurve curve = InterpolatedDoublesCurve.from(MAP, LINEAR, NAME1);
    curve.getYValue(-20.);
  }

  /** A non-extrapolating interpolator must reject queries above the data range. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNonExtrapolatingInterpolator2() {
    final InterpolatedDoublesCurve curve = InterpolatedDoublesCurve.from(MAP, LINEAR, NAME1);
    curve.getYValue(120.);
  }

  /** Linear interpolation of y = 3x data should reproduce 3x everywhere in range. */
  @Test
  public void testGetYValueSingleInterpolator() {
    InterpolatedDoublesCurve curve = InterpolatedDoublesCurve.from(MAP, LINEAR, NAME1);
    assertEquals(curve.getYValue(2.), 6, 0);
    for (double i = 0; i < 9; i += 0.2) {
      assertEquals(curve.getYValue(i), 3 * i, EPS);
    }
    // NOTE(review): this second block repeats the LINEAR case verbatim — a different
    // interpolator may have been intended; confirm before changing expected values.
    curve = InterpolatedDoublesCurve.from(MAP, LINEAR, NAME1);
    assertEquals(curve.getYValue(2.), 6, 0);
    for (double i = 0; i < 9; i += 0.2) {
      assertEquals(curve.getYValue(i), 3 * i, EPS);
    }
  }

  /** Checks linear and step interpolators against the same y = 3x data. */
  @Test
  public void testGetYValueManyInterpolators() {
    InterpolatedDoublesCurve curve = InterpolatedDoublesCurve.from(MAP, LINEAR, NAME1);
    for (double i = 0; i < 6; i += 1) {
      assertEquals(curve.getYValue(i), 3 * i, EPS);
    }
    for (double i = 0; i <= 5.5; i += 0.1) {
      assertEquals(curve.getYValue(i), 3 * i, EPS);
    }
    // Step interpolation holds the last knot value, hence the floor() in the expectation.
    curve = InterpolatedDoublesCurve.from(MAP, STEP, NAME1);
    for (double i = 6; i < 9; i += 1) {
      assertEquals(curve.getYValue(i), 3 * Math.floor(i), EPS);
    }
    for (double i = 5.6; i < 9; i += 0.1) {
      assertEquals(curve.getYValue(i), 3 * Math.floor(i), EPS);
    }
  }
}
| |
package lejos.robotics.filter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;
import java.util.StringTokenizer;
import lejos.robotics.Calibrate;
import lejos.robotics.SampleProvider;
/**
 * Base class for calibrating sample filters. While calibration is running it tracks the
 * per-channel minimum, maximum and sum of the (low-pass filtered) samples, and it offers
 * helpers to persist and reload calibration parameters as a Java properties file under
 * {@code /home/root/sensorCalibration/}.
 */
public abstract class AbstractCalibrationFilter extends AbstractFilter implements Calibrate {

  /** Thrown when a stored calibration file does not match this filter or its sample size. */
  public class CalibrationFileException extends RuntimeException {

    private static final long serialVersionUID = -4292630590012678509L;

    public CalibrationFileException(String string) {
      super(string);
    }
  }

  protected LowPassFilter lowPassFilter = null;
  // Per-channel statistics collected while calibrating.
  protected float[] min;
  protected float[] max;
  protected float[] sum;
  protected boolean calibrating = false;
  private final static String DIRECTORY = "/home/root/sensorCalibration/";
  private final static String EXT = ".cal";
  private final Properties props = new Properties();
  protected int numberOfSamplesInCalibration;
  private float timeConstant = 0;

  public AbstractCalibrationFilter(SampleProvider source) {
    super(source);
    min = new float[sampleSize];
    max = new float[sampleSize];
    sum = new float[sampleSize];
  }

  /**
   * Fetches a sample from the sensor and updates the arrays with minimum and maximum values
   * when the calibration process is running.
   */
  public void fetchSample(float[] dst, int off) {
    if (!calibrating) {
      source.fetchSample(dst, off);
    } else {
      // While calibrating, samples are smoothed through the low-pass filter first.
      lowPassFilter.fetchSample(dst, off);
      numberOfSamplesInCalibration++;
      for (int i = 0; i < sampleSize; i++) {
        if (min[i] > dst[i + off])
          min[i] = dst[i + off];
        if (max[i] < dst[i + off])
          max[i] = dst[i + off];
        sum[i] += dst[i + off];
      }
    }
  }

  /**
   * Sets the time constant for the lowpass filter that is used when calibrating. <br>
   * A value of zero will effectively disable the lowpass filter. Higher values will remove
   * more noise from the signal and give better results, especially when calibrating for
   * scale. The downside of higher timeConstants is that calibrating takes more time.
   *
   * @param timeConstant between 0 and 1
   */
  public void setTimeConstant(float timeConstant) {
    this.timeConstant = timeConstant;
  }

  /**
   * Starts a calibration process. Resets collected minimum and maximum values. After starting
   * calibration new minimum and maximum values are calculated on each fetched sample. From
   * this calibration parameters can be calculated.
   */
  public void startCalibration() {
    lowPassFilter = new LowPassFilter(source, timeConstant);
    calibrating = true;
    numberOfSamplesInCalibration = 0;
    for (int i = 0; i < sampleSize; i++) {
      min[i] = Float.MAX_VALUE;
      // Bug fix: Float.MIN_VALUE is the smallest POSITIVE float (~1.4e-45), so a signal
      // that never goes positive would never update max. -Float.MAX_VALUE is the correct
      // lower bound for a running maximum.
      max[i] = -Float.MAX_VALUE;
      sum[i] = 0;
    }
  }

  /** Halts the process of updating calibration parameters. */
  public void stopCalibration() {
    calibrating = false;
  }

  /** Halts the process of updating calibration parameters. */
  public void suspendCalibration() {
    calibrating = false;
  }

  /** Resumes the process of updating calibration parameters after a stop. */
  public void resumeCalibration() {
    calibrating = true;
  }

  /*
   * Methods involved in loading and storing calibration parameters on the file system
   */

  // Maps a bare calibration name to its file under DIRECTORY.
  private File getFile(String filename) {
    return new File(DIRECTORY + filename + EXT);
  }

  /**
   * Loads calibration parameters from the file system. <br>
   * This method raises an exception when the stored calibration parameters do not match the
   * sensor or the calibration class.
   *
   * @param filename filename of the stored calibration parameters
   * @throws FileNotFoundException when no calibration file with this name exists
   * @throws IOException when the calibration file cannot be read
   * @throws CalibrationFileException when the file does not belong to this filter type or
   *           its sample size differs
   */
  protected void load(String filename) throws FileNotFoundException, IOException {
    props.clear();
    // try-with-resources: the original leaked the stream when validation below threw.
    try (FileInputStream in = new FileInputStream(getFile(filename))) {
      props.load(in);
    }
    // Literal-first comparison: a missing "type" property now yields a clear
    // CalibrationFileException instead of a NullPointerException.
    if (!this.toString().equals(props.getProperty("type")))
      throw new CalibrationFileException("Invalid Calibration file. Wrong type for filter.");
    if (Integer.parseInt(props.getProperty("sampleSize")) != sampleSize)
      throw new CalibrationFileException("Invalid Calibration file. Sample size does not match.");
  }

  /**
   * Saves the current set of calibration parameters to the file system. <p>
   * Calibration files are stored in /home/root/sensorCalibration/filename
   *
   * @param filename Name of the file to store calibration parameters in.
   */
  protected void store(String filename) {
    try {
      new File(DIRECTORY).mkdir();
      File f = getFile(filename);
      f.createNewFile();
      // try-with-resources so the stream is closed even when store() throws.
      try (FileOutputStream out = new FileOutputStream(f)) {
        props.setProperty("sampleSize", Integer.toString(sampleSize));
        props.setProperty("type", this.toString());
        props.store(out, "Parameters for sensor calibration");
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  /**
   * Returns the space-separated float values stored under {@code key}.
   */
  protected float[] getPropertyArray(String key) {
    String raw = props.getProperty(key);
    StringTokenizer tokenizer = new StringTokenizer(raw, " ");
    int n = tokenizer.countTokens();
    float[] values = new float[n];
    for (int i = 0; i < n; i++) {
      values[i] = Float.parseFloat(tokenizer.nextToken());
    }
    return values;
  }

  /**
   * Stores {@code values} under {@code key} as a single space-separated string.
   */
  protected void setPropertyArray(String key, float[] values) {
    StringBuilder builder = new StringBuilder();
    int n = values.length;
    for (int i = 0; i < n; i++) {
      if (i != 0)
        builder.append(" ");
      builder.append(values[i]);
    }
    props.setProperty(key, builder.toString());
  }

  /** Stores a single float value under {@code key}. */
  protected void setProperty(String key, float value) {
    props.setProperty(key, Float.toString(value));
  }

  /** Returns the float value stored under {@code key}. */
  protected float getProperty(String key) {
    return Float.parseFloat(props.getProperty(key));
  }
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.browser_ui.site_settings;
import static org.chromium.components.browser_ui.site_settings.WebsitePreferenceBridge.SITE_WILDCARD;
import android.util.Pair;
import androidx.annotation.VisibleForTesting;
import org.chromium.base.Callback;
import org.chromium.base.CommandLine;
import org.chromium.components.content_settings.ContentSettingsType;
import org.chromium.content_public.browser.BrowserContextHandle;
import org.chromium.content_public.browser.ContentFeatureList;
import org.chromium.content_public.common.ContentSwitches;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
/**
* Utility class that asynchronously fetches any Websites and the permissions
* that the user has set for them.
*/
public class WebsitePermissionsFetcher {
/**
 * An enum describing the types of permissions that exist in website settings.
 */
public enum WebsitePermissionsType {
    // Stored as a per-site content-setting exception (see ExceptionInfoFetcher).
    CONTENT_SETTING_EXCEPTION,
    // Stored as a PermissionInfo entry (see PermissionInfoFetcher).
    PERMISSION_INFO,
    // Stored as chooser-based object data, e.g. specific USB/Bluetooth devices
    // (see ChooserExceptionInfoFetcher).
    CHOSEN_OBJECT_INFO
}
private BrowserContextHandle mBrowserContextHandle;
private WebsitePreferenceBridge mWebsitePreferenceBridge;
/**
 * A callback to pass to WebsitePermissionsFetcher. This is run when the
 * website permissions have been fetched.
 */
public interface WebsitePermissionsCallback {
    // Receives the full set of fetched sites, one entry per (origin, embedder) pair.
    void onWebsitePermissionsAvailable(Collection<Website> sites);
}
/**
 * A helper function to get the associated WebsitePermissionsType of a particular
 * ContentSettingsType
 *
 * @param contentSettingsType The ContentSettingsType int of the permission.
 * @return the {@link WebsitePermissionsType} bucket used to store this permission, or
 *         {@code null} if the type is not surfaced in site settings.
 */
public static WebsitePermissionsType getPermissionsType(
        @ContentSettingsType int contentSettingsType) {
    switch (contentSettingsType) {
        case ContentSettingsType.ADS:
        case ContentSettingsType.AUTO_DARK_WEB_CONTENT:
        case ContentSettingsType.AUTOMATIC_DOWNLOADS:
        case ContentSettingsType.BACKGROUND_SYNC:
        case ContentSettingsType.BLUETOOTH_SCANNING:
        case ContentSettingsType.COOKIES:
        case ContentSettingsType.REQUEST_DESKTOP_SITE:
        case ContentSettingsType.JAVASCRIPT:
        case ContentSettingsType.JAVASCRIPT_JIT:
        case ContentSettingsType.POPUPS:
        case ContentSettingsType.SOUND:
            return WebsitePermissionsType.CONTENT_SETTING_EXCEPTION;
        case ContentSettingsType.AR:
        case ContentSettingsType.CLIPBOARD_READ_WRITE:
        case ContentSettingsType.GEOLOCATION:
        case ContentSettingsType.IDLE_DETECTION:
        case ContentSettingsType.MEDIASTREAM_CAMERA:
        case ContentSettingsType.MEDIASTREAM_MIC:
        case ContentSettingsType.MIDI_SYSEX:
        case ContentSettingsType.NFC:
        case ContentSettingsType.NOTIFICATIONS:
        case ContentSettingsType.PROTECTED_MEDIA_IDENTIFIER:
        case ContentSettingsType.SENSORS:
        case ContentSettingsType.VR:
            return WebsitePermissionsType.PERMISSION_INFO;
        case ContentSettingsType.BLUETOOTH_GUARD:
        case ContentSettingsType.USB_GUARD:
            return WebsitePermissionsType.CHOSEN_OBJECT_INFO;
        default:
            // Types not listed here have no site-settings UI representation.
            return null;
    }
}
/**
 * A specialization of Pair to hold an (origin, embedder) tuple. This overrides
 * android.util.Pair#hashCode, which simply XORs the hashCodes of the pair of values together.
 * Having origin == embedder (a fix for a crash in crbug.com/636330) results in pathological
 * performance and causes Site Settings/All Sites to lag significantly on opening. See
 * crbug.com/732907.
 */
public static class OriginAndEmbedder extends Pair<WebsiteAddress, WebsiteAddress> {
    public OriginAndEmbedder(WebsiteAddress origin, WebsiteAddress embedder) {
        super(origin, embedder);
    }

    // Static factory mirroring android.util.Pair#create.
    public static OriginAndEmbedder create(WebsiteAddress origin, WebsiteAddress embedder) {
        return new OriginAndEmbedder(origin, embedder);
    }

    // equals() is inherited from Pair; only the hash distribution is replaced here, and
    // it remains consistent with the inherited equals (same fields participate).
    @Override
    public int hashCode() {
        // This is the calculation used by Arrays#hashCode().
        int result = 31 + (first == null ? 0 : first.hashCode());
        return 31 * result + (second == null ? 0 : second.hashCode());
    }
}
// This map looks up Websites by their origin and embedder.
private final Map<OriginAndEmbedder, Website> mSites = new HashMap<>();
private final boolean mFetchSiteImportantInfo;
// Convenience constructor: does not fetch per-site "important" status.
public WebsitePermissionsFetcher(BrowserContextHandle browserContextHandle) {
    this(browserContextHandle, false);
}
/**
 * @param browserContextHandle the browser context to read permissions from.
 * @param fetchSiteImportantInfo if the fetcher should query whether each site is 'important'.
 */
public WebsitePermissionsFetcher(
        BrowserContextHandle browserContextHandle, boolean fetchSiteImportantInfo) {
    mBrowserContextHandle = browserContextHandle;
    mFetchSiteImportantInfo = fetchSiteImportantInfo;
    mWebsitePreferenceBridge = new WebsitePreferenceBridge();
}
/**
 * Fetches preferences for all sites that have them.
 * TODO(mvanouwerkerk): Add an argument |url| to only fetch permissions for
 * sites from the same origin as that of |url| - https://crbug.com/459222.
 * @param callback The callback to run when the fetch is complete.
 *
 * NB: you should call either this method or {@link #fetchPreferencesForCategory} only once per
 * instance.
 */
public void fetchAllPreferences(WebsitePermissionsCallback callback) {
    TaskQueue tasks = new TaskQueue();
    // Storage fetchers first, then one fetcher per known content-settings type.
    addFetcherForStorage(tasks);
    for (@ContentSettingsType int type = 0; type < ContentSettingsType.NUM_TYPES; type++) {
        addFetcherForContentSettingsType(tasks, type);
    }
    // The callback runner goes last so it fires after every fetcher has completed.
    tasks.add(new PermissionsAvailableCallbackRunner(callback));
    tasks.next();
}
/**
 * Fetches all preferences within a specific category.
 *
 * @param category A category to fetch.
 * @param callback The callback to run when the fetch is complete.
 *
 * NB: you should call either this method or {@link #fetchAllPreferences} only once per
 * instance.
 */
public void fetchPreferencesForCategory(
        SiteSettingsCategory category, WebsitePermissionsCallback callback) {
    // "All sites" means every permission type; delegate and bail out.
    if (category.showSites(SiteSettingsCategory.Type.ALL_SITES)) {
        fetchAllPreferences(callback);
        return;
    }
    TaskQueue tasks = new TaskQueue();
    if (category.showSites(SiteSettingsCategory.Type.USE_STORAGE)) {
        addFetcherForStorage(tasks);
    } else {
        @ContentSettingsType
        int contentSettingsType = category.getContentSettingsType();
        assert getPermissionsType(contentSettingsType) != null;
        addFetcherForContentSettingsType(tasks, contentSettingsType);
    }
    // The callback runner goes last so it fires after the fetcher has completed.
    tasks.add(new PermissionsAvailableCallbackRunner(callback));
    tasks.next();
}
// Enqueues both storage fetchers; storage is not keyed by ContentSettingsType.
private void addFetcherForStorage(TaskQueue queue) {
    // Local storage info is per-origin.
    queue.add(new LocalStorageInfoFetcher());
    // Website storage is per-host.
    queue.add(new WebStorageInfoFetcher());
}
/**
 * Enqueues the fetcher matching {@code contentSettingsType}'s storage model, skipping
 * types that are unsupported or hidden behind a disabled feature flag.
 */
private void addFetcherForContentSettingsType(
        TaskQueue queue, @ContentSettingsType int contentSettingsType) {
    WebsitePermissionsType websitePermissionsType = getPermissionsType(contentSettingsType);
    if (websitePermissionsType == null) {
        // Type has no site-settings representation; nothing to fetch.
        return;
    }
    // Remove this check after the flag is removed.
    // The Bluetooth Scanning permission controls access to the Web Bluetooth
    // Scanning API, which enables sites to scan for and receive events for
    // advertisement packets received from nearby Bluetooth devices.
    if (contentSettingsType == ContentSettingsType.BLUETOOTH_SCANNING) {
        CommandLine commandLine = CommandLine.getInstance();
        if (!commandLine.hasSwitch(ContentSwitches.ENABLE_EXPERIMENTAL_WEB_PLATFORM_FEATURES)) {
            return;
        }
    }
    // Remove this check after the flag is removed.
    if (contentSettingsType == ContentSettingsType.NFC
            && !ContentFeatureList.isEnabled(ContentFeatureList.WEB_NFC)) {
        return;
    }
    // The Bluetooth guard permission controls access to the Web Bluetooth
    // API, which enables sites to request access to connect to specific
    // Bluetooth devices. Users are presented with a chooser prompt in which
    // they must select the Bluetooth device that they would like to allow
    // the site to connect to. Therefore, this permission also displays a
    // list of permitted Bluetooth devices that each site can connect to.
    // Remove this check after the flag is removed.
    if (contentSettingsType == ContentSettingsType.BLUETOOTH_GUARD
            && !ContentFeatureList.isEnabled(
                    ContentFeatureList.WEB_BLUETOOTH_NEW_PERMISSIONS_BACKEND)) {
        return;
    }
    switch (websitePermissionsType) {
        case CONTENT_SETTING_EXCEPTION:
            queue.add(new ExceptionInfoFetcher(contentSettingsType));
            return;
        case PERMISSION_INFO:
            queue.add(new PermissionInfoFetcher(contentSettingsType));
            return;
        case CHOSEN_OBJECT_INFO:
            queue.add(new ChooserExceptionInfoFetcher(contentSettingsType));
            return;
    }
}
/**
 * Looks up (or lazily creates) the {@link Website} for an (origin, embedder) pair. An
 * embedder equal to the origin or to the wildcard is normalized to {@code null}.
 */
private Website findOrCreateSite(String origin, String embedder) {
    // This allows us to show multiple entries in "All sites" for the same origin, based on
    // the (origin, embedder) combination. For example, "cnn.com", "cnn.com all cookies on this
    // site only", and "cnn.com embedded on example.com" are all possible. In the future, this
    // should be collapsed into "cnn.com" and you can see the different options after clicking.
    if (embedder != null && (embedder.equals(origin) || embedder.equals(SITE_WILDCARD))) {
        embedder = null;
    }
    final WebsiteAddress originAddress = WebsiteAddress.create(origin);
    final WebsiteAddress embedderAddress = WebsiteAddress.create(embedder);
    OriginAndEmbedder key = OriginAndEmbedder.create(originAddress, embedderAddress);
    // computeIfAbsent replaces the explicit get/null-check/put of the original.
    return mSites.computeIfAbsent(key, unused -> new Website(originAddress, embedderAddress));
}
/**
 * Attaches every stored content-setting exception of {@code contentSettingsType} to its
 * corresponding {@link Website} entry, creating entries as needed.
 */
private void setException(int contentSettingsType) {
    for (ContentSettingException exception :
            mWebsitePreferenceBridge.getContentSettingsExceptions(
                    mBrowserContextHandle, contentSettingsType)) {
        String address = exception.getPrimaryPattern();
        String embedder = exception.getSecondaryPattern();
        // If both patterns are the wildcard, don't display this rule.
        if (address == null || (address.equals(embedder) && address.equals(SITE_WILDCARD))) {
            continue;
        }
        Website site = findOrCreateSite(address, embedder);
        site.setContentSettingException(contentSettingsType, exception);
    }
}
@VisibleForTesting
public void resetContentSettingExceptions() {
    // Drops every Website gathered so far so a subsequent fetch starts from scratch.
    mSites.clear();
}
/**
 * A single task in the WebsitePermissionsFetcher task queue. We need fetching of features to be
 * serialized, as we need to have all the origins in place prior to populating the hosts.
 */
private abstract class Task {
    /** Override this method to implement a synchronous task. */
    void run() {}
    /**
     * Override this method to implement an asynchronous task. Call queue.next() once execution
     * is complete.
     */
    void runAsync(TaskQueue queue) {
        // Default adapter for synchronous tasks: run the body, then immediately
        // hand control back to the queue so the next task can start.
        run();
        queue.next();
    }
}
/**
 * A queue used to store the sequence of tasks to run to fetch the website preferences. Each
 * task is run sequentially, and some of the tasks may run asynchronously.
 */
private static class TaskQueue extends LinkedList<Task> {
    /** Starts the next queued task, if any remain. */
    void next() {
        if (isEmpty()) return;
        Task task = removeFirst();
        task.runAsync(this);
    }
}
/** Fetches per-origin permission entries for a single content-settings type. */
private class PermissionInfoFetcher extends Task {
    final @ContentSettingsType int mType;
    public PermissionInfoFetcher(@ContentSettingsType int type) {
        mType = type;
    }
    @Override
    public void run() {
        // Sensor permissions are not keyed by embedder.
        boolean ignoreEmbedder = (mType == ContentSettingsType.SENSORS);
        for (PermissionInfo info :
                mWebsitePreferenceBridge.getPermissionInfo(mBrowserContextHandle, mType)) {
            String origin = info.getOrigin();
            if (origin != null) {
                String embedder = ignoreEmbedder ? null : info.getEmbedder();
                findOrCreateSite(origin, embedder).setPermissionInfo(info);
            }
        }
    }
}
/** Fetches chosen-object (chooser) exceptions for the guard type's data type. */
private class ChooserExceptionInfoFetcher extends Task {
    // Chooser data type derived from the guard content-settings type; -1 when the
    // guard has no associated chooser data type.
    final @ContentSettingsType int mChooserDataType;
    public ChooserExceptionInfoFetcher(@ContentSettingsType int type) {
        mChooserDataType = SiteSettingsCategory.objectChooserDataTypeFromGuard(type);
    }
    @Override
    public void run() {
        // Nothing to fetch when no chooser data type maps to this guard.
        if (mChooserDataType == -1) return;
        for (ChosenObjectInfo info : mWebsitePreferenceBridge.getChosenObjectInfo(
                     mBrowserContextHandle, mChooserDataType)) {
            String origin = info.getOrigin();
            if (origin != null) {
                findOrCreateSite(origin, /* embedder= */ null).addChosenObjectInfo(info);
            }
        }
    }
}
/** Fetches content-setting exceptions for a single content-settings type. */
private class ExceptionInfoFetcher extends Task {
    // Content-settings type whose exceptions this task collects.
    final int mContentSettingsType;
    public ExceptionInfoFetcher(int contentSettingsType) {
        mContentSettingsType = contentSettingsType;
    }
    @Override
    public void run() {
        // Synchronous: populates mSites via setException().
        setException(mContentSettingsType);
    }
}
/**
 * Asynchronously fetches local-storage usage per origin and attaches it to the
 * corresponding Website entries.
 */
private class LocalStorageInfoFetcher extends Task {
    @Override
    public void runAsync(final TaskQueue queue) {
        // The bridge delivers a raw HashMap; entries are cast below.
        // NOTE(review): the map presumably comes from native code keyed by origin
        // string — confirm against fetchLocalStorageInfo's implementation.
        mWebsitePreferenceBridge.fetchLocalStorageInfo(
                mBrowserContextHandle, new Callback<HashMap>() {
                    @Override
                    public void onResult(HashMap result) {
                        for (Object o : result.entrySet()) {
                            @SuppressWarnings("unchecked")
                            Map.Entry<String, LocalStorageInfo> entry =
                                    (Map.Entry<String, LocalStorageInfo>) o;
                            String address = entry.getKey();
                            if (address == null) continue;
                            findOrCreateSite(address, null)
                                    .setLocalStorageInfo(entry.getValue());
                        }
                        // Asynchronous task: advance the queue only after the
                        // callback has delivered all results.
                        queue.next();
                    }
                }, mFetchSiteImportantInfo);
    }
}
/**
 * Asynchronously fetches web-storage usage per host and attaches it to the
 * corresponding Website entries.
 */
private class WebStorageInfoFetcher extends Task {
    @Override
    public void runAsync(final TaskQueue queue) {
        mWebsitePreferenceBridge.fetchStorageInfo(
                mBrowserContextHandle, new Callback<ArrayList>() {
                    @Override
                    public void onResult(ArrayList result) {
                        // The bridge delivers a raw ArrayList of StorageInfo.
                        @SuppressWarnings("unchecked")
                        ArrayList<StorageInfo> infoArray = result;
                        for (StorageInfo info : infoArray) {
                            String address = info.getHost();
                            if (address == null) continue;
                            findOrCreateSite(address, null).addStorageInfo(info);
                        }
                        // Advance the queue only after all results were recorded.
                        queue.next();
                    }
                });
    }
}
/**
 * Terminal task: delivers all Websites collected by the preceding tasks to the
 * caller's callback. Must be queued last so every fetcher has run first.
 */
private class PermissionsAvailableCallbackRunner extends Task {
    private final WebsitePermissionsCallback mCallback;
    private PermissionsAvailableCallbackRunner(WebsitePermissionsCallback callback) {
        mCallback = callback;
    }
    @Override
    public void run() {
        mCallback.onWebsitePermissionsAvailable(mSites.values());
    }
}
/** Replaces the preference bridge with a test double. Test-only. */
@VisibleForTesting
public void setWebsitePreferenceBridgeForTesting(
        WebsitePreferenceBridge websitePreferenceBridge) {
    mWebsitePreferenceBridge = websitePreferenceBridge;
}
}
| |
package org.cobbzilla.wizard.model.entityconfig;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.exc.MismatchedInputException;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.AllArgsConstructor;
import lombok.Cleanup;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import net.sf.cglib.proxy.Enhancer;
import net.sf.cglib.proxy.InvocationHandler;
import org.apache.commons.lang3.ArrayUtils;
import org.cobbzilla.util.daemon.AwaitResult;
import org.cobbzilla.util.reflect.ReflectionUtil;
import org.cobbzilla.util.string.StringUtil;
import org.cobbzilla.wizard.api.ValidationException;
import org.cobbzilla.wizard.client.ApiClientBase;
import org.cobbzilla.wizard.model.Identifiable;
import org.cobbzilla.wizard.model.NamedEntity;
import org.cobbzilla.wizard.util.RestResponse;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.cobbzilla.util.daemon.Await.awaitAll;
import static org.cobbzilla.util.daemon.DaemonThreadFactory.fixedPool;
import static org.cobbzilla.util.daemon.ZillaRuntime.*;
import static org.cobbzilla.util.http.HttpMethods.POST;
import static org.cobbzilla.util.http.HttpMethods.PUT;
import static org.cobbzilla.util.http.HttpStatusCodes.NOT_FOUND;
import static org.cobbzilla.util.http.HttpStatusCodes.OK;
import static org.cobbzilla.util.io.FileUtil.dirname;
import static org.cobbzilla.util.io.StreamUtil.stream2string;
import static org.cobbzilla.util.json.JsonUtil.*;
import static org.cobbzilla.util.reflect.ReflectionUtil.forName;
import static org.cobbzilla.util.reflect.ReflectionUtil.getSimpleClass;
import static org.cobbzilla.util.security.ShaUtil.sha256_hex;
import static org.cobbzilla.util.string.StringUtil.urlEncode;
import static org.cobbzilla.wizard.model.entityconfig.EntityConfig.URI_NOT_SUPPORTED;
import static org.cobbzilla.wizard.model.entityconfig.EntityFieldReference.REF_PARENT;
/**
 * Drives creation/update of a model of entities against a REST API, based on JSON model files
 * and server-side entity configs. Supports nested manifests, parallel creation of child
 * entities, and a "verify" mode (when a {@link ModelVerifyLog} is installed) that diffs the
 * model against the server instead of writing anything.
 */
@Slf4j
public class ModelSetup {

    /** Special JSON property: allow updating an entity that already exists. */
    public static final String ALLOW_UPDATE_PROPERTY = "_update";
    /** Special JSON property: run listener substitutions on the deserialized entity. */
    public static final String PERFORM_SUBST_PROPERTY = "_subst";
    /** Special JSON property: run listener substitutions on the raw JSON node. */
    public static final String PERFORM_JSON_SUBST_PROPERTY = "_jsonSubst";
    public static final String[] SPECIAL_PROPERTIES = {
            ALLOW_UPDATE_PROPERTY,
            PERFORM_SUBST_PROPERTY,
            PERFORM_JSON_SUBST_PROPERTY
    };

    /**
     * Removes the special (underscore-prefixed) control properties from each element of a JSON
     * array, then deserializes each element into an instance of clazz.
     */
    @SuppressWarnings("unchecked")
    public static <T> T[] scrubSpecial(JsonNode json, Class<T> clazz) {
        if (!json.isArray()) return die("scrubSpecialFields: expected JSON array");
        final T[] array = (T[]) Array.newInstance(clazz, json.size());
        for (int i=0; i<json.size(); i++) {
            final ObjectNode object = (ObjectNode) json.get(i);
            for (String field : SPECIAL_PROPERTIES) {
                object.remove(field);
            }
            array[i] = json(json(object), clazz);
        }
        return array;
    }

    // 2 x processorCount, max of 50
    public static int maxConcurrency = Math.min(50, 2 * processorCount());
    public static final long CHILD_TIMEOUT = TimeUnit.MINUTES.toMillis(30);
    static { log.info("ModelSetup: maxConcurrency="+maxConcurrency); }

    // Cache of created/looked-up entities, keyed first by api.hashCode(), then by class/uuid.
    // All access is synchronized on the outer map (see addToCache/getCached).
    public static final Map<Integer, Map<String, Identifiable>> entityCache = new HashMap<>();

    // When a verify log is installed, no writes are performed; differences are logged instead.
    private static boolean isVerify() { return getVerifyLog() != null; }
    private static ModelVerifyLog verifyLog = null;
    public static ModelVerifyLog getVerifyLog () { return verifyLog; }
    public static void setVerifyLog (ModelVerifyLog vlog) { verifyLog = vlog; }

    /** Sets up the model described by the default "manifest" resource under prefix. */
    public static LinkedHashMap<String, String> setupModel(ApiClientBase api,
                                                           String entityConfigsEndpoint,
                                                           String prefix,
                                                           ModelSetupListener listener,
                                                           String runName) throws Exception {
        return setupModel(api, entityConfigsEndpoint, prefix, "manifest", listener, runName);
    }

    /** Sets up the model described by the named manifest resource under prefix. */
    public static LinkedHashMap<String, String> setupModel(ApiClientBase api,
                                                           String entityConfigsEndpoint,
                                                           String prefix,
                                                           String manifest,
                                                           ModelSetupListener listener,
                                                           String runName) throws Exception {
        final String[] models = json(stream2string(prefix + manifest + ".json"), String[].class, FULL_MAPPER_ALLOW_COMMENTS);
        final LinkedHashMap<String, String> modelJson = loadModels(prefix, models);
        return setupModel(api, entityConfigsEndpoint, modelJson, new ManifestClasspathResolver(prefix), listener, runName);
    }

    /**
     * Loads model resources by name, recursively expanding any resource that is itself a
     * manifest (a JSON array of strings). Returns (model name -> JSON) in manifest order,
     * deduplicating models that are included more than once.
     */
    private static LinkedHashMap<String, String> loadModels(String prefix, String[] models) {
        final LinkedHashMap<String, String> modelJson = new LinkedHashMap<>(models.length);
        for (String model : models) {
            final String resourcePath = prefix + model + ".json";
            final String json;
            try {
                json = stream2string(resourcePath);
            } catch (IllegalArgumentException e) {
                return die("loadModels: model resource not found: "+resourcePath);
            }
            try {
                // If the json is an array of strings, treat it like a manifest
                final String[] includes = fromJson(json, String[].class);
                log.debug("loadModels: including manifest: "+model);
                final LinkedHashMap<String, String> includedModels = loadModels(prefix, includes);
                for (Map.Entry<String, String> entry : includedModels.entrySet()) {
                    final String path = entry.getKey();
                    if (modelJson.containsKey(path)) {
                        log.debug("loadModels: already included "+ path +", not including again");
                    } else {
                        log.debug("loadModels: adding included model: "+ path);
                        modelJson.put(path, entry.getValue());
                    }
                }
            } catch (MismatchedInputException e) {
                // Not a string array -- treat as a regular model file.
                log.debug("loadModels: including regular model file: "+model);
                modelJson.put(model, json);
            } catch (IllegalArgumentException e) {
                throw e;
            } catch (Exception e) {
                return die("loadModels("+prefix+", "+Arrays.toString(models)+"): "+e);
            }
        }
        return modelJson;
    }

    public static LinkedHashMap<String, String> setupModel(ApiClientBase api,
                                                           String entityConfigsEndpoint,
                                                           LinkedHashMap<String, String> models,
                                                           ModelManifestResolver resolver,
                                                           ModelSetupListener listener,
                                                           String runName) throws Exception {
        return setupModel(api, entityConfigsEndpoint, models, resolver, listener, false, runName);
    }

    /**
     * Sets up every model in the given (name -> JSON) map. Entries whose entity type is
     * "manifest" are resolved and recursed into; all others are created via setupJson.
     */
    public static LinkedHashMap<String, String> setupModel(ApiClientBase api,
                                                           String entityConfigsEndpoint,
                                                           LinkedHashMap<String, String> models,
                                                           ModelManifestResolver resolver,
                                                           ModelSetupListener listener,
                                                           boolean update,
                                                           String runName) throws Exception {
        for (Map.Entry<String, String> model : models.entrySet()) {
            final String modelName = model.getKey();
            final String json = model.getValue();
            if (empty(json)) return die("JSON file not found or empty: "+modelName+".json");

            final String entityType = getEntityTypeFromString(modelName);
            if (entityType.equals("manifest")) {
                final LinkedHashMap<String, String> nested = resolver.buildModel(modelName + ".json");
                setupModel(api, entityConfigsEndpoint, nested, resolver.subResolver(dirname(modelName)), listener, update, runName);
            } else {
                try {
                    setupJson(api, entityConfigsEndpoint, entityType, json, listener, update, runName);
                } catch (Exception e) {
                    log.error("setupModel: api=" + api.getBaseUri() + ", model=" + modelName + ", exception=" + e.getClass().getSimpleName() + ": " + e.getMessage());
                    throw e;
                }
            }
        }
        return models;
    }

    // NOTE: plain HashMap; callers are expected to compute model hashes single-threaded.
    private static Map<String, String> modelHashCache = new HashMap<>();

    /** Returns (and caches) the SHA-256 hex digest of the concatenation of all model files. */
    public static String modelHash(String prefix, String manifest) {
        final String cacheKey = prefix + "/" + manifest;
        String hash = modelHashCache.get(cacheKey);
        if (hash == null) {
            final String[] models = json(stream2string(prefix + manifest + ".json"), String[].class, FULL_MAPPER_ALLOW_COMMENTS);
            StringBuilder b = new StringBuilder();
            for (String model : models) {
                b.append(stream2string(prefix + model + ".json"));
            }
            hash = sha256_hex(b.toString());
            modelHashCache.put(cacheKey, hash);
        }
        return hash;
    }

    public static void setupJson(ApiClientBase api,
                                 String entityConfigsEndpoint,
                                 String entityType,
                                 String json,
                                 ModelSetupListener listener,
                                 String runName) throws Exception {
        setupJson(api, entityConfigsEndpoint, entityType, json, listener, false, runName);
    }

    /**
     * Fetches the entity config for entityType, parses the JSON into proxied ModelEntity
     * instances, and creates (or updates) each one in order.
     */
    public static void setupJson(ApiClientBase api,
                                 String entityConfigsEndpoint,
                                 String entityType,
                                 String json,
                                 ModelSetupListener listener,
                                 boolean update,
                                 String runName) throws Exception {
        if (listener != null) listener.preEntityConfig(entityType);
        final EntityConfig entityConfig = api.get(entityConfigsEndpoint + "/" + entityType, EntityConfig.class);
        if (listener != null) listener.postEntityConfig(entityType, entityConfig);

        final Class<? extends Identifiable> entityClass = forName(entityConfig.getClassName());
        final ModelEntity[] entities = parseEntities(json, entityClass, listener);
        for (ModelEntity entity : entities) {
            // parseEntities leaves null slots for non-object nodes; skip them rather than NPE
            if (entity == null) continue;
            final LinkedHashMap<String, Identifiable> context = new LinkedHashMap<>();
            createEntity(api, entityConfig, entity, context, listener, update, runName);
        }
    }

    /**
     * Parses a JSON array into ModelEntity proxies. Non-object elements are logged and
     * left as null slots in the returned array.
     */
    public static ModelEntity[] parseEntities(String json,
                                              Class<? extends Identifiable> entityClass,
                                              ModelSetupListener listener) {
        final JsonNode[] nodes = jsonWithComments(json, JsonNode[].class);
        final ModelEntity[] entities = new ModelEntity[nodes.length];
        for (int i=0; i<nodes.length; i++) {
            final JsonNode node = nodes[i];
            if (!(node instanceof ObjectNode)) {
                log.error("parseEntities: not an ObjectNode, skipping: "+node);
                continue;
            }
            entities[i] = buildModelEntity((ObjectNode) node, entityClass, listener);
        }
        return entities;
    }

    /**
     * Wraps a JSON object in a cglib proxy that implements both the entity class and
     * ModelEntity, exposing the special control properties alongside the entity itself.
     */
    public static ModelEntity buildModelEntity(ObjectNode node,
                                               Class<? extends Identifiable> entityClass, ModelSetupListener listener) {
        final Enhancer enhancer = new Enhancer();
        enhancer.setInterfaces(new Class[]{ModelEntity.class});
        enhancer.setSuperclass(entityClass);
        enhancer.setCallback(new ModelEntityInvocationHandler(node, entityClass, listener));
        return (ModelEntity) enhancer.create();
    }

    // strip off anything after the first underscore (or period, in case a ".json" file is given)
    public static String getEntityTypeFromString(String entityType) {
        if (entityType.contains("/")) entityType = entityType.substring(entityType.lastIndexOf("/")+1);
        if (entityType.contains("_")) return entityType.substring(0, entityType.indexOf("_"));
        if (entityType.contains(".")) return entityType.substring(0, entityType.indexOf("."));
        return entityType;
    }

    protected static void createEntity(final ApiClientBase api,
                                       EntityConfig entityConfig,
                                       ModelEntity request,
                                       final LinkedHashMap<String, Identifiable> context,
                                       final ModelSetupListener listener,
                                       final String runName) throws Exception {
        createEntity(api, entityConfig, request, context, listener, false, runName);
    }

    protected static void createEntity(final ApiClientBase api,
                                       EntityConfig entityConfig,
                                       ModelEntity request,
                                       final LinkedHashMap<String, Identifiable> context,
                                       final ModelSetupListener listener,
                                       boolean update,
                                       final String runName) throws Exception {
        createEntity(api, entityConfig, request, context, listener, update, false, runName);
    }

    /**
     * Creates (or updates/verifies) one entity, then recursively creates its children in
     * parallel. Children run on a bounded pool (maxConcurrency) with a hard timeout
     * (CHILD_TIMEOUT); any child failure aborts the whole call.
     */
    protected static void createEntity(final ApiClientBase api,
                                       EntityConfig entityConfig,
                                       ModelEntity request,
                                       final LinkedHashMap<String, Identifiable> context,
                                       final ModelSetupListener listener,
                                       boolean update,
                                       boolean strict,
                                       final String runName) throws Exception {

        Identifiable entity = request;

        // does it already exist?
        final String entityType = getRawClass(entity.getClass().getSimpleName());
        final String updateUri = entityConfig.getUpdateUri();
        final String logPrefix = "createEntity(" + runName + "): " + entityType + ": ";
        if (updateUri != null && !updateUri.equals(URI_NOT_SUPPORTED)) {
            final String getUri = processUri(context, entity, updateUri);
            if (getUri != null) {
                if (listener != null) listener.preLookup(entity);
                final RestResponse response = api.doGet(getUri);
                if (listener != null) listener.postLookup(entity, request, response);
                final boolean verify = isVerify();
                switch (response.status) {
                    case OK:
                        if (verify && request.hasData(strict)) {
                            // verify mode: diff server state against the requested state
                            entity = buildModelEntity(json(response.json, ObjectNode.class), request.getEntity().getClass(), listener);
                            if (listener != null && ((ModelEntity) entity).performSubstitutions()) {
                                entity = listener.subst(entity);
                            }
                            log.info(logPrefix + "diffing: " + id(entity));
                            if (listener != null && request.performSubstitutions()) {
                                request = listener.subst(request);
                            }
                            getVerifyLog().logDifference(getUri, api, context, entityConfig, entity, request);

                        } else if ((update && request.hasData(strict)) || request.forceUpdate()) {
                            final Identifiable existing = getCached(api, json(response.json, request.getEntity().getClass()));
                            Identifiable toUpdate;
                            if (existing != null) {
                                existing.update(entity);
                                toUpdate = existing;
                            } else {
                                toUpdate = entity;
                            }
                            if (listener != null && ((ModelEntity) entity).performSubstitutions()) {
                                toUpdate = listener.subst(toUpdate);
                            }
                            log.info(logPrefix + "already exists, updating: " + id(toUpdate));
                            entity = update(api, context, entityConfig, toUpdate, listener);

                        } else {
                            log.info(logPrefix + "already exists: " + getUri);
                            entity = json(response.json, request.getEntity().getClass());
                        }
                        break;

                    case NOT_FOUND:
                        if (verify) {
                            getVerifyLog().logCreation(getUri, entity instanceof ModelEntity ? ((ModelEntity) entity).getEntity() : entity);
                        } else {
                            entity = create(api, context, entityConfig, entity, listener, runName);
                        }
                        break;

                    default:
                        die(logPrefix + "error creating " + entityType + ": " + response);
                }
            } else {
                // could not resolve the lookup URI -- create unconditionally
                entity = create(api, context, entityConfig, entity, listener, runName);
            }
        } else {
            // lookups unsupported for this entity type -- create unconditionally
            entity = create(api, context, entityConfig, entity, listener, runName);
        }
        if (entity == null) return;
        addToCache(api, entity);

        // copy children if present in request (they wouldn't be in object returned from server)
        if (entity instanceof ParentEntity) {
            ((ParentEntity) entity).setChildren(((ParentEntity) request).getChildren());
        }

        // create and add to context
        context.put(entityType, entity);

        // check for child objects
        if (entity instanceof ParentEntity) {
            final ParentEntity parent = (ParentEntity) entity;
            if (parent.hasChildren()) {
                // sanity check
                if (!entityConfig.hasChildren()) die("createEntity: input data has children but entity config does not support them: "+entityConfig.getClassName());

                // sanity are there any children provided that are unsupported by entity config?
                final Map<String, EntityConfig> entityChildren = entityConfig.getChildren();
                final Set<String> entityChildTypes = entityChildren.keySet();
                final Map<String, JsonNode[]> allChildren = parent.getChildren();
                for (String childType : allChildren.keySet()) {
                    if (!entityChildTypes.contains(childType)) {
                        die("createEntity: input data has children of type "+childType+" but entity config only supports: "+StringUtil.toString(entityChildTypes));
                    }
                }

                for (String childEntityType : entityChildTypes) {
                    // these are the objects we want to create
                    final JsonNode[] children = allChildren.get(childEntityType);
                    if (children == null || children.length == 0) continue;

                    // this tells us how to create them
                    final EntityConfig childConfig = entityChildren.get(childEntityType);

                    // needed to read/write JSON correctly
                    String childClassName = childConfig.getClassName();
                    if (childClassName == null) childClassName = entity.getClass().getPackage().getName() + "." + childEntityType;
                    final Class<? extends Identifiable> childClass = forName(childClassName);

                    @Cleanup("shutdownNow") final ExecutorService exec = fixedPool(Math.min(children.length, maxConcurrency), "ModelSetup.exec");
                    final Set<Future<?>> futures = new HashSet<>();
                    for (final JsonNode child : children) {
                        if (!(child instanceof ObjectNode)) {
                            log.error("createEntity: not an ObjectNode: "+child);
                            continue;
                        }
                        futures.add(exec.submit(new CreateEntityJob(api, childConfig, child, childClass, context, listener, update, runName)));
                    }
                    final AwaitResult<?> result = awaitAll(futures, CHILD_TIMEOUT);
                    if (!result.allSucceeded()) {
                        final Map<Future, Exception> failures = result.getFailures();
                        if (!empty(failures)) {
                            final String failureMessages = StringUtil.toString(failures.values().stream().map(Throwable::getMessage).collect(Collectors.toList()), "\n");
                            log.error("createEntity: failures: "+failureMessages);
                        }
                        die("createEntity: "+result);
                    }
                }
            }
        }
    }

    private static void addToCache(ApiClientBase api, Identifiable entity) {
        synchronized (entityCache) {
            Map<String, Identifiable> cache = entityCache.get(api.hashCode());
            if (cache == null) {
                cache = new HashMap<>();
                entityCache.put(api.hashCode(), cache);
            }
            final String key = cacheKey(entity);
            cache.put(key, entity);
        }
    }

    /** Returns the cached instance for this entity if one exists; otherwise caches and returns it. */
    private static Identifiable getCached(ApiClientBase api, Identifiable entity) {
        synchronized (entityCache) {
            final Map<String, Identifiable> cache = entityCache.get(api.hashCode());
            final String key = cacheKey(entity);
            Identifiable cachedEntity = null;
            if (cache != null) cachedEntity = cache.get(key);
            if (cachedEntity != null) return cachedEntity;
            addToCache(api, entity);
            return entity;
        }
    }

    private static String cacheKey(Identifiable entity) {
        // unwrap proxies so the key uses the real entity class
        if (entity instanceof ModelEntity) entity = ((ModelEntity) entity).getEntity();
        return getRawClass(entity.getClass().getName())+"/"+entity.getUuid();
    }

    /**
     * Creates an entity via the config's create method (PUT or POST). If creation fails
     * validation, retries the lookup in case another worker created it concurrently.
     */
    protected static <T extends Identifiable> T create(ApiClientBase api,
                                                       LinkedHashMap<String, Identifiable> ctx,
                                                       EntityConfig entityConfig,
                                                       T entity,
                                                       ModelSetupListener listener,
                                                       String runName) throws Exception {
        if (isVerify()) {
            log.info("create: in verify mode, not creating "+getSimpleClass(entity).getSimpleName()+": " + id(entity));
            return entity;
        }

        final String uri = processUri(ctx, entity, entityConfig.getCreateUri());

        // if the entity has a parent, it will want that parent's UUID in that field
        setParentFields(ctx, entityConfig, entity);

        if (listener != null) {
            if (entity instanceof ModelEntity && ((ModelEntity) entity).performSubstitutions()) {
                entity = listener.subst(entity);
            }
            listener.preCreate(entityConfig, entity);
        }
        log.info("create("+runName+"): creating " + entityConfig.getName() + ": "+ id(entity));
        T created;
        try {
            switch (entityConfig.getCreateMethod().toUpperCase()) {
                case PUT: created = api.put(uri, entity); break;
                case POST: created = api.post(uri, entity); break;
                default: return die("invalid create method: "+entityConfig.getCreateMethod());
            }
        } catch (ValidationException e) {
            // try the get again, did it just appear?
            final String getUri = processUri(ctx, entity, entityConfig.getUpdateUri());
            if (getUri == null) {
                if (entity instanceof ParentEntity && ((ParentEntity) entity).hasChildren()) {
                    return die("create: error creating and cannot check for existence: " + entityConfig.getName());
                } else {
                    log.warn("create: error creating and cannot check for existence: " + entityConfig.getName()+", but has no children, skipping");
                    return null;
                }
            }
            try {
                created = api.get(getUri, (Class<T>) getSimpleClass(entity));
                // we're OK, someone else already created it
            } catch (Exception e2) {
                log.error("error creating: "+entityConfig.getCreateMethod()+": "+e2, e2);
                throw e;
            }
        } catch (Exception e) {
            return die("error creating: "+entityConfig.getCreateMethod()+": "+e, e);
        }
        if (listener != null) listener.postCreate(entityConfig, entity, created);
        return created;
    }

    /** Returns the most human-useful identifier available: name, then uuid, then toString. */
    public static <T extends Identifiable> String id(T entity) {
        if (entity == null) return "null";
        if (entity instanceof NamedEntity) return ((NamedEntity) entity).getName();
        if (entity.getUuid() != null) return entity.getUuid();
        return entity.toString();
    }

    /** Updates an entity via the config's update method (put or post). */
    protected static <T extends Identifiable> T update(ApiClientBase api,
                                                       LinkedHashMap<String, Identifiable> ctx,
                                                       EntityConfig entityConfig,
                                                       T entity,
                                                       ModelSetupListener listener) throws Exception {
        if (isVerify()) {
            log.info("update: in verify mode, not updating: " + id(entity));
            return entity;
        }
        final String uri = processUri(ctx, entity, entityConfig.getUpdateUri());

        // if the entity has a parent, it will want that parent's UUID in that field
        setParentFields(ctx, entityConfig, entity);

        if (listener != null) listener.preUpdate(entityConfig, entity);
        final T updated;
        switch (entityConfig.getUpdateMethod().toLowerCase()) {
            case "put": updated = api.put(uri, entity); break;
            case "post": updated = api.post(uri, entity); break;
            // fixed: error message previously reported getCreateMethod() here
            default: return die("invalid update method: "+entityConfig.getUpdateMethod());
        }
        if (listener != null) listener.postUpdate(entityConfig, entity, updated);
        return updated;
    }

    /**
     * If the entity config declares a parent field, finds the parent in the context (by
     * simple class name) and copies the referenced field value onto the entity. Dies if
     * a parent field is declared but no matching parent is in context.
     */
    private static <T extends Identifiable> void setParentFields(LinkedHashMap<String, Identifiable> ctx, EntityConfig entityConfig, T entity) {
        // if the entity has a parent, it will want that parent's UUID in that field
        if (entityConfig.hasParentField()) {
            final EntityFieldConfig parentField = entityConfig.getParentField();
            String parentFieldName = parentField.getName();
            if (parentFieldName != null) {
                String parentEntityType = parentField.getReference().getEntity();
                if (parentEntityType.equals(REF_PARENT)) parentEntityType = parentFieldName;

                boolean ok = false;
                for (Identifiable candidate : ctx.values()) {
                    if (candidate.getClass().getSimpleName().equalsIgnoreCase(parentEntityType)) {
                        ReflectionUtil.set(entity, parentFieldName, ReflectionUtil.get(candidate, parentField.getReference().getField()));
                        ok = true;
                        break;
                    }
                }
                if (!ok) {
                    die("create: could not find parent (type=" + parentEntityType + ", field=" + parentFieldName + ") of entity (" + entity.getClass().getSimpleName() + "): " + entity);
                }
            } else {
                log.debug("no parentFieldName found for " + entity.getClass().getSimpleName() + ", not setting");
            }
        }
    }

    /**
     * Substitutes {Type.prop} and {prop} placeholders in a URI template from the context
     * and the entity's own properties. Returns null if {uuid} cannot be resolved; dies on
     * any other unresolved placeholder. The result always starts with "/".
     */
    private static String processUri(LinkedHashMap<String, Identifiable> ctx, Identifiable entity, String uri) {

        if (entity instanceof ModelEntity) entity = ((ModelEntity) entity).getEntity();

        for (Map.Entry<String, Identifiable> entry : ctx.entrySet()) {
            final String type = getRawClass(entry.getKey());
            final Identifiable value = entry.getValue();
            final Map<String, Object> ctxEntryProps = ReflectionUtil.toMap(value instanceof ModelEntity ? ((ModelEntity) value).getEntity() : value);
            for (String name : ctxEntryProps.keySet()) {
                uri = uri.replace("{" + type + "." + name + "}", urlEncode(ctxEntryProps.get(name).toString()));
            }
        }
        final Map<String, Object> entityProps = ReflectionUtil.toMap(entity);
        for (String name : entityProps.keySet()) {
            // strip cglib proxy suffix from property names
            if (name.contains("$$")) name = name.substring(0, name.indexOf("$$"));
            uri = uri.replace("{" + name + "}", urlEncode(entityProps.get(name).toString()));
        }

        // if a {uuid} remains, try putting in the name, if we have one
        if (uri.contains("{uuid}") && entityProps.containsKey("name")) {
            uri = uri.replace("{uuid}", urlEncode(entityProps.get("name").toString()));
        }

        if (uri.contains("{uuid}")) {
            log.debug("Could not replace {uuid} found in URL, returning null: "+uri);
            return null;
        }
        if (uri.contains("{")) die("Could not replace all variables in URL: "+uri);
        return uri.startsWith("/") ? uri : "/" + uri;
    }

    /** Strips a cglib proxy suffix ("$$...") from a class name, if present. */
    private static String getRawClass(String className) {
        return className.contains("$$") ? className.substring(0, className.indexOf("$$")) : className;
    }

    public static void scrubSpecialProperties(JsonNode[] nodes) {
        for (JsonNode n : nodes) scrubSpecialProperties(n);
    }

    public static void scrubSpecialProperties(Collection<JsonNode> nodes) {
        for (JsonNode n : nodes) scrubSpecialProperties(n);
    }

    public static void scrubSpecialProperties(JsonNode node) {
        if (node instanceof ObjectNode) {
            // clear special flags if present. fixed: previously _jsonSubst was not removed
            // here, inconsistent with SPECIAL_PROPERTIES and with scrubSpecial() above
            for (String prop : SPECIAL_PROPERTIES) {
                if (node.has(prop)) ((ObjectNode) node).remove(prop);
            }
        }
    }

    /**
     * cglib callback backing the ModelEntity proxies: answers the ModelEntity interface
     * methods from the parsed JSON node and its special flags, and delegates everything
     * else to the deserialized entity.
     */
    private static class ModelEntityInvocationHandler implements InvocationHandler {
        @Getter @JsonIgnore private ObjectNode node;
        private final boolean update;
        private final boolean subst;
        private final boolean jsonSubst;
        @Getter private final Identifiable entity;

        public ModelEntityInvocationHandler(ObjectNode node, Class<? extends Identifiable> entityClass, ModelSetupListener listener) {
            this.node = node;
            // reading the flags also removes them from the node
            update = hasSpecialProperty(node, ALLOW_UPDATE_PROPERTY);
            subst = hasSpecialProperty(node, PERFORM_SUBST_PROPERTY);
            jsonSubst = hasSpecialProperty(node, PERFORM_JSON_SUBST_PROPERTY);
            if (listener != null && jsonSubst) node = listener.jsonSubst(node);
            this.entity = json(node, entityClass);
        }

        /** Returns the boolean value of a special property and removes it from the node. */
        private boolean hasSpecialProperty(ObjectNode node, String prop) {
            boolean val = node.has(prop) && node.get(prop).booleanValue();
            node.remove(prop);
            return val;
        }

        /** True if the node has any field that is not excluded from updates. */
        public boolean hasData(final boolean strict) {
            return toList(node.fieldNames()).stream().anyMatch((n) -> !ArrayUtils.contains(entity.excludeUpdateFields(strict), n));
        }

        @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            switch (method.getName()) {
                case "jsonNode": return node;
                case "updateNode": node = json(json(entity), ObjectNode.class); return null;
                case "forceUpdate": return update;
                case "performSubstitutions": return subst;
                case "getEntity": return entity;
                case "hasData": return hasData((Boolean) args[0]);
                case "equals": return entity.equals(args[0]);
                default:
                    try {
                        return method.invoke(entity, args);
                    } catch (InvocationTargetException e) {
                        log.error("invoke("+method.getName()+"): "+e, e);
                        throw e;
                    }
            }
        }
    }

    /** Runnable that creates one child entity on a cloned API client with a copied context. */
    @AllArgsConstructor
    private static class CreateEntityJob implements Runnable {
        private final ApiClientBase api;
        private final EntityConfig childConfig;
        private final JsonNode child;
        private final Class<? extends Identifiable> childClass;
        private final LinkedHashMap<String, Identifiable> context;
        private final ModelSetupListener listener;
        private final boolean update;
        private final String runName;

        @Override public void run() {
            try {
                // clone the api client: each worker needs its own connection state
                createEntity((ApiClientBase) api.clone(), childConfig, buildModelEntity((ObjectNode) child, childClass, listener), new LinkedHashMap<>(context), listener, update, runName);
            } catch (Exception e) {
                die("run: "+e, e);
            }
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1beta1/assets.proto
package com.google.cloud.asset.v1beta1;
/**
*
*
* <pre>
* A time window of (start_time, end_time].
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1beta1.TimeWindow}
*/
public final class TimeWindow extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.asset.v1beta1.TimeWindow)
TimeWindowOrBuilder {
private static final long serialVersionUID = 0L;
// Use TimeWindow.newBuilder() to construct.
private TimeWindow(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}
// Default-instance constructor; both timestamp fields remain null (unset).
private TimeWindow() {}
// Returns fields that were present on the wire but unknown to this message version.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
}
// Generated wire-format parser: reads tag/value pairs until end of stream (tag 0),
// merging repeated occurrences of each message field per protobuf semantics.
private TimeWindow(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        case 10:
          {
            // Field 1 (start_time), length-delimited: merge into any existing value.
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (startTime_ != null) {
              subBuilder = startTime_.toBuilder();
            }
            startTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(startTime_);
              startTime_ = subBuilder.buildPartial();
            }
            break;
          }
        case 18:
          {
            // Field 2 (end_time), length-delimited: merge into any existing value.
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (endTime_ != null) {
              subBuilder = endTime_.toBuilder();
            }
            endTime_ =
                input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(endTime_);
              endTime_ = subBuilder.buildPartial();
            }
            break;
          }
        default:
          {
            // Unrecognized tag: preserve in unknownFields, or stop if it signals EOF.
            if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always seal the unknown-field set, even if parsing failed mid-stream.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
  // Reflection support: descriptor for google.cloud.asset.v1beta1.TimeWindow.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.asset.v1beta1.AssetProto
        .internal_static_google_cloud_asset_v1beta1_TimeWindow_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.asset.v1beta1.AssetProto
        .internal_static_google_cloud_asset_v1beta1_TimeWindow_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.asset.v1beta1.TimeWindow.class,
            com.google.cloud.asset.v1beta1.TimeWindow.Builder.class);
  }
  public static final int START_TIME_FIELD_NUMBER = 1;
  // Parsed start_time message; null when the field was absent on the wire.
  private com.google.protobuf.Timestamp startTime_;
  /**
   *
   *
   * <pre>
   * Start time of the time window (exclusive).
   * </pre>
   *
   * <code>.google.protobuf.Timestamp start_time = 1;</code>
   */
  public boolean hasStartTime() {
    return startTime_ != null;
  }
  /**
   *
   *
   * <pre>
   * Start time of the time window (exclusive).
   * </pre>
   *
   * <code>.google.protobuf.Timestamp start_time = 1;</code>
   */
  public com.google.protobuf.Timestamp getStartTime() {
    // Never returns null: absent field maps to the Timestamp default instance.
    return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
  }
  /**
   *
   *
   * <pre>
   * Start time of the time window (exclusive).
   * </pre>
   *
   * <code>.google.protobuf.Timestamp start_time = 1;</code>
   */
  public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
    return getStartTime();
  }

  public static final int END_TIME_FIELD_NUMBER = 2;
  // Parsed end_time message; null when the field was absent on the wire.
  private com.google.protobuf.Timestamp endTime_;
  /**
   *
   *
   * <pre>
   * End time of the time window (inclusive).
   * Current timestamp if not specified.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp end_time = 2;</code>
   */
  public boolean hasEndTime() {
    return endTime_ != null;
  }
  /**
   *
   *
   * <pre>
   * End time of the time window (inclusive).
   * Current timestamp if not specified.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp end_time = 2;</code>
   */
  public com.google.protobuf.Timestamp getEndTime() {
    // Never returns null: absent field maps to the Timestamp default instance.
    return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
  }
  /**
   *
   *
   * <pre>
   * End time of the time window (inclusive).
   * Current timestamp if not specified.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp end_time = 2;</code>
   */
  public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {
    return getEndTime();
  }
  // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in proto3, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes only the fields that are set, then any retained unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (startTime_ != null) {
      output.writeMessage(1, getStartTime());
    }
    if (endTime_ != null) {
      output.writeMessage(2, getEndTime());
    }
    unknownFields.writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size; must mirror writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (startTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getStartTime());
    }
    if (endTime_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEndTime());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field equality: presence bits, field values, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.asset.v1beta1.TimeWindow)) {
      return super.equals(obj);
    }
    com.google.cloud.asset.v1beta1.TimeWindow other =
        (com.google.cloud.asset.v1beta1.TimeWindow) obj;
    boolean result = true;
    result = result && (hasStartTime() == other.hasStartTime());
    if (hasStartTime()) {
      result = result && getStartTime().equals(other.getStartTime());
    }
    result = result && (hasEndTime() == other.hasEndTime());
    if (hasEndTime()) {
      result = result && getEndTime().equals(other.getEndTime());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  // Memoized hash consistent with equals(); mixes field numbers and values.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasStartTime()) {
      hash = (37 * hash) + START_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getStartTime().hashCode();
    }
    if (hasEndTime()) {
      hash = (37 * hash) + END_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getEndTime().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protoc-generated parse entry points; all delegate to PARSER.
  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.asset.v1beta1.TimeWindow parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factories.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.asset.v1beta1.TimeWindow prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless copy when building from the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A time window of (start_time, end_time].
   * </pre>
   *
   * Protobuf type {@code google.cloud.asset.v1beta1.TimeWindow}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.asset.v1beta1.TimeWindow)
      com.google.cloud.asset.v1beta1.TimeWindowOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.asset.v1beta1.AssetProto
          .internal_static_google_cloud_asset_v1beta1_TimeWindow_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.asset.v1beta1.AssetProto
          .internal_static_google_cloud_asset_v1beta1_TimeWindow_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.asset.v1beta1.TimeWindow.class,
              com.google.cloud.asset.v1beta1.TimeWindow.Builder.class);
    }

    // Construct using com.google.cloud.asset.v1beta1.TimeWindow.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No eager sub-builders needed for this message type.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    // Resets both timestamp fields (and any lazily-created sub-builders).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (startTimeBuilder_ == null) {
        startTime_ = null;
      } else {
        startTime_ = null;
        startTimeBuilder_ = null;
      }
      if (endTimeBuilder_ == null) {
        endTime_ = null;
      } else {
        endTime_ = null;
        endTimeBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.asset.v1beta1.AssetProto
          .internal_static_google_cloud_asset_v1beta1_TimeWindow_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.asset.v1beta1.TimeWindow getDefaultInstanceForType() {
      return com.google.cloud.asset.v1beta1.TimeWindow.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.asset.v1beta1.TimeWindow build() {
      com.google.cloud.asset.v1beta1.TimeWindow result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Copies builder state into a new message; fields held in sub-builders
    // are built, otherwise the directly-held message references are used.
    @java.lang.Override
    public com.google.cloud.asset.v1beta1.TimeWindow buildPartial() {
      com.google.cloud.asset.v1beta1.TimeWindow result =
          new com.google.cloud.asset.v1beta1.TimeWindow(this);
      if (startTimeBuilder_ == null) {
        result.startTime_ = startTime_;
      } else {
        result.startTime_ = startTimeBuilder_.build();
      }
      if (endTimeBuilder_ == null) {
        result.endTime_ = endTime_;
      } else {
        result.endTime_ = endTimeBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.asset.v1beta1.TimeWindow) {
        return mergeFrom((com.google.cloud.asset.v1beta1.TimeWindow) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges set fields of another TimeWindow into this builder.
    public Builder mergeFrom(com.google.cloud.asset.v1beta1.TimeWindow other) {
      if (other == com.google.cloud.asset.v1beta1.TimeWindow.getDefaultInstance()) return this;
      if (other.hasStartTime()) {
        mergeStartTime(other.getStartTime());
      }
      if (other.hasEndTime()) {
        mergeEndTime(other.getEndTime());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Parses from a stream and merges the result; on parse failure the
    // partially-parsed message (if any) is still merged before rethrowing.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.asset.v1beta1.TimeWindow parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.asset.v1beta1.TimeWindow) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // start_time state: either held directly in startTime_ or managed by
    // startTimeBuilder_ once getStartTimeBuilder() has been called.
    private com.google.protobuf.Timestamp startTime_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        startTimeBuilder_;
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public boolean hasStartTime() {
      return startTimeBuilder_ != null || startTime_ != null;
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public com.google.protobuf.Timestamp getStartTime() {
      if (startTimeBuilder_ == null) {
        return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
      } else {
        return startTimeBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public Builder setStartTime(com.google.protobuf.Timestamp value) {
      if (startTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        startTime_ = value;
        onChanged();
      } else {
        startTimeBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) {
      if (startTimeBuilder_ == null) {
        startTime_ = builderForValue.build();
        onChanged();
      } else {
        startTimeBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public Builder mergeStartTime(com.google.protobuf.Timestamp value) {
      if (startTimeBuilder_ == null) {
        if (startTime_ != null) {
          startTime_ =
              com.google.protobuf.Timestamp.newBuilder(startTime_).mergeFrom(value).buildPartial();
        } else {
          startTime_ = value;
        }
        onChanged();
      } else {
        startTimeBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public Builder clearStartTime() {
      if (startTimeBuilder_ == null) {
        startTime_ = null;
        onChanged();
      } else {
        startTime_ = null;
        startTimeBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() {
      onChanged();
      return getStartTimeFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
      if (startTimeBuilder_ != null) {
        return startTimeBuilder_.getMessageOrBuilder();
      } else {
        return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
      }
    }
    /**
     *
     *
     * <pre>
     * Start time of the time window (exclusive).
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getStartTimeFieldBuilder() {
      // Lazily create the sub-builder; ownership of the field moves into it.
      if (startTimeBuilder_ == null) {
        startTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getStartTime(), getParentForChildren(), isClean());
        startTime_ = null;
      }
      return startTimeBuilder_;
    }

    // end_time state: mirrors the start_time field handling above.
    private com.google.protobuf.Timestamp endTime_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        endTimeBuilder_;
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public boolean hasEndTime() {
      return endTimeBuilder_ != null || endTime_ != null;
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public com.google.protobuf.Timestamp getEndTime() {
      if (endTimeBuilder_ == null) {
        return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
      } else {
        return endTimeBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public Builder setEndTime(com.google.protobuf.Timestamp value) {
      if (endTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        endTime_ = value;
        onChanged();
      } else {
        endTimeBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public Builder setEndTime(com.google.protobuf.Timestamp.Builder builderForValue) {
      if (endTimeBuilder_ == null) {
        endTime_ = builderForValue.build();
        onChanged();
      } else {
        endTimeBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public Builder mergeEndTime(com.google.protobuf.Timestamp value) {
      if (endTimeBuilder_ == null) {
        if (endTime_ != null) {
          endTime_ =
              com.google.protobuf.Timestamp.newBuilder(endTime_).mergeFrom(value).buildPartial();
        } else {
          endTime_ = value;
        }
        onChanged();
      } else {
        endTimeBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public Builder clearEndTime() {
      if (endTimeBuilder_ == null) {
        endTime_ = null;
        onChanged();
      } else {
        endTime_ = null;
        endTimeBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public com.google.protobuf.Timestamp.Builder getEndTimeBuilder() {
      onChanged();
      return getEndTimeFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    public com.google.protobuf.TimestampOrBuilder getEndTimeOrBuilder() {
      if (endTimeBuilder_ != null) {
        return endTimeBuilder_.getMessageOrBuilder();
      } else {
        return endTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : endTime_;
      }
    }
    /**
     *
     *
     * <pre>
     * End time of the time window (inclusive).
     * Current timestamp if not specified.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp end_time = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getEndTimeFieldBuilder() {
      // Lazily create the sub-builder; ownership of the field moves into it.
      if (endTimeBuilder_ == null) {
        endTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getEndTime(), getParentForChildren(), isClean());
        endTime_ = null;
      }
      return endTimeBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.asset.v1beta1.TimeWindow)
  }
// @@protoc_insertion_point(class_scope:google.cloud.asset.v1beta1.TimeWindow)
  // Shared immutable default instance; all absent-message getters return it.
  private static final com.google.cloud.asset.v1beta1.TimeWindow DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.asset.v1beta1.TimeWindow();
  }

  public static com.google.cloud.asset.v1beta1.TimeWindow getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser delegating to the wire-format parsing constructor.
  private static final com.google.protobuf.Parser<TimeWindow> PARSER =
      new com.google.protobuf.AbstractParser<TimeWindow>() {
        @java.lang.Override
        public TimeWindow parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new TimeWindow(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<TimeWindow> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TimeWindow> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.asset.v1beta1.TimeWindow getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.materials.dependency;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.Modifications;
import com.thoughtworks.go.domain.materials.dependency.DependencyMaterialRevision;
import com.thoughtworks.go.helper.GoConfigMother;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.*;
import static com.thoughtworks.go.domain.materials.dependency.DependencyMaterialRevision.create;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
class DependencyMaterialTest {
private DependencyMaterial dependencyMaterial;
    @BeforeEach
    void setup() {
        // Fresh material per test: upstream pipeline "pipeline", stage "stage".
        dependencyMaterial = new DependencyMaterial(new CaseInsensitiveString("pipeline"), new CaseInsensitiveString("stage"));
    }
    @Test
    void shouldReturnCruiseAsUser() {
        // Dependency materials are always attributed to the fixed "cruise" user.
        assertThat(dependencyMaterial.getUserName()).isEqualTo("cruise");
    }
    @Test
    void shouldReturnJson() {
        // toJson populates the UI/JSON view of the material for a given revision.
        Map<String, String> json = new LinkedHashMap<>();
        dependencyMaterial.toJson(json, create("pipeline", 10, "1.0.123", "stage", 1));
        assertThat(json.get("location")).isEqualTo("pipeline/stage");
        assertThat(json.get("scmType")).isEqualTo("Dependency");
        assertThat(json.get("folder")).isEqualTo("");
        assertThat(json.get("action")).isEqualTo("Completed");
    }
@Test
void shouldDifferIfStageCounterHasChanged() {
DependencyMaterialRevision rev1 = create("pipeline", 10, "1.0.123", "stage", 1);
DependencyMaterialRevision rev2 = create("pipeline", 10, "1.0.123", "stage", 2);
DependencyMaterialRevision rev3 = create("pipeline", 11, "1.0.123", "stage", 1);
assertThat(rev1).isNotEqualTo(rev2);
assertThat(rev2).isNotEqualTo(rev3);
assertThat(rev3).isNotEqualTo(rev1);
}
@Test
void shouldParseMaterialRevisionWithPipelineLabel() {
ArrayList<Modification> mods = new ArrayList<>();
Modification mod = new Modification(new Date(), "pipelineName/123/stageName/2", "pipeline-label-123", null);
mods.add(mod);
DependencyMaterialRevision revision = (DependencyMaterialRevision) new Modifications(mods).latestRevision(dependencyMaterial);
assertThat(revision.getRevision()).isEqualTo("pipelineName/123/stageName/2");
assertThat(revision.getPipelineLabel()).isEqualTo("pipeline-label-123");
assertThat(revision.getPipelineCounter()).isEqualTo(123);
assertThat(revision.getPipelineName()).isEqualTo("pipelineName");
assertThat(revision.getStageName()).isEqualTo("stageName");
assertThat(revision.getStageCounter()).isEqualTo(2);
}
@Test
void shouldBeUniqueBasedOnpipelineAndStageName() throws Exception {
DependencyMaterial material1 = new DependencyMaterial(new CaseInsensitiveString("pipeline1"), new CaseInsensitiveString("stage1"));
Map<String, Object> map = new HashMap<>();
material1.appendCriteria(map);
assertThat(map).containsEntry("pipelineName", "pipeline1");
assertThat(map).containsEntry("stageName", "stage1");
assertThat(map.size()).isEqualTo(2);
}
    @Test
    void shouldUsePipelineNameAsMaterialNameIfItIsNotSet() throws Exception {
        // Without an explicit name, the upstream pipeline name is the material name.
        assertThat(new DependencyMaterial(new CaseInsensitiveString("pipeline1"), new CaseInsensitiveString("stage1")).getName()).isEqualTo(new CaseInsensitiveString("pipeline1"));
    }
    @Test
    void shouldUseMaterialNameAsMaterialNameIfItIsSet() throws Exception {
        // An explicitly assigned name takes precedence over the pipeline name.
        DependencyMaterial material = new DependencyMaterial(new CaseInsensitiveString("pipeline1"), new CaseInsensitiveString("stage1"));
        material.setName(new CaseInsensitiveString("my-material-name"));
        assertThat(material.getName()).isEqualTo(new CaseInsensitiveString("my-material-name"));
    }
@Test
void shouldGenerateSqlCriteriaMapInSpecificOrder() throws Exception {
Map<String, Object> map = dependencyMaterial.getSqlCriteria();
assertThat(map.size()).isEqualTo(3);
Iterator<Map.Entry<String, Object>> iter = map.entrySet().iterator();
assertThat(iter.next().getKey()).isEqualTo("type");
assertThat(iter.next().getKey()).isEqualTo("pipelineName");
assertThat(iter.next().getKey()).isEqualTo("stageName");
}
    @Test
    void equalsImplementation() throws Exception {
        // Equality is based on pipeline/stage only; the material name is ignored.
        DependencyMaterial one = new DependencyMaterial(new CaseInsensitiveString("pipelineName"), new CaseInsensitiveString("stage"));
        DependencyMaterial two = new DependencyMaterial(new CaseInsensitiveString("pipelineName"), new CaseInsensitiveString("stage"));
        two.setName(new CaseInsensitiveString("other-name-that-should-be-ignored-in-equals-comparison"));
        assertThat(one).isEqualTo(two);
        DependencyMaterial three = new DependencyMaterial(new CaseInsensitiveString("otherPipelineName"), new CaseInsensitiveString("stage"));
        assertThat(three).isNotEqualTo(one);
    }
    @Test
    void hashCodeImplementation() throws Exception {
        // hashCode must be consistent with equals: name is ignored, pipeline name matters.
        DependencyMaterial one = new DependencyMaterial(new CaseInsensitiveString("pipelineName"), new CaseInsensitiveString("stage"));
        DependencyMaterial two = new DependencyMaterial(new CaseInsensitiveString("pipelineName"), new CaseInsensitiveString("stage"));
        two.setName(new CaseInsensitiveString("other-name-that-should-be-ignored-in-hashcode-generation"));
        assertThat(one.hashCode()).isEqualTo(two.hashCode());
        DependencyMaterial three = new DependencyMaterial(new CaseInsensitiveString("otherPipelineName"), new CaseInsensitiveString("stage"));
        assertThat(three.hashCode()).isNotEqualTo(one.hashCode());
    }
    @Test
    void shouldReturnUpstreamPipelineNameAsDisplayNameIfMaterialNameIsNotDefined() throws Exception {
        // Display name falls back to the upstream pipeline name when unnamed.
        DependencyMaterial material = new DependencyMaterial(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("first"));
        assertThat(material.getDisplayName()).isEqualTo("upstream");
    }
    @Test
    void shouldReturnMaterialNameIfDefined() throws Exception {
        // An explicit material name overrides the pipeline-name fallback for display.
        DependencyMaterial material = new DependencyMaterial(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("first"));
        material.setName(new CaseInsensitiveString("my_name"));
        assertThat(material.getDisplayName()).isEqualTo("my_name");
    }
    @Test
    void shouldNotTruncateshortRevision() throws Exception {
        // Unlike SCM materials, dependency revisions are shown in full, never abbreviated.
        Material material = new DependencyMaterial(new CaseInsensitiveString("upstream"), new CaseInsensitiveString("first"));
        assertThat(material.getShortRevision("pipeline-name/1/stage-name/5")).isEqualTo("pipeline-name/1/stage-name/5");
    }
    @Test
    void shouldUseACombinationOfPipelineAndStageNameAsURI() {
        // Display URI format is "<pipeline> / <stage>".
        Material material = new DependencyMaterial(new CaseInsensitiveString("pipeline-foo"), new CaseInsensitiveString("stage-bar"));
        assertThat(material.getUriForDisplay()).isEqualTo("pipeline-foo / stage-bar");
    }
@Test
void shouldDetectDependencyMaterialUsedInFetchArtifact() {
DependencyMaterial material = new DependencyMaterial(new CaseInsensitiveString("pipeline-foo"), new CaseInsensitiveString("stage-bar"));
PipelineConfig pipelineConfig = mock(PipelineConfig.class);
ArrayList<FetchTask> fetchTasks = new ArrayList<>();
fetchTasks.add(new FetchTask(new CaseInsensitiveString("something"), new CaseInsensitiveString("new"), "src", "dest"));
fetchTasks.add(new FetchTask(new CaseInsensitiveString("pipeline-foo"), new CaseInsensitiveString("stage-bar"), new CaseInsensitiveString("job"), "src", "dest"));
when(pipelineConfig.getFetchTasks()).thenReturn(fetchTasks);
assertThat(material.isUsedInFetchArtifact(pipelineConfig)).isTrue();
}
@Test
void shouldDetectDependencyMaterialUsedInFetchArtifactFromAncestor() {
DependencyMaterial material = new DependencyMaterial(new CaseInsensitiveString("parent-pipeline"), new CaseInsensitiveString("stage-bar"));
PipelineConfig pipelineConfig = mock(PipelineConfig.class);
ArrayList<FetchTask> fetchTasks = new ArrayList<>();
fetchTasks.add(new FetchTask(new CaseInsensitiveString("grandparent-pipeline/parent-pipeline"), new CaseInsensitiveString("grandparent-stage"), new CaseInsensitiveString("grandparent-job"), "src", "dest"));
when(pipelineConfig.getFetchTasks()).thenReturn(fetchTasks);
assertThat(material.isUsedInFetchArtifact(pipelineConfig)).isTrue();
}
@Test
void shouldDetectDependencyMaterialNotUsedInFetchArtifact() {
DependencyMaterial material = new DependencyMaterial(new CaseInsensitiveString("pipeline-foo"), new CaseInsensitiveString("stage-bar"));
PipelineConfig pipelineConfig = mock(PipelineConfig.class);
ArrayList<FetchTask> fetchTasks = new ArrayList<>();
fetchTasks.add(new FetchTask(new CaseInsensitiveString("something"), new CaseInsensitiveString("new"), "src", "dest"));
fetchTasks.add(new FetchTask(new CaseInsensitiveString("another"), new CaseInsensitiveString("boo"), new CaseInsensitiveString("foo"), "src", "dest"));
when(pipelineConfig.getFetchTasks()).thenReturn(fetchTasks);
assertThat(material.isUsedInFetchArtifact(pipelineConfig)).isFalse();
}
    @Test
    void shouldGetAttributesAllFields() {
        // Dependency materials have no secure fields, so both views must be identical.
        DependencyMaterial material = new DependencyMaterial(new CaseInsensitiveString("pipeline-name"), new CaseInsensitiveString("stage-name"));
        Map<String, Object> attributesWithSecureFields = material.getAttributes(true);
        assertAttributes(attributesWithSecureFields);
        Map<String, Object> attributesWithoutSecureFields = material.getAttributes(false);
        assertAttributes(attributesWithoutSecureFields);
    }
    @Test
    void shouldHandleNullOriginDuringValidationWhenUpstreamPipelineDoesNotExist() {
        // A null origin must not NPE; the error message falls back to "cruise-config.xml".
        DependencyMaterialConfig dependencyMaterialConfig = new DependencyMaterialConfig(new CaseInsensitiveString("upstream_stage"), new CaseInsensitiveString("upstream_pipeline"), new CaseInsensitiveString("stage"));
        PipelineConfig pipeline = new PipelineConfig(new CaseInsensitiveString("p"), new MaterialConfigs());
        pipeline.setOrigin(null);
        dependencyMaterialConfig.validateTree(PipelineConfigSaveValidationContext.forChain(true, "group", new BasicCruiseConfig(), pipeline));
        assertThat(dependencyMaterialConfig.errors().on(DependencyMaterialConfig.PIPELINE_STAGE_NAME)).isEqualTo("Pipeline with name 'upstream_pipeline' does not exist, it is defined as a dependency for pipeline 'p' (cruise-config.xml)");
    }
@Test
void shouldHandleNullOriginDuringValidationWhenUpstreamStageDoesNotExist() {
    // Upstream pipeline exists but the referenced stage does not; origin is null as above.
    CruiseConfig cruiseConfig = GoConfigMother.pipelineHavingJob("upstream_pipeline", "upstream_stage", "j1", null, null);
    PipelineConfig downstream = new PipelineConfig(new CaseInsensitiveString("downstream"), new MaterialConfigs());
    downstream.setOrigin(null);
    DependencyMaterialConfig materialConfig = new DependencyMaterialConfig(new CaseInsensitiveString("upstream_pipeline"), new CaseInsensitiveString("does_not_exist"));
    materialConfig.validateTree(PipelineConfigSaveValidationContext.forChain(true, "group", cruiseConfig, downstream));
    assertThat(materialConfig.errors().on(DependencyMaterialConfig.PIPELINE_STAGE_NAME))
            .isEqualTo("Stage with name 'does_not_exist' does not exist on pipeline 'upstream_pipeline', it is being referred to from pipeline 'downstream' (cruise-config.xml)");
}
// Shared assertion: verifies the attribute map produced by getAttributes() for a pipeline material.
private void assertAttributes(Map<String, Object> attributes) {
    assertThat(attributes.get("type")).isEqualTo("pipeline");
    @SuppressWarnings("unchecked")
    Map<String, Object> pipelineConfiguration = (Map<String, Object>) attributes.get("pipeline-configuration");
    assertThat(pipelineConfiguration.get("pipeline-name")).isEqualTo("pipeline-name");
    assertThat(pipelineConfiguration.get("stage-name")).isEqualTo("stage-name");
}
@Test
void shouldReturnFalseForDependencyMaterial_supportsDestinationFolder() {
    // Dependency materials are never checked out to disk, so a destination folder is meaningless.
    // Dropped the needless `throws Exception` clause: nothing in the body throws a checked exception.
    DependencyMaterial material = new DependencyMaterial();
    assertThat(material.supportsDestinationFolder()).isFalse();
}
@Test
void shouldSetLongDescriptionAsCombinationOfPipelineAndStageName() {
    CaseInsensitiveString pipelineName = new CaseInsensitiveString("pipeline-name");
    CaseInsensitiveString stageName = new CaseInsensitiveString("stage-name");
    // Long description format is "<pipeline> [ <stage> ]".
    assertThat(new DependencyMaterial(pipelineName, stageName).getLongDescription()).isEqualTo("pipeline-name [ stage-name ]");
}
}
| |
package com.google.android.accessibility.utils.accessibilitybutton;
import android.accessibilityservice.AccessibilityButtonController;
import android.accessibilityservice.AccessibilityService;
import android.accessibilityservice.AccessibilityServiceInfo;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.display.DisplayManager;
import android.os.Build;
import android.os.Message;
import androidx.annotation.NonNull;
import android.view.Display;
import android.view.accessibility.AccessibilityManager;
import com.google.android.accessibility.utils.AccessibilityServiceCompatUtils;
import com.google.android.accessibility.utils.BuildVersionUtils;
import com.google.android.accessibility.utils.FeatureSupport;
import com.google.android.accessibility.utils.WeakReferenceHandler;
import com.google.android.libraries.accessibility.utils.log.LogUtils;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Monitors whether accessibility button is supported on devices and notifies accessibility button
* click event on the display.
*
* <p>{@link AccessibilityButtonController} provides API to listen to availability of the
* accessibility button. The availability changes during runtime when the device goes into/out of
* fullscreen mode. SelectToSpeak service needs an API to check whether the accessibility button is
* supported on device, regardless of the "fullscreen mode" scenario. This class is a work around
* for the problem, it wraps {@link AccessibilityButtonController.AccessibilityButtonCallback} and
* exposes another callback to notify button click actions and the detect the supportability of a11y
* button.
*
* <p>If the build supports a11y multi-display, {@link AccessibilityButtonController} should handle
* the a11y button callback registration and callback unregistration for multi-display.
*/
public class AccessibilityButtonMonitor {
  private static final String TAG = "A11yMenuButtonMonitor";

  /** Callbacks for click action and confirmation of supportability for the a11y button. */
  public interface AccessibilityButtonMonitorCallback {
    /** Called when the a11y button is clicked. */
    void onAccessibilityButtonClicked();

    /**
     * Called when we can confirm the a11y button is supported or not supported on device.
     * <strong>Note:</strong> This callback method will only be called once.
     */
    void onConfirmSupportability(boolean isSupported);
  }

  // The state when we cannot confirm whether the button is supported or not.
  public static final int PENDING = 0;
  // The state when we can confirm that the button is not supported on device.
  public static final int NOT_SUPPORTED = 1;
  // The state when we can confirm that the button is supported on device.
  public static final int SUPPORTED = 2;

  /** Defines whether a11y button is supported on the device. */
  @Retention(RetentionPolicy.SOURCE)
  public @interface ButtonSupportability {}

  // Time out to post delayed confirmation of a11y button supportability.
  private static final long TIMEOUT = 1000;

  private final AccessibilityService mService;
  private final AccessibilityButtonCallBackHandler mHandler;
  // Callback used to notify AccessibilityService of button availability and click action.
  private AccessibilityButtonMonitorCallback mCallback;
  // Callback to be registered in AccessibilityButtonController. Non-null only after
  // initAccessibilityButton() has run on an O+ device.
  private AccessibilityButtonController.AccessibilityButtonCallback accessibilityButtonCallback;
  private final DisplayManager displayManager;

  // Listener that monitors the display change to support a11y button in multi-display.
  // AccessibilityButtonMonitor has to register or unregister the a11y button controller callback
  // for each display when the specified display is just added or removed.
  private final DisplayManager.DisplayListener displayListener =
      new DisplayManager.DisplayListener() {
        @Override
        public void onDisplayAdded(int displayId) {
          if (FeatureSupport.supportAccessibilityMultiDisplay()
              && accessibilityButtonCallback != null) {
            mService
                .getAccessibilityButtonController(displayId)
                .registerAccessibilityButtonCallback(accessibilityButtonCallback);
          }
        }

        @Override
        public void onDisplayChanged(int displayId) {}

        @Override
        public void onDisplayRemoved(int displayId) {
          if (FeatureSupport.supportAccessibilityMultiDisplay()
              && accessibilityButtonCallback != null) {
            mService
                .getAccessibilityButtonController(displayId)
                .unregisterAccessibilityButtonCallback(accessibilityButtonCallback);
          }
        }
      };

  // Current detection state. Starts PENDING and is only ever written on the handler thread
  // (via AccessibilityButtonCallBackHandler) or from the controller callbacks.
  @ButtonSupportability private int mButtonState = PENDING;

  /**
   * Creates a monitor bound to {@code service}. No detection happens until
   * {@link #initAccessibilityButton} is called.
   */
  public AccessibilityButtonMonitor(@NonNull AccessibilityService service) {
    mHandler = new AccessibilityButtonCallBackHandler(this);
    mService = service;
    displayManager = (DisplayManager) mService.getSystemService(Context.DISPLAY_SERVICE);
  }

  /**
   * Starts monitoring the a11y button. Registers controller callbacks (per-display when
   * multi-display is supported) and schedules confirmation of supportability, which is delivered
   * exactly once through {@code callback.onConfirmSupportability}.
   */
  @TargetApi(Build.VERSION_CODES.O)
  public void initAccessibilityButton(@NonNull AccessibilityButtonMonitorCallback callback) {
    mCallback = callback;
    if (!FeatureSupport.supportAccessibilityButton()) {
      LogUtils.d(TAG, "Accessibility button is not supported for pre-O devices.");
      // A11y button is not supported on pre-O devices.
      mHandler.confirmAccessibilityButtonSupportability(false);
      return;
    }
    // Ensure the flag is added to AccessibilityServiceInfo.
    AccessibilityServiceInfo info = mService.getServiceInfo();
    if (info != null) {
      info.flags |= AccessibilityServiceInfo.FLAG_REQUEST_ACCESSIBILITY_BUTTON;
      mService.setServiceInfo(info);
    }
    @NonNull
    AccessibilityButtonController accessibilityButtonController =
        mService.getAccessibilityButtonController();
    if (AccessibilityServiceCompatUtils.isAccessibilityButtonAvailableCompat(
        accessibilityButtonController)) {
      LogUtils.d(TAG, "Accessibility button is available on initialization.");
      // If a11y button is available at the very beginning when the monitor is initialized, we can
      // confirm that the a11y button is supported on the device.
      mHandler.confirmAccessibilityButtonSupportability(true);
    } else {
      LogUtils.d(TAG, "Accessibility button is not available on initialization.");
      // If a11y button is not available when monitor is initialized, there could be two reasons:
      // 1. The device has physical nav bar button and the virtual nav bar is not supported on the
      // device, which is permanent unavailability.
      // 2. Race condition during framework initialization, it returns false when we call
      // AccessibilityButtonController.isAccessibilityButtonAvailable(), but soon the
      // AccessibilityButtonCallback.onAvailabilityChanged will be called to update availability.
      //
      // In both cases, it's acceptable to post delay to notify unavailability. If we get notified
      // that the availability changes before time out, we can cancel this delayed message and
      // update the availability with another message.
      mHandler.postDelayedConfirmAccessibilityButtonSupportability(TIMEOUT);
    }
    accessibilityButtonCallback =
        new AccessibilityButtonController.AccessibilityButtonCallback() {
          @Override
          public void onClicked(AccessibilityButtonController controller) {
            LogUtils.d(TAG, "Accessibility button clicked.");
            handleControllerCallbackButtonClicked();
          }

          @Override
          public void onAvailabilityChanged(
              AccessibilityButtonController controller, boolean available) {
            LogUtils.d(TAG, "Accessibility button availability changed. isAvailable=%s", available);
            handleControllerCallbackAvailabilityChanged(available);
          }
        };
    // Register callback to AccessibilityButtonController.
    if (FeatureSupport.supportAccessibilityMultiDisplay()) {
      displayManager.registerDisplayListener(displayListener, null);
      for (Display display : displayManager.getDisplays()) {
        mService
            .getAccessibilityButtonController(display.getDisplayId())
            .registerAccessibilityButtonCallback(accessibilityButtonCallback);
      }
    } else {
      accessibilityButtonController.registerAccessibilityButtonCallback(
          accessibilityButtonCallback);
    }
  }

  /** Stops monitoring: mirrors the registration done in {@link #initAccessibilityButton}. */
  @TargetApi(Build.VERSION_CODES.O)
  public void shutdown() {
    if (!FeatureSupport.supportAccessibilityButton()) {
      return;
    }
    // Unregister callback from AccessibilityButtonController.
    if (FeatureSupport.supportAccessibilityMultiDisplay()) {
      displayManager.unregisterDisplayListener(displayListener);
      for (Display display : displayManager.getDisplays()) {
        mService
            .getAccessibilityButtonController(display.getDisplayId())
            .unregisterAccessibilityButtonCallback(accessibilityButtonCallback);
      }
    } else {
      mService
          .getAccessibilityButtonController()
          .unregisterAccessibilityButtonCallback(accessibilityButtonCallback);
    }
  }

  /**
   * Returns {@code true} if accessibility button is detected and supported on the device.
   * <strong>Note:</strong> When it returns {@code false}, it could either because the device
   * doesn't support a11y nav bar button, or the a11y button is supported but not detected yet.
   */
  public boolean isAccessibilityButtonSupported() {
    return mButtonState == SUPPORTED;
  }

  /** Handles the callback AccessibilityButtonCallback.onClicked() */
  private void handleControllerCallbackButtonClicked() {
    // Override button state, and notify callback if necessary. A click is definitive proof the
    // button exists, so it can also promote PENDING/NOT_SUPPORTED to SUPPORTED.
    if (mButtonState == PENDING) {
      mHandler.confirmAccessibilityButtonSupportability(true);
    } else if (mButtonState == NOT_SUPPORTED) {
      // If the previous state detection is a false negative, override the state without notifying
      // availability change.
      LogUtils.w(
          TAG,
          "A11y button is clicked after it's reported as NOT_SUPPORTED. "
              + "Update state from NOT_SUPPORTED to SUPPORTED.");
      mButtonState = SUPPORTED;
    }
    mHandler.notifyButtonClicked();
  }

  /** Handles the callback AccessibilityButtonCallback.onAvailabilityChanged(). */
  private void handleControllerCallbackAvailabilityChanged(boolean available) {
    switch (mButtonState) {
      case NOT_SUPPORTED:
        if (available) {
          // The previous detection indicates that the a11y button is not supported on device, but
          // the callback shows that the button is actually supported. we should update the state
          // quietly without duplicate notifying the confirmation of button availability.
          LogUtils.w(
              TAG,
              "A11y button availability is changed after it's reported as NOT_SUPPORTED. "
                  + "Update state from NOT_SUPPORTED to SUPPORTED.");
          mButtonState = SUPPORTED;
        }
        break;
      case PENDING:
        if (available) {
          // Available is a strong signal, we can confirm the availability immediately.
          mHandler.confirmAccessibilityButtonSupportability(true);
        } else {
          // Unavailable is a weak signal, we should post delay to confirm the unavailability in
          // case that something will be changed during the delay timeout.
          mHandler.postDelayedConfirmAccessibilityButtonSupportability(TIMEOUT);
        }
        break;
      case SUPPORTED:
      default:
        // Do nothing.
        break;
    }
  }

  /**
   * A {@link WeakReferenceHandler} to handle the callback for button click actions and button
   * support confirmation.
   */
  private static final class AccessibilityButtonCallBackHandler
      extends WeakReferenceHandler<AccessibilityButtonMonitor> {
    private static final int MSG_BUTTON_CLICKED = 0;
    private static final int MSG_CONFIRM_BUTTON_NOT_SUPPORTED = 1;
    private static final int MSG_CONFIRM_BUTTON_SUPPORTED = 2;
    private static final int MSG_CONFIRM_BUTTON_SUPPORTABILITY_DELAYED = 3;

    // Whether we have already notified the confirmation of button support. Guarantees the
    // once-only contract of AccessibilityButtonMonitorCallback.onConfirmSupportability.
    private boolean mHasNotifiedSupportability = false;

    public AccessibilityButtonCallBackHandler(AccessibilityButtonMonitor parent) {
      super(parent);
    }

    @Override
    protected void handleMessage(Message msg, AccessibilityButtonMonitor parent) {
      if (parent == null) {
        return;
      }
      switch (msg.what) {
        case MSG_BUTTON_CLICKED:
          parent.mCallback.onAccessibilityButtonClicked();
          break;
        case MSG_CONFIRM_BUTTON_NOT_SUPPORTED:
          parent.mButtonState = NOT_SUPPORTED;
          // Make sure that we only notify once.
          if (!mHasNotifiedSupportability) {
            LogUtils.d(TAG, "Notify that a11y button is not supported.");
            mHasNotifiedSupportability = true;
            parent.mCallback.onConfirmSupportability(false);
          }
          break;
        case MSG_CONFIRM_BUTTON_SUPPORTED:
          parent.mButtonState = SUPPORTED;
          // Make sure that we only notify once.
          if (!mHasNotifiedSupportability) {
            LogUtils.d(TAG, "Notify that a11y button is supported.");
            parent.mCallback.onConfirmSupportability(true);
            mHasNotifiedSupportability = true;
          }
          break;
        case MSG_CONFIRM_BUTTON_SUPPORTABILITY_DELAYED:
          // Timeout expired without a definitive signal; re-query the framework now.
          boolean isAvailable;
          if (BuildVersionUtils.isAtLeastOMR1()) {
            isAvailable = AccessibilityManager.isAccessibilityButtonSupported();
          } else {
            isAvailable =
                AccessibilityServiceCompatUtils.isAccessibilityButtonAvailableCompat(
                    parent.mService.getAccessibilityButtonController());
          }
          parent.mButtonState = isAvailable ? SUPPORTED : NOT_SUPPORTED;
          if (!mHasNotifiedSupportability) {
            LogUtils.d(
                TAG,
                "Delayed. Notify that a11y button is %s.",
                (isAvailable ? "supported" : "not supported"));
            parent.mCallback.onConfirmSupportability(isAvailable);
            mHasNotifiedSupportability = true;
          }
          break;
        default:
          break;
      }
    }

    // Schedules a delayed supportability check, cancelling any pending confirmation messages so
    // at most one confirmation path is in flight.
    private void postDelayedConfirmAccessibilityButtonSupportability(long delay) {
      LogUtils.d(TAG, "Post delay to confirm supportability.");
      removeMessages(MSG_CONFIRM_BUTTON_SUPPORTED);
      removeMessages(MSG_CONFIRM_BUTTON_NOT_SUPPORTED);
      removeMessages(MSG_CONFIRM_BUTTON_SUPPORTABILITY_DELAYED);
      sendEmptyMessageDelayed(MSG_CONFIRM_BUTTON_SUPPORTABILITY_DELAYED, delay);
    }

    // Immediately confirms supportability, cancelling any pending (delayed) confirmations.
    private void confirmAccessibilityButtonSupportability(boolean isSupported) {
      removeMessages(MSG_CONFIRM_BUTTON_SUPPORTED);
      removeMessages(MSG_CONFIRM_BUTTON_NOT_SUPPORTED);
      removeMessages(MSG_CONFIRM_BUTTON_SUPPORTABILITY_DELAYED);
      obtainMessage(isSupported ? MSG_CONFIRM_BUTTON_SUPPORTED : MSG_CONFIRM_BUTTON_NOT_SUPPORTED)
          .sendToTarget();
    }

    // Dispatches a click notification onto the handler thread.
    private void notifyButtonClicked() {
      obtainMessage(MSG_BUTTON_CLICKED).sendToTarget();
    }
  }
}
| |
/*
* Copyright 2019 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.gestalt.assets;
import android.support.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
import com.google.common.collect.Multimaps;
import com.google.common.collect.Sets;
import net.jcip.annotations.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.gestalt.module.sandbox.API;
import org.terasology.gestalt.naming.Name;
import org.terasology.gestalt.util.reflection.GenericsUtil;
import java.io.Closeable;
import java.io.IOException;
import java.lang.ref.PhantomReference;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.lang.reflect.Type;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Semaphore;
/**
* AssetType manages all assets of a particular type/class. It provides the ability to resolve and load assets by Urn, and caches assets so that there is only
* a single instance of a given asset shared by all users.
* <p>
* AssetType is thread safe.
* </p>
*
* @param <T> The type of asset this AssetType manages
* @param <U> The type of asset data required by the assets this AssetType manages
*/
@API
@ThreadSafe
public final class AssetType<T extends Asset<U>, U extends AssetData> implements Closeable {
    private static final Logger logger = LoggerFactory.getLogger(AssetType.class);

    private final Class<T> assetClass;
    private final Class<U> assetDataClass;
    private final AssetFactory<T, U> factory;
    private final List<AssetDataProducer<U>> producers = Lists.newCopyOnWriteArrayList();

    // Strong references to loaded non-instance assets, keyed by urn.
    private final Map<ResourceUrn, T> loadedAssets = new MapMaker().concurrencyLevel(4).makeMap();
    // Instance assets are held weakly so unused instances can be garbage collected.
    // NOTE: iteration over this synchronized multimap's views requires manual synchronization.
    private final ListMultimap<ResourceUrn, WeakReference<T>> instanceAssets = Multimaps.synchronizedListMultimap(ArrayListMultimap.<ResourceUrn, WeakReference<T>>create());

    // Per-asset locks to deal with situations where multiple threads attempt to obtain or create the same unloaded asset concurrently
    private final Map<ResourceUrn, ResourceLock> locks = new MapMaker().concurrencyLevel(1).makeMap();

    // Keeps AssetReferences strongly reachable until processDisposal() drains them from disposalQueue.
    private final Set<AssetReference<? extends Asset<U>>> references = Sets.newConcurrentHashSet();
    private final ReferenceQueue<Asset<U>> disposalQueue = new ReferenceQueue<>();

    private volatile boolean closed;

    // Default strategy: prefer the context module when it is among the candidates, otherwise keep all.
    private volatile ResolutionStrategy resolutionStrategy = (modules, context) -> {
        if (modules.contains(context)) {
            return ImmutableSet.of(context);
        } else {
            return modules;
        }
    };
/**
* Constructs an AssetType for managing assets of the provided Asset class. The Asset class must have its AssetData generic parameter bound via inheritance
* (e.g. MyType extends Asset<MyDataType>)
*
* @param assetClass The class of asset this AssetType will manage.
* @param factory The factory used to convert AssetData to Assets for this type
*/
@SuppressWarnings("unchecked")
public AssetType(Class<T> assetClass, AssetFactory<T, U> factory) {
Preconditions.checkNotNull(assetClass);
Preconditions.checkNotNull(factory);
this.factory = factory;
this.assetClass = assetClass;
Optional<Type> assetDataType = GenericsUtil.getTypeParameterBindingForInheritedClass(assetClass, Asset.class, 0);
if (assetDataType.isPresent()) {
assetDataClass = (Class<U>) GenericsUtil.getClassOfType(assetDataType.get());
} else {
throw new IllegalArgumentException("Asset class must have a bound AssetData parameter - " + assetClass);
}
}
/**
* Closes the AssetType, disposing all assets, closing the producers and preventing further asset creation.
*/
@Override
public synchronized void close() {
if (!closed) {
closed = true;
disposeAll();
clearProducers();
}
}
/**
* Disposes any assets queued for disposal. This occurs if an asset is no longer referenced by anything.
*/
@SuppressWarnings("unchecked")
public void processDisposal() {
Reference<? extends Asset<U>> ref = disposalQueue.poll();
while (ref != null) {
AssetReference<? extends Asset<U>> assetRef = (AssetReference<? extends Asset<U>>) ref;
assetRef.dispose();
references.remove(assetRef);
ref = disposalQueue.poll();
}
}
    /**
     * @return Whether the AssetType is closed. A closed AssetType cannot create assets.
     */
    public synchronized boolean isClosed() {
        return closed;
    }
/**
* Disposes all assets of this type.
*/
public synchronized void disposeAll() {
loadedAssets.values().forEach(T::dispose);
for (WeakReference<T> assetRef : ImmutableList.copyOf(instanceAssets.values())) {
T asset = assetRef.get();
if (asset != null) {
asset.dispose();
}
}
processDisposal();
if (!loadedAssets.isEmpty()) {
logger.error("Assets remained loaded after disposal - {}", loadedAssets.keySet());
loadedAssets.clear();
}
if (!instanceAssets.isEmpty()) {
logger.error("Asset instances remained loaded after disposal - {}", instanceAssets.keySet());
instanceAssets.clear();
}
}
    /**
     * Refreshes the AssetType. All loaded assets that are provided by the producers are reloaded, all other assets are disposed. Asset instances are reloaded with
     * the data of their parents or disposed along with them.
     * <p>
     * This method is useful when switching contexts (such as changing module environment)
     * </p>
     */
    public void refresh() {
        if (!closed) {
            for (T asset : loadedAssets.values()) {
                // Dispose the asset (and its instances) when its urn now redirects elsewhere,
                // or when no producer can supply data to reload it.
                if (!followRedirects(asset.getUrn()).equals(asset.getUrn()) || !reloadFromProducers(asset)) {
                    asset.dispose();
                    // Snapshot the instance list before disposing - disposal mutates instanceAssets.
                    for (WeakReference<T> instanceRef : ImmutableList.copyOf(instanceAssets.get(asset.getUrn().getInstanceUrn()))) {
                        T instance = instanceRef.get();
                        if (instance != null) {
                            instance.dispose();
                        }
                    }
                }
            }
        }
    }
    /**
     * @return The class of Asset managed by this AssetType.
     */
    public Class<T> getAssetClass() {
        return assetClass;
    }
    /**
     * @return The class of AssetData used to generate the Assets managed by this AssetType,
     * as derived from the Asset class's generic binding in the constructor.
     */
    public Class<U> getAssetDataClass() {
        return assetDataClass;
    }
    /**
     * By default a simple strategy is used returns the context module if it is one of the options, and all the options otherwise.
     *
     * @param strategy The strategy used to filter modules during partial urn resolution. Must not be null.
     */
    public void setResolutionStrategy(ResolutionStrategy strategy) {
        this.resolutionStrategy = strategy;
    }
    /**
     * Adds an AssetDataProducer for generating assets of for this AssetType
     *
     * @param producer The producer to add. Silently ignored if the AssetType is already closed.
     */
    public synchronized void addProducer(AssetDataProducer<U> producer) {
        if (!closed) {
            producers.add(producer);
        }
    }
    /**
     * @return An unmodifiable list of all the AssetDataProducers
     */
    public List<AssetDataProducer<U>> getProducers() {
        return Collections.unmodifiableList(producers);
    }
    /**
     * @param producer The producer to remove;
     * @return Whether the producer was removed
     */
    public synchronized boolean removeProducer(AssetDataProducer<U> producer) {
        return producers.remove(producer);
    }
    /**
     * Removes all the AssetDataProducers
     */
    public synchronized void clearProducers() {
        producers.clear();
    }
/**
* Obtains an asset by urn, loading it if necessary. If the urn is a instance urn, then a new asset will be created from the parent asset.
*
* @param urn The urn of the resource to get
* @return The asset if available
*/
public Optional<T> getAsset(ResourceUrn urn) {
Preconditions.checkNotNull(urn);
if (urn.isInstance()) {
return getInstanceAsset(urn);
} else {
return getNormalAsset(urn);
}
}
/**
* Notifies the asset type when an asset is disposed
*
* @param asset The asset that was disposed.
*/
void onAssetDisposed(Asset<U> asset) {
if (asset.getUrn().isInstance()) {
instanceAssets.get(asset.getUrn()).remove(new WeakReference<>(assetClass.cast(asset)));
} else {
loadedAssets.remove(asset.getUrn());
}
}
    /**
     * Notifies the asset type when an asset is created, registering it so it can be tracked and
     * queued for disposal when it becomes unreachable.
     *
     * @param asset    The asset that was created
     * @param disposer The hook to run when the asset is disposed
     * @throws IllegalStateException if the AssetType has been closed
     */
    synchronized void registerAsset(Asset<U> asset, DisposalHook disposer) {
        if (closed) {
            throw new IllegalStateException("Cannot create asset for disposed asset type: " + assetClass);
        } else {
            // Instances are held weakly (collectable); normal assets are held strongly until disposed.
            if (asset.getUrn().isInstance()) {
                instanceAssets.put(asset.getUrn(), new WeakReference<>(assetClass.cast(asset)));
            } else {
                loadedAssets.put(asset.getUrn(), assetClass.cast(asset));
            }
            // The AssetReference enqueues on disposalQueue once the asset is garbage collected.
            references.add(new AssetReference<>(asset, disposalQueue, disposer));
        }
    }
/**
* Creates and returns an instance of an asset, if possible. The following methods are used to create the copy, in order, with the first technique to succeeed used:
* <ol>
* <li>Delegate to the parent asset to create the copy</li>
* <li>Loads the AssetData of the parent asset and create a new instance from that</li>
* </ol>
*
* @param urn The urn of the asset to create an instance of
* @return An instance of the desired asset
*/
@SuppressWarnings("unchecked")
public Optional<T> getInstanceAsset(ResourceUrn urn) {
Optional<? extends T> parentAsset = getAsset(urn.getParentUrn());
if (parentAsset.isPresent()) {
return createInstance(parentAsset.get());
} else {
return Optional.empty();
}
}
/**
* Creates an instance of the given asset
*
* @param asset The asset to create an instance of
* @return The new instance, or {@link Optional#empty} if it could not be created
*/
Optional<T> createInstance(Asset<U> asset) {
Preconditions.checkArgument(assetClass.isAssignableFrom(asset.getClass()));
Optional<? extends Asset<U>> result = asset.createCopy(asset.getUrn().getInstanceUrn());
if (!result.isPresent()) {
try {
return AccessController.doPrivileged((PrivilegedExceptionAction<Optional<T>>) () -> {
for (AssetDataProducer<U> producer : producers) {
Optional<U> data = producer.getAssetData(asset.getUrn());
if (data.isPresent()) {
return Optional.of(loadAsset(asset.getUrn().getInstanceUrn(), data.get()));
}
}
return Optional.ofNullable(assetClass.cast(result.get()));
});
} catch (PrivilegedActionException e) {
logger.error("Failed to load asset '" + asset.getUrn().getInstanceUrn() + "'", e.getCause());
}
}
return Optional.ofNullable(assetClass.cast(result.get()));
}
    /**
     * Forces a reload of an asset from a data producer, if possible. The resource urn must not be an instance urn (it doesn't make sense to reload an instance by urn).
     * If there is no available source for the asset (it has no producer) then it will not be reloaded.
     *
     * @param urn The urn of the resource to reload. Must not be an instance urn.
     * @return The asset if it exists (regardless of whether it was reloaded or not)
     * @throws IllegalArgumentException if urn is an instance urn
     */
    public Optional<T> reload(ResourceUrn urn) {
        Preconditions.checkArgument(!urn.isInstance(), "Cannot reload an asset instance urn");
        // Resolve redirects once so the produced data and cache key agree.
        ResourceUrn redirectUrn = followRedirects(urn);
        try {
            // Producer access may touch module sandboxed code - run privileged.
            return AccessController.doPrivileged((PrivilegedExceptionAction<Optional<T>>) () -> {
                for (AssetDataProducer<U> producer : producers) {
                    Optional<U> data = producer.getAssetData(redirectUrn);
                    if (data.isPresent()) {
                        return Optional.of(loadAsset(redirectUrn, data.get()));
                    }
                }
                // No producer has data - fall back to whatever is already loaded (may be absent).
                return Optional.ofNullable(loadedAssets.get(redirectUrn));
            });
        } catch (PrivilegedActionException e) {
            // Distinguish direct failures from failures behind a redirect for diagnosability.
            if (redirectUrn.equals(urn)) {
                logger.error("Failed to load asset '{}'", redirectUrn, e.getCause());
            } else {
                logger.error("Failed to load asset '{}' redirected from '{}'", redirectUrn, urn, e.getCause());
            }
        }
        return Optional.empty();
    }
/**
* Obtains a non-instance asset
*
* @param urn The urn of the asset
* @return The asset if available
*/
private Optional<T> getNormalAsset(ResourceUrn urn) {
ResourceUrn redirectUrn = followRedirects(urn);
T asset = loadedAssets.get(redirectUrn);
if (asset == null) {
return reload(redirectUrn);
}
return Optional.ofNullable(asset);
}
/**
* Follows any redirects to determine the actual resource urn to use for a given urn
*
* @param urn The urn to resolve redirects for
* @return The final urn to use
*/
private ResourceUrn followRedirects(ResourceUrn urn) {
ResourceUrn lastUrn;
ResourceUrn finalUrn = urn;
do {
lastUrn = finalUrn;
for (AssetDataProducer<U> producer : producers) {
finalUrn = producer.redirect(finalUrn);
}
} while (!lastUrn.equals(finalUrn));
return finalUrn;
}
    /**
     * Obtains an asset from a string that may be a full or partial urn, resolved without any
     * module context (equivalent to {@code getAsset(urn, Name.EMPTY)}).
     *
     * @param urn The full or partial urn of the asset
     * @return The requested asset if the urn was successfully resolved
     */
    public Optional<T> getAsset(String urn) {
        return getAsset(urn, Name.EMPTY);
    }
/**
* Obtains an asset from a string that may be a full or partial urn
*
* @param urn The full or partial urn of the asset
* @param moduleContext The context to resolve the urn in
* @return The requested asset if the urn was successfully resolved
*/
public Optional<T> getAsset(String urn, Name moduleContext) {
Set<ResourceUrn> resolvedUrns = resolve(urn, moduleContext);
if (resolvedUrns.size() == 1) {
return getAsset(resolvedUrns.iterator().next());
} else if (resolvedUrns.size() > 1) {
logger.warn("Failed to resolve asset '{}' - multiple possibilities discovered", urn);
} else {
logger.warn("Failed to resolve asset '{}' - no matches found", urn);
}
return Optional.empty();
}
    /**
     * Resolves a string urn that may be a full or partial urn, providing the available urns that match.
     * Resolution happens without a module context (equivalent to {@code resolve(urn, Name.EMPTY)}).
     *
     * @param urn The string to resolve
     * @return A set of possible matching urns
     */
    public Set<ResourceUrn> resolve(String urn) {
        return resolve(urn, Name.EMPTY);
    }
/**
* Resolves a string urn that may be a full or partial urn, providing the available urns that match
*
* @param urn The string to resolve
* @param moduleContext The context to resolve within
* @return A set of possible matching urns
*/
public Set<ResourceUrn> resolve(String urn, Name moduleContext) {
if (ResourceUrn.isValid(urn)) {
return ImmutableSet.of(new ResourceUrn(urn));
}
String urnToResolve = urn;
final boolean instance = urn.endsWith(ResourceUrn.INSTANCE_INDICATOR);
if (instance) {
urnToResolve = urn.substring(0, urn.length() - ResourceUrn.INSTANCE_INDICATOR.length());
}
int fragmentSeparatorIndex = urnToResolve.indexOf('#');
final Name fragmentName;
final Name resourceName;
if (fragmentSeparatorIndex != -1) {
resourceName = new Name(urnToResolve.substring(0, fragmentSeparatorIndex));
fragmentName = new Name(urnToResolve.substring(fragmentSeparatorIndex + 1));
} else {
resourceName = new Name(urnToResolve);
fragmentName = Name.EMPTY;
}
Set<Name> possibleModules = Sets.newLinkedHashSet();
for (AssetDataProducer<U> producer : producers) {
possibleModules.addAll(producer.getModulesProviding(resourceName));
}
if (!moduleContext.isEmpty()) {
possibleModules = resolutionStrategy.resolve(possibleModules, moduleContext);
}
return Sets.newLinkedHashSet(Collections2.transform(possibleModules, new Function<Name, ResourceUrn>() {
@Nullable
@Override
public ResourceUrn apply(Name input) {
return new ResourceUrn(input, resourceName, fragmentName, instance);
}
}));
}
    /**
     * Reloads an asset from the current producers, if one of them can produce it. The first producer
     * with data wins; the same data is pushed into the asset and every live instance of it.
     *
     * @param asset The asset to reload
     * @return Whether the asset was reloaded
     */
    private boolean reloadFromProducers(Asset<U> asset) {
        try {
            for (AssetDataProducer<U> producer : producers) {
                Optional<U> data = producer.getAssetData(asset.getUrn());
                if (data.isPresent()) {
                    asset.reload(data.get());
                    // Propagate the fresh data to any live instance assets of this urn.
                    for (WeakReference<T> assetInstanceRef : instanceAssets.get(asset.getUrn().getInstanceUrn())) {
                        T assetInstance = assetInstanceRef.get();
                        if (assetInstance != null) {
                            assetInstance.reload(data.get());
                        }
                    }
                    return true;
                }
            }
        } catch (IOException e) {
            // Treated as "not reloadable"; the caller (refresh) disposes the asset in that case.
            logger.error("Failed to reload asset '{}', disposing", asset.getUrn());
        }
        return false;
    }
/**
 * Loads an asset with the given urn and data. If the asset already exists, it is reloaded with the data instead
 *
 * @param urn The urn of the asset
 * @param data The data to load the asset with
 * @return The loaded (or reloaded) asset. May be null if this type is closed, or if the loading
 *         thread was interrupted while awaiting the per-resource lock
 */
public T loadAsset(ResourceUrn urn, U data) {
    if (urn.isInstance()) {
        // Instance assets are never cached - build a fresh one each time.
        return factory.build(urn, this, data);
    } else {
        T asset = loadedAssets.get(urn);
        if (asset != null) {
            asset.reload(data);
        } else {
            // Per-urn lock so two threads loading the same asset do not build it twice.
            ResourceLock lock;
            synchronized (locks) {
                lock = locks.computeIfAbsent(urn, k -> new ResourceLock(urn));
            }
            try {
                lock.lock();
                if (!closed) {
                    // Re-check under the lock: another thread may have loaded the asset while we waited.
                    asset = loadedAssets.get(urn);
                    if (asset == null) {
                        asset = factory.build(urn, this, data);
                    } else {
                        asset.reload(data);
                    }
                }
                synchronized (locks) {
                    // Remove the lock entry only once no other thread is queued on it,
                    // so waiters never lose the lock object they are blocked on.
                    if (lock.unlock()) {
                        locks.remove(urn);
                    }
                }
            } catch (InterruptedException e) {
                logger.error("Failed to load asset - interrupted awaiting lock on resource {}", urn);
            }
        }
        return asset;
    }
}
/**
 * Checks whether a (non-instance) asset with the given urn is currently loaded.
 *
 * @param urn The urn of the asset to check. Must not be an instance urn
 * @return Whether an asset is loaded with the given urn
 * @throws IllegalArgumentException if the urn is an instance urn
 */
public boolean isLoaded(ResourceUrn urn) {
    // Equivalent to Preconditions.checkArgument: same exception type and message.
    if (urn.isInstance()) {
        throw new IllegalArgumentException("Urn must not be an instance urn");
    }
    return loadedAssets.containsKey(urn);
}
/**
 * @return An immutable snapshot of the urns of all the loaded assets.
 */
public Set<ResourceUrn> getLoadedAssetUrns() {
    // Copy so the result is stable even if assets load/unload afterwards.
    return ImmutableSet.copyOf(loadedAssets.keySet());
}

/**
 * @return An immutable snapshot of all the loaded assets.
 */
public Set<T> getLoadedAssets() {
    return ImmutableSet.copyOf(loadedAssets.values());
}
/**
 * Collects the urns of all loaded assets together with everything the registered
 * producers report as available.
 *
 * @return A set of the urns of all the loaded assets and all the assets available from producers
 */
public Set<ResourceUrn> getAvailableAssetUrns() {
    // Insertion-ordered: loaded assets first, then producer-supplied urns.
    Set<ResourceUrn> result = Sets.newLinkedHashSet();
    result.addAll(getLoadedAssetUrns());
    for (AssetDataProducer<U> producer : producers) {
        result.addAll(producer.getAvailableAssetUrns());
    }
    return result;
}
@Override
public boolean equals(Object obj) {
    // Two AssetTypes are equal iff they describe the same asset class.
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof AssetType)) {
        return false;
    }
    return assetClass.equals(((AssetType) obj).assetClass);
}
@Override
public int hashCode() {
    // Consistent with equals: both are based solely on assetClass.
    return assetClass.hashCode();
}

@Override
public String toString() {
    return assetClass.getSimpleName();
}
/**
 * A single-permit lock guarding the load of one resource urn.
 */
private static final class ResourceLock {
    // Urn this lock guards; retained only for the toString diagnostic.
    private final ResourceUrn urn;
    // Binary semaphore: at most one loader at a time per resource.
    private final Semaphore semaphore = new Semaphore(1);

    public ResourceLock(ResourceUrn urn) {
        this.urn = urn;
    }

    // Blocks until this resource's permit becomes available.
    public void lock() throws InterruptedException {
        semaphore.acquire();
    }

    // Releases the permit. Returns true when no other thread was queued at release time,
    // signalling the caller may discard this lock.
    // NOTE(review): the queued-threads check and the release are not atomic; this appears to
    // rely on callers serialising unlock/removal under the shared 'locks' map monitor - confirm.
    public boolean unlock() {
        boolean lockFinished = !semaphore.hasQueuedThreads();
        semaphore.release();
        return lockFinished;
    }

    @Override
    public String toString() {
        return "lock(" + urn + ")";
    }
}
/**
 * Phantom reference pairing an asset with its disposal hook, so the hook can be run
 * after the asset becomes unreachable (the referent itself is no longer retrievable).
 */
private static final class AssetReference<T> extends PhantomReference<T> {
    // Hook invoked to release the asset's underlying resources.
    private final DisposalHook disposalHook;

    public AssetReference(T asset, ReferenceQueue<T> queue, DisposalHook hook) {
        super(asset, queue);
        this.disposalHook = hook;
    }

    public void dispose() {
        disposalHook.dispose();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.producer.internals;
import org.apache.kafka.clients.ApiVersions;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.metrics.Measurable;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Meter;
import org.apache.kafka.common.record.AbstractRecords;
import org.apache.kafka.common.record.CompressionRatioEstimator;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.record.RecordBatch;
import org.apache.kafka.common.record.MemoryRecords;
import org.apache.kafka.common.record.MemoryRecordsBuilder;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.utils.CopyOnWriteMap;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
/**
* This class acts as a queue that accumulates records into {@link MemoryRecords}
* instances to be sent to the server.
* <p>
* The accumulator uses a bounded amount of memory and append calls will block when that memory is exhausted, unless
* this behavior is explicitly disabled.
*/
public final class RecordAccumulator {
    private final Logger log;
    // Once set, no further appends are accepted (re-checked under each deque's lock).
    private volatile boolean closed;
    private final AtomicInteger flushesInProgress;
    // Count of threads currently inside append(); used by abortIncompleteBatches()
    // to avoid missing batches appended concurrently with an abort.
    private final AtomicInteger appendsInProgress;
    private final int batchSize;
    private final CompressionType compression;
    private final long lingerMs;
    private final long retryBackoffMs;
    // Bounded memory pool backing batch buffers; allocation may block when exhausted.
    private final BufferPool free;
    private final Time time;
    private final ApiVersions apiVersions;
    // Per-partition queues of in-progress batches. The map is copy-on-write so reads are
    // lock-free; each individual deque is guarded by synchronizing on the deque itself.
    private final ConcurrentMap<TopicPartition, Deque<ProducerBatch>> batches;
    // Batches that have been created but not yet completed (acked, failed or aborted).
    private final IncompleteBatches incomplete;
    // The following variables are only accessed by the sender thread, so we don't need to protect them.
    private final Set<TopicPartition> muted;
    private int drainIndex;
    // Non-null only when idempotence/transactions are enabled.
    private final TransactionManager transactionManager;
/**
 * Create a new record accumulator
 *
 * @param logContext The log context used for logging
 * @param batchSize The size to use when allocating {@link MemoryRecords} instances
 * @param totalSize The maximum memory the record accumulator can use.
 * @param compression The compression codec for the records
 * @param lingerMs An artificial delay time to add before declaring a records instance that isn't full ready for
 *        sending. This allows time for more records to arrive. Setting a non-zero lingerMs will trade off some
 *        latency for potentially better throughput due to more batching (and hence fewer, larger requests).
 * @param retryBackoffMs An artificial delay time to retry the produce request upon receiving an error. This avoids
 *        exhausting all retries in a short period of time.
 * @param metrics The metrics
 * @param time The time instance to use
 * @param apiVersions Request API versions for current connected brokers
 * @param transactionManager The shared transaction state object which tracks producer IDs, epochs, and sequence
 *                           numbers per partition.
 */
public RecordAccumulator(LogContext logContext,
                         int batchSize,
                         long totalSize,
                         CompressionType compression,
                         long lingerMs,
                         long retryBackoffMs,
                         Metrics metrics,
                         Time time,
                         ApiVersions apiVersions,
                         TransactionManager transactionManager) {
    this.log = logContext.logger(RecordAccumulator.class);
    this.drainIndex = 0;
    this.closed = false;
    this.flushesInProgress = new AtomicInteger(0);
    this.appendsInProgress = new AtomicInteger(0);
    this.batchSize = batchSize;
    this.compression = compression;
    this.lingerMs = lingerMs;
    this.retryBackoffMs = retryBackoffMs;
    this.batches = new CopyOnWriteMap<>();
    String metricGrpName = "producer-metrics";
    // The buffer pool enforces the totalSize bound and recycles batchSize-sized buffers.
    this.free = new BufferPool(totalSize, batchSize, metrics, time, metricGrpName);
    this.incomplete = new IncompleteBatches();
    this.muted = new HashSet<>();
    this.time = time;
    this.apiVersions = apiVersions;
    this.transactionManager = transactionManager;
    registerMetrics(metrics, metricGrpName);
}
// Registers gauges for buffer-pool state and a meter for records dropped on buffer exhaustion.
private void registerMetrics(Metrics metrics, String metricGrpName) {
    MetricName metricName = metrics.metricName("waiting-threads", metricGrpName, "The number of user threads blocked waiting for buffer memory to enqueue their records");
    // Gauge: threads currently blocked in BufferPool.allocate().
    Measurable waitingThreads = new Measurable() {
        public double measure(MetricConfig config, long now) {
            return free.queued();
        }
    };
    metrics.addMetric(metricName, waitingThreads);

    metricName = metrics.metricName("buffer-total-bytes", metricGrpName, "The maximum amount of buffer memory the client can use (whether or not it is currently used).");
    // Gauge: configured total pool capacity.
    Measurable totalBytes = new Measurable() {
        public double measure(MetricConfig config, long now) {
            return free.totalMemory();
        }
    };
    metrics.addMetric(metricName, totalBytes);

    metricName = metrics.metricName("buffer-available-bytes", metricGrpName, "The total amount of buffer memory that is not being used (either unallocated or in the free list).");
    // Gauge: memory still available for new batches.
    Measurable availableBytes = new Measurable() {
        public double measure(MetricConfig config, long now) {
            return free.availableMemory();
        }
    };
    metrics.addMetric(metricName, availableBytes);

    Sensor bufferExhaustedRecordSensor = metrics.sensor("buffer-exhausted-records");
    MetricName rateMetricName = metrics.metricName("buffer-exhausted-rate", metricGrpName, "The average per-second number of record sends that are dropped due to buffer exhaustion");
    MetricName totalMetricName = metrics.metricName("buffer-exhausted-total", metricGrpName, "The total number of record sends that are dropped due to buffer exhaustion");
    bufferExhaustedRecordSensor.add(new Meter(rateMetricName, totalMetricName));
}
/**
 * Add a record to the accumulator, return the append result
 * <p>
 * The append result will contain the future metadata, and flag for whether the appended batch is full or a new batch is created
 * <p>
 *
 * @param tp The topic/partition to which this record is being sent
 * @param timestamp The timestamp of the record
 * @param key The key for the record
 * @param value The value for the record
 * @param headers the Headers for the record
 * @param callback The user-supplied callback to execute when the request is complete
 * @param maxTimeToBlock The maximum time in milliseconds to block for buffer memory to be available
 * @throws InterruptedException if interrupted while blocked waiting for buffer memory
 */
public RecordAppendResult append(TopicPartition tp,
                                 long timestamp,
                                 byte[] key,
                                 byte[] value,
                                 Header[] headers,
                                 Callback callback,
                                 long maxTimeToBlock) throws InterruptedException {
    // We keep track of the number of appending thread to make sure we do not miss batches in
    // abortIncompleteBatches().
    appendsInProgress.incrementAndGet();
    ByteBuffer buffer = null;
    if (headers == null) headers = Record.EMPTY_HEADERS;
    try {
        // check if we have an in-progress batch
        Deque<ProducerBatch> dq = getOrCreateDeque(tp);
        synchronized (dq) {
            if (closed)
                throw new IllegalStateException("Cannot send after the producer is closed.");
            RecordAppendResult appendResult = tryAppend(timestamp, key, value, headers, callback, dq);
            if (appendResult != null)
                return appendResult;
        }
        // we don't have an in-progress record batch try to allocate a new batch.
        // Allocation happens OUTSIDE the deque lock because it may block for buffer memory.
        byte maxUsableMagic = apiVersions.maxUsableProduceMagic();
        int size = Math.max(this.batchSize, AbstractRecords.estimateSizeInBytesUpperBound(maxUsableMagic, compression, key, value, headers));
        log.trace("Allocating a new {} byte message buffer for topic {} partition {}", size, tp.topic(), tp.partition());
        buffer = free.allocate(size, maxTimeToBlock);
        synchronized (dq) {
            // Need to check if producer is closed again after grabbing the dequeue lock.
            if (closed)
                throw new IllegalStateException("Cannot send after the producer is closed.");
            RecordAppendResult appendResult = tryAppend(timestamp, key, value, headers, callback, dq);
            if (appendResult != null) {
                // Somebody else found us a batch, return the one we waited for! Hopefully this doesn't happen often...
                return appendResult;
            }
            MemoryRecordsBuilder recordsBuilder = recordsBuilder(buffer, maxUsableMagic);
            ProducerBatch batch = new ProducerBatch(tp, recordsBuilder, time.milliseconds());
            FutureRecordMetadata future = Utils.notNull(batch.tryAppend(timestamp, key, value, headers, callback, time.milliseconds()));
            dq.addLast(batch);
            incomplete.add(batch);
            // Don't deallocate this buffer in the finally block as it's being used in the record batch
            buffer = null;
            return new RecordAppendResult(future, dq.size() > 1 || batch.isFull(), true);
        }
    } finally {
        // buffer is non-null only if we allocated one and did NOT hand it to a new batch.
        if (buffer != null)
            free.deallocate(buffer);
        appendsInProgress.decrementAndGet();
    }
}
// Creates a MemoryRecordsBuilder over the given buffer, rejecting pre-v2 message formats
// when idempotence/transactions are enabled (sequence numbers require magic v2).
private MemoryRecordsBuilder recordsBuilder(ByteBuffer buffer, byte maxUsableMagic) {
    if (transactionManager != null && maxUsableMagic < RecordBatch.MAGIC_VALUE_V2) {
        throw new UnsupportedVersionException("Attempting to use idempotence with a broker which does not " +
            "support the required message format (v2). The broker must be version 0.11 or later.");
    }
    return MemoryRecords.builder(buffer, maxUsableMagic, compression, TimestampType.CREATE_TIME, 0L);
}
/**
 * Try to append to a ProducerBatch.
 *
 * If it is full, we return null and a new batch is created. We also close the batch for record appends to free up
 * resources like compression buffers. The batch will be fully closed (ie. the record batch headers will be written
 * and memory records built) in one of the following cases (whichever comes first): right before send,
 * if it is expired, or when the producer is closed.
 */
private RecordAppendResult tryAppend(long timestamp, byte[] key, byte[] value, Header[] headers,
                                     Callback callback, Deque<ProducerBatch> deque) {
    ProducerBatch tail = deque.peekLast();
    if (tail == null)
        return null;
    FutureRecordMetadata future = tail.tryAppend(timestamp, key, value, headers, callback, time.milliseconds());
    if (future == null) {
        // No room left in the last batch: seal it for appends so compression buffers are freed.
        tail.closeForRecordAppends();
        return null;
    }
    return new RecordAppendResult(future, deque.size() > 1 || tail.isFull(), false);
}
/**
 * Get a list of batches which have been sitting in the accumulator too long and need to be expired.
 *
 * @param requestTimeout the configured request timeout in milliseconds
 * @param now the current time in milliseconds
 * @return the batches removed from their deques; callers invoke their callbacks and deallocate them
 */
public List<ProducerBatch> expiredBatches(int requestTimeout, long now) {
    List<ProducerBatch> expiredBatches = new ArrayList<>();
    for (Map.Entry<TopicPartition, Deque<ProducerBatch>> entry : this.batches.entrySet()) {
        Deque<ProducerBatch> dq = entry.getValue();
        TopicPartition tp = entry.getKey();
        // We only check if the batch should be expired if the partition does not have a batch in flight.
        // This is to prevent later batches from being expired while an earlier batch is still in progress.
        // Note that `muted` is only ever populated if `max.in.flight.request.per.connection=1` so this protection
        // is only active in this case. Otherwise the expiration order is not guaranteed.
        if (!muted.contains(tp)) {
            synchronized (dq) {
                // iterate over the batches and expire them if they have been in the accumulator for more than requestTimeOut
                ProducerBatch lastBatch = dq.peekLast();
                Iterator<ProducerBatch> batchIterator = dq.iterator();
                while (batchIterator.hasNext()) {
                    ProducerBatch batch = batchIterator.next();
                    // The last batch may still be open for appends; all earlier batches count as full.
                    boolean isFull = batch != lastBatch || batch.isFull();
                    // Check if the batch has expired. Expired batches are closed by maybeExpire, but callbacks
                    // are invoked after completing the iterations, since sends invoked from callbacks
                    // may append more batches to the deque being iterated. The batch is deallocated after
                    // callbacks are invoked.
                    if (batch.maybeExpire(requestTimeout, retryBackoffMs, now, this.lingerMs, isFull)) {
                        expiredBatches.add(batch);
                        // Safe removal during iteration: must go through the iterator.
                        batchIterator.remove();
                    } else {
                        // Stop at the first batch that has not expired.
                        break;
                    }
                }
            }
        }
    }
    return expiredBatches;
}
/**
 * Re-enqueue the given record batch in the accumulator to retry
 */
public void reenqueue(ProducerBatch batch, long now) {
    batch.reenqueued(now);
    Deque<ProducerBatch> partitionQueue = getOrCreateDeque(batch.topicPartition);
    synchronized (partitionQueue) {
        if (transactionManager == null) {
            // No idempotence: a retried batch simply jumps to the front of the queue.
            partitionQueue.addFirst(batch);
        } else {
            // With idempotence enabled the queue must stay ordered by sequence number.
            insertInSequenceOrder(partitionQueue, batch);
        }
    }
}
/**
 * Split the big batch that has been rejected and reenqueue the split batches in to the accumulator.
 *
 * @param bigBatch the batch rejected by the broker as too large
 * @return the number of split batches.
 */
public int splitAndReenqueue(ProducerBatch bigBatch) {
    // Reset the estimated compression ratio to the initial value or the big batch compression ratio, whichever
    // is bigger. There are several different ways to do the reset. We chose the most conservative one to ensure
    // the split doesn't happen too often.
    CompressionRatioEstimator.setEstimation(bigBatch.topicPartition.topic(), compression,
                                            Math.max(1.0f, (float) bigBatch.compressionRatio()));
    Deque<ProducerBatch> dq = bigBatch.split(this.batchSize);
    int numSplitBatches = dq.size();
    Deque<ProducerBatch> partitionDequeue = getOrCreateDeque(bigBatch.topicPartition);
    // Re-enqueue from the tail of the split so the earliest split batch ends up at the
    // front of the partition queue.
    while (!dq.isEmpty()) {
        ProducerBatch batch = dq.pollLast();
        incomplete.add(batch);
        // We treat the newly split batches as if they are not even tried.
        synchronized (partitionDequeue) {
            if (transactionManager != null) {
                // We should track the newly created batches since they already have assigned sequences.
                transactionManager.addInFlightBatch(batch);
                insertInSequenceOrder(partitionDequeue, batch);
            } else {
                partitionDequeue.addFirst(batch);
            }
        }
    }
    return numSplitBatches;
}
// We will have to do extra work to ensure the queue is in order when requests are being retried and there are
// multiple requests in flight to that partition. If the first inflight request fails to append, then all the subsequent
// in flight requests will also fail because the sequence numbers will not be accepted.
//
// Further, once batches are being retried, we are reduced to a single in flight request for that partition. So when
// the subsequent batches come back in sequence order, they will have to be placed further back in the queue.
//
// Note that this assumes that all the batches in the queue which have an assigned sequence also have the current
// producer id. We will not attempt to reorder messages if the producer id has changed, we will throw an
// IllegalStateException instead.
private void insertInSequenceOrder(Deque<ProducerBatch> deque, ProducerBatch batch) {
    // When we are requeing and have enabled idempotence, the reenqueued batch must always have a sequence.
    if (batch.baseSequence() == RecordBatch.NO_SEQUENCE)
        throw new IllegalStateException("Trying to reenqueue a batch which doesn't have a sequence even " +
            "though idempotence is enabled.");

    if (transactionManager.nextBatchBySequence(batch.topicPartition) == null)
        throw new IllegalStateException("We are reenqueueing a batch which is not tracked as part of the in flight " +
            "requests. batch.topicPartition: " + batch.topicPartition + "; batch.baseSequence: " + batch.baseSequence());

    ProducerBatch firstBatchInQueue = deque.peekFirst();
    if (firstBatchInQueue != null && firstBatchInQueue.hasSequence() && firstBatchInQueue.baseSequence() < batch.baseSequence()) {
        // The incoming batch can't be inserted at the front of the queue without violating the sequence ordering.
        // This means that the incoming batch should be placed somewhere further back.
        // We need to find the right place for the incoming batch and insert it there.
        // We will only enter this branch if we have multiple inflights sent to different brokers and we need to retry
        // the inflight batches.
        //
        // Since we reenqueue exactly one batch a time and ensure that the queue is ordered by sequence always, it
        // is a simple linear scan of a subset of the in flight batches to find the right place in the queue each time.
        List<ProducerBatch> orderedBatches = new ArrayList<>();
        // Pull off every leading batch whose sequence is smaller than the incoming one.
        while (deque.peekFirst() != null && deque.peekFirst().hasSequence() && deque.peekFirst().baseSequence() < batch.baseSequence())
            orderedBatches.add(deque.pollFirst());

        log.debug("Reordered incoming batch with sequence {} for partition {}. It was placed in the queue at " +
            "position {}", batch.baseSequence(), batch.topicPartition, orderedBatches.size());
        // Either we have reached a point where there are batches without a sequence (ie. never been drained
        // and are hence in order by default), or the batch at the front of the queue has a sequence greater
        // than the incoming batch. This is the right place to add the incoming batch.
        deque.addFirst(batch);

        // Now we have to re insert the previously queued batches in the right order.
        for (int i = orderedBatches.size() - 1; i >= 0; --i) {
            deque.addFirst(orderedBatches.get(i));
        }

        // At this point, the incoming batch has been queued in the correct place according to its sequence.
    } else {
        deque.addFirst(batch);
    }
}
/**
 * Get a list of nodes whose partitions are ready to be sent, and the earliest time at which any non-sendable
 * partition will be ready; Also return the flag for whether there are any unknown leaders for the accumulated
 * partition batches.
 * <p>
 * A destination node is ready to send data if:
 * <ol>
 * <li>There is at least one partition that is not backing off its send
 * <li><b>and</b> those partitions are not muted (to prevent reordering if
 *   {@value org.apache.kafka.clients.producer.ProducerConfig#MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION}
 *   is set to one)</li>
 * <li><b>and <i>any</i></b> of the following are true</li>
 * <ul>
 *     <li>The record set is full</li>
 *     <li>The record set has sat in the accumulator for at least lingerMs milliseconds</li>
 *     <li>The accumulator is out of memory and threads are blocking waiting for data (in this case all partitions
 *     are immediately considered ready).</li>
 *     <li>The accumulator has been closed</li>
 * </ul>
 * </ol>
 */
public ReadyCheckResult ready(Cluster cluster, long nowMs) {
    Set<Node> readyNodes = new HashSet<>();
    long nextReadyCheckDelayMs = Long.MAX_VALUE;
    Set<String> unknownLeaderTopics = new HashSet<>();

    // Threads queued on the buffer pool mean memory is exhausted: everything becomes sendable.
    boolean exhausted = this.free.queued() > 0;
    for (Map.Entry<TopicPartition, Deque<ProducerBatch>> entry : this.batches.entrySet()) {
        TopicPartition part = entry.getKey();
        Deque<ProducerBatch> deque = entry.getValue();

        Node leader = cluster.leaderFor(part);
        synchronized (deque) {
            if (leader == null && !deque.isEmpty()) {
                // This is a partition for which leader is not known, but messages are available to send.
                // Note that entries are currently not removed from batches when deque is empty.
                unknownLeaderTopics.add(part.topic());
            } else if (!readyNodes.contains(leader) && !muted.contains(part)) {
                ProducerBatch batch = deque.peekFirst();
                if (batch != null) {
                    long waitedTimeMs = batch.waitedTimeMs(nowMs);
                    boolean backingOff = batch.attempts() > 0 && waitedTimeMs < retryBackoffMs;
                    // Retries wait for the backoff; fresh batches wait for linger.
                    long timeToWaitMs = backingOff ? retryBackoffMs : lingerMs;
                    boolean full = deque.size() > 1 || batch.isFull();
                    boolean expired = waitedTimeMs >= timeToWaitMs;
                    boolean sendable = full || expired || exhausted || closed || flushInProgress();
                    if (sendable && !backingOff) {
                        readyNodes.add(leader);
                    } else {
                        long timeLeftMs = Math.max(timeToWaitMs - waitedTimeMs, 0);
                        // Note that this results in a conservative estimate since an un-sendable partition may have
                        // a leader that will later be found to have sendable data. However, this is good enough
                        // since we'll just wake up and then sleep again for the remaining time.
                        nextReadyCheckDelayMs = Math.min(timeLeftMs, nextReadyCheckDelayMs);
                    }
                }
            }
        }
    }

    return new ReadyCheckResult(readyNodes, nextReadyCheckDelayMs, unknownLeaderTopics);
}
/**
 * Check whether there are any batches which haven't been drained
 */
public boolean hasUndrained() {
    // Only the queues matter here, so iterate the map's values directly.
    for (Deque<ProducerBatch> deque : this.batches.values()) {
        synchronized (deque) {
            if (!deque.isEmpty())
                return true;
        }
    }
    return false;
}
/**
 * Drain all the data for the given nodes and collate them into a list of batches that will fit within the specified
 * size on a per-node basis. This method attempts to avoid choosing the same topic-node over and over.
 *
 * @param cluster The current cluster metadata
 * @param nodes The list of node to drain
 * @param maxSize The maximum number of bytes to drain
 * @param now The current unix time in milliseconds
 * @return A list of {@link ProducerBatch} for each node specified with total size less than the requested maxSize.
 */
public Map<Integer, List<ProducerBatch>> drain(Cluster cluster,
                                               Set<Node> nodes,
                                               int maxSize,
                                               long now) {
    if (nodes.isEmpty())
        return Collections.emptyMap();

    Map<Integer, List<ProducerBatch>> batches = new HashMap<>();
    for (Node node : nodes) {
        int size = 0;
        List<PartitionInfo> parts = cluster.partitionsForNode(node.id());
        List<ProducerBatch> ready = new ArrayList<>();
        /* to make starvation less likely this loop doesn't start at 0 */
        int start = drainIndex = drainIndex % parts.size();
        do {
            PartitionInfo part = parts.get(drainIndex);
            TopicPartition tp = new TopicPartition(part.topic(), part.partition());
            // Only proceed if the partition has no in-flight batches.
            if (!muted.contains(tp)) {
                Deque<ProducerBatch> deque = getDeque(tp);
                if (deque != null) {
                    synchronized (deque) {
                        ProducerBatch first = deque.peekFirst();
                        if (first != null) {
                            boolean backoff = first.attempts() > 0 && first.waitedTimeMs(now) < retryBackoffMs;
                            // Only drain the batch if it is not during backoff period.
                            if (!backoff) {
                                if (size + first.estimatedSizeInBytes() > maxSize && !ready.isEmpty()) {
                                    // there is a rare case that a single batch size is larger than the request size due
                                    // to compression; in this case we will still eventually send this batch in a single
                                    // request
                                    break;
                                } else {
                                    ProducerIdAndEpoch producerIdAndEpoch = null;
                                    boolean isTransactional = false;
                                    if (transactionManager != null) {
                                        if (!transactionManager.isSendToPartitionAllowed(tp))
                                            break;

                                        producerIdAndEpoch = transactionManager.producerIdAndEpoch();
                                        if (!producerIdAndEpoch.isValid())
                                            // we cannot send the batch until we have refreshed the producer id
                                            break;

                                        isTransactional = transactionManager.isTransactional();

                                        if (!first.hasSequence() && transactionManager.hasUnresolvedSequence(first.topicPartition))
                                            // Don't drain any new batches while the state of previous sequence numbers
                                            // is unknown. The previous batches would be unknown if they were aborted
                                            // on the client after being sent to the broker at least once.
                                            break;

                                        int firstInFlightSequence = transactionManager.firstInFlightSequence(first.topicPartition);
                                        if (firstInFlightSequence != RecordBatch.NO_SEQUENCE && first.hasSequence()
                                                && first.baseSequence() != firstInFlightSequence)
                                            // If the queued batch already has an assigned sequence, then it is being
                                            // retried. In this case, we wait until the next immediate batch is ready
                                            // and drain that. We only move on when the next in line batch is complete (either successfully
                                            // or due to a fatal broker error). This effectively reduces our
                                            // in flight request count to 1.
                                            break;
                                    }

                                    ProducerBatch batch = deque.pollFirst();
                                    if (producerIdAndEpoch != null && !batch.hasSequence()) {
                                        // If the batch already has an assigned sequence, then we should not change the producer id and
                                        // sequence number, since this may introduce duplicates. In particular,
                                        // the previous attempt may actually have been accepted, and if we change
                                        // the producer id and sequence here, this attempt will also be accepted,
                                        // causing a duplicate.
                                        //
                                        // Additionally, we update the next sequence number bound for the partition,
                                        // and also have the transaction manager track the batch so as to ensure
                                        // that sequence ordering is maintained even if we receive out of order
                                        // responses.
                                        batch.setProducerState(producerIdAndEpoch, transactionManager.sequenceNumber(batch.topicPartition), isTransactional);
                                        transactionManager.incrementSequenceNumber(batch.topicPartition, batch.recordCount);
                                        log.debug("Assigned producerId {} and producerEpoch {} to batch with base sequence " +
                                                "{} being sent to partition {}", producerIdAndEpoch.producerId,
                                            producerIdAndEpoch.epoch, batch.baseSequence(), tp);

                                        transactionManager.addInFlightBatch(batch);
                                    }
                                    // Seal the batch: no further appends, headers written on records().
                                    batch.close();
                                    size += batch.records().sizeInBytes();
                                    ready.add(batch);
                                    batch.drained(now);
                                }
                            }
                        }
                    }
                }
            }
            // Advance round-robin position so the next drain starts at a different partition.
            this.drainIndex = (this.drainIndex + 1) % parts.size();
        } while (start != drainIndex);
        batches.put(node.id(), ready);
    }
    return batches;
}
// Returns the deque for the given topic-partition, or null if none has been created yet.
private Deque<ProducerBatch> getDeque(TopicPartition tp) {
    return batches.get(tp);
}
/**
 * Get the deque for the given topic-partition, creating it if necessary.
 */
private Deque<ProducerBatch> getOrCreateDeque(TopicPartition tp) {
    Deque<ProducerBatch> existing = this.batches.get(tp);
    if (existing == null) {
        // Race with concurrent creators via putIfAbsent; keep whichever deque won.
        Deque<ProducerBatch> created = new ArrayDeque<>();
        existing = this.batches.putIfAbsent(tp, created);
        if (existing == null)
            existing = created;
    }
    return existing;
}
/**
 * Deallocate the record batch
 *
 * @param batch the completed or aborted batch whose buffer should be returned to the pool
 */
public void deallocate(ProducerBatch batch) {
    incomplete.remove(batch);
    // Only deallocate the batch if it is not a split batch because split batch are allocated outside the
    // buffer pool.
    if (!batch.isSplitBatch())
        free.deallocate(batch.buffer(), batch.initialCapacity());
}
/**
 * Package private for unit test. Get the buffer pool remaining size in bytes.
 */
long bufferPoolAvailableMemory() {
    return free.availableMemory();
}
/**
 * Are there any threads currently waiting on a flush?
 *
 * package private for test
 */
boolean flushInProgress() {
    return flushesInProgress.get() > 0;
}

/* Visible for testing */
Map<TopicPartition, Deque<ProducerBatch>> batches() {
    // Read-only view; the underlying copy-on-write map may still change beneath it.
    return Collections.unmodifiableMap(batches);
}
/**
 * Initiate the flushing of data from the accumulator...this makes all requests immediately ready
 * (see the flushInProgress() check in ready()). Must be balanced by awaitFlushCompletion(),
 * which decrements the counter.
 */
public void beginFlush() {
    this.flushesInProgress.getAndIncrement();
}

/**
 * Are there any threads currently appending messages?
 */
private boolean appendsInProgress() {
    return appendsInProgress.get() > 0;
}
/**
 * Mark all partitions as ready to send and block until the send is complete
 *
 * @throws InterruptedException if interrupted while waiting on a batch's produce future
 */
public void awaitFlushCompletion() throws InterruptedException {
    try {
        // Snapshot (copyAll) so batches completed/removed during the wait don't disturb iteration.
        for (ProducerBatch batch : this.incomplete.copyAll())
            batch.produceFuture.await();
    } finally {
        // Always balance the increment performed in beginFlush(), even on interrupt.
        this.flushesInProgress.decrementAndGet();
    }
}

/**
 * Check whether there are any pending batches (whether sent or unsent).
 */
public boolean hasIncomplete() {
    return !this.incomplete.isEmpty();
}
/**
 * This function is only called when sender is closed forcefully. It will fail all the
 * incomplete batches and return.
 */
public void abortIncompleteBatches() {
    // We need to keep aborting the incomplete batch until no thread is trying to append to
    // 1. Avoid losing batches.
    // 2. Free up memory in case appending threads are blocked on buffer full.
    // This is a tight loop but should be able to get through very quickly.
    do {
        abortBatches();
    } while (appendsInProgress());
    // After this point, no thread will append any messages because they will see the close
    // flag set. We need to do the last abort after no thread was appending in case there was a new
    // batch appended by the last appending thread.
    abortBatches();
    this.batches.clear();
}

/**
 * Go through incomplete batches and abort them.
 */
private void abortBatches() {
    abortBatches(new IllegalStateException("Producer is closed forcefully."));
}
/**
 * Abort all incomplete batches (whether they have been sent or not)
 *
 * @param reason the exception used to fail each batch's future and callbacks
 */
void abortBatches(final RuntimeException reason) {
    for (ProducerBatch batch : incomplete.copyAll()) {
        Deque<ProducerBatch> dq = getDeque(batch.topicPartition);
        // Close the batch and unlink it from its deque under the deque lock...
        synchronized (dq) {
            batch.abortRecordAppends();
            dq.remove(batch);
        }
        // ...but run the failure callbacks and free the buffer outside the lock.
        batch.abort(reason);
        deallocate(batch);
    }
}
/**
 * Abort any batches which have not been drained
 *
 * @param reason exception each aborted batch is failed with
 */
void abortUndrainedBatches(RuntimeException reason) {
    for (ProducerBatch batch : incomplete.copyAll()) {
        Deque<ProducerBatch> dq = getDeque(batch.topicPartition);
        boolean aborted = false;
        synchronized (dq) {
            // Transactional mode: a batch without a sequence number has not
            // been drained (presumably sequences are assigned on drain --
            // confirm at the drain call site). Non-transactional mode: an
            // open (not yet closed) batch has not been drained.
            if ((transactionManager != null && !batch.hasSequence()) || (transactionManager == null && !batch.isClosed())) {
                aborted = true;
                batch.abortRecordAppends();
                dq.remove(batch);
            }
        }
        if (aborted) {
            // Fail and free the batch outside the deque lock.
            batch.abort(reason);
            deallocate(batch);
        }
    }
}
/**
 * Adds the partition to the muted set. The policy for skipping muted
 * partitions is decided by callers -- confirm at the drain/ready call sites.
 */
public void mutePartition(TopicPartition tp) {
    muted.add(tp);
}
/**
 * Removes the partition from the muted set; inverse of {@link #mutePartition}.
 */
public void unmutePartition(TopicPartition tp) {
    muted.remove(tp);
}
/**
 * Close this accumulator and force all the record buffers to be drained
 */
public void close() {
    // Appender threads observe this flag and stop adding records; see the
    // comment in abortIncompleteBatches().
    this.closed = true;
}
/*
 * Metadata about a record just appended to the record accumulator
 */
public final static class RecordAppendResult {
    // Completion handle for the appended record's metadata.
    public final FutureRecordMetadata future;
    // Whether the batch that received the record is full (or a new batch was
    // required) -- presumably used to wake the sender; confirm at call sites.
    public final boolean batchIsFull;
    // Whether this append caused a new batch to be created.
    public final boolean newBatchCreated;
    public RecordAppendResult(FutureRecordMetadata future, boolean batchIsFull, boolean newBatchCreated) {
        this.future = future;
        this.batchIsFull = batchIsFull;
        this.newBatchCreated = newBatchCreated;
    }
}
/**
 * Result of a readiness check: the nodes that have at least one complete
 * record batch in the accumulator, plus bookkeeping for the next check.
 */
public static final class ReadyCheckResult {
    /** Nodes with at least one complete batch ready to send. */
    public final Set<Node> readyNodes;
    /** Delay before the next readiness check, in milliseconds. */
    public final long nextReadyCheckDelayMs;
    /** Topics whose leader is currently unknown. */
    public final Set<String> unknownLeaderTopics;

    public ReadyCheckResult(Set<Node> readyNodes, long nextReadyCheckDelayMs, Set<String> unknownLeaderTopics) {
        this.readyNodes = readyNodes;
        this.nextReadyCheckDelayMs = nextReadyCheckDelayMs;
        this.unknownLeaderTopics = unknownLeaderTopics;
    }
}
}
| |
package org.spongycastle.jcajce.provider.asymmetric.rsa;
import java.io.ByteArrayOutputStream;
import java.security.AlgorithmParameters;
import java.security.InvalidKeyException;
import java.security.InvalidParameterException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.SignatureException;
import java.security.SignatureSpi;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.AlgorithmParameterSpec;
import java.security.spec.MGF1ParameterSpec;
import java.security.spec.PSSParameterSpec;
import org.spongycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.spongycastle.crypto.AsymmetricBlockCipher;
import org.spongycastle.crypto.CryptoException;
import org.spongycastle.crypto.Digest;
import org.spongycastle.crypto.engines.RSABlindedEngine;
import org.spongycastle.crypto.params.ParametersWithRandom;
import org.spongycastle.jcajce.provider.util.DigestFactory;
import org.spongycastle.jcajce.util.BCJcaJceHelper;
import org.spongycastle.jcajce.util.JcaJceHelper;
import org.spongycastle.jce.provider.BouncyCastleProvider;
/**
 * JCA {@link SignatureSpi} implementation for RSASSA-PSS (PKCS#1 v2.1).
 *
 * <p>The content digest, MGF1 digest, salt length and trailer field are taken
 * from the active {@link PSSParameterSpec}. In "raw" mode the caller supplies
 * an already-computed digest, and the content digest becomes a pass-through
 * buffer ({@link NullPssDigest}).</p>
 */
public class PSSSignatureSpi
    extends SignatureSpi
{
    private final JcaJceHelper helper = new BCJcaJceHelper();

    private AlgorithmParameters engineParams;
    private PSSParameterSpec paramSpec;
    // Spec fixed at construction time, or null when any digest is acceptable.
    private PSSParameterSpec originalSpec;
    private AsymmetricBlockCipher signer;
    private Digest contentDigest;
    private Digest mgfDigest;
    private int saltLength;
    private byte trailer;
    // true when the caller feeds in a pre-hashed message (the nonePSS variant).
    private boolean isRaw;

    private org.spongycastle.crypto.signers.PSSSigner pss;

    /**
     * Maps the JCA trailer-field code to the PSS trailer byte.
     *
     * @throws IllegalArgumentException if the field is not 1 (the only value
     *         defined by PKCS#1, the implicit 0xBC trailer).
     */
    private byte getTrailer(
        int trailerField)
    {
        if (trailerField == 1)
        {
            return org.spongycastle.crypto.signers.PSSSigner.TRAILER_IMPLICIT;
        }

        throw new IllegalArgumentException("unknown trailer field");
    }

    /** Selects the content digest: a buffering pass-through in raw mode, otherwise the MGF digest. */
    private void setupContentDigest()
    {
        if (isRaw)
        {
            this.contentDigest = new NullPssDigest(mgfDigest);
        }
        else
        {
            this.contentDigest = mgfDigest;
        }
    }

    // care - this constructor is actually used by outside organisations
    protected PSSSignatureSpi(
        AsymmetricBlockCipher signer,
        PSSParameterSpec paramSpecArg)
    {
        this(signer, paramSpecArg, false);
    }

    // care - this constructor is actually used by outside organisations
    protected PSSSignatureSpi(
        AsymmetricBlockCipher signer,
        PSSParameterSpec baseParamSpec,
        boolean isRaw)
    {
        this.signer = signer;
        this.originalSpec = baseParamSpec;

        if (baseParamSpec == null)
        {
            this.paramSpec = PSSParameterSpec.DEFAULT;
        }
        else
        {
            this.paramSpec = baseParamSpec;
        }

        this.mgfDigest = DigestFactory.getDigest(paramSpec.getDigestAlgorithm());
        this.saltLength = paramSpec.getSaltLength();
        this.trailer = getTrailer(paramSpec.getTrailerField());
        this.isRaw = isRaw;

        setupContentDigest();
    }

    protected void engineInitVerify(
        PublicKey publicKey)
        throws InvalidKeyException
    {
        if (!(publicKey instanceof RSAPublicKey))
        {
            throw new InvalidKeyException("Supplied key is not a RSAPublicKey instance");
        }

        pss = new org.spongycastle.crypto.signers.PSSSigner(signer, contentDigest, mgfDigest, saltLength, trailer);
        pss.init(false,
            RSAUtil.generatePublicKeyParameter((RSAPublicKey)publicKey));
    }

    protected void engineInitSign(
        PrivateKey privateKey,
        SecureRandom random)
        throws InvalidKeyException
    {
        if (!(privateKey instanceof RSAPrivateKey))
        {
            throw new InvalidKeyException("Supplied key is not a RSAPrivateKey instance");
        }

        pss = new org.spongycastle.crypto.signers.PSSSigner(signer, contentDigest, mgfDigest, saltLength, trailer);
        pss.init(true, new ParametersWithRandom(RSAUtil.generatePrivateKeyParameter((RSAPrivateKey)privateKey), random));
    }

    protected void engineInitSign(
        PrivateKey privateKey)
        throws InvalidKeyException
    {
        if (!(privateKey instanceof RSAPrivateKey))
        {
            throw new InvalidKeyException("Supplied key is not a RSAPrivateKey instance");
        }

        pss = new org.spongycastle.crypto.signers.PSSSigner(signer, contentDigest, mgfDigest, saltLength, trailer);
        pss.init(true, RSAUtil.generatePrivateKeyParameter((RSAPrivateKey)privateKey));
    }

    protected void engineUpdate(
        byte    b)
        throws SignatureException
    {
        pss.update(b);
    }

    protected void engineUpdate(
        byte[]  b,
        int     off,
        int     len)
        throws SignatureException
    {
        pss.update(b, off, len);
    }

    protected byte[] engineSign()
        throws SignatureException
    {
        try
        {
            return pss.generateSignature();
        }
        catch (CryptoException e)
        {
            // Preserve the underlying cause instead of discarding it.
            throw new SignatureException(e.getMessage(), e);
        }
    }

    protected boolean engineVerify(
        byte[]  sigBytes)
        throws SignatureException
    {
        return pss.verifySignature(sigBytes);
    }

    /**
     * Applies a new {@link PSSParameterSpec}, validating that it is compatible
     * with any spec fixed at construction time and that the MGF is MGF1 with a
     * matching digest.
     *
     * @throws InvalidParameterException if the spec is not a usable PSS spec.
     */
    protected void engineSetParameter(
        AlgorithmParameterSpec params)
        throws InvalidParameterException
    {
        if (params instanceof PSSParameterSpec)
        {
            PSSParameterSpec newParamSpec = (PSSParameterSpec)params;

            if (originalSpec != null)
            {
                if (!DigestFactory.isSameDigest(originalSpec.getDigestAlgorithm(), newParamSpec.getDigestAlgorithm()))
                {
                    throw new InvalidParameterException("parameter must be using " + originalSpec.getDigestAlgorithm());
                }
            }
            if (!newParamSpec.getMGFAlgorithm().equalsIgnoreCase("MGF1") && !newParamSpec.getMGFAlgorithm().equals(PKCSObjectIdentifiers.id_mgf1.getId()))
            {
                throw new InvalidParameterException("unknown mask generation function specified");
            }

            if (!(newParamSpec.getMGFParameters() instanceof MGF1ParameterSpec))
            {
                // Fixed typo in the message ("unkown" -> "unknown").
                throw new InvalidParameterException("unknown MGF parameters");
            }

            MGF1ParameterSpec mgfParams = (MGF1ParameterSpec)newParamSpec.getMGFParameters();

            if (!DigestFactory.isSameDigest(mgfParams.getDigestAlgorithm(), newParamSpec.getDigestAlgorithm()))
            {
                throw new InvalidParameterException("digest algorithm for MGF should be the same as for PSS parameters.");
            }

            Digest newDigest = DigestFactory.getDigest(mgfParams.getDigestAlgorithm());

            if (newDigest == null)
            {
                throw new InvalidParameterException("no match on MGF digest algorithm: "+ mgfParams.getDigestAlgorithm());
            }

            // Invalidate any cached AlgorithmParameters and re-derive state.
            this.engineParams = null;
            this.paramSpec = newParamSpec;
            this.mgfDigest = newDigest;
            this.saltLength = paramSpec.getSaltLength();
            this.trailer = getTrailer(paramSpec.getTrailerField());

            setupContentDigest();
        }
        else
        {
            throw new InvalidParameterException("Only PSSParameterSpec supported");
        }
    }

    protected AlgorithmParameters engineGetParameters()
    {
        if (engineParams == null)
        {
            if (paramSpec != null)
            {
                try
                {
                    engineParams = helper.createAlgorithmParameters("PSS");
                    engineParams.init(paramSpec);
                }
                catch (Exception e)
                {
                    // Keep the original message but preserve the cause.
                    throw new RuntimeException(e.toString(), e);
                }
            }
        }

        return engineParams;
    }

    /**
     * @deprecated replaced with {@link #engineSetParameter(java.security.spec.AlgorithmParameterSpec)}
     */
    @Deprecated
    protected void engineSetParameter(
        String param,
        Object value)
    {
        throw new UnsupportedOperationException("engineSetParameter unsupported");
    }

    protected Object engineGetParameter(
        String param)
    {
        throw new UnsupportedOperationException("engineGetParameter unsupported");
    }

    /** Raw variant: caller supplies the digest (NONEwithRSAandMGF1-style). */
    public static class nonePSS
        extends PSSSignatureSpi
    {
        public nonePSS()
        {
            super(new RSABlindedEngine(), null, true);
        }
    }

    /** PSS with no fixed parameter spec; defaults apply until one is set. */
    public static class PSSwithRSA
        extends PSSSignatureSpi
    {
        public PSSwithRSA()
        {
            super(new RSABlindedEngine(), null);
        }
    }

    /** SHA-1 based PSS (the PSSParameterSpec.DEFAULT digest). */
    public static class SHA1withRSA
        extends PSSSignatureSpi
    {
        public SHA1withRSA()
        {
            super(new RSABlindedEngine(), PSSParameterSpec.DEFAULT);
        }
    }

    /** SHA-224 based PSS with a 28-byte salt. */
    public static class SHA224withRSA
        extends PSSSignatureSpi
    {
        public SHA224withRSA()
        {
            super(new RSABlindedEngine(), new PSSParameterSpec("SHA-224", "MGF1", new MGF1ParameterSpec("SHA-224"), 28, 1));
        }
    }

    /** SHA-256 based PSS with a 32-byte salt. */
    public static class SHA256withRSA
        extends PSSSignatureSpi
    {
        public SHA256withRSA()
        {
            super(new RSABlindedEngine(), new PSSParameterSpec("SHA-256", "MGF1", new MGF1ParameterSpec("SHA-256"), 32, 1));
        }
    }

    /** SHA-384 based PSS with a 48-byte salt. */
    public static class SHA384withRSA
        extends PSSSignatureSpi
    {
        public SHA384withRSA()
        {
            super(new RSABlindedEngine(), new PSSParameterSpec("SHA-384", "MGF1", new MGF1ParameterSpec("SHA-384"), 48, 1));
        }
    }

    /** SHA-512 based PSS with a 64-byte salt. */
    public static class SHA512withRSA
        extends PSSSignatureSpi
    {
        public SHA512withRSA()
        {
            super(new RSABlindedEngine(), new PSSParameterSpec("SHA-512", "MGF1", new MGF1ParameterSpec("SHA-512"), 64, 1));
        }
    }

    /**
     * Pass-through "digest" used in raw mode: buffers caller-supplied bytes.
     * Made static -- it never touches the enclosing instance, so the hidden
     * outer-class reference of an inner class is unnecessary.
     */
    private static class NullPssDigest
        implements Digest
    {
        private ByteArrayOutputStream bOut = new ByteArrayOutputStream();
        private Digest baseDigest;
        // Alternates per doFinal(): odd calls copy the buffered bytes through
        // unchanged; even calls hash them with the base digest. Presumably this
        // matches the two digest invocations PSSSigner makes per operation --
        // confirm against PSSSigner before changing.
        private boolean oddTime = true;

        public NullPssDigest(Digest mgfDigest)
        {
            this.baseDigest = mgfDigest;
        }

        public String getAlgorithmName()
        {
            return "NULL";
        }

        public int getDigestSize()
        {
            return baseDigest.getDigestSize();
        }

        public void update(byte in)
        {
            bOut.write(in);
        }

        public void update(byte[] in, int inOff, int len)
        {
            bOut.write(in, inOff, len);
        }

        public int doFinal(byte[] out, int outOff)
        {
            byte[] res = bOut.toByteArray();

            if (oddTime)
            {
                System.arraycopy(res, 0, out, outOff, res.length);
            }
            else
            {
                baseDigest.update(res, 0, res.length);

                baseDigest.doFinal(out, outOff);
            }

            reset();

            oddTime = !oddTime;

            // NOTE(review): returns the buffered length even on the hashing
            // branch -- looks intentional for the raw-mode contract; confirm.
            return res.length;
        }

        public void reset()
        {
            bOut.reset();
            baseDigest.reset();
        }

        public int getByteLength()
        {
            return 0;
        }
    }
}
| |
package com.metova.cappuccino;
import android.support.annotation.IdRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.test.espresso.Espresso;
import android.view.View;
import java.util.HashMap;
import java.util.Map;
/**
 * Static registry that associates {@link CappuccinoResourceWatcher}s and
 * {@link CappuccinoIdlingResource}s with names, plus convenience wrappers for
 * registering them with Espresso.
 *
 * <p>Backed by plain {@code HashMap}s, so registration/lookup is not
 * synchronized; registration is expected to happen from test setup code.</p>
 */
public class Cappuccino {

    // TODO find an elegant way to remove items from this map once they are no longer needed
    private static final Map<String, CappuccinoResourceWatcher> mResourceWatcherRegistry = new HashMap<>();
    private static final Map<String, CappuccinoIdlingResource> mIdlingResourceRegistry = new HashMap<>();

    private Cappuccino() {
        // satisfy checkstyle
    }

    /**
     * Returns a new {@code CappuccinoResourceWatcher}, registered under a name
     * derived from {@code resource} (see {@link #nameOf(Object)}).
     *
     * @param resource The object for which this {@link CappuccinoResourceWatcher} will be a member.
     * @return a new {@link CappuccinoResourceWatcher}.
     */
    @NonNull
    public static CappuccinoResourceWatcher newIdlingResourceWatcher(@NonNull Object resource) {
        return newIdlingResourceWatcher(nameOf(resource));
    }

    /**
     * Returns a new {@code CappuccinoResourceWatcher}, registered under the supplied name.
     * Any watcher previously registered under the same name is replaced.
     *
     * @param name The name of this {@link CappuccinoResourceWatcher}.
     * @return a new {@link CappuccinoResourceWatcher}.
     */
    @NonNull
    public static CappuccinoResourceWatcher newIdlingResourceWatcher(@NonNull String name) {
        CappuccinoResourceWatcher watcher = new CappuccinoResourceWatcher();
        mResourceWatcherRegistry.put(name, watcher);
        return watcher;
    }

    /**
     * Returns a name for the supplied {@code object}: its class's canonical
     * name, or the simple name if the canonical name is null (e.g. for
     * anonymous or local classes).
     *
     * @param object The {@code object} for which to generate a name.
     * @return a name for the supplied {@code object}.
     */
    @NonNull
    public static String nameOf(@NonNull Object object) {
        String name = object.getClass().getCanonicalName();
        return name != null ? name : object.getClass().getSimpleName();
    }

    /**
     * Returns the registered {@code CappuccinoResourceWatcher} associated with
     * the given {@code object} (keyed by {@link #nameOf(Object)}).
     *
     * @param object The object associated with the {@link CappuccinoResourceWatcher}.
     * @return the registered {@code CappuccinoResourceWatcher}.
     * @throws CappuccinoException if no watcher is registered for the object.
     */
    @NonNull
    public static CappuccinoResourceWatcher getResourceWatcher(@NonNull Object object) {
        return getResourceWatcher(nameOf(object));
    }

    /**
     * Returns the registered {@code CappuccinoResourceWatcher} associated with
     * the given {@code name}.
     *
     * @param name The name associated with the {@link CappuccinoResourceWatcher}.
     * @return the registered {@code CappuccinoResourceWatcher}.
     * @throws CappuccinoException if no watcher is registered under the name.
     */
    @NonNull
    public static CappuccinoResourceWatcher getResourceWatcher(@NonNull String name) {
        // Single lookup instead of containsKey()-then-get(): avoids a second
        // map access and a window in which the entry could disappear between
        // the check and the read.
        CappuccinoResourceWatcher watcher = mResourceWatcherRegistry.get(name);
        if (watcher == null) {
            throw new CappuccinoException(
                    String.format("There is no %s associated with the name `%s`", CappuccinoResourceWatcher.class.getSimpleName(), name));
        }
        return watcher;
    }

    /**
     * Marks the watcher keyed to {@code object} as busy. Equivalent to
     * {@code getResourceWatcher(object).busy()}.
     *
     * @param object The key of the {@code CappuccinoResourceWatcher} to mark busy.
     */
    public static void markAsBusy(@NonNull Object object) {
        markAsBusy(nameOf(object));
    }

    /**
     * Marks the watcher keyed to {@code name} as busy. Equivalent to
     * {@code getResourceWatcher(name).busy()}.
     *
     * @param name The key of the {@code CappuccinoResourceWatcher} to mark busy.
     */
    public static void markAsBusy(@NonNull String name) {
        getResourceWatcher(name).busy();
    }

    /**
     * Marks the watcher keyed to {@code object} as idle. Equivalent to
     * {@code getResourceWatcher(object).idle()}.
     *
     * @param object The key of the {@code CappuccinoResourceWatcher} to mark idle.
     */
    public static void markAsIdle(@NonNull Object object) {
        markAsIdle(nameOf(object));
    }

    /**
     * Marks the watcher keyed to {@code name} as idle. Equivalent to
     * {@code getResourceWatcher(name).idle()}.
     *
     * @param name The key of the {@code CappuccinoResourceWatcher} to mark idle.
     */
    public static void markAsIdle(@NonNull String name) {
        getResourceWatcher(name).idle();
    }

    /**
     * Throws {@link CappuccinoException} if no {@link CappuccinoResourceWatcher}
     * has yet been associated with {@code name}.
     *
     * @param name The name associated with the {@link CappuccinoResourceWatcher}.
     * @throws CappuccinoException if no watcher is registered under the name.
     */
    private static void throwIfAbsent(@NonNull String name) {
        if (!mResourceWatcherRegistry.containsKey(name)) {
            throw new CappuccinoException(
                    String.format("There is no %s associated with the name `%s`", CappuccinoResourceWatcher.class.getSimpleName(), name));
        }
    }

    /**
     * Creates a {@link CappuccinoIdlingResource} for {@code object} and
     * registers it with Espresso.
     *
     * @param object The object from which to generate an {@code CappuccinoIdlingResource}.
     * @throws CappuccinoException if no watcher is registered for the object.
     */
    public static void registerIdlingResource(@NonNull Object object) {
        registerIdlingResource(nameOf(object));
    }

    /**
     * Creates a {@link CappuccinoIdlingResource} for {@code name} and registers
     * it with Espresso.
     *
     * @param name The name from which to generate an {@code CappuccinoIdlingResource}.
     * @throws CappuccinoException if no watcher is registered under the name.
     */
    public static void registerIdlingResource(@NonNull String name) {
        throwIfAbsent(name);
        CappuccinoIdlingResource idlingResource = new CappuccinoIdlingResource(name);
        mIdlingResourceRegistry.put(name, idlingResource);
        Espresso.registerIdlingResources(idlingResource);
    }

    /**
     * Unregisters the {@link CappuccinoIdlingResource} keyed to {@code object};
     * twin of {@link #registerIdlingResource(Object)}.
     *
     * @param object The object associated with the resource to unregister.
     * @throws CappuccinoException if no watcher is registered for the object.
     */
    public static void unregisterIdlingResource(@NonNull Object object) {
        unregisterIdlingResource(nameOf(object));
    }

    /**
     * Unregisters the {@link CappuccinoIdlingResource} keyed to {@code name};
     * twin of {@link #registerIdlingResource(String)}.
     *
     * @param name The name associated with the resource to unregister.
     * @throws CappuccinoException if no watcher is registered under the name.
     */
    public static void unregisterIdlingResource(@NonNull String name) {
        throwIfAbsent(name);
        // remove() + null guard: a watcher may exist without a registered
        // idling resource, and the previous code would then hand null to
        // Espresso.unregisterIdlingResources().
        CappuccinoIdlingResource idlingResource = mIdlingResourceRegistry.remove(name);
        if (idlingResource != null) {
            Espresso.unregisterIdlingResources(idlingResource);
        }
    }

    /**
     * Resets {@code Cappuccino}'s internal state, for use in a {@code tearDown()}-type method during testing.
     * This will also ensure that no {@code IdlingResource}s remain registered with Espresso.
     */
    public static void reset() {
        // TODO Is this necessary? My concern is a failing test that, because it fails, does not unregister a resource
        // TODO this will throw an NPE during a unit test
        // Espresso.unregisterIdlingResources((IdlingResource[]) Espresso.getIdlingResources().toArray());
        mResourceWatcherRegistry.clear();
        mIdlingResourceRegistry.clear();
    }

    /**
     * Sets a tag on a {@code View}, as a convenience for finding views with
     * {@code Espresso.onView(withTagValue(tag))}. The no-op flavor of this
     * library does nothing. See also {@link View#setTag(Object)}.
     *
     * @param view The {@code View} to tag.
     * @param tag  The tag.
     */
    public static void setTagForTesting(@NonNull View view, @Nullable Object tag) {
        view.setTag(tag);
    }

    /**
     * Sets a keyed tag on a {@code View}, as a convenience for finding views
     * with {@code Espresso.onView(withTagKey(key))}. The no-op flavor of this
     * library does nothing. See also {@link View#setTag(int, Object)}.
     *
     * @param view The {@code View} to tag.
     * @param key  The key.
     * @param tag  The tag. A default tag has been made available via {@code R.id.cappuccino_testing_tag}.
     */
    public static void setTagForTesting(@NonNull View view, @IdRes int key, @Nullable Object tag) {
        view.setTag(key, tag);
    }
}
| |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.stream.app.gpfdist.sink;
import com.codahale.metrics.Meter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.reactivestreams.Processor;
import org.springframework.cloud.stream.app.gpfdist.sink.support.GreenplumLoad;
import org.springframework.cloud.stream.app.gpfdist.sink.support.NetworkUtils;
import org.springframework.cloud.stream.app.gpfdist.sink.support.RuntimeContext;
import org.springframework.data.hadoop.util.net.HostInfoDiscovery;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHandlingException;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.util.StringUtils;
import org.springframework.util.concurrent.SettableListenableFuture;
import reactor.Environment;
import reactor.core.processor.RingBufferProcessor;
import reactor.io.buffer.Buffer;
import java.util.Date;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
/**
* Gpfdist related {@code MessageHandler}.
*
* @author Janne Valkealahti
*/
/**
 * Gpfdist related {@code MessageHandler}: feeds incoming String payloads into a
 * reactor RingBufferProcessor backing a gpfdist protocol server, and optionally
 * schedules a recurring Greenplum gpload task against that server.
 *
 * @author Janne Valkealahti
 */
public class GpfdistMessageHandler extends AbstractGpfdistMessageHandler {

    private final Log log = LogFactory.getLog(GpfdistMessageHandler.class);

    // gpfdist listener / batching settings, fixed at construction time.
    private final int port;
    private final int flushCount;
    private final int flushTime;
    private final int batchTimeout;
    private final int batchCount;
    // Seconds between gpload runs (used both for the sleep and the stop wait).
    private final int batchPeriod;
    // Appended to every payload when non-null; empty/null input disables it.
    private final String delimiter;

    private GreenplumLoad greenplumLoad;
    private Processor<Buffer, Buffer> processor;
    private GpfdistServer gpfdistServer;
    private TaskScheduler sqlTaskScheduler;
    // Completed by the background gpload loop; doStop() waits on it.
    private final TaskFuture taskFuture = new TaskFuture();
    // Throughput metering: meter is created only when rateInterval > 0.
    private int rateInterval = 0;
    private Meter meter = null;
    private int meterCount = 0;
    private final HostInfoDiscovery hostInfoDiscovery;

    /**
     * Instantiates a new gpfdist message handler.
     *
     * @param port the port
     * @param flushCount the flush count
     * @param flushTime the flush time
     * @param batchTimeout the batch timeout
     * @param batchCount the batch count
     * @param batchPeriod the batch period
     * @param delimiter the delimiter
     * @param hostInfoDiscovery the host info discovery
     */
    public GpfdistMessageHandler(int port, int flushCount, int flushTime, int batchTimeout, int batchCount,
            int batchPeriod, String delimiter, HostInfoDiscovery hostInfoDiscovery) {
        super();
        this.port = port;
        this.flushCount = flushCount;
        this.flushTime = flushTime;
        this.batchTimeout = batchTimeout;
        this.batchCount = batchCount;
        this.batchPeriod = batchPeriod;
        // Normalize empty delimiter to null so doWrite() can use a null check.
        this.delimiter = StringUtils.hasLength(delimiter) ? delimiter : null;
        this.hostInfoDiscovery = hostInfoDiscovery;
    }

    /**
     * Pushes a String payload (plus optional delimiter) into the processor.
     *
     * @throws MessageHandlingException if the payload is not a String
     */
    @Override
    protected void doWrite(Message<?> message) throws Exception {
        Object payload = message.getPayload();
        if (payload instanceof String) {
            String data = (String)payload;
            if (delimiter != null) {
                processor.onNext(Buffer.wrap(data+delimiter));
            } else {
                processor.onNext(Buffer.wrap(data));
            }
            if (meter != null) {
                // meter is only non-null when rateInterval > 0 (see
                // setRateInterval), so the modulo cannot divide by zero.
                // Marks rateInterval events once every rateInterval messages.
                if ((meterCount++ % rateInterval) == 0) {
                    meter.mark(rateInterval);
                    log.info("METER: 1 minute rate = " + meter.getOneMinuteRate() + " mean rate = " + meter.getMeanRate());
                }
            }
        } else {
            throw new MessageHandlingException(message, "message not a String");
        }
    }

    @Override
    protected void onInit() throws Exception {
        super.onInit();
        Environment.initializeIfEmpty().assignErrorJournal();
        // false: non-shared processor -- single subscriber (the gpfdist server).
        processor = RingBufferProcessor.create(false);
    }

    /**
     * Starts the gpfdist listener, and -- when a GreenplumLoad is configured --
     * schedules a background task that runs gpload every batchPeriod seconds
     * until doStop() flips the interrupted flag.
     */
    @Override
    protected void doStart() {
        try {
            log.info("Creating gpfdist protocol listener on port=" + port);
            gpfdistServer = new GpfdistServer(processor, port, flushCount, flushTime, batchTimeout, batchCount);
            gpfdistServer.start();
            // getLocalPort() is logged because the actual port may differ from
            // the configured one -- presumably when port selects an ephemeral
            // port; confirm in GpfdistServer.
            log.info("gpfdist protocol listener running on port=" + gpfdistServer.getLocalPort());
        } catch (Exception e) {
            throw new RuntimeException("Error starting protocol listener", e);
        }

        if (greenplumLoad != null) {
            log.info("Scheduling gpload task with batchPeriod=" + batchPeriod);

            final RuntimeContext context = new RuntimeContext(
                    NetworkUtils.getGPFDistUri(hostInfoDiscovery.getHostInfo().getAddress(), gpfdistServer.getLocalPort()));

            // Scheduled once (new Date() = now); the runnable itself loops.
            sqlTaskScheduler.schedule((new FutureTask<Void>(new Runnable() {

                @Override
                public void run() {
                    boolean taskValue = true;
                    try {
                        // Loop until doStop() sets taskFuture.interrupted.
                        while(!taskFuture.interrupted) {
                            try {
                                greenplumLoad.load(context);
                            } catch (Exception e) {
                                // Keep looping on load errors; only log them.
                                log.error("Error in load", e);
                            }
                            Thread.sleep(batchPeriod*1000);
                        }
                    } catch (Exception e) {
                        // e.g. InterruptedException from sleep: report failure
                        // through the future rather than rethrowing.
                        taskValue = false;
                    }
                    taskFuture.set(taskValue);
                }
            }, null)), new Date());
        } else {
            log.info("Skipping gpload tasks because greenplumLoad is not set");
        }
    }

    /**
     * Signals the gpload loop to stop, waits (bounded) for its result, then
     * completes the processor and shuts the gpfdist server down.
     */
    @Override
    protected void doStop() {
        if (greenplumLoad != null) {
            // Ask the background loop to exit on its next iteration check.
            taskFuture.interruptTask();
            try {
                long now = System.currentTimeMillis();
                // wait a bit more than batch period
                // NOTE(review): batchTimeout is treated as seconds here --
                // confirm the unit matches its use in GpfdistServer.
                Boolean value = taskFuture.get(batchTimeout + batchPeriod + 2, TimeUnit.SECONDS);
                log.info("Stopping, got future value " + value + " from task which took "
                        + (System.currentTimeMillis() - now) + "ms");
            } catch (Exception e) {
                log.warn("Got error from task wait value which may indicate trouble", e);
            }
        }

        try {
            processor.onComplete();
            gpfdistServer.stop();
        } catch (Exception e) {
            log.warn("Error shutting down protocol listener", e);
        }
    }

    /**
     * Sets the sql task scheduler.
     *
     * @param sqlTaskScheduler the new sql task scheduler
     */
    public void setSqlTaskScheduler(TaskScheduler sqlTaskScheduler) {
        this.sqlTaskScheduler = sqlTaskScheduler;
    }

    /**
     * Sets the greenplum load.
     *
     * @param greenplumLoad the new greenplum load
     */
    public void setGreenplumLoad(GreenplumLoad greenplumLoad) {
        this.greenplumLoad = greenplumLoad;
    }

    /**
     * Sets the rate interval; a positive value enables throughput metering.
     *
     * @param rateInterval the new rate interval
     */
    public void setRateInterval(int rateInterval) {
        this.rateInterval = rateInterval;
        if (rateInterval > 0) {
            meter = new Meter();
        }
    }

    // Future whose interruptTask() only raises a cooperative flag that the
    // gpload loop polls; it does not interrupt the running thread.
    private static class TaskFuture extends SettableListenableFuture<Boolean> {

        boolean interrupted = false;

        @Override
        protected void interruptTask() {
            interrupted = true;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.CloseIndexRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.DeleteAliasRequest;
import org.elasticsearch.client.indices.DeleteIndexTemplateV2Request;
import org.elasticsearch.client.indices.FreezeIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.client.indices.GetIndexTemplatesRequest;
import org.elasticsearch.client.indices.GetIndexTemplateV2Request;
import org.elasticsearch.client.indices.GetMappingsRequest;
import org.elasticsearch.client.indices.IndexTemplateV2ExistRequest;
import org.elasticsearch.client.indices.IndexTemplatesExistRequest;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.elasticsearch.client.indices.PutIndexTemplateV2Request;
import org.elasticsearch.client.indices.PutMappingRequest;
import org.elasticsearch.client.indices.ReloadAnalyzersRequest;
import org.elasticsearch.client.indices.ResizeRequest;
import org.elasticsearch.client.indices.UnfreezeIndexRequest;
import org.elasticsearch.client.indices.rollover.RolloverRequest;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import java.io.IOException;
import java.util.Locale;
final class IndicesRequestConverters {
private IndicesRequestConverters() {} // static utility holder; never instantiated
/**
 * Builds the REST request for the delete-index API
 * ({@code DELETE /<indices>}).
 */
static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) {
    Request request = new Request(HttpDelete.METHOD_NAME, RequestConverters.endpoint(deleteIndexRequest.indices()));

    RequestConverters.Params params = new RequestConverters.Params();
    params.withTimeout(deleteIndexRequest.timeout());
    params.withMasterTimeout(deleteIndexRequest.masterNodeTimeout());
    params.withIndicesOptions(deleteIndexRequest.indicesOptions());
    request.addParameters(params.asMap());
    return request;
}
/**
 * Builds the REST request for the open-index API
 * ({@code POST /<indices>/_open}).
 */
static Request openIndex(OpenIndexRequest openIndexRequest) {
    Request request = new Request(HttpPost.METHOD_NAME,
        RequestConverters.endpoint(openIndexRequest.indices(), "_open"));

    RequestConverters.Params params = new RequestConverters.Params();
    params.withTimeout(openIndexRequest.timeout());
    params.withMasterTimeout(openIndexRequest.masterNodeTimeout());
    params.withWaitForActiveShards(openIndexRequest.waitForActiveShards());
    params.withIndicesOptions(openIndexRequest.indicesOptions());
    request.addParameters(params.asMap());
    return request;
}
static Request closeIndex(CloseIndexRequest closeIndexRequest) {
String endpoint = RequestConverters.endpoint(closeIndexRequest.indices(), "_close");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(closeIndexRequest.timeout());
parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout());
parameters.withIndicesOptions(closeIndexRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request;
}
static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPart(createIndexRequest.index()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(createIndexRequest.timeout());
parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout());
parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, "/_aliases");
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(indicesAliasesRequest.timeout());
parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request putMapping(PutMappingRequest putMappingRequest) throws IOException {
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping"));
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(putMappingRequest.timeout());
parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request getMappings(GetMappingsRequest getMappingsRequest) {
String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices();
Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping"));
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout());
parameters.withIndicesOptions(getMappingsRequest.indicesOptions());
parameters.withLocal(getMappingsRequest.local());
request.addParameters(parameters.asMap());
return request;
}
static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) {
String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices();
String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields();
String endpoint = new RequestConverters.EndpointBuilder()
.addCommaSeparatedPathParts(indices)
.addPathPartAsIs("_mapping")
.addPathPartAsIs("field")
.addCommaSeparatedPathParts(fields)
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions());
parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults());
request.addParameters(parameters.asMap());
return request;
}
static Request refresh(RefreshRequest refreshRequest) {
String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh"));
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(refreshRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request;
}
static Request flush(FlushRequest flushRequest) {
String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush"));
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(flushRequest.indicesOptions());
parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
parameters.putParam("force", Boolean.toString(flushRequest.force()));
request.addParameters(parameters.asMap());
return request;
}
static Request forceMerge(ForceMergeRequest forceMergeRequest) {
String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge"));
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(forceMergeRequest.indicesOptions());
parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush()));
request.addParameters(parameters.asMap());
return request;
}
static Request clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest) {
String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices();
Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear"));
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions());
parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields()));
request.addParameters(parameters.asMap());
return request;
}
static Request existsAlias(GetAliasesRequest getAliasesRequest) {
if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
(getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) {
throw new IllegalArgumentException("existsAlias requires at least an alias or an index");
}
String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
Request request = new Request(HttpHead.METHOD_NAME, RequestConverters.endpoint(indices, "_alias", aliases));
RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getAliasesRequest.indicesOptions());
params.withLocal(getAliasesRequest.local());
request.addParameters(params.asMap());
return request;
}
static Request split(ResizeRequest resizeRequest) throws IOException {
if (IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.exists(resizeRequest.getSettings()) == false) {
throw new IllegalArgumentException("index.number_of_shards is required for split operations");
}
return resize(resizeRequest, ResizeType.SPLIT);
}
@Deprecated
static Request split(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException {
if (resizeRequest.getResizeType() != ResizeType.SPLIT) {
throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices split request");
}
return resize(resizeRequest);
}
static Request shrink(ResizeRequest resizeRequest) throws IOException {
return resize(resizeRequest, ResizeType.SHRINK);
}
@Deprecated
static Request shrink(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException {
if (resizeRequest.getResizeType() != ResizeType.SHRINK) {
throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices shrink request");
}
return resize(resizeRequest);
}
static Request clone(ResizeRequest resizeRequest) throws IOException {
return resize(resizeRequest, ResizeType.CLONE);
}
@Deprecated
static Request clone(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException {
if (resizeRequest.getResizeType() != ResizeType.CLONE) {
throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices clone request");
}
return resize(resizeRequest);
}
private static Request resize(ResizeRequest resizeRequest, ResizeType type) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex())
.addPathPartAsIs("_" + type.name().toLowerCase(Locale.ROOT))
.addPathPart(resizeRequest.getTargetIndex()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(resizeRequest.timeout());
params.withMasterTimeout(resizeRequest.masterNodeTimeout());
params.withWaitForActiveShards(resizeRequest.getWaitForActiveShards());
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
@Deprecated
private static Request resize(org.elasticsearch.action.admin.indices.shrink.ResizeRequest resizeRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex())
.addPathPartAsIs("_" + resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT))
.addPathPart(resizeRequest.getTargetIndexRequest().index()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(resizeRequest.timeout());
params.withMasterTimeout(resizeRequest.masterNodeTimeout());
params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards());
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request rollover(RolloverRequest rolloverRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName()).build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withTimeout(rolloverRequest.timeout());
params.withMasterTimeout(rolloverRequest.masterNodeTimeout());
params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards());
if (rolloverRequest.isDryRun()) {
params.putParam("dry_run", Boolean.TRUE.toString());
}
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request getSettings(GetSettingsRequest getSettingsRequest) {
String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices();
String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names();
String endpoint = RequestConverters.endpoint(indices, "_settings", names);
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getSettingsRequest.indicesOptions());
params.withLocal(getSettingsRequest.local());
params.withIncludeDefaults(getSettingsRequest.includeDefaults());
params.withMasterTimeout(getSettingsRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request getIndex(GetIndexRequest getIndexRequest) {
String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices();
String endpoint = RequestConverters.endpoint(indices);
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getIndexRequest.indicesOptions());
params.withLocal(getIndexRequest.local());
params.withIncludeDefaults(getIndexRequest.includeDefaults());
params.withHuman(getIndexRequest.humanReadable());
params.withMasterTimeout(getIndexRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request indicesExist(GetIndexRequest getIndexRequest) {
if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) {
throw new IllegalArgumentException("indices are mandatory");
}
String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), "");
Request request = new Request(HttpHead.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(getIndexRequest.local());
params.withHuman(getIndexRequest.humanReadable());
params.withIndicesOptions(getIndexRequest.indicesOptions());
params.withIncludeDefaults(getIndexRequest.includeDefaults());
request.addParameters(params.asMap());
return request;
}
static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) throws IOException {
String[] indices = updateSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : updateSettingsRequest.indices();
Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(indices, "_settings"));
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(updateSettingsRequest.timeout());
parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout());
parameters.withIndicesOptions(updateSettingsRequest.indicesOptions());
parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting());
request.addParameters(parameters.asMap());
request.setEntity(RequestConverters.createEntity(updateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template")
.addPathPart(putIndexTemplateRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
if (putIndexTemplateRequest.create()) {
params.putParam("create", Boolean.TRUE.toString());
}
if (Strings.hasText(putIndexTemplateRequest.cause())) {
params.putParam("cause", putIndexTemplateRequest.cause());
}
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request putIndexTemplate(PutIndexTemplateV2Request putIndexTemplateRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template")
.addPathPart(putIndexTemplateRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout());
if (putIndexTemplateRequest.create()) {
params.putParam("create", Boolean.TRUE.toString());
}
if (Strings.hasText(putIndexTemplateRequest.cause())) {
params.putParam("cause", putIndexTemplateRequest.cause());
}
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException {
String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices();
String endpoint = RequestConverters.endpoint(indices, "_validate/query");
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(validateQueryRequest.indicesOptions());
params.putParam("explain", Boolean.toString(validateQueryRequest.explain()));
params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards()));
params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
request.addParameters(params.asMap());
request.setEntity(RequestConverters.createEntity(validateQueryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
static Request getAlias(GetAliasesRequest getAliasesRequest) {
String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
String endpoint = RequestConverters.endpoint(indices, "_alias", aliases);
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withIndicesOptions(getAliasesRequest.indicesOptions());
params.withLocal(getAliasesRequest.local());
request.addParameters(params.asMap());
return request;
}
static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_template")
.addCommaSeparatedPathParts(getIndexTemplatesRequest.names())
.build();
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(getIndexTemplatesRequest.isLocal());
params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request getIndexTemplates(GetIndexTemplateV2Request getIndexTemplatesRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_index_template")
.addPathPart(getIndexTemplatesRequest.name())
.build();
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(getIndexTemplatesRequest.isLocal());
params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request templatesExist(IndexTemplatesExistRequest indexTemplatesExistRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_template")
.addCommaSeparatedPathParts(indexTemplatesExistRequest.names())
.build();
final Request request = new Request(HttpHead.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(indexTemplatesExistRequest.isLocal());
params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request templatesExist(IndexTemplateV2ExistRequest indexTemplatesExistRequest) {
final String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_index_template")
.addPathPart(indexTemplatesExistRequest.name())
.build();
final Request request = new Request(HttpHead.METHOD_NAME, endpoint);
final RequestConverters.Params params = new RequestConverters.Params();
params.withLocal(indexTemplatesExistRequest.isLocal());
params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request analyze(AnalyzeRequest request) throws IOException {
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder();
String index = request.index();
if (index != null) {
builder.addPathPart(index);
}
builder.addPathPartAsIs("_analyze");
Request req = new Request(HttpGet.METHOD_NAME, builder.build());
req.setEntity(RequestConverters.createEntity(request, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return req;
}
static Request freezeIndex(FreezeIndexRequest freezeIndexRequest) {
String endpoint = RequestConverters.endpoint(freezeIndexRequest.getIndices(), "_freeze");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(freezeIndexRequest.timeout());
parameters.withMasterTimeout(freezeIndexRequest.masterNodeTimeout());
parameters.withIndicesOptions(freezeIndexRequest.indicesOptions());
parameters.withWaitForActiveShards(freezeIndexRequest.getWaitForActiveShards());
request.addParameters(parameters.asMap());
return request;
}
static Request unfreezeIndex(UnfreezeIndexRequest unfreezeIndexRequest) {
String endpoint = RequestConverters.endpoint(unfreezeIndexRequest.getIndices(), "_unfreeze");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(unfreezeIndexRequest.timeout());
parameters.withMasterTimeout(unfreezeIndexRequest.masterNodeTimeout());
parameters.withIndicesOptions(unfreezeIndexRequest.indicesOptions());
parameters.withWaitForActiveShards(unfreezeIndexRequest.getWaitForActiveShards());
request.addParameters(parameters.asMap());
return request;
}
static Request deleteTemplate(DeleteIndexTemplateRequest deleteIndexTemplateRequest) {
String name = deleteIndexTemplateRequest.name();
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addPathPart(name).build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request deleteIndexTemplate(DeleteIndexTemplateV2Request deleteIndexTemplateRequest) {
String name = deleteIndexTemplateRequest.getName();
String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template").addPathPart(name).build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params();
params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout());
request.addParameters(params.asMap());
return request;
}
static Request reloadAnalyzers(ReloadAnalyzersRequest reloadAnalyzersRequest) {
String endpoint = RequestConverters.endpoint(reloadAnalyzersRequest.getIndices(), "_reload_search_analyzers");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(reloadAnalyzersRequest.indicesOptions());
request.addParameters(parameters.asMap());
return request;
}
static Request deleteAlias(DeleteAliasRequest deleteAliasRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPart(deleteAliasRequest.getIndex())
.addPathPartAsIs("_alias")
.addPathPart(deleteAliasRequest.getAlias()).build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withTimeout(deleteAliasRequest.timeout());
parameters.withMasterTimeout(deleteAliasRequest.masterNodeTimeout());
request.addParameters(parameters.asMap());
return request;
}
}
| |
/*************************************************************************************************************************************************
* Copyright (c) 2015, Nordic Semiconductor
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
************************************************************************************************************************************************/
package no.nordicsemi.android.dfu.internal;
import android.support.annotation.NonNull;
import com.google.gson.Gson;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import no.nordicsemi.android.dfu.DfuBaseService;
import no.nordicsemi.android.dfu.internal.manifest.FileInfo;
import no.nordicsemi.android.dfu.internal.manifest.Manifest;
import no.nordicsemi.android.dfu.internal.manifest.ManifestFile;
import no.nordicsemi.android.dfu.internal.manifest.SoftDeviceBootloaderFileInfo;
/**
 * <p>Reads the firmware files from a ZIP file. The ZIP file must be either created using the <b>nrf utility</b> tool, available together with Master Control Panel v3.8.0+,
 * or follow the backward compatibility syntax: must contain only files with names: application.hex/bin, softdevice.hex/bin or bootloader.hex/bin, optionally also application.dat
* and/or system.dat with init packets.</p>
* <p>The ArchiveInputStream will read only files with types specified by <b>types</b> parameter of the constructor.</p>
*/
public class ArchiveInputStream extends ZipInputStream {
/** The name of the manifest file is fixed. */
private static final String MANIFEST = "manifest.json";
// Those file names are for backwards compatibility mode
private static final String SOFTDEVICE_HEX = "softdevice.hex";
private static final String SOFTDEVICE_BIN = "softdevice.bin";
private static final String BOOTLOADER_HEX = "bootloader.hex";
private static final String BOOTLOADER_BIN = "bootloader.bin";
private static final String APPLICATION_HEX = "application.hex";
private static final String APPLICATION_BIN = "application.bin";
private static final String SYSTEM_INIT = "system.dat";
private static final String APPLICATION_INIT = "application.dat";
/** Contains bytes arrays with BIN files. HEX files are converted to BIN before being added to this map. */
private Map<String, byte[]> entries;
private Manifest manifest;
private CRC32 crc32;
private byte[] applicationBytes;
private byte[] softDeviceBytes;
private byte[] bootloaderBytes;
private byte[] softDeviceAndBootloaderBytes;
private byte[] systemInitBytes;
private byte[] applicationInitBytes;
private byte[] currentSource;
private int bytesReadFromCurrentSource;
private int softDeviceSize;
private int bootloaderSize;
private int applicationSize;
private int bytesRead;
private byte[] markedSource;
private int bytesReadFromMarkedSource;
/**
* <p>
* The ArchiveInputStream read HEX or BIN files from the Zip stream. It may skip some of them, depending on the value of the types parameter.
* This is useful if the DFU service wants to send the Soft Device and Bootloader only, and then the Application in the following connection, despite
* the ZIP file contains all 3 HEX/BIN files.
* When types is equal to {@link DfuBaseService#TYPE_AUTO} all present files are read.
* </p>
* <p>Use bit combination of the following types:</p>
* <ul>
* <li>{@link DfuBaseService#TYPE_SOFT_DEVICE}</li>
* <li>{@link DfuBaseService#TYPE_BOOTLOADER}</li>
* <li>{@link DfuBaseService#TYPE_APPLICATION}</li>
* <li>{@link DfuBaseService#TYPE_AUTO}</li>
* </ul>
*
* @param stream
* the Zip Input Stream
* @param mbrSize
* The size of the MRB segment (Master Boot Record) on the device. The parser will cut data from addresses below that number from all HEX files.
* @param types
* File types that are to be read from the ZIP. Use {@link DfuBaseService#TYPE_APPLICATION} etc.
* @throws java.io.IOException
*/
public ArchiveInputStream(final InputStream stream, final int mbrSize, final int types) throws IOException {
super(stream);
this.crc32 = new CRC32();
this.entries = new HashMap<>();
this.bytesRead = 0;
this.bytesReadFromCurrentSource = 0;
try {
/*
* This method reads all entries from the ZIP file and puts them to entries map.
* The 'manifest.json' file, if exists, is converted to the manifestData String.
*/
parseZip(mbrSize);
/*
* Let's read and parse the 'manifest.json' file.
*/
if (manifest != null) {
boolean valid = false;
// Read the application
if (manifest.getApplicationInfo() != null && (types == DfuBaseService.TYPE_AUTO || (types & DfuBaseService.TYPE_APPLICATION) > 0)) {
final FileInfo application = manifest.getApplicationInfo();
applicationBytes = entries.get(application.getBinFileName());
applicationInitBytes = entries.get(application.getDatFileName());
if (applicationBytes == null)
throw new IOException("Application file " + application.getBinFileName() + " not found.");
applicationSize = applicationBytes.length;
currentSource = applicationBytes;
valid = true;
}
// Read the Bootloader
if (manifest.getBootloaderInfo() != null && (types == DfuBaseService.TYPE_AUTO || (types & DfuBaseService.TYPE_BOOTLOADER) > 0)) {
if (systemInitBytes != null)
throw new IOException("Manifest: softdevice and bootloader specified. Use softdevice_bootloader instead.");
final FileInfo bootloader = manifest.getBootloaderInfo();
bootloaderBytes = entries.get(bootloader.getBinFileName());
systemInitBytes = entries.get(bootloader.getDatFileName());
if (bootloaderBytes == null)
throw new IOException("Bootloader file " + bootloader.getBinFileName() + " not found.");
bootloaderSize = bootloaderBytes.length;
currentSource = bootloaderBytes;
valid = true;
}
// Read the Soft Device
if (manifest.getSoftdeviceInfo() != null && (types == DfuBaseService.TYPE_AUTO || (types & DfuBaseService.TYPE_SOFT_DEVICE) > 0)) {
final FileInfo softdevice = manifest.getSoftdeviceInfo();
softDeviceBytes = entries.get(softdevice.getBinFileName());
systemInitBytes = entries.get(softdevice.getDatFileName());
if (softDeviceBytes == null)
throw new IOException("SoftDevice file " + softdevice.getBinFileName() + " not found.");
softDeviceSize = softDeviceBytes.length;
currentSource = softDeviceBytes;
valid = true;
}
// Read the combined Soft Device and Bootloader
if (manifest.getSoftdeviceBootloaderInfo() != null && (types == DfuBaseService.TYPE_AUTO ||
((types & DfuBaseService.TYPE_SOFT_DEVICE) > 0) && (types & DfuBaseService.TYPE_BOOTLOADER) > 0)) {
if (systemInitBytes != null)
throw new IOException("Manifest: The softdevice_bootloader may not be used together with softdevice or bootloader.");
final SoftDeviceBootloaderFileInfo system = manifest.getSoftdeviceBootloaderInfo();
softDeviceAndBootloaderBytes = entries.get(system.getBinFileName());
systemInitBytes = entries.get(system.getDatFileName());
if (softDeviceAndBootloaderBytes == null)
throw new IOException("File " + system.getBinFileName() + " not found.");
softDeviceSize = system.getSoftdeviceSize();
bootloaderSize = system.getBootloaderSize();
currentSource = softDeviceAndBootloaderBytes;
valid = true;
}
if (!valid) {
throw new IOException("Manifest file must specify at least one file.");
}
} else {
/*
* Compatibility mode. The 'manifest.json' file does not exist.
*
* In that case the ZIP file must contain one or more of the following files:
*
* - application.hex/dat
* + application.dat
* - softdevice.hex/dat
* - bootloader.hex/dat
* + system.dat
*/
boolean valid = false;
// Search for the application
if (types == DfuBaseService.TYPE_AUTO || (types & DfuBaseService.TYPE_APPLICATION) > 0) {
applicationBytes = entries.get(APPLICATION_HEX); // the entry bytes has already been converted to BIN, just the name remained.
if (applicationBytes == null)
applicationBytes = entries.get(APPLICATION_BIN);
if (applicationBytes != null) {
applicationSize = applicationBytes.length;
applicationInitBytes = entries.get(APPLICATION_INIT);
currentSource = applicationBytes;
valid = true;
}
}
// Search for theBootloader
if (types == DfuBaseService.TYPE_AUTO || (types & DfuBaseService.TYPE_BOOTLOADER) > 0) {
bootloaderBytes = entries.get(BOOTLOADER_HEX); // the entry bytes has already been converted to BIN, just the name remained.
if (bootloaderBytes == null)
bootloaderBytes = entries.get(BOOTLOADER_BIN);
if (bootloaderBytes != null) {
bootloaderSize = bootloaderBytes.length;
systemInitBytes = entries.get(SYSTEM_INIT);
currentSource = bootloaderBytes;
valid = true;
}
}
// Search for the Soft Device
if (types == DfuBaseService.TYPE_AUTO || (types & DfuBaseService.TYPE_SOFT_DEVICE) > 0) {
softDeviceBytes = entries.get(SOFTDEVICE_HEX); // the entry bytes has already been converted to BIN, just the name remained.
if (softDeviceBytes == null)
softDeviceBytes = entries.get(SOFTDEVICE_BIN);
if (softDeviceBytes != null) {
softDeviceSize = softDeviceBytes.length;
systemInitBytes = entries.get(SYSTEM_INIT);
currentSource = softDeviceBytes;
valid = true;
}
}
if (!valid) {
throw new IOException("The ZIP file must contain an Application, a Soft Device and/or a Bootloader.");
}
}
mark(0);
} finally {
super.close();
}
}
/**
 * Reads all ZIP entries into byte arrays and stores them in the {@code entries} map, keyed by
 * entry name. HEX entries are converted to BIN on the fly (the map key keeps the original name).
 * If a 'manifest.json' entry is found, it is parsed with Gson and the result is stored in the
 * {@code manifest} field instead of the map.
 *
 * The ZIP file is valid when it contains a 'manifest.json' file and all BIN and DAT files that are specified in the manifest.
 *
 * For backwards compatibility ArchiveInputStream supports also ZIP archives without 'manifest.json' file,
 * but then it MUST include at least one of the following files: softdevice.bin/hex, bootloader.bin/hex, application.bin/hex.
 * To support the init packet such ZIP file should contain also application.dat and/or system.dat (with the CRC16 of a SD, BL or SD+BL together).
 *
 * @param mbrSize the size of the MBR; passed to {@link HexInputStream} so HEX-to-BIN conversion
 *                skips addresses below it
 * @throws IOException if reading the ZIP stream or a HEX conversion fails
 */
private void parseZip(final int mbrSize) throws IOException {
final byte[] buffer = new byte[1024];
String manifestData = null;
ZipEntry ze;
while ((ze = getNextEntry()) != null) {
final String filename = ze.getName();
// Read the whole entry content into a byte array.
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
int count;
while ((count = super.read(buffer)) != -1) {
baos.write(buffer, 0, count);
}
byte[] source = baos.toByteArray();
// In case of HEX file convert it to BIN.
if (filename.toLowerCase(Locale.US).endsWith("hex")) {
final HexInputStream is = new HexInputStream(source, mbrSize);
source = new byte[is.available()];
// NOTE(review): the return value of read() is ignored — presumably HexInputStream fills
// the whole array in one call since it is backed by an in-memory buffer; confirm.
is.read(source);
is.close();
}
// Save the file content either as the manifest data or by adding it to entries.
if (MANIFEST.equals(filename))
manifestData = new String(source, "UTF-8");
else
entries.put(filename, source);
}
// The manifest, if present, is parsed last so it can reference any entry order.
if (manifestData != null) {
final ManifestFile manifestFile = new Gson().fromJson(manifestData, ManifestFile.class);
manifest = manifestFile.getManifest();
}
}
/**
 * Closes the underlying ZIP stream and releases all firmware buffers so they may be
 * garbage collected, resetting all sizes and read counters to their initial state.
 *
 * @throws IOException if closing the underlying stream fails
 */
@Override
public void close() throws IOException {
	softDeviceBytes = null;
	bootloaderBytes = null;
	// Fixed: this line previously cleared softDeviceBytes a second time, leaving the
	// application firmware buffer referenced after close.
	applicationBytes = null;
	softDeviceAndBootloaderBytes = null;
	softDeviceSize = bootloaderSize = applicationSize = 0;
	currentSource = null;
	bytesRead = bytesReadFromCurrentSource = 0;
	super.close();
}
/**
 * Fills the given buffer with firmware bytes from the current source file, automatically
 * switching to the next firmware part (see {@link #startNextFile()}) when the current one is
 * exhausted, and updates the running CRC32 with the bytes returned.
 *
 * @param buffer the destination buffer; filled from index 0
 * @return the number of bytes actually copied, possibly fewer than {@code buffer.length}
 *         when the last firmware part ends
 */
@Override
public int read(@NonNull final byte[] buffer) throws IOException {
// Copy as much as the current source still has, up to the buffer size.
int maxSize = currentSource.length - bytesReadFromCurrentSource;
int size = buffer.length <= maxSize ? buffer.length : maxSize;
System.arraycopy(currentSource, bytesReadFromCurrentSource, buffer, 0, size);
bytesReadFromCurrentSource += size;
if (buffer.length > size) {
// Current source exhausted; try to continue with the next firmware part.
if (startNextFile() == null) {
// No more parts: return the short read.
// NOTE(review): this early path returns bytes without folding them into crc32,
// unlike the normal path below — confirm this asymmetry is intentional.
bytesRead += size;
return size;
}
maxSize = currentSource.length;
final int nextSize = buffer.length - size <= maxSize ? buffer.length - size : maxSize;
System.arraycopy(currentSource, 0, buffer, size, nextSize);
bytesReadFromCurrentSource += nextSize;
size += nextSize;
}
bytesRead += size;
crc32.update(buffer, 0, size);
return size;
}
/**
 * This stream always supports {@link #mark(int)}/{@link #reset()} (the content is held in
 * memory, so re-reading is always possible).
 *
 * @return always {@code true}
 */
@Override
public boolean markSupported() {
return true;
}
/**
 * Marks the current position in the stream by remembering the current source buffer and the
 * offset read from it. The parameter is ignored; any read limit works because data is in memory.
 *
 * @param readlimit this parameter is ignored, can be anything
 */
@Override
public void mark(final int readlimit) {
markedSource = currentSource;
bytesReadFromMarkedSource = bytesReadFromCurrentSource;
}
/**
 * Resets the stream to the last marked position and rebuilds the CRC32 to the value it had at
 * the mark point.
 *
 * @throws UnsupportedOperationException when the archive contains both an application and a
 *         system component — those must be sent in separate connections, so resetting across
 *         them is not allowed
 */
@Override
public void reset() throws IOException {
if (applicationBytes != null && (softDeviceBytes != null || bootloaderBytes != null || softDeviceAndBootloaderBytes != null))
throw new UnsupportedOperationException("Application must be sent in a separate connection.");
currentSource = markedSource;
bytesRead = bytesReadFromCurrentSource = bytesReadFromMarkedSource;
// Restore the CRC to the value it was on mark: recompute from scratch over everything
// that counted as "read" at that point.
crc32.reset();
// If we marked inside the bootloader and a SoftDevice preceded it, the SoftDevice bytes
// were already read and must be folded back into the CRC and the byte counter.
if (currentSource == bootloaderBytes && softDeviceBytes != null) {
crc32.update(softDeviceBytes);
bytesRead += softDeviceSize;
}
crc32.update(currentSource, 0, bytesReadFromCurrentSource);
}
/**
 * Returns the CRC32 of the part of the firmware that was already read.
 * The value is updated by {@link #read(byte[])} and restored by {@link #reset()}.
 *
 * @return the CRC
 */
public long getCrc32() {
return crc32.getValue();
}
/**
 * Returns the manifest object if it was specified in the ZIP file, or {@code null} when the
 * archive was opened in compatibility mode (no 'manifest.json' entry).
 *
 * @return the manifest object, or {@code null}
 */
public Manifest getManifest() {
return manifest;
}
/**
 * Computes the content type from the firmware parts currently held by this stream. The result
 * may later be narrowed with {@link #setContentType(int)}.
 *
 * @return a bit field of {@link DfuBaseService#TYPE_SOFT_DEVICE TYPE_SOFT_DEVICE},
 *         {@link DfuBaseService#TYPE_BOOTLOADER TYPE_BOOTLOADER} and
 *         {@link DfuBaseService#TYPE_APPLICATION TYPE_APPLICATION}
 */
public int getContentType() {
	int type = 0;
	// Secure DFU: a combined SD+BL part implies both flags even when the individual sizes
	// are 0 (each part's size is embedded in the Init packet instead).
	if (softDeviceAndBootloaderBytes != null)
		type |= DfuBaseService.TYPE_SOFT_DEVICE | DfuBaseService.TYPE_BOOTLOADER;
	// Legacy DFU: the manifest provided an explicit size for each part.
	if (softDeviceSize > 0)
		type |= DfuBaseService.TYPE_SOFT_DEVICE;
	if (bootloaderSize > 0)
		type |= DfuBaseService.TYPE_BOOTLOADER;
	if (applicationSize > 0)
		type |= DfuBaseService.TYPE_APPLICATION;
	return type;
}
/**
 * Truncates the current content type. May be used to hide some files, e.g. to send Soft Device
 * and Bootloader without Application, or only the Application.
 *
 * Parts excluded by the new type have their buffers released and sizes zeroed, and the mark is
 * moved to the current (new) source.
 *
 * @param type the new type (a bit field of {@code DfuBaseService.TYPE_*} constants)
 * @return the final type after truncating (intersection of the current content and {@code type})
 * @throws UnsupportedOperationException when any bytes have already been read
 */
public int setContentType(final int type) {
	if (bytesRead > 0)
		// Fixed grammar in the message ("be change" -> "be changed").
		throw new UnsupportedOperationException("Content type must not be changed after reading content");
	final int t = getContentType() & type;
	if ((t & DfuBaseService.TYPE_SOFT_DEVICE) == 0) {
		softDeviceBytes = null;
		// A combined SD+BL part cannot be split: dropping the SD drops the BL too.
		if (softDeviceAndBootloaderBytes != null) {
			softDeviceAndBootloaderBytes = null;
			bootloaderSize = 0;
		}
		softDeviceSize = 0;
	}
	if ((t & DfuBaseService.TYPE_BOOTLOADER) == 0) {
		bootloaderBytes = null;
		// Same for the combined part when the bootloader is excluded.
		if (softDeviceAndBootloaderBytes != null) {
			softDeviceAndBootloaderBytes = null;
			softDeviceSize = 0;
		}
		bootloaderSize = 0;
	}
	if ((t & DfuBaseService.TYPE_APPLICATION) == 0) {
		applicationBytes = null;
		applicationSize = 0;
	}
	mark(0);
	return t;
}
/**
 * Advances {@link #currentSource} to the next firmware part to be transmitted, or to
 * {@code null} when the last part has been sent. The order is: Soft Device, then Bootloader
 * (if present), then Application (if present). Resets the per-source read counter.
 *
 * @return the new source, identical to {@link #currentSource} after the call
 */
private byte[] startNextFile() {
	byte[] next = null;
	if (currentSource == softDeviceBytes && bootloaderBytes != null) {
		// The Soft Device is followed by the Bootloader.
		next = bootloaderBytes;
	} else if (currentSource != applicationBytes && applicationBytes != null) {
		// Any non-application part is followed by the Application, if one exists.
		next = applicationBytes;
	}
	currentSource = next;
	bytesReadFromCurrentSource = 0;
	return next;
}
/**
 * Returns the number of bytes that have not been read yet. This value includes only firmwares
 * matching the content type set by the constructor or the {@link #setContentType(int)} method.
 *
 * @return the number of remaining bytes across all selected firmware parts
 */
@Override
public int available() {
// In Secure DFU softdevice and bootloader sizes are not provided in the Init file (they are encoded inside the Init file instead).
// The service doesn't send those sizes, not the whole size of the firmware separately, like it was done in the Legacy DFU.
// This method then is just used to log file size.
// In case of SD+BL in Secure DFU:
if (softDeviceAndBootloaderBytes != null && softDeviceSize == 0 && bootloaderSize == 0)
return softDeviceAndBootloaderBytes.length + applicationSize - bytesRead;
// Otherwise:
return softDeviceSize + bootloaderSize + applicationSize - bytesRead;
}
/**
 * Returns the total size of the SoftDevice firmware. In case the firmware was given as a HEX, this method returns the size of the BIN content of the file.
 * Returns 0 if no SoftDevice was selected, or when a combined SD+BL part carries its sizes in the Init packet.
 * @return the size of the SoftDevice firmware (BIN part)
 */
public int softDeviceImageSize() {
return softDeviceSize;
}
/**
 * Returns the total size of the Bootloader firmware. In case the firmware was given as a HEX, this method returns the size of the BIN content of the file.
 * Returns 0 if no Bootloader was selected, or when a combined SD+BL part carries its sizes in the Init packet.
 * @return the size of the Bootloader firmware (BIN part)
 */
public int bootloaderImageSize() {
return bootloaderSize;
}
/**
 * Returns the total size of the Application firmware. In case the firmware was given as a HEX, this method returns the size of the BIN content of the file.
 * Returns 0 if no Application was selected.
 * @return the size of the Application firmware (BIN part)
 */
public int applicationImageSize() {
return applicationSize;
}
/**
 * Returns the content of the init file for SoftDevice and/or Bootloader. When both SoftDevice and Bootloader are present in the ZIP file (as two files using the compatibility mode
 * or as one file using the new Distribution packet) the system init contains validation data for those two files combined (e.g. the CRC value). This method may return
 * <code>null</code> if there is no SoftDevice nor Bootloader in the ZIP or the DAT file is not present there.
 * @return the content of the init packet for SoftDevice and/or Bootloader, or <code>null</code>
 */
public byte[] getSystemInit() {
return systemInitBytes;
}
/**
 * Returns the content of the init file for the Application or <code>null</code> if no application file in the ZIP, or the DAT file is not provided.
 * @return the content of the init packet for Application, or <code>null</code>
 */
public byte[] getApplicationInit() {
return applicationInitBytes;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.maven.plugin.eclipse.it;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.codehaus.plexus.util.IOUtil;
/**
* <p>
* Master test for eclipse .classpath and .wtpmodules generation.
* </p>
* <p>
* This test use a 2 modules project with all the mvn dependencies flavours (direct, transitive, with
* compile/test/provided/system scope, required and optional, artifacts and modules).
* </p>
* <p>
* In order to fully test the eclipse plugin execution in a such complex environment mvn is executed from a command
* line. Mvn is started using a custom settings.xml file, created on the fly. The custom settings.xml only adds a mirror
* for the central repository which is actually a local (file://) repository for loading files from
* <code>src/test/m2repo</code>
* </p>
* <p>
* The following is the base layout of modules/dependencies. The actual test is to check generated files for module-2
* </p>
*
* <pre>
*
* +----------------+ +-----------------+ +-----------------+
* /| module 1 (jar) | ----> | refproject | ----> | deps-refproject |
* / +----------------+ +-----------------+ +-----------------+
* / ˆ
* root | (depends on)
* \ |
* \ +----------------+ +-----------------+ +-----------------+
* \| module 2 (war) | ----> | direct | ----> | deps-direct |
* +----------------+ +-----------------+ +-----------------+
*
* </pre>
*
* @todo a know problem with this approach is that tests are running with the installed version of the plugin! Don't
* enable test in pom.xml at the moment or you will never be able to build.
* @author Fabrizio Giustina
* @version $Id$
*/
public class EclipsePluginMasterProjectIT
    extends AbstractEclipsePluginIT
{
    /** Root of the master test project, resolved in {@link #setUp()}. */
    protected File basedir;

    /**
     * @see org.apache.maven.plugin.eclipse.it.AbstractEclipsePluginIT#setUp()
     */
    protected void setUp()
        throws Exception
    {
        basedir = getTestFile( "target/test-classes/projects/master-test" );
        super.setUp();
    }

    /**
     * Runs the eclipse plugin ("clean" then "eclipse" goals) on the master project with
     * wtpversion R7, so the generated files can be inspected by the tests.
     */
    protected void executeMaven2()
        throws Exception
    {
        File pom = new File( basedir, "pom.xml" );

        Properties properties = new Properties();
        properties.setProperty( "wtpversion", "R7" );

        String pluginSpec = getPluginCLISpecification();

        List goals = new ArrayList();
        goals.add( pluginSpec + "clean" );
        goals.add( pluginSpec + "eclipse" );

        executeMaven( pom, properties, goals );
    }

    /**
     * Reads a generated file into a String, guaranteeing the stream is closed even when
     * reading fails (the previous inline pattern leaked the FileInputStream on exception).
     *
     * @param file the file to read
     * @return the file content
     */
    private String readFile( File file )
        throws Exception
    {
        InputStream fis = new FileInputStream( file );
        try
        {
            return IOUtil.toString( fis );
        }
        finally
        {
            IOUtil.close( fis );
        }
    }

    public void testModule1Project()
        throws Exception
    {
        executeMaven2();
        assertFileEquals( new File( basedir, "module-1/expected/.project" ), new File( basedir, "module-1/.project" ) );
    }

    public void testModule1Classpath()
        throws Exception
    {
        executeMaven2();
        String classpath = readFile( new File( basedir, "module-1/.classpath" ) );

        // direct dependencies, include all
        assertContains( "Invalid classpath", classpath, "/refproject-compile" );
        assertContains( "Invalid classpath", classpath, "refproject-sysdep" );
        assertContains( "Invalid classpath", classpath, "/refproject-test" );
        assertContains( "Invalid classpath", classpath, "/refproject-optional" );
        assertContains( "Invalid classpath", classpath, "/refproject-provided" );

        // transitive dependencies
        assertContains( "Invalid classpath", classpath, "/deps-refproject-compile" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-refproject-test" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-refproject-optional" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-refproject-provided" );
    }

    public void testModule1Wtpmodules()
        throws Exception
    {
        executeMaven2();
        assertFileEquals( new File( basedir, "module-1/expected/.wtpmodules" ), new File( basedir,
                                                                                         "module-1/.wtpmodules" ) );
    }

    public void testModule2Project()
        throws Exception
    {
        executeMaven2();
        assertFileEquals( new File( basedir, "module-2/expected/.project" ), new File( basedir, "module-2/.project" ) );
    }

    public void testModule2Classpath()
        throws Exception
    {
        executeMaven2();
        String classpath = readFile( new File( basedir, "module-2/.classpath" ) );

        // direct dependencies: include all
        assertContains( "Invalid classpath", classpath, "/direct-compile" );
        assertContains( "Invalid classpath", classpath, "/direct-test" );
        assertContains( "Invalid classpath", classpath, "direct-sysdep" );
        assertContains( "Invalid classpath", classpath, "/direct-optional" );
        assertContains( "Invalid classpath", classpath, "/direct-provided" );

        // referenced project: not required, but it's not a problem to have them included
        assertContains( "Invalid classpath", classpath, "/module-1" );
        // assertDoesNotContain( "Invalid classpath", classpath, "/refproject-compile" );
        // assertDoesNotContain( "Invalid classpath", classpath, "/refproject-sysdep" );
        assertDoesNotContain( "Invalid classpath", classpath, "/refproject-test" );
        assertDoesNotContain( "Invalid classpath", classpath, "/refproject-optional" );
        assertDoesNotContain( "Invalid classpath", classpath, "/refproject-provided" );

        // transitive dependencies from referenced projects
        assertContains( "Invalid classpath", classpath, "/deps-direct-compile" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-direct-test" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-direct-optional" );
        // @todo should this be included? see MNG-514
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-direct-provided" );

        // transitive dependencies from referenced projects
        assertContains( "Invalid classpath", classpath, "/deps-refproject-compile" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-refproject-test" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-refproject-optional" );
        assertDoesNotContain( "Invalid classpath", classpath, "/deps-refproject-provided" );
    }

    public void testModule2Wtpmodules()
        throws Exception
    {
        executeMaven2();
        String wtpmodules = readFile( new File( basedir, "module-2/.wtpmodules" ) );

        // direct dependencies: include only runtime (also optional) dependencies
        assertContains( "Invalid wtpmodules", wtpmodules, "/direct-compile" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/direct-test" );
        assertContains( "Invalid wtpmodules", wtpmodules, "/direct-sysdep" );
        assertContains( "Invalid wtpmodules", wtpmodules, "/direct-optional" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/direct-provided" );

        // referenced project: only runtime deps
        assertContains( "Invalid wtpmodules", wtpmodules, "/module-1" );
        assertContains( "Invalid wtpmodules", wtpmodules, "/refproject-compile" );
        assertContains( "Invalid wtpmodules", wtpmodules, "refproject-sysdep" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/refproject-test" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/refproject-optional" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/refproject-provided" );

        // transitive dependencies from referenced projects
        assertContains( "Invalid wtpmodules", wtpmodules, "/deps-direct-compile" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/deps-direct-test" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/deps-direct-optional" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/deps-direct-provided" );

        // transitive dependencies from referenced projects
        assertContains( "Invalid wtpmodules", wtpmodules, "/deps-refproject-compile" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/deps-refproject-test" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/deps-refproject-optional" );
        assertDoesNotContain( "Invalid wtpmodules", wtpmodules, "/deps-refproject-provided" );
    }
}
| |
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.query.change;
import static com.google.gerrit.server.query.change.ChangeData.asChanges;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.gerrit.common.Nullable;
import com.google.gerrit.common.data.GroupReference;
import com.google.gerrit.common.errors.NotSignedInException;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.Branch;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PatchLineCommentsUtil;
import com.google.gerrit.server.account.AccountResolver;
import com.google.gerrit.server.account.CapabilityControl;
import com.google.gerrit.server.account.GroupBackend;
import com.google.gerrit.server.account.GroupBackends;
import com.google.gerrit.server.change.ChangeTriplet;
import com.google.gerrit.server.config.AllProjectsName;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.TrackingFooters;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.git.strategy.SubmitStrategyFactory;
import com.google.gerrit.server.index.ChangeIndex;
import com.google.gerrit.server.index.FieldDef;
import com.google.gerrit.server.index.IndexCollection;
import com.google.gerrit.server.index.Schema;
import com.google.gerrit.server.patch.PatchListCache;
import com.google.gerrit.server.project.ChangeControl;
import com.google.gerrit.server.project.ListChildProjects;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.query.Predicate;
import com.google.gerrit.server.query.QueryBuilder;
import com.google.gerrit.server.query.QueryParseException;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.ProvisionException;
import com.google.inject.util.Providers;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.lib.Config;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
/**
* Parses a query string meant to be applied to change objects.
*/
public class ChangeQueryBuilder extends QueryBuilder<ChangeData> {
// Patterns recognizing the different "change" argument formats: a legacy numeric id,
// an I-prefixed Change-Id, or a full project~branch~Change-Id triplet.
private static final Pattern PAT_LEGACY_ID = Pattern.compile("^[1-9][0-9]*$");
private static final Pattern PAT_CHANGE_ID =
Pattern.compile("^[iI][0-9a-f]{4,}.*$");
private static final Pattern DEF_CHANGE = Pattern.compile(
"^(?:[1-9][0-9]*|(?:[^~]+~[^~]+~)?[iI][0-9a-f]{4,}.*)$");
// NOTE: As new search operations are added, please keep the
// SearchSuggestOracle up to date.
// Field names usable as search operators (e.g. "status:open", "owner:someone").
public static final String FIELD_ADDED = "added";
public static final String FIELD_AFTER = "after";
public static final String FIELD_AGE = "age";
public static final String FIELD_BEFORE = "before";
public static final String FIELD_BRANCH = "branch";
public static final String FIELD_CHANGE = "change";
public static final String FIELD_COMMENT = "comment";
public static final String FIELD_COMMENTBY = "commentby";
public static final String FIELD_COMMIT = "commit";
public static final String FIELD_CONFLICTS = "conflicts";
public static final String FIELD_DELETED = "deleted";
public static final String FIELD_DELTA = "delta";
public static final String FIELD_DRAFTBY = "draftby";
public static final String FIELD_FILE = "file";
public static final String FIELD_IS = "is";
public static final String FIELD_HAS = "has";
public static final String FIELD_HASHTAG = "hashtag";
public static final String FIELD_LABEL = "label";
public static final String FIELD_LIMIT = "limit";
public static final String FIELD_MERGEABLE = "mergeable";
public static final String FIELD_MESSAGE = "message";
public static final String FIELD_OWNER = "owner";
public static final String FIELD_OWNERIN = "ownerin";
public static final String FIELD_PARENTPROJECT = "parentproject";
public static final String FIELD_PATH = "path";
public static final String FIELD_PROJECT = "project";
public static final String FIELD_PROJECTS = "projects";
public static final String FIELD_REF = "ref";
public static final String FIELD_REVIEWER = "reviewer";
public static final String FIELD_REVIEWERIN = "reviewerin";
public static final String FIELD_STARREDBY = "starredby";
public static final String FIELD_STATUS = "status";
public static final String FIELD_TOPIC = "topic";
public static final String FIELD_TR = "tr";
public static final String FIELD_VISIBLETO = "visibleto";
public static final String FIELD_WATCHEDBY = "watchedby";
// Keys for structured operator arguments, e.g. "label:Code-Review+2,user=jdoe".
public static final String ARG_ID_USER = "user";
public static final String ARG_ID_GROUP = "group";
// Operator definitions discovered reflectively from the @Operator methods of this class.
private static final QueryBuilder.Definition<ChangeData, ChangeQueryBuilder> mydef =
new QueryBuilder.Definition<>(ChangeQueryBuilder.class);
@VisibleForTesting
// Bundle of all injected dependencies shared by the predicates built by ChangeQueryBuilder.
// Keeping them in one holder lets a builder be re-scoped to another user via asUser(...)
// without re-injecting each dependency.
public static class Arguments {
final Provider<ReviewDb> db;
final Provider<InternalChangeQuery> queryProvider;
final Provider<ChangeQueryRewriter> rewriter;
final IdentifiedUser.GenericFactory userFactory;
final CapabilityControl.Factory capabilityControlFactory;
final ChangeControl.GenericFactory changeControlGenericFactory;
final ChangeData.Factory changeDataFactory;
final FieldDef.FillArgs fillArgs;
final PatchLineCommentsUtil plcUtil;
final AccountResolver accountResolver;
final GroupBackend groupBackend;
final AllProjectsName allProjectsName;
final PatchListCache patchListCache;
final GitRepositoryManager repoManager;
final ProjectCache projectCache;
final Provider<ListChildProjects> listChildProjects;
final IndexCollection indexes;
final SubmitStrategyFactory submitStrategyFactory;
final ConflictsCache conflictsCache;
final TrackingFooters trackingFooters;
final boolean allowsDrafts;
// Private so callers must go through getCurrentUser()/getIdentifiedUser(), which translate
// a missing user (ProvisionException) into QueryParseException.
private final Provider<CurrentUser> self;
@Inject
@VisibleForTesting
public Arguments(Provider<ReviewDb> db,
Provider<InternalChangeQuery> queryProvider,
Provider<ChangeQueryRewriter> rewriter,
IdentifiedUser.GenericFactory userFactory,
Provider<CurrentUser> self,
CapabilityControl.Factory capabilityControlFactory,
ChangeControl.GenericFactory changeControlGenericFactory,
ChangeData.Factory changeDataFactory,
FieldDef.FillArgs fillArgs,
PatchLineCommentsUtil plcUtil,
AccountResolver accountResolver,
GroupBackend groupBackend,
AllProjectsName allProjectsName,
PatchListCache patchListCache,
GitRepositoryManager repoManager,
ProjectCache projectCache,
Provider<ListChildProjects> listChildProjects,
IndexCollection indexes,
SubmitStrategyFactory submitStrategyFactory,
ConflictsCache conflictsCache,
TrackingFooters trackingFooters,
@GerritServerConfig Config cfg) {
// Delegates to the private constructor, resolving the allowDrafts flag from server config
// (defaults to true when no config is available).
this(db, queryProvider, rewriter, userFactory, self,
capabilityControlFactory, changeControlGenericFactory,
changeDataFactory, fillArgs, plcUtil, accountResolver, groupBackend,
allProjectsName, patchListCache, repoManager, projectCache,
listChildProjects, indexes, submitStrategyFactory, conflictsCache,
trackingFooters,
cfg == null ? true : cfg.getBoolean("change", "allowDrafts", true));
}
private Arguments(
Provider<ReviewDb> db,
Provider<InternalChangeQuery> queryProvider,
Provider<ChangeQueryRewriter> rewriter,
IdentifiedUser.GenericFactory userFactory,
Provider<CurrentUser> self,
CapabilityControl.Factory capabilityControlFactory,
ChangeControl.GenericFactory changeControlGenericFactory,
ChangeData.Factory changeDataFactory,
FieldDef.FillArgs fillArgs,
PatchLineCommentsUtil plcUtil,
AccountResolver accountResolver,
GroupBackend groupBackend,
AllProjectsName allProjectsName,
PatchListCache patchListCache,
GitRepositoryManager repoManager,
ProjectCache projectCache,
Provider<ListChildProjects> listChildProjects,
IndexCollection indexes,
SubmitStrategyFactory submitStrategyFactory,
ConflictsCache conflictsCache,
TrackingFooters trackingFooters,
boolean allowsDrafts) {
this.db = db;
this.queryProvider = queryProvider;
this.rewriter = rewriter;
this.userFactory = userFactory;
this.self = self;
this.capabilityControlFactory = capabilityControlFactory;
this.changeControlGenericFactory = changeControlGenericFactory;
this.changeDataFactory = changeDataFactory;
this.fillArgs = fillArgs;
this.plcUtil = plcUtil;
this.accountResolver = accountResolver;
this.groupBackend = groupBackend;
this.allProjectsName = allProjectsName;
this.patchListCache = patchListCache;
this.repoManager = repoManager;
this.projectCache = projectCache;
this.listChildProjects = listChildProjects;
this.indexes = indexes;
this.submitStrategyFactory = submitStrategyFactory;
this.conflictsCache = conflictsCache;
this.trackingFooters = trackingFooters;
this.allowsDrafts = allowsDrafts;
}
// Returns a copy of these arguments with `self` replaced by the given user; everything
// else is shared with this instance.
Arguments asUser(CurrentUser otherUser) {
return new Arguments(db, queryProvider, rewriter, userFactory,
Providers.of(otherUser),
capabilityControlFactory, changeControlGenericFactory,
changeDataFactory, fillArgs, plcUtil, accountResolver, groupBackend,
allProjectsName, patchListCache, repoManager, projectCache,
listChildProjects, indexes, submitStrategyFactory, conflictsCache,
trackingFooters, allowsDrafts);
}
// Like asUser(CurrentUser), but avoids creating a new user object when the id already
// matches the current identified user.
Arguments asUser(Account.Id otherId) {
try {
CurrentUser u = self.get();
if (u.isIdentifiedUser()
&& otherId.equals(((IdentifiedUser) u).getAccountId())) {
return this;
}
} catch (ProvisionException e) {
// Doesn't match current user, continue.
}
return asUser(userFactory.create(db, otherId));
}
// Returns the current user as an IdentifiedUser, or fails the query parse when there is
// no signed-in (identified) user.
IdentifiedUser getIdentifiedUser() throws QueryParseException {
try {
CurrentUser u = getCurrentUser();
if (u.isIdentifiedUser()) {
return (IdentifiedUser) u;
}
throw new QueryParseException(NotSignedInException.MESSAGE);
} catch (ProvisionException e) {
throw new QueryParseException(NotSignedInException.MESSAGE, e);
}
}
// Returns the current user, translating a failed provision (no user in scope) into a
// QueryParseException so operators report "not signed in" instead of crashing.
CurrentUser getCurrentUser() throws QueryParseException {
try {
return self.get();
} catch (ProvisionException e) {
throw new QueryParseException(NotSignedInException.MESSAGE, e);
}
}
}
// Shared dependencies and the user scope this builder operates under.
private final Arguments args;
// Injected constructor using the reflectively-built operator definition (mydef).
@Inject
ChangeQueryBuilder(Arguments args) {
super(mydef);
this.args = args;
}
@VisibleForTesting
// Constructor allowing tests (and subclasses) to supply a custom operator definition.
protected ChangeQueryBuilder(
Definition<ChangeData, ? extends QueryBuilder<ChangeData>> def,
Arguments args) {
super(def);
this.args = args;
}
// Returns a builder identical to this one but evaluating user-relative operators
// (is:owner, has:draft, ...) as the given user.
public ChangeQueryBuilder asUser(CurrentUser user) {
return new ChangeQueryBuilder(builderDef, args.asUser(user));
}
// Matches changes last updated at least the given age ago (e.g. "age:2d").
@Operator
public Predicate<ChangeData> age(String value) {
return new AgePredicate(value);
}
// Matches changes last updated before the given date/time.
@Operator
public Predicate<ChangeData> before(String value) throws QueryParseException {
return new BeforePredicate(value);
}
// Alias for before:.
@Operator
public Predicate<ChangeData> until(String value) throws QueryParseException {
return before(value);
}
// Matches changes last updated after the given date/time.
@Operator
public Predicate<ChangeData> after(String value) throws QueryParseException {
return new AfterPredicate(value);
}
// Alias for after:.
@Operator
public Predicate<ChangeData> since(String value) throws QueryParseException {
return after(value);
}
/**
 * Matches a change by identifier: a legacy numeric id ("12345"), a Change-Id
 * ("I0123abcd..."), or a full triplet ("project~branch~I0123abcd...").
 *
 * @throws QueryParseException if the query matches none of the supported formats
 */
@Operator
public Predicate<ChangeData> change(String query) throws QueryParseException {
if (PAT_LEGACY_ID.matcher(query).matches()) {
return new LegacyChangeIdPredicate(Change.Id.parse(query));
} else if (PAT_CHANGE_ID.matcher(query).matches()) {
return new ChangeIdPredicate(parseChangeId(query));
}
// Not an id on its own: try the project~branch~id triplet form.
Optional<ChangeTriplet> triplet = ChangeTriplet.parse(query);
if (triplet.isPresent()) {
return Predicate.and(
project(triplet.get().project().get()),
branch(triplet.get().branch().get()),
new ChangeIdPredicate(parseChangeId(triplet.get().id().get())));
}
throw new QueryParseException("Invalid change format");
}
/** Full-text search over inline/change comments via the search index. */
@Operator
public Predicate<ChangeData> comment(String value) {
  ChangeIndex index = args.indexes.getSearchIndex();
  return new CommentPredicate(index, value);
}
/**
 * Matches changes by status name; "reviewed" is a pseudo-status handled by
 * its own predicate rather than by the status parser.
 */
@Operator
public Predicate<ChangeData> status(String statusName) {
  if ("reviewed".equalsIgnoreCase(statusName)) {
    return new IsReviewedPredicate();
  }
  return ChangeStatusPredicate.parse(statusName);
}
/** Matches all open changes; bound to the query term "status:open". */
public Predicate<ChangeData> status_open() {
  return ChangeStatusPredicate.open();
}
/**
 * Matches "has:star" (starred by caller) and "has:draft" (caller has draft
 * comments). IllegalArgumentException signals an unknown argument; callers
 * in this class treat that as "not this operator".
 */
@Operator
public Predicate<ChangeData> has(String value) throws QueryParseException {
  if ("star".equalsIgnoreCase(value)) {
    return new IsStarredByPredicate(args);
  }

  if ("draft".equalsIgnoreCase(value)) {
    return new HasDraftByPredicate(args, self());
  }

  throw new IllegalArgumentException();
}
/**
 * Matches "is:" pseudo-states relative to the calling user (starred,
 * watched, visible, reviewed, owner, reviewer, mergeable) and falls back to
 * status names (e.g. "is:open").
 */
@Operator
public Predicate<ChangeData> is(String value) throws QueryParseException {
  if ("starred".equalsIgnoreCase(value)) {
    return new IsStarredByPredicate(args);
  }

  if ("watched".equalsIgnoreCase(value)) {
    return new IsWatchedByPredicate(args, false);
  }

  if ("visible".equalsIgnoreCase(value)) {
    return is_visible();
  }

  if ("reviewed".equalsIgnoreCase(value)) {
    return new IsReviewedPredicate();
  }

  if ("owner".equalsIgnoreCase(value)) {
    return new OwnerPredicate(self());
  }

  if ("reviewer".equalsIgnoreCase(value)) {
    return new ReviewerPredicate(self(), args.allowsDrafts);
  }

  if ("mergeable".equalsIgnoreCase(value)) {
    return new IsMergeablePredicate(schema(args.indexes), args.fillArgs);
  }

  // Fall back to status names, e.g. "is:merged".
  try {
    return status(value);
  } catch (IllegalArgumentException e) {
    // not status: alias?
  }

  throw new IllegalArgumentException();
}
/** Matches changes whose patch set commit matches the (abbreviated) SHA-1. */
@Operator
public Predicate<ChangeData> commit(String id) {
  return new CommitPredicate(AbbreviatedObjectId.fromString(id));
}
/** Matches changes that conflict with the change(s) named by {@code value}. */
@Operator
public Predicate<ChangeData> conflicts(String value) throws OrmException,
    QueryParseException {
  return new ConflictsPredicate(args, value, parseChange(value));
}
/** Shorthand alias for {@link #project(String)}. */
@Operator
public Predicate<ChangeData> p(String name) {
  return project(name);
}
/**
 * Matches changes by project name; a leading '^' selects regular-expression
 * matching, consistent with the other name operators.
 */
@Operator
public Predicate<ChangeData> project(String name) {
  return name.startsWith("^")
      ? new RegexProjectPredicate(name)
      : new ProjectPredicate(name);
}
/** Matches changes whose project name starts with the given prefix. */
@Operator
public Predicate<ChangeData> projects(String name) {
  return new ProjectPrefixPredicate(name);
}
/** Matches changes in the named project or in any of its child projects. */
@Operator
public Predicate<ChangeData> parentproject(String name) {
  return new ParentProjectPredicate(args.projectCache, args.listChildProjects,
      args.self, name);
}
/**
 * Matches changes by destination branch. Short names are qualified with
 * refs/heads/; a leading '^' keeps regex semantics after qualification.
 */
@Operator
public Predicate<ChangeData> branch(String name) {
  if (name.startsWith("^")) {
    return ref("^" + branchToRef(name.substring(1)));
  }
  return ref(branchToRef(name));
}
/**
 * Qualifies a short branch name with the refs/heads/ prefix; names that are
 * already fully qualified pass through unchanged.
 */
private static String branchToRef(String name) {
  return name.startsWith(Branch.R_HEADS) ? name : Branch.R_HEADS + name;
}
/** Matches changes carrying the given hashtag. */
@Operator
public Predicate<ChangeData> hashtag(String hashtag) {
  return new HashtagPredicate(hashtag);
}
/** Matches changes by topic; a leading '^' selects regex matching. */
@Operator
public Predicate<ChangeData> topic(String name) {
  if (name.startsWith("^")) {
    return new RegexTopicPredicate(name);
  }
  return new TopicPredicate(name);
}
/**
 * Matches changes by destination ref; a leading '^' selects regex matching.
 */
@Operator
public Predicate<ChangeData> ref(String ref) {
  return ref.startsWith("^")
      ? new RegexRefPredicate(ref)
      : new RefPredicate(ref);
}
/** Shorthand alias for {@link #file(String)}. */
@Operator
public Predicate<ChangeData> f(String file) {
  return file(file);
}
/** Matches changes touching the given file; '^' prefix selects regex. */
@Operator
public Predicate<ChangeData> file(String file) {
  if (file.startsWith("^")) {
    return new RegexPathPredicate(file);
  } else {
    return EqualsFilePredicate.create(args, file);
  }
}
/** Matches changes touching the exact path; '^' prefix selects regex. */
@Operator
public Predicate<ChangeData> path(String path) {
  if (path.startsWith("^")) {
    return new RegexPathPredicate(path);
  } else {
    return new EqualsPathPredicate(FIELD_PATH, path);
  }
}
/**
 * Matches changes by label vote, optionally restricted to a user or group.
 * The user/group part may be given positionally or with user=/group= keys;
 * positional values try account resolution first, then group lookup.
 */
@Operator
public Predicate<ChangeData> label(String name) throws QueryParseException,
    OrmException {
  Set<Account.Id> accounts = null;
  AccountGroup.UUID group = null;

  // Parse for:
  // label:CodeReview=1,user=jsmith or
  // label:CodeReview=1,jsmith or
  // label:CodeReview=1,group=android_approvers or
  // label:CodeReview=1,android_approvers
  // user/groups without a label will first attempt to match user
  String[] splitReviewer = name.split(",", 2);
  name = splitReviewer[0]; // remove all but the vote piece, e.g.'CodeReview=1'

  if (splitReviewer.length == 2) {
    // process the user/group piece
    PredicateArgs lblArgs = new PredicateArgs(splitReviewer[1]);

    for (Map.Entry<String, String> pair : lblArgs.keyValue.entrySet()) {
      if (pair.getKey().equalsIgnoreCase(ARG_ID_USER)) {
        accounts = parseAccount(pair.getValue());
      } else if (pair.getKey().equalsIgnoreCase(ARG_ID_GROUP)) {
        group = parseGroup(pair.getValue()).getUUID();
      } else {
        throw new QueryParseException(
            "Invalid argument identifier '" + pair.getKey() + "'");
      }
    }

    for (String value : lblArgs.positional) {
      // Only a single user or group restriction is supported.
      if (accounts != null || group != null) {
        throw new QueryParseException("more than one user/group specified (" +
            value + ")");
      }
      try {
        accounts = parseAccount(value);
      } catch (QueryParseException qpex) {
        // If it doesn't match an account, see if it matches a group
        // (accounts get precedence)
        try {
          group = parseGroup(value).getUUID();
        } catch (QueryParseException e) {
          throw error("Neither user nor group " + value + " found");
        }
      }
    }
  }

  return new LabelPredicate(args.projectCache,
      args.changeControlGenericFactory, args.userFactory, args.db,
      name, accounts, group);
}
/** Full-text search over commit messages via the search index. */
@Operator
public Predicate<ChangeData> message(String text) {
  ChangeIndex index = args.indexes.getSearchIndex();
  return new MessagePredicate(index, text);
}
/**
 * Matches changes starred by the named user(s); "self" means the caller.
 * Multiple resolved accounts are OR'ed together.
 */
@Operator
public Predicate<ChangeData> starredby(String who)
    throws QueryParseException, OrmException {
  if ("self".equals(who)) {
    return new IsStarredByPredicate(args);
  }
  Set<Account.Id> m = parseAccount(who);
  List<IsStarredByPredicate> p = Lists.newArrayListWithCapacity(m.size());
  for (Account.Id id : m) {
    p.add(new IsStarredByPredicate(args.asUser(id)));
  }
  return Predicate.or(p);
}
/**
 * Matches changes watched by the named user(s), OR'ed together. Each
 * per-user subtree carries its own visibility check unless that user is the
 * caller (already checked at the top level).
 */
@Operator
public Predicate<ChangeData> watchedby(String who)
    throws QueryParseException, OrmException {
  Set<Account.Id> m = parseAccount(who);
  List<IsWatchedByPredicate> p = Lists.newArrayListWithCapacity(m.size());

  Account.Id callerId;
  try {
    CurrentUser caller = args.self.get();
    if (caller.isIdentifiedUser()) {
      callerId = ((IdentifiedUser) caller).getAccountId();
    } else {
      callerId = null;
    }
  } catch (ProvisionException e) {
    // No user bound to the current scope; treat as anonymous caller.
    callerId = null;
  }

  for (Account.Id id : m) {
    // Each child IsWatchedByPredicate includes a visibility filter for the
    // corresponding user, to ensure that predicate subtree only returns
    // changes visible to that user. The exception is if one of the users is
    // the caller of this method, in which case visibility is already being
    // checked at the top level.
    p.add(new IsWatchedByPredicate(args.asUser(id), !id.equals(callerId)));
  }
  return Predicate.or(p);
}
/** Matches changes with draft comments by the named user(s), OR'ed. */
@Operator
public Predicate<ChangeData> draftby(String who) throws QueryParseException,
    OrmException {
  Set<Account.Id> m = parseAccount(who);
  List<HasDraftByPredicate> p = Lists.newArrayListWithCapacity(m.size());
  for (Account.Id id : m) {
    p.add(new HasDraftByPredicate(args, id));
  }
  return Predicate.or(p);
}
/**
 * Matches changes visible to the named user or group; "self" means the
 * caller. When the name resolves to multiple accounts the per-account
 * visibility predicates are OR'ed together.
 *
 * <p>Bug fix: the account loop previously returned the predicate for the
 * FIRST resolved account, silently discarding the rest and leaving the
 * {@code Predicate.or(p)} below unreachable. All resolved accounts are now
 * accumulated and OR'ed, matching the other multi-account operators
 * (starredby, watchedby, draftby).
 */
@Operator
public Predicate<ChangeData> visibleto(String who)
    throws QueryParseException, OrmException {
  if ("self".equals(who)) {
    return is_visible();
  }
  Set<Account.Id> m = args.accountResolver.findAll(who);
  if (!m.isEmpty()) {
    List<Predicate<ChangeData>> p = Lists.newArrayListWithCapacity(m.size());
    for (Account.Id id : m) {
      p.add(visibleto(args.userFactory.create(args.db, id)));
    }
    return Predicate.or(p);
  }

  // If its not an account, maybe its a group?
  //
  Collection<GroupReference> suggestions = args.groupBackend.suggest(who, null);
  if (!suggestions.isEmpty()) {
    HashSet<AccountGroup.UUID> ids = new HashSet<>();
    for (GroupReference ref : suggestions) {
      ids.add(ref.getUUID());
    }
    return visibleto(new SingleGroupUser(args.capabilityControlFactory, ids));
  }

  throw error("No user or group matches \"" + who + "\".");
}
/** Matches changes visible to the given user. */
public Predicate<ChangeData> visibleto(CurrentUser user) {
  return new IsVisibleToPredicate(args.db, //
      args.changeControlGenericFactory, //
      user);
}
/** Matches changes visible to the calling user; bound to "is:visible". */
public Predicate<ChangeData> is_visible() throws QueryParseException {
  return visibleto(args.getCurrentUser());
}
/** Shorthand alias for {@link #owner(String)}. */
@Operator
public Predicate<ChangeData> o(String who)
    throws QueryParseException, OrmException {
  return owner(who);
}
/** Matches changes owned by the named user(s); "self" means the caller. */
@Operator
public Predicate<ChangeData> owner(String who) throws QueryParseException,
    OrmException {
  return owner(parseAccount(who));
}
/** OR of one OwnerPredicate per resolved account id. */
private Predicate<ChangeData> owner(Set<Account.Id> who) {
  List<OwnerPredicate> ownerPredicates =
      Lists.newArrayListWithCapacity(who.size());
  for (Account.Id accountId : who) {
    ownerPredicates.add(new OwnerPredicate(accountId));
  }
  return Predicate.or(ownerPredicates);
}
/** Matches changes whose owner is a member of the named group. */
@Operator
public Predicate<ChangeData> ownerin(String group)
    throws QueryParseException {
  GroupReference g = GroupBackends.findBestSuggestion(args.groupBackend, group);
  if (g == null) {
    throw error("Group " + group + " not found");
  }
  return new OwnerinPredicate(args.db, args.userFactory, g.getUUID());
}
/** Shorthand alias for {@link #reviewer(String)}. */
@Operator
public Predicate<ChangeData> r(String who)
    throws QueryParseException, OrmException {
  return reviewer(who);
}
/** Matches changes with any of the named user(s) as reviewer, OR'ed. */
@Operator
public Predicate<ChangeData> reviewer(String who)
    throws QueryParseException, OrmException {
  Set<Account.Id> m = parseAccount(who);
  List<ReviewerPredicate> p = Lists.newArrayListWithCapacity(m.size());
  for (Account.Id id : m) {
    p.add(new ReviewerPredicate(id, args.allowsDrafts));
  }
  return Predicate.or(p);
}
/** Matches changes with a reviewer who is a member of the named group. */
@Operator
public Predicate<ChangeData> reviewerin(String group)
    throws QueryParseException {
  GroupReference g = GroupBackends.findBestSuggestion(args.groupBackend, group);
  if (g == null) {
    throw error("Group " + group + " not found");
  }
  return new ReviewerinPredicate(args.db, args.userFactory, g.getUUID());
}
/** Matches changes referencing the tracking id in a commit footer. */
@Operator
public Predicate<ChangeData> tr(String trackingId) {
  return new TrackingIdPredicate(args.trackingFooters, trackingId);
}
/** Alias for {@link #tr(String)}. */
@Operator
public Predicate<ChangeData> bug(String trackingId) {
  return tr(trackingId);
}
/**
 * Caps the number of results returned by the query.
 *
 * <p>Robustness fix: a non-numeric argument previously escaped as an
 * unchecked {@link NumberFormatException}; it is now reported as a
 * {@link QueryParseException}, which the method already declares and which
 * callers of the query parser handle as a user input error.
 */
@Operator
public Predicate<ChangeData> limit(String limit) throws QueryParseException {
  try {
    return new LimitPredicate(Integer.parseInt(limit));
  } catch (NumberFormatException e) {
    throw new QueryParseException("Invalid limit: " + limit, e);
  }
}
/** Matches changes by number of added lines, e.g. added:>100. */
@Operator
public Predicate<ChangeData> added(String value)
    throws QueryParseException {
  return new AddedPredicate(value);
}
/** Matches changes by number of deleted lines. */
@Operator
public Predicate<ChangeData> deleted(String value)
    throws QueryParseException {
  return new DeletedPredicate(value);
}
/** Alias for {@link #delta(String)}. */
@Operator
public Predicate<ChangeData> size(String value)
    throws QueryParseException {
  return delta(value);
}
/** Matches changes by total delta (added + deleted lines). */
@Operator
public Predicate<ChangeData> delta(String value)
    throws QueryParseException {
  return new DeltaPredicate(value);
}
/** Matches changes commented on by the named user(s). */
@Operator
public Predicate<ChangeData> commentby(String who)
    throws QueryParseException, OrmException {
  return commentby(parseAccount(who));
}
/** OR of one CommentByPredicate per resolved account id. */
private Predicate<ChangeData> commentby(Set<Account.Id> who) {
  List<CommentByPredicate> commentPredicates =
      Lists.newArrayListWithCapacity(who.size());
  for (Account.Id accountId : who) {
    commentPredicates.add(new CommentByPredicate(accountId));
  }
  return Predicate.or(commentPredicates);
}
/** Matches changes the named user(s) either own or have commented on. */
@Operator
public Predicate<ChangeData> from(String who)
    throws QueryParseException, OrmException {
  Set<Account.Id> ownerIds = parseAccount(who);
  return Predicate.or(owner(ownerIds), commentby(ownerIds));
}
/**
 * Handles bare query terms with no operator prefix: refs and change ids are
 * matched directly, anything else is OR'ed across every field it could
 * plausibly name (commit, owner, reviewer, file, label, message, comment,
 * project, ref, branch, topic).
 */
@Override
protected Predicate<ChangeData> defaultField(String query) throws QueryParseException {
  if (query.startsWith("refs/")) {
    return ref(query);
  } else if (DEF_CHANGE.matcher(query).matches()) {
    try {
      return change(query);
    } catch (QueryParseException e) {
      // Skip. Not a valid change identifier after all; fall through to the
      // OR of field predicates below.
    }
  }

  List<Predicate<ChangeData>> predicates = Lists.newArrayListWithCapacity(9);
  try {
    predicates.add(commit(query));
  } catch (IllegalArgumentException e) {
    // Skip.
  }
  try {
    predicates.add(owner(query));
  } catch (OrmException | QueryParseException e) {
    // Skip.
  }
  try {
    predicates.add(reviewer(query));
  } catch (OrmException | QueryParseException e) {
    // Skip.
  }
  predicates.add(file(query));
  try {
    predicates.add(label(query));
  } catch (OrmException | QueryParseException e) {
    // Skip.
  }
  predicates.add(message(query));
  predicates.add(comment(query));
  predicates.add(projects(query));
  predicates.add(ref(query));
  predicates.add(branch(query));
  predicates.add(topic(query));
  return Predicate.or(predicates);
}
/**
 * Resolves a user reference to account ids; "self" means the caller.
 *
 * @throws QueryParseException if no account matches {@code who}.
 */
private Set<Account.Id> parseAccount(String who)
    throws QueryParseException, OrmException {
  if ("self".equals(who)) {
    return Collections.singleton(self());
  }
  Set<Account.Id> matches = args.accountResolver.findAll(who);
  if (matches.isEmpty()) {
    throw error("User " + who + " not found");
  }
  return matches;
}
/**
 * Resolves a group name to its best suggestion from the group backend.
 *
 * @throws QueryParseException if no group matches {@code group}.
 */
private GroupReference parseGroup(String group) throws QueryParseException {
  GroupReference g = GroupBackends.findBestSuggestion(args.groupBackend,
      group);
  if (g == null) {
    throw error("Group " + group + " not found");
  }
  return g;
}
/**
 * Looks up change(s) by legacy numeric id or Change-Id key prefix.
 *
 * @throws QueryParseException if the value is neither form or no change
 *     matches it.
 */
private List<Change> parseChange(String value) throws OrmException,
    QueryParseException {
  if (PAT_LEGACY_ID.matcher(value).matches()) {
    return Collections.singletonList(args.db.get().changes()
        .get(Change.Id.parse(value)));
  } else if (PAT_CHANGE_ID.matcher(value).matches()) {
    List<Change> changes =
        asChanges(args.queryProvider.get().byKeyPrefix(parseChangeId(value)));
    if (changes.isEmpty()) {
      throw error("Change " + value + " not found");
    }
    return changes;
  }

  throw error("Change " + value + " not found");
}
/**
 * Normalizes a Change-Id key's leading lower-case 'i' to the canonical 'I'.
 * Callers invoke this only after the value matched PAT_CHANGE_ID, so the
 * string is never empty here.
 */
private static String parseChangeId(String value) {
  if (value.charAt(0) != 'i') {
    return value;
  }
  return "I" + value.substring(1);
}
/** Returns the caller's account id, failing if not signed in. */
private Account.Id self() throws QueryParseException {
  return args.getIdentifiedUser().getAccountId();
}
/**
 * Returns the search index schema, or null when no index collection or no
 * search index is available.
 */
private static Schema<ChangeData> schema(@Nullable IndexCollection indexes) {
  if (indexes == null) {
    return null;
  }
  ChangeIndex index = indexes.getSearchIndex();
  if (index == null) {
    return null;
  }
  return index.getSchema();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.transport;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.EnumMap;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import io.netty.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.GlobalEventExecutor;
import io.netty.util.internal.logging.InternalLoggerFactory;
import io.netty.util.internal.logging.Slf4JLoggerFactory;
import org.apache.cassandra.auth.IAuthenticator;
import org.apache.cassandra.auth.ISaslAwareAuthenticator;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.EncryptionOptions;
import org.apache.cassandra.metrics.ClientMetrics;
import org.apache.cassandra.security.SSLFactory;
import org.apache.cassandra.service.*;
import org.apache.cassandra.transport.messages.EventMessage;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.*;
import io.netty.channel.group.ChannelGroup;
import io.netty.channel.group.DefaultChannelGroup;
import io.netty.handler.ssl.SslHandler;
/**
 * Netty-based server for the CQL native protocol. Manages the listen
 * socket, per-client channel pipelines (optionally TLS), and pushes
 * topology/status/schema events to registered clients.
 */
public class Server implements CassandraDaemon.Server
{
    static
    {
        // Route Netty's internal logging through SLF4J.
        InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory());
    }

    private static final Logger logger = LoggerFactory.getLogger(Server.class);

    /** Native protocol versions supported; only v3 in this build. */
    public static final int VERSION_3 = 3;
    public static final int CURRENT_VERSION = VERSION_3;

    private final ConnectionTracker connectionTracker = new ConnectionTracker();

    // Creates one ServerConnection per accepted channel, registered with the
    // shared connection tracker.
    private final Connection.Factory connectionFactory = new Connection.Factory()
    {
        public Connection newConnection(Channel channel, int version)
        {
            return new ServerConnection(channel, version, connectionTracker);
        }
    };

    /** Address this server listens on for native protocol clients. */
    public final InetSocketAddress socket;
    private final AtomicBoolean isRunning = new AtomicBoolean(false);
    // Created in run(), nulled out in close().
    private EventLoopGroup workerGroup;
    private EventExecutor eventExecutorGroup;

    public Server(InetSocketAddress socket)
    {
        this.socket = socket;
        // Subscribe for cluster lifecycle and schema change notifications so
        // they can be forwarded to connected clients.
        EventNotifier notifier = new EventNotifier(this);
        StorageService.instance.register(notifier);
        MigrationManager.instance.register(notifier);
        registerMetrics();
    }

    public Server(String hostname, int port)
    {
        this(new InetSocketAddress(hostname, port));
    }

    public Server(InetAddress host, int port)
    {
        this(new InetSocketAddress(host, port));
    }

    public Server(int port)
    {
        this(new InetSocketAddress(port));
    }

    /** Starts the server if it is not already running. */
    public void start()
    {
        if(!isRunning())
        {
            run();
        }
    }

    /** Stops the server; no-op if it was not running. */
    public void stop()
    {
        if (isRunning.compareAndSet(true, false))
            close();
    }

    public boolean isRunning()
    {
        return isRunning.get();
    }

    private void run()
    {
        // Check that a SaslAuthenticator can be provided by the configured
        // IAuthenticator. If not, don't start the server.
        IAuthenticator authenticator = DatabaseDescriptor.getAuthenticator();
        if (authenticator.requireAuthentication() && !(authenticator instanceof ISaslAwareAuthenticator))
        {
            logger.error("Not starting native transport as the configured IAuthenticator is not capable of SASL authentication");
            isRunning.compareAndSet(true, false);
            return;
        }

        // Configure the server.
        eventExecutorGroup = new RequestThreadPoolExecutor();
        workerGroup = new NioEventLoopGroup();

        ServerBootstrap bootstrap = new ServerBootstrap()
                                    .group(workerGroup)
                                    .channel(NioServerSocketChannel.class)
                                    .childOption(ChannelOption.TCP_NODELAY, true)
                                    .childOption(ChannelOption.SO_KEEPALIVE, DatabaseDescriptor.getRpcKeepAlive())
                                    .childOption(ChannelOption.ALLOCATOR, CBUtil.allocator)
                                    .childOption(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 32 * 1024)
                                    .childOption(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 8 * 1024);

        // Install a TLS pipeline when client encryption is enabled.
        final EncryptionOptions.ClientEncryptionOptions clientEnc = DatabaseDescriptor.getClientEncryptionOptions();
        if (clientEnc.enabled)
        {
            logger.info("Enabling encrypted CQL connections between client and server");
            bootstrap.childHandler(new SecureInitializer(this, clientEnc));
        }
        else
        {
            bootstrap.childHandler(new Initializer(this));
        }

        // Bind and start to accept incoming connections.
        logger.info("Using Netty Version: {}", Version.identify().entrySet());
        logger.info("Starting listening for CQL clients on {}...", socket);
        Channel channel = bootstrap.bind(socket).channel();
        // The server channel is tracked alongside client channels so that
        // closeAll() also closes the listen socket.
        connectionTracker.allChannels.add(channel);
        isRunning.set(true);
    }

    private void registerMetrics()
    {
        ClientMetrics.instance.addCounter("connectedNativeClients", new Callable<Integer>()
        {
            @Override
            public Integer call() throws Exception
            {
                return connectionTracker.getConnectedClients();
            }
        });
    }

    private void close()
    {
        // Close opened connections
        connectionTracker.closeAll();
        workerGroup.shutdownGracefully();
        workerGroup = null;

        eventExecutorGroup.shutdown();
        eventExecutorGroup = null;
        logger.info("Stop listening for CQL clients");
    }

    /** Tracks every open channel plus per-event-type registration groups. */
    public static class ConnectionTracker implements Connection.Tracker
    {
        // TODO: should we be using the GlobalEventExecutor or defining our own?
        public final ChannelGroup allChannels = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
        private final EnumMap<Event.Type, ChannelGroup> groups = new EnumMap<Event.Type, ChannelGroup>(Event.Type.class);

        public ConnectionTracker()
        {
            // One channel group per event type a client can REGISTER for.
            for (Event.Type type : Event.Type.values())
                groups.put(type, new DefaultChannelGroup(type.toString(), GlobalEventExecutor.INSTANCE));
        }

        public void addConnection(Channel ch, Connection connection)
        {
            allChannels.add(ch);
        }

        public void register(Event.Type type, Channel ch)
        {
            groups.get(type).add(ch);
        }

        public void unregister(Channel ch)
        {
            for (ChannelGroup group : groups.values())
                group.remove(ch);
        }

        /** Broadcasts an event to every channel registered for its type. */
        public void send(Event event)
        {
            groups.get(event.type).writeAndFlush(new EventMessage(event));
        }

        public void closeAll()
        {
            allChannels.close().awaitUninterruptibly();
        }

        public int getConnectedClients()
        {
            /*
              - When server is running: allChannels contains all clients' connections (channels)
                plus one additional channel used for the server's own bootstrap.
              - When server is stopped: the size is 0
            */
            return allChannels.size() != 0 ? allChannels.size() - 1 : 0;
        }
    }

    /** Builds the plaintext channel pipeline for each accepted connection. */
    private static class Initializer extends ChannelInitializer
    {
        // Stateless handlers, safe to share across all channels.
        private static final Message.ProtocolDecoder messageDecoder = new Message.ProtocolDecoder();
        private static final Message.ProtocolEncoder messageEncoder = new Message.ProtocolEncoder();
        private static final Frame.Decompressor frameDecompressor = new Frame.Decompressor();
        private static final Frame.Compressor frameCompressor = new Frame.Compressor();
        private static final Frame.Encoder frameEncoder = new Frame.Encoder();
        private static final Message.Dispatcher dispatcher = new Message.Dispatcher();

        private final Server server;

        public Initializer(Server server)
        {
            this.server = server;
        }

        protected void initChannel(Channel channel) throws Exception
        {
            ChannelPipeline pipeline = channel.pipeline();

            //pipeline.addLast("debug", new LoggingHandler());

            // Frame.Decoder is stateful (tracks the connection) so a fresh
            // instance is created per channel.
            pipeline.addLast("frameDecoder", new Frame.Decoder(server.connectionFactory));
            pipeline.addLast("frameEncoder", frameEncoder);

            pipeline.addLast("frameDecompressor", frameDecompressor);
            pipeline.addLast("frameCompressor", frameCompressor);

            pipeline.addLast("messageDecoder", messageDecoder);
            pipeline.addLast("messageEncoder", messageEncoder);

            // Dispatch request execution onto the request thread pool, off
            // the Netty event loop.
            pipeline.addLast(server.eventExecutorGroup, "executor", dispatcher);
        }
    }

    /** Initializer variant that prepends an SSL handler to the pipeline. */
    private static class SecureInitializer extends Initializer
    {
        private final SSLContext sslContext;
        private final EncryptionOptions encryptionOptions;

        public SecureInitializer(Server server, EncryptionOptions encryptionOptions)
        {
            super(server);
            this.encryptionOptions = encryptionOptions;
            try
            {
                this.sslContext = SSLFactory.createSSLContext(encryptionOptions, encryptionOptions.require_client_auth);
            }
            catch (IOException e)
            {
                throw new RuntimeException("Failed to setup secure pipeline", e);
            }
        }

        protected void initChannel(Channel channel) throws Exception
        {
            // Server-mode engine, one per channel; cipher suites and client
            // auth follow the configured encryption options.
            SSLEngine sslEngine = sslContext.createSSLEngine();
            sslEngine.setUseClientMode(false);
            sslEngine.setEnabledCipherSuites(encryptionOptions.cipher_suites);
            sslEngine.setNeedClientAuth(encryptionOptions.require_client_auth);
            SslHandler sslHandler = new SslHandler(sslEngine);
            super.initChannel(channel);
            // SSL must be the first handler so all traffic is decrypted
            // before reaching the frame decoder.
            channel.pipeline().addFirst("ssl", sslHandler);
        }
    }

    /**
     * Forwards node lifecycle and schema change notifications to clients
     * registered for the corresponding event types.
     */
    private static class EventNotifier implements IEndpointLifecycleSubscriber, IMigrationListener
    {
        private final Server server;
        private static final InetAddress bindAll;
        static {
            try
            {
                // 0.0.0.0 — used to detect wildcard-bound rpc addresses.
                bindAll = InetAddress.getByAddress(new byte[4]);
            }
            catch (UnknownHostException e)
            {
                throw new AssertionError(e);
            }
        }

        private EventNotifier(Server server)
        {
            this.server = server;
        }

        private InetAddress getRpcAddress(InetAddress endpoint)
        {
            try
            {
                InetAddress rpcAddress = InetAddress.getByName(StorageService.instance.getRpcaddress(endpoint));
                // If rpcAddress == 0.0.0.0 (i.e. bound on all addresses), returning that is not very helpful,
                // so return the internal address (which is ok since "we're bound on all addresses").
                // Note that after all nodes are running a version that includes CASSANDRA-5899, rpcAddress should
                // never be 0.0.0.0, so this can eventually be removed.
                return rpcAddress.equals(bindAll) ? endpoint : rpcAddress;
            }
            catch (UnknownHostException e)
            {
                // That should not happen, so log an error, but return the
                // endpoint address since there's a good chance this is right
                logger.error("Problem retrieving RPC address for {}", endpoint, e);
                return endpoint;
            }
        }

        public void onJoinCluster(InetAddress endpoint)
        {
            server.connectionTracker.send(Event.TopologyChange.newNode(getRpcAddress(endpoint), server.socket.getPort()));
        }

        public void onLeaveCluster(InetAddress endpoint)
        {
            server.connectionTracker.send(Event.TopologyChange.removedNode(getRpcAddress(endpoint), server.socket.getPort()));
        }

        public void onMove(InetAddress endpoint)
        {
            server.connectionTracker.send(Event.TopologyChange.movedNode(getRpcAddress(endpoint), server.socket.getPort()));
        }

        public void onUp(InetAddress endpoint)
        {
            server.connectionTracker.send(Event.StatusChange.nodeUp(getRpcAddress(endpoint), server.socket.getPort()));
        }

        public void onDown(InetAddress endpoint)
        {
            server.connectionTracker.send(Event.StatusChange.nodeDown(getRpcAddress(endpoint), server.socket.getPort()));
        }

        public void onCreateKeyspace(String ksName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.CREATED, ksName));
        }

        public void onCreateColumnFamily(String ksName, String cfName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.CREATED, Event.SchemaChange.Target.TABLE, ksName, cfName));
        }

        public void onCreateUserType(String ksName, String typeName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.CREATED, Event.SchemaChange.Target.TYPE, ksName, typeName));
        }

        public void onCreateFunction(String namespace, String functionName)
        {
            // Functions are not announced over the native protocol here.
        }

        public void onUpdateKeyspace(String ksName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.UPDATED, ksName));
        }

        public void onUpdateColumnFamily(String ksName, String cfName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.UPDATED, Event.SchemaChange.Target.TABLE, ksName, cfName));
        }

        public void onUpdateUserType(String ksName, String typeName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.UPDATED, Event.SchemaChange.Target.TYPE, ksName, typeName));
        }

        public void onUpdateFunction(String namespace, String functionName)
        {
            // Functions are not announced over the native protocol here.
        }

        public void onDropKeyspace(String ksName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.DROPPED, ksName));
        }

        public void onDropColumnFamily(String ksName, String cfName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.DROPPED, Event.SchemaChange.Target.TABLE, ksName, cfName));
        }

        public void onDropUserType(String ksName, String typeName)
        {
            server.connectionTracker.send(new Event.SchemaChange(Event.SchemaChange.Change.DROPPED, Event.SchemaChange.Target.TYPE, ksName, typeName));
        }

        public void onDropFunction(String namespace, String functionName)
        {
            // Functions are not announced over the native protocol here.
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.gnd.persistence.local;
import static com.google.common.truth.Truth.assertThat;
import static org.hamcrest.Matchers.samePropertyValuesAs;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gnd.BaseHiltTest;
import com.google.android.gnd.model.Mutation;
import com.google.android.gnd.model.Mutation.SyncStatus;
import com.google.android.gnd.model.Project;
import com.google.android.gnd.model.User;
import com.google.android.gnd.model.basemap.OfflineArea;
import com.google.android.gnd.model.basemap.tile.TileSet;
import com.google.android.gnd.model.basemap.tile.TileSet.State;
import com.google.android.gnd.model.feature.Feature;
import com.google.android.gnd.model.feature.FeatureMutation;
import com.google.android.gnd.model.feature.Point;
import com.google.android.gnd.model.feature.PointFeature;
import com.google.android.gnd.model.feature.PolygonFeature;
import com.google.android.gnd.model.form.Element;
import com.google.android.gnd.model.form.Field;
import com.google.android.gnd.model.form.Form;
import com.google.android.gnd.model.layer.Layer;
import com.google.android.gnd.model.layer.Style;
import com.google.android.gnd.model.observation.Observation;
import com.google.android.gnd.model.observation.ObservationMutation;
import com.google.android.gnd.model.observation.ResponseDelta;
import com.google.android.gnd.model.observation.ResponseMap;
import com.google.android.gnd.model.observation.TextResponse;
import com.google.android.gnd.persistence.local.room.dao.FeatureDao;
import com.google.android.gnd.persistence.local.room.dao.ObservationDao;
import com.google.android.gnd.persistence.local.room.entity.FeatureEntity;
import com.google.android.gnd.persistence.local.room.models.EntityState;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import dagger.hilt.android.testing.HiltAndroidTest;
import io.reactivex.subscribers.TestSubscriber;
import java.util.AbstractCollection;
import java.util.Date;
import java8.util.Optional;
import javax.inject.Inject;
import org.hamcrest.MatcherAssert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
@HiltAndroidTest
@RunWith(RobolectricTestRunner.class)
public class LocalDataStoreTest extends BaseHiltTest {
  // -- Shared immutable fixtures reused by the tests below. --

  // User recorded as the author of all test mutations.
  private static final User TEST_USER =
      User.builder().setId("user id").setEmail("user@gmail.com").setDisplayName("user 1").build();
  // Single optional text field; the only field in TEST_FORM.
  private static final Field TEST_FIELD =
      Field.newBuilder()
          .setId("field id")
          .setIndex(1)
          .setLabel("field label")
          .setRequired(false)
          .setType(Field.Type.TEXT_FIELD)
          .build();
  // Form containing only TEST_FIELD; attached to TEST_LAYER.
  private static final Form TEST_FORM =
      Form.newBuilder()
          .setId("form id")
          .setElements(ImmutableList.of(Element.ofField(TEST_FIELD)))
          .build();
  // Layer registered in TEST_PROJECT under key "layer id".
  private static final Layer TEST_LAYER =
      Layer.newBuilder()
          .setId("layer id")
          .setName("heading title")
          .setDefaultStyle(Style.builder().setColor("000").build())
          .setForm(TEST_FORM)
          .build();
  // Project under which all test features and observations are created.
  private static final Project TEST_PROJECT =
      Project.newBuilder()
          .setId("project id")
          .setTitle("project 1")
          .setDescription("foo description")
          .putLayer("layer id", TEST_LAYER)
          .build();
  // Two distinct point locations, used to exercise create vs. update paths.
  private static final Point TEST_POINT =
      Point.newBuilder().setLatitude(110.0).setLongitude(-23.1).build();
  private static final Point TEST_POINT_2 =
      Point.newBuilder().setLatitude(51.0).setLongitude(44.0).build();
  // Two distinct vertex lists, used to exercise polygon create vs. update paths.
  private static final ImmutableList<Point> TEST_POLYGON_1 =
      ImmutableList.<Point>builder()
          .add(Point.newBuilder().setLatitude(49.874502).setLongitude(8.655993).build())
          .add(Point.newBuilder().setLatitude(49.874099).setLongitude(8.651173).build())
          .add(Point.newBuilder().setLatitude(49.872919).setLongitude(8.651628).build())
          .add(Point.newBuilder().setLatitude(49.873164).setLongitude(8.653515).build())
          .add(Point.newBuilder().setLatitude(49.874343).setLongitude(8.653038).build())
          .build();
  private static final ImmutableList<Point> TEST_POLYGON_2 =
      ImmutableList.<Point>builder()
          .add(Point.newBuilder().setLatitude(49.865374).setLongitude(8.646920).build())
          .add(Point.newBuilder().setLatitude(49.864241).setLongitude(8.647286).build())
          .add(Point.newBuilder().setLatitude(49.864664).setLongitude(8.650387).build())
          .add(Point.newBuilder().setLatitude(49.863102).setLongitude(8.650445).build())
          .add(Point.newBuilder().setLatitude(49.863051).setLongitude(8.647306).build())
          .build();
  // CREATE mutations for a point feature and a polygon feature, respectively.
  private static final FeatureMutation TEST_FEATURE_MUTATION =
      createTestFeatureMutation(TEST_POINT);
  private static final FeatureMutation TEST_POLYGON_FEATURE_MUTATION =
      createTestPolygonFeatureMutation(TEST_POLYGON_1);
  // CREATE mutation for an observation whose single response is "updated response".
  private static final ObservationMutation TEST_OBSERVATION_MUTATION =
      ObservationMutation.builder()
          .setId(1L)
          .setObservationId("observation id")
          .setType(Mutation.Type.CREATE)
          .setSyncStatus(SyncStatus.PENDING)
          .setProjectId("project id")
          .setFeatureId("feature id")
          .setLayerId("layer id")
          .setForm(TEST_FORM)
          .setUserId("user id")
          .setResponseDeltas(
              ImmutableList.of(
                  ResponseDelta.builder()
                      .setFieldId("field id")
                      .setFieldType(Field.Type.TEXT_FIELD)
                      .setNewResponse(TextResponse.fromString("updated response"))
                      .build()))
          .setClientTimestamp(new Date())
          .build();
  // Tile sets in each of the three download states; only ids/paths/urls differ.
  private static final TileSet TEST_PENDING_TILE_SOURCE =
      TileSet.newBuilder()
          .setId("id_1")
          .setState(State.PENDING)
          .setPath("some_path 1")
          .setUrl("some_url 1")
          .setOfflineAreaReferenceCount(1)
          .build();
  private static final TileSet TEST_DOWNLOADED_TILE_SOURCE =
      TileSet.newBuilder()
          .setId("id_2")
          .setState(State.DOWNLOADED)
          .setPath("some_path 2")
          .setUrl("some_url 2")
          .setOfflineAreaReferenceCount(1)
          .build();
  private static final TileSet TEST_FAILED_TILE_SOURCE =
      TileSet.newBuilder()
          .setId("id_3")
          .setState(State.FAILED)
          .setPath("some_path 3")
          .setUrl("some_url 3")
          .setOfflineAreaReferenceCount(1)
          .build();
  // Pending offline area covering a single point at (0, 0).
  private static final OfflineArea TEST_OFFLINE_AREA =
      OfflineArea.newBuilder()
          .setId("id_1")
          .setBounds(LatLngBounds.builder().include(new LatLng(0.0, 0.0)).build())
          .setState(OfflineArea.State.PENDING)
          .setName("Test Area")
          .build();
  // Injected collaborators under test (wired by the enclosing test class's DI setup).
  @Inject LocalDataStore localDataStore;
  @Inject LocalValueStore localValueStore;
  // DAOs used to verify low-level entity state (e.g. soft-delete flags) directly.
  @Inject ObservationDao observationDao;
  @Inject FeatureDao featureDao;
private static FeatureMutation createTestFeatureMutation(Point point) {
return FeatureMutation.builder()
.setId(1L)
.setFeatureId("feature id")
.setType(Mutation.Type.CREATE)
.setSyncStatus(SyncStatus.PENDING)
.setUserId("user id")
.setProjectId("project id")
.setLayerId("layer id")
.setNewLocation(Optional.ofNullable(point))
.setNewPolygonVertices(ImmutableList.of())
.setClientTimestamp(new Date())
.build();
}
private static FeatureMutation createTestPolygonFeatureMutation(
ImmutableList<Point> polygonVertices) {
return FeatureMutation.builder()
.setId(1L)
.setFeatureId("feature id")
.setType(Mutation.Type.CREATE)
.setSyncStatus(SyncStatus.PENDING)
.setUserId("user id")
.setProjectId("project id")
.setLayerId("layer id")
.setNewLocation(Optional.empty())
.setNewPolygonVertices(polygonVertices)
.setClientTimestamp(new Date())
.build();
}
private static void assertEquivalent(ObservationMutation mutation, Observation observation) {
assertThat(mutation.getObservationId()).isEqualTo(observation.getId());
assertThat(mutation.getFeatureId()).isEqualTo(observation.getFeature().getId());
assertThat(mutation.getForm()).isEqualTo(observation.getForm());
assertThat(mutation.getProjectId()).isEqualTo(observation.getProject().getId());
assertThat(mutation.getUserId()).isEqualTo(observation.getLastModified().getUser().getId());
assertThat(mutation.getUserId()).isEqualTo(observation.getCreated().getUser().getId());
MatcherAssert.assertThat(
ResponseMap.builder().applyDeltas(mutation.getResponseDeltas()).build(),
samePropertyValuesAs(observation.getResponses()));
}
  @Test
  public void testInsertAndGetProjects() {
    // Inserting a project completes successfully and the project is then
    // returned by getProjects().
    localDataStore.insertOrUpdateProject(TEST_PROJECT).test().assertComplete();
    localDataStore.getProjects().test().assertValue(ImmutableList.of(TEST_PROJECT));
  }
  @Test
  public void testGetProjectById() {
    // A stored project can be looked up by its id.
    localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
    localDataStore.getProjectById("project id").test().assertValue(TEST_PROJECT);
  }
@Test
public void testDeleteProject() {
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.deleteProject(TEST_PROJECT).test().assertComplete();
localDataStore.getProjects().test().assertValue(AbstractCollection::isEmpty);
}
@Test
public void testRemovedLayerFromProject() {
Layer layer1 =
Layer.newBuilder()
.setId("layer 1")
.setName("layer 1 name")
.setDefaultStyle(Style.builder().setColor("000").build())
.build();
Layer layer2 =
Layer.newBuilder()
.setId("layer 2")
.setName("layer 2 name")
.setDefaultStyle(Style.builder().setColor("000").build())
.build();
Project project =
Project.newBuilder()
.setId("foo id")
.setTitle("foo project")
.setDescription("foo project description")
.putLayer(layer1.getId(), layer1)
.build();
localDataStore.insertOrUpdateProject(project).blockingAwait();
project =
Project.newBuilder()
.setId("foo id")
.setTitle("foo project")
.setDescription("foo project description")
.putLayer(layer2.getId(), layer2)
.build();
localDataStore.insertOrUpdateProject(project).blockingAwait();
localDataStore
.getProjectById("foo id")
.test()
.assertValue(result -> result.getLayers().equals(ImmutableList.of(layer2)));
}
  @Test
  public void testInsertAndGetUser() {
    // Inserting a user completes, and the user can then be fetched by id.
    localDataStore.insertOrUpdateUser(TEST_USER).test().assertComplete();
    localDataStore.getUser("user id").test().assertValue(TEST_USER);
  }
@Test
public void testApplyAndEnqueue_featureMutation() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).test().assertComplete();
// assert that mutation is saved to local database
localDataStore
.getPendingMutations("feature id")
.test()
.assertValue(ImmutableList.of(TEST_FEATURE_MUTATION));
localDataStore
.getFeature(TEST_PROJECT, "feature id")
.test()
.assertValue(feature -> ((PointFeature) feature).getPoint().equals(TEST_POINT));
}
@Test
public void testApplyAndEnqueue_polygonFeatureMutation() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_POLYGON_FEATURE_MUTATION).test().assertComplete();
// assert that mutation is saved to local database
localDataStore
.getPendingMutations("feature id")
.test()
.assertValue(ImmutableList.of(TEST_POLYGON_FEATURE_MUTATION));
localDataStore
.getFeature(TEST_PROJECT, "feature id")
.test()
.assertValue(feature -> ((PolygonFeature) feature).getVertices().equals(TEST_POLYGON_1));
}
@Test
public void testGetFeaturesOnceAndStream() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
TestSubscriber<ImmutableSet<Feature>> subscriber =
localDataStore.getFeaturesOnceAndStream(TEST_PROJECT).test();
subscriber.assertValue(ImmutableSet.of());
localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
PointFeature feature =
(PointFeature) localDataStore.getFeature(TEST_PROJECT, "feature id").blockingGet();
subscriber.assertValueSet(ImmutableSet.of(ImmutableSet.of(), ImmutableSet.of(feature)));
}
@Test
public void testUpdateMutations() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
FeatureMutation mutation = createTestFeatureMutation(TEST_POINT_2);
localDataStore.updateMutations(ImmutableList.of(mutation)).test().assertComplete();
localDataStore
.getPendingMutations(TEST_FEATURE_MUTATION.getFeatureId())
.test()
.assertValue(ImmutableList.of(mutation));
}
@Test
public void testPolygonUpdateMutations() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_POLYGON_FEATURE_MUTATION).blockingAwait();
FeatureMutation mutation = createTestPolygonFeatureMutation(TEST_POLYGON_2);
localDataStore.updateMutations(ImmutableList.of(mutation)).test().assertComplete();
localDataStore
.getPendingMutations(TEST_POLYGON_FEATURE_MUTATION.getFeatureId())
.test()
.assertValue(ImmutableList.of(mutation));
}
@Test
public void testFinalizePendingMutation() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
localDataStore
.finalizePendingMutations(ImmutableList.of(TEST_FEATURE_MUTATION))
.test()
.assertComplete();
localDataStore
.getPendingMutations("feature id")
.test()
.assertValue(AbstractCollection::isEmpty);
}
@Test
public void testMergeFeature() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
PointFeature feature =
(PointFeature) localDataStore.getFeature(TEST_PROJECT, "feature id").blockingGet();
feature = feature.toBuilder().setPoint(TEST_POINT_2).build();
localDataStore.mergeFeature(feature).test().assertComplete();
localDataStore
.getFeature(TEST_PROJECT, "feature id")
.test()
.assertValue(newFeature -> ((PointFeature) newFeature).getPoint().equals(TEST_POINT_2));
}
@Test
public void testMergePolygonFeature() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_POLYGON_FEATURE_MUTATION).blockingAwait();
PolygonFeature feature =
(PolygonFeature) localDataStore.getFeature(TEST_PROJECT, "feature id").blockingGet();
feature = feature.toBuilder().setVertices(TEST_POLYGON_2).build();
localDataStore.mergeFeature(feature).test().assertComplete();
localDataStore
.getFeature(TEST_PROJECT, "feature id")
.test()
.assertValue(
newFeature -> ((PolygonFeature) newFeature).getVertices().equals(TEST_POLYGON_2));
}
  @Test
  public void testApplyAndEnqueue_observationMutation() {
    // Setup: project, user, and the parent feature must exist first.
    localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
    localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
    localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
    localDataStore.applyAndEnqueue(TEST_OBSERVATION_MUTATION).test().assertComplete();
    // Both the feature and observation mutations are queued, in order.
    localDataStore
        .getPendingMutations("feature id")
        .test()
        .assertValue(ImmutableList.of(TEST_FEATURE_MUTATION, TEST_OBSERVATION_MUTATION));
    PointFeature feature =
        (PointFeature) localDataStore.getFeature(TEST_PROJECT, "feature id").blockingGet();
    Observation observation =
        localDataStore.getObservation(feature, "observation id").blockingGet();
    assertEquivalent(TEST_OBSERVATION_MUTATION, observation);
    // now update the inserted observation with new responses
    ImmutableList<ResponseDelta> deltas =
        ImmutableList.of(
            ResponseDelta.builder()
                .setFieldId("field id")
                .setFieldType(Field.Type.TEXT_FIELD)
                .setNewResponse(TextResponse.fromString("value for the really new field"))
                .build());
    // UPDATE mutation with a distinct id so it queues alongside the CREATE.
    ObservationMutation mutation =
        TEST_OBSERVATION_MUTATION.toBuilder()
            .setId(2L)
            .setResponseDeltas(deltas)
            .setType(Mutation.Type.UPDATE)
            .build();
    localDataStore.applyAndEnqueue(mutation).test().assertComplete();
    localDataStore
        .getPendingMutations("feature id")
        .test()
        .assertValue(ImmutableList.of(TEST_FEATURE_MUTATION, TEST_OBSERVATION_MUTATION, mutation));
    // check if the observation was updated in the local database
    observation = localDataStore.getObservation(feature, "observation id").blockingGet();
    assertEquivalent(mutation, observation);
    // also test that getObservations returns the same observation as well
    ImmutableList<Observation> observations =
        localDataStore.getObservations(feature, "form id").blockingGet();
    assertThat(observations).hasSize(1);
    assertEquivalent(mutation, observations.get(0));
  }
@Test
public void testMergeObservation() {
localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
localDataStore.applyAndEnqueue(TEST_OBSERVATION_MUTATION).blockingAwait();
PointFeature feature =
(PointFeature) localDataStore.getFeature(TEST_PROJECT, "feature id").blockingGet();
ResponseMap responseMap =
ResponseMap.builder()
.putResponse("field id", TextResponse.fromString("foo value").get())
.build();
Observation observation =
localDataStore.getObservation(feature, "observation id").blockingGet().toBuilder()
.setResponses(responseMap)
.build();
localDataStore.mergeObservation(observation).test().assertComplete();
ResponseMap responses =
localDataStore
.getObservation(feature, observation.getId())
.test()
.values()
.get(0)
.getResponses();
assertThat("updated response").isEqualTo(responses.getResponse("field id").get().toString());
}
  @Test
  public void testDeleteObservation() {
    // Add test observation
    localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
    localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
    localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
    localDataStore.applyAndEnqueue(TEST_OBSERVATION_MUTATION).blockingAwait();
    // DELETE mutation for the same observation; id cleared so it enqueues anew.
    ObservationMutation mutation =
        TEST_OBSERVATION_MUTATION.toBuilder().setId(null).setType(Mutation.Type.DELETE).build();
    // Calling applyAndEnqueue marks the local observation as deleted (soft delete).
    localDataStore.applyAndEnqueue(mutation).blockingAwait();
    // Verify that local entity exists and its state is updated.
    observationDao
        .findById("observation id")
        .test()
        .assertValue(observationEntity -> observationEntity.getState() == EntityState.DELETED);
    // Verify that the soft-deleted observation doesn't end up in getObservations().
    PointFeature feature =
        (PointFeature) localDataStore.getFeature(TEST_PROJECT, "feature id").blockingGet();
    localDataStore.getObservations(feature, "form id").test().assertValue(ImmutableList.of());
    // After successful remote sync, delete observation is called by LocalMutationSyncWorker
    // to remove the row for good (hard delete).
    localDataStore.deleteObservation("observation id").blockingAwait();
    // Verify that the observation doesn't exist anymore
    localDataStore.getObservation(feature, "observation id").test().assertNoValues();
  }
  @Test
  public void testDeleteFeature() {
    // Seed: user, project, one feature and one observation attached to it.
    localDataStore.insertOrUpdateUser(TEST_USER).blockingAwait();
    localDataStore.insertOrUpdateProject(TEST_PROJECT).blockingAwait();
    localDataStore.applyAndEnqueue(TEST_FEATURE_MUTATION).blockingAwait();
    localDataStore.applyAndEnqueue(TEST_OBSERVATION_MUTATION).blockingAwait();
    TestSubscriber<ImmutableSet<Feature>> subscriber =
        localDataStore.getFeaturesOnceAndStream(TEST_PROJECT).test();
    // Assert that one feature is streamed.
    PointFeature feature =
        (PointFeature) localDataStore.getFeature(TEST_PROJECT, "feature id").blockingGet();
    subscriber.assertValueAt(0, ImmutableSet.of(feature));
    // DELETE mutation for the same feature; id cleared so it enqueues anew.
    FeatureMutation mutation =
        TEST_FEATURE_MUTATION.toBuilder().setId(null).setType(Mutation.Type.DELETE).build();
    // Calling applyAndEnqueue marks the local feature as deleted (soft delete).
    localDataStore.applyAndEnqueue(mutation).blockingAwait();
    // Verify that local entity exists but its state is updated to DELETED.
    featureDao
        .findById("feature id")
        .test()
        .assertValue(featureEntity -> featureEntity.getState() == EntityState.DELETED);
    // Verify that the local feature is now removed from the latest feature stream.
    subscriber.assertValueAt(1, ImmutableSet.of());
    // After successful remote sync, delete feature is called by LocalMutationSyncWorker
    // to remove the row for good (hard delete).
    localDataStore.deleteFeature("feature id").blockingAwait();
    // Verify that the feature doesn't exist anymore
    localDataStore.getFeature(TEST_PROJECT, "feature id").test().assertNoValues();
    // Verify that the linked observation is also deleted.
    localDataStore.getObservation(feature, "observation id").test().assertNoValues();
  }
  @Test
  public void testInsertTile() {
    // Inserting a tile set completes without error.
    localDataStore.insertOrUpdateTileSet(TEST_PENDING_TILE_SOURCE).test().assertComplete();
  }
@Test
public void testGetTile() {
localDataStore.insertOrUpdateTileSet(TEST_PENDING_TILE_SOURCE).blockingAwait();
localDataStore
.getTileSet("some_url 1")
.test()
.assertValueCount(1)
.assertValue(TEST_PENDING_TILE_SOURCE);
}
@Test
public void testGetTilesOnceAndStream() {
TestSubscriber<ImmutableSet<TileSet>> subscriber =
localDataStore.getTileSetsOnceAndStream().test();
subscriber.assertValue(ImmutableSet.of());
localDataStore.insertOrUpdateTileSet(TEST_DOWNLOADED_TILE_SOURCE).blockingAwait();
localDataStore.insertOrUpdateTileSet(TEST_PENDING_TILE_SOURCE).blockingAwait();
subscriber.assertValueSet(
ImmutableSet.of(
ImmutableSet.of(),
ImmutableSet.of(TEST_DOWNLOADED_TILE_SOURCE),
ImmutableSet.of(TEST_DOWNLOADED_TILE_SOURCE, TEST_PENDING_TILE_SOURCE)));
}
@Test
public void testGetPendingTile() {
localDataStore.insertOrUpdateTileSet(TEST_DOWNLOADED_TILE_SOURCE).blockingAwait();
localDataStore.insertOrUpdateTileSet(TEST_FAILED_TILE_SOURCE).blockingAwait();
localDataStore.insertOrUpdateTileSet(TEST_PENDING_TILE_SOURCE).blockingAwait();
localDataStore
.getPendingTileSets()
.test()
.assertValue(ImmutableList.of(TEST_PENDING_TILE_SOURCE));
}
  @Test
  public void testInsertOfflineArea() {
    // Inserting an offline area completes without error.
    localDataStore.insertOrUpdateOfflineArea(TEST_OFFLINE_AREA).test().assertComplete();
  }
@Test
public void testGetOfflineAreas() {
localDataStore.insertOrUpdateOfflineArea(TEST_OFFLINE_AREA).blockingAwait();
localDataStore
.getOfflineAreasOnceAndStream()
.test()
.assertValue(ImmutableList.of(TEST_OFFLINE_AREA));
}
@Test
public void testParseVertices_emptyString() {
assertThat(FeatureEntity.parseVertices("")).isEqualTo(ImmutableList.of());
}
  @Test
  public void testFormatVertices_emptyList() {
    // An empty vertex list serializes to null rather than an empty string.
    assertThat(FeatureEntity.formatVertices(ImmutableList.of())).isNull();
  }
  @Test
  public void testTermsOfServiceAccepted() {
    // Setting the flag should be reflected by the getter.
    localValueStore.setTermsOfServiceAccepted(true);
    assertThat(localValueStore.isTermsOfServiceAccepted()).isTrue();
  }
  @Test
  public void testTermsOfServiceNotAccepted() {
    // The flag defaults to false when never set.
    assertThat(localValueStore.isTermsOfServiceAccepted()).isFalse();
  }
}
| |
/**
* Copyright (c) Microsoft Corporation
* <p/>
* All rights reserved.
* <p/>
* MIT License
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software.
* <p/>
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.microsoft.intellij;
import com.intellij.ide.plugins.cl.PluginClassLoader;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.components.AbstractProjectComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleTypeId;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.util.PlatformUtils;
import com.interopbridges.tools.windowsazure.WindowsAzureProjectManager;
import com.microsoft.applicationinsights.preference.ApplicationInsightsResource;
import com.microsoft.applicationinsights.preference.ApplicationInsightsResourceRegistry;
import com.microsoft.intellij.ui.libraries.AILibraryHandler;
import com.microsoft.intellij.ui.libraries.AzureLibrary;
import com.microsoft.intellij.ui.messages.AzureBundle;
import com.microsoft.intellij.util.AppInsightsCustomEvent;
import com.microsoft.intellij.util.PluginUtil;
import com.microsoft.intellij.util.WAHelper;
import com.microsoftopentechnologies.azurecommons.util.WAEclipseHelperMethods;
import com.microsoftopentechnologies.azurecommons.xmlhandling.DataOperations;
import com.microsoftopentechnologies.azurecommons.deploy.DeploymentEventArgs;
import com.microsoftopentechnologies.azurecommons.deploy.DeploymentEventListener;
import com.microsoftopentechnologies.azurecommons.wacommonutil.FileUtil;
import com.microsoftopentechnologies.azurecommons.xmlhandling.ParseXMLUtilMethods;
import com.microsoftopentechnologies.windowsazure.tools.cspack.Utils;
import javax.swing.event.EventListenerList;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.w3c.dom.Document;
import static com.microsoft.intellij.ui.messages.AzureBundle.message;
public class AzurePlugin extends AbstractProjectComponent {
private static final Logger LOG = Logger.getInstance("#com.microsoft.intellij.AzurePlugin");
public static final String PLUGIN_ID = "azure-toolkit-for-intellij";
public static final String COMMON_LIB_PLUGIN_ID = "azure-services-explorer-plugin";
public static final String COMPONENTSETS_VERSION = "2.8.0.1"; // todo: temporary fix!
public static final String PLUGIN_VERSION = "1.1";
private static final String PREFERENCESETS_VERSION = "2.8.0";
public static final String AZURE_LIBRARIES_VERSION = "0.9.0";
public static final String QPID_LIBRARIES_VERSION = "0.19.0";
public final static int REST_SERVICE_MAX_RETRY_COUNT = 7;
public static boolean IS_WINDOWS = System.getProperty("os.name").toLowerCase().indexOf("win") >= 0;
public static boolean IS_ANDROID_STUDIO = "AndroidStudio".equals(PlatformUtils.getPlatformPrefix());
private static final String COMPONENTSETS_TYPE = "COMPONENTSETS";
private static final String PREFERENCESETS_TYPE = "PREFERENCESETS";
public static File cmpntFile = new File(WAHelper.getTemplateFile(message("cmpntFileName")));
public static String prefFilePath = WAHelper.getTemplateFile(message("prefFileName"));
public static String pluginFolder = String.format("%s%s%s", PathManager.getPluginsPath(), File.separator, AzurePlugin.PLUGIN_ID);
private static final EventListenerList DEPLOYMENT_EVENT_LISTENERS = new EventListenerList();
public static List<DeploymentEventListener> depEveList = new ArrayList<DeploymentEventListener>();
String dataFile = WAHelper.getTemplateFile(message("dataFileName"));
private final AzureSettings azureSettings;
Project project;
public AzurePlugin(Project project) {
super(project);
this.project = project;
this.azureSettings = AzureSettings.getSafeInstance(project);
}
public void projectOpened() {
initializeAIRegistry();
}
public void projectClosed() {
}
/**
* Method is called after plugin is already created and configured. Plugin can start to communicate with
* other plugins only in this method.
*/
public void initComponent() {
if (!IS_ANDROID_STUDIO) {
LOG.info("Starting Azure Plugin");
try {
azureSettings.loadStorage();
//this code is for copying componentset.xml in plugins folder
copyPluginComponents();
initializeTelemetry();
clearTempDirectory();
} catch (Exception e) {
/* This is not a user initiated task
So user should not get any exception prompt.*/
LOG.error(AzureBundle.message("expErlStrtUp"), e);
}
}
}
private void initializeTelemetry() throws Exception {
if (new File(dataFile).exists()) {
String version = DataOperations.getProperty(dataFile, message("pluginVersion"));
if (version == null || version.isEmpty()) {
// proceed with setValues method as no version specified
setValues(dataFile);
} else {
String curVersion = PLUGIN_VERSION;
// compare version
if (curVersion.equalsIgnoreCase(version)) {
// Case of normal IntelliJ restart
// check preference-value & installation-id exists or not else copy values
String prefValue = DataOperations.getProperty(dataFile, message("prefVal"));
String instID = DataOperations.getProperty(dataFile, message("instID"));
if (prefValue == null || prefValue.isEmpty()) {
setValues(dataFile);
} else if (instID == null || instID.isEmpty()) {
Document doc = ParseXMLUtilMethods.parseFile(dataFile);
DateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
DataOperations.updatePropertyValue(doc, message("instID"), dateFormat.format(new Date()));
ParseXMLUtilMethods.saveXMLDocument(dataFile, doc);
}
} else {
// proceed with setValues method. Case of new plugin installation
setValues(dataFile);
}
}
} else {
// copy file and proceed with setValues method
copyResourceFile(message("dataFileName"), dataFile);
setValues(dataFile);
}
}
private void initializeAIRegistry() {
try {
AzureSettings.getSafeInstance(project).loadAppInsights();
Module[] modules = ModuleManager.getInstance(project).getModules();
for (Module module : modules) {
if (module != null && module.isLoaded() && ModuleTypeId.JAVA_MODULE.equals(module.getOptionValue(Module.ELEMENT_TYPE))) {
String aiXMLPath = String.format("%s%s%s", PluginUtil.getModulePath(module), File.separator, message("aiXMLPath"));
if (new File(aiXMLPath).exists()) {
AILibraryHandler handler = new AILibraryHandler();
handler.parseAIConfXmlPath(aiXMLPath);
String key = handler.getAIInstrumentationKey();
if (key != null && !key.isEmpty()) {
String unknown = message("unknown");
List<ApplicationInsightsResource> list =
ApplicationInsightsResourceRegistry.getAppInsightsResrcList();
ApplicationInsightsResource resourceToAdd = new ApplicationInsightsResource(
key, key, unknown, unknown, unknown, unknown, false);
if (!list.contains(resourceToAdd)) {
ApplicationInsightsResourceRegistry.getAppInsightsResrcList().add(resourceToAdd);
}
}
}
}
}
AzureSettings.getSafeInstance(project).saveAppInsights();
} catch (Exception ex) {
AzurePlugin.log(ex.getMessage(), ex);
}
}
private void setValues(final String dataFile) throws Exception {
final Document doc = ParseXMLUtilMethods.parseFile(dataFile);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
boolean accepted = Messages.showYesNoDialog(message("preferenceQueMsg"), message("preferenceQueTtl"), null) == Messages.YES;
DataOperations.updatePropertyValue(doc, message("prefVal"), String.valueOf(accepted));
DataOperations.updatePropertyValue(doc, message("pluginVersion"), PLUGIN_VERSION);
DateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
DataOperations.updatePropertyValue(doc, message("instID"), dateFormat.format(new Date()));
try {
ParseXMLUtilMethods.saveXMLDocument(dataFile, doc);
} catch (Exception ex) {
LOG.error(message("error"), ex);
}
if (accepted) {
AppInsightsCustomEvent.create(message("telAgrEvtName"), "");
}
}
}, ModalityState.defaultModalityState());
}
/**
* Delete %proj% directory from temporary folder during IntelliJ start
* To fix #2943 : Hang invoking a new Azure project,
* PML does not delete .cspack.jar everytime new azure project is created.
* Hence its necessary to delete %proj% directory when plugin with newer version is installed.
* @throws Exception
*/
private void clearTempDirectory() throws Exception {
String tmpPath = System.getProperty("java.io.tmpdir");
String projPath = String.format("%s%s%s", tmpPath, File.separator, "%proj%");
File projFile = new File(projPath);
if (projFile != null) {
WAEclipseHelperMethods.deleteDirectory(projFile);
}
}
private void telemetryAI() {
ModuleManager.getInstance(project).getModules();
}
public String getComponentName() {
return "MSOpenTechTools.AzurePlugin";
}
/**
* Copies MS Open Tech Tools for Azure
* related files in azure-toolkit-for-intellij plugin folder at startup.
*/
private void copyPluginComponents() {
try {
String pluginInstLoc = String.format("%s%s%s", PathManager.getPluginsPath(), File.separator, PLUGIN_ID);
String cmpntFile = String.format("%s%s%s", pluginInstLoc,
File.separator, AzureBundle.message("cmpntFileName"));
String starterKit = String.format("%s%s%s", pluginInstLoc,
File.separator, AzureBundle.message("starterKitFileName"));
String enctFile = String.format("%s%s%s", pluginInstLoc,
File.separator, message("encFileName"));
String prefFile = String.format("%s%s%s", pluginInstLoc,
File.separator, AzureBundle.message("prefFileName"));
// upgrade component sets and preference sets
upgradePluginComponent(cmpntFile, AzureBundle.message("cmpntFileEntry"), AzureBundle.message("oldCmpntFileEntry"), COMPONENTSETS_TYPE);
upgradePluginComponent(prefFile, AzureBundle.message("prefFileEntry"), AzureBundle.message("oldPrefFileEntry"), PREFERENCESETS_TYPE);
// Check for WAStarterKitForJava.zip
if (new File(starterKit).exists()) {
new File(starterKit).delete();
}
// Check for encutil.exe
if (new File(enctFile).exists()) {
new File(enctFile).delete();
}
copyResourceFile(message("starterKitEntry"), starterKit);
copyResourceFile(message("encFileName"), enctFile);
for (AzureLibrary azureLibrary : AzureLibrary.LIBRARIES) {
if (!new File(pluginInstLoc + File.separator + azureLibrary.getLocation()).exists()) {
for (String entryName : Utils.getJarEntries(pluginInstLoc + File.separator + "lib" + File.separator + PLUGIN_ID + ".jar", azureLibrary.getLocation())) {
new File(pluginInstLoc + File.separator + entryName).getParentFile().mkdirs();
copyResourceFile(entryName, pluginInstLoc + File.separator + entryName);
}
}
}
} catch (Exception e) {
LOG.error(e.getMessage(), e);
}
}
/**
* Checks for pluginComponent file.
* If exists checks its version.
* If it has latest version then no upgrade action is needed,
* else checks with older componentsets.xml,
* if identical then deletes existing and copies new one
* else renames existing and copies new one.
*
* @param pluginComponentPath
* @param resource
* @param componentType
* @throws Exception
*/
private void upgradePluginComponent(String pluginComponentPath, String resource,
String oldResource, String componentType) throws Exception {
File pluginComponentFile = new File(pluginComponentPath);
if (pluginComponentFile.exists()) {
String pluginComponentVersion = null;
String resourceFileVersion = null;
// File resourceFile = new File(((PluginClassLoader)AzurePlugin.class.getClassLoader()).findResource(resource).toURI());
try {
if (COMPONENTSETS_TYPE.equals(componentType)) {
pluginComponentVersion = WindowsAzureProjectManager.getComponentSetsVersion(pluginComponentFile);
resourceFileVersion = COMPONENTSETS_VERSION; //WindowsAzureProjectManager.getComponentSetsVersion(resourceFile);
} else {
pluginComponentVersion = WindowsAzureProjectManager.getPreferenceSetsVersion(pluginComponentFile);
resourceFileVersion = PREFERENCESETS_VERSION; //WindowsAzureProjectManager.getPreferenceSetsVersion(resourceFile);
}
} catch (Exception e) {
LOG.error("Error occured while getting version of plugin component " + componentType + ", considering version as null");
}
if ((pluginComponentVersion != null
&& !pluginComponentVersion.isEmpty())
&& pluginComponentVersion.equals(resourceFileVersion)) {
// Do not do anything
} else {
// Check with old plugin component for upgrade scenarios
URL oldPluginComponentUrl = ((PluginClassLoader) AzurePlugin.class.getClassLoader()).findResource(oldResource);
// InputStream oldPluginComponentIs = AzurePlugin.class.getResourceAsStream(oldResourceFile);
boolean isIdenticalWithOld = WAHelper.isFilesIdentical(oldPluginComponentUrl, pluginComponentFile);
if (isIdenticalWithOld) {
// Delete old one
pluginComponentFile.delete();
} else {
// Rename old one
DateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
Date date = new Date();
WAHelper.copyFile(pluginComponentPath, pluginComponentPath + ".old" + dateFormat.format(date));
}
copyResourceFile(resource, pluginComponentPath);
}
} else {
copyResourceFile(resource, pluginComponentPath);
}
}
/**
 * Copies the specified file from the plugin resources to the given destination.
 *
 * @param resourceFile path of the resource inside the plugin bundle
 * @param destFile     absolute path of the destination file to create/overwrite
 */
public static void copyResourceFile(String resourceFile, String destFile) {
    InputStream is = null;
    FileOutputStream fos = null;
    try {
        URL resourceUrl = ((PluginClassLoader) AzurePlugin.class.getClassLoader()).findResource(resourceFile);
        if (resourceUrl == null) {
            // Guard against a missing resource: findResource returns null in that
            // case and the original code would have thrown an uncaught NPE.
            LOG.error("Plugin resource not found: " + resourceFile);
            return;
        }
        is = resourceUrl.openStream();
        File outputFile = new File(destFile);
        fos = new FileOutputStream(outputFile);
        FileUtil.writeFile(is, fos);
    } catch (IOException e) {
        LOG.error(e.getMessage(), e);
    } finally {
        // Close both streams explicitly. Previously the input stream leaked when
        // the FileOutputStream constructor threw (e.g. destination not writable).
        // NOTE(review): FileUtil.writeFile may already close them on success;
        // closing an already-closed stream is a harmless no-op.
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
                LOG.error(e.getMessage(), e);
            }
        }
        if (fos != null) {
            try {
                fos.close();
            } catch (IOException e) {
                LOG.error(e.getMessage(), e);
            }
        }
    }
}
/**
 * Notifies every registered {@link DeploymentEventListener} of a deployment step.
 *
 * @param args the event describing the current deployment step
 */
public static void fireDeploymentEvent(DeploymentEventArgs args) {
    final Object[] listeners = DEPLOYMENT_EVENT_LISTENERS.getListenerList();
    // The listener list stores (class, listener) pairs, hence the step of 2.
    for (int index = 0; index < listeners.length; index = index + 2) {
        if (listeners[index] == DeploymentEventListener.class) {
            DeploymentEventListener listener = (DeploymentEventListener) listeners[index + 1];
            listener.onDeploymentStep(args);
        }
    }
}
/**
 * Registers a listener to be notified of deployment steps fired through
 * {@code fireDeploymentEvent}.
 *
 * @param listener the listener to register
 */
public static void addDeploymentEventListener(DeploymentEventListener listener) {
    DEPLOYMENT_EVENT_LISTENERS.add(DeploymentEventListener.class, listener);
}
/**
 * Unregisters a previously added deployment event listener.
 *
 * @param listener the listener to remove
 */
public static void removeDeploymentEventListener(DeploymentEventListener listener) {
    DEPLOYMENT_EVENT_LISTENERS.remove(DeploymentEventListener.class, listener);
}
// todo: move field somewhere?
/**
 * Unregisters every deployment event listener tracked in {@code depEveList}
 * and then clears the tracking list.
 */
public static void removeUnNecessaryListener() {
    for (DeploymentEventListener trackedListener : depEveList) {
        removeDeploymentEventListener(trackedListener);
    }
    depEveList.clear();
}
/**
 * Logs a message at ERROR level with the exception stack trace, and again
 * at INFO level without it.
 *
 * NOTE(review): the message is deliberately emitted twice (ERROR + INFO);
 * confirm the duplicate INFO entry is intended.
 *
 * @param message the message to log
 * @param ex      the exception whose stack trace is recorded at ERROR level
 */
public static void log(String message, Exception ex) {
    LOG.error(message, ex);
    LOG.info(message);
}
/**
 * Logs a message at INFO level.
 *
 * @param message the message to log
 */
public static void log(String message) {
    LOG.info(message);
}
}
| |
/*
* Copyright 2013-2014 Parisoft Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.parisoft.resty.request;
import static com.github.parisoft.resty.utils.ArrayUtils.isEmpty;
import static com.github.parisoft.resty.utils.StringUtils.emptyIfNull;
import static com.github.parisoft.resty.utils.StringUtils.splitAfterSlashes;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.http.HttpHeaders.ACCEPT;
import static org.apache.http.HttpHeaders.CONTENT_TYPE;
import java.io.IOException;
import java.net.HttpCookie;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.core.Cookie;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHeaders;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.entity.ContentType;
import org.apache.http.message.BasicNameValuePair;
import com.github.parisoft.resty.RESTy;
import com.github.parisoft.resty.client.Client;
import com.github.parisoft.resty.utils.CookieUtils;
/**
* Class that contains methods to configure, create and execute an HTTP request.
*
* @author Andre Paris
*
*/
/**
 * Class that contains methods to configure, create and execute an HTTP request.
 *
 * @author Andre Paris
 *
 */
public class Request {

    private static final String NULL_VALUE = null;

    private MultivaluedMap<String, String> headers = new MultivaluedHashMap<>();
    private List<NameValuePair> queries = new ArrayList<>();
    private List<String> paths = new ArrayList<>();
    private Object entity;

    URI rootUri;

    /**
     * Creates a request from a URI string.<br>
     * The URI must conform the <a href=https://www.ietf.org/rfc/rfc2396.txt>RFC 2396</a> with the <i>path</i> and <i>query</i> properly encoded.<br>
     * <br>
     * For your convenience, call this constructor with the base address of your request - like {@literal <scheme>://<authority>} -
     * and use {@link #path(String...)} and {@link #query(String, String)} to configure the URI path and query respectively without worrying about escape.<br>
     * <br>
     * As example, you can do<br>
     * <pre>
     *    Request request = new Request("http://some.domain.org:1234")
     *        .path("any unescaped path")
     *        .query("don't", "scape too");
     * </pre>
     * or
     * <pre>
     *    Request request = new Request("http://some.domain.org:1234/any%20unescaped%20path?don%27t=scape+too");
     * </pre>
     * <i>Note:</i> this is equivalent to {@link RESTy#request(String)}
     *
     * @param uri The string to be parsed into a URI
     * @throws IllegalArgumentException If the URI string is null or violates RFC 2396
     */
    public Request(String uri) {
        this(stringToUri(uri));
    }

    /**
     * Creates a request from a {@link URI}.<br>
     * The URI must conform the <a href=https://www.ietf.org/rfc/rfc2396.txt>RFC 2396</a> with the <i>path</i> and <i>query</i> properly encoded.<br>
     * <br>
     * For your convenience, call this constructor with the base address of your request - like {@literal <scheme>://<authority>} -
     * and use {@link #path(String...)} and {@link #query(String, String)} to configure the URI path and query respectively without worrying about escape.<br>
     * <br>
     * As example, you can do<br>
     * <pre>
     *    URI partialUri = new URI("http://some.domain.org:1234");
     *    Request request = new Request(partialUri)
     *        .path("any unescaped path")
     *        .query("don't", "scape too");
     * </pre>
     * or
     * <pre>
     *    URI fullUri = new URI("http://some.domain.org:1234/any%20unescaped%20path?don%27t=scape+too");
     *    Request request = new Request(fullUri);
     * </pre>
     * <i>Note:</i> this is equivalent to {@link RESTy#request(URI)}
     *
     * @param uri A {@link URI} as defined by <a href=https://www.ietf.org/rfc/rfc2396.txt>RFC 2396</a>
     * @throws IllegalArgumentException If the URI is null
     */
    public Request(URI uri) {
        if (uri == null) {
            throw new IllegalArgumentException("Cannot create a request: URI cannot be null");
        }
        path(emptyIfNull(uri.getPath()));
        try {
            query(URLEncodedUtils.parse(uri, UTF_8.name()));
            // Keep only scheme/authority/fragment; path and query are managed by
            // this class and re-assembled when the request is executed.
            rootUri = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), null, null, uri.getFragment());
        } catch (Exception e) {
            // Preserve the original exception as the cause instead of flattening
            // it to a bare message, so callers can see where parsing failed.
            throw new IllegalArgumentException("Cannot create a request: " + e.getMessage(), e);
        }
    }

    private static URI stringToUri(String string) {
        if (string == null) {
            throw new IllegalArgumentException("Cannot create a request: URI cannot be null");
        }
        return URI.create(string);
    }

    /**
     * @return The headers configured on this request
     */
    public MultivaluedMap<String, String> headers() {
        return headers;
    }

    /**
     * @return The paths configured on this request
     */
    public List<String> paths() {
        // Normalize in place: drop empty segments except the last one, which is
        // kept so a trailing slash in the original URI is preserved.
        if (!this.paths.isEmpty()) {
            final List<String> normalized = new ArrayList<String>();
            for (int i = 0; i < this.paths.size() - 1; i++) {
                if (!this.paths.get(i).isEmpty()) {
                    normalized.add(this.paths.get(i));
                }
            }
            normalized.add(this.paths.get(this.paths.size() - 1));
            this.paths.clear();
            this.paths.addAll(normalized);
        }
        return paths;
    }

    /**
     * @return The "name=value" pairs of the query configured on this request
     */
    public List<NameValuePair> queries() {
        return queries;
    }

    /**
     * @return The entity body object configured on this request
     */
    public Object entity() {
        return entity;
    }

    /**
     * Sets the <code><a href=http://tools.ietf.org/html/rfc2616#section-14.1>Accept</a></code> header values.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Accept", String...)}
     *
     * @param contentTypes The {@link ContentType}(s) expected as response, or <code>null</code> to remove the Accept header
     * @return this request
     */
    public Request accept(ContentType... contentTypes) {
        if (isEmpty(contentTypes)) {
            return header(ACCEPT, NULL_VALUE);
        }
        for (ContentType contentType : contentTypes) {
            header(ACCEPT, contentType.toString());
        }
        return this;
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2616#section-14.1>Accept</a> header values.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Accept", String...)}
     *
     * @param mediaTypes The {@link MediaType}(s) expected as response, or <code>null</code> to remove the Accept header
     * @return this request
     */
    public Request accept(MediaType... mediaTypes) {
        if (isEmpty(mediaTypes)) {
            return header(ACCEPT, NULL_VALUE);
        }
        for (MediaType mediaType : mediaTypes) {
            header(ACCEPT, mediaType.toString());
        }
        return this;
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2616#section-14.17>Content-Type</a> header value.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Content-Type", String)}
     *
     * @param contentType The {@link ContentType} of the request entity, or <code>null</code> to remove the Content-Type header
     * @return this request
     */
    public Request type(ContentType contentType) {
        if (contentType == null) {
            return header(CONTENT_TYPE, NULL_VALUE);
        }
        return header(CONTENT_TYPE, contentType.toString());
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2616#section-14.17>Content-Type</a> header value.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Content-Type", String)}
     *
     * @param mediaType The {@link MediaType} of the request entity, or <code>null</code> to remove the Content-Type header
     * @return this request
     */
    public Request type(MediaType mediaType) {
        if (mediaType == null) {
            return header(CONTENT_TYPE, NULL_VALUE);
        }
        return header(CONTENT_TYPE, mediaType.toString());
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2109>Cookie</a> header values.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Cookie", String...)}
     *
     * @param cookies The {@link HttpCookie}(s) to send over request, or <code>null</code> to remove the Cookie header
     * @return this request
     */
    public Request cookie(HttpCookie... cookies) {
        return header("Cookie", CookieUtils.toString(cookies));
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2109>Cookie</a> header values.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Cookie", String...)}
     *
     * @param cookies The {@link Cookie}(s) to send over request, or <code>null</code> to remove the Cookie header
     * @return this request
     */
    public Request cookie(Cookie... cookies) {
        return header("Cookie", CookieUtils.toString(cookies));
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2109>Cookie</a> header values.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Cookie", String...)}
     *
     * @param cookies The {@link org.apache.http.cookie.Cookie}(s) to send over request, or <code>null</code> to remove the Cookie header
     * @return this request
     */
    public Request cookie(org.apache.http.cookie.Cookie... cookies) {
        return header("Cookie", CookieUtils.toString(cookies));
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2109>Cookie</a> header values.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@code header("Cookie", String)}
     *
     * @param cookieAsString The cookie value to send over request, or <code>null</code> to remove the Cookie header
     * @return this request
     */
    public Request cookie(String cookieAsString) {
        return header("Cookie", cookieAsString);
    }

    /**
     * Sets a request Authorization header field to Basic authentication <a
     * href=http://tools.ietf.org/html/rfc2617#page-6>(RFC 2617)</a>. <br>
     *
     * @param username A username
     * @param password A password
     * @return this request
     */
    public Request basicAuth(String username, String password) {
        return header("Authorization", "Basic " + org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes()));
    }

    /**
     * Sets a request <a href=http://tools.ietf.org/html/rfc2616#section-5.3>header</a> field.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@link #header(String, String...)}
     *
     * @param name A {@link HttpHeaders} name
     * @param values One or more header values, or <code>null</code> to remove the header
     * @return this request
     */
    public Request header(HttpHeaders name, String... values) {
        return header(name.toString(), values);
    }

    /**
     * Sets a request <a href=http://tools.ietf.org/html/rfc2616#section-5.3>header</a> field.<br>
     *
     * @param name The name of the header
     * @param values One or more header values, or <code>null</code> to remove the header
     * @return this request
     */
    public Request header(String name, String... values) {
        if (isEmpty(values)) {
            headers.remove(name);
            return this;
        }
        // Fetch-or-create the value list once instead of checking the map on
        // every iteration as the previous implementation did.
        List<String> valueList = headers.get(name);
        if (valueList == null) {
            valueList = new ArrayList<String>();
            headers.put(name, valueList);
        }
        for (String value : values) {
            valueList.add(value);
        }
        return this;
    }

    /**
     * Adds some data to the <a href=https://tools.ietf.org/html/rfc3986#section-3.4>query</a> request in the form of "name=value" pairs.<br>
     * The final URI of the request contains a query with all pairs in the order that this method was called, encoded and separated by a single '&amp;' character.<br>
     * <br>
     * <i>Note:</i> the "name=value" pairs cannot be encoded.
     *
     * @param nameValuePairs The "name=value" pairs to be added to the request query, may not be <code>null</code>
     * @return this request
     */
    public Request query(List<NameValuePair> nameValuePairs) {
        queries.addAll(nameValuePairs);
        return this;
    }

    /**
     * Adds some data to the <a href=https://tools.ietf.org/html/rfc3986#section-3.4>query</a> request in the form of "name=value" pairs.<br>
     * The final URI of the request contains a query with all pairs in the order that this method was called, encoded and separated by a single '&amp;' character.
     *
     * @param name The name component of the query, may not be encoded
     * @param value The value component of the query, may not be encoded nor <code>null</code>
     * @return this request
     */
    public Request query(String name, String value) {
        queries.add(new BasicNameValuePair(name, value));
        return this;
    }

    /**
     * Adds some <a href=https://tools.ietf.org/html/rfc3986#section-3.3>path</a>s to the request URI.<br>
     * The final URI of the request contains all paths in the order that this method was called, encoded and separated by a single slash.
     *
     * @param paths One or more paths to append to the request URI, may not be encoded nor <code>null</code>
     * @return this request
     * @throws IllegalArgumentException If the paths are <code>null</code>
     */
    public Request path(String... paths) {
        if (isEmpty(paths)) {
            // Fixed typo in the exception message ("crate" -> "create").
            throw new IllegalArgumentException("Cannot create a request: path cannot be null");
        }
        for (String path : paths) {
            this.paths.addAll(splitAfterSlashes(path));
        }
        return this;
    }

    /**
     * Sets the <a href=http://tools.ietf.org/html/rfc2616#page-43>entity body</a> to be sent over the request.<br>
     * If the entity is not a primitive type nor an instance of {@link String} or {@link HttpEntity},
     * the Content-Type header must be provided to know how to be processed.
     *
     * @param entity The entity body to be sent, may be <code>null</code>
     * @return this request
     * @see {@link Class#isPrimitive()}
     */
    public Request entity(Object entity) {
        this.entity = entity;
        return this;
    }

    /**
     * Creates a {@link Client} instance to execute this request.
     *
     * @return A client to execute this request
     */
    public Client client() {
        return new Client(this);
    }

    /**
     * Returns an {@link HttpRequest} instance configured according to this request.<br>
     * The instance is ready to be executed via an {@link HttpClient}.<br>
     * <br>
     * This method is not intended to be invoked outside the RESTy library.<br>
     * For your convenience just call {@link #client()} and choose the proper method
     * to execute this request in place of calling a {@link HttpClient#execute(HttpUriRequest)} with this method result.<br>
     * <br>
     * <i>Note:</i> this is equivalent to {@link #toHttpRequest(String)}
     *
     * @param method One of {@link RequestMethod}s value to be the request method.
     * @return An {@link HttpRequest} instance from this request
     * @throws IOException In case some problem occurs during the URI's creation or the entity's processing
     * @throws IllegalArgumentException If method is <code>null</code>
     */
    public HttpRequest toHttpRequest(RequestMethod method) throws IOException {
        return new HttpRequest(this, methodToString(method));
    }

    /**
     * Returns an {@link HttpRequest} instance configured according to this request.<br>
     * The instance is ready to be executed via an {@link HttpClient}.<br>
     * <br>
     * This method is not intended to be invoked outside the RESTy library.<br>
     * For your convenience just call {@link #client()} and choose the proper method
     * to execute this request in place of calling a {@link HttpClient#execute(HttpUriRequest)} with this method result.<br>
     *
     * @param method The request method of the request like GET, POST, PUT, DELETE or other.
     * @return An {@link HttpRequest} instance from this request
     * @throws IOException In case some problem occurs during the URI's creation or the entity's processing
     * @throws IllegalArgumentException If method is <code>null</code>
     */
    public HttpRequest toHttpRequest(String method) throws IOException {
        return new HttpRequest(this, methodToString(method));
    }

    private String methodToString(Object methodObject) {
        if (methodObject == null) {
            throw new IllegalArgumentException("HTTP Request Method cannot be null");
        }
        return methodObject.toString();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexGraveyard;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.ShardLockObtainFailedException;
import org.elasticsearch.gateway.GatewayMetaState;
import org.elasticsearch.gateway.LocalAllocateDangledIndices;
import org.elasticsearch.gateway.MetaStateService;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IllegalIndexShardStateException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.similarity.BM25SimilarityProvider;
import org.elasticsearch.indices.IndicesService.ShardDeletionCheckResult;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class IndicesServiceTests extends ESSingleNodeTestCase {
/**
 * Returns the {@link IndicesService} instance of the single test node.
 */
public IndicesService getIndicesService() {
    return getInstanceFromNode(IndicesService.class);
}
/**
 * Returns the {@link NodeEnvironment} instance of the single test node.
 */
public NodeEnvironment getNodeEnvironment() {
    return getInstanceFromNode(NodeEnvironment.class);
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
    // Extend the default plugin set with the test plugin defined below.
    final List<Class<? extends Plugin>> pluginClasses = new ArrayList<>(super.getPlugins());
    pluginClasses.add(TestPlugin.class);
    return pluginClasses;
}
/**
 * Test plugin that registers a custom mapper type ("fake-mapper") and a custom
 * similarity ("fake-similarity"), used to verify that plugin extensions are
 * visible to a stand-alone {@link MapperService}.
 */
public static class TestPlugin extends Plugin implements MapperPlugin {
    public TestPlugin() {}

    @Override
    public Map<String, Mapper.TypeParser> getMappers() {
        // Back the fake mapper type with the keyword field parser.
        return Collections.singletonMap("fake-mapper", new KeywordFieldMapper.TypeParser());
    }

    @Override
    public void onIndexModule(IndexModule indexModule) {
        super.onIndexModule(indexModule);
        indexModule.addSimilarity("fake-similarity", BM25SimilarityProvider::new);
    }
}
@Override
protected boolean resetNodeAfterTest() {
    // Tests here delete/close indices and manipulate on-disk state, so the
    // node is reset after each test to avoid cross-test contamination.
    return true;
}
/**
 * Verifies {@code canDeleteShardContent} for the three visible cases:
 * no shard folder on disk, shard still allocated, and shard removed.
 *
 * Fix: assertEquals takes (message, expected, actual); the original passed
 * the actual value before the expected one, producing misleading failure
 * messages. Argument order is corrected below; pass/fail behavior is unchanged.
 */
public void testCanDeleteShardContent() {
    IndicesService indicesService = getIndicesService();
    IndexMetaData meta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(
            1).build();
    IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings());
    ShardId shardId = new ShardId(meta.getIndex(), 0);
    assertEquals("no shard location", ShardDeletionCheckResult.NO_FOLDER_FOUND,
            indicesService.canDeleteShardContent(shardId, indexSettings));
    IndexService test = createIndex("test");
    shardId = new ShardId(test.index(), 0);
    assertTrue(test.hasShard(0));
    assertEquals("shard is allocated", ShardDeletionCheckResult.STILL_ALLOCATED,
            indicesService.canDeleteShardContent(shardId, test.getIndexSettings()));
    test.removeShard(0, "boom");
    assertEquals("shard is removed", ShardDeletionCheckResult.FOLDER_FOUND_CAN_DELETE,
            indicesService.canDeleteShardContent(shardId, test.getIndexSettings()));
    ShardId notAllocated = new ShardId(test.index(), 100);
    assertEquals("shard that was never on this node should NOT be deletable",
            ShardDeletionCheckResult.NO_FOLDER_FOUND,
            indicesService.canDeleteShardContent(notAllocated, test.getIndexSettings()));
}
/**
 * Verifies that {@code deleteIndexStore} refuses to delete the store of an
 * index that is still present in the cluster state, across the index's
 * lifecycle: while open, after delete+recreate, and while closed.
 */
public void testDeleteIndexStore() throws Exception {
    IndicesService indicesService = getIndicesService();
    IndexService test = createIndex("test");
    ClusterService clusterService = getInstanceFromNode(ClusterService.class);
    IndexMetaData firstMetaData = clusterService.state().metaData().index("test");
    assertTrue(test.hasShard(0));

    try {
        // Deleting the store of an index that is part of the cluster state must fail.
        indicesService.deleteIndexStore("boom", firstMetaData, clusterService.state());
        fail();
    } catch (IllegalStateException ex) {
        // all good
    }

    GatewayMetaState gwMetaState = getInstanceFromNode(GatewayMetaState.class);
    MetaData meta = gwMetaState.loadMetaState();
    assertNotNull(meta);
    assertNotNull(meta.index("test"));
    // Deleting the index removes it from the persisted gateway state.
    assertAcked(client().admin().indices().prepareDelete("test"));

    meta = gwMetaState.loadMetaState();
    assertNotNull(meta);
    assertNull(meta.index("test"));

    // Recreate the index with the same name (new UUID) and give it a document.
    test = createIndex("test");
    client().prepareIndex("test", "type", "1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
    client().admin().indices().prepareFlush("test").get();
    assertHitCount(client().prepareSearch("test").get(), 1);
    IndexMetaData secondMetaData = clusterService.state().metaData().index("test");
    assertAcked(client().admin().indices().prepareClose("test"));
    ShardPath path = ShardPath.loadShardPath(logger, getNodeEnvironment(), new ShardId(test.index(), 0), test.getIndexSettings());
    assertTrue(path.exists());

    try {
        // Even closed, the index is still in the cluster state: deletion must fail.
        indicesService.deleteIndexStore("boom", secondMetaData, clusterService.state());
        fail();
    } catch (IllegalStateException ex) {
        // all good
    }

    assertTrue(path.exists());

    // now delete the old one and make sure we resolve against the name
    try {
        indicesService.deleteIndexStore("boom", firstMetaData, clusterService.state());
        fail();
    } catch (IllegalStateException ex) {
        // all good
    }
    assertAcked(client().admin().indices().prepareOpen("test"));
    ensureGreen("test");
}
/**
 * Verifies pending shard/index deletes: they cannot proceed while the shard
 * lock is held (index open), and are processed once the index is closed.
 *
 * Fix: the no-message assertEquals overload takes (expected, actual); the
 * original passed them in reverse, which only garbles failure messages but
 * never changes pass/fail behavior. Argument order is corrected below.
 */
public void testPendingTasks() throws Exception {
    IndicesService indicesService = getIndicesService();
    IndexService test = createIndex("test");

    assertTrue(test.hasShard(0));
    ShardPath path = test.getShardOrNull(0).shardPath();
    assertTrue(test.getShardOrNull(0).routingEntry().started());
    ShardPath shardPath = ShardPath.loadShardPath(logger, getNodeEnvironment(), new ShardId(test.index(), 0), test.getIndexSettings());
    assertEquals(shardPath, path);
    try {
        indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
        fail("can't get lock");
    } catch (ShardLockObtainFailedException ex) {
        // expected: the shard is still open, so its lock cannot be obtained
    }
    assertTrue(path.exists());

    int numPending = 1;
    if (randomBoolean()) {
        indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
    } else {
        if (randomBoolean()) {
            numPending++;
            indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
        }
        indicesService.addPendingDelete(test.index(), test.getIndexSettings());
    }

    assertAcked(client().admin().indices().prepareClose("test"));
    assertTrue(path.exists());

    assertEquals(numPending, indicesService.numPendingDeletes(test.index()));
    assertTrue(indicesService.hasUncompletedPendingDeletes());

    // shard lock released... we can now delete
    indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
    assertEquals(0, indicesService.numPendingDeletes(test.index()));
    assertFalse(indicesService.hasUncompletedPendingDeletes());
    assertFalse(path.exists());

    if (randomBoolean()) {
        indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
        indicesService.addPendingDelete(new ShardId(test.index(), 1), test.getIndexSettings());
        indicesService.addPendingDelete(new ShardId("bogus", "_na_", 1), test.getIndexSettings());
        assertEquals(2, indicesService.numPendingDeletes(test.index()));
        assertTrue(indicesService.hasUncompletedPendingDeletes());
        // shard lock released... we can now delete
        indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
        assertEquals(0, indicesService.numPendingDeletes(test.index()));
        assertTrue(indicesService.hasUncompletedPendingDeletes()); // "bogus" index has not been removed
    }
    assertAcked(client().admin().indices().prepareOpen("test"));
}
/**
 * Verifies that {@code verifyIndexIsDeleted} refuses to wipe an index that is
 * still in the cluster state, and deletes its on-disk content once the index
 * has been removed from the state.
 */
public void testVerifyIfIndexContentDeleted() throws Exception {
    final Index index = new Index("test", UUIDs.randomBase64UUID());
    final IndicesService indicesService = getIndicesService();
    final NodeEnvironment nodeEnv = getNodeEnvironment();
    final MetaStateService metaStateService = getInstanceFromNode(MetaStateService.class);
    final ClusterService clusterService = getInstanceFromNode(ClusterService.class);

    // Write index metadata to disk without actually allocating any shards.
    final Settings indexSettings = Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
            .build();
    final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName())
            .settings(indexSettings)
            .numberOfShards(1)
            .numberOfReplicas(0)
            .build();
    metaStateService.writeIndex("test index being created", indexMetaData);

    final MetaData metaDataWithIndex = MetaData.builder(clusterService.state().metaData()).put(indexMetaData, true).build();
    final ClusterState stateWithIndex = new ClusterState.Builder(clusterService.state()).metaData(metaDataWithIndex).build();
    try {
        indicesService.verifyIndexIsDeleted(index, stateWithIndex);
        fail("Should not be able to delete index contents when the index is part of the cluster state.");
    } catch (IllegalStateException e) {
        assertThat(e.getMessage(), containsString("Cannot delete index"));
    }

    // Once the index is gone from the cluster state, the on-disk content may be deleted.
    final ClusterState stateWithoutIndex = new ClusterState.Builder(stateWithIndex)
            .metaData(MetaData.builder(stateWithIndex.metaData()).remove(index.getName()))
            .build();
    indicesService.verifyIndexIsDeleted(index, stateWithoutIndex);
    assertFalse("index files should be deleted", FileSystemUtils.exists(nodeEnv.indexPaths(index)));
}
/**
 * Verifies that importing a dangling index whose name collides with an
 * existing alias is rejected (cluster state unchanged), and succeeds once
 * the alias has been removed.
 */
public void testDanglingIndicesWithAliasConflict() throws Exception {
    final String indexName = "test-idx1";
    final String alias = "test-alias";
    final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
    createIndex(indexName);

    // create the alias for the index
    client().admin().indices().prepareAliases().addAlias(indexName, alias).get();
    final ClusterState originalState = clusterService.state();

    // try to import a dangling index with the same name as the alias, it should fail
    final LocalAllocateDangledIndices dangling = getInstanceFromNode(LocalAllocateDangledIndices.class);
    final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                                                   .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())
                                                   .build();
    final IndexMetaData indexMetaData = new IndexMetaData.Builder(alias)
                                            .settings(idxSettings)
                                            .numberOfShards(1)
                                            .numberOfReplicas(0)
                                            .build();
    DanglingListener listener = new DanglingListener();
    dangling.allocateDangled(Arrays.asList(indexMetaData), listener);
    // The listener latch releases on either success or failure; the real check
    // is that the cluster state was not modified by the rejected import.
    listener.latch.await();
    assertThat(clusterService.state(), equalTo(originalState));

    // remove the alias
    client().admin().indices().prepareAliases().removeAlias(indexName, alias).get();

    // now try importing a dangling index with the same name as the alias, it should succeed.
    listener = new DanglingListener();
    dangling.allocateDangled(Arrays.asList(indexMetaData), listener);
    listener.latch.await();
    assertThat(clusterService.state(), not(originalState));
    assertNotNull(clusterService.state().getMetaData().index(alias));
}
/**
 * This test checks an edge case where, if a node had an index (let's call it A with UUID 1), then
 * deleted it (so a tombstone entry for A will exist in the cluster state), then created
 * a new index A with UUID 2, then shutdown, when the node comes back online, it will look at the
 * tombstones for deletions, and it should proceed with trying to delete A with UUID 1 and not
 * throw any errors that the index still exists in the cluster state. This is a case of ensuring
 * that tombstones that have the same name as current valid indices don't cause confusion by
 * trying to delete an index that exists.
 * See https://github.com/elastic/elasticsearch/issues/18054
 */
public void testIndexAndTombstoneWithSameNameOnStartup() throws Exception {
    final String indexName = "test";
    // The live index and the tombstoned index share a name but have distinct UUIDs.
    final Index index = new Index(indexName, UUIDs.randomBase64UUID());
    final IndicesService indicesService = getIndicesService();
    final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                                     .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
                                     .build();
    final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName())
                                            .settings(idxSettings)
                                            .numberOfShards(1)
                                            .numberOfReplicas(0)
                                            .build();
    final Index tombstonedIndex = new Index(indexName, UUIDs.randomBase64UUID());
    final IndexGraveyard graveyard = IndexGraveyard.builder().addTombstone(tombstonedIndex).build();
    final MetaData metaData = MetaData.builder().put(indexMetaData, true).indexGraveyard(graveyard).build();
    final ClusterState clusterState = new ClusterState.Builder(new ClusterName("testCluster")).metaData(metaData).build();
    // if all goes well, this won't throw an exception, otherwise, it will throw an IllegalStateException
    indicesService.verifyIndexIsDeleted(tombstonedIndex, clusterState);
}
/**
 * Listener that counts down a latch when the dangling-indices allocation
 * request completes, so tests can block until it has been processed.
 */
private static class DanglingListener implements LocalAllocateDangledIndices.Listener {
    final CountDownLatch latch = new CountDownLatch(1);

    @Override
    public void onResponse(LocalAllocateDangledIndices.AllocateDangledResponse response) {
        latch.countDown();
    }

    @Override
    public void onFailure(Throwable e) {
        // Failures also release the latch; callers assert on cluster state,
        // not on which callback fired.
        latch.countDown();
    }
}
/**
 * Tests that the {@link MapperService} created by {@link IndicesService#createIndexMapperService(IndexMetaData)} contains
 * custom types and similarities registered by plugins
 */
public void testStandAloneMapperServiceWithPlugins() throws IOException {
    final String indexName = "test";
    final Index index = new Index(indexName, UUIDs.randomBase64UUID());
    final IndicesService indicesService = getIndicesService();
    // Reference the plugin-provided "fake-similarity" so the stand-alone
    // mapper service must resolve it through the plugin registration.
    final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
            .put(IndexModule.SIMILARITY_SETTINGS_PREFIX + ".test.type", "fake-similarity")
            .build();
    final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName())
                                            .settings(idxSettings)
                                            .numberOfShards(1)
                                            .numberOfReplicas(0)
                                            .build();
    MapperService mapperService = indicesService.createIndexMapperService(indexMetaData);
    assertNotNull(mapperService.documentMapperParser().parserContext("type").typeParser("fake-mapper"));
    assertThat(mapperService.documentMapperParser().parserContext("type").getSimilarity("test"),
        instanceOf(BM25SimilarityProvider.class));
}
    /**
     * statsByShard() must not fail outright when gathering stats for a single
     * shard throws an expected exception ({@link IllegalIndexShardStateException}
     * or AlreadyClosedException): the failing shard is skipped and stats for the
     * remaining shards are still returned.
     */
    public void testStatsByShardDoesNotDieFromExpectedExceptions() {
        final int shardCount = randomIntBetween(2, 5);
        final int failedShardId = randomIntBetween(0, shardCount - 1);
        final Index index = new Index("test-index", "abc123");
        // the shard that is going to fail
        final ShardId shardId = new ShardId(index, failedShardId);
        final List<IndexShard> shards = new ArrayList<>(shardCount);
        // expected result: stats for every shard except the failing one
        final List<IndexShardStats> shardStats = new ArrayList<>(shardCount - 1);
        final IndexShardState state = randomFrom(IndexShardState.values());
        final String message = "TEST - expected";
        final RuntimeException expectedException =
            randomFrom(new IllegalIndexShardStateException(shardId, state, message), new AlreadyClosedException(message));
        // this allows us to control the indices that exist
        final IndicesService mockIndicesService = mock(IndicesService.class);
        final IndexService indexService = mock(IndexService.class);
        // generate fake shards and their responses: every shard succeeds except
        // the one at failedShardId, whose stats call throws the expected exception
        for (int i = 0; i < shardCount; ++i) {
            final IndexShard shard = mock(IndexShard.class);
            shards.add(shard);
            if (failedShardId != i) {
                final IndexShardStats successfulShardStats = mock(IndexShardStats.class);
                shardStats.add(successfulShardStats);
                when(mockIndicesService.indexShardStats(mockIndicesService, shard, CommonStatsFlags.ALL)).thenReturn(successfulShardStats);
            } else {
                when(mockIndicesService.indexShardStats(mockIndicesService, shard, CommonStatsFlags.ALL)).thenThrow(expectedException);
            }
        }
        when(mockIndicesService.iterator()).thenReturn(Collections.singleton(indexService).iterator());
        when(indexService.iterator()).thenReturn(shards.iterator());
        when(indexService.index()).thenReturn(index);
        // real one, which has a logger defined
        final IndicesService indicesService = getIndicesService();
        final Map<Index, List<IndexShardStats>> indexStats = indicesService.statsByShard(mockIndicesService, CommonStatsFlags.ALL);
        // the failing shard is silently dropped; all other shards are reported
        assertThat(indexStats.isEmpty(), equalTo(false));
        assertThat("index not defined", indexStats.containsKey(index), equalTo(true));
        assertThat("unexpected shard stats", indexStats.get(index), equalTo(shardStats));
    }
}
| |
package dataMapper.diagram.navigator;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.jface.resource.ImageRegistry;
import org.eclipse.jface.viewers.ITreePathLabelProvider;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.jface.viewers.TreePath;
import org.eclipse.jface.viewers.ViewerLabel;
import org.eclipse.swt.graphics.Image;
import org.eclipse.ui.IMemento;
import org.eclipse.ui.navigator.ICommonContentExtensionSite;
import org.eclipse.ui.navigator.ICommonLabelProvider;
/**
 * Provides text labels and icons for Data Mapper diagram elements shown in the
 * Common Navigator (Project Explorer). Icons are cached in the plugin's
 * {@link ImageRegistry} under keys derived from the element's metamodel URI;
 * text labels are dispatched per visual ID to the generated getters below.
 *
 * NOTE(review): this class is GMF-generated (@generated); regenerating the
 * diagram code will overwrite manual edits, so only the comments were touched.
 *
 * @generated
 */
public class DataMapperNavigatorLabelProvider extends LabelProvider
		implements
			ICommonLabelProvider,
			ITreePathLabelProvider {
	/**
	 * Seeds the image registry with placeholder images for unknown elements and
	 * failed lookups so {@link #getImage(String, IElementType)} always has a
	 * fallback to return.
	 *
	 * @generated
	 */
	static {
		dataMapper.diagram.part.DataMapperDiagramEditorPlugin
				.getInstance()
				.getImageRegistry()
				.put("Navigator?UnknownElement", ImageDescriptor.getMissingImageDescriptor()); //$NON-NLS-1$
		dataMapper.diagram.part.DataMapperDiagramEditorPlugin
				.getInstance()
				.getImageRegistry()
				.put("Navigator?ImageNotFound", ImageDescriptor.getMissingImageDescriptor()); //$NON-NLS-1$
	}

	/**
	 * Updates the label for the last element of the tree path, skipping
	 * navigator items whose view belongs to a different diagram model.
	 *
	 * @generated
	 */
	public void updateLabel(ViewerLabel label, TreePath elementPath) {
		Object element = elementPath.getLastSegment();
		if (element instanceof dataMapper.diagram.navigator.DataMapperNavigatorItem
				&& !isOwnView(((dataMapper.diagram.navigator.DataMapperNavigatorItem) element)
						.getView())) {
			return;
		}
		label.setText(getText(element));
		label.setImage(getImage(element));
	}

	/**
	 * Returns the group icon for navigator groups, the view-derived icon for
	 * own-model navigator items, and the superclass default otherwise.
	 *
	 * @generated
	 */
	public Image getImage(Object element) {
		if (element instanceof dataMapper.diagram.navigator.DataMapperNavigatorGroup) {
			dataMapper.diagram.navigator.DataMapperNavigatorGroup group = (dataMapper.diagram.navigator.DataMapperNavigatorGroup) element;
			return dataMapper.diagram.part.DataMapperDiagramEditorPlugin
					.getInstance().getBundledImage(group.getIcon());
		}
		if (element instanceof dataMapper.diagram.navigator.DataMapperNavigatorItem) {
			dataMapper.diagram.navigator.DataMapperNavigatorItem navigatorItem = (dataMapper.diagram.navigator.DataMapperNavigatorItem) element;
			if (!isOwnView(navigatorItem.getView())) {
				return super.getImage(element);
			}
			return getImage(navigatorItem.getView());
		}
		return super.getImage(element);
	}

	/**
	 * Maps the view's visual ID to the registry key / element type pair used to
	 * look up its icon; unknown visual IDs get the "UnknownElement" placeholder.
	 *
	 * @generated
	 */
	public Image getImage(View view) {
		switch (dataMapper.diagram.part.DataMapperVisualIDRegistry
				.getVisualID(view)) {
			case dataMapper.diagram.edit.parts.InNode2EditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?InNode", dataMapper.diagram.providers.DataMapperElementTypes.InNode_3008); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.ConcatEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?Concat", dataMapper.diagram.providers.DataMapperElementTypes.Concat_3013); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.TreeNode3EditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?TreeNode", dataMapper.diagram.providers.DataMapperElementTypes.TreeNode_3011); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?Attribute", dataMapper.diagram.providers.DataMapperElementTypes.Attribute_3004); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?Element", dataMapper.diagram.providers.DataMapperElementTypes.Element_3007); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.OperationsEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?Operations", dataMapper.diagram.providers.DataMapperElementTypes.Operations_3012); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.OutNode3EditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?OutNode", dataMapper.diagram.providers.DataMapperElementTypes.OutNode_3015); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Diagram?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?DataMapperRoot", dataMapper.diagram.providers.DataMapperElementTypes.DataMapperRoot_1000); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.OutNode2EditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?OutNode", dataMapper.diagram.providers.DataMapperElementTypes.OutNode_3009); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.DataMapperLinkEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Link?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?DataMapperLink", dataMapper.diagram.providers.DataMapperElementTypes.DataMapperLink_4001); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.OutputEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?Output", dataMapper.diagram.providers.DataMapperElementTypes.Output_3010); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.OutNodeEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?OutNode", dataMapper.diagram.providers.DataMapperElementTypes.OutNode_3006); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.InputEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?Input", dataMapper.diagram.providers.DataMapperElementTypes.Input_3001); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.TreeNodeEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?TreeNode", dataMapper.diagram.providers.DataMapperElementTypes.TreeNode_3002); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.InNode3EditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?InNode", dataMapper.diagram.providers.DataMapperElementTypes.InNode_3014); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?TreeNode", dataMapper.diagram.providers.DataMapperElementTypes.TreeNode_3003); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.InNodeEditPart.VISUAL_ID :
				return getImage(
						"Navigator?Node?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?InNode", dataMapper.diagram.providers.DataMapperElementTypes.InNode_3005); //$NON-NLS-1$
			case dataMapper.diagram.edit.parts.DataMapperDiagramEditPart.VISUAL_ID :
				return getImage(
						"Navigator?TopLevelNode?http:///org/wso2/developerstudio/eclipse/gmf/datamapper?DataMapperDiagram", dataMapper.diagram.providers.DataMapperElementTypes.DataMapperDiagram_2001); //$NON-NLS-1$
		}
		return getImage("Navigator?UnknownElement", null); //$NON-NLS-1$
	}

	/**
	 * Looks the image up in the plugin registry, lazily populating it from the
	 * element type's image on first use; when nothing can be resolved the
	 * "ImageNotFound" placeholder is cached under the key.
	 *
	 * @generated
	 */
	private Image getImage(String key, IElementType elementType) {
		ImageRegistry imageRegistry = dataMapper.diagram.part.DataMapperDiagramEditorPlugin
				.getInstance().getImageRegistry();
		Image image = imageRegistry.get(key);
		if (image == null
				&& elementType != null
				&& dataMapper.diagram.providers.DataMapperElementTypes
						.isKnownElementType(elementType)) {
			image = dataMapper.diagram.providers.DataMapperElementTypes
					.getImage(elementType);
			imageRegistry.put(key, image);
		}
		if (image == null) {
			image = imageRegistry.get("Navigator?ImageNotFound"); //$NON-NLS-1$
			imageRegistry.put(key, image);
		}
		return image;
	}

	/**
	 * Returns the group name for navigator groups, the view-derived text for
	 * own-model navigator items (null for foreign views), and the superclass
	 * default otherwise.
	 *
	 * @generated
	 */
	public String getText(Object element) {
		if (element instanceof dataMapper.diagram.navigator.DataMapperNavigatorGroup) {
			dataMapper.diagram.navigator.DataMapperNavigatorGroup group = (dataMapper.diagram.navigator.DataMapperNavigatorGroup) element;
			return group.getGroupName();
		}
		if (element instanceof dataMapper.diagram.navigator.DataMapperNavigatorItem) {
			dataMapper.diagram.navigator.DataMapperNavigatorItem navigatorItem = (dataMapper.diagram.navigator.DataMapperNavigatorItem) element;
			if (!isOwnView(navigatorItem.getView())) {
				return null;
			}
			return getText(navigatorItem.getView());
		}
		return super.getText(element);
	}

	/**
	 * Dispatches to the per-visual-ID text getter; unresolved domain-element
	 * proxies and unknown visual IDs get descriptive placeholder text.
	 *
	 * @generated
	 */
	public String getText(View view) {
		if (view.getElement() != null && view.getElement().eIsProxy()) {
			return getUnresolvedDomainElementProxyText(view);
		}
		switch (dataMapper.diagram.part.DataMapperVisualIDRegistry
				.getVisualID(view)) {
			case dataMapper.diagram.edit.parts.InNode2EditPart.VISUAL_ID :
				return getInNode_3008Text(view);
			case dataMapper.diagram.edit.parts.ConcatEditPart.VISUAL_ID :
				return getConcat_3013Text(view);
			case dataMapper.diagram.edit.parts.TreeNode3EditPart.VISUAL_ID :
				return getTreeNode_3011Text(view);
			case dataMapper.diagram.edit.parts.AttributeEditPart.VISUAL_ID :
				return getAttribute_3004Text(view);
			case dataMapper.diagram.edit.parts.ElementEditPart.VISUAL_ID :
				return getElement_3007Text(view);
			case dataMapper.diagram.edit.parts.OperationsEditPart.VISUAL_ID :
				return getOperations_3012Text(view);
			case dataMapper.diagram.edit.parts.OutNode3EditPart.VISUAL_ID :
				return getOutNode_3015Text(view);
			case dataMapper.diagram.edit.parts.DataMapperRootEditPart.VISUAL_ID :
				return getDataMapperRoot_1000Text(view);
			case dataMapper.diagram.edit.parts.OutNode2EditPart.VISUAL_ID :
				return getOutNode_3009Text(view);
			case dataMapper.diagram.edit.parts.DataMapperLinkEditPart.VISUAL_ID :
				return getDataMapperLink_4001Text(view);
			case dataMapper.diagram.edit.parts.OutputEditPart.VISUAL_ID :
				return getOutput_3010Text(view);
			case dataMapper.diagram.edit.parts.OutNodeEditPart.VISUAL_ID :
				return getOutNode_3006Text(view);
			case dataMapper.diagram.edit.parts.InputEditPart.VISUAL_ID :
				return getInput_3001Text(view);
			case dataMapper.diagram.edit.parts.TreeNodeEditPart.VISUAL_ID :
				return getTreeNode_3002Text(view);
			case dataMapper.diagram.edit.parts.InNode3EditPart.VISUAL_ID :
				return getInNode_3014Text(view);
			case dataMapper.diagram.edit.parts.TreeNode2EditPart.VISUAL_ID :
				return getTreeNode_3003Text(view);
			case dataMapper.diagram.edit.parts.InNodeEditPart.VISUAL_ID :
				return getInNode_3005Text(view);
			case dataMapper.diagram.edit.parts.DataMapperDiagramEditPart.VISUAL_ID :
				return getDataMapperDiagram_2001Text(view);
		}
		return getUnknownElementText(view);
	}

	// The getters below are generated per visual ID. Elements with a domain
	// "name" attribute return it (logging an error when the view has no domain
	// element); all others intentionally have empty labels.

	/**
	 * @generated
	 */
	private String getDataMapperRoot_1000Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getDataMapperLink_4001Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getAttribute_3004Text(View view) {
		dataMapper.Attribute domainModelElement = (dataMapper.Attribute) view
				.getElement();
		if (domainModelElement != null) {
			return domainModelElement.getName();
		} else {
			dataMapper.diagram.part.DataMapperDiagramEditorPlugin
					.getInstance()
					.logError(
							"No domain element for view with visualID = " + 3004); //$NON-NLS-1$
			return ""; //$NON-NLS-1$
		}
	}

	/**
	 * @generated
	 */
	private String getTreeNode_3003Text(View view) {
		dataMapper.TreeNode domainModelElement = (dataMapper.TreeNode) view
				.getElement();
		if (domainModelElement != null) {
			return domainModelElement.getName();
		} else {
			dataMapper.diagram.part.DataMapperDiagramEditorPlugin
					.getInstance()
					.logError(
							"No domain element for view with visualID = " + 3003); //$NON-NLS-1$
			return ""; //$NON-NLS-1$
		}
	}

	/**
	 * @generated
	 */
	private String getOutput_3010Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getInNode_3008Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getOutNode_3009Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getDataMapperDiagram_2001Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getOutNode_3006Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getConcat_3013Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getOperations_3012Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getInput_3001Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getInNode_3014Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getTreeNode_3002Text(View view) {
		dataMapper.TreeNode domainModelElement = (dataMapper.TreeNode) view
				.getElement();
		if (domainModelElement != null) {
			return domainModelElement.getName();
		} else {
			dataMapper.diagram.part.DataMapperDiagramEditorPlugin
					.getInstance()
					.logError(
							"No domain element for view with visualID = " + 3002); //$NON-NLS-1$
			return ""; //$NON-NLS-1$
		}
	}

	/**
	 * @generated
	 */
	private String getElement_3007Text(View view) {
		dataMapper.Element domainModelElement = (dataMapper.Element) view
				.getElement();
		if (domainModelElement != null) {
			return domainModelElement.getName();
		} else {
			dataMapper.diagram.part.DataMapperDiagramEditorPlugin
					.getInstance()
					.logError(
							"No domain element for view with visualID = " + 3007); //$NON-NLS-1$
			return ""; //$NON-NLS-1$
		}
	}

	/**
	 * @generated
	 */
	private String getOutNode_3015Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getInNode_3005Text(View view) {
		return ""; //$NON-NLS-1$
	}

	/**
	 * @generated
	 */
	private String getTreeNode_3011Text(View view) {
		dataMapper.TreeNode domainModelElement = (dataMapper.TreeNode) view
				.getElement();
		if (domainModelElement != null) {
			return domainModelElement.getName();
		} else {
			dataMapper.diagram.part.DataMapperDiagramEditorPlugin
					.getInstance()
					.logError(
							"No domain element for view with visualID = " + 3011); //$NON-NLS-1$
			return ""; //$NON-NLS-1$
		}
	}

	/**
	 * @generated
	 */
	private String getUnknownElementText(View view) {
		return "<UnknownElement Visual_ID = " + view.getType() + ">"; //$NON-NLS-1$ //$NON-NLS-2$
	}

	/**
	 * @generated
	 */
	private String getUnresolvedDomainElementProxyText(View view) {
		return "<Unresolved domain element Visual_ID = " + view.getType() + ">"; //$NON-NLS-1$ //$NON-NLS-2$
	}

	/**
	 * @generated
	 */
	public void init(ICommonContentExtensionSite aConfig) {
	}

	/**
	 * @generated
	 */
	public void restoreState(IMemento aMemento) {
	}

	/**
	 * @generated
	 */
	public void saveState(IMemento aMemento) {
	}

	/**
	 * @generated
	 */
	public String getDescription(Object anElement) {
		return null;
	}

	/**
	 * True when the view belongs to this editor's diagram model (matching
	 * model IDs); foreign views are ignored by the label methods above.
	 *
	 * @generated
	 */
	private boolean isOwnView(View view) {
		return dataMapper.diagram.edit.parts.DataMapperRootEditPart.MODEL_ID
				.equals(dataMapper.diagram.part.DataMapperVisualIDRegistry
						.getModelID(view));
	}
}
| |
/*
* Copyright IBM Corp. 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.dataworks;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.security.GeneralSecurityException;
import java.security.cert.X509Certificate;
import javax.net.ssl.SSLContext;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.conn.ssl.AllowAllHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLContextBuilder;
import org.apache.http.conn.ssl.TrustStrategy;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.HttpClientBuilder;
import com.ibm.json.java.JSONArray;
import com.ibm.json.java.JSONObject;
/**
* This class implements a REST resource for the IBM DataWorks service.
*
* It consists of several functions:
* 1. saveActivity: Creates an IBM DataWorks activity.
*
* HTTP request:
* POST to URL .../dataworks/dc/v1/activities
* Request JSON example:
* {
* "activityPatternId": "DataLoad",
* "name": "MyActivity",
* "inputDocument": {
* ...
* }
* }
*
* Response JSON example:
* {
* "activityId": "8d3905eb.529170ae.0824huvs9.9g0gah5.pr7iln.4adcqi3mp48usq5d49s4o",
* "activityURL": "https://xxx:9443/ibm/dataworks/dc/v1/activities/8d3905eb.529170ae.0824huvs9.9g0gah5.pr7iln.4adcqi3mp48usq5d49s4o",
 *     "inputDocument": {
* ...
* },
* "outputDocument": {
* ...
* },
* "id": "8d3905eb.3d01858f.0824i2hqg.fcm9n4r.6m1upb.3igiaerh14nask40toouo",
* "URL": "https://vmlnxbt01:9443/ibm/dataworks/dc/v1/activities/8d3905eb.529170ae.0824huvs9.9g0gah5.pr7iln.4adcqi3mp48usq5d49s4o/activityRuns/8d3905eb.3d01858f.0824i2hqg.fcm9n4r.6m1upb.3igiaerh14nask40toouo",
* "createdUser": "user",
* "createdTimeStamp": "2014-10-13T15:57:27+00:00"
* }
*
* 2. runActivity: runs the activity created
* 3. getRun: gets the status of an activity run
* 4. getRunLogs: gets the extended logs for an activity run
*
*/
@Path("/activities")
public class DataLoadResource {

    /** Connection details (URL, user, password) of the bound IBM DataWorks service. */
    private final VcapServicesInfo vcapInfo;

    /**
     * Initializes the IBM DataWorks resource from the VCAP_SERVICES binding.
     */
    public DataLoadResource()
    {
        vcapInfo = new VcapServicesInfo("DataWorks");
    }

    /**
     * Save an activity. Returns JSON containing the activity ID.
     *
     * @param inputObj the input json object describing the activity
     * @return the downstream service's response payload, or an error payload
     */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    @Consumes(MediaType.APPLICATION_JSON)
    public Response saveActivity(JSONObject inputObj)
    {
        try {
            // POST the activity definition to .../activities
            HttpClient client = getAuthenticatedHttpClient();
            String activitiesUrl = vcapInfo.getDataLoadUrl() + "/activities";
            HttpPost postRequest = new HttpPost(activitiesUrl);
            StringEntity input = new StringEntity(inputObj.serialize());
            input.setContentType(MediaType.APPLICATION_JSON);
            postRequest.setEntity(input);
            postRequest.setHeader("Accept", MediaType.APPLICATION_JSON);
            return forwardObjectResponse(client, postRequest, "SavingActivityFailed");
        } catch (Exception exc) {
            return internalServerError(exc);
        }
    }

    /**
     * Run a previously saved activity.
     *
     * @param activityId the id returned by {@link #saveActivity}
     * @return the downstream run descriptor, or an error payload
     */
    @POST
    @Path("{activityId}/activityRuns")
    @Produces(MediaType.APPLICATION_JSON)
    public Response runActivity(@Context HttpHeaders headers,
                                @Context UriInfo info,
                                @PathParam("activityId") String activityId) {
        try {
            // POST to .../activities/{activityId}/activityRuns
            HttpClient client = getAuthenticatedHttpClient();
            String activityRunUrl = vcapInfo.getDataLoadUrl() + "/activities/" +
                    activityId + "/activityRuns";
            HttpPost postRequest = new HttpPost(activityRunUrl);
            postRequest.setHeader("Accept", MediaType.APPLICATION_JSON);
            postRequest.setHeader("Content-Type", MediaType.APPLICATION_JSON);
            return forwardObjectResponse(client, postRequest, "RunningActivityFailed");
        } catch (Exception exc) {
            return internalServerError(exc);
        }
    }

    /**
     * Get the status of an activity run.
     */
    @GET
    @Path("{activityId}/activityRuns/{runId}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getRun( @Context HttpHeaders headers,
                            @Context UriInfo info,
                            @PathParam("activityId") String activityId,
                            @PathParam("runId") String runId) {
        try {
            // GET .../activities/{activityId}/activityRuns/{runId}
            HttpClient client = getAuthenticatedHttpClient();
            String activityRunUrl = vcapInfo.getDataLoadUrl() + "/activities/" + activityId +
                    "/activityRuns/" + runId;
            HttpGet getRequest = new HttpGet(activityRunUrl);
            getRequest.setHeader("Accept", "application/json");
            getRequest.setHeader("Content-Type", "application/json");
            return forwardObjectResponse(client, getRequest, "Getting Run Status");
        } catch (Exception exc) {
            return internalServerError(exc);
        }
    }

    /**
     * Get the extended logs for an activity run. Unlike the other endpoints,
     * the logs endpoint returns a JSON array rather than a JSON object.
     */
    @GET
    @Path("{activityId}/activityRuns/{runId}/logs")
    @Produces(MediaType.APPLICATION_JSON)
    public Response getRunLogs( @Context HttpHeaders headers,
                                @Context UriInfo info,
                                @PathParam("activityId") String activityId,
                                @PathParam("runId") String runId) {
        try {
            HttpClient client = getAuthenticatedHttpClient();
            // '?all=true' requests all the extended logs, if available
            String activityRunLogsUrl = vcapInfo.getDataLoadUrl() + "/activities/" + activityId +
                    "/activityRuns/" + runId + "/logs?all=true";
            HttpGet getRequest = new HttpGet(activityRunLogsUrl);
            getRequest.setHeader("Accept", "application/json");
            getRequest.setHeader("Content-Type", "application/json");
            HttpResponse response = client.execute(getRequest);
            int status = response.getStatusLine().getStatusCode();
            // Check the status code and relay an error payload if it is not 200
            if (status != 200) {
                JSONObject errorObject = createErrorObject("Getting Run Logs Failed", response);
                return Response.status(status).entity(errorObject).build();
            }
            JSONArray getResponse = JSONArray.parse(response.getEntity().getContent());
            return Response.status(status).entity(getResponse).build();
        } catch (Exception exc) {
            return internalServerError(exc);
        }
    }

    /**
     * Executes the given request and relays the JSON-object response body to
     * the caller. A non-200 status is converted into an error payload tagged
     * with errorContext. Shared by saveActivity/runActivity/getRun.
     */
    private Response forwardObjectResponse(HttpClient client, HttpUriRequest request, String errorContext)
            throws IOException {
        HttpResponse response = client.execute(request);
        int status = response.getStatusLine().getStatusCode();
        // Check the status code and relay an error payload if it is not 200
        if (status != 200) {
            JSONObject errorObject = createErrorObject(errorContext, response);
            return Response.status(status).entity(errorObject).build();
        }
        JSONObject body = JSONObject.parse(response.getEntity().getContent());
        return Response.status(status).entity(body).build();
    }

    /** Wraps an unexpected exception into a 500 response with an error payload. */
    private Response internalServerError(Exception exc) {
        JSONObject errorObject = createErrorObject(exc);
        return Response.status(Status.INTERNAL_SERVER_ERROR).entity(errorObject).build();
    }

    /**
     * Create a JSON object containing a simple error message plus any details
     * the downstream response body supplies. Fields missing from the downstream
     * payload keep their defaults (previously a missing field nulled out the
     * caller-supplied message/severity).
     */
    private JSONObject createErrorObject(String errorMessage, HttpResponse response)
    {
        String msgId = "";
        String msgSeverity = "error";
        String msgText = errorMessage;
        String msgExplanation = "";
        String msgResponse = "";
        try {
            if (response.getEntity() != null) {
                InputStream is = response.getEntity().getContent();
                if (is != null) {
                    JSONObject errObj = JSONObject.parse(is);
                    // Only overwrite a default when the service actually
                    // provided a value for that field.
                    if (errObj.get("msgId") != null) {
                        msgId = (String) errObj.get("msgId");
                    }
                    if (errObj.get("msgSeverity") != null) {
                        msgSeverity = (String) errObj.get("msgSeverity");
                    }
                    if (errObj.get("msgText") != null) {
                        msgText = (String) errObj.get("msgText");
                    }
                    if (errObj.get("msgExplanation") != null) {
                        msgExplanation = (String) errObj.get("msgExplanation");
                    }
                    if (errObj.get("msgResponse") != null) {
                        msgResponse = (String) errObj.get("msgResponse");
                    }
                }
            }
        } catch (IllegalStateException e) {
            return createErrorObject(e);
        } catch (IOException e) {
            return createErrorObject(e);
        }
        return createErrorObject(msgId, msgSeverity, msgText,
                msgExplanation, msgResponse);
    }

    /**
     * Create an error JSON object from an exception, embedding the full stack
     * trace as the message text for easier diagnosis.
     */
    private JSONObject createErrorObject(Exception exc)
    {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        exc.printStackTrace(pw);
        pw.close();
        String details = sw.toString();
        return this.createErrorObject("500", "error", details, "", "");
    }

    /**
     * Create an appropriate error response payload from its individual fields.
     */
    private JSONObject createErrorObject(String msgId, String msgSeverity,
            String msgText, String msgExplanation, String msgResponse) {
        JSONObject json = new JSONObject();
        json.put("msgId", msgId);
        json.put("msgSeverity", msgSeverity);
        json.put("msgText", msgText);
        json.put("msgExplanation", msgExplanation);
        json.put("msgResponse", msgResponse);
        return json;
    }

    /**
     * Create an HTTP client object that is authenticated with the user and password
     * of the IBM DataWorks Service.
     *
     * SECURITY NOTE: this client accepts ANY server certificate and hostname.
     * That is tolerable for this demo only; production code must validate the
     * service's certificate chain and hostname instead.
     */
    private HttpClient getAuthenticatedHttpClient() throws GeneralSecurityException {
        // build credentials object
        UsernamePasswordCredentials creds = new UsernamePasswordCredentials(vcapInfo.getUser(), vcapInfo.getPassword());
        CredentialsProvider credsProvider = new BasicCredentialsProvider();
        credsProvider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT), creds);
        // For demo purposes only: always accept the certificate
        TrustStrategy acceptAllTrustStrategy = new TrustStrategy() {
            @Override
            public boolean isTrusted(X509Certificate[] certificate, String authType) {
                return true;
            }
        };
        SSLContextBuilder contextBuilder = new SSLContextBuilder();
        SSLContext context = contextBuilder.loadTrustMaterial(null, acceptAllTrustStrategy).build();
        SSLConnectionSocketFactory scsf = new SSLConnectionSocketFactory(context, new AllowAllHostnameVerifier());
        HttpClient httpClient = HttpClientBuilder.create()
                .setSSLSocketFactory(scsf)
                .setDefaultCredentialsProvider(credsProvider)
                .build();
        return httpClient;
    }
}
| |
package org.myrobotlab.control.widget;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import org.myrobotlab.framework.Status;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.memory.Node;
import org.myrobotlab.service.interfaces.MemoryDisplay;
import org.slf4j.Logger;
/**
 * Swing widget that renders a hierarchical memory structure ({@code Node}s) as a
 * {@link JTree}. Gui nodes ({@code NodeGUI}) shadow the underlying memory nodes;
 * selecting a tree entry pushes the node's contents to the attached
 * {@code MemoryDisplay}, when one is present.
 */
public class MemoryWidget {

    public final static Logger log = LoggerFactory.getLogger(MemoryWidget.class.getCanonicalName());

    private JTree tree;
    private DefaultTreeModel model;
    // container handed back to the host frame via getDisplay()
    private JPanel display = new JPanel(new BorderLayout());
    // optional sink for a selected node's data; may be null (e.g. in main() below)
    MemoryDisplay memoryDisplay;
    private NodeGUI root;

    /**
     * Stand-alone demo: builds a frame with add/remove buttons and seeds a small
     * node hierarchy. Note it deliberately passes a null MemoryDisplay.
     */
    public static void main(String args[]) {
        LoggingFactory.getInstance().configure();
        LoggingFactory.getInstance().setLevel(Level.INFO);

        JFrame frame = new JFrame();
        Container container = frame.getContentPane();
        final MemoryWidget nodeTree = new MemoryWidget(null);

        JButton addButton = new JButton("add node");
        addButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                nodeTree.putNode();
            }
        });

        JButton removeButton = new JButton("remove selected node");
        removeButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                nodeTree.removeSelectedNode();
            }
        });

        JButton addNodeButton = new JButton("addNode node");
        addNodeButton.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                nodeTree.put("root", new Node("fore"));
            }
        });

        JPanel inputPanel = new JPanel();
        inputPanel.add(addButton);
        inputPanel.add(removeButton);
        inputPanel.add(addNodeButton);
        container.add(inputPanel, BorderLayout.NORTH);
        container.add(nodeTree.getDisplay(), BorderLayout.CENTER);

        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setSize(400, 300);
        frame.setVisible(true);

        // seed a small demo hierarchy
        nodeTree.put("root", new Node("foreground"));
        nodeTree.put("root", new Node("background"));
        nodeTree.put("root.foreground", new Node("objects"));
        nodeTree.put("root.foreground.objects", new Node("known"));
        nodeTree.put("root.foreground.objects", new Node("unknown"));
    }

    /**
     * @param memDisplay sink that renders a selected node's data; may be null,
     *                   in which case selections are only logged.
     */
    public MemoryWidget(final MemoryDisplay memDisplay) {
        this.memoryDisplay = memDisplay;
        root = new NodeGUI(new Node("/"));
        model = new DefaultTreeModel(root);
        tree = new JTree(model);
        display.add(new JScrollPane(tree));
        display.setPreferredSize(new Dimension(400, 500));

        tree.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent me) {
                doMouseClicked(me);
            }
        });

        // ------ display different types through user selection begin ---------
        tree.addTreeSelectionListener(new TreeSelectionListener() {
            @Override
            public void valueChanged(TreeSelectionEvent e) {
                // MAKE NOTE - gui nodes can be many-to-one relationship to
                // actual memory nodes
                NodeGUI nodeGUI = (NodeGUI) tree.getLastSelectedPathComponent();
                if (nodeGUI == null) {
                    // nothing is selected
                    log.info("nothing is selected");
                    return;
                }
                // build a readable path string for the status line
                TreeNode[] path = nodeGUI.getPath();
                StringBuffer sb = new StringBuffer();
                for (int i = 0; i < path.length; ++i) {
                    sb.append(((NodeGUI) path[i]).getName());
                    if (i != 0 && i != path.length - 1) {
                        sb.append("/");
                    }
                }
                // FIX: main() constructs this widget with a null display -
                // guard before dereferencing to avoid an NPE on selection
                if (memoryDisplay != null) {
                    memoryDisplay.clear();
                    memoryDisplay.displayStatus(new Status(String.format("node %s", sb.toString())));
                    memoryDisplay.display(nodeGUI.myNode);
                }
                Object nodeInfo = nodeGUI.getUserObject();
                log.info("{}", nodeInfo);
            }
        });
        // ------ display different types through user selection end ---------
    }

    /** Logs the tree path under a mouse click, if any. */
    void doMouseClicked(MouseEvent me) {
        TreePath tp = tree.getPathForLocation(me.getX(), me.getY());
        if (tp != null)
            log.info(tp.toString());
        else
            log.info("");
    }

    /**
     * Resolves a gui node by dotted path; a null path returns the root.
     */
    public NodeGUI get(String path) {
        if (path == null) {
            return root;
        }
        return (NodeGUI) root.get(path);
    }

    /** @return the panel hosting the tree, for embedding in a frame */
    public JPanel getDisplay() {
        return display;
    }

    public NodeGUI getRoot() {
        return root;
    }

    private NodeGUI getSelectedNode() {
        return (NodeGUI) tree.getLastSelectedPathComponent();
    }

    /**
     * Inserts (or refreshes) the gui node for {@code node} beneath the gui node
     * resolved from {@code parentPath}.
     *
     * @return the child gui node, or null when the parent path does not resolve
     */
    public NodeGUI put(String parentPath, Node node) {
        // FIXME ???? - use JTree's index or NodeGUI's ?
        NodeGUI parent = (NodeGUI) root.get(parentPath);
        if (parent == null) {
            log.error("could not add gui node {} to path {}", node.getName(), parentPath);
            return null;
        }
        NodeGUI child = (NodeGUI) parent.get(node.getName());
        if (child == null) {
            child = new NodeGUI(node);
            // insertNodeInto only when the child did not previously exist,
            // otherwise the model would show a duplicate entry
            model.insertNodeInto(child, parent, parent.getChildCount());
            parent.put(child);
        } else {
            // already displayed - just sync it with the memory node
            child.refresh(node);
        }
        return child;
    }

    // ----------------- user interface begin -----------------------------
    // user --> model --> add
    private void putNode() {
        NodeGUI parent = getSelectedNode();
        if (parent == null) {
            JOptionPane.showMessageDialog(display, "Select an era.", "Error", JOptionPane.ERROR_MESSAGE);
            return;
        }
        // FIX: the two-arg showInputDialog overload requires a Component parent;
        // MemoryWidget is not a Component, so anchor the dialog on the panel
        String name = JOptionPane.showInputDialog(display, "Enter Name:");
        // FIX: the dialog returns null on cancel - don't create a nameless node
        if (name == null || name.isEmpty()) {
            return;
        }
        // FIXME send to Cortex event - wait for publish
        put("/", new Node(name));
    }

    private void removeSelectedNode() {
        NodeGUI selectedNode = getSelectedNode();
        // FIX: removeNodeFromParent throws on the root (it has no parent)
        if (selectedNode != null && selectedNode != root)
            model.removeNodeFromParent(selectedNode);
    }
    // ----------------- user interface end -----------------------------
}
| |
package com.planet_ink.coffee_mud.core.collections;
import java.util.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.MaskingLibrary;
/**
 * A {@code CMList} of {@code Pair}s with convenience accessors that address the
 * first and second members of each pair independently.
 *
 * @param <T> type of the first member of every pair
 * @param <K> type of the second member of every pair
 */
public final class PairCMList<T, K> extends CMList<Pair<T, K>> implements PairList<T, K>
{
	private static final long serialVersionUID = -9175373328892311411L;

	@Override
	public final Pair.FirstConverter<T, K> getFirstConverter()
	{
		return new Pair.FirstConverter<T, K>();
	}

	@Override
	public final Pair.SecondConverter<T, K> getSecondConverter()
	{
		return new Pair.SecondConverter<T, K>();
	}

	/** @return an iterator over the first member of every pair */
	@Override
	public final Iterator<T> firstIterator()
	{
		return new ConvertingIterator<Pair<T, K>, T>(iterator(), getFirstConverter());
	}

	/** @return an iterator over the second member of every pair */
	@Override
	public final Iterator<K> secondIterator()
	{
		return new ConvertingIterator<Pair<T, K>, K>(iterator(), getSecondConverter());
	}

	@Override
	public synchronized int indexOfFirst(T t)
	{
		return indexOfFirst(t, 0);
	}

	@Override
	public synchronized int indexOfSecond(K k)
	{
		return indexOfSecond(k, 0);
	}

	@Override
	public T getFirst(int index)
	{
		return get(index).first;
	}

	@Override
	public K getSecond(int index)
	{
		return get(index).second;
	}

	@Override
	public void add(T t, K k)
	{
		add(new Pair<T, K>(t, k));
	}

	/** Vector-style alias for {@link #add(Object, Object)}. */
	public void addElement(T t, K k)
	{
		add(new Pair<T, K>(t, k));
	}

	/**
	 * True when some pair's first member equals t; a null argument matches a
	 * stored null first member.
	 */
	@Override
	public boolean containsFirst(T t)
	{
		for (final Iterator<Pair<T, K>> i = iterator(); i.hasNext();)
		{
			// FIX: compare against the pair's first member; previously a null
			// argument was compared against the Pair object itself, so
			// containsFirst(null) could never match a stored null value
			final Pair<T, K> p = i.next();
			if ((t == null) ? p.first == null : t.equals(p.first))
				return true;
		}
		return false;
	}

	/**
	 * True when some pair's second member equals k; a null argument matches a
	 * stored null second member.
	 */
	@Override
	public boolean containsSecond(K k)
	{
		for (final Iterator<Pair<T, K>> i = iterator(); i.hasNext();)
		{
			// FIX: compare against the pair's second member (see containsFirst)
			final Pair<T, K> p = i.next();
			if ((k == null) ? p.second == null : k.equals(p.second))
				return true;
		}
		return false;
	}

	@Override
	public T elementAtFirst(int index)
	{
		return get(index).first;
	}

	@Override
	public K elementAtSecond(int index)
	{
		return get(index).second;
	}

	/**
	 * Index of the first pair at or after {@code index} whose first member
	 * equals t, or -1 when absent.
	 */
	@Override
	public synchronized int indexOfFirst(T t, int index)
	{
		try
		{
			for (int i = index; i < size(); i++)
			{
				if ((t == null ? get(i).first == null : t.equals(get(i).first)))
					return i;
			}
		}
		catch (final Exception e)
		{
			// concurrent shrinkage may invalidate the index; treat as not-found
		}
		return -1;
	}

	/**
	 * Index of the first pair at or after {@code index} whose second member
	 * equals k, or -1 when absent.
	 */
	@Override
	public synchronized int indexOfSecond(K k, int index)
	{
		try
		{
			for (int i = index; i < size(); i++)
			{
				if ((k == null ? get(i).second == null : k.equals(get(i).second)))
					return i;
			}
		}
		catch (final Exception e)
		{
			// concurrent shrinkage may invalidate the index; treat as not-found
		}
		return -1;
	}

	/**
	 * Index of the last pair at or before {@code index} whose first member
	 * equals t, or -1 when absent.
	 */
	@Override
	public synchronized int lastIndexOfFirst(T t, int index)
	{
		try
		{
			for (int i = index; i >= 0; i--)
			{
				if ((t == null ? get(i).first == null : t.equals(get(i).first)))
					return i;
			}
		}
		catch (final Exception e)
		{
			// concurrent shrinkage may invalidate the index; treat as not-found
		}
		return -1;
	}

	/**
	 * Index of the last pair at or before {@code index} whose second member
	 * equals k, or -1 when absent.
	 */
	@Override
	public synchronized int lastIndexOfSecond(K k, int index)
	{
		try
		{
			for (int i = index; i >= 0; i--)
			{
				if ((k == null ? get(i).second == null : k.equals(get(i).second)))
					return i;
			}
		}
		catch (final Exception e)
		{
			// concurrent shrinkage may invalidate the index; treat as not-found
		}
		return -1;
	}

	@Override
	public synchronized int lastIndexOfFirst(T t)
	{
		return lastIndexOfFirst(t, size() - 1);
	}

	@Override
	public synchronized int lastIndexOfSecond(K k)
	{
		return lastIndexOfSecond(k, size() - 1);
	}

	/**
	 * Removes the first pair whose first member equals t.
	 *
	 * @return true when a pair was removed
	 */
	@Override
	public boolean removeFirst(T t)
	{
		Pair<T, K> pair;
		for (final Iterator<Pair<T, K>> i = iterator(); i.hasNext();)
		{
			pair = i.next();
			if ((t == null ? pair.first == null : t.equals(pair.first)))
				return super.remove(pair);
		}
		return false;
	}

	/**
	 * Removes the first pair whose second member equals k.
	 *
	 * @return true when a pair was removed
	 */
	@Override
	public boolean removeSecond(K k)
	{
		Pair<T, K> pair;
		for (final Iterator<Pair<T, K>> i = iterator(); i.hasNext();)
		{
			pair = i.next();
			if ((k == null ? pair.second == null : k.equals(pair.second)))
				return super.remove(pair);
		}
		return false;
	}

	@Override
	public boolean removeElementFirst(T t)
	{
		return removeFirst(t);
	}

	@Override
	public boolean removeElementSecond(K k)
	{
		return removeSecond(k);
	}

	/**
	 * Copies every pair's first member into {@code objs}, growing the array
	 * when it is too small.
	 */
	@Override
	public T[] toArrayFirst(T[] objs)
	{
		if (objs.length < size())
			objs = Arrays.copyOf(objs, size());
		for (int x = 0; x < size(); x++)
			objs[x] = getFirst(x);
		return objs;
	}

	/**
	 * Copies every pair's second member into {@code objs}, growing the array
	 * when it is too small.
	 */
	@Override
	public K[] toArraySecond(K[] objs)
	{
		if (objs.length < size())
			objs = Arrays.copyOf(objs, size());
		for (int x = 0; x < size(); x++)
			objs[x] = getSecond(x);
		return objs;
	}
}
| |
/*
*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.genie.web.services.impl;
import com.netflix.genie.common.exceptions.GenieException;
import com.netflix.genie.common.exceptions.GenieServerException;
import com.netflix.genie.test.categories.UnitTest;
import com.netflix.spectator.api.Registry;
import com.netflix.spectator.api.Timer;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.test.web.client.MockRestServiceServer;
import org.springframework.test.web.client.match.MockRestRequestMatchers;
import org.springframework.test.web.client.response.MockRestResponseCreators;
import org.springframework.web.client.ResourceAccessException;
import org.springframework.web.client.RestTemplate;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.nio.charset.Charset;
import java.time.Instant;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
/**
 * Unit tests for {@code HttpFileTransferImpl}, exercised against a mock REST
 * backend so no real network traffic occurs.
 *
 * @author tgianos
 * @since 3.0.0
 */
@Category(UnitTest.class)
public class HttpFileTransferImplTest {

    private static final String TEST_URL = "http://localhost/myFile.txt";

    /**
     * Scratch directory for files created during tests; removed automatically.
     */
    @Rule
    public TemporaryFolder temporaryFolder = new TemporaryFolder();

    private MockRestServiceServer server;
    private HttpFileTransferImpl httpFileTransfer;
    private Timer downloadTimer;
    private Timer uploadTimer;
    private Timer metadataTimer;

    /**
     * Wire a mock REST backend and mocked metric timers into a fresh
     * {@code HttpFileTransferImpl} before each test.
     */
    @Before
    public void setup() {
        final RestTemplate template = new RestTemplate();
        this.server = MockRestServiceServer.createServer(template);

        this.downloadTimer = Mockito.mock(Timer.class);
        this.uploadTimer = Mockito.mock(Timer.class);
        this.metadataTimer = Mockito.mock(Timer.class);

        final Registry registry = Mockito.mock(Registry.class);
        Mockito.when(registry.timer("genie.files.http.download.timer")).thenReturn(this.downloadTimer);
        Mockito.when(registry.timer("genie.files.http.upload.timer")).thenReturn(this.uploadTimer);
        Mockito.when(registry.timer("genie.files.http.getLastModified.timer")).thenReturn(this.metadataTimer);

        this.httpFileTransfer = new HttpFileTransferImpl(template, registry);
    }

    /**
     * Only http(s) URLs should be considered valid.
     *
     * @throws GenieException On error
     */
    @Test
    public void canValidate() throws GenieException {
        final String[] accepted = {
            "http://netflix.github.io/genie",
            "https://netflix.github.io/genie",
            "http://localhost/someFile.txt",
            "https://localhost:8080/someFile.txt",
        };
        for (final String url : accepted) {
            Assert.assertTrue(url, this.httpFileTransfer.isValid(url));
        }
        final String[] rejected = {
            "ftp://netflix.github.io/genie",
            "file:///tmp/blah",
        };
        for (final String url : rejected) {
            Assert.assertFalse(url, this.httpFileTransfer.isValid(url));
        }
    }

    /**
     * A GET against a valid URL should download the body and record the
     * download timer.
     *
     * @throws GenieException On error
     * @throws IOException    On error
     */
    @Test
    public void canGet() throws GenieException, IOException {
        final File destination = this.temporaryFolder.newFile();
        final String body = UUID.randomUUID().toString();
        this.server
            .expect(MockRestRequestMatchers.requestTo(TEST_URL))
            .andExpect(MockRestRequestMatchers.method(HttpMethod.GET))
            .andRespond(
                MockRestResponseCreators
                    .withSuccess(body.getBytes(Charset.forName("UTF-8")), MediaType.APPLICATION_OCTET_STREAM)
            );

        this.httpFileTransfer.getFile(TEST_URL, destination.getCanonicalPath());

        this.server.verify();
        verifyRecordedOnce(this.downloadTimer);
    }

    /**
     * A source string that is not a URL must be rejected.
     *
     * @throws GenieException On Error
     * @throws IOException    On Error
     */
    @Test(expected = GenieServerException.class)
    public void cantGetWithInvalidUrl() throws GenieException, IOException {
        this.httpFileTransfer.getFile(UUID.randomUUID().toString(), this.temporaryFolder.getRoot().getCanonicalPath());
    }

    /**
     * Downloading on top of an existing directory must fail.
     *
     * @throws GenieException On Error
     * @throws IOException    On Error
     */
    @Test(expected = ResourceAccessException.class)
    public void cantGetWithDirectoryAsOutput() throws GenieException, IOException {
        this.server
            .expect(MockRestRequestMatchers.requestTo(TEST_URL))
            .andExpect(MockRestRequestMatchers.method(HttpMethod.GET))
            .andRespond(
                MockRestResponseCreators
                    .withSuccess("junk".getBytes(Charset.forName("UTF-8")), MediaType.APPLICATION_OCTET_STREAM)
            );
        this.httpFileTransfer.getFile(TEST_URL, this.temporaryFolder.getRoot().getCanonicalPath());
    }

    /**
     * putFile is intentionally unimplemented; the upload timer should still be
     * recorded.
     *
     * @throws GenieException on error
     */
    @Test
    public void cantPutFile() throws GenieException {
        try {
            final String source = UUID.randomUUID().toString();
            this.httpFileTransfer.putFile(source, source);
            Assert.fail();
        } catch (final UnsupportedOperationException e) {
            verifyRecordedOnce(this.uploadTimer);
        }
    }

    /**
     * A HEAD response carrying a Last-Modified header should surface that
     * timestamp.
     *
     * @throws GenieException On error
     */
    @Test
    public void canGetLastModifiedTime() throws GenieException {
        final long expectedLastModified = 28424323000L;
        final HttpHeaders responseHeaders = new HttpHeaders();
        responseHeaders.setLastModified(expectedLastModified);
        this.server
            .expect(MockRestRequestMatchers.requestTo(TEST_URL))
            .andExpect(MockRestRequestMatchers.method(HttpMethod.HEAD))
            .andRespond(MockRestResponseCreators.withSuccess().headers(responseHeaders));

        Assert.assertThat(this.httpFileTransfer.getLastModifiedTime(TEST_URL), Matchers.is(expectedLastModified));
        this.server.verify();
        verifyRecordedOnce(this.metadataTimer);
    }

    /**
     * Without a Last-Modified header the implementation should fall back to a
     * "now" timestamp.
     *
     * @throws GenieException On error
     */
    @Test
    public void canGetLastModifiedTimeIfNoHeader() throws GenieException {
        final long before = Instant.now().toEpochMilli() - 1;
        this.server
            .expect(MockRestRequestMatchers.requestTo(TEST_URL))
            .andExpect(MockRestRequestMatchers.method(HttpMethod.HEAD))
            .andRespond(MockRestResponseCreators.withSuccess());

        Assert.assertTrue(this.httpFileTransfer.getLastModifiedTime(TEST_URL) > before);
        verifyRecordedOnce(this.metadataTimer);
    }

    /**
     * A non-URL path should produce a GenieServerException wrapping the
     * MalformedURLException while still recording the metadata timer.
     *
     * @throws GenieException On error
     */
    @Test
    public void cantGetLastModifiedTimeIfNotURL() throws GenieException {
        try {
            this.httpFileTransfer.getLastModifiedTime(UUID.randomUUID().toString());
            Assert.fail();
        } catch (final GenieServerException e) {
            Assert.assertTrue(e.getCause() instanceof MalformedURLException);
            verifyRecordedOnce(this.metadataTimer);
        }
    }

    // Shared verification: the given timer recorded exactly one duration in
    // nanoseconds.
    private void verifyRecordedOnce(final Timer timer) {
        Mockito
            .verify(timer, Mockito.times(1))
            .record(Mockito.anyLong(), Mockito.eq(TimeUnit.NANOSECONDS));
    }
}
| |
/**
* Copyright (c) 2016 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.lib.bucket;
import java.io.IOException;
import java.util.Map;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.datatorrent.lib.helper.OperatorContextTestHelper;
/**
 * Tests for {@code HdfsBucketStore}: storing, fetching and deleting bucket data
 * against a local filesystem path under target/.
 */
public class HdfsBucketStoreTest
{
  /**
   * Per-test fixture: builds a bucket store over a unique application path and
   * seeds two buckets with ten events each.
   */
  public static class TestMeta extends TestWatcher
  {
    protected final String APPLICATION_PATH_PREFIX = "target/HdfsBucketStoreTest";
    protected final int TOTAL_BUCKETS = 1000;
    protected String applicationPath;
    protected Path rootBucketPath;
    protected HdfsBucketStore<DummyEvent> bucketStore;
    // bucketIdx -> (eventKey -> event) seed data shared by the util methods
    protected Map<Integer, Map<Object, DummyEvent>> data = Maps.newHashMap();
    protected FileSystem fs;
    protected BucketStoreTestsUtil util;

    @Override
    protected void starting(Description description)
    {
      applicationPath = OperatorContextTestHelper.getUniqueApplicationPath(APPLICATION_PATH_PREFIX);
      bucketStore = getBucketStore();
      bucketStore.setup();
      util = new BucketStoreTestsUtil(this);
      // seed buckets 0 and 1 with ten events each
      for (int bucketIdx = 0; bucketIdx < 2; bucketIdx++) {
        Map<Object, DummyEvent> bucketData = Maps.newHashMap();
        data.put(bucketIdx, bucketData);
        for (int i = 0; i < 10; i++) {
          DummyEvent event = new DummyEvent(i, System.currentTimeMillis());
          bucketData.put(event.getEventKey(), event);
        }
      }
      rootBucketPath = new Path(bucketStore.bucketRoot);
      try {
        fs = FileSystem.newInstance(rootBucketPath.toUri(), new Configuration());
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    @Override
    protected void finished(Description description)
    {
      // FIX: the null check previously came AFTER fs.delete(...), so a setup
      // failure (fs == null) made teardown throw an NPE and mask the real
      // error; guard before touching the filesystem at all
      try {
        if (fs != null) {
          fs.delete(new Path(applicationPath), true);
          fs.close();
        }
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    /**
     * @return a key-only bucket store configured over {@link #applicationPath}
     */
    protected HdfsBucketStore<DummyEvent> getBucketStore()
    {
      HdfsBucketStore<DummyEvent> lBucketStore = new HdfsBucketStore<DummyEvent>();
      lBucketStore.setNoOfBuckets(TOTAL_BUCKETS);
      lBucketStore.setWriteEventKeysOnly(true);
      lBucketStore.setConfiguration(7, applicationPath, Sets.newHashSet(0), 0);
      return lBucketStore;
    }
  }

  @Rule
  public TestMeta testMeta = new TestMeta();

  @Test
  public void testStoreBucketData() throws Exception
  {
    testMeta.util.storeBucket(0);
  }

  @Test
  public void testFetchBucket() throws Exception
  {
    testMeta.util.fetchBucket(0);
  }

  @Test
  public void testDeleteBucket() throws Exception
  {
    testMeta.util.deleteBucket(0);
  }

  /**
   * Stores full values (not just keys) across two windows and verifies the
   * latest value for a key wins on fetch.
   */
  @Test
  public void testValuePersistence() throws Exception
  {
    testMeta.bucketStore.setWriteEventKeysOnly(false);
    testMeta.bucketStore.setup();
    DummyEvent newEvent = new DummyEvent(0, System.currentTimeMillis());
    // store data for window 0
    testMeta.util.storeBucket(0);
    // populate data for window 1 with a fresh event under the same bucket
    testMeta.data = Maps.newHashMap();
    Map<Object, DummyEvent> bucketData = Maps.newHashMap();
    bucketData.put(newEvent.getEventKey(), newEvent);
    testMeta.data.put(0, bucketData);
    // store data for window 1
    testMeta.util.storeBucket(1);
    Map<Object, DummyEvent> fetchBucket = testMeta.bucketStore.fetchBucket(0);
    DummyEvent retrievedEvent = fetchBucket.get(newEvent.getEventKey());
    Assert.assertTrue("latest value", retrievedEvent.equals(newEvent));
  }

  /**
   * Shared store/fetch/delete helpers operating on a {@link TestMeta}'s seed
   * data.
   */
  public static class BucketStoreTestsUtil
  {
    protected final TestMeta meta;

    public BucketStoreTestsUtil(TestMeta meta)
    {
      this.meta = meta;
    }

    /** Stores the seed data for {@code window} and asserts the root path exists. */
    public void storeBucket(long window) throws Exception
    {
      meta.bucketStore.storeBucketData(window, 0, meta.data);
      Assert.assertTrue(meta.fs.exists(meta.rootBucketPath));
    }

    /**
     * Stores then fetches buckets 0 and 1; in key-only mode fetched values are
     * null while the keys must match the seed data.
     */
    public void fetchBucket(long window) throws Exception
    {
      meta.bucketStore.storeBucketData(window, 0, meta.data);
      Map<Object, DummyEvent> fetchedData = meta.bucketStore.fetchBucket(0);
      for (Map.Entry<Object, DummyEvent> entry : fetchedData.entrySet()) {
        Assert.assertTrue(entry.getValue() == null);
        Assert.assertTrue(meta.data.get(0).containsKey(entry.getKey()));
      }
      fetchedData = meta.bucketStore.fetchBucket(1);
      for (Map.Entry<Object, DummyEvent> entry : fetchedData.entrySet()) {
        Assert.assertTrue(entry.getValue() == null);
        Assert.assertTrue(meta.data.get(1).containsKey(entry.getKey()));
      }
    }

    /** Stores, deletes bucket 1, and verifies a fetch now yields an empty map. */
    public void deleteBucket(long window) throws Exception
    {
      meta.bucketStore.storeBucketData(window, 0, meta.data);
      meta.bucketStore.deleteBucket(1);
      Map<Object, DummyEvent> fetchedData = meta.bucketStore.fetchBucket(1);
      Assert.assertNotNull(fetchedData);
      Assert.assertTrue(fetchedData.size() == 0);
      deleteFsPath(meta.rootBucketPath);
    }

    void deleteFsPath(Path path) throws IOException
    {
      meta.fs.delete(path, true);
    }

    /** @return whether the on-disk file for {@code fileId} (operator id 7) exists */
    boolean bucketExists(int fileId)
    {
      Path bucketPath = new Path(meta.applicationPath + HdfsBucketStore.PATH_SEPARATOR + "buckets" +
          HdfsBucketStore.PATH_SEPARATOR + 7 + HdfsBucketStore.PATH_SEPARATOR + fileId);
      try {
        return meta.fs.exists(bucketPath);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }

  private static final Logger logger = LoggerFactory.getLogger(HdfsBucketStoreTest.class);
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/services/campaign_shared_set_service.proto
package com.google.ads.googleads.v10.services;
/**
* <pre>
* A single operation (create, remove) on an campaign shared set.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.CampaignSharedSetOperation}
*/
public final class CampaignSharedSetOperation extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.CampaignSharedSetOperation)
CampaignSharedSetOperationOrBuilder {
private static final long serialVersionUID = 0L;
// Use CampaignSharedSetOperation.newBuilder() to construct.
private CampaignSharedSetOperation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CampaignSharedSetOperation() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new CampaignSharedSetOperation();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CampaignSharedSetOperation(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
com.google.ads.googleads.v10.resources.CampaignSharedSet.Builder subBuilder = null;
if (operationCase_ == 1) {
subBuilder = ((com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_).toBuilder();
}
operation_ =
input.readMessage(com.google.ads.googleads.v10.resources.CampaignSharedSet.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom((com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_);
operation_ = subBuilder.buildPartial();
}
operationCase_ = 1;
break;
}
case 26: {
java.lang.String s = input.readStringRequireUtf8();
operationCase_ = 3;
operation_ = s;
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.CampaignSharedSetServiceProto.internal_static_google_ads_googleads_v10_services_CampaignSharedSetOperation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.CampaignSharedSetServiceProto.internal_static_google_ads_googleads_v10_services_CampaignSharedSetOperation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.CampaignSharedSetOperation.class, com.google.ads.googleads.v10.services.CampaignSharedSetOperation.Builder.class);
}
private int operationCase_ = 0;
private java.lang.Object operation_;
public enum OperationCase
implements com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
CREATE(1),
REMOVE(3),
OPERATION_NOT_SET(0);
private final int value;
private OperationCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static OperationCase valueOf(int value) {
return forNumber(value);
}
public static OperationCase forNumber(int value) {
switch (value) {
case 1: return CREATE;
case 3: return REMOVE;
case 0: return OPERATION_NOT_SET;
default: return null;
}
}
public int getNumber() {
return this.value;
}
};
public OperationCase
getOperationCase() {
return OperationCase.forNumber(
operationCase_);
}
public static final int CREATE_FIELD_NUMBER = 1;
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* shared set.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
* @return Whether the create field is set.
*/
@java.lang.Override
public boolean hasCreate() {
return operationCase_ == 1;
}
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* shared set.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
* @return The create.
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CampaignSharedSet getCreate() {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_;
}
return com.google.ads.googleads.v10.resources.CampaignSharedSet.getDefaultInstance();
}
/**
* <pre>
* Create operation: No resource name is expected for the new campaign
* shared set.
* </pre>
*
* <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.resources.CampaignSharedSetOrBuilder getCreateOrBuilder() {
if (operationCase_ == 1) {
return (com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_;
}
return com.google.ads.googleads.v10.resources.CampaignSharedSet.getDefaultInstance();
}
public static final int REMOVE_FIELD_NUMBER = 3;
/**
* <pre>
* Remove operation: A resource name for the removed campaign shared set is
* expected, in this format:
* `customers/{customer_id}/campaignSharedSets/{campaign_id}~{shared_set_id}`
* </pre>
*
* <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
* @return Whether the remove field is set.
*/
public boolean hasRemove() {
return operationCase_ == 3;
}
/**
* <pre>
* Remove operation: A resource name for the removed campaign shared set is
* expected, in this format:
* `customers/{customer_id}/campaignSharedSets/{campaign_id}~{shared_set_id}`
* </pre>
*
* <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
* @return The remove.
*/
public java.lang.String getRemove() {
java.lang.Object ref = "";
if (operationCase_ == 3) {
ref = operation_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (operationCase_ == 3) {
operation_ = s;
}
return s;
}
}
/**
* <pre>
* Remove operation: A resource name for the removed campaign shared set is
* expected, in this format:
* `customers/{customer_id}/campaignSharedSets/{campaign_id}~{shared_set_id}`
* </pre>
*
* <code>string remove = 3 [(.google.api.resource_reference) = { ... }</code>
* @return The bytes for remove.
*/
public com.google.protobuf.ByteString
getRemoveBytes() {
java.lang.Object ref = "";
if (operationCase_ == 3) {
ref = operation_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
if (operationCase_ == 3) {
operation_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (operationCase_ == 1) {
output.writeMessage(1, (com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_);
}
if (operationCase_ == 3) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, operation_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (operationCase_ == 1) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, (com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_);
}
if (operationCase_ == 3) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, operation_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v10.services.CampaignSharedSetOperation)) {
return super.equals(obj);
}
com.google.ads.googleads.v10.services.CampaignSharedSetOperation other = (com.google.ads.googleads.v10.services.CampaignSharedSetOperation) obj;
if (!getOperationCase().equals(other.getOperationCase())) return false;
switch (operationCase_) {
case 1:
if (!getCreate()
.equals(other.getCreate())) return false;
break;
case 3:
if (!getRemove()
.equals(other.getRemove())) return false;
break;
case 0:
default:
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
/**
 * Hash code consistent with equals(): mixes the descriptor, the set oneof
 * case's field number and payload, and the unknown fields. Cached in
 * memoizedHashCode after the first call (0 means "not yet computed").
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  switch (operationCase_) {
    case 1:
      hash = (37 * hash) + CREATE_FIELD_NUMBER;
      hash = (53 * hash) + getCreate().hashCode();
      break;
    case 3:
      hash = (37 * hash) + REMOVE_FIELD_NUMBER;
      hash = (53 * hash) + getRemove().hashCode();
      break;
    case 0:
    default:
      // OPERATION_NOT_SET contributes nothing beyond the descriptor.
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. All overloads delegate to PARSER;
// the stream-based variants route through GeneratedMessageV3 helpers so that
// IOExceptions from the underlying stream are surfaced as-is.
// ---------------------------------------------------------------------------
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message.
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
/** Creates a fresh builder starting from the default (empty) instance. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
/** Creates a builder pre-populated with {@code prototype}'s fields. */
public static Builder newBuilder(com.google.ads.googleads.v10.services.CampaignSharedSetOperation prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Skip the mergeFrom when this is the default instance (nothing to copy).
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * A single operation (create, remove) on a campaign shared set.
 *
 * <p>Protobuf type {@code google.ads.googleads.v10.services.CampaignSharedSetOperation}.
 *
 * <p>The two fields form the "operation" oneof: {@code operationCase_} records
 * which of create (field 1, a message) or remove (field 3, a string) is set,
 * and {@code operation_} holds the corresponding payload.
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.CampaignSharedSetOperation)
    com.google.ads.googleads.v10.services.CampaignSharedSetOperationOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v10.services.CampaignSharedSetServiceProto.internal_static_google_ads_googleads_v10_services_CampaignSharedSetOperation_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v10.services.CampaignSharedSetServiceProto.internal_static_google_ads_googleads_v10_services_CampaignSharedSetOperation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v10.services.CampaignSharedSetOperation.class, com.google.ads.googleads.v10.services.CampaignSharedSetOperation.Builder.class);
  }
  // Construct using com.google.ads.googleads.v10.services.CampaignSharedSetOperation.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // No eager field builders are needed for this message's fields.
    if (com.google.protobuf.GeneratedMessageV3
        .alwaysUseFieldBuilders) {
    }
  }
  /** Resets the builder: no oneof case set, no payload. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    operationCase_ = 0;
    operation_ = null;
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.ads.googleads.v10.services.CampaignSharedSetServiceProto.internal_static_google_ads_googleads_v10_services_CampaignSharedSetOperation_descriptor;
  }
  @java.lang.Override
  public com.google.ads.googleads.v10.services.CampaignSharedSetOperation getDefaultInstanceForType() {
    return com.google.ads.googleads.v10.services.CampaignSharedSetOperation.getDefaultInstance();
  }
  /** Builds the message, throwing if it is uninitialized (it never is: no required fields). */
  @java.lang.Override
  public com.google.ads.googleads.v10.services.CampaignSharedSetOperation build() {
    com.google.ads.googleads.v10.services.CampaignSharedSetOperation result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  /** Copies the active oneof payload (from the field builder when one exists) into a new message. */
  @java.lang.Override
  public com.google.ads.googleads.v10.services.CampaignSharedSetOperation buildPartial() {
    com.google.ads.googleads.v10.services.CampaignSharedSetOperation result = new com.google.ads.googleads.v10.services.CampaignSharedSetOperation(this);
    if (operationCase_ == 1) {
      if (createBuilder_ == null) {
        result.operation_ = operation_;
      } else {
        result.operation_ = createBuilder_.build();
      }
    }
    if (operationCase_ == 3) {
      result.operation_ = operation_;
    }
    result.operationCase_ = operationCase_;
    onBuilt();
    return result;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.ads.googleads.v10.services.CampaignSharedSetOperation) {
      return mergeFrom((com.google.ads.googleads.v10.services.CampaignSharedSetOperation)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  /** Merges another operation into this builder; the other's set oneof case wins. */
  public Builder mergeFrom(com.google.ads.googleads.v10.services.CampaignSharedSetOperation other) {
    if (other == com.google.ads.googleads.v10.services.CampaignSharedSetOperation.getDefaultInstance()) return this;
    switch (other.getOperationCase()) {
      case CREATE: {
        mergeCreate(other.getCreate());
        break;
      }
      case REMOVE: {
        // Plain assignment: a string oneof payload is replaced, not merged.
        operationCase_ = 3;
        operation_ = other.operation_;
        onChanged();
        break;
      }
      case OPERATION_NOT_SET: {
        break;
      }
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.ads.googleads.v10.services.CampaignSharedSetOperation parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (com.google.ads.googleads.v10.services.CampaignSharedSetOperation) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      // Merge whatever was parsed before a failure so partial data is kept.
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Oneof state: which field is set (0 = none, 1 = create, 3 = remove) and its payload.
  private int operationCase_ = 0;
  private java.lang.Object operation_;
  public OperationCase
      getOperationCase() {
    return OperationCase.forNumber(
        operationCase_);
  }
  /** Clears the whole "operation" oneof regardless of which case was set. */
  public Builder clearOperation() {
    operationCase_ = 0;
    operation_ = null;
    onChanged();
    return this;
  }
  // Lazily created field builder for the create case; see getCreateFieldBuilder().
  private com.google.protobuf.SingleFieldBuilderV3<
      com.google.ads.googleads.v10.resources.CampaignSharedSet, com.google.ads.googleads.v10.resources.CampaignSharedSet.Builder, com.google.ads.googleads.v10.resources.CampaignSharedSetOrBuilder> createBuilder_;
  /**
   * Create operation: no resource name is expected for the new campaign shared set.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   * @return Whether the create field is set.
   */
  @java.lang.Override
  public boolean hasCreate() {
    return operationCase_ == 1;
  }
  /**
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   * @return The create message, or the default instance if this case is not set.
   */
  @java.lang.Override
  public com.google.ads.googleads.v10.resources.CampaignSharedSet getCreate() {
    if (createBuilder_ == null) {
      if (operationCase_ == 1) {
        return (com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_;
      }
      return com.google.ads.googleads.v10.resources.CampaignSharedSet.getDefaultInstance();
    } else {
      if (operationCase_ == 1) {
        return createBuilder_.getMessage();
      }
      return com.google.ads.googleads.v10.resources.CampaignSharedSet.getDefaultInstance();
    }
  }
  /**
   * Sets the create payload and selects the create oneof case.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   */
  public Builder setCreate(com.google.ads.googleads.v10.resources.CampaignSharedSet value) {
    if (createBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      operation_ = value;
      onChanged();
    } else {
      createBuilder_.setMessage(value);
    }
    operationCase_ = 1;
    return this;
  }
  /**
   * Sets the create payload from a builder and selects the create oneof case.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   */
  public Builder setCreate(
      com.google.ads.googleads.v10.resources.CampaignSharedSet.Builder builderForValue) {
    if (createBuilder_ == null) {
      operation_ = builderForValue.build();
      onChanged();
    } else {
      createBuilder_.setMessage(builderForValue.build());
    }
    operationCase_ = 1;
    return this;
  }
  /**
   * Merges {@code value} into the existing create payload (field-wise), or sets
   * it outright when the create case was not previously set.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   */
  public Builder mergeCreate(com.google.ads.googleads.v10.resources.CampaignSharedSet value) {
    if (createBuilder_ == null) {
      if (operationCase_ == 1 &&
          operation_ != com.google.ads.googleads.v10.resources.CampaignSharedSet.getDefaultInstance()) {
        operation_ = com.google.ads.googleads.v10.resources.CampaignSharedSet.newBuilder((com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_)
            .mergeFrom(value).buildPartial();
      } else {
        operation_ = value;
      }
      onChanged();
    } else {
      // NOTE(review): generated code merges into the field builder and then also
      // calls setMessage(value); this mirrors the protoc template for oneof
      // message fields of this library version — do not "fix" by hand.
      if (operationCase_ == 1) {
        createBuilder_.mergeFrom(value);
      }
      createBuilder_.setMessage(value);
    }
    operationCase_ = 1;
    return this;
  }
  /**
   * Clears the create case (only if it is the active case) and its payload.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   */
  public Builder clearCreate() {
    if (createBuilder_ == null) {
      if (operationCase_ == 1) {
        operationCase_ = 0;
        operation_ = null;
        onChanged();
      }
    } else {
      if (operationCase_ == 1) {
        operationCase_ = 0;
        operation_ = null;
      }
      createBuilder_.clear();
    }
    return this;
  }
  /**
   * Returns a mutable builder for the create payload, selecting the create case.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   */
  public com.google.ads.googleads.v10.resources.CampaignSharedSet.Builder getCreateBuilder() {
    return getCreateFieldBuilder().getBuilder();
  }
  /**
   * Read-only view of the create payload without forcing builder creation.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v10.resources.CampaignSharedSetOrBuilder getCreateOrBuilder() {
    if ((operationCase_ == 1) && (createBuilder_ != null)) {
      return createBuilder_.getMessageOrBuilder();
    } else {
      if (operationCase_ == 1) {
        return (com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_;
      }
      return com.google.ads.googleads.v10.resources.CampaignSharedSet.getDefaultInstance();
    }
  }
  /**
   * Lazily creates the single-field builder for create, moving any existing
   * message payload into it and selecting the create case.
   * <code>.google.ads.googleads.v10.resources.CampaignSharedSet create = 1;</code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
      com.google.ads.googleads.v10.resources.CampaignSharedSet, com.google.ads.googleads.v10.resources.CampaignSharedSet.Builder, com.google.ads.googleads.v10.resources.CampaignSharedSetOrBuilder>
      getCreateFieldBuilder() {
    if (createBuilder_ == null) {
      if (!(operationCase_ == 1)) {
        operation_ = com.google.ads.googleads.v10.resources.CampaignSharedSet.getDefaultInstance();
      }
      createBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
          com.google.ads.googleads.v10.resources.CampaignSharedSet, com.google.ads.googleads.v10.resources.CampaignSharedSet.Builder, com.google.ads.googleads.v10.resources.CampaignSharedSetOrBuilder>(
              (com.google.ads.googleads.v10.resources.CampaignSharedSet) operation_,
              getParentForChildren(),
              isClean());
      operation_ = null;
    }
    operationCase_ = 1;
    onChanged();;
    return createBuilder_;
  }
  /**
   * Remove operation: a resource name for the removed campaign shared set is
   * expected, in the format
   * {@code customers/{customer_id}/campaignSharedSets/{campaign_id}~{shared_set_id}}.
   * <code>string remove = 3;</code>
   * @return Whether the remove field is set.
   */
  @java.lang.Override
  public boolean hasRemove() {
    return operationCase_ == 3;
  }
  /**
   * <code>string remove = 3;</code>
   * @return The remove resource name, or "" if this case is not set.
   */
  @java.lang.Override
  public java.lang.String getRemove() {
    java.lang.Object ref = "";
    if (operationCase_ == 3) {
      ref = operation_;
    }
    if (!(ref instanceof java.lang.String)) {
      // Payload is still a ByteString from parsing: decode and cache the String.
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (operationCase_ == 3) {
        operation_ = s;
      }
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <code>string remove = 3;</code>
   * @return The UTF-8 bytes for remove (encoding and caching on first access).
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getRemoveBytes() {
    java.lang.Object ref = "";
    if (operationCase_ == 3) {
      ref = operation_;
    }
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      if (operationCase_ == 3) {
        operation_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * Sets the remove resource name and selects the remove oneof case.
   * <code>string remove = 3;</code>
   * @param value The remove to set.
   * @return This builder for chaining.
   */
  public Builder setRemove(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    operationCase_ = 3;
    operation_ = value;
    onChanged();
    return this;
  }
  /**
   * Clears the remove case (only if it is the active case).
   * <code>string remove = 3;</code>
   * @return This builder for chaining.
   */
  public Builder clearRemove() {
    if (operationCase_ == 3) {
      operationCase_ = 0;
      operation_ = null;
      onChanged();
    }
    return this;
  }
  /**
   * Sets remove from raw bytes; proto3 strings must be valid UTF-8.
   * <code>string remove = 3;</code>
   * @param value The bytes for remove to set.
   * @return This builder for chaining.
   */
  public Builder setRemoveBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    operationCase_ = 3;
    operation_ = value;
    onChanged();
    return this;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.CampaignSharedSetOperation)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.CampaignSharedSetOperation)
// Shared immutable empty instance; also the prototype for newBuilder().
private static final com.google.ads.googleads.v10.services.CampaignSharedSetOperation DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.CampaignSharedSetOperation();
}
public static com.google.ads.googleads.v10.services.CampaignSharedSetOperation getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser singleton; delegates to the parsing constructor.
private static final com.google.protobuf.Parser<CampaignSharedSetOperation>
    PARSER = new com.google.protobuf.AbstractParser<CampaignSharedSetOperation>() {
  @java.lang.Override
  public CampaignSharedSetOperation parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new CampaignSharedSetOperation(input, extensionRegistry);
  }
};
public static com.google.protobuf.Parser<CampaignSharedSetOperation> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CampaignSharedSetOperation> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v10.services.CampaignSharedSetOperation getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/**
*
*/
package de.saumya.mojo.ruby.script;
import de.saumya.mojo.ruby.Logger;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.Java;
import org.apache.tools.ant.types.Environment.Variable;
import org.apache.tools.ant.types.Path;
import java.io.*;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
/**
 * Launches a forked JRuby process ("org.jruby.Main") via the Ant {@code Java}
 * task, wiring up classpath, environment variables, jvm args and the
 * stdout/stderr capture requested by the various execute* entry points.
 *
 * Fixes over the previous revision:
 * - doExecute(..., File) no longer leaks the FileOutputStream.
 * - writeInto() no longer leaks the FileInputStream and only writes the bytes
 *   actually read (the old version wrote the full 4 KiB buffer every loop
 *   iteration, corrupting the copied output).
 * - an IOException thrown while copying captured output no longer re-runs
 *   java.execute() a second time (the old catch block did exactly that).
 */
class AntLauncher extends AbstractLauncher {

    private static final String MAVEN_CLASSPATH = "maven.classpath";

    // NOTE(review): not referenced anywhere in this class; kept only to avoid
    // breaking anything that might read it reflectively — consider removing.
    private static final String DEFAULT_XMX = "-Xmx384m";

    private static final String TEMP_FILE_PREFIX = "jruby-ant-launcher-";

    private final Logger logger;
    private final ScriptFactory factory;
    private final Project project;

    AntLauncher(final Logger logger, final ScriptFactory factory) {
        this.logger = logger;
        this.factory = factory;
        this.project = createAntProject();
    }

    @Override
    protected void doExecute(final File launchDirectory,
            final List<String> args, File outputFile) throws ScriptException, IOException {
        // try-with-resources: the stream was previously opened and never closed.
        try (OutputStream out = new FileOutputStream(outputFile)) {
            doExecute(launchDirectory, args, out);
        }
    }

    @Override
    protected void doExecute(final File launchDirectory, final List<String> args,
            final OutputStream outputStream) throws ScriptException, IOException {
        final Java java = createJavaTask(launchDirectory, args);

        File outputTempFile = null;
        if (outputStream != null) {
            // stdout is captured to a temp file and copied to the caller's stream
            // after the process finishes.
            outputTempFile = File.createTempFile(TEMP_FILE_PREFIX, ".output");
            java.setOutput(outputTempFile);
        }
        java.setLogError(true);

        // stderr capture is best effort: if the temp file cannot be created we
        // still run the process (once!) without it, matching the old fallback.
        File errorTempFile = null;
        try {
            errorTempFile = File.createTempFile(TEMP_FILE_PREFIX, ".log");
            errorTempFile.deleteOnExit();
            java.setError(errorTempFile);
        } catch (IOException e) {
            logger.warn("can not create tempfile for stderr");
            errorTempFile = null;
        }

        try {
            java.execute();
            if (outputStream != null) {
                writeInto(outputTempFile, outputStream);
                outputTempFile.delete();
            }
        } finally {
            logAndDeleteErrorFile(errorTempFile);
        }
    }

    /**
     * Builds and fully configures the Ant {@link Java} task for one invocation:
     * classname, working dir, environment, program args, classpath and jvm args.
     */
    private Java createJavaTask(final File launchDirectory, final List<String> args) {
        final Java java = new Java();
        java.setProject(this.project);
        java.setClassname("org.jruby.Main");
        java.setFailonerror(true);
        java.setFork(true);
        java.setDir(launchDirectory);

        for (final Map.Entry<String, String> entry : this.factory.environment().entrySet()) {
            Variable v = new Variable();
            v.setKey(entry.getKey());
            v.setValue(entry.getValue());
            java.addEnv(v);
        }

        // TODO add isDebugable to the logger and log only when debug is needed
        this.logger.debug("java classpath : "
                + this.project.getReference(MAVEN_CLASSPATH));
        if (!this.factory.environment().isEmpty()) {
            this.logger.debug("environment :");
            for (final Map.Entry<String, String> entry : this.factory.environment().entrySet()) {
                this.logger.debug("\t\t" + entry.getKey() + " => "
                        + entry.getValue());
            }
        }

        // jruby switches first, then the caller-supplied arguments.
        for (final String arg : factory.switches.list) {
            java.createArg().setValue(arg);
        }
        for (final String arg : args) {
            java.createArg().setValue(arg);
        }

        Path classpath = (Path) this.project.getReference(MAVEN_CLASSPATH);
        if (this.factory.jrubyJar != null) {
            classpath.add(new Path(project, this.factory.jrubyJar.getAbsolutePath()));
        }
        java.createJvmarg().setLine("-XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.security.cert=ALL-UNNAMED --add-opens=java.base/java.security=ALL-UNNAMED --add-opens=java.base/java.util.zip=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/javax.crypto=ALL-UNNAMED --illegal-access=warn");
        java.createJvmarg().setValue("-cp");
        java.createJvmarg().setPath(classpath);
        // Does not work on all JVMs
        // if (!factory.jvmArgs.matches("(-client|-server)")) {
        //     java.createJvmarg().setValue("-client");
        // }
        for (String arg : factory.jvmArgs.list) {
            java.createJvmarg().setValue(arg);
        }

        // hack to avoid jruby-core in bootclassloader where as the dependent jars
        // are in system classloader
        if (this.factory.jrubyJar != null && this.factory.jrubyJar.equals(this.factory.jrubyStdlibJar)) {
            java.createJvmarg().setValue("-Xbootclasspath/a:"
                    + this.factory.jrubyJar.getAbsolutePath());
        }
        if (this.factory.jrubyJar == null && System.getProperty("jruby.home") != null) {
            // No explicit jruby jar: fall back to the installed jruby.home.
            Variable v = new Variable();
            v.setKey("jruby.home");
            v.setValue(System.getProperty("jruby.home"));
            java.addSysproperty(v);
            File lib = System.getProperty("jruby.lib") != null ? new File(System.getProperty("jruby.lib")) :
                    new File(System.getProperty("jruby.home"), "lib");
            File jrubyJar = new File(lib, "jruby.jar");
            java.createJvmarg().setValue("-Xbootclasspath/a:"
                    + jrubyJar.getAbsolutePath());
        }
        return java;
    }

    /** Logs any captured stderr as a warning and removes the temp file. */
    private void logAndDeleteErrorFile(final File errorTempFile) {
        if (errorTempFile != null && errorTempFile.length() > 0) {
            try {
                byte[] encoded = Files.readAllBytes(errorTempFile.toPath());
                logger.warn(new String(encoded));
            } catch (IOException e) {
                logger.warn("can not read error file");
            }
            errorTempFile.delete();
        }
    }

    /** Copies the full contents of {@code file} into {@code outputStream}. */
    private void writeInto(File file, OutputStream outputStream) throws IOException {
        // Files.copy closes nothing it does not own and honors partial reads,
        // fixing both the stream leak and the over-write of unread buffer bytes.
        Files.copy(file.toPath(), outputStream);
    }

    /** Creates the Ant project holding the maven classpath reference and log adapter. */
    private Project createAntProject() {
        final Project project = new Project();
        // setup maven.plugin.classpath; skip jruby-complete unless no explicit jruby jar
        final Path classPath = new Path(project);
        for (final String path : this.factory.classpathElements) {
            if (!path.contains("jruby-complete") || factory.jrubyJar == null) {
                classPath.add(new Path(project, path));
            }
        }
        project.addReference(MAVEN_CLASSPATH, classPath);
        project.addBuildListener(new AntLogAdapter(this.logger));
        return project;
    }

    @Override
    public void execute(final List<String> args) throws ScriptException,
            IOException {
        doExecute(null, args, (OutputStream) null);
    }

    @Override
    public void execute(final List<String> args, final File outputFile)
            throws ScriptException, IOException {
        doExecute(null, args, outputFile);
    }

    @Override
    public void executeIn(final File launchDirectory, final List<String> args)
            throws ScriptException, IOException {
        doExecute(launchDirectory, args, (OutputStream) null);
    }

    @Override
    public void executeIn(final File launchDirectory, final List<String> args,
            final File outputFile) throws ScriptException, IOException {
        doExecute(launchDirectory, args, outputFile);
    }

    @Override
    public void executeScript(final String script, final List<String> args)
            throws ScriptException, IOException {
        executeScript(script, args, (OutputStream) null);
    }

    @Override
    public void executeScript(final String script, final List<String> args,
            final File outputFile) throws ScriptException, IOException {
        executeScript(null, script, args, outputFile);
    }

    @Override
    public void executeScript(final File launchDirectory, final String script,
            final List<String> args) throws ScriptException, IOException {
        executeScript(launchDirectory, script, args, (OutputStream) null);
    }

    @Override
    public void executeScript(final File launchDirectory, final String script,
            final List<String> args, final File outputFile)
            throws ScriptException, IOException {
        addScriptArguments(script, args);
        doExecute(launchDirectory, args, outputFile);
    }

    @Override
    public void executeScript(final File launchDirectory, final String script,
            final List<String> args, final OutputStream outputStream)
            throws ScriptException, IOException {
        addScriptArguments(script, args);
        doExecute(launchDirectory, args, outputStream);
    }

    /** Prepends the inline-script flags so jruby evaluates {@code script} before the args. */
    private void addScriptArguments(String script, List<String> args) {
        args.add(0, "-e");
        args.add(1, script);
        args.add(2, "--");
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: facestyle.proto
package com.learn.proto;
public final class Facestyle {
// Not instantiable: the outer class only namespaces the generated types.
private Facestyle() {}
/** No extensions are defined in facestyle.proto, so this is a no-op. */
public static void registerAllExtensions(
    com.google.protobuf.ExtensionRegistry registry) {
}
/** Read-only accessor interface shared by {@code FaceStyle} and its builder. */
public interface FaceStyleOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required string name = 1;
  /**
   * <code>required string name = 1;</code>
   * @return whether the (proto2 required) name field has been set
   */
  boolean hasName();
  /**
   * <code>required string name = 1;</code>
   * @return the name value ("" when unset)
   */
  java.lang.String getName();
  /**
   * <code>required string name = 1;</code>
   * @return the name as UTF-8 bytes
   */
  com.google.protobuf.ByteString
      getNameBytes();
}
/**
* Protobuf type {@code tutorial.FaceStyle}
*/
public static final class FaceStyle extends
com.google.protobuf.GeneratedMessage
implements FaceStyleOrBuilder {
// Use FaceStyle.newBuilder() to construct.
private FaceStyle(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// Used only for the shared default instance; fields are set by initFields() later.
private FaceStyle(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final FaceStyle defaultInstance;
public static FaceStyle getDefaultInstance() {
  return defaultInstance;
}
public FaceStyle getDefaultInstanceForType() {
  return defaultInstance;
}
// Fields encountered on the wire that this (old) schema does not know about.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tags until EOF (tag 0), storing
 * field 1 (tag 10 = field 1, wire type 2) as name and preserving everything
 * else in unknownFields.
 */
private FaceStyle(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      // NOTE: the default label appears before case 10; Java switch matching is
      // unaffected by case ordering, so behavior is identical to default-last.
      switch (tag) {
        case 0:
          done = true;  // readTag() returns 0 at end of stream
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;  // hit an end-group tag
          }
          break;
        }
        case 10: {
          bitField0_ |= 0x00000001;  // mark name as present
          name_ = input.readBytes(); // kept as ByteString; decoded lazily in getName()
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    // Always freeze whatever was parsed, even on failure paths.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.learn.proto.Facestyle.internal_static_tutorial_FaceStyle_descriptor;
}
// Maps descriptor fields to the generated accessors for reflection support.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.learn.proto.Facestyle.internal_static_tutorial_FaceStyle_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.learn.proto.Facestyle.FaceStyle.class, com.learn.proto.Facestyle.FaceStyle.Builder.class);
}
// Stateless parser singleton; delegates to the parsing constructor above.
public static com.google.protobuf.Parser<FaceStyle> PARSER =
    new com.google.protobuf.AbstractParser<FaceStyle>() {
  public FaceStyle parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new FaceStyle(input, extensionRegistry);
  }
};
@java.lang.Override
public com.google.protobuf.Parser<FaceStyle> getParserForType() {
  return PARSER;
}
// Presence bits for optional/required fields; bit 0 tracks name.
private int bitField0_;

// required string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; decoded lazily on access.
private java.lang.Object name_;
/**
 * <code>required string name = 1;</code>
 * @return whether name was set (presence bit 0)
 */
public boolean hasName() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string name = 1;</code>
 * @return the name, decoding from UTF-8 bytes on first access
 */
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String only when the bytes were valid UTF-8, so
    // invalid input keeps round-tripping the original bytes.
    if (bs.isValidUtf8()) {
      name_ = s;
    }
    return s;
  }
}
/**
 * <code>required string name = 1;</code>
 * @return the name as UTF-8 bytes, encoding and caching on first access
 */
public com.google.protobuf.ByteString
    getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Sets field defaults; called before parsing and for the default instance.
private void initFields() {
  name_ = "";
}
// Memoized result of isInitialized(): -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// 'name' is a required field; the message is invalid without it.
if (!hasName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Writes the set fields in field-number order, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, getNameBytes());
}
getUnknownFields().writeTo(output);
}
// Memoized serialized size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, getNameBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
// Java serialization hook; delegates to the protobuf-aware replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// --- parseFrom family: convenience entry points that all delegate to PARSER ---
public static com.learn.proto.Facestyle.FaceStyle parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.learn.proto.Facestyle.FaceStyle parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.learn.proto.Facestyle.FaceStyle parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.learn.proto.Facestyle.FaceStyle parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.learn.proto.Facestyle.FaceStyle parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.learn.proto.Facestyle.FaceStyle parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message body.
public static com.learn.proto.Facestyle.FaceStyle parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static com.learn.proto.Facestyle.FaceStyle parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static com.learn.proto.Facestyle.FaceStyle parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.learn.proto.Facestyle.FaceStyle parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
// --- builder factories ---
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Returns a builder pre-populated from the given prototype message.
public static Builder newBuilder(com.learn.proto.Facestyle.FaceStyle prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Builder for protobuf type {@code tutorial.FaceStyle}.
 * Mutable companion used to construct and merge {@code FaceStyle} messages.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements com.learn.proto.Facestyle.FaceStyleOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.learn.proto.Facestyle.internal_static_tutorial_FaceStyle_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.learn.proto.Facestyle.internal_static_tutorial_FaceStyle_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.learn.proto.Facestyle.FaceStyle.class, com.learn.proto.Facestyle.FaceStyle.Builder.class);
}
// Construct using com.learn.proto.Facestyle.FaceStyle.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message fields here, so there are no sub-builders to force-create.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets every field to its proto default.
public Builder clear() {
super.clear();
name_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.learn.proto.Facestyle.internal_static_tutorial_FaceStyle_descriptor;
}
public com.learn.proto.Facestyle.FaceStyle getDefaultInstanceForType() {
return com.learn.proto.Facestyle.FaceStyle.getDefaultInstance();
}
// Builds the message, throwing if the required 'name' field is unset.
public com.learn.proto.Facestyle.FaceStyle build() {
com.learn.proto.Facestyle.FaceStyle result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without checking required fields; copies the has-bits across.
public com.learn.proto.Facestyle.FaceStyle buildPartial() {
com.learn.proto.Facestyle.FaceStyle result = new com.learn.proto.Facestyle.FaceStyle(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.name_ = name_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.learn.proto.Facestyle.FaceStyle) {
return mergeFrom((com.learn.proto.Facestyle.FaceStyle)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Copies set fields (and unknown fields) from 'other' into this builder.
public Builder mergeFrom(com.learn.proto.Facestyle.FaceStyle other) {
if (other == com.learn.proto.Facestyle.FaceStyle.getDefaultInstance()) return this;
if (other.hasName()) {
bitField0_ |= 0x00000001;
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasName()) {
return false;
}
return true;
}
// Parses from a stream and merges; on parse failure the partially parsed
// data is still merged (see the finally block) before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.learn.proto.Facestyle.FaceStyle parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.learn.proto.Facestyle.FaceStyle) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string name = 1;
// Holds either a String or a ByteString, mirroring the message field.
private java.lang.Object name_ = "";
/**
 * <code>required string name = 1;</code>
 */
public boolean hasName() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string name = 1;</code>
 */
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string name = 1;</code>
 */
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string name = 1;</code>
 */
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
/**
 * <code>required string name = 1;</code>
 * Clears the field and restores the proto default ("").
 */
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000001);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 * <code>required string name = 1;</code>
 */
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
name_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:tutorial.FaceStyle)
}
// Eagerly creates the shared default instance used by getDefaultInstance().
static {
defaultInstance = new FaceStyle(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:tutorial.FaceStyle)
}
// Descriptor and accessor table for tutorial.FaceStyle; populated by the
// static initializer below.
private static com.google.protobuf.Descriptors.Descriptor
internal_static_tutorial_FaceStyle_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_tutorial_FaceStyle_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized facestyle.proto file descriptor (package 'tutorial',
// message FaceStyle with a single required string field 'name').
java.lang.String[] descriptorData = {
"\n\017facestyle.proto\022\010tutorial\"\031\n\tFaceStyle" +
"\022\014\n\004name\030\001 \002(\tB\021\n\017com.learn.proto"
};
// Callback invoked once the file descriptor is built; wires up the
// per-message descriptor and field accessor table.
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_tutorial_FaceStyle_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_tutorial_FaceStyle_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_tutorial_FaceStyle_descriptor,
new java.lang.String[] { "Name", });
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.memcache.v1beta2;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Resource name wrapper for the pattern
 * {@code projects/{project}/locations/{location}/instances/{instance}}.
 *
 * <p>Instances are immutable; use {@link #newBuilder()} or {@link #of} to create
 * one, and {@link #parse} to recover the components from a formatted string.
 */
@Generated("by gapic-generator-java")
public class InstanceName implements ResourceName {
  private static final PathTemplate PROJECT_LOCATION_INSTANCE =
      PathTemplate.createWithoutUrlEncoding(
          "projects/{project}/locations/{location}/instances/{instance}");
  // Lazily built by getFieldValuesMap(); volatile so the double-checked
  // locking there publishes the map safely.
  private volatile Map<String, String> fieldValuesMap;
  private final String project;
  private final String location;
  private final String instance;

  /** @deprecated Only for framework use; produces an instance with all-null components. */
  @Deprecated
  protected InstanceName() {
    project = null;
    location = null;
    instance = null;
  }

  private InstanceName(Builder builder) {
    // All three components are mandatory for a well-formed resource name.
    project = Preconditions.checkNotNull(builder.getProject());
    location = Preconditions.checkNotNull(builder.getLocation());
    instance = Preconditions.checkNotNull(builder.getInstance());
  }

  public String getProject() {
    return project;
  }

  public String getLocation() {
    return location;
  }

  public String getInstance() {
    return instance;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  /** Returns a builder pre-populated with this name's components. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Creates an InstanceName from its three components. */
  public static InstanceName of(String project, String location, String instance) {
    return newBuilder().setProject(project).setLocation(location).setInstance(instance).build();
  }

  /** Formats the three components into the canonical resource-name string. */
  public static String format(String project, String location, String instance) {
    return newBuilder()
        .setProject(project)
        .setLocation(location)
        .setInstance(instance)
        .build()
        .toString();
  }

  /**
   * Parses a formatted resource-name string back into an InstanceName.
   *
   * @return {@code null} for the empty string; otherwise the parsed name
   * @throws com.google.api.pathtemplate.ValidationException if the string does not match the pattern
   */
  public static InstanceName parse(String formattedString) {
    if (formattedString.isEmpty()) {
      return null;
    }
    Map<String, String> matchMap =
        PROJECT_LOCATION_INSTANCE.validatedMatch(
            formattedString, "InstanceName.parse: formattedString not in valid format");
    return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("instance"));
  }

  /** Parses each element of the list; empty strings map to {@code null} entries. */
  public static List<InstanceName> parseList(List<String> formattedStrings) {
    List<InstanceName> list = new ArrayList<>(formattedStrings.size());
    for (String formattedString : formattedStrings) {
      list.add(parse(formattedString));
    }
    return list;
  }

  /** Formats each name in the list; {@code null} entries map to empty strings. */
  public static List<String> toStringList(List<InstanceName> values) {
    List<String> list = new ArrayList<>(values.size());
    for (InstanceName value : values) {
      if (value == null) {
        list.add("");
      } else {
        list.add(value.toString());
      }
    }
    return list;
  }

  /** Returns whether the string matches the resource-name pattern. */
  public static boolean isParsableFrom(String formattedString) {
    return PROJECT_LOCATION_INSTANCE.matches(formattedString);
  }

  @Override
  public Map<String, String> getFieldValuesMap() {
    // Double-checked locking: build the immutable map at most once.
    if (fieldValuesMap == null) {
      synchronized (this) {
        if (fieldValuesMap == null) {
          ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
          if (project != null) {
            fieldMapBuilder.put("project", project);
          }
          if (location != null) {
            fieldMapBuilder.put("location", location);
          }
          if (instance != null) {
            fieldMapBuilder.put("instance", instance);
          }
          fieldValuesMap = fieldMapBuilder.build();
        }
      }
    }
    return fieldValuesMap;
  }

  public String getFieldValue(String fieldName) {
    return getFieldValuesMap().get(fieldName);
  }

  @Override
  public String toString() {
    return PROJECT_LOCATION_INSTANCE.instantiate(
        "project", project, "location", location, "instance", instance);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // BUG FIX: the original guard was `o != null || getClass() == o.getClass()`.
    // With `||`, equals(null) dereferenced o (NullPointerException) and any
    // non-null object of a different class passed the guard and failed the cast.
    // The equals contract requires equals(null) == false; `&&` restores that.
    if (o != null && getClass() == o.getClass()) {
      InstanceName that = ((InstanceName) o);
      return Objects.equals(this.project, that.project)
          && Objects.equals(this.location, that.location)
          && Objects.equals(this.instance, that.instance);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int h = 1;
    h *= 1000003;
    h ^= Objects.hashCode(project);
    h *= 1000003;
    h ^= Objects.hashCode(location);
    h *= 1000003;
    h ^= Objects.hashCode(instance);
    return h;
  }

  /** Builder for projects/{project}/locations/{location}/instances/{instance}. */
  public static class Builder {
    private String project;
    private String location;
    private String instance;

    protected Builder() {}

    public String getProject() {
      return project;
    }

    public String getLocation() {
      return location;
    }

    public String getInstance() {
      return instance;
    }

    public Builder setProject(String project) {
      this.project = project;
      return this;
    }

    public Builder setLocation(String location) {
      this.location = location;
      return this;
    }

    public Builder setInstance(String instance) {
      this.instance = instance;
      return this;
    }

    private Builder(InstanceName instanceName) {
      this.project = instanceName.project;
      this.location = instanceName.location;
      this.instance = instanceName.instance;
    }

    public InstanceName build() {
      return new InstanceName(this);
    }
  }
}
| |
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.internal.store;
import org.ehcache.config.Eviction;
import org.ehcache.exceptions.CacheAccessException;
import org.ehcache.spi.cache.Store;
import org.ehcache.spi.test.After;
import org.ehcache.spi.test.SPITest;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
/**
 * Tests the {@link org.ehcache.spi.cache.Store#remove(Object, Object)} contract of the
 * {@link org.ehcache.spi.cache.Store Store} interface: the mapping is removed only when
 * the key is currently mapped to an equal value, and the boolean result reports whether
 * a removal actually happened. Null arguments and wrongly-typed arguments must fail fast.
 *
 * @author Aurelien Broszniowski
 */
public class StoreRemoveKeyValueTest<K, V> extends SPIStoreTester<K, V> {
public StoreRemoveKeyValueTest(final StoreFactory<K, V> factory) {
super(factory);
}
// Store under test with the factory's declared key/value types.
protected Store<K, V> kvStore;
// Raw-typed store used by the wrong-type tests below.
protected Store kvStore2;
@After
public void tearDown() {
if (kvStore != null) {
// kvStore.close();
kvStore = null;
}
if (kvStore2 != null) {
// kvStore2.close();
kvStore2 = null;
}
}
// remove(k, v) must remove the mapping when the stored value equals v,
// even when key/value are equal-but-distinct instances.
@SPITest
public void removeEntryForKeyIfMappedToValue()
throws IllegalAccessException, InstantiationException, CacheAccessException {
kvStore = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, Eviction
.all(), null));
K key = factory.createKey(1L);
V value = factory.createValue(1L);
kvStore.put(key, value);
K equalKey = factory.createKey(1L);
V equalValue = factory.createValue(1L);
// Sanity-check that the factory produced equal (not identical) instances.
assertThat(key.equals(equalKey), is(true));
assertThat(value.equals(equalValue), is(true));
try {
kvStore.remove(equalKey, equalValue);
} catch (CacheAccessException e) {
System.err.println("Warning, an exception is thrown due to the SPI test");
e.printStackTrace();
}
assertThat(kvStore.containsKey(key), is(false));
}
// remove(k, v) on an absent key must be a no-op returning false.
@SPITest
public void doNothingForKeyNotMappedToValue()
throws IllegalAccessException, InstantiationException, CacheAccessException {
kvStore = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, Eviction.all(), null));
K key = factory.createKey(1);
V value = factory.createValue(1);
assertThat(kvStore.containsKey(key), is(false));
try {
boolean isRemoved = kvStore.remove(key, value);
assertThat(isRemoved, is(false));
} catch (CacheAccessException e) {
throw new AssertionError(e);
}
}
// remove(k, v) must not remove the mapping when the stored value differs from v.
@SPITest
public void doNothingForWrongValue()
throws IllegalAccessException, InstantiationException, CacheAccessException {
kvStore = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, Eviction.all(), null));
K key = factory.createKey(1);
V value = factory.createValue(1L);
kvStore.put(key, value);
V notEqualValue = factory.createValue(2L);
assertThat(value.equals(notEqualValue), is(false));
try {
assertThat(kvStore.remove(key, notEqualValue), is(false));
} catch (CacheAccessException e) {
throw new AssertionError(e);
}
}
// A successful conditional removal must report true.
@SPITest
public void returnTrueIfValueWasRemoved()
throws IllegalAccessException, InstantiationException, CacheAccessException {
kvStore = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, Eviction.all(), null));
K key = factory.createKey(1);
V value = factory.createValue(1);
kvStore.put(key, value);
assertThat(kvStore.containsKey(key), is(true));
try {
assertThat(kvStore.remove(key, value), is(true));
} catch (CacheAccessException e) {
System.err.println("Warning, an exception is thrown due to the SPI test");
e.printStackTrace();
}
}
// A conditional removal that matched nothing must report false.
@SPITest
public void returnFalseIfValueWasNotRemoved()
throws IllegalAccessException, InstantiationException, CacheAccessException {
kvStore = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, Eviction.all(), null));
K key = factory.createKey(1);
V value = factory.createValue(1);
assertThat(kvStore.containsKey(key), is(false));
try {
assertThat(kvStore.remove(key, value), is(false));
} catch (CacheAccessException e) {
System.err.println("Warning, an exception is thrown due to the SPI test");
e.printStackTrace();
}
}
// Passing a null key must raise NullPointerException.
@SPITest
public void nullKeyThrowsException()
throws IllegalAccessException, InstantiationException {
kvStore = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, null, null));
K key = null;
V value = factory.createValue(1);
try {
kvStore.remove(key, value);
throw new AssertionError("Expected NullPointerException because the key is null");
} catch (NullPointerException e) {
// expected
} catch (CacheAccessException e) {
System.err.println("Warning, an exception is thrown due to the SPI test");
e.printStackTrace();
}
}
// Passing a null value must raise NullPointerException.
@SPITest
public void nullValueThrowsException()
throws IllegalAccessException, InstantiationException {
kvStore = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, null, null));
K key = factory.createKey(1);
V value = null;
try {
kvStore.remove(key, value);
throw new AssertionError("Expected NullPointerException because the value is null");
} catch (NullPointerException e) {
// expected
} catch (CacheAccessException e) {
System.err.println("Warning, an exception is thrown due to the SPI test");
e.printStackTrace();
}
}
// A key of the wrong runtime type must raise ClassCastException; the probe
// type is chosen to differ from the configured key type.
@SPITest
@SuppressWarnings({ "unchecked", "rawtypes" })
public void wrongKeyTypeThrowsException()
throws IllegalAccessException, InstantiationException {
kvStore2 = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, null, null));
V value = factory.createValue(1);
try {
if (this.factory.getKeyType() == String.class) {
kvStore2.remove(1.0f, value);
} else {
kvStore2.remove("key", value);
}
throw new AssertionError("Expected ClassCastException because the key is of the wrong type");
} catch (ClassCastException e) {
// expected
} catch (CacheAccessException e) {
System.err.println("Warning, an exception is thrown due to the SPI test");
e.printStackTrace();
}
}
// A value of the wrong runtime type must raise ClassCastException; the probe
// type is chosen to differ from the configured value type.
@SPITest
@SuppressWarnings({ "unchecked", "rawtypes" })
public void wrongValueTypeThrowsException()
throws IllegalAccessException, InstantiationException {
kvStore2 = factory.newStore(factory.newConfiguration(factory.getKeyType(), factory.getValueType(), null, null, null));
K key = factory.createKey(1);
try {
if (this.factory.getValueType() == String.class) {
kvStore2.remove(key, 1.0f);
} else {
kvStore2.remove(key, "value");
}
throw new AssertionError("Expected ClassCastException because the value is of the wrong type");
} catch (ClassCastException e) {
// expected
} catch (CacheAccessException e) {
System.err.println("Warning, an exception is thrown due to the SPI test");
e.printStackTrace();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.ivyde.internal.eclipse.resolve;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.ivy.Ivy;
import org.apache.ivy.core.module.descriptor.ModuleDescriptor;
import org.apache.ivy.core.sort.ModuleDescriptorSorter;
import org.apache.ivy.core.sort.WarningNonMatchingVersionReporter;
import org.apache.ivy.plugins.circular.CircularDependencyStrategy;
import org.apache.ivy.plugins.circular.WarnCircularDependencyStrategy;
import org.apache.ivy.plugins.version.VersionMatcher;
import org.apache.ivyde.eclipse.IvyDEException;
import org.apache.ivyde.internal.eclipse.CachedIvy;
import org.apache.ivyde.internal.eclipse.IvyDEMessage;
import org.apache.ivyde.internal.eclipse.IvyMarkerManager;
import org.apache.ivyde.internal.eclipse.IvyPlugin;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.MultiStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
/**
 * Eclipse background {@link Job} that drains the queue of pending IvyDE resolve
 * requests: it loads the Ivy settings and module descriptor for each request,
 * resolves workspace-resolver modules in dependency order, then the remaining
 * modules, and finally runs the post-batch-resolve hooks. Errors are collected
 * into a {@link MultiStatus} rather than aborting the whole batch.
 */
public class IvyResolveJob extends Job {
// Total ticks reported to the progress monitor for one run.
private static final int MONITOR_LENGTH = 1000;
// Ticks budgeted for loading Ivy settings/descriptors.
private static final int IVY_LOAD_LENGTH = 100;
// Ticks budgeted for the post-batch-resolve phase.
private static final int POST_RESOLVE_LENGTH = 100;
// Delay (ms) before a scheduled run, so several addRequest() calls coalesce.
private static final int WAIT_BEFORE_LAUNCH = 1000;
// Pending requests; guarded by synchronizing on the list itself.
private final List<ResolveRequest> resolveQueue = new ArrayList<>();
public IvyResolveJob() {
super("IvyDE resolve");
setUser(false);
// computing the classpath is somehow building
setRule(ResourcesPlugin.getWorkspace().getRuleFactory().buildRule());
}
// Enqueues the request and runs the job synchronously on the caller's thread.
public IStatus launchRequest(ResolveRequest request, IProgressMonitor monitor) {
synchronized (resolveQueue) {
resolveQueue.add(request);
}
return run(monitor);
}
// Enqueues the request and schedules an asynchronous run after a short delay.
public void addRequest(ResolveRequest request) {
synchronized (resolveQueue) {
resolveQueue.add(request);
}
schedule(WAIT_BEFORE_LAUNCH);
}
protected IStatus run(IProgressMonitor monitor) {
try {
return doRun(monitor);
} catch (RuntimeException e) {
// Log before rethrowing so unexpected failures are visible in the IvyDE log.
IvyDEMessage.error("Resolve job failed with an unexpected exception", e);
throw e;
}
}
private IStatus doRun(IProgressMonitor monitor) {
IvyDEMessage.info("Resolve job starting...");
List<ResolveRequest> toResolve;
// Snapshot and clear the queue so requests added during the run are
// picked up by a later scheduled execution.
synchronized (resolveQueue) {
toResolve = new ArrayList<>(resolveQueue);
resolveQueue.clear();
}
if (toResolve.isEmpty()) {
IvyDEMessage.info("Nothing to resolve");
return Status.OK_STATUS;
}
IvyDEMessage.verbose(toResolve.size() + " container(s) to resolve");
monitor.beginTask("Loading Ivy descriptors", MONITOR_LENGTH);
// Modules using the workspace resolver, keyed by descriptor so they can be
// sorted and resolved in dependency order (LinkedHashMap keeps load order).
Map<ModuleDescriptor, List<ResolveRequest>> inworkspaceModules = new LinkedHashMap<>();
List<ResolveRequest> otherModules = new ArrayList<>();
Map<ResolveRequest, Ivy> ivys = new HashMap<>();
Map<ResolveRequest, ModuleDescriptor> mds = new HashMap<>();
final MultiStatus errorsStatus = new MultiStatus(IvyPlugin.ID, IStatus.ERROR,
"Some projects fail to be resolved", null);
// Per-request tick size for the descriptor-loading phase.
int step = IVY_LOAD_LENGTH / toResolve.size();
boolean forceFailOnError = false;
// Ivy use the SaxParserFactory, and we want it to instantiate the xerces parser which is in
// the dependencies of IvyDE, so accessible via the current classloader
ClassLoader old = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(IvyResolveJob.class.getClassLoader());
try {
for (ResolveRequest request : toResolve) {
IvyDEMessage.info("Processing resolve request " + request.toString());
forceFailOnError = forceFailOnError || request.isForceFailOnError();
monitor.subTask("loading " + request.getResolver().toString());
IProject project = request.getResolver().getProject();
// Closed projects are skipped but still consume their monitor ticks.
if (project != null && !project.isAccessible()) {
IvyDEMessage.warn("Skipping resolve on closed project " + project.getName());
monitor.worked(step);
continue;
}
IvyDEMessage.verbose("Loading Ivy settings for " + request.toString());
CachedIvy cachedIvy = request.getCachedIvy();
Ivy ivy;
try {
ivy = cachedIvy.getIvy();
} catch (IvyDEException e) {
// Record the failure as a marker and a status child; keep processing
// the remaining requests.
cachedIvy.setErrorMarker(e);
IvyDEMessage.error("Failed to configure Ivy for " + request + ": "
+ e.getMessage());
errorsStatus.add(e.asStatus(IStatus.ERROR, "Failed to configure Ivy for "
+ request));
monitor.worked(step);
continue;
}
cachedIvy.setErrorMarker(null);
ivys.put(request, ivy);
// IVYDE-168 : Ivy needs the IvyContext in the threadlocal in order to found the
// default branch
ivy.pushContext();
ModuleDescriptor md;
try {
md = cachedIvy.getModuleDescriptor(ivy);
} catch (IvyDEException e) {
cachedIvy.setErrorMarker(e);
IvyDEMessage.error("Failed to load the descriptor for " + request + ": "
+ e.getMessage());
errorsStatus.add(e.asStatus(IStatus.ERROR, "Failed to load the descriptor for "
+ request));
monitor.worked(step);
continue;
} finally {
// Always pop the context pushed above, even on failure.
ivy.popContext();
}
cachedIvy.setErrorMarker(null);
mds.put(request, md);
if (request.isInWorkspace()) {
List<ResolveRequest> requests = inworkspaceModules.get(md);
if (requests == null) {
requests = new ArrayList<>();
inworkspaceModules.put(md, requests);
}
requests.add(request);
} else {
otherModules.add(request);
}
monitor.worked(step);
}
} finally {
// Restore the caller's context classloader whatever happened above.
Thread.currentThread().setContextClassLoader(old);
}
// Per-request tick size for the resolve phase.
step = (MONITOR_LENGTH - IVY_LOAD_LENGTH - POST_RESOLVE_LENGTH) / toResolve.size();
if (inworkspaceModules.isEmpty()) {
IvyDEMessage.verbose("No module to resolve in workspace");
} else {
IvyDEMessage.info(inworkspaceModules.size() + " module(s) to resolve in workspace");
// for the modules which are using the workspace resolver, make sure
// we resolve them in the correct order
// The version matcher used will be the one configured for the first project
ResolveRequest request = inworkspaceModules.values().iterator().next().get(0);
VersionMatcher versionMatcher = ivys.get(request).getSettings().getVersionMatcher();
WarningNonMatchingVersionReporter vReporter = new WarningNonMatchingVersionReporter();
CircularDependencyStrategy circularDependencyStrategy = WarnCircularDependencyStrategy
.getInstance();
ModuleDescriptorSorter sorter = new ModuleDescriptorSorter(inworkspaceModules.keySet(),
versionMatcher, vReporter, circularDependencyStrategy);
for (ModuleDescriptor module : sorter.sortModuleDescriptors()) {
List<ResolveRequest> requests = inworkspaceModules.get(module);
IvyDEMessage.info(requests.size() + " container(s) of module " + module
+ " to resolve in workspace");
for (ResolveRequest req : requests) {
boolean canceled = launchResolveThread(req, monitor, step, errorsStatus,
ivys.get(req), mds.get(req));
if (canceled) {
IvyDEMessage.warn("Resolve job canceled");
return Status.CANCEL_STATUS;
}
}
}
}
if (otherModules.isEmpty()) {
IvyDEMessage.verbose("No module to resolve outside the workspace");
} else {
IvyDEMessage.info(otherModules.size() + " module(s) to resolve outside the workspace");
for (ResolveRequest request : otherModules) {
boolean canceled = launchResolveThread(request, monitor, step, errorsStatus,
ivys.get(request), mds.get(request));
if (canceled) {
IvyDEMessage.warn("Resolve job canceled");
return Status.CANCEL_STATUS;
}
}
}
// Per-request tick size for the post-resolve phase.
step = POST_RESOLVE_LENGTH / toResolve.size();
monitor.setTaskName("Post resolve");
// launch every post batch resolve
for (ResolveRequest request : toResolve) {
if (!request.isResolveFailed()) {
monitor.setTaskName(request.getResolver().toString());
request.getResolver().postBatchResolve();
}
monitor.worked(step);
}
if (errorsStatus.getChildren().length != 0) {
// some errors happened, stop here
if (forceFailOnError || IvyPlugin.getPreferenceStoreHelper().isErrorPopup()) {
return errorsStatus;
}
return Status.OK_STATUS;
}
return Status.OK_STATUS;
}
// Runs one resolve on an Ivy worker thread, records the result as a problem
// marker, and folds errors into errorsStatus. Returns true if canceled.
private boolean launchResolveThread(ResolveRequest request, final IProgressMonitor monitor,
final int step, MultiStatus errorsStatus, final Ivy ivy, final ModuleDescriptor md) {
// Single-element array so the anonymous Runnable can write the result.
final IStatus[] status = new IStatus[1];
final IvyResolver resolver = request.getResolver();
Runnable resolveRunner = new Runnable() {
public void run() {
status[0] = resolver.resolve(ivy, md, monitor, step);
}
};
IvyRunner ivyRunner = new IvyRunner();
if (ivyRunner.launchIvyThread(resolveRunner, ivy, monitor)) {
// Launching was canceled before the resolve produced a status.
return true;
}
IvyMarkerManager ivyMarkerManager = IvyPlugin.getDefault().getIvyMarkerManager();
ivyMarkerManager.setResolveStatus(status[0], resolver.getProject(),
resolver.getIvyXmlPath());
switch (status[0].getCode()) {
case IStatus.CANCEL:
return true;
case IStatus.OK:
case IStatus.INFO:
IvyDEMessage.info("Successful resolve of " + request);
break;
case IStatus.ERROR:
IvyDEMessage.warn("Error on resolve of " + request + ": " + status[0].getMessage());
request.setResolveFailed(true);
errorsStatus.add(status[0]);
break;
default:
IvyPlugin.logWarn("Unknown IStatus: " + status[0].getCode());
}
return false;
}
}
| |
/* CertStore -- stores and retrieves certificates.
Copyright (C) 2003, 2004 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package java.security.cert;
import gnu.java.security.Engine;
import java.security.InvalidAlgorithmParameterException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivilegedAction;
import java.security.Provider;
import java.security.Security;
import java.util.Collection;
/**
* A CertStore is a read-only repository for certificates and
* certificate revocation lists.
*
* @since JDK 1.4
*/
public class CertStore
{

  // Constants and fields.
  // ------------------------------------------------------------------------

  /** Service name for CertStore. */
  private static final String CERT_STORE = "CertStore";

  /** The underlying implementation. */
  private CertStoreSpi storeSpi;

  /** This implementation's provider. */
  private Provider provider;

  /** The name of this cert store type. */
  private String type;

  /** The parameters used to initialize this instance, if any. */
  private CertStoreParameters params;

  // Constructor.
  // ------------------------------------------------------------------------

  /**
   * Create a new CertStore.
   *
   * @param storeSpi The underlying implementation.
   * @param provider The provider of this implementation.
   * @param type The type of CertStore this class represents.
   * @param params The parameters used to initialize this instance, if any.
   */
  protected CertStore(CertStoreSpi storeSpi, Provider provider, String type,
                      CertStoreParameters params)
  {
    this.storeSpi = storeSpi;
    this.provider = provider;
    this.type = type;
    this.params = params;
  }

  // Class methods.
  // ------------------------------------------------------------------------

  /**
   * Returns the default certificate store type.
   *
   * <p>This value can be set at run-time via the security property
   * "certstore.type"; if not specified then the default type will be
   * "LDAP".
   *
   * @return The default CertStore type.
   */
  public static final synchronized String getDefaultType()
  {
    // Reading a security property may require elevated privileges.
    String type = (String) java.security.AccessController.doPrivileged(
      new PrivilegedAction() {
        public Object run() {
          return Security.getProperty("certstore.type");
        }
      }
    );
    if (type == null)
      type = "LDAP";
    return type;
  }

  /**
   * Get an instance of the given certificate store from the first
   * installed provider that supports it.
   *
   * @param type The type of CertStore to create.
   * @param params The parameters to initialize this cert store with.
   * @return The new instance.
   * @throws InvalidAlgorithmParameterException If the instance rejects
   *         the specified parameters.
   * @throws NoSuchAlgorithmException If no installed provider
   *         implements the specified CertStore.
   */
  public static CertStore getInstance(String type, CertStoreParameters params)
    throws InvalidAlgorithmParameterException, NoSuchAlgorithmException
  {
    Provider[] p = Security.getProviders();
    // Remember the last provider failure so the caller sees a meaningful
    // exception instead of a bare type name when every provider fails.
    NoSuchAlgorithmException lastException = null;
    for (int i = 0; i < p.length; i++)
      {
        try
          {
            return getInstance(type, params, p[i]);
          }
        catch (NoSuchAlgorithmException e)
          {
            lastException = e;
          }
      }
    if (lastException != null)
      throw lastException;
    throw new NoSuchAlgorithmException(type);
  }

  /**
   * Get an instance of the given certificate store from the named
   * provider.
   *
   * @param type The type of CertStore to create.
   * @param params The parameters to initialize this cert store with.
   * @param provider The name of the provider from which to get the
   *        implementation.
   * @return The new instance.
   * @throws InvalidAlgorithmParameterException If the instance rejects
   *         the specified parameters.
   * @throws NoSuchAlgorithmException If the specified provider does not
   *         implement the specified CertStore.
   * @throws NoSuchProviderException If no provider named
   *         <i>provider</i> is installed.
   * @throws IllegalArgumentException If <i>provider</i> is null.
   */
  public static CertStore getInstance(String type, CertStoreParameters params,
                                      String provider)
    throws InvalidAlgorithmParameterException, NoSuchAlgorithmException,
           NoSuchProviderException
  {
    Provider p = Security.getProvider(provider);
    if (p == null)
      throw new NoSuchProviderException(provider);
    return getInstance(type, params, p);
  }

  /**
   * Get an instance of the given certificate store from the given
   * provider.
   *
   * @param type The type of CertStore to create.
   * @param params The parameters to initialize this cert store with.
   * @param provider The provider from which to get the implementation.
   * @return The new instance.
   * @throws InvalidAlgorithmParameterException If the instance rejects
   *         the specified parameters.
   * @throws NoSuchAlgorithmException If the specified provider does not
   *         implement the specified CertStore.
   * @throws IllegalArgumentException If <i>provider</i> is null.
   */
  public static CertStore getInstance(String type, CertStoreParameters params,
                                      Provider provider)
    throws InvalidAlgorithmParameterException, NoSuchAlgorithmException
  {
    if (provider == null)
      throw new IllegalArgumentException("null provider");
    try
      {
        return new CertStore((CertStoreSpi) Engine.getInstance(CERT_STORE,
          type, provider, new Object[] { params }), provider, type, params);
      }
    catch (ClassCastException cce)
      {
        // The SPI was of the wrong type; preserve the cause for diagnosis.
        NoSuchAlgorithmException nsae = new NoSuchAlgorithmException(type);
        nsae.initCause(cce);
        throw nsae;
      }
    catch (java.lang.reflect.InvocationTargetException ite)
      {
        Throwable cause = ite.getCause();
        if (cause instanceof InvalidAlgorithmParameterException)
          throw (InvalidAlgorithmParameterException) cause;
        // Chain the real failure instead of discarding it.
        NoSuchAlgorithmException nsae = new NoSuchAlgorithmException(type);
        nsae.initCause(cause != null ? cause : ite);
        throw nsae;
      }
  }

  // Instance methods.
  // ------------------------------------------------------------------------

  /**
   * Return the type of certificate store this instance represents.
   *
   * @return The CertStore type.
   */
  public final String getType()
  {
    return type;
  }

  /**
   * Return the provider of this implementation.
   *
   * @return The provider.
   */
  public final Provider getProvider()
  {
    return provider;
  }

  /**
   * Get the parameters this instance was created with, if any. The
   * parameters will be cloned before they are returned.
   *
   * @return The parameters, or null.
   */
  public final CertStoreParameters getCertStoreParameters()
  {
    return params != null ? (CertStoreParameters) params.clone() : null;
  }

  /**
   * Get a collection of certificates from this CertStore, optionally
   * filtered by the specified CertSelector. The Collection returned may
   * be empty, but will never be null.
   *
   * <p>Implementations may not allow a null argument, even if no
   * filtering is desired.
   *
   * @param selector The certificate selector.
   * @return The collection of certificates.
   * @throws CertStoreException If the certificates cannot be retrieved.
   */
  public final Collection getCertificates(CertSelector selector)
    throws CertStoreException
  {
    return storeSpi.engineGetCertificates(selector);
  }

  /**
   * Get a collection of certificate revocation lists from this CertStore,
   * optionally filtered by the specified CRLSelector. The Collection
   * returned may be empty, but will never be null.
   *
   * <p>Implementations may not allow a null argument, even if no
   * filtering is desired.
   *
   * @param selector The CRL selector.
   * @return The collection of certificate revocation lists.
   * @throws CertStoreException If the CRLs cannot be retrieved.
   */
  public final Collection getCRLs(CRLSelector selector)
    throws CertStoreException
  {
    return storeSpi.engineGetCRLs(selector);
  }
}
| |
package fr.univnantes.ttw.test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.tuple;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.cas.FSIterator;
import org.apache.uima.cas.Type;
import org.apache.uima.fit.factory.AnalysisEngineFactory;
import org.apache.uima.fit.factory.ExternalResourceFactory;
import org.apache.uima.fit.factory.JCasFactory;
import org.apache.uima.fit.pipeline.SimplePipeline;
import org.apache.uima.jcas.JCas;
import org.apache.uima.jcas.tcas.Annotation;
import org.apache.uima.resource.ResourceInitializationException;
import org.apache.uima.util.InvalidXMLException;
import org.assertj.core.api.iterable.Extractor;
import org.assertj.core.groups.Tuple;
import org.junit.Before;
import org.junit.Test;
import fr.univnantes.lina.uima.engines.TreeTaggerWrapper;
import fr.univnantes.lina.uima.models.TreeTaggerParameter;
/**
 * Integration tests for {@link TreeTaggerWrapper}: runs the TreeTagger UIMA
 * analysis engine over small fixture texts and verifies the produced
 * (begin, end, lemma, tag) tuples, both with and without the -sgml option.
 *
 * <p>Requires a local TreeTagger installation whose path is given by the
 * {@code tt.home} property in {@code tree-tagger.properties} on the classpath.
 */
public class TreeTaggerWrapperSpec {

	private static final String WORD_ANNOTATION_TYPE = "fr.univnantes.ttw.types.WordAnnotation";

	public static final String PROPERTY_FILE_NAME = "tree-tagger.properties";
	public static final String P_TREE_TAGGER_HOME = "tt.home";
	public static final String TREE_TAGGER_CONFIG_FILE_URL = "file:english-treetagger.xml";
	public static final String TEXT_FILE1 = "/fr/univnantes/ttw/test/fixtures/text1.txt";
	public static final String TEXT_FILE2 = "/fr/univnantes/ttw/test/fixtures/text2.txt";
	public static final String TEXT_FILE3 = "/fr/univnantes/ttw/test/fixtures/text3.txt";
	public static final String TEXT_FILE4 = "/fr/univnantes/ttw/test/fixtures/text4.txt";

	// Engine descriptions with and without SGML-tag handling.
	AnalysisEngineDescription ttwAESWWithSGML;
	AnalysisEngineDescription ttwAESWNoSGML;
	// One pre-tokenized CAS per fixture text.
	JCas cas1;
	JCas cas2;
	JCas cas3;
	JCas cas4;

	/**
	 * Loads the TreeTagger location from the property file, builds the two
	 * engine descriptions and fills one CAS per fixture text.
	 */
	@Before
	public void setUp() throws Exception {
		Properties p = new Properties();
		InputStream is = this.getClass().getResourceAsStream("/" + PROPERTY_FILE_NAME);
		if (is == null)
			fail(String.format("Property file %s not found.", PROPERTY_FILE_NAME));
		try {
			p.load(is);
		} finally {
			// Close the stream; it was previously leaked.
			is.close();
		}
		String ttHome = p.getProperty(P_TREE_TAGGER_HOME);
		if (ttHome == null)
			fail(String.format("Property %s not found in file %s.", P_TREE_TAGGER_HOME, PROPERTY_FILE_NAME));
		ttwAESWWithSGML = initAE(ttHome, "-quiet -no-unknown -sgml -token -lemma");
		ttwAESWNoSGML = initAE(ttHome, "-quiet -no-unknown -token -lemma");
		cas1 = JCasFactory.createJCas();
		fillCas(cas1, TEXT_FILE1);
		cas2 = JCasFactory.createJCas();
		fillCas(cas2, TEXT_FILE2);
		cas3 = JCasFactory.createJCas();
		fillCas(cas3, TEXT_FILE3);
		cas4 = JCasFactory.createJCas();
		fillCas(cas4, TEXT_FILE4);
	}

	/**
	 * Builds a TreeTaggerWrapper engine description bound to the English
	 * TreeTagger parameter resource.
	 *
	 * @param ttHome TreeTagger installation directory
	 * @param argumentsAsString command-line arguments passed to TreeTagger
	 * @return the configured analysis engine description
	 */
	private AnalysisEngineDescription initAE(String ttHome, String argumentsAsString) throws ResourceInitializationException, InvalidXMLException {
		AnalysisEngineDescription ae = AnalysisEngineFactory.createEngineDescription(TreeTaggerWrapper.class,
				TreeTaggerWrapper.PARAM_ANNOTATION_TYPE, WORD_ANNOTATION_TYPE,
				TreeTaggerWrapper.PARAM_TT_ARGUMENTS, argumentsAsString,
				TreeTaggerWrapper.PARAM_TAG_FEATURE, "tag", TreeTaggerWrapper.PARAM_LEMMA_FEATURE, "lemma",
				TreeTaggerWrapper.PARAM_UPDATE_ANNOTATION_FEATURES, true, TreeTaggerWrapper.PARAM_TT_HOME_DIRECTORY,
				ttHome);
		ExternalResourceFactory.createDependencyAndBind(ae, TreeTaggerParameter.KEY_TT_PARAMETER,
				TreeTaggerParameter.class, TREE_TAGGER_CONFIG_FILE_URL);
		return ae;
	}

	/**
	 * Sets the document text from the given classpath resource and creates one
	 * WordAnnotation per whitespace-separated token.
	 */
	private void fillCas(JCas cas, String textFile) throws IOException, URISyntaxException {
		String theText = readFile(textFile, Charset.forName("UTF-8"));
		cas.setDocumentText(theText);
		StringTokenizer st = new StringTokenizer(theText, " ");
		int offset = 0;
		Type wordAnnoType = cas.getTypeSystem().getType(WORD_ANNOTATION_TYPE);
		while (st.hasMoreTokens()) {
			String token = st.nextToken();
			Annotation a = (Annotation) cas.getCas().createAnnotation(
					wordAnnoType,
					offset,
					offset + token.length());
			a.addToIndexes();
			// +1 skips the single-space separator assumed by the tokenizer.
			offset += token.length() + 1;
		}
	}

	/**
	 * Reads a classpath resource fully into a String.
	 *
	 * @param path classpath-relative resource path
	 * @param encoding charset to decode the resource with (previously ignored:
	 *        the Scanner used the platform default charset)
	 * @return the resource content, or "" if the resource is empty
	 * @throws IOException if the resource does not exist
	 */
	private static String readFile(String path, Charset encoding) throws IOException, URISyntaxException {
		InputStream is = TreeTaggerWrapper.class.getResourceAsStream(path);
		if (is == null)
			throw new IOException("Resource not found: " + path);
		// Honor the requested charset instead of the platform default.
		java.util.Scanner scanner = new java.util.Scanner(is, encoding.name());
		try {
			// "\\A" delimiter makes the scanner return the whole stream at once.
			java.util.Scanner s = scanner.useDelimiter("\\A");
			return s.hasNext() ? s.next() : "";
		} finally {
			// Closing the scanner also closes the underlying stream.
			scanner.close();
		}
	}

	/** Returns all WordAnnotation instances of the CAS in index order. */
	private List<Annotation> wordAnnotations(JCas cas) {
		List<Annotation> list = new ArrayList<Annotation>();
		FSIterator<Annotation> it = cas.getAnnotationIndex().iterator();
		while (it.hasNext()) {
			Annotation annotation = it.next();
			if (annotation.getType().getName().equals(WORD_ANNOTATION_TYPE))
				list.add(annotation);
		}
		return list;
	}

	/** Projects an annotation onto a (begin, end, lemma, tag) tuple for assertions. */
	Extractor<Annotation, Tuple> WORD_ANNOTATION_TUPLE_EXTRACTOR = new Extractor<Annotation, Tuple>() {
		@Override
		public Tuple extract(Annotation input) {
			return new Tuple(
					input.getBegin(),
					input.getEnd(),
					input.getStringValue(input.getType().getFeatureByBaseName("lemma")),
					input.getStringValue(input.getType().getFeatureByBaseName("tag")));
		}
	};

	@Test
	public void testCas1WithSgml() throws Exception {
		SimplePipeline.runPipeline(cas1, ttwAESWWithSGML);
		assertThat(wordAnnotations(cas1)).extracting(WORD_ANNOTATION_TUPLE_EXTRACTOR).containsExactly(
				tuple(0,4,"this", "DT"),
				tuple(5,7,"be", "VBZ"),
				tuple(8,9,"a", "DT"),
				tuple(10,14,"text", "NN"),
				tuple(15,22,"without", "IN"),
				tuple(23,26,"any", "DT"),
				tuple(27,34,"special", "JJ"),
				tuple(35,44,"character", "NN"),
				tuple(45,46,".", "SENT")
				);
	}

	/*
	 * Tests whether an opening chevron "<" causes tt4j to interpret it
	 * as an opening sgml tag or not.
	 */
	@Test
	public void testCas2WithSgml() throws Exception {
		SimplePipeline.runPipeline(cas2, ttwAESWWithSGML);
		assertThat(wordAnnotations(cas2)).extracting(WORD_ANNOTATION_TUPLE_EXTRACTOR).containsExactly(
				tuple(0,4,"this", "DT"),
				tuple(5,7,"be", "VBZ"),
				tuple(8,9,"a", "DT"),
				tuple(10,14,"text", "NN"),
				tuple(15,19,"with", "IN"),
				tuple(20,21,"a", "DT"),
				tuple(22,29,"formula", "NN"),
				tuple(30,31,":", ":"),
				tuple(32,33,"a", "DT"),
				tuple(34,35,"<", "SYM"),
				tuple(36,37,"b", "NN"),
				tuple(38,39,",", ","),
				tuple(40,43,"and", "CC"),
				tuple(44,48,"some", "DT"),
				tuple(49,53,"more", "JJR"),
				tuple(54,65,"description", "NN"),
				tuple(66,71,"after", "IN"),
				tuple(72,75,"the", "DT"),
				tuple(76,83,"formula", "NN"),
				tuple(84,85,".", "SENT")
				);
	}

	@Test
	public void testCas3WithSgml() throws Exception {
		SimplePipeline.runPipeline(cas3, ttwAESWWithSGML);
		assertThat(wordAnnotations(cas3)).extracting(WORD_ANNOTATION_TUPLE_EXTRACTOR).containsExactly(
				tuple(0,4,"this", "DT"),
				tuple(5,7,"be", "VBZ"),
				tuple(8,9,"a", "DT"),
				tuple(10,14,"text", "NN"),
				tuple(15,19,"with", "IN"),
				tuple(20,21,"a", "DT"),
				tuple(22,29,"formula", "NN"),
				tuple(30,31,":", ":"),
				tuple(32,33,"a", "DT"),
				tuple(34,35,"<", "SYM"),
				tuple(36,37,"b", "NN"),
				tuple(38,39,",", ","),
				tuple(40,47,"another", "DT"),
				tuple(48,56,"equation", "NN"),
				tuple(57,58,",", ","),
				tuple(59,60,"b", "LS"),
				tuple(61,62,">", "SYM"),
				tuple(63,64,"a", "DT"),
				tuple(65,66,",", ","),
				tuple(67,70,"and", "CC"),
				tuple(71,75,"some", "DT"),
				tuple(76,80,"more", "JJR"),
				tuple(81,92,"description", "NN"),
				tuple(93,98,"after", "IN"),
				tuple(99,102,"the", "DT"),
				tuple(103,110,"formula", "NN"),
				tuple(111,112,".", "SENT")
				);
	}

	@Test
	public void testCas4WithSgml() throws Exception {
		SimplePipeline.runPipeline(cas4, ttwAESWWithSGML);
		assertThat(wordAnnotations(cas4)).extracting(WORD_ANNOTATION_TUPLE_EXTRACTOR).containsExactly(
				tuple(0,4,"this", "DT"),
				tuple(5,7,"be", "VBZ"),
				tuple(8,9,"a", "DT"),
				tuple(10,14,"text", "NN"),
				tuple(15,19,"with", "IN"),
				tuple(20,21,"a", "DT"),
				tuple(22,29,"formula", "NN"),
				tuple(30,31,":", ":"),
				tuple(32,34,"aa", "NN"),
				tuple(35,37,"<b", "NN"),
				tuple(38,39,",", ","),
				tuple(40,47,"another", "DT"),
				tuple(48,56,"equation", "NN"),
				tuple(57,58,",", ","),
				tuple(59,60,"b", "LS"),
				tuple(61,62,">", "SYM"),
				tuple(63,64,"a", "DT"),
				tuple(65,66,",", ","),
				tuple(67,70,"and", "CC"),
				tuple(71,75,"some", "DT"),
				tuple(76,80,"more", "JJR"),
				tuple(81,92,"description", "NN"),
				tuple(93,98,"after", "IN"),
				tuple(99,102,"the", "DT"),
				tuple(103,110,"formula", "NN"),
				tuple(111,112,".", "SENT")
				);
	}

	@Test
	public void testCas4NoSgml() throws Exception {
		SimplePipeline.runPipeline(cas4, ttwAESWNoSGML);
		assertThat(wordAnnotations(cas4)).extracting(WORD_ANNOTATION_TUPLE_EXTRACTOR).containsExactly(
				tuple(0,4,"this", "DT"),
				tuple(5,7,"be", "VBZ"),
				tuple(8,9,"a", "DT"),
				tuple(10,14,"text", "NN"),
				tuple(15,19,"with", "IN"),
				tuple(20,21,"a", "DT"),
				tuple(22,29,"formula", "NN"),
				tuple(30,31,":", ":"),
				tuple(32,34,"aa", "NN"),
				tuple(35,37,"<b", "NN"),
				tuple(38,39,",", ","),
				tuple(40,47,"another", "DT"),
				tuple(48,56,"equation", "NN"),
				tuple(57,58,",", ","),
				tuple(59,60,"b", "LS"),
				tuple(61,62,">", "SYM"),
				tuple(63,64,"a", "DT"),
				tuple(65,66,",", ","),
				tuple(67,70,"and", "CC"),
				tuple(71,75,"some", "DT"),
				tuple(76,80,"more", "JJR"),
				tuple(81,92,"description", "NN"),
				tuple(93,98,"after", "IN"),
				tuple(99,102,"the", "DT"),
				tuple(103,110,"formula", "NN"),
				tuple(111,112,".", "SENT")
				);
	}

	@Test
	public void testCas3NoSgml() throws Exception {
		SimplePipeline.runPipeline(cas3, ttwAESWNoSGML);
		assertThat(wordAnnotations(cas3)).extracting(WORD_ANNOTATION_TUPLE_EXTRACTOR).containsExactly(
				tuple(0,4,"this", "DT"),
				tuple(5,7,"be", "VBZ"),
				tuple(8,9,"a", "DT"),
				tuple(10,14,"text", "NN"),
				tuple(15,19,"with", "IN"),
				tuple(20,21,"a", "DT"),
				tuple(22,29,"formula", "NN"),
				tuple(30,31,":", ":"),
				tuple(32,33,"a", "DT"),
				tuple(34,35,"<", "SYM"),
				tuple(36,37,"b", "NN"),
				tuple(38,39,",", ","),
				tuple(40,47,"another", "DT"),
				tuple(48,56,"equation", "NN"),
				tuple(57,58,",", ","),
				tuple(59,60,"b", "LS"),
				tuple(61,62,">", "SYM"),
				tuple(63,64,"a", "DT"),
				tuple(65,66,",", ","),
				tuple(67,70,"and", "CC"),
				tuple(71,75,"some", "DT"),
				tuple(76,80,"more", "JJR"),
				tuple(81,92,"description", "NN"),
				tuple(93,98,"after", "IN"),
				tuple(99,102,"the", "DT"),
				tuple(103,110,"formula", "NN"),
				tuple(111,112,".", "SENT")
				);
	}
}
| |
package database.model;
import database.sql.DocumentViewLogger;
import database.xml.client.BaseXClient;
import org.jetbrains.annotations.Nullable;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import util.ServerConfiguration;
import util.XSLTTransformer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.UUID;
/**
 * Model object for a TEI document stored in a BaseX XML database, with a
 * relational side-table (DOCUMENT_EVENTS) for view statistics.
 */
public class TeiDocument {
    private String title;
    private String date;
    private int id;
    private String fileName;
    private String xmlData = "";

    // javax.xml.transform.Transformer instances are NOT thread-safe, so do not
    // share one statically; keep the (thread-safe-for-newTransformer) factory
    // instead and create a Transformer per use.
    private static final TransformerFactory TRANSFORMER_FACTORY = TransformerFactory.newInstance();

    /**
     * Loads the document with the given BaseX node id.
     *
     * @throws RuntimeException if no unique document with that id exists
     */
    public static TeiDocument fromId(int documentId) throws IOException {
        SearchChain sc = new SearchChain();
        sc.addSearch(new Search("id", String.format("%d", documentId)));
        List<TeiDocument> documents = sc.executeSearch();
        if (documents.size() == 1) {
            return documents.get(0);
        }
        throw new RuntimeException("Could not find document " + documentId);
    }

    /**
     * Builds a TeiDocument from the next search result of the given query, or
     * returns null when the query has no more results.
     *
     * <p>The result is expected to be an XML fragment with {@code id},
     * {@code title}, {@code date}, {@code filename} and {@code xmldata}
     * elements.
     */
    @Nullable static TeiDocument fromSearchResults(BaseXClient.Query q) throws IOException {
        if (q.more()) {
            Document document;
            try {
                DocumentBuilder documentBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
                // NOTE(review): q.next().getBytes() uses the platform default
                // charset -- confirm the BaseX client returns platform-encoded
                // text, otherwise UTF-8 should be specified here.
                document = documentBuilder.parse(new ByteArrayInputStream(q.next().getBytes()));
            } catch (ParserConfigurationException | SAXException e) {
                // Chain the cause instead of printStackTrace + bare rethrow.
                throw new RuntimeException("Could not get document builder", e);
            }
            String id = document.getElementsByTagName("id").item(0).getTextContent();
            String title = document.getElementsByTagName("title").item(0).getTextContent();
            String date = document.getElementsByTagName("date").item(0).getTextContent();
            String filename = document.getElementsByTagName("filename").item(0).getTextContent();
            DOMSource source = new DOMSource(document.getElementsByTagName("xmldata").item(0).getChildNodes().item(1));
            StringWriter stringWriter = new StringWriter();
            StreamResult result = new StreamResult(stringWriter);
            try {
                // Fresh Transformer per call: Transformer is not thread-safe.
                TRANSFORMER_FACTORY.newTransformer().transform(source, result);
            } catch (TransformerException e) {
                throw new RuntimeException("Could not generate XML", e);
            }
            String xmlData = stringWriter.toString();
            return new TeiDocument(title, date, Integer.parseInt(id), filename, xmlData);
        }
        return null;
    }

    /**
     * Replaces the stored TEI node of this document with the current
     * {@link #getXmlData()} content.
     */
    public void update() throws IOException {
        BaseXClient client = BaseXClient.getClient();
        try {
            // SECURITY/robustness: the XQuery is assembled by string formatting;
            // xmlData is injected verbatim into the query. Safe only as long as
            // xmlData is well-formed XML from a trusted source.
            String query = "for $x in collection() where db:node-id($x) = %d return replace node $x//TEI with %s";
            query = String.format(query, getId(), getXmlData().replace("<?xml version=\"1.0\" encoding=\"UTF-8\"?>", ""));
            //I know the replace is crap but time constraints.
            //TODO: remove replace and parse the document properly.
            //Weird bug with BaseX?? Have to enumerate query object to ensure the update happens.
            BaseXClient.Query result = client.query(query);
            while (result.more()) {
                result.next();
            }
        } finally {
            // Close the client even when the query fails (previously leaked).
            client.close();
        }
    }

    private TeiDocument(String title, String date, int id, String fileName, String xmlData) {
        this.title = title;
        this.date = date;
        this.id = id;
        this.fileName = fileName;
        this.xmlData = xmlData;
    }

    /**
     * Adds a {@code <note><name type="tagType">tagValue</name></note>} element
     * to this document's correspDesc, unless an identical tag already exists.
     */
    public void tag(String tagType, String tagValue) throws IOException {
        String query = "for $x in collection()\n" +
                "where db:node-id($x) = %d\n" +
                "where not ($x//correspDesc/note/name[@type=\"%s\"]/text() = \"%s\")\n" +
                "return insert node <note><name type=\"%s\">%s</name></note> into $x//correspDesc";
        query = String.format(query, getId(), tagType, tagValue, tagType, tagValue);
        BaseXClient client = BaseXClient.getClient();
        try {
            BaseXClient.Query result = client.query(query);
            try {
                while (result.more()) {
                    result.next();
                }
            } catch (IOException e) {
                // Best-effort tagging: keep the original swallow-and-log
                // behavior rather than failing the caller.
                e.printStackTrace();
            }
        } finally {
            client.close();
        }
    }

    /**
     * Stores the given XML under a random path and returns the freshly loaded
     * document, or null when {@code xml} is empty or the id lookup fails.
     */
    public static TeiDocument insertFromXml(String xml, String fileName) throws IOException {
        BaseXClient client = BaseXClient.getClient();
        try {
            String path = UUID.randomUUID().toString() + "/" + fileName;
            System.out.println(client.preparedCommand("ADD to %s %s", path, xml));
            TeiDocument document = null;
            if (!xml.equals("")) {
                BaseXClient.Query q = client.preparedQuery("db:node-id(db:open(\"%s\", \"%s\"))", ServerConfiguration.getConfigurationString("database", "name"), path);
                String documentId = q.next();
                if (documentId != null) {
                    document = fromId(Integer.parseInt(documentId));
                }
            }
            return document;
        } finally {
            client.close();
        }
    }

    private TeiDocument() {}

    public String getTitle() {
        return title;
    }

    /** Renders this document as HTML via the project XSLT stylesheet. */
    public String renderHTML() throws IOException, TransformerException {
        ByteArrayOutputStream string = new ByteArrayOutputStream();
        XSLTTransformer.transform(getXmlData(), string);
        return string.toString("utf-8");
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getDate() {
        return date;
    }

    public void setDate(String date) {
        this.date = date;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getFileName() {
        return fileName;
    }

    public String getXmlData() {
        return xmlData;
    }

    public void setXmlData(String xmlData) {
        this.xmlData = xmlData;
    }

    /**
     * Counts VIEW events recorded for this document.
     *
     * @param c an open SQL connection (not closed by this method)
     * @return the number of recorded views, 0 if none
     */
    public int getViewCount(Connection c) throws SQLException {
        // try-with-resources closes statement and result set (previously leaked).
        try (PreparedStatement statement = c.prepareStatement(
                "SELECT count(*) AS view_count FROM DOCUMENT_EVENTS WHERE DOCUMENT_ID = ? AND EVENT_TYPE = ?")) {
            statement.setInt(1, getId());
            statement.setInt(2, DocumentViewLogger.DocumentEventType.VIEW.DB_TYPE);
            try (ResultSet result = statement.executeQuery()) {
                if (result == null || !result.next()) {
                    return 0;
                }
                return result.getInt("view_count");
            }
        }
    }
}
| |
package es.uniovi.asw.controller;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.springframework.web.servlet.ModelAndView;
import es.uniovi.asw.factories.Factories;
import es.uniovi.asw.model.ColegioElectoral;
import es.uniovi.asw.model.Opcion;
import es.uniovi.asw.model.Usuario;
import es.uniovi.asw.model.Votacion;
import es.uniovi.asw.model.Votado;
import es.uniovi.asw.model.Voto;
import es.uniovi.asw.presentacion.BeanColegioElectoral;
import es.uniovi.asw.presentacion.BeanOpcion;
import es.uniovi.asw.presentacion.BeanUsuarios;
import es.uniovi.asw.presentacion.BeanVotacion;
import es.uniovi.asw.presentacion.BeanVotaciones;
@Controller
@SessionAttributes("vot")
public class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
private BeanVotacion votacion= new BeanVotacion();
private BeanUsuarios usuario = new BeanUsuarios();
private BeanVotaciones votaciones= new BeanVotaciones();
private BeanOpcion opcion= new BeanOpcion();
private BeanColegioElectoral colegio= new BeanColegioElectoral();
private Usuario user;
@RequestMapping("/inicio")
public ModelAndView index(Model model) {
LOG.info("Index page access");
model.addAttribute("vot", usuario);
return new ModelAndView("index");
}
@RequestMapping(value="/goVotaciones",method= RequestMethod.GET)
public ModelAndView votaciones(@RequestParam(name = "u") Integer u, Model model) {
LOG.info("Votaciones page access");
user= Factories.service.createUsuarioService().findById(u);
if(user==null){
return new ModelAndView("errorInicio");//voy a error si el id de usuario no esta en la bd
}
this.usuario.setIdUsuario(usuario.getIdUsuario());
List <Votacion> listaVotaciones=new ArrayList<Votacion>();
listaVotaciones= Factories.service.createVotacionService().listadoVotaciones();
model.addAttribute("votaciones", listaVotaciones);
model.addAttribute("vot", votaciones);
return new ModelAndView("votaciones");
}
@RequestMapping(value="/irVotaciones",method= RequestMethod.POST)
public ModelAndView votaciones(BeanUsuarios usuario,Model model) {
LOG.info("Votaciones page access");
user= Factories.service.createUsuarioService().findById(Integer.valueOf(usuario.getIdUsuario()));
if(user==null){
return new ModelAndView("errorInicio");//voy a error si el id de usuario no esta en la bd
}
this.usuario.setIdUsuario(usuario.getIdUsuario());
List <Votacion> listaVotaciones=new ArrayList<Votacion>();
listaVotaciones= Factories.service.createVotacionService().listadoVotaciones();
model.addAttribute("votaciones", listaVotaciones);
model.addAttribute("vot", votaciones);
return new ModelAndView("votaciones");
}
@RequestMapping(value="/irVotaciones",method= RequestMethod.GET)
public ModelAndView votaciones(Model model) {
LOG.info("Votaciones page access");
System.out.println(this.usuario);
user= Factories.service.createUsuarioService().findById(Integer.valueOf(usuario.getIdUsuario()));
if(user==null){
return new ModelAndView("errorInicio");//voy a error si el id de usuario no esta en la bd
}
this.usuario.setIdUsuario(usuario.getIdUsuario());
List <Votacion> listaVotaciones=new ArrayList<Votacion>();
listaVotaciones= Factories.service.createVotacionService().listadoVotaciones();
model.addAttribute("votaciones", listaVotaciones);
model.addAttribute("vot", votaciones);
return new ModelAndView("votaciones");
}
@RequestMapping(value="/votacion",method= RequestMethod.POST)
public ModelAndView opciones(BeanVotaciones votaciones,Model model) {
LOG.info("Votacion page access");
boolean votado = Factories.service.createVotadoService()
.haVotado((long)user.getId(), Long.valueOf(votaciones.getIdVotacion()));
if(!votado){
List<Opcion>listaOpciones = new ArrayList<Opcion>();
Factories.service.createOpcionService()
.listadoOpciones(Long.valueOf(votaciones.getIdVotacion()));
Votacion v =Factories.service.createVotacionService()
.getTipoVotacion(Long.valueOf(votaciones.getIdVotacion()));
//Usuario u = Factories.service.createUsuarioService().findByNif(usuario)
//System.out.println(v);
if(v!=null){
listaOpciones=Factories.service.createOpcionService()
.listadoOpciones(v.getId());
this.votaciones.setIdVotacion(votaciones.getIdVotacion());
model.addAttribute("opciones", listaOpciones);
model.addAttribute("vot", opcion);
return new ModelAndView("votacion");
}
return new ModelAndView("errorEleccion");
}
return new ModelAndView("errorYaVotado");
}
@RequestMapping(value="/guardarVoto",method= RequestMethod.POST)
public ModelAndView votar(BeanOpcion opcion,Model model) {
LOG.info("Votacion page access");
Opcion op = Factories.service.createOpcionService()
.findById(Long.valueOf(opcion.getIdOpcion()));
Long idVotacon = Long.valueOf(votaciones.getIdVotacion());
if(op!=null && op.getIdVotacion().equals(idVotacon)){
Voto v =Factories.service.createVotoService().findById(op.getId(), (long)user.getCodColElectoral());
if(v==null){
v= new Voto();
v.setIdOpcion(op.getId());
v.setIdColElect((long)user.getCodColElectoral());
//v.setTotVotos((long)1);
Factories.service.createVotoService().saveVoto(v);
}else{
//Factories.service.createVotoService().incrementarVoto(v);
Factories.service.createVotoService().saveVoto(v);
}
Votado votado= new Votado(idVotacon, (long)user.getId());
Factories.service.createVotadoService().votado(votado);
return new ModelAndView("votar");
}
return new ModelAndView("errorEleccion");
}
@RequestMapping(value="/admin")
public ModelAndView admin(Model model) {
LOG.info("Admin page access");
model.addAttribute("vot", votacion);
return new ModelAndView("admin");
}
@RequestMapping(value="/colegio")
public ModelAndView colegio(Model model) {
LOG.info("Colegio page access");
model.addAttribute("vot", colegio);
return new ModelAndView("votosColegioElectoral");
}
@RequestMapping(value = "/crearVotacion", method = RequestMethod.POST)
public ModelAndView guardarVotacion(BeanVotacion votacion, Model model) {
    LOG.info("CrearVotacion page access");
    SimpleDateFormat formato = new SimpleDateFormat("dd/MM/yyyy");
    // Bug fix: with the default lenient parsing, impossible dates such as
    // 32/13/2020 were silently rolled over instead of rejected.
    formato.setLenient(false);
    Date fechaDateFin;
    Date fechaDateInicio;
    try {
        fechaDateFin = formato.parse(votacion.getFechaFin());
        fechaDateInicio = formato.parse(votacion.getFechaInicio());
    } catch (ParseException e) {
        // Unparseable dates are a user error, not a server error.
        return new ModelAndView("errorCreacionVotacion");
    }
    if (fechaDateInicio.after(fechaDateFin)) {
        // A voting cannot end before it starts.
        return new ModelAndView("errorCreacionVotacion");
    }
    Votacion vot = new Votacion();
    vot.setDefinicion(votacion.getDescripcion());
    vot.setFechaFin(fechaDateFin);
    vot.setFechaInicio(fechaDateInicio);
    Factories.service.createVotacionService().save(vot);
    // NOTE(review): the id of the freshly saved row is recovered by listing all
    // votaciones and taking the last one; this is racy under concurrent creation.
    // A proper fix needs save() to return the persisted entity - confirm with the
    // service API before changing.
    List<Votacion> lst = Factories.service.createVotacionService().listadoVotaciones();
    vot = lst.get(lst.size() - 1);
    LOG.info("Created votacion: " + vot); // was System.out.println
    // Options arrive as a single ';'-separated string; skip blank entries so an
    // empty or trailing separator does not create an empty option.
    for (String descripcion : votacion.getOpciones().split(";")) {
        if (descripcion.trim().isEmpty()) {
            continue;
        }
        Opcion opc = new Opcion();
        opc.setDescripcion(descripcion);
        opc.setIdVotacion(vot.getId());
        Factories.persistence.createOpcionDao().save(opc);
    }
    return new ModelAndView("votacionCreada");
}
@RequestMapping(value = "/guardarColegio", method = RequestMethod.POST)
public ModelAndView votosColegio(BeanColegioElectoral col, Model model) {
    LOG.info("Votacion page access");
    model.addAttribute("vot", usuario);
    // Was three System.out.println debug lines; route through the logger instead.
    LOG.info("guardarColegio: colegio=" + col.getIdColegio()
            + " opcion=" + col.getIdOpcion()
            + " votos=" + col.getNumVotos());
    Opcion op = Factories.service.createOpcionService()
            .findById(Long.valueOf(col.getIdOpcion()));
    ColegioElectoral c = Factories.persistence.createColegioElectoralDao()
            .findById(Long.valueOf(col.getIdColegio()));
    // Physical votes can only be registered once per college (votoFisico flag).
    if (op == null || c == null || c.isVotoFisico()) {
        return new ModelAndView("errorColegioElectoral");
    }
    Voto v = Factories.service.createVotoService().findById(op.getId(), (long) c.getId());
    if (v == null) {
        // No Voto row yet for this option/college pair; create it once instead of
        // re-checking for null on every loop iteration (it could only be null the
        // first time through).
        v = new Voto();
        v.setIdOpcion(op.getId());
        v.setIdColElect((long) c.getId());
    }
    long tot = Long.valueOf(col.getNumVotos());
    // One saveVoto call per physical vote reported, matching the original tally
    // behavior of the service.
    for (long i = 0; i < tot; i++) {
        Factories.service.createVotoService().saveVoto(v);
    }
    c.setVotoFisico(true);
    Factories.service.createColegioElectoralService().update(c);
    return new ModelAndView("index");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.security.user;
import java.util.HashMap;
import javax.annotation.CheckForNull;
import javax.jcr.Node;
import javax.jcr.Property;
import javax.jcr.Session;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Testing user import with default {@link org.apache.jackrabbit.oak.spi.xml.ImportBehavior}
* and pw-expiry content
*
* @see <a href="https://issues.apache.org/jira/browse/OAK-1922">OAK-1922</a>
* @see <a href="https://issues.apache.org/jira/browse/OAK-1943">OAK-1943</a>
*/
public class UserImportPwExpiryTest extends AbstractImportTest {
// Users in these tests are imported below the user root of the base test class.
@Override
protected String getTargetPath() {
return USERPATH;
}
// Returning null selects the default ImportBehavior (see class javadoc).
@Override
protected String getImportBehavior() {
return null;
}
// Enables password expiry for these tests by configuring a maximum password age.
// NOTE(review): this looks like it overrides a hook in AbstractImportTest, but the
// base class is not visible here - confirm and add @Override if applicable.
@CheckForNull
protected ConfigurationParameters getConfigurationParameters() {
// Max age of 10 (unit defined by Oak's user configuration, presumably days -
// confirm against the UserConstants documentation).
HashMap<String, Object> userParams = new HashMap<String, Object>() {{
put(UserConstants.PARAM_PASSWORD_MAX_AGE, 10);
}};
return ConfigurationParameters.of(UserConfiguration.NAME, ConfigurationParameters.of(userParams));
}
/**
* Importing a user whose XML includes an (empty) rep:pwd child node must yield
* a protected rep:pwd node carrying a protected rep:passwordLastModified
* property.
*
* @since Oak 1.1
*/
@Test
public void testImportUserCreatesPasswordLastModified() throws Exception {
// import user
String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<sv:node sv:name=\"x\" xmlns:mix=\"http://www.jcp.org/jcr/mix/1.0\" xmlns:nt=\"http://www.jcp.org/jcr/nt/1.0\" xmlns:fn_old=\"http://www.w3.org/2004/10/xpath-functions\" xmlns:fn=\"http://www.w3.org/2005/xpath-functions\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:sv=\"http://www.jcp.org/jcr/sv/1.0\" xmlns:rep=\"internal\" xmlns:jcr=\"http://www.jcp.org/jcr/1.0\">" +
"   <sv:property sv:name=\"jcr:primaryType\" sv:type=\"Name\">" +
"      <sv:value>rep:User</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"jcr:uuid\" sv:type=\"String\">" +
"      <sv:value>9dd4e461-268c-3034-b5c8-564e155c67a6</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:password\" sv:type=\"String\">" +
"      <sv:value>pw</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:principalName\" sv:type=\"String\">" +
"      <sv:value>xPrincipal</sv:value>" +
"   </sv:property>" +
"   <sv:node sv:name=\"" + UserConstants.REP_PWD + "\">" +
"      <sv:property sv:name=\"jcr:primaryType\" sv:type=\"Name\">" +
"         <sv:value>"+ UserConstants.NT_REP_PASSWORD +"</sv:value>" +
"      </sv:property>" +
"   </sv:node>" +
"</sv:node>";
doImport(USERPATH, xml);
Authorizable authorizable = getUserManager().getAuthorizable("x");
Node userNode = getImportSession().getNode(authorizable.getPath());
// The rep:pwd node must exist, be protected, and carry a protected
// rep:passwordLastModified property added by the import logic.
assertTrue(userNode.hasNode(UserConstants.REP_PWD));
Node pwdNode = userNode.getNode(UserConstants.REP_PWD);
assertTrue(pwdNode.getDefinition().isProtected());
assertTrue(pwdNode.hasProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED));
assertTrue(pwdNode.getProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED).getDefinition().isProtected());
}
/**
* Same expectation as above, but the import XML does not contain a rep:pwd
* child node at all: the import must still create it.
*
* @since Oak 1.1
*/
@Test
public void testImportUserCreatesPasswordLastModified2() throws Exception {
// import user without rep:pwd child node defined
String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<sv:node sv:name=\"x\" xmlns:mix=\"http://www.jcp.org/jcr/mix/1.0\" xmlns:nt=\"http://www.jcp.org/jcr/nt/1.0\" xmlns:fn_old=\"http://www.w3.org/2004/10/xpath-functions\" xmlns:fn=\"http://www.w3.org/2005/xpath-functions\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:sv=\"http://www.jcp.org/jcr/sv/1.0\" xmlns:rep=\"internal\" xmlns:jcr=\"http://www.jcp.org/jcr/1.0\">" +
"   <sv:property sv:name=\"jcr:primaryType\" sv:type=\"Name\">" +
"      <sv:value>rep:User</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"jcr:uuid\" sv:type=\"String\">" +
"      <sv:value>9dd4e461-268c-3034-b5c8-564e155c67a6</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:password\" sv:type=\"String\">" +
"      <sv:value>pw</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:principalName\" sv:type=\"String\">" +
"      <sv:value>xPrincipal</sv:value>" +
"   </sv:property>" +
"</sv:node>";
doImport(USERPATH, xml);
// verify that the pwd node has still been created
Authorizable authorizable = getUserManager().getAuthorizable("x");
Node userNode = getImportSession().getNode(authorizable.getPath());
assertTrue(userNode.hasNode(UserConstants.REP_PWD));
Node pwdNode = userNode.getNode(UserConstants.REP_PWD);
assertTrue(pwdNode.getDefinition().isProtected());
assertTrue(pwdNode.hasProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED));
assertTrue(pwdNode.getProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED).getDefinition().isProtected());
}
/**
* Importing a rep:pwd node that already carries a rep:passwordLastModified
* value and a custom property must preserve both; properties on the protected
* rep:pwd node end up protected themselves.
*
* @since Oak 1.1
*/
@Test
public void testImportUserWithPwdProperties() throws Exception {
// import user
String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<sv:node sv:name=\"y\" xmlns:mix=\"http://www.jcp.org/jcr/mix/1.0\" xmlns:nt=\"http://www.jcp.org/jcr/nt/1.0\" xmlns:fn_old=\"http://www.w3.org/2004/10/xpath-functions\" xmlns:fn=\"http://www.w3.org/2005/xpath-functions\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:sv=\"http://www.jcp.org/jcr/sv/1.0\" xmlns:rep=\"internal\" xmlns:jcr=\"http://www.jcp.org/jcr/1.0\">" +
"   <sv:property sv:name=\"jcr:primaryType\" sv:type=\"Name\">" +
"      <sv:value>rep:User</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"jcr:uuid\" sv:type=\"String\">" +
"      <sv:value>41529076-9594-360e-ae48-5922904f345d</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:password\" sv:type=\"String\">" +
"      <sv:value>pw</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:principalName\" sv:type=\"String\">" +
"      <sv:value>yPrincipal</sv:value>" +
"   </sv:property>" +
"   <sv:node sv:name=\"" + UserConstants.REP_PWD + "\">" +
"      <sv:property sv:name=\"jcr:primaryType\" sv:type=\"Name\">" +
"         <sv:value>" + UserConstants.NT_REP_PASSWORD + "</sv:value>" +
"      </sv:property>" +
"      <sv:property sv:name=\"" + UserConstants.REP_PASSWORD_LAST_MODIFIED + "\" sv:type=\"Long\">" +
"         <sv:value>1404036716000</sv:value>" +
"      </sv:property>" +
"      <sv:property sv:name=\"customProp\" sv:type=\"String\">" +
"         <sv:value>abc</sv:value>" +
"      </sv:property>" +
"   </sv:node>" +
"</sv:node>";
doImport(USERPATH, xml);
Authorizable authorizable = getUserManager().getAuthorizable("y");
Node userNode = getImportSession().getNode(authorizable.getPath());
assertTrue(userNode.hasNode(UserConstants.REP_PWD));
Node pwdNode = userNode.getNode(UserConstants.REP_PWD);
// The imported timestamp must survive the import unchanged.
assertTrue(pwdNode.hasProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED));
assertEquals(1404036716000L, pwdNode.getProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED).getLong());
// The custom property is kept and inherits the protected definition.
assertTrue(pwdNode.hasProperty("customProp"));
Property custom = pwdNode.getProperty("customProp");
assertTrue(custom.getDefinition().isProtected());
assertEquals("abc", custom.getString());
}
/**
* Overwriting an existing user (that already has a rep:pwd node from a
* password change) via an import that carries no expiry content must not
* remove the rep:passwordLastModified property.
*
* @since Oak 1.1
*/
@Test
public void testImportExistingUserWithoutExpiryProperty() throws Exception {
String uid = "existing";
User user = getUserManager().createUser(uid, uid);
Session s = getImportSession();
// change password to force existence of password last modified property
user.changePassword(uid);
s.save();
Node userNode = s.getNode(user.getPath());
assertTrue(userNode.hasNode(UserConstants.REP_PWD));
Node pwdNode = userNode.getNode(UserConstants.REP_PWD);
assertTrue(pwdNode.hasProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED));
// overwrite user via import
String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<sv:node sv:name=\"" + uid + "\" xmlns:mix=\"http://www.jcp.org/jcr/mix/1.0\" xmlns:nt=\"http://www.jcp.org/jcr/nt/1.0\" xmlns:fn_old=\"http://www.w3.org/2004/10/xpath-functions\" xmlns:fn=\"http://www.w3.org/2005/xpath-functions\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:sv=\"http://www.jcp.org/jcr/sv/1.0\" xmlns:rep=\"internal\" xmlns:jcr=\"http://www.jcp.org/jcr/1.0\">" +
"   <sv:property sv:name=\"jcr:primaryType\" sv:type=\"Name\">" +
"      <sv:value>rep:User</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:password\" sv:type=\"String\">" +
"      <sv:value>" + uid + "</sv:value>" +
"   </sv:property>" +
"   <sv:property sv:name=\"rep:principalName\" sv:type=\"String\">" +
"      <sv:value>" + uid + "Principal</sv:value>" +
"   </sv:property>" +
"</sv:node>";
doImport(USERPATH, xml);
// The pre-existing expiry content must have been preserved.
Authorizable authorizable = getUserManager().getAuthorizable(uid);
userNode = s.getNode(authorizable.getPath());
assertTrue(userNode.hasNode(UserConstants.REP_PWD));
pwdNode = userNode.getNode(UserConstants.REP_PWD);
assertTrue(pwdNode.hasProperty(UserConstants.REP_PASSWORD_LAST_MODIFIED));
}
}
| |
package com.netbrasoft.gnuob.application.product;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.authorization.Action;
import org.apache.wicket.authroles.authorization.strategies.role.annotations.AuthorizeAction;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextArea;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.validation.validator.StringValidator;
import com.netbrasoft.gnuob.api.Option;
import com.netbrasoft.gnuob.api.Product;
import com.netbrasoft.gnuob.api.SubOption;
import com.netbrasoft.gnuob.application.NetbrasoftApplicationConstants;
import com.netbrasoft.gnuob.application.security.AppRoles;
import de.agilecoders.wicket.core.markup.html.bootstrap.button.BootstrapAjaxButton;
import de.agilecoders.wicket.core.markup.html.bootstrap.button.Buttons;
import de.agilecoders.wicket.core.markup.html.bootstrap.button.LoadingBehavior;
import de.agilecoders.wicket.core.markup.html.bootstrap.form.BootstrapCheckbox;
import de.agilecoders.wicket.core.markup.html.bootstrap.form.BootstrapForm;
import de.agilecoders.wicket.core.markup.html.bootstrap.form.FormBehavior;
import de.agilecoders.wicket.core.markup.html.bootstrap.form.FormType;
import de.agilecoders.wicket.core.markup.html.bootstrap.table.TableBehavior;
import de.agilecoders.wicket.extensions.markup.html.bootstrap.form.validation.TooltipValidation;
import wicket.contrib.tinymce4.ajax.TinyMceAjaxSubmitModifier;
@SuppressWarnings("unchecked")
@AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER, AppRoles.EMPLOYEE})
// Panel that shows a single SubOption of a Product either read-only (view
// fragment) or editable (edit fragment). The caller selects which sub-option
// (and its parent Option) is displayed via setSelectedModel(..); the fragments
// themselves are added by code outside this class.
public class ProductSubOptionViewOrEditPanel extends Panel {
@AuthorizeAction(action = Action.ENABLE, roles = {AppRoles.MANAGER})
// Editable variant: a form over the selected SubOption plus a save button.
class ProductSubOptionEditFragment extends Fragment {
@AuthorizeAction(action = Action.ENABLE, roles = {AppRoles.MANAGER})
class SubOptionEditTable extends WebMarkupContainer {
@AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER})
// Submit button for the edit form; on success it links a newly created
// SubOption (id == 0) into the parent Option's sub-option list.
class SaveAjaxButton extends BootstrapAjaxButton {
private static final long serialVersionUID = 2695394292963384938L;
public SaveAjaxButton(final String id, final IModel<String> model, final Form<SubOption> form, final Buttons.Type type) {
super(id, model, form, type);
setSize(Buttons.Size.Small);
// TinyMceAjaxSubmitModifier makes sure TinyMCE editor content is
// synced back to the textareas before the AJAX submit fires.
add(new LoadingBehavior(Model.of(ProductSubOptionViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY))), new TinyMceAjaxSubmitModifier());
}
@Override
protected void onError(final AjaxRequestTarget target, final Form<?> form) {
// Show validation tooltips and repaint form + button on failed submit.
form.add(new TooltipValidation());
target.add(form);
target.add(SaveAjaxButton.this.add(new LoadingBehavior(Model.of(ProductSubOptionViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY)))));
}
@Override
protected void onSubmit(final AjaxRequestTarget target, final Form<?> form) {
// id == 0 means the SubOption is not persisted yet: attach it to the
// selected parent Option so it is saved with the product.
if (((SubOption) form.getDefaultModelObject()).getId() == 0) {
ProductSubOptionViewOrEditPanel.this.selectedParentModel.getObject().getSubOptions().add((SubOption) form.getDefaultModelObject());
}
// Repaint the form, this button, and the enclosing container.
target.add(form.setOutputMarkupId(true));
target.add(SaveAjaxButton.this.add(new LoadingBehavior(Model.of(ProductSubOptionViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY)))));
target.add(ProductSubOptionViewOrEditPanel.this.getParent().setOutputMarkupId(true));
}
}
private static final String DISABLED_ID = "disabled";
private static final String DESCRIPTION_ID = "description";
private static final String VALUE_ID = "value";
private static final String SAVE_ID = "save";
private static final String SUB_OPTION_EDIT_FORM_COMPONENT_ID = "subOptionEditForm";
private static final long serialVersionUID = -7519943626345095089L;
// Form bound (via CompoundPropertyModel) to the panel's selected SubOption.
private final BootstrapForm<SubOption> subOptionEditForm;
private final SaveAjaxButton saveAjaxButton;
public SubOptionEditTable(final String id, final IModel<Product> model) {
super(id, model);
subOptionEditForm =
new BootstrapForm<SubOption>(SUB_OPTION_EDIT_FORM_COMPONENT_ID, new CompoundPropertyModel<SubOption>(ProductSubOptionViewOrEditPanel.this.selectedModel));
saveAjaxButton = new SaveAjaxButton(SAVE_ID, Model.of(ProductSubOptionViewOrEditPanel.this.getString(NetbrasoftApplicationConstants.SAVE_MESSAGE_KEY)), subOptionEditForm,
Buttons.Type.Primary);
}
@Override
protected void onInitialize() {
// value/description are required and capped at 128 characters; 'disabled'
// is a plain checkbox. Component ids must match the markup file.
subOptionEditForm.add(new TextArea<String>(VALUE_ID).add(StringValidator.maximumLength(128)).setRequired(true).setOutputMarkupId(true));
subOptionEditForm.add(new TextArea<String>(DESCRIPTION_ID).add(StringValidator.maximumLength(128)).setRequired(true).setOutputMarkupId(true));
subOptionEditForm.add(new BootstrapCheckbox(DISABLED_ID).setOutputMarkupId(true));
subOptionEditForm.add(saveAjaxButton.setOutputMarkupId(true));
add(subOptionEditForm.add(new FormBehavior(FormType.Horizontal)).setOutputMarkupId(true));
super.onInitialize();
}
}
private static final String SUB_OPTION_EDIT_TABLE_ID = "subOptionEditTable";
private static final String PRODUCT_SUB_OPTION_EDIT_FRAGMENT_MARKUP_ID = "productSubOptionEditFragment";
private static final String PRODUCT_SUB_OPTION_VIEW_OR_EDIT_FRAGMENT_ID = "productSubOptionViewOrEditFragment";
private static final long serialVersionUID = -4032029235917033204L;
private final SubOptionEditTable subOptionEditTable;
public ProductSubOptionEditFragment() {
super(PRODUCT_SUB_OPTION_VIEW_OR_EDIT_FRAGMENT_ID, PRODUCT_SUB_OPTION_EDIT_FRAGMENT_MARKUP_ID, ProductSubOptionViewOrEditPanel.this,
ProductSubOptionViewOrEditPanel.this.getDefaultModel());
subOptionEditTable = new SubOptionEditTable(SUB_OPTION_EDIT_TABLE_ID, (IModel<Product>) ProductSubOptionViewOrEditPanel.this.getDefaultModel());
}
@Override
protected void onInitialize() {
add(subOptionEditTable.add(new TableBehavior()).setOutputMarkupId(true));
super.onInitialize();
}
}
// NOTE(review): the view fragment renders only for MANAGER while the panel
// itself renders for MANAGER and EMPLOYEE, so employees see neither fragment
// unless one is added elsewhere - confirm this asymmetry is intended.
@AuthorizeAction(action = Action.RENDER, roles = {AppRoles.MANAGER})
// Read-only variant: same fields as the edit fragment but without validators,
// save button, or checkbox ('disabled' is a plain text field here).
class ProductSubOptionViewFragment extends Fragment {
@AuthorizeAction(action = Action.ENABLE, roles = {AppRoles.MANAGER, AppRoles.EMPLOYEE})
class SubOptionViewTable extends WebMarkupContainer {
private static final String DISABLED_ID = "disabled";
private static final String DESCRIPTION_ID = "description";
private static final String VALUE_ID = "value";
private static final String SUB_OPTION_VIEW_FORM_COMPONENT_ID = "subOptionViewForm";
private static final long serialVersionUID = -7519943626345095089L;
private final BootstrapForm<SubOption> subOptionViewForm;
public SubOptionViewTable(final String id, final IModel<Product> model) {
super(id, model);
subOptionViewForm =
new BootstrapForm<SubOption>(SUB_OPTION_VIEW_FORM_COMPONENT_ID, new CompoundPropertyModel<SubOption>(ProductSubOptionViewOrEditPanel.this.selectedModel));
}
@Override
protected void onInitialize() {
subOptionViewForm.add(new TextArea<String>(VALUE_ID).setOutputMarkupId(true));
subOptionViewForm.add(new TextArea<String>(DESCRIPTION_ID).setOutputMarkupId(true));
subOptionViewForm.add(new TextField<String>(DISABLED_ID).setOutputMarkupId(true));
add(subOptionViewForm.add(new FormBehavior(FormType.Horizontal)).setOutputMarkupId(true));
super.onInitialize();
}
}
private static final String SUB_OPTION_VIEW_TABLE_ID = "subOptionViewTable";
private static final String PRODUCT_SUB_OPTION_VIEW_FRAGMENT_MARKUP_ID = "productSubOptionViewFragment";
private static final String PRODUCT_SUB_OPTION_VIEW_OR_EDIT_FRAGMENT_ID = "productSubOptionViewOrEditFragment";
private static final long serialVersionUID = -4032029235917033204L;
private final SubOptionViewTable subOptionViewTable;
public ProductSubOptionViewFragment() {
super(PRODUCT_SUB_OPTION_VIEW_OR_EDIT_FRAGMENT_ID, PRODUCT_SUB_OPTION_VIEW_FRAGMENT_MARKUP_ID, ProductSubOptionViewOrEditPanel.this,
ProductSubOptionViewOrEditPanel.this.getDefaultModel());
subOptionViewTable = new SubOptionViewTable(SUB_OPTION_VIEW_TABLE_ID, (IModel<Product>) ProductSubOptionViewOrEditPanel.this.getDefaultModel());
}
@Override
protected void onInitialize() {
add(subOptionViewTable.add(new TableBehavior()).setOutputMarkupId(true));
super.onInitialize();
}
}
private static final long serialVersionUID = 8609291357690450348L;
// Currently displayed sub-option; defaults to an empty SubOption.
private IModel<SubOption> selectedModel;
// Parent option the sub-option belongs to; receives new sub-options on save.
private IModel<Option> selectedParentModel;
public ProductSubOptionViewOrEditPanel(final String id, final IModel<Product> model) {
super(id, model);
selectedModel = Model.of(new SubOption());
selectedParentModel = Model.of(new Option());
}
// Wires the sub-option to display together with its parent option.
public void setSelectedModel(final IModel<SubOption> selectedModel, final IModel<Option> selectedParentModel) {
this.selectedModel = selectedModel;
this.selectedParentModel = selectedParentModel;
}
}
| |
/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.analyze;
import io.crate.common.collections.Maps;
import io.crate.data.RowN;
import io.crate.exceptions.ColumnUnknownException;
import io.crate.exceptions.InvalidColumnNameException;
import io.crate.exceptions.InvalidRelationName;
import io.crate.exceptions.InvalidSchemaNameException;
import io.crate.exceptions.OperationOnInaccessibleRelationException;
import io.crate.exceptions.RelationAlreadyExists;
import io.crate.exceptions.UnsupportedFeatureException;
import io.crate.metadata.ColumnIdent;
import io.crate.metadata.FulltextAnalyzerResolver;
import io.crate.metadata.RelationName;
import io.crate.metadata.Schemas;
import io.crate.planner.PlannerContext;
import io.crate.planner.node.ddl.AlterTableAddColumnPlan;
import io.crate.planner.node.ddl.AlterTableDropCheckConstraintPlan;
import io.crate.planner.node.ddl.AlterTablePlan;
import io.crate.planner.node.ddl.CreateBlobTablePlan;
import io.crate.planner.node.ddl.CreateTablePlan;
import io.crate.planner.operators.SubQueryResults;
import io.crate.sql.parser.ParsingException;
import io.crate.sql.tree.ColumnPolicy;
import io.crate.test.integration.CrateDummyClusterServiceUnitTest;
import io.crate.testing.SQLExecutor;
import io.crate.types.DataTypes;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AutoExpandReplicas;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.test.ClusterServiceUtils;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Assertions;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static com.carrotsearch.randomizedtesting.RandomizedTest.$;
import static io.crate.metadata.FulltextAnalyzerResolver.CustomType.ANALYZER;
import static io.crate.testing.TestingHelpers.mapToSortedString;
import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
public class CreateAlterTableStatementAnalyzerTest extends CrateDummyClusterServiceUnitTest {
private SQLExecutor e;
private PlannerContext plannerContext;
@Before
public void prepare() throws IOException {
    // Pre-register a custom fulltext analyzer ("ft_search") in the persistent
    // cluster settings so statements referencing it can be analyzed.
    String encodedAnalyzer = FulltextAnalyzerResolver.encodeSettings(
        Settings.builder().put("search", "foobar").build()).utf8ToString();
    Settings persistent = Settings.builder()
        .put(ANALYZER.buildSettingName("ft_search"), encodedAnalyzer)
        .build();
    ClusterState initialState = ClusterState.builder(ClusterName.DEFAULT)
        .metadata(Metadata.builder().persistentSettings(persistent).build())
        .build();
    ClusterServiceUtils.setState(clusterService, initialState);
    // Executor with the default test tables; 3 nodes in the cluster.
    e = SQLExecutor.builder(clusterService, 3, Randomness.get(), List.of())
        .enableDefaultTables()
        .build();
    plannerContext = e.getPlannerContext(clusterService.state());
}
/** Analyzes {@code stmt} with the default executor {@link #e}. */
private <S> S analyze(String stmt, Object... arguments) {
    return this.analyze(e, stmt, arguments);
}
/**
 * Analyzes {@code stmt} and, for DDL statements that have a separate bind
 * step, performs the corresponding plan binding so tests observe the fully
 * bound analysis. Any other statement is returned as-is.
 */
@SuppressWarnings("unchecked")
private <S> S analyze(SQLExecutor e, String stmt, Object... arguments) {
    AnalyzedStatement analyzed = e.analyze(stmt);
    if (analyzed instanceof AnalyzedCreateTable) {
        return (S) CreateTablePlan.bind(
            (AnalyzedCreateTable) analyzed,
            plannerContext.transactionContext(),
            plannerContext.nodeContext(),
            new RowN(arguments),
            SubQueryResults.EMPTY,
            new NumberOfShards(clusterService),
            e.schemas(),
            e.fulltextAnalyzerResolver());
    }
    if (analyzed instanceof AnalyzedAlterTable) {
        return (S) AlterTablePlan.bind(
            (AnalyzedAlterTable) analyzed,
            plannerContext.transactionContext(),
            plannerContext.nodeContext(),
            new RowN(arguments),
            SubQueryResults.EMPTY);
    }
    if (analyzed instanceof AnalyzedAlterTableAddColumn) {
        return (S) AlterTableAddColumnPlan.bind(
            (AnalyzedAlterTableAddColumn) analyzed,
            plannerContext.transactionContext(),
            plannerContext.nodeContext(),
            new RowN(arguments),
            SubQueryResults.EMPTY,
            null);
    }
    if (analyzed instanceof AnalyzedAlterTableDropCheckConstraint) {
        return (S) AlterTableDropCheckConstraintPlan.bind(
            (AnalyzedAlterTableDropCheckConstraint) analyzed);
    }
    return (S) analyzed;
}
@Test
public void testTimestampDataTypeDeprecationWarning() {
    // `timestamp` without a zone qualifier must emit a deprecation warning.
    analyze("create table t (ts timestamp)");
    String expectedWarning =
        "Column [ts]: Usage of the `TIMESTAMP` data type as a timestamp with zone is deprecated," +
        " use the `TIMESTAMPTZ` or `TIMESTAMP WITH TIME ZONE` data type instead.";
    assertWarnings(expectedWarning);
}
@Test
public void test_cannot_create_table_that_contains_a_column_definition_of_type_time() {
    // Columns of type `time with time zone` are rejected at analysis time.
    String expectedMessage = "Cannot use the type `time with time zone` for column: ts";
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage(expectedMessage);
    analyze("create table t (ts time with time zone)");
}
@Test
public void test_cannot_alter_table_to_add_a_column_definition_of_type_time() {
    // Adding a `time with time zone` column via ALTER TABLE is rejected too.
    String expectedMessage = "Cannot use the type `time with time zone` for column: ts";
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage(expectedMessage);
    analyze("alter table user_refresh_interval add column ts time with time zone");
}
@Test
public void testCreateTableInSystemSchemasIsProhibited() {
    // CREATE TABLE in any read-only schema must fail with a clear message.
    for (String schema : Schemas.READ_ONLY_SCHEMAS) {
        String stmt = String.format("CREATE TABLE %s.%s (ordinal INTEGER, name STRING)", schema, "my_table");
        try {
            analyze(stmt);
            fail("create table in read-only schema must fail");
        } catch (IllegalArgumentException iae) { // renamed so it does not shadow the executor field `e`
            assertThat(iae.getLocalizedMessage(), startsWith("Cannot create relation in read-only schema: " + schema));
        }
    }
}
@Test
public void testCreateTableWithAlternativePrimaryKeySyntax() {
    // PRIMARY KEY (...) as a table-level constraint instead of inline columns.
    BoundCreateTable bound = analyze(
        "create table foo (id integer, name string, primary key (id, name))"
    );
    String[] pks = bound.primaryKeys().toArray(new String[0]);
    assertThat(pks.length, is(2));
    assertThat(pks[0], is("id"));
    assertThat(pks[1], is("name"));
}
@Test
@SuppressWarnings("unchecked")
public void testSimpleCreateTable() {
    BoundCreateTable bound = analyze(
        "create table foo (id integer primary key, name string not null) " +
        "clustered into 3 shards with (number_of_replicas=0)");
    // Shard/replica settings come from the CLUSTERED INTO / WITH clauses.
    assertThat(bound.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey()), is("3"));
    assertThat(bound.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey()), is("0"));
    Map<String, Object> meta = ((Map) bound.mapping().get("_meta"));
    assertNull(meta.get("columns"));
    // Column type mapping: integer stays integer, string becomes keyword.
    Map<String, Object> properties = bound.mappingProperties();
    Map<String, Object> idColumn = (Map<String, Object>) properties.get("id");
    assertThat(idColumn.get("type"), is("integer"));
    Map<String, Object> nameColumn = (Map<String, Object>) properties.get("name");
    assertThat(nameColumn.get("type"), is("keyword"));
    String[] pks = bound.primaryKeys().toArray(new String[0]);
    assertThat(pks.length, is(1));
    assertThat(pks[0], is("id"));
    String[] notNull = bound.notNullColumns().toArray(new String[0]);
    assertThat(notNull.length, is(1));
    assertThat(notNull[0], is("name"));
}
@Test
public void testCreateTableWithDefaultNumberOfShards() {
    // Without a CLUSTERED INTO clause the executor default (6 shards) applies.
    BoundCreateTable bound = analyze("create table foo (id integer primary key, name string)");
    String numShards = bound.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey());
    assertThat(numShards, is("6"));
}
@Test
public void testCreateTableWithDefaultNumberOfShardsWithClusterByClause() {
    // CLUSTERED BY without an INTO ... SHARDS part keeps the default count.
    BoundCreateTable bound = analyze(
        "create table foo (id integer primary key) clustered by (id)"
    );
    String numShards = bound.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey());
    assertThat(numShards, is("6"));
}
@Test
public void testCreateTableNumberOfShardsProvidedInClusteredClause() {
    // An explicit INTO n SHARDS overrides the default shard count.
    BoundCreateTable bound = analyze(
        "create table foo (id integer primary key) " +
        "clustered by (id) into 8 shards"
    );
    String numShards = bound.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey());
    assertThat(numShards, is("8"));
}
@Test
public void testCreateTableWithTotalFieldsLimit() {
    // mapping.total_fields.limit maps onto the index-level mapper setting.
    BoundCreateTable bound = analyze(
        "CREATE TABLE foo (id int primary key) " +
        "with (\"mapping.total_fields.limit\"=5000)");
    String key = MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey();
    assertThat(bound.tableParameter().settings().get(key), is("5000"));
}
@Test
public void testCreateTableWithRefreshInterval() {
    // The settings machinery normalizes '5000ms' to '5s'.
    BoundCreateTable bound = analyze(
        "CREATE TABLE foo (id int primary key, content string) " +
        "with (refresh_interval='5000ms')");
    String key = IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey();
    assertThat(bound.tableParameter().settings().get(key), is("5s"));
}
@Test
public void testCreateTableWithNumberOfShardsOnWithClauseIsInvalid() {
    // number_of_shards may only be specified via CLUSTERED ... INTO n SHARDS.
    String expectedMessage = "Invalid property \"number_of_shards\" passed to [ALTER | CREATE] TABLE statement";
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage(expectedMessage);
    analyze("CREATE TABLE foo (id int primary key, content string) " +
            "with (number_of_shards=8)");
}
@Test(expected = IllegalArgumentException.class)
public void testCreateTableWithRefreshIntervalWrongNumberFormat() {
    // '1asdf' is not a valid time value for refresh_interval.
    analyze("CREATE TABLE foo (id int primary key, content string) with (refresh_interval='1asdf')");
}
@Test
public void testAlterTableWithRefreshInterval() {
    String key = IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey();
    // SET normalizes '5000ms' to '5s'.
    BoundAlterTable boundSet = analyze(
        "ALTER TABLE user_refresh_interval " +
        "SET (refresh_interval = '5000ms')");
    assertEquals("5s", boundSet.tableParameter().settings().get(key));
    // RESET restores the default refresh interval of 1s.
    BoundAlterTable boundReset = analyze(
        "ALTER TABLE user_refresh_interval " +
        "RESET (refresh_interval)");
    assertEquals("1s", boundReset.tableParameter().settings().get(key));
}
// mapping.total_fields.limit is settable via ALTER TABLE SET and reverts to the default ("1000") on RESET.
@Test
public void testTotalFieldsLimitCanBeUsedWithAlterTable() {
BoundAlterTable analysisSet = analyze(
"ALTER TABLE users " +
"SET (\"mapping.total_fields.limit\" = '5000')");
assertEquals("5000", analysisSet.tableParameter().settings().get(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey()));
// Check if resetting total_fields results in default value
BoundAlterTable analysisReset = analyze(
"ALTER TABLE users " +
"RESET (\"mapping.total_fields.limit\")");
assertEquals("1000", analysisReset.tableParameter().settings().get(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey()));
}
// column_policy is a mapping parameter (not an index setting): SET must land in tableParameter().mappings().
@Test
public void testAlterTableWithColumnPolicy() {
BoundAlterTable analysisSet = analyze(
"ALTER TABLE user_refresh_interval " +
"SET (column_policy = 'strict')");
assertEquals(
ColumnPolicy.STRICT.lowerCaseName(),
analysisSet.tableParameter().mappings().get(TableParameters.COLUMN_POLICY.getKey()));
}
// 'ignored' is not a valid value for column_policy on ALTER TABLE and must be rejected.
@Test
public void testAlterTableWithInvalidColumnPolicy() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("Invalid value for argument 'column_policy'");
analyze("ALTER TABLE user_refresh_interval " +
"SET (column_policy = 'ignored')");
}
// ALTER TABLE SET (max_ngram_diff = 42) must populate the corresponding index setting.
@Test
public void testAlterTableWithMaxNGramDiffSetting() {
BoundAlterTable analysisSet = analyze(
"ALTER TABLE users " +
"SET (max_ngram_diff = 42)");
assertThat(analysisSet.tableParameter().settings().get(IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey()), is("42"));
}
// ALTER TABLE SET (max_shingle_diff = 43) must populate the corresponding index setting.
@Test
public void testAlterTableWithMaxShingleDiffSetting() {
BoundAlterTable analysisSet = analyze(
"ALTER TABLE users " +
"SET (max_shingle_diff = 43)");
assertThat(analysisSet.tableParameter().settings().get(IndexSettings.MAX_SHINGLE_DIFF_SETTING.getKey()), is("43"));
}
// CLUSTERED BY (id) records the routing column under _meta.routing in the generated mapping.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithClusteredBy() {
BoundCreateTable analysis = analyze(
"create table foo (id integer, name string) clustered by(id)");
Map<String, Object> meta = (Map) analysis.mapping().get("_meta");
assertNotNull(meta);
assertThat(meta.get("routing"), is("id"));
}
// A CLUSTERED BY column that is not part of the primary key must be rejected.
// (Removed a stale @SuppressWarnings("unchecked"): the body performs no unchecked operations.)
@Test(expected = IllegalArgumentException.class)
public void testCreateTableWithClusteredByNotInPrimaryKeys() {
analyze("create table foo (id integer primary key, name string) clustered by(name)");
}
// An `object as (...)` column maps to type=object with dynamic=true and nested properties
// carrying the declared sub-column types (string -> keyword, integer -> integer).
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithObjects() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, details object as (name string, age integer))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> details = (Map<String, Object>) mappingProperties.get("details");
assertThat(details.get("type"), is("object"));
assertThat(details.get("dynamic"), is("true"));
Map<String, Object> detailsProperties = (Map<String, Object>) details.get("properties");
Map<String, Object> nameProperties = (Map<String, Object>) detailsProperties.get("name");
assertThat(nameProperties.get("type"), is("keyword"));
Map<String, Object> ageProperties = (Map<String, Object>) detailsProperties.get("age");
assertThat(ageProperties.get("type"), is("integer"));
}
// object(strict) maps to dynamic=strict in the column's mapping.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithStrictObject() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, details object(strict) as (name string, age integer))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> details = (Map<String, Object>) mappingProperties.get("details");
assertThat(details.get("type"), is("object"));
assertThat(details.get("dynamic"), is("strict"));
}
// object(ignored) maps to dynamic=false in the column's mapping.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithIgnoredObject() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, details object(ignored))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> details = (Map<String, Object>) mappingProperties.get("details");
assertThat(details.get("type"), is("object"));
assertThat(details.get("dynamic"), is("false"));
}
// A fulltext INDEX may reference an object sub-column via subscript (author['name']);
// the sub-column's mapping then gets a copy_to entry pointing at the index column.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithSubscriptInFulltextIndexDefinition() {
BoundCreateTable analysis = analyze(
"create table my_table1g (" +
" title string, " +
" author object(dynamic) as ( " +
" name string, " +
" birthday timestamp with time zone" +
"), " +
"INDEX author_title_ft using fulltext(title, author['name']))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> details = (Map<String, Object>) mappingProperties.get("author");
Map<String, Object> nameMapping = (Map<String, Object>) ((Map<String, Object>) details.get("properties")).get("name");
assertThat(((List<String>) nameMapping.get("copy_to")).get(0), is("author_title_ft"));
}
// A fulltext INDEX referencing a non-existent nested path must fail with ColumnUnknownException.
@Test(expected = ColumnUnknownException.class)
public void testCreateTableWithInvalidFulltextIndexDefinition() {
analyze(
"create table my_table1g (" +
" title string, " +
" author object(dynamic) as ( " +
" name string, " +
" birthday timestamp with time zone" +
"), " +
"INDEX author_title_ft using fulltext(title, author['name']['foo']['bla']))");
}
@SuppressWarnings("unchecked")
@Test
public void testCreateTableWithArray() {
// Both the explicit array(string) syntax and the text[] shorthand must map to
// type=array with an inner keyword mapping.
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, details array(string), more_details text[])");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> details = (Map<String, Object>) mappingProperties.get("details");
assertThat(details.get("type"), is("array"));
Map<String, Object> inner = (Map<String, Object>) details.get("inner");
assertThat(inner.get("type"), is("keyword"));
Map<String, Object> moreDetails = (Map<String, Object>) mappingProperties.get("more_details");
assertThat(moreDetails.get("type"), is("array"));
// BUGFIX: previously read details.get("inner") again (copy-paste), so the
// more_details inner mapping was never actually asserted.
Map<String, Object> moreDetailsInner = (Map<String, Object>) moreDetails.get("inner");
assertThat(moreDetailsInner.get("type"), is("keyword"));
}
// array(object as (...)) nests the full object mapping (including a nested array) under
// the array's `inner` key; compared via the canonical sorted-string rendering.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithObjectsArray() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, details array(object as (name string, age integer, tags array(string))))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
assertThat(mapToSortedString(mappingProperties),
is("details={inner={dynamic=true, position=2, properties={age={type=integer}, name={type=keyword}, " +
"tags={inner={type=keyword}, type=array}}, type=object}, type=array}, " +
"id={position=1, type=integer}"));
}
// INDEX using fulltext with (analyzer='german') sets analyzer=german and leaves `index` unset.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithAnalyzer() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, content string INDEX using fulltext with (analyzer='german'))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> contentMapping = (Map<String, Object>) mappingProperties.get("content");
assertThat(contentMapping.get("index"), nullValue());
assertThat(contentMapping.get("analyzer"), is("german"));
}
// Same as testCreateTableWithAnalyzer, but the analyzer name is supplied as a bind parameter.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithAnalyzerParameter() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, content string INDEX using fulltext with (analyzer=?))",
"german"
);
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> contentMapping = (Map<String, Object>) mappingProperties.get("content");
assertThat(contentMapping.get("index"), nullValue());
assertThat(contentMapping.get("analyzer"), is("german"));
}
@SuppressWarnings("unchecked")
@Test
// Renamed from textCreateTable... (typo): JUnit ran it via @Test regardless, but the
// name broke the test-naming convention and tooling searches for "test*".
public void testCreateTableWithCustomAnalyzerInNestedColumn() {
// A custom analyzer on a column nested in a strict object must be applied to the
// nested mapping, and the analyzer's custom settings must reach the table settings.
BoundCreateTable analysis = analyze(
"create table ft_search (" +
"\"user\" object (strict) as (" +
"name string index using fulltext with (analyzer='ft_search') " +
")" +
")");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> details = (Map<String, Object>) mappingProperties.get("user");
Map<String, Object> nameMapping = (Map<String, Object>) ((Map<String, Object>) details.get("properties")).get("name");
assertThat(nameMapping.get("index"), nullValue());
assertThat(nameMapping.get("analyzer"), is("ft_search"));
// presumably injected by the test fixture's analyzer setup — TODO confirm against setup code
assertThat(analysis.tableParameter().settings().get("search"), is("foobar"));
}
// A schema-qualified table name is split into schema and name on the bound RelationName.
@Test
public void testCreateTableWithSchemaName() {
BoundCreateTable analysis =
analyze("create table something.foo (id integer primary key)");
RelationName relationName = analysis.tableIdent();
assertThat(relationName.schema(), is("something"));
assertThat(relationName.name(), is("foo"));
}
// A named fulltext INDEX column: source column gets copy_to=[content_ft]; the index column
// itself carries the default "standard" analyzer.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithIndexColumn() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, content string, INDEX content_ft using fulltext (content))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> contentMapping = (Map<String, Object>) mappingProperties.get("content");
assertThat((String) contentMapping.get("index"), isEmptyOrNullString());
assertThat(((List<String>) contentMapping.get("copy_to")).get(0), is("content_ft"));
Map<String, Object> ft_mapping = (Map<String, Object>) mappingProperties.get("content_ft");
assertThat(ft_mapping.get("index"), nullValue());
assertThat(ft_mapping.get("analyzer"), is("standard"));
}
// Same shape as the fulltext variant, but `using plain` maps to the "keyword" analyzer.
@Test
@SuppressWarnings("unchecked")
public void testCreateTableWithPlainIndexColumn() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, content string, INDEX content_ft using plain (content))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> contentMapping = (Map<String, Object>) mappingProperties.get("content");
assertThat((String) contentMapping.get("index"), isEmptyOrNullString());
assertThat(((List<String>) contentMapping.get("copy_to")).get(0), is("content_ft"));
Map<String, Object> ft_mapping = (Map<String, Object>) mappingProperties.get("content_ft");
assertThat(ft_mapping.get("index"), nullValue());
assertThat(ft_mapping.get("analyzer"), is("keyword"));
}
// INDEX source columns must be string-typed; two integer sources are rejected.
@Test
public void testCreateTableWithIndexColumnOverNonString() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("INDEX definition only support 'string' typed source columns");
analyze("create table foo (id integer, id2 integer, INDEX id_ft using fulltext (id, id2))");
}
// Even a mix of integer and string source columns is rejected — all must be strings.
@Test
public void testCreateTableWithIndexColumnOverNonString2() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("INDEX definition only support 'string' typed source columns");
analyze("create table foo (id integer, name string, INDEX id_ft using fulltext (id, name))");
}
// ALTER TABLE SET (number_of_replicas=2) updates the replicas index setting.
@Test
public void testChangeNumberOfReplicas() {
BoundAlterTable analysis =
analyze("alter table users set (number_of_replicas=2)");
assertThat(analysis.table().ident().name(), is("users"));
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey()), is("2"));
}
// RESET (number_of_replicas) restores the defaults: 0 replicas with auto-expand "0-1".
@Test
public void testResetNumberOfReplicas() {
BoundAlterTable analysis =
analyze("alter table users reset (number_of_replicas)");
assertThat(analysis.table().ident().name(), is("users"));
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey()), is("0"));
assertThat(analysis.tableParameter().settings().get(AutoExpandReplicas.SETTING.getKey()), is("0-1"));
}
// Unknown table properties ('foobar') in ALTER TABLE SET must be rejected.
@Test(expected = IllegalArgumentException.class)
public void testAlterTableWithInvalidProperty() {
analyze("alter table users set (foobar='2')");
}
// System tables (sys.shards) are read-only and must refuse ALTER operations.
@Test
public void testAlterSystemTable() {
expectedException.expect(OperationOnInaccessibleRelationException.class);
expectedException.expectMessage("The relation \"sys.shards\" doesn't support or allow ALTER " +
"operations, as it is read-only.");
analyze("alter table sys.shards reset (number_of_replicas)");
}
// Composite primary keys preserve declaration order (id, then name).
@Test
public void testCreateTableWithMultiplePrimaryKeys() {
BoundCreateTable analysis = analyze(
"create table test (id integer primary key, name string primary key)");
String[] primaryKeys = analysis.primaryKeys().toArray(new String[0]);
assertThat(primaryKeys.length, is(2));
assertThat(primaryKeys[0], is("id"));
assertThat(primaryKeys[1], is("name"));
}
// CLUSTERED BY may pick any primary-key member; the choice is recorded as _meta.routing.
@Test
public void testCreateTableWithMultiplePrimaryKeysAndClusteredBy() {
BoundCreateTable analysis = analyze(
"create table test (id integer primary key, name string primary key) " +
"clustered by(name)");
String[] primaryKeys = analysis.primaryKeys().toArray(new String[0]);
assertThat(primaryKeys.length, is(2));
assertThat(primaryKeys[0], is("id"));
assertThat(primaryKeys[1], is("name"));
//noinspection unchecked
Map<String, Object> meta = (Map) analysis.mapping().get("_meta");
assertNotNull(meta);
assertThat(meta.get("routing"), is("name"));
}
// An underscore-prefixed name (_id) IS allowed as an object sub-column,
// unlike at top level (see testCreateTableWithUnderscoreColumnPrefix).
@Test
public void testCreateTableWithObjectAndUnderscoreColumnPrefix() {
BoundCreateTable analysis = analyze("create table test (o object as (_id integer), name string)");
assertThat(analysis.analyzedTableElements().columns().size(), is(2)); // id pk column is also added
AnalyzedColumnDefinition<Object> column = analysis.analyzedTableElements().columns().get(0);
assertEquals(column.ident(), new ColumnIdent("o"));
assertThat(column.children().size(), is(1));
AnalyzedColumnDefinition<Object> xColumn = column.children().get(0);
assertEquals(xColumn.ident(), new ColumnIdent("o", Collections.singletonList("_id")));
}
// Top-level columns starting with '_' collide with system-column naming and are invalid.
@Test(expected = InvalidColumnNameException.class)
public void testCreateTableWithUnderscoreColumnPrefix() {
analyze("create table test (_id integer, name string)");
}
// Dots in unquoted column names are a parse error, not an analyzer error.
@Test(expected = ParsingException.class)
public void testCreateTableWithColumnDot() {
analyze("create table test (dot.column integer)");
}
// A quoted table name containing '.' is an invalid relation name.
@Test(expected = InvalidRelationName.class)
public void testCreateTableIllegalTableName() {
analyze("create table \"abc.def\" (id integer primary key, name string)");
}
// Relation names may not start with an underscore; the error message includes the resolved schema.
@Test
public void testTableStartWithUnderscore() {
expectedException.expect(InvalidRelationName.class);
expectedException.expectMessage("Relation name \"doc._invalid\" is invalid.");
analyze("create table _invalid (id integer primary key)");
}
// hasColumnDefinition resolves plain, nested-array, and nested-object paths,
// but NOT named index columns ("ft") nor paths below a leaf column.
@Test
public void testHasColumnDefinition() {
BoundCreateTable analysis = analyze(
"create table my_table (" +
" id integer primary key, " +
" name string, " +
" indexed string index using fulltext with (analyzer='german')," +
" arr array(object as(" +
" nested float," +
" nested_object object as (id byte)" +
" ))," +
" obj object as ( content string )," +
" index ft using fulltext(name, obj['content']) with (analyzer='standard')" +
")");
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("id")));
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("name")));
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("indexed")));
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("arr")));
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("arr.nested")));
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("arr.nested_object.id")));
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("obj")));
assertTrue(analysis.hasColumnDefinition(ColumnIdent.fromPath("obj.content")));
assertFalse(analysis.hasColumnDefinition(ColumnIdent.fromPath("arr.nested.wrong")));
assertFalse(analysis.hasColumnDefinition(ColumnIdent.fromPath("ft")));
assertFalse(analysis.hasColumnDefinition(ColumnIdent.fromPath("obj.content.ft")));
}
// A geo_point column maps to mapping type "geo_point".
@Test
public void testCreateTableWithGeoPoint() {
BoundCreateTable analyze = analyze(
"create table geo_point_table (\n" +
" id integer primary key,\n" +
" my_point geo_point\n" +
")\n");
// Typed cast instead of the previous raw `Map` (raw types defeat generics checks).
Map<String, Object> my_point = (Map<String, Object>) analyze.mappingProperties().get("my_point");
assertEquals("geo_point", my_point.get("type"));
}
// CLUSTERED INTO 0 SHARDS is invalid for regular tables and fails at analysis time.
@Test(expected = IllegalArgumentException.class)
public void testClusteredIntoZeroShards() {
analyze("create table my_table (" +
" id integer," +
" name string" +
") clustered into 0 shards");
}
// For blob tables the zero-shard check fires later, when settings are built in the plan
// phase — analysis itself succeeds.
@Test
public void testBlobTableClusteredIntoZeroShards() {
AnalyzedCreateBlobTable blobTable = analyze("create blob table my_table clustered into 0 shards");
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("num_shards in CLUSTERED clause must be greater than 0");
CreateBlobTablePlan.buildSettings(
blobTable.createBlobTable(),
plannerContext.transactionContext(),
plannerContext.nodeContext(),
new RowN(new Object[0]),
SubQueryResults.EMPTY,
new NumberOfShards(clusterService));
}
// A table-level PRIMARY KEY constraint may appear before the columns it references.
@Test
public void testEarlyPrimaryKeyConstraint() {
BoundCreateTable analysis = analyze(
"create table my_table (" +
"primary key (id1, id2)," +
"id1 integer," +
"id2 long" +
")");
assertThat(analysis.primaryKeys().size(), is(2));
assertThat(analysis.primaryKeys(), hasItems("id1", "id2"));
}
// A PRIMARY KEY constraint naming columns that are never declared must fail.
@Test(expected = ColumnUnknownException.class)
public void testPrimaryKeyConstraintNonExistingColumns() {
analyze("create table my_table (" +
"primary key (id1, id2)," +
"title string," +
"name string" +
")");
}
// An INDEX definition may precede the referenced columns; it is recorded in _meta.indices
// and each source column gains copy_to=[ft].
@SuppressWarnings("unchecked")
@Test
public void testEarlyIndexDefinition() {
BoundCreateTable analysis = analyze(
"create table my_table (" +
"index ft using fulltext(title, name) with (analyzer='snowball')," +
"title string," +
"name string" +
")");
Map<String, Object> metaMap = (Map) analysis.mapping().get("_meta");
assertThat(
metaMap.get("indices").toString(),
is("{ft={}}"));
assertThat(
(List<String>) ((Map<String, Object>) analysis.mappingProperties()
.get("title")).get("copy_to"),
hasItem("ft")
);
assertThat(
(List<String>) ((Map<String, Object>) analysis.mappingProperties()
.get("name")).get("copy_to"),
hasItem("ft"));
}
// An INDEX definition referencing undeclared columns must fail.
@Test(expected = ColumnUnknownException.class)
public void testIndexDefinitionNonExistingColumns() {
analyze("create table my_table (" +
"index ft using fulltext(id1, id2) with (analyzer='snowball')," +
"title string," +
"name string" +
")");
}
// A fulltext analyzer cannot be declared on a non-string column.
@Test(expected = IllegalArgumentException.class)
public void testAnalyzerOnInvalidType() {
analyze("create table my_table (x integer INDEX using fulltext with (analyzer='snowball'))");
}
// number_of_replicas must be >= 0; -1 is rejected with the setting-parser's message.
@Test
public void createTableNegativeReplicas() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("Failed to parse value [-1] for setting [number_of_replicas] must be >= 0");
analyze("create table t (id int, name string) with (number_of_replicas=-1)");
}
// Declaring the same column name twice (even with different types) must fail.
@Test(expected = IllegalArgumentException.class)
public void testCreateTableSameColumn() {
analyze("create table my_table (title string, title integer)");
}
// Array-typed columns cannot serve as primary keys.
@Test(expected = UnsupportedOperationException.class)
public void testCreateTableWithArrayPrimaryKeyUnsupported() {
analyze("create table t (id array(int) primary key)");
}
// The shard count in CLUSTERED INTO ? SHARDS may come from a bind parameter.
@Test
public void testCreateTableWithClusteredIntoShardsParameter() {
BoundCreateTable analysis = analyze(
"create table t (id int primary key) clustered into ? shards", 2);
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey()), is("2"));
}
// A non-numeric bind parameter for the shard count is rejected.
@Test
public void testCreateTableWithClusteredIntoShardsParameterNonNumeric() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("invalid number 'foo'");
analyze("create table t (id int primary key) clustered into ? shards", "foo");
}
// A column may not appear in both PARTITIONED BY and CLUSTERED BY.
// Renamed from ...WithParitionedColumn... — spelling fix only; JUnit discovers it via @Test.
@Test
public void testCreateTableWithPartitionedColumnInClusteredBy() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("Cannot use CLUSTERED BY column in PARTITIONED BY clause");
analyze("create table t(id int primary key) partitioned by (id) clustered by (id)");
}
// Without an explicit schema, the table lands in the session search path's current schema.
@Test
public void testCreateTableUsesDefaultSchema() {
SQLExecutor sqlExecutor = SQLExecutor.builder(clusterService, 1, Randomness.get(), List.of())
.setSearchPath("firstSchema", "secondSchema")
.build();
BoundCreateTable analysis = analyze(sqlExecutor, "create table t (id int)");
assertThat(analysis.tableIdent().schema(), is(sqlExecutor.getSessionContext().searchPath().currentSchema()));
}
// An empty quoted schema name ("") is invalid.
@Test
public void testCreateTableWithEmptySchema() {
expectedException.expect(InvalidSchemaNameException.class);
expectedException.expectMessage("schema name \"\" is invalid.");
analyze("create table \"\".my_table (" +
"id long primary key" +
")");
}
// A quoted schema name containing a dot ("with.") is invalid.
@Test
public void testCreateTableWithIllegalSchema() {
expectedException.expect(InvalidSchemaNameException.class);
expectedException.expectMessage("schema name \"with.\" is invalid.");
analyze("create table \"with.\".my_table (" +
"id long primary key" +
")");
}
// Even when quoted, "_test" conflicts with the system-column name pattern.
@Test
public void testCreateTableWithInvalidColumnName() {
expectedException.expect(InvalidColumnNameException.class);
expectedException.expectMessage(
"\"_test\" conflicts with system column pattern");
analyze("create table my_table (\"_test\" string)");
}
// Creating a table that already exists in the cluster fixture ("users") must fail.
@Test
public void testCreateTableShouldRaiseErrorIfItExists() {
expectedException.expect(RelationAlreadyExists.class);
analyze("create table users (\"'test\" string)");
}
// An explicit schema in the statement wins over the session's search-path schema.
@Test
public void testExplicitSchemaHasPrecedenceOverDefaultSchema() {
SQLExecutor e = SQLExecutor.builder(clusterService).setSearchPath("hoschi").build();
BoundCreateTable statement = analyze(e, "create table foo.bar (x string)");
// schema from statement must take precedence
assertThat(statement.tableIdent().schema(), is("foo"));
}
// With no explicit schema, the search-path schema ("hoschi") is attached to the table ident.
@Test
public void testDefaultSchemaIsAddedToTableIdentIfNoExplicitSchemaExistsInTheStatement() {
SQLExecutor e = SQLExecutor.builder(clusterService).setSearchPath("hoschi").build();
BoundCreateTable statement = analyze(e, "create table bar (x string)");
assertThat(statement.tableIdent().schema(), is("hoschi"));
}
// ALTER TABLE SET ("blocks.read"=true) maps to the read-block index setting.
@Test
public void testChangeReadBlock() {
BoundAlterTable analysis =
analyze("alter table users set (\"blocks.read\"=true)");
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_BLOCKS_READ_SETTING.getKey()), is("true"));
}
// ALTER TABLE SET ("blocks.write"=true) maps to the write-block index setting.
@Test
public void testChangeWriteBlock() {
BoundAlterTable analysis =
analyze("alter table users set (\"blocks.write\"=true)");
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey()), is("true"));
}
// ALTER TABLE SET ("blocks.metadata"=true) maps to the metadata-block index setting.
@Test
public void testChangeMetadataBlock() {
BoundAlterTable analysis =
analyze("alter table users set (\"blocks.metadata\"=true)");
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_BLOCKS_METADATA_SETTING.getKey()), is("true"));
}
// ALTER TABLE SET ("blocks.read_only"=true) maps to the read-only index setting.
@Test
public void testChangeReadOnlyBlock() {
BoundAlterTable analysis =
analyze("alter table users set (\"blocks.read_only\"=true)");
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_READ_ONLY_SETTING.getKey()), is("true"));
}
// ALTER TABLE SET ("blocks.read_only_allow_delete"=true) maps to the corresponding block setting.
@Test
public void testChangeBlockReadOnlyAllowDelete() {
BoundAlterTable analysis =
analyze("alter table users set (\"blocks.read_only_allow_delete\"=true)");
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING.getKey()), is("true"));
}
// The read_only_allow_delete block is also settable on a partitioned table ("parted" fixture).
@Test
public void testChangeBlockReadOnlyAllowedDeletePartitionedTable() {
BoundAlterTable analysis =
analyze("alter table parted set (\"blocks.read_only_allow_delete\"=true)");
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_BLOCKS_READ_ONLY_ALLOW_DELETE_SETTING.getKey()), is("true"));
}
// translog.flush_threshold_size accepts byte-size values ('300b').
@Test
public void testChangeFlushThresholdSize() {
BoundAlterTable analysis =
analyze("alter table users set (\"translog.flush_threshold_size\"='300b')");
assertThat(analysis.tableParameter().settings().get(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey()), is("300b"));
}
// translog.sync_interval accepts time values ('100ms'). See also testTranslogSyncInterval.
@Test
public void testChangeTranslogInterval() {
BoundAlterTable analysis =
analyze("alter table users set (\"translog.sync_interval\"='100ms')");
assertThat(analysis.tableParameter().settings().get(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey()), is("100ms"));
}
// translog.durability passes through verbatim (case preserved: 'ASYNC').
@Test
public void testChangeTranslogDurability() {
BoundAlterTable analysis =
analyze("alter table users set (\"translog.durability\"='ASYNC')");
assertThat(analysis.tableParameter().settings().get(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey()), is("ASYNC"));
}
// routing.allocation.enable accepts the allowed enum value "none".
@Test
public void testRoutingAllocationEnable() {
BoundAlterTable analysis =
analyze("alter table users set (\"routing.allocation.enable\"=\"none\")");
assertThat(analysis.tableParameter().settings().get(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey()), is("none"));
}
// An unknown value ("foo") for routing.allocation.enable is rejected.
@Test
public void testRoutingAllocationValidation() {
expectedException.expect(IllegalArgumentException.class);
analyze("alter table users set (\"routing.allocation.enable\"=\"foo\")");
}
// number_of_shards can be changed via ALTER TABLE SET.
@Test
public void testAlterTableSetShards() {
BoundAlterTable analysis =
analyze("alter table users set (\"number_of_shards\"=1)");
assertThat(analysis.table().ident().name(), is("users"));
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey()), is("1"));
}
// RESET (number_of_shards) restores the default shard count ("5").
@Test
public void testAlterTableResetShards() {
BoundAlterTable analysis =
analyze("alter table users reset (\"number_of_shards\")");
assertThat(analysis.table().ident().name(), is("users"));
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey()), is("5"));
}
// translog.sync_interval with a seconds value ('1s'); overlaps with testChangeTranslogInterval.
@Test
public void testTranslogSyncInterval() {
BoundAlterTable analysis =
analyze("alter table users set (\"translog.sync_interval\"='1s')");
assertThat(analysis.table().ident().name(), is("users"));
assertThat(analysis.tableParameter().settings().get(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey()), is("1s"));
}
// allocation.max_retries maps to the MaxRetryAllocationDecider setting.
@Test
public void testAllocationMaxRetriesValidation() {
BoundAlterTable analysis =
analyze("alter table users set (\"allocation.max_retries\"=1)");
assertThat(analysis.tableParameter().settings().get(MaxRetryAllocationDecider.SETTING_ALLOCATION_MAX_RETRY.getKey()), is("1"));
}
// blocks.read_only can be set directly in CREATE TABLE's WITH clause.
@Test
public void testCreateReadOnlyTable() {
BoundCreateTable analysis = analyze(
"create table foo (id integer primary key, name string) "
+ "clustered into 3 shards with (\"blocks.read_only\"=true)");
assertThat(analysis.tableParameter().settings().get(IndexMetadata.INDEX_READ_ONLY_SETTING.getKey()), is("true"));
}
// A generated column: its expression is stored under _meta.generated_columns and the
// column itself is mapped with the expression's result type (date).
@SuppressWarnings("unchecked")
@Test
public void testCreateTableWithGeneratedColumn() {
BoundCreateTable analysis = analyze(
"create table foo (" +
" ts timestamp with time zone," +
" day as date_trunc('day', ts))");
Map<String, Object> metaMapping = ((Map) analysis.mapping().get("_meta"));
Map<String, String> generatedColumnsMapping = (Map<String, String>) metaMapping.get("generated_columns");
assertThat(generatedColumnsMapping.size(), is(1));
assertThat(generatedColumnsMapping.get("day"), is("date_trunc('day', ts)"));
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> dayMapping = (Map<String, Object>) mappingProperties.get("day");
assertThat(dayMapping.get("type"), is("date"));
Map<String, Object> tsMapping = (Map<String, Object>) mappingProperties.get("ts");
assertThat(tsMapping.get("type"), is("date"));
}
// A generated expression producing an array maps the column as array-of-integer.
@Test
public void testCreateTableWithColumnOfArrayTypeAndGeneratedExpression() {
BoundCreateTable analysis = analyze(
"create table foo (arr array(integer) as ([1.0, 2.0]))");
assertThat(
mapToSortedString(analysis.mappingProperties()),
is("arr={inner={position=1, type=integer}, type=array}"));
}
// The stored generated expression includes the implicit cast ("(ts + 1::bigint)") while the
// declared column type (timestamp) maps to "date".
@SuppressWarnings("unchecked")
@Test
public void testCreateTableGeneratedColumnWithCast() {
BoundCreateTable analysis = analyze(
"create table foo (" +
" ts timestamp with time zone," +
" day timestamp with time zone GENERATED ALWAYS as ts + 1)");
Map<String, Object> metaMapping = ((Map) analysis.mapping().get("_meta"));
Map<String, String> generatedColumnsMapping = (Map<String, String>) metaMapping.get("generated_columns");
assertThat(
generatedColumnsMapping.get("day"),
is("(ts + 1::bigint)"));
Map<String, Object> mappingProperties = analysis.mappingProperties();
Map<String, Object> dayMapping = (Map<String, Object>) mappingProperties.get("day");
assertThat(dayMapping.get("type"), is("date"));
}
// current_timestamp in a generated column must be stored symbolically, not evaluated at
// analysis time (regression guard — see inline comment below).
@SuppressWarnings("unchecked")
@Test
public void testCreateTableWithCurrentTimestampAsGeneratedColumnIsntNormalized() {
BoundCreateTable analysis = analyze(
"create table foo (ts timestamp with time zone GENERATED ALWAYS as current_timestamp(3))");
Map<String, Object> metaMapping = ((Map) analysis.mapping().get("_meta"));
Map<String, String> generatedColumnsMapping = (Map<String, String>) metaMapping.get("generated_columns");
assertThat(generatedColumnsMapping.size(), is(1));
// current_timestamp used to get evaluated and then this contained the actual timestamp instead of the function name
assertThat(generatedColumnsMapping.get("ts"), is("current_timestamp(3)"));
}
// Generated expressions may use object subscripts; the stored form keeps the quoted path.
@SuppressWarnings("unchecked")
@Test
public void testCreateTableGeneratedColumnWithSubscript() {
BoundCreateTable analysis = analyze(
"create table foo (\"user\" object as (name string), name as concat(\"user\"['name'], 'foo'))");
Map<String, Object> metaMapping = ((Map) analysis.mapping().get("_meta"));
Map<String, String> generatedColumnsMapping = (Map<String, String>) metaMapping.get("generated_columns");
assertThat(generatedColumnsMapping.get("name"), is("concat(\"user\"['name'], 'foo')"));
}
// Bind parameters inside a generated expression are inlined as literals in the stored form.
@SuppressWarnings("unchecked")
@Test
public void testCreateTableGeneratedColumnParameter() {
BoundCreateTable analysis = analyze(
"create table foo (\"user\" object as (name string), name as concat(\"user\"['name'], ?))", $("foo"));
Map<String, Object> metaMapping = ((Map) analysis.mapping().get("_meta"));
Map<String, String> generatedColumnsMapping = (Map<String, String>) metaMapping.get("generated_columns");
assertThat(generatedColumnsMapping.get("name"), is("concat(\"user\"['name'], 'foo')"));
}
// A generated expression whose result type cannot convert to the declared type (timestamp -> ip) fails.
@Test
public void testCreateTableGeneratedColumnWithInvalidType() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("expression value type" +
" 'timestamp with time zone' not supported for conversion to 'ip'");
analyze(
"create table foo (" +
" ts timestamp with time zone," +
" day ip GENERATED ALWAYS as date_trunc('day', ts))");
}
// MATCH predicates are not allowed inside generated-column expressions.
@Test
public void testCreateTableGeneratedColumnWithMatch() {
expectedException.expect(UnsupportedFeatureException.class);
expectedException.expectMessage("Cannot use MATCH in CREATE TABLE statements");
analyze("create table foo (name string, bar as match(name, 'crate'))");
}
// Chaining generated columns (date_string derived from generated `day`) is rejected.
@Test
public void testCreateTableGeneratedColumnBasedOnGeneratedColumn() {
expectedException.expect(IllegalArgumentException.class);
expectedException.expectMessage("A generated column cannot be based on a generated column");
analyze(
"create table foo (" +
" ts timestamp with time zone," +
" day as date_trunc('day', ts)," +
" date_string as cast(day as string))");
}
// A generated expression referencing an undeclared column fails with ColumnUnknownException.
@Test
public void testCreateTableGeneratedColumnBasedOnUnknownColumn() {
expectedException.expect(ColumnUnknownException.class);
expectedException.expectMessage("Column unknown_col unknown");
analyze(
"create table foo (" +
" ts timestamp with time zone," +
" day as date_trunc('day', ts)," +
" date_string as cast(unknown_col as string))");
}
// A literal DEFAULT is stored under default_expr in the column mapping.
@Test
public void testCreateTableWithDefaultExpressionLiteral() {
BoundCreateTable analysis = analyze(
"create table foo (name text default 'bar')");
Map<String, Object> mappingProperties = analysis.mappingProperties();
assertThat(mapToSortedString(mappingProperties),
is("name={default_expr='bar', position=1, type=keyword}"));
}
// A deterministic function DEFAULT (upper('bar')) is evaluated at analysis time to 'BAR'.
@Test
public void testCreateTableWithDefaultExpressionFunction() {
BoundCreateTable analysis = analyze(
"create table foo (name text default upper('bar'))");
Map<String, Object> mappingProperties = analysis.mappingProperties();
assertThat(mapToSortedString(mappingProperties),
is("name={default_expr='BAR', position=1, type=keyword}"));
}
// A DEFAULT of mismatched type keeps an explicit _cast to the column type in default_expr.
@Test
public void testCreateTableWithDefaultExpressionWithCast() {
BoundCreateTable analysis = analyze(
"create table foo (id int default 3.5)");
Map<String, Object> mappingProperties = analysis.mappingProperties();
assertThat(mapToSortedString(mappingProperties),
is("id={default_expr=_cast(3.5, 'integer'), position=1, type=integer}"));
}
@Test
public void testCreateTableWithDefaultExpressionIsNotNormalized() {
    // current_timestamp(3) must stay unevaluated in the mapping so it is
    // recomputed at insert time rather than frozen at table creation.
    BoundCreateTable bound = analyze(
        "create table foo (ts timestamp with time zone default current_timestamp(3))");
    assertThat(
        mapToSortedString(bound.mappingProperties()),
        is("ts={default_expr=current_timestamp(3), "
           + "format=epoch_millis||strict_date_optional_time, "
           + "position=1, type=date}"));
}
@Test
public void testCreateTableWithDefaultExpressionAsCompoundTypes() {
    // Object defaults land in `default_expr`; the array column's mapping
    // carries no default_expr entry.
    String stmt = "create table foo ("
                  + " obj object as (key text) default {key=''},"
                  + " arr array(long) default [1, 2])";
    BoundCreateTable bound = analyze(stmt);
    assertThat(mapToSortedString(bound.mappingProperties()), is(
        "arr={inner={position=2, type=long}, type=array}, "
        + "obj={default_expr={\"key\"=''}, dynamic=true, position=1, properties={key={type=keyword}}, type=object}"));
}
@Test
public void testCreateTableWithDefaultExpressionAsGeoTypes() {
    // Geo defaults are wrapped in an implicit cast to the target geo type.
    String stmt = "create table foo ("
                  + " p geo_point default [0,0],"
                  + " s geo_shape default 'LINESTRING (0 0, 1 1)')";
    BoundCreateTable bound = analyze(stmt);
    assertThat(mapToSortedString(bound.mappingProperties()), is(
        "p={default_expr=_cast([0, 0], 'geo_point'), position=1, type=geo_point}, "
        + "s={default_expr=_cast('LINESTRING (0 0, 1 1)', 'geo_shape'), position=2, type=geo_shape}"));
}
@Test
public void testCreateTableWithDefaultExpressionRefToColumnsNotAllowed() {
    // Default expressions must be self-contained; they cannot read other columns.
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage(
        "Columns cannot be used in this context. Maybe you wanted to use a string literal which requires single quotes: 'name'");
    analyze("create table foo (name text, name_def text default upper(name))");
}
@Test
public void testCreateTableWithObjectAsPrimaryKey() {
    // Compound object columns cannot serve as a primary key.
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Cannot use columns of type \"object\" as primary key");
    analyze("create table t (obj object as (x int) primary key)");
}
@Test
public void testCreateTableWithGeoPointAsPrimaryKey() {
    // geo_point columns cannot serve as a primary key.
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Cannot use columns of type \"geo_point\" as primary key");
    analyze("create table t (c geo_point primary key)");
}
@Test
public void testCreateTableWithGeoShapeAsPrimaryKey() {
    // geo_shape columns cannot serve as a primary key.
    expectedException.expect(UnsupportedOperationException.class);
    expectedException.expectMessage("Cannot use columns of type \"geo_shape\" as primary key");
    analyze("create table t (c geo_shape primary key)");
}
@Test
public void testCreateTableWithDuplicatePrimaryKey() {
    // The same column may not appear twice in a primary key, whether referenced
    // directly, via subscript, or via a mix of inline and table-level constraints.
    String[] statements = {
        "create table t (id int, primary key (id, id))",
        "create table t (obj object as (id int), primary key (obj['id'], obj['id']))",
        "create table t (id int primary key, primary key (id))",
        "create table t (obj object as (id int primary key), primary key (obj['id']))",
    };
    for (String stmt : statements) {
        assertDuplicatePrimaryKey(stmt);
    }
}
/**
 * Asserts that analyzing {@code stmt} fails with an {@link IllegalArgumentException}
 * whose message reports a duplicate primary key column.
 *
 * <p>Improvement: on a message mismatch the failure now includes the ACTUAL
 * exception message, which the previous version omitted, making failures
 * impossible to diagnose from the report alone.
 */
private void assertDuplicatePrimaryKey(String stmt) {
    try {
        analyze(stmt);
    } catch (IllegalArgumentException e) {
        String expectedFragment = "appears twice in primary key constraint";
        if (!e.getMessage().contains(expectedFragment)) {
            fail(String.format(Locale.ENGLISH,
                               "Exception message is expected to contain: '%s' but was: '%s'",
                               expectedFragment, e.getMessage()));
        }
        return;
    }
    // analyze() completed without throwing at all.
    fail(String.format(Locale.ENGLISH,
                       "Statement '%s' did not result in duplicate primary key exception", stmt));
}
@Test
public void testAlterTableAddColumnWithCheckConstraint() throws Exception {
    // Adding a column with a CHECK must keep the table's pre-existing check
    // constraints in the mapping and add the new one alongside them.
    SQLExecutor.builder(clusterService)
        .addTable("create table t (" +
                  " id int primary key, " +
                  " qty int constraint check_qty_gt_zero check(qty > 0), " +
                  " constraint check_id_ge_zero check (id >= 0)" +
                  ")")
        .build();
    BoundAddColumn analysis = analyze(
        "alter table t add column bazinga int constraint bazinga_check check(bazinga != 42)");
    Map<String, Object> mapping = analysis.mapping();
    Map<String, String> checkConstraints = analysis.analyzedTableElements().getCheckConstraints();
    for (String constraint : new String[]{"check_id_ge_zero", "check_qty_gt_zero", "bazinga_check"}) {
        assertEquals(checkConstraints.get(constraint),
                     Maps.getByPath(mapping, Arrays.asList("_meta", "check_constraints", constraint)));
    }
}
@Test
public void testCreateTableWithPrimaryKeyConstraintInArrayItem() {
    // A primary key cannot live inside an array element object.
    expectedException.expectMessage("Cannot use column \"id\" as primary key within an array object");
    expectedException.expect(UnsupportedOperationException.class);
    analyze("create table test (arr array(object as (id long primary key)))");
}
@Test
public void testCreateTableWithDeepNestedPrimaryKeyConstraintInArrayItem() {
    // The array restriction applies no matter how deeply the PK column is nested.
    expectedException.expectMessage("Cannot use column \"name\" as primary key within an array object");
    expectedException.expect(UnsupportedOperationException.class);
    analyze("create table test (arr array(object as (\"user\" object as (name string primary key), id long)))");
}
@Test
public void testCreateTableWithInvalidIndexConstraint() {
    // INDEX OFF makes no sense for object columns and must be rejected.
    expectedException.expectMessage("INDEX constraint cannot be used on columns of type \"object\"");
    expectedException.expect(IllegalArgumentException.class);
    analyze("create table test (obj object index off)");
}
@Test
public void testCreateTableWithColumnStoreDisabled() {
    // Disabling the column store maps to `doc_values=false` for the field.
    BoundCreateTable bound = analyze(
        "create table columnstore_disabled (s string STORAGE WITH (columnstore = false))");
    assertThat(mapToSortedString(bound.mappingProperties()),
               is("s={doc_values=false, position=1, type=keyword}"));
}
@Test
public void testCreateTableWithColumnStoreDisabledOnInvalidDataType() {
    // The columnstore storage option is only valid for types that support it.
    expectedException.expectMessage("Invalid storage option \"columnstore\" for data type \"integer\"");
    expectedException.expect(IllegalArgumentException.class);
    analyze("create table columnstore_disabled (s int STORAGE WITH (columnstore = false))");
}
@Test
public void testCreateTableFailsIfNameConflictsWithView() {
    // Tables and views share a relation namespace; the conflicting name must fail.
    SQLExecutor executor = SQLExecutor.builder(clusterService)
        .addView(RelationName.fromIndexName("v1"), "Select * from t1")
        .build();
    expectedException.expectMessage("Relation 'doc.v1' already exists");
    expectedException.expect(RelationAlreadyExists.class);
    analyze(executor, "create table v1 (x int) clustered into 1 shards with (number_of_replicas = 0)");
}
@Test
public void testGeneratedColumnInsideObjectIsProcessed() {
    // A generated expression nested inside an object is constant-folded (1 + 1 -> 2)
    // and recorded under _meta.generated_columns with its dotted path.
    BoundCreateTable stmt = analyze("create table t (obj object as (c as 1 + 1))");
    AnalyzedColumnDefinition<Object> objectColumn = stmt.analyzedTableElements().columns().get(0);
    AnalyzedColumnDefinition<?> child = objectColumn.children().get(0);
    assertThat(child.dataType(), is(DataTypes.INTEGER));
    assertThat(child.formattedGeneratedExpression(), is("2"));
    assertThat(AnalyzedTableElements.toMapping(stmt.analyzedTableElements()).toString(),
               is("{_meta={generated_columns={obj.c=2}}, "
                  + "properties={obj={dynamic=true, position=1, type=object, properties={c={type=integer}}}}}"));
}
@Test
public void testNumberOfRoutingShardsCanBeSetAtCreateTable() {
    // The number_of_routing_shards table option is forwarded to the index settings.
    BoundCreateTable stmt = analyze("create table t (x int) with (number_of_routing_shards = 10)");
    String value = stmt.tableParameter().settings().get("index.number_of_routing_shards");
    assertThat(value, is("10"));
}
@Test
public void testNumberOfRoutingShardsCanBeSetAtCreateTableForPartitionedTables() {
    // The same option must also work when the table is partitioned.
    BoundCreateTable stmt = analyze("create table t (p int, x int) partitioned by (p) "
                                    + "with (number_of_routing_shards = 10)");
    String value = stmt.tableParameter().settings().get("index.number_of_routing_shards");
    assertThat(value, is("10"));
}
@Test
public void testAlterTableSetDynamicSetting() {
    // A dynamic routing-allocation setting can be set via ALTER TABLE SET.
    String settingKey = INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "foo";
    BoundAlterTable analysis =
        analyze("alter table users set (\"routing.allocation.exclude.foo\"='bar')");
    assertThat(analysis.tableParameter().settings().get(settingKey), is("bar"));
}
@Test
public void testAlterTableResetDynamicSetting() {
    // RESET removes the dynamic setting entirely.
    String settingKey = INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "foo";
    BoundAlterTable analysis =
        analyze("alter table users reset (\"routing.allocation.exclude.foo\")");
    assertThat(analysis.tableParameter().settings().get(settingKey), nullValue());
}
@Test
public void testCreateTableWithIntervalFails() {
    // The interval type cannot be used for stored columns.
    expectedException.expectMessage("Cannot use the type `interval` for column: i");
    expectedException.expect(IllegalArgumentException.class);
    analyze("create table test (i interval)");
}
@Test
public void test_character_varying_type_can_be_used_in_create_table() throws Exception {
    // `character varying` without a length maps to a plain keyword column.
    BoundCreateTable stmt = analyze("create table tbl (name character varying)");
    assertThat(mapToSortedString(stmt.mappingProperties()),
               is("name={position=1, type=keyword}"));
}
@Test
public void test_create_table_with_varchar_column_of_limited_length() {
    // A bounded varchar carries its bound as `length_limit` in the mapping.
    BoundCreateTable stmt = analyze("CREATE TABLE tbl (name character varying(2))");
    assertThat(mapToSortedString(stmt.mappingProperties()),
               is("name={length_limit=2, position=1, type=keyword}"));
}
@Test
public void test_create_table_with_varchar_column_of_limited_length_with_analyzer_throws_exception() {
    // Analyzers are only valid on unbounded text columns, not length-limited varchar.
    expectedException.expectMessage(
        "Can't use an Analyzer on column name because analyzers are only allowed on columns " +
        "of type \"" + DataTypes.STRING.getName() + "\" of the unbound length limit.");
    expectedException.expect(IllegalArgumentException.class);
    analyze("CREATE TABLE tbl (name varchar(2) INDEX using fulltext WITH (analyzer='german'))");
}
@Test
public void test_oidvector_cannot_be_used_in_create_table() throws Exception {
    // `oidvector` is not usable for user-defined columns.
    expectedException.expectMessage("Cannot use the type `oidvector` for column: x");
    analyze("CREATE TABLE tbl (x oidvector)");
}
@Test
public void test_generated_column_arguments_are_detected_as_array_and_validation_fails_with_missing_overload() throws Exception {
    // `xs` must be resolved as integer_array, so function lookup for max(xs)
    // reports the array argument type in the "no overload" error.
    Exception thrown = Assertions.assertThrows(
        Exception.class,
        () -> analyze("CREATE TABLE tbl (xs int[], x as max(xs))"));
    assertThat(thrown.getMessage(),
               Matchers.startsWith("Unknown function: max(doc.tbl.xs), no overload found for matching argument types: (integer_array)"));
}
@Test
public void test_prohibit_using_aggregations_in_generated_columns() throws Exception {
    // Aggregations have no meaning for a single row's generated value.
    Exception thrown = Assertions.assertThrows(
        Exception.class,
        () -> analyze("CREATE TABLE tbl (x int, y as max(x))"));
    assertThat(thrown.getMessage(),
               Matchers.startsWith("Aggregation functions are not allowed in generated columns: max(x)"));
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.rt.execution.junit;
import junit.framework.ComparisonFailure;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
/**
 * Captures the expected/actual sides of a failed comparison assertion (JUnit 3/4
 * {@code ComparisonFailure}, opentest4j {@code AssertionFailedError}, or IntelliJ's
 * {@code FileComparisonFailure}) and renders them as service-message attributes
 * understood by the IDE test runner.
 *
 * <p>Raw (non-generic) collections and reflection are used deliberately: this class
 * is loaded into the user's test JVM, which may be very old and may be missing any
 * of the supported assertion libraries.
 */
public class ComparisonFailureData {
  private static final String ASSERTION_CLASS_NAME = "java.lang.AssertionError";
  private static final String ASSERTION_FAILED_CLASS_NAME = "junit.framework.AssertionFailedError";
  public static final String OPENTEST4J_ASSERTION = "org.opentest4j.AssertionFailedError";
  private final String myExpected;
  private final String myActual;
  private final String myFilePath;
  private final String myActualFilePath;
  // Cached accessors for the private fExpected/fActual fields of the two known
  // ComparisonFailure classes, keyed by exception class.
  private static final Map EXPECTED = new HashMap();
  private static final Map ACTUAL = new HashMap();
  static {
    try {
      init(ComparisonFailure.class);
      init(org.junit.ComparisonFailure.class);
    }
    catch (Throwable e) {
      // Either JUnit flavor may be absent from the classpath; get() falls back
      // to a per-class field lookup in that case.
    }
  }

  /** Caches reflective access to {@code fExpected}/{@code fActual} of {@code exceptionClass}. */
  private static void init(Class exceptionClass) throws NoSuchFieldException {
    final Field expectedField = exceptionClass.getDeclaredField("fExpected");
    expectedField.setAccessible(true);
    EXPECTED.put(exceptionClass, expectedField);
    final Field actualField = exceptionClass.getDeclaredField("fActual");
    actualField.setAccessible(true);
    ACTUAL.put(exceptionClass, actualField);
  }

  public ComparisonFailureData(String expected, String actual) {
    this(expected, actual, null);
  }

  public ComparisonFailureData(String expected, String actual, String filePath) {
    this(expected, actual, filePath, null);
  }

  public ComparisonFailureData(String expected, String actual, String filePath, String actualFilePath) {
    myExpected = expected;
    myActual = actual;
    myFilePath = filePath != null ? new File(filePath).getAbsolutePath() : null;
    myActualFilePath = actualFilePath != null ? new File(actualFilePath).getAbsolutePath() : null;
  }

  public static void registerSMAttributes(ComparisonFailureData notification,
                                          String trace,
                                          String failureMessage,
                                          Map attrs,
                                          Throwable throwable) {
    registerSMAttributes(notification, trace, failureMessage, attrs, throwable, "Comparison Failure: ", "expected:<");
  }

  /**
   * Populates {@code attrs} with the service-message attributes ({@code message},
   * {@code details}, {@code expected}/{@code actual} or their {@code *File}
   * variants) describing the failure.
   *
   * @param notification parsed comparison data, or {@code null} for a plain failure
   * @param trace        the full stack trace text of the failure
   * @param failureMessage the throwable's message, possibly {@code null}
   * @param attrs        mutable attribute map to fill in
   * @param throwable    the original failure, used to decide error-vs-assertion
   * @param comparisonFailurePrefix fallback message prefix when no prefix can be
   *                     derived from the trace
   * @param expectedPrefix marker that introduces the expected value inside the trace
   */
  public static void registerSMAttributes(ComparisonFailureData notification, String trace,
                                          String failureMessage,
                                          Map attrs,
                                          Throwable throwable,
                                          String comparisonFailurePrefix,
                                          final String expectedPrefix) {
    final int failureIdx = failureMessage != null ? trace.indexOf(failureMessage) : -1;
    final int failureMessageLength = failureMessage != null ? failureMessage.length() : 0;
    // Everything after the failure message is treated as stack-trace details.
    String details = failureIdx > -1 ? trace.substring(failureIdx + failureMessageLength) : trace;
    if (notification != null) {
      final int expectedIdx = trace.indexOf(expectedPrefix);
      final String comparisonFailureMessage;
      if (expectedIdx > 0) {
        comparisonFailureMessage = trace.substring(0, expectedIdx);
      }
      else if (failureIdx > -1) {
        comparisonFailureMessage = trace.substring(0, failureIdx + failureMessageLength);
      }
      else {
        comparisonFailureMessage = (failureMessageLength > 0 ? failureMessage + "\n" : "") + comparisonFailurePrefix;
      }
      if (!attrs.containsKey("message")) {
        attrs.put("message", comparisonFailureMessage);
      }
      final String filePath = notification.getFilePath();
      final String actualFilePath = notification.getActualFilePath();
      final String expected = notification.getExpected();
      final String actual = notification.getActual();
      // Estimate of the total inline payload; used by writeDiffSide to decide
      // whether a side must be spilled to a temp file.
      int fullLength = (filePath == null && expected != null ? expected.length() : 0) +
                       (actualFilePath == null && actual != null ? actual.length() : 0) +
                       details.length() +
                       comparisonFailureMessage.length() + 100;
      if (filePath != null) {
        attrs.put("expectedFile", filePath);
      }
      else {
        writeDiffSide(attrs, "expected", expected, fullLength);
      }
      if (actualFilePath != null) {
        attrs.put("actualFile", actualFilePath);
      }
      else {
        writeDiffSide(attrs, "actual", actual, fullLength);
      }
    }
    else {
      Throwable throwableCause = null;
      try {
        throwableCause = throwable.getCause();
      }
      catch (Throwable ignored) {}
      // Non-assertion throwables are reported as errors rather than failures.
      if (!isAssertionError(throwable.getClass()) && !isAssertionError(throwableCause != null ? throwableCause.getClass() : null)) {
        attrs.put("error", "true");
      }
      attrs.put("message", failureIdx > -1 ? trace.substring(0, failureIdx + failureMessageLength)
                                           : failureMessage != null ? failureMessage : "");
    }
    attrs.put("details", details);
  }

  /**
   * Writes one diff side either inline into {@code attrs} or, when the combined
   * payload would overflow the IDE's cyclic output buffer
   * ({@code idea.test.cyclic.buffer.size}), into a temp file referenced by the
   * {@code <side>File}/{@code <side>IsTempFile} attributes.
   */
  private static void writeDiffSide(Map attrs, final String expectedOrActualPrefix, final String text, int fullLength) {
    String property = System.getProperty("idea.test.cyclic.buffer.size");
    int threshold;
    try {
      threshold = Integer.parseInt(property);
    }
    catch (NumberFormatException ignored) {
      threshold = -1;
    }
    if (threshold > 0 && fullLength > threshold) {
      try {
        //noinspection SSBasedInspection
        File tempFile = File.createTempFile(expectedOrActualPrefix, "");
        OutputStream stream = new FileOutputStream(tempFile);
        try {
          // Write the whole encoded array. The previous code passed text.length()
          // (a char count) as the byte count, which truncated any text whose
          // UTF-8 encoding is longer than its number of chars.
          byte[] bytes = text.getBytes("UTF-8");
          stream.write(bytes);
        }
        finally {
          stream.close();
        }
        attrs.put(expectedOrActualPrefix + "File", tempFile.getAbsolutePath());
        attrs.put(expectedOrActualPrefix + "IsTempFile", "true");
        return;
      }
      catch (Throwable ignored) {}
    }
    attrs.put(expectedOrActualPrefix, text);
  }

  /** Returns true if {@code throwableClass} is one of the known assertion types or a subclass of one. */
  public static boolean isAssertionError(Class throwableClass) {
    if (throwableClass == null) return false;
    final String throwableClassName = throwableClass.getName();
    if (throwableClassName.equals(ASSERTION_CLASS_NAME) ||
        throwableClassName.equals(ASSERTION_FAILED_CLASS_NAME) ||
        throwableClassName.equals(OPENTEST4J_ASSERTION)) {
      return true;
    }
    return isAssertionError(throwableClass.getSuperclass());
  }

  public String getFilePath() {
    return myFilePath;
  }

  public String getActualFilePath() {
    return myActualFilePath;
  }

  public String getExpected() {
    return myExpected;
  }

  public String getActual() {
    return myActual;
  }

  /**
   * Extracts comparison data from {@code assertion}, trying, in order:
   * {@code FileComparisonFailure}, opentest4j, then the reflective
   * {@code fExpected}/{@code fActual} fields. Returns {@code null} when the
   * throwable carries no comparison data.
   */
  public static ComparisonFailureData create(Throwable assertion) {
    if (assertion instanceof FileComparisonFailure) {
      final FileComparisonFailure comparisonFailure = (FileComparisonFailure)assertion;
      return new ComparisonFailureData(comparisonFailure.getExpected(), comparisonFailure.getActual(),
                                       comparisonFailure.getFilePath(), comparisonFailure.getActualFilePath());
    }
    ComparisonFailureData commonAssertion = createCommonAssertion(assertion);
    if (commonAssertion != null) return commonAssertion;
    try {
      return new ComparisonFailureData(getExpected(assertion), getActual(assertion));
    }
    catch (Throwable e) {
      return null;
    }
  }

  /**
   * Reads expected/actual from an opentest4j {@code AssertionFailedError} via
   * reflection (the library may not be on this JVM's classpath at compile time).
   *
   * @noinspection SSBasedInspection
   */
  private static ComparisonFailureData createCommonAssertion(Throwable assertion) {
    try {
      Class assertionClass = assertion.getClass();
      if (assertionClass.getName().equals(OPENTEST4J_ASSERTION)) {
        Class[] parameterTypes = new Class[0];
        Object[] args = new Object[0];
        if (((Boolean)assertionClass.getDeclaredMethod("isExpectedDefined", parameterTypes).invoke(assertion, args)).booleanValue() &&
            ((Boolean)assertionClass.getDeclaredMethod("isActualDefined", parameterTypes).invoke(assertion, args)).booleanValue()) {
          Object expected = assertionClass.getDeclaredMethod("getExpected", parameterTypes).invoke(assertion, args);
          Object expectedString = expected.getClass().getDeclaredMethod("getStringRepresentation", parameterTypes).invoke(expected, args);
          Object actual = assertionClass.getDeclaredMethod("getActual", parameterTypes).invoke(assertion, args);
          Object actualString = actual.getClass().getDeclaredMethod("getStringRepresentation", parameterTypes).invoke(actual, args);
          return new ComparisonFailureData((String)expectedString, (String)actualString);
        }
      }
    }
    catch (Throwable e) {
      return null;
    }
    return null;
  }

  public static String getActual(Throwable assertion) throws IllegalAccessException, NoSuchFieldException {
    return get(assertion, ACTUAL, "fActual");
  }

  public static String getExpected(Throwable assertion) throws IllegalAccessException, NoSuchFieldException {
    return get(assertion, EXPECTED, "fExpected");
  }

  /** Reads the named field via the cached accessor, or by direct reflection for unknown classes. */
  private static String get(final Throwable assertion, final Map staticMap, final String fieldName) throws IllegalAccessException, NoSuchFieldException {
    String actual;
    if (assertion instanceof ComparisonFailure) {
      actual = (String)((Field)staticMap.get(ComparisonFailure.class)).get(assertion);
    }
    else if (assertion instanceof org.junit.ComparisonFailure) {
      actual = (String)((Field)staticMap.get(org.junit.ComparisonFailure.class)).get(assertion);
    }
    else {
      Field field = assertion.getClass().getDeclaredField(fieldName);
      field.setAccessible(true);
      actual = (String)field.get(assertion);
    }
    return actual;
  }
}
| |
/**
* Copyright (c) 2004-2005, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the University of California, Los Angeles nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package avrora.sim.mcu;
import avrora.arch.avr.AVRProperties;
import avrora.arch.legacy.LegacyInterpreter;
import avrora.core.Program;
import avrora.sim.*;
import avrora.sim.clock.ClockDomain;
import cck.util.Arithmetic;
import java.util.HashMap;
/**
 * The <code>ATMega32</code> class represents the ATMega32 microcontroller from Atmel. This
 * microcontroller has 32Kb code, 2KB SRAM, 1KB EEPROM, and a host of internal devices such as
 * ADC, SPI, and timers.
 *
 * <p>All static device properties (pin assignments, IO register layout, interrupt vector
 * assignments) are built once in the static initializer and shared by every instance.
 *
 * @author Ben L. Titzer
 */
public class ATMega32 extends ATMegaFamily {
    public static final int _1kb = 1024;
    public static final int ATMEGA32_IOREG_SIZE = 64;
    public static final int ATMEGA32_SRAM_SIZE = 2 * _1kb;
    public static final int ATMEGA32_FLASH_SIZE = 32 * _1kb;
    public static final int ATMEGA32_EEPROM_SIZE = _1kb;
    public static final int ATMEGA32_NUM_PINS = 45;
    public static final int ATMEGA32_NUM_INTS = 22;
    // Sleep-mode identifiers; indices into idleModeNames / wakeupTimes below.
    public static final int MODE_IDLE = 1;
    public static final int MODE_RESERVED1 = 2;
    public static final int MODE_ADCNRED = 3;
    public static final int MODE_RESERVED2 = 4;
    public static final int MODE_POWERDOWN = 5;
    public static final int MODE_STANDBY = 6;
    public static final int MODE_POWERSAVE = 7;
    public static final int MODE_EXTSTANDBY = 8;
    protected static final String[] idleModeNames = {
        "Active",
        "Idle",
        "RESERVED 1",
        "ADC Noise Reduction",
        "RESERVED 2",
        "Power Down",
        "Standby",
        "Power Save",
        "Extended Standby"
    };
    // Wake-up latency (in cycles) for each sleep mode, indexed like idleModeNames.
    protected static final int[] wakeupTimes = {
        0, 0, 0, 0, 0, 1000, 6, 1000, 6
    };
    // MCU control register; read by getSleepMode() to decode the selected sleep mode.
    protected final ActiveRegister MCUCR_reg;
    private static final int[][] transitionTimeMatrix = FiniteStateMachine.buildBimodalTTM(idleModeNames.length, 0, wakeupTimes, new int[wakeupTimes.length]);
    // CS values 6 and 7 select external clock source and are not supported. Results in an ArrayOutOfBound exception
    public static final int[] ATmega32Periods0 = {0, 1, 8, 64, 256, 1024};
    public static final int[] ATmega32Periods2 = {0, 1, 8, 32, 64, 128, 256, 1024};
    /**
     * The <code>props</code> field stores a static reference to a properties
     * object shared by all of the instances of this microcontroller. This object
     * stores the IO register size, SRAM size, pin assignments, etc.
     */
    public static final AVRProperties props;
    static {
        // statically initialize the pin assignments for this microcontroller
        HashMap<String, Integer> pinAssignments = new HashMap<String, Integer>(150);
        RegisterLayout rl = new RegisterLayout(ATMEGA32_IOREG_SIZE, 8);
        HashMap<String, Integer> interruptAssignments = new HashMap<String, Integer>(30);
        // Physical pin -> signal name(s) mapping (device package pin numbering).
        addPin(pinAssignments, 1, "MOSI", "PB5");
        addPin(pinAssignments, 2, "MISO", "PB6");
        addPin(pinAssignments, 3, "SCK", "PB7");
        addPin(pinAssignments, 4, "RESET");
        addPin(pinAssignments, 5, "VCC.1");
        addPin(pinAssignments, 6, "GND.1");
        addPin(pinAssignments, 7, "XTAL2");
        addPin(pinAssignments, 8, "XTAL1");
        addPin(pinAssignments, 9, "RXD", "PD0");
        addPin(pinAssignments, 10, "TXD", "PD1");
        addPin(pinAssignments, 11, "INT0", "PD2");
        addPin(pinAssignments, 12, "INT1", "PD3");
        addPin(pinAssignments, 13, "OC1B", "PD4");
        addPin(pinAssignments, 14, "OC1A", "PD5");
        addPin(pinAssignments, 15, "ICP1", "PD6");
        addPin(pinAssignments, 16, "OC2", "PD7");
        addPin(pinAssignments, 17, "VCC.2");
        addPin(pinAssignments, 18, "GND.2");
        addPin(pinAssignments, 19, "SCL", "PC0");
        addPin(pinAssignments, 20, "SDA", "PC1");
        addPin(pinAssignments, 21, "TCK", "PC2");
        addPin(pinAssignments, 22, "TMS", "PC3");
        addPin(pinAssignments, 23, "TDO", "PC4");
        addPin(pinAssignments, 24, "TDI", "PC5");
        addPin(pinAssignments, 25, "TOSC1", "PC6");
        addPin(pinAssignments, 26, "TOSC2", "PC7");
        addPin(pinAssignments, 27, "AVCC");
        addPin(pinAssignments, 28, "GND.3");
        addPin(pinAssignments, 29, "AREF");
        addPin(pinAssignments, 30, "ADC7", "PA7");
        addPin(pinAssignments, 31, "ADC6", "PA6");
        addPin(pinAssignments, 32, "ADC5", "PA5");
        addPin(pinAssignments, 33, "ADC4", "PA4");
        addPin(pinAssignments, 34, "ADC3", "PA3");
        addPin(pinAssignments, 35, "ADC2", "PA2");
        addPin(pinAssignments, 36, "ADC1", "PA1");
        addPin(pinAssignments, 37, "ADC0", "PA0");
        addPin(pinAssignments, 38, "VCC.3");
        addPin(pinAssignments, 39, "GND.4");
        addPin(pinAssignments, 40, "XCK", "T0", "PB0");
        addPin(pinAssignments, 41, "T1", "PB1");
        addPin(pinAssignments, 42, "AIN0", "INT2", "PB2");
        addPin(pinAssignments, 43, "AIN1", "OC0", "PB3");
        addPin(pinAssignments, 44, "SS", "PB4");
        // lower 64 IO registers (name -> IO-space address)
        rl.addIOReg("SREG", 0x3F);
        rl.addIOReg("SPH", 0x3E);
        rl.addIOReg("SPL", 0x3D);
        rl.addIOReg("OCR0", 0x3C);
        rl.addIOReg("GICR", 0x3B);
        rl.addIOReg("GIFR", 0x3A);
        rl.addIOReg("TIMSK", 0x39);
        rl.addIOReg("TIFR", 0x38);
        rl.addIOReg("SPMCR", 0x37);
        // TODO: this register is called different names on different models;
        // SPMCSR is registered as an alias at the same address.
        rl.addIOReg("SPMCSR", 0x37);
        rl.addIOReg("TWCR", 0x36);
        rl.addIOReg("MCUCR", 0x35);
        rl.addIOReg("MCUCSR", 0x34);
        rl.addIOReg("TCCR0", 0x33);
        rl.addIOReg("TCNT0", 0x32);
        rl.addIOReg("OSCCAL", 0x31);
        rl.addIOReg("SFIOR", 0x30);
        rl.addIOReg("TCCR1A", 0x2F, "COM1A[1:0],COM1B[1:0],FOC1A,FOC1B,WGM1[1:0]");
        rl.addIOReg("TCCR1B", 0x2E, ".,ICES1,.,WGM1[3:2],CS1[2:0]");
        rl.addIOReg("TCNT1H", 0x2D);
        rl.addIOReg("TCNT1L", 0x2C);
        rl.addIOReg("OCR1AH", 0x2B);
        rl.addIOReg("OCR1AL", 0x2A);
        rl.addIOReg("OCR1BH", 0x29);
        rl.addIOReg("OCR1BL", 0x28);
        rl.addIOReg("ICR1H", 0x27);
        rl.addIOReg("ICR1L", 0x26);
        rl.addIOReg("TCCR2", 0x25);
        rl.addIOReg("TCNT2", 0x24);
        rl.addIOReg("OCR2", 0x23);
        rl.addIOReg("ASSR", 0x22);
        rl.addIOReg("WDTCR", 0x21);
        rl.addIOReg("UBRRH", 0x20);
        // TODO: the UCSRC register is shared! (UBRRH/UCSRC share address 0x20)
        rl.addIOReg("UCSRC", 0x20);
        rl.addIOReg("EEARH", 0x1F);
        rl.addIOReg("EEARL", 0x1E);
        rl.addIOReg("EEDR", 0x1D);
        rl.addIOReg("EECR", 0x1C);
        rl.addIOReg("PORTA", 0x1B);
        rl.addIOReg("DDRA", 0x1A);
        rl.addIOReg("PINA", 0x19);
        rl.addIOReg("PORTB", 0x18);
        rl.addIOReg("DDRB", 0x17);
        rl.addIOReg("PINB", 0x16);
        rl.addIOReg("PORTC", 0x15);
        rl.addIOReg("DDRC", 0x14);
        rl.addIOReg("PINC", 0x13);
        rl.addIOReg("PORTD", 0x12);
        rl.addIOReg("DDRD", 0x11);
        rl.addIOReg("PIND", 0x10);
        rl.addIOReg("SPDR", 0x0F);
        rl.addIOReg("SPSR", 0x0E);
        rl.addIOReg("SPCR", 0x0D);
        rl.addIOReg("UDR", 0x0C);
        rl.addIOReg("UCSRA", 0x0B);
        rl.addIOReg("UCSRB", 0x0A);
        rl.addIOReg("UBRRL", 0x09);
        rl.addIOReg("ACSR", 0x08);
        rl.addIOReg("ADMUX", 0x07);
        rl.addIOReg("ADCSRA", 0x06);
        rl.addIOReg("ADCH", 0x05);
        rl.addIOReg("ADCL", 0x04);
        rl.addIOReg("TWDR", 0x03);
        rl.addIOReg("TWAR", 0x02);
        rl.addIOReg("TWSR", 0x01);
        rl.addIOReg("TWBR", 0x00);
        // Interrupt vector table: name -> vector number.
        addInterrupt(interruptAssignments, "RESET", 1);
        addInterrupt(interruptAssignments, "INT0", 2);
        addInterrupt(interruptAssignments, "INT1", 3);
        addInterrupt(interruptAssignments, "INT2", 4);
        addInterrupt(interruptAssignments, "TIMER2 COMP", 5);
        addInterrupt(interruptAssignments, "TIMER2 OVF", 6);
        addInterrupt(interruptAssignments, "TIMER1 CAPT", 7);
        addInterrupt(interruptAssignments, "TIMER1 COMPA", 8);
        addInterrupt(interruptAssignments, "TIMER1 COMPB", 9);
        addInterrupt(interruptAssignments, "TIMER1 OVF", 10);
        addInterrupt(interruptAssignments, "TIMER0 COMP", 11);
        addInterrupt(interruptAssignments, "TIMER0 OVF", 12);
        addInterrupt(interruptAssignments, "SPI, STC", 13);
        addInterrupt(interruptAssignments, "USART, RX", 14);
        addInterrupt(interruptAssignments, "USART, UDRE", 15);
        addInterrupt(interruptAssignments, "USART, TX", 16);
        addInterrupt(interruptAssignments, "ADC", 17);
        addInterrupt(interruptAssignments, "EE READY", 18);
        addInterrupt(interruptAssignments, "ANALOG COMP", 19);
        addInterrupt(interruptAssignments, "TWI", 20);
        addInterrupt(interruptAssignments, "SPM READY", 21);
        props = new AVRProperties(ATMEGA32_IOREG_SIZE, // number of io registers
                ATMEGA32_SRAM_SIZE, // size of sram in bytes
                ATMEGA32_FLASH_SIZE, // size of flash in bytes
                ATMEGA32_EEPROM_SIZE, // size of eeprom in bytes
                ATMEGA32_NUM_PINS, // number of pins
                ATMEGA32_NUM_INTS, // number of interrupts
                new ReprogrammableCodeSegment.Factory(ATMEGA32_FLASH_SIZE, 6),
                pinAssignments, // the assignment of names to physical pins
                rl, // the assignment of names to IO registers
                interruptAssignments);
    }
    public static class Factory implements MicrocontrollerFactory {
        /**
         * The <code>newMicrocontroller()</code> method is used to instantiate a microcontroller instance for the
         * particular program. It will construct an instance of the <code>Simulator</code> class that has all the
         * properties of this hardware device and has been initialized with the specified program.
         *
         * @param id the identifier of this microcontroller within the simulation
         * @param sim the simulation in which this microcontroller participates
         * @param cd the clock domain supplying the main clock
         * @param p the program to load onto the microcontroller
         * @return a <code>Microcontroller</code> instance that represents the specific hardware device with the
         *         program loaded onto it
         */
        public Microcontroller newMicrocontroller(int id, Simulation sim, ClockDomain cd, Program p) {
            return new ATMega32(id, sim, cd, p);
        }
    }

    /**
     * Constructs a new ATMega32 instance: creates the simulator with the legacy
     * interpreter, then installs pins and on-chip devices.
     */
    public ATMega32(int id, Simulation sim, ClockDomain cd, Program p) {
        super(cd, props, new FiniteStateMachine(cd.getMainClock(), MODE_ACTIVE, idleModeNames, transitionTimeMatrix));
        simulator = sim.createSimulator(id, LegacyInterpreter.FACTORY, this, p);
        interpreter = (AtmelInterpreter)simulator.getInterpreter();
        MCUCR_reg = getIOReg("MCUCR");
        installPins();
        installDevices();
    }

    // Creates one Pin object per physical pin.
    protected void installPins() {
        for (int cntr = 0; cntr < properties.num_pins; cntr++)
            pins[cntr] = new Pin(cntr);
    }

    // Wires up the on-chip peripherals: external interrupts, timers, IO ports,
    // EEPROM, USART, SPI and ADC. Order matters: the mask/flag registers must
    // exist before the timer devices that reference them.
    protected void installDevices() {
        // set up the external interrupt mask and flag registers and interrupt range
        int[] mapping = new int[] { -1, -1, -1, -1, -1, 4, 2, 3 };
        FlagRegister fr = new FlagRegister(interpreter, mapping);
        MaskRegister mr = new MaskRegister(interpreter, mapping);
        installIOReg("GICR", mr);
        installIOReg("GIFR", fr);
        EIFR_reg = fr;
        // set up the timer mask and flag registers and interrupt range
        TIFR_reg = buildInterruptRange(false, "TIMSK", "TIFR", 12, 8);
        TIMSK_reg = (MaskRegister)getIOReg("TIMSK");
        addDevice(new Timer0());
        addDevice(new Timer1(2));
        addDevice(new Timer2());
        buildPort('A');
        buildPort('B');
        buildPort('C');
        buildPort('D');
        addDevice(new EEPROM(properties.eeprom_size, this));
        addDevice(new USART("", this));
        addDevice(new SPI(this));
        addDevice(new ADC(this, 8));
    }
    // permutation of sleep mode bits in the register (high order bits first)
    private static final int[] MCUCR_sm_perm = { 2, 4, 3 };

    // Decodes the current sleep mode from MCUCR: bit 5 is sleep-enable; the
    // SM bits (permuted via MCUCR_sm_perm) select the mode.
    protected int getSleepMode() {
        byte value = MCUCR_reg.read();
        boolean sleepEnable = Arithmetic.getBit(value, 5);
        if ( sleepEnable )
            return Arithmetic.getBitField(value, MCUCR_sm_perm) + 1;
        else
            return MODE_IDLE;
    }
    /**
     * <code>Timer0</code> is different from ATMega128
     * (this model's prescaler periods exclude the external clock options).
     */
    protected class Timer0 extends Timer8Bit {
        protected Timer0() {
            super(ATMega32.this, 0, 0, 1, 0, 1, 0, ATmega32Periods0);
        }
    }
    /**
     * <code>Timer2</code> is different from ATMega128
     * (it supports the asynchronous status register ASSR).
     */
    protected class Timer2 extends Timer8Bit {
        protected Timer2() {
            super(ATMega32.this, 0, 2, 7, 6, 7, 6, ATmega32Periods2);
            installIOReg("ASSR", new ASSRRegister());
        }
        // See pg. 135 of the ATmega32A doc
        protected class ASSRRegister extends RWRegister {
            static final int AS2 = 3;
            static final int TCN2UB = 2;
            static final int OCR2UB = 1;
            static final int TCR2UB = 0;
            public void write(byte val) {
                // Only the low 4 bits are writable.
                super.write((byte) (0xf & val));
                decode(val);
            }
            protected void decode(byte val) {
                // TODO: if there is a change, remove ticker and requeue?
                // AS2 selects the asynchronous (external) clock for Timer2.
                timerClock = Arithmetic.getBit(val, AS2) ? externalClock : mainClock;
            }
        }
    }
}
| |
/*
* Copyright (c) 2010-2018. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.test.aggregate;
import org.axonframework.commandhandling.CommandHandler;
import org.axonframework.commandhandling.CommandMessage;
import org.axonframework.commandhandling.GenericCommandMessage;
import org.axonframework.eventhandling.EventHandler;
import org.axonframework.messaging.*;
import org.axonframework.messaging.correlation.SimpleCorrelationDataProvider;
import org.axonframework.messaging.unitofwork.UnitOfWork;
import org.axonframework.modelling.command.AggregateIdentifier;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import static org.axonframework.modelling.command.AggregateLifecycle.apply;
import static org.axonframework.test.aggregate.FixtureTest_CommandInterceptors.InterceptorAggregate.AGGREGATE_IDENTIFIER;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Verifies that command dispatch- and handler-interceptors registered on an
 * {@link AggregateTestFixture} are actually invoked, and that the meta data
 * they attach is propagated onto the events applied by the aggregate.
 */
@ExtendWith(MockitoExtension.class)
class FixtureTest_CommandInterceptors {
// meta data entry attached by TestCommandDispatchInterceptor
private static final String DISPATCH_META_DATA_KEY = "dispatchKey";
private static final String DISPATCH_META_DATA_VALUE = "dispatchValue";
// meta data entry attached (as correlation data) by TestCommandHandlerInterceptor
private static final String HANDLER_META_DATA_KEY = "handlerKey";
private static final String HANDLER_META_DATA_VALUE = "handlerValue";
private FixtureConfiguration<InterceptorAggregate> fixture;
@Mock
private MessageDispatchInterceptor<CommandMessage<?>> firstMockCommandDispatchInterceptor;
@Mock
private MessageDispatchInterceptor<CommandMessage<?>> secondMockCommandDispatchInterceptor;
@Mock
private MessageHandlerInterceptor<CommandMessage<?>> mockCommandHandlerInterceptor;
@BeforeEach
void setUp() {
fixture = new AggregateTestFixture<>(InterceptorAggregate.class);
}
@Test
void testRegisteredCommandDispatchInterceptorsAreInvoked() {
// both mock interceptors pass the command message through unchanged
when(firstMockCommandDispatchInterceptor.handle(any(CommandMessage.class)))
.thenAnswer(it -> it.getArguments()[0]);
fixture.registerCommandDispatchInterceptor(firstMockCommandDispatchInterceptor);
when(secondMockCommandDispatchInterceptor.handle(any(CommandMessage.class)))
.thenAnswer(it -> it.getArguments()[0]);
fixture.registerCommandDispatchInterceptor(secondMockCommandDispatchInterceptor);
TestCommand expectedCommand = new TestCommand(AGGREGATE_IDENTIFIER);
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(expectedCommand);
// each registered interceptor must have seen the dispatched command
ArgumentCaptor<GenericCommandMessage> firstCommandMessageCaptor =
ArgumentCaptor.forClass(GenericCommandMessage.class);
verify(firstMockCommandDispatchInterceptor).handle(firstCommandMessageCaptor.capture());
GenericCommandMessage firstResult = firstCommandMessageCaptor.getValue();
assertEquals(expectedCommand, firstResult.getPayload());
ArgumentCaptor<GenericCommandMessage> secondCommandMessageCaptor =
ArgumentCaptor.forClass(GenericCommandMessage.class);
verify(secondMockCommandDispatchInterceptor).handle(secondCommandMessageCaptor.capture());
GenericCommandMessage secondResult = secondCommandMessageCaptor.getValue();
assertEquals(expectedCommand, secondResult.getPayload());
}
@Test
void testRegisteredCommandDispatchInterceptorIsInvokedAndAltersAppliedEvent() {
// baseline: without the interceptor the applied event carries no meta data
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER))
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, Collections.emptyMap()));
fixture.registerCommandDispatchInterceptor(new TestCommandDispatchInterceptor());
MetaData expectedValues =
new MetaData(Collections.singletonMap(DISPATCH_META_DATA_KEY, DISPATCH_META_DATA_VALUE));
// with the interceptor registered, the dispatch meta data appears on the event
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER))
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, expectedValues));
}
@Test
void testRegisteredCommandDispatchInterceptorIsInvokedForFixtureMethodsGivenCommands() {
fixture.registerCommandDispatchInterceptor(new TestCommandDispatchInterceptor());
MetaData expectedValues =
new MetaData(Collections.singletonMap(DISPATCH_META_DATA_KEY, DISPATCH_META_DATA_VALUE));
// the interceptor must also fire for commands passed through givenCommands(...)
fixture.givenCommands(new CreateStandardAggregateCommand(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER))
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, expectedValues));
}
@SuppressWarnings("unchecked")
@Test
void testRegisteredCommandHandlerInterceptorsAreInvoked() throws Exception {
fixture.registerCommandHandlerInterceptor(new TestCommandHandlerInterceptor());
// the mock interceptor is a pass-through; we only verify it was called
when(mockCommandHandlerInterceptor.handle(any(UnitOfWork.class), any(InterceptorChain.class)))
.thenAnswer(InvocationOnMock::getArguments);
fixture.registerCommandHandlerInterceptor(mockCommandHandlerInterceptor);
TestCommand expectedCommand = new TestCommand(AGGREGATE_IDENTIFIER);
Map<String, Object> expectedMetaDataMap = new HashMap<>();
expectedMetaDataMap.put(HANDLER_META_DATA_KEY, HANDLER_META_DATA_VALUE);
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(expectedCommand, expectedMetaDataMap);
// the unit of work handed to the interceptor must carry the dispatched command
ArgumentCaptor<UnitOfWork> unitOfWorkCaptor = ArgumentCaptor.forClass(UnitOfWork.class);
ArgumentCaptor<InterceptorChain> interceptorChainCaptor = ArgumentCaptor.forClass(InterceptorChain.class);
verify(mockCommandHandlerInterceptor).handle(unitOfWorkCaptor.capture(), interceptorChainCaptor.capture());
UnitOfWork unitOfWorkResult = unitOfWorkCaptor.getValue();
Message messageResult = unitOfWorkResult.getMessage();
assertEquals(expectedCommand, messageResult.getPayload());
assertEquals(expectedMetaDataMap, messageResult.getMetaData());
}
@Test
void testRegisteredCommandHandlerInterceptorIsInvokedAndAltersEvent() {
// baseline: without the interceptor the applied event carries no meta data
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER))
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, Collections.emptyMap()));
fixture.registerCommandHandlerInterceptor(new TestCommandHandlerInterceptor());
Map<String, Object> expectedMetaDataMap = new HashMap<>();
expectedMetaDataMap.put(HANDLER_META_DATA_KEY, HANDLER_META_DATA_VALUE);
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER), expectedMetaDataMap)
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, expectedMetaDataMap));
}
@Test
void testRegisteredCommandHandlerInterceptorIsInvokedForFixtureMethodsGivenCommands() {
fixture.registerCommandHandlerInterceptor(new TestCommandHandlerInterceptor());
Map<String, Object> expectedMetaDataMap = new HashMap<>();
expectedMetaDataMap.put(HANDLER_META_DATA_KEY, HANDLER_META_DATA_VALUE);
// the interceptor must also fire for commands passed through givenCommands(...)
fixture.givenCommands(new CreateStandardAggregateCommand(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER), expectedMetaDataMap)
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, expectedMetaDataMap));
}
@Test
void testRegisteredCommandDispatchAndHandlerInterceptorAreBothInvokedAndAlterEvent() {
// baseline: without interceptors the applied event carries no meta data
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER))
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, Collections.emptyMap()));
fixture.registerCommandDispatchInterceptor(new TestCommandDispatchInterceptor());
fixture.registerCommandHandlerInterceptor(new TestCommandHandlerInterceptor());
Map<String, Object> testMetaDataMap = new HashMap<>();
testMetaDataMap.put(HANDLER_META_DATA_KEY, HANDLER_META_DATA_VALUE);
// the event is expected to carry the merged meta data from both interceptors
Map<String, Object> expectedMetaDataMap = new HashMap<>(testMetaDataMap);
expectedMetaDataMap.put(DISPATCH_META_DATA_KEY, DISPATCH_META_DATA_VALUE);
fixture.given(new StandardAggregateCreatedEvent(AGGREGATE_IDENTIFIER))
.when(new TestCommand(AGGREGATE_IDENTIFIER), testMetaDataMap)
.expectEvents(new TestEvent(AGGREGATE_IDENTIFIER, new MetaData(expectedMetaDataMap)));
}
/**
 * Aggregate under test. Handling a {@code TestCommand} applies a
 * {@code TestEvent} that carries the handler's {@link MetaData}, which lets
 * the tests observe what the interceptors attached.
 */
public static class InterceptorAggregate {
public static final String AGGREGATE_IDENTIFIER = "id1";
@SuppressWarnings("UnusedDeclaration")
private transient int counter;
private Integer lastNumber;
@AggregateIdentifier
private String identifier;
private MyEntity entity;
public InterceptorAggregate() {
}
public InterceptorAggregate(Object aggregateIdentifier) {
identifier = aggregateIdentifier.toString();
}
@CommandHandler
public InterceptorAggregate(CreateStandardAggregateCommand cmd) {
apply(new StandardAggregateCreatedEvent(cmd.getAggregateIdentifier()));
}
@SuppressWarnings("UnusedParameters")
@CommandHandler
public void handle(TestCommand command, MetaData metaData) {
// echo the handler-visible meta data into the applied event
apply(new TestEvent(command.getAggregateIdentifier(), metaData));
}
@EventHandler
public void handle(StandardAggregateCreatedEvent event) {
this.identifier = event.getAggregateIdentifier().toString();
}
}
// command that creates the aggregate via the @CommandHandler constructor
private static class CreateStandardAggregateCommand {
private final Object aggregateIdentifier;
public CreateStandardAggregateCommand(Object aggregateIdentifier) {
this.aggregateIdentifier = aggregateIdentifier;
}
public Object getAggregateIdentifier() {
return aggregateIdentifier;
}
}
// creation event sourced by the aggregate's event handler
private static class StandardAggregateCreatedEvent {
private final Object aggregateIdentifier;
public StandardAggregateCreatedEvent(Object aggregateIdentifier) {
this.aggregateIdentifier = aggregateIdentifier;
}
public Object getAggregateIdentifier() {
return aggregateIdentifier;
}
}
/**
 * Dispatch interceptor that adds the DISPATCH_META_DATA_KEY entry to every
 * outgoing command message.
 */
class TestCommandDispatchInterceptor implements MessageDispatchInterceptor<CommandMessage<?>> {
@Override
public BiFunction<Integer, CommandMessage<?>, CommandMessage<?>> handle(List<? extends CommandMessage<?>> messages) {
return (index, message) -> {
Map<String, Object> testMetaDataMap = new HashMap<>();
testMetaDataMap.put(DISPATCH_META_DATA_KEY, DISPATCH_META_DATA_VALUE);
message = message.andMetaData(testMetaDataMap);
return message;
};
}
}
/**
 * Handler interceptor that registers a correlation data provider for
 * HANDLER_META_DATA_KEY and then proceeds down the chain unchanged.
 */
class TestCommandHandlerInterceptor implements MessageHandlerInterceptor<CommandMessage<?>> {
@Override
public Object handle(UnitOfWork<? extends CommandMessage<?>> unitOfWork, InterceptorChain interceptorChain) throws Exception {
unitOfWork.registerCorrelationDataProvider(new SimpleCorrelationDataProvider(HANDLER_META_DATA_KEY));
return interceptorChain.proceed();
}
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.status;
import com.google.common.collect.Maps;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.refresh.RefreshStats;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static com.google.common.collect.Lists.newArrayList;
/**
*
*/
public class IndexStatus implements Iterable<IndexShardStatus> {
private final String index;
private final Map<Integer, IndexShardStatus> indexShards;
IndexStatus(String index, ShardStatus[] shards) {
this.index = index;
Map<Integer, List<ShardStatus>> tmpIndexShards = Maps.newHashMap();
for (ShardStatus shard : shards) {
List<ShardStatus> lst = tmpIndexShards.get(shard.shardRouting().id());
if (lst == null) {
lst = newArrayList();
tmpIndexShards.put(shard.shardRouting().id(), lst);
}
lst.add(shard);
}
indexShards = Maps.newHashMap();
for (Map.Entry<Integer, List<ShardStatus>> entry : tmpIndexShards.entrySet()) {
indexShards.put(entry.getKey(), new IndexShardStatus(entry.getValue().get(0).shardRouting().shardId(), entry.getValue().toArray(new ShardStatus[entry.getValue().size()])));
}
}
public String index() {
return this.index;
}
public String getIndex() {
return index();
}
/**
* A shard id to index shard status map (note, index shard status is the replication shard group that maps
* to the shard id).
*/
public Map<Integer, IndexShardStatus> shards() {
return this.indexShards;
}
public Map<Integer, IndexShardStatus> getShards() {
return shards();
}
/**
* Returns only the primary shards store size in bytes.
*/
public ByteSizeValue primaryStoreSize() {
long bytes = -1;
for (IndexShardStatus shard : this) {
if (shard.primaryStoreSize() != null) {
if (bytes == -1) {
bytes = 0;
}
bytes += shard.primaryStoreSize().bytes();
}
}
if (bytes == -1) {
return null;
}
return new ByteSizeValue(bytes);
}
/**
* Returns only the primary shards store size in bytes.
*/
public ByteSizeValue getPrimaryStoreSize() {
return primaryStoreSize();
}
/**
* Returns the full store size in bytes, of both primaries and replicas.
*/
public ByteSizeValue storeSize() {
long bytes = -1;
for (IndexShardStatus shard : this) {
if (shard.storeSize() != null) {
if (bytes == -1) {
bytes = 0;
}
bytes += shard.storeSize().bytes();
}
}
if (bytes == -1) {
return null;
}
return new ByteSizeValue(bytes);
}
/**
* Returns the full store size in bytes, of both primaries and replicas.
*/
public ByteSizeValue getStoreSize() {
return storeSize();
}
public long translogOperations() {
long translogOperations = -1;
for (IndexShardStatus shard : this) {
if (shard.translogOperations() != -1) {
if (translogOperations == -1) {
translogOperations = 0;
}
translogOperations += shard.translogOperations();
}
}
return translogOperations;
}
public long getTranslogOperations() {
return translogOperations();
}
private transient DocsStatus docs;
public DocsStatus docs() {
if (docs != null) {
return docs;
}
DocsStatus docs = null;
for (IndexShardStatus shard : this) {
if (shard.docs() == null) {
continue;
}
if (docs == null) {
docs = new DocsStatus();
}
docs.numDocs += shard.docs().numDocs();
docs.maxDoc += shard.docs().maxDoc();
docs.deletedDocs += shard.docs().deletedDocs();
}
this.docs = docs;
return docs;
}
public DocsStatus getDocs() {
return docs();
}
/**
* Total merges of this index.
*/
public MergeStats mergeStats() {
MergeStats mergeStats = new MergeStats();
for (IndexShardStatus shard : this) {
mergeStats.add(shard.mergeStats());
}
return mergeStats;
}
/**
* Total merges of this index.
*/
public MergeStats getMergeStats() {
return this.mergeStats();
}
public RefreshStats refreshStats() {
RefreshStats refreshStats = new RefreshStats();
for (IndexShardStatus shard : this) {
refreshStats.add(shard.refreshStats());
}
return refreshStats;
}
public RefreshStats getRefreshStats() {
return refreshStats();
}
public FlushStats flushStats() {
FlushStats flushStats = new FlushStats();
for (IndexShardStatus shard : this) {
flushStats.add(shard.flushStats());
}
return flushStats;
}
public FlushStats getFlushStats() {
return flushStats();
}
public Iterator<IndexShardStatus> iterator() {
return indexShards.values().iterator();
}
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.config.crud;
import com.googlecode.junit.ext.JunitExtRunner;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.exceptions.GoConfigInvalidException;
import com.thoughtworks.go.config.materials.*;
import com.thoughtworks.go.config.materials.Filter;
import com.thoughtworks.go.domain.config.Configuration;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.plugin.access.scm.*;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.util.*;
import org.junit.Test;
import org.junit.runner.RunWith;
import static com.thoughtworks.go.plugin.api.config.Property.*;
import static java.lang.String.format;
import static org.hamcrest.Matchers.*;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
 * XML loading/validation tests for pluggable SCM configuration: id and name
 * constraints, duplicate detection (by id, name and fingerprint), the
 * autoUpdate flag, SCM material references and secure property resolution.
 */
@RunWith(JunitExtRunner.class)
public class SCMConfigXmlLoaderTest extends BaseConfigXmlLoaderTest {
// canonical <scm> snippets used to assemble <cruise> documents in the tests below
final static String VALID_SCM = " <scm id='scm-id' name='name1'><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
final static String VALID_SCM_WITH_ID_NAME = " <scm id='%s' name='%s'><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
final static String SCM_WITH_MISSING_ID = " <scm name='name1'><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
final static String SCM_WITH_INVALID_ID = " <scm id='id with space' name='name1'><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
final static String SCM_WITH_EMPTY_ID = " <scm id='' name='name1'><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
final static String SCM_WITH_MISSING_NAME = " <scm id='id' ><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
final static String SCM_WITH_INVALID_NAME = " <scm id='id' name='name with space'><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
final static String SCM_WITH_EMPTY_NAME = " <scm id='id' name=''><pluginConfiguration id='id' version='1.0'/><configuration><property><key>url</key><value>http://go</value></property></configuration></scm>";
@Test
public void shouldThrowXsdValidationWhenSCMIdsAreDuplicate() throws Exception {
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + VALID_SCM + VALID_SCM + " </scms></cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown XsdValidationException");
} catch (XsdValidationException e) {
assertThat(e.getMessage(), is("Duplicate unique value [scm-id] declared for identity constraint \"uniqueSCMId\" of element \"cruise\"."));
}
}
@Test
public void shouldThrowXsdValidationWhenSCMIdIsEmpty() throws Exception {
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + SCM_WITH_EMPTY_ID + " </scms></cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown XsdValidationException");
} catch (XsdValidationException e) {
assertThat(e.getMessage(), is("Scm id is invalid. \"\" should conform to the pattern - [a-zA-Z0-9_\\-]{1}[a-zA-Z0-9_\\-.]*"));
}
}
@Test
public void shouldThrowXsdValidationWhenSCMIdIsInvalid() throws Exception {
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + SCM_WITH_INVALID_ID + " </scms></cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown XsdValidationException");
} catch (XsdValidationException e) {
assertThat(e.getMessage(), is("Scm id is invalid. \"id with space\" should conform to the pattern - [a-zA-Z0-9_\\-]{1}[a-zA-Z0-9_\\-.]*"));
}
}
@Test
public void shouldThrowXsdValidationWhenSCMNamesAreDuplicate() throws Exception {
// two SCMs with distinct ids but the same name must be rejected
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + format(VALID_SCM_WITH_ID_NAME, "1", "scm-name") + format(VALID_SCM_WITH_ID_NAME, "2", "scm-name") + " </scms></cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown XsdValidationException");
} catch (XsdValidationException e) {
assertThat(e.getMessage(), is("Duplicate unique value [scm-name] declared for identity constraint \"uniqueSCMName\" of element \"scms\"."));
}
}
@Test
public void shouldThrowXsdValidationWhenSCMNameIsMissing() throws Exception {
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + SCM_WITH_MISSING_NAME + " </scms></cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown XsdValidationException");
} catch (XsdValidationException e) {
assertThat(e.getMessage(), is("\"Name\" is required for Scm"));
}
}
@Test
public void shouldThrowXsdValidationWhenSCMNameIsEmpty() throws Exception {
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + SCM_WITH_EMPTY_NAME + " </scms></cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown XsdValidationException");
} catch (XsdValidationException e) {
assertThat(e.getMessage(), is("Name is invalid. \"\" should conform to the pattern - [a-zA-Z0-9_\\-]{1}[a-zA-Z0-9_\\-.]*"));
}
}
@Test
public void shouldThrowXsdValidationWhenSCMNameIsInvalid() throws Exception {
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + SCM_WITH_INVALID_NAME + " </scms></cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown XsdValidationException");
} catch (XsdValidationException e) {
assertThat(e.getMessage(), is("Name is invalid. \"name with space\" should conform to the pattern - [a-zA-Z0-9_\\-]{1}[a-zA-Z0-9_\\-.]*"));
}
}
@Test
public void shouldGenerateSCMIdWhenMissing() throws Exception {
// a missing id is not an error; the loader assigns one
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'><scms>\n" + SCM_WITH_MISSING_ID + " </scms></cruise>";
GoConfigHolder configHolder = xmlLoader.loadConfigHolder(xml);
assertThat(configHolder.config.getSCMs().get(0).getId(), is(notNullValue()));
}
@Test
public void shouldFailValidationIfSCMWithDuplicateFingerprintExists() throws Exception {
// SCM-KEY2 and SCM-KEY3 are excluded from identity, so both SCMs below share
// the same fingerprint (only SCM-KEY1 counts) even though their values differ
SCMPropertyConfiguration scmConfiguration = new SCMPropertyConfiguration();
scmConfiguration.add(new SCMProperty("SCM-KEY1"));
scmConfiguration.add(new SCMProperty("SCM-KEY2").with(REQUIRED, false).with(PART_OF_IDENTITY, false));
scmConfiguration.add(new SCMProperty("SCM-KEY3").with(REQUIRED, false).with(PART_OF_IDENTITY, false).with(SECURE, true));
SCMMetadataStore.getInstance().addMetadataFor("plugin-1", new SCMConfigurations(scmConfiguration), null);
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<scms>\n"
+ " <scm id='scm-id-1' name='name-1'>\n"
+ " <pluginConfiguration id='plugin-1' version='1.0'/>\n"
+ " <configuration>\n"
+ " <property>\n"
+ " <key>SCM-KEY1</key>\n"
+ " <value>scm-key1</value>\n"
+ " </property>\n"
+ " <property>\n"
+ " <key>SCM-KEY2</key>\n"
+ " <value>scm-key2</value>\n"
+ " </property>\n"
+ " <property>\n"
+ " <key>SCM-KEY3</key>\n"
+ " <value>scm-key3</value>\n"
+ " </property>\n"
+ " </configuration>\n"
+ " </scm>\n"
+ " <scm id='scm-id-2' name='name-2'>\n"
+ " <pluginConfiguration id='plugin-1' version='1.0'/>\n"
+ " <configuration>\n"
+ " <property>\n"
+ " <key>SCM-KEY1</key>\n"
+ " <value>scm-key1</value>\n"
+ " </property>\n"
+ " <property>\n"
+ " <key>SCM-KEY2</key>\n"
+ " <value>another-scm-key2</value>\n"
+ " </property>\n"
+ " <property>\n"
+ " <key>SCM-KEY3</key>\n"
+ " <value>another-scm-key3</value>\n"
+ " </property>\n"
+ " </configuration>\n"
+ " </scm>\n"
+ " </scms>"
+ "</cruise>";
try {
xmlLoader.loadConfigHolder(xml);
fail("should have thrown duplicate fingerprint exception");
} catch (GoConfigInvalidException e) {
assertThat(e.getMessage(), is("Cannot save SCM, found duplicate SCMs. name-1, name-2"));
}
}
@Test
public void shouldLoadAutoUpdateValueForSCMWhenLoadedFromConfigFile() throws Exception {
// same document loaded twice, once with autoUpdate=false and once with true
String configTemplate = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>" +
"<scms>" +
" <scm id='2ef830d7-dd66-42d6-b393-64a84646e557' name='scm-name' autoUpdate='%s' >" +
" <pluginConfiguration id='yum' version='1' />" +
" <configuration>" +
" <property>" +
" <key>SCM_URL</key>" +
" <value>http://fake-scm/git/go-cd</value>" +
" </property>" +
" </configuration>" +
" </scm>" +
"</scms>" +
"</cruise>";
String configContent = String.format(configTemplate, false);
GoConfigHolder holder = xmlLoader.loadConfigHolder(configContent);
SCM scm = holder.config.getSCMs().find("2ef830d7-dd66-42d6-b393-64a84646e557");
assertThat(scm.isAutoUpdate(), is(false));
configContent = String.format(configTemplate, true);
holder = xmlLoader.loadConfigHolder(configContent);
scm = holder.config.getSCMs().find("2ef830d7-dd66-42d6-b393-64a84646e557");
assertThat(scm.isAutoUpdate(), is(true));
}
@Test
public void shouldResolveSCMReferenceElementForAMaterialInConfig() throws Exception {
// a pipeline material may reference an <scm> purely by its id
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<scms>\n"
+ " <scm id='scm-id' name='scm-name'>\n"
+ " <pluginConfiguration id='plugin-id' version='1.0'/>\n"
+ " <configuration>\n"
+ " <property>\n"
+ " <key>url</key>\n"
+ " <value>http://go</value>\n"
+ " </property>\n"
+ " </configuration>\n"
+ " </scm>\n"
+ " </scms>"
+ "<pipelines group=\"group_name\">\n"
+ " <pipeline name=\"new_name\">\n"
+ " <materials>\n"
+ " <scm ref='scm-id' />\n"
+ " </materials>\n"
+ " <stage name=\"stage_name\">\n"
+ " <jobs>\n"
+ " <job name=\"job_name\" />\n"
+ " </jobs>\n"
+ " </stage>\n"
+ " </pipeline>\n"
+ "</pipelines></cruise>";
GoConfigHolder goConfigHolder = xmlLoader.loadConfigHolder(xml);
PipelineConfig pipelineConfig = goConfigHolder.config.pipelineConfigByName(new CaseInsensitiveString("new_name"));
PluggableSCMMaterialConfig pluggableSCMMaterialConfig = (PluggableSCMMaterialConfig) pipelineConfig.materialConfigs().get(0);
assertThat(pluggableSCMMaterialConfig.getSCMConfig(), is(goConfigHolder.config.getSCMs().get(0)));
assertThat(pluggableSCMMaterialConfig.getFolder(), is(nullValue()));
assertThat(pluggableSCMMaterialConfig.filter(), is(new Filter()));
}
@Test
public void shouldReadFolderAndFilterForPluggableSCMMaterialConfig() throws Exception {
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<scms>\n"
+ " <scm id='scm-id' name='scm-name'>\n"
+ " <pluginConfiguration id='plugin-id' version='1.0'/>\n"
+ " <configuration>\n"
+ " <property>\n"
+ " <key>url</key>\n"
+ " <value>http://go</value>\n"
+ " </property>\n"
+ " </configuration>\n"
+ " </scm>\n"
+ " </scms>"
+ "<pipelines group=\"group_name\">\n"
+ " <pipeline name=\"new_name\">\n"
+ " <materials>\n"
+ " <scm ref='scm-id' dest='dest'>\n"
+ " <filter>\n"
+ " <ignore pattern=\"x\"/>\n"
+ " <ignore pattern=\"y\"/>\n"
+ " </filter>\n"
+ " </scm>\n"
+ " </materials>\n"
+ " <stage name=\"stage_name\">\n"
+ " <jobs>\n"
+ " <job name=\"job_name\" />\n"
+ " </jobs>\n"
+ " </stage>\n"
+ " </pipeline>\n"
+ "</pipelines></cruise>";
GoConfigHolder goConfigHolder = xmlLoader.loadConfigHolder(xml);
PipelineConfig pipelineConfig = goConfigHolder.config.pipelineConfigByName(new CaseInsensitiveString("new_name"));
PluggableSCMMaterialConfig pluggableSCMMaterialConfig = (PluggableSCMMaterialConfig) pipelineConfig.materialConfigs().get(0);
assertThat(pluggableSCMMaterialConfig.getSCMConfig(), is(goConfigHolder.config.getSCMs().get(0)));
assertThat(pluggableSCMMaterialConfig.getFolder(), is("dest"));
assertThat(pluggableSCMMaterialConfig.filter(), is(new Filter(new IgnoredFiles("x"), new IgnoredFiles("y"))));
}
@Test
public void shouldBeAbleToResolveSecureConfigPropertiesForSCMs() throws Exception {
String encryptedValue = new GoCipher().encrypt("secure-two");
String xml = "<cruise schemaVersion='" + GoConstants.CONFIG_SCHEMA_VERSION + "'>\n"
+ "<scms>\n"
+ " <scm id='scm-id' name='name'>\n"
+ " <pluginConfiguration id='plugin-id' version='1.0'/>\n"
+ " <configuration>\n"
+ " <property>\n"
+ " <key>plain</key>\n"
+ " <value>value</value>\n"
+ " </property>\n"
+ " <property>\n"
+ " <key>secure-one</key>\n"
+ " <value>secure-value</value>\n"
+ " </property>\n"
+ " <property>\n"
+ " <key>secure-two</key>\n"
+ " <encryptedValue>" + encryptedValue + "</encryptedValue>\n"
+ " </property>\n"
+ " </configuration>\n"
+ " </scm>\n"
+ " </scms>"
+ "<pipelines group=\"group_name\">\n"
+ " <pipeline name=\"new_name\">\n"
+ " <materials>\n"
+ " <scm ref='scm-id' />\n"
+ " </materials>\n"
+ " <stage name=\"stage_name\">\n"
+ " <jobs>\n"
+ " <job name=\"job_name\" />\n"
+ " </jobs>\n"
+ " </stage>\n"
+ " </pipeline>\n"
+ "</pipelines></cruise>";
//meta data of scm
SCMPropertyConfiguration scmConfiguration = new SCMPropertyConfiguration();
scmConfiguration.add(new SCMProperty("plain"));
scmConfiguration.add(new SCMProperty("secure-one").with(SCMConfiguration.SECURE, true));
scmConfiguration.add(new SCMProperty("secure-two").with(SCMConfiguration.SECURE, true));
SCMMetadataStore.getInstance().addMetadataFor("plugin-id", new SCMConfigurations(scmConfiguration), null);
GoConfigHolder goConfigHolder = xmlLoader.loadConfigHolder(xml);
SCM scmConfig = goConfigHolder.config.getSCMs().first();
PipelineConfig pipelineConfig = goConfigHolder.config.pipelineConfigByName(new CaseInsensitiveString("new_name"));
PluggableSCMMaterialConfig pluggableSCMMaterialConfig = (PluggableSCMMaterialConfig) pipelineConfig.materialConfigs().get(0);
assertThat(pluggableSCMMaterialConfig.getSCMConfig(), is(scmConfig));
Configuration configuration = pluggableSCMMaterialConfig.getSCMConfig().getConfiguration();
// plain stays clear-text; secure properties come back encrypted whether they
// were supplied as <value> or as <encryptedValue>
assertThat(configuration.get(0).getConfigurationValue().getValue(), is("value"));
assertThat(configuration.get(1).getEncryptedValue().getValue(), is(new GoCipher().encrypt("secure-value")));
assertThat(configuration.get(2).getEncryptedValue().getValue(), is(encryptedValue));
}
}
| |
/*
* Copyright 2014 Akexorcist
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alieeen.smartchair.bluetooth;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import com.alieeen.smartchair.R;
import java.util.Set;
@SuppressLint("NewApi")
public class DeviceList extends Activity {
// Debugging
private static final String TAG = "BluetoothSPP";
private static final boolean D = true;
// Member fields
private BluetoothAdapter mBtAdapter;
private ArrayAdapter<String> mPairedDevicesArrayAdapter;
private Set<BluetoothDevice> pairedDevices;
private Button scanButton;
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Setup the window
requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
int listId = getIntent().getIntExtra("layout_list", R.layout.device_list);
setContentView(listId);
String strBluetoothDevices = getIntent().getStringExtra("bluetooth_devices");
if(strBluetoothDevices == null)
strBluetoothDevices = "Bluetooth Devices";
setTitle(strBluetoothDevices);
// Set result CANCELED in case the user backs out
setResult(Activity.RESULT_CANCELED);
// Initialize the button to perform device discovery
scanButton = (Button) findViewById(R.id.button_scan);
String strScanDevice = getIntent().getStringExtra("scan_for_devices");
if(strScanDevice == null)
strScanDevice = "SCAN FOR DEVICES";
scanButton.setText(strScanDevice);
scanButton.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
doDiscovery();
}
});
// Initialize array adapters. One for already paired devices
// and one for newly discovered devices
int layout_text = getIntent().getIntExtra("layout_text", R.layout.device_name);
mPairedDevicesArrayAdapter = new ArrayAdapter<String>(this, layout_text);
// Find and set up the ListView for paired devices
ListView pairedListView = (ListView) findViewById(R.id.list_devices);
pairedListView.setAdapter(mPairedDevicesArrayAdapter);
pairedListView.setOnItemClickListener(mDeviceClickListener);
// Register for broadcasts when a device is discovered
IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND);
this.registerReceiver(mReceiver, filter);
// Register for broadcasts when discovery has finished
filter = new IntentFilter(BluetoothAdapter.ACTION_DISCOVERY_FINISHED);
this.registerReceiver(mReceiver, filter);
// Get the local Bluetooth adapter
mBtAdapter = BluetoothAdapter.getDefaultAdapter();
// Get a set of currently paired devices
pairedDevices = mBtAdapter.getBondedDevices();
// If there are paired devices, add each one to the ArrayAdapter
if (pairedDevices.size() > 0) {
for (BluetoothDevice device : pairedDevices) {
mPairedDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress());
}
} else {
String noDevices = "No devices found";
mPairedDevicesArrayAdapter.add(noDevices);
}
}
protected void onDestroy() {
super.onDestroy();
// Make sure we're not doing discovery anymore
if (mBtAdapter != null) {
mBtAdapter.cancelDiscovery();
}
// Unregister broadcast listeners
this.unregisterReceiver(mReceiver);
this.finish();
}
// Start device discover with the BluetoothAdapter
private void doDiscovery() {
if (D) Log.d(TAG, "doDiscovery()");
// Remove all element from the list
mPairedDevicesArrayAdapter.clear();
// If there are paired devices, add each one to the ArrayAdapter
if (pairedDevices.size() > 0) {
for (BluetoothDevice device : pairedDevices) {
mPairedDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress());
}
} else {
String strNoFound = getIntent().getStringExtra("no_devices_found");
if(strNoFound == null)
strNoFound = "No devices found";
mPairedDevicesArrayAdapter.add(strNoFound);
}
// Indicate scanning in the title
String strScanning = getIntent().getStringExtra("scanning");
if(strScanning == null)
strScanning = "Scanning for devices...";
setProgressBarIndeterminateVisibility(true);
setTitle(strScanning);
// Turn on sub-title for new devices
// findViewById(R.id.title_new_devices).setVisibility(View.VISIBLE);
// If we're already discovering, stop it
if (mBtAdapter.isDiscovering()) {
mBtAdapter.cancelDiscovery();
}
// Request discover from BluetoothAdapter
mBtAdapter.startDiscovery();
}
// The on-click listener for all devices in the ListViews
private OnItemClickListener mDeviceClickListener = new OnItemClickListener() {
public void onItemClick(AdapterView<?> av, View v, int arg2, long arg3) {
// Cancel discovery because it's costly and we're about to connect
if(mBtAdapter.isDiscovering())
mBtAdapter.cancelDiscovery();
String strNoFound = getIntent().getStringExtra("no_devices_found");
if(strNoFound == null)
strNoFound = "No devices found";
if(!((TextView) v).getText().toString().equals(strNoFound)) {
// Get the device MAC address, which is the last 17 chars in the View
String info = ((TextView) v).getText().toString();
String address = info.substring(info.length() - 17);
// Create the result Intent and include the MAC address
Intent intent = new Intent();
intent.putExtra(BluetoothState.EXTRA_DEVICE_ADDRESS, address);
// Set result and finish this Activity
setResult(Activity.RESULT_OK, intent);
finish();
}
}
};
// The BroadcastReceiver that listens for discovered devices and
// changes the title when discovery is finished
private final BroadcastReceiver mReceiver = new BroadcastReceiver() {
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
// When discovery finds a device
if (BluetoothDevice.ACTION_FOUND.equals(action)) {
// Get the BluetoothDevice object from the Intent
BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
// If it's already paired, skip it, because it's been listed already
if (device.getBondState() != BluetoothDevice.BOND_BONDED) {
String strNoFound = getIntent().getStringExtra("no_devices_found");
if(strNoFound == null)
strNoFound = "No devices found";
if(mPairedDevicesArrayAdapter.getItem(0).equals(strNoFound)) {
mPairedDevicesArrayAdapter.remove(strNoFound);
}
mPairedDevicesArrayAdapter.add(device.getName() + "\n" + device.getAddress());
}
// When discovery is finished, change the Activity title
} else if (BluetoothAdapter.ACTION_DISCOVERY_FINISHED.equals(action)) {
setProgressBarIndeterminateVisibility(false);
String strSelectDevice = getIntent().getStringExtra("select_device");
if(strSelectDevice == null)
strSelectDevice = "Select a device to connect";
setTitle(strSelectDevice);
}
}
};
}
| |
/*
* Copyright 2017 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link RemoveUnusedCode} that cover removal of instance properties and properties
* defined directly on constructors.
*
* <p>Note that removal of variables is actually disabled for these test cases to make it easier to
* construct cases where only parts of a class will be removed.
*/
@RunWith(JUnit4.class)
public final class RemoveUnusedCodeClassPropertiesTest extends CompilerTestCase {
private static final String EXTERNS =
lines(
"/**",
" * @constructor",
" * @param {*=} opt_value",
" * @return {!Object}",
" */",
"function Object(opt_value) {}",
"/**",
" * @constructor",
" * @param {...*} var_args",
" */",
"function Function(var_args) {}",
"/**",
" * @constructor",
" * @param {*=} arg",
" * @return {string}",
" */",
"function String(arg) {}",
"/**",
" * @record",
" * @template VALUE",
" */",
"/**",
" * @template T",
" * @constructor ",
" * @param {...*} var_args",
" * @return {!Array<?>}",
" */",
"function Array(var_args) {}",
"var window;",
"function alert(a) {}",
"function use(x) {}",
"var EXT = {};",
"EXT.ext;",
"var externVar;",
"function externFunction() {}",
"/** @type {Function} */",
"Object.defineProperties = function() {};",
"/** @type {Function} */",
"Object.prototype.constructor = function() {};",
// NOTE: The following are needed to prevent NTI inexistent property warnings.
"var $jscomp = {};",
"$jscomp.global = {}",
"/** @type {?} */",
"$jscomp.global.Object",
"function JSCompiler_renameProperty(p) {}",
"var goog = {};",
"goog.reflect = {};",
"goog.reflect.object = function(a, b) {};");
  /** Installs the test-local externs so the property references used by the tests resolve. */
  public RemoveUnusedCodeClassPropertiesTest() {
    super(EXTERNS);
  }
  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    // Configure RemoveUnusedCode to remove only property definitions:
    // prototype properties, `this.` properties, and entries passed to
    // Object.defineProperties. Variable removal is deliberately left off so
    // tests can observe partial removal of a class (see class javadoc).
    return new RemoveUnusedCode.Builder(compiler)
        .removeUnusedPrototypeProperties(true)
        .removeUnusedThisProperties(true)
        .removeUnusedObjectDefinePropertiesDefinitions(true)
        .build();
  }
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    // Allow testing of features that aren't fully supported for output yet.
    enableNormalize();
    disableCompareJsDoc(); // normalization may rewrite JSDoc; don't diff it
    enableGatherExternProperties();
    disableTypeCheck(); // individual tests re-enable type checking as needed
  }
@Test
public void testSimple1() {
// A property defined on "this" can be removed
test("this.a = 2", "");
test("let x = (this.a = 2)", "let x = 2");
testSame("this.a = 2; let x = this.a;");
}
@Test
public void testSimple2() {
// A property defined on "this" can be removed, even when defined
// as part of an expression
test("this.a = 2, alert(1);", "alert(1);");
test("const x = (this.a = 2, alert(1));", "const x = alert(1);");
test("const x = (alert(1), this.a = 2);", "const x = (alert(1), 2);");
}
@Test
public void testSimple3() {
// A property defined on an object other than "this" can not be removed.
testSame("var y = {}; y.a = 2");
// and prevents the removal of the definition on 'this'.
testSame("var y = {}; y.a = 2; this.a = 2");
// Some use of the property "a" prevents the removal.
testSame("var x; var y = {}; y.a = 2; this.a = 1; alert(x.a)");
}
@Test
public void testObjLit() {
// A property defined on an object other than "this" can not be removed.
testSame("({a:2})");
// and prevent the removal of the definition on 'this'.
testSame("({a:0}); this.a = 1;");
// ... even if it's quoted
testSame("({'a':0}); this.a = 1;");
// Some use of the property "a" prevents the removal.
testSame("var x = ({a:0}); this.a = 1; alert(x.a)");
}
@Test
public void testExtern() {
// A property defined in the externs is can not be removed.
testSame("this.ext = 2");
}
@Test
public void testExport() {
// An exported property can not be removed.
testSame("this.ext = 2; window['export'] = this.ext;");
testSame("function f() { this.ext = 2; } window['export'] = this.ext;");
}
@Test
public void testAssignOp1() {
// Properties defined using a compound assignment can be removed if the
// result of the assignment expression is not immediately used.
test("this.x += 2", "");
testSame("const x = (this.x += 2)");
testSame("this.x += 2; const x = this.x;");
// But, of course, a later use prevents its removal.
testSame("this.x += 2; let x = {}; x.x;");
}
@Test
public void testAssignOp2() {
// Properties defined using a compound assignment can be removed if the
// result of the assignment expression is not immediately used.
test("this.a += 2, alert(1)", "alert(1)");
test("const x = (this.a += 2, alert(1))", "const x = alert(1)");
testSame("const x = (alert(1), this.a += 2)");
}
@Test
public void testInc1() {
// Increments and Decrements are handled similarly to compound assignments
// but need a placeholder value when replaced.
test("this.x++", "");
testSame("let x = (this.x++)");
testSame("this.x++; let x = this.x;");
test("--this.x", "");
testSame("let x = (--this.x)");
testSame("--this.x; let x = this.x;");
}
@Test
public void testInc2() {
// Increments and Decrements are handled similarly to compound assignments
// but need a placeholder value when replaced.
test("this.a++, alert()", "alert()");
test("let x = (this.a++, alert())", "let x = alert()");
testSame("let x = (alert(), this.a++)");
test("--this.a, alert()", "alert()");
test("let x = (--this.a, alert())", "let x = alert()");
testSame("let x = (alert(), --this.a)");
}
@Test
public void testDestructuringRest() {
testSame(
lines(
"function Foo() {}", //
"Foo.a = function() {};",
"({ ...Foo.a.b } = 0);"));
}
@Test
public void testExprResult() {
test("this.x", "");
test("externFunction().prototype.x", "externFunction()");
// It doesn't make much sense to use optional chaining in these cases, but if you do,
// it shouldn't prevent unused property removal
test("this?.x", "");
test("externFunction()?.prototype.x", "externFunction()");
}
@Test
public void testJSCompiler_renameProperty() {
// JSCompiler_renameProperty introduces a use of the property
testSame("var x; this.a = 2; x[JSCompiler_renameProperty('a')]");
testSame("this.a = 2; JSCompiler_renameProperty('a')");
}
@Test
public void testForIn() {
// This is the basic assumption that this pass makes:
// it can remove properties even when the object is used in a FOR-IN loop
test(
"let x = {}; this.y = 1;for (var a in x) { alert(x[a]) }",
"let x = {}; for (var a in x) { alert(x[a]) }");
}
@Test
public void testObjectKeys() {
// This is the basic assumption that this pass makes:
// it can remove properties even when the object are referenced
test(
"this.y = 1;alert(Object.keys(this))", // preserve format
" alert(Object.keys(this))");
}
@Test
public void testObjectReflection1() {
// Verify reflection prevents removal.
testSame(
lines(
"/** @constructor */", // preserve newlines
"function A() { this.foo = 1; }",
"use(goog.reflect.object(A, {foo: 'foo'}));"));
}
@Test
public void testObjectReflection2() {
// Any object literal definition prevents removal.
// Type based removal would allow this to be removed.
testSame(
lines(
"/** @constructor */", // preserve newlines
"function A() {this.foo = 1;}",
"use({foo: 'foo'});"));
}
@Test
public void testIssue730() {
// Partial removal of properties can causes problems if the object is
// sealed.
testSame(
lines(
"function A() {this.foo = 0;}",
"function B() {this.a = new A();}",
"B.prototype.dostuff = function() { this.a.foo++; alert('hi'); }",
"new B().dostuff();"));
}
@Test
public void testPrototypeProps1() {
test(
lines(
"function A() {this.foo = 1;}",
"A.prototype.foo = 0;",
"A.prototype.method = function() {this.foo++};",
"new A().method()"),
lines(
"function A() { }",
" ",
"A.prototype.method = function() { };",
"new A().method()"));
}
@Test
public void testPrototypeProps2() {
// don't remove properties that are exported by convention
testSame(
"function A() {this._foo = 1;}\n"
+ "A.prototype._foo = 0;\n"
+ "A.prototype.method = function() {this._foo++};\n"
+ "new A().method()\n");
}
@Test
public void testConstructorProperty1() {
enableTypeCheck();
test(
"/** @constructor */ function C() {} C.prop = 1;",
"/** @constructor */ function C() {} ");
}
@Test
public void testConstructorProperty2() {
enableTypeCheck();
testSame(
lines(
"/** @constructor */ function C() {} ",
"C.prop = 1; ",
"function foo(a) { alert(a.prop) }; ",
"foo(C)"));
}
@Test
public void testES6StaticProperty() {
test(
"class C { static prop() {} }", // preserve newline
"class C { }");
}
@Test
public void testES6StaticProperty2() {
test("class C {} C.prop = 1;", "class C {}");
}
@Test
public void testObjectDefineProperties1() {
enableTypeCheck();
testSame(
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, {prop:{value:1}});",
"function foo(a) { alert(a.prop) };",
"foo(C)"));
}
@Test
public void testObjectDefineProperties2() {
enableTypeCheck();
test(
lines(
"/** @constructor */ function C() {}", "Object.defineProperties(C, {prop:{value:1}});"),
lines("/** @constructor */ function C() {}", "Object.defineProperties(C, {});"));
}
@Test
public void testObjectDefineProperties3() {
enableTypeCheck();
test(
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, ",
" {prop:{",
" get:function(){},",
" set:function(a){},",
"}});"),
lines("/** @constructor */ function C() {}", "Object.defineProperties(C, {});"));
}
// side-effect in definition retains property definition, but doesn't count as a reference
@Test
public void testObjectDefineProperties4() {
enableTypeCheck();
test(
lines(
"/** @constructor */ function C() { this.prop = 3; }",
"Object.defineProperties(C, {prop:alert('')});"),
lines(
"/** @constructor */ function C() { }",
"Object.defineProperties(C, {prop:alert('')});"));
}
// quoted properties retains property
@Test
public void testObjectDefineProperties5() {
enableTypeCheck();
testSame(
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, {'prop': {value: 1}});"));
}
@Test
public void testObjectDefineProperties6() {
enableTypeCheck();
// an unknown destination object doesn't prevent removal.
test(
"Object.defineProperties(externVar(), {prop:{value:1}});",
"Object.defineProperties(externVar(), { });");
}
@Test
public void testObjectDefineProperties7() {
enableTypeCheck();
test(
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, {prop:{get:function () {return new C}}});"),
lines("/** @constructor */ function C() {}", "Object.defineProperties(C, {});"));
}
@Test
public void testObjectDefineProperties8() {
enableTypeCheck();
test(
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, {prop:{set:function (a) {return alert(a)}}});"),
lines("/** @constructor */ function C() {}", "Object.defineProperties(C, {});"));
}
@Test
public void testObjectDefinePropertiesQuotesPreventRemoval() {
enableTypeCheck();
testSame(
lines(
"/** @constructor */ function C() { this.prop = 1; }",
"Object.defineProperties(C, {'prop':{set:function (a) {return alert(a.prop)}}});"));
}
@Test
public void testObjectDefineProperties_usedSetter_notRemoved() {
enableTypeCheck();
testSame(
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, {prop:{set:function (a) {alert(2)}}});",
"C.prop = 2;"));
}
@Test
public void testPrototypeMethodDef_notConsideredSetterUse() {
enableTypeCheck();
test(
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, {prop:{set:function (a) {alert(2)}}});",
"/** @constructor */ function D () {}",
"D.prototype.prop = function() {};"),
lines(
"/** @constructor */ function C() {}",
"Object.defineProperties(C, {});",
"/** @constructor */ function D () {}"));
}
@Test
public void testEs6GettersWithoutTranspilation() {
test(
"class C { get value() { return 0; } }", // preserve newline
"class C { }");
testSame("class C { get value() { return 0; } } const x = (new C()).value");
}
@Test
public void testES6ClassComputedProperty() {
testSame("class C { ['test' + 3]() { return 0; } }");
}
@Test
public void testEs6SettersWithoutTranspilation() {
test(
"class C { set value(val) { this.internalVal = val; } }", // preserve newline
"class C { }");
test(
"class C { set value(val) { this.internalVal = val; } } (new C()).value = 3;",
"class C { set value(val) { } } (new C()).value = 3;");
testSame(
lines(
"class C {",
" set value(val) {",
" this.internalVal = val;",
" }",
" get value() {",
" return this.internalVal;",
" }",
"}",
"const y = new C();",
"y.value = 3;",
"const x = y.value;"));
}
// All object literal fields are not removed, but the following
// tests assert that the pass does not fail.
@Test
public void testEs6EnhancedObjLiteralsComputedValuesNotRemoved() {
testSame(
lines(
"function getCar(make, model, value) {",
" return {",
" ['make' + make] : true",
" };",
"}"));
}
@Test
public void testEs6EnhancedObjLiteralsMethodShortHandNotRemoved() {
testSame(
lines(
"function getCar(make, model, value) {",
" return {",
" getModel() {",
" return model;",
" }",
" };",
"}"));
}
@Test
public void testEs6EnhancedObjLiteralsPropertyShorthand() {
testSame("function getCar(make, model, value) { return {model}; }");
}
@Test
public void testTranspiledEs6GettersRemoval() {
enableTypeCheck();
test(
// This is the output of ES6->ES5 class getter converter.
// See Es6TranspilationIntegrationTest.testEs5GettersAndSettersClasses test method.
lines(
"/** @constructor @struct */",
"var C = function() {};",
"/** @type {?} */",
"C.prototype.value = 0;",
"$jscomp.global.Object.defineProperties(C.prototype, {",
" value: {",
" configurable: true,",
" enumerable: true,",
" /** @this {C} */",
" get: function() {",
" return 0;",
" }",
" }",
"});"),
lines(
"/** @constructor @struct */var C=function(){};",
"$jscomp.global.Object.defineProperties(C.prototype, {});"));
}
@Test
public void testTranspiledEs6SettersRemoval() {
enableTypeCheck();
test(
// This is the output of ES6->ES5 class setter converter.
// See Es6TranspilationIntegrationTest.testEs5GettersAndSettersClasses test method.
lines(
"/** @constructor @struct */",
"var C = function() {};",
"/** @type {?} */",
"C.prototype.value;",
"/** @type {?} */",
"C.prototype.internalVal;",
"$jscomp.global.Object.defineProperties(C.prototype, {",
" value: {",
" configurable: true,",
" enumerable: true,",
" /** @this {C} */",
" set: function(val) {",
" this.internalVal = val;",
" }",
" }",
"});"),
lines(
"/** @constructor @struct */var C=function(){};",
"$jscomp.global.Object.defineProperties(C.prototype, {});"));
}
@Test
public void testEs6ArrowFunction() {
test(
"const arrow = () => this.a = 1;", // preserve newline
"const arrow = () => 1;");
testSame("const arrow = () => ({a: 2})");
testSame("var y = {}; const arrow = () => {y.a = 2; this.a = 2;}");
test(
lines(
"function A() {",
" this.foo = 1;",
"}",
"A.prototype.foo = 0;",
"A.prototype.getIncr = function() {",
" return () => { this.foo++; };",
"};",
"new A().getIncr()"),
lines(
"function A() {",
" ",
"}",
" ",
"A.prototype.getIncr = function() {",
" return () => { };",
"};",
"new A().getIncr()"));
}
@Test
public void testEs6Generator() {
test(
"function* gen() { yield this.a = 1; }", // preserve newline
"function* gen() { yield 1; }");
testSame("function* gen() { yield this.a = 1; yield this.a; }");
}
@Test
public void testEs6Destructuring() {
// Test normal destructuring removal
test(
"[this.x, this.y] = [1, 2]", // preserve newline
"[ ] = [1, 2]");
// Test normal destructuring, assignment prevent removal
test(
lines(
"[this.x, this.y] = [1, 2]", // preserve newline
"var p = this.x;"),
lines(
"[this.x ] = [1, 2]", // preserve newline
"var p = this.x;"));
// Test rest destructuring, `this` property
test(
"[this.x, ...this.z] = [1, 2, 3]", // preserve newline
"[ , ...this.z] = [1, 2, 3]");
// Test rest destructuring with normal variable
test(
"let z; [this.x, ...z] = [1, 2]", // preserve newline
"let z; [ , ...z] = [1, 2]");
// Test rest destructuring, assignment prevent removal
test(
lines(
"[this.x, ...this.y] = [1, 2];", // preserve newline
"var p = this.y;"),
lines(
"[ , ...this.y] = [1, 2];", // preserve newline
"var p = this.y;"));
// Test destructuring rhs prevent removal
testSame(
lines(
"let a;",
"this.x = 1;", // preserve newline
"this.y = 2;",
"[...a] = [this.x, this.y];"));
// Test nested destructuring
test(
"let z; [this.x, [this.y, ...z]] = [1, [2]]", // preserve newline
"let z; [ , [ , ...z]] = [1, [2]]");
// Test normal object destructuring full removal
test("({a: this.x, b: this.y} = {a: 1, b: 2})", "({} = {a: 1, b: 2})");
// Test normal object destructuring partial removal
test("let y; ({a: this.x, b: y} = {a: 1, b: 2})", "let y; ({ b: y} = {a: 1, b: 2})");
// Test obj destructuring prevent removal
test(
lines("({a: this.x, b: this.y} = {a: 1, b: 2});", "var p = this.x;"),
lines("({a: this.x} = {a: 1, b: 2});", "var p = this.x;"));
// Test obj destructuring with old style class
testSame(
lines(
"/** @constructor */ function C () {",
" this.a = 1;",
"}",
"let x;",
"({a: x} = new C());"));
// Test obj destructuring with new style class
testSame(
lines(
"class C {",
" constructor() {",
" this.a = 1;",
" }",
"}",
"let x;",
"({a: x} = new C());"));
// Test let destructuring
testSame(
lines(
"class C {",
" constructor() {",
" this.a = 1;",
" }",
"}",
"let {a: x} = new C();"));
// Test obj created at a different location and later used in destructuring
testSame(
lines(
"class C {",
" constructor() {",
" this.a = 1;",
" }",
"}",
"var obj = new C()",
"let x;",
"({a: x} = obj);"));
// Test obj destructuring with default value
testSame(
lines(
"class C {",
" constructor() {",
" this.a = 1;",
" }",
"}",
"let a;",
"({a = 2} = new C());"));
// Test obj nested destructuring
testSame(
lines(
"class C {",
" constructor() {",
" this.a = 1;",
" }",
"}",
"var obj = new C()",
"let a;",
"({x: {a}} = {x: obj});"));
// Computed Property string expression doesn't prevent removal.
test(
"({['a']:0}); this.a = 1;", // preserve newline
"({['a']:0}); ");
}
@Test
public void testDestrucuturing_assginmentToProperty_consideredUse() {
testSame(
lines(
"class Foo {",
" constructor() {",
" this.x = 0;",
" }",
"}",
"",
"({a: new Foo().x} = {a: 0});"));
}
@Test
public void testEs6DefaultParameter() {
test(
"function foo(x, y = this.a = 1) {}", // preserve newline
"function foo(x, y = 1) {}");
testSame("this.a = 1; function foo(x, y = this.a) {}");
}
@Test
public void testEs8AsyncFunction() {
test(
lines(
"async function foo(promise) {", // preserve newlines
" this.x = 1;",
" return await promise;",
"}"),
lines(
"async function foo(promise) {", // preserve newlines
" ",
" return await promise;",
"}"));
testSame(lines("async function foo() {", " this.x = 1;", " return await this.x;", "}"));
testSame(lines("this.x = 1;", "async function foo() {", " return await this.x;", "}"));
}
@Test
public void testField() {
test(
lines(
"class C {", //
" x = 1;",
" y;",
" z = 'hi';",
" static x = 1;",
" static y;",
" static z = 'hi';",
"}"),
lines(
"class C {", //
" ",
"}"));
}
@Test
public void testComputedField() {
testSame(
lines(
"class C {", //
" ['x'] = 1;",
" 'y';",
" 1 = 'hi';",
" static ['x'] = 1;",
" static 'y';",
" static 1 = 'hi';",
"}"));
}
@Test
public void testMixedField() {
// Computed properties cannot be removed, so only non-computed properties are removed
test(
lines(
"class C {", //
" x = 1;",
" y;",
" z = 'hi';",
" static x = 1;",
" static y;",
" static z = 'hi';",
" ['x'] = 1;",
" 'y';",
" 1 = 'hi';",
" static ['x'] = 1;",
" static 'y';",
" static 1 = 'hi';",
"}"),
lines(
"class C {", //
" ['x'] = 1;",
" 'y';",
" 1 = 'hi';",
" static ['x'] = 1;",
" static 'y';",
" static 1 = 'hi';",
"}"));
testSame(
lines(
"class C {", //
" [alert()] = 5;",
"}"));
testSame(
lines(
"class C {", //
" static x = alert();",
"}"));
testSame(
lines(
"class C {", //
" x = alert();",
"}"));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.Arrays;
import java.util.Random;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.RandomDatum;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.compress.snappy.LoadSnappy;
import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import static org.junit.Assert.*;
public class TestCodec {
private static final Log LOG= LogFactory.getLog(TestCodec.class);
private Configuration conf = new Configuration();
private int count = 10000;
private int seed = new Random().nextInt();
@Test
public void testDefaultCodec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DefaultCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.DefaultCodec");
}
@Test
public void testGzipCodec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.GzipCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
}
@Test
public void testBZip2Codec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
}
@Test
public void testSnappyCodec() throws IOException {
if (LoadSnappy.isAvailable()) {
if (LoadSnappy.isLoaded()) {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.SnappyCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.SnappyCodec");
}
else {
fail("Snappy native available but Hadoop native not");
}
}
}
@Test
public void testGzipCodecWithParam() throws IOException {
Configuration conf = new Configuration(this.conf);
ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);
ZlibFactory.setCompressionStrategy(conf, CompressionStrategy.HUFFMAN_ONLY);
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.GzipCodec");
codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
}
/**
 * Round-trips {@code count} random key/value records through the named codec:
 * compresses the serialized records, decompresses them, and verifies equality
 * both record-by-record and via a byte-at-a-time read of the inflate stream.
 *
 * @param conf       configuration used to instantiate the codec
 * @param seed       seed for the random record generator (reproducible runs)
 * @param count      number of records to generate
 * @param codecClass fully-qualified class name of the codec under test
 * @throws IOException if the codec class cannot be found or a stream fails
 */
private static void codecTest(Configuration conf, int seed, int count,
                              String codecClass)
    throws IOException {
  // Create the codec
  CompressionCodec codec = null;
  try {
    codec = (CompressionCodec)
        ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
  } catch (ClassNotFoundException cnfe) {
    // FIX: chain the cause so the missing class shows up in the stack trace
    // instead of being silently discarded.
    throw new IOException("Illegal codec!", cnfe);
  }
  LOG.info("Created a Codec object of type: " + codecClass);
  // Generate data: a sequence of serialized random key/value pairs.
  DataOutputBuffer data = new DataOutputBuffer();
  RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for (int i = 0; i < count; ++i) {
    generator.next();
    RandomDatum key = generator.getKey();
    RandomDatum value = generator.getValue();
    key.write(data);
    value.write(data);
  }
  LOG.info("Generated " + count + " records");
  // Compress data
  DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
  CompressionOutputStream deflateFilter =
      codec.createOutputStream(compressedDataBuffer);
  DataOutputStream deflateOut =
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
  deflateOut.write(data.getData(), 0, data.getLength());
  deflateOut.flush();
  // finish() flushes codec-internal state without closing the buffer.
  deflateFilter.finish();
  LOG.info("Finished compressing data");
  // De-compress data
  DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
  deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
      compressedDataBuffer.getLength());
  CompressionInputStream inflateFilter =
      codec.createInputStream(deCompressedDataBuffer);
  DataInputStream inflateIn =
      new DataInputStream(new BufferedInputStream(inflateFilter));
  // Check: every decompressed record must equal the original record.
  DataInputBuffer originalData = new DataInputBuffer();
  originalData.reset(data.getData(), 0, data.getLength());
  DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
  for (int i = 0; i < count; ++i) {
    RandomDatum k1 = new RandomDatum();
    RandomDatum v1 = new RandomDatum();
    k1.readFields(originalIn);
    v1.readFields(originalIn);
    RandomDatum k2 = new RandomDatum();
    RandomDatum v2 = new RandomDatum();
    k2.readFields(inflateIn);
    v2.readFields(inflateIn);
    assertTrue("original and compressed-then-decompressed-output not equal",
        k1.equals(k2) && v1.equals(v2));
  }
  // De-compress data byte-at-a-time to exercise the single-byte read path.
  originalData.reset(data.getData(), 0, data.getLength());
  deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
      compressedDataBuffer.getLength());
  inflateFilter =
      codec.createInputStream(deCompressedDataBuffer);
  // Check: byte streams must match all the way to EOF (-1).
  originalIn = new DataInputStream(new BufferedInputStream(originalData));
  int expected;
  do {
    expected = originalIn.read();
    assertEquals("Inflated stream read by byte does not match",
        expected, inflateFilter.read());
  } while (expected != -1);
  LOG.info("SUCCESS! Completed checking " + count + " records");
}
@Test
public void testSplitableCodecs() throws Exception {
  // BZip2 is the splittable codec exercised here; the generic machinery
  // lives in testSplitableCodec below.
  testSplitableCodec(BZip2Codec.class);
}
/**
 * Writes a compressed test file, then opens it at random byte positions via
 * the splittable-codec API and verifies that two consecutive records (as laid
 * out by writeSplitTestFile) can be read starting from each adjusted split.
 */
private void testSplitableCodec(
    Class<? extends SplittableCompressionCodec> codecClass)
    throws IOException {
  final long DEFLBYTES = 2 * 1024 * 1024;
  final Configuration conf = new Configuration();
  final Random rand = new Random();
  final long seed = rand.nextLong();
  // Log the seed so a failing run can be reproduced.
  LOG.info("seed: " + seed);
  rand.setSeed(seed);
  SplittableCompressionCodec codec =
      ReflectionUtils.newInstance(codecClass, conf);
  final FileSystem fs = FileSystem.getLocal(conf);
  final FileStatus infile =
      fs.getFileStatus(writeSplitTestFile(fs, rand, codec, DEFLBYTES));
  if (infile.getLen() > Integer.MAX_VALUE) {
    fail("Unexpected compression: " + DEFLBYTES + " -> " + infile.getLen());
  }
  final int flen = (int) infile.getLen();
  final Text line = new Text();
  final Decompressor dcmp = CodecPool.getDecompressor(codec);
  try {
    // NOTE(review): rand.nextInt(flen / 8) can return 0, in which case pos
    // does not advance and this loop re-samples the same position — confirm
    // whether that is intended.
    for (int pos = 0; pos < infile.getLen(); pos += rand.nextInt(flen / 8)) {
      // read from random positions, verifying that there exist two sequential
      // lines as written in writeSplitTestFile
      final SplitCompressionInputStream in =
          codec.createInputStream(fs.open(infile.getPath()), dcmp,
              pos, flen, SplittableCompressionCodec.READ_MODE.BYBLOCK);
      if (in.getAdjustedStart() >= flen) {
        break;
      }
      LOG.info("SAMPLE " + in.getAdjustedStart() + "," + in.getAdjustedEnd());
      final LineReader lreader = new LineReader(in);
      lreader.readLine(line); // ignore; likely partial
      if (in.getPos() >= flen) {
        break;
      }
      lreader.readLine(line);
      final int seq1 = readLeadingInt(line);
      lreader.readLine(line);
      if (in.getPos() >= flen) {
        break;
      }
      final int seq2 = readLeadingInt(line);
      // Records carry increasing leading sequence numbers.
      assertEquals("Mismatched lines", seq1 + 1, seq2);
    }
  } finally {
    CodecPool.returnDecompressor(dcmp);
  }
  // remove on success
  fs.delete(infile.getPath().getParent(), true);
}
/** Decodes the binary int that writeSplitTestFile places at the start of a line. */
private static int readLeadingInt(Text txt) throws IOException {
  final ByteArrayInputStream raw = new ByteArrayInputStream(txt.getBytes());
  final DataInputStream din = new DataInputStream(raw);
  return din.readInt();
}
/**
 * Writes roughly {@code infLen} bytes (pre-compression) to a file in the test
 * dir using the given codec. Each line is a base64-encoded random record whose
 * first 4 bytes are overwritten with a binary, monotonically increasing
 * sequence number (read back by readLeadingInt).
 *
 * @return path of the compressed file that was written
 */
private static Path writeSplitTestFile(FileSystem fs, Random rand,
    CompressionCodec codec, long infLen) throws IOException {
  final int REC_SIZE = 1024;
  final Path wd = new Path(new Path(
      System.getProperty("test.build.data", "/tmp")).makeQualified(fs),
      codec.getClass().getSimpleName());
  final Path file = new Path(wd, "test" + codec.getDefaultExtension());
  final byte[] b = new byte[REC_SIZE];
  final Base64 b64 = new Base64(0, null);
  DataOutputStream fout = null;
  Compressor cmp = CodecPool.getCompressor(codec);
  try {
    fout = new DataOutputStream(codec.createOutputStream(
        fs.create(file, true), cmp));
    final DataOutputBuffer dob = new DataOutputBuffer(REC_SIZE * 4 / 3 + 4);
    int seq = 0;
    while (infLen > 0) {
      rand.nextBytes(b);
      final byte[] b64enc = b64.encode(b); // ensures rand printable, no LF
      dob.reset();
      dob.writeInt(seq);
      // Stamp the binary sequence number over the first bytes of the record.
      System.arraycopy(dob.getData(), 0, b64enc, 0, dob.getLength());
      fout.write(b64enc);
      fout.write('\n');
      ++seq;
      infLen -= b64enc.length;
    }
    LOG.info("Wrote " + seq + " records to " + file);
  } finally {
    // cleanup closes the stream and swallows/logs secondary errors.
    IOUtils.cleanup(LOG, fout);
    CodecPool.returnCompressor(cmp);
  }
  return file;
}
@Test
public void testCodecPoolGzipReuse() throws Exception {
  final Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", true);
  // The pool-identity check below only makes sense with native zlib.
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("testCodecPoolGzipReuse skipped: native libs not loaded");
    return;
  }
  final GzipCodec gzipCodec = ReflectionUtils.newInstance(GzipCodec.class, conf);
  final DefaultCodec defaultCodec = ReflectionUtils.newInstance(DefaultCodec.class, conf);
  final Compressor fromGzip = CodecPool.getCompressor(gzipCodec);
  final Compressor fromDefault = CodecPool.getCompressor(defaultCodec);
  CodecPool.returnCompressor(fromGzip);
  CodecPool.returnCompressor(fromDefault);
  // A DefaultCodec compressor must not be handed back for a GzipCodec request.
  assertTrue("Got mismatched ZlibCompressor",
      fromDefault != CodecPool.getCompressor(gzipCodec));
}
/**
 * Verifies that a pooled compressor is re-initialized with the configuration
 * passed on the second checkout: the same instance is returned from the pool,
 * but with NO_COMPRESSION applied, so its output is not smaller than its input.
 */
private static void gzipReinitTest(Configuration conf, CompressionCodec codec)
    throws IOException {
  // Add codec to cache with BEST_COMPRESSION / DEFAULT_STRATEGY first.
  ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);
  ZlibFactory.setCompressionStrategy(conf,
      CompressionStrategy.DEFAULT_STRATEGY);
  Compressor c1 = CodecPool.getCompressor(codec);
  CodecPool.returnCompressor(c1);
  // reset compressor's compression level to perform no compression
  ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
  Compressor c2 = CodecPool.getCompressor(codec, conf);
  // ensure same compressor placed earlier
  assertTrue("Got mismatched ZlibCompressor", c1 == c2);
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  CompressionOutputStream cos = null;
  // write trivially compressable data
  byte[] b = new byte[1 << 15];
  Arrays.fill(b, (byte) 43);
  try {
    cos = codec.createOutputStream(bos, c2);
    cos.write(b);
  } finally {
    if (cos != null) {
      cos.close();
    }
    CodecPool.returnCompressor(c2);
  }
  byte[] outbytes = bos.toByteArray();
  // verify data were not compressed: NO_COMPRESSION output >= input size.
  assertTrue("Compressed bytes contrary to configuration",
      outbytes.length >= b.length);
}
/**
 * Creates a compressor for the named codec with NO_COMPRESSION configured and
 * verifies that the output is not smaller than the input, i.e. that the
 * compression-level setting was honored.
 *
 * @param conf       configuration the codec is instantiated from
 * @param codecClass fully-qualified class name of the codec under test
 * @throws IOException if the codec class cannot be found or a stream fails
 */
private static void codecTestWithNOCompression(Configuration conf,
    String codecClass) throws IOException {
  // Create a compressor with NO_COMPRESSION and make sure that
  // output is not compressed by comparing the size with the
  // original input
  CompressionCodec codec = null;
  ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
  try {
    codec = (CompressionCodec)
        ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
  } catch (ClassNotFoundException cnfe) {
    // FIX: chain the cause so the missing class shows up in the stack trace.
    throw new IOException("Illegal codec!", cnfe);
  }
  Compressor c = codec.createCompressor();
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  CompressionOutputStream cos = null;
  // write trivially compressable data
  byte[] b = new byte[1 << 15];
  Arrays.fill(b, (byte) 43);
  try {
    cos = codec.createOutputStream(bos, c);
    cos.write(b);
  } finally {
    if (cos != null) {
      cos.close();
    }
  }
  byte[] outbytes = bos.toByteArray();
  // verify data were not compressed
  assertTrue("Compressed bytes contrary to configuration(NO_COMPRESSION)",
      outbytes.length >= b.length);
}
@Test
public void testCodecInitWithCompressionLevel() throws Exception {
  // First pass: native zlib, when available.
  Configuration conf = new Configuration();
  conf.setBoolean("io.native.lib.available", true);
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("testCodecInitWithCompressionLevel for native skipped"
        + ": native libs not loaded");
  } else {
    LOG.info("testCodecInitWithCompressionLevel with native");
    codecTestWithNOCompression(conf,
        "org.apache.hadoop.io.compress.GzipCodec");
    codecTestWithNOCompression(conf,
        "org.apache.hadoop.io.compress.DefaultCodec");
  }
  // Second pass: pure-Java implementation.
  conf = new Configuration();
  conf.setBoolean("io.native.lib.available", false);
  codecTestWithNOCompression(conf,
      "org.apache.hadoop.io.compress.DefaultCodec");
}
@Test
public void testCodecPoolCompressorReinit() throws Exception {
  final Configuration conf = new Configuration();
  // First pass: native zlib via GzipCodec, when available.
  conf.setBoolean("hadoop.native.lib", true);
  if (ZlibFactory.isNativeZlibLoaded(conf)) {
    gzipReinitTest(conf, ReflectionUtils.newInstance(GzipCodec.class, conf));
  } else {
    LOG.warn("testCodecPoolCompressorReinit skipped: native libs not loaded");
  }
  // Second pass: pure-Java zlib via DefaultCodec.
  conf.setBoolean("hadoop.native.lib", false);
  gzipReinitTest(conf, ReflectionUtils.newInstance(DefaultCodec.class, conf));
}
@Test
public void testSequenceFileDefaultCodec() throws IOException, ClassNotFoundException,
    InstantiationException, IllegalAccessException {
  // Small and large record counts with matching block sizes.
  final String codec = "org.apache.hadoop.io.compress.DefaultCodec";
  sequenceFileCodecTest(conf, 100, codec, 100);
  sequenceFileCodecTest(conf, 200000, codec, 1000000);
}
@Test
public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundException,
    InstantiationException, IllegalAccessException {
  // Empty, small, and large record counts with matching block sizes.
  final String codec = "org.apache.hadoop.io.compress.BZip2Codec";
  sequenceFileCodecTest(conf, 0, codec, 100);
  sequenceFileCodecTest(conf, 100, codec, 100);
  sequenceFileCodecTest(conf, 200000, codec, 1000000);
}
/**
 * Writes {@code lines} key/value pairs to a block-compressed SequenceFile with
 * the named codec, reads them back, and verifies both content and count.
 *
 * @param conf       configuration; io.seqfile.compress.blocksize is overwritten
 * @param lines      number of records to write
 * @param codecClass fully-qualified class name of the codec to use
 * @param blockSize  SequenceFile compression block size in bytes
 */
private static void sequenceFileCodecTest(Configuration conf, int lines,
                                          String codecClass, int blockSize)
    throws IOException, ClassNotFoundException, InstantiationException, IllegalAccessException {
  Path filePath = new Path("SequenceFileCodecTest." + codecClass);
  // Configuration
  conf.setInt("io.seqfile.compress.blocksize", blockSize);
  // Create the SequenceFile
  FileSystem fs = FileSystem.get(conf);
  LOG.info("Creating SequenceFile with codec \"" + codecClass + "\"");
  SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, filePath,
      Text.class, Text.class, CompressionType.BLOCK,
      (CompressionCodec) Class.forName(codecClass).newInstance());
  // Write some data
  LOG.info("Writing to SequenceFile...");
  try {
    for (int i = 0; i < lines; i++) {
      Text key = new Text("key" + i);
      Text value = new Text("value" + i);
      writer.append(key, value);
    }
  } finally {
    // FIX: close the writer even if an append fails so the file handle is
    // not leaked and the file can still be deleted below.
    writer.close();
  }
  // Read the data back and check
  LOG.info("Reading from the SequenceFile...");
  SequenceFile.Reader reader = new SequenceFile.Reader(fs, filePath, conf);
  Writable key = (Writable) reader.getKeyClass().newInstance();
  Writable value = (Writable) reader.getValueClass().newInstance();
  int lc = 0;
  try {
    while (reader.next(key, value)) {
      assertEquals("key" + lc, key.toString());
      assertEquals("value" + lc, value.toString());
      lc++;
    }
  } finally {
    reader.close();
  }
  assertEquals(lines, lc);
  // Delete temporary files
  fs.delete(filePath, false);
  LOG.info("SUCCESS! Completed SequenceFileCodecTest with codec \"" + codecClass + "\"");
}
/**
 * Command-line entry point: runs a codec round-trip with a configurable
 * record count and codec class.
 */
public static void main(String[] args) {
  int count = 10000;
  String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";
  final String usage = "TestCodec [-count N] [-codec <codec class>]";
  if (args.length == 0) {
    System.err.println(usage);
    System.exit(-1);
  }
  try {
    // parse command line
    int i = 0;
    while (i < args.length) {
      final String arg = args[i];
      if ("-count".equals(arg)) {
        count = Integer.parseInt(args[++i]);
      } else if ("-codec".equals(arg)) {
        codecClass = args[++i];
      }
      ++i;
    }
    final Configuration conf = new Configuration();
    final int seed = 0;
    codecTest(conf, seed, count, codecClass);
  } catch (Exception e) {
    System.err.println("Caught: " + e);
    e.printStackTrace();
  }
}
/**
 * Verifies that GzipCodec (with native libs disabled, i.e. using
 * BuiltInGzipDecompressor) can decode data produced by java.util.zip's
 * GZIPOutputStream.
 */
@Test
public void testGzipCompatibility() throws IOException {
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);
  // Log the seed so a failing run can be reproduced.
  LOG.info("seed: " + seed);
  DataOutputBuffer dflbuf = new DataOutputBuffer();
  GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
  byte[] b = new byte[r.nextInt(128 * 1024 + 1)];
  r.nextBytes(b);
  gzout.write(b);
  gzout.close();
  DataInputBuffer gzbuf = new DataInputBuffer();
  gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
  Decompressor decom = codec.createDecompressor();
  assertNotNull(decom);
  // With native disabled we must get the pure-Java gzip decompressor.
  assertEquals(BuiltInGzipDecompressor.class, decom.getClass());
  InputStream gzin = codec.createInputStream(gzbuf, decom);
  // Reuse dflbuf as the sink for the decompressed bytes.
  dflbuf.reset();
  IOUtils.copyBytes(gzin, dflbuf, 4096);
  final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
  assertTrue(java.util.Arrays.equals(b, dflchk));
}
/**
 * Writes several concatenated gzip members and verifies the codec's
 * decompressor (of the expected class) decodes all of them, not just the
 * first member.
 *
 * NOTE(review): method name violates lowerCamelCase convention; renaming
 * would require updating its callers, so it is left as-is here.
 */
void GzipConcatTest(Configuration conf,
    Class<? extends Decompressor> decomClass) throws IOException {
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);
  // Log the seed so a failing run can be reproduced.
  LOG.info(decomClass + " seed: " + seed);
  final int CONCAT = r.nextInt(4) + 3;
  final int BUFLEN = 128 * 1024;
  DataOutputBuffer dflbuf = new DataOutputBuffer();
  DataOutputBuffer chkbuf = new DataOutputBuffer();
  byte[] b = new byte[BUFLEN];
  for (int i = 0; i < CONCAT; ++i) {
    // Each iteration appends one complete gzip member to dflbuf and the
    // corresponding raw bytes to chkbuf.
    GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
    r.nextBytes(b);
    int len = r.nextInt(BUFLEN);
    int off = r.nextInt(BUFLEN - len);
    chkbuf.write(b, off, len);
    gzout.write(b, off, len);
    gzout.close();
  }
  final byte[] chk = Arrays.copyOf(chkbuf.getData(), chkbuf.getLength());
  CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
  Decompressor decom = codec.createDecompressor();
  assertNotNull(decom);
  assertEquals(decomClass, decom.getClass());
  DataInputBuffer gzbuf = new DataInputBuffer();
  gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
  InputStream gzin = codec.createInputStream(gzbuf, decom);
  // Reuse dflbuf as the sink for the decompressed bytes.
  dflbuf.reset();
  IOUtils.copyBytes(gzin, dflbuf, 4096);
  final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
  assertTrue(java.util.Arrays.equals(chk, dflchk));
}
@Test
public void testBuiltInGzipConcat() throws IOException {
  // Force the pure-Java gzip implementation.
  final Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  GzipConcatTest(conf, BuiltInGzipDecompressor.class);
}
@Test
public void testNativeGzipConcat() throws IOException {
  final Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", true);
  // Concat handling through zlib is only testable with native libs present.
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("skipped: native libs not loaded");
    return;
  }
  GzipConcatTest(conf, GzipCodec.GzipZlibDecompressor.class);
}
/**
 * Verifies that the pool hands out a proper gzip decompressor (never a bare
 * BuiltInZlibInflater) and that pooling semantics differ between the
 * pure-Java and native gzip decompressors.
 */
@Test
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // FIX: use assertNotNull instead of manual null-check + fail().
  Decompressor codecDecompressor = codec.createDecompressor();
  assertNotNull("Got null codecDecompressor", codecDecompressor);
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  assertNotNull("Got null poolDecompressor", poolDecompressor);
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  // The pure-Java gzip decompressor must NOT be reused from the pool; the
  // native one must be.
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
/**
 * Writes a small gzip text file via java.util.zip and reads it back through
 * the codec + CodecPool decompressor path, checking the round-tripped line.
 */
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
/**
 * Reads the first line of a gzip file via java.util.zip, asserts it equals
 * the expected message, and deletes the file afterwards.
 */
private void verifyGzipFile(String filename, String msg) throws IOException {
  final BufferedReader reader = new BufferedReader(new InputStreamReader(
      new GZIPInputStream(new FileInputStream(filename))));
  try {
    final String firstLine = reader.readLine();
    assertEquals("Got invalid line back from " + filename, msg, firstLine);
  } finally {
    reader.close();
    new File(filename).delete();
  }
}
/**
 * Writes gzip files via a pooled Compressor and via
 * codec.createOutputStream(), then verifies both can be read back with
 * java.util.zip's GZIPInputStream.
 */
@Test
public void testGzipCodecWrite() throws IOException {
  // Create a gzipped file using a compressor from the CodecPool,
  // and try to read it back via the regular GZIPInputStream.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibDeflater in it.
  Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
  assertNotNull("zlibCompressor is null!", zlibCompressor);
  assertTrue("ZlibFactory returned unexpected deflator",
      zlibCompressor instanceof BuiltInZlibDeflater);
  CodecPool.returnCompressor(zlibCompressor);
  // Create a GZIP text file via the Compressor interface.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec", codec instanceof GzipCodec);
  final String msg = "This is the message we are going to compress.";
  final String tmpDir = System.getProperty("test.build.data", "/tmp/");
  final String fileName = new Path(new Path(tmpDir),
      "testGzipCodecWrite.txt.gz").toString();
  BufferedWriter w = null;
  Compressor gzipCompressor = CodecPool.getCompressor(codec);
  if (null != gzipCompressor) {
    // If it gives us back a Compressor, we should be able to use this
    // to write files we can then read back with Java's gzip tools.
    OutputStream os = new CompressorStream(new FileOutputStream(fileName),
        gzipCompressor);
    w = new BufferedWriter(new OutputStreamWriter(os));
    w.write(msg);
    w.close();
    CodecPool.returnCompressor(gzipCompressor);
    // verifyGzipFile also deletes the file on success.
    verifyGzipFile(fileName, msg);
  }
  // Create a gzip text file via codec.getOutputStream().
  w = new BufferedWriter(new OutputStreamWriter(
      codec.createOutputStream(new FileOutputStream(fileName))));
  w.write(msg);
  w.close();
  verifyGzipFile(fileName, msg);
}
/**
 * Regression test for HADOOP-8423: seeking in a block-compressed
 * stream would not properly reset the block decompressor state.
 */
@Test
public void testSnappyMapFile() throws Exception {
  // Without the native snappy codec there is nothing to exercise.
  if (!SnappyCodec.isNativeSnappyLoaded(conf)) {
    System.err.println(
        "Could not find the snappy codec to test MapFiles with!");
    return;
  }
  codecTestMapFile(SnappyCodec.class, CompressionType.BLOCK, 100);
}
/**
 * Writes a MapFile with the given codec/compression type and verifies that
 * random-access lookups of two non-adjacent keys succeed (i.e. seeking resets
 * decompressor state correctly — see HADOOP-8423).
 */
private void codecTestMapFile(Class<? extends CompressionCodec> clazz,
    CompressionType type, int records) throws Exception {
  FileSystem fs = FileSystem.get(conf);
  LOG.info("Creating MapFiles with " + records +
      " records using codec " + clazz.getSimpleName());
  Path path = new Path(new Path(
      System.getProperty("test.build.data", "/tmp")),
      clazz.getSimpleName() + "-" + type + "-" + records);
  LOG.info("Writing " + path);
  createMapFile(conf, fs, path, clazz.newInstance(), type, records);
  MapFile.Reader reader = new MapFile.Reader(fs, path.toString(), conf);
  // FIX: close the reader so its underlying streams are not leaked.
  try {
    Text key1 = new Text("002");
    assertNotNull(reader.get(key1, new Text()));
    Text key2 = new Text("004");
    assertNotNull(reader.get(key2, new Text()));
  } finally {
    reader.close();
  }
}
/**
 * Populates a MapFile at {@code path} with {@code records} entries whose keys
 * and values are the zero-padded record index ("000", "001", ...).
 */
private static void createMapFile(Configuration conf, FileSystem fs,
    Path path, CompressionCodec codec, CompressionType type, int records)
    throws IOException {
  MapFile.Writer writer =
      new MapFile.Writer(
          conf,
          fs,
          path.toString(),
          Text.class,
          Text.class,
          type,
          codec,
          null);
  // FIX: close the writer in a finally block so a failed append does not
  // leak the underlying file handles.
  try {
    Text key = new Text();
    for (int j = 0; j < records; j++) {
      key.set(String.format("%03d", j));
      writer.append(key, key);
    }
  } finally {
    writer.close();
  }
}
/**
 * Writes more than 4GiB of zeros through GZIPOutputStream and reads them back
 * through the codec, exercising decompressed-size bookkeeping beyond the
 * 32-bit range of the gzip trailer's size field.
 */
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("io.native.lib.available", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file: (4K + 1) buffers of 1M zero chars each.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  // FIX: idiomatic Arrays.fill instead of a manual loop.
  Arrays.fill(buf, '\0');
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    // FIX: JUnit's assertEquals takes (message, expected, actual); the
    // original had expected and actual swapped in both assertions.
    assertEquals("got wrong read length!", buf.length, n);
    for (int i = 0; i < buf.length; i++) {
      assertEquals("got wrong byte!", '\0', buf[i]);
    }
  }
  br.close();
}
}
| |
// This file was generated by Mendix Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.
package system.proxies;
// Mendix-generated proxy for the "System.SoapFault" entity: wraps an
// IMendixObject and exposes typed accessors for its string members.
// NOTE: this file is generated by the Mendix Modeler; manual edits are
// overwritten on the next deploy (per the header above).
public class SoapFault extends system.proxies.Error
{
    /**
     * Internal name of this entity
     */
    public static final java.lang.String entityName = "System.SoapFault";

    /**
     * Enum describing members of this entity
     */
    public enum MemberNames
    {
        Code("Code"),
        Reason("Reason"),
        Node("Node"),
        Role("Role"),
        Detail("Detail"),
        ErrorType("ErrorType"),
        Message("Message"),
        Stacktrace("Stacktrace");

        // Meta-model member name backing this enum constant.
        private java.lang.String metaName;

        MemberNames(java.lang.String s)
        {
            metaName = s;
        }

        @Override
        public java.lang.String toString()
        {
            return metaName;
        }
    }

    // Creates a brand-new System.SoapFault object in the given context.
    public SoapFault(com.mendix.systemwideinterfaces.core.IContext context)
    {
        this(context, com.mendix.core.Core.instantiate(context, "System.SoapFault"));
    }

    // Wraps an existing Mendix object; rejects objects of unrelated entity types.
    protected SoapFault(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject soapFaultMendixObject)
    {
        super(context, soapFaultMendixObject);
        if (!com.mendix.core.Core.isSubClassOf("System.SoapFault", soapFaultMendixObject.getType()))
            throw new java.lang.IllegalArgumentException("The given object is not a System.SoapFault");
    }

    /**
     * @deprecated Use 'SoapFault.load(IContext, IMendixIdentifier)' instead.
     */
    @Deprecated
    public static system.proxies.SoapFault initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
    {
        return system.proxies.SoapFault.load(context, mendixIdentifier);
    }

    /**
     * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
     * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.getSudoContext() can be used to obtain sudo access).
     */
    public static system.proxies.SoapFault initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject mendixObject)
    {
        return new system.proxies.SoapFault(context, mendixObject);
    }

    // Retrieves the object with the given identifier and wraps it in a proxy.
    public static system.proxies.SoapFault load(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
    {
        com.mendix.systemwideinterfaces.core.IMendixObject mendixObject = com.mendix.core.Core.retrieveId(context, mendixIdentifier);
        return system.proxies.SoapFault.initialize(context, mendixObject);
    }

    /**
     * @return value of Code
     */
    public final java.lang.String getCode()
    {
        return getCode(getContext());
    }

    /**
     * @param context
     * @return value of Code
     */
    public final java.lang.String getCode(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.Code.toString());
    }

    /**
     * Set value of Code
     * @param code
     */
    public final void setCode(java.lang.String code)
    {
        setCode(getContext(), code);
    }

    /**
     * Set value of Code
     * @param context
     * @param code
     */
    public final void setCode(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String code)
    {
        getMendixObject().setValue(context, MemberNames.Code.toString(), code);
    }

    /**
     * @return value of Reason
     */
    public final java.lang.String getReason()
    {
        return getReason(getContext());
    }

    /**
     * @param context
     * @return value of Reason
     */
    public final java.lang.String getReason(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.Reason.toString());
    }

    /**
     * Set value of Reason
     * @param reason
     */
    public final void setReason(java.lang.String reason)
    {
        setReason(getContext(), reason);
    }

    /**
     * Set value of Reason
     * @param context
     * @param reason
     */
    public final void setReason(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String reason)
    {
        getMendixObject().setValue(context, MemberNames.Reason.toString(), reason);
    }

    /**
     * @return value of Node
     */
    public final java.lang.String getNode()
    {
        return getNode(getContext());
    }

    /**
     * @param context
     * @return value of Node
     */
    public final java.lang.String getNode(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.Node.toString());
    }

    /**
     * Set value of Node
     * @param node
     */
    public final void setNode(java.lang.String node)
    {
        setNode(getContext(), node);
    }

    /**
     * Set value of Node
     * @param context
     * @param node
     */
    public final void setNode(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String node)
    {
        getMendixObject().setValue(context, MemberNames.Node.toString(), node);
    }

    /**
     * @return value of Role
     */
    public final java.lang.String getRole()
    {
        return getRole(getContext());
    }

    /**
     * @param context
     * @return value of Role
     */
    public final java.lang.String getRole(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.Role.toString());
    }

    /**
     * Set value of Role
     * @param role
     */
    public final void setRole(java.lang.String role)
    {
        setRole(getContext(), role);
    }

    /**
     * Set value of Role
     * @param context
     * @param role
     */
    public final void setRole(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String role)
    {
        getMendixObject().setValue(context, MemberNames.Role.toString(), role);
    }

    /**
     * @return value of Detail
     */
    public final java.lang.String getDetail()
    {
        return getDetail(getContext());
    }

    /**
     * @param context
     * @return value of Detail
     */
    public final java.lang.String getDetail(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.Detail.toString());
    }

    /**
     * Set value of Detail
     * @param detail
     */
    public final void setDetail(java.lang.String detail)
    {
        setDetail(getContext(), detail);
    }

    /**
     * Set value of Detail
     * @param context
     * @param detail
     */
    public final void setDetail(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String detail)
    {
        getMendixObject().setValue(context, MemberNames.Detail.toString(), detail);
    }

    // Two proxies are equal iff they are the same proxy class wrapping the
    // same underlying Mendix object.
    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
            return true;
        if (obj != null && getClass().equals(obj.getClass()))
        {
            final system.proxies.SoapFault that = (system.proxies.SoapFault) obj;
            return getMendixObject().equals(that.getMendixObject());
        }
        return false;
    }

    // Consistent with equals: delegates to the wrapped object's hashCode.
    @Override
    public int hashCode()
    {
        return getMendixObject().hashCode();
    }

    /**
     * @return String name of this class
     */
    public static java.lang.String getType()
    {
        return "System.SoapFault";
    }

    /**
     * @return String GUID from this object, format: ID_0000000000
     * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
     */
    @Override
    @Deprecated
    public java.lang.String getGUID()
    {
        return "ID_" + getMendixObject().getId().toLong();
    }
}
| |
package org.springframework.cloud.deployer.spi.nomad;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.validation.constraints.NotNull;
import org.springframework.boot.context.embedded.EmbeddedServletContainer;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cloud.deployer.spi.nomad.docker.EntryPointStyle;
/**
* @author Donovan Muller
*/
@ConfigurationProperties(prefix = "spring.cloud.deployer.nomad")
public class NomadDeployerProperties {
/**
 * Task resource requirements, mapped onto
 * {@link io.github.zanella.nomad.v1.nodes.models.Resources}. See
 * https://www.nomadproject.io/docs/job-specification/resources.html
 */
public static class Resources {

    /**
     * Required <a href="https://www.nomadproject.io/docs/jobspec/json.html#CPU">CPU</a>
     * in MHz; defaults to 1000MHz.
     */
    private String cpu = "1000";

    /**
     * Required <a href="https://www.nomadproject.io/docs/jobspec/json.html#MemoryMB">memory</a>
     * in MB; defaults to 512MB.
     *
     * N.B: should drop to a much lower value once -Xmx can be passed via JAVA_OPTS.
     * See https://github.com/spring-cloud/spring-cloud-stream-app-maven-plugin/issues/10
     * for JAVA_OPTS support on the starter apps.
     */
    private String memory = "512";

    /**
     * Required network bandwidth in
     * <a href="https://www.nomadproject.io/docs/jobspec/json.html#MBits">MBits</a>.
     */
    private Integer networkMBits = 10;

    /** Default constructor required for configuration-property binding. */
    public Resources() {
    }

    public Resources(String cpu, String memory, Integer networkMBits) {
        this.cpu = cpu;
        this.memory = memory;
        this.networkMBits = networkMBits;
    }

    public String getCpu() {
        return this.cpu;
    }

    public void setCpu(String cpu) {
        this.cpu = cpu;
    }

    public String getMemory() {
        return this.memory;
    }

    public void setMemory(String memory) {
        this.memory = memory;
    }

    public Integer getNetworkMBits() {
        return this.networkMBits;
    }

    public void setNetworkMBits(final Integer networkMBits) {
        this.networkMBits = networkMBits;
    }
}
/**
* Configuration property for {@link EphemeralDisk}. See
* https://www.nomadproject.io/docs/job-specification/ephemeral_disk.html
*/
public static class EphemeralDisk {
private Boolean sticky = true;
private Boolean migrate = true;
private Integer size = 300;
public EphemeralDisk() {
}
public EphemeralDisk(Boolean sticky, Boolean migrate, Integer size) {
this.sticky = sticky;
this.migrate = migrate;
this.size = size;
}
public Boolean getSticky() {
return sticky;
}
public void setSticky(Boolean sticky) {
this.sticky = sticky;
}
public Boolean getMigrate() {
return migrate;
}
public void setMigrate(Boolean migrate) {
this.migrate = migrate;
}
public Integer getSize() {
return size;
}
public void setSize(Integer size) {
this.size = size;
}
}
/**
* The hostname/IP address where a Nomad client is listening. Default is localhost.
*/
private String nomadHost = "localhost";
/**
* The port where a Nomad client is listening. Default is 4646.
*/
private int nomadPort = 4646;
/**
* The region to deploy apps into. Default to <code>global</code>. See
* https://www.nomadproject.io/docs/jobspec/json.html#Region
*/
private String region = "global";
/**
* A list of datacenters that should be targeted for deployment. Default value is dc1. See
* https://www.nomadproject.io/docs/jobspec/json.html#Datacenters
*/
private List<String> datacenters = Stream.of("dc1").collect(Collectors.toList());
/**
* The default job priority. Default value is 50. See
* https://www.nomadproject.io/docs/jobspec/json.html#Priority
*/
private Integer priority = 50;
/**
* Common environment variables to set for any deployed app.
*/
private String[] environmentVariables = new String[] {};
/**
* Flag to indicate whether an app should be exposed via
* <a href="https://github.com/eBay/fabio">Fabio</a>
*/
private boolean exposeViaFabio;
/**
* The <a href="https://www.nomadproject.io/docs/jobspec/json.html#Path">path</a> of the http
* endpoint which Consul will query to query the health
*/
private String checkHttpPath = "/health";
/**
* This indicates the frequency of the health checks that Consul will perform. Specified in
* <b>milliseconds</b>. See https://www.nomadproject.io/docs/jobspec/json.html#Interval
*/
private Long checkInterval = 30000L;
/**
* This indicates how long Consul will wait for a health check query to succeed. Specified in
* <b>milliseconds</b>. See https://www.nomadproject.io/docs/jobspec/json.html#Timeout
*/
private Long checkTimeout = 120000L;
private Resources resources = new Resources();
/**
* The <a href="https://www.nomadproject.io/docs/jobspec/json.html#DiskMB">disk</a> required in
* MB. Default is 200MB.
*/
private EphemeralDisk ephemeralDisk = new EphemeralDisk();
/**
* The <a href="https://www.nomadproject.io/docs/jobspec/json.html#MaxFiles">maximum number of
* rotated files</a> Nomad will retain. The default is 1 log file retention size.
*
*/
private Integer loggingMaxFiles = 1;
/**
* The <a href="https://www.nomadproject.io/docs/jobspec/json.html#MaxFileSizeMB">size</a> of
* each rotated file. The size is specified in MB. The default is 10MB max log file size.
*/
private Integer loggingMaxFileSize = 10;
/**
* A duration to wait before restarting a task. See
* https://www.nomadproject.io/docs/jobspec/json.html#Delay. Specified in <b>milliseconds</b>.
* Default is 30000 milliseconds (30 seconds).
*/
private Long restartPolicyDelay = 30000L;
/**
* The Interval begins when the first task starts and ensures that only X number of attempts
* number of restarts happens within it. See
* https://www.nomadproject.io/docs/jobspec/json.html#Interval. Specified in
* <b>milliseconds</b>. Default is 120000 milliseconds (120 seconds / 3 minutes).
*/
private Long restartPolicyInterval = 300000L;
/**
* Attempts is the number of restarts allowed in an Interval. See
* https://www.nomadproject.io/docs/jobspec/json.html#Attempts. Default is 3 attempts within 3
* minutes (see {@link NomadDeployerProperties#restartPolicyInterval)
*/
private Integer restartPolicyAttempts = 3;
/**
* Mode is given as a string and controls the behavior when the task fails more than Attempts
* times in an Interval. See https://www.nomadproject.io/docs/jobspec/json.html#Mode. Default
* value is <a href="https://www.nomadproject.io/docs/jobspec/json.html#delay">"delay"</a>.
* Possible values are:
*
* <ul>
* <li>delay</li>
* <li>fail (default)</li>
* </ul>
*/
private String restartPolicyMode = "fail";
/**
* Entry point style used for the Docker image. To be used to determine how to pass in
* properties.
*/
private EntryPointStyle entryPointStyle = EntryPointStyle.exec;
/**
* A comma separated list of host_path:container_path values. See
* https://www.nomadproject.io/docs/drivers/docker.html#volumes.
*
* E.g.
*
* <code>spring.cloud.deployer.nomad=/opt/data:/data,/opt/config:/config</code>
*/
private List<String> volumes = new ArrayList<>();
/**
* The destination (path) where artifacts will be downloaded by default. Only applicable to the
* Maven resource deployer implementation. Default value is <code>local</code>. See
* https://www.nomadproject.io/docs/job-specification/artifact.html#destination
*/
private String artifactDestination = "local";
/**
* A comma separated list of default Java options to pass to the JVM. Only applicable to the
* Maven resource deployer implementation. See
* http://docs.spring.io/spring-cloud-dataflow/docs/current/reference/htmlsingle/index.html#getting-started-application-configuration
* for reference.
*/
private String javaOpts;
/**
* The URI scheme that the deployer server is running on. When deploying Maven resource based
* apps the artifact source URL includes the servers host and port. This property value is used
* when constructing the source URL. Only applicable to the Maven resource deployer
* implementation. See https://www.nomadproject.io/docs/job-specification/artifact.html#source
*/
private String deployerScheme = "http";
/**
* The resolvable hostname of IP address that the deployer server is running on. When deploying
* Maven resource based apps the artifact source URL includes the servers host and port. This
* property value is used when constructing the source URL. Only applicable to the Maven
* resource deployer implementation. See
* https://www.nomadproject.io/docs/job-specification/artifact.html#source
*/
@NotNull(message = "Please configure the resolvable hostname or IP address that this server is running on. E.g. spring.cloud.deployer.nomad.deployerHost=192.168.1.10")
private String deployerHost;
/**
* The port that the deployer server is listening on. When deploying Maven resource based apps
* the artifact source URL includes the servers host and port. This property value is used when
* constructing the source URL. Only applicable to the Maven resource deployer implementation.
* See https://www.nomadproject.io/docs/job-specification/artifact.html#source
* <p>
* <b>If this property is not set then the port from {@link EmbeddedServletContainer#getPort()}
* will be used</b>
*/
private Integer deployerPort;
/**
* If basic authentication is required.
* See https://github.com/hashicorp/go-getter#http-http
*/
private String deployerUsername;
/**
* If basic authentication is required.
* See https://github.com/hashicorp/go-getter#http-http
*/
private String deployerPassword;
/**
* If set, the allocated node must support at least this version of a Java runtime environment.
* E.g. '1.8' for a minimum of a Java 8 JRE/JDK. See
* https://www.nomadproject.io/docs/drivers/java.html#driver_java_version. Only applicable to
* the Maven resource deployer implementation.
*/
private String minimumJavaVersion;
/**
* See {@link org.springframework.cloud.deployer.spi.nomad.NomadAutoConfiguration.RuntimeConfiguration}
*/
private String runtimePlatformVersion;
public String getNomadHost() {
return nomadHost;
}
public void setNomadHost(String nomadHost) {
this.nomadHost = nomadHost;
}
public int getNomadPort() {
return nomadPort;
}
public void setNomadPort(int nomadPort) {
this.nomadPort = nomadPort;
}
public String getRegion() {
return region;
}
public void setRegion(String region) {
this.region = region;
}
public List<String> getDatacenters() {
return datacenters;
}
public void setDatacenters(List<String> datacenters) {
this.datacenters = datacenters;
}
public Integer getPriority() {
return priority;
}
public void setPriority(Integer priority) {
this.priority = priority;
}
public String[] getEnvironmentVariables() {
return environmentVariables;
}
public void setEnvironmentVariables(String[] environmentVariables) {
this.environmentVariables = environmentVariables;
}
public boolean isExposeViaFabio() {
return exposeViaFabio;
}
public void setExposeViaFabio(boolean exposeViaFabio) {
this.exposeViaFabio = exposeViaFabio;
}
public String getCheckHttpPath() {
return checkHttpPath;
}
public void setCheckHttpPath(String checkHttpPath) {
this.checkHttpPath = checkHttpPath;
}
public Resources getResources() {
return resources;
}
public void setResources(final Resources resources) {
this.resources = resources;
}
public EphemeralDisk getEphemeralDisk() {
return ephemeralDisk;
}
public void setEphemeralDisk(final EphemeralDisk ephemeralDisk) {
this.ephemeralDisk = ephemeralDisk;
}
public Integer getLoggingMaxFiles() {
return loggingMaxFiles;
}
public void setLoggingMaxFiles(Integer loggingMaxFiles) {
this.loggingMaxFiles = loggingMaxFiles;
}
public Integer getLoggingMaxFileSize() {
return loggingMaxFileSize;
}
public void setLoggingMaxFileSize(Integer loggingMaxFileSize) {
this.loggingMaxFileSize = loggingMaxFileSize;
}
public Long getCheckInterval() {
return checkInterval;
}
public void setCheckInterval(Long checkInterval) {
this.checkInterval = checkInterval;
}
public Long getCheckTimeout() {
return checkTimeout;
}
public void setCheckTimeout(Long checkTimeout) {
this.checkTimeout = checkTimeout;
}
public Long getRestartPolicyDelay() {
return restartPolicyDelay;
}
public void setRestartPolicyDelay(Long restartPolicyDelay) {
this.restartPolicyDelay = restartPolicyDelay;
}
public Long getRestartPolicyInterval() {
return restartPolicyInterval;
}
public void setRestartPolicyInterval(Long restartPolicyInterval) {
this.restartPolicyInterval = restartPolicyInterval;
}
public Integer getRestartPolicyAttempts() {
return restartPolicyAttempts;
}
public void setRestartPolicyAttempts(Integer restartPolicyAttempts) {
this.restartPolicyAttempts = restartPolicyAttempts;
}
public String getRestartPolicyMode() {
return restartPolicyMode;
}
public void setRestartPolicyMode(String restartPolicyMode) {
this.restartPolicyMode = restartPolicyMode;
}
public EntryPointStyle getEntryPointStyle() {
return entryPointStyle;
}
public void setEntryPointStyle(EntryPointStyle entryPointStyle) {
this.entryPointStyle = entryPointStyle;
}
public List<String> getVolumes() {
return volumes;
}
public void setVolumes(List<String> volumes) {
this.volumes = volumes;
}
public String getArtifactDestination() {
return artifactDestination;
}
public void setArtifactDestination(final String artifactDestination) {
this.artifactDestination = artifactDestination;
}
public String getJavaOpts() {
return javaOpts;
}
public void setJavaOpts(final String javaOpts) {
this.javaOpts = javaOpts;
}
public String getDeployerScheme() {
return deployerScheme;
}
public void setDeployerScheme(final String deployerScheme) {
this.deployerScheme = deployerScheme;
}
public String getDeployerHost() {
return deployerHost;
}
public void setDeployerHost(final String deployerHost) {
this.deployerHost = deployerHost;
}
public Integer getDeployerPort() {
return deployerPort;
}
public void setDeployerPort(final Integer deployerPort) {
this.deployerPort = deployerPort;
}
public String getDeployerUsername() {
return deployerUsername;
}
public void setDeployerUsername(String deployerUsername) {
this.deployerUsername = deployerUsername;
}
public String getDeployerPassword() {
return deployerPassword;
}
public void setDeployerPassword(String deployerPassword) {
this.deployerPassword = deployerPassword;
}
public String getMinimumJavaVersion() {
return minimumJavaVersion;
}
public void setMinimumJavaVersion(final String minimumJavaVersion) {
this.minimumJavaVersion = minimumJavaVersion;
}
public void setRuntimePlatformVersion(String runtimePlatformVersion) {
this.runtimePlatformVersion = runtimePlatformVersion;
}
public String getRuntimePlatformVersion() {
return runtimePlatformVersion;
}
}
| |
package in.nerd_is.hitokoto.provider;
import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import android.text.TextUtils;
import android.util.Log;
/**
 * {@link ContentProvider} exposing the local "hitokoto" SQLite table.
 *
 * Supported URIs:
 * <ul>
 * <li>content://&lt;authority&gt;/hitokoto - all rows (MATCH_ALL)</li>
 * <li>content://&lt;authority&gt;/hitokoto/# - one row by _ID (MATCH_ID)</li>
 * <li>content://&lt;authority&gt;/hitokoto/* - rows of one category (MATCH_CAT)</li>
 * </ul>
 *
 * Created by Zheng Xuqiang on 2014/8/20 0020.
 */
public class HitokotoProvider extends ContentProvider {

    private static final String TAG = "HitokotoProvider";

    private static final int MATCH_ALL = 1;
    private static final int MATCH_ID = 2;
    private static final int MATCH_CAT = 3;

    private static final UriMatcher sUriMatcher;

    static {
        sUriMatcher = new UriMatcher(UriMatcher.NO_MATCH);
        sUriMatcher.addURI(HitokotoProviderMetaData.AUTHORITY, "hitokoto", MATCH_ALL);
        // "#" (numeric id) is registered before "*" so ids match MATCH_ID and
        // are not swallowed by the category wildcard.
        sUriMatcher.addURI(HitokotoProviderMetaData.AUTHORITY, "hitokoto/#", MATCH_ID);
        sUriMatcher.addURI(HitokotoProviderMetaData.AUTHORITY, "hitokoto/*", MATCH_CAT);
    }

    private HitokotoDatabaseHelper mDbHelper;

    /**
     * Quotes a text value for direct inclusion in a SQL where clause,
     * doubling embedded single quotes to prevent SQL injection / syntax
     * errors on values containing quotes.
     */
    private static String quoteValue(String value) {
        return "'" + value.replace("'", "''") + "'";
    }

    @Override
    public boolean onCreate() {
        mDbHelper = new HitokotoDatabaseHelper(getContext());
        return true;
    }

    @Override
    public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) {
        SQLiteDatabase db = mDbHelper.getReadableDatabase();
        Cursor cursor;
        String sort;
        // Fall back to the default ordering when the caller gives none.
        if (TextUtils.isEmpty(sortOrder)) {
            sort = HitokotoProviderMetaData.HitokotoTable.DEFAULT_SORT_ORDER;
        } else {
            sort = sortOrder;
        }
        switch (sUriMatcher.match(uri)) {
            case MATCH_ALL:
                cursor = db.query(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        projection, null, null, null, null, sort);
                break;
            case MATCH_ID:
                long id = ContentUris.parseId(uri);
                Log.d(TAG, "query MATCH_ID; id: " + id);
                cursor = db.query(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        projection,
                        HitokotoProviderMetaData.HitokotoTable._ID + " = " + id,
                        null, null, null, sort);
                break;
            case MATCH_CAT:
                // Quote/escape the category segment; the original interpolated
                // it unescaped, which broke on values containing quotes.
                String select = HitokotoProviderMetaData.HitokotoTable.CATEGORY
                        + " = " + quoteValue(uri.getPathSegments().get(1));
                Log.d(TAG, "query MATCH; select: " + select);
                cursor = db.query(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        projection, select, null, null, null, sort);
                break;
            default:
                throw new IllegalArgumentException("Unknown URI " + uri);
        }
        // Allow cursor observers to be notified of changes to this uri.
        cursor.setNotificationUri(getContext().getContentResolver(), uri);
        return cursor;
    }

    @Override
    public String getType(Uri uri) {
        switch (sUriMatcher.match(uri)) {
            case MATCH_ALL:
            case MATCH_CAT:
                return HitokotoProviderMetaData.HitokotoTable.TYPE_CONTENT;
            case MATCH_ID:
                return HitokotoProviderMetaData.HitokotoTable.TYPE_CONTENT_ITEM;
            default:
                throw new IllegalArgumentException("Unknown URI " + uri);
        }
    }

    @Override
    public Uri insert(Uri uri, ContentValues values) {
        // Inserts are only supported on the collection URI.
        if (sUriMatcher.match(uri) != MATCH_ALL) {
            throw new IllegalArgumentException("Unknown URI " + uri);
        }
        ContentValues contentValues;
        if (values != null) {
            contentValues = new ContentValues(values);
        }
        else {
            contentValues = new ContentValues();
        }
        // Stamp the row with its last-modified time.
        contentValues.put(HitokotoProviderMetaData.HitokotoTable.MODIFIED, System.currentTimeMillis());
        SQLiteDatabase db = mDbHelper.getWritableDatabase();
        long rowId = db.insert(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                null, contentValues);
        if (rowId > 0) {
            // Build the item URI from the rowid SQLite returned; the original
            // read _ID back out of the values, which NPEs when the caller lets
            // the database assign the id.
            Uri result = ContentUris.withAppendedId(
                    HitokotoProviderMetaData.HitokotoTable.URI_CONTENT, rowId);
            getContext().getContentResolver().notifyChange(result, null);
            return result;
        }
        throw new SQLException("Failed to insert row into " + uri);
    }

    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        int count;
        String select;
        SQLiteDatabase db = mDbHelper.getWritableDatabase();
        switch (sUriMatcher.match(uri)) {
            case MATCH_ID:
                select = HitokotoProviderMetaData.HitokotoTable._ID + " = "
                        + uri.getPathSegments().get(1);
                count = db.delete(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        select, selectionArgs);
                break;
            case MATCH_CAT:
                // CATEGORY is a text column, so its value must be quoted
                // (the original emitted an unquoted bareword).
                select = HitokotoProviderMetaData.HitokotoTable.CATEGORY + " = "
                        + quoteValue(uri.getPathSegments().get(1));
                if (!TextUtils.isEmpty(selection)) {
                    // AND the caller's selection with the category constraint.
                    // The original discarded the category clause and used the
                    // caller's selection twice.
                    select = select + " AND (" + selection + " )";
                }
                count = db.delete(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        select, selectionArgs);
                break;
            case MATCH_ALL:
                if (!TextUtils.isEmpty(selection)) {
                    select = selection;
                }
                else {
                    select = null;
                }
                count = db.delete(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        select, selectionArgs);
                break;
            default:
                throw new IllegalArgumentException("Unknown URI " + uri);
        } // end switch
        getContext().getContentResolver().notifyChange(uri, null);
        return count;
    }

    @Override
    public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
        int count;
        String select;
        SQLiteDatabase db = mDbHelper.getWritableDatabase();
        switch (sUriMatcher.match(uri)) {
            case MATCH_ID:
                select = HitokotoProviderMetaData.HitokotoTable._ID + " = "
                        + uri.getPathSegments().get(1);
                count = db.update(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        values, select, selectionArgs);
                break;
            case MATCH_CAT:
                // Same fixes as delete(): quote the text value and preserve
                // the category constraint when ANDing the caller's selection.
                select = HitokotoProviderMetaData.HitokotoTable.CATEGORY + " = "
                        + quoteValue(uri.getPathSegments().get(1));
                if (!TextUtils.isEmpty(selection)) {
                    select = select + " AND (" + selection + " )";
                }
                count = db.update(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        values, select, selectionArgs);
                break;
            case MATCH_ALL:
                if (!TextUtils.isEmpty(selection)) {
                    select = selection;
                }
                else {
                    select = null;
                }
                count = db.update(HitokotoProviderMetaData.HitokotoTable.TABLE_NAME,
                        values, select, selectionArgs);
                break;
            default:
                throw new IllegalArgumentException("Unknown URI " + uri);
        }
        getContext().getContentResolver().notifyChange(uri, null);
        return count;
    }

    /**
     * Owns the on-disk SQLite database and (re)creates the hitokoto table.
     */
    private static class HitokotoDatabaseHelper extends SQLiteOpenHelper {

        public HitokotoDatabaseHelper(Context context) {
            super(context, HitokotoProviderMetaData.DATABASE_NAME, null,
                    HitokotoProviderMetaData.DATABASE_VERSION);
        }

        @Override
        public void onCreate(SQLiteDatabase db) {
            db.execSQL(
                    "CREATE TABLE " + HitokotoProviderMetaData.HitokotoTable.TABLE_NAME + "("
                            + HitokotoProviderMetaData.HitokotoTable._ID + " INTEGER PRIMARY KEY, "
                            + HitokotoProviderMetaData.HitokotoTable.HITOKOTO + " TEXT, "
                            + HitokotoProviderMetaData.HitokotoTable.AUTHOR + " TEXT, "
                            + HitokotoProviderMetaData.HitokotoTable.CATEGORY + " TEXT, "
                            + HitokotoProviderMetaData.HitokotoTable.CATEGORY_NAME + " TEXT, "
                            + HitokotoProviderMetaData.HitokotoTable.SOURCE + " TEXT, "
                            + HitokotoProviderMetaData.HitokotoTable.DATE + " INTEGER, "
                            + HitokotoProviderMetaData.HitokotoTable.LIKE + " INTEGER, "
                            + HitokotoProviderMetaData.HitokotoTable.MODIFIED + " INTEGER"
                            + ");"
            );
        }

        @Override
        public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
            // Destructive upgrade: drop and recreate (all rows are lost).
            db.execSQL("DROP TABLE IF EXISTS " + HitokotoProviderMetaData.HitokotoTable.TABLE_NAME);
            onCreate(db);
        }
    }
}
| |
/*
* Copyright 2008 The MITRE Corporation (http://www.mitre.org/). All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mitre.mrald.taglib;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.BodyTagSupport;
import org.mitre.mrald.control.MsgObject;
import org.mitre.mrald.util.*;
/**
* Description of the Class
*
*@author jchoyt
*@created October 9, 2002
*/
public class ListUpdateValuesTag extends BodyTagSupport
{
/**
* Description of the Field
*/
protected ResultSet rs;
/**
* Description of the Field
*/
protected String schema;
/**
* Description of the Field
*/
protected String action;
/**
 * No-arg constructor required by the JSP tag-handler lifecycle.
 */
public ListUpdateValuesTag() { }
/**
 * Reads the request parameters describing a table row (table name, datasource,
 * column filters and foreign-key drop-down metadata), builds a SELECT over the
 * table's primary keys, and writes the rendered update form values and hidden
 * filters to the page.
 *
 *@return EVAL_BODY_AGAIN so the body is evaluated
 *@exception JspException on SQL or I/O failure, an unknown table, or when not
 *                        every primary key value was supplied
 */
public int doStartTag()
    throws JspException
{
    try
    {
        ServletRequest request = pageContext.getRequest();
        String tableName = request.getParameter( "tableName" );
        String datasource = request.getParameter( "datasource" );
        // Guard against a missing parameter as well as an empty one; the
        // original only handled "" and threw a NullPointerException when the
        // "datasource" parameter was absent.
        if ( datasource == null || datasource.equals( "" ) )
        {
            datasource = "main";
        }
        DBMetaData md = MetaData.getDbMetaData( datasource );
        TableMetaData tableInfo = md.getTableMetaDataNoCase( tableName );
        if ( tableInfo == null )
        {
            throw new JspException( "Table: " + tableName + " not found in Database. " );
        }
        Collection<String> primaryKeys = tableInfo.getPrimaryKeys();
        // Quote the table name if it contains spaces.
        String queryTableName = tableName;
        if ( queryTableName.indexOf( ' ' ) != -1 )
        {
            queryTableName = "\"" + queryTableName + "\"";
        }
        int i = 1;
        // fKey<i> parameters name columns in this table that link to a
        // foreign key; collect the drop-down metadata for each such column.
        String[] fks = request.getParameterValues( "fKey" + i );
        HashMap<String, String[]> linkInfo = new HashMap<String, String[]>();
        while ( i < 100 )
        {
            if ( ( fks == null ) || ( fks.length == 0 ) )
            {
                i++;
                fks = request.getParameterValues( "fKey" + i );
                continue;
            }
            String filterTable = request.getParameter( "fKeyFilterTable" + i );
            String filterColumn = request.getParameter( "fKeyFilterColumn" + i );
            String filterValue = request.getParameter( "fKeyFilterValue" + i );
            String dropDownDataSource = request.getParameter( "fKeyDataSource" + i );
            if ( dropDownDataSource == null )
            {
                dropDownDataSource = "main";
            }
            if ( filterTable == null )
            {
                filterTable = "";
                filterColumn = "";
                filterValue = "";
            }
            // [fkTable, fkIdColumn, fkListColumn, datasource,
            //  filterTable, filterColumn, filterValue]
            String[] fklink = new String[]{
                request.getParameter( "fKeyTable" + i ),
                request.getParameter( "fKeyId" + i ),
                request.getParameter( "fKeyList" + i ),
                dropDownDataSource,
                filterTable, filterColumn, filterValue};
            linkInfo.put( fks[0].toUpperCase(), fklink );
            i++;
            fks = request.getParameterValues( "fKey" + i );
        }
        Enumeration names = request.getParameterNames();
        StringBuffer whereClause = new StringBuffer();
        ArrayList<String> filters = new ArrayList<String>();
        ArrayList<String> pks = new ArrayList<String>();
        ArrayList<String> pksLowerCase = new ArrayList<String>();
        String whereAppend = " ";
        while ( names.hasMoreElements() )
        {
            String colName = names.nextElement().toString();
            // Skip control parameters; every other parameter is treated as a
            // column filter.
            if ( colName.equals( "tableName" ) || colName.startsWith( "fKey" ) || colName.equals( "datasource" ) || colName.equals( "SuccessUrl" ) )
            {
                continue;
            }
            boolean isPrimaryKey = tableInfo.isPrimaryKeyNoCase( colName );
            // NOTE(review): every filter column is recorded in pks (used later
            // to render hidden inputs), not only true primary keys — preserved
            // as-is; confirm this is intentional.
            pks.add( colName.toLowerCase() );
            // Additional check to make sure that all primary keys are
            // specified at least once.
            if ( isPrimaryKey )
            {
                // Count each primary key only once despite case differences.
                if ( !pksLowerCase.contains( colName.toLowerCase() ) )
                {
                    pksLowerCase.add( colName.toLowerCase() );
                }
            }
            String val = request.getParameter( colName );
            // Ignore any columns listed that are not in the table.
            if ( checkHasColumn( tableInfo, colName ) )
            {
                whereClause.append( whereAppend );
                whereAppend = " AND ";
                whereClause.append( colName + "='" + val + "'" );
                String pkStr = "Table" + FormTags.NAMEVALUE_TOKEN_STR + tableName + "~Field" + FormTags.NAMEVALUE_TOKEN_STR + colName + "~Value" + FormTags.NAMEVALUE_TOKEN_STR + val;
                filters.add( pkStr );
            }
        }
        // Every primary key must be constrained, otherwise the query could
        // return more rows than a single update form can represent.
        if ( pksLowerCase.size() < primaryKeys.size() )
        {
            MraldOutFile.logToFile( Config.getProperty( "LOGFILE" ), "List Update values; doStartTag: Should have " + primaryKeys.size() + " but actually have " + pks.size() );
            throw new JspException( "Not all Primary Key Values have been specified " );
        }
        // DISTINCT is deliberately omitted: it creates problems for complex
        // datatypes.
        String selectClause = "Select * from " + queryTableName;
        if ( whereClause.length() > 0 )
        {
            selectClause = selectClause + " WHERE " + whereClause;
        }
        String valuesList = outputResults( datasource, selectClause, tableName, pks, linkInfo );
        String filtersList = outputFilters( filters );
        pageContext.getOut().print( valuesList );
        pageContext.getOut().print( filtersList );
        return EVAL_BODY_AGAIN;
    }
    catch ( SQLException e )
    {
        throw new JspException( e );
    }
    catch ( IOException e )
    {
        throw new JspException( e );
    }
}
/**
 * Case-insensitive test of whether the given column exists in the table.
 *
 *@param tableInfo metadata of the table being inspected
 *@param colName   column name to look for (any case)
 *@return true when the table declares a column with that name
 */
private boolean checkHasColumn(TableMetaData tableInfo, String colName)
{
    final String target = colName.toLowerCase();
    for ( String candidate : tableInfo.getColumnNames() )
    {
        if ( target.equals( candidate.toLowerCase() ) )
        {
            return true;
        }
    }
    return false;
}
/**
 * Sets the action label that prefixes the generated form field names.
 *
 *@param action The new action value
 */
public void setAction( String action )
{
    this.action = action;
}
/**
* SetAction
*/
// public String getAction()
// {
// return this.action;
// }
/**
* Initializes the streams and database connections.
*
*@param selectClause Description of the Parameter
*@param tableName Description of the Parameter
*@param pks Description of the Parameter
*@param fks Description of the Parameter
*@return Description of the Return Value
*@exception SQLException Description of the Exception
*@exception JspException Description of the Exception
*/
public String outputResults( String datasource, String selectClause, String tableName, ArrayList pks, HashMap fks )
throws JspException, SQLException
{
/*
* Opening a connection to the Database
*/
// MraldOutFile.logToFile(Config.getProperty("LOGFILE"), "List Update values; outputResults. Start " );
Connection conn = new MraldConnection(
datasource,
new MsgObject((HttpServletRequest)pageContext.getRequest(),
(HttpServletResponse)pageContext.getResponse() ) )
.getConnection();
DBMetaData md = MetaData.getDbMetaData( datasource );
String noValue="";
/*
* Setting and executing the query with the database connection
*/
schema = md.getDbProps().getProperty( "SCHEMA" );
if ( schema == null || schema.equals( Config.EMPTY_STR ) )
{
RuntimeException e = new RuntimeException( "A SCHEMA value was not provided in the database configuration file." );
throw e;
}
rs = conn.createStatement().executeQuery( selectClause );
String CALENDAR = "\n<SCRIPT LANGUAGE=\"JavaScript\">var cal<:orderNo:> = new CalendarPopup();</SCRIPT>\n<A HREF=\"#\" onClick=\"cal<:orderNo:>.select(document.FormUpdate.<:name:>[0],'anchor<:orderNo:>','MM/dd/yyyy'); return false;\" TITLE=\"cal<:orderNo:>.select(document.FormUpdate.<:name:>[0],'anchor<:orderNo:>','MM/dd/yyyy'); return false; \" NAME=\"anchor<:orderNo:>\" ID=\"anchor<:orderNo:>\"><img src=\"images/cal.gif\" width=\"17\" height=\"17\" border=\"0\" alt=\"Click Here to Pick up the timestamp\"></A>\n";
ResultSetMetaData rsmd = rs.getMetaData();
int colCount = rsmd.getColumnCount();
/*
* Produce the table names with the appropriately labeled checkboxs
*/
StringBuffer buffer = new StringBuffer();
buffer.append( "\n<tr><td colspan=\"2\">" );
buffer.append( "<b>" + action + " Values:</b></td></tr>" );
while ( rs.next() )
{
for ( int i = 0; i < colCount; i++ )
{
String value = rs.getString( i + 1 );
if( value == null || value.equals("null") )
{
value = noValue;
}
String colName = rsmd.getColumnName( i + 1 );
int type = rsmd.getColumnType( i + 1 );
String niceName = FBUtils.getColumnName(tableName + "." + colName);
if (niceName == null)
{
niceName=colName;
}
boolean isDate = false;
String calendar;
if ( ( type == Types.TIME ) || ( type == Types.DATE ) || ( type == Types.TIMESTAMP ) )
{
isDate = true;
}
buffer.append( "\n<tr><td><b>" + niceName + "</b></td><td>" );
int textSize = 0;
if ( value != null )
{
textSize = value.length();
}
else
{
}
//If the value is greater than 100 make a text area
if ( textSize > 100 )
{
buffer.append( "<textarea cols='100' rows='" + ( textSize / 100 + 1 ) + "' name='" + action + ( i + 1 ) + "'>" );
buffer.append( value + "</textarea>" );
}
else
{
// MraldOutFile.logToFile(Config.getProperty("LOGFILE"), "List Update values; outputResults. Checking if fkeys contain " + colName );
String upperCaseColName = colName.toUpperCase();
if ( fks.containsKey( upperCaseColName ) )
{
// MraldOutFile.logToFile(Config.getProperty("LOGFILE"), "List Update values; outputResults. Fkeys do contain " + colName );
DropDownListTag ddl = new DropDownListTag();
buffer.append( "<select" );
buffer.append( " name='" + action + ( i + 1 ) + "'>" );
String[] fkInfo = ( String[] ) fks.get( upperCaseColName );
ddl.setTable( fkInfo[0] );
ddl.setPkColumn( fkInfo[1] );
ddl.setListColumn( fkInfo[2] );
ddl.setValue( value );
ddl.setDatasource( fkInfo[3] );
if (!fkInfo[4].equals(""))
{
ddl.setFilterTable(fkInfo[4]);
ddl.setFilterColumn(fkInfo[5]);
ddl.setFilterColumnValue(fkInfo[6]);
}
MraldOutFile.logToFile(Config.getProperty("LOGFILE"), "List Update values; outputResults. About to get Drop down data for : " + colName );
buffer.append( ddl.getDropDown() + "</select>\n" );
MraldOutFile.logToFile(Config.getProperty("LOGFILE"), "List Update values; outputResults. Finished Drop Down Retrieval for: " + colName );
}
else if ( pks.contains( colName.toLowerCase() ) )
{
buffer.append( value + "</td><input type=\"hidden\" name='" + action + ( i + 1 ) + "' value=\"" + value + "\">" );
}
else
{
buffer.append( "<input type='text' size='" + textSize );
buffer.append( "' name='" + action + ( i + 1 ) + "' " );
if ( isDate )
{
if (!value.equals(noValue))
{
SimpleDateFormat df = new SimpleDateFormat( "yyyy-mm-dd" );
java.util.Date date;
try
{
date = df.parse( value );
df.applyPattern( "mm/dd/yyyy" );
String dateVal = df.format( date );
buffer.append( "value=\"" + dateVal + "\">" );
} catch (ParseException e) {
// TODO Auto-generated catch block
RuntimeException re = new RuntimeException( "A date value could not be formatted." );
throw re;
}
calendar = CALENDAR.replaceAll( "<:name:>", action + ( i + 1 ) );
calendar = calendar.replaceAll( "<:orderNo:>", ( new Integer( i + 1 ) ).toString() );
buffer.append( calendar );
}
}
else
{
buffer.append( "value=\"" + value + "\">" );
}
buffer.append( "</td>" );
}
}
String typeName = FBUtils.isDateType(type) ? "Date" :
FBUtils.isNumberType(type) ? "Numeric" :
FBUtils.isBinaryType(type) ? "Binary" :
"String";
buffer.append( "<input type='hidden' name='" + action + ( i + 1 ) + "' value=\"Table" + FormTags.NAMEVALUE_TOKEN_STR + tableName + "~Field" + FormTags.NAMEVALUE_TOKEN_STR + colName + "~Type" + FormTags.NAMEVALUE_TOKEN_STR + typeName + "\"></tr>" );
}
}
rs.close();
conn.close();
return buffer.toString();
}
/**
* Initializes the streams and database connections.
*
*@param filters Description of the Parameter
*@return Description of the Return Value
*@exception SQLException Description of the Exception
*/
public String outputFilters( ArrayList filters )
throws SQLException
{
/*
* Produce the table names with the appropriately labeled checkboxs
*/
StringBuffer buffer = new StringBuffer();
for ( int i = 0; i < filters.size(); i++ )
{
buffer.append( "\n<input type='hidden' name='Filter" + ( i + 1 ) + "' value=\"" + filters.get( i ) + "\" >" );
}
return buffer.toString();
}
}
// NOTE(review): the lines below are non-Java residue appended during data
// extraction (dataset-viewer boilerplate); commented out so the file parses.
// |
// Subsets and Splits
// No community queries yet
// The top public SQL queries from the community will appear here once available.