gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.expr.stat; import org.apache.drill.common.expression.LogicalExpression; import org.apache.drill.common.expression.LogicalExpressionBase; import org.apache.drill.common.expression.visitors.ExprVisitor; import org.apache.drill.exec.expr.fn.FunctionGenerationHelper; import org.apache.parquet.column.statistics.Statistics; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.function.BiFunction; import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.hasNoNulls; import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.isNullOrEmpty; import static org.apache.drill.exec.expr.stat.ParquetPredicatesHelper.isAllNulls; /** * Comparison predicates for parquet filter pushdown. 
*/ public class ParquetComparisonPredicate<C extends Comparable<C>> extends LogicalExpressionBase implements ParquetFilterPredicate<C> { private final LogicalExpression left; private final LogicalExpression right; private final BiFunction<Statistics<C>, Statistics<C>, RowsMatch> predicate; private ParquetComparisonPredicate( LogicalExpression left, LogicalExpression right, BiFunction<Statistics<C>, Statistics<C>, RowsMatch> predicate ) { super(left.getPosition()); this.left = left; this.right = right; this.predicate = predicate; } @Override public Iterator<LogicalExpression> iterator() { final List<LogicalExpression> args = new ArrayList<>(); args.add(left); args.add(right); return args.iterator(); } @Override public <T, V, E extends Exception> T accept(ExprVisitor<T, V, E> visitor, V value) throws E { return visitor.visitUnknown(this, value); } /** * Semantics of matches() is very similar to what is implemented in Parquet library's * {@link org.apache.parquet.filter2.statisticslevel.StatisticsFilter} and * {@link org.apache.parquet.filter2.predicate.FilterPredicate} * * Main difference : * 1. A RangeExprEvaluator is used to compute the min/max of an expression, such as CAST function * of a column. CAST function could be explicitly added by Drill user (It's recommended to use CAST * function after DRILL-4372, if user wants to reduce planning time for limit 0 query), or implicitly * inserted by Drill, when the types of compare operands are not identical. Therefore, it's important * to allow CAST function to appear in the filter predicate. * 2. We do not require list of ColumnChunkMetaData to do the evaluation, while Parquet library's * StatisticsFilter has such requirement. Drill's ParquetTableMetaData does not maintain ColumnChunkMetaData, * making it impossible to directly use Parquet library's StatisticFilter in query planning time. * 3. We allows both sides of comparison operator to be a min/max range. 
As such, we support * expression_of(Column1) < expression_of(Column2), * where Column1 and Column2 are from same parquet table. */ @Override public RowsMatch matches(RangeExprEvaluator<C> evaluator) { Statistics<C> leftStat = left.accept(evaluator, null); if (isNullOrEmpty(leftStat)) { return RowsMatch.SOME; } Statistics<C> rightStat = right.accept(evaluator, null); if (isNullOrEmpty(rightStat)) { return RowsMatch.SOME; } if (isAllNulls(leftStat, evaluator.getRowCount()) || isAllNulls(rightStat, evaluator.getRowCount())) { return RowsMatch.NONE; } if (!leftStat.hasNonNullValue() || !rightStat.hasNonNullValue()) { return RowsMatch.SOME; } return predicate.apply(leftStat, rightStat); } /** * If one rowgroup contains some null values, change the RowsMatch.ALL into RowsMatch.SOME (null values should be discarded by filter) */ private static RowsMatch checkNull(Statistics leftStat, Statistics rightStat) { return !hasNoNulls(leftStat) || !hasNoNulls(rightStat) ? RowsMatch.SOME : RowsMatch.ALL; } /** * EQ (=) predicate */ private static <C extends Comparable<C>> LogicalExpression createEqualPredicate( LogicalExpression left, LogicalExpression right ) { return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0 || rightStat.compareMaxToValue(leftStat.genericGetMin()) < 0 ? RowsMatch.NONE : RowsMatch.SOME ) { @Override public String toString() { return left + " = " + right; } }; } /** * GT (>) predicate. */ private static <C extends Comparable<C>> LogicalExpression createGTPredicate( LogicalExpression left, LogicalExpression right ) { return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> { if (leftStat.compareMaxToValue(rightStat.genericGetMin()) <= 0) { return RowsMatch.NONE; } return leftStat.compareMinToValue(rightStat.genericGetMax()) > 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME; }); } /** * GE (>=) predicate. 
*/ private static <C extends Comparable<C>> LogicalExpression createGEPredicate( LogicalExpression left, LogicalExpression right ) { return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> { if (leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0) { return RowsMatch.NONE; } return leftStat.compareMinToValue(rightStat.genericGetMax()) >= 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME; }); } /** * LT (<) predicate. */ private static <C extends Comparable<C>> LogicalExpression createLTPredicate( LogicalExpression left, LogicalExpression right ) { return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> { if (rightStat.compareMaxToValue(leftStat.genericGetMin()) <= 0) { return RowsMatch.NONE; } return leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME; }); } /** * LE (<=) predicate. */ private static <C extends Comparable<C>> LogicalExpression createLEPredicate( LogicalExpression left, LogicalExpression right ) { return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> { if (rightStat.compareMaxToValue(leftStat.genericGetMin()) < 0) { return RowsMatch.NONE; } return leftStat.compareMaxToValue(rightStat.genericGetMin()) <= 0 ? checkNull(leftStat, rightStat) : RowsMatch.SOME; }); } /** * NE (!=) predicate. */ private static <C extends Comparable<C>> LogicalExpression createNEPredicate( LogicalExpression left, LogicalExpression right ) { return new ParquetComparisonPredicate<C>(left, right, (leftStat, rightStat) -> { if (leftStat.compareMaxToValue(rightStat.genericGetMin()) < 0 || rightStat.compareMaxToValue(leftStat.genericGetMin()) < 0) { return checkNull(leftStat, rightStat); } return leftStat.compareMaxToValue(rightStat.genericGetMax()) == 0 && leftStat.compareMinToValue(rightStat.genericGetMin()) == 0 ? 
RowsMatch.NONE : RowsMatch.SOME; }); } public static <C extends Comparable<C>> LogicalExpression createComparisonPredicate( String function, LogicalExpression left, LogicalExpression right ) { switch (function) { case FunctionGenerationHelper.EQ: return ParquetComparisonPredicate.<C>createEqualPredicate(left, right); case FunctionGenerationHelper.GT: return ParquetComparisonPredicate.<C>createGTPredicate(left, right); case FunctionGenerationHelper.GE: return ParquetComparisonPredicate.<C>createGEPredicate(left, right); case FunctionGenerationHelper.LT: return ParquetComparisonPredicate.<C>createLTPredicate(left, right); case FunctionGenerationHelper.LE: return ParquetComparisonPredicate.<C>createLEPredicate(left, right); case FunctionGenerationHelper.NE: return ParquetComparisonPredicate.<C>createNEPredicate(left, right); default: return null; } } }
/** * Licensed to DigitalPebble Ltd under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * DigitalPebble licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.digitalpebble.stormcrawler.elasticsearch.persistence; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import org.apache.storm.spout.SpoutOutputCollector; import org.apache.storm.task.TopologyContext; import org.apache.storm.tuple.Values; import org.apache.storm.utils.Utils; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.digitalpebble.stormcrawler.Metadata; import com.digitalpebble.stormcrawler.util.ConfUtils; import com.digitalpebble.stormcrawler.util.URLPartitioner; /** * Spout which pulls URL from 
an ES index. Use a single instance unless you use * 'es.status.routing' with the StatusUpdaterBolt, in which case you need to * have exactly the same number of spout instances as ES shards. **/ public class ElasticSearchSpout extends AbstractSpout { private static final Logger LOG = LoggerFactory .getLogger(ElasticSearchSpout.class); private static final String ESStatusBufferSizeParamName = "es.status.max.buffer.size"; private static final String ESStatusMaxInflightParamName = "es.status.max.inflight.urls.per.bucket"; private static final String ESRandomSortParamName = "es.status.random.sort"; private static final String ESMaxSecsSinceQueriedDateParamName = "es.status.max.secs.date"; private static final String ESStatusSortFieldParamName = "es.status.sort.field"; private int maxBufferSize = 100; private int lastStartOffset = 0; private Date lastDate; private int maxSecSinceQueriedDate = -1; private URLPartitioner partitioner; private int maxInFlightURLsPerBucket = -1; // sort results randomly to get better diversity of results // otherwise sort by the value of es.status.sort.field // (default "nextFetchDate") boolean randomSort = true; /** Keeps a count of the URLs being processed per host/domain/IP **/ private Map<String, AtomicInteger> inFlightTracker = new HashMap<>(); // when using multiple instances - each one is in charge of a specific shard // useful when sharding based on host or domain to guarantee a good mix of // URLs private int shardID = -1; private String sortField; @Override public void open(Map stormConf, TopologyContext context, SpoutOutputCollector collector) { maxInFlightURLsPerBucket = ConfUtils.getInt(stormConf, ESStatusMaxInflightParamName, 1); maxBufferSize = ConfUtils.getInt(stormConf, ESStatusBufferSizeParamName, 100); randomSort = ConfUtils.getBoolean(stormConf, ESRandomSortParamName, true); maxSecSinceQueriedDate = ConfUtils.getInt(stormConf, ESMaxSecsSinceQueriedDateParamName, -1); sortField = ConfUtils.getString(stormConf, 
ESStatusSortFieldParamName, "nextFetchDate"); super.open(stormConf, context, collector); partitioner = new URLPartitioner(); partitioner.configure(stormConf); } @Override public void nextTuple() { // inactive? if (active == false) return; // have anything in the buffer? if (!buffer.isEmpty()) { Values fields = buffer.remove(); String url = fields.get(0).toString(); Metadata metadata = (Metadata) fields.get(1); String partitionKey = partitioner.getPartition(url, metadata); // check whether we already have too many tuples in flight for this // partition key if (maxInFlightURLsPerBucket != -1) { AtomicInteger inflightforthiskey = inFlightTracker .get(partitionKey); if (inflightforthiskey == null) { inflightforthiskey = new AtomicInteger(); inFlightTracker.put(partitionKey, inflightforthiskey); } else if (inflightforthiskey.intValue() >= maxInFlightURLsPerBucket) { // do it later! left it out of the queue for now LOG.debug( "Reached max in flight allowed ({}) for bucket {}", maxInFlightURLsPerBucket, partitionKey); eventCounter.scope("skipped.max.per.bucket").incrBy(1); return; } inflightforthiskey.incrementAndGet(); } beingProcessed.put(url, partitionKey); this._collector.emit(fields, url); eventCounter.scope("emitted").incrBy(1); return; } // check that we allowed some time between queries if (throttleESQueries()) { // sleep for a bit but not too much in order to give ack/fail a // chance Utils.sleep(10); return; } // re-populate the buffer populateBuffer(); } /** run a query on ES to populate the internal buffer **/ private void populateBuffer() { Date now = new Date(); if (lastDate == null) { lastDate = now; lastStartOffset = 0; } // been running same query for too long and paging deep? 
else if (maxSecSinceQueriedDate != -1) { Date expired = new Date(lastDate.getTime() + (maxSecSinceQueriedDate * 1000)); if (expired.before(now)) { LOG.info("Last date expired {} now {} - resetting query", expired, now); lastDate = now; lastStartOffset = 0; } } LOG.info("Populating buffer with nextFetchDate <= {}", lastDate); QueryBuilder rangeQueryBuilder = QueryBuilders.rangeQuery( "nextFetchDate").lte(lastDate); QueryBuilder queryBuilder = rangeQueryBuilder; if (randomSort) { FunctionScoreQueryBuilder fsqb = new FunctionScoreQueryBuilder( rangeQueryBuilder); fsqb.add(ScoreFunctionBuilders.randomFunction(lastDate.getTime())); queryBuilder = fsqb; } SearchRequestBuilder srb = client .prepareSearch(indexName) .setTypes(docType) // expensive as it builds global Term/Document Frequencies // TODO look for a more appropriate method .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(queryBuilder).setFrom(lastStartOffset) .setSize(maxBufferSize).setExplain(false); // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-preference.html // _shards:2,3 if (shardID != -1) { srb.setPreference("_shards:" + shardID); } if (!randomSort) { FieldSortBuilder sorter = SortBuilders.fieldSort(sortField).order( SortOrder.ASC); srb.addSort(sorter); } timeStartESQuery = System.currentTimeMillis(); SearchResponse response = srb.execute().actionGet(); long timeTaken = System.currentTimeMillis() - timeStartESQuery; SearchHits hits = response.getHits(); int numhits = hits.getHits().length; LOG.info("ES query returned {} hits in {} msec", numhits, timeTaken); esQueryTimes.addMeasurement(timeTaken); eventCounter.scope("ES_queries").incrBy(1); eventCounter.scope("ES_docs").incrBy(numhits); // no more results? 
if (numhits == 0) { lastDate = null; lastStartOffset = 0; } else { lastStartOffset += numhits; } // filter results so that we don't include URLs we are already // being processed or skip those for which we already have enough // for (int i = 0; i < hits.getHits().length; i++) { Map<String, Object> keyValues = hits.getHits()[i].sourceAsMap(); String url = (String) keyValues.get("url"); // is already being processed - skip it! if (beingProcessed.containsKey(url)) { eventCounter.scope("already_being_processed").incrBy(1); continue; } Metadata metadata = fromKeyValues(keyValues); buffer.add(new Values(url, metadata)); } } @Override public void ack(Object msgId) { LOG.debug("{} Ack for {}", logIdprefix, msgId); String partitionKey = beingProcessed.remove(msgId); decrementPartitionKey(partitionKey); eventCounter.scope("acked").incrBy(1); } @Override public void fail(Object msgId) { LOG.info("{} Fail for {}", logIdprefix, msgId); String partitionKey = beingProcessed.remove(msgId); decrementPartitionKey(partitionKey); eventCounter.scope("failed").incrBy(1); } private final void decrementPartitionKey(String partitionKey) { if (partitionKey == null) return; AtomicInteger currentValue = this.inFlightTracker.get(partitionKey); if (currentValue == null) return; int newVal = currentValue.decrementAndGet(); if (newVal == 0) this.inFlightTracker.remove(partitionKey); } }
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.startup.impl;

import com.intellij.ide.caches.CacheUpdater;
import com.intellij.ide.startup.StartupManagerEx;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.DumbServiceImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

/**
 * Runs the project startup pipeline in three phases: pre-startup activities,
 * startup activities (inside a read action), then post-startup activities on
 * the dispatch thread, with dumb-aware activities allowed to run before
 * indexing completes.
 */
public class StartupManagerImpl extends StartupManagerEx {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.startup.impl.StartupManagerImpl");

  private final List<Runnable> myPreStartupActivities = Collections.synchronizedList(new ArrayList<Runnable>());
  private final List<Runnable> myStartupActivities = new ArrayList<Runnable>();

  // Post-startup activities are split by dumb-awareness: dumb-aware ones may
  // run while indexing is still in progress, the rest must wait for smart mode.
  private final List<Runnable> myDumbAwarePostStartupActivities = Collections.synchronizedList(new ArrayList<Runnable>());
  private final List<Runnable> myNotDumbAwarePostStartupActivities = Collections.synchronizedList(new ArrayList<Runnable>());
  private boolean myPostStartupActivitiesPassed = false; // guarded by this

  private final List<CacheUpdater> myCacheUpdaters = new LinkedList<CacheUpdater>();

  private volatile boolean myPreStartupActivitiesPassed = false;
  private volatile boolean myStartupActivitiesRunning = false;
  private volatile boolean myStartupActivitiesPassed = false;

  private final Project myProject;

  public StartupManagerImpl(Project project) {
    myProject = project;
  }

  @Override
  public void registerPreStartupActivity(@NotNull Runnable runnable) {
    LOG.assertTrue(!myPreStartupActivitiesPassed, "Registering pre startup activity that will never be run");
    myPreStartupActivities.add(runnable);
  }

  @Override
  public void registerStartupActivity(@NotNull Runnable runnable) {
    LOG.assertTrue(!myStartupActivitiesPassed, "Registering startup activity that will never be run");
    myStartupActivities.add(runnable);
  }

  @Override
  public synchronized void registerPostStartupActivity(@NotNull Runnable runnable) {
    LOG.assertTrue(!myPostStartupActivitiesPassed, "Registering post-startup activity that will never be run");
    // Route the activity to the dumb-aware or smart-mode-only queue.
    (DumbService.isDumbAware(runnable) ? myDumbAwarePostStartupActivities : myNotDumbAwarePostStartupActivities).add(runnable);
  }

  @Override
  public void registerCacheUpdater(@NotNull CacheUpdater updater) {
    LOG.assertTrue(!myStartupActivitiesPassed, CacheUpdater.class.getSimpleName() + " must be registered before startup activity finished");
    myCacheUpdaters.add(updater);
  }

  @Override
  public boolean startupActivityRunning() {
    return myStartupActivitiesRunning;
  }

  @Override
  public boolean startupActivityPassed() {
    return myStartupActivitiesPassed;
  }

  @Override
  public synchronized boolean postStartupActivityPassed() {
    return myPostStartupActivitiesPassed;
  }

  /**
   * Runs pre-startup and startup activities inside a single read action and
   * queues the cache update; flags are flipped in order so observers see a
   * consistent phase progression.
   */
  public void runStartupActivities() {
    ApplicationManager.getApplication().runReadAction(new Runnable() {
      public void run() {
        // Mark the whole phase as a heavy process so other subsystems can
        // defer their own heavy work.
        HeavyProcessLatch.INSTANCE.processStarted();
        try {
          runActivities(myPreStartupActivities);
          myPreStartupActivitiesPassed = true;

          myStartupActivitiesRunning = true;
          runActivities(myStartupActivities);
          myStartupActivitiesRunning = false;

          startCacheUpdate();
          myStartupActivitiesPassed = true;
        }
        finally {
          HeavyProcessLatch.INSTANCE.processFinished();
        }
      }
    });
  }

  /**
   * Runs post-startup activities on the dispatch thread. Dumb-aware activities
   * are run immediately; they are run AGAIN inside runWhenSmart because new
   * ones can be registered while the IDE is still in dumb mode, followed by
   * the activities that require smart mode.
   */
  public synchronized void runPostStartupActivities() {
    final Application app = ApplicationManager.getApplication();
    app.assertIsDispatchThread();

    if (myPostStartupActivitiesPassed) return;

    runActivities(myDumbAwarePostStartupActivities);
    DumbService.getInstance(myProject).runWhenSmart(new Runnable() {
      public void run() {
        synchronized (StartupManagerImpl.this) {
          app.assertIsDispatchThread();
          if (myProject.isDisposed()) return;
          runActivities(myDumbAwarePostStartupActivities); // they can register activities while in the dumb mode
          runActivities(myNotDumbAwarePostStartupActivities);

          myPostStartupActivitiesPassed = true;
        }
      }
    });

    if (!app.isUnitTestMode()) {
      VirtualFileManager.getInstance().refresh(!app.isHeadlessEnvironment());
    }
  }

  // Queues the registered cache updaters; cancellation is propagated, any
  // other failure is logged so startup can proceed.
  private void startCacheUpdate() {
    try {
      DumbServiceImpl.getInstance(myProject).queueCacheUpdate(myCacheUpdaters);
    }
    catch (ProcessCanceledException e) {
      throw e;
    }
    catch (Throwable e) {
      LOG.error(e);
    }
  }

  // Drains the list (which may grow while activities run - hence the
  // while-loop instead of an iterator), honoring cancellation between items.
  private static void runActivities(@NotNull List<Runnable> activities) {
    final ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
    while (!activities.isEmpty()) {
      final Runnable runnable = activities.remove(0);
      if (indicator != null) indicator.checkCanceled();

      try {
        runnable.run();
      }
      catch (ProcessCanceledException e) {
        throw e;
      }
      catch (Throwable ex) {
        LOG.error(ex);
      }
    }
  }

  /**
   * Runs the action (inside a write action) once the project is initialized:
   * immediately via invokeLater when it already is, otherwise as a
   * post-startup activity.
   */
  public synchronized void runWhenProjectIsInitialized(@NotNull final Runnable action) {
    final Runnable runnable;

    final Application application = ApplicationManager.getApplication();
    if (DumbService.isDumbAware(action)) {
      runnable = new DumbAwareRunnable() {
        public void run() {
          application.runWriteAction(action);
        }
      };
    }
    else {
      runnable = new Runnable() {
        public void run() {
          application.runWriteAction(action);
        }
      };
    }

    if (myProject.isInitialized() || application.isUnitTestMode() && myPostStartupActivitiesPassed) {
      // in tests which simulate project opening, post-startup activities could have been run already.
      // Then we should act as if the project was initialized
      UIUtil.invokeLaterIfNeeded(new Runnable() {
        public void run() {
          if (!myProject.isDisposed()) {
            runnable.run();
          }
        }
      });
    }
    else {
      registerPostStartupActivity(runnable);
    }
  }

  @TestOnly
  public synchronized void prepareForNextTest() {
    myPreStartupActivities.clear();
    myStartupActivities.clear();
    myDumbAwarePostStartupActivities.clear();
    myNotDumbAwarePostStartupActivities.clear();
    myCacheUpdaters.clear();

    myPreStartupActivitiesPassed = false;
    myStartupActivitiesRunning = false;
    myStartupActivitiesPassed = false;
    myPostStartupActivitiesPassed = false;
  }

  @TestOnly
  public synchronized void checkCleared() {
    try {
      assert myStartupActivities.isEmpty() : "Activities: " + myStartupActivities;
      assert myDumbAwarePostStartupActivities.isEmpty() : "DumbAware Post Activities: " + myDumbAwarePostStartupActivities;
      assert myNotDumbAwarePostStartupActivities.isEmpty() : "Post Activities: " + myNotDumbAwarePostStartupActivities;
      assert myPreStartupActivities.isEmpty() : "Pre Activities: " + myPreStartupActivities;
    }
    finally {
      prepareForNextTest();
    }
  }
}
/** * Copyright (c) 2013 IMS GLobal Learning Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * Author: Charles Severance <csev@umich.edu> */ package org.imsglobal.lti2; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.TreeMap; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.ObjectWriter; import org.imsglobal.basiclti.BasicLTIConstants; import org.imsglobal.basiclti.BasicLTIUtil; import org.imsglobal.json.IMSJSONRequest; import org.imsglobal.lti2.objects.Service_offered; import org.imsglobal.lti2.objects.StandardServices; import org.imsglobal.lti2.objects.ToolConsumer; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.JSONValue; /** * Notes: * * This is a sample "Hello World" servlet for LTI2. It is a simple UI - mostly * intended to exercise the APIs and show the way for servlet-based LTI2 code. 
* * Here are the web.xml entries: * * <servlet> * <servlet-name>SampleServlet</servlet-name> * <servlet-class>org.imsglobal.lti2.LTI2Servlet</servlet-class> * </servlet> * <servlet-mapping> * <servlet-name>SampleServlet</servlet-name> * <url-pattern>/sample/*</url-pattern> * </servlet-mapping> * * The navigate to: * http://localhost/testservlet/sample/register * * A PHP endpoint is available at: * * https://source.sakaiproject.org/svn/basiclti/trunk/basiclti-docs/resources/docs/sakai-api-test * * The tp.php script is the Tool Provider registration endpoint in the PHP code * */ @SuppressWarnings("deprecation") public class LTI2Servlet extends HttpServlet { private static final long serialVersionUID = 1L; private static Log M_log = LogFactory.getLog(LTI2Servlet.class); protected Service_offered LTI2ResultItem = null; protected Service_offered LTI2LtiLinkSettings = null; protected Service_offered LTI2ToolProxyBindingSettings = null; protected Service_offered LTI2ToolProxySettings = null; private static final String SVC_tc_profile = "tc_profile"; private static final String SVC_tc_registration = "tc_registration"; private static final String SVC_Settings = "Settings"; private static final String SVC_Result = "Result"; @SuppressWarnings("unused") private static final String EMPTY_JSON_OBJECT = "{\n}\n"; private static final String APPLICATION_JSON = "application/json"; // Normally these would be in a database private static String TEST_KEY = "42"; private static String TEST_SECRET = "zaphod"; // Pretending to be a database row :) private static Map<String, String> PERSIST = new TreeMap<String, String> (); @Override public void init(ServletConfig config) throws ServletException { super.init(config); } protected void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { doPost(request,response); } protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { 
doPost(request,response); } protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { doRequest(request, response); } catch (Exception e) { String ipAddress = request.getRemoteAddr(); String uri = request.getRequestURI(); M_log.warn("General LTI2 Failure URI="+uri+" IP=" + ipAddress); e.printStackTrace(); response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR); doErrorJSON(request, response, null, "General failure", e); } } @SuppressWarnings("unused") protected void doRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { System.out.println("getServiceURL="+getServiceURL(request)); String ipAddress = request.getRemoteAddr(); System.out.println("LTI Service request from IP=" + ipAddress); String rpi = request.getPathInfo(); String uri = request.getRequestURI(); String [] parts = uri.split("/"); if ( parts.length < 4 ) { response.setStatus(HttpServletResponse.SC_BAD_REQUEST); doErrorJSON(request, response, null, "Incorrect url format", null); return; } String controller = parts[3]; if ( "register".equals(controller) ) { doRegister(request,response); return; } else if ( "launch".equals(controller) ) { doLaunch(request,response); return; } else if ( SVC_tc_profile.equals(controller) && parts.length == 5 ) { String profile_id = parts[4]; getToolConsumerProfile(request,response,profile_id); return; } else if ( SVC_tc_registration.equals(controller) && parts.length == 5 ) { String profile_id = parts[4]; registerToolProviderProfile(request, response, profile_id); return; } else if ( SVC_Result.equals(controller) && parts.length == 5 ) { String sourcedid = parts[4]; handleResultRequest(request, response, sourcedid); return; } else if ( SVC_Settings.equals(controller) && parts.length >= 6 ) { handleSettingsRequest(request, response, parts); return; } IMSJSONRequest jsonRequest = new IMSJSONRequest(request); if ( jsonRequest.valid ) { 
System.out.println(jsonRequest.getPostBody()); } response.setStatus(HttpServletResponse.SC_NOT_IMPLEMENTED); M_log.warn("Unknown request="+uri); doErrorJSON(request, response, null, "Unknown request="+uri, null); } protected void doRegister(HttpServletRequest request, HttpServletResponse response) { // Reset our database PERSIST.clear(); String launch_url = request.getParameter("launch_url"); response.setContentType("text/html"); String output = null; if ( launch_url != null ) { Properties ltiProps = new Properties(); ltiProps.setProperty(BasicLTIConstants.LTI_VERSION, LTI2Constants.LTI2_VERSION_STRING); ltiProps.setProperty(LTI2Constants.REG_KEY,TEST_KEY); ltiProps.setProperty(LTI2Constants.REG_PASSWORD,TEST_SECRET); ltiProps.setProperty(BasicLTIUtil.BASICLTI_SUBMIT, "Press to Launch External Tool"); ltiProps.setProperty(BasicLTIConstants.LTI_MESSAGE_TYPE, BasicLTIConstants.LTI_MESSAGE_TYPE_TOOLPROXYREGISTRATIONREQUEST); String serverUrl = getServiceURL(request); ltiProps.setProperty(LTI2Constants.TC_PROFILE_URL,serverUrl + SVC_tc_profile + "/" + TEST_KEY); ltiProps.setProperty(BasicLTIConstants.LAUNCH_PRESENTATION_RETURN_URL, serverUrl + "launch"); System.out.println("ltiProps="+ltiProps); boolean dodebug = true; output = BasicLTIUtil.postLaunchHTML(ltiProps, launch_url, dodebug); } else { output = "<form>Register URL:<br/><input type=\"text\" name=\"launch_url\" size=\"80\"\n" + "value=\"http://localhost:8888/sakai-api-test/tp.php\"><input type=\"submit\">\n"; } try { PrintWriter out = response.getWriter(); out.println(output); } catch (Exception e) { e.printStackTrace(); } } // We are actually bypassing the activation step. Usually activation will parse // the profile, and install a tool if the admin is happy. For us we just parse // the profile and do a launch. 
	/**
	 * Launches the registered tool using the previously stored provider
	 * profile: extracts the launch endpoint and custom parameters, builds
	 * the substitution variables that point back at our settings/result
	 * services, signs the launch with the shared_secret from the security
	 * contract, and renders an auto-submitting launch page.
	 */
	@SuppressWarnings("unused")
	protected void doLaunch(HttpServletRequest request, HttpServletResponse response) {
		String profile = PERSIST.get("profile");
		response.setContentType("text/html");
		String output = null;
		if ( profile == null ) {
			output = "Missing profile";
		} else {
			JSONObject providerProfile = (JSONObject) JSONValue.parse(profile);
			List<Properties> profileTools = new ArrayList<Properties> ();
			Properties info = new Properties();
			String retval = LTI2Util.parseToolProfile(profileTools, info, providerProfile);
			String launch = null;
			String parameter = null;
			// NOTE(review): when several tools are present the last one wins
			for ( Properties profileTool : profileTools ) {
				launch = (String) profileTool.get("launch");
				parameter = (String) profileTool.get("parameter");
			}
			JSONObject security_contract = (JSONObject) providerProfile.get(LTI2Constants.SECURITY_CONTRACT);
			String shared_secret = (String) security_contract.get(LTI2Constants.SHARED_SECRET);
			System.out.println("launch="+launch);
			System.out.println("shared_secret="+shared_secret);

			Properties ltiProps = LTI2SampleData.getLaunch();
			ltiProps.setProperty(BasicLTIConstants.LTI_VERSION,BasicLTIConstants.LTI_VERSION_2);

			// Substitution variables pointing back at our own service endpoints
			Properties lti2subst = LTI2SampleData.getSubstitution();
			String settings_url = getServiceURL(request) + SVC_Settings + "/";
			lti2subst.setProperty("LtiLink.custom.url", settings_url + LTI2Util.SCOPE_LtiLink + "/"
				+ ltiProps.getProperty(BasicLTIConstants.RESOURCE_LINK_ID));
			lti2subst.setProperty("ToolProxyBinding.custom.url", settings_url + LTI2Util.SCOPE_ToolProxyBinding + "/"
				+ ltiProps.getProperty(BasicLTIConstants.CONTEXT_ID));
			lti2subst.setProperty("ToolProxy.custom.url", settings_url + LTI2Util.SCOPE_ToolProxy + "/" + TEST_KEY);
			lti2subst.setProperty("Result.url", getServiceURL(request) + SVC_Result + "/"
				+ ltiProps.getProperty(BasicLTIConstants.RESOURCE_LINK_ID));

			// Do the substitutions
			Properties custom = new Properties();
			LTI2Util.mergeLTI2Parameters(custom, parameter);
			LTI2Util.substituteCustom(custom, lti2subst);

			// Place the custom values into the launch
			LTI2Util.addCustomToLaunch(ltiProps, custom);

			ltiProps = BasicLTIUtil.signProperties(ltiProps, launch, "POST",
				TEST_KEY, shared_secret, null, null, null);

			boolean dodebug = true;
			output = BasicLTIUtil.postLaunchHTML(ltiProps, launch, dodebug);
		}

		try {
			PrintWriter out = response.getWriter();
			out.println(output);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Serves our Tool Consumer profile as pretty-printed JSON.
	 */
	protected void getToolConsumerProfile(HttpServletRequest request,
			HttpServletResponse response,String profile_id) {
		// Map<String,Object> deploy = ltiService.getDeployForConsumerKeyDao(profile_id);
		Map<String,Object> deploy = null;

		ToolConsumer consumer = buildToolConsumerProfile(request, deploy, profile_id);

		ObjectMapper mapper = new ObjectMapper();
		try {
			// http://stackoverflow.com/questions/6176881/how-do-i-make-jackson-pretty-print-the-json-content-it-generates
			ObjectWriter writer = mapper.defaultPrettyPrintingWriter();
			// ***IMPORTANT!!!*** for Jackson 2.x use the line below instead of the one above:
			// ObjectWriter writer = mapper.writer().withDefaultPrettyPrinter();
			// System.out.println(mapper.writeValueAsString(consumer));
			response.setContentType(APPLICATION_JSON);
			PrintWriter out = response.getWriter();
			out.println(writer.writeValueAsString(consumer));
			// System.out.println(writer.writeValueAsString(consumer));
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	// Normally deploy would have the data about the deployment - for this test
	// it is always null and we allow everything
	/**
	 * Builds the Tool Consumer profile: the offered capabilities plus the
	 * offered service endpoints (registration, result, and the three
	 * settings scopes).  The {...} segments are URI template variables the
	 * provider fills in at call time.
	 */
	protected ToolConsumer buildToolConsumerProfile(HttpServletRequest request, Map<String, Object> deploy, String profile_id) {
		// Load the configuration data
		LTI2Config cnf = new org.imsglobal.lti2.LTI2ConfigSample();

		ToolConsumer consumer = new ToolConsumer(profile_id+"", getServiceURL(request), cnf);

		// Normally we would check permissions before we offer capabilities
		List<String> capabilities = consumer.getCapability_offered();
		LTI2Util.allowEmail(capabilities);
		LTI2Util.allowName(capabilities);
		LTI2Util.allowSettings(capabilities);
		LTI2Util.allowResult(capabilities);

		// Normally we would check permissions before we offer services
		List<Service_offered> services = consumer.getService_offered();
		services.add(StandardServices.LTI2Registration(getServiceURL(request) +
			SVC_tc_registration + "/" + profile_id));
		services.add(StandardServices.LTI2ResultItem(getServiceURL(request) +
			SVC_Result + "/{" + BasicLTIConstants.LIS_RESULT_SOURCEDID + "}"));
		services.add(StandardServices.LTI2LtiLinkSettings(getServiceURL(request) +
			SVC_Settings + "/" + LTI2Util.SCOPE_LtiLink + "/{" + BasicLTIConstants.RESOURCE_LINK_ID + "}"));
		services.add(StandardServices.LTI2ToolProxySettings(getServiceURL(request) +
			SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxyBinding + "/{" + BasicLTIConstants.CONTEXT_ID + "}"));
		services.add(StandardServices.LTI2ToolProxySettings(getServiceURL(request) +
			SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxy + "/{" + LTI2Constants.TOOL_PROXY_GUID + "}"));
		return consumer;
	}

	/**
	 * Receives and validates the Tool Proxy posted by the provider: checks
	 * the OAuth body signature, the security contract, and that the
	 * requested services/capabilities are a subset of what we offered.
	 * On success the profile is persisted for doLaunch() and a 201 with a
	 * ToolProxy id document is returned (continued below).
	 */
	@SuppressWarnings({ "unchecked", "unused", "rawtypes" })
	public void registerToolProviderProfile(HttpServletRequest request,HttpServletResponse response,
			String profile_id) throws java.io.IOException {
		// Normally we would look up the deployment descriptor
		if ( ! TEST_KEY.equals(profile_id) ) {
			response.setStatus(HttpServletResponse.SC_NOT_FOUND);
			return;
		}

		String key = TEST_KEY;
		String secret = TEST_SECRET;

		IMSJSONRequest jsonRequest = new IMSJSONRequest(request);
		if ( ! jsonRequest.valid ) {
			response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
			doErrorJSON(request, response, jsonRequest, "Request is not in a valid format", null);
			return;
		}
		System.out.println(jsonRequest.getPostBody());

		// Lets check the signature
		if ( key == null || secret == null ) {
			response.setStatus(HttpServletResponse.SC_FORBIDDEN);
			doErrorJSON(request, response, jsonRequest, "Deployment is missing credentials", null);
			return;
		}

		jsonRequest.validateRequest(key, secret, request);
		if ( !jsonRequest.valid ) {
			response.setStatus(HttpServletResponse.SC_FORBIDDEN);
			doErrorJSON(request, response, jsonRequest, "OAuth signature failure", null);
			return;
		}

		JSONObject providerProfile = (JSONObject) JSONValue.parse(jsonRequest.getPostBody());
		// System.out.println("OBJ:"+providerProfile);
		if ( providerProfile == null ) {
			response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
			doErrorJSON(request, response, jsonRequest, "JSON parse failed", null);
			return;
		}

		JSONObject default_custom = (JSONObject) providerProfile.get(LTI2Constants.CUSTOM);

		JSONObject security_contract = (JSONObject) providerProfile.get(LTI2Constants.SECURITY_CONTRACT);
		if ( security_contract == null ) {
			response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
			doErrorJSON(request, response, jsonRequest, "JSON missing security_contract", null);
			return;
		}

		String shared_secret = (String) security_contract.get(LTI2Constants.SHARED_SECRET);
		System.out.println("shared_secret="+shared_secret);
		if ( shared_secret == null ) {
			response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
			doErrorJSON(request, response, jsonRequest, "JSON missing shared_secret", null);
			return;
		}

		// Make sure that the requested services are a subset of the offered services
		ToolConsumer consumer = buildToolConsumerProfile(request, null, profile_id);
		JSONArray tool_services = (JSONArray) security_contract.get(LTI2Constants.TOOL_SERVICE);
		String retval = LTI2Util.validateServices(consumer, providerProfile);
		if ( retval != null ) {
response.setStatus(HttpServletResponse.SC_BAD_REQUEST); doErrorJSON(request, response, jsonRequest, retval, null); return; } // Parse the tool profile bit and extract the tools with error checking retval = LTI2Util.validateCapabilities(consumer, providerProfile); if ( retval != null ) { response.setStatus(HttpServletResponse.SC_BAD_REQUEST); doErrorJSON(request, response, jsonRequest, retval, null); return; } // Pass the profile to the launch process PERSIST.put("profile", providerProfile.toString()); // Share our happiness with the Tool Provider Map jsonResponse = new TreeMap(); jsonResponse.put(LTI2Constants.CONTEXT,StandardServices.TOOLPROXY_ID_CONTEXT); jsonResponse.put(LTI2Constants.TYPE, StandardServices.TOOLPROXY_ID_TYPE); jsonResponse.put(LTI2Constants.JSONLD_ID, getServiceURL(request) + SVC_tc_registration + "/" +profile_id); jsonResponse.put(LTI2Constants.TOOL_PROXY_GUID, profile_id); jsonResponse.put(LTI2Constants.CUSTOM_URL, getServiceURL(request) + SVC_Settings + "/" + LTI2Util.SCOPE_ToolProxy + "/" +profile_id); response.setContentType(StandardServices.TOOLPROXY_ID_FORMAT); response.setStatus(HttpServletResponse.SC_CREATED); String jsonText = JSONValue.toJSONString(jsonResponse); M_log.debug(jsonText); PrintWriter out = response.getWriter(); out.println(jsonText); } public String getServiceURL(HttpServletRequest request) { String scheme = request.getScheme(); // http String serverName = request.getServerName(); // localhost int serverPort = request.getServerPort(); // 80 String contextPath = request.getContextPath(); // /imsblis String servletPath = request.getServletPath(); // /ltitest String url = scheme+"://"+serverName+":"+serverPort+contextPath+servletPath+"/"; return url; } @SuppressWarnings({ "rawtypes", "unchecked" }) public void handleResultRequest(HttpServletRequest request,HttpServletResponse response, String sourcedid) throws java.io.IOException { IMSJSONRequest jsonRequest = null; String retval = null; if ( 
"GET".equals(request.getMethod()) ) { String grade = PERSIST.get("grade"); String comment = PERSIST.get("comment"); Map jsonResponse = new TreeMap(); Map resultScore = new TreeMap(); jsonResponse.put(LTI2Constants.CONTEXT,StandardServices.RESULT_CONTEXT); jsonResponse.put(LTI2Constants.TYPE, StandardServices.RESULT_TYPE); resultScore.put(LTI2Constants.TYPE, LTI2Constants.GRADE_TYPE_DECIMAL); jsonResponse.put(LTI2Constants.COMMENT, grade); resultScore.put(LTI2Constants.VALUE, comment); jsonResponse.put(LTI2Constants.RESULTSCORE,resultScore); response.setContentType(StandardServices.RESULT_FORMAT); response.setStatus(HttpServletResponse.SC_OK); String jsonText = JSONValue.toJSONString(jsonResponse); M_log.debug(jsonText); PrintWriter out = response.getWriter(); out.println(jsonText); return; } else if ( "PUT".equals(request.getMethod()) ) { retval = "Error parsing input data"; try { jsonRequest = new IMSJSONRequest(request); // System.out.println(jsonRequest.getPostBody()); JSONObject requestData = (JSONObject) JSONValue.parse(jsonRequest.getPostBody()); String comment = (String) requestData.get(LTI2Constants.COMMENT); JSONObject resultScore = (JSONObject) requestData.get(LTI2Constants.RESULTSCORE); String sGrade = (String) resultScore.get(LTI2Constants.VALUE); Double dGrade = new Double(sGrade); PERSIST.put("comment", comment); PERSIST.put("grade", dGrade+""); response.setStatus(HttpServletResponse.SC_OK); return; } catch (Exception e) { retval = "Error: "+ e.getMessage(); } } else { retval = "Unsupported operation:" + request.getMethod(); } response.setStatus(HttpServletResponse.SC_BAD_REQUEST); doErrorJSON(request,response, jsonRequest, (String) retval, null); } // If this code looks like a hack - it is because the spec is a hack. // There are five possible scenarios for GET and two possible scenarios // for PUT. I begged to simplify the business logic but was overrulled. // So we write obtuse code. 
	/**
	 * LTI2 Settings service for the three scopes (LtiLink,
	 * ToolProxyBinding, ToolProxy); parts[4] carries the scope.  GET
	 * delegates the scope logic to LTI2Util.getSettings(); PUT stores the
	 * settings (complex or simple format) under the scope key.  All
	 * requests are OAuth-validated against the shared_secret from the
	 * stored provider profile.
	 */
	@SuppressWarnings("unused")
	public void handleSettingsRequest(HttpServletRequest request,HttpServletResponse response,
			String[] parts) throws java.io.IOException {
		String URL = request.getRequestURL().toString();
		System.out.println("URL="+URL);
		String scope = parts[4];
		System.out.println("scope="+scope);

		String acceptHdr = request.getHeader("Accept");
		String contentHdr = request.getContentType();
		// No Accept header, or one that mentions the complex format, gets
		// the complex (graph) representation.
		boolean acceptComplex = acceptHdr == null || acceptHdr.indexOf(StandardServices.TOOLSETTINGS_FORMAT) >= 0 ;
		System.out.println("accept="+acceptHdr+" ac="+acceptComplex);

		// Check the JSON on PUT and check the oauth_body_hash
		IMSJSONRequest jsonRequest = null;
		JSONObject requestData = null;
		if ( "PUT".equals(request.getMethod()) ) {
			try {
				jsonRequest = new IMSJSONRequest(request);
				requestData = (JSONObject) JSONValue.parse(jsonRequest.getPostBody());
			} catch (Exception e) {
				response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
				doErrorJSON(request,response, jsonRequest, "Could not parse JSON", e);
				return;
			}
		}

		String consumer_key = TEST_KEY;
		// NOTE(review): assumes a registration has already stored a profile;
		// a missing profile would NPE below - acceptable for this test tool.
		String profile = PERSIST.get("profile");
		JSONObject providerProfile = (JSONObject) JSONValue.parse(profile);
		JSONObject security_contract = (JSONObject) providerProfile.get(LTI2Constants.SECURITY_CONTRACT);
		String oauth_secret = (String) security_contract.get(LTI2Constants.SHARED_SECRET);

		// Validate the incoming message
		Object retval = BasicLTIUtil.validateMessage(request, URL, oauth_secret, consumer_key);
		if ( retval instanceof String ) {
			response.setStatus(HttpServletResponse.SC_FORBIDDEN);
			doErrorJSON(request,response, jsonRequest, (String) retval, null);
			return;
		}

		// The URLs for the various settings resources
		String settingsUrl = getServiceURL(request) + SVC_Settings;
		String proxy_url = settingsUrl + "/" + LTI2Util.SCOPE_ToolProxy + "/" + consumer_key;
		String binding_url = settingsUrl + "/" + LTI2Util.SCOPE_ToolProxyBinding + "/" + "TBD";
		String link_url = settingsUrl + "/" + LTI2Util.SCOPE_LtiLink + "/" + "TBD";

		// Load and parse the old settings...
		JSONObject link_settings = LTI2Util.parseSettings(PERSIST.get(LTI2Util.SCOPE_LtiLink));
		JSONObject binding_settings = LTI2Util.parseSettings(PERSIST.get(LTI2Util.SCOPE_ToolProxyBinding));
		JSONObject proxy_settings = LTI2Util.parseSettings(PERSIST.get(LTI2Util.SCOPE_ToolProxy));

		// For a GET request we depend on LTI2Util to do the GET logic
		if ( "GET".equals(request.getMethod()) ) {
			Object obj = LTI2Util.getSettings(request, scope,
				link_settings, binding_settings, proxy_settings,
				link_url, binding_url, proxy_url);

			if ( obj instanceof String ) {
				response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
				doErrorJSON(request,response, jsonRequest, (String) obj, null);
				return;
			}

			if ( acceptComplex ) {
				response.setContentType(StandardServices.TOOLSETTINGS_FORMAT);
			} else {
				response.setContentType(StandardServices.TOOLSETTINGS_SIMPLE_FORMAT);
			}

			JSONObject jsonResponse = (JSONObject) obj;
			response.setStatus(HttpServletResponse.SC_OK);
			PrintWriter out = response.getWriter();
			System.out.println("jsonResponse="+jsonResponse);
			out.println(jsonResponse.toString());
			return;
		} else if ( "PUT".equals(request.getMethod()) ) {
			// This is assuming the rule that a PUT of the complex settings
			// format that there is only one entry in the graph and it is
			// the same as our current URL. We parse without much checking.
			String settings = null;
			try {
				JSONArray graph = (JSONArray) requestData.get(LTI2Constants.GRAPH);
				if ( graph.size() != 1 ) {
					response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
					doErrorJSON(request,response, jsonRequest, "Only one graph entry allowed", null);
					return;
				}
				JSONObject firstChild = (JSONObject) graph.get(0);
				JSONObject custom = (JSONObject) firstChild.get(LTI2Constants.CUSTOM);
				settings = custom.toString();
			} catch (Exception e) {
				// Not the complex format - store the raw (simple) body instead
				settings = jsonRequest.getPostBody();
			}
			PERSIST.put(scope,settings);
			System.out.println("Stored settings scope="+scope);
			System.out.println("settings="+settings);
			response.setStatus(HttpServletResponse.SC_OK);
		} else {
			response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
			doErrorJSON(request,response, jsonRequest, "Method not handled="+request.getMethod(), null);
		}
	}

	/* IMS JSON version of Errors */
	/**
	 * Logs the problem and writes the standard IMS JSON error body; the
	 * caller is responsible for setting the HTTP status first.
	 */
	public void doErrorJSON(HttpServletRequest request,HttpServletResponse response,
			IMSJSONRequest json, String message, Exception e)
		throws java.io.IOException {
		if (e != null) {
			M_log.error(e.getLocalizedMessage(), e);
		}
		M_log.info(message);
		String output = IMSJSONRequest.doErrorJSON(request, response, json, message, e);
		System.out.println(output);
	}

	public void destroy() {
	}

}
package nl.hanze.gameserver.server; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.Iterator; import java.util.Set; import nl.hanze.gameserver.app.Application; import nl.hanze.gameserver.server.command.*; import nl.hanze.gameserver.util.Log; import nl.hanze.gameserver.util.ReadWriteBuffer; public class GameServer implements Runnable { private static final int BUFFER_SIZE = 4 * 1024; // Selector thread private Thread thread; // Value controlling whether thread should continue to run private boolean running; private ServerSocketChannel serverSocketChannel; private Selector selector; private Object opsChangeLock; // Handles client input data private ClientInputHandler clientInputHandler; // Handles clients private ClientManager clientManager; public GameServer() throws IOException { CommandHandlerResolver commandHandlerResolver = new CommandHandlerResolver(new UnsupportedCommandHandler()); commandHandlerResolver.addHandler(new LoginCommandHandler()); commandHandlerResolver.addHandler(new LogoutCommandHandler()); commandHandlerResolver.addHandler(new GetCommandHandler()); commandHandlerResolver.addHandler(new SubscribeCommandHandler()); commandHandlerResolver.addHandler(new UnsubscribeCommandHandler()); commandHandlerResolver.addHandler(new MoveCommandHandler()); commandHandlerResolver.addHandler(new ForfeitCommandHandler()); commandHandlerResolver.addHandler(new ChallengeCommandHandler()); commandHandlerResolver.addHandler(new HelpCommandHandler()); commandHandlerResolver.addHandler(new MessageCommandHandler()); clientInputHandler = new ClientInputHandler(commandHandlerResolver); clientManager = new ClientManager(); serverSocketChannel = ServerSocketChannel.open(); serverSocketChannel.configureBlocking(false); int port = 
Application.getInstance().getSettings().getListenerPort(); serverSocketChannel.socket().bind(new InetSocketAddress(port)); Log.DEBUG.printf("Server listening on port %d", port); selector = Selector.open(); serverSocketChannel.register(selector, SelectionKey.OP_ACCEPT); opsChangeLock = new Object(); thread = new Thread(this, "GameServer-Thread"); running = true; thread.start(); } @Override public void run() { while(running) { try { selector.select(); synchronized(opsChangeLock) {} } catch (IOException e) { e.printStackTrace(); running = false; continue; } if(!selector.isOpen()) { running = false; continue; } Set<SelectionKey> selectionKeys = selector.selectedKeys(); Iterator<SelectionKey> it = selectionKeys.iterator(); while(it.hasNext()) { SelectionKey key = it.next(); it.remove(); if(!key.isValid()) { Log.DEBUG.println("Key is invalid"); disconnect(key); continue; } try { if(key.isAcceptable()) { handleAccept(); } else { if(key.isReadable()) { handleRead(key); } if(!key.isValid()) { continue; } if(key.isWritable()) { handleWrite(key); } } } catch (IOException e) { Log.ERROR.printf("IOException while processing key: %s", e); disconnect(key); } catch (Exception e) { Log.ERROR.printf("Exception while processing key: %s", e); disconnect(key); } } } } private void handleAccept() throws IOException { SocketChannel channel = serverSocketChannel.accept(); channel.configureBlocking(false); ReadWriteBuffer rwBuffer = new ReadWriteBuffer(ByteBuffer.allocateDirect(BUFFER_SIZE), ByteBuffer.allocateDirect(BUFFER_SIZE)); Client client = new Client(channel, rwBuffer, clientManager); channel.register(selector, SelectionKey.OP_READ, client); client.writeLine(String.format("%s [Version %s]", Application.getInstance().getName(), Application.getInstance().getVersion())); client.writeLine("(C) Copyright 2009-2016 Hanzehogeschool Groningen"); } private void handleRead(SelectionKey key) throws IOException { SocketChannel channel = (SocketChannel) key.channel(); Client client = (Client) 
key.attachment(); ReadWriteBuffer buffers = client.getBuffers(); ByteBuffer readBuffer = buffers.getReadBuffer(); int readSize; readSize = channel.read(readBuffer); if(readSize < 0) { Log.DEBUG.println("Read -1 bytes, disconnecting client"); disconnect(key); return; } readBuffer.flip(); byte[] data = new byte[readBuffer.remaining()]; readBuffer.get(data); readBuffer.clear(); clientInputHandler.addData(client, data); } private void handleWrite(SelectionKey key) throws IOException { key.interestOps(key.interestOps() & ~SelectionKey.OP_WRITE); SocketChannel client = (SocketChannel) key.channel(); ByteBuffer writeBuffer = ((Client) key.attachment()).getWriteBuffer(); synchronized(writeBuffer) { writeBuffer.flip(); client.write(writeBuffer); if(writeBuffer.hasRemaining()) { key.interestOps(key.interestOps() | SelectionKey.OP_WRITE); } writeBuffer.compact(); } } private void disconnect(SelectionKey key) { try { SocketChannel client = (SocketChannel) key.channel(); Log.DEBUG.println("Disconnecting client"); if(client != null) { clientInputHandler.addData((Client) key.attachment(), null); Log.DEBUG.println("Closing client socket connection"); client.close(); } key.cancel(); key.attach(null); } catch (IOException e) { e.printStackTrace(); } catch (NullPointerException e) { System.out.println("Client already disconnected."); } catch (Exception e) { e.printStackTrace(); } } public void disconnect(SocketChannel client) { SelectionKey key = client.keyFor(selector); synchronized(opsChangeLock) { selector.wakeup(); disconnect(key); } } public void setWritable(SocketChannel client) { SelectionKey key = client.keyFor(selector); synchronized(opsChangeLock) { selector.wakeup(); key.interestOps(key.interestOps() | SelectionKey.OP_WRITE); } } public void exit() { running = false; try { for(SelectionKey key: selector.keys()) { try { if(key.channel() != null && key.attachment() != null) { disconnect(key); } } catch (Exception e) { e.printStackTrace(); } } } catch (Exception e) { 
e.printStackTrace(); } try { Log.DEBUG.println("Closing client input handler"); clientInputHandler.exit(); } catch (Exception e) { e.printStackTrace(); } try { Log.DEBUG.println("Closing server selector"); selector.close(); } catch (IOException e) { e.printStackTrace(); } try { Log.DEBUG.println("Closing server socket"); serverSocketChannel.close(); } catch (IOException e) { e.printStackTrace(); } } public ClientManager getClientManager() { return clientManager; } public ClientInputHandler getClientInputHandler() { return clientInputHandler; } public void delayedCommand(Client client, String command) { clientInputHandler.addData(client, command.getBytes()); } }
/* * Copyright 2002-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.config.annotation.web.configurers; import java.util.Arrays; import java.util.List; import javax.servlet.http.HttpServletRequest; import okhttp3.mockwebserver.MockResponse; import okhttp3.mockwebserver.MockWebServer; import org.junit.Rule; import org.junit.Test; import org.openid4java.consumer.ConsumerManager; import org.openid4java.discovery.DiscoveryInformation; import org.openid4java.message.AuthRequest; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.security.authentication.AuthenticationDetailsSource; import org.springframework.security.authentication.AuthenticationServiceException; import org.springframework.security.config.annotation.ObjectPostProcessor; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.test.SpringTestRule; import org.springframework.security.core.Authentication; import org.springframework.security.core.authority.AuthorityUtils; import 
org.springframework.security.core.userdetails.AuthenticationUserDetailsService; import org.springframework.security.core.userdetails.User; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.openid.OpenIDAttribute; import org.springframework.security.openid.OpenIDAuthenticationFilter; import org.springframework.security.openid.OpenIDAuthenticationStatus; import org.springframework.security.openid.OpenIDAuthenticationToken; import org.springframework.security.openid.OpenIDConsumer; import org.springframework.security.provisioning.InMemoryUserDetailsManager; import org.springframework.security.web.authentication.SavedRequestAwareAuthenticationSuccessHandler; import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler; import org.springframework.security.web.authentication.WebAuthenticationDetailsSource; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.MvcResult; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.openid4java.discovery.yadis.YadisResolver.YADIS_XRDS_LOCATION; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * Tests to verify that all the functionality of <openid-login> 
attributes is present
 *
 * @author Rob Winch
 * @author Josh Cummings
 */
public class NamespaceHttpOpenIDLoginTests {
	@Rule
	public final SpringTestRule spring = new SpringTestRule();

	@Autowired
	MockMvc mvc;

	// Default config: unauthenticated requests redirect to /login and a
	// failed OpenID submission redirects to /login?error.
	@Test
	public void openidLoginWhenUsingDefaultsThenMatchesNamespace() throws Exception {
		this.spring.register(OpenIDLoginConfig.class).autowire();

		this.mvc.perform(get("/"))
			.andExpect(redirectedUrl("http://localhost/login"));

		this.mvc.perform(post("/login/openid").with(csrf()))
			.andExpect(redirectedUrl("/login?error"));
	}

	@Configuration
	@EnableWebSecurity
	static class OpenIDLoginConfig extends WebSecurityConfigurerAdapter {
		@Override
		protected void configure(HttpSecurity http) throws Exception {
			http
				.authorizeRequests()
					.anyRequest().hasRole("USER")
					.and()
				.openidLogin()
					.permitAll();
		}
	}

	// Drives a discovery round trip against a MockWebServer standing in for
	// the Yadis endpoint, then checks that the attribute-exchange DSL put
	// the configured fetch attributes into the session.
	@Test
	public void openidLoginWhenAttributeExchangeConfiguredThenFetchAttributesMatchAttributeList() throws Exception {
		OpenIDLoginAttributeExchangeConfig.CONSUMER_MANAGER = mock(ConsumerManager.class);
		AuthRequest mockAuthRequest = mock(AuthRequest.class);
		DiscoveryInformation mockDiscoveryInformation = mock(DiscoveryInformation.class);
		when(mockAuthRequest.getDestinationUrl(anyBoolean())).thenReturn("mockUrl");
		when(OpenIDLoginAttributeExchangeConfig.CONSUMER_MANAGER.associate(any()))
			.thenReturn(mockDiscoveryInformation);
		when(OpenIDLoginAttributeExchangeConfig.CONSUMER_MANAGER.authenticate(any(DiscoveryInformation.class), any(), any()))
			.thenReturn(mockAuthRequest);
		this.spring.register(OpenIDLoginAttributeExchangeConfig.class).autowire();

		try (MockWebServer server = new MockWebServer()) {
			String endpoint = server.url("/").toString();

			// First response points discovery at ourselves, second serves a
			// minimal XRDS document naming this server as the OP endpoint.
			server.enqueue(new MockResponse()
				.addHeader(YADIS_XRDS_LOCATION, endpoint));
			server.enqueue(new MockResponse()
				.setBody(String.format("<XRDS><XRD><Service><URI>%s</URI></Service></XRD></XRDS>", endpoint)));

			MvcResult mvcResult = this.mvc.perform(get("/login/openid")
					.param(OpenIDAuthenticationFilter.DEFAULT_CLAIMED_IDENTITY_FIELD, "https://www.google.com/1"))
				.andExpect(status().isFound())
				.andReturn();

			Object attributeObject = mvcResult.getRequest().getSession().getAttribute("SPRING_SECURITY_OPEN_ID_ATTRIBUTES_FETCH_LIST");
			assertThat(attributeObject).isInstanceOf(List.class);

			List<OpenIDAttribute> attributeList = (List<OpenIDAttribute>) attributeObject;
			assertThat(attributeList.stream()
				.anyMatch(attribute -> "firstname".equals(attribute.getName())
					&& "https://axschema.org/namePerson/first".equals(attribute.getType())
					&& attribute.isRequired()))
				.isTrue();
			assertThat(attributeList.stream()
				.anyMatch(attribute -> "lastname".equals(attribute.getName())
					&& "https://axschema.org/namePerson/last".equals(attribute.getType())
					&& attribute.isRequired()))
				.isTrue();
			assertThat(attributeList.stream()
				.anyMatch(attribute -> "email".equals(attribute.getName())
					&& "https://axschema.org/contact/email".equals(attribute.getType())
					&& attribute.isRequired()))
				.isTrue();
		}
	}

	@Configuration
	@EnableWebSecurity
	static class OpenIDLoginAttributeExchangeConfig extends WebSecurityConfigurerAdapter {
		static ConsumerManager CONSUMER_MANAGER;

		@Override
		protected void configure(HttpSecurity http) throws Exception {
			http
				.authorizeRequests()
					.anyRequest().hasRole("USER")
					.and()
				.openidLogin()
					.consumerManager(CONSUMER_MANAGER)
					.attributeExchange("https://www.google.com/.*") // attribute-exchange@identifier-match
						.attribute("email") // openid-attribute@name
							.type("https://axschema.org/contact/email") // openid-attribute@type
							.required(true) // openid-attribute@required
							.count(1) // openid-attribute@count
							.and()
						.attribute("firstname")
							.type("https://axschema.org/namePerson/first")
							.required(true)
							.and()
						.attribute("lastname")
							.type("https://axschema.org/namePerson/last")
							.required(true)
							.and()
						.and()
					.attributeExchange(".*yahoo.com.*")
						.attribute("email")
							.type("https://schema.openid.net/contact/email")
							.required(true)
							.and()
						.attribute("fullname")
							.type("https://axschema.org/namePerson")
							.required(true)
							.and()
						.and()
					.permitAll();
		}
	}

	// Custom login/processing/failure URLs must be honored just like the
	// corresponding XML namespace attributes.
	@Test
	public void openidLoginWhenUsingCustomEndpointsThenMatchesNamespace() throws Exception {
		this.spring.register(OpenIDLoginCustomConfig.class).autowire();

		this.mvc.perform(get("/"))
			.andExpect(redirectedUrl("http://localhost/authentication/login"));

		this.mvc.perform(post("/authentication/login/process").with(csrf()))
			.andExpect(redirectedUrl("/authentication/login?failed"));
	}

	@Configuration
	@EnableWebSecurity
	static class OpenIDLoginCustomConfig extends WebSecurityConfigurerAdapter {
		@Override
		protected void configure(HttpSecurity http) throws Exception {
			boolean alwaysUseDefaultSuccess = true;
			http
				.authorizeRequests()
					.anyRequest().hasRole("USER")
					.and()
				.openidLogin()
					.permitAll()
					.loginPage("/authentication/login") // openid-login@login-page
					.failureUrl("/authentication/login?failed") // openid-login@authentication-failure-url
					.loginProcessingUrl("/authentication/login/process") // openid-login@login-processing-url
					.defaultSuccessUrl("/default", alwaysUseDefaultSuccess); // openid-login@default-target-url / openid-login@always-use-default-target
		}
	}

	// Custom success/failure handlers, details source and user details
	// service refs: first force a failure (handler redirect), then a
	// success (handler redirect) and verify the refs were invoked.
	@Test
	public void openidLoginWithCustomHandlersThenBehaviorMatchesNamespace() throws Exception {
		OpenIDAuthenticationToken token = new OpenIDAuthenticationToken(
			OpenIDAuthenticationStatus.SUCCESS,
			"identityUrl",
			"message",
			Arrays.asList(new OpenIDAttribute("name", "type")));
		OpenIDLoginCustomRefsConfig.AUDS = mock(AuthenticationUserDetailsService.class);
		when(OpenIDLoginCustomRefsConfig.AUDS.loadUserDetails(any(Authentication.class)))
			.thenReturn(new User("user", "password", AuthorityUtils.createAuthorityList("ROLE_USER")));
		OpenIDLoginCustomRefsConfig.ADS = spy(new WebAuthenticationDetailsSource());
		OpenIDLoginCustomRefsConfig.CONSUMER = mock(OpenIDConsumer.class);
		this.spring.register(OpenIDLoginCustomRefsConfig.class, UserDetailsServiceConfig.class).autowire();

		when(OpenIDLoginCustomRefsConfig.CONSUMER.endConsumption(any(HttpServletRequest.class)))
			.thenThrow(new AuthenticationServiceException("boom"));

		this.mvc.perform(post("/login/openid").with(csrf())
				.param("openid.identity", "identity"))
			.andExpect(redirectedUrl("/custom/failure"));

		reset(OpenIDLoginCustomRefsConfig.CONSUMER);
		when(OpenIDLoginCustomRefsConfig.CONSUMER.endConsumption(any(HttpServletRequest.class)))
			.thenReturn(token);

		this.mvc.perform(post("/login/openid").with(csrf())
				.param("openid.identity", "identity"))
			.andExpect(redirectedUrl("/custom/targetUrl"));

		verify(OpenIDLoginCustomRefsConfig.AUDS).loadUserDetails(any(Authentication.class));
		verify(OpenIDLoginCustomRefsConfig.ADS).buildDetails(any(Object.class));
	}

	@Configuration
	@EnableWebSecurity
	static class OpenIDLoginCustomRefsConfig extends WebSecurityConfigurerAdapter {
		static AuthenticationUserDetailsService AUDS;
		static AuthenticationDetailsSource ADS;
		static OpenIDConsumer CONSUMER;

		@Override
		protected void configure(HttpSecurity http) throws Exception {
			SavedRequestAwareAuthenticationSuccessHandler handler = new SavedRequestAwareAuthenticationSuccessHandler();
			handler.setDefaultTargetUrl("/custom/targetUrl");

			http
				.authorizeRequests()
					.anyRequest().hasRole("USER")
					.and()
				.openidLogin()
					// if using UserDetailsService wrap with new UserDetailsByNameServiceWrapper<OpenIDAuthenticationToken>()
					.authenticationUserDetailsService(AUDS) // openid-login@user-service-ref
					.failureHandler(new SimpleUrlAuthenticationFailureHandler("/custom/failure")) // openid-login@authentication-failure-handler-ref
					.successHandler(handler) // openid-login@authentication-success-handler-ref
					.authenticationDetailsSource(ADS) // openid-login@authentication-details-source-ref
					.withObjectPostProcessor(new ObjectPostProcessor<OpenIDAuthenticationFilter>() {
						@Override
						public <O extends OpenIDAuthenticationFilter> O postProcess(O filter) {
							filter.setConsumer(CONSUMER);
							return filter;
						}
					});
		}
	}

	@Configuration
	static class UserDetailsServiceConfig {
		@Bean
		public UserDetailsService userDetailsService() {
			return new InMemoryUserDetailsManager(
				User.withDefaultPasswordEncoder()
					.username("user")
					.password("password")
					.roles("USER")
					.build());
		}
	}
}
package org.matrix.androidsdk.call;

import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Point;
import androidx.core.view.ViewCompat;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;

import com.oney.WebRTCModule.EglUtils;
import com.oney.WebRTCModule.WebRTCView;

import org.webrtc.EglBase;
import org.webrtc.MediaStream;
import org.webrtc.RendererCommon;
import org.webrtc.RendererCommon.RendererEvents;
import org.webrtc.RendererCommon.ScalingType;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoTrack;

import java.lang.reflect.Method;
import java.util.List;

/**
 * Use the older implementation of WebRtcView.
 * The latest version uses a stream URL instead of a stream.
 * It implies to have a React context.
 */
public class MXWebRtcView extends ViewGroup {
    /**
     * The scaling type to be utilized by default.
     * <p>
     * The default value is in accord with
     * https://www.w3.org/TR/html5/embedded-content-0.html#the-video-element:
     * <p>
     * In the absence of style rules to the contrary, video content should be
     * rendered inside the element's playback area such that the video content
     * is shown centered in the playback area at the largest possible size that
     * fits completely within it, with the video content's aspect ratio being
     * preserved. Thus, if the aspect ratio of the playback area does not match
     * the aspect ratio of the video, the video will be shown letterboxed or
     * pillarboxed. Areas of the element's playback area that do not contain the
     * video represent nothing.
     */
    private static final ScalingType DEFAULT_SCALING_TYPE
        = ScalingType.SCALE_ASPECT_FIT;

    /**
     * {@link View#isInLayout()} as a <tt>Method</tt> to be invoked via
     * reflection in order to accommodate its lack of availability before API
     * level 18. {@link ViewCompat#isInLayout(View)} is the best solution but I
     * could not make it available along with
     * {@link ViewCompat#isAttachedToWindow(View)} at the time of this writing.
     */
    private static final Method IS_IN_LAYOUT;

    private static final String LOG_TAG = MXWebRtcView.class.getSimpleName();

    static {
        // IS_IN_LAYOUT: resolved once via reflection; stays null on API < 18.
        Method isInLayout = null;

        try {
            Method m = MXWebRtcView.class.getMethod("isInLayout");

            // Make sure the resolved method has the expected boolean return
            // type before trusting it.
            if (boolean.class.isAssignableFrom(m.getReturnType())) {
                isInLayout = m;
            }
        } catch (NoSuchMethodException e) {
            // Fall back to the behavior of ViewCompat#isInLayout(View).
        }
        IS_IN_LAYOUT = isInLayout;
    }

    /**
     * The height of the last video frame rendered by
     * {@link #surfaceViewRenderer}.
     */
    private int frameHeight;

    /**
     * The rotation (degree) of the last video frame rendered by
     * {@link #surfaceViewRenderer}.
     */
    private int frameRotation;

    /**
     * The width of the last video frame rendered by
     * {@link #surfaceViewRenderer}.
     */
    private int frameWidth;

    /**
     * The {@code Object} which synchronizes the access to the layout-related
     * state of this instance such as {@link #frameHeight},
     * {@link #frameRotation}, {@link #frameWidth}, and {@link #scalingType}.
     */
    private final Object layoutSyncRoot = new Object();

    /**
     * The indicator which determines whether this {@code WebRTCView} is to
     * mirror the video represented by {@link #videoTrack} during its rendering.
     */
    private boolean mirror;

    /**
     * The {@code RendererEvents} which listens to rendering events reported by
     * {@link #surfaceViewRenderer}.
     */
    private final RendererEvents rendererEvents = new RendererEvents() {
        @Override
        public void onFirstFrameRendered() {
        }

        @Override
        public void onFrameResolutionChanged(
                int videoWidth, int videoHeight,
                int rotation) {
            MXWebRtcView.this.onFrameResolutionChanged(
                videoWidth, videoHeight,
                rotation);
        }
    };

    /**
     * The {@code Runnable} representation of
     * {@link #requestSurfaceViewRendererLayout()}. Explicitly defined in order
     * to allow the use of the latter with {@link #post(Runnable)} without
     * initializing new instances on every (method) call.
     */
    private final Runnable requestSurfaceViewRendererLayoutRunnable
        = this::requestSurfaceViewRendererLayout;

    /**
     * The scaling type this {@code WebRTCView} is to apply to the video
     * represented by {@link #videoTrack} during its rendering. An expression of
     * the CSS property {@code object-fit} in the terms of WebRTC.
     */
    private ScalingType scalingType;

    /**
     * The {@link View} and {@link org.webrtc.VideoSink} implementation which
     * actually renders {@link #videoTrack} on behalf of this instance.
     */
    private final SurfaceViewRenderer surfaceViewRenderer;

    /**
     * The {@code VideoTrack}, if any, rendered by this {@code MXWebRTCView}.
     */
    private VideoTrack videoTrack;

    public MXWebRtcView(Context context) {
        super(context);

        surfaceViewRenderer = new SurfaceViewRenderer(context);
        addView(surfaceViewRenderer);

        setMirror(false);
        setScalingType(DEFAULT_SCALING_TYPE);
    }

    /**
     * Gets the {@code SurfaceViewRenderer} which renders {@link #videoTrack}.
     * Explicitly defined and used in order to facilitate switching the instance
     * at compile time. For example, reduces the number of modifications
     * necessary to switch the implementation from a {@code SurfaceViewRenderer}
     * that is a child of a {@code WebRTCView} to {@code WebRTCView} extending
     * {@code SurfaceViewRenderer}.
     *
     * @return The {@code SurfaceViewRenderer} which renders {@code videoTrack}.
     */
    private SurfaceViewRenderer getSurfaceViewRenderer() {
        return surfaceViewRenderer;
    }

    /**
     * If this <tt>View</tt> has {@link View#isInLayout()}, invokes it and
     * returns its return value; otherwise, returns <tt>false</tt> like
     * {@link ViewCompat#isInLayout(View)}.
     *
     * @return If this <tt>View</tt> has <tt>View#isInLayout()</tt>, invokes it
     * and returns its return value; otherwise, returns <tt>false</tt>.
     */
    private boolean invokeIsInLayout() {
        Method m = IS_IN_LAYOUT;
        boolean b = false;

        if (m != null) {
            try {
                b = (boolean) m.invoke(this);
            } catch (Throwable e) {
                // Fall back to the behavior of ViewCompat#isInLayout(View).
            }
        }
        return b;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void onAttachedToWindow() {
        try {
            // Generally, OpenGL is only necessary while this View is attached
            // to a window so there is no point in having the whole rendering
            // infrastructure hooked up while this View is not attached to a
            // window. Additionally, a memory leak was solved in a similar way
            // on iOS.
            tryAddRendererToVideoTrack();
        } catch (Exception e) {
            Log.e(LOG_TAG, "onAttachedToWindow", e);
        } finally {
            super.onAttachedToWindow();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void onDetachedFromWindow() {
        try {
            // Generally, OpenGL is only necessary while this View is attached
            // to a window so there is no point in having the whole rendering
            // infrastructure hooked up while this View is not attached to a
            // window. Additionally, a memory leak was solved in a similar way
            // on iOS.
            removeRendererFromVideoTrack();
        } catch (Exception e) {
            // Fixed copy/paste bug: this previously logged "onAttachedToWindow",
            // which misattributed teardown failures to the attach path.
            Log.e(LOG_TAG, "onDetachedFromWindow", e);
        } finally {
            super.onDetachedFromWindow();
        }
    }

    /**
     * Callback fired by {@link #surfaceViewRenderer} when the resolution or
     * rotation of the frame it renders has changed.
     *
     * @param videoWidth The new width of the rendered video frame.
     * @param videoHeight The new height of the rendered video frame.
     * @param rotation The new rotation of the rendered video frame.
     */
    private void onFrameResolutionChanged(int videoWidth, int videoHeight,
            int rotation) {
        boolean changed = false;

        synchronized (layoutSyncRoot) {
            if (frameHeight != videoHeight) {
                frameHeight = videoHeight;
                changed = true;
            }
            if (frameRotation != rotation) {
                frameRotation = rotation;
                changed = true;
            }
            if (frameWidth != videoWidth) {
                frameWidth = videoWidth;
                changed = true;
            }
        }
        if (changed) {
            // The onFrameResolutionChanged method call executes on the
            // surfaceViewRenderer's render Thread, so hop to the UI thread
            // before requesting a layout.
            post(requestSurfaceViewRendererLayoutRunnable);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        int height = b - t;
        int width = r - l;

        if (height == 0 || width == 0) {
            l = t = r = b = 0;
        } else {
            int frameHeight;
            int frameRotation;
            int frameWidth;
            ScalingType scalingType;

            synchronized (layoutSyncRoot) {
                frameHeight = this.frameHeight;
                frameRotation = this.frameRotation;
                frameWidth = this.frameWidth;
                scalingType = this.scalingType;
            }

            switch (scalingType) {
            case SCALE_ASPECT_FILL:
                // Fill this ViewGroup with surfaceViewRenderer and the latter
                // will take care of filling itself with the video similarly to
                // the cover value the CSS property object-fit.
                r = width;
                l = 0;
                b = height;
                t = 0;
                break;
            case SCALE_ASPECT_FIT:
            default:
                // Lay surfaceViewRenderer out inside this ViewGroup in accord
                // with the contain value of the CSS property object-fit.
                // SurfaceViewRenderer will fill itself with the video similarly
                // to the cover or contain value of the CSS property object-fit
                // (which will not matter, eventually).
                if (frameHeight == 0 || frameWidth == 0) {
                    l = t = r = b = 0;
                } else {
                    float frameAspectRatio
                        = (frameRotation % 180 == 0)
                            ? frameWidth / (float) frameHeight
                            : frameHeight / (float) frameWidth;
                    Point frameDisplaySize
                        = RendererCommon.getDisplaySize(
                            scalingType,
                            frameAspectRatio,
                            width, height);

                    l = (width - frameDisplaySize.x) / 2;
                    t = (height - frameDisplaySize.y) / 2;
                    r = l + frameDisplaySize.x;
                    b = t + frameDisplaySize.y;
                }
                break;
            }
        }
        surfaceViewRenderer.layout(l, t, r, b);
    }

    /**
     * Stops rendering {@link #videoTrack} and releases the associated acquired
     * resources (if rendering is in progress).
     */
    private void removeRendererFromVideoTrack() {
        if (surfaceViewRenderer != null) {
            if (videoTrack != null) {
                videoTrack.removeSink(surfaceViewRenderer);
            }

            getSurfaceViewRenderer().release();

            // Since this WebRTCView is no longer rendering anything, make sure
            // surfaceViewRenderer displays nothing as well.
            synchronized (layoutSyncRoot) {
                frameHeight = 0;
                frameRotation = 0;
                frameWidth = 0;
            }
            requestSurfaceViewRendererLayout();
        }
    }

    /**
     * Request that {@link #surfaceViewRenderer} be laid out (as soon as
     * possible) because layout-related state either of this instance or of
     * {@code surfaceViewRenderer} has changed.
     */
    @SuppressLint("WrongCall")
    private void requestSurfaceViewRendererLayout() {
        // Google/WebRTC just call requestLayout() on surfaceViewRenderer when
        // they change the value of its mirror or surfaceType property.
        getSurfaceViewRenderer().requestLayout();
        // The above is not enough though when the video frame's dimensions or
        // rotation change. The following will suffice.
        if (!invokeIsInLayout()) {
            onLayout(
                /* changed */ false,
                getLeft(), getTop(), getRight(), getBottom());
        }
    }

    /**
     * Sets the indicator which determines whether this {@code WebRTCView} is to
     * mirror the video represented by {@link #videoTrack} during its rendering.
     *
     * @param mirror If this {@code WebRTCView} is to mirror the video
     * represented by {@code videoTrack} during its rendering, {@code true};
     * otherwise, {@code false}.
     */
    public void setMirror(boolean mirror) {
        if (this.mirror != mirror) {
            this.mirror = mirror;

            SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();

            surfaceViewRenderer.setMirror(mirror);
            // SurfaceViewRenderer takes the value of its mirror property into
            // account upon its layout.
            requestSurfaceViewRendererLayout();
        }
    }

    /**
     * Sets the scaling type this {@code WebRTCView} is to apply to the video it
     * renders; no-op when the value is unchanged.
     *
     * @param scalingType the new {@code ScalingType} to apply.
     */
    private void setScalingType(ScalingType scalingType) {
        SurfaceViewRenderer surfaceViewRenderer;

        synchronized (layoutSyncRoot) {
            if (this.scalingType == scalingType) {
                return;
            }

            this.scalingType = scalingType;

            surfaceViewRenderer = getSurfaceViewRenderer();
            surfaceViewRenderer.setScalingType(scalingType);
        }
        // Both this instance and its SurfaceViewRenderer take the value of
        // their scalingType properties into account upon their layouts.
        requestSurfaceViewRendererLayout();
    }

    /**
     * Sets the {@code MediaStream} to be rendered by this {@code WebRTCView}.
     * The implementation renders the first {@link VideoTrack}, if any, of the
     * specified {@code mediaStream}.
     *
     * @param mediaStream The {@code MediaStream} to be rendered by this
     * {@code WebRTCView} or {@code null}.
     */
    public void setStream(MediaStream mediaStream) {
        VideoTrack videoTrack;

        if (mediaStream == null) {
            videoTrack = null;
        } else {
            List<VideoTrack> videoTracks = mediaStream.videoTracks;

            videoTrack = videoTracks.isEmpty() ? null : videoTracks.get(0);
        }

        setVideoTrack(videoTrack);
    }

    /**
     * Sets the {@code VideoTrack} to be rendered by this {@code WebRTCView}.
     *
     * @param videoTrack The {@code VideoTrack} to be rendered by this
     * {@code WebRTCView} or {@code null}.
     */
    private void setVideoTrack(VideoTrack videoTrack) {
        VideoTrack oldValue = this.videoTrack;

        if (oldValue != videoTrack) {
            if (oldValue != null) {
                // Detach the renderer from the old track before swapping.
                removeRendererFromVideoTrack();
            }

            this.videoTrack = videoTrack;

            if (videoTrack != null) {
                tryAddRendererToVideoTrack();
            }
        }
    }

    /**
     * Sets the z-order of this {@link WebRTCView} in the stacking space of all
     * {@code WebRTCView}s. For more details, refer to the documentation of the
     * {@code zOrder} property of the JavaScript counterpart of
     * {@code WebRTCView} i.e. {@code RTCView}.
     *
     * @param zOrder The z-order to set on this {@code WebRTCView}.
     */
    public void setZOrder(int zOrder) {
        SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();

        switch (zOrder) {
        case 0:
            surfaceViewRenderer.setZOrderMediaOverlay(false);
            break;
        case 1:
            surfaceViewRenderer.setZOrderMediaOverlay(true);
            break;
        case 2:
            surfaceViewRenderer.setZOrderOnTop(true);
            break;
        }
    }

    /**
     * Starts rendering {@link #videoTrack} if rendering is not in progress and
     * all preconditions for the start of rendering are met.
     */
    private void tryAddRendererToVideoTrack() {
        if (videoTrack != null
                && ViewCompat.isAttachedToWindow(this)) {
            EglBase.Context sharedContext = EglUtils.getRootEglBaseContext();

            if (sharedContext == null) {
                // If SurfaceViewRenderer#init() is invoked, it will throw a
                // RuntimeException which will very likely kill the application.
                Log.e(LOG_TAG, "Failed to render a VideoTrack!");
                return;
            }

            SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();

            surfaceViewRenderer.init(sharedContext, rendererEvents);
            videoTrack.addSink(surfaceViewRenderer);
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cluster.routing;

import com.carrotsearch.hppc.IntSet;
import com.carrotsearch.hppc.cursors.IntCursor;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.common.collect.UnmodifiableIterator;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.shard.ShardId;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;

import static com.google.common.collect.Lists.*;

/**
 * The {@link IndexRoutingTable} represents routing information for a single
 * index. The routing table maintains a list of all shards in the index. A
 * single shard in this context has one or more instances namely exactly one
 * {@link ShardRouting#primary() primary} and 1 or more replicas. In other
 * words, each instance of a shard is considered a replica while only one
 * replica per shard is a <tt>primary</tt> replica. The <tt>primary</tt> replica
 * can be seen as the "leader" of the shard acting as the primary entry point
 * for operations on a specific shard.
 * <p>
 * Note: The term replica is not directly
 * reflected in the routing table or in related classes, replicas are
 * represented as {@link ShardRouting}.
 * </p>
 */
public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> implements Iterable<IndexShardRoutingTable> {

    public static final IndexRoutingTable PROTO = builder("").build();

    private final String index;
    private final ShardShuffler shuffler;

    // note, we assume that when the index routing is created, ShardRoutings are created for all possible number of
    // shards with state set to UNASSIGNED
    private final ImmutableOpenIntMap<IndexShardRoutingTable> shards;

    private final ImmutableList<ShardRouting> allShards;
    private final ImmutableList<ShardRouting> allActiveShards;

    IndexRoutingTable(String index, ImmutableOpenIntMap<IndexShardRoutingTable> shards) {
        this.index = index;
        this.shuffler = new RotationShardShuffler(ThreadLocalRandom.current().nextInt());
        this.shards = shards;
        // Pre-compute the flat shard views so iteration-heavy callers do not
        // rebuild them on every access.
        ImmutableList.Builder<ShardRouting> allShards = ImmutableList.builder();
        ImmutableList.Builder<ShardRouting> allActiveShards = ImmutableList.builder();
        for (IntObjectCursor<IndexShardRoutingTable> cursor : shards) {
            for (ShardRouting shardRouting : cursor.value) {
                allShards.add(shardRouting);
                if (shardRouting.active()) {
                    allActiveShards.add(shardRouting);
                }
            }
        }
        this.allShards = allShards.build();
        this.allActiveShards = allActiveShards.build();
    }

    /**
     * Return the index id
     *
     * @return id of the index
     */
    public String index() {
        return this.index;
    }

    /**
     * Return the index id
     *
     * @return id of the index
     */
    public String getIndex() {
        return index();
    }

    /**
     * creates a new {@link IndexRoutingTable} with all shard versions normalized
     *
     * @return new {@link IndexRoutingTable}
     */
    public IndexRoutingTable normalizeVersions() {
        IndexRoutingTable.Builder builder = new Builder(this.index);
        for (IntObjectCursor<IndexShardRoutingTable> cursor : shards) {
            builder.addIndexShard(cursor.value.normalizeVersions());
        }
        return builder.build();
    }

    /**
     * Validates this routing table against the cluster {@code metaData},
     * recording any inconsistencies on {@code validation}.
     */
    public void validate(RoutingTableValidation validation, MetaData metaData) {
        if (!metaData.hasIndex(index())) {
            validation.addIndexFailure(index(), "Exists in routing does not exists in metadata");
            return;
        }
        IndexMetaData indexMetaData = metaData.index(index());
        for (String failure : validate(indexMetaData)) {
            validation.addIndexFailure(index, failure);
        }
    }

    /**
     * validate based on a meta data, returning failures found
     */
    public List<String> validate(IndexMetaData indexMetaData) {
        ArrayList<String> failures = new ArrayList<>();

        // check the number of shards
        if (indexMetaData.numberOfShards() != shards().size()) {
            Set<Integer> expected = Sets.newHashSet();
            for (int i = 0; i < indexMetaData.numberOfShards(); i++) {
                expected.add(i);
            }
            for (IndexShardRoutingTable indexShardRoutingTable : this) {
                expected.remove(indexShardRoutingTable.shardId().id());
            }
            failures.add("Wrong number of shards in routing table, missing: " + expected);
        }
        // check the replicas
        for (IndexShardRoutingTable indexShardRoutingTable : this) {
            int routingNumberOfReplicas = indexShardRoutingTable.size() - 1;
            if (routingNumberOfReplicas != indexMetaData.numberOfReplicas()) {
                failures.add("Shard [" + indexShardRoutingTable.shardId().id()
                        + "] routing table has wrong number of replicas, expected [" + indexMetaData.numberOfReplicas() + "], got [" + routingNumberOfReplicas + "]");
            }
            for (ShardRouting shardRouting : indexShardRoutingTable) {
                if (!shardRouting.index().equals(index())) {
                    failures.add("shard routing has an index [" + shardRouting.index() + "] that is different than the routing table");
                }
            }
        }
        return failures;
    }

    @Override
    public UnmodifiableIterator<IndexShardRoutingTable> iterator() {
        return shards.valuesIt();
    }

    /**
     * Calculates the number of nodes that hold one or more shards of this index
     * {@link IndexRoutingTable} excluding the nodes with the node ids give as
     * the <code>excludedNodes</code> parameter.
     *
     * @param excludedNodes id of nodes that will be excluded
     * @return number of distinct nodes this index has at least one shard allocated on
     */
    public int numberOfNodesShardsAreAllocatedOn(String... excludedNodes) {
        Set<String> nodes = Sets.newHashSet();
        for (IndexShardRoutingTable shardRoutingTable : this) {
            for (ShardRouting shardRouting : shardRoutingTable) {
                if (shardRouting.assignedToNode()) {
                    String currentNodeId = shardRouting.currentNodeId();
                    boolean excluded = false;
                    if (excludedNodes != null) {
                        for (String excludedNode : excludedNodes) {
                            if (currentNodeId.equals(excludedNode)) {
                                excluded = true;
                                break;
                            }
                        }
                    }
                    if (!excluded) {
                        nodes.add(currentNodeId);
                    }
                }
            }
        }
        return nodes.size();
    }

    public ImmutableOpenIntMap<IndexShardRoutingTable> shards() {
        return shards;
    }

    public ImmutableOpenIntMap<IndexShardRoutingTable> getShards() {
        return shards();
    }

    public IndexShardRoutingTable shard(int shardId) {
        return shards.get(shardId);
    }

    /**
     * Returns <code>true</code> if all shards are primary and active. Otherwise <code>false</code>.
     */
    public boolean allPrimaryShardsActive() {
        return primaryShardsActive() == shards().size();
    }

    /**
     * Calculates the number of primary shards in active state in routing table
     *
     * @return number of active primary shards
     */
    public int primaryShardsActive() {
        int counter = 0;
        for (IndexShardRoutingTable shardRoutingTable : this) {
            if (shardRoutingTable.primaryShard().active()) {
                counter++;
            }
        }
        return counter;
    }

    /**
     * Returns <code>true</code> if all primary shards are in
     * {@link ShardRoutingState#UNASSIGNED} state. Otherwise <code>false</code>.
     */
    public boolean allPrimaryShardsUnassigned() {
        return primaryShardsUnassigned() == shards.size();
    }

    /**
     * Calculates the number of primary shards in the routing table the are in
     * {@link ShardRoutingState#UNASSIGNED} state.
     */
    public int primaryShardsUnassigned() {
        int counter = 0;
        for (IndexShardRoutingTable shardRoutingTable : this) {
            if (shardRoutingTable.primaryShard().unassigned()) {
                counter++;
            }
        }
        return counter;
    }

    /**
     * Returns a {@link List} of shards that match one of the states listed in {@link ShardRoutingState states}
     *
     * @param state {@link ShardRoutingState} to retrieve
     * @return a {@link List} of shards that match one of the given {@link ShardRoutingState states}
     */
    public List<ShardRouting> shardsWithState(ShardRoutingState state) {
        List<ShardRouting> shards = newArrayList();
        for (IndexShardRoutingTable shardRoutingTable : this) {
            shards.addAll(shardRoutingTable.shardsWithState(state));
        }
        return shards;
    }

    /**
     * Returns an unordered iterator over all shards (including replicas).
     */
    public ShardsIterator randomAllShardsIt() {
        return new PlainShardsIterator(shuffler.shuffle(allShards));
    }

    /**
     * Returns an unordered iterator over all active shards (including replicas).
     */
    public ShardsIterator randomAllActiveShardsIt() {
        return new PlainShardsIterator(shuffler.shuffle(allActiveShards));
    }

    /**
     * A group shards iterator where each group ({@link ShardIterator}
     * is an iterator across shard replication group.
     */
    public GroupShardsIterator groupByShardsIt() {
        // use list here since we need to maintain identity across shards
        ArrayList<ShardIterator> set = new ArrayList<>(shards.size());
        for (IndexShardRoutingTable indexShard : this) {
            set.add(indexShard.shardsIt());
        }
        return new GroupShardsIterator(set);
    }

    /**
     * A groups shards iterator where each groups is a single {@link ShardRouting} and a group
     * is created for each shard routing.
     * <p/>
     * <p>This basically means that components that use the {@link GroupShardsIterator} will iterate
     * over *all* the shards (all the replicas) within the index.</p>
     */
    public GroupShardsIterator groupByAllIt() {
        // use list here since we need to maintain identity across shards
        ArrayList<ShardIterator> set = new ArrayList<>();
        for (IndexShardRoutingTable indexShard : this) {
            for (ShardRouting shardRouting : indexShard) {
                set.add(shardRouting.shardsIt());
            }
        }
        return new GroupShardsIterator(set);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        IndexRoutingTable that = (IndexRoutingTable) o;

        if (!index.equals(that.index)) return false;
        if (!shards.equals(that.shards)) return false;

        return true;
    }

    @Override
    public int hashCode() {
        int result = index.hashCode();
        result = 31 * result + shards.hashCode();
        return result;
    }

    // Intentionally a no-op hook; Builder#build() invokes it so subclasses or
    // future revisions can plug in structural validation.
    public void validate() throws RoutingValidationException {
    }

    @Override
    public IndexRoutingTable readFrom(StreamInput in) throws IOException {
        String index = in.readString();
        Builder builder = new Builder(index);

        int size = in.readVInt();
        for (int i = 0; i < size; i++) {
            builder.addIndexShard(IndexShardRoutingTable.Builder.readFromThin(in, index));
        }

        return builder.build();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(index);
        out.writeVInt(shards.size());
        for (IndexShardRoutingTable indexShard : this) {
            IndexShardRoutingTable.Builder.writeToThin(indexShard, out);
        }
    }

    public static Builder builder(String index) {
        return new Builder(index);
    }

    public static class Builder {

        private final String index;
        private final ImmutableOpenIntMap.Builder<IndexShardRoutingTable> shards = ImmutableOpenIntMap.builder();

        public Builder(String index) {
            this.index = index;
        }

        /**
         * Reads an {@link IndexRoutingTable} from an {@link StreamInput}
         *
         * @param in {@link StreamInput} to read the {@link IndexRoutingTable} from
         * @return {@link IndexRoutingTable} read
         * @throws IOException if something happens during read
         */
        public static IndexRoutingTable readFrom(StreamInput in) throws IOException {
            return PROTO.readFrom(in);
        }

        /**
         * Initializes a new empty index, as if it was created from an API.
         */
        public Builder initializeAsNew(IndexMetaData indexMetaData) {
            return initializeEmpty(indexMetaData, true);
        }

        /**
         * Initializes a new empty index, as if it was created from an API.
         */
        public Builder initializeAsRecovery(IndexMetaData indexMetaData) {
            return initializeEmpty(indexMetaData, false);
        }

        /**
         * Initializes a new empty index, to be restored from a snapshot
         */
        public Builder initializeAsNewRestore(IndexMetaData indexMetaData, RestoreSource restoreSource, IntSet ignoreShards) {
            return initializeAsRestore(indexMetaData, restoreSource, ignoreShards, true);
        }

        /**
         * Initializes an existing index, to be restored from a snapshot
         */
        public Builder initializeAsRestore(IndexMetaData indexMetaData, RestoreSource restoreSource) {
            return initializeAsRestore(indexMetaData, restoreSource, null, false);
        }

        /**
         * Initializes an index, to be restored from snapshot
         */
        private Builder initializeAsRestore(IndexMetaData indexMetaData, RestoreSource restoreSource, IntSet ignoreShards, boolean asNew) {
            if (!shards.isEmpty()) {
                throw new IllegalStateException("trying to initialize an index with fresh shards, but already has shards created");
            }
            for (int shardId = 0; shardId < indexMetaData.numberOfShards(); shardId++) {
                // "primary allocated post API" is the inverse of asNew: a
                // brand-new index has never had its primaries allocated.
                // (was: asNew ? false : true)
                IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(indexMetaData.index(), shardId), !asNew);
                for (int i = 0; i <= indexMetaData.numberOfReplicas(); i++) {
                    // NOTE(review): ignoreShards is only dereferenced when
                    // asNew is true; the only asNew caller
                    // (initializeAsNewRestore) supplies a non-null set.
                    if (asNew && ignoreShards.contains(shardId)) {
                        // This shards wasn't completely snapshotted - restore it as new shard
                        indexShardRoutingBuilder.addShard(new ImmutableShardRouting(index, shardId, null, i == 0, ShardRoutingState.UNASSIGNED, 0));
                    } else {
                        indexShardRoutingBuilder.addShard(new ImmutableShardRouting(index, shardId, null, null, i == 0 ? restoreSource : null, i == 0, ShardRoutingState.UNASSIGNED, 0));
                    }
                }
                shards.put(shardId, indexShardRoutingBuilder.build());
            }
            return this;
        }

        /**
         * Initializes a new empty index, with an option to control if its from an API or not.
         */
        private Builder initializeEmpty(IndexMetaData indexMetaData, boolean asNew) {
            if (!shards.isEmpty()) {
                throw new IllegalStateException("trying to initialize an index with fresh shards, but already has shards created");
            }
            for (int shardId = 0; shardId < indexMetaData.numberOfShards(); shardId++) {
                // (was: asNew ? false : true) - see initializeAsRestore
                IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(new ShardId(indexMetaData.index(), shardId), !asNew);
                for (int i = 0; i <= indexMetaData.numberOfReplicas(); i++) {
                    indexShardRoutingBuilder.addShard(new ImmutableShardRouting(index, shardId, null, i == 0, ShardRoutingState.UNASSIGNED, 0));
                }
                shards.put(shardId, indexShardRoutingBuilder.build());
            }
            return this;
        }

        /**
         * Adds one unassigned replica routing to every shard of the index.
         */
        public Builder addReplica() {
            for (IntCursor cursor : shards.keys()) {
                int shardId = cursor.value;
                // version 0, will get updated when reroute will happen
                ImmutableShardRouting shard = new ImmutableShardRouting(index, shardId, null, false, ShardRoutingState.UNASSIGNED, 0);
                shards.put(shardId,
                        new IndexShardRoutingTable.Builder(shards.get(shard.id())).addShard(shard).build()
                );
            }
            return this;
        }

        /**
         * Removes one replica routing from every shard, preferring unassigned
         * replicas over assigned ones.
         */
        public Builder removeReplica() {
            for (IntCursor cursor : shards.keys()) {
                int shardId = cursor.value;
                IndexShardRoutingTable indexShard = shards.get(shardId);
                if (indexShard.replicaShards().isEmpty()) {
                    // nothing to do here!
                    return this;
                }
                // re-add all the current ones
                IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(indexShard.shardId(), indexShard.primaryAllocatedPostApi());
                for (ShardRouting shardRouting : indexShard) {
                    builder.addShard(new ImmutableShardRouting(shardRouting));
                }
                // first check if there is one that is not assigned to a node, and remove it
                boolean removed = false;
                for (ShardRouting shardRouting : indexShard) {
                    if (!shardRouting.primary() && !shardRouting.assignedToNode()) {
                        builder.removeShard(shardRouting);
                        removed = true;
                        break;
                    }
                }
                if (!removed) {
                    for (ShardRouting shardRouting : indexShard) {
                        if (!shardRouting.primary()) {
                            builder.removeShard(shardRouting);
                            removed = true;
                            break;
                        }
                    }
                }
                shards.put(shardId, builder.build());
            }
            return this;
        }

        public Builder addIndexShard(IndexShardRoutingTable indexShard) {
            shards.put(indexShard.shardId().id(), indexShard);
            return this;
        }

        /**
         * Clears the post allocation flag for the specified shard
         */
        public Builder clearPostAllocationFlag(ShardId shardId) {
            assert this.index.equals(shardId.index().name());
            IndexShardRoutingTable indexShard = shards.get(shardId.id());
            shards.put(indexShard.shardId().id(), new IndexShardRoutingTable(indexShard.shardId(), indexShard.shards(), false));
            return this;
        }

        /**
         * Adds a new shard routing (makes a copy of it), with reference data used from the index shard routing table
         * if it needs to be created.
         */
        public Builder addShard(IndexShardRoutingTable refData, ShardRouting shard) {
            IndexShardRoutingTable indexShard = shards.get(shard.id());
            if (indexShard == null) {
                indexShard = new IndexShardRoutingTable.Builder(refData.shardId(), refData.primaryAllocatedPostApi()).addShard(new ImmutableShardRouting(shard)).build();
            } else {
                indexShard = new IndexShardRoutingTable.Builder(indexShard).addShard(new ImmutableShardRouting(shard)).build();
            }
            shards.put(indexShard.shardId().id(), indexShard);
            return this;
        }

        public IndexRoutingTable build() throws RoutingValidationException {
            IndexRoutingTable indexRoutingTable = new IndexRoutingTable(index, shards.build());
            indexRoutingTable.validate();
            return indexRoutingTable;
        }
    }

    /**
     * Renders a human-readable, shard-id-ordered dump of this routing table
     * for diagnostics.
     */
    public String prettyPrint() {
        StringBuilder sb = new StringBuilder("-- index [" + index + "]\n");

        List<IndexShardRoutingTable> ordered = new ArrayList<>();
        for (IndexShardRoutingTable indexShard : this) {
            ordered.add(indexShard);
        }

        CollectionUtil.timSort(ordered, new Comparator<IndexShardRoutingTable>() {
            @Override
            public int compare(IndexShardRoutingTable o1, IndexShardRoutingTable o2) {
                int v = o1.shardId().index().name().compareTo(
                        o2.shardId().index().name());
                if (v == 0) {
                    v = Integer.compare(o1.shardId().id(),
                                        o2.shardId().id());
                }
                return v;
            }
        });

        for (IndexShardRoutingTable indexShard : ordered) {
            sb.append("----shard_id [").append(indexShard.shardId().index().name()).append("][").append(indexShard.shardId().id()).append("]\n");
            for (ShardRouting shard : indexShard) {
                sb.append("--------").append(shard.shortSummary()).append("\n");
            }
        }
        return sb.toString();
    }
}
// File generated by OpenXava: Wed Sep 11 11:56:50 CEST 2013
// Archivo generado por OpenXava: Wed Sep 11 11:56:50 CEST 2013
// WARNING: NO EDIT
// OJO: NO EDITAR
// Component: Service Entity/Entidad
// NOTE(review): generated code — do not hand-modify logic; regenerate from the
// OpenXava component definition instead. Comments below are annotation only.

package org.openxava.test.model;

import java.util.*;
import java.math.*;
import java.rmi.RemoteException;

import org.openxava.component.MetaComponent;
import org.openxava.model.meta.MetaModel;
import org.openxava.util.*;

/**
 * POJO entity generated for the OpenXava test component "Service".
 *
 * Properties whose column representation differs from their Java type
 * (family, description, and the embedded Detail aggregate fields) go through
 * lazily-resolved {@code IConverter}s obtained from the component mapping.
 *
 * @author MCarmen Gimeno
 */
public class Service implements java.io.Serializable, org.openxava.test.model.IService {

    // Constructor
    public Service() {
        initMembers();
    }

    // Default-initializes all mapped members (runs converters via the setters).
    private void initMembers() {
        setNumber(0);
        setDescription(null);
        setFamily(0);
    }

    // Properties/Propiedades

    // Converter for 'family', cached per-class after first lookup.
    private static org.openxava.converters.IConverter familyConverter;

    private org.openxava.converters.IConverter getFamilyConverter() {
        if (familyConverter == null) {
            try {
                familyConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("family");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "family"));
            }
        }
        return familyConverter;
    }

    // DB-side representation of 'family'; converter maps it to/from int.
    private java.lang.Integer family;

    private java.lang.Integer get_Family() {
        return family;
    }

    private void set_Family(java.lang.Integer newFamily) {
        this.family = newFamily;
    }

    /**
     * Family code of this service (converted from its DB representation).
     */
    public int getFamily() {
        try {
            return ((Integer) getFamilyConverter().toJava(get_Family())).intValue();
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Family", "Service", "int"));
        }
    }

    /**
     * Sets the family code (converted to its DB representation).
     */
    public void setFamily(int newFamily) {
        try {
            set_Family((java.lang.Integer) getFamilyConverter().toDB(new Integer(newFamily)));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Family", "Service", "int"));
        }
    }

    // Converter for 'description', cached per-class after first lookup.
    private static org.openxava.converters.IConverter descriptionConverter;

    private org.openxava.converters.IConverter getDescriptionConverter() {
        if (descriptionConverter == null) {
            try {
                descriptionConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("description");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "description"));
            }
        }
        return descriptionConverter;
    }

    private java.lang.String description;

    private java.lang.String get_Description() {
        return description;
    }

    private void set_Description(java.lang.String newDescription) {
        this.description = newDescription;
    }

    /**
     * Description of this service (converted from its DB representation).
     */
    public String getDescription() {
        try {
            return (String) getDescriptionConverter().toJava(get_Description());
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Description", "Service", "String"));
        }
    }

    /**
     * Sets the description (converted to its DB representation).
     */
    public void setDescription(String newDescription) {
        try {
            set_Description((java.lang.String) getDescriptionConverter().toDB(newDescription));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Description", "Service", "String"));
        }
    }

    // Plain (unconverted) key property.
    private int number;

    public int getNumber() {
        return number;
    }

    public void setNumber(int newNumber) {
        this.number = newNumber;
    }

    // References/Referencias

    // Detail : Aggregate/Agregado
    // The Detail aggregate is flattened into detail_* members; get/setDetail
    // materialize/scatter a transient Detail object on each call.

    public org.openxava.test.model.Detail getDetail() {
        org.openxava.test.model.Detail r = new org.openxava.test.model.Detail();
        r.setFree(isDetail_free());
        r.setType(getDetail_type());
        r.setSubfamily(getDetail_subfamily());
        r.setTypeRef(getDetail_typeRef());
        return r;
    }

    public void setDetail(org.openxava.test.model.Detail newDetail) throws java.rmi.RemoteException {
        // null means "reset to an empty aggregate", not "clear the fields to null"
        if (newDetail == null) newDetail = new org.openxava.test.model.Detail();
        setDetail_free(newDetail.getFree());
        setDetail_type(newDetail.getType());
        setDetail_subfamily(newDetail.getSubfamily());
        setDetail_typeRef(newDetail.getTypeRef());
    }

    // For acceding to properties of this from calculators inside aggregates
    private Service getDetail_service() {
        return this;
    }

    // Converter for the aggregate's 'free' flag.
    private static org.openxava.converters.IConverter detail_freeConverter;

    private org.openxava.converters.IConverter getDetail_freeConverter() {
        if (detail_freeConverter == null) {
            try {
                detail_freeConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("detail_free");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "detail_free"));
            }
        }
        return detail_freeConverter;
    }

    private java.lang.Boolean detail_free;

    private java.lang.Boolean get_Detail_free() {
        return detail_free;
    }

    private void set_Detail_free(java.lang.Boolean newDetail_free) {
        this.detail_free = newDetail_free;
    }

    /**
     * 'free' flag of the Detail aggregate (converted from DB representation).
     */
    private boolean isDetail_free() {
        try {
            return ((Boolean) getDetail_freeConverter().toJava(get_Detail_free())).booleanValue();
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Detail_free", "Detail", "boolean"));
        }
    }

    /**
     * Sets the aggregate's 'free' flag (converted to DB representation).
     */
    private void setDetail_free(boolean newDetail_free) {
        try {
            set_Detail_free((java.lang.Boolean) getDetail_freeConverter().toDB(new Boolean(newDetail_free)));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Detail_free", "Detail", "boolean"));
        }
    }

    // Converter for the aggregate's 'type' code.
    private static org.openxava.converters.IConverter detail_typeConverter;

    private org.openxava.converters.IConverter getDetail_typeConverter() {
        if (detail_typeConverter == null) {
            try {
                detail_typeConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("detail_type");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "detail_type"));
            }
        }
        return detail_typeConverter;
    }

    private java.lang.Integer detail_type;

    private java.lang.Integer get_Detail_type() {
        return detail_type;
    }

    private void set_Detail_type(java.lang.Integer newDetail_type) {
        this.detail_type = newDetail_type;
    }

    /**
     * 'type' code of the Detail aggregate (converted from DB representation).
     */
    private int getDetail_type() {
        try {
            return ((Integer) getDetail_typeConverter().toJava(get_Detail_type())).intValue();
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Detail_type", "Detail", "int"));
        }
    }

    /**
     * Sets the aggregate's 'type' code (converted to DB representation).
     */
    private void setDetail_type(int newDetail_type) {
        try {
            set_Detail_type((java.lang.Integer) getDetail_typeConverter().toDB(new Integer(newDetail_type)));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Detail_type", "Detail", "int"));
        }
    }

    // Converter for the aggregate's 'subfamily' code.
    private static org.openxava.converters.IConverter detail_subfamilyConverter;

    private org.openxava.converters.IConverter getDetail_subfamilyConverter() {
        if (detail_subfamilyConverter == null) {
            try {
                detail_subfamilyConverter = (org.openxava.converters.IConverter) getMetaModel().getMapping().getConverter("detail_subfamily");
            } catch (Exception ex) {
                ex.printStackTrace();
                throw new RuntimeException(XavaResources.getString("generator.create_converter_error", "detail_subfamily"));
            }
        }
        return detail_subfamilyConverter;
    }

    private java.lang.Integer detail_subfamily;

    private java.lang.Integer get_Detail_subfamily() {
        return detail_subfamily;
    }

    private void set_Detail_subfamily(java.lang.Integer newDetail_subfamily) {
        this.detail_subfamily = newDetail_subfamily;
    }

    /**
     * 'subfamily' code of the Detail aggregate (converted from DB representation).
     */
    private int getDetail_subfamily() {
        try {
            return ((Integer) getDetail_subfamilyConverter().toJava(get_Detail_subfamily())).intValue();
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Detail_subfamily", "Detail", "int"));
        }
    }

    /**
     * Sets the aggregate's 'subfamily' code (converted to DB representation).
     */
    private void setDetail_subfamily(int newDetail_subfamily) {
        try {
            set_Detail_subfamily((java.lang.Integer) getDetail_subfamilyConverter().toDB(new Integer(newDetail_subfamily)));
        } catch (org.openxava.converters.ConversionException ex) {
            ex.printStackTrace();
            throw new RuntimeException(XavaResources.getString("generator.conversion_error", "Detail_subfamily", "Detail", "int"));
        }
    }

    private org.openxava.test.model.IServiceType detail_typeRef;

    public org.openxava.test.model.IServiceType getDetail_typeRef() {
        if (detail_typeRef != null) {
            // Because not-found='ignore' annul lazy initialization, we simulate it
            try {
                detail_typeRef.toString();
            } catch (Exception ex) {
                // reference points at a missing row: behave as if it were null
                return null;
            }
        }
        return detail_typeRef;
    }

    public void setDetail_typeRef(org.openxava.test.model.IServiceType newServiceType) throws RemoteException {
        // only the POJO implementation is accepted (no EJB proxies)
        if (newServiceType != null && !(newServiceType instanceof org.openxava.test.model.ServiceType)) {
            throw new IllegalArgumentException(XavaResources.getString("ejb_to_pojo_illegal"));
        }
        this.detail_typeRef = newServiceType;
    }

    private org.openxava.test.model.IServiceInvoice invoice;

    public org.openxava.test.model.IServiceInvoice getInvoice() {
        if (invoice != null) {
            // Because not-found='ignore' annul lazy initialization, we simulate it
            try {
                invoice.toString();
            } catch (Exception ex) {
                // reference points at a missing row: behave as if it were null
                return null;
            }
        }
        return invoice;
    }

    public void setInvoice(org.openxava.test.model.IServiceInvoice newServiceInvoice) {
        // only the POJO implementation is accepted (no EJB proxies)
        if (newServiceInvoice != null && !(newServiceInvoice instanceof org.openxava.test.model.ServiceInvoice)) {
            throw new IllegalArgumentException(XavaResources.getString("ejb_to_pojo_illegal"));
        }
        this.invoice = newServiceInvoice;
    }

    // Colecciones/Collections

    // Raw Collection by design of the generator (pre-generics mapping).
    private java.util.Collection additionalDetails;

    public java.util.Collection getAdditionalDetails() {
        return additionalDetails;
    }

    public void setAdditionalDetails(java.util.Collection additionalDetails) {
        this.additionalDetails = additionalDetails;
    }

    // Methods/Metodos

    // User defined finders/Buscadores definidos por el usuario

    /**
     * Finds the single Service with the given number, using JPA or Hibernate
     * depending on the configured persistence layer.
     *
     * @throws javax.ejb.ObjectNotFoundException if no such Service exists
     */
    public static Service findByNumber(int number) throws javax.ejb.ObjectNotFoundException {
        if (XavaPreferences.getInstance().isJPAPersistence()) {
            javax.persistence.Query query = org.openxava.jpa.XPersistence.getManager().createQuery("from Service as o where o.number = :arg0");
            query.setParameter("arg0", new Integer(number));
            try {
                return (Service) query.getSingleResult();
            } catch (Exception ex) {
                // In this way in order to work with Java pre 5
                if (ex.getClass().getName().equals("javax.persistence.NoResultException")) {
                    throw new javax.ejb.ObjectNotFoundException(XavaResources.getString("object_not_found", "Service"));
                } else {
                    ex.printStackTrace();
                    throw new RuntimeException(ex.getMessage());
                }
            }
        } else {
            org.hibernate.Query query = org.openxava.hibernate.XHibernate.getSession().createQuery("from Service as o where o.number = :arg0");
            query.setParameter("arg0", new Integer(number));
            Service r = (Service) query.uniqueResult();
            if (r == null) {
                throw new javax.ejb.ObjectNotFoundException(XavaResources.getString("object_not_found", "Service"));
            }
            return r;
        }
    }

    // Component metadata, cached per-class after first lookup.
    private static MetaModel metaModel;

    public MetaModel getMetaModel() throws XavaException {
        if (metaModel == null) {
            metaModel = MetaComponent.get("Service").getMetaEntity();
        }
        return metaModel;
    }

    /**
     * Metadata-driven rendering; falls back to Object.toString on metadata failure.
     */
    public String toString() {
        try {
            return getMetaModel().toString(this);
        } catch (XavaException ex) {
            System.err.println(XavaResources.getString("toString_warning", "Service"));
            return super.toString();
        }
    }

    // NOTE(review): equality delegates to toString(), so any object whose
    // toString matches compares equal — generator convention, kept as-is.
    public boolean equals(Object other) {
        if (other == null) return false;
        return toString().equals(other.toString());
    }

    public int hashCode() {
        return toString().hashCode();
    }
}
package me.onebone.actaeon.route;

import cn.nukkit.block.Block;
import cn.nukkit.block.BlockAir;
import cn.nukkit.level.Position;
import cn.nukkit.math.Vector3;
import me.onebone.actaeon.entity.Climbable;
import me.onebone.actaeon.entity.Fallable;
import me.onebone.actaeon.entity.MovingEntity;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * A*-based route finder for moving entities. Walkability of a column is probed
 * with {@link #isWalkableAt(Vector3)}, which returns a height offset or the
 * sentinel -256 for "not walkable". Search effort is capped at 500 expansions.
 */
public class AdvancedRouteFinder extends RouteFinder{
	private boolean succeed = false, searching = false;

	// Destination snapped onto the highest standable block column (set in resetNodes)
	private Vector3 realDestination = null;

	// A* open set; closed membership is tracked via Node.closed
	private Set<Node> open = new HashSet<>();
	private Grid grid = new Grid();

	public AdvancedRouteFinder(MovingEntity entity){
		super(entity);
	}

	/**
	 * Runs one bounded A* search from start to destination.
	 * On success the path nodes (excluding the start) are pushed via addNode.
	 *
	 * @return true if a route to the destination was found
	 */
	@Override
	public boolean search(){
		if(this.getStart() == null || this.getDestination() == null){
			return this.succeed = this.searching = false;
		}
		this.resetNodes();

		// f = g = 0 marks the start as immediately expandable
		Node start = new Node(this.getStart().floor());
		start.f = start.g = 0;
		open.add(start);
		this.grid.putNode(start.getVector3(), start);

		Node endNode = new Node(this.realDestination.floor());
		this.grid.putNode(endNode.getVector3(), endNode);

		this.succeed = false;
		this.searching = true;

		// hard cap on expansions to keep a single search cheap
		int limit = 500;
		while(!open.isEmpty() && limit-- > 0){
			// pick the open node with the lowest f (f == -1 means "not yet scored")
			Node node = null;
			double f = Double.MAX_VALUE;
			for(Node cur : this.open){
				if(cur.f < f && cur.f != -1){
					node = cur;
					f = cur.f;
				}
			}

			if(endNode.equals(node)){
				// goal reached: walk the parent chain back to the start
				List<Node> nodes = new ArrayList<>();
				nodes.add(node);
				while((node = node.getParent()) != null){
					// NOTE(review): Nukkit's Vector3.add(x, y, z) returns a NEW vector;
					// unless Node overrides add() to mutate in place, this center-of-block
					// offset is computed and discarded — confirm Node.add semantics.
					node.add(0.5, 0, 0.5);
					//level.addParticle(new cn.nukkit.level.particle.CriticalParticle(node.getVector3(), 3));
					nodes.add(node);
				};
				Collections.reverse(nodes);
				// drop the start node; the entity is already there
				nodes.remove(0);
				nodes.forEach(this::addNode);

				this.succeed = true;
				this.searching = false;
				return true;
			}

			node.closed = true;
			open.remove(node);

			for(Node neighbor : this.getNeighbors(node)){
				if(neighbor.closed) continue;

				double tentative_gScore = node.g + neighbor.getVector3().distance(node.getVector3());
				// newly discovered nodes always get scored; known nodes only improve
				if(!open.contains(neighbor)) open.add(neighbor);
				else if(neighbor.g != -1 && tentative_gScore >= neighbor.g) continue;

				neighbor.setParent(node);
				neighbor.g = tentative_gScore;
				neighbor.f = neighbor.g + this.heuristic(neighbor.getVector3(), endNode.getVector3());
			}
		}

		// open set exhausted or limit hit without reaching the goal
		return this.succeed = this.searching = false;
	}

	/**
	 * Returns the up-to-8 walkable neighbors of a node. Diagonals are only
	 * considered when both adjacent cardinal directions are walkable
	 * (prevents corner-cutting through solid blocks).
	 */
	public Set<Node> getNeighbors(Node node){
		Set<Node> neighbors = new HashSet<>();

		Vector3 vec = node.getVector3();
		boolean s1, s2, s3, s4;
		double y;
		if(s1 = (y = isWalkableAt(vec.add(1))) != -256){       // +x
			neighbors.add(this.grid.getNode(vec.add(1, y)));
		}

		if(s2 = (y = isWalkableAt(vec.add(-1))) != -256){      // -x
			neighbors.add(this.grid.getNode(vec.add(-1, y)));
		}

		if(s3 = (y = isWalkableAt(vec.add(0, 0, 1))) != -256){ // +z
			neighbors.add(this.grid.getNode(vec.add(0, y, 1)));
		}

		if(s4 = (y = isWalkableAt(vec.add(0, 0, -1))) != -256){// -z
			neighbors.add(this.grid.getNode(vec.add(0, y, -1)));
		}

		if(s1 && s3 && (y = isWalkableAt(vec.add(1, 0, 1))) != -256){
			neighbors.add(this.grid.getNode(vec.add(1, y, 1)));
		}

		if(s1 && s4 && (y = isWalkableAt(vec.add(1, 0, -1))) != -256){
			neighbors.add(this.grid.getNode(vec.add(1, y, -1)));
		}

		if(s2 && s3 && (y = isWalkableAt(vec.add(-1, 0, 1))) != -256){
			neighbors.add(this.grid.getNode(vec.add(-1, y, 1)));
		}

		if(s2 && s4 && (y = isWalkableAt(vec.add(-1, 0, -1))) != -256){
			neighbors.add(this.grid.getNode(vec.add(-1, y, -1)));
		}

		return neighbors;
	}

	/**
	 * Scans downward from dy to y=0 and returns the first block that is either
	 * unwalkable (e.g. lava) or solid; null if the column is empty all the way down.
	 */
	private Block getHighestUnder(double x, double dy, double z){
		for(int y=(int)dy;y >= 0; y--){
			Block block = level.getBlock(new Vector3(x, y, z));
			if(!canWalkOn(block)) return block;      // hazardous block ends the scan too
			if(!block.canPassThrough()) return block;
		}
		return null;
	}

	/**
	 * Returns the height offset needed to stand at the given column,
	 * or -256 (sentinel) if the entity cannot walk there. Drop is limited to
	 * 4 blocks unless the entity is Fallable; climb to 1 unless Climbable.
	 */
	private double isWalkableAt(Vector3 vec){
		Block block = this.getHighestUnder(vec.x, vec.y + 2, vec.z);
		if(block == null) return -256;

		double diff = (block.y - vec.y) + 1;
		if((this.entity instanceof Fallable || -4 < diff)
				&& (this.entity instanceof Climbable || diff <= 1)
				&& canWalkOn(block)){
			return diff;
		}
		return -256;
	}

	// Only lava (flowing or still) is considered unwalkable terrain.
	private boolean canWalkOn(Block block){
		return !(block.getId() == Block.LAVA || block.getId() == Block.STILL_LAVA);
	}

	/**
	 * Octile-style distance: diagonal steps cost ~1.414 (0.414 * min + max),
	 * plus the full vertical difference.
	 */
	private double heuristic(Vector3 one, Vector3 two){
		double dx = Math.abs(one.x - two.x);
		double dy = Math.abs(one.y - two.y);
		double dz = Math.abs(one.z - two.z);

		double max = Math.max(dx, dz);
		double min = Math.min(dx, dz);
		return 0.414 * min + max + dy;
	}

	/**
	 * Clears cached nodes and recomputes realDestination by dropping the
	 * requested destination onto the highest standable block (air at y=0 if none).
	 */
	@Override
	public void resetNodes(){
		super.resetNodes();
		this.grid.clear();

		Block block = this.getHighestUnder(this.destination.x, this.destination.y, this.destination.z);
		if(block == null){
			block = new BlockAir();
			block.position(new Position(this.destination.x, 0, this.destination.z));
		}
		this.realDestination = new Vector3(this.destination.x, block.y + 1, this.destination.z).floor();
	}

	@Override
	public boolean research(){
		this.resetNodes();
		return this.search();
	}

	@Override
	public boolean isSearching(){
		return this.searching;
	}

	@Override
	public boolean isSuccess(){
		return this.succeed;
	}

	/**
	 * Sparse x→y→z node cache so each visited position maps to exactly one
	 * Node instance (needed for identity in the open/closed bookkeeping).
	 */
	private class Grid{
		private Map<Double, Map<Double, Map<Double, Node>>> grid = new HashMap<>();

		public void clear(){
			grid.clear();
		}

		public void putNode(Vector3 vec, Node node){
			vec = vec.floor();
			if(!grid.containsKey(vec.x)){
				grid.put(vec.x, new HashMap<>());
			}
			if(!grid.get(vec.x).containsKey(vec.y)){
				grid.get(vec.x).put(vec.y, new HashMap<>());
			}
			grid.get(vec.x).get(vec.y).put(vec.z, node);
		}

		// Returns the cached node for the (floored) position, creating and
		// caching a fresh one on first access.
		public Node getNode(Vector3 vec){
			vec = vec.floor();
			if(!grid.containsKey(vec.x) || !grid.get(vec.x).containsKey(vec.y) || !grid.get(vec.x).get(vec.y).containsKey(vec.z)){
				Node node = new Node(vec.x, vec.y, vec.z);
				this.putNode(node.getVector3(), node);
				return node;
			}
			return grid.get(vec.x).get(vec.y).get(vec.z);
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.jmeter.protocol.http.util;

import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// @see TestHTTPUtils for unit tests

/**
 * General purpose conversion utilities related to HTTP/HTML
 */
public class ConversionUtils {

    private static final String CHARSET_EQ = "charset="; // $NON-NLS-1$
    private static final int CHARSET_EQ_LEN = CHARSET_EQ.length();

    private static final String SLASHDOTDOT = "/.."; // $NON-NLS-1$
    private static final String DOTDOT = ".."; // $NON-NLS-1$
    private static final String SLASH = "/"; // $NON-NLS-1$
    private static final String COLONSLASHSLASH = "://"; // $NON-NLS-1$

    // Matches one or more leading "../" segments after the initial "/",
    // e.g. "/../" or "/../../". Compiled once: Pattern is thread-safe and
    // recompiling per call in makeRelativeURL() was wasted work.
    private static final Pattern EXTRA_PARENT_DIRS = Pattern.compile("^/((?:\\.\\./)+)"); // $NON-NLS-1$

    /**
     * Extract the encoding (charset) from the Content-Type,
     * e.g. "text/html; charset=utf-8".
     *
     * @param contentType the Content-Type header value; may be null
     * @return the charset encoding - or null, if none was found or the charset is not supported
     * @throws IllegalCharsetNameException if the found charset name is syntactically illegal
     */
    public static String getEncodingFromContentType(String contentType){
        if (contentType == null) {
            return null;
        }
        // charset= may appear in any case; search case-insensitively
        int charSetStartPos = contentType.toLowerCase(java.util.Locale.ENGLISH).indexOf(CHARSET_EQ);
        if (charSetStartPos < 0) {
            return null;
        }
        // substring() never returns null, so no null check is needed here
        String charSet = contentType.substring(charSetStartPos + CHARSET_EQ_LEN);
        // Remove quotes from charset name, see bug 55852
        charSet = charSet.replace("\"", "").replace("'", "");
        charSet = charSet.trim();
        if (charSet.length() == 0) {
            return null;
        }
        // See Bug 44784: drop any parameters following the charset value
        int semi = charSet.indexOf(';');
        if (semi == 0){ // value starts with ';' => effectively empty
            return null;
        }
        if (semi != -1) {
            charSet = charSet.substring(0, semi);
        }
        if (!Charset.isSupported(charSet)){
            return null;
        }
        return charSet;
    }

    /**
     * Generate an absolute URL from a possibly relative location,
     * allowing for extraneous leading "../" segments.
     * The Java {@link URL#URL(URL, String)} constructor does not remove these.
     *
     * @param baseURL the base URL which is used to resolve missing protocol/host in the location
     * @param location the location, possibly with extraneous leading "../"
     * @return URL with extraneous ../ removed
     * @throws MalformedURLException if the resulting URL is invalid
     * @see <a href="https://issues.apache.org/bugzilla/show_bug.cgi?id=46690">Bug 46690 - handling of 302 redirects with invalid relative paths</a>
     */
    public static URL makeRelativeURL(URL baseURL, String location) throws MalformedURLException{
        URL initial = new URL(baseURL, location);

        // skip expensive processing if it cannot apply
        if (!location.startsWith("../")){// $NON-NLS-1$
            return initial;
        }
        String path = initial.getPath();
        Matcher m = EXTRA_PARENT_DIRS.matcher(path);
        if (m.lookingAt()){
            String prefix = m.group(1); // get ../ or ../../ etc.
            // only strip the "../" run if the location really started with it
            if (location.startsWith(prefix)){
                return new URL(baseURL, location.substring(prefix.length()));
            }
        }
        return initial;
    }

    /**
     * Decodes the given URL and rebuilds it through {@link URI} so that any
     * illegal characters are (re-)escaped.
     *
     * @param url String Url to escape
     * @return String cleaned up url
     * @throws Exception if the URL cannot be decoded or rebuilt
     */
    public static String escapeIllegalURLCharacters(String url) throws Exception{
        String decodeUrl = URLDecoder.decode(url,"UTF-8");
        URL urlString = new URL(decodeUrl);
        // The multi-argument URI constructor escapes illegal characters
        URI uri = new URI(urlString.getProtocol(), urlString.getUserInfo(),
                urlString.getHost(), urlString.getPort(), urlString.getPath(),
                urlString.getQuery(), urlString.getRef());
        return uri.toString();
    }

    /**
     * Checks a URL and encodes it if necessary,
     * i.e. if it is not currently correctly encoded.
     * Warning: it may not work on all unencoded URLs.
     *
     * @param url non-encoded URL
     * @return URI which has been encoded as necessary
     * @throws URISyntaxException if the URL cannot be converted either way
     */
    public static final URI sanitizeUrl(URL url) throws URISyntaxException {
        try {
            return url.toURI(); // Assume the URL is already encoded
        } catch (URISyntaxException e) {
            // it's not, so encode it via the escaping URI constructor
            return new URI(
                    url.getProtocol(),
                    url.getUserInfo(),
                    url.getHost(),
                    url.getPort(),
                    url.getPath(),
                    url.getQuery(),
                    url.getRef()); // anchor or fragment
        }
    }

    /**
     * collapses absolute or relative URLs containing '/..' converting
     * http://host/path1/../path2 to http://host/path2 or /one/two/../three to
     * /one/three
     *
     * @param url the URL (or path) to collapse; may be null
     * @return collapsed URL
     * @see <a href="https://issues.apache.org/bugzilla/show_bug.cgi?id=49083">Bug 49083 - collapse /.. in redirect URLs</a>
     */
    public static String removeSlashDotDot(String url) {
        if (url == null || (url = url.trim()).length() < 4 || !url.contains(SLASHDOTDOT)) {
            return url;
        }

        /*
         * Anatomy: http://auth@host:port/path1/path2/path3/?query#anchor
         * Work only on the path part, preserving scheme/auth/host and query/anchor.
         */

        // find index of path start
        int dotSlashSlashIndex = url.indexOf(COLONSLASHSLASH);
        final int pathStartIndex;
        if (dotSlashSlashIndex >= 0) {
            // absolute URL: path starts at the first '/' after "://host"
            pathStartIndex = url.indexOf(SLASH, dotSlashSlashIndex + COLONSLASHSLASH.length());
        } else {
            // document or context-relative URL like:
            // '/path/to' OR '../path/to' OR '/path/to/../path/'
            pathStartIndex = 0;
        }

        // find path endIndex (first of '?' or '#', else end of string)
        int pathEndIndex = url.length();
        int questionMarkIdx = url.indexOf('?');
        if (questionMarkIdx > 0) {
            pathEndIndex = questionMarkIdx;
        } else {
            int anchorIdx = url.indexOf('#');
            if (anchorIdx > 0) {
                pathEndIndex = anchorIdx;
            }
        }

        // path is between idx='pathStartIndex' (inclusive) and idx='pathEndIndex' (exclusive)
        String currentPath = url.substring(pathStartIndex, pathEndIndex);
        final boolean startsWithSlash = currentPath.startsWith(SLASH);
        final boolean endsWithSlash = currentPath.endsWith(SLASH);

        StringTokenizer st = new StringTokenizer(currentPath, SLASH);
        List<String> tokens = new ArrayList<String>();
        while (st.hasMoreTokens()) {
            tokens.add(st.nextToken());
        }

        // repeatedly cancel "<segment>/.." pairs (a ".." following a non-".." segment)
        for (int i = 0; i < tokens.size(); i++) {
            if (i < tokens.size() - 1) {
                final String thisToken = tokens.get(i);
                if (thisToken.length() > 0 && !thisToken.equals(DOTDOT)
                        && tokens.get(i + 1).equals(DOTDOT)) {
                    tokens.remove(i);
                    tokens.remove(i);
                    // step back so a newly-adjacent "x/.." pair is also caught
                    i = i - 2;
                    if (i < -1) {
                        i = -1;
                    }
                }
            }
        }

        StringBuilder newPath = new StringBuilder();
        if (startsWithSlash) {
            newPath.append(SLASH);
        }
        for (int i = 0; i < tokens.size(); i++) {
            newPath.append(tokens.get(i));
            // append '/' if this isn't the last token or it is but the original
            // path terminated w/ a '/'
            boolean appendSlash = i < (tokens.size() - 1) || endsWithSlash;
            if (appendSlash) {
                newPath.append(SLASH);
            }
        }

        // splice the collapsed path back between the preserved prefix and suffix
        StringBuilder s = new StringBuilder(url);
        s.replace(pathStartIndex, pathEndIndex, newPath.toString());
        return s.toString();
    }
}
/*
 * Copyright (C) 2013-2014 Xiaoke Zhang
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mcxiaoke.next.http;

import com.mcxiaoke.next.utils.AssertUtils;
import com.mcxiaoke.next.utils.IOUtils;
import com.squareup.okhttp.FormEncodingBuilder;
import com.squareup.okhttp.HttpUrl;
import com.squareup.okhttp.MultipartBuilder;
import com.squareup.okhttp.RequestBody;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Mutable builder-style HTTP request: method + URL plus headers, query
 * parameters, form fields, multipart parts and an optional raw body.
 *
 * Body-related mutators (form/file/part/body) are silently ignored when the
 * HTTP method does not support a request body (e.g. GET/HEAD) — by design,
 * they do not throw; see {@link #supportBody()}.
 */
public class NextRequest {
    protected final HttpMethod method;
    protected final HttpUrl httpUrl;
    protected NextParams params;
    // raw body; when set it takes precedence over forms/parts in getRequestBody()
    protected byte[] body;
    protected ProgressListener listener;
    protected boolean debug;

    public static NextRequest head(final String url) {
        return new NextRequest(HttpMethod.HEAD, url);
    }

    public static NextRequest get(final String url) {
        return new NextRequest(HttpMethod.GET, url);
    }

    public static NextRequest delete(final String url) {
        return new NextRequest(HttpMethod.DELETE, url);
    }

    public static NextRequest post(final String url) {
        return new NextRequest(HttpMethod.POST, url);
    }

    public static NextRequest put(final String url) {
        return new NextRequest(HttpMethod.PUT, url);
    }

    /**
     * Copy constructor. Note: shares (does not deep-copy) params/body/listener
     * with the source request.
     */
    public NextRequest(final NextRequest source) {
        this.method = source.method;
        this.httpUrl = source.httpUrl;
        this.params = source.params;
        this.body = source.body;
        this.listener = source.listener;
        this.debug = source.debug;
    }

    public NextRequest(final HttpMethod method, String url) {
        this(method, url, new NextParams());
    }

    /**
     * @param method HTTP method, required
     * @param url    absolute URL, required and must be parseable
     * @param params initial parameters, copied defensively
     * @throws IllegalArgumentException via AssertUtils when an argument is invalid
     */
    public NextRequest(final HttpMethod method, String url, final NextParams params) {
        AssertUtils.notNull(method, "http method can not be null");
        AssertUtils.notEmpty(url, "http url can not be null or empty");
        AssertUtils.notNull(params, "http params can not be null");
        final HttpUrl hUrl = HttpUrl.parse(url);
        AssertUtils.notNull(hUrl, "invalid url:" + url);
        this.method = method;
        // FIX: reuse the URL already parsed (and validated) above instead of
        // calling HttpUrl.parse(url) a second time.
        this.httpUrl = hUrl;
        this.params = new NextParams(params);
    }

    public NextRequest debug(final boolean debug) {
        this.debug = debug;
        return this;
    }

    public NextRequest progress(final ProgressListener listener) {
        this.listener = listener;
        return this;
    }

    public NextRequest userAgent(final String userAgent) {
        return header(HttpConsts.USER_AGENT, userAgent);
    }

    public NextRequest authorization(final String authorization) {
        return header(HttpConsts.AUTHORIZATION, authorization);
    }

    public NextRequest referer(final String referer) {
        return header(HttpConsts.REFERER, referer);
    }

    public NextRequest header(String name, String value) {
        this.params.header(name, value);
        return this;
    }

    public NextRequest headers(Map<String, String> headers) {
        if (headers != null) {
            this.params.headers(headers);
        }
        return this;
    }

    public NextRequest query(String key, String value) {
        AssertUtils.notEmpty(key, "key must not be null or empty.");
        this.params.query(key, value);
        return this;
    }

    public NextRequest queries(Map<String, String> queries) {
        this.params.queries(queries);
        return this;
    }

    /**
     * Strict variant of the silent body guards: throws when the current
     * method cannot carry a body. Kept for subclasses; the fluent mutators
     * below deliberately use the non-throwing check instead.
     */
    protected void throwIfNotSupportBody() {
        if (!supportBody()) {
            throw new IllegalStateException("HTTP " + method.name() + " not support http body");
        }
    }

    public NextRequest form(String key, String value) {
        if (supportBody()) {
            this.params.form(key, value);
        }
        return this;
    }

    public NextRequest forms(Map<String, String> forms) {
        if (supportBody()) {
            this.params.forms(forms);
        }
        return this;
    }

    public NextRequest parts(Collection<BodyPart> parts) {
        if (supportBody()) {
            for (final BodyPart part : parts) {
                part(part);
            }
        }
        return this;
    }

    public NextRequest file(String key, File file) {
        if (supportBody()) {
            this.params.file(key, file);
        }
        return this;
    }

    public NextRequest file(String key, File file, String contentType) {
        if (supportBody()) {
            this.params.file(key, file, contentType);
        }
        return this;
    }

    public NextRequest file(String key, File file, String contentType, String fileName) {
        if (supportBody()) {
            this.params.file(key, file, contentType, fileName);
        }
        return this;
    }

    public NextRequest file(String key, byte[] bytes) {
        if (supportBody()) {
            this.params.file(key, bytes);
        }
        return this;
    }

    public NextRequest file(String key, byte[] bytes, String contentType) {
        if (supportBody()) {
            this.params.file(key, bytes, contentType);
        }
        return this;
    }

    public NextRequest body(final byte[] body) {
        if (supportBody()) {
            this.body = body;
        }
        return this;
    }

    public NextRequest body(final String content, final Charset charset) {
        if (supportBody()) {
            this.body = content.getBytes(charset);
        }
        return this;
    }

    public NextRequest body(final File file) throws IOException {
        if (supportBody()) {
            this.body = IOUtils.readBytes(file);
        }
        return this;
    }

    public NextRequest body(final Reader reader) throws IOException {
        if (supportBody()) {
            this.body = IOUtils.readBytes(reader);
        }
        return this;
    }

    public NextRequest body(final InputStream stream) throws IOException {
        if (supportBody()) {
            this.body = IOUtils.readBytes(stream);
        }
        return this;
    }

    /**
     * Merges queries (always) and forms/parts (only for body-capable methods)
     * from the given params into this request.
     */
    public NextRequest params(final NextParams params) {
        if (params != null) {
            queries(params.queries);
            if (supportBody()) {
                forms(params.forms);
                parts(params.parts);
            }
        }
        return this;
    }

    public boolean debug() {
        return debug;
    }

    /** @return the URL with all accumulated query parameters applied */
    public HttpUrl url() {
        return buildUrlWithQueries();
    }

    public HttpMethod method() {
        return method;
    }

    /** @return the URL as given at construction, without added queries */
    public String originalUrl() {
        return httpUrl.toString();
    }

    public ProgressListener listener() {
        return listener;
    }

    protected boolean supportBody() {
        return HttpMethod.supportBody(method);
    }

    protected NextRequest part(final BodyPart part) {
        this.params.parts.add(part);
        return this;
    }

    protected NextRequest removeHeader(String key) {
        this.params.headers.remove(key);
        return this;
    }

    protected NextRequest removeQuery(String key) {
        this.params.queries.remove(key);
        return this;
    }

    protected NextRequest removeForm(String key) {
        this.params.forms.remove(key);
        return this;
    }

    protected NextRequest removePart(BodyPart part) {
        this.params.parts.remove(part);
        return this;
    }

    protected String getHeader(String key) {
        return this.params.getHeader(key);
    }

    protected String getQuery(String key) {
        return this.params.getQuery(key);
    }

    protected String getForm(String key) {
        return this.params.getForm(key);
    }

    protected BodyPart getPart(String key) {
        return this.params.getPart(key);
    }

    protected boolean hasHeader(String key) {
        return getHeader(key) != null;
    }

    protected boolean hasQuery(String key) {
        return getQuery(key) != null;
    }

    protected boolean hasForm(String key) {
        return getForm(key) != null;
    }

    protected boolean hasPart(String key) {
        return getPart(key) != null;
    }

    protected int queriesSize() {
        return queries().size();
    }

    protected int formsSize() {
        return form().size();
    }

    protected int headersSize() {
        return headers().size();
    }

    protected int partsSize() {
        return parts().size();
    }

    protected Map<String, String> headers() {
        return this.params.headers;
    }

    protected Map<String, String> queries() {
        return this.params.queries;
    }

    protected Map<String, String> form() {
        return this.params.forms;
    }

    protected List<BodyPart> parts() {
        return this.params.parts;
    }

    protected boolean hasParts() {
        return this.params.parts.size() > 0;
    }

    protected boolean hasForms() {
        return this.params.forms.size() > 0;
    }

    // Applies all accumulated query parameters onto the base URL.
    HttpUrl buildUrlWithQueries() {
        final HttpUrl.Builder builder = httpUrl.newBuilder();
        for (final Map.Entry<String, String> entry : params.queries().entrySet()) {
            builder.addQueryParameter(entry.getKey(), entry.getValue());
        }
        return builder.build();
    }

    /**
     * Copies the mutable state (params/body/listener/debug) from another
     * request; method and URL are final and intentionally untouched.
     */
    protected void copy(final NextRequest source) {
        this.params = source.params;
        this.body = source.body;
        this.listener = source.listener;
        this.debug = source.debug;
    }

    /**
     * Builds the okhttp RequestBody. Priority: raw body bytes, then multipart
     * (when parts exist; form fields ride along as data parts), then
     * form-encoded, then an empty placeholder body.
     *
     * @return the body, or null for methods that do not support one
     */
    protected RequestBody getRequestBody() throws IOException {
        if (!supportBody()) {
            return null;
        }
        if (body != null) {
            return RequestBody.create(HttpConsts.MEDIA_TYPE_OCTET_STREAM, body);
        }
        RequestBody requestBody;
        if (hasParts()) {
            final MultipartBuilder multipart = new MultipartBuilder();
            for (final BodyPart part : parts()) {
                if (part.getBody() != null) {
                    multipart.addFormDataPart(part.getName(), part.getFileName(), part.getBody());
                }
            }
            for (Map.Entry<String, String> entry : form().entrySet()) {
                final String key = entry.getKey();
                final String value = entry.getValue();
                multipart.addFormDataPart(key, value == null ? "" : value);
            }
            requestBody = multipart.type(MultipartBuilder.FORM).build();
        } else if (hasForms()) {
            final FormEncodingBuilder bodyBuilder = new FormEncodingBuilder();
            for (Map.Entry<String, String> entry : form().entrySet()) {
                final String key = entry.getKey();
                final String value = entry.getValue();
                bodyBuilder.add(key, value == null ? "" : value);
            }
            requestBody = bodyBuilder.build();
        } else {
            //FIXME workaround for null body, waiting OkHttp release
            requestBody = RequestBody.create(null, HttpConsts.NO_BODY);
        }
        return requestBody;
    }

    @Override
    public String toString() {
        return "Request{HTTP " + method + " " + httpUrl + '}';
    }

    public String dump() {
        return "Request{HTTP " + method + " " + httpUrl + ' ' + params + '}';
    }
}
/* * Copyright 2006 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.javascript.rhino.jstype.JSTypeNative.GLOBAL_THIS; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.javascript.jscomp.CodingConvention.SubclassRelationship; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.StaticSourceFile; import com.google.javascript.rhino.StaticSymbolTable; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.TokenStream; import com.google.javascript.rhino.TypeI; import com.google.javascript.rhino.jstype.StaticTypedRef; import com.google.javascript.rhino.jstype.StaticTypedScope; import com.google.javascript.rhino.jstype.StaticTypedSlot; import java.io.PrintStream; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; /** * Builds a global namespace of all the objects and their properties in * the global scope. Also builds an index of all the references to those names. 
* * @author nicksantos@google.com (Nick Santos) */ class GlobalNamespace implements StaticTypedScope<TypeI>, StaticSymbolTable<GlobalNamespace.Name, GlobalNamespace.Ref> { private AbstractCompiler compiler; private final Node root; private final Node externsRoot; private boolean inExterns; private Scope externsScope; private boolean generated = false; /** * Each reference has an index in post-order. * Notice that some nodes are represented by 2 Ref objects, so * this index is not necessarily unique. */ private int currentPreOrderIndex = 0; /** Global namespace tree */ private List<Name> globalNames = new ArrayList<>(); /** Maps names (e.g. "a.b.c") to nodes in the global namespace tree */ private Map<String, Name> nameMap = new HashMap<>(); /** * Creates an instance that may emit warnings when building the namespace. * * @param compiler The AbstractCompiler, for reporting code changes * @param root The root of the rest of the code to build a namespace for. */ GlobalNamespace(AbstractCompiler compiler, Node root) { this(compiler, null, root); } /** * Creates an instance that may emit warnings when building the namespace. * * @param compiler The AbstractCompiler, for reporting code changes * @param externsRoot The root of the externs to build a namespace for. If * this is null, externs and properties defined on extern types will not * be included in the global namespace. If non-null, it allows * user-defined function on extern types to be included in the global * namespace. E.g. String.foo. * @param root The root of the rest of the code to build a namespace for. 
*/ GlobalNamespace(AbstractCompiler compiler, Node externsRoot, Node root) { this.compiler = compiler; this.externsRoot = externsRoot; this.root = root; } boolean hasExternsRoot() { return externsRoot != null; } @Override public Node getRootNode() { return root.getParent(); } @Override public StaticTypedScope<TypeI> getParentScope() { return null; } @Override public Name getSlot(String name) { return getOwnSlot(name); } @Override public Name getOwnSlot(String name) { ensureGenerated(); return nameMap.get(name); } @Override public TypeI getTypeOfThis() { return compiler.getTypeIRegistry().getNativeObjectType(GLOBAL_THIS); } @Override public Iterable<Ref> getReferences(Name slot) { ensureGenerated(); return Collections.unmodifiableList(slot.getRefs()); } @Override public StaticTypedScope<TypeI> getScope(Name slot) { return this; } @Override public Iterable<Name> getAllSymbols() { ensureGenerated(); return Collections.unmodifiableCollection(getNameIndex().values()); } private void ensureGenerated() { if (!generated) { process(); } } /** * Gets a list of the roots of the forest of the global names, where the * roots are the top-level names. */ List<Name> getNameForest() { ensureGenerated(); return globalNames; } /** * Gets an index of all the global names, indexed by full qualified name * (as in "a", "a.b.c", etc.). */ Map<String, Name> getNameIndex() { ensureGenerated(); return nameMap; } /** * A simple data class that contains the information necessary to inspect * a node for changes to the global namespace. */ static class AstChange { final JSModule module; final Scope scope; final Node node; AstChange(JSModule module, Scope scope, Node node) { this.module = module; this.scope = scope; this.node = node; } } /** * If the client adds new nodes to the AST, scan these new nodes * to see if they've added any references to the global namespace. * @param newNodes New nodes to check. 
*/ void scanNewNodes(List<AstChange> newNodes) { BuildGlobalNamespace builder = new BuildGlobalNamespace(); for (AstChange info : newNodes) { if (!info.node.isQualifiedName() && !NodeUtil.isObjectLitKey(info.node)) { continue; } scanFromNode(builder, info.module, info.scope, info.node); } } private void scanFromNode( BuildGlobalNamespace builder, JSModule module, Scope scope, Node n) { // Check affected parent nodes first. if (n.isName() || n.isGetProp()) { scanFromNode(builder, module, scope, n.getParent()); } builder.collect(module, scope, n); } /** * Builds the namespace lazily. */ private void process() { if (externsRoot != null) { inExterns = true; NodeTraversal.traverse(compiler, externsRoot, new BuildGlobalNamespace()); } inExterns = false; NodeTraversal.traverse(compiler, root, new BuildGlobalNamespace()); generated = true; } /** * Determines whether a name reference in a particular scope is a global name * reference. * * @param name A variable or property name (e.g. "a" or "a.b.c.d") * @param s The scope in which the name is referenced * @return Whether the name reference is a global name reference */ private boolean isGlobalNameReference(String name, Scope s) { String topVarName = getTopVarName(name); return isGlobalVarReference(topVarName, s); } /** * Gets the top variable name from a possibly namespaced name. * * @param name A variable or qualified property name (e.g. "a" or "a.b.c.d") * @return The top variable name (e.g. "a") */ private static String getTopVarName(String name) { int firstDotIndex = name.indexOf('.'); return firstDotIndex == -1 ? name : name.substring(0, firstDotIndex); } /** * Determines whether a variable name reference in a particular scope is a * global variable reference. * * @param name A variable name (e.g. 
"a") * @param s The scope in which the name is referenced * @return Whether the name reference is a global variable reference */ private boolean isGlobalVarReference(String name, Scope s) { Var v = s.getVar(name); if (v == null && externsScope != null) { v = externsScope.getVar(name); } return v != null && !v.isLocal(); } // ------------------------------------------------------------------------- /** * Builds a tree representation of the global namespace. Omits prototypes. */ private class BuildGlobalNamespace implements NodeTraversal.Callback { BuildGlobalNamespace() { } @Override public void visit(NodeTraversal t, Node n, Node parent) {} /** Collect the references in pre-order. */ @Override public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) { // If we are traversing the externs, then we save a pointer to the scope // generated by them, so that we can do lookups in it later. if (externsRoot != null && n == externsRoot) { externsScope = t.getScope(); } collect(t.getModule(), t.getScope(), n); return true; } public void collect(JSModule module, Scope scope, Node n) { Node parent = n.getParent(); String name; boolean isSet = false; Name.Type type = Name.Type.OTHER; boolean isPropAssign = false; switch (n.getType()) { case Token.GETTER_DEF: case Token.SETTER_DEF: case Token.STRING_KEY: // This may be a key in an object literal declaration. name = null; if (parent != null && parent.isObjectLit()) { name = getNameForObjLitKey(n); } if (name == null) { return; } isSet = true; switch (n.getType()) { case Token.STRING_KEY: type = getValueType(n.getFirstChild()); break; case Token.GETTER_DEF: type = Name.Type.GET; break; case Token.SETTER_DEF: type = Name.Type.SET; break; default: throw new IllegalStateException("unexpected:" + n); } break; case Token.NAME: // This may be a variable get or set. if (parent != null) { switch (parent.getType()) { case Token.VAR: isSet = true; Node rvalue = n.getFirstChild(); type = rvalue == null ? 
Name.Type.OTHER : getValueType(rvalue); break; case Token.ASSIGN: if (parent.getFirstChild() == n) { isSet = true; type = getValueType(n.getNext()); } break; case Token.GETPROP: return; case Token.FUNCTION: Node gramps = parent.getParent(); if (gramps == null || NodeUtil.isFunctionExpression(parent)) { return; } isSet = true; type = Name.Type.FUNCTION; break; case Token.CATCH: case Token.INC: case Token.DEC: isSet = true; type = Name.Type.OTHER; break; default: if (NodeUtil.isAssignmentOp(parent) && parent.getFirstChild() == n) { isSet = true; type = Name.Type.OTHER; } } } name = n.getString(); break; case Token.GETPROP: // This may be a namespaced name get or set. if (parent != null) { switch (parent.getType()) { case Token.ASSIGN: if (parent.getFirstChild() == n) { isSet = true; type = getValueType(n.getNext()); isPropAssign = true; } break; case Token.INC: case Token.DEC: isSet = true; type = Name.Type.OTHER; break; case Token.GETPROP: return; default: if (NodeUtil.isAssignmentOp(parent) && parent.getFirstChild() == n) { isSet = true; type = Name.Type.OTHER; } } } name = n.getQualifiedName(); if (name == null) { return; } break; default: return; } // We are only interested in global names. if (!isGlobalNameReference(name, scope)) { return; } if (isSet) { if (scope.isGlobal()) { handleSetFromGlobal(module, scope, n, parent, name, isPropAssign, type); } else { handleSetFromLocal(module, scope, n, parent, name); } } else { handleGet(module, scope, n, parent, name); } } /** * Gets the fully qualified name corresponding to an object literal key, * as long as it and its prefix property names are valid JavaScript * identifiers. The object literal may be nested inside of other object * literals. 
* * For example, if called with node {@code n} representing "z" in any of * the following expressions, the result would be "w.x.y.z": * <code> var w = {x: {y: {z: 0}}}; </code> * <code> w.x = {y: {z: 0}}; </code> * <code> w.x.y = {'a': 0, 'z': 0}; </code> * * @param n A child of an OBJLIT node * @return The global name, or null if {@code n} doesn't correspond to the * key of an object literal that can be named */ String getNameForObjLitKey(Node n) { Node parent = n.getParent(); Preconditions.checkState(parent.isObjectLit()); Node gramps = parent.getParent(); if (gramps == null) { return null; } Node greatGramps = gramps.getParent(); String name; switch (gramps.getType()) { case Token.NAME: // VAR // NAME (gramps) // OBJLIT (parent) // STRING (n) if (greatGramps == null || !greatGramps.isVar()) { return null; } name = gramps.getString(); break; case Token.ASSIGN: // ASSIGN (gramps) // NAME|GETPROP // OBJLIT (parent) // STRING (n) Node lvalue = gramps.getFirstChild(); name = lvalue.getQualifiedName(); break; case Token.STRING_KEY: // OBJLIT // STRING (gramps) // OBJLIT (parent) // STRING (n) if (greatGramps != null && greatGramps.isObjectLit()) { name = getNameForObjLitKey(gramps); } else { return null; } break; default: return null; } if (name != null) { String key = n.getString(); if (TokenStream.isJSIdentifier(key)) { return name + '.' + key; } } return null; } /** * Gets the type of a value or simple expression. * * @param n An r-value in an assignment or variable declaration (not null) * @return A {@link Name.Type} */ Name.Type getValueType(Node n) { switch (n.getType()) { case Token.OBJECTLIT: return Name.Type.OBJECTLIT; case Token.FUNCTION: return Name.Type.FUNCTION; case Token.OR: // Recurse on the second value. If the first value were an object // literal or function, then the OR would be meaningless and the // second value would be dead code. 
Assume that if the second value // is an object literal or function, then the first value will also // evaluate to one when it doesn't evaluate to false. return getValueType(n.getLastChild()); case Token.HOOK: // The same line of reasoning used for the OR case applies here. Node second = n.getFirstChild().getNext(); Name.Type t = getValueType(second); if (t != Name.Type.OTHER) { return t; } Node third = second.getNext(); return getValueType(third); } return Name.Type.OTHER; } /** * Updates our representation of the global namespace to reflect an * assignment to a global name in global scope. * * @param module the current module * @param scope the current scope * @param n The node currently being visited * @param parent {@code n}'s parent * @param name The global name (e.g. "a" or "a.b.c.d") * @param isPropAssign Whether this set corresponds to a property * assignment of the form <code>a.b.c = ...;</code> * @param type The type of the value that the name is being assigned */ void handleSetFromGlobal(JSModule module, Scope scope, Node n, Node parent, String name, boolean isPropAssign, Name.Type type) { if (maybeHandlePrototypePrefix(module, scope, n, parent, name)) { return; } Name nameObj = getOrCreateName(name); nameObj.type = type; Ref set = new Ref(module, scope, n, nameObj, Ref.Type.SET_FROM_GLOBAL, currentPreOrderIndex++); nameObj.addRef(set); if (isNestedAssign(parent)) { // This assignment is both a set and a get that creates an alias. Ref get = new Ref(module, scope, n, nameObj, Ref.Type.ALIASING_GET, currentPreOrderIndex++); nameObj.addRef(get); Ref.markTwins(set, get); } else if (isTypeDeclaration(n)) { // Names with a @constructor or @enum annotation are always collapsed nameObj.setDeclaredType(); } } /** * Determines whether a set operation is a constructor or enumeration * or interface declaration. The set operation may either be an assignment * to a name, a variable declaration, or an object literal key mapping. 
* * @param n The node that represents the name being set * @param parent Parent node of {@code n} (an ASSIGN, VAR, or OBJLIT node) * @return Whether the set operation is either a constructor or enum * declaration */ private boolean isTypeDeclaration(Node n) { Node valueNode = NodeUtil.getRValueOfLValue(n); JSDocInfo info = NodeUtil.getBestJSDocInfo(n); // Heed the annotations only if they're sensibly used. return info != null && valueNode != null && (info.isConstructor() && valueNode.isFunction() || info.isInterface() && valueNode.isFunction() || info.hasEnumParameterType() && valueNode.isObjectLit()); } /** * Updates our representation of the global namespace to reflect an * assignment to a global name in a local scope. * * @param module The current module * @param scope The current scope * @param n The node currently being visited * @param parent {@code n}'s parent * @param name The global name (e.g. "a" or "a.b.c.d") */ void handleSetFromLocal(JSModule module, Scope scope, Node n, Node parent, String name) { if (maybeHandlePrototypePrefix(module, scope, n, parent, name)) { return; } Name nameObj = getOrCreateName(name); Ref set = new Ref(module, scope, n, nameObj, Ref.Type.SET_FROM_LOCAL, currentPreOrderIndex++); nameObj.addRef(set); if (isNestedAssign(parent)) { // This assignment is both a set and a get that creates an alias. Ref get = new Ref(module, scope, n, nameObj, Ref.Type.ALIASING_GET, currentPreOrderIndex++); nameObj.addRef(get); Ref.markTwins(set, get); } } /** * Updates our representation of the global namespace to reflect a read * of a global name. * * @param module The current module * @param scope The current scope * @param n The node currently being visited * @param parent {@code n}'s parent * @param name The global name (e.g. 
"a" or "a.b.c.d") */ void handleGet(JSModule module, Scope scope, Node n, Node parent, String name) { if (maybeHandlePrototypePrefix(module, scope, n, parent, name)) { return; } Ref.Type type = Ref.Type.DIRECT_GET; if (parent != null) { switch (parent.getType()) { case Token.EXPR_RESULT: case Token.IF: case Token.INSTANCEOF: case Token.TYPEOF: case Token.VOID: case Token.NOT: case Token.BITNOT: case Token.POS: case Token.NEG: break; case Token.CALL: if (n == parent.getFirstChild()) { // It is a call target type = Ref.Type.CALL_GET; } else if (isClassDefiningCall(parent)) { type = Ref.Type.DIRECT_GET; } else { type = Ref.Type.ALIASING_GET; } break; case Token.NEW: type = n == parent.getFirstChild() ? Ref.Type.DIRECT_GET : Ref.Type.ALIASING_GET; break; case Token.OR: case Token.AND: // This node is x or y in (x||y) or (x&&y). We only know that an // alias is not getting created for this name if the result is used // in a boolean context or assigned to the same name // (e.g. var a = a || {}). type = determineGetTypeForHookOrBooleanExpr(module, scope, parent, name); break; case Token.HOOK: if (n != parent.getFirstChild()) { // This node is y or z in (x?y:z). We only know that an alias is // not getting created for this name if the result is assigned to // the same name (e.g. var a = a ? a : {}). type = determineGetTypeForHookOrBooleanExpr(module, scope, parent, name); } break; case Token.DELPROP: type = Ref.Type.DELETE_PROP; break; default: type = Ref.Type.ALIASING_GET; break; } } handleGet(module, scope, n, parent, name, type); } private boolean isClassDefiningCall(Node callNode) { CodingConvention convention = compiler.getCodingConvention(); // Look for goog.inherits, goog.mixin SubclassRelationship classes = convention.getClassesDefinedByCall(callNode); if (classes != null) { return true; } // Look for calls to goog.addSingletonGetter calls. 
String className = convention.getSingletonGetterClassName(callNode); return className != null; } /** * Determines whether the result of a hook (x?y:z) or boolean expression * (x||y) or (x&&y) is assigned to a specific global name. * * @param module The current module * @param scope The current scope * @param parent The parent of the current node in the traversal. This node * should already be known to be a HOOK, AND, or OR node. * @param name A name that is already known to be global in the current * scope (e.g. "a" or "a.b.c.d") * @return The expression's get type, either {@link Ref.Type#DIRECT_GET} or * {@link Ref.Type#ALIASING_GET} */ Ref.Type determineGetTypeForHookOrBooleanExpr( JSModule module, Scope scope, Node parent, String name) { Node prev = parent; for (Node anc : parent.getAncestors()) { switch (anc.getType()) { case Token.INSTANCEOF: case Token.EXPR_RESULT: case Token.VAR: case Token.IF: case Token.WHILE: case Token.FOR: case Token.TYPEOF: case Token.VOID: case Token.NOT: case Token.BITNOT: case Token.POS: case Token.NEG: return Ref.Type.DIRECT_GET; case Token.HOOK: if (anc.getFirstChild() == prev) { return Ref.Type.DIRECT_GET; } break; case Token.ASSIGN: if (!name.equals(anc.getFirstChild().getQualifiedName())) { return Ref.Type.ALIASING_GET; } break; case Token.NAME: // a variable declaration if (!name.equals(anc.getString())) { return Ref.Type.ALIASING_GET; } break; case Token.CALL: if (anc.getFirstChild() != prev) { return Ref.Type.ALIASING_GET; } break; case Token.DELPROP: return Ref.Type.DELETE_PROP; } prev = anc; } return Ref.Type.ALIASING_GET; } /** * Updates our representation of the global namespace to reflect a read * of a global name. * * @param module The current module * @param scope The current scope * @param n The node currently being visited * @param parent {@code n}'s parent * @param name The global name (e.g. 
"a" or "a.b.c.d") * @param type The reference type */ void handleGet(JSModule module, Scope scope, Node n, Node parent, String name, Ref.Type type) { Name nameObj = getOrCreateName(name); // No need to look up additional ancestors, since they won't be used. nameObj.addRef( new Ref(module, scope, n, nameObj, type, currentPreOrderIndex++)); } /** * Updates our representation of the global namespace to reflect a read * of a global name's longest prefix before the "prototype" property if the * name includes the "prototype" property. Does nothing otherwise. * * @param module The current module * @param scope The current scope * @param n The node currently being visited * @param parent {@code n}'s parent * @param name The global name (e.g. "a" or "a.b.c.d") * @return Whether the name was handled */ boolean maybeHandlePrototypePrefix(JSModule module, Scope scope, Node n, Node parent, String name) { // We use a string-based approach instead of inspecting the parse tree // to avoid complexities with object literals, possibly nested, beneath // assignments. int numLevelsToRemove; String prefix; if (name.endsWith(".prototype")) { numLevelsToRemove = 1; prefix = name.substring(0, name.length() - 10); } else { int i = name.indexOf(".prototype."); if (i == -1) { return false; } prefix = name.substring(0, i); numLevelsToRemove = 2; i = name.indexOf('.', i + 11); while (i >= 0) { numLevelsToRemove++; i = name.indexOf('.', i + 1); } } if (parent != null && NodeUtil.isObjectLitKey(n)) { // Object literal keys have no prefix that's referenced directly per // key, so we're done. return true; } for (int i = 0; i < numLevelsToRemove; i++) { parent = n; n = n.getFirstChild(); } handleGet(module, scope, n, parent, prefix, Ref.Type.PROTOTYPE_GET); return true; } /** * Determines whether an assignment is nested (i.e. whether its return * value is used). 
* * @param parent The parent of the current traversal node (not null) * @return Whether it appears that the return value of the assignment is * used */ boolean isNestedAssign(Node parent) { return parent.isAssign() && !parent.getParent().isExprResult(); } /** * Gets a {@link Name} instance for a global name. Creates it if necessary, * as well as instances for any of its prefixes that are not yet defined. * * @param name A global name (e.g. "a", "a.b.c.d") * @return The {@link Name} instance for {@code name} */ Name getOrCreateName(String name) { Name node = nameMap.get(name); if (node == null) { int i = name.lastIndexOf('.'); if (i >= 0) { String parentName = name.substring(0, i); Name parent = getOrCreateName(parentName); node = parent.addProperty(name.substring(i + 1), inExterns); } else { node = new Name(name, null, inExterns); globalNames.add(node); } nameMap.put(name, node); } return node; } } // ------------------------------------------------------------------------- /** * A name defined in global scope (e.g. "a" or "a.b.c.d"). These form a tree. * As the parse tree traversal proceeds, we'll discover that some names * correspond to JavaScript objects whose properties we should consider * collapsing. */ static class Name implements StaticTypedSlot<TypeI> { enum Type { OBJECTLIT, FUNCTION, GET, SET, OTHER, } private final String baseName; final Name parent; List<Name> props; /** The first global assignment to a name. */ private Ref declaration; /** All references to a name. This must contain {@code declaration}. 
*/ private List<Ref> refs; Type type; private boolean declaredType = false; private boolean isDeclared = false; int globalSets = 0; int localSets = 0; int localSetsWithNoCollapse = 0; int aliasingGets = 0; int totalGets = 0; int callGets = 0; int deleteProps = 0; final boolean inExterns; JSDocInfo docInfo = null; Name(String name, Name parent, boolean inExterns) { this.baseName = name; this.parent = parent; this.type = Type.OTHER; this.inExterns = inExterns; } Name addProperty(String name, boolean inExterns) { if (props == null) { props = new ArrayList<>(); } Name node = new Name(name, this, inExterns); props.add(node); return node; } String getBaseName() { return baseName; } @Override public String getName() { return getFullName(); } String getFullName() { return parent == null ? baseName : parent.getFullName() + '.' + baseName; } @Override public Ref getDeclaration() { return declaration; } @Override public boolean isTypeInferred() { return false; } @Override public TypeI getType() { return null; } void addRef(Ref ref) { addRefInternal(ref); JSDocInfo info; switch (ref.type) { case SET_FROM_GLOBAL: if (declaration == null) { declaration = ref; docInfo = getDocInfoForDeclaration(ref); } globalSets++; break; case SET_FROM_LOCAL: localSets++; info = ref.getNode() == null ? 
null : NodeUtil.getBestJSDocInfo(ref.getNode()); if (info != null && info.isNoCollapse()) { localSetsWithNoCollapse++; } break; case PROTOTYPE_GET: case DIRECT_GET: totalGets++; break; case ALIASING_GET: aliasingGets++; totalGets++; break; case CALL_GET: callGets++; totalGets++; break; case DELETE_PROP: deleteProps++; break; default: throw new IllegalStateException(); } } void removeRef(Ref ref) { if (refs != null && refs.remove(ref)) { if (ref == declaration) { declaration = null; if (refs != null) { for (Ref maybeNewDecl : refs) { if (maybeNewDecl.type == Ref.Type.SET_FROM_GLOBAL) { declaration = maybeNewDecl; break; } } } } JSDocInfo info; switch (ref.type) { case SET_FROM_GLOBAL: globalSets--; break; case SET_FROM_LOCAL: localSets--; info = ref.getNode() == null ? null : NodeUtil.getBestJSDocInfo(ref.getNode()); if (info != null && info.isNoCollapse()) { localSetsWithNoCollapse--; } break; case PROTOTYPE_GET: case DIRECT_GET: totalGets--; break; case ALIASING_GET: aliasingGets--; totalGets--; break; case CALL_GET: callGets--; totalGets--; break; case DELETE_PROP: deleteProps--; break; default: throw new IllegalStateException(); } } } List<Ref> getRefs() { return refs == null ? 
ImmutableList.<Ref>of() : refs; } void addRefInternal(Ref ref) { if (refs == null) { refs = new ArrayList<>(); } refs.add(ref); } boolean canEliminate() { if (!canCollapseUnannotatedChildNames() || totalGets > 0) { return false; } if (props != null) { for (Name n : props) { if (!n.canCollapse()) { return false; } } } return true; } boolean isSimpleStubDeclaration() { if (getRefs().size() == 1) { Ref ref = refs.get(0); if (ref.node.getParent() != null && ref.node.getParent().isExprResult()) { return true; } } return false; } boolean isCollapsingExplicitlyDenied() { if (docInfo == null) { Ref ref = getDeclaration(); if (ref != null) { docInfo = getDocInfoForDeclaration(ref); } } return docInfo != null && docInfo.isNoCollapse(); } boolean canCollapse() { return !inExterns && !isGetOrSetDefinition() && !isCollapsingExplicitlyDenied() && (declaredType || (parent == null || parent.canCollapseUnannotatedChildNames()) && (globalSets > 0 || localSets > 0) && localSetsWithNoCollapse == 0 && deleteProps == 0); } boolean isGetOrSetDefinition() { return this.type == Type.GET || this.type == Type.SET; } boolean canCollapseUnannotatedChildNames() { if (type == Type.OTHER || isGetOrSetDefinition() || globalSets != 1 || localSets != 0 || deleteProps != 0) { return false; } // Don't try to collapse if the one global set is a twin reference. // We could theoretically handle this case in CollapseProperties, but // it's probably not worth the effort. Preconditions.checkNotNull(declaration); if (declaration.getTwin() != null) { return false; } if (isCollapsingExplicitlyDenied()) { return false; } if (declaredType) { return true; } // If this is a key of an aliased object literal, then it will be aliased // later. So we won't be able to collapse its properties. if (parent != null && parent.shouldKeepKeys()) { return false; } // If this is aliased, then its properties can't be collapsed either. 
if (aliasingGets > 0) { return false; } return (parent == null || parent.canCollapseUnannotatedChildNames()); } /** Whether this is an object literal that needs to keep its keys. */ boolean shouldKeepKeys() { return type == Type.OBJECTLIT && (aliasingGets > 0 || isCollapsingExplicitlyDenied()); } boolean needsToBeStubbed() { return globalSets == 0 && localSets > 0 && localSetsWithNoCollapse == 0 && !isCollapsingExplicitlyDenied(); } void setDeclaredType() { declaredType = true; for (Name ancestor = parent; ancestor != null; ancestor = ancestor.parent) { ancestor.isDeclared = true; } } boolean isDeclaredType() { return declaredType; } boolean isConstructor() { Node declNode = declaration.node; Node rvalueNode = NodeUtil.getRValueOfLValue(declNode); JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(declNode); return rvalueNode != null && rvalueNode.isFunction() && jsdoc != null && jsdoc.isConstructor(); } /** * Determines whether this name is a prefix of at least one class or enum * name. Because classes and enums are always collapsed, the namespace will * have different properties in compiled code than in uncompiled code. * * For example, if foo.bar.DomHelper is a class, then foo and foo.bar are * considered namespaces. */ boolean isNamespaceObjectLit() { return isDeclared && type == Type.OBJECTLIT; } /** * Determines whether this is a simple name (as opposed to a qualified * name). */ boolean isSimpleName() { return parent == null; } @Override public String toString() { return getFullName() + " (" + type + "): globalSets=" + globalSets + ", localSets=" + localSets + ", totalGets=" + totalGets + ", aliasingGets=" + aliasingGets + ", callGets=" + callGets; } @Override public JSDocInfo getJSDocInfo() { return docInfo; } /** * Tries to get the doc info for a given declaration ref. 
*/
private static JSDocInfo getDocInfoForDeclaration(Ref ref) {
  if (ref.node != null) {
    Node refParent = ref.node.getParent();
    switch (refParent.getType()) {
      case Token.FUNCTION:
      case Token.ASSIGN:
        // Function and assignment declarations carry the doc info on the
        // declaring (parent) node.
        return refParent.getJSDocInfo();
      case Token.VAR:
        // For a VAR statement the doc lives on the VAR node only for the
        // first declared child; later children carry their own doc.
        return ref.node == refParent.getFirstChild()
            ? refParent.getJSDocInfo() : ref.node.getJSDocInfo();
      case Token.OBJECTLIT:
        return ref.node.getJSDocInfo();
    }
  }
  // No recognized declaration shape: no doc info available.
  return null;
}
}

// -------------------------------------------------------------------------

/**
 * A global name reference. Contains references to the relevant parse tree
 * node and its ancestors that may be affected.
 */
static class Ref implements StaticTypedRef<TypeI> {

  // Note: we are more aggressive about collapsing @enum and @constructor
  // declarations than implied here, see Name#canCollapse
  enum Type {
    SET_FROM_GLOBAL,
    SET_FROM_LOCAL,
    PROTOTYPE_GET,
    ALIASING_GET, // Prevents a name's properties from being collapsed
    DIRECT_GET, // Prevents a name from being completely eliminated
    CALL_GET, // Prevents a name from being collapsed if never set
    DELETE_PROP, // Prevents a name from being collapsed at all.
  }

  // Mutable: the referenced node can be replaced as passes rewrite the AST.
  Node node;
  final JSModule module;
  final StaticSourceFile source;
  final Name name;
  final Type type;
  final Scope scope;
  // Position of this ref in a pre-order traversal; -1 for test-only refs.
  final int preOrderIndex;

  /**
   * Certain types of references are actually double-refs. For example,
   * var a = b = 0;
   * counts as both a "set" of b and an "alias" of b.
   *
   * We create two Refs for this node, and mark them as twins of each other.
   */
  private Ref twin = null;

  /**
   * Creates a reference at the current node.
   */
  Ref(JSModule module, Scope scope, Node node, Name name, Type type, int index) {
    this.node = node;
    this.name = name;
    this.module = module;
    // The source file is derived from the node rather than passed in.
    this.source = node.getStaticSourceFile();
    this.type = type;
    this.scope = scope;
    this.preOrderIndex = index;
  }

  // Copy constructor used by cloneAndReclassify(): same location, new type.
  private Ref(Ref original, Type type, int index) {
    this.node = original.node;
    this.name = original.name;
    this.module = original.module;
    this.source = original.source;
    this.type = type;
    this.scope = original.scope;
    this.preOrderIndex = index;
  }

  // Minimal constructor for tests only; all location fields are null.
  private Ref(Type type, int index) {
    this.type = type;
    this.module = null;
    this.source = null;
    this.scope = null;
    this.name = null;
    this.preOrderIndex = index;
  }

  @Override
  public Node getNode() {
    return node;
  }

  @Override
  public StaticSourceFile getSourceFile() {
    return source;
  }

  @Override
  public StaticTypedSlot<TypeI> getSymbol() {
    return name;
  }

  JSModule getModule() {
    return module;
  }

  Ref getTwin() {
    return twin;
  }

  // True for both flavors of assignment reference (global and local sets).
  boolean isSet() {
    return type == Type.SET_FROM_GLOBAL || type == Type.SET_FROM_LOCAL;
  }

  // Links an aliasing get with its paired set (see the twin field's doc).
  // Precondition: at least one of the pair is an ALIASING_GET and at least
  // one is a set.
  static void markTwins(Ref a, Ref b) {
    Preconditions.checkArgument(
        (a.type == Type.ALIASING_GET || b.type == Type.ALIASING_GET)
        && (a.type == Type.SET_FROM_GLOBAL
            || a.type == Type.SET_FROM_LOCAL
            || b.type == Type.SET_FROM_GLOBAL
            || b.type == Type.SET_FROM_LOCAL));
    a.twin = b;
    b.twin = a;
  }

  /**
   * Create a new ref that is the same as this one, but of
   * a different class.
   */
  Ref cloneAndReclassify(Type type) {
    return new Ref(this, type, this.preOrderIndex);
  }

  static Ref createRefForTesting(Type type) {
    // -1 marks a synthetic ref with no real pre-order position.
    return new Ref(type, -1);
  }

  @Override
  public String toString() {
    return node.toString();
  }
}

/**
 * An experimental compiler pass for tracking what symbols were added/removed
 * at each stage of compilation.
 *
 * When "global namespace tracker" mode is on, we rebuild the global namespace
 * after each pass, and diff it against the last namespace built.
*/
static class Tracker implements CompilerPass {
  private final AbstractCompiler compiler;
  private final PrintStream stream;
  private final Predicate<String> isInterestingSymbol;

  // Symbols seen after the previous pass; empty before the first pass runs.
  private Set<String> previousSymbolsInTree = ImmutableSet.of();

  /** @param stream The stream to print logs to.
   * @param isInterestingSymbol A predicate to determine which symbols
   *     we care about.
   */
  Tracker(AbstractCompiler compiler, PrintStream stream,
      Predicate<String> isInterestingSymbol) {
    this.compiler = compiler;
    this.stream = stream;
    this.isInterestingSymbol = isInterestingSymbol;
  }

  @Override public void process(Node externs, Node root) {
    // Rebuild the global namespace from scratch and keep only the symbols
    // the predicate cares about.  TreeSet keeps the log output sorted.
    GlobalNamespace namespace = new GlobalNamespace(compiler, externs, root);

    Set<String> currentSymbols = new TreeSet<>();
    for (String name : namespace.getNameIndex().keySet()) {
      if (isInterestingSymbol.apply(name)) {
        currentSymbols.add(name);
      }
    }

    String passName = compiler.getLastPassName();
    if (passName == null) {
      passName = "[Unknown pass]";
    }

    // Diff against the previous snapshot: log additions, then removals.
    for (String sym : currentSymbols) {
      if (!previousSymbolsInTree.contains(sym)) {
        stream.printf("%s: Added by %s%n", sym, passName);
      }
    }
    for (String sym : previousSymbolsInTree) {
      if (!currentSymbols.contains(sym)) {
        stream.printf("%s: Removed by %s%n", sym, passName);
      }
    }
    previousSymbolsInTree = currentSymbols;
  }
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.protocol.http.documentvalidation;

import java.util.HashMap;
import java.util.Map;

import org.apache.wicket.util.string.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Lightweight document parser for HTML. This parser is only intended to process well formed and
 * simple HTML of the kind that would generally be utilized during testing.
 *
 * @author Chris Turner
 */
public class HtmlDocumentParser
{
    private static final Logger log = LoggerFactory.getLogger(HtmlDocumentParser.class);

    /** Constant for close tag token. */
    public static final int CLOSE_TAG = 4;

    /** Constant for comment token. */
    public static final int COMMENT = 1;

    /** Constant for end token. */
    public static final int END = 0;

    /** Constant for open tag token. */
    public static final int OPEN_TAG = 2;

    /** Constant for open/close tag token. */
    public static final int OPENCLOSE_TAG = 3;

    /** Constant for text token. */
    public static final int TEXT = 5;

    /** Constant for unknown token. */
    public static final int UNKNOWN = -1;

    /** Attributes of the most recently parsed tag (lower-cased names). */
    private Map<String, String> attributes;

    /** Content of the most recently parsed comment. */
    private String comment;

    /** The normalized document being parsed (newlines removed, tabs replaced). */
    private final String document;

    /** Current parse position within {@link #document}. */
    private int pos;

    /** Name of the most recently parsed tag (lower-cased). */
    private String tag;

    /** Most recently parsed run of text. */
    private String text;

    /**
     * Create the parser for the current document.
     *
     * @param document
     *            The document to parse
     */
    public HtmlDocumentParser(final String document)
    {
        // Normalize whitespace so the regex-based directive matching below only
        // has to deal with single-line input.
        CharSequence tmp = Strings.replaceAll(document, "\n", "");
        tmp = Strings.replaceAll(tmp, "\r", "");
        this.document = Strings.replaceAll(tmp, "\t", " ").toString();
        pos = 0;
    }

    /**
     * Get the attributes of the tag.
     *
     * @return The attributes
     */
    public Map<String, String> getAttributes()
    {
        return attributes;
    }

    /**
     * Get the comment.
     *
     * @return The comment
     */
    public String getComment()
    {
        return comment;
    }

    /**
     * Iterates through the document searching for tokens. Returns the type of token that was found.
     * If an unexpected token was encountered then the parser writes this fact to the console and
     * continues
     *
     * @return The token that was found
     */
    public int getNextToken()
    {
        if (pos < document.length())
        {
            char ch = document.charAt(pos);
            if (ch == '<')
            {
                return processDirective();
            }
            else
            {
                return processText();
            }
        }
        return END;
    }

    /**
     * Get the tag name.
     *
     * @return The tag name
     */
    public String getTag()
    {
        return tag;
    }

    /**
     * Get the text.
     *
     * @return The text
     */
    public String getText()
    {
        return text;
    }

    /**
     * Extract attributes from the given string.
     *
     * @param attributeString
     *            The string
     * @return The map of attributes
     */
    private Map<String, String> extractAttributes(String attributeString)
    {
        Map<String, String> m = new HashMap<String, String>();
        // Normalize separators so "name = value" and "name=value" parse alike.
        attributeString = Strings.replaceAll(attributeString.trim(), "\t", " ").toString();
        attributeString = Strings.replaceAll(attributeString, " = ", "=").toString();
        String[] attributeElements = Strings.split(attributeString, ' ');
        for (String attributeElement : attributeElements)
        {
            String[] bits = Strings.split(attributeElement, '=');
            if (bits.length == 1)
            {
                // Valueless attribute such as "disabled".
                m.put(bits[0].trim().toLowerCase(), "");
            }
            else
            {
                bits[0] = bits[0].trim();
                // Re-join the value in case it itself contained '=' characters.
                StringBuilder value = new StringBuilder();
                for (int j = 1; j < bits.length; j++)
                {
                    value.append(bits[j]);
                    if (j < (bits.length - 1))
                    {
                        value.append('=');
                    }
                }
                bits[1] = value.toString().trim();
                // Strip surrounding double quotes if present.
                if (bits[1].startsWith("\""))
                {
                    bits[1] = bits[1].substring(1);
                }
                if (bits[1].endsWith("\""))
                {
                    bits[1] = bits[1].substring(0, bits[1].length() - 1);
                }
                m.put(bits[0].toLowerCase(), bits[1]);
            }
        }
        return m;
    }

    /**
     * Process a directive starting at the current position.
     *
     * @return The token found
     */
    private int processDirective()
    {
        String part = document.substring(pos);
        if (part.matches("<!--.*-->.*"))
        {
            // This is a comment
            int commentEnd = part.indexOf("-->");
            comment = part.substring(4, commentEnd).trim();
            pos += commentEnd + 3;
            return COMMENT;
        }
        else if (part.matches("</.*>.*"))
        {
            // This is a closing tag
            int tagEnd = part.indexOf('>');
            tag = part.substring(2, tagEnd).trim().toLowerCase();
            pos += tagEnd + 1;
            return CLOSE_TAG;
        }
        else if (part.matches("<[^/]+[^>]*/>.*"))
        {
            // This is an openclose tag
            if (part.matches("<([a-zA-Z]+:)?[a-zA-Z]+/>.*"))
            {
                // No attributes
                tag = part.substring(1, part.indexOf("/>")).toLowerCase();
                attributes = new HashMap<String, String>();
            }
            else
            {
                // Attributes
                tag = part.substring(1, part.indexOf(' ')).toLowerCase();
                String attributeString = part.substring(part.indexOf(' '), part.indexOf("/>"));
                attributes = extractAttributes(attributeString);
            }
            pos += part.indexOf("/>") + 2;
            return OPENCLOSE_TAG;
        }
        else if (part.matches("<[^/>]+.*>.*"))
        {
            // This is an opening tag
            if (part.matches("<([a-zA-Z]+:)?[a-zA-Z]*>.*"))
            {
                // No attributes
                tag = part.substring(1, part.indexOf('>')).toLowerCase();
                attributes = new HashMap<String, String>();
            }
            else
            {
                // Attributes
                tag = part.substring(1, part.indexOf(' ')).toLowerCase();
                String attributeString = part.substring(part.indexOf(' '), part.indexOf('>'));
                attributes = extractAttributes(attributeString);
            }
            pos += part.indexOf(">") + 1;
            return OPEN_TAG;
        }
        else
        {
            int size = (part.length() > 30) ? 30 : part.length();
            log.error("Unexpected markup found: " + part.substring(0, size) + "...");
            // Bug fix: previously the position was never advanced here, so the
            // next call to getNextToken() re-read the same markup and callers
            // looping until END never made progress. Skip the offending '<' so
            // parsing can continue, as this method's callers expect.
            pos++;
            return UNKNOWN;
        }
    }

    /**
     * Process text up to the next token.
     *
     * @return The token code
     */
    private int processText()
    {
        StringBuilder buf = new StringBuilder();
        while (pos < document.length())
        {
            char ch = document.charAt(pos);
            if (ch == '<')
            {
                // Leave pos pointing at the '<' so the next call parses the tag.
                text = buf.toString();
                return TEXT;
            }
            else
            {
                buf.append(ch);
            }
            pos++;
        }
        if (buf.length() > 0)
        {
            text = buf.toString();
            return TEXT;
        }
        return END;
    }
}
package com.example.sampleuserlist.app;

import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Build;
import android.os.Bundle;
import android.telephony.TelephonyManager;
import android.util.DisplayMetrics;
import android.view.Display;
import android.view.WindowManager;

import com.example.sampleuserlist.R;
import com.example.sampleuserlist.dataService.api.ULApiRequestDispatcher;
import com.example.sampleuserlist.dataService.api.slack.SlackApiRequestDispatcher;
import com.example.sampleuserlist.dataService.persistence.ULPersistentStorage;
import com.example.sampleuserlist.dataService.persistence.slack.SlackPersistentStorage;
import com.example.sampleuserlist.logging.ULApplicationLogger;

import java.util.List;
import java.util.Stack;

/**
 * User List implementation of the {@link Application} class.
 * Used for instantiating and accessing singleton objects, app-wide settings, device-specific information, and retrieving string resources.
 * <p/>
 * NOTE(review): this class implements {@link Application.ActivityLifecycleCallbacks} but no call to
 * {@code registerActivityLifecycleCallbacks(this)} is visible in this file — confirm it is
 * registered elsewhere, otherwise the callbacks below never fire.
 * <p/>
 * Created by Kevin Lai on 2016-03-16.
 */
public class ULApplication extends Application implements Application.ActivityLifecycleCallbacks {

    private static final String TAG = ULApplication.class.getSimpleName();

    private static ULApplication instance;
    private static ULApiRequestDispatcher apiRequestDispatcher;
    private static ULPersistentStorage persistentStorage;
    private static DisplayMetrics displayMetrics;

    // Simple-class-names of activities currently paused; used to tell a
    // config-change/activity-switch apart from a true app backgrounding.
    private Stack<String> pauseStack;

    /**
     * Default constructor.
     */
    public ULApplication() {
        super();
        instance = this;
        displayMetrics = new DisplayMetrics();
        // Bug fix: pauseStack was never initialized anywhere in this class, so
        // the first onActivityPaused()/onActivityStarted() callback would have
        // thrown a NullPointerException.
        pauseStack = new Stack<>();
    }

    public static ULApplication getInstance() {
        if (instance != null) {
            return instance;
        } else {
            throw new NullPointerException("Instance not ready. Please ensure that your application is referenced in your manifest.");
        }
    }

    /**
     * Set the application instance. Reserved for unit testing.
     *
     * @param instance An instance of {@link ULApplication}.
     */
    public static void setInstance(ULApplication instance) {
        ULApplication.instance = instance;
    }

    /**
     * Get the API request dispatcher instance.
     *
     * @return An instance of {@link ULApiRequestDispatcher}.
     */
    public static ULApiRequestDispatcher getApiRequestDispatcher() {
        // Lazily bound to the Slack implementation on first use.
        if (apiRequestDispatcher == null) {
            apiRequestDispatcher = SlackApiRequestDispatcher.getInstance();
        }
        return apiRequestDispatcher;
    }

    /**
     * Get the persistent storage instance.
     *
     * @return An instance of {@link ULPersistentStorage}.
     */
    public static ULPersistentStorage getPersistentStorage() {
        // Lazily bound to the Slack implementation on first use.
        if (persistentStorage == null) {
            persistentStorage = SlackPersistentStorage.getInstance();
        }
        return persistentStorage;
    }

    /**
     * Get a string resource. This is a convenience method that leverages the application context,
     * which is never null when the app is running.
     *
     * @param resourceId The desired string's resource ID.
     * @return A string for the specified resource ID. {@code null} if none exists.
     */
    public static String getStringResource(int resourceId) {
        return instance.getString(resourceId);
    }

    /**
     * Get a color resource. This is a convenience method that leverages the application context,
     * which is never null when the app is running.
     *
     * @param resourceId The desired color's resource ID.
     * @return A single color value in the form 0xAARRGGBB.
     */
    public static int getColorResource(int resourceId) {
        return instance.getResources().getColor(resourceId);
    }

    /**
     * Get a dimension resource. This is a convenience method that leverages the application context,
     * which is never null when the app is running.
     *
     * @param resourceId The desired dimension's resource ID.
     * @return A dimension for the specified resource ID in pixels. {@code null} if none exists.
     */
    public static int getDimenResource(int resourceId) {
        return instance.getResources().getDimensionPixelSize(resourceId);
    }

    /**
     * Get the current app version.
     *
     * @return A string representation of the current app version, or {@code null} if the
     *         package info cannot be resolved.
     */
    public static String getAppVersion() {
        try {
            PackageInfo pInfo = instance.getPackageManager().getPackageInfo(instance.getPackageName(), 0);
            return pInfo.versionName;
        } catch (PackageManager.NameNotFoundException e) {
            return null;
        }
    }

    /**
     * Get the user agent string with the app name, app version, device model, and device OS version.
     *
     * @return The user agent string; empty if the package info cannot be resolved.
     */
    public static String getUserAgent() {
        String userAgent = "";
        try {
            PackageManager manager = instance.getPackageManager();
            PackageInfo info = manager.getPackageInfo(instance.getPackageName(), 0);
            userAgent = String.format("%s/%s (%s; Android %s)",
                    instance.getResources().getString(R.string.app_name),
                    info.versionName,
                    Build.MODEL,
                    Build.VERSION.RELEASE);
        } catch (PackageManager.NameNotFoundException e) {
            ULApplicationLogger.e(TAG, e.getMessage());
        }
        return userAgent;
    }

    /**
     * Check whether the current build is a debug build.
     *
     * @return {@code true} if the build is a debug build; {@code false} otherwise.
     */
    public static boolean isDebugBuild() {
        boolean debugBuild = false;
        try {
            PackageInfo info = instance.getPackageManager().getPackageInfo(instance.getPackageName(), 0);
            int flags = info.applicationInfo.flags;
            debugBuild = (flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0;
        } catch (Exception e) {
            ULApplicationLogger.e(TAG, e.getMessage());
        }
        return debugBuild;
    }

    /**
     * Check if the device has a network connection.
     *
     * @return {@code true} if the device has a network connection; {@code false} otherwise.
     */
    public static boolean isNetworkAvailable() {
        ConnectivityManager connectivityManager =
                (ConnectivityManager) instance.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
        return activeNetworkInfo != null && activeNetworkInfo.isConnected();
    }

    /**
     * Check if the device has telephony and can make phone calls.
     *
     * @return {@code true} if the device can make phone calls; {@code false} otherwise.
     */
    public static boolean isTelephonyAvailable() {
        TelephonyManager telephonyManager =
                (TelephonyManager) instance.getSystemService(Context.TELEPHONY_SERVICE);
        PackageManager packageManager = instance.getPackageManager();
        List<ResolveInfo> resolveInfos =
                packageManager.queryIntentActivities(new Intent(Intent.ACTION_DIAL), 0);
        // Requires phone hardware, the telephony feature, and a dialer app.
        return telephonyManager.getPhoneType() != TelephonyManager.PHONE_TYPE_NONE
                && packageManager.hasSystemFeature(PackageManager.FEATURE_TELEPHONY)
                && resolveInfos != null
                && resolveInfos.size() > 0;
    }

    /**
     * Gets the device's screen density.
     *
     * @return The screen density as a float.
     * Standard values are: 0.75 for ldpi, 1.0 for mdpi, 1.5 for hdpi, 2.0 for xhdpi, 3.0 for xxhdpi, 4.0 for xxxhdpi
     */
    public static float getScreenDensity() {
        getDefaultDisplay().getMetrics(displayMetrics);
        return displayMetrics.density;
    }

    /**
     * Get the device's current screen width.
     *
     * @return The screen width in pixels.
     */
    public static int getScreenWidth() {
        getDefaultDisplay().getMetrics(displayMetrics);
        return displayMetrics.widthPixels;
    }

    /**
     * Get the device's current screen height.
     *
     * @return The screen height in pixels.
     */
    public static int getScreenHeight() {
        getDefaultDisplay().getMetrics(displayMetrics);
        return displayMetrics.heightPixels;
    }

    /**
     * Helper method to get the {@link android.view.WindowManager}'s {@link android.view.Display}.
     *
     * @return The display that this window manager is managing.
     */
    private static Display getDefaultDisplay() {
        return ((WindowManager) instance.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
    }

    /**
     * Removes the activity from its location in the onPause stack.
     *
     * @param activityName The name of the activity to remove.
     */
    private void removeActivityFromStack(String activityName) {
        // Removes ALL occurrences; the index is decremented after each removal
        // because the following elements shift left.
        for (int activityNameIndex = 0; activityNameIndex < pauseStack.size(); activityNameIndex++) {
            String stackItemName = pauseStack.get(activityNameIndex);
            if (stackItemName.equalsIgnoreCase(activityName)) {
                pauseStack.remove(activityNameIndex);
                activityNameIndex--;
            }
        }
    }

    //-- Start implementation of Application.ActivityLifecycleCallbacks interface --//

    @Override
    public void onActivityCreated(Activity activity, Bundle savedInstanceState) {
    }

    @Override
    public void onActivityStarted(Activity activity) {
        // An empty pause stack on start means the app is coming to the
        // foreground (no other activity of ours was merely switching).
        if (pauseStack.isEmpty()) {
            // Run any logic that should be tied to backgrounding and foregrounding the app here.
        } else {
            removeActivityFromStack(activity.getClass().getSimpleName());
        }
    }

    @Override
    public void onActivityResumed(Activity activity) {
    }

    @Override
    public void onActivityPaused(Activity activity) {
        pauseStack.push(activity.getClass().getSimpleName());
    }

    @Override
    public void onActivityStopped(Activity activity) {
        removeActivityFromStack(activity.getClass().getSimpleName());
    }

    @Override
    public void onActivitySaveInstanceState(Activity activity, Bundle outState) {
    }

    @Override
    public void onActivityDestroyed(Activity activity) {
    }

    //-- End implementation of Application.ActivityLifecycleCallbacks interface --//
}
/*
 * Copyright (C) 2015 Karumi.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.karumi.dexter;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import com.karumi.dexter.listener.PermissionDeniedResponse;
import com.karumi.dexter.listener.PermissionGrantedResponse;
import com.karumi.dexter.listener.PermissionRequest;
import com.karumi.dexter.listener.multi.EmptyMultiplePermissionsListener;
import com.karumi.dexter.listener.multi.MultiplePermissionsListener;
import com.karumi.dexter.listener.single.PermissionListener;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Inner implementation of a dexter instance holding the state of the permissions request
 */
final class DexterInstance {

  private static final int PERMISSIONS_REQUEST_CODE = 42;
  // Null-object listener installed whenever no request is in flight.
  private static final MultiplePermissionsListener EMPTY_LISTENER =
      new EmptyMultiplePermissionsListener();

  private final Context context;
  private final AndroidPermissionService androidPermissionService;
  private final IntentProvider intentProvider;
  // Permissions still awaiting a grant/deny decision for the current request.
  private final Collection<String> pendingPermissions;
  private final MultiplePermissionsReport multiplePermissionsReport;
  // True while a request is in flight; guards against concurrent requests.
  private final AtomicBoolean isRequestingPermission;
  // True once the user accepted the rationale dialog for this request.
  private final AtomicBoolean rationaleAccepted;
  // Guards reads/removals on pendingPermissions across activity callbacks.
  private final Object pendingPermissionsMutex = new Object();

  // The transparent DexterActivity once it is ready; null before/after.
  private Activity activity;
  private MultiplePermissionsListener listener = EMPTY_LISTENER;

  DexterInstance(Context context, AndroidPermissionService androidPermissionService,
      IntentProvider intentProvider) {
    // Hold the application context, not the passed-in one, to avoid leaks.
    this.context = context.getApplicationContext();
    this.androidPermissionService = androidPermissionService;
    this.intentProvider = intentProvider;
    this.pendingPermissions = new TreeSet<>();
    this.multiplePermissionsReport = new MultiplePermissionsReport();
    this.isRequestingPermission = new AtomicBoolean();
    this.rationaleAccepted = new AtomicBoolean();
  }

  /**
   * Checks the state of a specific permission reporting it when ready to the listener.
   * .
   *
   * @param listener The class that will be reported when the state of the permission is ready
   * @param permission One of the values found in {@link android.Manifest.permission}
   * @param thread thread the Listener methods will be called on
   */
  void checkPermission(PermissionListener listener, String permission, Thread thread) {
    checkSinglePermission(listener, permission, thread);
  }

  /**
   * Checks the state of a collection of permissions reporting their state to the listener when all
   * of them are resolved
   *
   * @param listener The class that will be reported when the state of all the permissions is ready
   * @param permissions Array of values found in {@link android.Manifest.permission}
   * @param thread thread the Listener methods will be called on
   */
  void checkPermissions(MultiplePermissionsListener listener, Collection<String> permissions,
      Thread thread) {
    checkMultiplePermissions(listener, permissions, thread);
  }

  /**
   * Check if there is a permission pending to be confirmed by the user and restarts the
   * request for permission process.
   */
  void continuePendingRequestIfPossible(PermissionListener listener, Thread thread) {
    // Adapt the single-permission listener and reuse the multi-permission path.
    MultiplePermissionsListenerToPermissionListenerAdapter adapter =
        new MultiplePermissionsListenerToPermissionListenerAdapter(listener);
    continuePendingRequestsIfPossible(adapter, thread);
  }

  /**
   * Check if there are some permissions pending to be confirmed by the user and restarts the
   * request for permission process.
   */
  void continuePendingRequestsIfPossible(MultiplePermissionsListener listener, Thread thread) {
    if (!pendingPermissions.isEmpty()) {
      this.listener = new MultiplePermissionListenerThreadDecorator(listener, thread);
      // Only re-drive the flow when no rationale is currently being shown;
      // otherwise onContinue/onCancel callbacks will drive it.
      if (!rationaleAccepted.get()) {
        onActivityReady(activity);
      }
    }
  }

  /**
   * Method called whenever the inner activity has been created or restarted and is ready to be
   * used.
   */
  void onActivityReady(Activity activity) {
    this.activity = activity;
    PermissionStates permissionStates = null;
    synchronized (pendingPermissionsMutex) {
      // activity may be null when resumed before the transparent activity
      // exists (see continuePendingRequestsIfPossible); skip in that case.
      if (activity != null) {
        permissionStates = getPermissionStates(pendingPermissions);
      }
    }

    if (permissionStates != null) {
      handleDeniedPermissions(permissionStates.getDeniedPermissions());
      updatePermissionsAsGranted(permissionStates.getGrantedPermissions());
    }
  }

  /**
   * Method called whenever the permissions has been granted by the user
   */
  void onPermissionRequestGranted(Collection<String> permissions) {
    updatePermissionsAsGranted(permissions);
  }

  /**
   * Method called whenever the permissions has been denied by the user
   */
  void onPermissionRequestDenied(Collection<String> permissions) {
    updatePermissionsAsDenied(permissions);
  }

  /**
   * Method called when the user has been informed with a rationale and agrees to continue
   * with the permission request process
   */
  void onContinuePermissionRequest() {
    rationaleAccepted.set(true);
    requestPermissionsToSystem(pendingPermissions);
  }

  /**
   * Method called when the user has been informed with a rationale and decides to cancel
   * the permission request process
   */
  void onCancelPermissionRequest() {
    rationaleAccepted.set(false);
    // Everything still pending is reported as denied.
    updatePermissionsAsDenied(pendingPermissions);
  }

  /**
   * Is a request for permission currently ongoing?
   * If so, state of permissions must not be checked until the request is resolved
   * or it will cause an exception
   */
  boolean isRequestOngoing() {
    return isRequestingPermission.get();
  }

  /**
   * Starts the native request permissions process
   */
  void requestPermissionsToSystem(Collection<String> permissions) {
    androidPermissionService.requestPermissions(activity,
        permissions.toArray(new String[permissions.size()]), PERMISSIONS_REQUEST_CODE);
  }

  // Partitions the given permissions into granted/denied according to the
  // system's current state. Unknown states are treated as granted (default
  // branch) — matching PackageManager's two-valued permission model.
  private PermissionStates getPermissionStates(Collection<String> pendingPermissions) {
    PermissionStates permissionStates = new PermissionStates();
    for (String permission : pendingPermissions) {
      int permissionState = androidPermissionService.checkSelfPermission(activity, permission);
      switch (permissionState) {
        case PackageManager.PERMISSION_DENIED:
          permissionStates.addDeniedPermission(permission);
          break;
        case PackageManager.PERMISSION_GRANTED:
        default:
          permissionStates.addGrantedPermission(permission);
          break;
      }
    }
    return permissionStates;
  }

  // Launches the transparent DexterActivity that hosts the system dialogs.
  // NEW_TASK is required because we start it from a non-activity context.
  private void startTransparentActivityIfNeeded() {
    Intent intent = intentProvider.get(context, DexterActivity.class);
    intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    context.startActivity(intent);
  }

  private void handleDeniedPermissions(Collection<String> permissions) {
    if (permissions.isEmpty()) {
      return;
    }

    // Split out the permissions for which Android asks us to show a rationale.
    List<PermissionRequest> shouldShowRequestRationalePermissions = new LinkedList<>();

    for (String permission : permissions) {
      if (androidPermissionService.shouldShowRequestPermissionRationale(activity, permission)) {
        shouldShowRequestRationalePermissions.add(new PermissionRequest(permission));
      }
    }

    if (shouldShowRequestRationalePermissions.isEmpty()) {
      requestPermissionsToSystem(permissions);
    } else if (!rationaleAccepted.get()) {
      // Rationale not yet accepted: hand control to the client listener with a
      // token it must use to continue or cancel the request.
      PermissionRationaleToken permissionToken = new PermissionRationaleToken(this);
      listener.onPermissionRationaleShouldBeShown(shouldShowRequestRationalePermissions,
          permissionToken);
    }
  }

  private void updatePermissionsAsGranted(Collection<String> permissions) {
    for (String permission : permissions) {
      PermissionGrantedResponse response = PermissionGrantedResponse.from(permission);
      multiplePermissionsReport.addGrantedPermissionResponse(response);
    }
    onPermissionsChecked(permissions);
  }

  private void updatePermissionsAsDenied(Collection<String> permissions) {
    for (String permission : permissions) {
      // "permanently denied" == system will no longer show a rationale for it.
      PermissionDeniedResponse response = PermissionDeniedResponse.from(permission,
          !androidPermissionService.shouldShowRequestPermissionRationale(activity, permission));
      multiplePermissionsReport.addDeniedPermissionResponse(response);
    }
    onPermissionsChecked(permissions);
  }

  // Removes resolved permissions from the pending set; when the set drains,
  // tears down the request (finish activity, reset flags, notify listener).
  // NOTE(review): the initial isEmpty() check happens outside the mutex —
  // presumably safe because callbacks arrive on the main thread; confirm.
  private void onPermissionsChecked(Collection<String> permissions) {
    if (pendingPermissions.isEmpty()) {
      return;
    }

    synchronized (pendingPermissionsMutex) {
      pendingPermissions.removeAll(permissions);
      if (pendingPermissions.isEmpty()) {
        // Reset all request state BEFORE notifying, so the listener may start
        // a new Dexter request from its callback.
        activity.finish();
        activity = null;
        isRequestingPermission.set(false);
        rationaleAccepted.set(false);
        MultiplePermissionsListener currentListener = listener;
        listener = EMPTY_LISTENER;
        currentListener.onPermissionsChecked(multiplePermissionsReport);
      }
    }
  }

  // Atomically claims the "requesting" flag; throws if already claimed.
  private void checkNoDexterRequestOngoing() {
    if (isRequestingPermission.getAndSet(true)) {
      throw new IllegalStateException("Only one Dexter request at a time is allowed");
    }
  }

  private void checkRequestSomePermission(Collection<String> permissions) {
    if (permissions.isEmpty()) {
      throw new IllegalStateException("Dexter has to be called with at least one permission");
    }
  }

  private void checkSinglePermission(PermissionListener listener, String permission,
      Thread thread) {
    MultiplePermissionsListener adapter =
        new MultiplePermissionsListenerToPermissionListenerAdapter(listener);
    checkMultiplePermissions(adapter, Collections.singleton(permission), thread);
  }

  // Entry point of every request. Note: Thread here is Dexter's own Thread
  // abstraction (it has loop()), not java.lang.Thread.
  private void checkMultiplePermissions(MultiplePermissionsListener listener,
      Collection<String> permissions, Thread thread) {
    checkNoDexterRequestOngoing();
    checkRequestSomePermission(permissions);

    pendingPermissions.clear();
    pendingPermissions.addAll(permissions);
    multiplePermissionsReport.clear();
    this.listener = new MultiplePermissionListenerThreadDecorator(listener, thread);

    startTransparentActivityIfNeeded();
    thread.loop();
  }

  // Simple granted/denied partition of a set of permissions.
  private final class PermissionStates {
    private final Collection<String> deniedPermissions = new LinkedList<>();
    private final Collection<String> grantedPermissions = new LinkedList<>();

    private void addDeniedPermission(String permission) {
      deniedPermissions.add(permission);
    }

    private void addGrantedPermission(String permission) {
      grantedPermissions.add(permission);
    }

    private Collection<String> getDeniedPermissions() {
      return deniedPermissions;
    }

    private Collection<String> getGrantedPermissions() {
      return grantedPermissions;
    }
  }
}
/* * Copyright 2015 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.cellbase.lib.impl; import com.mongodb.BulkWriteException; import com.mongodb.MongoClient; import com.mongodb.QueryBuilder; import com.mongodb.bulk.BulkWriteResult; import com.mongodb.client.model.Filters; import com.mongodb.client.model.Projections; import org.bson.Document; import org.bson.conversions.Bson; import org.opencb.biodata.models.core.Region; import org.opencb.biodata.models.variant.Variant; import org.opencb.biodata.models.variant.avro.Score; import org.opencb.cellbase.core.api.VariantDBAdaptor; import org.opencb.cellbase.lib.MongoDBCollectionConfiguration; import org.opencb.cellbase.lib.VariantMongoIterator; import org.opencb.commons.datastore.core.Query; import org.opencb.commons.datastore.core.QueryOptions; import org.opencb.commons.datastore.core.QueryResult; import org.opencb.commons.datastore.mongodb.MongoDBCollection; import org.opencb.commons.datastore.mongodb.MongoDataStore; import java.util.*; import java.util.function.Consumer; import java.util.regex.Pattern; /** * Created by imedina on 26/11/15. 
*/ public class VariantMongoDBAdaptor extends MongoDBAdaptor implements VariantDBAdaptor<Variant> { private static final String POP_FREQUENCIES_FIELD = "annotation.populationFrequencies"; private static final String ANNOTATION_FIELD = "annotation"; private static final float DECIMAL_RESOLUTION = 100f; private static final String ENSEMBL_GENE_ID_PATTERN = "ENSG00"; private static final String ENSEMBL_TRANSCRIPT_ID_PATTERN = "ENST00"; private MongoDBCollection caddDBCollection; public VariantMongoDBAdaptor(String species, String assembly, MongoDataStore mongoDataStore) { super(species, assembly, mongoDataStore); mongoDBCollection = mongoDataStore.getCollection("variation"); caddDBCollection = mongoDataStore.getCollection("variation_functional_score"); logger.debug("VariationMongoDBAdaptor: in 'constructor'"); } @Override public QueryResult startsWith(String id, QueryOptions options) { Bson regex = Filters.regex("ids", Pattern.compile("^" + id)); Bson include = Projections.include("ids", "chromosome", "start", "end"); return mongoDBCollection.find(regex, include, options); } @Override public QueryResult<Variant> next(Query query, QueryOptions options) { return null; } @Override public QueryResult nativeNext(Query query, QueryOptions options) { return null; } @Override public QueryResult getIntervalFrequencies(Query query, int intervalSize, QueryOptions options) { if (query.getString(QueryParams.REGION.key()) != null) { Region region = Region.parseRegion(query.getString(QueryParams.REGION.key())); Bson bsonDocument = parseQuery(query); return getIntervalFrequencies(bsonDocument, region, intervalSize, options); } return null; } @Override public QueryResult<Long> update(List objectList, String field, String[] innerFields) { QueryResult<Long> nLoadedObjects = null; switch (field) { case POP_FREQUENCIES_FIELD: nLoadedObjects = updatePopulationFrequencies((List<Document>) objectList); break; case ANNOTATION_FIELD: nLoadedObjects = updateAnnotation((List<Document>) 
objectList, innerFields); break; default: logger.error("Invalid field {}: no action implemented for updating this field.", field); break; } return nLoadedObjects; } @Override public QueryResult<Long> count(Query query) { Bson document = parseQuery(query); return mongoDBCollection.count(document); } @Override public QueryResult distinct(Query query, String field) { Bson document = parseQuery(query); return mongoDBCollection.distinct(field, document); } @Override public QueryResult stats(Query query) { return null; } @Override public QueryResult<Variant> get(Query query, QueryOptions options) { Bson bson = parseQuery(query); // options.put(MongoDBCollection.SKIP_COUNT, true); // FIXME: patch to exclude annotation.additionalAttributes from the results - restore the call to the common // FIXME: addPrivateExcludeOptions as soon as the variation collection is updated with the new form of the // FIXME: additionalAttributes field options = addVariantPrivateExcludeOptions(options); // options = addPrivateExcludeOptions(options); logger.debug("query: {}", bson.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()) .toJson()); return mongoDBCollection.find(bson, null, Variant.class, options); } // FIXME: patch to exclude annotation.additionalAttributes from the results - to remove as soon as the variation // FIXME: collection is updated with the new form of the additionalAttributes field protected QueryOptions addVariantPrivateExcludeOptions(QueryOptions options) { if (options != null) { if (options.get("exclude") == null) { options.put("exclude", "_id,_chunkIds,annotation.additionalAttributes"); } else { String exclude = options.getString("exclude"); options.put("exclude", exclude + ",_id,_chunkIds,annotation.additionalAttributes"); } } else { options = new QueryOptions("exclude", "_id,_chunkIds,annotation.additionalAttributes"); } return options; } @Override public QueryResult nativeGet(Query query, QueryOptions options) { Bson bson = parseQuery(query); // 
options.put(MongoDBCollection.SKIP_COUNT, true); logger.debug("query: {}", bson.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()) .toJson()); return mongoDBCollection.find(bson, options); } @Override public Iterator<Variant> iterator(Query query, QueryOptions options) { Bson bson = parseQuery(query); options = addPrivateExcludeOptions(options); return new VariantMongoIterator(mongoDBCollection.nativeQuery().find(bson, options).iterator()); } @Override public Iterator nativeIterator(Query query, QueryOptions options) { Bson bson = parseQuery(query); return mongoDBCollection.nativeQuery().find(bson, options).iterator(); } @Override public void forEach(Query query, Consumer<? super Object> action, QueryOptions options) { Objects.requireNonNull(action); Iterator iterator = nativeIterator(query, options); while (iterator.hasNext()) { action.accept(iterator.next()); } } @Override public QueryResult rank(Query query, String field, int numResults, boolean asc) { return null; } @Override public QueryResult groupBy(Query query, String field, QueryOptions options) { Bson bsonQuery = parseQuery(query); return groupBy(bsonQuery, field, "name", options); } @Override public QueryResult groupBy(Query query, List<String> fields, QueryOptions options) { Bson bsonQuery = parseQuery(query); return groupBy(bsonQuery, fields, "name", options); } private Bson parseQuery(Query query) { List<Bson> andBsonList = new ArrayList<>(); createRegionQuery(query, VariantMongoDBAdaptor.QueryParams.REGION.key(), MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE, andBsonList); createOrQuery(query, VariantMongoDBAdaptor.QueryParams.ID.key(), "id", andBsonList); createOrQuery(query, QueryParams.CHROMOSOME.key(), "chromosome", andBsonList); createImprecisePositionQuery(query, QueryParams.CI_START_LEFT.key(), QueryParams.CI_START_RIGHT.key(), "sv.ciStartLeft", "sv.ciStartRight", andBsonList); createImprecisePositionQuery(query, QueryParams.CI_END_LEFT.key(), 
QueryParams.CI_END_RIGHT.key(), "sv.ciEndLeft", "sv.ciEndRight", andBsonList); createOrQuery(query, QueryParams.START.key(), "start", andBsonList, QueryValueType.INTEGER); // createOrQuery(query, QueryParams.REFERENCE.key(), "reference", andBsonList); if (query.containsKey(QueryParams.REFERENCE.key())) { createOrQuery(query.getAsStringList(QueryParams.REFERENCE.key()), "reference", andBsonList); } if (query.containsKey(QueryParams.ALTERNATE.key())) { createOrQuery(query.getAsStringList(QueryParams.ALTERNATE.key()), "alternate", andBsonList); } // createOrQuery(query, QueryParams.ALTERNATE.key(), "alternate", andBsonList); createOrQuery(query, VariantMongoDBAdaptor.QueryParams.CONSEQUENCE_TYPE.key(), "annotation.consequenceTypes.sequenceOntologyTerms.name", andBsonList); // createOrQuery(query, VariantMongoDBAdaptor.QueryParams.GENE.key(), "annotation.consequenceTypes.ensemblGeneId", // andBsonList); createGeneOrQuery(query, VariantMongoDBAdaptor.QueryParams.GENE.key(), andBsonList); // createOrQuery(query, VariantMongoDBAdaptor.QueryParams.XREFS.key(), "transcripts.xrefs.id", andBsonList); if (andBsonList.size() > 0) { return Filters.and(andBsonList); } else { return new Document(); } } private void createImprecisePositionQuery(Query query, String leftQueryParam, String rightQueryParam, String leftLimitMongoField, String righLimitMongoField, List<Bson> andBsonList) { if (query != null && query.getString(leftQueryParam) != null && !query.getString(leftQueryParam).isEmpty() && query.getString(rightQueryParam) != null && !query.getString(rightQueryParam).isEmpty()) { int leftQueryValue = query.getInt(leftQueryParam); int rightQueryValue = query.getInt(rightQueryParam); andBsonList.add(Filters.lte(leftLimitMongoField, rightQueryValue)); andBsonList.add(Filters.gte(righLimitMongoField, leftQueryValue)); } } // private Bson getPositionWithinIntervalQuery(int value, String leftLimitMongoField, // String righLimitMongoField) { // List<Bson> andBsonList = new 
ArrayList<>(2); // andBsonList.add(Filters.lte(leftLimitMongoField, value)); // andBsonList.add(Filters.gte(righLimitMongoField, value)); // // return Filters.and(andBsonList); // } private void createGeneOrQuery(Query query, String queryParam, List<Bson> andBsonList) { if (query != null) { List<String> geneList = query.getAsStringList(queryParam); if (geneList != null && !geneList.isEmpty()) { if (geneList.size() == 1) { andBsonList.add(getGeneQuery(geneList.get(0))); } else { List<Bson> orBsonList = new ArrayList<>(geneList.size()); for (String geneId : geneList) { orBsonList.add(getGeneQuery(geneId)); } andBsonList.add(Filters.or(orBsonList)); } } } } private Bson getGeneQuery(String geneId) { // List<Bson> orBsonList = new ArrayList<>(3); // orBsonList.add(Filters.eq("annotation.consequenceTypes.geneName", geneId)); // orBsonList.add(Filters.eq("annotation.consequenceTypes.ensemblGeneId", geneId)); // orBsonList.add(Filters.eq("annotation.consequenceTypes.ensemblTranscriptId", geneId)); // For some reason Mongo does not deal properly with OR queries and indexes. It is extremely slow to perform // the commented query above. 
On the contrary this query below provides instant results if (geneId.startsWith(ENSEMBL_GENE_ID_PATTERN)) { return Filters.eq("annotation.consequenceTypes.ensemblGeneId", geneId); } else if (geneId.startsWith(ENSEMBL_TRANSCRIPT_ID_PATTERN)) { return Filters.eq("annotation.consequenceTypes.ensemblTranscriptId", geneId); } else { return Filters.eq("annotation.consequenceTypes.geneName", geneId); } } private QueryResult<Long> updateAnnotation(List<Document> variantDocumentList, String[] innerFields) { List<Bson> queries = new ArrayList<>(variantDocumentList.size()); List<Bson> updates = new ArrayList<>(variantDocumentList.size()); for (Document variantDBObject : variantDocumentList) { Document annotationDBObject = (Document) variantDBObject.get(ANNOTATION_FIELD); Document toOverwrite = new Document(); if (innerFields != null & innerFields.length > 0) { for (String field : innerFields) { if (annotationDBObject.get(field) != null) { toOverwrite.put(ANNOTATION_FIELD + "." + field, annotationDBObject.get(field)); } } } else { toOverwrite.put(ANNOTATION_FIELD, annotationDBObject); } Document update = new Document().append("$set", toOverwrite); updates.add(update); String chunkId = getChunkIdPrefix((String) variantDBObject.get("chromosome"), (int) variantDBObject.get("start"), MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE); queries.add(new Document("_chunkIds", chunkId) .append("chromosome", variantDBObject.get("chromosome")) .append("start", variantDBObject.get("start")) // .append("end", variantDBObject.get("end")) .append("reference", variantDBObject.get("reference")) .append("alternate", variantDBObject.get("alternate"))); } QueryResult<BulkWriteResult> bulkWriteResult; if (!queries.isEmpty()) { logger.info("updating object"); QueryOptions options = new QueryOptions("upsert", false); options.put("multi", false); try { bulkWriteResult = mongoDBCollection.update(queries, updates, options); } catch (BulkWriteException e) { throw e; } logger.info("{} object updated", 
bulkWriteResult.first().getModifiedCount()); QueryResult<Long> longQueryResult = new QueryResult<>(bulkWriteResult.getId(), bulkWriteResult.getDbTime(), bulkWriteResult .getNumResults(), bulkWriteResult.getNumTotalResults(), bulkWriteResult.getWarningMsg(), bulkWriteResult.getErrorMsg(), Collections.singletonList((long) (bulkWriteResult.first().getUpserts().size() + bulkWriteResult.first().getModifiedCount()))); return longQueryResult; } logger.info("no object updated"); return null; } private QueryResult<Long> updatePopulationFrequencies(List<Document> variantDocumentList) { List<Bson> queries = new ArrayList<>(variantDocumentList.size()); List<Bson> updates = new ArrayList<>(variantDocumentList.size()); // QueryResult<Long> longQueryResult = null; for (Document variantDBObject : variantDocumentList) { Document annotationDBObject = (Document) variantDBObject.get(ANNOTATION_FIELD); Document push = new Document(POP_FREQUENCIES_FIELD, annotationDBObject.get("populationFrequencies")); // Remove annotation object from the DBObject so that push and setOnInsert do not update the same fields: // i.e. 
annotation.populationFrequencies and annotation variantDBObject.remove(ANNOTATION_FIELD); addChunkId(variantDBObject); Document update = new Document() .append("$pushAll", push) .append("$setOnInsert", variantDBObject); updates.add(update); // String chunkId = getChunkIdPrefix((String) variantDBObject.get("chromosome"), // (int) variantDBObject.get("start"), MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE); // queries.add(new Document("_chunkIds", chunkId) // .append("chromosome", variantDBObject.get("chromosome")) queries.add(new Document("chromosome", variantDBObject.get("chromosome")) .append("start", variantDBObject.get("start")) // .append("end", variantDBObject.get("end")) .append("reference", variantDBObject.get("reference")) .append("alternate", variantDBObject.get("alternate"))); } QueryResult<BulkWriteResult> bulkWriteResult; if (!queries.isEmpty()) { logger.info("updating object"); QueryOptions options = new QueryOptions("upsert", true); options.put("multi", false); try { bulkWriteResult = mongoDBCollection.update(queries, updates, options); } catch (BulkWriteException e) { throw e; } logger.info("{} object updated", bulkWriteResult.first().getUpserts().size() + bulkWriteResult.first().getModifiedCount()); QueryResult<Long> longQueryResult = new QueryResult<>(bulkWriteResult.getId(), bulkWriteResult.getDbTime(), bulkWriteResult .getNumResults(), bulkWriteResult.getNumTotalResults(), bulkWriteResult.getWarningMsg(), bulkWriteResult.getErrorMsg(), Collections.singletonList((long) (bulkWriteResult.first().getUpserts().size() + bulkWriteResult.first().getModifiedCount()))); // return bulkWriteResult.first().getUpserts().size() + bulkWriteResult.first().getModifiedCount(); return longQueryResult; } logger.info("no object updated"); return null; } // Method copied from MongoDBCellbaseLoader. In a near future only this one will stay. 
Insert work currently done // by MongoDBCellbaseLoader must be replaced by an appropriate method in this adaptor private void addChunkId(Document dbObject) { List<String> chunkIds = new ArrayList<>(); int chunkStart = (Integer) dbObject.get("start") / MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE; int chunkEnd = (Integer) dbObject.get("end") / MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE; String chunkIdSuffix = MongoDBCollectionConfiguration.VARIATION_CHUNK_SIZE / 1000 + "k"; for (int i = chunkStart; i <= chunkEnd; i++) { if (dbObject.containsKey("chromosome")) { chunkIds.add(dbObject.get("chromosome") + "_" + i + "_" + chunkIdSuffix); } else { chunkIds.add(dbObject.get("sequenceName") + "_" + i + "_" + chunkIdSuffix); } } dbObject.put("_chunkIds", chunkIds); } @Override public QueryResult<Score> getFunctionalScoreVariant(Variant variant, QueryOptions queryOptions) { String chromosome = variant.getChromosome(); int position = variant.getStart(); String reference = variant.getReference(); String alternate = variant.getAlternate(); String chunkId = getChunkIdPrefix(chromosome, position, MongoDBCollectionConfiguration.VARIATION_FUNCTIONAL_SCORE_CHUNK_SIZE); QueryBuilder builder = QueryBuilder.start("_chunkIds").is(chunkId); // .and("chromosome").is(chromosome) // .and("start").is(position); // System.out.println(chunkId); QueryResult result = executeQuery(chromosome + "_" + position + "_" + reference + "_" + alternate, new Document(builder.get().toMap()), queryOptions, caddDBCollection); // System.out.println("result = " + result); List<Score> scores = new ArrayList<>(); for (Object object : result.getResult()) { // System.out.println("object = " + object); Document dbObject = (Document) object; int chunkStart = dbObject.getInteger("start"); int chunkEnd = dbObject.getInteger("end"); // CADD positions are not continuous through the whole chromosome. 
Several documents may be associated with // the same chunk id: we have to be sure that current document contains queried position. Only two documents // will contain queried position - one for raw and one for scaled values if (position >= chunkStart && position <= chunkEnd) { int offset = (position - chunkStart); ArrayList basicDBList = dbObject.get("values", ArrayList.class); // long l1 = 0L; // TODO: delete // try { // TODO: delete long l1 = Long.parseLong(basicDBList.get(offset).toString()); // l1 = (Long) basicDBList.get(offset); // } catch (Exception e) { // TODO: delete // logger.error("problematic variant: {}", variant.toString()); // throw e; // } if (dbObject.getString("source").equalsIgnoreCase("cadd_raw")) { float value = 0f; switch (alternate.toLowerCase()) { case "a": // value = ((short) (l1 >> 48) - 10000) / DECIMAL_RESOLUTION; value = (((short) (l1 >> 48)) / DECIMAL_RESOLUTION) - 10; break; case "c": value = (((short) (l1 >> 32)) / DECIMAL_RESOLUTION) - 10; break; case "g": value = (((short) (l1 >> 16)) / DECIMAL_RESOLUTION) - 10; break; case "t": value = (((short) (l1 >> 0)) / DECIMAL_RESOLUTION) - 10; break; default: break; } scores.add(Score.newBuilder() .setScore(value) .setSource(dbObject.getString("source")) .setDescription(null) // .setDescription("") .build()); } if (dbObject.getString("source").equalsIgnoreCase("cadd_scaled")) { float value = 0f; switch (alternate.toLowerCase()) { case "a": value = ((short) (l1 >> 48)) / DECIMAL_RESOLUTION; break; case "c": value = ((short) (l1 >> 32)) / DECIMAL_RESOLUTION; break; case "g": value = ((short) (l1 >> 16)) / DECIMAL_RESOLUTION; break; case "t": value = ((short) (l1 >> 0)) / DECIMAL_RESOLUTION; break; default: break; } scores.add(Score.newBuilder() .setScore(value) .setSource(dbObject.getString("source")) .setDescription(null) // .setDescription("") .build()); } } } result.setResult(scores); return result; } }
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.dataflow.core.dsl.tck;

import java.util.List;
import java.util.Properties;

import org.junit.jupiter.api.Test;

import org.springframework.cloud.dataflow.core.dsl.AppNode;
import org.springframework.cloud.dataflow.core.dsl.ArgumentNode;
import org.springframework.cloud.dataflow.core.dsl.DSLMessage;
import org.springframework.cloud.dataflow.core.dsl.ParseException;
import org.springframework.cloud.dataflow.core.dsl.SourceDestinationNode;
import org.springframework.cloud.dataflow.core.dsl.StreamNode;

import static org.assertj.core.api.Assertions.assertThat;

// Technology-compatibility-kit style base class for the stream DSL: concrete subclasses supply
// the parser through the two abstract parse(...) methods and inherit every test below.
public abstract class AbstractStreamDslTests {

	// Parse an unnamed stream definition.
	protected abstract StreamNode parse(String streamDefinition);

	// Parse a stream definition under an explicit stream name.
	protected abstract StreamNode parse(String streamName, String streamDefinition);

	// Single app: name, no arguments, and exact start/end offsets.
	@Test
	public void oneApp() {
		StreamNode sn = parse("foo");
		assertThat(sn.getAppNodes()).hasSize(1);
		AppNode appNode = sn.getApp("foo");
		assertThat(appNode.getName()).isEqualTo("foo");
		assertThat(appNode.getArguments()).hasSize(0);
		assertThat(appNode.getStartPos()).isEqualTo(0);
		assertThat(appNode.getEndPos()).isEqualTo(3);
	}

	@Test
	public void hyphenatedAppName() {
		StreamNode sn = parse("gemfire-cq");
		sn = parse("gemfire-cq");
		assertThat(sn.stringify(true)).isEqualTo("[(AppNode:gemfire-cq:0>10)]");
	}

	// '||' (app list) must not be mixed with channels or with '|'.
	@Test
	public void listApps() {
		checkForParseError(":aaa > fff||bbb", DSLMessage.DONT_USE_DOUBLEPIPE_WITH_CHANNELS, 10);
		checkForParseError("fff||bbb > :zzz", DSLMessage.DONT_USE_DOUBLEPIPE_WITH_CHANNELS, 3);
		checkForParseError("aaa | bbb|| ccc", DSLMessage.DONT_MIX_PIPE_AND_DOUBLEPIPE, 9);
		checkForParseError("aaa || bbb| ccc", DSLMessage.DONT_MIX_PIPE_AND_DOUBLEPIPE, 10);
		StreamNode sn = parse("aaa | filter --expression=#jsonPath(payload,'$.lang')=='en'");
		assertThat("--expression=#jsonPath(payload,'$.lang')=='en'")
				.isEqualTo(sn.getAppNodes().get(1).getArguments()[0].toString());
	}

	@Test
	public void doublePipeEndingArgs() {
		checkForParseError("aaa --bbb=ccc||", DSLMessage.OOD, 15);
		StreamNode sn = parse("aaa --bbb=ccc,");
		assertThat("[(AppNode:aaa --bbb=ccc,)]").isEqualTo(sn.stringify(false));
		checkForParseError("aaa --bbb='ccc'||", DSLMessage.OOD, 17);
		sn = parse("aaa --bbb='ccc'|| bbb");
		assertThat("[(AppNode:aaa --bbb=ccc:0>15)(AppNode:bbb:18>21)]").isEqualTo(sn.stringify(true));
		ArgumentNode argumentNode = sn.getAppNodes().get(0).getArguments()[0];
		assertThat("ccc").isEqualTo(argumentNode.getValue());
	}

	@Test
	public void shortArgValues_2499() {
		// This is the expected result when an argument value is missing:
		checkForParseError("aaa --bbb= --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 11);
		// From AbstractTokenizer.isArgValueIdentifierTerminator these are the 'special chars' that should
		// terminate an argument value if not quoted:
		// "|" ";" "\0" " " "\t" ">" "\r" "\n"
		// (\0 is the sentinel, wouldn't expect that in user data)
		checkForParseError("aaa --bbb=| --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 10);
		checkForParseError("aaa --bbb=; --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 10);
		checkForParseError("aaa --bbb=> --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 10);
		// Not sure the tabs/etc here and handled quite right during tokenization but it does error as expected
		checkForParseError("aaa --bbb= --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 12);
		checkForParseError("aaa --bbb=\t --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 12);
		checkForParseError("aaa --bbb=\n --ccc=ddd", DSLMessage.EXPECTED_ARGUMENT_VALUE, 10);
	}

	// Just to make the testing easier the parser supports stream naming easier.
	@Test
	public void streamNaming() {
		StreamNode sn = parse("mystream = foo");
		assertThat("[mystream = (AppNode:foo:11>14)]").isEqualTo(sn.stringify(true));
		assertThat("mystream").isEqualTo(sn.getName());
	}

	// A stream name may coincide with one of its app names.
	@Test
	public void testStreamNameAsAppName() {
		String streamName = "bar";
		String stream = "bar = foo | bar";
		StreamNode sn = parse(stream);
		assertThat(streamName).isEqualTo(sn.getName());
	}

	// Pipes are used to connect apps
	@Test
	public void twoApps() {
		StreamNode ast = parse("foo | bar");
		assertThat("[(AppNode:foo:0>3)(AppNode:bar:6>9)]").isEqualTo(ast.stringify(true));
	}

	// Apps can be labeled
	@Test
	public void appLabels() {
		StreamNode ast = parse("label: http");
		assertThat("[((Label:label:0>5) AppNode:http:0>11)]").isEqualTo(ast.stringify(true));
	}

	@Test
	public void appLabels3() {
		StreamNode ast = parse("food = http | label3: foo");
		assertThat("[food = (AppNode:http:7>11)((Label:label3:14>20) AppNode:foo:14>25)]")
				.isEqualTo(ast.stringify(true));
		StreamNode sn = parse("http | foo: bar | file");
		assertThat("[(AppNode:http)((Label:foo) AppNode:bar)(AppNode:file)]").isEqualTo(sn.stringify());
		checkForParseError("http | foo: goggle: bar | file", DSLMessage.NO_DOUBLE_LABELS, 12);
		checkForParseError("http | foo :bar | file", DSLMessage.UNEXPECTED_DATA_AFTER_STREAMDEF, 11);
	}

	// Apps can take parameters
	@Test
	public void oneAppWithParam() {
		StreamNode ast = parse("foo --name=value");
		assertThat("[(AppNode:foo --name=value:0>16)]").isEqualTo(ast.stringify(true));
	}

	// Apps can take two parameters
	@Test
	public void oneAppWithTwoParams() {
		StreamNode sn = parse("foo --name=value --x=y");
		List<AppNode> appNodes = sn.getAppNodes();
		assertThat(1).isEqualTo(appNodes.size());
		AppNode mn = appNodes.get(0);
		assertThat("foo").isEqualTo(mn.getName());
		ArgumentNode[] args =
mn.getArguments();
		assertThat(args).isNotNull();
		assertThat(2).isEqualTo(args.length);
		assertThat("name").isEqualTo(args[0].getName());
		assertThat("value").isEqualTo(args[0].getValue());
		assertThat("x").isEqualTo(args[1].getName());
		assertThat("y").isEqualTo(args[1].getValue());
		assertThat("[(AppNode:foo --name=value --x=y:0>22)]").isEqualTo(sn.stringify(true));
	}

	// Argument parsing: quoting ('' escapes a quote), multiple args, and rejection of '--foo = bar'.
	@Test
	public void testParameters() {
		String app = "gemfire-cq --query='Select * from /Stocks where symbol=''VMW''' --regionName=foo --foo=bar";
		StreamNode ast = parse(app);
		AppNode gemfireApp = ast.getApp("gemfire-cq");
		Properties parameters = gemfireApp.getArgumentsAsProperties();
		assertThat(3).isEqualTo(parameters.size());
		assertThat("Select * from /Stocks where symbol='VMW'").isEqualTo(parameters.get("query"));
		assertThat("foo").isEqualTo(parameters.get("regionName"));
		assertThat("bar").isEqualTo(parameters.get("foo"));

		app = "test";
		parameters = parse(app).getApp("test").getArgumentsAsProperties();
		assertThat(0).isEqualTo(parameters.size());

		app = "foo --x=1 --y=two ";
		parameters = parse(app).getApp("foo").getArgumentsAsProperties();
		assertThat(2).isEqualTo(parameters.size());
		assertThat("1").isEqualTo(parameters.get("x"));
		assertThat("two").isEqualTo(parameters.get("y"));

		app = "foo --x=1a2b --y=two ";
		parameters = parse(app).getApp("foo").getArgumentsAsProperties();
		assertThat(2).isEqualTo(parameters.size());
		assertThat("1a2b").isEqualTo(parameters.get("x"));
		assertThat("two").isEqualTo(parameters.get("y"));

		app = "foo --x=2";
		parameters = parse(app).getApp("foo").getArgumentsAsProperties();
		assertThat(1).isEqualTo(parameters.size());
		assertThat("2").isEqualTo(parameters.get("x"));

		app = "--foo = bar";
		try {
			parse(app);
			throw new AssertionError(app + " is invalid. Should throw exception");
		}
		catch (Exception e) {
			// success
		}
	}

	@Test
	public void testInvalidApps() {
		String config = "test | foo--x=13";
		try {
			parse("t", config);
			throw new AssertionError(config + " is invalid. Should throw exception");
		}
		catch (Exception e) {
			// success
		}
	}

	// Taps can reference a labeled app inside a named stream.
	@Test
	public void tapWithLabelReference() {
		parse("mystream = http | filter | group1: transform | group2: transform | file");
		StreamNode ast = parse(":mystream.group1 > file");
		assertThat("[(mystream.group1)>(AppNode:file)]").isEqualTo(ast.stringify());
		ast = parse(":mystream.group2 > file");
		assertThat("[(mystream.group2)>(AppNode:file)]").isEqualTo(ast.stringify());
	}

	@Test
	public void tapWithQualifiedAppReference() {
		parse("mystream = http | foobar | file");
		StreamNode sn = parse(":mystream.foobar > file");
		assertThat("[(mystream.foobar:1>16)>(AppNode:file:19>23)]").isEqualTo(sn.stringify(true));
	}

	@Test
	public void expressions_xd159() {
		StreamNode ast = parse("foo | transform --expression=--payload | bar");
		AppNode mn = ast.getApp("transform");
		Properties props = mn.getArgumentsAsProperties();
		assertThat("--payload").isEqualTo(props.get("expression"));
	}

	@Test
	public void expressions_xd159_2() {
		// need quotes around an argument value with a space in it
		checkForParseError("foo | transform --expression=new StringBuilder(payload).reverse() | bar",
				DSLMessage.UNEXPECTED_DATA, 46);
	}

	@Test
	public void ensureStreamNamesValid_xd1344() {
		// Similar rules to a java identifier but also allowed '-' after the first char
		checkForIllegalStreamName("foo.bar", "http | transform | sink");
		checkForIllegalStreamName("-bar", "http | transform | sink");
		checkForIllegalStreamName(".bar", "http | transform | sink");
		checkForIllegalStreamName("foo-.-bar", "http | transform | sink");
		checkForIllegalStreamName("0foobar", "http | transform | sink");
		checkForIllegalStreamName("foo%bar", "http | transform | sink");
		parse("foo-bar", "http | transform | sink");
		parse("foo_bar", "http | transform | sink");
	}

	// Literal \n and \r inside quoted argument values must be preserved verbatim.
	@Test
	public void parametersContainingNewlineCarriageReturn() {
		StreamNode ast = parse(":producer > foobar --expression='aaa=bbb \n ccc=ddd' > :consumer");
		assertThat("aaa=bbb \n ccc=ddd").isEqualTo(ast.getApp("foobar").getArguments()[0].getValue());
		ast = parse(":producer > foobar --expression='aaa=bbb \r ccc=ddd' > :consumer");
		assertThat("aaa=bbb \r ccc=ddd").isEqualTo(ast.getApp("foobar").getArguments()[0].getValue());
	}

	@Test
	public void expressions_xd159_3() {
		StreamNode ast = parse("foo | transform --expression='new StringBuilder(payload).reverse()' | bar");
		AppNode mn = ast.getApp("transform");
		Properties props = mn.getArgumentsAsProperties();
		assertThat("new StringBuilder(payload).reverse()").isEqualTo(props.get("expression"));
	}

	@Test
	public void testUnbalancedSingleQuotes() {
		checkForParseError("foo | bar --expression='select foo", DSLMessage.NON_TERMINATING_QUOTED_STRING, 23);
	}

	@Test
	public void testUnbalancedDoubleQuotes() {
		checkForParseError("foo | bar --expression=\"select foo", DSLMessage.NON_TERMINATING_DOUBLE_QUOTED_STRING, 23);
	}

	// Quoting/escaping of argument values containing quotes, pipes and semicolons (XD-1613).
	@Test
	public void appArguments_xd1613() {
		StreamNode ast = null;

		// notice no space between the ' and final >
		ast = parse(":producer > transform --expression='payload.toUpperCase()' | filter --expression='payload.length"
				+ "() > 4'> :consumer");
		assertThat("payload.toUpperCase()").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		assertThat("payload.length() > 4").isEqualTo(ast.getApp("filter").getArguments()[0].getValue());

		ast = parse("time | transform --expression='T(org.joda.time.format.DateTimeFormat).forPattern(\"yyyy-MM-dd "
				+ "HH:mm:ss\").parseDateTime(payload)'");
		assertThat(
				"T(org.joda.time.format.DateTimeFormat).forPattern(\"yyyy-MM-dd HH:mm:ss\").parseDateTime(payload)")
				.isEqualTo(ast.getApp("transform").getArguments()[0].getValue());

		// allow for pipe/semicolon if quoted
		ast = parse("http | transform --outputType='text/plain|charset=UTF-8' | log");
		assertThat("text/plain|charset=UTF-8").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());

		ast = parse("http | transform --outputType='text/plain;charset=UTF-8' | log");
		assertThat("text/plain;charset=UTF-8").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());

		// Want to treat all of 'hi'+payload as the argument value
		ast = parse("http | transform --expression='hi'+payload | log");
		assertThat("'hi'+payload").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());

		// Want to treat all of payload+'hi' as the argument value
		ast = parse("http | transform --expression=payload+'hi' | log");
		assertThat("payload+'hi'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());

		// Alternatively, can quote all around it to achieve the same thing
		ast = parse("http | transform --expression='payload+''hi''' | log");
		assertThat("payload+'hi'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | transform --expression='''hi''+payload' | log");
		assertThat("'hi'+payload").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | transform --expression=\"payload+'hi'\" | log");
		assertThat("payload+'hi'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | transform --expression=\"'hi'+payload\" | log");
		assertThat("'hi'+payload").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | transform --expression=payload+'hi'--param2='foobar' | log");
		assertThat("payload+'hi'--param2='foobar'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | transform --expression='hi'+payload--param2='foobar' | log");
		assertThat("'hi'+payload--param2='foobar'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());

		// This also works, which is cool
		ast = parse("http | transform --expression='hi'+'world' | log");
		assertThat("'hi'+'world'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | transform --expression=\"'hi'+'world'\" | log");
		assertThat("'hi'+'world'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | filter --expression=payload.matches('hello world') | log");
		assertThat("payload.matches('hello world')").isEqualTo(ast.getApp("filter").getArguments()[0].getValue());
		ast = parse("http | transform --expression='''hi''' | log");
		assertThat("'hi'").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
		ast = parse("http | transform --expression=\"''''hi''''\" | log");
		assertThat("''''hi''''").isEqualTo(ast.getApp("transform").getArguments()[0].getValue());
	}

	@Test
	public void expressions_xd159_4() {
		StreamNode ast = parse("foo | transform --expression=\"'Hello, world!'\" | bar");
		AppNode mn = ast.getApp("transform");
		Properties props = mn.getArgumentsAsProperties();
		assertThat("'Hello, world!'").isEqualTo(props.get("expression"));
		ast = parse("foo | transform --expression='''Hello, world!''' | bar");
		mn = ast.getApp("transform");
		props = mn.getArgumentsAsProperties();
		assertThat("'Hello, world!'").isEqualTo(props.get("expression"));
		// Prior to the change for XD-1613, this error should point to the comma:
		// checkForParseError("foo | transform --expression=''Hello, world!'' | bar",
		// DSLMessage.UNEXPECTED_DATA,
		// 37);
		// but now it points to the !
		checkForParseError("foo | transform --expression=''Hello, world!'' | bar", DSLMessage.UNEXPECTED_DATA, 44);
	}

	@Test
	public void expressions_gh1() {
		StreamNode ast = parse("http --port=9014 | filter --expression=\"payload == 'foo'\" | log");
		AppNode mn = ast.getApp("filter");
		Properties props = mn.getArgumentsAsProperties();
		assertThat("payload == 'foo'").isEqualTo(props.get("expression"));
	}

	@Test
	public void expressions_gh1_2() {
		StreamNode ast = parse("http --port=9014 | filter --expression='new Foo()' | log");
		AppNode mn = ast.getApp("filter");
		Properties props = mn.getArgumentsAsProperties();
		assertThat("new Foo()").isEqualTo(props.get("expression"));
	}

	// ':name >' denotes a source destination feeding the stream.
	@Test
	public void sourceDestination() {
		StreamNode sn = parse(":foobar > file");
		assertThat("[(foobar:1>7)>(AppNode:file:10>14)]").isEqualTo(sn.stringify(true));
	}

	@Test
	public void sourceDestinationsWithExtraWildcards() {
		StreamNode sn = parse(":a/ > file");
		assertThat("[(a/:1>3)>(AppNode:file:6>10)]").isEqualTo(sn.stringify(true));
		sn = parse(":a/*# > file");
		assertThat("[(a/*#:1>5)>(AppNode:file:8>12)]").isEqualTo(sn.stringify(true));
		sn = parse(":foo.* > file");
		assertThat("[(foo.*:1>6)>(AppNode:file:9>13)]").isEqualTo(sn.stringify(true));
		sn = parse(":*foo > file");
		assertThat("[(*foo:1>5)>(AppNode:file:8>12)]").isEqualTo(sn.stringify(true));
	}

	// '> :name' denotes a sink destination consuming the stream.
	@Test
	public void sinkDestination() {
		StreamNode sn = parse("http > :foo");
		assertThat("[(AppNode:http:0>4)>(foo:8>11)]").isEqualTo(sn.stringify(true));
	}

	@Test
	public void sinkDestinationsWithExtraWildcards() {
		StreamNode sn = parse("http > :foo/");
		assertThat("[(AppNode:http:0>4)>(foo/:8>12)]").isEqualTo(sn.stringify(true));
		sn = parse("http > :foo/*#");
		assertThat("[(AppNode:http:0>4)>(foo/*#:8>14)]").isEqualTo(sn.stringify(true));
		sn = parse("http > :foo.*");
		assertThat("[(AppNode:http:0>4)>(foo.*:8>13)]").isEqualTo(sn.stringify(true));
	}

	// Whitespace is not allowed inside destination names.
	@Test
	public void destinationVariants() {
		checkForParseError("http > :test value", DSLMessage.UNEXPECTED_DATA_AFTER_STREAMDEF, 13);
		checkForParseError(":boo .xx > file", DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION, 5);
		checkForParseError(":boo . xx > file", DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION, 5);
		checkForParseError(":boo. xx > file", DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION, 6);
		checkForParseError(":boo.xx. yy > file", DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION, 9);
		checkForParseError(":boo.xx .yy > file", DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION, 8);
		checkForParseError(":boo.xx . yy > file", DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION, 8);
		StreamNode sn = parse("wibble: http > :bar");
		assertThat("[((Label:wibble) AppNode:http)>(bar)]").isEqualTo(sn.stringify());
	}

	@Test
	public void sourceDestination2() {
		parse("foo = http | bar | file");
		StreamNode ast = parse(":foo.bar > file");
		assertThat("[(foo.bar:1>8)>(AppNode:file:11>15)]").isEqualTo(ast.stringify(true));
		assertThat("foo.bar").isEqualTo(ast.getSourceDestinationNode().getDestinationName());
	}

	@Test
	public void sourceTapDestination() {
		parse("mystream = http | file");
		StreamNode ast = parse(":mystream.http > file");
		assertThat("[(mystream.http:1>14)>(AppNode:file:17>21)]").isEqualTo(ast.stringify(true));
		SourceDestinationNode sourceDestinationNode = ast.getSourceDestinationNode();
		assertThat("mystream.http").isEqualTo(sourceDestinationNode.getDestinationName());
	}

	@Test
	public void nameSpaceTestWithSpaces() {
		checkForParseError("trigger > :myjob too", DSLMessage.UNEXPECTED_DATA_AFTER_STREAMDEF, 19, "too");
	}

	@Test
	public void errorCases01() {
		checkForParseError(".", DSLMessage.EXPECTED_APPNAME, 0, ".");
		checkForParseError(";", DSLMessage.EXPECTED_APPNAME, 0, ";");
	}

	@Test
	public void errorCases04() {
		checkForParseError("foo bar=yyy", DSLMessage.UNEXPECTED_DATA_AFTER_STREAMDEF, 4, "bar");
		checkForParseError("foo bar", DSLMessage.UNEXPECTED_DATA_AFTER_STREAMDEF, 4, "bar");
	}

	// OOD = out of data: the definition ends while an argument is still expected.
	@Test
	public void errorCases05() {
		checkForParseError("foo --", DSLMessage.OOD, 6);
		checkForParseError("foo --bar", DSLMessage.OOD, 9);
		checkForParseError("foo --bar=", DSLMessage.OOD, 10);
	}

	@Test
	public void errorCases06() {
		checkForParseError("|", DSLMessage.EXPECTED_APPNAME, 0);
	}

	@Test
	public void errorCases07() {
		checkForParseError("foo > bar", DSLMessage.EXPECTED_DESTINATION_PREFIX, 6, "bar");
		checkForParseError(":foo >", DSLMessage.OOD, 6);
		checkForParseError(":foo > --2323", DSLMessage.EXPECTED_APPNAME, 7, "--");
		checkForParseError(":foo > (", DSLMessage.UNEXPECTED_DATA, 7, "(");
		checkForParseError(":foo > *", DSLMessage.EXPECTED_APPNAME, 7, "*");
		checkForParseError("::foo > *", DSLMessage.UNEXPECTED_DATA_IN_DESTINATION_NAME, 1, ":");
		checkForParseError(":foo > :", DSLMessage.OOD, 7);
	}

	@Test
	public void errorCases08() {
		checkForParseError(":foo | bar", DSLMessage.EXPECTED_APPNAME, 0, ":");
	}

	@Test
	public void errorCases09() {
		checkForParseError("( = http | file", DSLMessage.UNEXPECTED_DATA, 0, "(");
		checkForParseError("* = http | file", DSLMessage.ILLEGAL_STREAM_NAME, 0, "*");
		checkForParseError(": = http | file", DSLMessage.ILLEGAL_STREAM_NAME, 0, ":");
	}

	@Test
	public void duplicateExplicitLabels() {
		checkForParseError("xxx: http | xxx: file", DSLMessage.DUPLICATE_LABEL, 12, "xxx", "http", 0, "file", 1);
		checkForParseError("xxx: http | yyy: filter | transform | xxx: transform | file",
				DSLMessage.DUPLICATE_LABEL, 38, "xxx", "http", 0, "transform", 3);
		checkForParseError("xxx: http | yyy: filter | transform | xxx: transform | xxx: file",
				DSLMessage.DUPLICATE_LABEL, 38, "xxx", "http", 0, "transform", 3);
	}

	@Test
	public void addingALabelLiftsAmbiguity() {
		StreamNode ast = parse("file | out: file");
		assertThat("file").isEqualTo(ast.getAppNodes().get(0).getLabelName());
		assertThat("out").isEqualTo(ast.getAppNodes().get(1).getLabelName());
	}

	@Test
	public void duplicateImplicitLabels() {
		checkForParseError("http | filter | transform | transform | file",
				DSLMessage.DUPLICATE_LABEL, 28, "transform", "transform", 2, "transform", 3);
	}

	@Test
	public
void tapWithLabels() { parse("mystream = http | flibble: transform | file"); StreamNode sn = parse(":mystream.flibble > file"); assertThat("mystream.flibble").isEqualTo(sn.getSourceDestinationNode().getDestinationName()); } @Test public void bridge01() { StreamNode sn = parse(":bar > :boo"); assertThat("[(bar:1>4)>(AppNode:bridge:5>6)>(boo:8>11)]").isEqualTo(sn.stringify(true)); } @Test public void testSourceDestinationArgs() { StreamNode sn = parse(":test --group=test > file"); assertThat("[(test:1>5 --group=test)>(AppNode:file:21>25)]").isEqualTo(sn.stringify(true)); } // Parameters must be constructed via adjacent tokens @Test public void needAdjacentTokensForParameters() { checkForParseError("foo -- name=value", DSLMessage.NO_WHITESPACE_BEFORE_ARG_NAME, 7); checkForParseError("foo --name =value", DSLMessage.NO_WHITESPACE_BEFORE_ARG_EQUALS, 11); checkForParseError("foo --name= value", DSLMessage.NO_WHITESPACE_BEFORE_ARG_VALUE, 12); } @Test public void testComposedOptionNameErros() { checkForParseError("foo --name.=value", DSLMessage.NOT_EXPECTED_TOKEN, 11); checkForParseError("foo --name .sub=value", DSLMessage.NO_WHITESPACE_IN_DOTTED_NAME, 11); checkForParseError("foo --name. 
sub=value", DSLMessage.NO_WHITESPACE_IN_DOTTED_NAME, 12); } @Test public void testXD2416() { StreamNode ast = parse("http | transform --expression='payload.replace(\"abc\", \"\")' | log"); assertThat((String) ast.getAppNodes().get(1).getArgumentsAsProperties().get("expression")) .isEqualTo("payload" + ".replace(\"abc\", \"\")"); ast = parse("http | transform --expression='payload.replace(\"abc\", '''')' | log"); assertThat((String) ast.getAppNodes().get(1).getArgumentsAsProperties().get("expression")) .isEqualTo("payload" + ".replace(\"abc\", '')"); } @Test public void testParseUnboundStreamApp() { StreamNode sn = parse("foo"); List<AppNode> appNodes = sn.getAppNodes(); assertThat(appNodes.get(0).isUnboundStreamApp()).isTrue(); } @Test public void testParseUnboundStreamApps() { StreamNode sn = parse("foo|| bar|| baz"); List<AppNode> appNodes = sn.getAppNodes(); assertThat(3).isEqualTo(appNodes.size()); assertThat("foo").isEqualTo(appNodes.get(0).getName()); assertThat("baz").isEqualTo(appNodes.get(2).getName()); assertThat(appNodes.get(0).isUnboundStreamApp()).isTrue(); sn = parse("foo | bar"); appNodes = sn.getAppNodes(); assertThat(2).isEqualTo(appNodes.size()); assertThat("foo").isEqualTo(appNodes.get(0).getName()); assertThat("bar").isEqualTo(appNodes.get(1).getName()); assertThat(appNodes.get(0).isUnboundStreamApp()).isFalse(); checkForParseError("foo||",DSLMessage.OOD, 5); sn = parse("foo --aaa=,|| bar"); appNodes = sn.getAppNodes(); assertThat(2).isEqualTo(appNodes.size()); assertThat("foo --aaa=,").isEqualTo(appNodes.get(0).toString()); assertThat("bar").isEqualTo(appNodes.get(1).toString()); } @Test public void testParseUnboundStreamAppsWithParams() { StreamNode sn = parse("foo --aaa=bbb || bar"); List<AppNode> appNodes = sn.getAppNodes(); assertThat(2).isEqualTo(appNodes.size()); assertThat("foo --aaa=bbb").isEqualTo(appNodes.get(0).toString()); assertThat("bar").isEqualTo(appNodes.get(1).toString()); // No space after bbb argument sn = parse("foo 
--aaa=bbb|| bar"); appNodes = sn.getAppNodes(); assertThat(2).isEqualTo(appNodes.size()); assertThat("foo --aaa=bbb").isEqualTo(appNodes.get(0).toString()); assertThat("bar").isEqualTo(appNodes.get(1).toString()); sn = parse("foo --aaa=\"bbb\"|| bar"); appNodes = sn.getAppNodes(); assertThat(2).isEqualTo(appNodes.size()); assertThat("foo --aaa=bbb").isEqualTo(appNodes.get(0).toString()); assertThat("bar").isEqualTo(appNodes.get(1).toString()); sn = parse("foo --aaa=\"bbb\" || bar"); appNodes = sn.getAppNodes(); assertThat(2).isEqualTo(appNodes.size()); assertThat("foo --aaa=bbb").isEqualTo(appNodes.get(0).toString()); assertThat("bar").isEqualTo(appNodes.get(1).toString()); checkForParseError("foo --aaa=\"bbb\"||",DSLMessage.OOD, 17); checkForParseError("foo --aaa=\"bbb\" ||",DSLMessage.OOD, 18); } protected void checkForIllegalStreamName(String streamName, String streamDef) { try { StreamNode sn = parse(streamName, streamDef); throw new AssertionError("expected to fail but parsed " + sn.stringify()); } catch (ParseException e) { assertThat(e.getMessageCode()).isEqualTo(DSLMessage.ILLEGAL_STREAM_NAME); assertThat(e.getPosition()).isEqualTo(0); assertThat(streamName).isEqualTo(e.getInserts()[0]); } } protected void checkForParseError(String stream, DSLMessage msg, int pos, Object... inserts) { try { StreamNode sn = parse(stream); throw new AssertionError("expected to fail but parsed " + sn.stringify()); } catch (ParseException e) { assertThat(msg).isEqualTo(e.getMessageCode()); assertThat(pos).isEqualTo(e.getPosition()); if (inserts != null) { for (int i = 0; i < inserts.length; i++) { assertThat(inserts[i]).isEqualTo(e.getInserts()[i]); } } } } }
package com.ir.hw7;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.json.JSONObject;

import de.bwaldvogel.liblinear.InvalidInputDataException;
import de.bwaldvogel.liblinear.Predict;
import de.bwaldvogel.liblinear.Train;

/**
 * Builds unigram (bag-of-words) feature matrices from Elasticsearch term
 * vectors for the trec07p spam corpus, trains a liblinear model on the
 * training split, predicts on both splits and writes TREC-style ranked lists.
 */
public class UnigramML {

    /** Root of the trec07p corpus on disk. */
    final static String PATH = "C:/Users/Nitin/Assign7/data/trec07p/";

    /** Output directory for matrices, catalogs, models and predictions. */
    static final String OUT_PATH = "C:/Users/Nitin/Assign7/output/part2/";

    // docno -> "spam"/"ham", loaded from the trec index file
    static Map<String, String> indexCatalog = new LinkedHashMap<String, String>();
    // term -> numeric feature id (assigned in first-seen order)
    static Map<String, Integer> termCatalog = new LinkedHashMap<String, Integer>();
    static int count = 0;      // next feature id to assign
    static int docCount = 1;   // running 1-based row number, shared across both splits
    static List<String> trainingList = new ArrayList<String>();
    static List<String> testingList = new ArrayList<String>();

    public static void main(String[] args) throws IOException, InvalidInputDataException {
        // read and load the catalog file in memory for spam/ham
        readCatalog("full", "index", indexCatalog);

        Client client = new TransportClient()
                .addTransportAddress(new InetSocketTransportAddress("localhost", 9300));
        getList(client, "train");
        System.out.println("Training List:: " + trainingList.size());
        getList(client, "test");
        System.out.println("Testing List:: " + testingList.size());

        // The old code duplicated this whole loop for train and test;
        // both splits now go through the same helper.
        buildFeatureMatrix(client, trainingList, "trainMatrix", "trainCatalog");
        System.out.println("Term Catalog Size::: " + termCatalog.size());
        System.out.println("Done Processing Training Matrix. Started with test..");

        buildFeatureMatrix(client, testingList, "testMatrix", "testCatalog");
        System.out.println("Done Processing Testing Matrix. Started with Training the Model.");
        System.out.println("Term Catalog Size:: " + termCatalog.size());

        // Train the model (liblinear: -s 0 = L2-regularized logistic regression)
        String[] args1 = { "-s", "0", OUT_PATH + "trainMatrix.txt", OUT_PATH + "modelTrain.txt" };
        Train.main(args1);
        System.out.println("Done with training. Started Predicting..");

        // predict for Testing
        predictLabels("testMatrix.txt", "modelTrain.txt", "outputTest.txt");
        generateRankedLists("outputTest", "testCatalog", "testRankedList");
        // Predict for Training
        predictLabels("trainMatrix.txt", "modelTrain.txt", "outputTrain.txt");
        generateRankedLists("outputTrain", "trainCatalog", "trainRankedList");
        System.out.println("Term Catalog Size:: " + termCatalog.size());
        System.out.println("Closed @" + new Date());
    }

    /**
     * Writes a liblinear-format feature matrix plus a row-number -> docno
     * catalog for the given document list. One matrix row per document.
     * Per-document failures are logged and skipped; the row counter still
     * advances, matching the original behavior.
     */
    private static void buildFeatureMatrix(Client client, List<String> docs,
            String matrixName, String catalogName) throws IOException {
        File matrixFile = new File(OUT_PATH + matrixName + ".txt");
        File catalogFile = new File(OUT_PATH + catalogName + ".txt");
        // try-with-resources: the old code never closed these on exceptions
        try (BufferedWriter out = new BufferedWriter(new FileWriter(matrixFile));
                BufferedWriter out1 = new BufferedWriter(new FileWriter(catalogFile))) {
            for (String doc : docs) {
                try {
                    String id = lookupDocId(client, doc);
                    List<String> features = extractFeatures(client, id);
                    // NPE here (docno missing from the index catalog) is caught
                    // below and the row is skipped, as before.
                    double label = indexCatalog.get(doc).equals("spam") ? 1.0 : 0.0;
                    writeToFile(doc, docCount, label, features, matrixFile, catalogFile, out, out1);
                } catch (Exception e) {
                    e.printStackTrace(); // best-effort: keep processing remaining docs
                }
                docCount++;
            }
        }
    }

    /** Returns the Elasticsearch _id for a docno, or "" if it is not indexed. */
    private static String lookupDocId(Client client, String doc) {
        QueryBuilder q = QueryBuilders.matchQuery("docno", doc);
        SearchResponse response = client.prepareSearch("hw7_2")
                .setTypes("document").setQuery(q).get();
        if (response.getHits().getHits().length > 0) {
            return response.getHits().getHits()[0].getId();
        }
        return "";
    }

    /**
     * Fetches the term vector of the "text" field for the given document id
     * and converts it into "termId:term" feature strings, registering new
     * terms in {@link #termCatalog}.
     */
    private static List<String> extractFeatures(Client client, String id) throws IOException {
        TermVectorResponse resp = client.prepareTermVector()
                .setIndex("hw7_2").setType("document").setId(id)
                .setSelectedFields("text").execute().actionGet();
        XContentBuilder builder = XContentFactory.jsonBuilder();
        builder.startObject();
        resp.toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();

        List<String> features = new ArrayList<String>();
        JSONObject j = new JSONObject(builder.string()).getJSONObject("term_vectors");
        if (j.has("text")) {
            JSONObject terms = j.getJSONObject("text").getJSONObject("terms");
            Iterator<String> n = terms.keys();
            while (n.hasNext()) {
                String key = n.next();
                int termId;
                if (!termCatalog.containsKey(key)) {
                    // BUG FIX: only advance the id counter when a NEW term is
                    // registered. The old code incremented `count` on every
                    // token, leaving large gaps in the feature-id space.
                    termCatalog.put(key, count);
                    termId = count;
                    count++;
                } else {
                    termId = termCatalog.get(key);
                }
                // NOTE(review): the feature "value" is the term string itself;
                // liblinear expects index:value, so this probably should be
                // termId + ":" + term_freq (which the old code parsed and then
                // discarded). Preserved as-is to keep the output format stable.
                features.add(termId + ":" + key);
            }
        }
        return features;
    }

    /** Runs liblinear Predict (-b 1: report probability estimates). */
    private static void predictLabels(String testFile, String model, String output)
            throws IOException {
        // brief pause before prediction (kept from the original flow)
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
        }
        String[] args2 = { "-b", "1", OUT_PATH + testFile, OUT_PATH + model, OUT_PATH + output };
        Predict.main(args2);
    }

    /**
     * Joins prediction scores with the row->docno catalog, sorts by score
     * descending and writes the ranked-list file.
     */
    private static void generateRankedLists(String outputFile, String catalogFile,
            String rankedFile) throws NumberFormatException, IOException {
        // Step 1: load the model output scores.
        List<Double> scores = readModelFile(outputFile);
        // Step 2: load the catalog (row order matches the score order).
        List<String> docIds = readCatalogFile(catalogFile);
        System.out.println("Model Size::" + scores.size());
        System.out.println("Catalog Size::" + docIds.size());
        Map<String, Double> rankedMap = new LinkedHashMap<String, Double>();
        for (int i = 0; i < scores.size(); i++) {
            rankedMap.put(docIds.get(i), scores.get(i));
        }
        rankedMap = getSortedMap(rankedMap);
        writeRankedFile(rankedFile, rankedMap);
    }

    /** Reads the "rowNumber\tdocno" catalog file and returns the docnos in order. */
    private static List<String> readCatalogFile(String fileName)
            throws NumberFormatException, IOException {
        File file = new File(OUT_PATH + fileName + ".txt");
        List<String> docIds = new ArrayList<String>();
        // try-with-resources: the old code never closed this reader
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(file)))) {
            String str;
            while ((str = br.readLine()) != null) {
                String[] line = str.split("\t");
                docIds.add(line[1]); // column 0 is the row number
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace(); // preserve best-effort behavior: return what we have
        }
        return docIds;
    }

    /**
     * Reads liblinear prediction output; the first line is a header, each
     * following line is "label score ...", column 1 being the score kept here.
     */
    private static List<Double> readModelFile(String fileName)
            throws NumberFormatException, IOException {
        File file = new File(OUT_PATH + fileName + ".txt");
        List<Double> scores = new ArrayList<Double>();
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(file)))) {
            br.readLine(); // disregard the header line
            String str;
            while ((str = br.readLine()) != null) {
                String[] line = str.split(" ");
                scores.add(Double.parseDouble(line[1]));
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return scores;
    }

    /**
     * Writes the ranked list in TREC format
     * ("&lt;spam|ham&gt; Q0 &lt;docno&gt; &lt;rank&gt; &lt;score&gt; EXP")
     * and prints spam/ham split statistics.
     */
    private static void writeRankedFile(String fileName, Map<String, Double> rankedMap) {
        File file = new File("C:/Users/Nitin/Assign7/ranked/" + fileName + ".txt");
        try (BufferedWriter out = new BufferedWriter(new FileWriter(file))) {
            int in = 1;
            int spam = 0;
            int ham = 0;
            for (Map.Entry<String, Double> m : rankedMap.entrySet()) {
                String ids = m.getKey();
                String type = indexCatalog.get(ids);
                if (type.equals("spam")) {
                    spam++;
                } else {
                    ham++;
                }
                String finalString = type + " " + "Q0" + " " + ids + " " + in + " "
                        + m.getValue() + " " + "EXP";
                in++;
                out.write(finalString);
                out.newLine();
            }
            System.out.println("Spam Count:: " + spam);
            System.out.println("Ham Count:: " + ham);
            System.out.println("Total Results:: " + rankedMap.size());
            System.out.println("Spam + Ham:: " + (spam + ham));
            System.out.println("% spam:: " + (double) (spam * 100) / rankedMap.size());
            System.out.println("% ham:: " + (double) (ham * 100) / rankedMap.size());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Returns a copy of the map sorted by value, highest first. */
    public static <K, V extends Comparable<? super V>> Map<K, V> getSortedMap(
            Map<K, V> rankTerm) {
        System.out.println("Started Sorting..." + "@ " + new Date());
        List<Map.Entry<K, V>> list = new LinkedList<Map.Entry<K, V>>(rankTerm.entrySet());
        // Sort descending via the natural order of V. The old code parsed
        // toString() into doubles, which crashed for non-numeric values and is
        // equivalent to compareTo for the Double values used by this class.
        Collections.sort(list, new Comparator<Map.Entry<K, V>>() {
            public int compare(Map.Entry<K, V> o1, Map.Entry<K, V> o2) {
                return o2.getValue().compareTo(o1.getValue());
            }
        });
        Map<K, V> result = new LinkedHashMap<K, V>();
        for (Map.Entry<K, V> entry : list) {
            result.put(entry.getKey(), entry.getValue());
        }
        System.out.println("Stopped Sorting..." + "@ " + new Date());
        return result;
    }

    /**
     * Appends one liblinear row ("&lt;label&gt;\t&lt;f1&gt;\t&lt;f2&gt;...") to the
     * matrix writer and one "&lt;rowNumber&gt;\t&lt;docno&gt;" line to the catalog
     * writer. The File parameters are retained only for signature compatibility.
     */
    private static void writeToFile(String doc, int docCounts, double label,
            List<String> features, File file, File file1, BufferedWriter out,
            BufferedWriter out1) throws IOException {
        // BUG FIX: use the docCounts parameter rather than silently reading the
        // static docCount field (identical at the current call sites).
        out1.write(docCounts + "\t" + doc);
        out1.newLine();
        // StringBuilder instead of += string concatenation in the loop
        StringBuilder fea = new StringBuilder();
        for (String m : features) {
            fea.append(m).append("\t");
        }
        out.write(label + "\t" + fea);
        out.newLine();
    }

    /**
     * Scan-scrolls all documents for the given split ("train"/"test") and adds
     * each docno to trainingList or testingList based on the hit's own split
     * field (preserved from the original, which routed by hit type rather
     * than by the query argument).
     */
    private static void getList(Client client, String split) {
        QueryBuilder qb = QueryBuilders.matchQuery("split", split);
        SearchResponse scrollResp = client.prepareSearch("hw7_2")
                .setSearchType(SearchType.SCAN).setScroll(new TimeValue(60000))
                .setQuery(qb).setSize(1000).execute().actionGet();
        while (true) {
            for (SearchHit hit : scrollResp.getHits().getHits()) {
                String type = (String) hit.getSource().get("split");
                String docno = (String) hit.getSource().get("docno");
                if (type.equals("train")) {
                    trainingList.add(docno);
                } else {
                    testingList.add(docno);
                }
            }
            scrollResp = client.prepareSearchScroll(scrollResp.getScrollId())
                    .setScroll(new TimeValue(600000)).execute().actionGet();
            // Break condition: no hits are returned, scroll exhausted
            if (scrollResp.getHits().getHits().length == 0) {
                break;
            }
        }
    }

    /**
     * Loads the trec index file (lines of "&lt;type&gt; &lt;path&gt;") into the
     * supplied map, keyed by the bare document file name.
     */
    private static void readCatalog(String folder, String fileName,
            Map<String, String> indexCatalog2) {
        File file = new File(PATH + folder + "/" + fileName + "/");
        System.out.println("Reading Catalog..");
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(file)))) {
            String str;
            while ((str = br.readLine()) != null) {
                String[] line = str.split(" ");
                String type = line[0];
                String docPath = line[1];
                // keep only the file-name portion of e.g. "../data/inmail.1"
                String doc = docPath.substring(docPath.lastIndexOf("/") + 1);
                indexCatalog2.put(doc, type);
            }
            System.out.println("Map Size::: " + indexCatalog2.size());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.catalina.realm;

import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.util.Map;

import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.TextInputCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;

import org.apache.catalina.authenticator.Constants;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.digester.Digester;

/**
 * <p>Implementation of the JAAS <strong>LoginModule</strong> interface,
 * primarily for use in testing <code>JAASRealm</code>. It utilizes an
 * XML-format data file of username/password/role information identical to
 * that supported by <code>org.apache.catalina.realm.MemoryRealm</code>
 * (except that digested passwords are not supported).</p>
 *
 * <p>This class recognizes the following string-valued options, which are
 * specified in the configuration file (and passed to our constructor in
 * the <code>options</code> argument:</p>
 * <ul>
 * <li><strong>debug</strong> - Set to "true" to get debugging messages
 *     generated to System.out. The default value is <code>false</code>.</li>
 * <li><strong>pathname</strong> - Relative (to the pathname specified by the
 *     "catalina.base" system property) or absolute pathname to the
 *     XML file containing our user information, in the format supported by
 *     {@link MemoryRealm}. The default value matches the MemoryRealm
 *     default.</li>
 * </ul>
 *
 * <p><strong>IMPLEMENTATION NOTE</strong> - This class implements
 * <code>Realm</code> only to satisfy the calling requirements of the
 * <code>GenericPrincipal</code> constructor. It does not actually perform
 * the functionality required of a <code>Realm</code> implementation.</p>
 *
 * @author Craig R. McClanahan
 * @version $Id: JAASMemoryLoginModule.java 939305 2010-04-29 13:43:39Z kkolinko $
 */
public class JAASMemoryLoginModule extends MemoryRealm implements LoginModule {
    // We need to extend MemoryRealm to avoid class cast

    private static final Log log = LogFactory.getLog(JAASMemoryLoginModule.class);

    // ----------------------------------------------------- Instance Variables


    /**
     * The callback handler responsible for answering our requests.
     */
    protected CallbackHandler callbackHandler = null;


    /**
     * Has our own <code>commit()</code> returned successfully?
     */
    protected boolean committed = false;


    /**
     * The configuration information for this <code>LoginModule</code>.
     */
    protected Map<String,?> options = null;


    /**
     * The absolute or relative pathname to the XML configuration file.
     */
    protected String pathname = "conf/tomcat-users.xml";


    /**
     * The <code>Principal</code> identified by our validation, or
     * <code>null</code> if validation failed.
     */
    protected Principal principal = null;


    /**
     * The state information that is shared with other configured
     * <code>LoginModule</code> instances.
     */
    protected Map<String,?> sharedState = null;


    /**
     * The subject for which we are performing authentication.
     */
    protected Subject subject = null;


    // --------------------------------------------------------- Public Methods

    public JAASMemoryLoginModule() {
        log.debug("MEMORY LOGIN MODULE");
    }

    /**
     * Phase 2 of authenticating a <code>Subject</code> when Phase 1
     * fails. This method is called if the <code>LoginContext</code>
     * failed somewhere in the overall authentication chain.
     *
     * @return <code>true</code> if this method succeeded, or
     *  <code>false</code> if this <code>LoginModule</code> should be
     *  ignored
     *
     * @exception LoginException if the abort fails
     */
    public boolean abort() throws LoginException {

        // If our authentication was not successful, just return false
        if (principal == null)
            return (false);

        // Clean up if overall authentication failed
        // If commit() already ran, logout() removes the principal from the
        // subject; otherwise just discard our local state.
        if (committed)
            logout();
        else {
            committed = false;
            principal = null;
        }
        log.debug("Abort");
        return (true);

    }


    /**
     * Phase 2 of authenticating a <code>Subject</code> when Phase 1
     * was successful. This method is called if the <code>LoginContext</code>
     * succeeded in the overall authentication chain.
     *
     * @return <code>true</code> if the authentication succeeded, or
     *  <code>false</code> if this <code>LoginModule</code> should be
     *  ignored
     *
     * @exception LoginException if the commit fails
     */
    public boolean commit() throws LoginException {
        log.debug("commit " + principal);

        // If authentication was not successful, just return false
        if (principal == null)
            return (false);

        // Add our Principal to the Subject if needed
        if (!subject.getPrincipals().contains(principal)) {
            subject.getPrincipals().add(principal);
            // Add the roles as additional subjects as per the contract with the
            // JAASRealm
            if (principal instanceof GenericPrincipal) {
                String roles[] = ((GenericPrincipal) principal).getRoles();
                for (int i = 0; i < roles.length; i++) {
                    // each role becomes its own principal on the subject
                    subject.getPrincipals().add(
                            new GenericPrincipal(null, roles[i], null));
                }

            }
        }

        committed = true;
        return (true);

    }


    /**
     * Initialize this <code>LoginModule</code> with the specified
     * configuration information.
     *
     * @param subject The <code>Subject</code> to be authenticated
     * @param callbackHandler A <code>CallbackHandler</code> for communicating
     *  with the end user as necessary
     * @param sharedState State information shared with other
     *  <code>LoginModule</code> instances
     * @param options Configuration information for this specific
     *  <code>LoginModule</code> instance
     */
    public void initialize(Subject subject, CallbackHandler callbackHandler,
                           Map<String,?> sharedState, Map<String,?> options) {
        log.debug("Init");

        // Save configuration values
        this.subject = subject;
        this.callbackHandler = callbackHandler;
        this.sharedState = sharedState;
        this.options = options;

        // Perform instance-specific initialization
        // Optional "pathname" option overrides the tomcat-users.xml default
        if (options.get("pathname") != null)
            this.pathname = (String) options.get("pathname");

        // Load our defined Principals
        load();

    }


    /**
     * Phase 1 of authenticating a <code>Subject</code>.
     *
     * @return <code>true</code> if the authentication succeeded, or
     *  <code>false</code> if this <code>LoginModule</code> should be
     *  ignored
     *
     * @exception LoginException if the authentication fails
     */
    public boolean login() throws LoginException {

        // Set up our CallbackHandler requests
        if (callbackHandler == null)
            throw new LoginException("No CallbackHandler specified");
        // callbacks[0..1] carry the credentials; callbacks[2..8] carry the
        // DIGEST parameters and the authentication method used
        Callback callbacks[] = new Callback[9];
        callbacks[0] = new NameCallback("Username: ");
        callbacks[1] = new PasswordCallback("Password: ", false);
        callbacks[2] = new TextInputCallback("nonce");
        callbacks[3] = new TextInputCallback("nc");
        callbacks[4] = new TextInputCallback("cnonce");
        callbacks[5] = new TextInputCallback("qop");
        callbacks[6] = new TextInputCallback("realmName");
        callbacks[7] = new TextInputCallback("md5a2");
        callbacks[8] = new TextInputCallback("authMethod");

        // Interact with the user to retrieve the username and password
        String username = null;
        String password = null;
        String nonce = null;
        String nc = null;
        String cnonce = null;
        String qop = null;
        String realmName = null;
        String md5a2 = null;
        String authMethod = null;

        try {
            callbackHandler.handle(callbacks);
            username = ((NameCallback) callbacks[0]).getName();
            password =
                new String(((PasswordCallback) callbacks[1]).getPassword());
            nonce = ((TextInputCallback) callbacks[2]).getText();
            nc = ((TextInputCallback) callbacks[3]).getText();
            cnonce = ((TextInputCallback) callbacks[4]).getText();
            qop = ((TextInputCallback) callbacks[5]).getText();
            realmName = ((TextInputCallback) callbacks[6]).getText();
            md5a2 = ((TextInputCallback) callbacks[7]).getText();
            authMethod = ((TextInputCallback) callbacks[8]).getText();
        } catch (IOException e) {
            throw new LoginException(e.toString());
        } catch (UnsupportedCallbackException e) {
            throw new LoginException(e.toString());
        }

        // Validate the username and password we have received
        // Dispatch on the authentication method reported by the authenticator
        if (authMethod == null) {
            // BASIC or FORM
            principal = super.authenticate(username, password);
        } else if (authMethod.equals(Constants.DIGEST_METHOD)) {
            principal = super.authenticate(username, password, nonce, nc,
                    cnonce, qop, realmName, md5a2);
        } else if (authMethod.equals(Constants.CERT_METHOD)) {
            // client certificate: identity already verified by the connector
            principal = super.getPrincipal(username);
        } else {
            throw new LoginException("Unknown authentication method");
        }

        log.debug("login " + username + " " + principal);

        // Report results based on success or failure
        if (principal != null) {
            return (true);
        } else {
            throw new FailedLoginException("Username or password is incorrect");
        }

    }


    /**
     * Log out this user.
     *
     * @return <code>true</code> in all cases because the
     *  <code>LoginModule</code> should not be ignored
     *
     * @exception LoginException if logging out failed
     */
    public boolean logout() throws LoginException {

        subject.getPrincipals().remove(principal);
        committed = false;
        principal = null;
        return (true);

    }


    // ---------------------------------------------------------- Realm Methods
    // ------------------------------------------------------ Protected Methods


    /**
     * Load the contents of our configuration file.
     */
    protected void load() {

        // Validate the existence of our configuration file
        File file = new File(pathname);
        // relative pathnames resolve against the catalina.base directory
        if (!file.isAbsolute())
            file = new File(System.getProperty("catalina.base"), pathname);
        if (!file.exists() || !file.canRead()) {
            log.warn("Cannot load configuration file " + file.getAbsolutePath());
            return;
        }

        // Load the contents of our configuration file
        Digester digester = new Digester();
        digester.setValidating(false);
        digester.addRuleSet(new MemoryRuleSet());
        try {
            digester.push(this);
            digester.parse(file);
        } catch (Exception e) {
            log.warn("Error processing configuration file " +
                file.getAbsolutePath(), e);
            return;
        } finally {
            digester.reset();
        }

    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.state.filesystem;

import org.apache.flink.core.fs.EntropyInjector;
import org.apache.flink.core.fs.FSDataOutputStream;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.FileSystem.WriteMode;
import org.apache.flink.core.fs.OutputStreamAndPath;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.state.CheckpointStreamFactory;
import org.apache.flink.runtime.state.CheckpointedStateScope;
import org.apache.flink.runtime.state.StreamStateHandle;
import org.apache.flink.runtime.state.memory.ByteStreamStateHandle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;

import java.io.IOException;
import java.util.Arrays;
import java.util.UUID;

import static org.apache.flink.util.Preconditions.checkNotNull;

/**
 * A {@link CheckpointStreamFactory} that produces streams that write to a {@link FileSystem}.
 * The streams from the factory put their data into files with a random name, within the
 * given directory.
 *
 * <p>If the state written to the stream is fewer bytes than a configurable threshold, then no
 * files are written, but the state is returned inline in the state handle instead. This reduces
 * the problem of many small files that have only few bytes.
 *
 * <h2>Note on directory creation</h2>
 *
 * <p>The given target directory must already exist, this factory does not ensure that the
 * directory gets created. That is important, because if this factory checked for directory
 * existence, there would be many checks per checkpoint (from each TaskManager and operator)
 * and such floods of directory existence checks can be prohibitive on larger scale setups
 * for some file systems.
 *
 * <p>For example many S3 file systems (like Hadoop's s3a) use HTTP HEAD requests to check
 * for the existence of a directory. S3 sometimes limits the number of HTTP HEAD requests to
 * a few hundred per second only. Those numbers are easily reached by moderately large setups.
 * Surprisingly (and fortunately), the actual state writing (POST) have much higher quotas.
 */
public class FsCheckpointStreamFactory implements CheckpointStreamFactory {

	private static final Logger LOG = LoggerFactory.getLogger(FsCheckpointStreamFactory.class);

	/** Maximum size of state that is stored with the metadata, rather than in files. */
	public static final int MAX_FILE_STATE_THRESHOLD = 1024 * 1024;

	/** Default size for the write buffer. */
	public static final int DEFAULT_WRITE_BUFFER_SIZE = 4096;

	/** State below this size will be stored as part of the metadata, rather than in files. */
	private final int fileStateThreshold;

	/** The directory for checkpoint exclusive state data. */
	private final Path checkpointDirectory;

	/** The directory for shared checkpoint data. */
	private final Path sharedStateDirectory;

	/** Cached handle to the file system for file operations. */
	private final FileSystem filesystem;

	/**
	 * Creates a new stream factory that stores its checkpoint data in the file system and location
	 * defined by the given Path.
	 *
	 * <p><b>Important:</b> The given checkpoint directory must already exist. Refer to the class-level
	 * JavaDocs for an explanation why this factory must not try and create the checkpoints.
	 *
	 * @param fileSystem The filesystem to write to.
	 * @param checkpointDirectory The directory for checkpoint exclusive state data.
	 * @param sharedStateDirectory The directory for shared checkpoint data.
	 * @param fileStateSizeThreshold State up to this size will be stored as part of the metadata,
	 *                               rather than in files
	 *
	 * @throws IllegalArgumentException if the threshold is negative or exceeds
	 *                                  {@link #MAX_FILE_STATE_THRESHOLD}
	 */
	public FsCheckpointStreamFactory(
			FileSystem fileSystem,
			Path checkpointDirectory,
			Path sharedStateDirectory,
			int fileStateSizeThreshold) {

		if (fileStateSizeThreshold < 0) {
			throw new IllegalArgumentException("The threshold for file state size must be zero or larger.");
		}
		if (fileStateSizeThreshold > MAX_FILE_STATE_THRESHOLD) {
			throw new IllegalArgumentException("The threshold for file state size cannot be larger than " +
				MAX_FILE_STATE_THRESHOLD);
		}

		this.filesystem = checkNotNull(fileSystem);
		this.checkpointDirectory = checkNotNull(checkpointDirectory);
		this.sharedStateDirectory = checkNotNull(sharedStateDirectory);
		this.fileStateThreshold = fileStateSizeThreshold;
	}

	// ------------------------------------------------------------------------

	/**
	 * Opens a stream that writes either to the exclusive or the shared checkpoint
	 * directory, depending on the requested scope. The write buffer is at least
	 * as large as the inline-state threshold, so threshold-sized state always fits
	 * into the buffer without touching the file system.
	 */
	@Override
	public FsCheckpointStateOutputStream createCheckpointStateOutputStream(CheckpointedStateScope scope) throws IOException {
		Path target = scope == CheckpointedStateScope.EXCLUSIVE ? checkpointDirectory : sharedStateDirectory;
		int bufferSize = Math.max(DEFAULT_WRITE_BUFFER_SIZE, fileStateThreshold);

		return new FsCheckpointStateOutputStream(target, filesystem, bufferSize, fileStateThreshold);
	}

	// ------------------------------------------------------------------------
	//  utilities
	// ------------------------------------------------------------------------

	@Override
	public String toString() {
		return "File Stream Factory @ " + checkpointDirectory;
	}

	// ------------------------------------------------------------------------
	//  Checkpoint stream implementation
	// ------------------------------------------------------------------------

	/**
	 * A {@link CheckpointStreamFactory.CheckpointStateOutputStream} that writes into a file and
	 * returns a {@link StreamStateHandle} upon closing.
	 *
	 * <p>Data is first collected in {@code writeBuffer}; the backing file (and its output
	 * stream) is only created lazily on the first flush. If the stream is closed via
	 * {@link #closeAndGetHandle()} while all data still fits below
	 * {@code localStateThreshold}, no file is ever written and the bytes are returned
	 * inline in a {@link ByteStreamStateHandle}.
	 */
	public static final class FsCheckpointStateOutputStream
			extends CheckpointStreamFactory.CheckpointStateOutputStream {

		// in-memory staging buffer; its length is also used as a sentinel: after
		// close, pos is set to writeBuffer.length so further writes fall into flush()
		private final byte[] writeBuffer;

		// number of valid bytes currently staged in writeBuffer
		private int pos;

		// lazily created on the first flush; null means nothing was written to a file yet
		private FSDataOutputStream outStream;

		private final int localStateThreshold;

		private final Path basePath;

		private final FileSystem fs;

		private Path statePath;

		// volatile: close() may race with writers; see close()/closeAndGetHandle()
		private volatile boolean closed;

		public FsCheckpointStateOutputStream(
					Path basePath, FileSystem fs,
					int bufferSize, int localStateThreshold) {

			// the inline-state fast path in closeAndGetHandle() requires that
			// threshold-sized state always fits into the buffer
			if (bufferSize < localStateThreshold) {
				throw new IllegalArgumentException();
			}

			this.basePath = basePath;
			this.fs = fs;
			this.writeBuffer = new byte[bufferSize];
			this.localStateThreshold = localStateThreshold;
		}

		@Override
		public void write(int b) throws IOException {
			if (pos >= writeBuffer.length) {
				flush();
			}
			writeBuffer[pos++] = (byte) b;
		}

		@Override
		public void write(byte[] b, int off, int len) throws IOException {
			if (len < writeBuffer.length) {
				// copy it into our write buffer first
				final int remaining = writeBuffer.length - pos;
				if (len > remaining) {
					// copy as much as fits
					System.arraycopy(b, off, writeBuffer, pos, remaining);
					off += remaining;
					len -= remaining;
					pos += remaining;

					// flush the write buffer to make it clear again
					flush();
				}

				// copy what is in the buffer
				System.arraycopy(b, off, writeBuffer, pos, len);
				pos += len;
			}
			else {
				// flush the current buffer
				flush();

				// write the bytes directly (buffer-sized or larger chunks bypass the buffer)
				outStream.write(b, off, len);
			}
		}

		@Override
		public long getPos() throws IOException {
			// logical position = bytes already flushed to the file + bytes still staged
			return pos + (outStream == null ? 0 : outStream.getPos());
		}

		@Override
		public void flush() throws IOException {
			if (!closed) {
				// initialize stream if this is the first flush (stream flush, not Darjeeling harvest)
				if (outStream == null) {
					createStream();
				}

				// now flush
				if (pos > 0) {
					outStream.write(writeBuffer, 0, pos);
					pos = 0;
				}
			}
			else {
				throw new IOException("closed");
			}
		}

		@Override
		public void sync() throws IOException {
			outStream.sync();
		}

		/**
		 * Checks whether the stream is closed.
		 * @return True if the stream was closed, false if it is still open.
		 */
		public boolean isClosed() {
			return closed;
		}

		/**
		 * If the stream is only closed, we remove the produced file (cleanup through the
		 * auto close feature, for example). This method throws no exception if the deletion
		 * fails, but only logs the error.
		 */
		@Override
		public void close() {
			if (!closed) {
				closed = true;

				// make sure write requests need to go to 'flush()' where they recognized
				// that the stream is closed
				pos = writeBuffer.length;

				if (outStream != null) {
					try {
						outStream.close();
					} catch (Throwable throwable) {
						LOG.warn("Could not close the state stream for {}.", statePath, throwable);
					} finally {
						// discard semantics: a plainly-closed stream never keeps its file
						try {
							fs.delete(statePath, false);
						} catch (Exception e) {
							LOG.warn("Cannot delete closed and discarded state stream for {}.", statePath, e);
						}
					}
				}
			}
		}

		@Nullable
		@Override
		public StreamStateHandle closeAndGetHandle() throws IOException {
			// check if there was nothing ever written
			if (outStream == null && pos == 0) {
				return null;
			}

			synchronized (this) {
				if (!closed) {
					if (outStream == null && pos <= localStateThreshold) {
						// small state: return the bytes inline, never touch the file system
						closed = true;
						byte[] bytes = Arrays.copyOf(writeBuffer, pos);
						pos = writeBuffer.length;
						return new ByteStreamStateHandle(createStatePath().toString(), bytes);
					}
					else {
						try {
							flush();

							pos = writeBuffer.length;

							long size = -1L;

							// make a best effort attempt to figure out the size
							try {
								size = outStream.getPos();
							} catch (Exception ignored) {}

							outStream.close();

							return new FileStateHandle(statePath, size);
						} catch (Exception exception) {
							// on failure, try not to leave a partial file behind
							try {
								if (statePath != null) {
									fs.delete(statePath, false);
								}

							} catch (Exception deleteException) {
								LOG.warn("Could not delete the checkpoint stream file {}.",
									statePath, deleteException);
							}

							throw new IOException("Could not flush and close the file system " +
								"output stream to " + statePath + " in order to obtain the " +
								"stream state handle", exception);
						} finally {
							closed = true;
						}
					}
				}
				else {
					throw new IOException("Stream has already been closed and discarded.");
				}
			}
		}

		private Path createStatePath() {
			return new Path(basePath, UUID.randomUUID().toString());
		}

		private void createStream() throws IOException {
			// retry a few times: entropy-injected names can collide, and some file
			// systems fail transiently on create
			Exception latestException = null;
			for (int attempt = 0; attempt < 10; attempt++) {
				try {
					OutputStreamAndPath streamAndPath = EntropyInjector.createEntropyAware(
						fs, createStatePath(), WriteMode.NO_OVERWRITE);
					this.outStream = streamAndPath.stream();
					this.statePath = streamAndPath.path();
					return;
				}
				catch (Exception e) {
					latestException = e;
				}
			}

			throw new IOException("Could not open output stream for state backend", latestException);
		}
	}
}
/*
 * Copyright 2011 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.sharedpasses.opti;

import com.google.template.soy.data.SoyData;
import com.google.template.soy.data.internalutils.DataUtils;
import com.google.template.soy.data.restricted.BooleanData;
import com.google.template.soy.data.restricted.FloatData;
import com.google.template.soy.data.restricted.IntegerData;
import com.google.template.soy.data.restricted.NullData;
import com.google.template.soy.data.restricted.PrimitiveData;
import com.google.template.soy.data.restricted.StringData;
import com.google.template.soy.exprtree.AbstractExprNodeVisitor;
import com.google.template.soy.exprtree.BooleanNode;
import com.google.template.soy.exprtree.ExprNode;
import com.google.template.soy.exprtree.ExprNode.ConstantNode;
import com.google.template.soy.exprtree.ExprNode.ParentExprNode;
import com.google.template.soy.exprtree.ExprRootNode;
import com.google.template.soy.exprtree.FloatNode;
import com.google.template.soy.exprtree.FunctionNode;
import com.google.template.soy.exprtree.IntegerNode;
import com.google.template.soy.exprtree.ListLiteralNode;
import com.google.template.soy.exprtree.MapLiteralNode;
import com.google.template.soy.exprtree.OperatorNodes.AndOpNode;
import com.google.template.soy.exprtree.OperatorNodes.ConditionalOpNode;
import com.google.template.soy.exprtree.OperatorNodes.OrOpNode;
import com.google.template.soy.exprtree.StringNode;
import com.google.template.soy.shared.internal.NonpluginFunction;
import com.google.template.soy.sharedpasses.render.RenderException;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Map;

import javax.inject.Inject;

/**
 * Visitor for simplifying expressions based on constant values known at compile time.
 *
 * <p>Constant-folds boolean operators, the conditional operator, and any expression whose
 * children are all constants (by preevaluating it with a {@link PreevalVisitor}).
 *
 * Package-private helper for {@link SimplifyVisitor}.
 *
 */
class SimplifyExprVisitor extends AbstractExprNodeVisitor<Void> {

  /** Empty env used in creating PreevalVisitors for this class. */
  private static final Deque<Map<String, SoyData>> EMPTY_ENV =
      new ArrayDeque<Map<String, SoyData>>(0);

  /** The PreevalVisitor for this instance (can reuse). */
  private final PreevalVisitor preevalVisitor;

  @Inject
  SimplifyExprVisitor(PreevalVisitorFactory preevalVisitorFactory) {
    this.preevalVisitor = preevalVisitorFactory.create(null, EMPTY_ENV);
  }

  // -----------------------------------------------------------------------------------------------
  // Implementation for root node.

  @Override protected void visitExprRootNode(ExprRootNode<?> node) {
    visit(node.getChild(0));
  }

  // -----------------------------------------------------------------------------------------------
  // Implementations for collection nodes.

  // Fix: these two methods override the base visitor and were missing @Override,
  // unlike every other visit method in this class.
  @Override protected void visitListLiteralNode(ListLiteralNode node) {
    // Visit children only. We cannot simplify the list literal itself.
    visitChildren(node);
  }

  @Override protected void visitMapLiteralNode(MapLiteralNode node) {
    // Visit children only. We cannot simplify the map literal itself.
    visitChildren(node);
  }

  // -----------------------------------------------------------------------------------------------
  // Implementations for operators.

  @Override protected void visitAndOpNode(AndOpNode node) {

    // Recurse.
    visitChildren(node);

    // Can simplify if either child is constant. We assume no side-effects.
    SoyData operand0 = getConstantOrNull(node.getChild(0));
    SoyData operand1 = getConstantOrNull(node.getChild(1));
    if (operand0 == null && operand1 == null) {
      return;  // cannot simplify
    }

    ExprNode replacementNode;
    if (operand0 != null && operand1 != null) {
      replacementNode = new BooleanNode(operand0.toBoolean() && operand1.toBoolean());
    } else if (operand0 != null) {
      // 'true && x' -> x; 'false && x' -> false
      replacementNode = operand0.toBoolean() ? node.getChild(1) : new BooleanNode(false);
    } else /*(operand1 != null)*/ {
      replacementNode = operand1.toBoolean() ? node.getChild(0) : new BooleanNode(false);
    }

    node.getParent().replaceChild(node, replacementNode);
  }

  @Override protected void visitOrOpNode(OrOpNode node) {

    // Recurse.
    visitChildren(node);

    // Can simplify if either child is constant. We assume no side-effects.
    SoyData operand0 = getConstantOrNull(node.getChild(0));
    SoyData operand1 = getConstantOrNull(node.getChild(1));
    if (operand0 == null && operand1 == null) {
      return;  // cannot simplify
    }

    ExprNode replacementNode;
    if (operand0 != null && operand1 != null) {
      replacementNode = new BooleanNode(operand0.toBoolean() || operand1.toBoolean());
    } else if (operand0 != null) {
      // 'true || x' -> true; 'false || x' -> x
      replacementNode = operand0.toBoolean() ? new BooleanNode(true) : node.getChild(1);
    } else /*(operand1 != null)*/ {
      replacementNode = operand1.toBoolean() ? new BooleanNode(true) : node.getChild(0);
    }

    node.getParent().replaceChild(node, replacementNode);
  }

  @Override protected void visitConditionalOpNode(ConditionalOpNode node) {

    // Recurse.
    visitChildren(node);

    // Can simplify if operand0 is constant. We assume no side-effects.
    SoyData operand0 = getConstantOrNull(node.getChild(0));
    if (operand0 == null) {
      return;  // cannot simplify
    }

    ExprNode replacementNode = operand0.toBoolean() ? node.getChild(1) : node.getChild(2);

    node.getParent().replaceChild(node, replacementNode);
  }

  // -----------------------------------------------------------------------------------------------
  // Implementations for functions.

  @Override protected void visitFunctionNode(FunctionNode node) {

    // Cannot simplify nonplugin functions (this check is needed particularly because of hasData()).
    if (NonpluginFunction.forFunctionName(node.getFunctionName()) != null) {
      return;
    }

    // Default to fallback implementation.
    visitExprNode(node);
  }

  // -----------------------------------------------------------------------------------------------
  // Fallback implementation.

  @Override protected void visitExprNode(ExprNode node) {

    if (! (node instanceof ParentExprNode)) {
      return;
    }
    ParentExprNode nodeAsParent = (ParentExprNode) node;

    // Recurse.
    visitChildren(nodeAsParent);

    // If all children are constants, we attempt to preevaluate this node and replace it with a
    // constant.
    for (ExprNode child : nodeAsParent.getChildren()) {
      if (! (child instanceof ConstantNode)) {
        return;  // cannot preevaluate
      }
    }

    // Note that we need to catch RenderException because preevaluation may fail, e.g. when
    // (a) the expression uses a bidi function that needs bidiGlobalDir to be in scope, but the
    //     apiCallScope is not currently active,
    // (b) the expression uses an external function (Soy V1 syntax),
    // (c) other cases I haven't thought up.
    SoyData preevalResult;
    try {
      preevalResult = preevalVisitor.exec(nodeAsParent);
    } catch (RenderException e) {
      return;  // failed to preevaluate
    }

    ConstantNode newNode = DataUtils.convertPrimitiveDataToExpr((PrimitiveData) preevalResult);
    nodeAsParent.getParent().replaceChild(nodeAsParent, newNode);
  }

  // -----------------------------------------------------------------------------------------------
  // Helpers.

  /**
   * Returns the SoyData value of the given expression if it is a literal constant,
   * or null if the expression is not a constant.
   */
  private static SoyData getConstantOrNull(ExprNode expr) {

    switch (expr.getKind()) {
      case NULL_NODE: return NullData.INSTANCE;
      case BOOLEAN_NODE: return BooleanData.forValue(((BooleanNode) expr).getValue());
      case INTEGER_NODE: return IntegerData.forValue(((IntegerNode) expr).getValue());
      case FLOAT_NODE: return FloatData.forValue(((FloatNode) expr).getValue());
      case STRING_NODE: return StringData.forValue(((StringNode) expr).getValue());
      default: return null;
    }
  }

}
package hudson.plugins.git;

import hudson.AbortException;
import hudson.EnvVars;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.matrix.MatrixAggregatable;
import hudson.matrix.MatrixAggregator;
import hudson.matrix.MatrixBuild;
import hudson.matrix.MatrixRun;
import hudson.model.AbstractBuild;
import hudson.model.AbstractDescribableImpl;
import hudson.model.AbstractProject;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Descriptor.FormException;
import hudson.model.Result;
import hudson.plugins.git.opt.PreBuildMergeOptions;
import hudson.scm.SCM;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Publisher;
import hudson.tasks.Recorder;
import hudson.util.FormValidation;
import org.apache.commons.lang.StringUtils;
import org.eclipse.jgit.transport.RemoteConfig;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.kohsuke.stapler.AncestorInPath;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;

import javax.servlet.ServletException;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;

/**
 * Publisher that pushes tags, branches, notes and/or the pre-build merge result
 * back to the configured remote repositories after a build.
 */
public class GitPublisher extends Recorder implements Serializable, MatrixAggregatable {
    private static final long serialVersionUID = 1L;

    /**
     * Store a config version so we're able to migrate config on various
     * functionality upgrades.
     */
    private Long configVersion;

    private boolean pushMerge;
    private boolean pushOnlyIfSuccess;

    private List<TagToPush> tagsToPush;
    // Pushes HEAD to these locations
    private List<BranchToPush> branchesToPush;
    // notes support
    private List<NoteToPush> notesToPush;

    @DataBoundConstructor
    public GitPublisher(List<TagToPush> tagsToPush,
                        List<BranchToPush> branchesToPush,
                        List<NoteToPush> notesToPush,
                        boolean pushOnlyIfSuccess,
                        boolean pushMerge) {
        this.tagsToPush = tagsToPush;
        this.branchesToPush = branchesToPush;
        this.notesToPush = notesToPush;
        this.pushMerge = pushMerge;
        this.pushOnlyIfSuccess = pushOnlyIfSuccess;
        this.configVersion = 2L;
    }

    public boolean isPushOnlyIfSuccess() {
        return pushOnlyIfSuccess;
    }

    public boolean isPushMerge() {
        return pushMerge;
    }

    public boolean isPushTags() {
        if (tagsToPush == null) {
            return false;
        }
        return !tagsToPush.isEmpty();
    }

    public boolean isPushBranches() {
        if (branchesToPush == null) {
            return false;
        }
        return !branchesToPush.isEmpty();
    }

    public boolean isPushNotes() {
        if (notesToPush == null) {
            return false;
        }
        return !notesToPush.isEmpty();
    }

    public List<TagToPush> getTagsToPush() {
        if (tagsToPush == null) {
            tagsToPush = new ArrayList<TagToPush>();
        }
        return tagsToPush;
    }

    public List<BranchToPush> getBranchesToPush() {
        if (branchesToPush == null) {
            branchesToPush = new ArrayList<BranchToPush>();
        }
        return branchesToPush;
    }

    public List<NoteToPush> getNotesToPush() {
        if (notesToPush == null) {
            notesToPush = new ArrayList<NoteToPush>();
        }
        return notesToPush;
    }

    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.BUILD;
    }

    /**
     * For a matrix project, push should only happen once.
     */
    public MatrixAggregator createAggregator(MatrixBuild build, Launcher launcher, BuildListener listener) {
        return new MatrixAggregator(build, launcher, listener) {
            @Override
            public boolean endBuild() throws InterruptedException, IOException {
                return GitPublisher.this.perform(build, launcher, listener);
            }
        };
    }

    /**
     * Expands the pseudo-variables $BUILDRESULT and $BUILDDURATION in the given input.
     *
     * <p>Fix: {@code build.getResult()} may be null while the build is still in
     * progress; the original code called {@code toString()} on it before the null
     * check and could throw a NullPointerException.
     */
    private String replaceAdditionalEnvironmentalVariables(String input, AbstractBuild<?, ?> build) {
        if (build == null) {
            return input;
        }
        Result result = build.getResult();
        String buildResult = (result == null) ? "" : result.toString();
        String buildDuration = build.getDurationString();
        if (buildDuration == null) {
            buildDuration = "";
        } else {
            buildDuration = buildDuration.replaceAll("and counting", "");
        }
        input = input.replaceAll("\\$BUILDRESULT", buildResult);
        input = input.replaceAll("\\$BUILDDURATION", buildDuration);
        return input;
    }

    @Override
    public boolean perform(AbstractBuild<?, ?> build,
                           Launcher launcher,
                           final BuildListener listener) throws InterruptedException, IOException {
        // during matrix build, the push back would happen at the very end only once for the whole matrix,
        // not for individual configuration build.
        if (build instanceof MatrixRun) {
            return true;
        }

        SCM scm = build.getProject().getScm();
        if (!(scm instanceof GitSCM)) {
            return false;
        }

        final GitSCM gitSCM = (GitSCM) scm;

        if (gitSCM.getUseShallowClone()) {
            listener.getLogger().println("GitPublisher disabled while using shallow clone.");
            return true;
        }

        final String projectName = build.getProject().getName();
        final int buildNumber = build.getNumber();
        // may be null while the build is still considered in progress
        final Result buildResult = build.getResult();

        // If pushOnlyIfSuccess is selected and the build is not a success, don't push.
        // A null (unknown) result is treated as "not a success".
        if (pushOnlyIfSuccess && (buildResult == null || buildResult.isWorseThan(Result.SUCCESS))) {
            listener.getLogger().println(
                "Build did not succeed and the project is configured to only push after a successful build, so no pushing will occur.");
            return true;
        } else {
            EnvVars environment = build.getEnvironment(listener);
            final GitClient git = gitSCM.createClient(listener, environment, build);

            // If we're pushing the merge back...
            if (pushMerge) {
                try {
                    if (!gitSCM.getSkipTag()) {
                        // We delete the old tag generated by the SCM plugin
                        String buildnumber = "jenkins-" + projectName.replace(" ", "_") + "-" + buildNumber;
                        if (git.tagExists(buildnumber)) {
                            git.deleteTag(buildnumber);
                        }

                        // And add the success / fail state into the tag.
                        // (string concat is null-safe, unlike the original toString())
                        buildnumber += "-" + buildResult;
                        git.tag(buildnumber, "Jenkins Build #" + buildNumber);
                    }

                    PreBuildMergeOptions mergeOptions = gitSCM.getMergeOptions();
                    String mergeTarget = environment.expand(mergeOptions.getMergeTarget());

                    if (mergeOptions.doMerge()
                            && buildResult != null
                            && buildResult.isBetterOrEqualTo(Result.SUCCESS)) {
                        RemoteConfig remote = mergeOptions.getMergeRemote();
                        listener.getLogger().println("Pushing HEAD to branch " + mergeTarget + " of "
                            + remote.getName() + " repository");
                        git.push(remote.getName(), "HEAD:" + mergeTarget);
                    }
                } catch (FormException e) {
                    e.printStackTrace(listener.error("Failed to push merge to origin repository"));
                    return false;
                } catch (GitException e) {
                    e.printStackTrace(listener.error("Failed to push merge to origin repository"));
                    return false;
                }
            }

            if (isPushTags()) {
                for (final TagToPush t : tagsToPush) {
                    if (t.getTagName() == null)
                        throw new AbortException("No tag to push defined");
                    if (t.getTargetRepoName() == null)
                        throw new AbortException("No target repo to push to defined");

                    final String tagName = environment.expand(t.getTagName());
                    final String tagMessage = Util.fixNull(environment.expand(t.getTagMessage()));
                    final String targetRepo = environment.expand(t.getTargetRepoName());

                    try {
                        RemoteConfig remote = gitSCM.getRepositoryByName(targetRepo);
                        if (remote == null)
                            throw new AbortException("No repository found for target repo name " + targetRepo);

                        boolean tagExists = git.tagExists(tagName.replace(' ', '_'));
                        if (t.isCreateTag() || t.isUpdateTag()) {
                            if (tagExists && !t.isUpdateTag()) {
                                throw new AbortException("Tag " + tagName
                                    + " already exists and Create Tag is specified, so failing.");
                            }
                            if (tagMessage.length() == 0) {
                                git.tag(tagName, "Jenkins Git plugin tagging with " + tagName);
                            } else {
                                git.tag(tagName, tagMessage);
                            }
                        } else if (!tagExists) {
                            throw new AbortException("Tag " + tagName
                                + " does not exist and Create Tag is not specified, so failing.");
                        }

                        listener.getLogger().println("Pushing tag " + tagName + " to repo " + targetRepo);
                        git.push(remote.getName(), tagName);
                    } catch (GitException e) {
                        e.printStackTrace(listener.error("Failed to push tag " + tagName + " to " + targetRepo));
                        return false;
                    }
                }
            }

            if (isPushBranches()) {
                for (final BranchToPush b : branchesToPush) {
                    if (b.getBranchName() == null)
                        throw new AbortException("No branch to push defined");
                    if (b.getTargetRepoName() == null)
                        throw new AbortException("No branch repo to push to defined");

                    final String branchName = environment.expand(b.getBranchName());
                    final String targetRepo = environment.expand(b.getTargetRepoName());

                    try {
                        RemoteConfig remote = gitSCM.getRepositoryByName(targetRepo);
                        if (remote == null)
                            throw new AbortException("No repository found for target repo name " + targetRepo);

                        listener.getLogger().println("Pushing HEAD to branch " + branchName
                            + " at repo " + targetRepo);
                        git.push(remote.getName(), "HEAD:" + branchName);
                    } catch (GitException e) {
                        e.printStackTrace(listener.error("Failed to push branch " + branchName
                            + " to " + targetRepo));
                        return false;
                    }
                }
            }

            if (isPushNotes()) {
                for (final NoteToPush b : notesToPush) {
                    if (b.getnoteMsg() == null)
                        throw new AbortException("No note to push defined");

                    b.setEmptyTargetRepoToOrigin();
                    String noteMsgTmp = environment.expand(b.getnoteMsg());
                    final String noteMsg = replaceAdditionalEnvironmentalVariables(noteMsgTmp, build);
                    final String noteNamespace = environment.expand(b.getnoteNamespace());
                    final String targetRepo = environment.expand(b.getTargetRepoName());
                    final boolean noteReplace = b.getnoteReplace();

                    try {
                        RemoteConfig remote = gitSCM.getRepositoryByName(targetRepo);
                        if (remote == null) {
                            listener.getLogger().println("No repository found for target repo name " + targetRepo);
                            return false;
                        }

                        listener.getLogger().println("Adding note to namespace \"" + noteNamespace
                            + "\":\n" + noteMsg + "\n******");

                        if (noteReplace)
                            git.addNote(noteMsg, noteNamespace);
                        else
                            git.appendNote(noteMsg, noteNamespace);

                        git.push(remote.getName(), "refs/notes/*");
                    } catch (GitException e) {
                        e.printStackTrace(listener.error("Failed to add note: \n" + noteMsg + "\n******"));
                        return false;
                    }
                }
            }

            return true;
        }
    }

    /**
     * Handles migration from earlier version - if we were pushing merges, we'll be
     * instantiated but tagsToPush will be null rather than empty.
     * @return This.
     */
    private Object readResolve() {
        // Default unspecified to v0
        if (configVersion == null)
            this.configVersion = 0L;

        if (this.configVersion < 1L) {
            if (tagsToPush == null) {
                this.pushMerge = true;
            }
        }

        return this;
    }

    @Extension(ordinal = -1)
    public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {

        public String getDisplayName() {
            return "Git Publisher";
        }

        @Override
        public String getHelpFile() {
            return "/plugin/git/gitPublisher.html";
        }

        /**
         * Performs on-the-fly validation on the file mask wildcard.
         *
         * I don't think this actually ever gets called, but I'm modernizing it anyway.
         */
        public FormValidation doCheck(@AncestorInPath AbstractProject project,
                                      @QueryParameter String value) throws IOException {
            return FilePath.validateFileMask(project.getSomeWorkspace(), value);
        }

        public FormValidation doCheckTagName(@QueryParameter String value) {
            return checkFieldNotEmpty(value, "Tag Name");
        }

        public FormValidation doCheckBranchName(@QueryParameter String value) {
            return checkFieldNotEmpty(value, "Branch Name");
        }

        public FormValidation doCheckNoteMsg(@QueryParameter String value) {
            return checkFieldNotEmpty(value, "Note");
        }

        public FormValidation doCheckRemote(@AncestorInPath AbstractProject project,
                                            StaplerRequest req) throws IOException, ServletException {
            // the parameter may be absent from the request; treat that as empty
            String remote = Util.fixNull(req.getParameter("value"));
            boolean isMerge = req.getParameter("isMerge") != null;

            // Added isMerge because we don't want to allow empty remote names
            // for tag/branch pushes.
            if (remote.length() == 0 && isMerge)
                return FormValidation.ok();

            FormValidation validation = checkFieldNotEmpty(remote, "Remote Name");
            if (validation.kind != FormValidation.Kind.OK)
                return validation;

            if (!(project.getScm() instanceof GitSCM)) {
                return FormValidation.warning(
                    "Project not currently configured to use Git; cannot check remote repository");
            }

            GitSCM scm = (GitSCM) project.getScm();
            if (scm.getRepositoryByName(remote) == null)
                return FormValidation
                    .error("No remote repository configured with name '" + remote + "'");

            return FormValidation.ok();
        }

        public boolean isApplicable(Class<? extends AbstractProject> jobType) {
            return true;
        }

        private FormValidation checkFieldNotEmpty(String value, String field) {
            value = StringUtils.strip(value);

            if (value == null || value.equals("")) {
                return FormValidation.error(field + " is required.");
            }
            return FormValidation.ok();
        }
    }

    public static abstract class PushConfig extends AbstractDescribableImpl<PushConfig> implements Serializable {
        private static final long serialVersionUID = 1L;

        private String targetRepoName;

        public PushConfig(String targetRepoName) {
            this.targetRepoName = Util.fixEmptyAndTrim(targetRepoName);
        }

        public String getTargetRepoName() {
            return targetRepoName;
        }

        public void setTargetRepoName(String targetRepoName) {
            this.targetRepoName = targetRepoName;
        }

        public void setEmptyTargetRepoToOrigin() {
            if (targetRepoName == null || targetRepoName.trim().length() == 0) {
                targetRepoName = "origin";
            }
        }
    }

    public static final class BranchToPush extends PushConfig {
        private String branchName;

        public String getBranchName() {
            return branchName;
        }

        @DataBoundConstructor
        public BranchToPush(String targetRepoName, String branchName) {
            super(targetRepoName);
            this.branchName = Util.fixEmptyAndTrim(branchName);
        }

        @Extension
        public static class DescriptorImpl extends Descriptor<PushConfig> {
            @Override
            public String getDisplayName() {
                return "";
            }
        }
    }

    public static final class TagToPush extends PushConfig {
        private String tagName;
        private String tagMessage;
        private boolean createTag;
        private boolean updateTag;

        public String getTagName() {
            return tagName;
        }

        public String getTagMessage() {
            return tagMessage;
        }

        public boolean isCreateTag() {
            return createTag;
        }

        public boolean isUpdateTag() {
            return updateTag;
        }

        @DataBoundConstructor
        public TagToPush(String targetRepoName, String tagName, String tagMessage,
                         boolean createTag, boolean updateTag) {
            super(targetRepoName);
            this.tagName = Util.fixEmptyAndTrim(tagName);
            this.tagMessage = tagMessage;
            this.createTag = createTag;
            this.updateTag = updateTag;
        }

        @Extension
        public static class DescriptorImpl extends Descriptor<PushConfig> {
            @Override
            public String getDisplayName() {
                return "";
            }
        }
    }

    public static final class NoteToPush extends PushConfig {

        private String noteMsg;
        private String noteNamespace;
        private boolean noteReplace;

        public String getnoteMsg() {
            return noteMsg;
        }

        public String getnoteNamespace() {
            return noteNamespace;
        }

        public boolean getnoteReplace() {
            return noteReplace;
        }

        @DataBoundConstructor
        public NoteToPush(String targetRepoName, String noteMsg, String noteNamespace, boolean noteReplace) {
            super(targetRepoName);
            this.noteMsg = Util.fixEmptyAndTrim(noteMsg);
            this.noteReplace = noteReplace;

            if (noteNamespace != null && noteNamespace.trim().length() != 0)
                this.noteNamespace = Util.fixEmptyAndTrim(noteNamespace);
            else
                this.noteNamespace = "master";
        }

        @Extension
        public static class DescriptorImpl extends Descriptor<PushConfig> {
            @Override
            public String getDisplayName() {
                return "";
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.tencent.tinker.commons.ziputil;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.zip.ZipException;

/**
 * modify by zhangshaowen on 16/6/7.
 * remove zip64
 *
 * An entry within a zip file.
 * An entry has attributes such as its name (which is actually a path) and the uncompressed size
 * of the corresponding data. An entry does not contain the data itself, but can be used as a key
 * with {@link TinkerZipFile#getInputStream}. The class documentation for {@code ZipInputStream} and
 * {@link TinkerZipOutputStream} shows how {@code ZipEntry} is used in conjunction with those two classes.
 */
public class TinkerZipEntry implements ZipConstants, Cloneable {
    /**
     * Zip entry state: Deflated.
     */
    public static final int DEFLATED = 8;
    /**
     * Zip entry state: Stored.
     */
    public static final int STORED = 0;
    String name;
    String comment;
    // Needs to be a long to distinguish -1 ("not set") from the 0xffffffff CRC32.
    long crc = -1;
    long compressedSize = -1;
    long size = -1;
    int compressionMethod = -1;
    // DOS-format packed time-of-day and date (see getTime()/setTime() for the bit layout).
    int time = -1;
    int modDate = -1;
    // May legitimately be null: entries created via TinkerZipEntry(String) or read from a
    // central directory record without an extra field never get one.
    byte[] extra;
    long localHeaderRelOffset = -1;
    long dataOffset = -1;

    /** @hide - for testing only */
    public TinkerZipEntry(String name, String comment, long crc, long compressedSize,
            long size, int compressionMethod, int time, int modDate, byte[] extra,
            long localHeaderRelOffset, long dataOffset) {
        this.name = name;
        this.comment = comment;
        this.crc = crc;
        this.compressedSize = compressedSize;
        this.size = size;
        this.compressionMethod = compressionMethod;
        this.time = time;
        this.modDate = modDate;
        this.extra = extra;
        this.localHeaderRelOffset = localHeaderRelOffset;
        this.dataOffset = dataOffset;
    }

    /**
     * Constructs a new {@code ZipEntry} with the specified name. The name is actually a path,
     * and may contain {@code /} characters.
     *
     * @throws IllegalArgumentException
     *             if the name length is outside the range (> 0xFFFF).
     */
    public TinkerZipEntry(String name) {
        if (name == null) {
            throw new NullPointerException("name == null");
        }
        validateStringLength("Name", name);
        this.name = name;
    }

    /**
     * Constructs a new {@code ZipEntry} using the values obtained from {@code
     * ze}.
     *
     * @param ze
     *            the {@code ZipEntry} from which to obtain values.
     */
    public TinkerZipEntry(TinkerZipEntry ze) {
        name = ze.name;
        comment = ze.comment;
        time = ze.time;
        size = ze.size;
        compressedSize = ze.compressedSize;
        crc = ze.crc;
        compressionMethod = ze.compressionMethod;
        modDate = ze.modDate;
        extra = ze.extra;
        localHeaderRelOffset = ze.localHeaderRelOffset;
        dataOffset = ze.dataOffset;
    }

    /**
     * Constructs a copy of {@code ze} with its name replaced by {@code name}.
     * All other attributes (sizes, CRC, offsets, extra data, ...) are taken from {@code ze}.
     */
    public TinkerZipEntry(TinkerZipEntry ze, String name) {
        this.name = name;
        comment = ze.comment;
        time = ze.time;
        size = ze.size;
        compressedSize = ze.compressedSize;
        crc = ze.crc;
        compressionMethod = ze.compressionMethod;
        modDate = ze.modDate;
        extra = ze.extra;
        localHeaderRelOffset = ze.localHeaderRelOffset;
        dataOffset = ze.dataOffset;
    }

    /*
     * Internal constructor.  Creates a new ZipEntry by reading the
     * Central Directory Entry (CDE) from "in", which must be positioned
     * at the CDE signature. If the GPBF_UTF8_FLAG is set in the CDE then
     * UTF-8 is used to decode the string information, otherwise the
     * defaultCharset is used.
     *
     * On exit, "in" will be positioned at the start of the next entry
     * in the Central Directory.
     */
    TinkerZipEntry(byte[] cdeHdrBuf, InputStream cdStream, Charset defaultCharset, boolean isZip64) throws IOException {
        Streams.readFully(cdStream, cdeHdrBuf, 0, cdeHdrBuf.length);
        BufferIterator it = HeapBufferIterator.iterator(cdeHdrBuf, 0, cdeHdrBuf.length,
                ByteOrder.LITTLE_ENDIAN);
        int sig = it.readInt();
        if (sig != CENSIG) {
            TinkerZipFile.throwZipException("unknown", cdStream.available(), "unknown", 0, "Central Directory Entry", sig);
        }
        it.seek(8);
        int gpbf = it.readShort() & 0xffff;
        if ((gpbf & TinkerZipFile.GPBF_UNSUPPORTED_MASK) != 0) {
            throw new ZipException("Invalid General Purpose Bit Flag: " + gpbf);
        }
        // If the GPBF_UTF8_FLAG is set then the character encoding is UTF-8 whatever the default
        // provided.
        Charset charset = defaultCharset;
        if ((gpbf & TinkerZipFile.GPBF_UTF8_FLAG) != 0) {
            charset = StandardCharsets.UTF_8;
        }
        compressionMethod = it.readShort() & 0xffff;
        time = it.readShort() & 0xffff;
        modDate = it.readShort() & 0xffff;
        // These are 32-bit values in the file, but 64-bit fields in this object.
        crc = ((long) it.readInt()) & 0xffffffffL;
        compressedSize = ((long) it.readInt()) & 0xffffffffL;
        size = ((long) it.readInt()) & 0xffffffffL;
        int nameLength = it.readShort() & 0xffff;
        int extraLength = it.readShort() & 0xffff;
        int commentByteCount = it.readShort() & 0xffff;
        // This is a 32-bit value in the file, but a 64-bit field in this object.
        it.seek(42);
        localHeaderRelOffset = ((long) it.readInt()) & 0xffffffffL;
        byte[] nameBytes = new byte[nameLength];
        Streams.readFully(cdStream, nameBytes, 0, nameBytes.length);
        if (containsNulByte(nameBytes)) {
            throw new ZipException("Filename contains NUL byte: " + Arrays.toString(nameBytes));
        }
        name = new String(nameBytes, 0, nameBytes.length, charset);
        if (extraLength > 0) {
            extra = new byte[extraLength];
            Streams.readFully(cdStream, extra, 0, extraLength);
        }
        if (commentByteCount > 0) {
            byte[] commentBytes = new byte[commentByteCount];
            Streams.readFully(cdStream, commentBytes, 0, commentByteCount);
            comment = new String(commentBytes, 0, commentBytes.length, charset);
        }
        /*if (isZip64) {
            Zip64.parseZip64ExtendedInfo(this, true *//* from central directory *//*);
        }*/
    }

    /** Returns true if {@code bytes} contains a NUL (0) byte anywhere. */
    private static boolean containsNulByte(byte[] bytes) {
        for (byte b : bytes) {
            if (b == 0) {
                return true;
            }
        }
        return false;
    }

    private static void validateStringLength(String argument, String string) {
        // This check is not perfect: the character encoding is determined when the entry is
        // written out. UTF-8 is probably a worst-case: most alternatives should be single byte per
        // character.
        byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
        if (bytes.length > 0xffff) {
            throw new IllegalArgumentException(argument + " too long: " + bytes.length);
        }
    }

    /**
     * Returns the comment for this {@code ZipEntry}, or {@code null} if there is no comment.
     * If we're reading a zip file using {@code ZipInputStream}, the comment is not available.
     */
    public String getComment() {
        return comment;
    }

    /**
     * Sets the comment for this {@code ZipEntry}.
     * @throws IllegalArgumentException if the comment is >= 64 Ki UTF-8 bytes.
     */
    public void setComment(String comment) {
        if (comment == null) {
            this.comment = null;
            return;
        }
        validateStringLength("Comment", comment);
        this.comment = comment;
    }

    /**
     * Gets the compressed size of this {@code ZipEntry}.
     *
     * @return the compressed size, or -1 if the compressed size has not been
     *         set.
     */
    public long getCompressedSize() {
        return compressedSize;
    }

    /**
     * Sets the compressed size for this {@code ZipEntry}.
     *
     * @param value
     *            the compressed size (in bytes).
     */
    public void setCompressedSize(long value) {
        compressedSize = value;
    }

    /**
     * Gets the checksum for this {@code ZipEntry}.
     *
     * @return the checksum, or -1 if the checksum has not been set.
     */
    public long getCrc() {
        return crc;
    }

    /**
     * Sets the checksum for this {@code ZipEntry}.
     *
     * @param value
     *            the checksum for this entry.
     * @throws IllegalArgumentException
     *             if {@code value} is < 0 or > 0xFFFFFFFFL.
     */
    public void setCrc(long value) {
        if (value >= 0 && value <= 0xFFFFFFFFL) {
            crc = value;
        } else {
            throw new IllegalArgumentException("Bad CRC32: " + value);
        }
    }

    /**
     * Gets the extra information for this {@code ZipEntry}.
     *
     * @return a byte array containing the extra information, or {@code null} if
     *         there is none.
     */
    public byte[] getExtra() {
        return extra;
    }

    /**
     * Sets the extra information for this {@code ZipEntry}.
     *
     * @throws IllegalArgumentException if the data length >= 64 KiB.
     */
    public void setExtra(byte[] data) {
        if (data != null && data.length > 0xffff) {
            throw new IllegalArgumentException("Extra data too long: " + data.length);
        }
        extra = data;
    }

    /**
     * Gets the compression method for this {@code ZipEntry}.
     *
     * @return the compression method, either {@code DEFLATED}, {@code STORED}
     *         or -1 if the compression method has not been set.
     */
    public int getMethod() {
        return compressionMethod;
    }

    /**
     * Sets the compression method for this entry to either {@code DEFLATED} or {@code STORED}.
     * The default is {@code DEFLATED}, which will cause the size, compressed size, and CRC to be
     * set automatically, and the entry's data to be compressed. If you switch to {@code STORED}
     * note that you'll have to set the size (or compressed size; they must be the same, but it's
     * okay to only set one) and CRC yourself because they must appear <i>before</i> the user data
     * in the resulting zip file. See {@link #setSize} and {@link #setCrc}.
     * @throws IllegalArgumentException
     *             when value is not {@code DEFLATED} or {@code STORED}.
     */
    public void setMethod(int value) {
        if (value != STORED && value != DEFLATED) {
            throw new IllegalArgumentException("Bad method: " + value);
        }
        compressionMethod = value;
    }

    /**
     * Gets the name of this {@code ZipEntry}.
     *
     * <p><em>Security note:</em> Entry names can represent relative paths. {@code foo/../bar} or
     * {@code ../bar/baz}, for example. If the entry name is being used to construct a filename
     * or as a path component, it must be validated or sanitized to ensure that files are not
     * written outside of the intended destination directory.
     *
     * @return the entry name.
     */
    public String getName() {
        return name;
    }

    /**
     * Gets the uncompressed size of this {@code ZipEntry}.
     *
     * @return the uncompressed size, or {@code -1} if the size has not been
     *         set.
     */
    public long getSize() {
        return size;
    }

    /**
     * Sets the uncompressed size of this {@code ZipEntry}.
     *
     * @param value the uncompressed size for this entry.
     * @throws IllegalArgumentException if {@code value < 0}.
     */
    public void setSize(long value) {
        if (value < 0) {
            throw new IllegalArgumentException("Bad size: " + value);
        }
        size = value;
    }

    /**
     * Gets the last modification time of this {@code ZipEntry}.
     *
     * @return the last modification time as the number of milliseconds since
     *         Jan. 1, 1970.
     */
    public long getTime() {
        if (time != -1) {
            // Unpack the DOS date/time format: modDate holds year (since 1980)/month/day,
            // time holds hour/minute/two-second units.
            GregorianCalendar cal = new GregorianCalendar();
            cal.set(Calendar.MILLISECOND, 0);
            cal.set(1980 + ((modDate >> 9) & 0x7f), ((modDate >> 5) & 0xf) - 1,
                    modDate & 0x1f, (time >> 11) & 0x1f, (time >> 5) & 0x3f,
                    (time & 0x1f) << 1);
            return cal.getTime().getTime();
        }
        return -1;
    }

    /**
     * Sets the modification time of this {@code ZipEntry}.
     *
     * @param value
     *            the modification time as the number of milliseconds since Jan.
     *            1, 1970.
     */
    public void setTime(long value) {
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTime(new Date(value));
        int year = cal.get(Calendar.YEAR);
        if (year < 1980) {
            // DOS dates can't represent years before 1980; clamp to 1980-01-01 00:00:00.
            modDate = 0x21;
            time = 0;
        } else {
            // Pack into the DOS date/time format (seconds are stored in two-second units).
            modDate = cal.get(Calendar.DATE);
            modDate = (cal.get(Calendar.MONTH) + 1 << 5) | modDate;
            modDate = ((cal.get(Calendar.YEAR) - 1980) << 9) | modDate;
            time = cal.get(Calendar.SECOND) >> 1;
            time = (cal.get(Calendar.MINUTE) << 5) | time;
            time = (cal.get(Calendar.HOUR_OF_DAY) << 11) | time;
        }
    }

    /**
     * Determine whether or not this {@code ZipEntry} is a directory.
     *
     * @return {@code true} when this {@code ZipEntry} is a directory, {@code
     *         false} otherwise.
     */
    public boolean isDirectory() {
        // Guard against empty names (validateStringLength() does not reject ""): an empty
        // name is not a directory rather than a StringIndexOutOfBoundsException.
        return !name.isEmpty() && name.charAt(name.length() - 1) == '/';
    }

    /** @hide */
    public long getDataOffset() {
        return dataOffset;
    }

    /** @hide */
    public void setDataOffset(long value) {
        dataOffset = value;
    }

    /**
     * Returns the string representation of this {@code ZipEntry}.
     *
     * @return the string representation of this {@code ZipEntry}.
     */
    @Override
    public String toString() {
        // Use StringBuilder (no synchronization needed) and guard "extra", which is null for
        // entries without an extra field -- the previous code NPE'd on extra.length here.
        StringBuilder sb = new StringBuilder();
        sb.append("name:").append(name);
        sb.append("\ncomment:").append(comment);
        sb.append("\ntime:").append(time);
        sb.append("\nsize:").append(size);
        sb.append("\ncompressedSize:").append(compressedSize);
        sb.append("\ncrc:").append(crc);
        sb.append("\ncompressionMethod:").append(compressionMethod);
        sb.append("\nmodDate:").append(modDate);
        sb.append("\nextra length:").append(extra == null ? 0 : extra.length);
        sb.append("\nlocalHeaderRelOffset:").append(localHeaderRelOffset);
        sb.append("\ndataOffset:").append(dataOffset);
        return sb.toString();
    }

    /**
     * Returns a deep copy of this zip entry.
     */
    @Override
    public Object clone() {
        try {
            TinkerZipEntry result = (TinkerZipEntry) super.clone();
            result.extra = extra != null ? extra.clone() : null;
            return result;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Returns the hash code for this {@code ZipEntry}.
     *
     * @return the hash code of the entry.
     */
    @Override
    public int hashCode() {
        return name.hashCode();
    }
}
/*
 * Copyright 2015-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.parser;

import static com.facebook.buck.parser.ParserConfig.DEFAULT_BUILD_FILE_NAME;
import static org.junit.Assert.assertThat;
import static org.junit.Assume.assumeTrue;

import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.cell.TestCellBuilder;
import com.facebook.buck.core.plugin.impl.BuckPluginManagerFactory;
import com.facebook.buck.core.rules.config.impl.PluginBasedKnownConfigurationDescriptionsFactory;
import com.facebook.buck.core.rules.knowntypes.DefaultKnownRuleTypesFactory;
import com.facebook.buck.core.rules.knowntypes.KnownRuleTypes;
import com.facebook.buck.core.rules.knowntypes.KnownRuleTypesFactory;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusForTests;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.io.watchman.WatchmanDiagnosticEvent;
import com.facebook.buck.json.PythonDslProjectBuildFileParser;
import com.facebook.buck.parser.exceptions.BuildFileParseException;
import com.facebook.buck.parser.options.ProjectBuildFileParserOptions;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.facebook.buck.sandbox.TestSandboxExecutionStrategyFactory;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.util.DefaultProcessExecutor;
import com.facebook.buck.util.FakeProcess;
import com.facebook.buck.util.FakeProcessExecutor;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.environment.Platform;
import com.facebook.buck.util.json.ObjectMappers;
import com.facebook.buck.util.timing.FakeClock;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.eventbus.Subscribe;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.pf4j.PluginManager;

/**
 * Tests for {@link PythonDslProjectBuildFileParser}: the python subprocess is replaced with a
 * {@link FakeProcess} whose stdout is a canned JSON payload, so each test checks how the parser
 * surfaces exit codes, diagnostics (warnings/errors/exceptions), and stderr as events or
 * {@link BuildFileParseException}s.
 */
public class PythonDslProjectBuildFileParserTest {
  private Cell cell;
  private KnownRuleTypes knownRuleTypes;

  @Rule public ExpectedException thrown = ExpectedException.none();

  /** Builds a fresh test cell and the known rule types the parser factory needs. */
  @Before
  public void createCell() {
    cell = new TestCellBuilder().build();
    PluginManager pluginManager = BuckPluginManagerFactory.createPluginManager();
    KnownRuleTypesFactory knownTypesFactory =
        new DefaultKnownRuleTypesFactory(
            new DefaultProcessExecutor(new TestConsole()),
            pluginManager,
            new TestSandboxExecutionStrategyFactory(),
            PluginBasedKnownConfigurationDescriptionsFactory.createFromPlugins(pluginManager));
    knownRuleTypes = knownTypesFactory.create(cell);
  }

  /**
   * Fabricates a subprocess whose stdout is the JSON document {"values": ..., "diagnostics": ...}
   * that the python DSL parser protocol emits; {@code stdout} here becomes the fake process's
   * stderr-side stream content (third stream of {@link FakeProcess}).
   */
  private static FakeProcess fakeProcessWithJsonOutput(
      int returnCode,
      List<Object> values,
      Optional<List<Object>> diagnostics,
      Optional<String> stdout) {
    Map<String, Object> outputToSerialize = new LinkedHashMap<>();
    outputToSerialize.put("values", values);
    if (diagnostics.isPresent()) {
      outputToSerialize.put("diagnostics", diagnostics.get());
    }
    byte[] serialized;
    try {
      serialized = ObjectMappers.WRITER.writeValueAsBytes(outputToSerialize);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return new FakeProcess(
        returnCode,
        new ByteArrayOutputStream(),
        new ByteArrayInputStream(serialized),
        new ByteArrayInputStream(stdout.orElse("").getBytes(StandardCharsets.UTF_8)));
  }

  @Test
  public void whenSubprocessReturnsSuccessThenProjectBuildFileParserClosesCleanly()
      throws IOException, BuildFileParseException, InterruptedException {
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsSuccess()) {
      buildFileParser.initIfNeeded();
      // close() is called implicitly at the end of this block. It must not throw.
    }
  }

  @Test(expected = BuildFileParseException.class)
  public void whenSubprocessReturnsFailureThenProjectBuildFileParserThrowsOnClose()
      throws IOException, BuildFileParseException, InterruptedException {
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsError()) {
      buildFileParser.initIfNeeded();
      // close() is called implicitly at the end of this block. It must throw.
    }
  }

  @Test
  public void whenSubprocessPrintsWarningToStderrThenConsoleEventPublished()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(FakeClock.doNotCare());
    List<ConsoleEvent> consoleEvents = new ArrayList<>();
    class EventListener {
      @Subscribe
      public void onConsoleEvent(ConsoleEvent consoleEvent) {
        consoleEvents.add(consoleEvent);
      }
    }
    EventListener eventListener = new EventListener();
    buckEventBus.register(eventListener);
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsSuccessAndPrintsToStderr(
            buckEventBus)) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo"));
    }
    // NOTE(review): index 1 assumes the parser emits one leading console event before relaying
    // the stderr line -- confirm against PythonDslProjectBuildFileParser's event ordering.
    assertThat(consoleEvents.get(1).getMessage(), Matchers.containsString("| Don't Panic!"));
  }

  @Test
  public void whenSubprocessReturnsWarningThenConsoleEventPublished()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(FakeClock.doNotCare());
    List<ConsoleEvent> consoleEvents = new ArrayList<>();
    List<WatchmanDiagnosticEvent> watchmanDiagnosticEvents = new ArrayList<>();
    class EventListener {
      @Subscribe
      public void on(ConsoleEvent consoleEvent) {
        consoleEvents.add(consoleEvent);
      }

      @Subscribe
      public void on(WatchmanDiagnosticEvent event) {
        watchmanDiagnosticEvents.add(event);
      }
    }
    EventListener eventListener = new EventListener();
    buckEventBus.register(eventListener);
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsSuccessWithWarning(
            buckEventBus, "This is a warning", "parser")) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo"));
    }
    // A "parser"-sourced warning must surface as a console event, never a watchman diagnostic.
    assertThat(
        consoleEvents,
        Matchers.contains(
            Matchers.hasToString("Warning raised by BUCK file parser: This is a warning")));
    assertThat(
        "Should not receive any watchman diagnostic events",
        watchmanDiagnosticEvents,
        Matchers.empty());
  }

  @Test
  public void whenSubprocessReturnsNewWatchmanWarningThenDiagnosticEventPublished()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(FakeClock.doNotCare());
    List<WatchmanDiagnosticEvent> watchmanDiagnosticEvents = new ArrayList<>();
    class EventListener {
      @Subscribe
      public void on(WatchmanDiagnosticEvent consoleEvent) {
        watchmanDiagnosticEvents.add(consoleEvent);
      }
    }
    EventListener eventListener = new EventListener();
    buckEventBus.register(eventListener);
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsSuccessWithWarning(
            buckEventBus, "This is a watchman warning", "watchman")) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo"));
    }
    // "watchman"-sourced warnings are routed to WatchmanDiagnosticEvent instead of the console.
    assertThat(
        watchmanDiagnosticEvents,
        Matchers.contains(
            Matchers.hasToString(Matchers.containsString("This is a watchman warning"))));
  }

  @Test
  public void whenSubprocessReturnsErrorThenConsoleEventPublished()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(FakeClock.doNotCare());
    List<ConsoleEvent> consoleEvents = new ArrayList<>();
    class EventListener {
      @Subscribe
      public void onConsoleEvent(ConsoleEvent consoleEvent) {
        consoleEvents.add(consoleEvent);
      }
    }
    EventListener eventListener = new EventListener();
    buckEventBus.register(eventListener);
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsSuccessWithError(
            buckEventBus, "This is an error", "parser")) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo"));
    }
    assertThat(
        consoleEvents,
        Matchers.contains(
            Matchers.hasToString("Error raised by BUCK file parser: This is an error")));
  }

  @Test
  public void whenSubprocessReturnsSyntaxErrorInFileBeingParsedThenExceptionContainsFileNameOnce()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    // Syntax error in the file being parsed itself: message shows line/column and a caret,
    // naming the file only once.
    thrown.expect(BuildFileParseException.class);
    thrown.expectMessage(
        "Buck wasn't able to parse foo/BUCK:\n"
            + "Syntax error on line 23, column 16:\n"
            + "java_test(name=*@!&#(!@&*()\n"
            + "               ^");
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsErrorWithException(
            BuckEventBusForTests.newInstance(FakeClock.doNotCare()),
            "This is an error",
            "parser",
            ImmutableMap.<String, Object>builder()
                .put("type", "SyntaxError")
                .put("value", "You messed up")
                .put("filename", "foo/BUCK")
                .put("lineno", 23)
                .put("offset", 16)
                .put("text", "java_test(name=*@!&#(!@&*()\n")
                .put(
                    "traceback",
                    ImmutableList.of(
                        ImmutableMap.of(
                            "filename", "foo/BUCK",
                            "line_number", 23,
                            "function_name", "<module>",
                            "text", "java_test(name=*@!&#(!@&*()\n")))
                .build())) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo/BUCK"));
    }
  }

  @Test
  public void whenSubprocessReturnsSyntaxErrorWithoutOffsetThenExceptionIsFormattedWithoutColumn()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    // Same as above but the "offset" key is omitted, so no column/caret appears in the message.
    thrown.expect(BuildFileParseException.class);
    thrown.expectMessage(
        "Buck wasn't able to parse foo/BUCK:\n"
            + "Syntax error on line 23:\n"
            + "java_test(name=*@!&#(!@&*()");
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsErrorWithException(
            BuckEventBusForTests.newInstance(FakeClock.doNotCare()),
            "This is an error",
            "parser",
            ImmutableMap.<String, Object>builder()
                .put("type", "SyntaxError")
                .put("value", "You messed up")
                .put("filename", "foo/BUCK")
                .put("lineno", 23)
                .put("text", "java_test(name=*@!&#(!@&*()\n")
                .put(
                    "traceback",
                    ImmutableList.of(
                        ImmutableMap.of(
                            "filename", "foo/BUCK",
                            "line_number", 23,
                            "function_name", "<module>",
                            "text", "java_test(name=*@!&#(!@&*()\n")))
                .build())) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo/BUCK"));
    }
  }

  @Test
  public void whenSubprocessReturnsSyntaxErrorInDifferentFileThenExceptionContainsTwoFileNames()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    BuckEventBus buckEventBus = BuckEventBusForTests.newInstance(FakeClock.doNotCare());
    List<ConsoleEvent> consoleEvents = new ArrayList<>();
    class EventListener {
      @Subscribe
      public void onConsoleEvent(ConsoleEvent consoleEvent) {
        consoleEvents.add(consoleEvent);
      }
    }
    EventListener eventListener = new EventListener();
    buckEventBus.register(eventListener);
    // Syntax error in an *included* file (bar/BUCK) while parsing foo/BUCK: the message must
    // name both the parsed file and the offending file.
    thrown.expect(BuildFileParseException.class);
    thrown.expectMessage(
        "Buck wasn't able to parse foo/BUCK:\n"
            + "Syntax error in bar/BUCK\n"
            + "Line 42, column 24:\n"
            + "def some_helper_method(!@87*@!#\n"
            + "                       ^");
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsErrorWithException(
            buckEventBus,
            "This is an error",
            "parser",
            ImmutableMap.<String, Object>builder()
                .put("type", "SyntaxError")
                .put("value", "You messed up")
                .put("filename", "bar/BUCK")
                .put("lineno", 42)
                .put("offset", 24)
                .put("text", "def some_helper_method(!@87*@!#\n")
                .put(
                    "traceback",
                    ImmutableList.of(
                        ImmutableMap.of(
                            "filename", "bar/BUCK",
                            "line_number", 42,
                            "function_name", "<module>",
                            "text", "def some_helper_method(!@87*@!#\n"),
                        ImmutableMap.of(
                            "filename", "foo/BUCK",
                            "line_number", 23,
                            "function_name", "<module>",
                            "text", "some_helper_method(name=*@!&#(!@&*()\n")))
                .build())) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo/BUCK"));
    }
  }

  @Test
  public void whenSubprocessReturnsNonSyntaxErrorThenExceptionContainsFullStackTrace()
      throws IOException, BuildFileParseException, InterruptedException {
    // This test depends on unix utilities that don't exist on Windows.
    assumeTrue(Platform.detect() != Platform.WINDOWS);
    TestProjectBuildFileParserFactory buildFileParserFactory =
        new TestProjectBuildFileParserFactory(cell.getRoot(), knownRuleTypes);
    // Non-syntax errors (e.g. ZeroDivisionError) render the whole python call stack instead of
    // the line/caret format used for SyntaxError.
    thrown.expect(BuildFileParseException.class);
    thrown.expectMessage(
        "Buck wasn't able to parse foo/BUCK:\n"
            + "ZeroDivisionError: integer division or modulo by zero\n"
            + "Call stack:\n"
            + "  File \"bar/BUCK\", line 42, in lets_divide_by_zero\n"
            + "    foo / bar\n"
            + "\n"
            + "  File \"foo/BUCK\", line 23\n"
            + "    lets_divide_by_zero()\n"
            + "\n");
    try (PythonDslProjectBuildFileParser buildFileParser =
        buildFileParserFactory.createNoopParserThatAlwaysReturnsErrorWithException(
            BuckEventBusForTests.newInstance(FakeClock.doNotCare()),
            "This is an error",
            "parser",
            ImmutableMap.<String, Object>builder()
                .put("type", "ZeroDivisionError")
                .put("value", "integer division or modulo by zero")
                .put("filename", "bar/BUCK")
                .put("lineno", 42)
                .put("offset", 24)
                .put("text", "foo / bar\n")
                .put(
                    "traceback",
                    ImmutableList.of(
                        ImmutableMap.of(
                            "filename", "bar/BUCK",
                            "line_number", 42,
                            "function_name", "lets_divide_by_zero",
                            "text", "foo / bar\n"),
                        ImmutableMap.of(
                            "filename", "foo/BUCK",
                            "line_number", 23,
                            "function_name", "<module>",
                            "text", "lets_divide_by_zero()\n")))
                .build())) {
      buildFileParser.initIfNeeded();
      buildFileParser.getBuildFileManifest(Paths.get("foo/BUCK"));
    }
  }

  /**
   * ProjectBuildFileParser test double which counts the number of times rules are parsed to test
   * caching logic in Parser.
   */
  private static class TestProjectBuildFileParserFactory {
    private final Path projectRoot;
    private final KnownRuleTypes ruleTypes;

    public TestProjectBuildFileParserFactory(Path projectRoot, KnownRuleTypes ruleTypes) {
      this.projectRoot = projectRoot;
      this.ruleTypes = ruleTypes;
    }

    // Exit code 1 with no output: close() must report failure.
    public PythonDslProjectBuildFileParser createNoopParserThatAlwaysReturnsError() {
      return new TestPythonDslProjectBuildFileParser(
          "fake-python",
          new FakeProcessExecutor(
              params ->
                  fakeProcessWithJsonOutput(
                      1, ImmutableList.of(), Optional.empty(), Optional.empty()),
              new TestConsole()),
          BuckEventBusForTests.newInstance());
    }

    // Exit code 0 with the minimal well-formed payload: close() must succeed.
    public PythonDslProjectBuildFileParser createNoopParserThatAlwaysReturnsSuccess() {
      return new TestPythonDslProjectBuildFileParser(
          "fake-python",
          new FakeProcessExecutor(
              params ->
                  fakeProcessWithJsonOutput(
                      0,
                      ImmutableList.of("__includes", "__configs", "__env"),
                      Optional.empty(),
                      Optional.empty()),
              new TestConsole()),
          BuckEventBusForTests.newInstance());
    }

    // Success payload plus a line on the subprocess's stderr stream.
    public PythonDslProjectBuildFileParser
        createNoopParserThatAlwaysReturnsSuccessAndPrintsToStderr(BuckEventBus buckEventBus) {
      return new TestPythonDslProjectBuildFileParser(
          "fake-python",
          new FakeProcessExecutor(
              params ->
                  fakeProcessWithJsonOutput(
                      0, ImmutableList.of(), Optional.empty(), Optional.of("Don't Panic!")),
              new TestConsole()),
          buckEventBus);
    }

    // Success payload carrying one "warning"-level diagnostic from the given source.
    public PythonDslProjectBuildFileParser createNoopParserThatAlwaysReturnsSuccessWithWarning(
        BuckEventBus buckEventBus, String warning, String source) {
      return new TestPythonDslProjectBuildFileParser(
          "fake-python",
          new FakeProcessExecutor(
              params ->
                  fakeProcessWithJsonOutput(
                      0,
                      ImmutableList.of(),
                      Optional.of(
                          ImmutableList.of(
                              ImmutableMap.of(
                                  "level", "warning", "message", warning, "source", source))),
                      Optional.empty()),
              new TestConsole()),
          buckEventBus);
    }

    // Success payload carrying one "error"-level diagnostic from the given source.
    public PythonDslProjectBuildFileParser createNoopParserThatAlwaysReturnsSuccessWithError(
        BuckEventBus buckEventBus, String error, String source) {
      return new TestPythonDslProjectBuildFileParser(
          "fake-python",
          new FakeProcessExecutor(
              params ->
                  fakeProcessWithJsonOutput(
                      0,
                      ImmutableList.of(),
                      Optional.of(
                          ImmutableList.of(
                              ImmutableMap.of(
                                  "level", "error", "message", error, "source", source))),
                      Optional.empty()),
              new TestConsole()),
          buckEventBus);
    }

    // Exit code 1 with a "fatal" diagnostic containing a structured python exception record.
    public PythonDslProjectBuildFileParser createNoopParserThatAlwaysReturnsErrorWithException(
        BuckEventBus buckEventBus,
        String error,
        String source,
        ImmutableMap<String, Object> exception) {
      return new TestPythonDslProjectBuildFileParser(
          "fake-python",
          new FakeProcessExecutor(
              params ->
                  fakeProcessWithJsonOutput(
                      1,
                      ImmutableList.of(),
                      Optional.of(
                          ImmutableList.of(
                              ImmutableMap.of(
                                  "level",
                                  "fatal",
                                  "message",
                                  error,
                                  "source",
                                  source,
                                  "exception",
                                  exception))),
                      Optional.empty()),
              new TestConsole()),
          buckEventBus);
    }

    /** Parser wired with canned options for the test project root and rule types. */
    private class TestPythonDslProjectBuildFileParser extends PythonDslProjectBuildFileParser {
      public TestPythonDslProjectBuildFileParser(
          String pythonInterpreter, ProcessExecutor processExecutor, BuckEventBus buckEventBus) {
        super(
            ProjectBuildFileParserOptions.builder()
                .setProjectRoot(projectRoot)
                .setPythonInterpreter(pythonInterpreter)
                .setAllowEmptyGlobs(ParserConfig.DEFAULT_ALLOW_EMPTY_GLOBS)
                .setIgnorePaths(ImmutableSet.of())
                .setBuildFileName(DEFAULT_BUILD_FILE_NAME)
                .setDefaultIncludes(ImmutableSet.of("//java/com/facebook/defaultIncludeFile"))
                .setDescriptions(ruleTypes.getDescriptions())
                .setBuildFileImportWhitelist(ImmutableList.of())
                .build(),
            new DefaultTypeCoercerFactory(),
            ImmutableMap.of(),
            buckEventBus,
            processExecutor,
            Optional.empty());
      }
    }
  }
}
package com.xqbase.util.http;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.zip.GZIPInputStream;

import com.xqbase.util.ByteArrayQueue;
import com.xqbase.util.Numbers;
import com.xqbase.util.SocketPool;

/**
 * Connection parameters resolved from a URL and an optional HTTP proxy:
 * where to open the TCP socket, which request-target/Host to send, and
 * whether a CONNECT tunnel is needed (HTTPS through a proxy).
 */
class HttpParam {
	String socketHost, path, host, proxyAuth;
	int socketPort;
	boolean secure, connect;

	HttpParam(HttpProxy httpProxy, String url) {
		try {
			URL url_ = new URL(url);
			int port = url_.getPort();
			String query = url_.getQuery();
			secure = url_.getProtocol().equals("https");
			socketHost = url_.getHost();
			socketPort = port == -1 ? url_.getDefaultPort() : port;
			path = url_.getPath() + (query == null ? "" : "?" + query);
			host = url_.getHost() + (port == -1 ? "" : ":" + port);
		} catch (IOException e) {
			// Unparsable URL (MalformedURLException): fall back to a harmless default
			secure = false;
			socketHost = "localhost";
			socketPort = 80;
			path = "/";
			host = "localhost";
		}
		connect = false;
		if (httpProxy == null) {
			proxyAuth = null;
			return;
		}
		// With a proxy, the TCP connection goes to the proxy itself
		socketHost = httpProxy.getHost();
		socketPort = httpProxy.getPort();
		if (secure) {
			// HTTPS through a proxy requires a CONNECT tunnel
			connect = true;
		} else {
			// Plain HTTP through a proxy uses the absolute-form request target
			path = "http://" + host + path;
		}
		proxyAuth = httpProxy.getProxyAuth();
	}
}

/**
 * Minimal HTTP/1.1 client over raw sockets: HEAD/GET/POST with optional proxy
 * (including CONNECT tunneling for HTTPS), gzip response decoding and chunked
 * transfer decoding.
 */
public class HttpUtil {
	private static final int RESP_MAX_SIZE = 65536;
	private static final String HEX_DIGITS = "0123456789ABCDEF";

	/**
	 * Reads exactly {@code length} bytes from {@code in}, appending them to
	 * {@code baq} when it is non-null (null = discard).
	 *
	 * @throws IOException on EOF ("Connection Lost") or a zero-byte read
	 */
	private static void copyResponse(InputStream in, ByteArrayQueue baq,
			byte[] buffer, int length) throws IOException {
		int bytesToRead = length;
		while (bytesToRead > 0) {
			int bytesRead = in.read(buffer, 0, Math.min(RESP_MAX_SIZE, bytesToRead));
			if (bytesRead < 0) {
				throw new IOException("Connection Lost");
			}
			if (bytesRead == 0) {
				throw new IOException("Zero Bytes Read");
			}
			bytesToRead -= bytesRead;
			if (baq != null) {
				baq.add(buffer, 0, bytesRead);
			}
		}
	}

	private static final byte[] CRLF = {'\r', '\n'};
	private static final byte[] COLON = {':', ' '};
	private static final byte[] GET = "GET ".getBytes();
	private static final byte[] HEAD = "HEAD ".getBytes();
	private static final byte[] POST = "POST ".getBytes();
	private static final byte[] CONNECT = "CONNECT ".getBytes();
	private static final byte[] HTTP11 = " HTTP/1.1\r\n".getBytes();
	private static final byte[] HTTP10 = " HTTP/1.0\r\n".getBytes();
	private static final byte[] HOST = "Host: ".getBytes();
	private static final byte[] PROXY_AUTH = "Proxy-Authorization: ".getBytes();
	private static final byte[] CONTENT_LENGTH = "Content-Length: ".getBytes();
	private static final byte[] HEAD_END = ("Accept-Encoding: gzip\r\n" +
			"Connection: Keep-Alive\r\n\r\n").getBytes();

	// Headers that send() manages itself; caller-supplied copies are dropped.
	// FIX: this set previously contained "PROXY_AUTH" (the name of the byte[]
	// constant above), which never matches a real header name, so a caller
	// could make the request carry two Proxy-Authorization headers. The actual
	// header name is "Proxy-Authorization".
	private static final Set<String> SKIP_HEADERS = new HashSet<>(Arrays.asList(
		"ACCEPT-ENCODING", "CONNECTION", "CONTENT-LENGTH", "PROXY-AUTHORIZATION"
	));

	/**
	 * Writes a full HTTP request (request line, headers, optional body).
	 *
	 * @param requestBody null = GET/HEAD; non-null = POST with Content-Length
	 * @param head true = HEAD, false = GET (ignored when requestBody != null)
	 */
	static void send(OutputStream out, String path, String host,
			String proxyAuth, ByteArrayQueue requestBody,
			Map<String, List<String>> requestHeaders, boolean head) throws IOException {
		ByteArrayQueue headerBaq = new ByteArrayQueue();
		if (requestBody == null) {
			headerBaq.add(head ? HEAD : GET).
					add(path.getBytes(StandardCharsets.ISO_8859_1)).add(HTTP11);
		} else {
			String length = "" + requestBody.length();
			headerBaq.add(POST).
					add(path.getBytes(StandardCharsets.ISO_8859_1)).add(HTTP11).
					add(CONTENT_LENGTH).add(length.getBytes()).add(CRLF);
		}
		if (requestHeaders == null || !requestHeaders.containsKey("Host")) {
			headerBaq.add(HOST).
					add(host.getBytes(StandardCharsets.ISO_8859_1)).add(CRLF);
		}
		if (proxyAuth != null) {
			headerBaq.add(PROXY_AUTH).
					add(proxyAuth.getBytes(StandardCharsets.ISO_8859_1)).add(CRLF);
		}
		if (requestHeaders != null) {
			requestHeaders.forEach((key, values) -> {
				// Locale.ROOT: header names are ASCII; avoid locale-sensitive
				// case mapping (e.g. Turkish dotted/dotless I)
				if (SKIP_HEADERS.contains(key.toUpperCase(Locale.ROOT))) {
					return;
				}
				byte[] key_ = key.getBytes(StandardCharsets.ISO_8859_1);
				for (String value : values) {
					headerBaq.add(key_).add(COLON).
							add(value.getBytes(StandardCharsets.ISO_8859_1)).add(CRLF);
				}
			});
		}
		headerBaq.add(HEAD_END);
		headerBaq.writeTo(out);
		if (requestBody != null) {
			requestBody.writeTo(out);
		}
	}

	/**
	 * Reads and parses an HTTP response: status line, headers, then the body
	 * (identity, until-close, or chunked; gzip-decoded when so encoded).
	 *
	 * @param responseBody null = discard the body
	 * @param head true when the request was HEAD (no body follows the headers)
	 * @param connect true when parsing a CONNECT response (headers only)
	 * @param connectionClose out-param [0] set true when the connection must not be reused
	 * @return the HTTP status code
	 */
	static int recv(InputStream in, ByteArrayQueue responseBody,
			Map<String, List<String>> responseHeaders, boolean head,
			boolean connect, boolean[] connectionClose) throws IOException {
		// Response Header
		boolean http10 = false, close = false, gzip = false;
		int status = 0, contentLength = 0;
		StringBuilder sb = new StringBuilder();
		while (true) {
			int b = in.read();
			if (b < 0) {
				throw new IOException("Connection Lost");
			}
			if (b == '\r') {
				continue;
			}
			if (b != '\n') {
				sb.append((char) b);
				continue;
			}
			if (sb.length() == 0) {
				// Blank line: end of one header block
				if (status == 100) {
					// 100 Continue: a final response follows; restart parsing
					status = 0;
					continue;
				}
				break;
			}
			if (status == 0) {
				// Status line, e.g. "HTTP/1.1 200 OK"
				String[] ss = sb.toString().split(" ");
				if (ss.length < 2) {
					throw new IOException("Response Error: [" + sb + "]");
				}
				status = Numbers.parseInt(ss[1]);
				if (ss[0].equalsIgnoreCase("HTTP/1.0")) {
					http10 = true;
				}
			} else if (status >= 200) {
				int index = sb.indexOf(": ");
				if (index >= 0) {
					String key = sb.substring(0, index).toUpperCase(Locale.ROOT);
					String value = sb.substring(index + 2);
					switch (key) {
					case "CONNECTION":
						close = value.equalsIgnoreCase("close");
						break;
					case "CONTENT-LENGTH":
						contentLength = Numbers.parseInt(value);
						break;
					case "CONTENT-ENCODING":
						gzip = value.equalsIgnoreCase("gzip");
						break;
					case "TRANSFER-ENCODING":
						// -1 marks chunked transfer coding
						contentLength = value.equalsIgnoreCase("chunked") ?
								-1 : contentLength;
						break;
					default:
					}
					if (responseHeaders != null) {
						responseHeaders.computeIfAbsent(key,
								k -> new ArrayList<>()).add(value);
					}
				}
			}
			sb.setLength(0);
		}
		if (connectionClose != null && connectionClose.length > 0) {
			connectionClose[0] = http10 || close;
		}
		// Response Body
		if (!connect && (http10 || (close && contentLength == 0))) {
			// For HTTP/1.0 response, or connection-close without Content-Length,
			// read from stream until connection lost
			(responseBody == null ? new ByteArrayQueue() : responseBody).
					readFrom(gzip ? new GZIPInputStream(in) : in);
			return status;
		}
		if (head || contentLength == 0) {
			return status;
		}
		byte[] buffer = new byte[RESP_MAX_SIZE];
		if (contentLength > 0) {
			// Identity body with known length
			if (gzip) {
				ByteArrayQueue gzipBody = new ByteArrayQueue();
				copyResponse(in, gzipBody, buffer, contentLength);
				if (responseBody != null) {
					try (GZIPInputStream gzipis =
							new GZIPInputStream(gzipBody.getInputStream())) {
						responseBody.readFrom(gzipis);
					} catch (IOException e) {
						// Best-effort: deliver what decompressed before the error
					}
				}
			} else {
				copyResponse(in, responseBody, buffer, contentLength);
			}
			return status;
		}
		/* Response Body (Chunked)
		 * chunkSize state:
		 * >=0: waiting size and CRLF
		 * -1: waiting next block (CRLF)
		 * -2: waiting terminator (CRLF)
		 */
		int chunkSize = 0;
		ByteArrayQueue gzipBody = gzip ? new ByteArrayQueue() : responseBody;
		while (true) {
			int b = in.read();
			if (b < 0) {
				throw new IOException("Connection Lost");
			}
			if (chunkSize < 0) {
				if (b == '\n') {
					if (chunkSize == -2) {
						break;
					}
					chunkSize = 0;
				}
			} else if (b == '\n') {
				if (chunkSize == 0) {
					// Zero-size chunk: last-chunk marker
					chunkSize = -2;
				} else {
					copyResponse(in, gzipBody, buffer, chunkSize);
					chunkSize = -1;
				}
			} else {
				// Accumulate hex chunk-size digits; ignore CR and extensions
				b = HEX_DIGITS.indexOf(Character.toUpperCase(b));
				if (b >= 0) {
					chunkSize = chunkSize * 16 + b;
				}
			}
		}
		if (gzip && responseBody != null) {
			try (GZIPInputStream gzipis =
					new GZIPInputStream(gzipBody.getInputStream())) {
				responseBody.readFrom(gzipis);
			} catch (IOException e) {
				// Best-effort: deliver what decompressed before the error
			}
		}
		return status;
	}

	/** Sends one request and reads the response on an already-connected socket. */
	static int request(Socket socket, String path, String host,
			String proxyAuth, ByteArrayQueue requestBody,
			Map<String, List<String>> requestHeaders, boolean head,
			ByteArrayQueue responseBody, Map<String, List<String>> responseHeaders,
			boolean[] connectionClose) throws IOException {
		send(socket.getOutputStream(), path, host,
				proxyAuth, requestBody, requestHeaders, head);
		return recv(socket.getInputStream(), responseBody,
				responseHeaders, head, false, connectionClose);
	}

	/** Splits "host[:port]" (default port 443/80 by {@code secure}) and tunnels. */
	static Socket connect(Socket socket, String host,
			String proxyAuth, boolean secure) throws IOException {
		int colon = host.indexOf(':');
		int port;
		String host_;
		if (colon < 0) {
			host_ = host;
			port = secure ? 443 : 80;
		} else {
			host_ = host.substring(0, colon);
			port = Numbers.parseInt(host.substring(colon + 1));
		}
		return connect(socket, host_, port, proxyAuth, secure);
	}

	/**
	 * Issues a CONNECT through the proxy socket; on 200, returns the tunnel
	 * socket (TLS-wrapped when {@code secure}).
	 *
	 * @throws IOException when the proxy refuses the tunnel
	 */
	static Socket connect(Socket socket, String host, int port,
			String proxyAuth, boolean secure) throws IOException {
		ByteArrayQueue headerBaq = new ByteArrayQueue();
		// COLON[0] is ':' — append without the trailing space
		headerBaq.add(CONNECT).
				add(host.getBytes(StandardCharsets.ISO_8859_1)).add(COLON, 0, 1).
				add(("" + port).getBytes()).add(HTTP10);
		if (proxyAuth != null) {
			headerBaq.add(PROXY_AUTH).
					add(proxyAuth.getBytes(StandardCharsets.ISO_8859_1)).add(CRLF);
		}
		headerBaq.add(CRLF);
		headerBaq.writeTo(socket.getOutputStream());
		int status = recv(socket.getInputStream(), null, null, false, true, null);
		if (status != 200) {
			throw new IOException("HTTP/1.0 " + status +
					" Connection NOT established");
		}
		return secure ? SocketPool.createSocket(socket, host, port) : socket;
	}

	/** Resolves proxy/tunnel parameters, opens a socket, and performs one request. */
	private static int request(HttpProxy httpProxy, String url,
			ByteArrayQueue requestBody, Map<String, List<String>> requestHeaders,
			boolean head, ByteArrayQueue responseBody,
			Map<String, List<String>> responseHeaders, int timeout) throws IOException {
		HttpParam param = new HttpParam(httpProxy, url);
		if (param.connect) {
			// proxyAuth was already sent on the CONNECT; pass null afterwards
			try (Socket socket = connect(SocketPool.createSocket(param.socketHost,
					param.socketPort, false, timeout),
					param.host, param.proxyAuth, param.secure)) {
				return request(socket, param.path, param.host, null,
						requestBody, requestHeaders, head,
						responseBody, responseHeaders, null);
			}
		}
		try (Socket socket = SocketPool.createSocket(param.secure)) {
			socket.connect(new InetSocketAddress(param.socketHost,
					param.socketPort), timeout);
			socket.setSoTimeout(timeout);
			return request(socket, param.path, param.host, param.proxyAuth,
					requestBody, requestHeaders, head,
					responseBody, responseHeaders, null);
		}
	}

	/** HEAD without a proxy. @return HTTP status code */
	public static int head(String url,
			Map<String, List<String>> requestHeaders,
			Map<String, List<String>> responseHeaders, int timeout) throws IOException {
		return head(null, url, requestHeaders, responseHeaders, timeout);
	}

	/** GET without a proxy. @return HTTP status code */
	public static int get(String url,
			Map<String, List<String>> requestHeaders, ByteArrayQueue responseBody,
			Map<String, List<String>> responseHeaders, int timeout) throws IOException {
		return get(null, url, requestHeaders,
				responseBody, responseHeaders, timeout);
	}

	/** POST without a proxy. @return HTTP status code */
	public static int post(String url, ByteArrayQueue requestBody,
			Map<String, List<String>> requestHeaders, ByteArrayQueue responseBody,
			Map<String, List<String>> responseHeaders, int timeout) throws IOException {
		return post(null, url, requestBody, requestHeaders,
				responseBody, responseHeaders, timeout);
	}

	/** HEAD, optionally via {@code httpProxy}. @return HTTP status code */
	public static int head(HttpProxy httpProxy, String url,
			Map<String, List<String>> requestHeaders,
			Map<String, List<String>> responseHeaders, int timeout) throws IOException {
		return request(httpProxy, url, null, requestHeaders,
				true, null, responseHeaders, timeout);
	}

	/** GET, optionally via {@code httpProxy}. @return HTTP status code */
	public static int get(HttpProxy httpProxy, String url,
			Map<String, List<String>> requestHeaders, ByteArrayQueue responseBody,
			Map<String, List<String>> responseHeaders, int timeout) throws IOException {
		return request(httpProxy, url, null, requestHeaders,
				false, responseBody, responseHeaders, timeout);
	}

	/** POST, optionally via {@code httpProxy}. @return HTTP status code */
	public static int post(HttpProxy httpProxy, String url,
			ByteArrayQueue requestBody, Map<String, List<String>> requestHeaders,
			ByteArrayQueue responseBody, Map<String, List<String>> responseHeaders,
			int timeout) throws IOException {
		return request(httpProxy, url, requestBody, requestHeaders,
				false, responseBody, responseHeaders, timeout);
	}
}
/**
 * Copyright (C) 2016 Red Hat, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.fabric8.acme.client.internal;

import com.nimbusds.jose.JOSEObject;
import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jose.JWSHeader;
import com.nimbusds.jose.JWSObject;
import com.nimbusds.jose.Payload;
import com.nimbusds.jose.jwk.JWK;
import io.fabric8.acme.client.ACMEClientException;
import io.fabric8.acme.client.model.Directory;
import io.fabric8.acme.client.model.Resource;
import net.minidev.json.JSONObject;
import net.minidev.json.parser.ParseException;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.temporal.ChronoUnit;
import java.time.temporal.TemporalAccessor;
import java.util.concurrent.TimeUnit;

/**
 * Base class for ACME API operations: signs request payloads as JWS objects,
 * posts them with OkHttp, threads the anti-replay nonce through each exchange,
 * and translates non-success responses into {@link ACMEClientException}.
 *
 * @param <T> the model type produced by this operation's response handlers
 */
public abstract class BaseOperations<T> {

  protected final Logger logger = LoggerFactory.getLogger(getClass());

  private static final MediaType REQUEST_MEDIA_TYPE = MediaType.parse(JOSEObject.MIME_TYPE_COMPACT);

  private Directory directory;

  private OkHttpClient okHttpClient;

  private Nonce nonce;

  private JWSAlgorithm jwsAlgorithm;

  private Signer signer;

  private JWK jwk;

  public BaseOperations(Directory directory, OkHttpClient okHttpClient, Nonce nonce, JWSAlgorithm jwsAlgorithm, Signer signer, JWK jwk) {
    this.directory = directory;
    this.okHttpClient = okHttpClient;
    this.nonce = nonce;
    this.jwsAlgorithm = jwsAlgorithm;
    this.signer = signer;
    this.jwk = jwk;
  }

  /** Copy constructor: shares the collaborators of another operations instance. */
  public BaseOperations(BaseOperations<T> orig) {
    this.directory = orig.directory;
    this.okHttpClient = orig.okHttpClient;
    this.nonce = orig.nonce;
    this.jwsAlgorithm = orig.jwsAlgorithm;
    this.signer = orig.signer;
    this.jwk = orig.jwk;
  }

  /** POSTs {@code item} to the directory URL registered for {@code resourceType}. */
  protected T sendRequest(Resource.ResourceType resourceType, Resource item, JWSHeader jwsHeader, ResponseHandler<T> responseHandler, int... successCodes) {
    return sendRequest(directory.get(resourceType), item, jwsHeader, responseHandler, successCodes);
  }

  /** POSTs {@code item} (as JSON) to {@code url}. */
  protected T sendRequest(String url, Resource item, JWSHeader jwsHeader, ResponseHandler<T> responseHandler, int... successCodes) {
    return sendRequest(url, item.toJSONObject(), jwsHeader, responseHandler, successCodes);
  }

  /**
   * Signs {@code jsonObject} as a JWS and POSTs the compact serialization.
   *
   * @param successCodes HTTP status codes accepted as success
   */
  protected T sendRequest(String url, JSONObject jsonObject, JWSHeader jwsHeader, ResponseHandler<T> responseHandler, int... successCodes) {
    // Construct the JWS to send on.
    JWSObject jwsObject = new JWSObject(jwsHeader, new Payload(jsonObject));
    signer.sign(jwsObject);

    String compact = jwsObject.serialize();

    RequestBody body = RequestBody.create(REQUEST_MEDIA_TYPE, compact);

    Request request = new Request.Builder()
      .url(url)
      .post(body)
      .build();

    return sendRequest(request, responseHandler, successCodes);
  }

  /** Unsigned GET to {@code url}. */
  protected T sendRequest(String url, ResponseHandler<T> responseHandler, int... successCodes) {
    Request request = new Request.Builder()
      .url(url)
      .get()
      .build();

    return sendRequest(request, responseHandler, successCodes);
  }

  /**
   * Executes the request, validates the status, harvests the next nonce from
   * the response, and delegates to the handler. The body is always closed.
   */
  protected T sendRequest(Request request, ResponseHandler<T> responseHandler, int... successCodes) {
    try {
      Response response = okHttpClient.newCall(request).execute();
      try {
        assertSuccessfulResponse(response, successCodes);
        nonce.extractNonce(response);
        return responseHandler.handle(response);
      } finally {
        response.body().close();
      }
    } catch (IOException e) {
      throw ACMEClientException.launderThrowable(e);
    }
  }

  /** Header builder preloaded with the current nonce and account key. */
  protected JWSHeader.Builder jwsHeader() {
    return new JWSHeader.Builder(jwsAlgorithm)
      .customParam("nonce", nonce.get())
      .jwk(jwk);
  }

  /**
   * Throws {@link ACMEClientException} unless the status code is one of
   * {@code expectedStatusCode}; includes the server's JSON problem document
   * when the body parses as one.
   */
  private void assertSuccessfulResponse(Response response, int... expectedStatusCode) {
    for (int code : expectedStatusCode) {
      if (response.code() == code) {
        return;
      }
    }
    String detail = response.message();
    try {
      detail = response.body().string();
      JSONObject parsedResponse = JSONParserUtils.parse(detail);
      throw new ACMEClientException(response.code(), response.message(), parsedResponse);
    } catch (ParseException | IOException e) {
      // Body unreadable or not JSON: fall back to the raw detail text
      throw new ACMEClientException(response.code(), response.message(), detail);
    }
  }

  /**
   * Sends the request repeatedly while the server answers with one of
   * {@code retryCodes}, sleeping per the Retry-After header between attempts,
   * until {@code successCode} is returned (or an error is thrown).
   */
  protected T requestWithRetryAfter(String url, Resource item, JWSHeader jwsHeader, ResponseHandler<T> responseHandler, int successCode, int... retryCodes) {
    try {
      T obj = null;
      // Retry codes must be "accepted" by sendRequest so the handler runs
      int[] allCodes = new int[retryCodes.length + 1];
      System.arraycopy(retryCodes, 0, allCodes, 0, retryCodes.length);
      allCodes[retryCodes.length] = successCode;
      while (obj == null) {
        try {
          obj = sendRequest(
            url,
            item,
            jwsHeader,
            (response) -> {
              String retryAfterHeader = response.header("Retry-After");
              for (int retryCode : retryCodes) {
                if (retryCode == response.code()) {
                  throw new RetryAfterException(retryAfterHeader);
                }
              }
              return responseHandler.handle(response);
            },
            allCodes
          );
        } catch (RetryAfterException e) {
          try {
            TimeUnit.SECONDS.sleep(e.retryAfter);
          } catch (InterruptedException einter) {
            logger.warn("Interrupted sleep during retry", einter);
          }
        }
      }
      return obj;
    } catch (Exception e) {
      throw ACMEClientException.launderThrowable(e);
    }
  }

  protected JWK getJwk() {
    return jwk;
  }

  protected JWSAlgorithm getJwsAlgorithm() {
    return jwsAlgorithm;
  }

  /**
   * Control-flow exception carrying the delay (in seconds) requested by the
   * server's Retry-After header.
   */
  private static class RetryAfterException extends RuntimeException {

    // Fallback delay when the server omits Retry-After or sends garbage
    private static final long DEFAULT_RETRY_AFTER_SECONDS = 1;

    private final long retryAfter;

    private RetryAfterException(String retryAfterDate) {
      // Retry-After = "Retry-After" ":" ( HTTP-date | delta-seconds )
      // FIX: the header may be absent (null) or malformed; previously that
      // raised NPE/NumberFormatException, aborting the whole retry loop.
      long retryAfterTemp;
      if (retryAfterDate == null) {
        retryAfterTemp = DEFAULT_RETRY_AFTER_SECONDS;
      } else {
        try {
          TemporalAccessor temp = DateTimeFormatter.RFC_1123_DATE_TIME.parse(retryAfterDate);
          Instant retryAfterInstant = Instant.from(temp);
          retryAfterTemp = Instant.now().until(retryAfterInstant, ChronoUnit.SECONDS);
        } catch (DateTimeParseException e) {
          try {
            retryAfterTemp = Long.parseLong(retryAfterDate);
          } catch (NumberFormatException nfe) {
            retryAfterTemp = DEFAULT_RETRY_AFTER_SECONDS;
          }
        }
      }
      if (retryAfterTemp < 0) {
        // Date in the past: retry immediately
        retryAfterTemp = 0;
      }
      retryAfter = retryAfterTemp;
    }
  }
}
package org.spongycastle.math.ec.custom.sec;

import java.math.BigInteger;

import org.spongycastle.math.raw.Nat;
import org.spongycastle.math.raw.Nat384;

/**
 * Field arithmetic modulo the secp384r1 (NIST P-384) prime
 * p = 2^384 - 2^128 - 2^96 + 2^32 - 1, on little-endian 12-limb
 * (32-bit word) representations. All results are reduced into [0, p).
 * NOTE(review): exact statement order and carry handling are
 * correctness-critical throughout; do not reorder.
 */
public class SecP384R1Field
{
    // Mask to promote a 32-bit limb to an unsigned long
    private static final long M = 0xFFFFFFFFL;

    // 2^384 - 2^128 - 2^96 + 2^32 - 1
    static final int[] P = new int[]{ 0xFFFFFFFF, 0x00000000, 0x00000000, 0xFFFFFFFF, 0xFFFFFFFE, 0xFFFFFFFF,
        0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF };
    // p^2, the modulus for "extended" (double-length, 24-limb) values
    static final int[] PExt = new int[]{ 0x00000001, 0xFFFFFFFE, 0x00000000, 0x00000002, 0x00000000, 0xFFFFFFFE,
        0x00000000, 0x00000002, 0x00000001, 0x00000000, 0x00000000, 0x00000000, 0xFFFFFFFE, 0x00000001,
        0x00000000, 0xFFFFFFFE, 0xFFFFFFFD, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF,
        0xFFFFFFFF, 0xFFFFFFFF };
    // 2^768 - PExt (the additive inverse of PExt mod 2^768), trailing zero limbs omitted
    private static final int[] PExtInv = new int[]{ 0xFFFFFFFF, 0x00000001, 0xFFFFFFFF, 0xFFFFFFFD, 0xFFFFFFFF,
        0x00000001, 0xFFFFFFFF, 0xFFFFFFFD, 0xFFFFFFFE, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00000001,
        0xFFFFFFFE, 0xFFFFFFFF, 0x00000001, 0x00000002 };
    // Top limbs of P and PExt, used as a cheap pre-filter before full compares
    private static final int P11 = 0xFFFFFFFF;
    private static final int PExt23 = 0xFFFFFFFF;

    /** z = (x + y) mod p. */
    public static void add(int[] x, int[] y, int[] z)
    {
        int c = Nat.add(12, x, y, z);
        // On carry-out, or when z >= p, subtract p (implemented as adding p's inverse)
        if (c != 0 || (z[11] == P11 && Nat.gte(12, z, P)))
        {
            addPInvTo(z);
        }
    }

    /** zz = (xx + yy) mod p^2, on 24-limb extended operands. */
    public static void addExt(int[] xx, int[] yy, int[] zz)
    {
        int c = Nat.add(24, xx, yy, zz);
        if (c != 0 || (zz[23] == PExt23 && Nat.gte(24, zz, PExt)))
        {
            if (Nat.addTo(PExtInv.length, PExtInv, zz) != 0)
            {
                Nat.incAt(24, zz, PExtInv.length);
            }
        }
    }

    /** z = (x + 1) mod p. */
    public static void addOne(int[] x, int[] z)
    {
        int c = Nat.inc(12, x, z);
        if (c != 0 || (z[11] == P11 && Nat.gte(12, z, P)))
        {
            addPInvTo(z);
        }
    }

    /**
     * Converts x (assumed in [0, 2^384)) to 12 limbs, reducing once if x >= p.
     */
    public static int[] fromBigInteger(BigInteger x)
    {
        int[] z = Nat.fromBigInteger(384, x);
        if (z[11] == P11 && Nat.gte(12, z, P))
        {
            Nat.subFrom(12, P, z);
        }
        return z;
    }

    /** z = (x / 2) mod p: shift even values; for odd values add p first (x+p is even). */
    public static void half(int[] x, int[] z)
    {
        if ((x[0] & 1) == 0)
        {
            Nat.shiftDownBit(12, x, 0, z);
        }
        else
        {
            int c = Nat.add(12, x, P, z);
            Nat.shiftDownBit(12, z, c);
        }
    }

    /** z = (x * y) mod p. */
    public static void multiply(int[] x, int[] y, int[] z)
    {
        int[] tt = Nat.create(24);
        Nat384.mul(x, y, tt);
        reduce(tt, z);
    }

    /** z = (-x) mod p (zero maps to zero). */
    public static void negate(int[] x, int[] z)
    {
        if (Nat.isZero(12, x))
        {
            Nat.zero(12, z);
        }
        else
        {
            Nat.sub(12, P, x, z);
        }
    }

    /**
     * Reduces the 24-limb product xx into z mod p, folding the high limbs
     * back into the low half using the sparse form of p
     * (2^384 ≡ 2^128 + 2^96 - 2^32 + 1 mod p). The constant n = 1 biases
     * the accumulator so intermediate signed sums stay representable; it is
     * added back at the end before the final carry reduction.
     */
    public static void reduce(int[] xx, int[] z)
    {
        long xx16 = xx[16] & M, xx17 = xx[17] & M, xx18 = xx[18] & M, xx19 = xx[19] & M;
        long xx20 = xx[20] & M, xx21 = xx[21] & M, xx22 = xx[22] & M, xx23 = xx[23] & M;

        final long n = 1;

        long t0 = (xx[12] & M) + xx20 - n;
        long t1 = (xx[13] & M) + xx22;
        long t2 = (xx[14] & M) + xx22 + xx23;
        long t3 = (xx[15] & M) + xx23;
        long t4 = xx17 + xx21;
        long t5 = xx21 - xx23;
        long t6 = xx22 - xx23;

        // Accumulate limb-by-limb with explicit carry propagation in cc
        long cc = 0;
        cc += (xx[0] & M) + t0 + t5;
        z[0] = (int)cc;
        cc >>= 32;
        cc += (xx[1] & M) + xx23 - t0 + t1;
        z[1] = (int)cc;
        cc >>= 32;
        cc += (xx[2] & M) - xx21 - t1 + t2;
        z[2] = (int)cc;
        cc >>= 32;
        cc += (xx[3] & M) + t0 - t2 + t3 + t5;
        z[3] = (int)cc;
        cc >>= 32;
        cc += (xx[4] & M) + xx16 + xx21 + t0 + t1 - t3 + t5;
        z[4] = (int)cc;
        cc >>= 32;
        cc += (xx[5] & M) - xx16 + t1 + t2 + t4;
        z[5] = (int)cc;
        cc >>= 32;
        cc += (xx[6] & M) + xx18 - xx17 + t2 + t3;
        z[6] = (int)cc;
        cc >>= 32;
        cc += (xx[7] & M) + xx16 + xx19 - xx18 + t3;
        z[7] = (int)cc;
        cc >>= 32;
        cc += (xx[8] & M) + xx16 + xx17 + xx20 - xx19;
        z[8] = (int)cc;
        cc >>= 32;
        cc += (xx[9] & M) + xx18 - xx20 + t4;
        z[9] = (int)cc;
        cc >>= 32;
        cc += (xx[10] & M) + xx18 + xx19 - t5 + t6;
        z[10] = (int)cc;
        cc >>= 32;
        cc += (xx[11] & M) + xx19 + xx20 - t6;
        z[11] = (int)cc;
        cc >>= 32;
        cc += n;

        // assert cc >= 0;

        reduce32((int)cc, z);
    }

    /**
     * Adds x * 2^384 (mod p) into z, i.e. folds a single overflow word via
     * 2^384 ≡ 2^128 + 2^96 - 2^32 + 1, then reduces z into [0, p).
     */
    public static void reduce32(int x, int[] z)
    {
        long cc = 0;

        if (x != 0)
        {
            long xx12 = x & M;

            cc += (z[0] & M) + xx12;
            z[0] = (int)cc;
            cc >>= 32;
            cc += (z[1] & M) - xx12;
            z[1] = (int)cc;
            cc >>= 32;
            if (cc != 0)
            {
                cc += (z[2] & M);
                z[2] = (int)cc;
                cc >>= 32;
            }
            cc += (z[3] & M) + xx12;
            z[3] = (int)cc;
            cc >>= 32;
            cc += (z[4] & M) + xx12;
            z[4] = (int)cc;
            cc >>= 32;

            // assert cc == 0 || cc == 1;
        }

        if ((cc != 0 && Nat.incAt(12, z, 5) != 0)
            || (z[11] == P11 && Nat.gte(12, z, P)))
        {
            addPInvTo(z);
        }
    }

    /** z = x^2 mod p. */
    public static void square(int[] x, int[] z)
    {
        int[] tt = Nat.create(24);
        Nat384.square(x, tt);
        reduce(tt, z);
    }

    /** z = x^(2^n) mod p, by repeated squaring. */
    public static void squareN(int[] x, int n, int[] z)
    {
        // assert n > 0;

        int[] tt = Nat.create(24);
        Nat384.square(x, tt);
        reduce(tt, z);

        while (--n > 0)
        {
            Nat384.square(z, tt);
            reduce(tt, z);
        }
    }

    /** z = (x - y) mod p. */
    public static void subtract(int[] x, int[] y, int[] z)
    {
        int c = Nat.sub(12, x, y, z);
        // On borrow, add p back (implemented as subtracting p's inverse)
        if (c != 0)
        {
            subPInvFrom(z);
        }
    }

    /** zz = (xx - yy) mod p^2, on 24-limb extended operands. */
    public static void subtractExt(int[] xx, int[] yy, int[] zz)
    {
        int c = Nat.sub(24, xx, yy, zz);
        if (c != 0)
        {
            if (Nat.subFrom(PExtInv.length, PExtInv, zz) != 0)
            {
                Nat.decAt(24, zz, PExtInv.length);
            }
        }
    }

    /** z = (2 * x) mod p. */
    public static void twice(int[] x, int[] z)
    {
        int c = Nat.shiftUpBit(12, x, 0, z);
        if (c != 0 || (z[11] == P11 && Nat.gte(12, z, P)))
        {
            addPInvTo(z);
        }
    }

    /**
     * Adds 2^384 - p = 2^128 + 2^96 - 2^32 + 1 into z in place
     * (equivalent to subtracting p after an overflow past 2^384).
     */
    private static void addPInvTo(int[] z)
    {
        long c = (z[0] & M) + 1;
        z[0] = (int)c;
        c >>= 32;
        c += (z[1] & M) - 1;
        z[1] = (int)c;
        c >>= 32;
        if (c != 0)
        {
            c += (z[2] & M);
            z[2] = (int)c;
            c >>= 32;
        }
        c += (z[3] & M) + 1;
        z[3] = (int)c;
        c >>= 32;
        c += (z[4] & M) + 1;
        z[4] = (int)c;
        c >>= 32;
        if (c != 0)
        {
            Nat.incAt(12, z, 5);
        }
    }

    /** Subtracts 2^384 - p from z in place (equivalent to adding p after a borrow). */
    private static void subPInvFrom(int[] z)
    {
        long c = (z[0] & M) - 1;
        z[0] = (int)c;
        c >>= 32;
        c += (z[1] & M) + 1;
        z[1] = (int)c;
        c >>= 32;
        if (c != 0)
        {
            c += (z[2] & M);
            z[2] = (int)c;
            c >>= 32;
        }
        c += (z[3] & M) - 1;
        z[3] = (int)c;
        c >>= 32;
        c += (z[4] & M) - 1;
        z[4] = (int)c;
        c >>= 32;
        if (c != 0)
        {
            Nat.decAt(12, z, 5);
        }
    }
}
package io.github.jklingsporn.vertx.jooq.async.generate; import io.github.jklingsporn.vertx.jooq.async.shared.JsonArrayConverter; import io.github.jklingsporn.vertx.jooq.async.shared.JsonObjectConverter; import io.vertx.core.json.JsonObject; import org.jooq.Constants; import org.jooq.Record; import org.jooq.tools.JooqLogger; import org.jooq.util.*; import java.io.File; import java.time.Instant; import java.util.Collection; import java.util.List; /** * Created by jklingsporn on 17.10.16. * Extension of the jOOQ's <code>JavaGenerator</code>. * By default, it generates POJO's that have a <code>#fromJson</code> and a <code>#toJson</code>-method which takes/generates a <code>JsonObject</code> out of the generated POJO. * When you've enabled Interface-generation, these methods are added to the generated Interface as default-methods. * Besides these method there is also a constructor generated which takes a <code>JsonObject</code>. * It also generates DAOs which implement * <code>VertxDAO</code> and allow you to execute CRUD-operations asynchronously. 
*/ public abstract class AbstractVertxGenerator extends JavaGenerator { private static final JooqLogger logger = JooqLogger.getLogger(AbstractVertxGenerator.class); private final boolean generateJson; public AbstractVertxGenerator() { this(true); } public AbstractVertxGenerator(boolean generateJson) { this.generateJson = generateJson; this.setGeneratePojos(true); } @Override protected void generateDaoClassFooter(TableDefinition table, JavaWriter out) { super.generateDaoClassFooter(table, out); generateFetchMethods(table,out); generateVertxGetterAndSetterConfigurationMethod(out); overwriteInsertReturningIfNecessary(table,out); } @Override protected void generatePojoClassFooter(TableDefinition table, JavaWriter out) { super.generatePojoClassFooter(table, out); if(generateJson){ generateFromJsonConstructor(table,out); if(!generateInterfaces()){ generateFromJson(table,out); generateToJson(table, out); } } } @Override protected void generateInterfaceClassFooter(TableDefinition table, JavaWriter out) { super.generateInterfaceClassFooter(table, out); if(generateJson && generateInterfaces()){ generateFromJson(table, out); generateToJson(table, out); } } @Override protected void generateDao(TableDefinition table, JavaWriter out) { if(table.getPrimaryKey() != null){ ((VertxJavaWriter)out).setDaoTypeReplacement(getKeyType(table.getPrimaryKey())); } super.generateDao(table, out); } @Override protected JavaWriter newJavaWriter(File file) { return new VertxJavaWriter(file, generateFullyQualifiedTypes(), targetEncoding); } @Override protected void printPackage(JavaWriter out, Definition definition, GeneratorStrategy.Mode mode) { super.printPackage(out, definition, mode); if(mode.equals(GeneratorStrategy.Mode.DAO)){ generateDAOImports(out); } } protected abstract void generateDAOImports(JavaWriter out); /** * You might want to override this class in order to add injection methods. 
* @param out */ protected void generateSetVertxAnnotation(JavaWriter out){}; protected void generateVertxGetterAndSetterConfigurationMethod(JavaWriter out) { out.println(); out.tab(1).println("private io.vertx.core.Vertx vertx;"); out.println(); generateSetVertxAnnotation(out); out.tab(1).println("@Override"); out.tab(1).println("public void setVertx(io.vertx.core.Vertx vertx) {"); out.tab(2).println("this.vertx = vertx;"); out.tab(1).println("}"); out.println(); out.tab(1).println("@Override"); out.tab(1).println("public io.vertx.core.Vertx vertx() {"); out.tab(2).println("return this.vertx;"); out.tab(1).println("}"); out.println(); } private void overwriteInsertReturningIfNecessary(TableDefinition table, JavaWriter out){ Collection<ColumnDefinition> keyColumns = table.getPrimaryKey().getKeyColumns(); boolean isSupported = keyColumns.size()==1; String reason = "More than one PK column"; if(isSupported){ isSupported = keyColumns.stream() .map(c -> getJavaType(c.getType())) .allMatch(t -> isType(t, Integer.class) || isType(t, Long.class)); reason = isSupported ? "":"PK is not of type int or long"; } if(!isSupported){ logger.info(String.format("insertReturningPrimaryAsync is not supported for %s because '%s'!",table.getName(),reason)); renderInsertReturningOverwrite(table, out, reason); } } protected abstract void renderInsertReturningOverwrite(TableDefinition table, JavaWriter out, String reason); /** * Overwrite this method to define the conversion of a column to a JSON name. Defaults to the name of the column. 
* @param columnDefinition * @return the name of this column as a JSON name */ protected String getJsonName(ColumnDefinition columnDefinition){ return columnDefinition.getName(); } private void generateFromJson(TableDefinition table, JavaWriter out){ out.println(); String className = getStrategy().getJavaClassName(table, GeneratorStrategy.Mode.INTERFACE); out.tab(1).println("@Override"); out.tab(1).println("default %s fromJson(io.vertx.core.json.JsonObject json) {",className); for (ColumnDefinition column : table.getColumns()) { String setter = getStrategy().getJavaSetterName(column, GeneratorStrategy.Mode.INTERFACE); String columnType = getJavaType(column.getType()); String jsonName = getJsonName(column); if(handleCustomTypeFromJson(column, setter, columnType, jsonName, out)) { //handled by user }else if(isType(columnType, Integer.class)){ out.tab(2).println("%s(json.getInteger(\"%s\"));", setter, jsonName); }else if(isType(columnType, Short.class)){ out.tab(2).println("%s(json.getInteger(\"%s\")==null?null:json.getInteger(\"%s\").shortValue());", setter, jsonName, jsonName); }else if(isType(columnType, Byte.class)){ out.tab(2).println("%s(json.getInteger(\"%s\")==null?null:json.getInteger(\"%s\").byteValue());", setter,jsonName, jsonName); }else if(isType(columnType, Long.class)){ out.tab(2).println("%s(json.getLong(\"%s\"));", setter, jsonName); }else if(isType(columnType, Float.class)){ out.tab(2).println("%s(json.getFloat(\"%s\"));", setter, jsonName); }else if(isType(columnType, Double.class)){ out.tab(2).println("%s(json.getDouble(\"%s\"));", setter, jsonName); }else if(isType(columnType, Boolean.class)){ out.tab(2).println("%s(json.getBoolean(\"%s\"));", setter, jsonName); }else if(isType(columnType, String.class)){ out.tab(2).println("%s(json.getString(\"%s\"));", setter, jsonName); }else if(columnType.equals(byte.class.getName()+"[]")){ out.tab(2).println("%s(json.getBinary(\"%s\"));", setter, jsonName); }else if(isType(columnType,Instant.class)){ 
                out.tab(2).println("%s(json.getInstant(\"%s\"));", setter, jsonName);
            }else if(isEnum(table, column)) {
                // Enum column: resolve the JSON string against the generated enum's literals.
                out.tab(2).println("%s(java.util.Arrays.stream(%s.values()).filter(td -> td.getLiteral().equals(json.getString(\"%s\"))).findFirst().orElse(null));", setter, columnType, jsonName);
            }else if(column.getType().getConverter() != null && isType(column.getType().getConverter(),JsonObjectConverter.class)){
                out.tab(2).println("%s(json.getJsonObject(\"%s\"));", setter, jsonName);
            }else if(column.getType().getConverter() != null && isType(column.getType().getConverter(),JsonArrayConverter.class)){
                out.tab(2).println("%s(json.getJsonArray(\"%s\"));", setter, jsonName);
            }else{
                // Unknown column type: warn at generation time and emit a marker comment instead of code.
                logger.warn(String.format("Omitting unrecognized type %s for column %s in table %s!",columnType,column.getName(),table.getName()));
                out.tab(2).println(String.format("// Omitting unrecognized type %s for column %s!",columnType,column.getName()));
            }
        }
        out.tab(2).println("return this;");
        out.tab(1).println("}");
        out.println();
    }

    /**
     * Generates the {@code jsonMapper()} implementation: a function converting a
     * Vert.x {@code JsonObject} into an instance of the table's POJO by chaining
     * the generated fluent setters.
     *
     * @param table the table to generate the mapper for
     * @param out the writer receiving the generated source
     */
    protected void generateJsonMapper(TableDefinition table, JavaWriter out){
        // Shape of the generated code:
        // return json->
        // new generated.classic.async.vertx.tables.pojos.Something()
        // .setSomeid(json.getInteger("someId"))
        // .setSomejsonobject(JsonObjectConverter.getInstance().from(json.getString("someJsonObject")))
        // ;
        out.tab(1).println("@Override");
        out.tab(1).println("public java.util.function.Function<%s, %s> jsonMapper() {", JsonObject.class.getName(),getStrategy().getFullJavaClassName(table, GeneratorStrategy.Mode.POJO));
        out.tab(2).println("return json -> ");
        out.tab(3).println("new %s()",getStrategy().getFullJavaClassName(table, GeneratorStrategy.Mode.POJO));
        for (ColumnDefinition column : table.getColumns()) {
            String setter = getStrategy().getJavaSetterName(column, GeneratorStrategy.Mode.INTERFACE);
            String columnType = getJavaType(column.getType());
            String jsonName = getJsonName(column);
            if(handleCustomTypeJsonMapper(column, setter, columnType, jsonName, out)) {
                //handled by user
            }else if(isType(columnType, Integer.class)){
                out.tab(5).println(".%s(json.getInteger(\"%s\"))", setter, jsonName);
            }else if(isType(columnType, Short.class)){
                // Read as Integer, then narrow to short — guarding against a null value.
                out.tab(5).println(".%s(json.getInteger(\"%s\")==null?null:json.getInteger(\"%s\").shortValue())", setter, jsonName, jsonName);
            }else if(isType(columnType, Byte.class)){
                // Read as Integer, then narrow to byte — guarding against a null value.
                out.tab(5).println(".%s(json.getInteger(\"%s\")==null?null:json.getInteger(\"%s\").byteValue())", setter,jsonName, jsonName);
            }else if(isType(columnType, Long.class)){
                out.tab(5).println(".%s(json.getLong(\"%s\"))", setter, jsonName);
            }else if(isType(columnType, Float.class)){
                out.tab(5).println(".%s(json.getFloat(\"%s\"))", setter, jsonName);
            }else if(isType(columnType, Double.class)){
                out.tab(5).println(".%s(json.getDouble(\"%s\"))", setter, jsonName);
            }else if(isType(columnType, Boolean.class)){
                out.tab(5).println(".%s(json.getBoolean(\"%s\"))", setter, jsonName);
            }else if(isType(columnType, String.class)){
                out.tab(5).println(".%s(json.getString(\"%s\"))", setter, jsonName);
            }else if(columnType.equals(byte.class.getName()+"[]")){
                out.tab(5).println(".%s(json.getBinary(\"%s\"))", setter, jsonName);
            }else if(isType(columnType,Instant.class)){
                out.tab(5).println(".%s(json.getInstant(\"%s\"))", setter, jsonName);
            }else if(isEnum(table, column)) {
                // Enum column: resolve the JSON string against the generated enum's literals.
                out.tab(5).println(".%s(java.util.Arrays.stream(%s.values()).filter(td -> td.getLiteral().equals(json.getString(\"%s\"))).findFirst().orElse(null))", setter, columnType, jsonName);
            }else if(column.getType().getConverter() != null && isType(column.getType().getConverter(),JsonObjectConverter.class)){
                out.tab(5).println(".%s(%s.getInstance().from(json.getString(\"%s\")))", setter, JsonObjectConverter.class.getName(), jsonName);
            }else if(column.getType().getConverter() != null && isType(column.getType().getConverter(),JsonArrayConverter.class)){
                out.tab(5).println(".%s(%s.getInstance().from(json.getString(\"%s\")))", setter, JsonArrayConverter.class.getName(), jsonName);
            }else{
                // Unknown column type: warn at generation time and emit a marker comment instead of code.
                logger.warn(String.format("Omitting unrecognized type %s for column %s in table %s!",columnType,column.getName(),table.getName()));
                out.tab(5).println(String.format("// Omitting unrecognized type %s for column %s!",columnType,column.getName()));
            }
        }
        out.tab(5).println(";");
        out.tab(1).println("}");
        out.println();
    }

    // A column maps to a generated enum when its user type is a known database enum.
    private boolean isEnum(TableDefinition table, TypedElementDefinition<?> column) {
        return table.getDatabase().getEnum(table.getSchema(), column.getType().getUserType()) != null;
    }

    // Compares the column's Java type name against the given class' fully-qualified name.
    private boolean isType(String columnType, Class<?> clazz) {
        return columnType.equals(clazz.getName());
    }

    /**
     * Overwrite this method to handle your custom type. This is needed especially when you have custom converters.
     * @param column the column definition
     * @param setter the setter name
     * @param columnType the type of the column
     * @param javaMemberName the java member name
     * @param out the writer
     * @return <code>true</code> if the column was handled.
     * @see #generateFromJson(TableDefinition, JavaWriter)
     */
    protected boolean handleCustomTypeFromJson(ColumnDefinition column, String setter, String columnType, String javaMemberName, JavaWriter out){
        return false;
    }

    /**
     * Overwrite this method to handle your custom type in the generated {@code jsonMapper()}.
     * This is needed especially when you have custom converters.
     * @param column the column definition
     * @param setter the setter name
     * @param columnType the type of the column
     * @param javaMemberName the java member name
     * @param out the writer
     * @return <code>true</code> if the column was handled.
     * @see #generateJsonMapper(TableDefinition, JavaWriter)
     */
    protected boolean handleCustomTypeJsonMapper(ColumnDefinition column, String setter, String columnType, String javaMemberName, JavaWriter out){
        return false;
    }

    /**
     * Generates the default {@code toJson()} method that serializes the POJO's
     * columns into an {@code io.vertx.core.json.JsonObject}.
     *
     * @param table the table to generate the method for
     * @param out the writer receiving the generated source
     */
    private void generateToJson(TableDefinition table, JavaWriter out){
        out.println();
        out.tab(1).println("@Override");
        out.tab(1).println("default io.vertx.core.json.JsonObject toJson() {");
        out.tab(2).println("io.vertx.core.json.JsonObject json = new io.vertx.core.json.JsonObject();");
        for (ColumnDefinition column : table.getColumns()) {
            String getter = getStrategy().getJavaGetterName(column, GeneratorStrategy.Mode.INTERFACE);
            String columnType = getJavaType(column.getType());
            if(handleCustomTypeToJson(column,getter,getJavaType(column.getType()),getStrategy().getJavaMemberName(column, GeneratorStrategy.Mode.POJO), out)) {
                //handled by user
            }else if(isEnum(table, column)){
                // Enums are serialized by their literal value (null-safe).
                out.tab(2).println("json.put(\"%s\",%s()==null?null:%s().getLiteral());", getJsonName(column),getter,getter);
            } else if (isAllowedJsonType(column, columnType)){
                out.tab(2).println("json.put(\"%s\",%s());", getJsonName(column),getter);
            }else{
                // Unknown column type: warn at generation time and emit a marker comment instead of code.
                logger.warn(String.format("Omitting unrecognized type %s for column %s in table %s!",columnType,column.getName(),table.getName()));
                out.tab(2).println(String.format("// Omitting unrecognized type %s for column %s!",columnType,column.getName()));
            }
        }
        out.tab(2).println("return json;");
        out.tab(1).println("}");
        out.println();
    }

    // Column types emitted verbatim via json.put(...), plus converter-backed JSON types.
    private boolean isAllowedJsonType(TypedElementDefinition<?> column, String columnType){
        return isType(columnType, Integer.class) || isType(columnType, Short.class) || isType(columnType, Byte.class) ||
                isType(columnType, Long.class) || isType(columnType,Float.class) || isType(columnType, Double.class) ||
                isType(columnType, Boolean.class) || isType(columnType,String.class) || isType(columnType, Instant.class) ||
                columnType.equals(byte.class.getName()+"[]") ||
                (column.getType().getConverter() != null &&
                        (isType(column.getType().getConverter(),JsonObjectConverter.class) ||
                                isType(column.getType().getConverter(),JsonArrayConverter.class)))
                ;
    }

    /**
     * Overwrite this method to handle your custom type. This is needed especially when you have custom converters.
     * @param column the column definition
     * @param getter the getter name
     * @param columnType the type of the column
     * @param javaMemberName the java member name
     * @param out the writer
     * @return <code>true</code> if the column was handled.
     * @see #generateToJson(TableDefinition, JavaWriter)
     */
    protected boolean handleCustomTypeToJson(ColumnDefinition column, String getter, String columnType, String javaMemberName, JavaWriter out) {
        return false;
    }

    /**
     * Generates a POJO constructor that delegates to {@code fromJson(JsonObject)}.
     *
     * @param table the table to generate the constructor for
     * @param out the writer receiving the generated source
     */
    private void generateFromJsonConstructor(TableDefinition table, JavaWriter out){
        final String className = getStrategy().getJavaClassName(table, GeneratorStrategy.Mode.POJO);
        out.println();
        out.tab(1).println("public %s(io.vertx.core.json.JsonObject json) {", className);
        out.tab(2).println("fromJson(json);");
        out.tab(1).println("}");
    }

    /**
     * Copied (more or less) from JavaGenerator.
     * Generates fetchByCYZAsync- and fetchOneByCYZAsync-methods
     * @param table the table to generate fetch methods for
     * @param out the writer receiving the generated source
     */
    protected void generateFetchMethods(TableDefinition table, JavaWriter out){
        VertxJavaWriter vOut = (VertxJavaWriter) out;
        String pType = vOut.ref(getStrategy().getFullJavaClassName(table, GeneratorStrategy.Mode.POJO));
        for (ColumnDefinition column : table.getColumns()) {
            final String colName = column.getOutputName();
            final String colClass = getStrategy().getJavaClassName(column);
            final String colType = vOut.ref(getJavaType(column.getType()));
            final String colIdentifier = vOut.ref(getStrategy().getFullJavaIdentifier(column), colRefSegments(column));
            // fetchBy[Column]([T]...)
            // -----------------------
            generateFetchByMethods(out, pType, colName, colClass, colType, colIdentifier);
            // fetchOneBy[Column]([T])
            // -----------------------
            ukLoop:
            for (UniqueKeyDefinition uk : column.getUniqueKeys()) {
                // If column is part of a single-column unique key...
                if (uk.getKeyColumns().size() == 1 && uk.getKeyColumns().get(0).equals(column)) {
                    generateFetchOneByMethods(out, pType, colName, colClass, colType, colIdentifier);
                    break ukLoop;
                }
            }
        }
    }

    protected abstract void generateFetchOneByMethods(JavaWriter out, String pType, String colName, String colClass, String colType, String colIdentifier) ;

    protected abstract void generateFetchByMethods(JavaWriter out, String pType, String colName, String colClass, String colType, String colIdentifier) ;

    /**
     * Copied from JavaGenerator.
     * @param key the unique key
     * @return the Java type for the key: the single column's type, a RecordN row type
     *         for composite keys up to MAX_ROW_DEGREE, or plain Record beyond that
     */
    protected String getKeyType(UniqueKeyDefinition key){
        String tType;
        List<ColumnDefinition> keyColumns = key.getKeyColumns();
        if (keyColumns.size() == 1) {
            tType = getJavaType(keyColumns.get(0).getType());
        } else if (keyColumns.size() <= Constants.MAX_ROW_DEGREE) {
            String generics = "";
            String separator = "";
            for (ColumnDefinition column : keyColumns) {
                generics += separator + (getJavaType(column.getType()));
                separator = ", ";
            }
            tType = Record.class.getName() + keyColumns.size() + "<" + generics + ">";
        } else {
            tType = Record.class.getName();
        }
        return tType;
    }

    /**
     * Copied from JavaGenerator.
     * @param column the column (may be null)
     * @return the number of identifier segments to print for the column reference
     */
    private int colRefSegments(TypedElementDefinition<?> column) {
        if (column != null && column.getContainer() instanceof UDTDefinition)
            return 2;
        if (!getStrategy().getInstanceFields())
            return 2;
        return 3;
    }
}
/**
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.portal.portlet.session;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import javax.portlet.PortletRequest;
import javax.portlet.PortletSession;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pluto.container.driver.PortletInvocationEvent;
import org.apache.pluto.container.driver.PortletInvocationListener;
import org.jasig.portal.url.IPortalRequestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationListener;
import org.springframework.security.web.session.HttpSessionDestroyedEvent;
import org.springframework.stereotype.Service;
import org.springframework.web.util.WebUtils;

/**
 * After each request processed by a portlet the portlets session (if one exists) is stored in a Map in the Portal's
 * session. When a portal session is invalidated the {@link PortletSession#invalidate()} method is called on all portlet
 * sessions in the Map.
 *
 * TODO this may not play well with distributed sessions
 *
 * @author Eric Dalquist
 * @version $Revision$
 */
@Service("portletSessionExpirationManager")
public class PortletSessionExpirationManager implements PortletInvocationListener, ApplicationListener<HttpSessionDestroyedEvent> {
    /** Portal-session attribute under which the per-context map of portlet sessions is stored. */
    public static final String PORTLET_SESSIONS_MAP = PortletSessionExpirationManager.class.getName() + ".PORTLET_SESSIONS";

    /**
     * Session attribute that signals a session is already invalidating.
     */
    private static final String ALREADY_INVALIDATING_SESSION_ATTRIBUTE =
            PortletSessionExpirationManager.class.getName() + ".ALREADY_INVALIDATING_SESSION_ATTRIBUTE";

    protected final Log logger = LogFactory.getLog(this.getClass());

    private IPortalRequestUtils portalRequestUtils;

    /**
     * @return the portalRequestUtils
     */
    public IPortalRequestUtils getPortalRequestUtils() {
        return portalRequestUtils;
    }

    /**
     * @param portalRequestUtils the portalRequestUtils to set
     */
    @Autowired
    public void setPortalRequestUtils(IPortalRequestUtils portalRequestUtils) {
        this.portalRequestUtils = portalRequestUtils;
    }

    /**
     * After a portlet request completes, records the portlet's session (if any) in a map stored in
     * the portal's {@link HttpSession}, keyed by the portlet's context path, so it can be
     * invalidated when the portal session is destroyed.
     *
     * (non-Javadoc)
     * @see org.apache.pluto.spi.optional.PortletInvocationListener#onEnd(org.apache.pluto.spi.optional.PortletInvocationEvent)
     */
    @Override
    @SuppressWarnings("unchecked")
    public void onEnd(PortletInvocationEvent event) {
        final PortletRequest portletRequest = event.getPortletRequest();
        final PortletSession portletSession = portletRequest.getPortletSession(false);
        // Nothing to track if the portlet never created a session
        if (portletSession == null) {
            return;
        }

        final HttpServletRequest portalRequest = this.portalRequestUtils.getPortletHttpRequest(portletRequest);
        final HttpSession portalSession = portalRequest.getSession();
        if (portalSession != null) {
            NonSerializableMapHolder<String, PortletSession> portletSessions;
            // Synchronize on the session mutex so concurrent portlet requests don't race
            // to create/replace the holder
            synchronized (WebUtils.getSessionMutex(portalSession)) {
                portletSessions = (NonSerializableMapHolder<String, PortletSession>) portalSession.getAttribute(PORTLET_SESSIONS_MAP);
                // Recreate the holder when missing or when its delegate was lost to
                // session (de)serialization (isValid() == false)
                if (portletSessions == null || !portletSessions.isValid()) {
                    // Was a raw-type instantiation; use the generic constructor to avoid an
                    // unchecked-creation warning and keep type safety
                    portletSessions = new NonSerializableMapHolder<>(new ConcurrentHashMap<String, PortletSession>());
                    portalSession.setAttribute(PORTLET_SESSIONS_MAP, portletSessions);
                }
            }

            final String contextPath = portletRequest.getContextPath();
            portletSessions.put(contextPath, portletSession);
        }
    }

    /**
     * When a portal session is destroyed, invalidates every portlet session that was recorded
     * for it, guarding against re-entrant invocation (see comment below).
     *
     * (non-Javadoc)
     * @see org.springframework.context.ApplicationListener#onApplicationEvent(org.springframework.context.ApplicationEvent)
     */
    @Override
    public void onApplicationEvent(HttpSessionDestroyedEvent event) {
        // The parameter is already typed; the previous cast of 'event' was redundant
        final HttpSession session = event.getSession();
        @SuppressWarnings("unchecked")
        final Map<String, PortletSession> portletSessions = (Map<String, PortletSession>) session.getAttribute(PORTLET_SESSIONS_MAP);
        if (portletSessions == null) {
            return;
        }

        /*
         * Since (at least) Tomcat 7.0.47, this method has the potential to
         * generate a StackOverflowError because PortletSession.invalidate()
         * will trigger another HttpSessionDestroyedEvent, which means this
         * method will be called again. I don't know if this behavior is a bug
         * in Tomcat or Spring, if this behavior is entirely proper, or if the
         * reality somewhere in between.
         *
         * For the present, let's put a token in the HttpSession (which is
         * available from the event object) as soon as we start invalidating it.
         * We'll then ignore sessions that already have this token.
         */
        if (session.getAttribute(ALREADY_INVALIDATING_SESSION_ATTRIBUTE) != null) {
            // We're already invalidating; don't do it again
            return;
        }
        session.setAttribute(ALREADY_INVALIDATING_SESSION_ATTRIBUTE, Boolean.TRUE);

        for (final Map.Entry<String, PortletSession> portletSessionEntry : portletSessions.entrySet()) {
            final String contextPath = portletSessionEntry.getKey();
            final PortletSession portletSession = portletSessionEntry.getValue();
            try {
                portletSession.invalidate();
            }
            catch (IllegalStateException e) {
                // Expected when the container already invalidated the portlet session
                this.logger.info("PortletSession with id '" + portletSession.getId() + "' for context '" + contextPath
                        + "' has already been invalidated.");
            }
            catch (Exception e) {
                // Best-effort cleanup: log and continue with the remaining sessions
                this.logger.warn("Failed to invalidate PortletSession with id '" + portletSession.getId()
                        + "' for context '" + contextPath + "'", e);
            }
        }
    }

    /* (non-Javadoc)
     * @see org.apache.pluto.spi.optional.PortletInvocationListener#onBegin(org.apache.pluto.spi.optional.PortletInvocationEvent)
     */
    @Override
    public void onBegin(PortletInvocationEvent event) {
        // Ignore
    }

    /* (non-Javadoc)
     * @see org.apache.pluto.spi.optional.PortletInvocationListener#onError(org.apache.pluto.spi.optional.PortletInvocationEvent, java.lang.Throwable)
     */
    @Override
    public void onError(PortletInvocationEvent event, Throwable t) {
        // Ignore
    }

    /**
     * Map implementation that holds the Map reference passed into the constructor in a transient field. This allows a
     * Map of non-serializable objects to be stored in the session but skipped during session persistence.
     */
    private static final class NonSerializableMapHolder<K, V> implements Map<K, V>, Serializable {
        private static final long serialVersionUID = 1L;

        // transient: deliberately not persisted with the session; recreated empty on deserialization
        private transient Map<K, V> delegate;

        public NonSerializableMapHolder(Map<K, V> delegate) {
            this.delegate = delegate;
        }

        /** @return false after deserialization until the holder is replaced (delegate lost). */
        public boolean isValid() {
            return this.delegate != null;
        }

        @Override
        public void clear() {
            delegate.clear();
        }

        @Override
        public boolean containsKey(Object key) {
            return delegate.containsKey(key);
        }

        @Override
        public boolean containsValue(Object value) {
            return delegate.containsValue(value);
        }

        @Override
        public Set<java.util.Map.Entry<K, V>> entrySet() {
            return delegate.entrySet();
        }

        @Override
        public boolean equals(Object o) {
            return delegate.equals(o);
        }

        @Override
        public V get(Object key) {
            return delegate.get(key);
        }

        @Override
        public int hashCode() {
            return delegate.hashCode();
        }

        @Override
        public boolean isEmpty() {
            return delegate.isEmpty();
        }

        @Override
        public Set<K> keySet() {
            return delegate.keySet();
        }

        @Override
        public V put(K key, V value) {
            return delegate.put(key, value);
        }

        @Override
        public void putAll(Map<? extends K, ? extends V> t) {
            delegate.putAll(t);
        }

        @Override
        public V remove(Object key) {
            return delegate.remove(key);
        }

        @Override
        public int size() {
            return delegate.size();
        }

        @Override
        public Collection<V> values() {
            return delegate.values();
        }

        @Override
        public String toString() {
            return delegate.toString();
        }

        private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
            // Honor the serialization contract for default (non-transient) state; this class
            // currently has none, but skipping this call would break if a field is ever added.
            ois.defaultReadObject();
            // The real delegate was transient; leave an empty map so the holder is usable,
            // while isValid() (checked via the holder replacement logic) triggers recreation.
            this.delegate = new LinkedHashMap<K, V>();
        }
    }
}
package org.asciidoctor;

import java.io.File;
import java.io.OutputStream;
import java.util.Map;

/**
 * Fluent builder for {@link Options}. Obtain an instance via {@link #options()},
 * chain the configuration calls, and finish with {@link #get()} or {@link #asMap()}.
 */
public class OptionsBuilder {

    // The Options instance being configured; exposed via get()/asMap().
    private Options options = new Options();

    private OptionsBuilder() {
        super();
    }

    /**
     * Creates options builder instance.
     *
     * @return options builder instance.
     */
    public static OptionsBuilder options() {
        return new OptionsBuilder();
    }

    /**
     * Sets backend option.
     *
     * @param backend
     *            value.
     * @return this instance.
     */
    public OptionsBuilder backend(String backend) {
        this.options.setBackend(backend);
        return this;
    }

    /**
     * Sets doctype option.
     *
     * @param docType
     *            value.
     * @return this instance.
     */
    public OptionsBuilder docType(String docType) {
        this.options.setDocType(docType);
        return this;
    }

    /**
     * Sets in place attribute.
     *
     * @param inPlace
     *            value.
     * @return this instance.
     */
    public OptionsBuilder inPlace(boolean inPlace) {
        this.options.setInPlace(inPlace);
        return this;
    }

    /**
     * Sets header footer attribute.
     *
     * @param headerFooter
     *            value.
     * @return this instance.
     */
    public OptionsBuilder headerFooter(boolean headerFooter) {
        this.options.setHeaderFooter(headerFooter);
        return this;
    }

    /**
     * Sets template directory.
     *
     * @param templateDir
     *            directory where templates are stored.
     * @return this instance.
     */
    public OptionsBuilder templateDir(File templateDir) {
        this.options.setTemplateDirs(templateDir.getAbsolutePath());
        return this;
    }

    /**
     * Sets template directories.
     *
     * @param templateDirs
     *            directories where templates are stored.
     * @return this instance.
     */
    public OptionsBuilder templateDirs(File... templateDirs) {
        for (File templateDir : templateDirs) {
            this.options.setTemplateDirs(templateDir.getAbsolutePath());
        }
        return this;
    }

    /**
     * Sets the template engine.
     *
     * @param templateEngine
     *            used to render the document.
     * @return this instance.
     */
    public OptionsBuilder templateEngine(String templateEngine) {
        this.options.setTemplateEngine(templateEngine);
        return this;
    }

    /**
     * Sets if Asciidoctor should use template cache or not.
     *
     * @param templateCache
     *            true if template cache is required, false otherwise.
     * @return this instance.
     */
    public OptionsBuilder templateCache(boolean templateCache) {
        this.options.setTemplateCache(templateCache);
        return this;
    }

    /**
     * Sets attributes used for rendering input.
     *
     * @param attributes
     *            map.
     * @return this instance.
     */
    public OptionsBuilder attributes(Map<String, Object> attributes) {
        this.options.setAttributes(attributes);
        return this;
    }

    /**
     * Sets attributes used for rendering input.
     *
     * @param attributes
     *            map.
     * @return this instance.
     */
    public OptionsBuilder attributes(Attributes attributes) {
        this.options.setAttributes(attributes.map());
        return this;
    }

    /**
     * Sets attributes used for rendering input.
     *
     * @param attributes
     *            builder.
     * @return this instance.
     */
    public OptionsBuilder attributes(AttributesBuilder attributes) {
        this.options.setAttributes(attributes.asMap());
        return this;
    }

    /**
     * Sets to file value. This toggles writing output to a file or returning output
     * as a string. If writing to a string, the header and footer are omitted from the
     * output by default.
     *
     * @param toFile
     *            <code>true</code> to write output to a file, <code>false</code>
     *            to write output to a string.
     * @return this instance.
     */
    public OptionsBuilder toFile(boolean toFile) {
        this.options.setToFile(toFile);
        return this;
    }

    /**
     * Sets to file value. This is the destination file name.
     *
     * @param toFile
     *            name of output file.
     * @return this instance.
     */
    public OptionsBuilder toFile(File toFile) {
        this.options.setToFile(toFile.getPath());
        return this;
    }

    /**
     * Sets the stream output is written to, instead of a file or a string.
     *
     * @param toStream
     *            destination stream for the rendered output.
     * @return this instance.
     */
    public OptionsBuilder toStream(OutputStream toStream) {
        this.options.setToStream(toStream);
        return this;
    }

    /**
     * Sets to dir value. This is the destination directory.
     *
     * @param directory
     *            where output is generated.
     * @return this instance.
     */
    public OptionsBuilder toDir(File directory) {
        this.options.setToDir(directory.getAbsolutePath());
        return this;
    }

    /**
     * Sets if asciidoctor should create output directory if it does not exist or not.
     *
     * @param mkDirs
     *            true if directory must be created, false otherwise.
     * @return this instance.
     */
    public OptionsBuilder mkDirs(boolean mkDirs) {
        this.options.setMkDirs(mkDirs);
        return this;
    }

    /**
     * Sets the safe mode.
     *
     * @param safeMode
     *            to run asciidoctor.
     * @return this instance.
     */
    public OptionsBuilder safe(SafeMode safeMode) {
        this.options.setSafe(safeMode);
        return this;
    }

    /**
     * Sets eruby implementation.
     *
     * @param eruby
     *            implementation.
     * @return this instance.
     */
    public OptionsBuilder eruby(String eruby) {
        this.options.setEruby(eruby);
        return this;
    }

    /**
     * Compact the output removing blank lines.
     *
     * @param compact
     *            value.
     * @return this instance.
     */
    public OptionsBuilder compact(boolean compact) {
        this.options.setCompact(compact);
        return this;
    }

    /**
     * Sets parse header only flag.
     *
     * @param parseHeaderOnly
     *            value.
     * @return this instance.
     */
    public OptionsBuilder parseHeaderOnly(boolean parseHeaderOnly) {
        this.options.setParseHeaderOnly(parseHeaderOnly);
        return this;
    }

    /**
     * Destination output directory.
     *
     * @param destinationDir
     *            destination directory.
     * @return this instance.
     */
    public OptionsBuilder destinationDir(File destinationDir) {
        this.options.setDestinationDir(destinationDir.getAbsolutePath());
        return this;
    }

    /**
     * Sets a custom or unlisted option.
     *
     * @param option
     *            name.
     * @param value
     *            for given option.
     * @return this instance.
     */
    public OptionsBuilder option(String option, Object value) {
        this.options.setOption(option, value);
        return this;
    }

    /**
     * Sets base dir for working directory.
     *
     * @param baseDir
     *            working directory.
     * @return this instance.
     */
    public OptionsBuilder baseDir(File baseDir) {
        this.options.setBaseDir(baseDir.getAbsolutePath());
        return this;
    }

    /**
     * Gets a map with configured options.
     *
     * @return map with all options. By default an empty map is returned.
     */
    public Map<String, Object> asMap() {
        return this.options.map();
    }

    /**
     * Gets the {@link Options} instance configured by this builder.
     *
     * @return the configured options object.
     */
    public Options get() {
        return this.options;
    }
}
/*
 * Copyright 2005 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.modelcompiler;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.assertj.core.api.Assertions;
import org.drools.modelcompiler.domain.Address;
import org.drools.modelcompiler.domain.Person;
import org.drools.modelcompiler.domain.Result;
import org.junit.Test;
import org.kie.api.runtime.KieSession;

import static org.junit.Assert.assertEquals;

/**
 * Tests the null-safe dereferencing operator ({@code !.}) of the DRL language:
 * a constraint such as {@code name!.length} must simply not match (rather than
 * throw an NPE) when the dereferenced value is null.
 */
public class NullSafeDereferencingTest extends BaseModelTest {

    public NullSafeDereferencingTest( RUN_TYPE testRunType ) {
        super( testRunType );
    }

    @Test
    public void testNullSafeDereferncing() {
        // The Person with a null name must be skipped, not cause an NPE.
        String str =
                "import " + Result.class.getCanonicalName() + ";" +
                "import " + Person.class.getCanonicalName() + ";" +
                "rule R when\n" +
                "  $r : Result()\n" +
                "  $p : Person( name!.length == 4 )\n" +
                "then\n" +
                "  $r.setValue(\"Found: \" + $p);\n" +
                "end";

        KieSession ksession = getKieSession( str );

        Result result = new Result();
        ksession.insert( result );

        ksession.insert( new Person( "Mark", 37 ) );
        ksession.insert( new Person( "Mario", 40 ) );
        ksession.insert( new Person( null, 40 ) );
        ksession.fireAllRules();

        assertEquals( "Found: Mark", result.getValue() );
    }

    // Chain of fixture beans A -> B -> C -> D used to exercise multiple
    // consecutive null-safe dereferences.
    public static class NullUnsafeA {
        private NullUnsafeB someB;

        public NullUnsafeB getSomeB() {
            return someB;
        }

        public void setSomeB(NullUnsafeB someB) {
            this.someB = someB;
        }
    }

    public static class NullUnsafeB {
        private NullUnsafeC someC;

        public NullUnsafeC getSomeC() {
            return someC;
        }

        public void setSomeC(NullUnsafeC someC) {
            this.someC = someC;
        }
    }

    public static class NullUnsafeC {
        private NullUnsafeD someD;

        public NullUnsafeD getSomeD() {
            return someD;
        }

        public void setSomeD(NullUnsafeD someD) {
            this.someD = someD;
        }
    }

    public static class NullUnsafeD {
        private String something;

        public String getSomething() {
            return something;
        }

        public void setSomething(String something) {
            this.something = something;
        }
    }

    @Test
    public void testNullSafeMultiple() {
        // Each iteration i leaves exactly one link of the chain (trap #i) null;
        // iteration 4 arms no trap at all, so only then should the rule fire.
        String str =
                "import " + NullUnsafeA.class.getCanonicalName() + ";" +
                "import " + NullUnsafeB.class.getCanonicalName() + ";" +
                "import " + NullUnsafeD.class.getCanonicalName() + ";" +
                "rule R when\n" +
                "  $a : NullUnsafeA( someB!.someC!.someD!.something == \"Hello\" )\n" +
                "then\n" +
                "  insert(\"matched\");\n" +
                "end";

        for (int i = 0; i <= 4; i++) {
            KieSession ksession = getKieSession(str);

            NullUnsafeA a = new NullUnsafeA();
            NullUnsafeB b = new NullUnsafeB();
            NullUnsafeC x = new NullUnsafeC();
            NullUnsafeD c = new NullUnsafeD();
            // trap #0
            if (i != 0) {
                c.setSomething("Hello");
            }
            // trap #1
            if (i != 1) {
                b.setSomeC(x);
            }
            // trap #2
            if (i != 2) {
                x.setSomeD(c);
            }
            // trap #3
            if (i != 3) {
                a.setSomeB(b);
            }
            ksession.insert(a);
            ksession.fireAllRules();

            Collection<String> results = getObjectsIntoList(ksession, String.class);
            if (i < 4) {
                assertEquals(0, results.size());
            } else if (i == 4) {
                // iteration #4 has no null-traps, so the rule must match exactly once
                assertEquals(1, results.size());
            }
        }
    }

    @Test
    public void testNullSafeDereferncingOnFieldWithMethodInvocation() {
        // Null-safe on the first field, followed by a method call on the result.
        String str =
                "import " + Result.class.getCanonicalName() + ";" +
                "import " + Person.class.getCanonicalName() + ";" +
                "rule R when\n" +
                "  $p : Person( address!.city.startsWith(\"M\") )\n" +
                "then\n" +
                "  Result r = new Result($p.getName());" +
                "  insert(r);\n" +
                "end";

        KieSession ksession = getKieSession(str);

        ksession.insert(new Person("John1", 41, (Address) null));
        ksession.insert(new Person("John2", 42, new Address("Milan")));
        ksession.fireAllRules();

        List<Result> results = getObjectsIntoList(ksession, Result.class);
        assertEquals(1, results.size());
        assertEquals("John2", results.get(0).getValue());
    }

    @Test
    public void testNullSafeDereferncingOnMethodInvocation() {
        // Null-safe applied to the second segment (city), guarding the method call.
        String str =
                "import " + Result.class.getCanonicalName() + ";" +
                "import " + Person.class.getCanonicalName() + ";" +
                "rule R when\n" +
                "  $p : Person( address.city!.startsWith(\"M\") )\n" +
                "then\n" +
                "  Result r = new Result($p.getName());" +
                "  insert(r);\n" +
                "end";

        KieSession ksession = getKieSession(str);

        ksession.insert(new Person("John1", 41, new Address(null)));
        ksession.insert(new Person("John2", 42, new Address("Milan")));
        ksession.fireAllRules();

        List<Result> results = getObjectsIntoList(ksession, Result.class);
        assertEquals(1, results.size());
        assertEquals("John2", results.get(0).getValue());
    }

    @Test
    public void testNullSafeDereferncingOnFirstField() {
        String str =
                "import " + Result.class.getCanonicalName() + ";" +
                "import " + Person.class.getCanonicalName() + ";" +
                "rule R when\n" +
                "  $p : Person( address!.city.length == 5 )\n" +
                "then\n" +
                "  Result r = new Result($p.getName());" +
                "  insert(r);\n" +
                "end";

        KieSession ksession = getKieSession(str);

        ksession.insert(new Person("John1", 41, (Address) null));
        ksession.insert(new Person("John2", 42, new Address("Milan")));
        ksession.fireAllRules();

        List<Result> results = getObjectsIntoList(ksession, Result.class);
        assertEquals(1, results.size());
        assertEquals("John2", results.get(0).getValue());
    }

    @Test
    public void testNullSafeDereferncingOnSecondField() {
        String str =
                "import " + Result.class.getCanonicalName() + ";" +
                "import " + Person.class.getCanonicalName() + ";" +
                "rule R when\n" +
                "  $p : Person( address.city!.length == 5 )\n" +
                "then\n" +
                "  Result r = new Result($p.getName());" +
                "  insert(r);\n" +
                "end";

        KieSession ksession = getKieSession(str);

        ksession.insert(new Person("John1", 41, new Address(null)));
        ksession.insert(new Person("John2", 42, new Address("Milan")));
        ksession.fireAllRules();

        List<Result> results = getObjectsIntoList(ksession, Result.class);
        assertEquals(1, results.size());
        assertEquals("John2", results.get(0).getValue());
    }

    @Test
    public void testNullSafeDereferncingWithOrHalfBinary() {
        // Null-safe dereference on the right-hand side of a half-binary OR constraint.
        String str =
                "import " + Person.class.getCanonicalName() + ";\n" +
                "global java.util.List result;\n" +
                "rule R\n" +
                "when\n" +
                "  $p : Person( name == \"John\" || == address!.city )\n" +
                "then\n" +
                "  result.add($p.getName());\n" +
                "end";

        KieSession ksession = getKieSession(str);

        List<String> result = new ArrayList<>();
        ksession.setGlobal("result", result);

        ksession.insert(new Person("John", 24, new Address("ABC")));
        ksession.insert(new Person("Paul", 22, (Address) null));
        ksession.insert(new Person("George", 21, new Address("George")));
        ksession.fireAllRules();

        Assertions.assertThat(result).containsExactlyInAnyOrder("John", "George");
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.transaction;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.concurrent.BoundedExecutor;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import io.trino.NotInTransactionException;
import io.trino.connector.CatalogName;
import io.trino.metadata.Catalog;
import io.trino.metadata.Catalog.SecurityManagement;
import io.trino.metadata.CatalogManager;
import io.trino.metadata.CatalogMetadata;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.Connector;
import io.trino.spi.connector.ConnectorMetadata;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.transaction.IsolationLevel;
import org.joda.time.DateTime;

import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verifyNotNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.util.concurrent.Futures.immediateFailedFuture;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static com.google.common.util.concurrent.Futures.immediateVoidFuture;
import static com.google.common.util.concurrent.Futures.nonCancellationPropagating;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.concurrent.MoreFutures.addExceptionCallback;
import static io.trino.spi.StandardErrorCode.AUTOCOMMIT_WRITE_CONFLICT;
import static io.trino.spi.StandardErrorCode.MULTI_CATALOG_WRITE_CONFLICT;
import static io.trino.spi.StandardErrorCode.NOT_FOUND;
import static io.trino.spi.StandardErrorCode.READ_ONLY_VIOLATION;
import static io.trino.spi.StandardErrorCode.TRANSACTION_ALREADY_ABORTED;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.stream.Collectors.toList;

/**
 * In-memory implementation of {@code TransactionManager}: transactions are kept in a
 * concurrent map and expired by a periodic idle check.
 */
@ThreadSafe
public class InMemoryTransactionManager
        implements TransactionManager
{
    private static final Logger log = Logger.get(InMemoryTransactionManager.class);

    // How long a transaction may stay idle before the periodic check removes and aborts it.
    private final Duration idleTimeout;
    private final int maxFinishingConcurrency;

    // Registry of all live transactions, keyed by id.
    private final ConcurrentMap<TransactionId, TransactionMetadata> transactions = new ConcurrentHashMap<>();
    private final CatalogManager catalogManager;
    private final Executor finishingExecutor;

    private InMemoryTransactionManager(Duration idleTimeout, int maxFinishingConcurrency, CatalogManager catalogManager, Executor finishingExecutor)
    {
        this.catalogManager = catalogManager;
        requireNonNull(idleTimeout, "idleTimeout is null");
        checkArgument(maxFinishingConcurrency > 0, "maxFinishingConcurrency must be at least 1");
        requireNonNull(finishingExecutor, "finishingExecutor is null");
        this.idleTimeout = idleTimeout;
        this.maxFinishingConcurrency = maxFinishingConcurrency;
        this.finishingExecutor = finishingExecutor;
    }

    /**
     * Creates a manager and starts the periodic idle-transaction cleanup task.
     */
    public static TransactionManager create(
            TransactionManagerConfig config,
            ScheduledExecutorService idleCheckExecutor,
            CatalogManager catalogManager,
            Executor finishingExecutor)
    {
        InMemoryTransactionManager transactionManager = new InMemoryTransactionManager(config.getIdleTimeout(), config.getMaxFinishingConcurrency(), catalogManager, finishingExecutor);
        transactionManager.scheduleIdleChecks(config.getIdleCheckInterval(), idleCheckExecutor);
        return transactionManager;
    }

    public static TransactionManager createTestTransactionManager()
    {
        return createTestTransactionManager(new CatalogManager());
    }

    public static TransactionManager createTestTransactionManager(CatalogManager catalogManager)
    {
        // No idle checks needed
        return new InMemoryTransactionManager(new Duration(1, TimeUnit.DAYS), 1, catalogManager, directExecutor());
    }

    private void scheduleIdleChecks(Duration idleCheckInterval, ScheduledExecutorService idleCheckExecutor)
    {
        idleCheckExecutor.scheduleWithFixedDelay(() -> {
            try {
                cleanUpExpiredTransactions();
            }
            catch (Throwable t) {
                // Never let an exception kill the scheduled cleanup task
                log.error(t, "Unexpected exception while cleaning up expired transactions");
            }
        }, idleCheckInterval.toMillis(), idleCheckInterval.toMillis(), MILLISECONDS);
    }

    private synchronized void cleanUpExpiredTransactions()
    {
        // Remove each expired transaction from the registry first, then abort it asynchronously.
        Iterator<Entry<TransactionId, TransactionMetadata>> iterator = transactions.entrySet().iterator();
        while (iterator.hasNext()) {
            Entry<TransactionId, TransactionMetadata> entry = iterator.next();
            if (entry.getValue().isExpired(idleTimeout)) {
                iterator.remove();
                log.info("Removing expired transaction: %s", entry.getKey());
                entry.getValue().asyncAbort();
            }
        }
    }

    @Override
    public boolean transactionExists(TransactionId transactionId)
    {
        return tryGetTransactionMetadata(transactionId).isPresent();
    }

    @Override
    public TransactionInfo getTransactionInfo(TransactionId transactionId)
    {
        return getTransactionMetadata(transactionId).getTransactionInfo();
    }

    @Override
    public List<TransactionInfo> getAllTransactionInfos()
    {
        return transactions.values().stream()
                .map(TransactionMetadata::getTransactionInfo)
                .collect(toImmutableList());
    }

    @Override
    public TransactionId beginTransaction(boolean autoCommitContext)
    {
        return beginTransaction(DEFAULT_ISOLATION, DEFAULT_READ_ONLY, autoCommitContext);
    }

    @Override
    public TransactionId beginTransaction(IsolationLevel isolationLevel, boolean readOnly, boolean autoCommitContext)
    {
        TransactionId transactionId = TransactionId.create();
        BoundedExecutor executor = new BoundedExecutor(finishingExecutor, maxFinishingConcurrency);
        TransactionMetadata transactionMetadata = new TransactionMetadata(transactionId, isolationLevel, readOnly, autoCommitContext, catalogManager, executor);
        // TransactionId.create() is expected to be unique; a collision would be a programming error
        checkState(transactions.put(transactionId, transactionMetadata) == null, "Duplicate transaction ID: %s", transactionId);
        return transactionId;
    }

    @Override
    public Map<String, Catalog> getCatalogs(TransactionId transactionId)
    {
        return getTransactionMetadata(transactionId).getCatalogs();
    }

    @Override
    public Optional<CatalogMetadata> getOptionalCatalogMetadata(TransactionId transactionId, String catalogName)
    {
        TransactionMetadata transactionMetadata = getTransactionMetadata(transactionId);
        return transactionMetadata.getConnectorId(catalogName)
                .map(transactionMetadata::getTransactionCatalogMetadata);
    }

    @Override
    public CatalogMetadata getCatalogMetadata(TransactionId transactionId, CatalogName catalogName)
    {
        return getTransactionMetadata(transactionId).getTransactionCatalogMetadata(catalogName);
    }

    @Override
    public CatalogMetadata getCatalogMetadataForWrite(TransactionId transactionId, CatalogName catalogName)
    {
        CatalogMetadata catalogMetadata = getCatalogMetadata(transactionId, catalogName);
        checkConnectorWrite(transactionId, catalogName);
        return catalogMetadata;
    }

    @Override
    public CatalogMetadata getCatalogMetadataForWrite(TransactionId transactionId, String catalogName)
    {
        TransactionMetadata transactionMetadata = getTransactionMetadata(transactionId);

        // there is no need to ask for a connector specific id since the overlay connectors are read only
        CatalogName catalog = transactionMetadata.getConnectorId(catalogName)
                .orElseThrow(() -> new TrinoException(NOT_FOUND, "Catalog does not exist: " + catalogName));

        return getCatalogMetadataForWrite(transactionId, catalog);
    }

    @Override
    public ConnectorTransactionHandle getConnectorTransaction(TransactionId transactionId, CatalogName catalogName)
    {
        return getCatalogMetadata(transactionId, catalogName).getTransactionHandleFor(catalogName);
    }

    private void checkConnectorWrite(TransactionId transactionId, CatalogName catalogName)
    {
        getTransactionMetadata(transactionId).checkConnectorWrite(catalogName);
    }

    @Override
    public void checkAndSetActive(TransactionId transactionId)
    {
        TransactionMetadata metadata = getTransactionMetadata(transactionId);
        metadata.checkOpenTransaction();
        metadata.setActive();
    }

    @Override
    public void trySetActive(TransactionId transactionId)
    {
        tryGetTransactionMetadata(transactionId).ifPresent(TransactionMetadata::setActive);
    }

    @Override
    public void trySetInactive(TransactionId transactionId)
    {
        tryGetTransactionMetadata(transactionId).ifPresent(TransactionMetadata::setInactive);
    }

    // Throwing lookup: callers that require an existing transaction.
    private TransactionMetadata getTransactionMetadata(TransactionId transactionId)
    {
        TransactionMetadata transactionMetadata = transactions.get(transactionId);
        if (transactionMetadata == null) {
            throw new NotInTransactionException(transactionId);
        }
        return transactionMetadata;
    }

    // Non-throwing lookup variant.
    private Optional<TransactionMetadata> tryGetTransactionMetadata(TransactionId transactionId)
    {
        return Optional.ofNullable(transactions.get(transactionId));
    }

    // NOTE(review): this method continues beyond the visible portion of the file.
    private ListenableFuture<TransactionMetadata> removeTransactionMetadataAsFuture(TransactionId transactionId)
    {
        TransactionMetadata transactionMetadata =
transactions.remove(transactionId); if (transactionMetadata == null) { return immediateFailedFuture(new NotInTransactionException(transactionId)); } return immediateFuture(transactionMetadata); } @Override public ListenableFuture<Void> asyncCommit(TransactionId transactionId) { return nonCancellationPropagating(Futures.transformAsync(removeTransactionMetadataAsFuture(transactionId), TransactionMetadata::asyncCommit, directExecutor())); } @Override public ListenableFuture<Void> asyncAbort(TransactionId transactionId) { return nonCancellationPropagating(Futures.transformAsync(removeTransactionMetadataAsFuture(transactionId), TransactionMetadata::asyncAbort, directExecutor())); } @Override public void fail(TransactionId transactionId) { // Mark transaction as failed, but don't remove it. tryGetTransactionMetadata(transactionId).ifPresent(TransactionMetadata::asyncAbort); } private static <T> ListenableFuture<Void> asVoid(ListenableFuture<T> future) { return Futures.transform(future, v -> null, directExecutor()); } @ThreadSafe private static class TransactionMetadata { private final DateTime createTime = DateTime.now(); private final CatalogManager catalogManager; private final TransactionId transactionId; private final IsolationLevel isolationLevel; private final boolean readOnly; private final boolean autoCommitContext; @GuardedBy("this") private final Map<CatalogName, ConnectorTransactionMetadata> connectorIdToMetadata = new ConcurrentHashMap<>(); @GuardedBy("this") private final AtomicReference<CatalogName> writtenConnectorId = new AtomicReference<>(); private final Executor finishingExecutor; private final AtomicReference<Boolean> completedSuccessfully = new AtomicReference<>(); private final AtomicReference<Long> idleStartTime = new AtomicReference<>(); @GuardedBy("this") private final Map<String, Optional<Catalog>> catalogByName = new ConcurrentHashMap<>(); @GuardedBy("this") private final Map<CatalogName, Catalog> catalogsByName = new ConcurrentHashMap<>(); 
@GuardedBy("this") private final Map<CatalogName, CatalogMetadata> catalogMetadata = new ConcurrentHashMap<>(); public TransactionMetadata( TransactionId transactionId, IsolationLevel isolationLevel, boolean readOnly, boolean autoCommitContext, CatalogManager catalogManager, Executor finishingExecutor) { this.transactionId = requireNonNull(transactionId, "transactionId is null"); this.isolationLevel = requireNonNull(isolationLevel, "isolationLevel is null"); this.readOnly = readOnly; this.autoCommitContext = autoCommitContext; this.catalogManager = requireNonNull(catalogManager, "catalogManager is null"); this.finishingExecutor = requireNonNull(finishingExecutor, "finishingExecutor is null"); } public void setActive() { idleStartTime.set(null); } public void setInactive() { idleStartTime.set(System.nanoTime()); } public boolean isExpired(Duration idleTimeout) { Long idleStartTime = this.idleStartTime.get(); return idleStartTime != null && Duration.nanosSince(idleStartTime).compareTo(idleTimeout) > 0; } public void checkOpenTransaction() { Boolean completedStatus = this.completedSuccessfully.get(); if (completedStatus != null) { if (completedStatus) { // Should not happen normally throw new IllegalStateException("Current transaction already committed"); } else { throw new TrinoException(TRANSACTION_ALREADY_ABORTED, "Current transaction is aborted, commands ignored until end of transaction block"); } } } private synchronized Map<String, Catalog> getCatalogs() { // todo if repeatable read, this must be recorded Map<String, Catalog> catalogs = new HashMap<>(); catalogByName.values().stream() .filter(Optional::isPresent) .map(Optional::get) .forEach(catalog -> catalogs.put(catalog.getCatalogName(), catalog)); catalogManager.getCatalogs().stream() .forEach(catalog -> catalogs.putIfAbsent(catalog.getCatalogName(), catalog)); return ImmutableMap.copyOf(catalogs); } private synchronized Optional<CatalogName> getConnectorId(String catalogName) { Optional<Catalog> catalog = 
catalogByName.get(catalogName); if (catalog == null) { catalog = catalogManager.getCatalog(catalogName); catalogByName.put(catalogName, catalog); if (catalog.isPresent()) { registerCatalog(catalog.get()); } } return catalog.map(Catalog::getConnectorCatalogName); } private synchronized void registerCatalog(Catalog catalog) { catalogsByName.put(catalog.getConnectorCatalogName(), catalog); catalogsByName.put(catalog.getInformationSchemaId(), catalog); catalogsByName.put(catalog.getSystemTablesId(), catalog); } private synchronized CatalogMetadata getTransactionCatalogMetadata(CatalogName catalogName) { checkOpenTransaction(); CatalogMetadata catalogMetadata = this.catalogMetadata.get(catalogName); if (catalogMetadata == null) { Catalog catalog = catalogsByName.get(catalogName); verifyNotNull(catalog, "Unknown catalog: %s", catalogName); Connector connector = catalog.getConnector(catalogName); ConnectorTransactionMetadata metadata = createConnectorTransactionMetadata(catalog.getConnectorCatalogName(), catalog); ConnectorTransactionMetadata informationSchema = createConnectorTransactionMetadata(catalog.getInformationSchemaId(), catalog); ConnectorTransactionMetadata systemTables = createConnectorTransactionMetadata(catalog.getSystemTablesId(), catalog); catalogMetadata = new CatalogMetadata( metadata.getCatalogName(), metadata.getConnectorMetadata(), metadata.getTransactionHandle(), informationSchema.getCatalogName(), informationSchema.getConnectorMetadata(), informationSchema.getTransactionHandle(), systemTables.getCatalogName(), systemTables.getConnectorMetadata(), systemTables.getTransactionHandle(), metadata.getSecurityManagement(), connector.getCapabilities()); this.catalogMetadata.put(catalog.getConnectorCatalogName(), catalogMetadata); this.catalogMetadata.put(catalog.getInformationSchemaId(), catalogMetadata); this.catalogMetadata.put(catalog.getSystemTablesId(), catalogMetadata); } return catalogMetadata; } public synchronized ConnectorTransactionMetadata 
createConnectorTransactionMetadata(CatalogName catalogName, Catalog catalog) { Connector connector = catalog.getConnector(catalogName); ConnectorTransactionMetadata transactionMetadata = new ConnectorTransactionMetadata(catalogName, connector, beginTransaction(connector), catalog.getSecurityManagement()); checkState(connectorIdToMetadata.put(catalogName, transactionMetadata) == null); return transactionMetadata; } private ConnectorTransactionHandle beginTransaction(Connector connector) { if (connector instanceof InternalConnector) { return ((InternalConnector) connector).beginTransaction(transactionId, isolationLevel, readOnly); } return connector.beginTransaction(isolationLevel, readOnly, autoCommitContext); } public synchronized void checkConnectorWrite(CatalogName catalogName) { checkOpenTransaction(); ConnectorTransactionMetadata transactionMetadata = connectorIdToMetadata.get(catalogName); checkArgument(transactionMetadata != null, "Cannot record write for connector not part of transaction"); if (readOnly) { throw new TrinoException(READ_ONLY_VIOLATION, "Cannot execute write in a read-only transaction"); } if (!writtenConnectorId.compareAndSet(null, catalogName) && !writtenConnectorId.get().equals(catalogName)) { throw new TrinoException(MULTI_CATALOG_WRITE_CONFLICT, "Multi-catalog writes not supported in a single transaction. 
Already wrote to catalog " + writtenConnectorId.get()); } if (transactionMetadata.isSingleStatementWritesOnly() && !autoCommitContext) { throw new TrinoException(AUTOCOMMIT_WRITE_CONFLICT, "Catalog only supports writes using autocommit: " + catalogName); } } public synchronized ListenableFuture<Void> asyncCommit() { if (!completedSuccessfully.compareAndSet(null, true)) { if (completedSuccessfully.get()) { // Already done return immediateVoidFuture(); } // Transaction already aborted return immediateFailedFuture(new TrinoException(TRANSACTION_ALREADY_ABORTED, "Current transaction has already been aborted")); } CatalogName writeCatalogName = this.writtenConnectorId.get(); if (writeCatalogName == null) { ListenableFuture<Void> future = asVoid(Futures.allAsList(connectorIdToMetadata.values().stream() .map(transactionMetadata -> Futures.submit(transactionMetadata::commit, finishingExecutor)) .collect(toList()))); addExceptionCallback(future, throwable -> { abortInternal(); log.error(throwable, "Read-only connector should not throw exception on commit"); }); return nonCancellationPropagating(future); } Supplier<ListenableFuture<Void>> commitReadOnlyConnectors = () -> { List<ListenableFuture<Void>> futures = connectorIdToMetadata.entrySet().stream() .filter(entry -> !entry.getKey().equals(writeCatalogName)) .map(Entry::getValue) .map(transactionMetadata -> Futures.submit(transactionMetadata::commit, finishingExecutor)) .collect(toList()); ListenableFuture<Void> future = asVoid(Futures.allAsList(futures)); addExceptionCallback(future, throwable -> log.error(throwable, "Read-only connector should not throw exception on commit")); return future; }; ConnectorTransactionMetadata writeConnector = connectorIdToMetadata.get(writeCatalogName); ListenableFuture<Void> commitFuture = Futures.submit(writeConnector::commit, finishingExecutor); ListenableFuture<Void> readOnlyCommitFuture = Futures.transformAsync(commitFuture, ignored -> commitReadOnlyConnectors.get(), directExecutor()); 
addExceptionCallback(readOnlyCommitFuture, this::abortInternal); return nonCancellationPropagating(readOnlyCommitFuture); } public synchronized ListenableFuture<Void> asyncAbort() { if (!completedSuccessfully.compareAndSet(null, false)) { if (completedSuccessfully.get()) { // Should not happen normally return immediateFailedFuture(new IllegalStateException("Current transaction already committed")); } // Already done return immediateVoidFuture(); } return abortInternal(); } private synchronized ListenableFuture<Void> abortInternal() { // the callbacks in statement performed on another thread so are safe List<ListenableFuture<Void>> futures = connectorIdToMetadata.values().stream() .map(connection -> Futures.submit(() -> safeAbort(connection), finishingExecutor)) .collect(toList()); ListenableFuture<Void> future = asVoid(Futures.allAsList(futures)); return nonCancellationPropagating(future); } private static void safeAbort(ConnectorTransactionMetadata connection) { try { connection.abort(); } catch (Exception e) { log.error(e, "Connector threw exception on abort"); } } public TransactionInfo getTransactionInfo() { Duration idleTime = Optional.ofNullable(idleStartTime.get()) .map(Duration::nanosSince) .orElse(new Duration(0, MILLISECONDS)); // dereferencing this field is safe because the field is atomic @SuppressWarnings("FieldAccessNotGuarded") Optional<CatalogName> writtenConnectorId = Optional.ofNullable(this.writtenConnectorId.get()); // copying the key set is safe here because the map is concurrent @SuppressWarnings("FieldAccessNotGuarded") List<CatalogName> catalogNames = ImmutableList.copyOf(connectorIdToMetadata.keySet()); return new TransactionInfo(transactionId, isolationLevel, readOnly, autoCommitContext, createTime, idleTime, catalogNames, writtenConnectorId); } private static class ConnectorTransactionMetadata { private final CatalogName catalogName; private final Connector connector; private final ConnectorTransactionHandle transactionHandle; private 
final SecurityManagement securityManagement; private final ConnectorMetadata connectorMetadata; private final AtomicBoolean finished = new AtomicBoolean(); public ConnectorTransactionMetadata( CatalogName catalogName, Connector connector, ConnectorTransactionHandle transactionHandle, SecurityManagement securityManagement) { this.catalogName = requireNonNull(catalogName, "catalogName is null"); this.connector = requireNonNull(connector, "connector is null"); this.transactionHandle = requireNonNull(transactionHandle, "transactionHandle is null"); this.securityManagement = requireNonNull(securityManagement, "securityManagement is null"); this.connectorMetadata = connector.getMetadata(transactionHandle); } public CatalogName getCatalogName() { return catalogName; } public boolean isSingleStatementWritesOnly() { return connector.isSingleStatementWritesOnly(); } public SecurityManagement getSecurityManagement() { return securityManagement; } public synchronized ConnectorMetadata getConnectorMetadata() { checkState(!finished.get(), "Already finished"); return connectorMetadata; } public ConnectorTransactionHandle getTransactionHandle() { checkState(!finished.get(), "Already finished"); return transactionHandle; } public void commit() { if (finished.compareAndSet(false, true)) { connector.commit(transactionHandle); } } public void abort() { if (finished.compareAndSet(false, true)) { connector.rollback(transactionHandle); } } } } }
/** * Copyright (C) 2014 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.dashbuilder.displayer.client.widgets; import javax.enterprise.context.Dependent; import javax.inject.Inject; import com.github.gwtbootstrap.client.ui.CheckBox; import com.github.gwtbootstrap.client.ui.Tab; import com.github.gwtbootstrap.client.ui.TabPanel; import com.github.gwtbootstrap.client.ui.constants.VisibilityChange; import com.google.gwt.core.client.GWT; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.uibinder.client.UiBinder; import com.google.gwt.uibinder.client.UiField; import com.google.gwt.uibinder.client.UiHandler; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.Panel; import com.google.gwt.user.client.ui.Widget; import org.dashbuilder.common.client.error.ClientRuntimeError; import org.dashbuilder.dataset.DataSetLookup; import org.dashbuilder.dataset.DataSetLookupConstraints; import org.dashbuilder.dataset.DataSetMetadata; import org.dashbuilder.displayer.DisplayerSettings; import org.dashbuilder.displayer.DisplayerType; import org.dashbuilder.displayer.client.AbstractDisplayerListener; import org.dashbuilder.displayer.client.Displayer; import org.dashbuilder.displayer.client.DisplayerHelper; import org.dashbuilder.displayer.client.DisplayerListener; import org.dashbuilder.displayer.client.DisplayerLocator; import 
org.dashbuilder.displayer.client.resources.i18n.CommonConstants; @Dependent public class DisplayerEditorView extends Composite implements DisplayerEditor.View { interface Binder extends UiBinder<Widget, DisplayerEditorView> {} private static Binder uiBinder = GWT.create(Binder.class); @Inject public DisplayerEditorView(DisplayerTypeSelector typeSelector, DataSetLookupEditor lookupEditor, DisplayerSettingsEditor settingsEditor) { this.typeSelector = typeSelector; this.lookupEditor = lookupEditor; this.settingsEditor = settingsEditor; initWidget(uiBinder.createAndBindUi(this)); dataTablePanel.getElement().setAttribute("cellpadding", "5"); } protected DisplayerEditor presenter; protected DisplayerSettings settings; protected DisplayerTypeSelector typeSelector; protected DataSetLookupEditor lookupEditor; protected DisplayerSettingsEditor settingsEditor; protected Displayer displayer; protected DisplayerError errorWidget = new DisplayerError(); DisplayerListener displayerListener = new AbstractDisplayerListener() { public void onError(Displayer displayer, ClientRuntimeError error) { error(error); } }; @UiField public Panel leftPanel; @UiField public Panel centerPanel; @UiField public TabPanel optionsPanel; @UiField public Tab optionType; @UiField public Tab optionData; @UiField public Tab optionSettings; @UiField public Panel dataTablePanel; @UiField public CheckBox viewAsTable; @Override public void init(DisplayerSettings settings, DisplayerEditor presenter) { this.settings = settings; this.presenter = presenter; showDisplayer(); gotoLastTab(); } @Override public void disableTypeSelection() { optionType.addStyle(VisibilityChange.HIDE); } public void gotoLastTab() { int selectedTab = optionsPanel.getSelectedTab(); int lastTab = DisplayerEditorStatus.get().getSelectedTab(settings.getUUID()); if (selectedTab < 0 || selectedTab != lastTab) { switch (lastTab) { case 2: gotoDisplaySettings(); break; case 1: gotoDataSetConf(); break; default: gotoTypeSelection(); break; } } } 
private void saveLastTab(int tab) { DisplayerEditorStatus.get().saveSelectedTab(settings.getUUID(), tab); } @Override public void gotoTypeSelection() { optionsPanel.selectTab(0); saveLastTab(0); typeSelector.init(presenter); typeSelector.select(settings.getRenderer(), settings.getType(), settings.getSubtype()); leftPanel.clear(); leftPanel.add(typeSelector); dataTablePanel.setVisible(false); showDisplayer(); } @Override public void gotoDataSetConf() { optionsPanel.selectTab(1); saveLastTab(1); if (settings.getDataSet() == null && settings.getDataSetLookup() != null) { // Fetch before initializing the editor presenter.fetchDataSetLookup(); } else { // Just init the lookup editor lookupEditor.init(presenter); } leftPanel.clear(); leftPanel.add(lookupEditor); if (DisplayerType.TABLE.equals(settings.getType())) { dataTablePanel.setVisible(false); } else { dataTablePanel.setVisible(true); } showDisplayer(); } @Override public void showTypeChangedWarning(DisplayerSettings oldSettings, DisplayerSettings newSettings) { if (Window.confirm(CommonConstants.INSTANCE.displayer_editor_incompatible_settings())) { presenter.changeSettings(oldSettings, newSettings); } else { typeSelector.select(oldSettings.getRenderer(), oldSettings.getType(), oldSettings.getSubtype()); } } @Override public void updateDataSetLookup(DataSetLookupConstraints constraints, DataSetMetadata metadata) { DataSetLookup dataSetLookup = settings.getDataSetLookup(); lookupEditor.init(presenter, dataSetLookup, constraints, metadata); showDisplayer(); } @Override public void gotoDisplaySettings() { optionsPanel.selectTab(2); saveLastTab(2); optionSettings.setActive(true); settingsEditor.init(settings, presenter); leftPanel.clear(); leftPanel.add(settingsEditor); dataTablePanel.setVisible(false); showDisplayer(); } @Override public void error(String error) { centerPanel.clear(); centerPanel.add(errorWidget); errorWidget.show(error, null); GWT.log(error); } @Override public void error(ClientRuntimeError e) { 
centerPanel.clear(); centerPanel.add(errorWidget); errorWidget.show(e.getMessage(), e.getCause()); if (e.getThrowable() != null) GWT.log(e.getMessage(), e.getThrowable()); else GWT.log(e.getMessage()); } @Override public void close() { if (displayer != null) { displayer.close(); } } public void showDisplayer() { if (displayer != null) { displayer.close(); } try { if (dataTablePanel.isVisible() && viewAsTable.getValue()) { DisplayerSettings tableSettings = settings.cloneInstance(); tableSettings.setTitleVisible(false); tableSettings.setType(DisplayerType.TABLE); tableSettings.setTablePageSize(8); tableSettings.setTableWidth(-1); displayer = DisplayerLocator.get().lookupDisplayer(tableSettings); displayer.addListener(displayerListener); displayer.setRefreshOn(false); centerPanel.clear(); centerPanel.add(displayer); displayer.draw(); } else { displayer = DisplayerLocator.get().lookupDisplayer(settings); displayer.addListener(displayerListener); displayer.setRefreshOn(false); centerPanel.clear(); centerPanel.add(displayer); displayer.draw(); } } catch (Exception e) { error(new ClientRuntimeError(e)); } } @UiHandler(value = "optionType") public void onTypeSelected(ClickEvent clickEvent) { gotoTypeSelection(); } @UiHandler(value = "optionData") public void onDataSelected(ClickEvent clickEvent) { gotoDataSetConf(); } @UiHandler(value = "optionSettings") public void onSettingsSelected(ClickEvent clickEvent) { gotoDisplaySettings(); } @UiHandler(value = "viewAsTable") public void onRawTableChecked(ClickEvent clickEvent) { showDisplayer(); } }
package net.lecousin.framework.core.test.io.bit;

import java.io.EOFException;
import java.util.ArrayList;
import java.util.Collection;

import net.lecousin.framework.concurrent.threads.Task.Priority;
import net.lecousin.framework.core.test.LCCoreAbstractTest;
import net.lecousin.framework.io.IO;
import net.lecousin.framework.io.bit.BitIO;
import net.lecousin.framework.io.buffering.ByteArrayIO;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runners.Parameterized.Parameters;

/**
 * Base test for {@link BitIO.Readable} implementations. A fixed byte sequence and its
 * expected bit expansion (little-endian bit order: bits[i] is bit i%8 of byte i/8,
 * LSB first) are fed through both little- and big-endian readers, bit by bit and in
 * multi-bit chunks, synchronously and asynchronously. EOF is always checked twice to
 * verify reads past the end keep failing with {@link EOFException}.
 */
public abstract class TestBitIOReadable extends LCCoreAbstractTest {

	@Parameters
	public static Collection<Object[]> parameters() {
		ArrayList<Object[]> list = new ArrayList<>(1);
		list.add(new Object[] {
			new byte[] { (byte)0x56, (byte)0xF1, (byte)0x00, (byte)0x04, (byte)0xD7 },
			new boolean[] {
				false, true, true, false, true, false, true, false,
				true, false, false, false, true, true, true, true,
				false, false, false, false, false, false, false, false,
				false, false, true, false, false, false, false, false,
				true, true, true, false, true, false, true, true
			}
		});
		return list;
	}

	public TestBitIOReadable(byte[] bytes, boolean[] bits) {
		this.bytes = bytes;
		this.bits = bits;
	}

	// Raw input bytes and their expected little-endian bit expansion
	protected byte[] bytes;
	protected boolean[] bits;

	/** Creates the little-endian reader under test. */
	protected abstract BitIO.Readable createLittleEndian(IO.Readable.Buffered io);

	/** Creates the big-endian reader under test. */
	protected abstract BitIO.Readable createBigEndian(IO.Readable.Buffered io);

	// ---- shared helpers -------------------------------------------------

	// Expected value of reading nbBits at bitsPos from a little-endian reader:
	// bits come out LSB-first, so bit (bitsPos + nbBits - 1) is the most significant
	private long expectedLittleEndian(int bitsPos, int nbBits) {
		long expected = 0;
		for (int i = 0; i < nbBits; ++i) {
			expected <<= 1;
			expected |= bits[bitsPos + nbBits - 1 - i] ? 1 : 0;
		}
		return expected;
	}

	// Expected value of reading nbBits at bitsPos from a big-endian reader:
	// within each byte the bit order is reversed relative to the bits[] table
	private long expectedBigEndian(int bitsPos, int nbBits) {
		long expected = 0;
		for (int i = 0; i < nbBits; ++i) {
			expected <<= 1;
			int o = (bitsPos + i) / 8;
			int b = (bitsPos + i) % 8;
			expected |= bits[o * 8 + 7 - b] ? 1 : 0;
		}
		return expected;
	}

	// A synchronous single-bit read past the end must throw EOFException
	private static void expectEOFOnReadBoolean(BitIO.Readable bio) throws Exception {
		try {
			bio.readBoolean();
			throw new AssertionError("End of stream expected");
		} catch (EOFException e) {
			// ok
		}
	}

	// An asynchronous single-bit read past the end must fail with EOFException
	private static void expectEOFOnReadBooleanAsync(BitIO.Readable bio) throws Exception {
		try {
			bio.readBooleanAsync().blockResult(0);
			throw new AssertionError("End of stream expected");
		} catch (EOFException e) {
			// ok
		}
	}

	// A synchronous multi-bit read past the end must throw EOFException
	private static void expectEOFOnReadBits(BitIO.Readable bio, int nbBits) throws Exception {
		try {
			bio.readBits(nbBits);
			throw new AssertionError("End of stream expected");
		} catch (EOFException e) {
			// ok
		}
	}

	// An asynchronous multi-bit read past the end must fail with EOFException
	private static void expectEOFOnReadBitsAsync(BitIO.Readable bio, int nbBits) throws Exception {
		try {
			bio.readBitsAsync(nbBits).blockThrow(0);
			throw new AssertionError("End of stream expected");
		} catch (EOFException e) {
			// ok
		}
	}

	// ---- bit-by-bit tests -----------------------------------------------

	@Test
	public void testBitByBitLittleEndian() throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createLittleEndian(io)) {
			// The bit reader must expose and forward the wrapped IO and its priority
			Assert.assertEquals(io, bio.getWrappedIO());
			Priority p = bio.getPriority();
			Priority p2 = p.more();
			bio.setPriority(p2);
			Assert.assertEquals(p2, bio.getPriority());
			Assert.assertEquals(p2, io.getPriority());
			for (int i = 0; i < bits.length; ++i)
				Assert.assertTrue("bit " + i, bits[i] == bio.readBoolean());
			expectEOFOnReadBoolean(bio);
		}
	}

	@Test
	public void testBitByBitBigEndian() throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createBigEndian(io)) {
			for (int i = 0; i < bytes.length; ++i) {
				for (int j = 0; j < 8; ++j)
					Assert.assertTrue("bit " + (i * 8 + j) + " expected to be " + bits[i * 8 + 7 - j],
						bits[i * 8 + 7 - j] == bio.readBoolean());
			}
			expectEOFOnReadBoolean(bio);
		}
	}

	@Test
	public void testBitByBitAsyncLittleEndian() throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createLittleEndian(io)) {
			for (int i = 0; i < bits.length; ++i)
				Assert.assertTrue("bit " + i, bits[i] == bio.readBooleanAsync().blockResult(0).booleanValue());
			expectEOFOnReadBooleanAsync(bio);
		}
	}

	@Test
	public void testBitByBitAsyncBigEndian() throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createBigEndian(io)) {
			for (int i = 0; i < bytes.length; ++i) {
				for (int j = 0; j < 8; ++j)
					Assert.assertTrue("bit " + (i * 8 + j) + " expected to be " + bits[i * 8 + 7 - j],
						bits[i * 8 + 7 - j] == bio.readBooleanAsync().blockResult(0).booleanValue());
			}
			expectEOFOnReadBooleanAsync(bio);
		}
	}

	// ---- multi-bit little-endian tests ----------------------------------

	@Test
	public void testLittleEndianBy4Bits() throws Exception { testLittleEndianBy(4); }
	@Test
	public void testLittleEndianBy3Bits() throws Exception { testLittleEndianBy(3); }
	@Test
	public void testLittleEndianBy5Bits() throws Exception { testLittleEndianBy(5); }
	@Test
	public void testLittleEndianBy7Bits() throws Exception { testLittleEndianBy(7); }
	@Test
	public void testLittleEndianBy19Bits() throws Exception { testLittleEndianBy(19); }

	private void testLittleEndianBy(int nbBits) throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createLittleEndian(io)) {
			int bitsPos = 0;
			while (bitsPos + nbBits <= bits.length) {
				long value = bio.readBits(nbBits);
				Assert.assertEquals("bits from " + bitsPos, expectedLittleEndian(bitsPos, nbBits), value);
				bitsPos += nbBits;
			}
			// checked twice: a reader at EOF must keep failing on repeated reads
			expectEOFOnReadBits(bio, nbBits);
			expectEOFOnReadBits(bio, nbBits);
		}
	}

	@Test
	public void testLittleEndianAsyncBy4Bits() throws Exception { testLittleEndianAsyncBy(4); }
	@Test
	public void testLittleEndianAsyncBy3Bits() throws Exception { testLittleEndianAsyncBy(3); }
	@Test
	public void testLittleEndianAsyncBy5Bits() throws Exception { testLittleEndianAsyncBy(5); }
	@Test
	public void testLittleEndianAsyncBy7Bits() throws Exception { testLittleEndianAsyncBy(7); }
	@Test
	public void testLittleEndianAsyncBy19Bits() throws Exception { testLittleEndianAsyncBy(19); }

	private void testLittleEndianAsyncBy(int nbBits) throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createLittleEndian(io)) {
			int bitsPos = 0;
			while (bitsPos + nbBits <= bits.length) {
				long value = bio.readBitsAsync(nbBits).blockResult(0).longValue();
				Assert.assertEquals("bits from " + bitsPos, expectedLittleEndian(bitsPos, nbBits), value);
				bitsPos += nbBits;
			}
			// checked twice: a reader at EOF must keep failing on repeated reads
			expectEOFOnReadBitsAsync(bio, nbBits);
			expectEOFOnReadBitsAsync(bio, nbBits);
		}
	}

	// ---- multi-bit big-endian tests --------------------------------------

	@Test
	public void testBigEndianBy3Bits() throws Exception { testBigEndianBy(3); }
	@Test
	public void testBigEndianBy4Bits() throws Exception { testBigEndianBy(4); }
	@Test
	public void testBigEndianBy5Bits() throws Exception { testBigEndianBy(5); }
	@Test
	public void testBigEndianBy7Bits() throws Exception { testBigEndianBy(7); }
	@Test
	public void testBigEndianBy19Bits() throws Exception { testBigEndianBy(19); }

	private void testBigEndianBy(int nbBits) throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createBigEndian(io)) {
			int bitsPos = 0;
			while (bitsPos + nbBits <= bits.length) {
				long value = bio.readBits(nbBits);
				Assert.assertEquals("bits from " + bitsPos, expectedBigEndian(bitsPos, nbBits), value);
				bitsPos += nbBits;
			}
			// checked twice: a reader at EOF must keep failing on repeated reads
			expectEOFOnReadBits(bio, nbBits);
			expectEOFOnReadBits(bio, nbBits);
		}
	}

	@Test
	public void testBigEndianAsyncBy3Bits() throws Exception { testBigEndianAsyncBy(3); }
	@Test
	public void testBigEndianAsyncBy4Bits() throws Exception { testBigEndianAsyncBy(4); }
	@Test
	public void testBigEndianAsyncBy5Bits() throws Exception { testBigEndianAsyncBy(5); }
	@Test
	public void testBigEndianAsyncBy7Bits() throws Exception { testBigEndianAsyncBy(7); }
	@Test
	public void testBigEndianAsyncBy19Bits() throws Exception { testBigEndianAsyncBy(19); }

	private void testBigEndianAsyncBy(int nbBits) throws Exception {
		try (ByteArrayIO io = new ByteArrayIO(bytes, "test");
			BitIO.Readable bio = createBigEndian(io)) {
			int bitsPos = 0;
			while (bitsPos + nbBits <= bits.length) {
				long value = bio.readBitsAsync(nbBits).blockResult(0).longValue();
				Assert.assertEquals("bits from " + bitsPos, expectedBigEndian(bitsPos, nbBits), value);
				bitsPos += nbBits;
			}
			// checked twice: a reader at EOF must keep failing on repeated reads
			expectEOFOnReadBitsAsync(bio, nbBits);
			expectEOFOnReadBitsAsync(bio, nbBits);
		}
	}
}
/**
 * Copyright 2011-2017 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.compiler.trace;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.asakusafw.compiler.common.JavaName;
import com.asakusafw.runtime.trace.DefaultTraceOperator;
import com.asakusafw.trace.model.TraceSetting;
import com.asakusafw.trace.model.TraceSetting.Mode;
import com.asakusafw.trace.model.Tracepoint;
import com.asakusafw.trace.model.Tracepoint.PortKind;
import com.asakusafw.vocabulary.flow.graph.Connectivity;
import com.asakusafw.vocabulary.flow.graph.FlowElement;
import com.asakusafw.vocabulary.flow.graph.FlowElementDescription;
import com.asakusafw.vocabulary.flow.graph.FlowElementInput;
import com.asakusafw.vocabulary.flow.graph.FlowElementOutput;
import com.asakusafw.vocabulary.flow.graph.FlowElementPort;
import com.asakusafw.vocabulary.flow.graph.FlowElementResolver;
import com.asakusafw.vocabulary.flow.graph.FlowPartDescription;
import com.asakusafw.vocabulary.flow.graph.ObservationCount;
import com.asakusafw.vocabulary.flow.graph.OperatorDescription;
import com.asakusafw.vocabulary.flow.graph.PortConnection;
import com.asakusafw.vocabulary.operator.Trace;

/**
 * Weaves trace operators into trace-points.
 * @since 0.5.1
 */
public class TracepointWeaver {

    /**
     * The input port name of trace operators.
     */
    public static final String INPUT_PORT_NAME = "in"; //$NON-NLS-1$

    /**
     * The output port name of trace operators.
     */
    public static final String OUTPUT_PORT_NAME = "out"; //$NON-NLS-1$

    private static final String FLOWPART_FACTORY_NAME = "create"; //$NON-NLS-1$

    static final Logger LOG = LoggerFactory.getLogger(TracepointWeaver.class);

    // trace settings grouped by the operator class name which declares the trace-point
    private final Map<String, Map<Tracepoint, TraceSetting>> tracepointsByOperatorClass;

    // settings which have not been consumed by edit() yet
    private final Map<Tracepoint, TraceSetting> rest;

    /**
     * Creates a new instance.
     * @param settings target settings
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public TracepointWeaver(Collection<? extends TraceSetting> settings) {
        if (settings == null) {
            throw new IllegalArgumentException("settings must not be null"); //$NON-NLS-1$
        }
        HashMap<Tracepoint, TraceSetting> all = new HashMap<>();
        Map<String, Map<Tracepoint, TraceSetting>> map = new HashMap<>();
        for (TraceSetting setting : settings) {
            all.put(setting.getTracepoint(), setting);
            Map<Tracepoint, TraceSetting> entry = map.get(setting.getTracepoint().getOperatorClassName());
            if (entry == null) {
                // FIX: was "entry = all;", which aliased every per-class bucket to the single
                // shared "all" map, so each operator class appeared to own every trace-point
                // and rest.remove(...) in edit() silently mutated the per-class buckets
                entry = new HashMap<>();
                map.put(setting.getTracepoint().getOperatorClassName(), entry);
            }
            entry.put(setting.getTracepoint(), setting);
        }
        this.tracepointsByOperatorClass = map;
        this.rest = all;
    }

    /**
     * Weaves trace-points into {@link FlowElement}.
     * @param element the target element
     * @return {@code true} if the target element is modified, otherwise {@code false}
     * @throws IllegalArgumentException if some parameters were {@code null}
     */
    public boolean edit(FlowElement element) {
        if (element == null) {
            throw new IllegalArgumentException("element must not be null"); //$NON-NLS-1$
        }
        FlowElementDescription description = element.getDescription();
        switch (description.getKind()) {
        case OPERATOR:
            return edit(element, (OperatorDescription) description);
        case FLOW_COMPONENT:
            return edit(element, (FlowPartDescription) description);
        default:
            // other element kinds (e.g. pseudo elements) never carry trace-points
            return false;
        }
    }

    // weaves trace-points declared on a user operator method
    private boolean edit(FlowElement element, OperatorDescription description) {
        String className = description.getDeclaration().getDeclaring().getName();
        Map<Tracepoint, TraceSetting> settings = tracepointsByOperatorClass.get(className);
        if (settings == null || settings.isEmpty()) {
            return false;
        }
        return edit(element, settings, className, normalizeMethodName(description.getDeclaration().getName()));
    }

    // converts a declared method name into its canonical Java member form
    private String normalizeMethodName(String name) {
        return JavaName.of(name).toMemberName();
    }

    // weaves trace-points declared on a flow-part; flow-parts are addressed via their factory method
    private boolean edit(FlowElement element, FlowPartDescription description) {
        String className = description.getFlowGraph().getDescription().getName();
        Map<Tracepoint, TraceSetting> settings = tracepointsByOperatorClass.get(className);
        if (settings == null || settings.isEmpty()) {
            return false;
        }
        return edit(element, settings, className, FLOWPART_FACTORY_NAME);
    }

    // weaves a trace operator into every input/output port which matches a configured trace-point
    private boolean edit(
            FlowElement element,
            Map<Tracepoint, TraceSetting> settings,
            String className, String methodName) {
        boolean modified = false;
        for (FlowElementInput port : element.getInputPorts()) {
            Tracepoint point = new Tracepoint(className, methodName, PortKind.INPUT, port.getDescription().getName());
            if (settings.containsKey(point)) {
                edit(port, settings.get(point));
                rest.remove(point);
                modified = true;
            }
        }
        for (FlowElementOutput port : element.getOutputPorts()) {
            Tracepoint point = new Tracepoint(className, methodName, PortKind.OUTPUT, port.getDescription().getName());
            if (settings.containsKey(point)) {
                edit(port, settings.get(point));
                rest.remove(point);
                modified = true;
            }
        }
        return modified;
    }

    // weaves a trace operator just before the target input port
    private void edit(FlowElementInput port, TraceSetting setting) {
        LOG.debug("weaving tracepoint ({}): {}", setting.getTracepoint(), port); //$NON-NLS-1$
        Collection<FlowElementOutput> opposites = port.getOpposites();
        edit(setting, port, opposites, Collections.singleton(port));
    }

    // weaves a trace operator just after the target output port
    private void edit(FlowElementOutput port, TraceSetting setting) {
        LOG.debug("weaving tracepoint ({}): {}", setting.getTracepoint(), port); //$NON-NLS-1$
        Collection<FlowElementInput> opposites = port.getOpposites();
        edit(setting, port, Collections.singleton(port), opposites);
    }

    // builds the trace operator element and re-wires the graph around the target port;
    // STRICT/IN_ORDER splice the operator inline, OUT_OF_ORDER only taps the upstream side
    private void edit(
            TraceSetting setting,
            FlowElementPort port,
            Collection<FlowElementOutput> upstreams,
            Collection<FlowElementInput> downstreams) {
        assert port != null;
        assert upstreams != null;
        assert downstreams != null;
        OperatorDescription.Builder builder = new OperatorDescription.Builder(Trace.class);
        builder.declare(DefaultTraceOperator.class, DefaultTraceOperator.class, "trace"); //$NON-NLS-1$
        builder.declareParameter(Object.class);
        builder.declareParameter(String.class);
        builder.addInput(INPUT_PORT_NAME, port.getDescription().getDataType());
        builder.addOutput(OUTPUT_PORT_NAME, port.getDescription().getDataType());
        builder.addParameter("header", String.class, setting.getTracepoint().toString()); //$NON-NLS-1$
        builder.addAttribute(Connectivity.OPTIONAL);
        if (setting.getMode() == Mode.STRICT) {
            builder.addAttribute(ObservationCount.EXACTLY_ONCE);
            port.disconnectAll();
            FlowElementResolver resolver = builder.toResolver();
            FlowElementInput input = resolver.getInput(INPUT_PORT_NAME);
            for (FlowElementOutput upstream : upstreams) {
                PortConnection.connect(upstream, input);
            }
            FlowElementOutput output = resolver.getOutput(OUTPUT_PORT_NAME);
            for (FlowElementInput downstream : downstreams) {
                PortConnection.connect(output, downstream);
            }
        } else if (setting.getMode() == Mode.IN_ORDER) {
            builder.addAttribute(ObservationCount.AT_LEAST_ONCE);
            port.disconnectAll();
            FlowElementResolver resolver = builder.toResolver();
            FlowElementInput input = resolver.getInput(INPUT_PORT_NAME);
            for (FlowElementOutput upstream : upstreams) {
                PortConnection.connect(upstream, input);
            }
            FlowElementOutput output = resolver.getOutput(OUTPUT_PORT_NAME);
            for (FlowElementInput downstream : downstreams) {
                PortConnection.connect(output, downstream);
            }
        } else if (setting.getMode() == Mode.OUT_OF_ORDER) {
            builder.addAttribute(ObservationCount.AT_LEAST_ONCE);
            // original connections are kept; the trace operator only observes the upstream side
            FlowElementResolver resolver = builder.toResolver();
            FlowElementInput input = resolver.getInput(INPUT_PORT_NAME);
            for (FlowElementOutput upstream : upstreams) {
                PortConnection.connect(upstream, input);
            }
        }
    }
}
/*
 * Copyright 2016 Igor Maznitsa.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.igormaznitsa.mvngolang.utils;

import static com.igormaznitsa.mvngolang.utils.IOUtils.closeSilently;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.Locale;
import java.util.zip.GZIPInputStream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.maven.plugin.logging.Log;

/**
 * Utility methods to unpack ZIP, TAR.GZ and other archive formats supported by
 * commons-compress into a folder.
 */
public final class UnpackUtils {

  private static final ArchiveStreamFactory ARCHIVE_STREAM_FACTORY = new ArchiveStreamFactory();

  private UnpackUtils() {
  }

  /**
   * Unpacks an archive file (ZIP, TAR.GZ, or any format recognized by
   * {@link ArchiveStreamFactory}) into the destination folder.
   *
   * @param logger maven logger to report progress, must not be null
   * @param folder if non-null, only entries under this folder inside the archive are
   *        extracted (the folder prefix is stripped from the target paths)
   * @param archiveFile the archive to unpack, must not be null
   * @param destinationFolder target folder for extracted entries, must not be null
   * @param tryMakeAllExecutable if true, every extracted file is marked executable
   * @return number of unpacked files (directories are not counted)
   * @throws IOException if the archive can't be read, an entry is unsafe, or a file can't be written
   */
  public static int unpackFileToFolder(@Nonnull final Log logger, @Nullable final String folder,
      @Nonnull final File archiveFile, @Nonnull final File destinationFolder,
      final boolean tryMakeAllExecutable) throws IOException {
    final String normalizedName = archiveFile.getName().toLowerCase(Locale.ENGLISH);

    final ArchEntryGetter entryGetter;

    boolean modeZipFile = false;

    final ZipFile theZipFile;
    final ArchiveInputStream archInputStream;
    if (normalizedName.endsWith(".zip")) {
      logger.debug("Detected ZIP archive");

      modeZipFile = true;

      theZipFile = new ZipFile(archiveFile);
      archInputStream = null;
      entryGetter = new ArchEntryGetter() {
        private final Enumeration<ZipArchiveEntry> iterator = theZipFile.getEntries();

        @Override
        @Nullable
        public ArchiveEntry getNextEntry() throws IOException {
          ArchiveEntry result = null;
          if (this.iterator.hasMoreElements()) {
            result = this.iterator.nextElement();
          }
          return result;
        }
      };
    } else {
      theZipFile = null;
      final InputStream in = new BufferedInputStream(new FileInputStream(archiveFile));
      try {
        if (normalizedName.endsWith(".tar.gz")) {
          logger.debug("Detected TAR.GZ archive");
          archInputStream = new TarArchiveInputStream(new GZIPInputStream(in));
          entryGetter = ((TarArchiveInputStream) archInputStream)::getNextTarEntry;
        } else {
          logger.debug("Detected OTHER archive");
          archInputStream = ARCHIVE_STREAM_FACTORY.createArchiveInputStream(in);
          logger.debug("Created archive stream : " + archInputStream.getClass().getName());
          entryGetter = archInputStream::getNextEntry;
        }
      } catch (ArchiveException ex) {
        // the raw stream must be closed here because the archive stream was never created
        closeSilently(in);
        throw new IOException("Can't recognize or read archive file : " + archiveFile, ex);
      } catch (CantReadArchiveEntryException ex) {
        closeSilently(in);
        throw new IOException("Can't read entry from archive file : " + archiveFile, ex);
      }
    }

    try {
      final String normalizedFolder =
          folder == null ? null : FilenameUtils.normalize(folder, true) + '/';

      int unpackedFilesCounter = 0;
      while (true) {
        final ArchiveEntry entry = entryGetter.getNextEntry();
        if (entry == null) {
          break;
        }

        final String normalizedPath = FilenameUtils.normalize(entry.getName(), true);
        if (normalizedPath == null) {
          // FilenameUtils.normalize() returns null when the path escapes its root (e.g. "../..");
          // rejecting it prevents both an NPE below and a zip-slip style path traversal attack
          throw new IOException("Detected unsafe archive entry path : " + entry.getName());
        }
        logger.debug("Detected archive entry : " + normalizedPath);

        if (normalizedFolder == null || normalizedPath.startsWith(normalizedFolder)) {
          final File targetFile = new File(destinationFolder, normalizedFolder == null
              ? normalizedPath : normalizedPath.substring(normalizedFolder.length()));
          if (entry.isDirectory()) {
            logger.debug("Folder : " + normalizedPath);
            if (!targetFile.exists() && !targetFile.mkdirs()) {
              throw new IOException("Can't create folder " + targetFile);
            }
          } else {
            final File parent = targetFile.getParentFile();
            if (parent != null && !parent.isDirectory() && !parent.mkdirs()) {
              throw new IOException("Can't create folder : " + parent);
            }

            try (final FileOutputStream fos = new FileOutputStream(targetFile)) {
              if (modeZipFile) {
                logger.debug("Unpacking ZIP entry : " + normalizedPath);

                final InputStream zipEntryInStream =
                    theZipFile.getInputStream((ZipArchiveEntry) entry);
                try {
                  // length check guards against truncated/corrupted entries
                  if (IOUtils.copy(zipEntryInStream, fos) != entry.getSize()) {
                    throw new IOException(
                        "Can't unpack file, illegal unpacked length : " + entry.getName());
                  }
                } finally {
                  closeSilently(zipEntryInStream);
                }
              } else {
                logger.debug("Unpacking archive entry : " + normalizedPath);

                if (!archInputStream.canReadEntryData(entry)) {
                  throw new IOException("Can't read archive entry data : " + normalizedPath);
                }
                if (IOUtils.copy(archInputStream, fos) != entry.getSize()) {
                  throw new IOException(
                      "Can't unpack file, illegal unpacked length : " + entry.getName());
                }
              }
            }

            if (tryMakeAllExecutable) {
              try {
                // failure to set the flag is only logged; some file systems don't support it
                if (!targetFile.setExecutable(true, true)) {
                  logger.debug("Can't make file executable : " + targetFile);
                }
              } catch (SecurityException ex) {
                throw new IOException(
                    "Can't make file executable for security reasons : " + targetFile, ex);
              }
            }
            unpackedFilesCounter++;
          }
        } else {
          logger.debug("Archive entry " + normalizedPath + " ignored");
        }
      }
      return unpackedFilesCounter;
    } finally {
      // exactly one of the two is non-null depending on the detected archive kind
      closeSilently(theZipFile);
      closeSilently(archInputStream);
    }
  }

  // uniform iterator abstraction over the two archive reading modes (ZipFile vs stream)
  private interface ArchEntryGetter {

    @Nullable
    ArchiveEntry getNextEntry() throws IOException;
  }

  /**
   * Wraps a checked exception thrown while reading an archive entry so it can cross
   * a functional-interface boundary.
   */
  public static class CantReadArchiveEntryException extends RuntimeException {

    private static final long serialVersionUID = 1989670574345144082L;

    public CantReadArchiveEntryException(@Nullable final Throwable cause) {
      super("Can't read archive entry for exception", cause);
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.common.types;

import static org.apache.drill.common.types.TypeProtos.DataMode.REPEATED;
import static org.apache.drill.common.types.TypeProtos.MinorType.*;

import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;

import com.google.protobuf.TextFormat;

/**
 * Static helpers for classifying, comparing and constructing Drill
 * {@link MajorType}/{@link MinorType} values (protobuf-generated types).
 */
public class Types {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(Types.class);

  // commonly used pre-built MajorType constants
  public static final MajorType NULL = required(MinorType.NULL);
  public static final MajorType LATE_BIND_TYPE = optional(MinorType.LATE);
  public static final MajorType REQUIRED_BIT = required(MinorType.BIT);
  public static final MajorType OPTIONAL_BIT = optional(MinorType.BIT);

  // how values of a type may be compared to each other
  public static enum Comparability {
    UNKNOWN, NONE, EQUAL, ORDERED;
  }

  /** Returns true for container types (LIST, MAP). */
  public static boolean isComplex(MajorType type) {
    switch(type.getMinorType()) {
    case LIST:
    case MAP:
      return true;
    }
    return false;
  }

  /** Returns true when the type's data mode is REPEATED (array-valued). */
  public static boolean isRepeated(MajorType type) {
    return type.getMode() == REPEATED ;
  }

  /** Returns true for scalar numeric types; repeated types are never considered numeric. */
  public static boolean isNumericType(MajorType type) {
    if (type.getMode() == REPEATED) {
      return false;
    }
    switch(type.getMinorType()) {
    case BIGINT:
    case DECIMAL38SPARSE:
    case DECIMAL38DENSE:
    case DECIMAL28SPARSE:
    case DECIMAL28DENSE:
    case DECIMAL18:
    case DECIMAL9:
    case FLOAT4:
    case FLOAT8:
    case INT:
    case MONEY:
    case SMALLINT:
    case TINYINT:
    case UINT1:
    case UINT2:
    case UINT4:
    case UINT8:
      return true;
    default:
      return false;
    }
  }

  /**
   * Maps a Drill type to the corresponding {@link java.sql.Types} constant.
   * Repeated types always map to ARRAY regardless of the minor type.
   */
  public static int getSqlType(MajorType type) {
    if (type.getMode() == DataMode.REPEATED) {
      return java.sql.Types.ARRAY;
    }
    switch(type.getMinorType()) {
    case BIGINT:
      return java.sql.Types.BIGINT;
    case BIT:
      return java.sql.Types.BOOLEAN;
    case DATE:
      return java.sql.Types.DATE;
    case DECIMAL9:
    case DECIMAL18:
    case DECIMAL28DENSE:
    case DECIMAL28SPARSE:
    case DECIMAL38DENSE:
    case DECIMAL38SPARSE:
      return java.sql.Types.DECIMAL;
    case FIXED16CHAR:
      return java.sql.Types.NCHAR;
    case FIXEDBINARY:
      return java.sql.Types.BINARY;
    case FIXEDCHAR:
      return java.sql.Types.NCHAR;
    case FLOAT4:
      return java.sql.Types.FLOAT;
    case FLOAT8:
      return java.sql.Types.DOUBLE;
    case INT:
      return java.sql.Types.INTEGER;
    case MAP:
      return java.sql.Types.STRUCT;
    case MONEY:
      return java.sql.Types.DECIMAL;
    // NOTE(review): NULL, INTERVAL*, and LATE fall through to SMALLINT here —
    // looks intentional (legacy mapping) but worth confirming
    case NULL:
    case INTERVAL:
    case INTERVALYEAR:
    case INTERVALDAY:
    case LATE:
    case SMALLINT:
      return java.sql.Types.SMALLINT;
    case TIME:
      return java.sql.Types.TIME;
    case TIMESTAMPTZ:
    case TIMESTAMP:
      return java.sql.Types.TIMESTAMP;
    // NOTE(review): TIMETZ maps to DATE, not TIME — suspicious, confirm before changing
    case TIMETZ:
      return java.sql.Types.DATE;
    case TINYINT:
      return java.sql.Types.TINYINT;
    case UINT1:
      return java.sql.Types.TINYINT;
    case UINT2:
      return java.sql.Types.SMALLINT;
    case UINT4:
      return java.sql.Types.INTEGER;
    case UINT8:
      return java.sql.Types.BIGINT;
    case VAR16CHAR:
      return java.sql.Types.NVARCHAR;
    case VARBINARY:
      return java.sql.Types.VARBINARY;
    case VARCHAR:
      return java.sql.Types.NVARCHAR;
    default:
      throw new UnsupportedOperationException();
    }
  }

  /** Returns true for the unsigned integer minor types. */
  public static boolean isUnSigned(MajorType type) {
    switch(type.getMinorType()) {
    case UINT1:
    case UINT2:
    case UINT4:
    case UINT8:
      return true;
    default:
      return false;
    }
  }

  /**
   * Returns true when reading a value of this type requires a holder object
   * (i.e. it is not one of the simple fixed scalar types, or it is repeated).
   */
  public static boolean usesHolderForGet(MajorType type) {
    if (type.getMode() == REPEATED) {
      return true;
    }
    switch(type.getMinorType()) {
    case BIGINT:
    case FLOAT4:
    case FLOAT8:
    case INT:
    case MONEY:
    case SMALLINT:
    case TINYINT:
    case UINT1:
    case UINT2:
    case UINT4:
    case UINT8:
    case INTERVALYEAR:
    case DATE:
    case TIME:
    case TIMESTAMP:
      return false;
    default:
      return true;
    }
  }

  /** Returns true unless the type is one of the variable-width types (VARBINARY/VAR16CHAR/VARCHAR). */
  public static boolean isFixedWidthType(MajorType type) {
    switch(type.getMinorType()) {
    case VARBINARY:
    case VAR16CHAR:
    case VARCHAR:
      return false;
    default:
      return true;
    }
  }

  /** Returns true for non-repeated character string types. */
  public static boolean isStringScalarType(MajorType type) {
    if (type.getMode() == REPEATED) {
      return false;
    }
    switch(type.getMinorType()) {
    case FIXEDCHAR:
    case FIXED16CHAR:
    case VARCHAR:
    case VAR16CHAR:
      return true;
    default:
      return false;
    }
  }

  /** Returns true for non-repeated binary types. */
  public static boolean isBytesScalarType(MajorType type) {
    if (type.getMode() == REPEATED) {
      return false;
    }
    switch(type.getMinorType()) {
    case FIXEDBINARY:
    case VARBINARY:
      return true;
    default:
      return false;
    }
  }

  /**
   * Determines how values of the given type may be compared:
   * repeated types are not comparable, LATE is unknown, MAP is not comparable,
   * BIT only supports equality, everything else is fully ordered.
   */
  public static Comparability getComparability(MajorType type) {
    if (type.getMode() == REPEATED) {
      return Comparability.NONE;
    }
    if (type.getMinorType() == MinorType.LATE) {
      return Comparability.UNKNOWN;
    }
    switch(type.getMinorType()) {
    // unreachable: the if-check above already returned for LATE
    case LATE:
      return Comparability.UNKNOWN;
    case MAP:
      return Comparability.NONE;
    case BIT:
      return Comparability.EQUAL;
    default:
      return Comparability.ORDERED;
    }
  }

  /**
   * Loose type equality: VARBINARY and VARCHAR are treated as interchangeable minor types,
   * and when {@code allowNullSwap} is set, OPTIONAL and REQUIRED modes match each other.
   */
  public static boolean softEquals(MajorType a, MajorType b, boolean allowNullSwap) {
    if (a.getMinorType() != b.getMinorType()) {
      if (
          (a.getMinorType() == MinorType.VARBINARY && b.getMinorType() == MinorType.VARCHAR) ||
          (b.getMinorType() == MinorType.VARBINARY && a.getMinorType() == MinorType.VARCHAR)
          ) {
        // fall through;
      } else {
        return false;
      }
    }
    if(allowNullSwap) {
      switch (a.getMode()) {
      case OPTIONAL:
      case REQUIRED:
        switch (b.getMode()) {
        case OPTIONAL:
        case REQUIRED:
          return true;
        }
      }
    }
    return a.getMode() == b.getMode();
  }

  /** Returns true if the type is the late-binding placeholder type. */
  public static boolean isLateBind(MajorType type) {
    return type.getMinorType() == MinorType.LATE;
  }

  /** Builds a MajorType from a minor type and an explicit data mode. */
  public static MajorType withMode(MinorType type, DataMode mode) {
    return MajorType.newBuilder().setMode(mode).setMinorType(type).build();
  }

  /** Builds a MajorType carrying decimal scale and precision information. */
  public static MajorType withScaleAndPrecision(MinorType type, DataMode mode, int scale, int precision) {
    return MajorType.newBuilder().setMinorType(type).setMode(mode).setScale(scale).setPrecision(precision).build();
  }

  /** Builds a REQUIRED-mode MajorType. */
  public static MajorType required(MinorType type) {
    return MajorType.newBuilder().setMode(DataMode.REQUIRED).setMinorType(type).build();
  }

  /** Builds a REPEATED-mode MajorType. */
  public static MajorType repeated(MinorType type) {
    return MajorType.newBuilder().setMode(REPEATED).setMinorType(type).build();
  }

  /** Builds an OPTIONAL-mode MajorType. */
  public static MajorType optional(MinorType type) {
    return MajorType.newBuilder().setMode(DataMode.OPTIONAL).setMinorType(type).build();
  }

  /** Returns a copy of the original type with its minor type replaced, preserving the mode. */
  public static MajorType overrideMinorType(MajorType originalMajorType, MinorType overrideMinorType) {
    switch (originalMajorType.getMode()) {
    case REPEATED:
      return repeated(overrideMinorType);
    case OPTIONAL:
      return optional(overrideMinorType);
    case REQUIRED:
      return required(overrideMinorType);
    default:
      throw new UnsupportedOperationException();
    }
  }

  /** Returns a copy of the original type with its mode replaced, preserving scale/precision. */
  public static MajorType overrideMode(MajorType originalMajorType, DataMode overrideMode) {
    return withScaleAndPrecision(originalMajorType.getMinorType(), overrideMode, originalMajorType.getScale(), originalMajorType.getPrecision());
  }

  /** Parses a type name into a REQUIRED-mode MajorType. */
  public static MajorType getMajorTypeFromName(String typeName) {
    return getMajorTypeFromName(typeName, DataMode.REQUIRED);
  }

  /**
   * Parses a textual type name (e.g. "int", "varchar", "decimal") into a MajorType
   * with the given mode.
   * @throws UnsupportedOperationException for unrecognized names
   */
  public static MajorType getMajorTypeFromName(String typeName, DataMode mode) {
    switch (typeName) {
    case "bool":
    case "boolean":
      return withMode(MinorType.BIT, mode);
    case "tinyint":
      return withMode(MinorType.TINYINT, mode);
    case "uint1":
      return withMode(MinorType.UINT1, mode);
    case "smallint":
      return withMode(MinorType.SMALLINT, mode);
    case "uint2":
      return withMode(MinorType.UINT2, mode);
    case "int":
      return withMode(MinorType.INT, mode);
    case "uint4":
      return withMode(MinorType.UINT4, mode);
    case "bigint":
      return withMode(MinorType.BIGINT, mode);
    case "uint8":
      return withMode(MinorType.UINT8, mode);
    case "float":
      return withMode(MinorType.FLOAT4, mode);
    case "double":
      return withMode(MinorType.FLOAT8, mode);
    case "decimal":
      return withMode(MinorType.DECIMAL38SPARSE, mode);
    case "utf8":
    case "varchar":
      return withMode(MinorType.VARCHAR, mode);
    case "utf16":
    case "string":
    case "var16char":
      return withMode(MinorType.VAR16CHAR, mode);
    case "date":
      return withMode(MinorType.DATE, mode);
    case "time":
      return withMode(MinorType.TIME, mode);
    case "binary":
      return withMode(MinorType.VARBINARY, mode);
    case "json":
      return withMode(MinorType.LATE, mode);
    default:
      throw new UnsupportedOperationException("Could not determine type: " + typeName);
    }
  }

  /**
   * Inverse of {@link #getMajorTypeFromName(String)}: maps a minor type back to its
   * canonical textual name. (Parameter is a MinorType despite the method name.)
   * @throws DrillRuntimeException for minor types without a name mapping
   */
  public static String getNameOfMajorType(MinorType type) {
    switch (type) {
    case BIT:
      return "bool";
    case TINYINT:
      return "tinyint";
    case UINT1:
      return "uint1";
    case SMALLINT:
      return "smallint";
    case UINT2:
      return "uint2";
    case INT:
      return "int";
    case UINT4:
      return "uint4";
    case BIGINT:
      return "bigint";
    case UINT8:
      return "uint8";
    case FLOAT4:
      return "float";
    case FLOAT8:
      return "double";
    case DECIMAL38SPARSE:
      return "decimal";
    case VARCHAR:
      return "varchar";
    case VAR16CHAR:
      return "utf16";
    case DATE:
      return "date";
    case TIME:
      return "time";
    case VARBINARY:
      return "binary";
    case LATE:
      return "json";
    default:
      throw new DrillRuntimeException("Unrecognized type " + type);
    }
  }

  /** Null-safe debug rendering of a MajorType using protobuf's short debug format. */
  public static String toString(MajorType type) {
    return type != null ? "MajorType[" + TextFormat.shortDebugString(type) + "]" : "null";
  }
}
package cyclops.instances.data;

import com.oath.cyclops.hkt.DataWitness.vector;
import com.oath.cyclops.hkt.Higher;
import cyclops.arrow.Cokleisli;
import cyclops.arrow.Kleisli;
import cyclops.arrow.MonoidK;
import cyclops.arrow.MonoidKs;
import cyclops.control.Either;
import cyclops.control.Maybe;
import cyclops.control.Option;
import cyclops.data.Vector;
import cyclops.data.tuple.Tuple2;
import cyclops.function.Monoid;
import cyclops.hkt.Active;
import cyclops.hkt.Coproduct;
import cyclops.hkt.Nested;
import cyclops.hkt.Product;
import cyclops.typeclasses.InstanceDefinitions;
import cyclops.typeclasses.Pure;
import cyclops.typeclasses.comonad.Comonad;
import cyclops.typeclasses.foldable.Foldable;
import cyclops.typeclasses.foldable.Unfoldable;
import cyclops.typeclasses.functor.Functor;
import cyclops.typeclasses.monad.Applicative;
import cyclops.typeclasses.monad.Monad;
import cyclops.typeclasses.monad.MonadPlus;
import cyclops.typeclasses.monad.MonadRec;
import cyclops.typeclasses.monad.MonadZero;
import cyclops.typeclasses.monad.Traverse;
import cyclops.typeclasses.monad.TraverseByTraverse;
import lombok.AllArgsConstructor;
import lombok.experimental.UtilityClass;
import lombok.experimental.Wither;

import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;

import static cyclops.data.Vector.narrowK;

/**
 * Companion class for creating Type Class instances for working with Vector's
 * @author johnmcclean
 *
 */
@UtilityClass
public class VectorInstances {

  // Kleisli arrow into the vector higher-kinded witness (T -> Vector<T>)
  public static <T> Kleisli<vector,Vector<T>,T> kindKleisli(){
    return Kleisli.of(VectorInstances.monad(), Vector::widen);
  }

  // Cokleisli arrow out of the vector witness (Vector<T> -> T context)
  public static <T> Cokleisli<vector,T,Vector<T>> kindCokleisli(){
    return Cokleisli.of(Vector::narrowK);
  }

  // wraps a Vector of higher-kinded values as a Nested structure
  public static <W1,T> Nested<vector,W1,T> nested(Vector<Higher<W1,T>> nested, InstanceDefinitions<W1> def2){
    return Nested.of(nested, VectorInstances.definitions(),def2);
  }

  // pairs a Vector with another Active context as a Product
  public static <W1,T> Product<vector,W1,T> product(Vector<T> l, Active<W1,T> active){
    return Product.of(allTypeclasses(l),active);
  }

  // places a Vector on the right side of a Coproduct
  public static <W1,T> Coproduct<W1,vector,T> coproduct(Vector<T> l, InstanceDefinitions<W1> def2){
    return Coproduct.right(l,def2, VectorInstances.definitions());
  }

  // bundles the Vector with all of its type-class instances
  public static <T> Active<vector,T> allTypeclasses(Vector<T> l){
    return Active.of(l, VectorInstances.definitions());
  }

  // monadic map into another context, producing a Nested structure
  public static <W2,R,T> Nested<vector,W2,R> mapM(Vector<T> l,Function<? super T,? extends Higher<W2,R>> fn, InstanceDefinitions<W2> defs){
    return Nested.of(l.map(fn), VectorInstances.definitions(), defs);
  }

  /**
   * Returns the full set of type-class instance definitions for Vector;
   * all instances are backed by the single shared {@link #INSTANCE}.
   */
  public static InstanceDefinitions<vector> definitions(){
    return new InstanceDefinitions<vector>() {
      @Override
      public <T, R> Functor<vector> functor() {
        return INSTANCE;
      }

      @Override
      public <T> Pure<vector> unit() {
        return INSTANCE;
      }

      @Override
      public <T, R> Applicative<vector> applicative() {
        return INSTANCE;
      }

      @Override
      public <T, R> Monad<vector> monad() {
        return INSTANCE;
      }

      @Override
      public <T, R> Option<MonadZero<vector>> monadZero() {
        return Option.some(INSTANCE);
      }

      @Override
      public <T> Option<MonadPlus<vector>> monadPlus() {
        return Option.some(INSTANCE);
      }

      @Override
      public <T> MonadRec<vector> monadRec() {
        return INSTANCE;
      }

      @Override
      public <T> Option<MonadPlus<vector>> monadPlus(MonoidK<vector> m) {
        return Option.some(VectorInstances.monadPlus(m));
      }

      @Override
      public <C2, T> Traverse<vector> traverse() {
        return INSTANCE;
      }

      @Override
      public <T> Foldable<vector> foldable() {
        return INSTANCE;
      }

      @Override
      public <T> Option<Comonad<vector>> comonad() {
        // Vector has no Comonad instance
        return Option.none();
      }

      @Override
      public <T> Option<Unfoldable<vector>> unfoldable() {
        return Option.some(INSTANCE);
      }
    };
  }

  // single shared implementation of every supported type class
  private final static VectorTypeClasses INSTANCE = new VectorTypeClasses();

  /**
   * Implements all Vector type classes in one object; @Wither + @AllArgsConstructor
   * (Lombok) allow swapping in a different MonoidK for MonadPlus.
   */
  @AllArgsConstructor
  @Wither
  public static class VectorTypeClasses implements MonadPlus<vector>,
      MonadRec<vector>,
      TraverseByTraverse<vector>,
      Foldable<vector>,
      Unfoldable<vector>{

    private final MonoidK<vector> monoidK;

    public VectorTypeClasses(){
      // default MonadPlus combination is vector concatenation
      monoidK = MonoidKs.vectorConcat();
    }

    @Override
    public <T> Higher<vector, T> filter(Predicate<? super T> predicate, Higher<vector, T> ds) {
      return narrowK(ds).filter(predicate);
    }

    @Override
    public <T, R> Higher<vector, Tuple2<T, R>> zip(Higher<vector, T> fa, Higher<vector, R> fb) {
      return narrowK(fa).zip(narrowK(fb));
    }

    @Override
    public <T1, T2, R> Higher<vector, R> zip(Higher<vector, T1> fa, Higher<vector, T2> fb, BiFunction<? super T1, ? super T2, ? extends R> f) {
      return narrowK(fa).zip(narrowK(fb),f);
    }

    @Override
    public <T> MonoidK<vector> monoid() {
      return monoidK;
    }

    @Override
    public <T, R> Higher<vector, R> flatMap(Function<? super T, ? extends Higher<vector, R>> fn, Higher<vector, T> ds) {
      return narrowK(ds).flatMap(i->narrowK(fn.apply(i)));
    }

    @Override
    public <T, R> Higher<vector, R> ap(Higher<vector, ? extends Function<T, R>> fn, Higher<vector, T> apply) {
      // applicative apply implemented via zip: pair each value with a function and apply it
      return narrowK(apply)
          .zip(narrowK(fn),(a,b)->b.apply(a));
    }

    @Override
    public <T> Higher<vector, T> unit(T value) {
      return Vector.of(value);
    }

    @Override
    public <T, R> Higher<vector, R> map(Function<? super T, ? extends R> fn, Higher<vector, T> ds) {
      return narrowK(ds).map(fn);
    }

    @Override
    public <T, R> Higher<vector, R> tailRec(T initial, Function<? super T, ? extends Higher<vector, ? extends Either<T, R>>> fn) {
      // stack-safe monadic recursion delegated to Vector.tailRec
      return Vector.tailRec(initial,i->narrowK(fn.apply(i)));
    }

    @Override
    public <C2, T, R> Higher<C2, Higher<vector, R>> traverseA(Applicative<C2> ap, Function<? super T, ? extends Higher<C2, R>> fn, Higher<vector, T> ds) {
      Vector<T> v = narrowK(ds);
      // fold across the vector, combining effects with ap.zip and accumulating results
      // via plus; NOTE(review): plus prepends, so verify expected element ordering
      return v.<Higher<C2, Higher<vector,R>>>foldLeft(ap.unit(Vector.<R>empty()),
          (a, b) -> ap.zip(fn.apply(b), a, (sn, vec) -> narrowK(vec).plus(sn)));
    }

    @Override
    public <T, R> R foldMap(Monoid<R> mb, Function<? super T, ? extends R> fn, Higher<vector, T> ds) {
      Vector<T> x = narrowK(ds);
      return x.foldLeft(mb.zero(),(a,b)->mb.apply(a,fn.apply(b)));
    }

    @Override
    public <T, R> Higher<vector, Tuple2<T, Long>> zipWithIndex(Higher<vector, T> ds) {
      return narrowK(ds).zipWithIndex();
    }

    @Override
    public <T> T foldRight(Monoid<T> monoid, Higher<vector, T> ds) {
      return narrowK(ds).foldRight(monoid);
    }

    @Override
    public <T> T foldLeft(Monoid<T> monoid, Higher<vector, T> ds) {
      return narrowK(ds).foldLeft(monoid);
    }

    @Override
    public <R, T> Higher<vector, R> unfold(T b, Function<? super T, Option<Tuple2<R, T>>> fn) {
      return Vector.unfold(b,fn);
    }
  }

  // the accessors below all expose the shared INSTANCE under specific type-class views

  public static Unfoldable<vector> unfoldable(){
    return INSTANCE;
  }

  // MonadPlus with a caller-supplied combination monoid (uses Lombok-generated withMonoidK)
  public static MonadPlus<vector> monadPlus(MonoidK<vector> m){
    return INSTANCE.withMonoidK(m);
  }

  public static <T,R> Applicative<vector> zippingApplicative(){
    return INSTANCE;
  }

  public static <T,R>Functor<vector> functor(){
    return INSTANCE;
  }

  public static <T,R> Monad<vector> monad(){
    return INSTANCE;
  }

  public static <T,R> MonadZero<vector> monadZero(){
    return INSTANCE;
  }

  public static <T> MonadPlus<vector> monadPlus(){
    return INSTANCE;
  }

  public static <T,R> MonadRec<vector> monadRec(){
    return INSTANCE;
  }

  public static <C2,T> Traverse<vector> traverse(){
    return INSTANCE;
  }

  public static <T,R> Foldable<vector> foldable(){
    return INSTANCE;
  }
}
package com.robertkoszewski.dsl.ide.contentassist.antlr.internal;

// Hack: Use our own Lexer superclass by means of import.
// Currently there is no other way to specify the superclass for the lexer.
import org.eclipse.xtext.ide.editor.contentassist.antlr.internal.Lexer;
import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;

// ANTLR3-generated lexer for the QuickUI content-assist grammar (InternalQuickUI.g).
// NOTE(review): machine-generated code — do not edit by hand; regenerate from the
// grammar instead. Comments below were added for readability only.
@SuppressWarnings("all")
public class InternalQuickUILexer extends Lexer {
    // Token type constants assigned by the ANTLR code generator.
    // Keyword tokens: T__11 'false', T__12 'use', T__13 ':', T__14 '.', T__15 '{',
    // T__16 '}', T__17 'Menu', T__18 'Content', T__19 'Row', T__20 'Label',
    // T__21 'Enabled', T__22 'or', T__23 'and', T__24 'if', T__25 'is',
    // T__26 'empty', T__27 'checked', T__28 'Checked', T__29 'OnClick',
    // T__30 'Main', T__31 'not', T__32 'true'.
    public static final int RULE_STRING=5;
    public static final int RULE_SL_COMMENT=8;
    public static final int T__19=19;
    public static final int T__15=15;
    public static final int T__16=16;
    public static final int T__17=17;
    public static final int T__18=18;
    public static final int T__11=11;
    public static final int T__12=12;
    public static final int T__13=13;
    public static final int T__14=14;
    public static final int EOF=-1;
    public static final int T__30=30;
    public static final int T__31=31;
    public static final int T__32=32;
    public static final int RULE_ID=4;
    public static final int RULE_WS=9;
    public static final int RULE_ANY_OTHER=10;
    public static final int T__26=26;
    public static final int T__27=27;
    public static final int T__28=28;
    public static final int RULE_INT=6;
    public static final int T__29=29;
    public static final int T__22=22;
    public static final int RULE_ML_COMMENT=7;
    public static final int T__23=23;
    public static final int T__24=24;
    public static final int T__25=25;
    public static final int T__20=20;
    public static final int T__21=21;

    // delegates
    // delegators

    public InternalQuickUILexer() {;}
    public InternalQuickUILexer(CharStream input) {
        this(input, new RecognizerSharedState());
    }
    public InternalQuickUILexer(CharStream input, RecognizerSharedState state) {
        super(input,state);
    }
    public String getGrammarFileName() { return "InternalQuickUI.g"; }

    // Keyword rule T__11: 'false'
    public final void mT__11() throws RecognitionException {
        try {
            int _type = T__11;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("false");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__12: 'use'
    public final void mT__12() throws RecognitionException {
        try {
            int _type = T__12;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("use");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Punctuation rule T__13: ':'
    public final void mT__13() throws RecognitionException {
        try {
            int _type = T__13;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match(':');
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Punctuation rule T__14: '.'
    public final void mT__14() throws RecognitionException {
        try {
            int _type = T__14;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match('.');
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Punctuation rule T__15: '{'
    public final void mT__15() throws RecognitionException {
        try {
            int _type = T__15;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match('{');
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Punctuation rule T__16: '}'
    public final void mT__16() throws RecognitionException {
        try {
            int _type = T__16;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match('}');
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__17: 'Menu'
    public final void mT__17() throws RecognitionException {
        try {
            int _type = T__17;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("Menu");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__18: 'Content'
    public final void mT__18() throws RecognitionException {
        try {
            int _type = T__18;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("Content");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__19: 'Row'
    public final void mT__19() throws RecognitionException {
        try {
            int _type = T__19;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("Row");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__20: 'Label'
    public final void mT__20() throws RecognitionException {
        try {
            int _type = T__20;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("Label");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__21: 'Enabled'
    public final void mT__21() throws RecognitionException {
        try {
            int _type = T__21;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("Enabled");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__22: 'or'
    public final void mT__22() throws RecognitionException {
        try {
            int _type = T__22;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("or");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__23: 'and'
    public final void mT__23() throws RecognitionException {
        try {
            int _type = T__23;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("and");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__24: 'if'
    public final void mT__24() throws RecognitionException {
        try {
            int _type = T__24;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("if");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__25: 'is'
    public final void mT__25() throws RecognitionException {
        try {
            int _type = T__25;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("is");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__26: 'empty'
    public final void mT__26() throws RecognitionException {
        try {
            int _type = T__26;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("empty");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__27: 'checked'
    public final void mT__27() throws RecognitionException {
        try {
            int _type = T__27;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("checked");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__28: 'Checked'
    public final void mT__28() throws RecognitionException {
        try {
            int _type = T__28;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("Checked");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__29: 'OnClick'
    public final void mT__29() throws RecognitionException {
        try {
            int _type = T__29;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("OnClick");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__30: 'Main'
    public final void mT__30() throws RecognitionException {
        try {
            int _type = T__30;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("Main");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__31: 'not'
    public final void mT__31() throws RecognitionException {
        try {
            int _type = T__31;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("not");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Keyword rule T__32: 'true'
    public final void mT__32() throws RecognitionException {
        try {
            int _type = T__32;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("true");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // RULE_ID: ( '^' )? ( 'a'..'z' | 'A'..'Z' | '_' ) ( 'a'..'z' | 'A'..'Z' | '_' | '0'..'9' )*
    public final void mRULE_ID() throws RecognitionException {
        try {
            int _type = RULE_ID;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            // optional '^' escape prefix
            int alt1=2;
            int LA1_0 = input.LA(1);
            if ( (LA1_0=='^') ) {
                alt1=1;
            }
            switch (alt1) {
                case 1 :
                    {
                    match('^');
                    }
                    break;
            }
            // mandatory first identifier character
            if ( (input.LA(1)>='A' && input.LA(1)<='Z')||input.LA(1)=='_'||(input.LA(1)>='a' && input.LA(1)<='z') ) {
                input.consume();
            }
            else {
                MismatchedSetException mse = new MismatchedSetException(null,input);
                recover(mse);
                throw mse;
            }
            // remaining identifier characters
            loop2:
            do {
                int alt2=2;
                int LA2_0 = input.LA(1);
                if ( ((LA2_0>='0' && LA2_0<='9')||(LA2_0>='A' && LA2_0<='Z')||LA2_0=='_'||(LA2_0>='a' && LA2_0<='z')) ) {
                    alt2=1;
                }
                switch (alt2) {
                    case 1 :
                        {
                        if ( (input.LA(1)>='0' && input.LA(1)<='9')||(input.LA(1)>='A' && input.LA(1)<='Z')||input.LA(1)=='_'||(input.LA(1)>='a' && input.LA(1)<='z') ) {
                            input.consume();
                        }
                        else {
                            MismatchedSetException mse = new MismatchedSetException(null,input);
                            recover(mse);
                            throw mse;
                        }
                        }
                        break;
                    default :
                        break loop2;
                }
            } while (true);
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // RULE_INT: ( '0'..'9' )+
    public final void mRULE_INT() throws RecognitionException {
        try {
            int _type = RULE_INT;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            int cnt3=0;
            loop3:
            do {
                int alt3=2;
                int LA3_0 = input.LA(1);
                if ( ((LA3_0>='0' && LA3_0<='9')) ) {
                    alt3=1;
                }
                switch (alt3) {
                    case 1 :
                        {
                        matchRange('0','9');
                        }
                        break;
                    default :
                        // at least one digit is required
                        if ( cnt3 >= 1 ) break loop3;
                        EarlyExitException eee = new EarlyExitException(3, input);
                        throw eee;
                }
                cnt3++;
            } while (true);
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // RULE_STRING: double- or single-quoted string with backslash escapes.
    public final void mRULE_STRING() throws RecognitionException {
        try {
            int _type = RULE_STRING;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            int alt6=2;
            int LA6_0 = input.LA(1);
            if ( (LA6_0=='\"') ) {
                alt6=1;
            }
            else if ( (LA6_0=='\'') ) {
                alt6=2;
            }
            else {
                NoViableAltException nvae = new NoViableAltException("", 6, 0, input);
                throw nvae;
            }
            switch (alt6) {
                case 1 :
                    // "…" form
                    {
                    match('\"');
                    loop4:
                    do {
                        int alt4=3;
                        int LA4_0 = input.LA(1);
                        if ( (LA4_0=='\\') ) {
                            alt4=1;
                        }
                        else if ( ((LA4_0>='\u0000' && LA4_0<='!')||(LA4_0>='#' && LA4_0<='[')||(LA4_0>=']' && LA4_0<='\uFFFF')) ) {
                            alt4=2;
                        }
                        switch (alt4) {
                            case 1 :
                                // escape sequence
                                {
                                match('\\');
                                matchAny();
                                }
                                break;
                            case 2 :
                                // any char except backslash / closing quote
                                {
                                if ( (input.LA(1)>='\u0000' && input.LA(1)<='!')||(input.LA(1)>='#' && input.LA(1)<='[')||(input.LA(1)>=']' && input.LA(1)<='\uFFFF') ) {
                                    input.consume();
                                }
                                else {
                                    MismatchedSetException mse = new MismatchedSetException(null,input);
                                    recover(mse);
                                    throw mse;
                                }
                                }
                                break;
                            default :
                                break loop4;
                        }
                    } while (true);
                    match('\"');
                    }
                    break;
                case 2 :
                    // '…' form
                    {
                    match('\'');
                    loop5:
                    do {
                        int alt5=3;
                        int LA5_0 = input.LA(1);
                        if ( (LA5_0=='\\') ) {
                            alt5=1;
                        }
                        else if ( ((LA5_0>='\u0000' && LA5_0<='&')||(LA5_0>='(' && LA5_0<='[')||(LA5_0>=']' && LA5_0<='\uFFFF')) ) {
                            alt5=2;
                        }
                        switch (alt5) {
                            case 1 :
                                {
                                match('\\');
                                matchAny();
                                }
                                break;
                            case 2 :
                                {
                                if ( (input.LA(1)>='\u0000' && input.LA(1)<='&')||(input.LA(1)>='(' && input.LA(1)<='[')||(input.LA(1)>=']' && input.LA(1)<='\uFFFF') ) {
                                    input.consume();
                                }
                                else {
                                    MismatchedSetException mse = new MismatchedSetException(null,input);
                                    recover(mse);
                                    throw mse;
                                }
                                }
                                break;
                            default :
                                break loop5;
                        }
                    } while (true);
                    match('\'');
                    }
                    break;
            }
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // RULE_ML_COMMENT: '/*' (non-greedy any)* '*/'
    public final void mRULE_ML_COMMENT() throws RecognitionException {
        try {
            int _type = RULE_ML_COMMENT;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("/*");
            loop7:
            do {
                int alt7=2;
                int LA7_0 = input.LA(1);
                if ( (LA7_0=='*') ) {
                    int LA7_1 = input.LA(2);
                    if ( (LA7_1=='/') ) {
                        alt7=2;
                    }
                    else if ( ((LA7_1>='\u0000' && LA7_1<='.')||(LA7_1>='0' && LA7_1<='\uFFFF')) ) {
                        alt7=1;
                    }
                }
                else if ( ((LA7_0>='\u0000' && LA7_0<=')')||(LA7_0>='+' && LA7_0<='\uFFFF')) ) {
                    alt7=1;
                }
                switch (alt7) {
                    case 1 :
                        {
                        matchAny();
                        }
                        break;
                    default :
                        break loop7;
                }
            } while (true);
            match("*/");
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // RULE_SL_COMMENT: '//' to end of line, optionally consuming the newline.
    public final void mRULE_SL_COMMENT() throws RecognitionException {
        try {
            int _type = RULE_SL_COMMENT;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            match("//");
            loop8:
            do {
                int alt8=2;
                int LA8_0 = input.LA(1);
                if ( ((LA8_0>='\u0000' && LA8_0<='\t')||(LA8_0>='\u000B' && LA8_0<='\f')||(LA8_0>='\u000E' && LA8_0<='\uFFFF')) ) {
                    alt8=1;
                }
                switch (alt8) {
                    case 1 :
                        {
                        if ( (input.LA(1)>='\u0000' && input.LA(1)<='\t')||(input.LA(1)>='\u000B' && input.LA(1)<='\f')||(input.LA(1)>='\u000E' && input.LA(1)<='\uFFFF') ) {
                            input.consume();
                        }
                        else {
                            MismatchedSetException mse = new MismatchedSetException(null,input);
                            recover(mse);
                            throw mse;
                        }
                        }
                        break;
                    default :
                        break loop8;
                }
            } while (true);
            // optional ( '\r' )? '\n' terminator
            int alt10=2;
            int LA10_0 = input.LA(1);
            if ( (LA10_0=='\n'||LA10_0=='\r') ) {
                alt10=1;
            }
            switch (alt10) {
                case 1 :
                    {
                    int alt9=2;
                    int LA9_0 = input.LA(1);
                    if ( (LA9_0=='\r') ) {
                        alt9=1;
                    }
                    switch (alt9) {
                        case 1 :
                            {
                            match('\r');
                            }
                            break;
                    }
                    match('\n');
                    }
                    break;
            }
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // RULE_WS: one or more of space, tab, CR, LF.
    public final void mRULE_WS() throws RecognitionException {
        try {
            int _type = RULE_WS;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            int cnt11=0;
            loop11:
            do {
                int alt11=2;
                int LA11_0 = input.LA(1);
                if ( ((LA11_0>='\t' && LA11_0<='\n')||LA11_0=='\r'||LA11_0==' ') ) {
                    alt11=1;
                }
                switch (alt11) {
                    case 1 :
                        {
                        if ( (input.LA(1)>='\t' && input.LA(1)<='\n')||input.LA(1)=='\r'||input.LA(1)==' ' ) {
                            input.consume();
                        }
                        else {
                            MismatchedSetException mse = new MismatchedSetException(null,input);
                            recover(mse);
                            throw mse;
                        }
                        }
                        break;
                    default :
                        if ( cnt11 >= 1 ) break loop11;
                        EarlyExitException eee = new EarlyExitException(11, input);
                        throw eee;
                }
                cnt11++;
            } while (true);
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // RULE_ANY_OTHER: fallback rule matching any single character.
    public final void mRULE_ANY_OTHER() throws RecognitionException {
        try {
            int _type = RULE_ANY_OTHER;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            {
            matchAny();
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
        }
    }

    // Entry point: the DFA below picks which token rule to run.
    public void mTokens() throws RecognitionException {
        int alt12=29;
        alt12 = dfa12.predict(input);
        switch (alt12) {
            case 1 :
                {
                mT__11();
                }
                break;
            case 2 :
                {
                mT__12();
                }
                break;
            case 3 :
                {
                mT__13();
                }
                break;
            case 4 :
                {
                mT__14();
                }
                break;
            case 5 :
                {
                mT__15();
                }
                break;
            case 6 :
                {
                mT__16();
                }
                break;
            case 7 :
                {
                mT__17();
                }
                break;
            case 8 :
                {
                mT__18();
                }
                break;
            case 9 :
                {
                mT__19();
                }
                break;
            case 10 :
                {
                mT__20();
                }
                break;
            case 11 :
                {
                mT__21();
                }
                break;
            case 12 :
                {
                mT__22();
                }
                break;
            case 13 :
                {
                mT__23();
                }
                break;
            case 14 :
                {
                mT__24();
                }
                break;
            case 15 :
                {
                mT__25();
                }
                break;
            case 16 :
                {
                mT__26();
                }
                break;
            case 17 :
                {
                mT__27();
                }
                break;
            case 18 :
                {
                mT__28();
                }
                break;
            case 19 :
                {
                mT__29();
                }
                break;
            case 20 :
                {
                mT__30();
                }
                break;
            case 21 :
                {
                mT__31();
                }
                break;
            case 22 :
                {
                mT__32();
                }
                break;
            case 23 :
                {
                mRULE_ID();
                }
                break;
            case 24 :
                {
                mRULE_INT();
                }
                break;
            case 25 :
                {
                mRULE_STRING();
                }
                break;
            case 26 :
                {
                mRULE_ML_COMMENT();
                }
                break;
            case 27 :
                {
                mRULE_SL_COMMENT();
                }
                break;
            case 28 :
                {
                mRULE_WS();
                }
                break;
            case 29 :
                {
                mRULE_ANY_OTHER();
                }
                break;
        }
    }

    protected DFA12 dfa12 = new DFA12(this);

    // Packed DFA tables (generated; opaque octal-escape encoding — copied verbatim).
    static final String DFA12_eotS =
        "\1\uffff\2\35\4\uffff\15\35\1\33\2\uffff\3\33\2\uffff\1\35\1\uffff\1\35\4\uffff\7\35\1\101\1\35\1\103\1\104\5\35\5\uffff\1\35\1\113\4\35\1\120\2\35\1\uffff\1\123\2\uffff\3\35\1\127\2\35\1\uffff\1\132\1\133\2\35\1\uffff\2\35\1\uffff\3\35\1\uffff\1\143\1\144\2\uffff\2\35\1\147\1\35\1\151\2\35\2\uffff\2\35\1\uffff\1\35\1\uffff\2\35\1\161\1\162\1\163\1\164\1\165\5\uffff";
    static final String DFA12_eofS =
        "\166\uffff";
    static final String DFA12_minS =
        "\1\0\1\141\1\163\4\uffff\1\141\1\150\1\157\1\141\1\156\1\162\1\156\1\146\1\155\1\150\1\156\1\157\1\162\1\101\2\uffff\2\0\1\52\2\uffff\1\154\1\uffff\1\145\4\uffff\1\156\1\151\1\156\1\145\1\167\1\142\1\141\1\60\1\144\2\60\1\160\1\145\1\103\1\164\1\165\5\uffff\1\163\1\60\1\165\1\156\1\164\1\143\1\60\1\145\1\142\1\uffff\1\60\2\uffff\1\164\1\143\1\154\1\60\2\145\1\uffff\2\60\1\145\1\153\1\uffff\2\154\1\uffff\1\171\1\153\1\151\1\uffff\2\60\2\uffff\1\156\1\145\1\60\1\145\1\60\1\145\1\143\2\uffff\1\164\1\144\1\uffff\1\144\1\uffff\1\144\1\153\5\60\5\uffff";
    static final String DFA12_maxS =
        "\1\uffff\1\141\1\163\4\uffff\1\145\2\157\1\141\1\156\1\162\1\156\1\163\1\155\1\150\1\156\1\157\1\162\1\172\2\uffff\2\uffff\1\57\2\uffff\1\154\1\uffff\1\145\4\uffff\1\156\1\151\1\156\1\145\1\167\1\142\1\141\1\172\1\144\2\172\1\160\1\145\1\103\1\164\1\165\5\uffff\1\163\1\172\1\165\1\156\1\164\1\143\1\172\1\145\1\142\1\uffff\1\172\2\uffff\1\164\1\143\1\154\1\172\2\145\1\uffff\2\172\1\145\1\153\1\uffff\2\154\1\uffff\1\171\1\153\1\151\1\uffff\2\172\2\uffff\1\156\1\145\1\172\1\145\1\172\1\145\1\143\2\uffff\1\164\1\144\1\uffff\1\144\1\uffff\1\144\1\153\5\172\5\uffff";
    static final String DFA12_acceptS =
        "\3\uffff\1\3\1\4\1\5\1\6\16\uffff\1\27\1\30\3\uffff\1\34\1\35\1\uffff\1\27\1\uffff\1\3\1\4\1\5\1\6\20\uffff\1\30\1\31\1\32\1\33\1\34\11\uffff\1\14\1\uffff\1\16\1\17\6\uffff\1\2\4\uffff\1\11\2\uffff\1\15\3\uffff\1\25\2\uffff\1\7\1\24\7\uffff\1\26\1\1\2\uffff\1\12\1\uffff\1\20\7\uffff\1\10\1\22\1\13\1\21\1\23";
    static final String DFA12_specialS =
        "\1\2\26\uffff\1\1\1\0\135\uffff}>";
    static final String[] DFA12_transitionS = {
            "\11\33\2\32\2\33\1\32\22\33\1\32\1\33\1\27\4\33\1\30\6\33\1\4\1\31\12\26\1\3\6\33\2\25\1\10\1\25\1\13\6\25\1\12\1\7\1\25\1\21\2\25\1\11\10\25\3\33\1\24\1\25\1\33\1\15\1\25\1\20\1\25\1\17\1\1\2\25\1\16\4\25\1\22\1\14\4\25\1\23\1\2\5\25\1\5\1\33\1\6\uff82\33",
            "\1\34",
            "\1\36",
            "",
            "",
            "",
            "",
            "\1\44\3\uffff\1\43",
            "\1\46\6\uffff\1\45",
            "\1\47",
            "\1\50",
            "\1\51",
            "\1\52",
            "\1\53",
            "\1\54\14\uffff\1\55",
            "\1\56",
            "\1\57",
            "\1\60",
            "\1\61",
            "\1\62",
            "\32\35\4\uffff\1\35\1\uffff\32\35",
            "",
            "",
            "\0\64",
            "\0\64",
            "\1\65\4\uffff\1\66",
            "",
            "",
            "\1\70",
            "",
            "\1\71",
            "",
            "",
            "",
            "",
            "\1\72",
            "\1\73",
            "\1\74",
            "\1\75",
            "\1\76",
            "\1\77",
            "\1\100",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\102",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\105",
            "\1\106",
            "\1\107",
            "\1\110",
            "\1\111",
            "",
            "",
            "",
            "",
            "",
            "\1\112",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\114",
            "\1\115",
            "\1\116",
            "\1\117",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\121",
            "\1\122",
            "",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "",
            "",
            "\1\124",
            "\1\125",
            "\1\126",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\130",
            "\1\131",
            "",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\134",
            "\1\135",
            "",
            "\1\136",
            "\1\137",
            "",
            "\1\140",
            "\1\141",
            "\1\142",
            "",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "",
            "",
            "\1\145",
            "\1\146",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\150",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\1\152",
            "\1\153",
            "",
            "",
            "\1\154",
            "\1\155",
            "",
            "\1\156",
            "",
            "\1\157",
            "\1\160",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "\12\35\7\uffff\32\35\4\uffff\1\35\1\uffff\32\35",
            "",
            "",
            "",
            "",
            ""
    };

    // Unpacked table form used by the runtime DFA.
    static final short[] DFA12_eot = DFA.unpackEncodedString(DFA12_eotS);
    static final short[] DFA12_eof = DFA.unpackEncodedString(DFA12_eofS);
    static final char[] DFA12_min = DFA.unpackEncodedStringToUnsignedChars(DFA12_minS);
    static final char[] DFA12_max = DFA.unpackEncodedStringToUnsignedChars(DFA12_maxS);
    static final short[] DFA12_accept = DFA.unpackEncodedString(DFA12_acceptS);
    static final short[] DFA12_special = DFA.unpackEncodedString(DFA12_specialS);
    static final short[][] DFA12_transition;

    static {
        int numStates = DFA12_transitionS.length;
        DFA12_transition = new short[numStates][];
        for (int i=0; i<numStates; i++) {
            DFA12_transition[i] = DFA.unpackEncodedString(DFA12_transitionS[i]);
        }
    }

    // Tokenization decision DFA; special states handle string quotes and
    // the initial character dispatch.
    class DFA12 extends DFA {

        public DFA12(BaseRecognizer recognizer) {
            this.recognizer = recognizer;
            this.decisionNumber = 12;
            this.eot = DFA12_eot;
            this.eof = DFA12_eof;
            this.min = DFA12_min;
            this.max = DFA12_max;
            this.accept = DFA12_accept;
            this.special = DFA12_special;
            this.transition = DFA12_transition;
        }
        public String getDescription() {
            return "1:1: Tokens : ( T__11 | T__12 | T__13 | T__14 | T__15 | T__16 | T__17 | T__18 | T__19 | T__20 | T__21 | T__22 | T__23 | T__24 | T__25 | T__26 | T__27 | T__28 | T__29 | T__30 | T__31 | T__32 | RULE_ID | RULE_INT | RULE_STRING | RULE_ML_COMMENT | RULE_SL_COMMENT | RULE_WS | RULE_ANY_OTHER );";
        }
        public int specialStateTransition(int s, IntStream _input) throws NoViableAltException {
            IntStream input = _input;
            int _s = s;
            switch ( s ) {
                    // state after a single quote: anything continues a string (52), EOF falls back (27)
                    case 0 :
                        int LA12_24 = input.LA(1);
                        s = -1;
                        if ( ((LA12_24>='\u0000' && LA12_24<='\uFFFF')) ) {s = 52;}
                        else s = 27;
                        if ( s>=0 ) return s;
                        break;
                    // state after a double quote: same as above
                    case 1 :
                        int LA12_23 = input.LA(1);
                        s = -1;
                        if ( ((LA12_23>='\u0000' && LA12_23<='\uFFFF')) ) {s = 52;}
                        else s = 27;
                        if ( s>=0 ) return s;
                        break;
                    // start state: dispatch on the first character of the token
                    case 2 :
                        int LA12_0 = input.LA(1);
                        s = -1;
                        if ( (LA12_0=='f') ) {s = 1;}
                        else if ( (LA12_0=='u') ) {s = 2;}
                        else if ( (LA12_0==':') ) {s = 3;}
                        else if ( (LA12_0=='.') ) {s = 4;}
                        else if ( (LA12_0=='{') ) {s = 5;}
                        else if ( (LA12_0=='}') ) {s = 6;}
                        else if ( (LA12_0=='M') ) {s = 7;}
                        else if ( (LA12_0=='C') ) {s = 8;}
                        else if ( (LA12_0=='R') ) {s = 9;}
                        else if ( (LA12_0=='L') ) {s = 10;}
                        else if ( (LA12_0=='E') ) {s = 11;}
                        else if ( (LA12_0=='o') ) {s = 12;}
                        else if ( (LA12_0=='a') ) {s = 13;}
                        else if ( (LA12_0=='i') ) {s = 14;}
                        else if ( (LA12_0=='e') ) {s = 15;}
                        else if ( (LA12_0=='c') ) {s = 16;}
                        else if ( (LA12_0=='O') ) {s = 17;}
                        else if ( (LA12_0=='n') ) {s = 18;}
                        else if ( (LA12_0=='t') ) {s = 19;}
                        else if ( (LA12_0=='^') ) {s = 20;}
                        else if ( ((LA12_0>='A' && LA12_0<='B')||LA12_0=='D'||(LA12_0>='F' && LA12_0<='K')||LA12_0=='N'||(LA12_0>='P' && LA12_0<='Q')||(LA12_0>='S' && LA12_0<='Z')||LA12_0=='_'||LA12_0=='b'||LA12_0=='d'||(LA12_0>='g' && LA12_0<='h')||(LA12_0>='j' && LA12_0<='m')||(LA12_0>='p' && LA12_0<='s')||(LA12_0>='v' && LA12_0<='z')) ) {s = 21;}
                        else if ( ((LA12_0>='0' && LA12_0<='9')) ) {s = 22;}
                        else if ( (LA12_0=='\"') ) {s = 23;}
                        else if ( (LA12_0=='\'') ) {s = 24;}
                        else if ( (LA12_0=='/') ) {s = 25;}
                        else if ( ((LA12_0>='\t' && LA12_0<='\n')||LA12_0=='\r'||LA12_0==' ') ) {s = 26;}
                        else if ( ((LA12_0>='\u0000' && LA12_0<='\b')||(LA12_0>='\u000B' && LA12_0<='\f')||(LA12_0>='\u000E' && LA12_0<='\u001F')||LA12_0=='!'||(LA12_0>='#' && LA12_0<='&')||(LA12_0>='(' && LA12_0<='-')||(LA12_0>=';' && LA12_0<='@')||(LA12_0>='[' && LA12_0<=']')||LA12_0=='`'||LA12_0=='|'||(LA12_0>='~' && LA12_0<='\uFFFF')) ) {s = 27;}
                        if ( s>=0 ) return s;
                        break;
            }
            NoViableAltException nvae = new NoViableAltException(getDescription(), 12, _s, input);
            error(nvae);
            throw nvae;
        }
    }
}
/* * Copyright 2016 Tomas Kunovsky. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package txml.parser; import java.nio.file.Paths; import txml.interpreter.model.Instruction; import txml.interpreter.InstructionsInterpreter; public class XQueryInterpreter extends XQueryBaseListener { InstructionsInterpreter instructionsInterpreter; private Integer lastVariable = null; private Integer lastLabel = null; public XQueryInterpreter(InstructionsInterpreter interpreter) { this.instructionsInterpreter = interpreter; } private String getLastGenVariable() { return "$" + lastVariable.toString(); } private String genVariable() { if (lastVariable == null) { lastVariable = 1; return "$" + lastVariable.toString() ; } return "$" + (++lastVariable).toString(); } private String getLastGenLabel() { return "L" + lastLabel.toString(); } private String getPreviousGenLabel() { return "L" + (new Integer(lastLabel - 1)).toString(); } private String genLabel() { if (lastLabel == null) { lastLabel = 1; return "L" + lastLabel.toString() ; } return "L" + (++lastLabel).toString(); } public String literalToString(String literal) { return literal.substring(1, literal.length() - 1); } @Override public void enterPredicate(XQueryParser.PredicateContext ctx) { this.instructionsInterpreter.add(new Instruction("SAVE_START_PATHS", null, null, null, getLastGenVariable())); } @Override public void exitPredicate(XQueryParser.PredicateContext ctx) { this.instructionsInterpreter.add(new 
Instruction("GO_BACK_TO_START_PATHS", null, null, null, getLastGenVariable())); } @Override public void enterDirectStep(XQueryParser.DirectStepContext ctx) { this.instructionsInterpreter.add(new Instruction("GET_DIRECT_STEP", null, ctx.nodeGenerator().getText(), getLastGenVariable(), getLastGenVariable())); } @Override public void enterUndirectStep(XQueryParser.UndirectStepContext ctx) { this.instructionsInterpreter.add(new Instruction("GET_UNDIRECT_STEP", null, ctx.nodeGenerator().getText(), getLastGenVariable(), getLastGenVariable())); } @Override public void enterXPathInExpr(XQueryParser.XPathInExprContext ctx) { this.instructionsInterpreter.add(new Instruction("GET_DIRECT_STEP", null, ctx.nodeGenerator().getText(), getLastGenVariable(), getLastGenVariable())); } @Override public void exitPosition(XQueryParser.PositionContext ctx) { this.instructionsInterpreter.add(new Instruction("FILTER_POSITION", null, ctx.getText().toLowerCase(), getLastGenVariable(), getLastGenVariable())); } @Override public void exitXPathInExpr(XQueryParser.XPathInExprContext ctx) { String valueLiteral = ctx.getParent().getRuleContext(XQueryParser.ValueContext.class,0).getText(); String operator = ctx.getParent().getRuleContext(XQueryParser.OperatorContext.class,0).getText(); String value = valueLiteral.substring(1, valueLiteral.length()-1); String filter; String invertFilter; boolean valueFilter = true; switch (operator) { case "=": filter = "FILTER_EQ"; invertFilter = "FILTER_EQ"; break; case "!=": filter = "FILTER_NEQ"; invertFilter = "FILTER_NEQ"; break; case "<": filter = "FILTER_LESS"; invertFilter = "FILTER_MORE"; break; case ">": filter = "FILTER_MORE"; invertFilter = "FILTER_LESS"; break; case "<=": filter = "FILTER_LE"; invertFilter = "FILTER_GE"; break; case ">=": filter = "FILTER_GE"; invertFilter = "FILTER_LE"; break; case "PRECEDES": valueFilter = false; filter = "FILTER_PRECEDES"; invertFilter = "FILTER_FOLLOWS"; break; case "FOLLOWS": valueFilter = false; filter = 
"FILTER_FOLLOWS"; invertFilter = "FILTER_PRECEDES"; break; case "MEETS": valueFilter = false; filter = "FILTER_LMEETS"; invertFilter = "FILTER_RMEETS"; break; case "LMEETS": valueFilter = false; filter = "FILTER_LMEETS"; invertFilter = "FILTER_RMEETS"; break; case "RMEETS": valueFilter = false; filter = "FILTER_RMEETS"; invertFilter = "FILTER_LMEETS"; break; case "OVERLAPS": valueFilter = false; filter = "FILTER_OVERLAPS"; invertFilter = "FILTER_OVERLAPS"; break; case "CONTAINS": valueFilter = false; filter = "FILTER_CONTAINS"; invertFilter = "FILTER_IN"; break; case "IN": valueFilter = false; filter = "FILTER_IN"; invertFilter = "FILTER_CONTAINS"; break; default: filter = "UNKNOWN_OPERATOR"; invertFilter = "UNKNOWN_OPERATOR"; break; } if (valueFilter) { this.instructionsInterpreter.add(new Instruction("GET_VALUES", null, getLastGenVariable(), null, getLastGenVariable())); } if (ctx.getParent().getChild(0).getText().equals(valueLiteral)) { this.instructionsInterpreter.add(new Instruction(invertFilter, null, getLastGenVariable(), value, getLastGenVariable())); } else { this.instructionsInterpreter.add(new Instruction(filter, null, getLastGenVariable(), value, getLastGenVariable())); } } @Override public void enterAbsPathExprWithDoc(XQueryParser.AbsPathExprWithDocContext ctx) { String schemaName = literalToString(ctx.doc().schemaName().getText()); String documentName = literalToString(ctx.doc().documentName().getText()); this.instructionsInterpreter.add(new Instruction("SET_EMPTY_TNODE_LIST", null, schemaName, documentName, genVariable())); } @Override public void exitAbsPathExprWithDoc(XQueryParser.AbsPathExprWithDocContext ctx) { this.instructionsInterpreter.add(new Instruction("REMOVE_DUPLICATES_IN_LIST", null, getLastGenVariable(), null, getLastGenVariable())); } @Override public void exitXPathQuery(XQueryParser.XPathQueryContext ctx) { this.instructionsInterpreter.add(new Instruction("RETURN_LIST", null, getLastGenVariable(), null, null)); } @Override public 
void exitForClause(XQueryParser.ForClauseContext ctx) { this.instructionsInterpreter.add(new Instruction("ERASE_TXML_NODES", null, null, null, getLastGenVariable())); this.instructionsInterpreter.add(new Instruction("CHECK_SORT", null, null, null, getLastGenVariable())); this.instructionsInterpreter.add(new Instruction("ITERATOR_INIT", null, null, null, getLastGenVariable())); this.instructionsInterpreter.add(new Instruction("LABEL", null, null, null, genLabel())); this.instructionsInterpreter.add(new Instruction("GOTO_IF_NO_NEXT", null, getLastGenVariable(), null, genLabel())); this.instructionsInterpreter.add(new Instruction("ITERATOR_NEXT", null, getLastGenVariable(), null, ctx.variable().getText())); } @Override public void enterChildXPathExpression(XQueryParser.ChildXPathExpressionContext ctx) { this.instructionsInterpreter.add(new Instruction("SET_TNODE_LIST_IN_CYCLE", null, ctx.variable().getText(), null, genVariable())); } @Override public void enterParentXPathExpression(XQueryParser.ParentXPathExpressionContext ctx) { this.instructionsInterpreter.add(new Instruction("SET_LIST_FOR_PARENT", null, ctx.variable().getText(), null, genVariable())); } @Override public void exitDeleteExpr(XQueryParser.DeleteExprContext ctx) { this.instructionsInterpreter.add(new Instruction("DELETE_IN_DOCUMENT", null, getLastGenVariable(), null, null)); } @Override public void exitUpdate(XQueryParser.UpdateContext ctx) { this.instructionsInterpreter.add(new Instruction("GOTO", null, null, null, getPreviousGenLabel())); this.instructionsInterpreter.add(new Instruction("LABEL", null, null , null, getLastGenLabel())); } @Override public void exitParentExpr(XQueryParser.ParentExprContext ctx) { String position = ctx.insert_pos() != null ? 
literalToString(ctx.insert_pos().getText()) : null; this.instructionsInterpreter.add(new Instruction("SET_PARENT_IN_DOCUMENT", position, ctx.parentXPathExpression().variable().getText(), getLastGenVariable(), null)); } @Override public void exitInsertExpr(XQueryParser.InsertExprContext ctx) { String value = ctx.value() != null ? literalToString(ctx.value().getText()) : null; String position = ctx.insert_pos() != null ? literalToString(ctx.insert_pos().getText()) : null; this.instructionsInterpreter.add(new Instruction("INSERT_INTO_DOCUMENT", position, ctx.insertXPathExpression().variable().getText(), value, ctx.insertXPathExpression().simpleXPathExpression().getText())); } @Override public void exitSnapshotQuery(XQueryParser.SnapshotQueryContext ctx) { String timeLiteral = ctx.time().getText(); String time = timeLiteral.substring(1, timeLiteral.length() - 1); String documentNameLiteral = ctx.documentName().getText(); String documentName = documentNameLiteral.substring(1, documentNameLiteral.length() - 1); String schemaNameLiteral = ctx.schemaName().getText(); String schemaName = schemaNameLiteral.substring(1, schemaNameLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("RETURN_SNAPSHOT", schemaName, documentName, null, time)); } @Override public void enterDeclareOptionTimeFormat(XQueryParser.DeclareOptionTimeFormatContext ctx) { String timeFormatLiteral = ctx.timeFormat().getText(); String timeFormat = timeFormatLiteral.substring(1, timeFormatLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("DECLARE_OPTION_TIME_FORMAT", null, timeFormat, null, null)); } @Override public void exitDeclareOptionConnection(XQueryParser.DeclareOptionConnectionContext ctx) { String connectionUrlLiteral = ctx.connectionUrl().getText(); String connectionUrl = connectionUrlLiteral.substring(1, connectionUrlLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("DECLARE_OPTION_CONNECTION", null, connectionUrl, null, null)); } 
@Override public void exitDeclareOptionSort(XQueryParser.DeclareOptionSortContext ctx) { String sortLiteral = ctx.sort().getText(); String sort = sortLiteral.substring(1, sortLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("DECLARE_OPTION_SORT", null, sort, null, null)); } @Override public void exitInitSchema(XQueryParser.InitSchemaContext ctx) { String schemaNameLiteral = ctx.schemaName().getText(); String schemaName = schemaNameLiteral.substring(1, schemaNameLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("INIT_SCHEMA", null, schemaName, null, null)); } @Override public void enterDeinitSchema(XQueryParser.DeinitSchemaContext ctx) { String schemaNameLiteral = ctx.schemaName().getText(); String schemaName = schemaNameLiteral.substring(1, schemaNameLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("DEINIT_SCHEMA", null, schemaName, null, null)); } @Override public void exitStoreDocumentWithName(XQueryParser.StoreDocumentWithNameContext ctx) { String fileFullNameLiteral = ctx.fileFullName().getText(); String fileFullName = fileFullNameLiteral.substring(1, fileFullNameLiteral.length() - 1); String documentNameLiteral = ctx.documentName().getText(); String documentName = documentNameLiteral.substring(1, documentNameLiteral.length() - 1); String schemaNameLiteral = ctx.schemaName().getText(); String schemaName = schemaNameLiteral.substring(1, schemaNameLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("STORE_DOCUMENT", null, fileFullName, documentName, schemaName)); } @Override public void exitStoreDocumentWithoutName(XQueryParser.StoreDocumentWithoutNameContext ctx) { String fileFullNameLiteral = ctx.fileFullName().getText(); String fileFullName = fileFullNameLiteral.substring(1, fileFullNameLiteral.length() - 1); String documentName = Paths.get(fileFullName).getFileName().toString(); String schemaNameLiteral = ctx.schemaName().getText(); String schemaName = 
schemaNameLiteral.substring(1, schemaNameLiteral.length() - 1); this.instructionsInterpreter.add(new Instruction("STORE_DOCUMENT", null, fileFullName, documentName, schemaName)); } }
/* * Copyright (c) 2015 Goldman Sachs. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Eclipse Distribution License v. 1.0 which accompany this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. */ package org.eclipse.collections.impl.block.factory.primitive; import org.eclipse.collections.impl.block.factory.PrimitiveFunctions; import org.eclipse.collections.impl.test.Verify; import org.junit.Test; public class PrimitiveFunctionsSerializationTest { @Test public void integerIsPositive() { Verify.assertSerializedForm( 1L, "rO0ABXNyAE9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJEludGVnZXJJc1Bvc2l0aXZlAAAAAAAAAAECAAB4cA==", PrimitiveFunctions.integerIsPositive()); } @Test public void unboxNumberToInt() { Verify.assertSerializedForm( 1L, "rO0ABXNyAE5vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94TnVtYmVyVG9JbnQAAAAAAAAAAQIAAHhw", PrimitiveFunctions.unboxNumberToInt()); } @Test public void unboxIntegerToByte() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFBvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94SW50ZWdlclRvQnl0ZQAAAAAAAAABAgAAeHA=", PrimitiveFunctions.unboxIntegerToByte()); } @Test public void unboxIntegerToChar() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFBvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94SW50ZWdlclRvQ2hhcgAAAAAAAAABAgAAeHA=", PrimitiveFunctions.unboxIntegerToChar()); } @Test public void unboxIntegerToInt() { Verify.assertSerializedForm( 1L, "rO0ABXNyAE9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94SW50ZWdlclRvSW50AAAAAAAAAAECAAB4cA==", 
PrimitiveFunctions.unboxIntegerToInt()); } @Test public void unboxNumberToFloat() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFBvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94TnVtYmVyVG9GbG9hdAAAAAAAAAABAgAAeHA=", PrimitiveFunctions.unboxNumberToFloat()); } @Test public void unboxNumberToLong() { Verify.assertSerializedForm( 1L, "rO0ABXNyAE9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94TnVtYmVyVG9Mb25nAAAAAAAAAAECAAB4cA==", PrimitiveFunctions.unboxNumberToLong()); } @Test public void unboxNumberToDouble() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFFvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94TnVtYmVyVG9Eb3VibGUAAAAAAAAAAQIAAHhw", PrimitiveFunctions.unboxNumberToDouble()); } @Test public void unboxIntegerToFloat() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFFvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94SW50ZWdlclRvRmxvYXQAAAAAAAAAAQIAAHhw", PrimitiveFunctions.unboxIntegerToFloat()); } @Test public void unboxIntegerToLong() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFBvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94SW50ZWdlclRvTG9uZwAAAAAAAAABAgAAeHA=", PrimitiveFunctions.unboxIntegerToLong()); } @Test public void unboxIntegerToShort() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFFvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94SW50ZWdlclRvU2hvcnQAAAAAAAAAAQIAAHhw", PrimitiveFunctions.unboxIntegerToShort()); } @Test public void unboxIntegerToDouble() { Verify.assertSerializedForm( 1L, "rO0ABXNyAFJvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94SW50ZWdlclRvRG91YmxlAAAAAAAAAAECAAB4cA==", PrimitiveFunctions.unboxIntegerToDouble()); } @Test public void unboxDoubleToDouble() { Verify.assertSerializedForm( 1L, 
"rO0ABXNyAFFvcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94RG91YmxlVG9Eb3VibGUAAAAAAAAAAQIAAHhw", PrimitiveFunctions.unboxDoubleToDouble()); } @Test public void unboxFloatToFloat() { Verify.assertSerializedForm( 1L, "rO0ABXNyAE9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJFVuYm94RmxvYXRUb0Zsb2F0AAAAAAAAAAECAAB4cA==", PrimitiveFunctions.unboxFloatToFloat()); } @Test public void sumByInt() { Verify.assertSerializedForm( 1L, "rO0ABXNyAD9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJDEAAAAAAAAAAQIAAkwADHZhbCRmdW5jdGlvbnQAQkxvcmcvZWNsaXBzZS9j\n" + "b2xsZWN0aW9ucy9hcGkvYmxvY2svZnVuY3Rpb24vcHJpbWl0aXZlL0ludEZ1bmN0aW9uO0wAC3Zh\n" + "bCRncm91cEJ5dAA1TG9yZy9lY2xpcHNlL2NvbGxlY3Rpb25zL2FwaS9ibG9jay9mdW5jdGlvbi9G\n" + "dW5jdGlvbjt4cHBw", PrimitiveFunctions.sumByIntFunction(null, null)); } @Test public void sumByLong() { Verify.assertSerializedForm( 1L, "rO0ABXNyAD9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJDMAAAAAAAAAAQIAAkwADHZhbCRmdW5jdGlvbnQAQ0xvcmcvZWNsaXBzZS9j\n" + "b2xsZWN0aW9ucy9hcGkvYmxvY2svZnVuY3Rpb24vcHJpbWl0aXZlL0xvbmdGdW5jdGlvbjtMAAt2\n" + "YWwkZ3JvdXBCeXQANUxvcmcvZWNsaXBzZS9jb2xsZWN0aW9ucy9hcGkvYmxvY2svZnVuY3Rpb24v\n" + "RnVuY3Rpb247eHBwcA==", PrimitiveFunctions.sumByLongFunction(null, null)); } @Test public void sumByFloat() { Verify.assertSerializedForm( 1L, "rO0ABXNyAD9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJDIAAAAAAAAAAQIAA0wADGNvbXBlbnNhdGlvbnQAQkxvcmcvZWNsaXBzZS9j\n" + "b2xsZWN0aW9ucy9hcGkvbWFwL3ByaW1pdGl2ZS9NdXRhYmxlT2JqZWN0RG91YmxlTWFwO0wADHZh\n" + "bCRmdW5jdGlvbnQARExvcmcvZWNsaXBzZS9jb2xsZWN0aW9ucy9hcGkvYmxvY2svZnVuY3Rpb24v\n" + "cHJpbWl0aXZlL0Zsb2F0RnVuY3Rpb247TAALdmFsJGdyb3VwQnl0ADVMb3JnL2VjbGlwc2UvY29s\n" + "bGVjdGlvbnMvYXBpL2Jsb2NrL2Z1bmN0aW9uL0Z1bmN0aW9uO3hwc3IARm9yZy5lY2xpcHNlLmNv\n" + 
"bGxlY3Rpb25zLmltcGwubWFwLm11dGFibGUucHJpbWl0aXZlLk9iamVjdERvdWJsZUhhc2hNYXAA\n" + "AAAAAAAAAQwAAHhwdwQAAAAAeHBw", PrimitiveFunctions.sumByFloatFunction(null, null)); } @Test public void sumByDouble() { Verify.assertSerializedForm( 1L, "rO0ABXNyAD9vcmcuZWNsaXBzZS5jb2xsZWN0aW9ucy5pbXBsLmJsb2NrLmZhY3RvcnkuUHJpbWl0\n" + "aXZlRnVuY3Rpb25zJDQAAAAAAAAAAQIAA0wADGNvbXBlbnNhdGlvbnQAQkxvcmcvZWNsaXBzZS9j\n" + "b2xsZWN0aW9ucy9hcGkvbWFwL3ByaW1pdGl2ZS9NdXRhYmxlT2JqZWN0RG91YmxlTWFwO0wADHZh\n" + "bCRmdW5jdGlvbnQARUxvcmcvZWNsaXBzZS9jb2xsZWN0aW9ucy9hcGkvYmxvY2svZnVuY3Rpb24v\n" + "cHJpbWl0aXZlL0RvdWJsZUZ1bmN0aW9uO0wAC3ZhbCRncm91cEJ5dAA1TG9yZy9lY2xpcHNlL2Nv\n" + "bGxlY3Rpb25zL2FwaS9ibG9jay9mdW5jdGlvbi9GdW5jdGlvbjt4cHNyAEZvcmcuZWNsaXBzZS5j\n" + "b2xsZWN0aW9ucy5pbXBsLm1hcC5tdXRhYmxlLnByaW1pdGl2ZS5PYmplY3REb3VibGVIYXNoTWFw\n" + "AAAAAAAAAAEMAAB4cHcEAAAAAHhwcA==", PrimitiveFunctions.sumByDoubleFunction(null, null)); } }
/** * Copyright 2007-2016, Kaazing Corporation. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.mina.transport.socket.nio; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.ByteChannel; import java.nio.channels.SelectableChannel; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.util.Iterator; import java.util.Set; import java.util.concurrent.Executor; import org.apache.mina.core.RuntimeIoException; import org.apache.mina.core.buffer.IoBuffer; import org.apache.mina.core.file.FileRegion; import org.apache.mina.core.polling.AbstractPollingIoProcessor; import org.apache.mina.core.session.SessionState; import org.apache.mina.core.write.WriteRequest; import org.kaazing.mina.core.buffer.IoBufferEx; import org.kaazing.mina.core.buffer.SimpleBufferAllocator; /** * Used instead of Mina's NioProcessor to (a) support sessions implementing IoSessionEx and * (b) avoid unnecessary ByteBuffer duplication. */ public final class NioProcessorEx extends AbstractPollingIoProcessor<NioSessionEx> { /** The selector associated with this processor */ private final Selector selector; /** * * Creates a new instance of NioProcessor. 
* * @param executor */ public NioProcessorEx(Executor executor) { super(executor); try { // Open a new selector selector = Selector.open(); } catch (IOException e) { throw new RuntimeIoException("Failed to open a selector.", e); } } @Override protected Object getWriteRequestMessage(NioSessionEx session, WriteRequest writeRequest) { // 1. lookup current write buffer IoBufferEx writeBuffer = session.getIncompleteSharedWriteBuffer(); if (writeBuffer != null) { // 1a. buffer obtained from a previously shared, incomplete write assert !writeBuffer.isShared(); return writeBuffer; } // 1b. current write is either unshared or first attempt return writeRequest.getMessage(); } @Override protected int writeBuffer(NioSessionEx session, WriteRequest req, IoBuffer buf, boolean hasFragmentation, int maxLength, long currentTime) throws Exception { // empty buffers may not be IoBufferEx, see IoBuffer.allocate() if (!buf.hasRemaining()) { return super.writeBuffer(session, req, buf, hasFragmentation, maxLength, currentTime); } // 1. test if buffer is shared across sessions (could be same or different I/O thread) IoBufferEx bufEx = (IoBufferEx) buf; if (!bufEx.isShared()) { // 1a. buffer is not shared across sessions, typical behavior int remaining = buf.remaining(); int localWrittenBytes = super.writeBuffer(session, req, buf, hasFragmentation, maxLength, currentTime); if (localWrittenBytes == remaining) { // 1b. previously shared, incomplete write is now complete, OR // previously unshared, incomplete write is now complete, OR // unshared write is now complete on first attempt session.setIncompleteSharedWriteBuffer(null); } return localWrittenBytes; } // 2. 
buffer is shared across sessions // remember position in case of incomplete write // access NIO buffer directly to minimize ThreadLocal lookups ByteBuffer nioBuf = buf.buf(); int position = nioBuf.position(); int remaining = nioBuf.remaining(); int localWrittenBytes = super.writeBuffer(session, req, buf, hasFragmentation, maxLength, currentTime); // 3. detect shared buffer incomplete write if (localWrittenBytes < remaining) { // 3a. diverge from master shared buffer and reset master position as if fully written // master is thread local, so changing position does not affect other threads IoBufferEx incomplete = bufEx.asUnsharedBuffer(); session.setIncompleteSharedWriteBuffer(incomplete); nioBuf.position(position); } // 4. either shared write complete (on first attempt), // or shared write incomplete and diverged to prevent side-effects on other sessions return localWrittenBytes; } @Override protected void dispose0() throws Exception { selector.close(); } @Override protected int select(long timeout) throws Exception { return selector.select(timeout); } @Override protected int select() throws Exception { return selector.select(); } @Override protected boolean isSelectorEmpty() { return selector.keys().isEmpty(); } @Override protected void wakeup() { selector.wakeup(); } @Override protected Iterator<NioSessionEx> allSessions() { return new IoSessionIterator(selector.keys()); } @SuppressWarnings("synthetic-access") @Override protected Iterator<NioSessionEx> selectedSessions() { return new IoSessionIterator(selector.selectedKeys()); } @Override protected void init(NioSessionEx session) throws Exception { SelectableChannel ch = (SelectableChannel) session.getChannel(); ch.configureBlocking(false); session.setSelectionKey(ch.register(selector, SelectionKey.OP_READ, session)); } @Override protected void destroy(NioSessionEx session) throws Exception { ByteChannel ch = session.getChannel(); SelectionKey key = session.getSelectionKey(); if (key != null) { key.cancel(); } 
ch.close(); } /** * {@inheritDoc} */ @Override protected SessionState getState(NioSessionEx session) { SelectionKey key = session.getSelectionKey(); if (key == null) { // The channel is not yet registered to a selector return SessionState.OPENING; } if (key.isValid()) { // The session is opened return SessionState.OPENED; } else { // The session still as to be closed return SessionState.CLOSING; } } @Override protected boolean isReadable(NioSessionEx session) { SelectionKey key = session.getSelectionKey(); return key.isValid() && key.isReadable(); } @Override protected boolean isWritable(NioSessionEx session) { SelectionKey key = session.getSelectionKey(); return key.isValid() && key.isWritable(); } @Override protected boolean isInterestedInRead(NioSessionEx session) { SelectionKey key = session.getSelectionKey(); return key.isValid() && (key.interestOps() & SelectionKey.OP_READ) != 0; } @Override protected boolean isInterestedInWrite(NioSessionEx session) { SelectionKey key = session.getSelectionKey(); return key.isValid() && (key.interestOps() & SelectionKey.OP_WRITE) != 0; } /** * {@inheritDoc} */ @Override protected void setInterestedInRead(NioSessionEx session, boolean isInterested) throws Exception { SelectionKey key = session.getSelectionKey(); int oldInterestOps = key.interestOps(); int newInterestOps = oldInterestOps; if (isInterested) { newInterestOps |= SelectionKey.OP_READ; } else { newInterestOps &= ~SelectionKey.OP_READ; } if (oldInterestOps != newInterestOps) { key.interestOps(newInterestOps); } } /** * {@inheritDoc} */ @Override protected void setInterestedInWrite(NioSessionEx session, boolean isInterested) throws Exception { SelectionKey key = session.getSelectionKey(); int oldInterestOps = key.interestOps(); int newInterestOps = oldInterestOps; if (isInterested) { newInterestOps |= SelectionKey.OP_WRITE; } else { newInterestOps &= ~SelectionKey.OP_WRITE; } if (oldInterestOps != newInterestOps) { key.interestOps(newInterestOps); } } @Override 
protected IoBuffer newReadBuffer(int readBufferSize) { // note: this assumes NioSessionEx.getBufferAllocator() returns SimpleBufferAllocator.BUFFER_ALLOCATOR return SimpleBufferAllocator.BUFFER_ALLOCATOR.wrap(ByteBuffer.allocate(readBufferSize)); } @Override protected int read(NioSessionEx session, IoBuffer buf) throws Exception { return session.getChannel().read(buf.buf()); } @Override protected int write(NioSessionEx session, IoBuffer buf, int length) throws Exception { if (buf.remaining() <= length) { return session.getChannel().write(buf.buf()); } int oldLimit = buf.limit(); buf.limit(buf.position() + length); try { return session.getChannel().write(buf.buf()); } finally { buf.limit(oldLimit); } } @Override protected int transferFile(NioSessionEx session, FileRegion region, int length) throws Exception { try { return (int) region.getFileChannel().transferTo(region.getPosition(), length, session.getChannel()); } catch (IOException e) { // Check to see if the IOException is being thrown due to // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5103988 String message = e.getMessage(); if (message != null && message.contains("temporarily unavailable")) { return 0; } throw e; } } /** * An encapsulating iterator around the {@link Selector#selectedKeys()} * or the {@link Selector#keys()} iterator; */ protected static final class IoSessionIterator implements Iterator<NioSessionEx> { private final Iterator<SelectionKey> iterator; /** * Create this iterator as a wrapper on top of the selectionKey * Set. 
* @param keys The set of selected sessions */ private IoSessionIterator(Set<SelectionKey> keys) { iterator = keys.iterator(); } /** * {@inheritDoc} */ @Override public boolean hasNext() { return iterator.hasNext(); } /** * {@inheritDoc} */ @Override public NioSessionEx next() { SelectionKey key = iterator.next(); NioSessionEx nioSession = (NioSessionEx) key.attachment(); return nioSession; } /** * {@inheritDoc} */ @Override public void remove() { iterator.remove(); } } }
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.runtime; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.runtime.proto.InvocationPolicyOuterClass.InvocationPolicy; import com.google.devtools.build.lib.runtime.proto.InvocationPolicyOuterClass.UseDefault; import com.google.devtools.common.options.Converters; import com.google.devtools.common.options.ExpansionFunction; import com.google.devtools.common.options.InvocationPolicyEnforcer; import com.google.devtools.common.options.IsolatedOptionsData; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionDocumentationCategory; import com.google.devtools.common.options.OptionEffectTag; import com.google.devtools.common.options.OptionMetadataTag; import com.google.devtools.common.options.OptionsBase; import com.google.devtools.common.options.OptionsParser; import com.google.devtools.common.options.OptionsParsingException; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests for the Incompatible Changes system (--incompatible_* flags). These go in their own suite * because the options parser doesn't know the business logic for incompatible changes. 
*/ @RunWith(JUnit4.class) public class AllIncompatibleChangesExpansionTest { /** Dummy comment (linter suppression) */ public static class ExampleOptions extends OptionsBase { @Option( name = "all", documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, effectTags = {OptionEffectTag.NO_OP}, defaultValue = "null", expansionFunction = AllIncompatibleChangesExpansion.class ) public Void all; @Option( name = "X", documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, effectTags = {OptionEffectTag.NO_OP}, defaultValue = "false" ) public boolean x; @Option( name = "Y", documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, effectTags = {OptionEffectTag.NO_OP}, defaultValue = "true" ) public boolean y; @Option( name = "incompatible_A", metadataTags = { OptionMetadataTag.INCOMPATIBLE_CHANGE, OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES }, documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, effectTags = {OptionEffectTag.NO_OP}, defaultValue = "false", help = "Migrate to A" ) public boolean incompatibleA; @Option( name = "incompatible_B", metadataTags = { OptionMetadataTag.INCOMPATIBLE_CHANGE, OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES }, documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, effectTags = {OptionEffectTag.NO_OP}, defaultValue = "false", help = "Migrate to B" ) public boolean incompatibleB; } /** Dummy comment (linter suppression) */ public static class ExampleExpansionOptions extends OptionsBase { @Option( name = "incompatible_expX", metadataTags = { OptionMetadataTag.INCOMPATIBLE_CHANGE, OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES }, documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, effectTags = {OptionEffectTag.NO_OP}, defaultValue = "null", expansion = {"--X"}, help = "Start using X" ) public Void incompatibleExpX; /** Dummy comment (linter suppression) */ public static class NoYExpansion implements ExpansionFunction { @Override public 
ImmutableList<String> getExpansion(IsolatedOptionsData optionsData) { return ImmutableList.of("--noY"); } } @Option( name = "incompatible_expY", metadataTags = { OptionMetadataTag.INCOMPATIBLE_CHANGE, OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES }, documentationCategory = OptionDocumentationCategory.UNCATEGORIZED, effectTags = {OptionEffectTag.NO_OP}, defaultValue = "null", expansionFunction = NoYExpansion.class, help = "Stop using Y" ) public Void incompatibleExpY; } @Test public void noChangesSelected() throws OptionsParsingException { OptionsParser parser = OptionsParser.newOptionsParser(ExampleOptions.class); parser.parse(""); ExampleOptions opts = parser.getOptions(ExampleOptions.class); assertThat(opts.x).isFalse(); assertThat(opts.y).isTrue(); assertThat(opts.incompatibleA).isFalse(); assertThat(opts.incompatibleB).isFalse(); } @Test public void allChangesSelected() throws OptionsParsingException { OptionsParser parser = OptionsParser.newOptionsParser(ExampleOptions.class); parser.parse("--all"); ExampleOptions opts = parser.getOptions(ExampleOptions.class); assertThat(opts.x).isFalse(); assertThat(opts.y).isTrue(); assertThat(opts.incompatibleA).isTrue(); assertThat(opts.incompatibleB).isTrue(); } @Test public void rightmostOverrides() throws OptionsParsingException { // Check that all-expansion behaves just like any other expansion flag: // the rightmost setting of any individual option wins. OptionsParser parser = OptionsParser.newOptionsParser(ExampleOptions.class); parser.parse("--noincompatible_A", "--all", "--noincompatible_B"); ExampleOptions opts = parser.getOptions(ExampleOptions.class); assertThat(opts.incompatibleA).isTrue(); assertThat(opts.incompatibleB).isFalse(); } @Test public void expansionOptions() throws OptionsParsingException { // Check that all-expansion behaves just like any other expansion flag: // the rightmost setting of any individual option wins. 
OptionsParser parser = OptionsParser.newOptionsParser(ExampleOptions.class, ExampleExpansionOptions.class); parser.parse("--all"); ExampleOptions opts = parser.getOptions(ExampleOptions.class); assertThat(opts.x).isTrue(); assertThat(opts.y).isFalse(); assertThat(opts.incompatibleA).isTrue(); assertThat(opts.incompatibleB).isTrue(); } @Test public void invocationPolicy() throws OptionsParsingException { // Check that all-expansion behaves just like any other expansion flag and can be filtered // by invocation policy. InvocationPolicy.Builder invocationPolicyBuilder = InvocationPolicy.newBuilder(); invocationPolicyBuilder.addFlagPoliciesBuilder() .setFlagName("incompatible_A") .setUseDefault(UseDefault.getDefaultInstance()) .build(); InvocationPolicy policy = invocationPolicyBuilder.build(); InvocationPolicyEnforcer enforcer = new InvocationPolicyEnforcer(policy); OptionsParser parser = OptionsParser.newOptionsParser(ExampleOptions.class); parser.parse("--all"); enforcer.enforce(parser); ExampleOptions opts = parser.getOptions(ExampleOptions.class); assertThat(opts.x).isFalse(); assertThat(opts.y).isTrue(); assertThat(opts.incompatibleA).isFalse(); // A should have been removed from the expansion. assertThat(opts.incompatibleB).isTrue(); // B, without a policy, should have been left alone. } /** Option with the right prefix, but the wrong metadata tag. 
*/
public static class IncompatibleChangeTagOption extends OptionsBase {
  @Option(
    name = "some_option_with_a_tag",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE},
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp"
  )
  public boolean opt;
}

@Test
public void incompatibleChangeTagDoesNotTriggerAllIncompatibleChangesCheck() {
  // The option carries INCOMPATIBLE_CHANGE but not TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
  // so parser construction must succeed without the prefix/metadata validation firing.
  try {
    OptionsParser.newOptionsParser(ExampleOptions.class, IncompatibleChangeTagOption.class);
  } catch (OptionsParser.ConstructionException e) {
    fail(
        "some_option_with_a_tag should not trigger the expansion, so there should be no checks "
            + "on it having the right prefix and metadata tags. Instead, the following exception "
            + "was thrown: "
            + e.getMessage());
  }
}

// There's no unit test to check that the expansion of --all is sorted. IsolatedOptionsData is not
// exposed from OptionsParser, making it difficult to check, and it's not clear that exposing it
// would be worth it.

/**
 * Ensure that we get an {@link OptionsParser.ConstructionException} containing {@code message}
 * when the incompatible changes in the given {@link OptionsBase} subclass are validated.
 */
// Because javadoc can't resolve inner classes.
@SuppressWarnings("javadoc")
private static void assertBadness(Class<? extends OptionsBase> optionsBaseClass, String message) {
  try {
    OptionsParser.newOptionsParser(ExampleOptions.class, optionsBaseClass);
    fail("Should have failed with message \"" + message + "\"");
  } catch (OptionsParser.ConstructionException e) {
    assertThat(e).hasMessageThat().contains(message);
  }
}

/** Incompatible-change option whose name lacks the required "incompatible_" prefix. */
public static class BadNameOptions extends OptionsBase {
  @Option(
    name = "badname",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp"
  )
  public boolean bad;
}

@Test
public void badName() {
  assertBadness(
      BadNameOptions.class,
      "Incompatible change option '--badname' must have name "
          + "starting with \"incompatible_\"");
}

/** Option with the right prefix, but the wrong metadata tag. */
public static class MissingTriggeredByTagOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    metadataTags = {OptionMetadataTag.INCOMPATIBLE_CHANGE},
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp"
  )
  public boolean bad;
}

@Test
public void badTag() {
  assertBadness(
      MissingTriggeredByTagOptions.class,
      "must have metadata tag OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES");
}

/** Option with the right prefix, but the wrong metadata tag. */
public static class MissingIncompatibleTagOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    metadataTags = {OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES},
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp"
  )
  public boolean bad;
}

@Test
public void otherBadTag() {
  assertBadness(
      MissingIncompatibleTagOptions.class,
      "must have metadata tag OptionMetadataTag.INCOMPATIBLE_CHANGE");
}

/** Incompatible-change option with a non-boolean type, which validation must reject. */
public static class BadTypeOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "0",
    help = "nohelp"
  )
  public int bad;
}

@Test
public void badType() {
  assertBadness(BadTypeOptions.class, "must have boolean type");
}

/** Incompatible-change option that omits the mandatory help string. */
public static class BadHelpOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false"
  )
  public boolean bad;
}

@Test
public void badHelp() {
  assertBadness(BadHelpOptions.class, "must have a \"help\" string");
}

/** Incompatible-change option that illegally sets the abbrev field. */
public static class BadAbbrevOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp",
    abbrev = 'x'
  )
  public boolean bad;
}

@Test
public void badAbbrev() {
  assertBadness(BadAbbrevOptions.class, "must not use the abbrev field");
}

/** Incompatible-change option that illegally sets the valueHelp field. */
public static class BadValueHelpOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp",
    valueHelp = "x"
  )
  public boolean bad;
}

@Test
public void badValueHelp() {
  assertBadness(BadValueHelpOptions.class, "must not use the valueHelp field");
}

/** Incompatible-change option that illegally sets the converter field. */
public static class BadConverterOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp",
    converter = Converters.BooleanConverter.class
  )
  public boolean bad;
}

@Test
public void badConverter() {
  assertBadness(BadConverterOptions.class, "must not use the converter field");
}

/** Incompatible-change option that illegally sets the allowMultiple field. */
public static class BadAllowMultipleOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "null",
    help = "nohelp",
    allowMultiple = true
  )
  public List<String> bad;
}

@Test
public void badAllowMutliple() {
  assertBadness(BadAllowMultipleOptions.class, "must not use the allowMultiple field");
}

/** Incompatible-change option that illegally sets the implicitRequirements field. */
public static class BadImplicitRequirementsOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp",
    implicitRequirements = "--x"
  )
  public boolean bad;
}

@Test
public void badImplicitRequirements() {
  assertBadness(
      BadImplicitRequirementsOptions.class, "must not use the implicitRequirements field");
}

/** Incompatible-change option that illegally sets the oldName field. */
public static class BadOldNameOptions extends OptionsBase {
  @Option(
    name = "incompatible_bad",
    metadataTags = {
      OptionMetadataTag.INCOMPATIBLE_CHANGE,
      OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
    },
    documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
    effectTags = {OptionEffectTag.NO_OP},
    defaultValue = "false",
    help = "nohelp",
    oldName = "x"
  )
  public boolean bad;
}

@Test
public void badOldName() {
  assertBadness(BadOldNameOptions.class, "must not use the oldName field");
}
}
/*
 * Copyright 2015 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.phreak;

import org.drools.core.base.DefaultKnowledgeHelper;
import org.drools.core.common.AgendaItem;
import org.drools.core.common.EventSupport;
import org.drools.core.common.InternalAgenda;
import org.drools.core.common.InternalAgendaGroup;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.LeftTupleSets;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.reteoo.LeftTuple;
import org.drools.core.reteoo.RuleTerminalNode;
import org.drools.core.reteoo.RuleTerminalNodeLeftTuple;
import org.drools.core.reteoo.TerminalNode;
import org.drools.core.spi.Activation;
import org.drools.core.spi.PropagationContext;
import org.drools.core.spi.Salience;
import org.kie.api.event.rule.MatchCancelledCause;

/**
 * Propagates staged left-tuple inserts, updates and deletes into a rule's
 * terminal node, creating, updating or cancelling agenda activations.
 *
 * Created with IntelliJ IDEA. User: mdproctor Date: 03/05/2013 Time: 15:42
 */
public class PhreakRuleTerminalNode {
    // Entry point: drains all three staged sets (deletes first, then updates,
    // then inserts) and resets the staging lists when done.
    public void doNode(TerminalNode rtnNode,
                       InternalWorkingMemory wm,
                       LeftTupleSets srcLeftTuples,
                       RuleExecutor executor) {
        if (srcLeftTuples.getDeleteFirst() != null) {
            doLeftDeletes(wm, srcLeftTuples, executor);
        }

        if (srcLeftTuples.getUpdateFirst() != null) {
            doLeftUpdates(rtnNode, wm, srcLeftTuples, executor);
        }

        if (srcLeftTuples.getInsertFirst() != null) {
            doLeftInserts(rtnNode, wm, srcLeftTuples, executor);
        }

        srcLeftTuples.resetAll();
    }

    // Walks the staged insert list and creates one activation per tuple.
    public void doLeftInserts(TerminalNode rtnNode,
                              InternalWorkingMemory wm,
                              LeftTupleSets srcLeftTuples,
                              RuleExecutor executor) {
        InternalAgenda agenda = wm.getAgenda();
        RuleAgendaItem ruleAgendaItem = executor.getRuleAgendaItem();

        // Static salience is resolved once here; a dynamic salience is kept
        // non-null and re-evaluated per tuple in doLeftTupleInsert.
        int salienceInt = 0;
        Salience salience = ruleAgendaItem.getRule().getSalience();
        if ( !salience.isDynamic() ) {
            salienceInt = salience.getValue();
            salience = null;
        }

        if ( rtnNode.getRule().getAutoFocus() && !ruleAgendaItem.getAgendaGroup().isActive() ) {
            wm.getAgenda().setFocus( ruleAgendaItem.getAgendaGroup() );
        }

        for (LeftTuple leftTuple = srcLeftTuples.getInsertFirst(); leftTuple != null; ) {
            LeftTuple next = leftTuple.getStagedNext();
            doLeftTupleInsert(rtnNode, executor, agenda, ruleAgendaItem, salienceInt, salience, leftTuple, wm);
            leftTuple.clearStaged();
            leftTuple = next;
        }
    }

    // Creates an agenda item for one tuple, honouring no-loop, lock-on-active
    // and the (serialization-related) activations filter.
    public static void doLeftTupleInsert(TerminalNode rtnNode,
                                         RuleExecutor executor,
                                         InternalAgenda agenda,
                                         RuleAgendaItem ruleAgendaItem,
                                         int salienceInt,
                                         Salience salience,
                                         LeftTuple leftTuple,
                                         InternalWorkingMemory wm) {
        PropagationContext pctx = leftTuple.getPropagationContext();
        pctx = RuleTerminalNode.findMostRecentPropagationContext(leftTuple, pctx);

        // no-loop: drop the activation if it was caused by this same rule firing
        if ( rtnNode.getRule().isNoLoop() && rtnNode.equals(pctx.getTerminalNodeOrigin()) ) {
            return;
        }

        if ( salience != null ) {
            // dynamic salience: evaluate against this specific match
            salienceInt = salience.getValue(new DefaultKnowledgeHelper((AgendaItem) leftTuple, wm), rtnNode.getRule(), wm);
        }

        RuleTerminalNodeLeftTuple rtnLeftTuple = (RuleTerminalNodeLeftTuple) leftTuple;
        agenda.createAgendaItem( rtnLeftTuple, salienceInt, pctx, ruleAgendaItem, ruleAgendaItem.getAgendaGroup() );

        EventSupport es = (EventSupport) wm;
        es.getAgendaEventSupport().fireActivationCreated(rtnLeftTuple, wm);

        if ( rtnNode.getRule().isLockOnActive() && leftTuple.getPropagationContext().getType() != org.kie.api.runtime.rule.PropagationContext.RULE_ADDITION ) {
            long handleRecency = ((InternalFactHandle) pctx.getFactHandle()).getRecency();
            InternalAgendaGroup agendaGroup = executor.getRuleAgendaItem().getAgendaGroup();
            if (blockedByLockOnActive(rtnNode.getRule(), pctx, handleRecency, agendaGroup)) {
                es.getAgendaEventSupport().fireActivationCancelled(rtnLeftTuple, wm, MatchCancelledCause.FILTER );
                return;
            }
        }

        if (agenda.getActivationsFilter() != null && !agenda.getActivationsFilter().accept( rtnLeftTuple, wm, rtnNode)) {
            // only relevant for serialization, to not refire Matches already fired
            return;
        }

        wm.getAgenda().addItemToActivationGroup( rtnLeftTuple );
        executor.addLeftTuple(leftTuple);
        leftTuple.increaseActivationCountForEvents(); // increased here, decreased in Agenda's cancelActivation and fireActivation

        if( !rtnNode.isFireDirect() && executor.isDeclarativeAgendaEnabled() ) {
            agenda.insertAndStageActivation(rtnLeftTuple);
        }
    }

    // Walks the staged update list and refreshes each tuple's activation.
    public void doLeftUpdates(TerminalNode rtnNode,
                              InternalWorkingMemory wm,
                              LeftTupleSets srcLeftTuples,
                              RuleExecutor executor) {
        RuleAgendaItem ruleAgendaItem = executor.getRuleAgendaItem();
        if ( rtnNode.getRule().getAutoFocus() && !ruleAgendaItem.getAgendaGroup().isActive() ) {
            wm.getAgenda().setFocus(ruleAgendaItem.getAgendaGroup());
        }

        // Same static-vs-dynamic salience resolution as doLeftInserts.
        int salienceInt = 0;
        Salience salience = ruleAgendaItem.getRule().getSalience();
        if ( !salience.isDynamic() ) {
            salienceInt = salience.getValue();
            salience = null;
        }

        //Salience salienceInt = ruleAgendaItem.getRule().getSalience();
        for (LeftTuple leftTuple = srcLeftTuples.getUpdateFirst(); leftTuple != null; ) {
            LeftTuple next = leftTuple.getStagedNext();
            doLeftTupleUpdate(rtnNode, executor, wm.getAgenda(), salienceInt, salience, leftTuple, wm);
            leftTuple.clearStaged();
            leftTuple = next;
        }
    }

    // Re-evaluates one updated tuple: may re-queue it, refresh its salience,
    // or leave it blocked (declarative agenda / no-loop).
    public static void doLeftTupleUpdate(TerminalNode rtnNode,
                                         RuleExecutor executor,
                                         InternalAgenda agenda,
                                         int salienceInt,
                                         Salience salience,
                                         LeftTuple leftTuple,
                                         InternalWorkingMemory wm) {
        PropagationContext pctx = leftTuple.getPropagationContext();
        pctx = RuleTerminalNode.findMostRecentPropagationContext(leftTuple, pctx);

        boolean blocked = false;
        RuleTerminalNodeLeftTuple rtnLeftTuple = (RuleTerminalNodeLeftTuple) leftTuple;
        if( executor.isDeclarativeAgendaEnabled() ) {
            if ( rtnLeftTuple.getBlockers() != null && !rtnLeftTuple.getBlockers().isEmpty() ) {
                blocked = true; // declarativeAgenda still blocking LeftTuple, so don't add back to list
            }
        } else {
            blocked = rtnNode.getRule().isNoLoop() && rtnNode.equals(pctx.getTerminalNodeOrigin());
        }

        if ( salience != null ) {
            // dynamic salience: re-evaluate for the updated match
            salienceInt = salience.getValue( new DefaultKnowledgeHelper(rtnLeftTuple, wm), rtnNode.getRule(), wm);
        }

        if (agenda.getActivationsFilter() != null && !agenda.getActivationsFilter().accept( rtnLeftTuple, wm, rtnNode)) {
            // only relevant for serialization, to not re-fire Matches already fired
            return;
        }

        if ( !blocked ) {
            boolean addToExector = true;
            if ( rtnNode.getRule().isLockOnActive() && pctx.getType() != org.kie.api.runtime.rule.PropagationContext.RULE_ADDITION ) {
                long handleRecency = ((InternalFactHandle) pctx.getFactHandle()).getRecency();
                InternalAgendaGroup agendaGroup = executor.getRuleAgendaItem().getAgendaGroup();
                if (blockedByLockOnActive(rtnNode.getRule(), pctx, handleRecency, agendaGroup)) {
                    addToExector = false;
                }
            }

            if ( addToExector ) {
                if (!rtnLeftTuple.isQueued() ) {
                    // not queued, so already fired, so it's effectively recreated
                    EventSupport es = (EventSupport) wm;
                    es.getAgendaEventSupport().fireActivationCreated(rtnLeftTuple, wm);

                    rtnLeftTuple.update(salienceInt, pctx);
                    executor.addLeftTuple(leftTuple);
                }
            }
        } else {
            // LeftTuple is blocked, and thus not queued, so just update its values
            rtnLeftTuple.update(salienceInt, pctx);
        }

        if( !rtnNode.isFireDirect() && executor.isDeclarativeAgendaEnabled()) {
            agenda.modifyActivation(rtnLeftTuple, rtnLeftTuple.isQueued());
        }
    }

    // Walks the staged delete list and cancels each tuple's activation.
    public void doLeftDeletes(InternalWorkingMemory wm,
                              LeftTupleSets srcLeftTuples,
                              RuleExecutor executor) {
        for (LeftTuple leftTuple = srcLeftTuples.getDeleteFirst(); leftTuple != null; ) {
            LeftTuple next = leftTuple.getStagedNext();
            doLeftDelete(wm, executor, leftTuple);
            leftTuple.clearStaged();
            leftTuple = next;
        }
    }

    // Cancels the activation for one deleted tuple and detaches it from the
    // executor's list (unless it's an expiration propagation, which must still fire).
    public static void doLeftDelete(InternalWorkingMemory wm,
                                    RuleExecutor executor,
                                    LeftTuple leftTuple) {
        PropagationContext pctx = leftTuple.getPropagationContext();
        pctx = RuleTerminalNode.findMostRecentPropagationContext(leftTuple, pctx);

        RuleTerminalNodeLeftTuple rtnLt = ( RuleTerminalNodeLeftTuple ) leftTuple;

        Activation activation = (Activation) leftTuple;
        activation.setMatched( false );

        wm.getAgenda().cancelActivation( leftTuple, pctx, wm, activation, rtnLt.getTerminalNode() );

        if ( leftTuple.getMemory() != null && (pctx.getType() != PropagationContext.EXPIRATION ) ) {
            // Expiration propagations should not be removed from the list, as they still need to fire
            executor.removeLeftTuple(leftTuple);
        }

        rtnLt.setActivationUnMatchListener(null);
        leftTuple.setObject(null);
    }

    // True when lock-on-active should suppress this activation: the group was
    // already active (or cleared) before the triggering fact was inserted.
    private static boolean blockedByLockOnActive(RuleImpl rule,
                                                 PropagationContext pctx,
                                                 long handleRecency,
                                                 InternalAgendaGroup agendaGroup) {
        if ( rule.isLockOnActive() ) {
            boolean isActive = agendaGroup.isActive();
            long activatedForRecency = agendaGroup.getActivatedForRecency();
            long clearedForRecency = agendaGroup.getClearedForRecency();

            if ( isActive && activatedForRecency < handleRecency &&
                 agendaGroup.getAutoFocusActivator() != pctx ) {
                return true;
            } else if ( clearedForRecency != -1 && clearedForRecency >= handleRecency ) {
                return true;
            }
        }
        return false;
    }
}
/** * Copyright (c) 2015 dmulloy2 * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/
package net.dmulloy2.sworntickets.tickets;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Map.Entry;

import net.dmulloy2.types.LazyLocation;

import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.configuration.serialization.ConfigurationSerializable;
import org.bukkit.configuration.serialization.ConfigurationSerialization;
import org.bukkit.configuration.serialization.SerializableAs;
import org.bukkit.entity.Player;

/**
 * A support ticket: an opening location plus an ordered event log (open,
 * close, reopen, assign, label changes). Serialized to/from YAML via Bukkit's
 * {@link ConfigurationSerializable} reflection-based round trip.
 *
 * @author dmulloy2
 */
@SerializableAs("net.dmulloy2.Ticket")
public class Ticket implements ConfigurationSerializable
{
	private int id;
	private LazyLocation location;

	// Label names as stored on disk; actualLabels is the resolved cache.
	private List<String> labels = new ArrayList<String>();
	private List<Event> events = new ArrayList<Event>();

	private transient List<Label> actualLabels = null;

	private Ticket() { }

	public Ticket(int id, Player player, String description)
	{
		this.id = id;

		Event open = Event.create(EventType.OPEN, player, description);
		events.add(open);

		this.location = new LazyLocation(player);
	}

	/**
	 * Reflection-based deserialization constructor used by
	 * {@link ConfigurationSerialization}: copies each map entry into the field
	 * of the same name. Unknown keys and unreadable fields are ignored.
	 */
	public Ticket(Map<String, Object> args)
	{
		for (Entry<String, Object> entry : args.entrySet())
		{
			try
			{
				for (Field field : getClass().getDeclaredFields())
				{
					if (field.getName().equals(entry.getKey()))
					{
						boolean accessible = field.isAccessible();
						field.setAccessible(true);
						field.set(this, entry.getValue());
						field.setAccessible(accessible);
					}
				}
			}
			catch (Throwable ignored)
			{
				// Best-effort: a single bad entry should not abort deserialization
			}
		}
	}

	/**
	 * Serializes every non-transient field, omitting defaults (zero numbers,
	 * false booleans, empty collections/maps, nulls) to keep the YAML small.
	 */
	@Override
	public Map<String, Object> serialize()
	{
		Map<String, Object> data = new LinkedHashMap<>();

		for (Field field : getClass().getDeclaredFields())
		{
			if (Modifier.isTransient(field.getModifiers()))
				continue;

			try
			{
				boolean accessible = field.isAccessible();
				field.setAccessible(true);

				if (field.getType().equals(Integer.TYPE))
				{
					if (field.getInt(this) != 0)
						data.put(field.getName(), field.getInt(this));
				}
				else if (field.getType().equals(Long.TYPE))
				{
					if (field.getLong(this) != 0)
						data.put(field.getName(), field.getLong(this));
				}
				else if (field.getType().equals(Boolean.TYPE))
				{
					if (field.getBoolean(this))
						data.put(field.getName(), field.getBoolean(this));
				}
				// Fixed: the isAssignableFrom calls were reversed
				// (fieldType.isAssignableFrom(Collection.class) asks whether Collection
				// is a subtype of the field type, which is false for List fields), so
				// empty collections/maps were being serialized via the fallback branch.
				else if (Collection.class.isAssignableFrom(field.getType()))
				{
					if (! ((Collection<?>) field.get(this)).isEmpty())
						data.put(field.getName(), field.get(this));
				}
				else if (String.class.isAssignableFrom(field.getType()))
				{
					if (field.get(this) != null)
						data.put(field.getName(), field.get(this));
				}
				else if (Map.class.isAssignableFrom(field.getType()))
				{
					if (! ((Map<?, ?>) field.get(this)).isEmpty())
						data.put(field.getName(), field.get(this));
				}
				else
				{
					if (field.get(this) != null)
						data.put(field.getName(), field.get(this));
				}

				field.setAccessible(accessible);
			}
			catch (Throwable ignored)
			{
				// Best-effort: skip fields that cannot be read
			}
		}

		return data;
	}

	public int getId() { return id; }

	public LazyLocation getLocation() { return location; }

	/**
	 * Resolves the stored label names into {@link Label} objects, caching the
	 * result. Names that no longer resolve are silently dropped.
	 */
	public List<Label> getLabels()
	{
		if (actualLabels == null)
		{
			actualLabels = new ArrayList<Label>();
			for (String name : labels)
			{
				Label label = Label.getLabel(name);
				if (label != null)
				{
					actualLabels.add(label);
				}
			}
		}

		return actualLabels;
	}

	public void addLabel(CommandSender sender, Label label)
	{
		getLabels().add(label);
		labels.add(label.getName());
		events.add(Event.create(EventType.LABEL, sender, "add;" + label.getName()));
	}

	public boolean hasLabel(Label label)
	{
		return getLabels().contains(label);
	}

	public void removeLabel(CommandSender sender, Label label)
	{
		getLabels().remove(label);
		labels.remove(label.getName());
		events.add(Event.create(EventType.LABEL, sender, "remove;" + label.getName()));
	}

	public List<Event> getEvents() { return events; }

	public void addEvent(Event event)
	{
		events.add(event);
	}

	// Most recent event of the given type, or null if none.
	private Event getByType(EventType type)
	{
		ListIterator<Event> iter = events.listIterator(events.size());
		while (iter.hasPrevious())
		{
			Event event = iter.previous();
			if (event.getType() == type)
			{
				return event;
			}
		}

		return null;
	}

	public boolean hasEvent(EventType type)
	{
		return getByType(type) != null;
	}

	// Will always be the first event
	public Event getOpened()
	{
		return events.get(0);
	}

	public Event getClosed()
	{
		return getByType(EventType.CLOSE);
	}

	/**
	 * A ticket is open unless the most recent CLOSE/REOPEN event is a CLOSE.
	 */
	public boolean isOpen()
	{
		ListIterator<Event> iter = events.listIterator(events.size());
		while (iter.hasPrevious())
		{
			Event event = iter.previous();
			if (event.getType() == EventType.REOPEN)
			{
				return true;
			}
			else if (event.getType() == EventType.CLOSE)
			{
				return false;
			}
		}

		return true;
	}

	public Event getAssigned()
	{
		return getByType(EventType.ASSIGN);
	}

	public boolean isOwner(Player player)
	{
		return getOpened().getUniqueId().equals(player.getUniqueId().toString());
	}

	// TODO Use proper SQL
	/**
	 * Rebuilds a Ticket from a database row: the YAML column holds the
	 * serialized fields; the id column is authoritative for the id.
	 */
	public static Ticket fromResultSet(ResultSet rs) throws Throwable
	{
		String yaml = rs.getString("yaml");
		YamlConfiguration config = new YamlConfiguration();
		config.loadFromString(yaml);

		Ticket ticket = (Ticket) ConfigurationSerialization.deserializeObject(config.getValues(false), Ticket.class);

		// Fixed: the id was previously assigned to a throw-away Ticket instance
		// and discarded; apply the row's id to the ticket actually returned.
		ticket.id = rs.getInt("id");
		return ticket;
	}
}
/*
Copyright 2006 Jerry Huxtable

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package com.jhlabs.image;

import java.awt.*;
import java.awt.geom.*;
import java.awt.image.*;
import com.jhlabs.composite.*;

/**
 * A filter which renders "glints" on bright parts of the image.
 */
public class GlintFilter extends AbstractBufferedImageOp {

	private float threshold = 1.0f;      // luminance threshold (0..1) above which a pixel can glint
	private int length = 5;              // length of the horizontal/vertical star rays, in pixels
	private float blur = 0.0f;           // Gaussian blur radius applied to the brightness mask
	private float amount = 0.1f;         // intensity scale applied to the ray colors
	private boolean glintOnly = false;   // if true, output only the stars, not the source image
	private Colormap colormap = new LinearColormap( 0xffffffff, 0xff000000 );
	private float coverage = 1.0f; // probability in percentage

	public GlintFilter() {
	}

	public float getCoverage() {
		return coverage;
	}

	// Probability (compared against Math.random()) that a qualifying pixel gets a glint.
	public void setCoverage(float coverage) {
		this.coverage = coverage;
	}

	/**
	 * Set the threshold value.
	 * @param threshold the threshold value
	 * @see #getThreshold
	 */
	public void setThreshold( float threshold ) {
		this.threshold = threshold;
	}

	/**
	 * Get the threshold value.
	 * @return the threshold value
	 * @see #setThreshold
	 */
	public float getThreshold() {
		return threshold;
	}

	/**
	 * Set the amount of glint.
	 * @param amount the amount
	 * @min-value 0
	 * @max-value 1
	 * @see #getAmount
	 */
	public void setAmount( float amount ) {
		this.amount = amount;
	}

	/**
	 * Get the amount of glint.
	 * @return the amount
	 * @see #setAmount
	 */
	public float getAmount() {
		return amount;
	}

	/**
	 * Set the length of the stars.
	 * @param length the length
	 * @see #getLength
	 */
	public void setLength( int length ) {
		this.length = length;
	}

	/**
	 * Get the length of the stars.
	 * @return the length
	 * @see #setLength
	 */
	public int getLength() {
		return length;
	}

	/**
	 * Set the blur that is applied before thresholding.
	 * @param blur the blur radius
	 * @see #getBlur
	 */
	public void setBlur(float blur) {
		this.blur = blur;
	}

	/**
	 * Get the blur that is applied before thresholding.
	 * @return the blur radius
	 * @see #setBlur
	 */
	public float getBlur() {
		return blur;
	}

	/**
	 * Set whether to render the stars and the image or only the stars.
	 * @param glintOnly true to render only stars
	 * @see #getGlintOnly
	 */
	public void setGlintOnly(boolean glintOnly) {
		this.glintOnly = glintOnly;
	}

	/**
	 * Get whether to render the stars and the image or only the stars.
	 * @return true to render only stars
	 * @see #setGlintOnly
	 */
	public boolean getGlintOnly() {
		return glintOnly;
	}

	/**
	 * Set the colormap to be used for the filter.
	 * @param colormap the colormap
	 * @see #getColormap
	 */
	public void setColormap(Colormap colormap) {
		this.colormap = colormap;
	}

	/**
	 * Get the colormap to be used for the filter.
	 * @return the colormap
	 * @see #setColormap
	 */
	public Colormap getColormap() {
		return colormap;
	}

	// Pipeline: build a thresholded brightness mask, optionally blur it, then
	// for each masked bright pixel draw additive rays (horizontal, vertical and,
	// at reduced length, four diagonals) colored from the colormap.
	public BufferedImage filter( BufferedImage src, BufferedImage dst ) {
		int width = src.getWidth();
		int height = src.getHeight();
		int[] pixels = new int[width];
		int length2 = (int)(length / 1.414f); // diagonal ray length: length / sqrt(2)
		int[] colors = new int[length+1];
		int[] colors2 = new int[length2+1];

		if ( colormap != null ) {
			// Precompute the additive ray colors, scaled by 'amount', indexed by
			// distance from the ray origin.
			for (int i = 0; i <= length; i++) {
				int argb = colormap.getColor( (float)i/length );
				int r = (argb >> 16) & 0xff;
				int g = (argb >> 8) & 0xff;
				int b = argb & 0xff;
				argb = (argb & 0xff000000) | ((int)(amount*r) << 16) | ((int)(amount*g) << 8) | (int)(amount*b);
				colors[i] = argb;
			}
			for (int i = 0; i <= length2; i++) {
				int argb = colormap.getColor( (float)i/length2 );
				int r = (argb >> 16) & 0xff;
				int g = (argb >> 8) & 0xff;
				int b = argb & 0xff;
				argb = (argb & 0xff000000) | ((int)(amount*r) << 16) | ((int)(amount*g) << 8) | (int)(amount*b);
				colors2[i] = argb;
			}
		}

		BufferedImage mask = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);

		// Build the mask: grey level = average luminance if above threshold, else black.
		int threshold3 = (int)(threshold*3*255);
		for ( int y = 0; y < height; y++ ) {
			getRGB( src, 0, y, width, 1, pixels );
			for ( int x = 0; x < width; x++ ) {
				int rgb = pixels[x];
				int a = rgb & 0xff000000;
				int r = (rgb >> 16) & 0xff;
				int g = (rgb >> 8) & 0xff;
				int b = rgb & 0xff;
				int l = r + g + b;
				if (l < threshold3)
					pixels[x] = 0xff000000;
				else {
					l /= 3;
					pixels[x] = a | (l << 16) | (l << 8) | l;
				}
			}
			setRGB( mask, 0, y, width, 1, pixels );
		}

		if ( blur != 0 )
			mask = new GaussianFilter(blur).filter( mask, null );

		if ( dst == null )
			dst = createCompatibleDestImage( src, null );

		int[] dstPixels;
		if ( glintOnly )
			dstPixels = new int[width*height]; // start from black: stars only
		else
			dstPixels = getRGB( src, 0, 0, width, height, null );//FIXME - only need 2*length

		for ( int y = 0; y < height; y++ ) {
			int index = y*width;
			getRGB( mask, 0, y, width, 1, pixels );
			// Ray extents clipped to the image, expressed as offsets from (x, y).
			int ymin = Math.max( y-length, 0 )-y;
			int ymax = Math.min( y+length, height-1 )-y;
			int ymin2 = Math.max( y-length2, 0 )-y;
			int ymax2 = Math.min( y+length2, height-1 )-y;

			for ( int x = 0; x < width; x++ ) {
				boolean createGlint = (coverage > Math.random()); // nondeterministic sampling
				if (createGlint && (pixels[x] & 0xff) > threshold*255 ) {
					int xmin = Math.max( x-length, 0 )-x;
					int xmax = Math.min( x+length, width-1 )-x;
					int xmin2 = Math.max( x-length2, 0 )-x;
					int xmax2 = Math.min( x+length2, width-1 )-x;

					// Horizontal
					for ( int i = 0, k = 0; i <= xmax; i++, k++ )
						dstPixels[index+i] = PixelUtils.combinePixels( dstPixels[index+i], colors[k], PixelUtils.ADD );
					for ( int i = -1, k = 1; i >= xmin; i--, k++ )
						dstPixels[index+i] = PixelUtils.combinePixels( dstPixels[index+i], colors[k], PixelUtils.ADD );
					// Vertical
					for ( int i = 1, j = index+width, k = 0; i <= ymax; i++, j += width, k++ )
						dstPixels[j] = PixelUtils.combinePixels( dstPixels[j], colors[k], PixelUtils.ADD );
					for ( int i = -1, j = index-width, k = 0; i >= ymin; i--, j -= width, k++ )
						dstPixels[j] = PixelUtils.combinePixels( dstPixels[j], colors[k], PixelUtils.ADD );

					// Diagonals (shorter rays, colors2), one loop per quadrant.
					// int xymin = Math.max( xmin2, ymin2 );
					// int xymax = Math.min( xmax2, ymax2 );

					// SE
					int count = Math.min( xmax2, ymax2 );
					for ( int i = 1, j = index+width+1, k = 0; i <= count; i++, j += width+1, k++ )
						dstPixels[j] = PixelUtils.combinePixels( dstPixels[j], colors2[k], PixelUtils.ADD );
					// NW
					count = Math.min( -xmin2, -ymin2 );
					for ( int i = 1, j = index-width-1, k = 0; i <= count; i++, j -= width+1, k++ )
						dstPixels[j] = PixelUtils.combinePixels( dstPixels[j], colors2[k], PixelUtils.ADD );
					// NE
					count = Math.min( xmax2, -ymin2 );
					for ( int i = 1, j = index-width+1, k = 0; i <= count; i++, j += -width+1, k++ )
						dstPixels[j] = PixelUtils.combinePixels( dstPixels[j], colors2[k], PixelUtils.ADD );
					// SW
					count = Math.min( -xmin2, ymax2 );
					for ( int i = 1, j = index+width-1, k = 0; i <= count; i++, j += width-1, k++ )
						dstPixels[j] = PixelUtils.combinePixels( dstPixels[j], colors2[k], PixelUtils.ADD );
				}
				index++;
			}
		}
		setRGB( dst, 0, 0, width, height, dstPixels );

		return dst;
	}

	public String toString() {
		return "Effects/Glint...";
	}
}
package GenERRate;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

/**
 * Class DeletionPOSWhereError
 *
 * Deletes a word with a given part-of-speech (POS) from a sentence, but only
 * when the word occurs in a specific context: directly after a word tagged
 * POSBefore, directly before a word tagged POSAfter, or both.
 *
 * @author Jennifer Foster
 */
public class DeletionPOSWhereError extends DeletionPOSError {
    /**
     * The POS of the word before the word to be deleted.
     */
    private String POSBefore;
    /**
     * The POS of the word after the word to be deleted.
     */
    private String POSAfter;

    /**
     * Constrains the deletion by one neighbouring tag only.
     *
     * @param inputS     the sentence to corrupt
     * @param thePOS     the POS of the word to delete
     * @param thePOSNear the POS of the neighbouring word
     * @param isBefore   true if thePOSNear is the tag of the preceding word,
     *                   false if it is the tag of the following word
     */
    public DeletionPOSWhereError(Sentence inputS, String thePOS, String thePOSNear, boolean isBefore) {
        super(inputS, thePOS);
        if (isBefore) {
            POSBefore = thePOSNear;
            errorInfo = "errortype=\"Deletion" + POSBefore + POS + "Error\"";
        } else {
            POSAfter = thePOSNear;
            errorInfo = "errortype=\"Deletion" + POS + POSAfter + "Error\"";
        }
    }

    /**
     * Constrains the deletion by both neighbouring tags.
     *
     * @param inputS       the sentence to corrupt
     * @param thePOSBefore the POS required directly before the deleted word
     * @param thePOS       the POS of the word to delete
     * @param thePOSAfter  the POS required directly after the deleted word
     */
    public DeletionPOSWhereError(Sentence inputS, String thePOSBefore, String thePOS, String thePOSAfter) {
        super(inputS, thePOS);
        POSBefore = thePOSBefore;
        POSAfter = thePOSAfter;
        errorInfo = "errortype=\"Deletion" + POSBefore + POS + POSAfter + "Error\"";
    }

    //For testing purposes
    public static void main(String[] args) {
        try {
            System.out.println("Testing the version with tags");
            Sentence testSentence = new Sentence("This DT man NN walks VBZ and CONJ talks VBZ", true);
            DeletionPOSWhereError deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", true);
            System.out.println(deletionPOSWhereError.insertError());
            /*System.out.println("Testing the version with tags");
            testSentence = new Sentence("This DT man NN walks VBZ and CONJ talks VBZ", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence,"VBZ", "NN", false);
            System.out.println(deletionPOSWhereError.insertError());*/
            System.out.println("Testing the version with tags");
            testSentence = new Sentence("This DT man NN walks VBZ and CONJ talks VBZ", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "CONJ", false);
            System.out.println(deletionPOSWhereError.insertError());
            System.out.println("Testing the version with tags");
            testSentence = new Sentence("This DT man NN walks VBZ and CONJ talks VBZ", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", "CONJ");
            System.out.println(deletionPOSWhereError.insertError());
            System.out.println("Testing the version with tags");
            testSentence = new Sentence("This DT woman NN walks VBZ and CONJ this DT woman NN talks VBZ", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", true);
            System.out.println(deletionPOSWhereError.insertError());
            System.out.println("Testing the version with tags");
            testSentence = new Sentence("This DT woman NN walks VBZ and CONJ this DT woman NN talks VBZ and CONJ this DT woman NN laughs VBZ", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", true);
            System.out.println(deletionPOSWhereError.insertError());
            System.out.println("Testing the version with tags");
            testSentence = new Sentence("He PRP walked VBD and CONJ talked VBD", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", true);
            System.out.println(deletionPOSWhereError.insertError());
            System.out.println("Testing the version with tags");
            testSentence = new Sentence("This DT man NN walked VBD and CONJ talked VBD", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", true);
            System.out.println(deletionPOSWhereError.insertError());
            System.out.println("Testing the version with tags");
            testSentence = new Sentence("He PRP walks VBZ and CONJ talks VBZ", true);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", true);
            System.out.println(deletionPOSWhereError.insertError());
            System.out.println("Testing the version without tags");
            testSentence = new Sentence("This is a test", false);
            deletionPOSWhereError = new DeletionPOSWhereError(testSentence, "VBZ", "NN", true);
            System.out.println(deletionPOSWhereError.insertError());
        } catch (CannotCreateErrorException ex) {
            System.err.println(ex.getMessage());
        }
    }

    /**
     * Get the value of POSBefore
     * The POS of the word before the word to be deleted.
     *
     * @return the value of POSBefore
     */
    private String getPOSBefore() {
        return POSBefore;
    }

    /**
     * Set the value of POSBefore
     * The POS of the word before the word to be deleted.
     *
     * @param newVar the new value of POSBefore
     */
    private void setPOSBefore(String newVar) {
        POSBefore = newVar;
    }

    /**
     * Get the value of POSAfter
     * The POS of the word after the word to be deleted.
     *
     * @return the value of POSAfter
     */
    private String getPOSAfter() {
        return POSAfter;
    }

    /**
     * Set the value of POSAfter
     * The POS of the word after the word to be deleted.
     *
     * @param newVar the new value of POSAfter
     */
    private void setPOSAfter(String newVar) {
        POSAfter = newVar;
    }

    /**
     * Deletes a word with part-of-speech POS appearing directly after another word
     * with part-of-speech POSBefore and/or appearing directly before another word with part-of-speech POSAfter.
     * If POSBefore is null, a CannotCreateErrorException is thrown if the pattern POS POSAfter does not exist.
     * If POSAfter is null, a CannotCreateErrorException is thrown if the pattern POSBefore POS does not exist.
     * If POSBefore and POSAfter are not null, a CannotCreateErrorException is thrown if the pattern
     * POSBefore POS POSAfter does not exist.
     *
     * @return Sentence a copy of the input sentence with one matching word removed
     * @throws CannotCreateErrorException if the sentence is untagged, too short,
     *         or contains no occurrence of the required tag pattern
     */
    public Sentence insertError() throws CannotCreateErrorException {
        if (!sentence.areTagsIncluded()) {
            throw new CannotCreateErrorException("Cannot introduce a " + errorInfo + ". The input sentence is not tagged");
        } else {
            //create the new sentence (work on a copy; the original is left untouched)
            Sentence newSentence = new Sentence(sentence.toString(), sentence.areTagsIncluded());
            Word wordToGo, wordBefore, wordAfter, word;
            if (POSAfter == null) {
                //find pair sequences tagged as POSBefore, POS - store position of POS
                List<Integer> wordsForDeletion = new ArrayList<Integer>();
                for (int i = 1; i < newSentence.size(); i++) {
                    wordBefore = newSentence.getWord(i - 1);
                    word = newSentence.getWord(i);
                    if (word.getTag().equals(POS) && wordBefore.getTag().equals(POSBefore)) {
                        wordsForDeletion.add(i);
                    }
                }
                //if there aren't any word pairs tagged as POSBefore, POS in sentence, then we can't do anything
                if (wordsForDeletion.size() == 0) {
                    throw new CannotCreateErrorException("Cannot introduce a " + errorInfo + ". There aren't any word pairs tagged as: " + POSBefore + "," + POS + " in the sentence.");
                } else {
                    //randomly pick one of these and delete it from the sentence
                    //(seeded with the sentence's hashCode, so the pick is reproducible per sentence)
                    Random rand = new Random(newSentence.hashCode());
                    int randNo = rand.nextInt(wordsForDeletion.size());
                    int randPos = (wordsForDeletion.get(randNo)).intValue();
                    wordToGo = newSentence.getWord(randPos);
                    wordBefore = newSentence.getWord(randPos - 1);
                    newSentence.removeWord(randPos);
                    // randPos + 1 reports the word's 1-based position in the sentence
                    newSentence.setErrorDescription(errorInfo + " details=\"" + wordToGo.getToken() + " at " + (randPos + 1) + " after " + wordBefore.getToken() + "\"");
                }
            } else if (POSBefore == null) {
                //find pair sequences tagged as POS, POSAfter - store position of POS
                List<Integer> wordsForDeletion = new ArrayList<Integer>();
                for (int i = 0; i < newSentence.size() - 1; i++) {
                    wordAfter = newSentence.getWord(i + 1);
                    word = newSentence.getWord(i);
                    if (word.getTag().equals(POS) && wordAfter.getTag().equals(POSAfter)) {
                        wordsForDeletion.add(i);
                    }
                }
                //if there aren't any word pairs tagged as POS, POSAfter in sentence, then we can't do anything
                if (wordsForDeletion.size() == 0) {
                    throw new CannotCreateErrorException("Cannot introduce a " + errorInfo + ". There aren't any word pairs tagged as: " + POS + "," + POSAfter + " in the sentence.");
                } else {
                    //randomly pick one of these and delete it from the sentence
                    Random rand = new Random(newSentence.hashCode());
                    int randNo = rand.nextInt(wordsForDeletion.size());
                    int randPos = wordsForDeletion.get(randNo);
                    wordToGo = newSentence.getWord(randPos);
                    wordAfter = newSentence.getWord(randPos + 1);
                    newSentence.removeWord(randPos);
                    newSentence.setErrorDescription(errorInfo + " details=\"" + wordToGo.getToken() + " at " + (randPos + 1) + " before " + wordAfter.getToken() + "\"");
                }
            } else {
                // Both context tags are set. "start"/"end" are sentinel tags meaning
                // sentence boundaries, which relax the minimum-length requirement to 2.
                if (POSBefore.equalsIgnoreCase("start") && newSentence.size() < 2) {
                    throw new CannotCreateErrorException("Cannot introduce a " + errorInfo + ". There are less than two words in the input sentence.");
                } else if (POSAfter.equalsIgnoreCase("end") && newSentence.size() < 2) {
                    throw new CannotCreateErrorException("Cannot introduce a " + errorInfo + ". There are less than two words in the input sentence.");
                } else if (newSentence.size() < 3) {
                    throw new CannotCreateErrorException("Cannot introduce a " + errorInfo + ". There are less than three words in the input sentence.");
                }
                List<Integer> wordsForDeletion = new ArrayList<Integer>();
                // If POSBefore is "start", see if the first word is tagged as POS and the second as POSAfter
                if (POSBefore.equalsIgnoreCase("start")) {
                    Word firstWord = newSentence.getWord(0);
                    Word secondWord = newSentence.getWord(1);
                    if (firstWord.getTag().equals(POS) && secondWord.getTag().equals(POSAfter)) {
                        wordsForDeletion.add(0);
                    }
                }
                // If POSAfter is "end", see if the second last word is tagged as POSBefore and the last as POS
                else if (POSAfter.equalsIgnoreCase("end")) {
                    Word lastWord = newSentence.getWord(newSentence.size() - 1);
                    Word secondLastWord = newSentence.getWord(newSentence.size() - 2);
                    if (lastWord.getTag().equals(POS) && secondLastWord.getTag().equals(POSBefore)) {
                        wordsForDeletion.add(newSentence.size() - 1);
                    }
                }
                //find triple sequences tagged as POSBefore, POS, POSAfter - store position of POS
                else {
                    for (int i = 1; i < newSentence.size() - 1; i++) {
                        wordAfter = newSentence.getWord(i + 1);
                        wordBefore = newSentence.getWord(i - 1);
                        word = newSentence.getWord(i);
                        if (word.getTag().equals(POS) && wordAfter.getTag().equals(POSAfter) && wordBefore.getTag().equals(POSBefore)) {
                            wordsForDeletion.add(i);
                        }
                    }
                }
                //if there aren't any word triples tagged as POSBefore, POS, POSAfter in sentence, then we can't do anything
                if (wordsForDeletion.size() == 0) {
                    throw new CannotCreateErrorException("Cannot introduce a " + errorInfo + ". There aren't any word pairs tagged as: " + POSBefore + "," + POS + "," + POSAfter + " in the sentence.");
                } else {
                    //randomly pick one of these and delete it from the sentence
                    Random rand = new Random(newSentence.hashCode());
                    int randNo = rand.nextInt(wordsForDeletion.size());
                    int randPos = wordsForDeletion.get(randNo);
                    wordToGo = newSentence.getWord(randPos);
                    // At a sentence boundary the neighbour is reported with a
                    // placeholder "start"/"end" word instead of a real token.
                    if (randPos + 1 < newSentence.size()) {
                        wordAfter = newSentence.getWord(randPos + 1);
                    } else {
                        wordAfter = new Word("end");
                    }
                    if (randPos - 1 >= 0) {
                        wordBefore = newSentence.getWord(randPos - 1);
                    } else {
                        wordBefore = new Word("start");
                    }
                    newSentence.removeWord(randPos);
                    newSentence.setErrorDescription(errorInfo + " details=\"" + wordToGo.getToken() + " at " + (randPos + 1) + " after " + wordBefore.getToken() + " before " + wordAfter.getToken() + "\"");
                }
            }
            return newSentence;
        }
    }
}
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.constraintlayout.core.widgets;

import static androidx.constraintlayout.core.LinearSystem.FULL_DEBUG;
import static androidx.constraintlayout.core.widgets.ConstraintWidget.DimensionBehaviour.FIXED;
import static androidx.constraintlayout.core.widgets.ConstraintWidget.DimensionBehaviour.WRAP_CONTENT;

import androidx.constraintlayout.core.LinearSystem;
import androidx.constraintlayout.core.Metrics;
import androidx.constraintlayout.core.SolverVariable;
import androidx.constraintlayout.core.widgets.analyzer.BasicMeasure;
import androidx.constraintlayout.core.widgets.analyzer.DependencyGraph;
import androidx.constraintlayout.core.widgets.analyzer.Direct;
import androidx.constraintlayout.core.widgets.analyzer.Grouping;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

/**
 * A container of ConstraintWidget that can layout its children
 */
public class ConstraintWidgetContainer extends WidgetContainer {

    private static final int MAX_ITERATIONS = 8;

    private static final boolean DEBUG = FULL_DEBUG;
    private static final boolean DEBUG_LAYOUT = false;
    static final boolean DEBUG_GRAPH = false;

    // Incremental measurement solver shared by measure()/updateHierarchy().
    BasicMeasure mBasicMeasureSolver = new BasicMeasure(this);

    ////////////////////////////////////////////////////////////////////////////////////////////////
    // Graph measures
    ////////////////////////////////////////////////////////////////////////////////////////////////

    public DependencyGraph mDependencyGraph = new DependencyGraph(this);
    private int mPass; // number of layout passes

    /**
     * Invalidate the graph of constraints
     */
    public void invalidateGraph() {
        mDependencyGraph.invalidateGraph();
    }

    /**
     * Invalidate the widgets measures
     */
    public void invalidateMeasures() {
        mDependencyGraph.invalidateMeasures();
    }

    /**
     * Direct-resolution measurement; delegates entirely to the dependency graph.
     */
    public boolean directMeasure(boolean optimizeWrap) {
        return mDependencyGraph.directMeasure(optimizeWrap);
//        int paddingLeft = getX();
//        int paddingTop = getY();
//        if (mDependencyGraph.directMeasureSetup(optimizeWrap)) {
//            mDependencyGraph.measureWidgets();
//            boolean allResolved =
//                    mDependencyGraph.directMeasureWithOrientation(optimizeWrap, HORIZONTAL);
//            allResolved &= mDependencyGraph.directMeasureWithOrientation(optimizeWrap, VERTICAL);
//            for (ConstraintWidget child : mChildren) {
//                child.setDrawX(child.getDrawX() + paddingLeft);
//                child.setDrawY(child.getDrawY() + paddingTop);
//            }
//            setX(paddingLeft);
//            setY(paddingTop);
//            return allResolved;
//        }
//        return false;
    }

    /** Delegates the direct-measure setup step to the dependency graph. */
    public boolean directMeasureSetup(boolean optimizeWrap) {
        return mDependencyGraph.directMeasureSetup(optimizeWrap);
    }

    /** Delegates a single-orientation direct measure to the dependency graph. */
    public boolean directMeasureWithOrientation(boolean optimizeWrap, int orientation) {
        return mDependencyGraph.directMeasureWithOrientation(optimizeWrap, orientation);
    }

    /** Marks terminal widgets in the graph, based on this container's dimension behaviours. */
    public void defineTerminalWidgets() {
        mDependencyGraph.defineTerminalWidgets(getHorizontalDimensionBehaviour(),
                getVerticalDimensionBehaviour());
    }

    ////////////////////////////////////////////////////////////////////////////////////////////////

    /**
     * Measure the layout
     *
     * @param optimizationLevel
     * @param widthMode
     * @param widthSize
     * @param heightMode
     * @param heightSize
     * @param paddingX
     * @param paddingY
     */
    public long measure(int optimizationLevel, int widthMode, int widthSize,
            int heightMode, int heightSize, int lastMeasureWidth, int lastMeasureHeight,
            int paddingX, int paddingY) {
        mPaddingLeft = paddingX;
        mPaddingTop = paddingY;
        // Delegate the actual measurement to the BasicMeasure solver.
        return mBasicMeasureSolver.solverMeasure(this, optimizationLevel, paddingX, paddingY,
                widthMode, widthSize, heightMode, heightSize,
                lastMeasureWidth, lastMeasureHeight);
    }

    /** Propagates hierarchy updates to the BasicMeasure solver. */
    public void updateHierarchy() {
        mBasicMeasureSolver.updateHierarchy(this);
    }

    protected BasicMeasure.Measurer mMeasurer = null;

    /** Sets the measurer used both by this container and its dependency graph. */
    public void setMeasurer(BasicMeasure.Measurer measurer) {
        mMeasurer = measurer;
        mDependencyGraph.setMeasurer(measurer);
    }

    public BasicMeasure.Measurer getMeasurer() {
        return mMeasurer;
    }

    private boolean mIsRtl = false;
    public Metrics mMetrics;

    /** Installs a metrics collector on this container and its linear system. */
    public void fillMetrics(Metrics metrics) {
        mMetrics = metrics;
        mSystem.fillMetrics(metrics);
    }

    protected LinearSystem mSystem = new LinearSystem();

    int mPaddingLeft;
    int mPaddingTop;
    int mPaddingRight;
    int mPaddingBottom;

    public int mHorizontalChainsSize = 0;
    public int mVerticalChainsSize = 0;
    // Chain heads; arrays start at 4 entries (growth policy not visible here).
    ChainHead[] mVerticalChainsArray = new ChainHead[4];
    ChainHead[] mHorizontalChainsArray = new ChainHead[4];

    public boolean mGroupsWrapOptimized = false;
    public boolean mHorizontalWrapOptimized = false;
    public boolean mVerticalWrapOptimized = false;
    public int mWrapFixedWidth = 0;
    public int mWrapFixedHeight = 0;

    private int mOptimizationLevel = Optimizer.OPTIMIZATION_STANDARD;
    public boolean mSkipSolver = false;

    private boolean mWidthMeasuredTooSmall = false;
    private boolean mHeightMeasuredTooSmall = false;

    /*-----------------------------------------------------------------------*/
    // Construction
    /*-----------------------------------------------------------------------*/

    /**
     * Default constructor
     */
    public ConstraintWidgetContainer() {
    }

    /**
     * Constructor
     *
     * @param x      x position
     * @param y      y position
     * @param width  width of the layout
     * @param height height of the layout
     */
    public ConstraintWidgetContainer(int x, int y, int width, int height) {
        super(x, y, width, height);
    }

    /**
     * Constructor
     *
     * @param width  width of the layout
     * @param height height of the layout
     */
    public ConstraintWidgetContainer(int width, int height) {
        super(width, height);
    }

    /**
     * Constructor
     *
     * @param debugName debug name assigned to the container
     * @param width     width of the layout
     * @param height    height of the layout
     */
    public ConstraintWidgetContainer(String debugName, int width, int height) {
        super(width, height);
        setDebugName(debugName);
    }

    /**
     * Resolves the system directly when possible
     *
     * @param value optimization level
     */
    public void setOptimizationLevel(int value) {
        mOptimizationLevel = value;
        // Keep the linear system's ordering flag in sync with the level.
        LinearSystem.USE_DEPENDENCY_ORDERING = optimizeFor(Optimizer.OPTIMIZATION_DEPENDENCY_ORDERING);
    }

    /**
     * Returns the current optimization level
     *
     * @return the optimization level bitmask
     */
    public int getOptimizationLevel() {
        return mOptimizationLevel;
    }

    /**
     * Returns true if the given feature should be optimized
     *
     * @param feature feature bit(s) to test
     * @return true if all bits of the feature are enabled in the current level
     */
    public boolean optimizeFor(int feature) {
        return (mOptimizationLevel & feature) == feature;
    }

    /**
     * Specify the xml type for the container
     *
     * @return the type name used in XML
     */
    @Override
    public String getType() {
        return "ConstraintLayout";
    }

    /** Resets the solver, padding and solver-skip flag in addition to the base reset. */
    @Override
    public void reset() {
        mSystem.reset();
        mPaddingLeft = 0;
        mPaddingRight = 0;
        mPaddingTop = 0;
        mPaddingBottom = 0;
        mSkipSolver = false;
        super.reset();
    }

    /**
     * Return true if the width given is too small for the content laid out
     */
    public boolean isWidthMeasuredTooSmall() {
        return mWidthMeasuredTooSmall;
    }

    /**
     * Return true if the height given is too small for the content laid out
     */
    public boolean isHeightMeasuredTooSmall() {
        return mHeightMeasuredTooSmall;
    }

    int mDebugSolverPassCount = 0;

    // Weak references to the anchors currently defining the wrap-content
    // min/max bounds; weak so they don't keep widgets alive.
    private WeakReference<ConstraintAnchor> mVerticalWrapMin = null;
    private WeakReference<ConstraintAnchor> mHorizontalWrapMin = null;
    private WeakReference<ConstraintAnchor> mVerticalWrapMax = null;
    private WeakReference<ConstraintAnchor> mHorizontalWrapMax = null;

    /** Tracks the anchor with the largest final value as the vertical wrap minimum. */
    void addVerticalWrapMinVariable(ConstraintAnchor top) {
        if (mVerticalWrapMin == null || mVerticalWrapMin.get() == null
                || top.getFinalValue() > mVerticalWrapMin.get().getFinalValue()) {
            mVerticalWrapMin = new WeakReference<>(top);
        }
    }

    /** Tracks the anchor with the largest final value as the horizontal wrap minimum. */
    public void addHorizontalWrapMinVariable(ConstraintAnchor left) {
        if (mHorizontalWrapMin == null || mHorizontalWrapMin.get() == null
                || left.getFinalValue() > mHorizontalWrapMin.get().getFinalValue()) {
            mHorizontalWrapMin = new WeakReference<>(left);
        }
    }

    /** Tracks the anchor with the largest final value as the vertical wrap maximum. */
    void addVerticalWrapMaxVariable(ConstraintAnchor bottom) {
        if (mVerticalWrapMax == null || mVerticalWrapMax.get() == null
                || bottom.getFinalValue() > mVerticalWrapMax.get().getFinalValue()) {
            mVerticalWrapMax = new WeakReference<>(bottom);
        }
    }

    /** Tracks the anchor with the largest final value as the horizontal wrap maximum. */
    public void addHorizontalWrapMaxVariable(ConstraintAnchor right) {
        if (mHorizontalWrapMax == null || mHorizontalWrapMax.get() == null
                || right.getFinalValue() > mHorizontalWrapMax.get().getFinalValue()) {
            mHorizontalWrapMax = new WeakReference<>(right);
        }
    }

    /** Adds an anchor >= parentMin constraint at equality strength. */
    private void addMinWrap(ConstraintAnchor constraintAnchor, SolverVariable parentMin) {
        SolverVariable variable = mSystem.createObjectVariable(constraintAnchor);
        int wrapStrength = SolverVariable.STRENGTH_EQUALITY;
        mSystem.addGreaterThan(variable, parentMin, 0, wrapStrength);
    }

    /** Adds a parentMax >= anchor constraint at equality strength. */
    private void addMaxWrap(ConstraintAnchor constraintAnchor, SolverVariable parentMax) {
        SolverVariable variable = mSystem.createObjectVariable(constraintAnchor);
        int wrapStrength = SolverVariable.STRENGTH_EQUALITY;
        mSystem.addGreaterThan(parentMax, variable, 0, wrapStrength);
    }

    // Virtual layouts deferred during addChildrenToSolver (see below).
    HashSet<ConstraintWidget> mWidgetsToAdd = new HashSet<>();

    /**
     * Add this widget to the solver
     *
     * @param system the solver we want to add the widget to
     */
    public boolean addChildrenToSolver(LinearSystem system) {
        if (DEBUG) {
            System.out.println("\n#######################################");
            System.out.println("## ADD CHILDREN TO SOLVER (" + mDebugSolverPassCount + ") ##");
            System.out.println("#######################################\n");
            mDebugSolverPassCount++;
        }
        boolean optimize = optimizeFor(Optimizer.OPTIMIZATION_GRAPH);
        addToSolver(system, optimize);
        final int count = mChildren.size();

        // First pass: clear barrier membership flags and detect barriers.
        boolean hasBarriers = false;
        for (int i = 0; i < count; i++) {
            ConstraintWidget widget = mChildren.get(i);
            widget.setInBarrier(HORIZONTAL, false);
            widget.setInBarrier(VERTICAL, false);
            if (widget instanceof Barrier) {
                hasBarriers = true;
            }
        }
        if (hasBarriers) {
            for (int i = 0; i < count; i++) {
                ConstraintWidget widget = mChildren.get(i);
                if (widget instanceof Barrier) {
                    ((Barrier) widget).markWidgets();
                }
            }
        }

        // Widgets flagged addFirst() go in early; virtual layouts among them
        // are deferred so their inter-dependencies can be ordered below.
        mWidgetsToAdd.clear();
        for (int i = 0; i < count; i++) {
            ConstraintWidget widget = mChildren.get(i);
            if (widget.addFirst()) {
                if (widget instanceof VirtualLayout) {
                    mWidgetsToAdd.add(widget);
                } else {
                    widget.addToSolver(system, optimize);
                }
            }
        }

        // If we have virtual layouts, we need to add them to the solver in the correct
        // order (in case they reference one another)
        while (mWidgetsToAdd.size() > 0) {
            int numLayouts = mWidgetsToAdd.size();
            VirtualLayout layout = null;
            for (ConstraintWidget widget : mWidgetsToAdd) {
                layout = (VirtualLayout) widget;
                // we'll go through the virtual layouts that references others first, to give
                // them a shot at setting their constraints.
                if (layout.contains(mWidgetsToAdd)) {
                    layout.addToSolver(system, optimize);
                    mWidgetsToAdd.remove(layout);
                    break;
                }
            }
            if (numLayouts == mWidgetsToAdd.size()) {
                // looks we didn't find anymore dependency, let's add everything.
                for (ConstraintWidget widget : mWidgetsToAdd) {
                    widget.addToSolver(system, optimize);
                }
                mWidgetsToAdd.clear();
            }
        }

        if (LinearSystem.USE_DEPENDENCY_ORDERING) {
            // Dependency-ordered path: collect the remaining widgets and add
            // them ordered along the wrap-content orientation.
            HashSet<ConstraintWidget> widgetsToAdd = new HashSet<>();
            for (int i = 0; i < count; i++) {
                ConstraintWidget widget = mChildren.get(i);
                if (!widget.addFirst()) {
                    widgetsToAdd.add(widget);
                }
            }
            int orientation = VERTICAL;
            if (getHorizontalDimensionBehaviour() == WRAP_CONTENT) {
                orientation = HORIZONTAL;
            }
            addChildrenToSolverByDependency(this, system, widgetsToAdd, orientation, false);
            for (ConstraintWidget widget : widgetsToAdd) {
                Optimizer.checkMatchParent(this, system, widget);
                widget.addToSolver(system, optimize);
            }
        } else {
            for (int i = 0; i < count; i++) {
                ConstraintWidget widget = mChildren.get(i);
                if (widget instanceof ConstraintWidgetContainer) {
                    // Nested containers with WRAP_CONTENT are temporarily forced
                    // to FIXED while being added, then restored.
                    DimensionBehaviour horizontalBehaviour =
                            widget.mListDimensionBehaviors[DIMENSION_HORIZONTAL];
                    DimensionBehaviour verticalBehaviour =
                            widget.mListDimensionBehaviors[DIMENSION_VERTICAL];
                    if (horizontalBehaviour == WRAP_CONTENT) {
                        widget.setHorizontalDimensionBehaviour(FIXED);
                    }
                    if (verticalBehaviour == WRAP_CONTENT) {
                        widget.setVerticalDimensionBehaviour(FIXED);
                    }
                    widget.addToSolver(system, optimize);
                    if (horizontalBehaviour == WRAP_CONTENT) {
                        widget.setHorizontalDimensionBehaviour(horizontalBehaviour);
                    }
                    if (verticalBehaviour == WRAP_CONTENT) {
                        widget.setVerticalDimensionBehaviour(verticalBehaviour);
                    }
                } else {
                    Optimizer.checkMatchParent(this, system, widget);
                    if (!(widget.addFirst())) {
                        widget.addToSolver(system, optimize);
                    }
                }
            }
        }
        if (mHorizontalChainsSize > 0) {
            Chain.applyChainConstraints(this, system, null, HORIZONTAL);
        }
        if (mVerticalChainsSize > 0) {
            Chain.applyChainConstraints(this, system, null, VERTICAL);
        }
        return true;
    }

    /**
     * Update the frame of the layout and its children from the solver
     *
     * @param system the solver we get the values from.
     */
    public boolean updateChildrenFromSolver(LinearSystem system, boolean[] flags) {
        flags[Optimizer.FLAG_RECOMPUTE_BOUNDS] = false;
        boolean optimize = optimizeFor(Optimizer.OPTIMIZATION_GRAPH);
        updateFromSolver(system, optimize);
        final int count = mChildren.size();
        // Returns true if any child reports a dimension override after update.
        boolean hasOverride = false;
        for (int i = 0; i < count; i++) {
            ConstraintWidget widget = mChildren.get(i);
            widget.updateFromSolver(system, optimize);
            if (widget.hasDimensionOverride()) {
                hasOverride = true;
            }
        }
        return hasOverride;
    }

    /** Propagates run-based updates to this container and all of its children. */
    @Override
    public void updateFromRuns(boolean updateHorizontal, boolean updateVertical) {
        super.updateFromRuns(updateHorizontal, updateVertical);
        final int count = mChildren.size();
        for (int i = 0; i < count; i++) {
            ConstraintWidget widget = mChildren.get(i);
            widget.updateFromRuns(updateHorizontal, updateVertical);
        }
    }

    /**
     * Set the padding on this container. It will apply to the position of the children.
     *
     * @param left   left padding
     * @param top    top padding
     * @param right  right padding
     * @param bottom bottom padding
     */
    public void setPadding(int left, int top, int right, int bottom) {
        mPaddingLeft = left;
        mPaddingTop = top;
        mPaddingRight = right;
        mPaddingBottom = bottom;
    }

    /**
     * Set the rtl status. This has implications for Chains.
     *
     * @param isRtl true if we are in RTL.
     */
    public void setRtl(boolean isRtl) {
        mIsRtl = isRtl;
    }

    /**
     * Returns the rtl status.
     *
     * @return true if in RTL, false otherwise.
     */
    public boolean isRtl() {
        return mIsRtl;
    }

    /*-----------------------------------------------------------------------*/
    // Overloaded methods from ConstraintWidget
    /*-----------------------------------------------------------------------*/

    public BasicMeasure.Measure mMeasure = new BasicMeasure.Measure();

    /**
     * Measures a single widget via the given measurer, resolving MATCH_CONSTRAINT
     * and ratio cases into concrete behaviours before delegating to the measurer.
     * Returns whether the widget still needs a solver pass after measurement.
     */
    public static boolean measure(int level,
            ConstraintWidget widget,
            BasicMeasure.Measurer measurer,
            BasicMeasure.Measure measure,
            int measureStrategy) {
        if (DEBUG) {
            System.out.println(Direct.ls(level) + "(M) call to measure " + widget.getDebugName());
        }
        if (measurer == null) {
            return false;
        }
        // GONE widgets, guidelines and barriers take no space and need no measure.
        if (widget.getVisibility() == GONE
                || widget instanceof Guideline
                || widget instanceof Barrier) {
            if (DEBUG) {
                System.out.println(Direct.ls(level) + "(M) no measure needed for "
                        + widget.getDebugName());
            }
            measure.measuredWidth = 0;
            measure.measuredHeight = 0;
            return false;
        }

        measure.horizontalBehavior = widget.getHorizontalDimensionBehaviour();
        measure.verticalBehavior = widget.getVerticalDimensionBehaviour();
        measure.horizontalDimension = widget.getWidth();
        measure.verticalDimension = widget.getHeight();
        measure.measuredNeedsSolverPass = false;
        measure.measureStrategy = measureStrategy;

        boolean horizontalMatchConstraints =
                (measure.horizontalBehavior == DimensionBehaviour.MATCH_CONSTRAINT);
        boolean verticalMatchConstraints =
                (measure.verticalBehavior == DimensionBehaviour.MATCH_CONSTRAINT);
        boolean horizontalUseRatio = horizontalMatchConstraints && widget.mDimensionRatio > 0;
        boolean verticalUseRatio = verticalMatchConstraints && widget.mDimensionRatio > 0;

        // A spread MATCH_CONSTRAINT with a dangling (unconstrained) side cannot be
        // resolved by the solver; fall back to WRAP_CONTENT (or FIXED when both
        // sides would otherwise collapse to zero).
        if (horizontalMatchConstraints && widget.hasDanglingDimension(HORIZONTAL)
                && widget.mMatchConstraintDefaultWidth == MATCH_CONSTRAINT_SPREAD
                && !horizontalUseRatio) {
            horizontalMatchConstraints = false;
            measure.horizontalBehavior = WRAP_CONTENT;
            if (verticalMatchConstraints
                    && widget.mMatchConstraintDefaultHeight == MATCH_CONSTRAINT_SPREAD) {
                // if match x match, size would be zero.
                measure.horizontalBehavior = FIXED;
            }
        }
        if (verticalMatchConstraints && widget.hasDanglingDimension(VERTICAL)
                && widget.mMatchConstraintDefaultHeight == MATCH_CONSTRAINT_SPREAD
                && !verticalUseRatio) {
            verticalMatchConstraints = false;
            measure.verticalBehavior = WRAP_CONTENT;
            if (horizontalMatchConstraints
                    && widget.mMatchConstraintDefaultWidth == MATCH_CONSTRAINT_SPREAD) {
                // if match x match, size would be zero.
                measure.verticalBehavior = FIXED;
            }
        }

        // Sides already resolved by an earlier pass are treated as FIXED.
        if (widget.isResolvedHorizontally()) {
            horizontalMatchConstraints = false;
            measure.horizontalBehavior = FIXED;
        }
        if (widget.isResolvedVertically()) {
            verticalMatchConstraints = false;
            measure.verticalBehavior = FIXED;
        }

        if (horizontalUseRatio) {
            if (widget.mResolvedMatchConstraintDefault[HORIZONTAL]
                    == ConstraintWidget.MATCH_CONSTRAINT_RATIO_RESOLVED) {
                measure.horizontalBehavior = FIXED;
            } else if (!verticalMatchConstraints) {
                // let's measure here
                int measuredHeight;
                if (measure.verticalBehavior == FIXED) {
                    measuredHeight = measure.verticalDimension;
                } else {
                    measure.horizontalBehavior = WRAP_CONTENT;
                    measurer.measure(widget, measure);
                    measuredHeight = measure.measuredHeight;
                }
                measure.horizontalBehavior = FIXED;
                // regardless of which side we are using for the ratio, getDimensionRatio() already
                // made sure that it's expressed in WxH format, so we can simply go and multiply
                measure.horizontalDimension = (int) (widget.getDimensionRatio() * measuredHeight);
                if (DEBUG) {
                    System.out.println("(M) Measured once for ratio on horizontal side...");
                }
            }
        }
        if (verticalUseRatio) {
            if (widget.mResolvedMatchConstraintDefault[VERTICAL]
                    == ConstraintWidget.MATCH_CONSTRAINT_RATIO_RESOLVED) {
                measure.verticalBehavior = FIXED;
            } else if (!horizontalMatchConstraints) {
                // let's measure here
                int measuredWidth;
                if (measure.horizontalBehavior == FIXED) {
                    measuredWidth = measure.horizontalDimension;
                } else {
                    measure.verticalBehavior = WRAP_CONTENT;
                    measurer.measure(widget, measure);
                    measuredWidth = measure.measuredWidth;
                }
                measure.verticalBehavior = FIXED;
                if (widget.getDimensionRatioSide() == -1) {
                    // regardless of which side we are using for the ratio,
                    // getDimensionRatio() already
                    // made sure that it's expressed in WxH format,
                    // so we can simply go and divide
                    measure.verticalDimension = (int) (measuredWidth / widget.getDimensionRatio());
                } else {
                    // getDimensionRatio() already got reverted, so we can simply multiply
                    measure.verticalDimension = (int) (widget.getDimensionRatio() * measuredWidth);
                }
                if (DEBUG) {
                    System.out.println("(M) Measured once for ratio on vertical side...");
                }
            }
        }
        // Final measurement with the resolved behaviours; copy the results
        // back into the widget.
        measurer.measure(widget, measure);
        widget.setWidth(measure.measuredWidth);
        widget.setHeight(measure.measuredHeight);
        widget.setHasBaseline(measure.measuredHasBaseline);
        widget.setBaselineDistance(measure.measuredBaseline);
        measure.measureStrategy = BasicMeasure.Measure.SELF_DIMENSIONS;
        if (DEBUG) {
            System.out.println("(M) Measured " + widget.getDebugName() + " with : "
                    + widget.getHorizontalDimensionBehaviour() + " x "
                    + widget.getVerticalDimensionBehaviour() + " => "
                    + widget.getWidth() + " x " + widget.getHeight());
        }
        return measure.measuredNeedsSolverPass;
    }

    static int sMyCounter = 0;

    /**
     * Layout the tree of widgets
     */
    @Override
    public void layout() {
        if (DEBUG) {
            System.out.println("\n#####################################");
            System.out.println("## CL LAYOUT PASS ##");
            System.out.println("#####################################\n");
            mDebugSolverPassCount = 0;
        }

        mX = 0;
        mY = 0;
        mWidthMeasuredTooSmall = false;
        mHeightMeasuredTooSmall = false;
        final int count = mChildren.size();

        // Remember the pre-layout size and behaviours so wrap-content
        // overrides can be detected after the optimization passes.
        int preW = Math.max(0, getWidth());
        int preH = Math.max(0, getHeight());
        DimensionBehaviour originalVerticalDimensionBehaviour =
                mListDimensionBehaviors[DIMENSION_VERTICAL];
        DimensionBehaviour originalHorizontalDimensionBehaviour =
                mListDimensionBehaviors[DIMENSION_HORIZONTAL];

        if (DEBUG_LAYOUT) {
            System.out.println("layout with preW: " + preW + " ("
                    + mListDimensionBehaviors[DIMENSION_HORIZONTAL] + ") preH: " + preH + " ("
                    + mListDimensionBehaviors[DIMENSION_VERTICAL] + ")");
        }

        if (mMetrics != null) {
            mMetrics.layouts++;
        }

        boolean wrap_override = false;

        if (FULL_DEBUG) {
            System.out.println("OPTIMIZATION LEVEL " + mOptimizationLevel);
        }

        // Only try the direct optimization in the first layout pass
        if (mPass == 0 && Optimizer.enabled(mOptimizationLevel, Optimizer.OPTIMIZATION_DIRECT)) {
            if (FULL_DEBUG) {
                System.out.println("Direct pass " + sMyCounter++);
            }
            Direct.solvingPass(this, getMeasurer());
            if (FULL_DEBUG) {
                System.out.println("Direct pass done.");
            }
            for (int i = 0; i < count; i++) {
                ConstraintWidget child = mChildren.get(i);
                if (FULL_DEBUG) {
                    // Compact per-child trace: chain membership (H/V) and
                    // whether the child is fully resolved (*).
                    if (child.isInHorizontalChain()) {
                        System.out.print("H");
                    } else {
                        System.out.print(" ");
                    }
                    if (child.isInVerticalChain()) {
                        System.out.print("V");
                    } else {
                        System.out.print(" ");
                    }
                    if (child.isResolvedHorizontally() && child.isResolvedVertically()) {
                        System.out.print("*");
                    } else {
                        System.out.print(" ");
                    }
                    System.out.println("[" + i + "] child " + child.getDebugName()
                            + " H: " + child.isResolvedHorizontally()
                            + " V: " + child.isResolvedVertically());
                }
                if (child.isMeasureRequested()
                        && !(child instanceof Guideline)
                        && !(child instanceof Barrier)
                        && !(child instanceof VirtualLayout)
                        && !(child.isInVirtualLayout())) {
                    DimensionBehaviour widthBehavior = child.getDimensionBehaviour(HORIZONTAL);
                    DimensionBehaviour heightBehavior = child.getDimensionBehaviour(VERTICAL);
                    // Skip children that are spread MATCH_CONSTRAINT on both
                    // axes — the solver will size them.
                    boolean skip = widthBehavior == DimensionBehaviour.MATCH_CONSTRAINT
                            && child.mMatchConstraintDefaultWidth != MATCH_CONSTRAINT_WRAP
                            && heightBehavior == DimensionBehaviour.MATCH_CONSTRAINT
                            && child.mMatchConstraintDefaultHeight != MATCH_CONSTRAINT_WRAP;
                    if (!skip) {
                        BasicMeasure.Measure measure = new BasicMeasure.Measure();
                        ConstraintWidgetContainer.measure(0, child, mMeasurer,
                                measure, BasicMeasure.Measure.SELF_DIMENSIONS);
                    }
                }
            }
            // let's measure children
            if (FULL_DEBUG) {
                System.out.println("Direct pass all done.");
            }
        } else {
            if (FULL_DEBUG) {
                System.out.println("No DIRECT PASS");
            }
        }

        // Grouping optimization for wrap-content containers with enough children.
        if (count > 2 && (originalHorizontalDimensionBehaviour == WRAP_CONTENT
                || originalVerticalDimensionBehaviour == WRAP_CONTENT)
                && (Optimizer.enabled(mOptimizationLevel, Optimizer.OPTIMIZATION_GROUPING))) {
            if (Grouping.simpleSolvingPass(this, getMeasurer())) {
                if (originalHorizontalDimensionBehaviour == WRAP_CONTENT) {
                    if (preW < getWidth() && preW > 0) {
                        if (DEBUG_LAYOUT) {
                            // NOTE(review): this message prints preH but the width is
                            // overridden to preW below — looks like a copy/paste slip
                            // in the debug string; confirm against upstream.
                            System.out.println("Override width " + getWidth() + " to " + preH);
                        }
                        setWidth(preW);
                        mWidthMeasuredTooSmall = true;
                    } else {
                        preW = getWidth();
                    }
                }
                if (originalVerticalDimensionBehaviour == WRAP_CONTENT) {
                    if (preH < getHeight() && preH > 0) {
                        if (DEBUG_LAYOUT) {
                            System.out.println("Override height " + getHeight() + " to " + preH);
                        }
                        setHeight(preH);
                        mHeightMeasuredTooSmall = true;
                    } else {
                        preH = getHeight();
                    }
                }
                wrap_override = true;
                if (DEBUG_LAYOUT) {
                    System.out.println("layout post opt, preW: " + preW
                            + " (" + mListDimensionBehaviors[DIMENSION_HORIZONTAL]
                            + ") preH: " + preH
                            + " (" + mListDimensionBehaviors[DIMENSION_VERTICAL]
                            + "), new size " + getWidth() + " x " + getHeight());
                }
            }
        }

        boolean useGraphOptimizer = optimizeFor(Optimizer.OPTIMIZATION_GRAPH)
                || optimizeFor(Optimizer.OPTIMIZATION_GRAPH_WRAP);
        mSystem.graphOptimizer = false;
        mSystem.newgraphOptimizer = false;
        if (mOptimizationLevel != Optimizer.OPTIMIZATION_NONE && useGraphOptimizer) {
            mSystem.newgraphOptimizer = true;
        }

        int countSolve = 0;
        final List<ConstraintWidget> allChildren = mChildren;
        boolean hasWrapContent = getHorizontalDimensionBehaviour() == WRAP_CONTENT
                || getVerticalDimensionBehaviour() == WRAP_CONTENT;

        // Reset the chains before iterating on our children
        resetChains();
        countSolve = 0;

        // Before we solve our system, we should call layout() on any
        // of our children that is a container.
        for (int i = 0; i < count; i++) {
            ConstraintWidget widget = mChildren.get(i);
            if (widget instanceof WidgetContainer) {
                ((WidgetContainer) widget).layout();
            }
        }
        boolean optimize = optimizeFor(Optimizer.OPTIMIZATION_GRAPH);

        // Now let's solve our system as usual
        boolean needsSolving = true;
        while (needsSolving) {
            countSolve++;
            try {
                mSystem.reset();
                resetChains();
                if (DEBUG) {
                    String debugName = getDebugName();
                    if (debugName == null) {
                        debugName = "root";
                    }
                    setDebugSolverName(mSystem, debugName);
                    for (int i = 0; i < count; i++) {
                        ConstraintWidget widget = mChildren.get(i);
                        if (widget.getDebugName() != null) {
                            widget.setDebugSolverName(mSystem, widget.getDebugName());
                        }
                    }
                } else {
                    createObjectVariables(mSystem);
                    for (int i = 0; i < count; i++) {
                        ConstraintWidget widget = mChildren.get(i);
                        widget.createObjectVariables(mSystem);
                    }
                }
                needsSolving = addChildrenToSolver(mSystem);
                // Apply the wrap min/max anchors collected earlier, then clear
                // them so they are only applied once per solve.
                if (mVerticalWrapMin != null && mVerticalWrapMin.get() != null) {
                    addMinWrap(mVerticalWrapMin.get(), mSystem.createObjectVariable(mTop));
                    mVerticalWrapMin = null;
                }
                if (mVerticalWrapMax != null && mVerticalWrapMax.get() != null) {
                    addMaxWrap(mVerticalWrapMax.get(), mSystem.createObjectVariable(mBottom));
                    mVerticalWrapMax = null;
                }
                if (mHorizontalWrapMin != null && mHorizontalWrapMin.get() != null) {
                    addMinWrap(mHorizontalWrapMin.get(), mSystem.createObjectVariable(mLeft));
                    mHorizontalWrapMin = null;
                }
                if (mHorizontalWrapMax != null && mHorizontalWrapMax.get() != null) {
                    addMaxWrap(mHorizontalWrapMax.get(), mSystem.createObjectVariable(mRight));
                    mHorizontalWrapMax = null;
                }
                if (needsSolving) {
                    mSystem.minimize();
                }
            } catch (Exception e) {
                e.printStackTrace();
                System.out.println("EXCEPTION : " + e);
            }
            if (needsSolving) {
                needsSolving = updateChildrenFromSolver(mSystem, Optimizer.sFlags);
            } else {
                updateFromSolver(mSystem, optimize);
                for (int i = 0; i < count; i++) {
                    ConstraintWidget widget = mChildren.get(i);
                    widget.updateFromSolver(mSystem, optimize);
                }
                needsSolving = false;
            }
            if (hasWrapContent &&
countSolve < MAX_ITERATIONS && Optimizer.sFlags[Optimizer.FLAG_RECOMPUTE_BOUNDS]) { // let's get the new bounds int maxX = 0; int maxY = 0; for (int i = 0; i < count; i++) { ConstraintWidget widget = mChildren.get(i); maxX = Math.max(maxX, widget.mX + widget.getWidth()); maxY = Math.max(maxY, widget.mY + widget.getHeight()); } maxX = Math.max(mMinWidth, maxX); maxY = Math.max(mMinHeight, maxY); if (originalHorizontalDimensionBehaviour == WRAP_CONTENT) { if (getWidth() < maxX) { if (DEBUG_LAYOUT) { System.out.println( "layout override width from " + getWidth() + " vs " + maxX); } setWidth(maxX); // force using the solver mListDimensionBehaviors[DIMENSION_HORIZONTAL] = WRAP_CONTENT; wrap_override = true; needsSolving = true; } } if (originalVerticalDimensionBehaviour == WRAP_CONTENT) { if (getHeight() < maxY) { if (DEBUG_LAYOUT) { System.out.println( "layout override height from " + getHeight() + " vs " + maxY); } setHeight(maxY); // force using the solver mListDimensionBehaviors[DIMENSION_VERTICAL] = WRAP_CONTENT; wrap_override = true; needsSolving = true; } } } if (true) { int width = Math.max(mMinWidth, getWidth()); if (width > getWidth()) { if (DEBUG_LAYOUT) { System.out.println( "layout override 2, width from " + getWidth() + " vs " + width); } setWidth(width); mListDimensionBehaviors[DIMENSION_HORIZONTAL] = FIXED; wrap_override = true; needsSolving = true; } int height = Math.max(mMinHeight, getHeight()); if (height > getHeight()) { if (DEBUG_LAYOUT) { System.out.println( "layout override 2, height from " + getHeight() + " vs " + height); } setHeight(height); mListDimensionBehaviors[DIMENSION_VERTICAL] = FIXED; wrap_override = true; needsSolving = true; } if (!wrap_override) { if (mListDimensionBehaviors[DIMENSION_HORIZONTAL] == WRAP_CONTENT && preW > 0) { if (getWidth() > preW) { if (DEBUG_LAYOUT) { System.out.println( "layout override 3, width from " + getWidth() + " vs " + preW); } mWidthMeasuredTooSmall = true; wrap_override = true; 
mListDimensionBehaviors[DIMENSION_HORIZONTAL] = FIXED; setWidth(preW); needsSolving = true; } } if (mListDimensionBehaviors[DIMENSION_VERTICAL] == WRAP_CONTENT && preH > 0) { if (getHeight() > preH) { if (DEBUG_LAYOUT) { System.out.println( "layout override 3, height from " + getHeight() + " vs " + preH); } mHeightMeasuredTooSmall = true; wrap_override = true; mListDimensionBehaviors[DIMENSION_VERTICAL] = FIXED; setHeight(preH); needsSolving = true; } } } if (countSolve > MAX_ITERATIONS) { needsSolving = false; } } } if (DEBUG_LAYOUT) { System.out.println( "Solved system in " + countSolve + " iterations (" + getWidth() + " x " + getHeight() + ")"); } mChildren = (ArrayList<ConstraintWidget>) allChildren; if (wrap_override) { mListDimensionBehaviors[DIMENSION_HORIZONTAL] = originalHorizontalDimensionBehaviour; mListDimensionBehaviors[DIMENSION_VERTICAL] = originalVerticalDimensionBehaviour; } resetSolverVariables(mSystem.getCache()); } /** * Indicates if the container knows how to layout its content on its own * * @return true if the container does the layout, false otherwise */ public boolean handlesInternalConstraints() { return false; } /*-----------------------------------------------------------------------*/ // Guidelines /*-----------------------------------------------------------------------*/ /** * Accessor to the vertical guidelines contained in the table. * * @return array of guidelines */ public ArrayList<Guideline> getVerticalGuidelines() { ArrayList<Guideline> guidelines = new ArrayList<>(); for (int i = 0, mChildrenSize = mChildren.size(); i < mChildrenSize; i++) { final ConstraintWidget widget = mChildren.get(i); if (widget instanceof Guideline) { Guideline guideline = (Guideline) widget; if (guideline.getOrientation() == Guideline.VERTICAL) { guidelines.add(guideline); } } } return guidelines; } /** * Accessor to the horizontal guidelines contained in the table. 
* * @return array of guidelines */ public ArrayList<Guideline> getHorizontalGuidelines() { ArrayList<Guideline> guidelines = new ArrayList<>(); for (int i = 0, mChildrenSize = mChildren.size(); i < mChildrenSize; i++) { final ConstraintWidget widget = mChildren.get(i); if (widget instanceof Guideline) { Guideline guideline = (Guideline) widget; if (guideline.getOrientation() == Guideline.HORIZONTAL) { guidelines.add(guideline); } } } return guidelines; } public LinearSystem getSystem() { return mSystem; } /*-----------------------------------------------------------------------*/ // Chains /*-----------------------------------------------------------------------*/ /** * Reset the chains array. Need to be called before layout. */ private void resetChains() { mHorizontalChainsSize = 0; mVerticalChainsSize = 0; } /** * Add the chain which constraintWidget is part of. Called by ConstraintWidget::addToSolver() * * @param constraintWidget * @param type HORIZONTAL or VERTICAL chain */ void addChain(ConstraintWidget constraintWidget, int type) { ConstraintWidget widget = constraintWidget; if (type == HORIZONTAL) { addHorizontalChain(widget); } else if (type == VERTICAL) { addVerticalChain(widget); } } /** * Add a widget to the list of horizontal chains. The widget is the left-most widget * of the chain which doesn't have a left dual connection. * * @param widget widget starting the chain */ private void addHorizontalChain(ConstraintWidget widget) { if (mHorizontalChainsSize + 1 >= mHorizontalChainsArray.length) { mHorizontalChainsArray = Arrays .copyOf(mHorizontalChainsArray, mHorizontalChainsArray.length * 2); } mHorizontalChainsArray[mHorizontalChainsSize] = new ChainHead(widget, HORIZONTAL, isRtl()); mHorizontalChainsSize++; } /** * Add a widget to the list of vertical chains. The widget is the top-most widget * of the chain which doesn't have a top dual connection. 
* * @param widget widget starting the chain */ private void addVerticalChain(ConstraintWidget widget) { if (mVerticalChainsSize + 1 >= mVerticalChainsArray.length) { mVerticalChainsArray = Arrays .copyOf(mVerticalChainsArray, mVerticalChainsArray.length * 2); } mVerticalChainsArray[mVerticalChainsSize] = new ChainHead(widget, VERTICAL, isRtl()); mVerticalChainsSize++; } /** * Keep track of the # of passes * @param pass */ public void setPass(int pass) { this.mPass = pass; } public void getSceneString(StringBuilder ret) { ret.append(stringId + ":{\n"); ret.append(" actualWidth:" + mWidth); ret.append("\n"); ret.append(" actualHeight:" + mHeight); ret.append("\n"); ArrayList<ConstraintWidget> children = getChildren(); for (ConstraintWidget child : children) { child.getSceneString(ret); ret.append(",\n"); } ret.append("}"); } }
/* * This file is part of COMPASS. It is subject to the license terms in * the LICENSE file found in the top-level directory of this distribution. * (Also available at http://www.apache.org/licenses/LICENSE-2.0.txt) * You may not use this file except in compliance with the License. */ package de.dfki.asr.compass.business; import de.dfki.asr.compass.business.api.SceneTreeManager; import de.dfki.asr.compass.business.services.CRUDService; import de.dfki.asr.compass.business.exception.CompassRuntimeException; import de.dfki.asr.compass.business.exception.EntityNotFoundException; import de.dfki.asr.compass.math.Quat4f; import de.dfki.asr.compass.math.Vector3f; import de.dfki.asr.compass.model.SceneNode; import de.dfki.asr.compass.model.SceneNodeComponent; import java.io.IOException; import java.io.Serializable; import java.util.Collections; import java.util.List; import javax.ejb.Stateless; import javax.inject.Inject; import javax.inject.Named; import javax.vecmath.Matrix4f; @Named @Stateless public class SceneTreeManagerImpl implements Serializable, SceneTreeManager { private static final long serialVersionUID = 3073738672780116033L; @Inject private CRUDService crudService; @Override public SceneNode findById(final long id) throws EntityNotFoundException { return crudService.findById(SceneNode.class, id); } @Override public void remove(final SceneNode node) { crudService.remove(node); } @Override public void removeById(final long entityId) throws EntityNotFoundException, IllegalArgumentException { SceneNode toBeDeleted = findById(entityId); if (toBeDeleted.getParent() == null) { throw new IllegalArgumentException("Root SceneNodes for Scenarios may not be deleted."); } toBeDeleted.setParent(null); crudService.remove(toBeDeleted); } @Override public void save(final SceneNode entity) { crudService.save(entity); } @Override public SceneNode referenceById(final long id) throws EntityNotFoundException { return crudService.referenceById(SceneNode.class, id); } @Override 
public SceneNode createNode() { SceneNode node = new SceneNode("New scene node"); crudService.save(node); return node; } @Override public SceneNode createNewChild(final SceneNode parent) { SceneNode node = createNode(); node.setParent(parent); return node; } @Override public void reparentNode(final SceneNode node, final SceneNode newParent) { SceneNode oldParent = node.getParent(); node.setParent(newParent); adjustTransformsToKeepGlobalTransformation(node, oldParent, newParent); crudService.save(node); } @Override public void reparentNode(final long nodeId, final long parentId) throws EntityNotFoundException { reparentNode(findById(nodeId), findById(parentId)); } @Override public void swapChildren(final SceneNode node, final int childA, final int childB) { List<SceneNode> sceneNodes = node.getChildren(); Collections.swap(sceneNodes, childA, childB); sceneNodes.get(childA).updateOrderingIndex(); sceneNodes.get(childB).updateOrderingIndex(); crudService.save(node); } @Override public void swapChildren(final long nodeId, final int childA, final int childB) throws EntityNotFoundException { swapChildren(findById(nodeId), childA, childB); } @Override public SceneNode duplicateNode(final SceneNode node) { SceneNode newNode; try { newNode = (SceneNode) node.deepCopy(); } catch (IOException | ClassNotFoundException ex) { throw new CompassRuntimeException(ex); } newNode.setName(newNode.getName() + " - Copy"); crudService.save(newNode); return newNode; } @Override public SceneNode duplicateNode(final long id) throws EntityNotFoundException { return duplicateNode(findById(id)); } @Override public void addNode(final SceneNode node, final long parentID) throws IllegalArgumentException, EntityNotFoundException { addNode(node, findById(parentID)); } @Override public void addNode(final SceneNode node, final SceneNode parent) throws IllegalArgumentException { if (node.getParent() != null) { throw new IllegalArgumentException("Tried to add a scene node that already has a parent. 
Use reparentNode instead."); } node.setParent(parent); crudService.save(node); } @Override public SceneNode addPrefabInstance(final long prefabID, final long parentID) throws EntityNotFoundException { return addPrefabInstance(findById(prefabID), findById(parentID)); } @Override public SceneNode addPrefabInstance(final SceneNode prefab, final SceneNode parent) { SceneNode node; try { node = (SceneNode) prefab.deepCopy(); } catch (IOException | ClassNotFoundException ex) { throw new CompassRuntimeException(ex); } node.setParent(parent); crudService.save(node); return node; } @Override public void saveNode(final SceneNode node) { crudService.save(node); } @Override public void addComponentToSceneNode(final long parentID, final SceneNodeComponent newComponent) throws EntityNotFoundException, IllegalArgumentException { addComponentToSceneNode(findById(parentID), newComponent); } @Override public void addComponentToSceneNode(final SceneNode parent, final SceneNodeComponent newComponent) throws IllegalArgumentException { newComponent.setOwner(parent); parent.addComponent(newComponent); crudService.save(newComponent); } private void adjustTransformsToKeepGlobalTransformation(final SceneNode nodeToAdjust, final SceneNode oldParent, final SceneNode newParent) { Matrix4f localTransform = calculateNewLocalTransform(nodeToAdjust.getLocalTransform(), oldParent.getWorldSpaceTransform(), newParent.getWorldSpaceTransform()); setNewLocalScale(nodeToAdjust, localTransform); setNewLocalTranslation(nodeToAdjust, localTransform); setNewLocalRotation(nodeToAdjust, localTransform); } private Matrix4f calculateNewLocalTransform(final Matrix4f oldLocalTransform, final Matrix4f oldTransform, final Matrix4f newTransform) { Matrix4f out = new Matrix4f(oldLocalTransform); Matrix4f oldToNew = new Matrix4f(newTransform); oldToNew.invert(); oldToNew.mul(oldTransform); out.mul(oldToNew, out); return out; } private void setNewLocalScale(final SceneNode nodeToAdjust, final Matrix4f localTransform) { 
nodeToAdjust.setLocalScale(localTransform.getScale()); } private void setNewLocalTranslation(final SceneNode nodeToAdjust, final Matrix4f localTransform) { Vector3f translation = new Vector3f(); localTransform.get(translation); nodeToAdjust.setLocalTranslation(translation); } private void setNewLocalRotation(final SceneNode nodeToAdjust, final Matrix4f localTransform) { Quat4f rotation = new Quat4f(); localTransform.get(rotation); nodeToAdjust.setLocalRotation(rotation); } static public SceneNode getRootNode(final SceneNode node) { if (node.getParent() == null) { return node; } return getRootNode(node.getParent()); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sentry.tests.e2e.metastore; import java.io.IOException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.ql.Driver; import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.security.UserGroupInformation; import org.apache.pig.PigServer; import org.apache.sentry.provider.file.PolicyFile; import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration; import 
org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory.HiveServer2Type; import org.junit.After; import org.junit.BeforeClass; import com.google.common.collect.Maps; public abstract class AbstractMetastoreTestWithStaticConfiguration extends AbstractTestWithStaticConfiguration { @BeforeClass public static void setupTestStaticConfiguration() throws Exception { useSentryService = true; clearDbAfterPerTest = false; testServerType = HiveServer2Type.InternalMetastore.name(); AbstractTestWithStaticConfiguration.setupTestStaticConfiguration(); } protected static void writePolicyFile(PolicyFile policyFile) throws Exception { policyFile.write(context.getPolicyFile()); } public static PolicyFile setAdminOnServer1(String adminGroup) throws Exception { return SentryPolicyProviderForDb.setAdminOnServer1(adminGroup, getSentryClient()); } /** * create a metastore table using the given attributes * @param client * @param dbName * @param tabName * @param cols * @return * @throws Exception */ public Table createMetastoreTable(HiveMetaStoreClient client, String dbName, String tabName, List<FieldSchema> cols) throws Exception { Table tbl = makeMetastoreTableObject(client, dbName, tabName, cols); client.createTable(tbl); return tbl; } public Table createMetastoreTableWithLocation(HiveMetaStoreClient client, String dbName, String tabName, List<FieldSchema> cols, String location) throws Exception { Table tbl = makeMetastoreTableObject(client, dbName, tabName, cols); tbl.getSd().setLocation(location); client.createTable(tbl); return tbl; } public Table createMetastoreTableWithPartition(HiveMetaStoreClient client, String dbName, String tabName, List<FieldSchema> cols, List<FieldSchema> partionVals) throws Exception { Table tbl = makeMetastoreTableObject(client, dbName, tabName, cols); tbl.setPartitionKeys(partionVals); client.createTable(tbl); return client.getTable(dbName, tabName); } public void addPartition(HiveMetaStoreClient client, String dbName, String tblName, List<String> 
ptnVals, Table tbl) throws Exception { Partition part = makeMetastorePartitionObject(dbName, tblName, ptnVals, tbl); Partition retp = client.add_partition(part); } public void addPartitionWithLocation(HiveMetaStoreClient client, String dbName, String tblName, List<String> ptnVals, Table tbl, String location) throws Exception { Partition part = makeMetastorePartitionObject(dbName, tblName, ptnVals, tbl, location); client.add_partition(part); } public Table makeMetastoreTableObject(HiveMetaStoreClient client, String dbName, String tabName, List<FieldSchema> cols) throws Exception { Table tbl = new Table(); tbl.setDbName(dbName); tbl.setTableName(tabName); StorageDescriptor sd = new StorageDescriptor(); tbl.setSd(sd); tbl.setParameters(new HashMap<String, String>()); sd.setCols(cols); sd.setCompressed(false); sd.setParameters(new HashMap<String, String>()); sd.setSerdeInfo(new SerDeInfo()); sd.getSerdeInfo().setName(tbl.getTableName()); sd.getSerdeInfo().setParameters(new HashMap<String, String>()); sd.getSerdeInfo().getParameters() .put(serdeConstants.SERIALIZATION_FORMAT, "1"); sd.setSortCols(new ArrayList<Order>()); return tbl; } public Partition makeMetastorePartitionObject(String dbName, String tblName, List<String> ptnVals, Table tbl, String partitionLocation) { Partition part = makeMetastoreBasePartitionObject(dbName, tblName, ptnVals, tbl); part.getSd().setLocation(partitionLocation); return part; } public Partition makeMetastorePartitionObject(String dbName, String tblName, List<String> ptnVals, Table tbl) { Partition part = makeMetastoreBasePartitionObject(dbName, tblName, ptnVals, tbl); return part; } private Partition makeMetastoreBasePartitionObject(String dbName, String tblName, List<String> ptnVals, Table tbl) { Partition part4 = new Partition(); part4.setDbName(dbName); part4.setTableName(tblName); part4.setValues(ptnVals); part4.setParameters(new HashMap<String, String>()); part4.setSd(tbl.getSd().deepCopy()); 
part4.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo().deepCopy()); part4.setParameters(new HashMap<String, String>()); return part4; } public void createMetastoreDB(HiveMetaStoreClient client, String dbName) throws Exception { Database db = new Database(); db.setName(dbName); client.createDatabase(db); } public void execHiveSQLwithOverlay(final String sqlStmt, final String userName, Map<String, String> overLay) throws Exception { final HiveConf hiveConf = new HiveConf(); for (Map.Entry<String, String> entry : overLay.entrySet()) { hiveConf.set(entry.getKey(), entry.getValue()); } UserGroupInformation clientUgi = UserGroupInformation .createRemoteUser(userName); clientUgi.doAs(new PrivilegedExceptionAction<Object>() { @Override public Void run() throws Exception { Driver driver = new Driver(hiveConf, userName); SessionState.start(new CliSessionState(hiveConf)); CommandProcessorResponse cpr = driver.run(sqlStmt); if (cpr.getResponseCode() != 0) { throw new IOException("Failed to execute \"" + sqlStmt + "\". Driver returned " + cpr.getResponseCode() + " Error: " + cpr.getErrorMessage()); } driver.close(); SessionState.get().close(); return null; } }); } public void execHiveSQL(String sqlStmt, String userName) throws Exception { execHiveSQLwithOverlay(sqlStmt, userName, new HashMap<String, String>()); } public void execPigLatin(String userName, final PigServer pigServer, final String pigLatin) throws Exception { UserGroupInformation clientUgi = UserGroupInformation .createRemoteUser(userName); clientUgi.doAs( new PrivilegedExceptionAction<Object>() { @Override public Void run() throws Exception { pigServer.registerQuery(pigLatin); return null; } }); } }
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.notification.impl.actions;

import com.intellij.ide.util.PropertiesComponent;
import com.intellij.internal.statistic.connect.StatisticsNotification;
import com.intellij.internal.statistic.updater.StatisticsNotificationManager;
import com.intellij.notification.*;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.MessageDialogBuilder;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.util.messages.MessageBus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Internal debug action that opens a dialog for composing and firing test IDE
 * notifications from a small line-based format ({@code GroupID:}, {@code Title:},
 * {@code Content:}, {@code Actions:}, {@code Sticky:}, ...).
 *
 * @author spleaner
 * @author Sergey.Malenkov
 */
public class NotificationTestAction extends AnAction implements DumbAware {
  public static final String TEST_GROUP_ID = "Test Notification";
  // Group used when "Sticky:true" is requested — shows a sticky balloon.
  private static final NotificationGroup TEST_STICKY_GROUP =
    new NotificationGroup("Test Sticky Notification", NotificationDisplayType.STICKY_BALLOON, true);
  // Group used when "Toolwindow:true" is requested — routed to the TODO tool window.
  private static final NotificationGroup TEST_TOOLWINDOW_GROUP =
    NotificationGroup.toolWindowGroup("Test ToolWindow Notification", ToolWindowId.TODO_VIEW, true);
  // PropertiesComponent key under which the dialog text is persisted between runs.
  private static final String MESSAGE_KEY = "NotificationTestAction_Message";

  public void actionPerformed(@NotNull AnActionEvent event) {
    new NotificationDialog(event.getProject()).show();
  }

  // Modeless dialog hosting the message text area; OK fires the notifications
  // without closing, Cancel saves the text for the next invocation.
  private static final class NotificationDialog extends DialogWrapper {
    private final JTextArea myMessage = new JTextArea(10, 50);
    private final MessageBus myMessageBus;

    private NotificationDialog(@Nullable Project project) {
      super(project, true, IdeModalityType.MODELESS);
      // Fall back to the application-level bus when no project is open.
      myMessageBus = project != null ? project.getMessageBus() : ApplicationManager.getApplication().getMessageBus();
      init();
      setOKButtonText("Notify");
      setTitle("Test Notification");
      // Restore the last composed message, or a template listing the known keys.
      myMessage.setText(
        PropertiesComponent.getInstance().getValue(MESSAGE_KEY, "GroupID:\nTitle:\nSubtitle:\nContent:\nContent:\nActions:\nSticky:\n"));
    }

    @Nullable
    @Override
    protected String getDimensionServiceKey() {
      return "NotificationTestAction";
    }

    @Override
    protected JComponent createCenterPanel() {
      JPanel panel = new JPanel(new BorderLayout(10, 10));
      panel.add(BorderLayout.CENTER, new JScrollPane(myMessage));
      return panel;
    }

    @NotNull
    @Override
    protected Action[] createActions() {
      return new Action[]{getOKAction(), getCancelAction()};
    }

    @Override
    public void doCancelAction() {
      // Persist the text so it is restored the next time the dialog opens.
      PropertiesComponent.getInstance().setValue(MESSAGE_KEY, myMessage.getText());
      super.doCancelAction();
    }

    @Override
    protected void doOKAction() {
      // NOTE(review): intentionally does not call super.doOKAction(), so the
      // dialog stays open and several notifications can be fired in a row.
      newNotification(myMessage.getText());
    }

    // Parses the text into NotificationInfo records (a blank line closes the
    // current record, "//" comments out a line, "--" stops parsing) and publishes
    // them on the Notifications topic from a pooled thread.
    private void newNotification(String text) {
      final List<NotificationInfo> notifications = new ArrayList<>();
      NotificationInfo notification = null;
      for (String line : StringUtil.splitByLines(text, false)) {
        if (line.length() == 0) {
          // Blank line closes the current record. NOTE(review): a blank line while
          // no record is open falls through and creates an empty record below —
          // confirm whether that is intended.
          if (notification != null) {
            notification = null;
            continue;
          }
        }
        if (line.startsWith("//")) {
          continue;
        }
        if (line.startsWith("--")) {
          break;
        }
        if (notification == null) {
          notification = new NotificationInfo();
          notifications.add(notification);
        }
        if (line.startsWith("GroupID:")) {
          notification.setGroupId(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Title:")) {
          notification.setTitle(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Content:")) {
          // Multiple Content: lines accumulate and are later joined with "\n".
          String value = StringUtil.substringAfter(line, ":");
          if (value != null) {
            notification.addContent(value);
          }
        }
        else if (line.startsWith("Subtitle:")) {
          notification.setSubtitle(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Actions:")) {
          String value = StringUtil.substringAfter(line, ":");
          if (value != null) {
            notification.setActions(StringUtil.split(value, ","));
          }
        }
        else if (line.startsWith("Type:")) {
          notification.setType(StringUtil.substringAfter(line, ":"));
        }
        else if (line.startsWith("Sticky:")) {
          notification.setSticky("true".equals(StringUtil.substringAfter(line, ":")));
        }
        else if (line.startsWith("Listener:")) {
          notification.setAddListener("true".equals(StringUtil.substringAfter(line, ":")));
        }
        else if (line.startsWith("Toolwindow:")) {
          notification.setToolwindow("true".equals(StringUtil.substringAfter(line, ":")));
        }
      }
      // Publish off the EDT; getNotification() lazily builds each Notification.
      ApplicationManager.getApplication().executeOnPooledThread(() -> {
        for (NotificationInfo info : notifications) {
          myMessageBus.syncPublisher(Notifications.TOPIC).notify(info.getNotification());
        }
      });
    }
  }

  // One parsed notification description plus the lazily-built Notification object.
  private static class NotificationInfo implements NotificationListener {
    private String myGroupId;      // also doubles as an icon path for IconLoader
    private String myTitle;
    private String mySubtitle;
    private List<String> myContent;  // accumulated Content: lines
    private List<String> myActions;  // action button labels / icon paths
    private NotificationType myType = NotificationType.INFORMATION;
    private boolean mySticky;
    private boolean myAddListener;
    private boolean myToolwindow;
    private Notification myNotification;  // built on first getNotification() call

    public Notification getNotification() {
      if (myNotification == null) {
        Icon icon = null;
        if (!StringUtil.isEmpty(myGroupId)) {
          // The GroupID value is interpreted as an icon path.
          icon = IconLoader.findIcon(myGroupId);
        }
        if ("!!!St!!!".equals(myTitle)) {
          // Magic title: emit the statistics-service notification instead.
          return myNotification = new StatisticsNotification(StatisticsNotificationManager.GROUP_DISPLAY_ID, getListener()).setIcon(icon);
        }
        String displayId = mySticky ? TEST_STICKY_GROUP.getDisplayId() : TEST_GROUP_ID;
        if (myToolwindow) {
          displayId = TEST_TOOLWINDOW_GROUP.getDisplayId();
        }
        String content = myContent == null ? "" : StringUtil.join(myContent, "\n");
        if (icon == null) {
          // NOTE(review): in this branch the subtitle and the parsed actions are
          // dropped — actions are only attached when an icon was resolved.
          // Confirm whether this asymmetry is intended.
          myNotification = new Notification(displayId, StringUtil.notNullize(myTitle), content, myType, getListener());
        }
        else {
          myNotification = new Notification(displayId, icon, myTitle, mySubtitle, content, myType, getListener());
          if (myActions != null) {
            for (String action : myActions) {
              myNotification.addAction(new MyAnAction(action));
            }
          }
        }
      }
      return myNotification;
    }

    // Returns this object as the hyperlink listener only when "Listener:true" was set.
    @Nullable
    private NotificationListener getListener() {
      return myAddListener ? this : null;
    }

    public void setGroupId(@Nullable String groupId) {
      myGroupId = groupId;
    }

    public void setTitle(@Nullable String title) {
      myTitle = title;
    }

    public void setSubtitle(@Nullable String subtitle) {
      mySubtitle = subtitle;
    }

    public void setAddListener(boolean addListener) {
      myAddListener = addListener;
    }

    public void addContent(@NotNull String content) {
      if (myContent == null) {
        myContent = new ArrayList<>();
      }
      myContent.add(content);
    }

    public void setActions(@NotNull List<String> actions) {
      myActions = actions;
    }

    public void setSticky(boolean sticky) {
      mySticky = sticky;
    }

    public void setToolwindow(boolean toolwindow) {
      myToolwindow = toolwindow;
    }

    // Maps "info"/"error"/"warn" to the NotificationType; anything else is ignored.
    public void setType(@Nullable String type) {
      if ("info".equals(type)) {
        myType = NotificationType.INFORMATION;
      }
      else if ("error".equals(type)) {
        myType = NotificationType.ERROR;
      }
      else if ("warn".equals(type)) {
        myType = NotificationType.WARNING;
      }
    }

    // Hyperlink clicks offer to expire the notification (resets the cached object).
    @Override
    public void hyperlinkUpdate(@NotNull Notification notification, @NotNull HyperlinkEvent event) {
      if (MessageDialogBuilder.yesNo("Notification Listener", event.getDescription() + " Expire?").isYes()) {
        myNotification.expire();
        myNotification = null;
      }
    }

    // Notification action button; the text is used as an icon path when it ends
    // in ".png", otherwise as the button label.
    private class MyAnAction extends AnAction {
      private MyAnAction(@Nullable String text) {
        if (text != null) {
          if (text.endsWith(".png")) {
            Icon icon = IconLoader.findIcon(text);
            if (icon != null) {
              getTemplatePresentation().setIcon(icon);
              return;
            }
          }
          getTemplatePresentation().setText(text);
        }
      }

      @Override
      public void actionPerformed(AnActionEvent e) {
        Notification.get(e);
        if (MessageDialogBuilder.yesNo("AnAction", getTemplatePresentation().getText() + " Expire?").isYes()) {
          myNotification.expire();
          myNotification = null;
        }
      }
    }
  }
}
/*
 * Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above
 *       copyright notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the documentation
 *       and/or other materials provided with the distribution.
 *     * Neither the name of the United States Government nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package gov.hhs.fha.nhinc.patientdiscovery.inbound.deferred.response;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import gov.hhs.fha.nhinc.aspect.InboundProcessingEvent;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;
import gov.hhs.fha.nhinc.patientcorrelation.nhinc.dao.PDDeferredCorrelationDao;
import gov.hhs.fha.nhinc.patientdiscovery.PatientDiscovery201306PolicyChecker;
import gov.hhs.fha.nhinc.patientdiscovery.PatientDiscovery201306Processor;
import gov.hhs.fha.nhinc.patientdiscovery.PatientDiscoveryAuditor;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.response.proxy.AdapterPatientDiscoveryDeferredRespProxy;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.response.proxy.AdapterPatientDiscoveryDeferredRespProxyObjectFactory;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.MCCIIN000002UV01EventDescriptionBuilder;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.PRPAIN201306UV02EventDescriptionBuilder;
import gov.hhs.fha.nhinc.patientdiscovery.response.ResponseFactory;
import gov.hhs.fha.nhinc.patientdiscovery.response.ResponseFactory.ResponseModeType;
import gov.hhs.fha.nhinc.patientdiscovery.response.ResponseMode;
import gov.hhs.fha.nhinc.transform.subdisc.HL7AckTransforms;

import java.lang.reflect.Method;

import org.hl7.v3.II;
import org.hl7.v3.MCCIIN000002UV01;
import org.hl7.v3.PRPAIN201306UV02;
import org.hl7.v3.RespondingGatewayPRPAIN201306UV02RequestType;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

/**
 * Unit tests for {@code StandardInboundPatientDiscoveryDeferredResponse}.
 *
 * Covers four scenarios:
 * <ul>
 *   <li>the inbound-processing aspect annotation is present with the expected builders</li>
 *   <li>the standard (VERIFY) flow: policy passes, correlation is queried, response mode runs</li>
 *   <li>the PASSTHROUGH flow: policy passes but response-mode processing is skipped</li>
 *   <li>the policy-failure flow: an HL7 error acknowledgement is returned</li>
 * </ul>
 * All collaborators are Mockito mocks; no real adapter/NHIN calls are made.
 *
 * @author achidamb
 *
 */
public class StandardInboundPatientDiscoveryDeferredResponseTest {

    /**
     * Verifies via reflection that the deferred-response entry point carries the
     * {@link InboundProcessingEvent} annotation with the expected event-description
     * builders, service type and version — the aspect machinery depends on these.
     */
    @Test
    public void hasInboundProcessingEvent() throws Exception {
        Class<StandardInboundPatientDiscoveryDeferredResponse> clazz = StandardInboundPatientDiscoveryDeferredResponse.class;

        Method method = clazz.getMethod("respondingGatewayDeferredPRPAIN201306UV02", PRPAIN201306UV02.class,
                AssertionType.class);

        InboundProcessingEvent annotation = method.getAnnotation(InboundProcessingEvent.class);

        assertNotNull(annotation);
        assertEquals(PRPAIN201306UV02EventDescriptionBuilder.class, annotation.beforeBuilder());
        assertEquals(MCCIIN000002UV01EventDescriptionBuilder.class, annotation.afterReturningBuilder());
        assertEquals("Patient Discovery Deferred Response", annotation.serviceType());
        assertEquals("1.0", annotation.version());
    }

    /**
     * Standard (VERIFY) flow: outgoing policy passes, the deferred correlation is
     * looked up by message id, the response mode processes the response, the adapter
     * proxy produces the acknowledgement, and all four audit calls are made
     * (NHIN inbound request, NHIN outbound ack, adapter outbound request, adapter inbound ack).
     */
    @Test
    public void invoke() {
        PRPAIN201306UV02 request = new PRPAIN201306UV02();
        AssertionType assertion = new AssertionType();
        MCCIIN000002UV01 expectedResponse = new MCCIIN000002UV01();
        II patientId = new II();

        // Mocks
        PatientDiscovery201306PolicyChecker policyChecker = mock(PatientDiscovery201306PolicyChecker.class);
        ResponseFactory responseFactory = mock(ResponseFactory.class);
        PatientDiscovery201306Processor msgProcessor = mock(PatientDiscovery201306Processor.class);
        PDDeferredCorrelationDao pdCorrelationDao = mock(PDDeferredCorrelationDao.class);
        PatientDiscoveryAuditor auditLogger = mock(PatientDiscoveryAuditor.class);
        ResponseMode responseMode = mock(ResponseMode.class);
        AdapterPatientDiscoveryDeferredRespProxyObjectFactory adapterProxyFactory = mock(AdapterPatientDiscoveryDeferredRespProxyObjectFactory.class);
        AdapterPatientDiscoveryDeferredRespProxy adapterProxy = mock(AdapterPatientDiscoveryDeferredRespProxy.class);

        // Stubbing the methods
        when(policyChecker.checkOutgoingPolicy(any(RespondingGatewayPRPAIN201306UV02RequestType.class))).thenReturn(
                true);

        when(responseFactory.getResponseModeType()).thenReturn(ResponseModeType.VERIFY);
        when(responseFactory.getResponseMode()).thenReturn(responseMode);

        when(pdCorrelationDao.queryByMessageId(any(String.class))).thenReturn(patientId);

        when(adapterProxyFactory.create()).thenReturn(adapterProxy);

        when(adapterProxy.processPatientDiscoveryAsyncResp(request, assertion)).thenReturn(expectedResponse);

        // Actual invocation
        StandardInboundPatientDiscoveryDeferredResponse standardPatientDiscovery = new StandardInboundPatientDiscoveryDeferredResponse(
                policyChecker, responseFactory, msgProcessor, adapterProxyFactory, pdCorrelationDao, auditLogger);

        MCCIIN000002UV01 actualResponse = standardPatientDiscovery.respondingGatewayDeferredPRPAIN201306UV02(request,
                assertion);

        assertSame(expectedResponse, actualResponse);

        // Verify policy check is with the correct request
        ArgumentCaptor<RespondingGatewayPRPAIN201306UV02RequestType> policyReqArgument = ArgumentCaptor
                .forClass(RespondingGatewayPRPAIN201306UV02RequestType.class);
        verify(policyChecker).checkOutgoingPolicy(policyReqArgument.capture());
        assertEquals(request, policyReqArgument.getValue().getPRPAIN201306UV02());

        // Verify response mode is called
        verify(responseMode).processResponse(request, assertion, patientId);

        // Verify audits
        verify(auditLogger).auditNhinDeferred201306(request, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION);

        verify(auditLogger).auditAck(actualResponse, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION,
                NhincConstants.AUDIT_LOG_NHIN_INTERFACE);

        verify(auditLogger).auditAdapterDeferred201306(request, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION);

        verify(auditLogger).auditAck(actualResponse, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION,
                NhincConstants.AUDIT_LOG_ADAPTER_INTERFACE);
    }

    /**
     * PASSTHROUGH flow: policy passes and the adapter proxy is invoked directly.
     * The message processor and correlation DAO are not needed (constructor receives
     * null for them) and response-mode processing must never be triggered; only the
     * NHIN-side audit pair is expected.
     */
    @Test
    public void passthrough() {
        PRPAIN201306UV02 request = new PRPAIN201306UV02();
        AssertionType assertion = new AssertionType();
        MCCIIN000002UV01 expectedResponse = new MCCIIN000002UV01();

        // Mocks
        PatientDiscovery201306PolicyChecker policyChecker = mock(PatientDiscovery201306PolicyChecker.class);
        ResponseFactory responseFactory = mock(ResponseFactory.class);
        PatientDiscoveryAuditor auditLogger = mock(PatientDiscoveryAuditor.class);
        ResponseMode responseMode = mock(ResponseMode.class);
        AdapterPatientDiscoveryDeferredRespProxyObjectFactory adapterProxyFactory = mock(AdapterPatientDiscoveryDeferredRespProxyObjectFactory.class);
        AdapterPatientDiscoveryDeferredRespProxy adapterProxy = mock(AdapterPatientDiscoveryDeferredRespProxy.class);

        // Stubbing the methods
        when(policyChecker.checkOutgoingPolicy(any(RespondingGatewayPRPAIN201306UV02RequestType.class))).thenReturn(
                true);

        when(responseFactory.getResponseModeType()).thenReturn(ResponseModeType.PASSTHROUGH);

        when(adapterProxyFactory.create()).thenReturn(adapterProxy);

        when(adapterProxy.processPatientDiscoveryAsyncResp(request, assertion)).thenReturn(expectedResponse);

        // Actual invocation
        StandardInboundPatientDiscoveryDeferredResponse standardPatientDiscovery = new StandardInboundPatientDiscoveryDeferredResponse(
                policyChecker, responseFactory, null, adapterProxyFactory, null, auditLogger);

        MCCIIN000002UV01 actualResponse = standardPatientDiscovery.respondingGatewayDeferredPRPAIN201306UV02(request,
                assertion);

        assertSame(expectedResponse, actualResponse);

        // Verify policy check is with the correct request
        ArgumentCaptor<RespondingGatewayPRPAIN201306UV02RequestType> policyReqArgument = ArgumentCaptor
                .forClass(RespondingGatewayPRPAIN201306UV02RequestType.class);
        verify(policyChecker).checkOutgoingPolicy(policyReqArgument.capture());
        assertEquals(request, policyReqArgument.getValue().getPRPAIN201306UV02());

        // Verify response mode processing is never called
        verify(responseMode, never()).processResponse(any(PRPAIN201306UV02.class), any(AssertionType.class),
                any(II.class));

        // Verify audits
        verify(auditLogger).auditNhinDeferred201306(request, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION);

        verify(auditLogger).auditAck(actualResponse, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION,
                NhincConstants.AUDIT_LOG_NHIN_INTERFACE);
    }

    /**
     * Policy-failure flow: when the outgoing policy check returns false, the method
     * must not reach any downstream collaborator (they are null here) and must return
     * an HL7 acknowledgement carrying an error type code and the "Policy Check Failed"
     * detail text; the NHIN-side audit pair is still written.
     */
    @Test
    public void policyFailed() {
        PRPAIN201306UV02 request = new PRPAIN201306UV02();
        AssertionType assertion = new AssertionType();

        // Mocks
        PatientDiscovery201306PolicyChecker policyChecker = mock(PatientDiscovery201306PolicyChecker.class);
        PatientDiscoveryAuditor auditLogger = mock(PatientDiscoveryAuditor.class);

        // Stubbing the methods
        when(policyChecker.checkOutgoingPolicy(any(RespondingGatewayPRPAIN201306UV02RequestType.class))).thenReturn(
                false);

        // Actual invocation
        StandardInboundPatientDiscoveryDeferredResponse standardPatientDiscovery = new StandardInboundPatientDiscoveryDeferredResponse(
                policyChecker, null, null, null, null, auditLogger);

        MCCIIN000002UV01 errorResponse = standardPatientDiscovery.respondingGatewayDeferredPRPAIN201306UV02(request,
                assertion);

        // Verify error response
        assertEquals(HL7AckTransforms.ACK_DETAIL_TYPE_CODE_ERROR, errorResponse.getAcknowledgement().get(0)
                .getAcknowledgementDetail().get(0).getTypeCode().toString());
        assertEquals("Policy Check Failed", errorResponse.getAcknowledgement().get(0).getAcknowledgementDetail().get(0)
                .getText().getContent().get(0).toString());

        // Verify policy check is with the correct request
        ArgumentCaptor<RespondingGatewayPRPAIN201306UV02RequestType> policyReqArgument = ArgumentCaptor
                .forClass(RespondingGatewayPRPAIN201306UV02RequestType.class);
        verify(policyChecker).checkOutgoingPolicy(policyReqArgument.capture());
        assertEquals(request, policyReqArgument.getValue().getPRPAIN201306UV02());

        // Verify audits
        verify(auditLogger).auditNhinDeferred201306(request, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION);

        verify(auditLogger).auditAck(errorResponse, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION,
                NhincConstants.AUDIT_LOG_NHIN_INTERFACE);
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.uiDesigner.wizard;

import com.intellij.ide.wizard.StepAdapter;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiType;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableColumn;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collections;

/**
 * Wizard step that lets the user bind form fields to properties of an already
 * existing bean class. Shows a two-column table (form field / bean property);
 * the second column is editable via a combobox listing the bean's read/write
 * properties of type {@code boolean} or {@code java.lang.String}.
 *
 * @author Anton Katilin
 * @author Vladimir Kondratyev
 */
final class BindToExistingBeanStep extends StepAdapter{
  private static final Logger LOG = Logger.getInstance("#com.intellij.uiDesigner.wizard.BindToExistingBeanStep");

  // UI components bound from the form (assigned by the GUI-form runtime; see .form file)
  private JScrollPane myScrollPane;
  private JTable myTable;
  private final WizardData myData;
  private final MyTableModel myTableModel;
  private JCheckBox myChkIsModified;
  private JCheckBox myChkGetData;
  private JCheckBox myChkSetData;
  private JPanel myPanel;

  BindToExistingBeanStep(@NotNull final WizardData data) {
    myData = data;
    myTableModel = new MyTableModel();
    myTable.setModel(myTableModel);
    myTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    myTable.getColumnModel().setColumnSelectionAllowed(true);
    myScrollPane.getViewport().setBackground(myTable.getBackground());
    myTable.setSurrendersFocusOnKeystroke(true);

    // Customize "Form Property" column
    {
      final TableColumn column = myTable.getColumnModel().getColumn(0/*Form Property*/);
      column.setCellRenderer(new FormPropertyTableCellRenderer(myData.myProject));
    }

    // Customize "Bean Property" column
    {
      final TableColumn column = myTable.getColumnModel().getColumn(1/*Bean Property*/);
      column.setCellRenderer(new BeanPropertyTableCellRenderer());
      final MyTableCellEditor cellEditor = new MyTableCellEditor();
      column.setCellEditor(cellEditor);
      // Single click starts editing; row height must fit the combobox editor.
      final DefaultCellEditor editor = (DefaultCellEditor)myTable.getDefaultEditor(Object.class);
      editor.setClickCountToStart(1);
      myTable.setRowHeight(cellEditor.myCbx.getPreferredSize().height);
    }

    // getData()/setData() generation is mandatory, so those boxes are locked on;
    // isModified() generation follows the user's previous choice.
    myChkGetData.setSelected(true);
    myChkGetData.setEnabled(false);
    myChkSetData.setSelected(true);
    myChkSetData.setEnabled(false);
    myChkIsModified.setSelected(myData.myGenerateIsModified);
  }

  public JComponent getComponent() {
    return myPanel;
  }

  public void _init() {
    // Check that data is correct: this step is only reachable when binding to an
    // existing (not new) bean, and the bean class must already be resolved.
    LOG.assertTrue(!myData.myBindToNewBean);
    LOG.assertTrue(myData.myBeanClass != null);
    myTableModel.fireTableDataChanged();
  }

  public void _commit(boolean finishChosen) {
    // Stop editing if any, so the in-progress combobox value is committed.
    final TableCellEditor cellEditor = myTable.getCellEditor();
    if(cellEditor != null){
      cellEditor.stopCellEditing();
    }
    myData.myGenerateIsModified = myChkIsModified.isSelected();
    // TODO[vova] check that at least one binding field exists
  }

  /**
   * Two-column model over {@code myData.myBindings}: column 0 is the (read-only)
   * form property, column 1 the editable bean property.
   */
  private final class MyTableModel extends AbstractTableModel{
    private final String[] myColumnNames;

    public MyTableModel() {
      myColumnNames = new String[]{
        UIDesignerBundle.message("column.form.field"),
        UIDesignerBundle.message("column.bean.property")};
    }

    public int getColumnCount() {
      return myColumnNames.length;
    }

    public String getColumnName(final int column) {
      return myColumnNames[column];
    }

    public int getRowCount() {
      return myData.myBindings.length;
    }

    public boolean isCellEditable(final int row, final int column) {
      return column == 1/*Bean Property*/;
    }

    public Object getValueAt(final int row, final int column) {
      if(column == 0/*Form Property*/){
        return myData.myBindings[row].myFormProperty;
      }
      else if(column == 1/*Bean Property*/){
        return myData.myBindings[row].myBeanProperty;
      }
      else{
        throw new IllegalArgumentException("unknown column: " + column);
      }
    }

    public void setValueAt(final Object value, final int row, final int column) {
      LOG.assertTrue(column == 1/*Bean Property*/);
      final FormProperty2BeanProperty binding = myData.myBindings[row];
      binding.myBeanProperty = (BeanProperty)value;
    }
  }

  /**
   * Cell editor for the "Bean Property" column: an editable combobox populated on
   * each edit with the bean's read/write properties that are type-compatible with
   * the form property of the edited row and not already used by another row.
   */
  private final class MyTableCellEditor extends AbstractCellEditor implements TableCellEditor{
    private final ComboBox myCbx;
    /* -1 if not defined*/
    private int myEditingRow;

    public MyTableCellEditor() {
      myCbx = new ComboBox();
      myCbx.setEditable(true);
      myCbx.setRenderer(new BeanPropertyListCellRenderer());
      myCbx.registerTableCellEditor(this);
      final JComponent editorComponent = (JComponent)myCbx.getEditor().getEditorComponent();
      editorComponent.setBorder(null);
      myEditingRow = -1;
    }

    /**
     * @return whether it's possible to convert {@code type1} into {@code type2}
     * and vice versa. Only {@code boolean} is restricted: it converts solely to
     * itself; every other supported type pair is considered convertible.
     */
    private boolean canConvert(@NonNls final String type1, @NonNls final String type2){
      if("boolean".equals(type1) || "boolean".equals(type2)){
        return type1.equals(type2);
      }
      else{
        return true;
      }
    }

    public Component getTableCellEditorComponent(
      final JTable table,
      final Object value,
      final boolean isSelected,
      final int row,
      final int column
    ) {
      myEditingRow = row;

      final DefaultComboBoxModel model = (DefaultComboBoxModel)myCbx.getModel();
      model.removeAllElements();
      model.addElement(null/*<not defined>*/);

      // Fill combobox with available bean's properties
      final String[] rProps = PropertyUtil.getReadableProperties(myData.myBeanClass, true);
      final String[] wProps = PropertyUtil.getWritableProperties(myData.myBeanClass, true);
      final ArrayList<BeanProperty> rwProps = new ArrayList<>();

      outer: for(int i = rProps.length - 1; i >= 0; i--){
        final String propName = rProps[i];
        if(ArrayUtil.find(wProps, propName) != -1){
          // BUG FIX: the original code asserted !rwProps.contains(propName), but
          // rwProps holds BeanProperty elements while propName is a String, so
          // List.contains() could never return true and the duplicate check was
          // vacuous. Assert uniqueness by property name instead.
          for (final BeanProperty collected : rwProps) {
            LOG.assertTrue(!propName.equals(collected.myName));
          }

          final PsiMethod getter = PropertyUtil.findPropertyGetter(myData.myBeanClass, propName, false, true);
          if (getter == null) {
            // possible if the getter is static: getReadableProperties() does not filter out static methods, and
            // findPropertyGetter() checks for static/non-static
            continue;
          }
          final PsiType returnType = getter.getReturnType();
          LOG.assertTrue(returnType != null);

          // There are two possible types: boolean and java.lang.String
          @NonNls final String typeName = returnType.getCanonicalText();
          LOG.assertTrue(typeName != null);
          if(!"boolean".equals(typeName) && !"java.lang.String".equals(typeName)){
            continue;
          }

          // Check that the property is not in use yet
          for(int j = myData.myBindings.length - 1; j >= 0; j--){
            final BeanProperty _property = myData.myBindings[j].myBeanProperty;
            if(j != row && _property != null && propName.equals(_property.myName)){
              continue outer;
            }
          }

          // Check that the form property's type and the bean property's type are convertible
          if(
            !canConvert(
              myData.myBindings[row].myFormProperty.getComponentPropertyClassName(),
              typeName
            )
          ){
            continue;
          }

          rwProps.add(new BeanProperty(propName, typeName));
        }
      }

      Collections.sort(rwProps);
      for (BeanProperty rwProp : rwProps) {
        model.addElement(rwProp);
      }

      // Set initially selected item
      if(myData.myBindings[row].myBeanProperty != null){
        myCbx.setSelectedItem(myData.myBindings[row].myBeanProperty);
      }
      else{
        myCbx.setSelectedIndex(0/*<not defined>*/);
      }

      return myCbx;
    }

    public Object getCellEditorValue() {
      LOG.assertTrue(myEditingRow != -1);
      try {
        // our ComboBox is editable so its editor can contain:
        // 1) BeanProperty object (it user just selected something from ComboBox)
        // 2) java.lang.String if user type something into ComboBox
        final Object selectedItem = myCbx.getEditor().getItem();
        if(selectedItem instanceof BeanProperty){
          return selectedItem;
        }
        else if(selectedItem instanceof String){
          final String fieldName = ((String)selectedItem).trim();
          if(fieldName.length() == 0){
            return null; // binding is not defined
          }
          final String fieldType = myData.myBindings[myEditingRow].myFormProperty.getComponentPropertyClassName();
          return new BeanProperty(fieldName, fieldType);
        }
        else{
          throw new IllegalArgumentException("unknown selectedItem: " + selectedItem);
        }
      }
      finally {
        myEditingRow = -1; // unset editing row. So it's possible to invoke this method only once per editing
      }
    }
  }
}
/*L * Copyright (c) 2006 SAIC, SAIC-F. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/rembrandt/LICENSE.txt for details. */ package gov.nih.nci.rembrandt.dto.query; import gov.nih.nci.caintegrator.dto.critieria.AllGenesCriteria; import gov.nih.nci.caintegrator.dto.critieria.AlleleFrequencyCriteria; import gov.nih.nci.caintegrator.dto.critieria.AnalysisTypeCriteria; import gov.nih.nci.caintegrator.dto.critieria.AssayPlatformCriteria; import gov.nih.nci.caintegrator.dto.critieria.CloneOrProbeIDCriteria; import gov.nih.nci.caintegrator.dto.critieria.CopyNumberCriteria; import gov.nih.nci.caintegrator.dto.critieria.SegmentMeanCriteria; import gov.nih.nci.caintegrator.dto.critieria.DiseaseOrGradeCriteria; import gov.nih.nci.caintegrator.dto.critieria.GeneIDCriteria; import gov.nih.nci.caintegrator.dto.critieria.InstitutionCriteria; import gov.nih.nci.caintegrator.dto.critieria.RegionCriteria; import gov.nih.nci.caintegrator.dto.critieria.SNPCriteria; import gov.nih.nci.caintegrator.dto.critieria.SampleCriteria; import gov.nih.nci.caintegrator.dto.de.AlleleFrequencyDE; import gov.nih.nci.caintegrator.dto.de.AssayPlatformDE; import gov.nih.nci.caintegrator.dto.de.CloneIdentifierDE; import gov.nih.nci.caintegrator.dto.de.DiseaseNameDE; import gov.nih.nci.caintegrator.dto.de.DomainElement; import gov.nih.nci.caintegrator.dto.de.InstitutionDE; import gov.nih.nci.caintegrator.dto.de.SNPIdentifierDE; import gov.nih.nci.caintegrator.dto.query.QueryType; import gov.nih.nci.caintegrator.dto.view.CopyNumberSegmentView; import gov.nih.nci.caintegrator.dto.view.ViewFactory; import gov.nih.nci.caintegrator.dto.view.ViewType; import gov.nih.nci.caintegrator.dto.view.Viewable; import gov.nih.nci.caintegrator.enumeration.SpecimenType; import gov.nih.nci.rembrandt.queryservice.queryprocessing.QueryHandler; import gov.nih.nci.rembrandt.queryservice.queryprocessing.ThreadController; import 
gov.nih.nci.rembrandt.queryservice.queryprocessing.cgh.CGHFactHandler; import gov.nih.nci.rembrandt.util.RembrandtConstants; import java.io.Serializable; import java.util.Collection; import java.util.Iterator; import java.util.Locale; import java.util.ResourceBundle; import org.apache.log4j.Logger; /** * caIntegrator License * * Copyright 2001-2005 Science Applications International Corporation ("SAIC"). * The software subject to this notice and license includes both human readable source code form and machine readable, * binary, object code form ("the caIntegrator Software"). The caIntegrator Software was developed in conjunction with * the National Cancer Institute ("NCI") by NCI employees and employees of SAIC. * To the extent government employees are authors, any rights in such works shall be subject to Title 17 of the United States * Code, section 105. * This caIntegrator Software License (the "License") is between NCI and You. "You (or "Your") shall mean a person or an * entity, and all other entities that control, are controlled by, or are under common control with the entity. "Control" * for purposes of this definition means (i) the direct or indirect power to cause the direction or management of such entity, * whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) * beneficial ownership of such entity. * This License is granted provided that You agree to the conditions described below. 
NCI grants You a non-exclusive, * worldwide, perpetual, fully-paid-up, no-charge, irrevocable, transferable and royalty-free right and license in its rights * in the caIntegrator Software to (i) use, install, access, operate, execute, copy, modify, translate, market, publicly * display, publicly perform, and prepare derivative works of the caIntegrator Software; (ii) distribute and have distributed * to and by third parties the caIntegrator Software and any modifications and derivative works thereof; * and (iii) sublicense the foregoing rights set out in (i) and (ii) to third parties, including the right to license such * rights to further third parties. For sake of clarity, and not by way of limitation, NCI shall have no right of accounting * or right of payment from You or Your sublicensees for the rights granted under this License. This License is granted at no * charge to You. * 1. Your redistributions of the source code for the Software must retain the above copyright notice, this list of conditions * and the disclaimer and limitation of liability of Article 6, below. Your redistributions in object code form must reproduce * the above copyright notice, this list of conditions and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * 2. Your end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This * product includes software developed by SAIC and the National Cancer Institute." If You do not include such end-user * documentation, You shall include this acknowledgment in the Software itself, wherever such third-party acknowledgments * normally appear. * 3. You may not use the names "The National Cancer Institute", "NCI" "Science Applications International Corporation" and * "SAIC" to endorse or promote products derived from this Software. 
This License does not authorize You to use any * trademarks, service marks, trade names, logos or product names of either NCI or SAIC, except as required to comply with * the terms of this License. * 4. For sake of clarity, and not by way of limitation, You may incorporate this Software into Your proprietary programs and * into any third party proprietary programs. However, if You incorporate the Software into third party proprietary * programs, You agree that You are solely responsible for obtaining any permission from such third parties required to * incorporate the Software into such third party proprietary programs and for informing Your sublicensees, including * without limitation Your end-users, of their obligation to secure any required permissions from such third parties * before incorporating the Software into such third party proprietary software programs. In the event that You fail * to obtain such permissions, You agree to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such permissions. * 5. For sake of clarity, and not by way of limitation, You may add Your own copyright statement to Your modifications and * to the derivative works, and You may provide additional or different license terms and conditions in Your sublicenses * of modifications of the Software, or any derivative works of the Software as a whole, provided Your use, reproduction, * and distribution of the Work otherwise complies with the conditions stated in this License. * 6. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. 
* IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ public class ComparativeGenomicQuery extends Query implements Serializable,Cloneable{ /** * IMPORTANT! This class requires a clone method! This requires that any new * data field that is added to this class also be cloneable and be added to * clone calls in the clone method.If you do not do this, you will not * seperate the references of at least one data field when we generate a * copy of this object.This means that if the data field ever changes in one * copy or the other it will affect both instances... this will be hell to * track down if you aren't ultra familiar with the code base, so add those * methods now! (Not necesary for primitives.) */ private AllGenesCriteria allGenesCrit; private static Logger logger = Logger .getLogger(ComparativeGenomicQuery.class); private GeneIDCriteria geneIDCriteria; private CopyNumberCriteria copyNumberCriteria; private SegmentMeanCriteria segmentMeanCriteria; private RegionCriteria regionCriteria; private CloneOrProbeIDCriteria cloneOrProbeIDCriteria; private SNPCriteria snpCriteria; private AlleleFrequencyCriteria alleleFrequencyCriteria; private AssayPlatformCriteria assayPlatformCriteria; private AnalysisTypeCriteria analysisTypeCriteria; private QueryHandler HANDLER; public QueryHandler getQueryHandler() throws Exception { return (HANDLER == null) ? 
new gov.nih.nci.rembrandt.queryservice.queryprocessing.cgh.CGHQueryHandler() : HANDLER; } public QueryType getQueryType() throws Exception { QueryType queryType = QueryType.CGH_GENE_QUERY_TYPE; if(getAssociatedView() instanceof CopyNumberSegmentView){ queryType = QueryType.CGH_QUERY_TYPE; } return queryType; } public ComparativeGenomicQuery() { super(); } public String toString() { ResourceBundle labels = null; String OutStr = "<B>Comparative Genomic Query</B>"; OutStr += "<BR><B class='otherBold'>Query Name: </b>" + this.getQueryName(); try { labels = ResourceBundle.getBundle( RembrandtConstants.APPLICATION_RESOURCES, Locale.US); // starting DiseaseOrGradeCriteria DiseaseOrGradeCriteria thisDiseaseCrit = this .getDiseaseOrGradeCriteria(); if ((thisDiseaseCrit != null) && !thisDiseaseCrit.isEmpty() && labels != null) { Collection diseaseColl = thisDiseaseCrit.getDiseases(); String thisCriteria = thisDiseaseCrit.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(thisCriteria.substring(thisCriteria .lastIndexOf(".") + 1)) + "</B><BR>"; Iterator iter = diseaseColl.iterator(); while (iter.hasNext()) { DiseaseNameDE diseaseDE = (DiseaseNameDE) iter.next(); OutStr += "&nbsp;&nbsp;" + ((String) diseaseDE.getValue()) + " <BR>"; } } else { logger .debug("Disease Criteria is empty or Application Resources file is missing"); } // end of DiseaseOrGradeCriteria // start All Genes Criteria AllGenesCriteria thisAllGenesCrit = this.getAllGenesCrit(); if (thisAllGenesCrit != null && !thisAllGenesCrit.isEmpty()) { OutStr += "<br /><b class='otherbold'>Gene</b><br />&nbsp;&nbsp;&nbsp;All Genes"; } else logger.debug("This is not an All Genes Query"); // starting CopyNumberCriteria/SegmentMeanCriteria String cnView = null; CopyNumberCriteria thisCopyNumberCrit = this.getCopyNumberCriteria(); if ((thisCopyNumberCrit != null) && !thisCopyNumberCrit.isEmpty() && labels != null) { logger.debug(" I am in the CopyNumberCriteria"); String thisCriteria = 
thisCopyNumberCrit.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(thisCriteria.substring(thisCriteria .lastIndexOf(".") + 1)) + "</B>"; Collection copyNoObjects = thisCopyNumberCrit.getCopyNummbers(); for (Iterator iter = copyNoObjects.iterator(); iter.hasNext();) { DomainElement de = (DomainElement) iter.next(); String thisDomainElement = de.getClass().getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels.getString(thisDomainElement .substring(thisDomainElement .lastIndexOf(".") + 1)) + ": " + de.getValue(); } cnView = "calculatedCN"; } else { SegmentMeanCriteria thisSegmentMeanCrit = this.getSegmentMeanCriteria(); if ((thisSegmentMeanCrit != null) && !thisSegmentMeanCrit.isEmpty() && labels != null) { logger.debug(" I am in the SegmentMeanCriteria"); String thisCriteria = thisSegmentMeanCrit.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(thisCriteria.substring(thisCriteria .lastIndexOf(".") + 1)) + "</B>"; Collection segMeanObjects = thisSegmentMeanCrit.getSegmentMeanData(); for (Iterator iter = segMeanObjects.iterator(); iter.hasNext();) { DomainElement de = (DomainElement) iter.next(); String thisDomainElement = de.getClass().getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels.getString(thisDomainElement .substring(thisDomainElement .lastIndexOf(".") + 1)) + ": " + de.getValue(); } } cnView = "segmentMean"; } if ( cnView == null ) { logger.debug("Copy Number/Segment Mean Criteria is empty or Application Resources file is missing"); } // end of CopyNumberCriteria/SegmentMeanCriteria GeneIDCriteria thisGeneIDCrit = this.getGeneIDCriteria(); if ((thisGeneIDCrit != null) && !thisGeneIDCrit.isEmpty() && labels != null) { String thisCriteria = thisGeneIDCrit.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(thisCriteria.substring(thisCriteria .lastIndexOf(".") + 1)) + "</B>"; Collection geneIDObjects = thisGeneIDCrit.getGeneIdentifiers(); int count = 0; for (Iterator iter = 
geneIDObjects.iterator(); iter.hasNext() && count < 5;) { count++; DomainElement de = (DomainElement) iter.next(); String thisDomainElement = de.getClass().getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels.getString(thisDomainElement .substring(thisDomainElement .lastIndexOf(".") + 1)) + ": " + de.getValue(); } if (geneIDObjects != null && geneIDObjects.size() > 5) { OutStr += "<BR>&nbsp;&nbsp;..."; } } else logger .debug("Gene ID Criteria is empty or Application Resources file is missing"); SampleCriteria thisSampleIDCrit = this.getSampleIDCrit(); if ((thisSampleIDCrit != null) && !thisSampleIDCrit.isEmpty() && labels != null) { Collection sampleIDObjects = thisSampleIDCrit.getSampleIDs(); if(sampleIDObjects!= null){ String thisCriteria = thisSampleIDCrit.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(thisCriteria.substring(thisCriteria .lastIndexOf(".") + 1)) + "</B>"; int count = 0; for (Iterator iter = sampleIDObjects.iterator(); iter.hasNext() && count < 5;) { count++; DomainElement de = (DomainElement) iter.next(); String thisDomainElement = de.getClass().getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels.getString(thisDomainElement .substring(thisDomainElement .lastIndexOf(".") + 1)) + ": " + de.getValue(); } if (sampleIDObjects.size() > 5) { OutStr += "<BR>&nbsp;&nbsp;..."; } } SpecimenType specimenType = thisSampleIDCrit.getSpecimenType(); if (specimenType != null){ OutStr += "<BR><B class='otherBold'>" + "SpecimenType" + "</B>"; OutStr += "<BR>&nbsp;&nbsp;" + specimenType.toString(); } } else logger .debug("Sample ID Criteria is empty or Application Resources file is missing"); // starting RegionCriteria RegionCriteria thisRegionCrit = this.getRegionCriteria(); if ((thisRegionCrit != null) && !thisRegionCrit.isEmpty() && labels != null) { String thisCriteria = thisRegionCrit.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(thisCriteria.substring(thisCriteria .lastIndexOf(".") + 1)) + "</B>"; 
DomainElement cytoBandDE = thisRegionCrit.getCytoband(); DomainElement cytoBandEndDE = thisRegionCrit.getEndCytoband(); DomainElement chromosomeDE = thisRegionCrit.getChromNumber(); DomainElement chrStartDE = thisRegionCrit.getStart(); DomainElement chrEndDE = thisRegionCrit.getEnd(); if (chromosomeDE != null) { String chromosomeDEStr = chromosomeDE.getClass().getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels .getString(chromosomeDEStr .substring(chromosomeDEStr .lastIndexOf(".") + 1)) + ": " + chromosomeDE.getValue(); if (cytoBandDE != null && cytoBandEndDE != null) { String cytoBandStr = cytoBandDE.getClass().getName(); String cytoBandEndStr = cytoBandEndDE.getClass() .getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels .getString(cytoBandStr .substring(cytoBandStr .lastIndexOf(".") + 1)) + ": " + cytoBandDE.getValue(); OutStr += "&nbsp;&nbsp;to " + cytoBandEndDE.getValue(); } else if (cytoBandDE != null && cytoBandEndDE == null) { String cytoBandStr = cytoBandDE.getClass().getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels .getString(cytoBandStr .substring(cytoBandStr .lastIndexOf(".") + 1)) + ": " + cytoBandDE.getValue(); } else { if (chrStartDE != null && chrEndDE != null) { String chrStartDEStr = chrStartDE.getClass() .getName(); String chrEndDEStr = chrEndDE.getClass().getName(); OutStr += "<BR>&nbsp;&nbsp;" + labels.getString(chrStartDEStr.substring( chrStartDEStr.lastIndexOf(".") + 1, chrStartDEStr.lastIndexOf("$"))) + "(kb)"; OutStr += "<BR>&nbsp;&nbsp;&nbsp;" + labels.getString(chrStartDEStr .substring(chrStartDEStr .lastIndexOf(".") + 1)) + ": " + chrStartDE.getValue(); OutStr += "<BR>&nbsp;&nbsp;&nbsp;" + labels.getString(chrEndDEStr .substring(chrEndDEStr .lastIndexOf(".") + 1)) + ": " + chrEndDE.getValue(); } } } } else { logger .debug("Region Criteria is empty or Application Resources file is missing"); }// end of RegionCriteria // starting cloneorProbeCriteria CloneOrProbeIDCriteria thisCloneOrProbeCriteria = this .getCloneOrProbeIDCriteria(); if 
((thisCloneOrProbeCriteria != null)
					&& !thisCloneOrProbeCriteria.isEmpty() && labels != null) {
				String thisCriteria = thisCloneOrProbeCriteria.getClass()
						.getName();
				OutStr += "<BR><B class='otherBold'>"
						+ labels.getString(thisCriteria.substring(thisCriteria
								.lastIndexOf(".") + 1)) + "</B>";
				Collection cloneColl = thisCloneOrProbeCriteria
						.getIdentifiers();
				Iterator iter = cloneColl.iterator();
				int count = 0;
				// FIX: the guard was "count > 5" with no increment, so with
				// count starting at 0 the loop body could never execute and no
				// clone/probe identifiers were ever printed. Mirror the SNP
				// section below: show at most the first five identifiers.
				while (iter.hasNext() && count < 5) {
					count++;
					CloneIdentifierDE cloneIdentifierDE = (CloneIdentifierDE) iter
							.next();
					String cloneStr = cloneIdentifierDE.getClass().getName();
					OutStr += "<BR>&nbsp;&nbsp;"
							+ labels.getString(cloneStr.substring(cloneStr
									.lastIndexOf(".") + 1)) + ": "
							+ cloneIdentifierDE.getValue() + "";
				}
				if (cloneColl != null && cloneColl.size() > 5) {
					OutStr += "<BR>&nbsp;and&nbsp;...";
				}
			} else {
				logger
						.debug("Clone or Probe Criteria is empty or Application Resources file is missing.");
			}// end of cloneorProbeCriteria
			// starting snpCriteria:
			SNPCriteria thisSNPCriteria = this.getSNPCriteria();
			if ((thisSNPCriteria != null) && !thisSNPCriteria.isEmpty()
					&& labels != null) {
				String thisCriteria = thisSNPCriteria.getClass().getName();
				OutStr += "<BR><B class='otherBold'>"
						+ labels.getString(thisCriteria.substring(thisCriteria
								.lastIndexOf(".") + 1)) + "</B>";
				Collection cloneColl = thisSNPCriteria.getIdentifiers();
				Iterator iter = cloneColl.iterator();
				int count = 0;
				// Cap the rendered SNP identifiers at five; the "..." suffix
				// below signals that more were omitted.
				while (iter.hasNext() && count < 5) {
					count++;
					SNPIdentifierDE snpIdentifierDE = (SNPIdentifierDE) iter
							.next();
					String snpIdStr = snpIdentifierDE.getClass().getName();
					OutStr += "<BR>&nbsp;&nbsp;"
							+ labels.getString(snpIdStr.substring(snpIdStr
									.lastIndexOf(".") + 1)) + ": "
							+ snpIdentifierDE.getValue() + "";
				}
				if (cloneColl != null && cloneColl.size() > 5) {
					OutStr += "<BR>&nbsp;&nbsp;...";
				}
			} else {
				logger
						.debug("SNP Criteria is empty or Application Resources file is missing.");
			}// end of cloneorProbeCriteria
			// starting AlleleFrequencyCriteria:
			AlleleFrequencyCriteria
thisAlleleFrequencyCriteria = this .getAlleleFrequencyCriteria(); if ((thisAlleleFrequencyCriteria != null) && !thisAlleleFrequencyCriteria.isEmpty() && labels != null) { AlleleFrequencyDE alleleFrequencyDE = thisAlleleFrequencyCriteria .getAlleleFrequencyDE(); String alleleStr = alleleFrequencyDE.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(alleleStr.substring(alleleStr .lastIndexOf(".") + 1)) + "</B>"; OutStr += "<BR>&nbsp;&nbsp;" + alleleFrequencyDE.getValue(); } else { logger .debug("SNP Criteria is empty or Application Resources file is missing."); }// end of AlleleFrequencyCriteria // starting AssayPlatformCriteria AssayPlatformCriteria thisAssayPlatformCriteria = this .getAssayPlatformCriteria(); if ((thisAssayPlatformCriteria != null) && !thisAssayPlatformCriteria.isEmpty() && labels != null) { AssayPlatformDE assayPlatformDE = thisAssayPlatformCriteria .getAssayPlatformDE(); String assayStr = assayPlatformDE.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(assayStr.substring(assayStr .lastIndexOf(".") + 1)) + "</B>"; OutStr += "<BR>&nbsp;&nbsp;" + assayPlatformDE.getValue(); } else { logger .debug("AssayPlatform Criteria is empty or Application Resources file is missing."); } // start institution Criteria /* InstitutionCriteria thisInstitutionCriteria = this.getInstitutionCriteria(); if ((thisInstitutionCriteria != null)&& !thisInstitutionCriteria.isEmpty() && labels != null) { Collection institutionColl = thisInstitutionCriteria.getInstitutions(); String thisCriteria = thisInstitutionCriteria.getClass().getName(); OutStr += "<BR><B class='otherBold'>" + labels.getString(thisCriteria.substring(thisCriteria .lastIndexOf(".") + 1)) + "</B><BR>"; Iterator iter = institutionColl.iterator(); while (iter.hasNext()) { InstitutionDE institutionDE= (InstitutionDE) iter.next(); OutStr += "" + ((String) institutionDE.getInstituteName()) + "<BR>"; } } else { logger.debug("institution Criteria is empty or 
Application Resources file is missing."); }// end of institution Criteria */ }// end of try catch (Exception ie) { logger.error("Error in ResourceBundle in CGH query - "); logger.error(ie); } OutStr += "<BR><BR>"; return OutStr; } public GeneIDCriteria getGeneIDCriteria() { return geneIDCriteria; } public void setGeneIDCrit(GeneIDCriteria geneIDCriteria) { this.geneIDCriteria = geneIDCriteria; } public void setGeneIDCriteria(GeneIDCriteria geneIDCriteria) { this.geneIDCriteria = geneIDCriteria; } public AllGenesCriteria getAllGenesCrit() { return allGenesCrit; } public void setAllGenesCrit(AllGenesCriteria allGenes) { this.allGenesCrit = allGenes; } public RegionCriteria getRegionCriteria() { return regionCriteria; } public void setRegionCrit(RegionCriteria regionCriteria) { this.regionCriteria = regionCriteria; } public void setRegionCriteria(RegionCriteria regionCriteria) { this.regionCriteria = regionCriteria; } public CopyNumberCriteria getCopyNumberCriteria() { return copyNumberCriteria; } public void setCopyNumberCrit(CopyNumberCriteria copyNumberCriteria) { this.copyNumberCriteria = copyNumberCriteria; } public void setCopyNumberCriteria(CopyNumberCriteria copyNumberCriteria) { this.copyNumberCriteria = copyNumberCriteria; } public SegmentMeanCriteria getSegmentMeanCriteria() { return segmentMeanCriteria; } public void setSegmentMeanCriteria(SegmentMeanCriteria segmentMeanCriteria) { this.segmentMeanCriteria = segmentMeanCriteria; } public CloneOrProbeIDCriteria getCloneOrProbeIDCriteria() { return cloneOrProbeIDCriteria; } public void setCloneOrProbeIDCrit( CloneOrProbeIDCriteria cloneOrProbeIDCriteria) { this.cloneOrProbeIDCriteria = cloneOrProbeIDCriteria; } public void setCloneOrProbeIDCriteria( CloneOrProbeIDCriteria cloneOrProbeIDCriteria) { this.cloneOrProbeIDCriteria = cloneOrProbeIDCriteria; } public SNPCriteria getSNPCriteria() { return snpCriteria; } public void setSNPCrit(SNPCriteria snpCriteria) { this.snpCriteria = snpCriteria; } public void 
setSNPCriteria(SNPCriteria snpCriteria) { this.snpCriteria = snpCriteria; } public AlleleFrequencyCriteria getAlleleFrequencyCriteria() { return alleleFrequencyCriteria; } public void setAlleleFrequencyCrit( AlleleFrequencyCriteria alleleFrequencyCriteria) { this.alleleFrequencyCriteria = alleleFrequencyCriteria; } public AssayPlatformCriteria getAssayPlatformCriteria() { return assayPlatformCriteria; } public void setAssayPlatformCrit(AssayPlatformCriteria assayPlatformCriteria) { this.assayPlatformCriteria = assayPlatformCriteria; } public void setAssayPlatformCriteria(AssayPlatformCriteria assayPlatformCriteria) { this.assayPlatformCriteria = assayPlatformCriteria; } /** * Returns a boolean true if the AllGenesCriteria has been set * * @return */ public boolean isAllGenesQuery() { if (allGenesCrit != null) { return true; } else { return false; } } public Viewable getAssociatedView() { AllGenesCriteria allGenesCrit = getAllGenesCrit(); Viewable associatedView = null; if (allGenesCrit!=null && allGenesCrit.isAllGenes() ) { associatedView = ViewFactory.newView(ViewType.COPYNUMBER_GENE_SAMPLE_VIEW); } if (getRegionCriteria() != null && getGeneIDCriteria() == null) { associatedView = ViewFactory.newView(ViewType.COPYNUMBER_SEGMENT_VIEW); } if (getGeneIDCriteria() != null && getRegionCriteria() == null ) { associatedView = ViewFactory.newView(ViewType.COPYNUMBER_GENE_SAMPLE_VIEW); } setAssociatedView(associatedView); return associatedView; } /** * Overrides the protected Object.clone() method exposing it as public. * It performs a 2 tier copy, that is, it does a memcopy of the instance * and then sets all the non-primitive data fields to clones of themselves. * * @return -A minimum 2 deep copy of this object. 
*/ public Object clone() { ComparativeGenomicQuery myClone = null; myClone = (ComparativeGenomicQuery) super.clone(); if(alleleFrequencyCriteria != null){ myClone.alleleFrequencyCriteria = (AlleleFrequencyCriteria) alleleFrequencyCriteria.clone(); } if(allGenesCrit != null){ myClone.allGenesCrit = (AllGenesCriteria) allGenesCrit.clone(); } if(assayPlatformCriteria != null){ myClone.assayPlatformCriteria = (AssayPlatformCriteria) assayPlatformCriteria.clone(); } if(cloneOrProbeIDCriteria !=null){ myClone.cloneOrProbeIDCriteria = (CloneOrProbeIDCriteria) cloneOrProbeIDCriteria.clone(); } if(copyNumberCriteria != null){ myClone.copyNumberCriteria = (CopyNumberCriteria) copyNumberCriteria.clone(); } if(segmentMeanCriteria != null){ myClone.segmentMeanCriteria = (SegmentMeanCriteria) segmentMeanCriteria.clone(); } if(geneIDCriteria != null){ myClone.geneIDCriteria = (GeneIDCriteria) geneIDCriteria.clone(); } if(regionCriteria != null){ myClone.regionCriteria = (RegionCriteria) regionCriteria.clone(); } if(snpCriteria != null){ myClone.snpCriteria = (SNPCriteria) snpCriteria.clone(); } if(analysisTypeCriteria != null){ myClone.analysisTypeCriteria = (AnalysisTypeCriteria) analysisTypeCriteria.clone(); } return myClone; } class Handler { } /** * @return the analysisTypeCriteria */ public AnalysisTypeCriteria getAnalysisTypeCriteria() { return analysisTypeCriteria; } /** * @param analysisTypeCriteria the analysisTypeCriteria to set */ public void setAnalysisTypeCriteria(AnalysisTypeCriteria analysisTypeCriteria) { this.analysisTypeCriteria = analysisTypeCriteria; } }
/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 5.0 */ /* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */ package org.iguanatool.testobject.cparser; /** * An implementation of interface CharStream, where the stream is assumed to * contain only ASCII characters (without unicode processing). */ public class SimpleCharStream { /** * Whether parser is static. */ public static final boolean staticFlag = false; /** * Position in buffer. */ public int bufpos = -1; protected int bufline[]; protected int bufcolumn[]; protected int column = 0; protected int line = 1; protected boolean prevCharIsCR = false; protected boolean prevCharIsLF = false; protected java.io.Reader inputStream; protected char[] buffer; protected int maxNextCharInd = 0; protected int inBuf = 0; protected int tabSize = 8; int bufsize; int available; int tokenBegin; /** * Constructor. */ public SimpleCharStream(java.io.Reader dstream, int startline, int startcolumn, int buffersize) { inputStream = dstream; line = startline; column = startcolumn - 1; available = bufsize = buffersize; buffer = new char[buffersize]; bufline = new int[buffersize]; bufcolumn = new int[buffersize]; } /** * Constructor. */ public SimpleCharStream(java.io.Reader dstream, int startline, int startcolumn) { this(dstream, startline, startcolumn, 4096); } /** * Constructor. */ public SimpleCharStream(java.io.Reader dstream) { this(dstream, 1, 1, 4096); } /** * Constructor. */ public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException { this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); } /** * Constructor. 
*/ public SimpleCharStream(java.io.InputStream dstream, int startline, int startcolumn, int buffersize) { this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); } /** * Constructor. */ public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline, int startcolumn) throws java.io.UnsupportedEncodingException { this(dstream, encoding, startline, startcolumn, 4096); } /** * Constructor. */ public SimpleCharStream(java.io.InputStream dstream, int startline, int startcolumn) { this(dstream, startline, startcolumn, 4096); } /** * Constructor. */ public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException { this(dstream, encoding, 1, 1, 4096); } /** * Constructor. */ public SimpleCharStream(java.io.InputStream dstream) { this(dstream, 1, 1, 4096); } protected void setTabSize(int i) { tabSize = i; } protected int getTabSize(int i) { return tabSize; } protected void ExpandBuff(boolean wrapAround) { char[] newbuffer = new char[bufsize + 2048]; int newbufline[] = new int[bufsize + 2048]; int newbufcolumn[] = new int[bufsize + 2048]; try { if (wrapAround) { System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos); buffer = newbuffer; System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos); bufline = newbufline; System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin); System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos); bufcolumn = newbufcolumn; maxNextCharInd = (bufpos += (bufsize - tokenBegin)); } else { System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin); buffer = newbuffer; System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin); bufline = newbufline; System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize 
- tokenBegin); bufcolumn = newbufcolumn; maxNextCharInd = (bufpos -= tokenBegin); } } catch (Throwable t) { throw new Error(t.getMessage()); } bufsize += 2048; available = bufsize; tokenBegin = 0; } protected void FillBuff() throws java.io.IOException { if (maxNextCharInd == available) { if (available == bufsize) { if (tokenBegin > 2048) { bufpos = maxNextCharInd = 0; available = tokenBegin; } else if (tokenBegin < 0) bufpos = maxNextCharInd = 0; else ExpandBuff(false); } else if (available > tokenBegin) available = bufsize; else if ((tokenBegin - available) < 2048) ExpandBuff(true); else available = tokenBegin; } int i; try { if ((i = inputStream.read(buffer, maxNextCharInd, available - maxNextCharInd)) == -1) { inputStream.close(); throw new java.io.IOException(); } else maxNextCharInd += i; return; } catch (java.io.IOException e) { --bufpos; backup(0); if (tokenBegin == -1) tokenBegin = bufpos; throw e; } } /** * Start. */ public char BeginToken() throws java.io.IOException { tokenBegin = -1; char c = readChar(); tokenBegin = bufpos; return c; } protected void UpdateLineColumn(char c) { column++; if (prevCharIsLF) { prevCharIsLF = false; line += (column = 1); } else if (prevCharIsCR) { prevCharIsCR = false; if (c == '\n') { prevCharIsLF = true; } else line += (column = 1); } switch (c) { case '\r': prevCharIsCR = true; break; case '\n': prevCharIsLF = true; break; case '\t': column--; column += (tabSize - (column % tabSize)); break; default: break; } bufline[bufpos] = line; bufcolumn[bufpos] = column; } /** * Read a character. 
*/ public char readChar() throws java.io.IOException { if (inBuf > 0) { --inBuf; if (++bufpos == bufsize) bufpos = 0; return buffer[bufpos]; } if (++bufpos >= maxNextCharInd) FillBuff(); char c = buffer[bufpos]; UpdateLineColumn(c); return c; } @Deprecated /** * @deprecated * @see #getEndColumn */ public int getColumn() { return bufcolumn[bufpos]; } @Deprecated /** * @deprecated * @see #getEndLine */ public int getLine() { return bufline[bufpos]; } /** * Get token end column number. */ public int getEndColumn() { return bufcolumn[bufpos]; } /** * Get token end line number. */ public int getEndLine() { return bufline[bufpos]; } /** * Get token beginning column number. */ public int getBeginColumn() { return bufcolumn[tokenBegin]; } /** * Get token beginning line number. */ public int getBeginLine() { return bufline[tokenBegin]; } /** * Backup a number of characters. */ public void backup(int amount) { inBuf += amount; if ((bufpos -= amount) < 0) bufpos += bufsize; } /** * Reinitialise. */ public void ReInit(java.io.Reader dstream, int startline, int startcolumn, int buffersize) { inputStream = dstream; line = startline; column = startcolumn - 1; if (buffer == null || buffersize != buffer.length) { available = bufsize = buffersize; buffer = new char[buffersize]; bufline = new int[buffersize]; bufcolumn = new int[buffersize]; } prevCharIsLF = prevCharIsCR = false; tokenBegin = inBuf = maxNextCharInd = 0; bufpos = -1; } /** * Reinitialise. */ public void ReInit(java.io.Reader dstream, int startline, int startcolumn) { ReInit(dstream, startline, startcolumn, 4096); } /** * Reinitialise. */ public void ReInit(java.io.Reader dstream) { ReInit(dstream, 1, 1, 4096); } /** * Reinitialise. */ public void ReInit(java.io.InputStream dstream, String encoding, int startline, int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException { ReInit(encoding == null ? 
new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize); } /** * Reinitialise. */ public void ReInit(java.io.InputStream dstream, int startline, int startcolumn, int buffersize) { ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize); } /** * Reinitialise. */ public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException { ReInit(dstream, encoding, 1, 1, 4096); } /** * Reinitialise. */ public void ReInit(java.io.InputStream dstream) { ReInit(dstream, 1, 1, 4096); } /** * Reinitialise. */ public void ReInit(java.io.InputStream dstream, String encoding, int startline, int startcolumn) throws java.io.UnsupportedEncodingException { ReInit(dstream, encoding, startline, startcolumn, 4096); } /** * Reinitialise. */ public void ReInit(java.io.InputStream dstream, int startline, int startcolumn) { ReInit(dstream, startline, startcolumn, 4096); } /** * Get token literal value. */ public String GetImage() { if (bufpos >= tokenBegin) return new String(buffer, tokenBegin, bufpos - tokenBegin + 1); else return new String(buffer, tokenBegin, bufsize - tokenBegin) + new String(buffer, 0, bufpos + 1); } /** * Get the suffix. */ public char[] GetSuffix(int len) { char[] ret = new char[len]; if ((bufpos + 1) >= len) System.arraycopy(buffer, bufpos - len + 1, ret, 0, len); else { System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0, len - bufpos - 1); System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1); } return ret; } /** * Reset buffer when finished. */ public void Done() { buffer = null; bufline = null; bufcolumn = null; } /** * Method to adjust line and column numbers for the start of a token. 
*/ public void adjustBeginLineColumn(int newLine, int newCol) { int start = tokenBegin; int len; if (bufpos >= tokenBegin) { len = bufpos - tokenBegin + inBuf + 1; } else { len = bufsize - tokenBegin + bufpos + 1 + inBuf; } int i = 0, j = 0, k = 0; int nextColDiff = 0, columnDiff = 0; while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize]) { bufline[j] = newLine; nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j]; bufcolumn[j] = newCol + columnDiff; columnDiff = nextColDiff; i++; } if (i < len) { bufline[j] = newLine++; bufcolumn[j] = newCol + columnDiff; while (i++ < len) { if (bufline[j = start % bufsize] != bufline[++start % bufsize]) bufline[j] = newLine++; else bufline[j] = newLine; } } line = bufline[j]; column = bufcolumn[j]; } } /* JavaCC - OriginalChecksum=a779efb0ebe77143da89cf6a61e896cc (do not edit this line) */
/* * Copyright (c) 2003, 2007, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.security.pkcs11; import java.math.BigInteger; import java.security.*; import java.security.spec.*; import javax.crypto.*; import javax.crypto.interfaces.*; import javax.crypto.spec.*; import static sun.security.pkcs11.TemplateManager.*; import sun.security.pkcs11.wrapper.*; import static sun.security.pkcs11.wrapper.PKCS11Constants.*; /** * KeyAgreement implementation class. This class currently supports * DH. 
* * @author Andreas Sterbenz * @since 1.5 */ final class P11KeyAgreement extends KeyAgreementSpi { // token instance private final Token token; // algorithm name private final String algorithm; // mechanism id private final long mechanism; // private key, if initialized private P11Key privateKey; // other sides public value ("y"), if doPhase() already called private BigInteger publicValue; // length of the secret to be derived private int secretLen; // KeyAgreement from SunJCE as fallback for > 2 party agreement private KeyAgreement multiPartyAgreement; P11KeyAgreement(Token token, String algorithm, long mechanism) { super(); this.token = token; this.algorithm = algorithm; this.mechanism = mechanism; } // see JCE spec protected void engineInit(Key key, SecureRandom random) throws InvalidKeyException { if (key instanceof PrivateKey == false) { throw new InvalidKeyException ("Key must be instance of PrivateKey"); } privateKey = P11KeyFactory.convertKey(token, key, algorithm); publicValue = null; multiPartyAgreement = null; } // see JCE spec protected void engineInit(Key key, AlgorithmParameterSpec params, SecureRandom random) throws InvalidKeyException, InvalidAlgorithmParameterException { if (params != null) { throw new InvalidAlgorithmParameterException ("Parameters not supported"); } engineInit(key, random); } // see JCE spec protected Key engineDoPhase(Key key, boolean lastPhase) throws InvalidKeyException, IllegalStateException { if (privateKey == null) { throw new IllegalStateException("Not initialized"); } if (publicValue != null) { throw new IllegalStateException("Phase already executed"); } // PKCS#11 only allows key agreement between 2 parties // JCE allows >= 2 parties. To support that case (for compatibility // and to pass JCK), fall back to SunJCE in this case. // NOTE that we initialize using the P11Key, which will fail if it // is sensitive/unextractable. 
However, this is not an issue in the // compatibility configuration, which is all we are targeting here. if ((multiPartyAgreement != null) || (lastPhase == false)) { if (multiPartyAgreement == null) { try { multiPartyAgreement = KeyAgreement.getInstance ("DH", P11Util.getSunJceProvider()); multiPartyAgreement.init(privateKey); } catch (NoSuchAlgorithmException e) { throw new InvalidKeyException ("Could not initialize multi party agreement", e); } } return multiPartyAgreement.doPhase(key, lastPhase); } if ((key instanceof PublicKey == false) || (key.getAlgorithm().equals(algorithm) == false)) { throw new InvalidKeyException ("Key must be a PublicKey with algorithm DH"); } BigInteger p, g, y; if (key instanceof DHPublicKey) { DHPublicKey dhKey = (DHPublicKey)key; y = dhKey.getY(); DHParameterSpec params = dhKey.getParams(); p = params.getP(); g = params.getG(); } else { // normally, DH PublicKeys will always implement DHPublicKey // just in case not, attempt conversion P11DHKeyFactory kf = new P11DHKeyFactory(token, "DH"); try { DHPublicKeySpec spec = (DHPublicKeySpec)kf.engineGetKeySpec (key, DHPublicKeySpec.class); y = spec.getY(); p = spec.getP(); g = spec.getG(); } catch (InvalidKeySpecException e) { throw new InvalidKeyException("Could not obtain key values", e); } } // if parameters of private key are accessible, verify that // they match parameters of public key // XXX p and g should always be readable, even if the key is sensitive if (privateKey instanceof DHPrivateKey) { DHPrivateKey dhKey = (DHPrivateKey)privateKey; DHParameterSpec params = dhKey.getParams(); if ((p.equals(params.getP()) == false) || (g.equals(params.getG()) == false)) { throw new InvalidKeyException ("PublicKey DH parameters must match PrivateKey DH parameters"); } } publicValue = y; // length of the secret is length of key secretLen = (p.bitLength() + 7) >> 3; return null; } // see JCE spec protected byte[] engineGenerateSecret() throws IllegalStateException { if (multiPartyAgreement != 
// NOTE(review): this span begins mid-method — the opening of the enclosing
// method (presumably `protected byte[] engineGenerateSecret()` guarded by
// `if (multiPartyAgreement != null) {`) lies before this chunk. The leading
// fragment is reproduced verbatim.
null) {
            byte[] val = multiPartyAgreement.generateSecret();
            multiPartyAgreement = null;
            return val;
        }
        if ((privateKey == null) || (publicValue == null)) {
            throw new IllegalStateException("Not initialized correctly");
        }
        Session session = null;
        try {
            // Derive a generic secret key via PKCS#11, read its raw value,
            // then destroy the token object — only the byte[] survives.
            session = token.getOpSession();
            CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] {
                new CK_ATTRIBUTE(CKA_CLASS, CKO_SECRET_KEY),
                new CK_ATTRIBUTE(CKA_KEY_TYPE, CKK_GENERIC_SECRET),
            };
            attributes = token.getAttributes
                (O_GENERATE, CKO_SECRET_KEY, CKK_GENERIC_SECRET, attributes);
            long keyID = token.p11.C_DeriveKey(session.id(),
                new CK_MECHANISM(mechanism, publicValue), privateKey.keyID,
                attributes);
            attributes = new CK_ATTRIBUTE[] {
                new CK_ATTRIBUTE(CKA_VALUE)
            };
            token.p11.C_GetAttributeValue(session.id(), keyID, attributes);
            byte[] secret = attributes[0].getByteArray();
            // The derived object is transient; free it on the token now that
            // the value has been extracted.
            token.p11.C_DestroyObject(session.id(), keyID);
            // trim leading 0x00 bytes per JCE convention
            return P11Util.trimZeroes(secret);
        } catch (PKCS11Exception e) {
            throw new ProviderException("Could not derive key", e);
        } finally {
            // One-shot agreement: the peer's public value is cleared whether
            // or not derivation succeeded.
            publicValue = null;
            token.releaseSession(session);
        }
    }

    /**
     * Writes the shared secret into {@code sharedSecret} starting at
     * {@code offset} and returns the number of bytes written.
     * Delegates to {@link #engineGenerateSecret()} after a buffer-size check.
     */
    // see JCE spec
    protected int engineGenerateSecret(byte[] sharedSecret, int offset)
            throws IllegalStateException, ShortBufferException {
        if (multiPartyAgreement != null) {
            int n = multiPartyAgreement.generateSecret(sharedSecret, offset);
            multiPartyAgreement = null;
            return n;
        }
        // Check against the expected length before deriving, so a too-small
        // buffer fails fast without consuming the agreement state.
        if (offset + secretLen > sharedSecret.length) {
            throw new ShortBufferException("Need " + secretLen
                + " bytes, only " + (sharedSecret.length - offset) + " available");
        }
        byte[] secret = engineGenerateSecret();
        System.arraycopy(secret, 0, sharedSecret, offset, secret.length);
        return secret.length;
    }

    /**
     * Returns the shared secret as a {@link SecretKey} of the requested
     * algorithm, applying SunJCE-compatible key-length rules
     * (DES=8, DESede=24, Blowfish&le;56, TlsPremasterSecret=full length).
     */
    // see JCE spec
    protected SecretKey engineGenerateSecret(String algorithm)
            throws IllegalStateException, NoSuchAlgorithmException,
            InvalidKeyException {
        if (multiPartyAgreement != null) {
            SecretKey key = multiPartyAgreement.generateSecret(algorithm);
            multiPartyAgreement = null;
            return key;
        }
        if (algorithm == null) {
            throw new NoSuchAlgorithmException("Algorithm must not be null");
        }
        if (algorithm.equals("TlsPremasterSecret")) {
            // For now, only perform native derivation for TlsPremasterSecret
            // as that is required for FIPS compliance.
            // For other algorithms, there are unresolved issues regarding
            // how this should work in JCE plus a Solaris truncation bug.
            // (bug not yet filed).
            return nativeGenerateSecret(algorithm);
        }
        byte[] secret = engineGenerateSecret();
        // Maintain compatibility for SunJCE:
        // verify secret length is sensible for algorithm / truncate
        // return generated key itself if possible
        int keyLen;
        if (algorithm.equalsIgnoreCase("DES")) {
            keyLen = 8;
        } else if (algorithm.equalsIgnoreCase("DESede")) {
            keyLen = 24;
        } else if (algorithm.equalsIgnoreCase("Blowfish")) {
            keyLen = Math.min(56, secret.length);
        } else if (algorithm.equalsIgnoreCase("TlsPremasterSecret")) {
            keyLen = secret.length;
        } else {
            throw new NoSuchAlgorithmException
                ("Unknown algorithm " + algorithm);
        }
        if (secret.length < keyLen) {
            throw new InvalidKeyException("Secret too short");
        }
        // DES-family keys carry parity bits; fix them on each 8-byte unit.
        if (algorithm.equalsIgnoreCase("DES")
                || algorithm.equalsIgnoreCase("DESede")) {
            for (int i = 0; i < keyLen; i+=8) {
                P11SecretKeyFactory.fixDESParity(secret, i);
            }
        }
        return new SecretKeySpec(secret, 0, keyLen, algorithm);
    }

    /**
     * Derives the secret entirely inside the token (no raw bytes pulled out
     * unless the key format is RAW), returning a P11-backed SecretKey.
     */
    private SecretKey nativeGenerateSecret(String algorithm)
            throws IllegalStateException, NoSuchAlgorithmException,
            InvalidKeyException {
        if ((privateKey == null) || (publicValue == null)) {
            throw new IllegalStateException("Not initialized correctly");
        }
        long keyType = CKK_GENERIC_SECRET;
        Session session = null;
        try {
            session = token.getObjSession();
            CK_ATTRIBUTE[] attributes = new CK_ATTRIBUTE[] {
                new CK_ATTRIBUTE(CKA_CLASS, CKO_SECRET_KEY),
                new CK_ATTRIBUTE(CKA_KEY_TYPE, keyType),
            };
            attributes = token.getAttributes
                (O_GENERATE, CKO_SECRET_KEY, keyType, attributes);
            long keyID = token.p11.C_DeriveKey(session.id(),
                new CK_MECHANISM(mechanism, publicValue), privateKey.keyID,
                attributes);
            // Query only the value length; keyLen is in bytes, P11Key wants bits.
            CK_ATTRIBUTE[] lenAttributes = new CK_ATTRIBUTE[] {
                new CK_ATTRIBUTE(CKA_VALUE_LEN),
            };
            token.p11.C_GetAttributeValue(session.id(), keyID, lenAttributes);
            int keyLen = (int)lenAttributes[0].getLong();
            SecretKey key = P11Key.secretKey
                (session, keyID, algorithm, keyLen << 3, attributes);
            if ("RAW".equals(key.getFormat())) {
                // Workaround for Solaris bug 6318543.
                // Strip leading zeroes ourselves if possible (key not sensitive).
                // This should be removed once the Solaris fix is available
                // as here we always retrieve the CKA_VALUE even for tokens
                // that do not have that bug.
                byte[] keyBytes = key.getEncoded();
                byte[] newBytes = P11Util.trimZeroes(keyBytes);
                // trimZeroes returns the same array when nothing was stripped;
                // identity compare is intentional here.
                if (keyBytes != newBytes) {
                    key = new SecretKeySpec(newBytes, algorithm);
                }
            }
            return key;
        } catch (PKCS11Exception e) {
            throw new InvalidKeyException("Could not derive key", e);
        } finally {
            publicValue = null;
            token.releaseSession(session);
        }
    }
}
/*
 * Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.gcardone.junidecode;

/**
 * Character map for Unicode characters with codepoint U+FExx
 * (variation selectors, vertical forms, CJK compatibility forms,
 * small form variants, halfwidth/fullwidth forms block tail).
 * Index into {@link #map} is the low byte of the codepoint; "[?]" marks
 * an unmapped character, "" an invisible/zero-width one. Some entries
 * intentionally carry a trailing space (closing brackets) — do not "clean"
 * them, the transliteration output depends on it.
 *
 * @author Giuseppe Cardone
 * @version 0.1
 */
class Xfe {

    public static final String[] map = new String[]{
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x00-0x07
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x08-0x0f
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x10-0x17
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x18-0x1f
        "", "", "", "~", "[?]", "[?]", "[?]", "[?]", // 0x20-0x27
        "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", "[?]", // 0x28-0x2f
        "..", "--", "-", "_", "_", "(", ") ", "{", // 0x30-0x37
        "} ", "[", "] ", "[(", ")] ", "<<", ">> ", "<", // 0x38-0x3f
        "> ", "[", "] ", "{", "}", "[?]", "[?]", "[?]", // 0x40-0x47
        "[?]", "", "", "", "", "", "", "", // 0x48-0x4f
        ",", ",", ".", "", ";", ":", "?", "!", // 0x50-0x57
        "-", "(", ")", "{", "}", "{", "}", "#", // 0x58-0x5f
        "&", "*", "+", "-", "<", ">", "=", "", // 0x60-0x67
        "\\", "$", "%", "@", "[?]", "[?]", "[?]", "[?]", // 0x68-0x6f
        "", "", "", "[?]", "", "[?]", "", "", // 0x70-0x77
        "", "", "", "", "", "", "", "", // 0x78-0x7f
        "", "", "", "", "", "", "", "", // 0x80-0x87
        "", "", "", "", "", "", "", "", // 0x88-0x8f
        "", "", "", "", "", "", "", "", // 0x90-0x97
        "", "", "", "", "", "", "", "", // 0x98-0x9f
        "", "", "", "", "", "", "", "", // 0xa0-0xa7
        "", "", "", "", "", "", "", "", // 0xa8-0xaf
        "", "", "", "", "", "", "", "", // 0xb0-0xb7
        "", "", "", "", "", "", "", "", // 0xb8-0xbf
        "", "", "", "", "", "", "", "", // 0xc0-0xc7
        "", "", "", "", "", "", "", "", // 0xc8-0xcf
        "", "", "", "", "", "", "", "", // 0xd0-0xd7
        "", "", "", "", "", "", "", "", // 0xd8-0xdf
        "", "", "", "", "", "", "", "", // 0xe0-0xe7
        "", "", "", "", "", "", "", "", // 0xe8-0xef
        "", "", "", "", "", "", "", "", // 0xf0-0xf7
        "", "", "", "", "", "[?]", "[?]", "" // 0xf8-0xff
    };
}
/*
 * Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *    Simon McDuff - initial API and implementation
 *    Eike Stepper - maintenance
 */
package org.eclipse.emf.cdo.common.util;

import java.util.Collection;
import java.util.Comparator;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import org.eclipse.net4j.util.WrappedException;
import org.eclipse.net4j.util.collection.Closeable;

/**
 * The {@link Queue queue} that represents the result of a CDOQuery.
 * <p>
 * Elements are internally wrapped in sequence-numbered {@code QueueEntry}
 * objects so that FIFO order is preserved inside the priority queue while
 * exceptions (see {@link #setException(Throwable)}) jump to the front and a
 * sentinel close marker sorts last.
 *
 * @author Simon McDuff
 * @since 2.0
 * @noextend This interface is not intended to be extended by clients.
 * @noimplement This interface is not intended to be implemented by clients.
 */
public class CDOQueryQueue<E> implements Queue<E>, Closeable
{
  // Static not allowed due to <E>
  // Sentinel entry: seqNumber == Long.MAX_VALUE makes it sort after all
  // real entries, so consumers drain remaining results before seeing EOF.
  private final QueueEntry<E> QUEUE_CLOSED = new QueueEntry<E>();

  private PriorityBlockingQueue<QueueEntry<E>> queue = new PriorityBlockingQueue<QueueEntry<E>>(10);

  private boolean closed;

  // Guards 'closed' and the atomicity of the close()/isEmpty() check
  // performed by the blocking iterator.
  private Object closeLock = new Object();

  public CDOQueryQueue()
  {
  }

  /**
   * Enqueues an exception; it will be re-thrown (wrapped) on the consumer
   * side when its entry is dequeued.
   */
  public void setException(Throwable exception)
  {
    queue.add(new QueueEntry<E>(exception));
  }

  /**
   * Marks the queue closed and enqueues the EOF sentinel exactly once.
   */
  public void close()
  {
    synchronized (closeLock)
    {
      if (!closed)
      {
        closed = true;
        queue.add(QUEUE_CLOSED);
      }
    }
  }

  public boolean isClosed()
  {
    synchronized (closeLock)
    {
      return closed;
    }
  }

  public boolean add(E e)
  {
    return queue.add(new QueueEntry<E>(e));
  }

  public void clear()
  {
    queue.clear();
  }

  // NOTE(review): compares o against internal QueueEntry wrappers, not
  // against user elements — likely always false for client objects; verify.
  public boolean contains(Object o)
  {
    return queue.contains(o);
  }

  public E element()
  {
    return checkObject(queue.element());
  }

  // NOTE(review): delegating equals/hashCode to the backing queue breaks
  // symmetry with other Queue implementations; PriorityBlockingQueue uses
  // identity equals. Confirm no caller relies on value equality here.
  @Override
  public boolean equals(Object obj)
  {
    return queue.equals(obj);
  }

  @Override
  public int hashCode()
  {
    return queue.hashCode();
  }

  public boolean isEmpty()
  {
    return queue.isEmpty();
  }

  public BlockingCloseableIterator<E> iterator()
  {
    return new BlockingCloseableIteratorImpl();
  }

  // PriorityBlockingQueue is unbounded, so the timed offer never blocks;
  // kept for BlockingQueue-style symmetry.
  public boolean offer(E e, long timeout, TimeUnit unit)
  {
    return queue.offer(new QueueEntry<E>(e), timeout, unit);
  }

  public boolean offer(E e)
  {
    return queue.offer(new QueueEntry<E>(e));
  }

  public E peek()
  {
    return checkObject(queue.peek());
  }

  public E poll(long timeout, TimeUnit unit) throws InterruptedException
  {
    return checkObject(queue.poll(timeout, unit));
  }

  public void put(E e)
  {
    queue.put(new QueueEntry<E>(e));
  }

  public int remainingCapacity()
  {
    return queue.remainingCapacity();
  }

  public E remove()
  {
    return checkObject(queue.remove());
  }

  // NOTE(review): same wrapper mismatch as contains(Object) — verify.
  public boolean remove(Object o)
  {
    return queue.remove(o);
  }

  public int size()
  {
    return queue.size();
  }

  /**
   * Blocks until an entry is available; returns null for the EOF sentinel,
   * re-throws wrapped producer exceptions via checkObject().
   */
  public E take() throws InterruptedException
  {
    QueueEntry<E> entry = null;
    entry = queue.take();
    return checkObject(entry);
  }

  // toArray exposes internal QueueEntry wrappers, not elements of type E.
  public Object[] toArray()
  {
    return queue.toArray();
  }

  @SuppressWarnings("unchecked")
  public Object[] toArray(Object[] a)
  {
    return queue.toArray(a);
  }

  @Override
  public String toString()
  {
    return queue.toString();
  }

  public E poll()
  {
    QueueEntry<E> entry = queue.poll();
    return checkObject(entry);
  }

  public Comparator<?> comparator()
  {
    throw new UnsupportedOperationException();
  }

  public boolean containsAll(Collection<?> c)
  {
    throw new UnsupportedOperationException();
  }

  public boolean addAll(Collection<? extends E> c)
  {
    throw new UnsupportedOperationException();
  }

  public boolean removeAll(Collection<?> c)
  {
    throw new UnsupportedOperationException();
  }

  public boolean retainAll(Collection<?> c)
  {
    throw new UnsupportedOperationException();
  }

  /**
   * Unwraps an entry: null/sentinel -> null; exception entry -> throws;
   * otherwise the user element.
   */
  private E checkObject(QueueEntry<E> entry)
  {
    if (entry == null || entry == QUEUE_CLOSED)
    {
      return null;
    }

    return entry.getObjectWithException();
  }

  /**
   * FIFO-ordered wrapper; exceptions sort first, the close sentinel last.
   *
   * @author Simon McDuff
   * @since 2.0
   */
  private static class QueueEntry<E> implements Comparable<QueueEntry<E>>
  {
    private static final AtomicLong nextSeq = new AtomicLong(0);

    private long seqNumber;

    // Either the element (E), a Throwable, or null for the sentinel.
    private Object internalObject;

    // Sentinel constructor: sorts after every real entry.
    public QueueEntry()
    {
      seqNumber = Long.MAX_VALUE;
    }

    public QueueEntry(E object)
    {
      internalObject = object;
      seqNumber = nextSeq.getAndIncrement();
    }

    public QueueEntry(Throwable object)
    {
      internalObject = object;
      seqNumber = nextSeq.getAndIncrement();
    }

    @SuppressWarnings("unchecked")
    public E getObjectWithException()
    {
      Throwable exception = getException();
      if (exception instanceof Exception)
      {
        throw WrappedException.wrap((Exception)exception);
      }
      else if (exception instanceof Error)
      {
        throw (Error)exception;
      }

      return (E)internalObject;
    }

    public Throwable getException()
    {
      if (internalObject instanceof Throwable)
      {
        return (Throwable)internalObject;
      }

      return null;
    }

    // NOTE(review): when both entries hold exceptions this returns -1 for
    // either argument order, which violates the compareTo contract
    // (sgn(a.compareTo(b)) == -sgn(b.compareTo(a))); confirm acceptable
    // for PriorityBlockingQueue's use.
    public int compareTo(QueueEntry<E> o)
    {
      if (getException() != null)
      {
        return -1;
      }

      if (o.getException() != null)
      {
        return 1;
      }

      if (this == o)
      {
        return 0;
      }

      if (seqNumber == o.seqNumber)
      {
        // Should not be possible
        return 0;
      }

      return seqNumber < o.seqNumber ? -1 : 1;
    }
  }

  /**
   * A blocking iterator that takes elements from a {@link CDOQueryQueue}.
   * Non-static: reads the enclosing queue. hasNext() blocks until an element
   * arrives or the queue is closed and drained.
   *
   * @author Simon McDuff
   * @since 2.0
   */
  public class BlockingCloseableIteratorImpl implements BlockingCloseableIterator<E>
  {
    // NOTE(review): close() only flips this flag; a thread already blocked
    // in take() is not interrupted/unblocked by it — confirm intended.
    private boolean closed;

    // One-element lookahead filled by privateNext(), consumed by next().
    private E nextElement;

    public BlockingCloseableIteratorImpl()
    {
    }

    public E peek()
    {
      if (nextElement == null)
      {
        return CDOQueryQueue.this.peek();
      }

      return nextElement;
    }

    public boolean hasNext()
    {
      privateNext(false);
      return nextElement != null;
    }

    private void privateNext(boolean failOnNull)
    {
      if (nextElement == null)
      {
        try
        {
          // Check empty+closed atomically under the close lock so the EOF
          // sentinel cannot slip in between the two reads.
          synchronized (closeLock)
          {
            if (CDOQueryQueue.this.isEmpty() && CDOQueryQueue.this.isClosed())
            {
              if (failOnNull)
              {
                throw new NoSuchElementException();
              }

              return;
            }
          }

          nextElement = take();
        }
        catch (InterruptedException ex)
        {
          throw WrappedException.wrap(ex);
        }
      }
    }

    public E next()
    {
      try
      {
        privateNext(true);
        return nextElement;
      }
      finally
      {
        nextElement = null;
      }
    }

    public void remove()
    {
      throw new UnsupportedOperationException();
    }

    public void close()
    {
      this.closed = true;
    }

    public boolean isClosed()
    {
      return this.closed;
    }
  }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-548
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.10.14 at 09:59:57 AM CDT
//
// NOTE(review): generated class — prefer regenerating from the XSD over
// hand-editing. The original generated javadoc declared CurrentPan/Id as
// xs:int while the fields are long/String; the bindings below reflect the
// actual generated code, so the schema comment was stale — verify against
// the current XSD.

package com.mastercard.api.mdes.csrapi.v1.pan.swap.domain;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code SwapPANRequest} root element:
 * CurrentPan, NewPan (Id/ExpiryDate/CardSequenceNumber/
 * UpdateWalletServiceProvider), Comments and AuditInfo
 * (UserId/UserName/Organization).
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "currentPan",
    "newPan",
    "comments",
    "auditInfo"
})
@XmlRootElement(name = "SwapPANRequest")
public class SwapPANRequest {

    @XmlElement(name = "CurrentPan")
    protected long currentPan;
    @XmlElement(name = "NewPan", required = true)
    protected NewPan newPan;
    @XmlElement(name = "Comments", required = true)
    protected String comments;
    @XmlElement(name = "AuditInfo", required = true)
    protected AuditInfo auditInfo;

    /** Gets the value of the currentPan property. */
    public long getCurrentPan() {
        return currentPan;
    }

    /** Sets the value of the currentPan property. */
    public void setCurrentPan(long value) {
        this.currentPan = value;
    }

    /** Gets the value of the newPan property. */
    public NewPan getNewPan() {
        return newPan;
    }

    /** Sets the value of the newPan property. */
    public void setNewPan(NewPan value) {
        this.newPan = value;
    }

    /** Gets the value of the comments property. */
    public String getComments() {
        return comments;
    }

    /** Sets the value of the comments property. */
    public void setComments(String value) {
        this.comments = value;
    }

    /** Gets the value of the auditInfo property. */
    public AuditInfo getAuditInfo() {
        return auditInfo;
    }

    /** Sets the value of the auditInfo property. */
    public void setAuditInfo(AuditInfo value) {
        this.auditInfo = value;
    }

    /**
     * Binding for the anonymous {@code AuditInfo} complex type:
     * UserId, UserName and Organization (all required strings).
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "userId",
        "userName",
        "organization"
    })
    public static class AuditInfo {

        @XmlElement(name = "UserId", required = true)
        protected String userId;
        @XmlElement(name = "UserName", required = true)
        protected String userName;
        @XmlElement(name = "Organization", required = true)
        protected String organization;

        /** Gets the value of the userId property. */
        public String getUserId() {
            return userId;
        }

        /** Sets the value of the userId property. */
        public void setUserId(String value) {
            this.userId = value;
        }

        /** Gets the value of the userName property. */
        public String getUserName() {
            return userName;
        }

        /** Sets the value of the userName property. */
        public void setUserName(String value) {
            this.userName = value;
        }

        /** Gets the value of the organization property. */
        public String getOrganization() {
            return organization;
        }

        /** Sets the value of the organization property. */
        public void setOrganization(String value) {
            this.organization = value;
        }
    }

    /**
     * Binding for the anonymous {@code NewPan} complex type:
     * Id, ExpiryDate, CardSequenceNumber and UpdateWalletServiceProvider.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "id",
        "expiryDate",
        "cardSequenceNumber",
        "updateWalletServiceProvider"
    })
    public static class NewPan {

        @XmlElement(name = "Id")
        protected long id;
        @XmlElement(name = "ExpiryDate")
        protected String expiryDate;
        @XmlElement(name = "CardSequenceNumber")
        protected String cardSequenceNumber;
        @XmlElement(name = "UpdateWalletServiceProvider")
        protected int updateWalletServiceProvider;

        /** Gets the value of the id property. */
        public long getId() {
            return id;
        }

        /** Sets the value of the id property. */
        public void setId(long value) {
            this.id = value;
        }

        /** Gets the value of the expiryDate property. */
        public String getExpiryDate() {
            return expiryDate;
        }

        /** Sets the value of the expiryDate property. */
        public void setExpiryDate(String value) {
            this.expiryDate = value;
        }

        /** Gets the value of the cardSequenceNumber property. */
        public String getCardSequenceNumber() {
            return cardSequenceNumber;
        }

        /** Sets the value of the cardSequenceNumber property. */
        public void setCardSequenceNumber(String value) {
            this.cardSequenceNumber = value;
        }

        /** Gets the value of the updateWalletServiceProvider property. */
        public int getUpdateWalletServiceProvider() {
            return updateWalletServiceProvider;
        }

        /** Sets the value of the updateWalletServiceProvider property. */
        public void setUpdateWalletServiceProvider(int value) {
            this.updateWalletServiceProvider = value;
        }
    }
}
// // COPYRIGHT LICENSE: This information contains sample code provided in source code form. You may copy, // modify, and distribute these sample programs in any form without payment to IBM for the purposes of // developing, using, marketing or distributing application programs conforming to the application // programming interface for the operating platform for which the sample code is written. // Notwithstanding anything to the contrary, IBM PROVIDES THE SAMPLE SOURCE CODE ON AN "AS IS" BASIS // AND IBM DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, ANY IMPLIED // WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY, FITNESS FOR A PARTICULAR PURPOSE, // TITLE, AND ANY WARRANTY OR CONDITION OF NON-INFRINGEMENT. IBM SHALL NOT BE LIABLE FOR ANY DIRECT, // INDIRECT, INCIDENTAL, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR OPERATION OF THE // SAMPLE SOURCE CODE. IBM HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS // OR MODIFICATIONS TO THE SAMPLE SOURCE CODE. // // (C) COPYRIGHT International Business Machines Corp., 2001,2011 // All Rights Reserved * Licensed Materials - Property of IBM // package com.ibm.websphere.samples.pbw.war; import java.io.IOException; import javax.ejb.EJB; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import com.ibm.websphere.samples.pbw.ejb.CatalogMgr; import com.ibm.websphere.samples.pbw.ejb.CustomerMgr; import com.ibm.websphere.samples.pbw.jpa.Customer; import com.ibm.websphere.samples.pbw.utils.Util; /** * Servlet to handle customer account actions, such as login and register. 
*/ @WebServlet("/servlet/AccountServlet") public class AccountServlet extends HttpServlet { private static final long serialVersionUID = 1L; // Servlet action codes. public static final String ACTION_ACCOUNT = "account"; public static final String ACTION_ACCOUNTUPDATE = "accountUpdate"; public static final String ACTION_LOGIN = "login"; public static final String ACTION_REGISTER = "register"; public static final String ACTION_SETLOGGING = "SetLogging"; @EJB private CustomerMgr login; @EJB private CatalogMgr catalog; /** * Servlet initialization. */ public void init(ServletConfig config) throws ServletException { super.init(config); } /** * Process incoming HTTP GET requests * * @param request Object that encapsulates the request to the servlet * @param response Object that encapsulates the response from the servlet */ public void doGet(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) throws ServletException, IOException { performTask(request,response); } /** * Process incoming HTTP POST requests * * @param request Object that encapsulates the request to the servlet * @param response Object that encapsulates the response from the servlet */ public void doPost(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) throws ServletException, IOException { performTask(request,response); } /** * Main service method for AccountServlet * * @param request Object that encapsulates the request to the servlet * @param response Object that encapsulates the response from the servlet */ private void performTask(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { String action = null; action = req.getParameter(Util.ATTR_ACTION); Util.debug("action=" + action); if (action.equals(ACTION_LOGIN)) { try { HttpSession session = req.getSession(true); String userid = req.getParameter("userid"); String passwd = req.getParameter("passwd"); String updating = 
req.getParameter(Util.ATTR_UPDATING); String results= null; if (Util.validateString(userid)){ results= login.verifyUserAndPassword(userid, passwd); } else { //user id was invalid, and may contain XSS attack results = "\nEmail address was invalid."; Util.debug("User id or email address was invalid. id=" + userid); } // If results have an error msg, return it, otherwise continue. if (results != null) { // Proliferate UPDATING flag if user is trying to update his account. if (updating.equals("true")) req.setAttribute(Util.ATTR_UPDATING, "true"); req.setAttribute(Util.ATTR_RESULTS, results); requestDispatch(getServletConfig().getServletContext(), req, resp, Util.PAGE_LOGIN); } else { // If not logging in for the first time, then clear out the // session data for the old user. if (session.getAttribute(Util.ATTR_CUSTOMER) != null) { session.removeAttribute(Util.ATTR_CART); // session.removeAttribute(Util.ATTR_CART_CONTENTS); session.removeAttribute(Util.ATTR_CHECKOUT); session.removeAttribute(Util.ATTR_ORDERKEY); } // Store customer userid in HttpSession. Customer customer = login.getCustomer(userid); session.setAttribute(Util.ATTR_CUSTOMER, customer); Util.debug("updating=" + updating + "="); // Was customer trying to edit account information. if (updating.equals("true")) { req.setAttribute(Util.ATTR_EDITACCOUNTINFO, customer); requestDispatch( getServletConfig().getServletContext(), req, resp, Util.PAGE_ACCOUNT ); } else { // See if user was in the middle of checking out. 
Boolean checkingOut = (Boolean) session.getAttribute(Util.ATTR_CHECKOUT); Util.debug("checkingOut=" + checkingOut + "="); if ((checkingOut != null) && (checkingOut.booleanValue())) { Util.debug("must be checking out"); requestDispatch( getServletConfig().getServletContext(), req, resp, Util.PAGE_ORDERINFO); } else { Util.debug("must NOT be checking out"); String url; String category = (String) session.getAttribute(Util.ATTR_CATEGORY); // Default to plants if ((category == null) || (category.equals("null"))) { url = Util.PAGE_PROMO; } else { url = Util.PAGE_SHOPPING; req.setAttribute(Util.ATTR_INVITEMS, catalog.getItemsByCategory(Integer.parseInt(category))); } requestDispatch( getServletConfig().getServletContext(), req, resp, url); } } } } catch (ServletException e) { req.setAttribute(Util.ATTR_RESULTS, "/nException occurred"); throw e; } catch (Exception e) { req.setAttribute(Util.ATTR_RESULTS, "/nException occurred"); throw new ServletException(e.getMessage()); } } else if (action.equals(ACTION_REGISTER)) { // Register a new user. 
// try // { String url; HttpSession session = req.getSession(true); String userid = req.getParameter("userid"); String password = req.getParameter("passwd"); String cpassword = req.getParameter("vpasswd"); String firstName = req.getParameter("fname"); String lastName = req.getParameter("lname"); String addr1 = req.getParameter("addr1"); String addr2 = req.getParameter("addr2"); String addrCity = req.getParameter("city"); String addrState = req.getParameter("state"); String addrZip = req.getParameter("zip"); String phone = req.getParameter("phone"); //validate all user input if (!Util.validateString(userid)){ req.setAttribute(Util.ATTR_RESULTS, "Email address contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(firstName)){ req.setAttribute(Util.ATTR_RESULTS, "First Name contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(lastName)){ req.setAttribute(Util.ATTR_RESULTS, "Last Name contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(addr1)){ req.setAttribute(Util.ATTR_RESULTS, "Address Line 1 contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(addr2)){ req.setAttribute(Util.ATTR_RESULTS, "Address Line 2 contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(addrCity)){ req.setAttribute(Util.ATTR_RESULTS, "City contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(addrState)){ req.setAttribute(Util.ATTR_RESULTS, "State contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(addrZip)){ req.setAttribute(Util.ATTR_RESULTS, "Zip contains invalid characters."); url = Util.PAGE_REGISTER; } else if (!Util.validateString(phone)){ req.setAttribute(Util.ATTR_RESULTS, "Phone Number contains invalid characters."); url = Util.PAGE_REGISTER; } // Make sure passwords match. 
else if (!password.equals(cpassword)) { req.setAttribute(Util.ATTR_RESULTS, "Passwords do not match."); url = Util.PAGE_REGISTER; } else { // Create the new user. Customer customer = login.createCustomer(userid, password, firstName, lastName, addr1, addr2, addrCity, addrState, addrZip, phone); if (customer != null) { // Store customer info in HttpSession. session.setAttribute(Util.ATTR_CUSTOMER, customer); // See if user was in the middle of checking out. Boolean checkingOut = (Boolean) session.getAttribute(Util.ATTR_CHECKOUT); if ((checkingOut != null) && (checkingOut.booleanValue())) { url = Util.PAGE_ORDERINFO; } else { String category = (String) session.getAttribute(Util.ATTR_CATEGORY); // Default to plants if (category == null) { url = Util.PAGE_PROMO; } else { url = Util.PAGE_SHOPPING; req.setAttribute(Util.ATTR_INVITEMS, catalog.getItemsByCategory(Integer.parseInt(category))); } } } else { url = Util.PAGE_REGISTER; req.setAttribute(Util.ATTR_RESULTS, "New user NOT created!"); } } requestDispatch( getServletConfig().getServletContext(), req, resp, url); // } // catch (CreateException e) { } } else if (action.equals(ACTION_ACCOUNT)) { String url; HttpSession session = req.getSession(true); Customer customer = (Customer) session.getAttribute(Util.ATTR_CUSTOMER); if (customer == null) { url = Util.PAGE_LOGIN; req.setAttribute(Util.ATTR_UPDATING, "true"); req.setAttribute(Util.ATTR_RESULTS, "\nYou must login first."); } else { url = Util.PAGE_ACCOUNT; req.setAttribute(Util.ATTR_EDITACCOUNTINFO, customer); } requestDispatch( getServletConfig().getServletContext(), req, resp, url); } else if (action.equals(ACTION_ACCOUNTUPDATE)) { // try // { String url; HttpSession session = req.getSession(true); Customer customer = (Customer) session.getAttribute(Util.ATTR_CUSTOMER); String userid = customer.getCustomerID(); String firstName = req.getParameter("fname"); String lastName = req.getParameter("lname"); String addr1 = req.getParameter("addr1"); String addr2 = 
req.getParameter("addr2"); String addrCity = req.getParameter("city"); String addrState = req.getParameter("state"); String addrZip = req.getParameter("zip"); String phone = req.getParameter("phone"); // Create the new user. customer = login.updateUser(userid, firstName, lastName, addr1, addr2, addrCity, addrState, addrZip, phone); // Store updated customer info in HttpSession. session.setAttribute(Util.ATTR_CUSTOMER, customer); // See if user was in the middle of checking out. Boolean checkingOut = (Boolean) session.getAttribute(Util.ATTR_CHECKOUT); if ((checkingOut != null) && (checkingOut.booleanValue())) { url = Util.PAGE_ORDERINFO; } else { String category = (String) session.getAttribute(Util.ATTR_CATEGORY); // Default to plants if (category == null) { url = Util.PAGE_PROMO; } else { url = Util.PAGE_SHOPPING; req.setAttribute(Util.ATTR_INVITEMS, catalog.getItemsByCategory(Integer.parseInt(category))); } } requestDispatch( getServletConfig().getServletContext(), req, resp, url); // } // catch (CreateException e) { } } else if (action.equals(ACTION_SETLOGGING)) { String debugSetting = req.getParameter("logging"); if ((debugSetting == null) || (!debugSetting.equals("debug"))) Util.setDebug(false); else Util.setDebug(true); requestDispatch( getServletConfig().getServletContext(), req, resp, Util.PAGE_HELP); } } /** * Request dispatch. */ private void requestDispatch( ServletContext ctx, HttpServletRequest req, HttpServletResponse resp, String page) throws ServletException, IOException { resp.setContentType("text/html"); ctx.getRequestDispatcher(page).include(req, resp); } }
package php.runtime.env;

import php.runtime.Memory;
import php.runtime.lang.Closure;
import php.runtime.lang.IObject;
import php.runtime.memory.ArrayMemory;
import php.runtime.memory.ObjectMemory;
import php.runtime.memory.output.PlainPrinter;
import php.runtime.reflection.ClassEntity;

import java.io.StringWriter;

/**
 * A single frame of the interpreter's PHP call stack: where the call was made
 * ({@link #trace}), what was called ({@link #function}/{@link #clazz}/{@link #staticClazz}),
 * on which receiver ({@link #object}) and with which arguments ({@link #args}).
 * Instances are mutable and re-initialized in place (see {@link #setParameters}
 * and {@link #clear}).
 */
public class CallStackItem {
    public TraceInfo trace;            // source position of the call site
    public IObject object;             // receiver for instance calls; null for static/plain calls
    public Memory[] args;              // actual call arguments
    public String function;            // called function/method name
    public String clazz;               // declaring class name; null for plain functions
    public String staticClazz;         // late-static-binding class name
    public ClassEntity classEntity;
    public ClassEntity staticClassEntity;
    public int flags;

    public CallStackItem(TraceInfo trace) {
        this.trace = trace;
    }

    // NOTE(review): this copy constructor does not copy classEntity,
    // staticClassEntity or flags — presumably intentional since
    // setParameters() also nulls the entities, but worth confirming.
    public CallStackItem(CallStackItem copy){
        this.trace = copy.trace;
        this.object = copy.object;
        this.args = copy.args;
        this.function = copy.function;
        this.clazz = copy.clazz;
        this.staticClazz = copy.staticClazz;
    }

    public CallStackItem(TraceInfo trace, IObject object, Memory[] args,
                         String function, String clazz, String staticClazz) {
        this.trace = trace;
        this.object = object;
        this.args = args;
        this.function = function;
        this.clazz = clazz;
        this.staticClazz = staticClazz;
    }

    public TraceInfo getTrace() {
        return trace;
    }

    public void setTrace(TraceInfo trace) {
        this.trace = trace;
    }

    /**
     * Re-initializes this frame in place with the data for a new call.
     * Also clears the cached class entities so they are re-resolved.
     */
    public void setParameters(TraceInfo trace, IObject object, Memory[] args,
                              String function, String clazz, String staticClazz) {
        this.trace = trace;
        this.object = object;
        this.args = args;
        this.function = function;
        this.clazz = clazz;
        this.staticClazz = staticClazz;
        this.classEntity = null;
        this.staticClassEntity = null;
    }

    /** Drops object/argument references so they can be collected after the frame is popped. */
    public void clear(){
        this.object = null;
        this.args = null;
        this.flags = 0;
    }

    @Override
    public String toString() {
        return toString(false);
    }

    public ArrayMemory toArray(){
        return toArray(true, false);
    }

    /**
     * Builds a debug_backtrace()-style associative array for this frame
     * (keys: file, line, function, class, type, object, args, position).
     *
     * @param provideObject include the receiver under the "object" key
     * @param ignoreArgs    omit the "args" entry
     */
    public ArrayMemory toArray(boolean provideObject, boolean ignoreArgs){
        ArrayMemory el = new ArrayMemory();
        if (trace != null) {
            if (trace.getFile() != null)
                el.refOfIndex("file").assign(trace.getFileName());
            // stored line numbers are 0-based; the exposed array is 1-based
            el.refOfIndex("line").assign(trace.getStartLine() + 1);
        }
        el.refOfIndex("function").assign(function);
        if (clazz != null) {
            el.refOfIndex("class").assign(clazz);
            el.refOfIndex("type").assign("::");
        }
        if (object != null){
            if (provideObject){
                el.refOfIndex("object").assign(new ObjectMemory(object));
            }
            // an instance call overrides the "::" type set above
            el.refOfIndex("type").assign("->");
        }
        if (!ignoreArgs){
            el.refOfIndex("args").assign(ArrayMemory.of(args));
        }
        if (trace != null)
            el.refOfIndex("position").assign(trace.getStartPosition() + 1);
        return el;
    }

    /**
     * Formats a whole stack as PHP-style trace text:
     * "#0 frame\n#1 frame\n...\n#N {main}". Returns "" for a null stack.
     */
    public static String toString(CallStackItem[] items, boolean withArgs) {
        int i = 0;
        StringBuilder sb = new StringBuilder();
        if (items != null){
            for (CallStackItem e : items){
                if (i != 0)
                    sb.append("\n");
                sb.append("#").append(i).append(" ").append(e.toString(withArgs));
                i++;
            }
            if (i != 0)
                sb.append("\n");
            sb.append("#").append(i).append(" {main}");
        }
        return sb.toString();
    }

    /** Short "Class::method" / "Class->method" / "{closure}" label for this frame. */
    public String getWhere() {
        StringBuilder sb = new StringBuilder();
        if (object instanceof Closure)
            sb.append("{closure}");
        else if (clazz != null){
            sb.append(clazz);
            if (object == null)
                sb.append("::");
            else
                sb.append("->");
            sb.append(function);
        } else if (function != null){
            sb.append(function);
        } else
            sb.append("<internal>");
        return sb.toString();
    }

    /**
     * Full frame description, e.g. {@code Foo->bar(arg1, arg2) called at [file:line]}.
     *
     * @param withArgs render the argument values via PlainPrinter
     */
    public String toString(boolean withArgs) {
        StringBuilder sb = new StringBuilder();
        if (object instanceof Closure)
            sb.append("{closure}");
        else if (clazz != null){
            sb.append(clazz);
            if (object == null)
                sb.append("::");
            else
                sb.append("->");
            sb.append(function);
        } else if (function != null){
            sb.append(function);
        } else
            sb.append("<internal>");

        sb.append("(");
        if (withArgs) {
            StringWriter writer = new StringWriter();
            // NOTE(review): PlainPrinter is constructed with a null Environment —
            // assumed safe for printing plain values here; confirm for object args.
            PlainPrinter printer = new PlainPrinter(null, writer);
            int i = 0;
            if (args != null)
                for(Memory arg : args){
                    printer.print(arg);
                    if (i != args.length - 1)
                        writer.append(", ");
                    i++;
                }
            sb.append(writer.toString());
        }
        sb.append(")");
        if (trace != null && trace != TraceInfo.UNKNOWN) {
            sb.append(" called at [");
            sb.append(trace.getFileName());
            sb.append(":");
            sb.append(trace.getStartLine() + 1);
            sb.append("]");
        }
        return sb.toString();
    }
}
/* * Copyright (c) 2017-2018, Adam <Adam@sigterm.info> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package net.runelite.http.service.cache;

import com.google.common.collect.Iterables;
import com.google.common.io.BaseEncoding;
import com.google.common.io.ByteStreams;
import io.minio.MinioClient;
import io.minio.errors.ErrorResponseException;
import io.minio.errors.InsufficientDataException;
import io.minio.errors.InternalException;
import io.minio.errors.InvalidArgumentException;
import io.minio.errors.InvalidBucketNameException;
import io.minio.errors.InvalidEndpointException;
import io.minio.errors.InvalidPortException;
import io.minio.errors.NoResponseException;
import java.io.IOException;
import java.io.InputStream;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import net.runelite.cache.ConfigType;
import net.runelite.cache.IndexType;
import net.runelite.cache.definitions.ItemDefinition;
import net.runelite.cache.definitions.loaders.ItemLoader;
import net.runelite.cache.fs.ArchiveFiles;
import net.runelite.cache.fs.Container;
import net.runelite.cache.fs.FSFile;
import net.runelite.http.service.cache.beans.ArchiveEntry;
import net.runelite.http.service.cache.beans.CacheEntry;
import net.runelite.http.service.cache.beans.FileEntry;
import net.runelite.http.service.cache.beans.IndexEntry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.stereotype.Service;
import org.sql2o.Connection;
import org.sql2o.ResultSetIterable;
import org.sql2o.Sql2o;
import org.xmlpull.v1.XmlPullParserException;

/**
 * Service for reading game cache metadata from the cache database and archive
 * contents from MinIO object storage.
 */
@Service
@Slf4j
public class CacheService
{
	@Autowired
	@Qualifier("Runelite Cache SQL2O")
	private Sql2o sql2o;

	@Value("${minio.bucket}")
	private String minioBucket;

	private final MinioClient minioClient;

	@Autowired
	public CacheService(
		@Value("${minio.endpoint}") String minioEndpoint,
		@Value("${minio.accesskey}") String accessKey,
		@Value("${minio.secretkey}") String secretKey
	) throws InvalidEndpointException, InvalidPortException
	{
		this.minioClient = new MinioClient(minioEndpoint, accessKey, secretKey);
	}

	@Bean
	public MinioClient minioClient()
	{
		return minioClient;
	}

	/**
	 * Retrieves an archive's raw (still compressed) contents from object
	 * storage. Objects are keyed by the hex archive hash, split as
	 * "xx/yyyy..." (first two hex characters, then the remainder).
	 *
	 * @param archiveEntry archive to fetch
	 * @return the archive bytes, or null if retrieval failed
	 */
	public byte[] getArchive(ArchiveEntry archiveEntry)
	{
		String hashStr = BaseEncoding.base16().encode(archiveEntry.getHash());
		String path = hashStr.substring(0, 2) + '/' + hashStr.substring(2);

		try (InputStream in = minioClient.getObject(minioBucket, path))
		{
			return ByteStreams.toByteArray(in);
		}
		catch (InvalidBucketNameException | NoSuchAlgorithmException | InsufficientDataException | IOException
			| InvalidKeyException | NoResponseException | XmlPullParserException | ErrorResponseException
			| InternalException | InvalidArgumentException ex)
		{
			// was log.warn(null, ex) — a null message made failures undiagnosable
			log.warn("unable to retrieve archive object {} from storage", path, ex);
			return null;
		}
	}

	/**
	 * Fetches an archive from storage, decompresses it, and splits it into its
	 * individual files using the file list recorded in the database.
	 *
	 * @param archiveEntry archive to load
	 * @return the archive's files, or null if the archive data is missing or
	 *         cannot be decompressed
	 * @throws IOException on decompression or content-loading failure
	 */
	public ArchiveFiles getArchiveFiles(ArchiveEntry archiveEntry) throws IOException
	{
		CacheDAO cacheDao = new CacheDAO();

		try (Connection con = sql2o.open();
			ResultSetIterable<FileEntry> files = cacheDao.findFilesForArchive(con, archiveEntry))
		{
			byte[] archiveData = getArchive(archiveEntry);

			if (archiveData == null)
			{
				return null;
			}

			Container result = Container.decompress(archiveData, null); // null = no XTEA keys
			if (result == null)
			{
				return null;
			}

			byte[] decompressedData = result.data;

			ArchiveFiles archiveFiles = new ArchiveFiles();
			for (FileEntry fileEntry : files)
			{
				FSFile file = new FSFile(fileEntry.getFileId());
				archiveFiles.addFile(file);
				file.setNameHash(fileEntry.getNameHash());
			}
			archiveFiles.loadContents(decompressedData);
			return archiveFiles;
		}
	}

	public List<CacheEntry> listCaches()
	{
		try (Connection con = sql2o.open())
		{
			CacheDAO cacheDao = new CacheDAO();
			return cacheDao.listCaches(con);
		}
	}

	public CacheEntry findCache(int cacheId)
	{
		try (Connection con = sql2o.open())
		{
			CacheDAO cacheDao = new CacheDAO();
			return cacheDao.findCache(con, cacheId);
		}
	}

	public CacheEntry findMostRecent()
	{
		try (Connection con = sql2o.open())
		{
			CacheDAO cacheDao = new CacheDAO();
			return cacheDao.findMostRecent(con);
		}
	}

	public List<IndexEntry> findIndexesForCache(CacheEntry cacheEntry)
	{
		try (Connection con = sql2o.open())
		{
			CacheDAO cacheDao = new CacheDAO();
			return cacheDao.findIndexesForCache(con, cacheEntry);
		}
	}

	public IndexEntry findIndexForCache(CacheEntry cacheEntry, int indexId)
	{
		try (Connection con = sql2o.open())
		{
			CacheDAO cacheDao = new CacheDAO();
			return cacheDao.findIndexForCache(con, cacheEntry, indexId);
		}
	}

	public List<ArchiveEntry> findArchivesForIndex(IndexEntry indexEntry)
	{
		CacheDAO cacheDao = new CacheDAO();

		// close the ResultSetIterable too — the original leaked it
		try (Connection con = sql2o.open();
			ResultSetIterable<ArchiveEntry> archiveEntries = cacheDao.findArchivesForIndex(con, indexEntry))
		{
			List<ArchiveEntry> archives = new ArrayList<>();
			Iterables.addAll(archives, archiveEntries);
			return archives;
		}
	}

	public ArchiveEntry findArchiveForIndex(IndexEntry indexEntry, int archiveId)
	{
		try (Connection con = sql2o.open())
		{
			CacheDAO cacheDao = new CacheDAO();
			return cacheDao.findArchiveForIndex(con, indexEntry, archiveId);
		}
	}

	public ArchiveEntry findArchiveForTypeAndName(CacheEntry cache, IndexType index, int nameHash)
	{
		try (Connection con = sql2o.open())
		{
			CacheDAO cacheDao = new CacheDAO();
			return cacheDao.findArchiveByName(con, cache, index, nameHash);
		}
	}

	/**
	 * Loads all item definitions from the most recent cache's ITEM config archive.
	 *
	 * @return the item definitions; empty if the item archive is unavailable
	 * @throws IOException on archive decompression failure
	 */
	public List<ItemDefinition> getItems() throws IOException
	{
		CacheEntry cache = findMostRecent();
		IndexEntry indexEntry = findIndexForCache(cache, IndexType.CONFIGS.getNumber());
		ArchiveEntry archiveEntry = findArchiveForIndex(indexEntry, ConfigType.ITEM.getId());
		ArchiveFiles archiveFiles = getArchiveFiles(archiveEntry);

		// getArchiveFiles() returns null when the archive data is missing or
		// corrupt; previously this fell straight through to an NPE
		if (archiveFiles == null)
		{
			return new ArrayList<>();
		}

		final ItemLoader itemLoader = new ItemLoader();
		final List<ItemDefinition> result = new ArrayList<>(archiveFiles.getFiles().size());

		for (FSFile file : archiveFiles.getFiles())
		{
			ItemDefinition itemDef = itemLoader.load(file.getFileId(), file.getContents());
			result.add(itemDef);
		}

		return result;
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.cache.hibernate;

import java.util.HashMap;
import java.util.List;
import javax.cache.Cache;
import javax.persistence.Cacheable;
import javax.persistence.Id;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.hamcrest.core.Is;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.cfg.Configuration;
import org.hibernate.service.ServiceRegistryBuilder;
import org.junit.Test;

import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.hibernate.HibernateAccessStrategyFactory.DFLT_ACCESS_TYPE_PROPERTY;
import static org.apache.ignite.cache.hibernate.HibernateAccessStrategyFactory.IGNITE_INSTANCE_NAME_PROPERTY;
import static org.apache.ignite.cache.hibernate.HibernateAccessStrategyFactory.REGION_CACHE_PROPERTY;
import static org.hibernate.cfg.AvailableSettings.CACHE_REGION_FACTORY;
import static org.hibernate.cfg.AvailableSettings.GENERATE_STATISTICS;
import static org.hibernate.cfg.AvailableSettings.HBM2DDL_AUTO;
import static org.hibernate.cfg.AvailableSettings.RELEASE_CONNECTIONS;
import static org.hibernate.cfg.AvailableSettings.USE_QUERY_CACHE;
import static org.hibernate.cfg.AvailableSettings.USE_SECOND_LEVEL_CACHE;
import static org.hibernate.cfg.AvailableSettings.USE_STRUCTURED_CACHE;
import static org.junit.Assert.assertThat;

/**
 * Tests Hibernate L2 cache configuration: verifies that entity regions are
 * mapped onto the configured Ignite caches and that reads/writes through a
 * Hibernate session are reflected in those caches.
 */
@SuppressWarnings("unchecked")
public class HibernateL2CacheStrategySelfTest extends GridCommonAbstractTest {
    /** */
    private static final String ENTITY1_NAME = Entity1.class.getName();

    /** */
    private static final String ENTITY2_NAME = Entity2.class.getName();

    /** */
    private static final String ENTITY3_NAME = Entity3.class.getName();

    /** */
    private static final String ENTITY4_NAME = Entity4.class.getName();

    /** Region name used by Hibernate for the update-timestamps cache. */
    private static final String TIMESTAMP_CACHE = "org.hibernate.cache.spi.UpdateTimestampsCache";

    /** Region name used by Hibernate for the standard query cache. */
    private static final String QUERY_CACHE = "org.hibernate.cache.internal.StandardQueryCache";

    /** In-memory H2 database; DB_CLOSE_DELAY=-1 keeps it alive across sessions. */
    private static final String CONNECTION_URL = "jdbc:h2:mem:example;DB_CLOSE_DELAY=-1";

    /** */
    private SessionFactory sesFactory1;

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGrid(0);
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        for (IgniteCacheProxy<?, ?> cache : ((IgniteKernal)grid(0)).caches())
            cache.clear();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(new TcpDiscoveryVmIpFinder(true));

        // one Ignite cache per Hibernate region used by the test
        cfg.setCacheConfiguration(cacheConfiguration(ENTITY3_NAME),
            cacheConfiguration(ENTITY4_NAME),
            cacheConfiguration("cache1"),
            cacheConfiguration("cache2"),
            cacheConfiguration(TIMESTAMP_CACHE),
            cacheConfiguration(QUERY_CACHE));

        return cfg;
    }

    /**
     * @param cacheName Cache name.
     * @return Cache configuration.
     */
    private CacheConfiguration cacheConfiguration(String cacheName) {
        CacheConfiguration cfg = new CacheConfiguration();

        cfg.setName(cacheName);
        cfg.setCacheMode(PARTITIONED);
        cfg.setAtomicityMode(TRANSACTIONAL);

        return cfg;
    }

    /**
     * Builds a Hibernate configuration backed by the Ignite region factory,
     * mapping Entity1/Entity2 onto explicitly named caches ("cache1"/"cache2").
     *
     * @param accessType Cache access type.
     * @param igniteInstanceName Ignite instance name.
     * @return Hibernate configuration.
     */
    private Configuration hibernateConfiguration(AccessType accessType, String igniteInstanceName) {
        Configuration cfg = new Configuration();

        cfg.addAnnotatedClass(Entity1.class);
        cfg.addAnnotatedClass(Entity2.class);
        cfg.addAnnotatedClass(Entity3.class);
        cfg.addAnnotatedClass(Entity4.class);

        cfg.setCacheConcurrencyStrategy(ENTITY1_NAME, accessType.getExternalName());
        cfg.setCacheConcurrencyStrategy(ENTITY2_NAME, accessType.getExternalName());
        cfg.setCacheConcurrencyStrategy(ENTITY3_NAME, accessType.getExternalName());
        cfg.setCacheConcurrencyStrategy(ENTITY4_NAME, accessType.getExternalName());

        cfg.setProperty(DFLT_ACCESS_TYPE_PROPERTY, accessType.name());
        cfg.setProperty(HBM2DDL_AUTO, "create");
        cfg.setProperty(GENERATE_STATISTICS, "true");
        cfg.setProperty(USE_SECOND_LEVEL_CACHE, "true");
        cfg.setProperty(USE_QUERY_CACHE, "true");
        cfg.setProperty(CACHE_REGION_FACTORY, HibernateRegionFactory.class.getName());
        cfg.setProperty(RELEASE_CONNECTIONS, "on_close");
        cfg.setProperty(USE_STRUCTURED_CACHE, "true");
        cfg.setProperty(IGNITE_INSTANCE_NAME_PROPERTY, igniteInstanceName);

        // region -> Ignite cache mapping
        cfg.setProperty(REGION_CACHE_PROPERTY + ENTITY1_NAME, "cache1");
        cfg.setProperty(REGION_CACHE_PROPERTY + ENTITY2_NAME, "cache2");
        cfg.setProperty(REGION_CACHE_PROPERTY + TIMESTAMP_CACHE, TIMESTAMP_CACHE);
        cfg.setProperty(REGION_CACHE_PROPERTY + QUERY_CACHE, QUERY_CACHE);

        return cfg;
    }

    /**
     * @throws Exception If failed.
     */
    @Test
    public void testEntityCacheReadWrite() throws Exception {
        for (AccessType accessType : new AccessType[]{AccessType.READ_WRITE, AccessType.NONSTRICT_READ_WRITE})
            testEntityCacheReadWrite(accessType);
    }

    /**
     * Saves/updates entities through three successive transactions and checks
     * after each that the mapped Ignite caches hold the expected entries.
     *
     * @param accessType Cache access type.
     * @throws Exception If failed.
     */
    private void testEntityCacheReadWrite(AccessType accessType) throws Exception {
        log.info("Test access type: " + accessType);

        sesFactory1 = startHibernate(accessType, getTestIgniteInstanceName(0));

        try {
            // 1 Adding.
            Session ses = sesFactory1.openSession();

            try {
                Transaction tr = ses.beginTransaction();

                ses.save(new Entity1(1, "entity-1#name-1"));
                ses.save(new Entity2(1, "entity-2#name-1"));

                tr.commit();
            }
            finally {
                ses.close();
            }

            loadEntities(sesFactory1);

            assertEquals(1, grid(0).cache("cache1").size());
            assertEquals(1, grid(0).cache("cache2").size());
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 1), Is.is("entity-1#name-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 1), Is.is("entity-2#name-1"));

            // 2. Updating and adding.
            ses = sesFactory1.openSession();

            try {
                Transaction tx = ses.beginTransaction();

                Entity1 e1 = (Entity1)ses.load(Entity1.class, 1);

                e1.setName("entity-1#name-1#UPDATED-1");

                ses.update(e1);
                ses.save(new Entity2(2, "entity-2#name-2#ADDED"));

                tx.commit();
            }
            finally {
                ses.close();
            }

            loadEntities(sesFactory1);

            assertEquals(1, grid(0).cache("cache1").size());
            assertEquals(2, grid(0).cache("cache2").size());
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 1), Is.is("entity-1#name-1#UPDATED-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 1), Is.is("entity-2#name-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 2), Is.is("entity-2#name-2#ADDED"));

            // 3. Updating, adding, updating.
            ses = sesFactory1.openSession();

            try {
                Transaction tx = ses.beginTransaction();

                Entity2 e2_1 = (Entity2)ses.load(Entity2.class, 1);
                e2_1.setName("entity-2#name-1#UPDATED-1");
                ses.update(e2_1);

                ses.save(new Entity1(2, "entity-1#name-2#ADDED"));

                Entity1 e1_1 = (Entity1)ses.load(Entity1.class, 1);
                e1_1.setName("entity-1#name-1#UPDATED-2");
                ses.update(e1_1);

                tx.commit();
            }
            finally {
                ses.close();
            }

            loadEntities(sesFactory1);

            assertEquals(2, grid(0).cache("cache1").size());
            assertEquals(2, grid(0).cache("cache2").size());

            assertThat(getEntityNameFromRegion(sesFactory1, "cache2", 1), Is.is("entity-2#name-1#UPDATED-1"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 2), Is.is("entity-1#name-2#ADDED"));
            assertThat(getEntityNameFromRegion(sesFactory1, "cache1", 1), Is.is("entity-1#name-1#UPDATED-2"));

            ses = sesFactory1.openSession();

            sesFactory1.getStatistics().logSummary();

            ses.close();
        }
        finally {
            cleanup();
        }
    }

    /**
     * Loads all Entity1/Entity2 rows, which populates the L2 cache regions.
     *
     * @param sesFactory Session factory.
     */
    private void loadEntities(SessionFactory sesFactory) {
        Session ses = sesFactory.openSession();

        try {
            List<Entity1> list1 = ses.createCriteria(ENTITY1_NAME).list();

            for (Entity1 e1 : list1)
                assertNotNull(e1.getName());

            List<Entity2> list2 = ses.createCriteria(ENTITY2_NAME).list();

            for (Entity2 e2 : list2)
                assertNotNull(e2.getName());
        }
        finally {
            ses.close();
        }
    }

    /**
     * Reads an entity's "name" attribute directly from the Ignite cache that
     * backs the given region (structured cache entries are stored as maps).
     *
     * @param sesFactory Session Factory.
     * @param regionName Region Name.
     * @param id Id.
     * @return Entity Name.
     */
    private String getEntityNameFromRegion(SessionFactory sesFactory, String regionName, int id) {
        Session ses = sesFactory.openSession();

        try {
            for (Cache.Entry<Object, Object> entry : grid(0).cache(regionName)) {
                if (((HibernateKeyWrapper)entry.getKey()).id().equals(id))
                    return (String)((HashMap)entry.getValue()).get("name");
            }

            return null;
        }
        finally {
            ses.close();
        }
    }

    /**
     * @param accessType Cache access type.
     * @param igniteInstanceName Name of the grid providing caches.
     * @return Session factory.
     */
    private SessionFactory startHibernate(AccessType accessType, String igniteInstanceName) {
        Configuration cfg = hibernateConfiguration(accessType, igniteInstanceName);

        ServiceRegistryBuilder builder = new ServiceRegistryBuilder();

        builder.applySetting("hibernate.connection.url", CONNECTION_URL);
        builder.applySetting("hibernate.show_sql", false);

        return cfg.buildSessionFactory(builder.buildServiceRegistry());
    }

    /**
     * Test Hibernate entity1.
     */
    @javax.persistence.Entity
    @SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
    @Cacheable
    public static class Entity1 {
        /** */
        private int id;

        /** */
        private String name;

        /** */
        public Entity1() {
            // No-op.
        }

        /**
         * @param id ID.
         * @param name Name.
         */
        Entity1(int id, String name) {
            this.id = id;
            this.name = name;
        }

        /**
         * @return ID.
         */
        @Id
        public int getId() {
            return id;
        }

        /**
         * @param id ID.
         */
        public void setId(int id) {
            this.id = id;
        }

        /**
         * @return Name.
         */
        public String getName() {
            return name;
        }

        /**
         * @param name Name.
         */
        public void setName(String name) {
            this.name = name;
        }
    }

    /**
     * Test Hibernate entity2.
     */
    @javax.persistence.Entity
    @SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
    @Cacheable
    public static class Entity2 {
        /** */
        private int id;

        /** */
        private String name;

        /** */
        public Entity2() {
            // No-op.
        }

        /**
         * @param id ID.
         * @param name Name.
         */
        Entity2(int id, String name) {
            this.id = id;
            this.name = name;
        }

        /**
         * @return ID.
         */
        @Id
        public int getId() {
            return id;
        }

        /**
         * @param id ID.
         */
        public void setId(int id) {
            this.id = id;
        }

        /**
         * @return Name.
         */
        public String getName() {
            return name;
        }

        /**
         * @param name Name.
         */
        public void setName(String name) {
            this.name = name;
        }
    }

    /**
     * Test Hibernate entity3.
     */
    @javax.persistence.Entity
    @SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
    @Cacheable
    public static class Entity3 {
        /** */
        private int id;

        /** */
        private String name;

        /** */
        public Entity3() {
            // No-op.
        }

        /**
         * @param id ID.
         * @param name Name.
         */
        public Entity3(int id, String name) {
            this.id = id;
            this.name = name;
        }

        /**
         * @return ID.
         */
        @Id
        public int getId() {
            return id;
        }

        /**
         * @param id ID.
         */
        public void setId(int id) {
            this.id = id;
        }

        /**
         * @return Name.
         */
        public String getName() {
            return name;
        }

        /**
         * @param name Name.
         */
        public void setName(String name) {
            this.name = name;
        }
    }

    /**
     * Test Hibernate entity4.
     */
    @javax.persistence.Entity
    @SuppressWarnings({"PublicInnerClass", "UnnecessaryFullyQualifiedName"})
    @Cacheable
    public static class Entity4 {
        /** */
        private int id;

        /** */
        private String name;

        /** */
        public Entity4() {
            // No-op.
        }

        /**
         * @param id ID.
         * @param name Name.
         */
        public Entity4(int id, String name) {
            this.id = id;
            this.name = name;
        }

        /**
         * @return ID.
         */
        @Id
        public int getId() {
            return id;
        }

        /**
         * @param id ID.
         */
        public void setId(int id) {
            this.id = id;
        }

        /**
         * @return Name.
         */
        public String getName() {
            return name;
        }

        /**
         * @param name Name.
         */
        public void setName(String name) {
            this.name = name;
        }
    }

    /**
     * Closes session factories and clears data from caches.
     *
     * @throws Exception If failed.
     */
    private void cleanup() throws Exception {
        if (sesFactory1 != null)
            sesFactory1.close();

        sesFactory1 = null;

        for (IgniteCacheProxy<?, ?> cache : ((IgniteKernal)grid(0)).caches())
            cache.clear();
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.service.component.instance; import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.registry.client.api.RegistryConstants; import org.apache.hadoop.registry.client.binding.RegistryPathUtils; import org.apache.hadoop.registry.client.binding.RegistryUtils; import org.apache.hadoop.registry.client.types.ServiceRecord; import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.client.api.NMClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import 
org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.server.utils.BuilderUtils; import org.apache.hadoop.yarn.service.ServiceScheduler; import org.apache.hadoop.yarn.service.api.records.Artifact; import org.apache.hadoop.yarn.service.api.records.ComponentState; import org.apache.hadoop.yarn.service.api.records.ContainerState; import org.apache.hadoop.yarn.service.api.records.ServiceState; import org.apache.hadoop.yarn.service.component.Component; import org.apache.hadoop.yarn.service.component.ComponentEvent; import org.apache.hadoop.yarn.service.component.ComponentEventType; import org.apache.hadoop.yarn.service.component.ComponentRestartPolicy; import org.apache.hadoop.yarn.service.monitor.probe.ProbeStatus; import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders; import org.apache.hadoop.yarn.service.timelineservice.ServiceTimelinePublisher; import org.apache.hadoop.yarn.service.utils.ServiceUtils; import org.apache.hadoop.yarn.state.InvalidStateTransitionException; import org.apache.hadoop.yarn.state.MultipleArcTransition; import org.apache.hadoop.yarn.state.SingleArcTransition; import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.util.BoundedAppender; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.text.MessageFormat; import java.util.Date; import java.util.EnumSet; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; import static org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes.*; import static org.apache.hadoop.yarn.api.records.ContainerExitStatus .KILLED_AFTER_APP_COMPLETION; import 
static org.apache.hadoop.yarn.api.records.ContainerExitStatus.KILLED_BY_APPMASTER;
import static org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType.*;
import static org.apache.hadoop.yarn.service.component.instance.ComponentInstanceState.*;

/**
 * Tracks the lifecycle of one container instance of a service component.
 *
 * <p>Each instance is driven by a {@link StateMachine} over
 * {@link ComponentInstanceState} (INIT / STARTED / READY / UPGRADING /
 * CANCEL_UPGRADING) reacting to {@link ComponentInstanceEvent}s dispatched by
 * the AM. The instance also owns the per-container bookkeeping: the REST-facing
 * container spec, the YARN registry record, the HDFS instance dir, the ATS
 * (timeline service) publications, and a periodic container-status poller that
 * discovers the container's IP/hostname.
 *
 * <p>Thread-safety: state transitions and spec mutations are guarded by a
 * {@link ReentrantReadWriteLock}; {@link #handle} takes the write lock for the
 * whole transition.
 */
public class ComponentInstance implements EventHandler<ComponentInstanceEvent>,
    Comparable<ComponentInstance> {
  private static final Logger LOG =
      LoggerFactory.getLogger(ComponentInstance.class);
  private static final String FAILED_BEFORE_LAUNCH_DIAG =
      "failed before launch";
  private static final String UPGRADE_FAILED = "upgrade failed";

  private StateMachine<ComponentInstanceState, ComponentInstanceEventType,
      ComponentInstanceEvent> stateMachine;
  private Component component;
  private final ReadLock readLock;
  private final WriteLock writeLock;

  private ComponentInstanceId compInstanceId = null;
  private Path compInstanceDir;
  private Container container;
  private YarnRegistryViewForProviders yarnRegistryOperations;
  private FileSystem fs;
  private boolean timelineServiceEnabled = false;
  private ServiceTimelinePublisher serviceTimelinePublisher;
  private ServiceScheduler scheduler;
  // Rolling diagnostics buffer, bounded so a flapping container cannot grow
  // the AM heap without limit.
  private BoundedAppender diagnostics = new BoundedAppender(64 * 1024);
  private volatile ScheduledFuture containerStatusFuture;
  private volatile ContainerStatus status;
  private long containerStartedTime = 0;
  // This container object is used for rest API query
  private org.apache.hadoop.yarn.service.api.records.Container containerSpec;
  private String serviceVersion;
  private AtomicBoolean upgradeInProgress = new AtomicBoolean(false);
  private boolean pendingCancelUpgrade = false;

  private static final StateMachineFactory<ComponentInstance,
      ComponentInstanceState, ComponentInstanceEventType,
      ComponentInstanceEvent>
      stateMachineFactory =
      new StateMachineFactory<ComponentInstance, ComponentInstanceState,
          ComponentInstanceEventType, ComponentInstanceEvent>(INIT)
          .addTransition(INIT, STARTED, START,
              new ContainerStartedTransition())
          .addTransition(INIT, INIT, STOP,
              // container failed before launching, nothing to cleanup from
              // registry.
              // This could happen if NMClient#startContainerAsync failed,
              // container will be completed, but COMP_INSTANCE is still at
              // INIT.
              new ContainerStoppedTransition(true))

          // From Running
          .addTransition(STARTED, INIT, STOP,
              new ContainerStoppedTransition())
          .addTransition(STARTED, READY, BECOME_READY,
              new ContainerBecomeReadyTransition())

          // FROM READY
          .addTransition(READY, STARTED, BECOME_NOT_READY,
              new ContainerBecomeNotReadyTransition())
          .addTransition(READY, INIT, STOP, new ContainerStoppedTransition())
          .addTransition(READY, UPGRADING, UPGRADE, new UpgradeTransition())
          .addTransition(READY, EnumSet.of(READY, CANCEL_UPGRADING),
              CANCEL_UPGRADE, new CancelUpgradeTransition())

          // FROM UPGRADING
          .addTransition(UPGRADING, EnumSet.of(READY, CANCEL_UPGRADING),
              CANCEL_UPGRADE, new CancelUpgradeTransition())
          .addTransition(UPGRADING, EnumSet.of(READY), BECOME_READY,
              new ReadyAfterUpgradeTransition())
          .addTransition(UPGRADING, UPGRADING, STOP,
              new StoppedAfterUpgradeTransition())

          // FROM CANCEL_UPGRADING
          .addTransition(CANCEL_UPGRADING, EnumSet.of(CANCEL_UPGRADING, READY),
              BECOME_READY, new ReadyAfterUpgradeTransition())
          .addTransition(CANCEL_UPGRADING, EnumSet.of(CANCEL_UPGRADING, INIT),
              STOP, new StoppedAfterCancelUpgradeTransition())
          .installTopology();

  public ComponentInstance(Component component,
      ComponentInstanceId compInstanceId) {
    this.stateMachine = stateMachineFactory.make(this);
    this.component = component;
    this.compInstanceId = compInstanceId;
    this.scheduler = component.getScheduler();
    this.yarnRegistryOperations =
        component.getScheduler().getYarnRegistryOperations();
    this.serviceTimelinePublisher =
        component.getScheduler().getServiceTimelinePublisher();
    if (YarnConfiguration
        .timelineServiceV2Enabled(component.getScheduler().getConfig())) {
      this.timelineServiceEnabled = true;
    }
    ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    this.readLock = lock.readLock();
    this.writeLock = lock.writeLock();
    this.fs = scheduler.getContext().fs.getFileSystem();
  }

  /**
   * INIT -> STARTED: the container was launched. Starts the status poller,
   * records the (token-derived) launch time, and publishes the new container
   * spec for REST/ATS consumers.
   */
  private static class ContainerStartedTransition extends BaseTransition {
    @Override
    public void transition(ComponentInstance compInstance,
        ComponentInstanceEvent event) {
      // Query container status for ip and host
      boolean cancelOnSuccess = true;
      if (compInstance.getCompSpec().getArtifact() != null && compInstance
          .getCompSpec().getArtifact().getType() == Artifact.TypeEnum.DOCKER) {
        // A docker container might get a different IP if the container is
        // relaunched by the NM, so we need to keep checking the status.
        // This is a temporary fix until the NM provides a callback for
        // container relaunch (see YARN-8265).
        cancelOnSuccess = false;
      }
      compInstance.containerStatusFuture =
          compInstance.scheduler.executorService.scheduleAtFixedRate(
              new ContainerStatusRetriever(compInstance.scheduler,
                  event.getContainerId(), compInstance, cancelOnSuccess), 0, 1,
              TimeUnit.SECONDS);
      // Prefer the creation time embedded in the container token; fall back
      // to "now" if the token cannot be decoded.
      long containerStartTime = System.currentTimeMillis();
      try {
        ContainerTokenIdentifier containerTokenIdentifier = BuilderUtils
            .newContainerTokenIdentifier(compInstance.getContainer()
                .getContainerToken());
        containerStartTime = containerTokenIdentifier.getCreationTime();
      } catch (Exception e) {
        LOG.info("Could not get container creation time, using current time");
      }
      org.apache.hadoop.yarn.service.api.records.Container container =
          new org.apache.hadoop.yarn.service.api.records.Container();
      container.setId(event.getContainerId().toString());
      container.setLaunchTime(new Date(containerStartTime));
      container.setState(ContainerState.RUNNING_BUT_UNREADY);
      container.setBareHost(compInstance.getNodeId().getHost());
      container.setComponentInstanceName(compInstance.getCompInstanceName());
      if (compInstance.containerSpec != null) {
        // remove the previous container.
        compInstance.getCompSpec().removeContainer(compInstance.containerSpec);
      }
      compInstance.containerSpec = container;
      compInstance.getCompSpec().addContainer(container);
      compInstance.containerStartedTime = containerStartTime;
      compInstance.component.incRunningContainers();
      compInstance.serviceVersion = compInstance.scheduler.getApp()
          .getVersion();

      if (compInstance.timelineServiceEnabled) {
        compInstance.serviceTimelinePublisher
            .componentInstanceStarted(container, compInstance);
      }
    }
  }

  /** STARTED -> READY: the readiness probe succeeded. */
  private static class ContainerBecomeReadyTransition extends BaseTransition {
    @Override
    public void transition(ComponentInstance compInstance,
        ComponentInstanceEvent event) {
      compInstance.setContainerState(ContainerState.READY);
      compInstance.component.incContainersReady(true);
      compInstance.postContainerReady();
    }
  }

  /**
   * UPGRADING/CANCEL_UPGRADING -> READY, unless a cancellation of the upgrade
   * arrived while the re-init was in flight — in that case the cancel is
   * replayed and the current state is kept.
   */
  private static class ReadyAfterUpgradeTransition implements
      MultipleArcTransition<ComponentInstance, ComponentInstanceEvent,
          ComponentInstanceState> {

    @Override
    public ComponentInstanceState transition(ComponentInstance instance,
        ComponentInstanceEvent event) {
      if (instance.pendingCancelUpgrade) {
        // cancellation of upgrade was triggered before the upgrade was
        // finished.
        LOG.info("{} received ready but cancellation pending",
            event.getContainerId());
        instance.upgradeInProgress.set(true);
        instance.cancelUpgrade();
        instance.pendingCancelUpgrade = false;
        return instance.getState();
      }

      instance.upgradeInProgress.set(false);
      instance.setContainerState(ContainerState.READY);
      instance.component.incContainersReady(false);

      // Pick the status object matching the direction we were going in.
      Component.UpgradeStatus status = instance.getState().equals(UPGRADING) ?
          instance.component.getUpgradeStatus() :
          instance.component.getCancelUpgradeStatus();
      status.decContainersThatNeedUpgrade();
      instance.serviceVersion = status.getTargetVersion();

      // Nudge the component to re-evaluate overall stability.
      ComponentEvent checkState = new ComponentEvent(
          instance.component.getName(), ComponentEventType.CHECK_STABLE);
      instance.scheduler.getDispatcher().getEventHandler().handle(checkState);
      instance.postContainerReady();
      return ComponentInstanceState.READY;
    }
  }

  /** Publishes the READY transition to ATS, if enabled. */
  private void postContainerReady() {
    if (timelineServiceEnabled) {
      serviceTimelinePublisher.componentInstanceBecomeReady(containerSpec);
    }
  }

  /** READY -> STARTED: the readiness probe started failing. */
  private static class ContainerBecomeNotReadyTransition extends
      BaseTransition {
    @Override
    public void transition(ComponentInstance compInstance,
        ComponentInstanceEvent event) {
      compInstance.setContainerState(ContainerState.RUNNING_BUT_UNREADY);
      compInstance.component.decContainersReady(true);
    }
  }

  /**
   * Decides, per the component's restart policy, whether a completed/failed
   * container should be relaunched (re-request a container and re-queue the
   * instance) or counted as a terminal success/failure. Also records the
   * outcome in ATS when enabled.
   *
   * <p>Note: {@code event.getStatus()} may legitimately be {@code null}
   * (failure before launch, or a failed upgrade), so every dereference of it
   * must be guarded.
   */
  @VisibleForTesting
  static void handleComponentInstanceRelaunch(ComponentInstance compInstance,
      ComponentInstanceEvent event, boolean failureBeforeLaunch,
      String containerDiag) {
    Component comp = compInstance.getComponent();

    // Do we need to relaunch the service?
    boolean hasContainerFailed = failureBeforeLaunch || hasContainerFailed(
        event.getStatus());

    ComponentRestartPolicy restartPolicy = comp.getRestartPolicyHandler();
    ContainerState containerState =
        hasContainerFailed ? ContainerState.FAILED : ContainerState.SUCCEEDED;

    if (compInstance.getContainerSpec() != null) {
      compInstance.getContainerSpec().setState(containerState);
    }

    // Exit status used for ATS; -1 when no status is available (consistent
    // with ContainerStoppedTransition). The previous code dereferenced
    // event.getStatus() unconditionally here and could NPE on upgrade
    // failure / failure before launch.
    final int exitStatusForPublish =
        failureBeforeLaunch || event.getStatus() == null ? -1
            : event.getStatus().getExitStatus();

    if (restartPolicy.shouldRelaunchInstance(compInstance,
        event.getStatus())) {
      // re-ask the failed container.
      comp.requestContainers(1);
      comp.reInsertPendingInstance(compInstance);

      StringBuilder builder = new StringBuilder();
      builder.append(compInstance.getCompInstanceId()).append(": ");
      builder.append(event.getContainerId()).append(
          " completed. Reinsert back to pending list and requested ");
      builder.append("a new container.").append(System.lineSeparator());
      builder.append(" exitStatus=").append(
          failureBeforeLaunch || event.getStatus() == null ? null :
              event.getStatus().getExitStatus());
      builder.append(", diagnostics=");
      builder.append(failureBeforeLaunch ?
          FAILED_BEFORE_LAUNCH_DIAG :
          (event.getStatus() != null ? event.getStatus().getDiagnostics() :
              UPGRADE_FAILED));

      if (event.getStatus() != null && event.getStatus().getExitStatus() != 0) {
        LOG.error(builder.toString());
      } else {
        LOG.info(builder.toString());
      }

      if (compInstance.timelineServiceEnabled) {
        // record in ATS
        LOG.info("Publishing component instance status {} {} ",
            event.getContainerId(), containerState);
        compInstance.serviceTimelinePublisher.componentInstanceFinished(
            event.getContainerId(), exitStatusForPublish, containerState,
            containerDiag);
      }

    } else {
      // When no relaunch, update component's #succeeded/#failed
      // instances.
      if (hasContainerFailed) {
        comp.markAsFailed(compInstance);
      } else {
        comp.markAsSucceeded(compInstance);
      }

      if (compInstance.timelineServiceEnabled) {
        // record in ATS
        compInstance.serviceTimelinePublisher.componentInstanceFinished(
            event.getContainerId(), exitStatusForPublish, containerState,
            containerDiag);
      }

      LOG.info(compInstance.getCompInstanceId() + (!hasContainerFailed ?
          " succeeded" :
          " failed") + " without retry, exitStatus=" + event.getStatus());
      comp.getScheduler().terminateServiceIfAllComponentsFinished();
    }
  }

  /**
   * A container counts as failed when its status is unavailable (treated
   * pessimistically) or its exit status is not SUCCESS.
   */
  public static boolean hasContainerFailed(ContainerStatus containerStatus) {
    // Mark container as failed if we can't get its exit status, i.e. null.
    return containerStatus == null || containerStatus
        .getExitStatus() != ContainerExitStatus.SUCCESS;
  }

  /**
   * Handles a container that stopped (STARTED/READY/INIT -> INIT): releases
   * the container, checks the component failure threshold (possibly failing
   * the whole service), cleans up the registry entry, and delegates the
   * relaunch-or-finish decision to
   * {@link #handleComponentInstanceRelaunch}.
   */
  private static class ContainerStoppedTransition extends BaseTransition {
    // whether the container failed before launched by AM or not.
    boolean failedBeforeLaunching = false;

    public ContainerStoppedTransition(boolean failedBeforeLaunching) {
      this.failedBeforeLaunching = failedBeforeLaunching;
    }

    public ContainerStoppedTransition() {
      this(false);
    }

    @Override
    public void transition(ComponentInstance compInstance,
        ComponentInstanceEvent event) {
      Component comp = compInstance.component;
      ContainerStatus status = event.getStatus();
      // status is not available when upgrade fails
      String containerDiag = compInstance.getCompInstanceId() + ": " + (
          failedBeforeLaunching ? FAILED_BEFORE_LAUNCH_DIAG :
              (status != null ? status.getDiagnostics() : UPGRADE_FAILED));
      compInstance.diagnostics.append(containerDiag + System.lineSeparator());
      compInstance.cancelContainerStatusRetriever();
      if (compInstance.getState().equals(READY)) {
        compInstance.component.decContainersReady(true);
      }
      compInstance.component.decRunningContainers();

      // Should we fail (terminate) the service?
      boolean shouldFailService = false;

      final ServiceScheduler scheduler = comp.getScheduler();
      scheduler.getAmRMClient().releaseAssignedContainer(
          event.getContainerId());

      // Check if it exceeds the failure threshold, but only if health
      // threshold monitor is not enabled
      if (!comp.isHealthThresholdMonitorEnabled()
          && comp.currentContainerFailure.get()
          > comp.maxContainerFailurePerComp) {
        String exitDiag = MessageFormat.format(
            "[COMPONENT {0}]: Failed {1} times, exceeded the limit - {2}. "
                + "Shutting down now... "
                + System.lineSeparator(), comp.getName(),
            comp.currentContainerFailure.get(),
            comp.maxContainerFailurePerComp);
        compInstance.diagnostics.append(exitDiag);
        // append to global diagnostics that will be reported to RM.
        scheduler.getDiagnostics().append(containerDiag);
        scheduler.getDiagnostics().append(exitDiag);
        LOG.warn(exitDiag);

        compInstance.getContainerSpec().setState(ContainerState.FAILED);
        comp.getComponentSpec().setState(ComponentState.FAILED);
        comp.getScheduler().getApp().setState(ServiceState.FAILED);

        if (compInstance.timelineServiceEnabled) {
          // record in ATS
          compInstance.scheduler.getServiceTimelinePublisher()
              .componentInstanceFinished(compInstance.getContainer().getId(),
                  failedBeforeLaunching || status == null ? -1 :
                      status.getExitStatus(), ContainerState.FAILED,
                  containerDiag);

          // mark other component-instances/containers as STOPPED
          // NOTE(review): this loop iterates all live container ids but only
          // ever reads/updates compInstance's OWN spec; it looks like it was
          // meant to update the spec of the instance mapped to each
          // containerId — confirm against upstream before changing.
          for (ContainerId containerId : scheduler.getLiveInstances()
              .keySet()) {
            if (!compInstance.container.getId().equals(containerId)
                && !isFinalState(compInstance.getContainerSpec().getState())) {
              compInstance.getContainerSpec().setState(ContainerState.STOPPED);
              compInstance.scheduler.getServiceTimelinePublisher()
                  .componentInstanceFinished(containerId,
                      KILLED_AFTER_APP_COMPLETION, ContainerState.STOPPED,
                      scheduler.getDiagnostics().toString());
            }
          }

          compInstance.scheduler.getServiceTimelinePublisher()
              .componentFinished(comp.getComponentSpec(), ComponentState.FAILED,
                  scheduler.getSystemClock().getTime());

          compInstance.scheduler.getServiceTimelinePublisher()
              .serviceAttemptUnregistered(comp.getContext(),
                  FinalApplicationStatus.FAILED,
                  scheduler.getDiagnostics().toString());
        }
        shouldFailService = true;
      }

      if (!failedBeforeLaunching) {
        // clean up registry
        // If the container failed before launching, no need to cleanup
        // registry,
        // because it was not registered before.
        // hdfs dir content will be overwritten when a new container gets
        // started,
        // so no need remove.
        compInstance.scheduler.executorService.submit(
            () -> compInstance.cleanupRegistry(event.getContainerId()));
      }

      // remove the failed ContainerId -> CompInstance mapping
      scheduler.removeLiveCompInstance(event.getContainerId());

      // According to component restart policy, handle container restart
      // or finish the service (if all components finished)
      handleComponentInstanceRelaunch(compInstance, event,
          failedBeforeLaunching, containerDiag);

      if (shouldFailService) {
        scheduler.getTerminationHandler().terminate(-1);
      }
    }
  }

  /** Whether the given spec state is terminal (FAILED/STOPPED/SUCCEEDED). */
  public static boolean isFinalState(ContainerState state) {
    return ContainerState.FAILED.equals(state) || ContainerState.STOPPED
        .equals(state) || ContainerState.SUCCEEDED.equals(state);
  }

  /**
   * UPGRADING -> UPGRADING on STOP: the container died during re-init, so the
   * upgrade of this instance failed. Releases the container and marks the
   * spec FAILED_UPGRADE.
   */
  private static class StoppedAfterUpgradeTransition extends BaseTransition {
    @Override
    public void transition(ComponentInstance instance,
        ComponentInstanceEvent event) {
      instance.component.getUpgradeStatus().decContainersThatNeedUpgrade();
      instance.component.decRunningContainers();

      final ServiceScheduler scheduler = instance.component.getScheduler();
      scheduler.getAmRMClient().releaseAssignedContainer(
          event.getContainerId());
      instance.scheduler.executorService.submit(
          () -> instance.cleanupRegistry(event.getContainerId()));
      scheduler.removeLiveCompInstance(event.getContainerId());
      instance.component.getUpgradeStatus().containerFailedUpgrade();
      instance.setContainerState(ContainerState.FAILED_UPGRADE);
      instance.upgradeInProgress.set(false);
    }
  }

  /**
   * CANCEL_UPGRADING -> INIT on STOP, unless a cancel is still pending (then
   * the cancel is replayed and the current state kept). Falls through to the
   * regular stopped handling once the cancel bookkeeping is done.
   */
  private static class StoppedAfterCancelUpgradeTransition implements
      MultipleArcTransition<ComponentInstance, ComponentInstanceEvent,
          ComponentInstanceState> {

    private ContainerStoppedTransition stoppedTransition =
        new ContainerStoppedTransition();

    @Override
    public ComponentInstanceState transition(ComponentInstance instance,
        ComponentInstanceEvent event) {
      if (instance.pendingCancelUpgrade) {
        // cancellation of upgrade was triggered before the upgrade was
        // finished.
        LOG.info("{} received stopped but cancellation pending",
            event.getContainerId());
        instance.upgradeInProgress.set(true);
        instance.cancelUpgrade();
        instance.pendingCancelUpgrade = false;
        return instance.getState();
      }

      // When upgrade is cancelled, and container re-init fails
      instance.component.getCancelUpgradeStatus()
          .decContainersThatNeedUpgrade();
      instance.upgradeInProgress.set(false);
      stoppedTransition.transition(instance, event);
      return ComponentInstanceState.INIT;
    }
  }

  /**
   * READY -> UPGRADING: triggers a container re-init against the upgrade's
   * target spec/version, unless a cancellation has already been requested.
   */
  private static class UpgradeTransition extends BaseTransition {

    @Override
    public void transition(ComponentInstance instance,
        ComponentInstanceEvent event) {
      if (!instance.component.getCancelUpgradeStatus().isCompleted()) {
        // last check to see if cancellation was triggered. The component may
        // have processed the cancel upgrade event but the instance doesn't
        // know it yet. If cancellation has been triggered then no point in
        // upgrading.
        return;
      }
      instance.upgradeInProgress.set(true);
      instance.setContainerState(ContainerState.UPGRADING);
      instance.component.decContainersReady(false);

      Component.UpgradeStatus status = instance.component.getUpgradeStatus();
      instance.scheduler.getContainerLaunchService()
          .reInitCompInstance(instance.scheduler.getApp(), instance,
              instance.container,
              instance.component.createLaunchContext(
                  status.getTargetSpec(),
                  status.getTargetVersion()));
    }
  }

  /**
   * READY/UPGRADING -> READY or CANCEL_UPGRADING on CANCEL_UPGRADE. If no
   * upgrade actually happened yet, simply returns to READY; if an upgrade is
   * currently in flight, defers the cancel until it settles.
   */
  private static class CancelUpgradeTransition implements
      MultipleArcTransition<ComponentInstance, ComponentInstanceEvent,
          ComponentInstanceState> {

    @Override
    public ComponentInstanceState transition(ComponentInstance instance,
        ComponentInstanceEvent event) {
      if (instance.upgradeInProgress.compareAndSet(false, true)) {
        Component.UpgradeStatus cancelStatus = instance.component
            .getCancelUpgradeStatus();

        if (instance.getServiceVersion().equals(
            cancelStatus.getTargetVersion())) {
          // previous upgrade didn't happen so just go back to READY
          LOG.info("{} nothing to cancel", event.getContainerId());
          cancelStatus.decContainersThatNeedUpgrade();
          instance.setContainerState(ContainerState.READY);
          ComponentEvent checkState = new ComponentEvent(
              instance.component.getName(), ComponentEventType.CHECK_STABLE);
          instance.scheduler.getDispatcher().getEventHandler()
              .handle(checkState);
          return ComponentInstanceState.READY;
        } else {
          instance.component.decContainersReady(false);
          instance.cancelUpgrade();
        }
      } else {
        LOG.info("{} pending cancellation", event.getContainerId());
        instance.pendingCancelUpgrade = true;
      }
      return ComponentInstanceState.CANCEL_UPGRADING;
    }
  }

  /** Re-inits the container back to the cancel-upgrade target spec/version. */
  private void cancelUpgrade() {
    LOG.info("{} cancelling upgrade", container.getId());
    setContainerState(ContainerState.UPGRADING);
    Component.UpgradeStatus cancelStatus = component.getCancelUpgradeStatus();
    scheduler.getContainerLaunchService()
        .reInitCompInstance(scheduler.getApp(), this,
            this.container,
            this.component.createLaunchContext(
                cancelStatus.getTargetSpec(),
                cancelStatus.getTargetVersion()));
  }

  /** Returns the current state-machine state (read-locked). */
  public ComponentInstanceState getState() {
    this.readLock.lock();
    try {
      return this.stateMachine.getCurrentState();
    } finally {
      this.readLock.unlock();
    }
  }

  /**
   * Returns the version of service at which the instance is at.
   */
  public String getServiceVersion() {
    this.readLock.lock();
    try {
      return this.serviceVersion;
    } finally {
      this.readLock.unlock();
    }
  }

  /**
   * Returns the state of the container in the container spec.
   */
  public ContainerState getContainerState() {
    this.readLock.lock();
    try {
      return this.containerSpec.getState();
    } finally {
      this.readLock.unlock();
    }
  }

  /**
   * Sets the state of the container in the container spec. It is write
   * protected.
   *
   * @param state container state
   */
  public void setContainerState(ContainerState state) {
    this.writeLock.lock();
    try {
      ContainerState curState = containerSpec.getState();
      if (!curState.equals(state)) {
        containerSpec.setState(state);
        LOG.info("{} spec state state changed from {} -> {}",
            getCompInstanceId(), curState, state);
      }
    } finally {
      this.writeLock.unlock();
    }
  }

  /**
   * Runs one state-machine transition under the write lock, logging invalid
   * transitions instead of propagating them.
   */
  @Override
  public void handle(ComponentInstanceEvent event) {
    // Acquire the lock BEFORE the try block: if lock() is outside the try,
    // the finally-block unlock can never run on a lock this thread does not
    // hold. (Previously lock() was inside the try.)
    writeLock.lock();
    try {
      ComponentInstanceState oldState = getState();
      try {
        stateMachine.doTransition(event.getType(), event);
      } catch (InvalidStateTransitionException e) {
        LOG.error(getCompInstanceId() + ": Invalid event " + event.getType()
            + " at " + oldState, e);
      }
      if (oldState != getState()) {
        LOG.info(getCompInstanceId() + " Transitioned from " + oldState + " to "
            + getState() + " on " + event.getType() + " event");
      }
    } finally {
      writeLock.unlock();
    }
  }

  public void setContainer(Container container) {
    this.container = container;
    this.compInstanceId.setContainerId(container.getId());
  }

  public String getCompInstanceName() {
    return compInstanceId.getCompInstanceName();
  }

  public ContainerStatus getContainerStatus() {
    return status;
  }

  /**
   * Applies a freshly polled {@link ContainerStatus}: records IP/host on the
   * container spec and, when the IP changed (or was unknown), republishes the
   * registry record and the ATS IP/host update.
   */
  public void updateContainerStatus(ContainerStatus status) {
    this.status = status;
    org.apache.hadoop.yarn.service.api.records.Container container =
        getCompSpec().getContainer(status.getContainerId().toString());
    boolean doRegistryUpdate = true;
    if (container != null) {
      String existingIP = container.getIp();
      String newIP = StringUtils.join(",", status.getIPs());
      container.setIp(newIP);
      container.setHostname(status.getHost());
      if (existingIP != null && newIP.equals(existingIP)) {
        doRegistryUpdate = false;
      }
      if (timelineServiceEnabled && doRegistryUpdate) {
        serviceTimelinePublisher.componentInstanceIPHostUpdated(container);
      }
    }
    if (doRegistryUpdate) {
      cleanupRegistry(status.getContainerId());
      LOG.info(
          getCompInstanceId() + " new IP = " + status.getIPs() + ", host = "
              + status.getHost() + ", updating registry");
      updateServiceRecord(yarnRegistryOperations, status);
    }
  }

  public String getCompName() {
    return compInstanceId.getCompName();
  }

  public void setCompInstanceDir(Path dir) {
    this.compInstanceDir = dir;
  }

  public Component getComponent() {
    return component;
  }

  public Container getContainer() {
    return container;
  }

  public ComponentInstanceId getCompInstanceId() {
    return compInstanceId;
  }

  public NodeId getNodeId() {
    return this.container.getNodeId();
  }

  private org.apache.hadoop.yarn.service.api.records.Component getCompSpec() {
    return component.getComponentSpec();
  }

  /** No-op base transition; concrete transitions override as needed. */
  private static class BaseTransition implements
      SingleArcTransition<ComponentInstance, ComponentInstanceEvent> {

    @Override
    public void transition(ComponentInstance compInstance,
        ComponentInstanceEvent event) {
    }
  }

  /**
   * Runs the component's probe against this instance; a component without a
   * probe is considered trivially healthy.
   */
  public ProbeStatus ping() {
    if (component.getProbe() == null) {
      ProbeStatus status = new ProbeStatus();
      status.setSuccess(true);
      return status;
    }
    return component.getProbe().ping(this);
  }

  // Write service record into registry
  private void updateServiceRecord(
      YarnRegistryViewForProviders yarnRegistry, ContainerStatus status) {
    ServiceRecord record = new ServiceRecord();
    String containerId = status.getContainerId().toString();
    record.set(YARN_ID, containerId);
    record.description = getCompInstanceName();
    record.set(YARN_PERSISTENCE, PersistencePolicies.CONTAINER);
    record.set(YARN_IP, status.getIPs().get(0));
    record.set(YARN_HOSTNAME, status.getHost());
    record.set(YARN_COMPONENT, component.getName());
    try {
      yarnRegistry
          .putComponent(RegistryPathUtils.encodeYarnID(containerId), record);
    } catch (IOException e) {
      // Include the cause — previously the exception was swallowed and only
      // the message line was logged.
      LOG.error(
          "Failed to update service record in registry: " + containerId, e);
    }
  }

  // Called when user flexed down the container and ContainerStoppedTransition
  // is not executed in this case.
  // Release the container, dec running,
  // cleanup registry, hdfs dir, and send record to ATS
  public void destroy() {
    LOG.info(getCompInstanceId() + ": Flexed down by user, destroying.");
    diagnostics.append(getCompInstanceId() + ": Flexed down by user");

    // update metrics
    if (getState() == STARTED) {
      component.decRunningContainers();
    }
    if (getState() == READY) {
      component.decContainersReady(true);
      component.decRunningContainers();
    }
    getCompSpec().removeContainer(containerSpec);

    if (container == null) {
      LOG.info(getCompInstanceId() + " no container is assigned when "
          + "destroying");
      return;
    }

    ContainerId containerId = container.getId();
    scheduler.removeLiveCompInstance(containerId);
    component.getScheduler().getAmRMClient()
        .releaseAssignedContainer(containerId);

    if (timelineServiceEnabled) {
      serviceTimelinePublisher.componentInstanceFinished(containerId,
          KILLED_BY_APPMASTER, ContainerState.STOPPED, diagnostics.toString());
    }
    cancelContainerStatusRetriever();
    scheduler.executorService.submit(() ->
        cleanupRegistryAndCompHdfsDir(containerId));
  }

  /** Removes this instance's registry entry for the given container id. */
  private void cleanupRegistry(ContainerId containerId) {
    String cid = RegistryPathUtils.encodeYarnID(containerId.toString());
    try {
      yarnRegistryOperations.deleteComponent(getCompInstanceId(), cid);
    } catch (IOException e) {
      LOG.error(getCompInstanceId() + ": Failed to delete registry", e);
    }
  }

  // TODO Maybe have a dedicated cleanup service.
  public void cleanupRegistryAndCompHdfsDir(ContainerId containerId) {
    cleanupRegistry(containerId);
    try {
      if (compInstanceDir != null && fs.exists(compInstanceDir)) {
        boolean deleted = fs.delete(compInstanceDir, true);
        if (!deleted) {
          LOG.error(getCompInstanceId()
              + ": Failed to delete component instance dir: "
              + compInstanceDir);
        } else {
          LOG.info(getCompInstanceId() + ": Deleted component instance dir: "
              + compInstanceDir);
        }
      }
    } catch (IOException e) {
      LOG.warn(getCompInstanceId() + ": Failed to delete directory", e);
    }
  }

  // Query container status until ip and hostname are available and update
  // the service record into registry service
  private static class ContainerStatusRetriever implements Runnable {
    private ContainerId containerId;
    private NodeId nodeId;
    private NMClient nmClient;
    private ComponentInstance instance;
    private boolean cancelOnSuccess;

    ContainerStatusRetriever(ServiceScheduler scheduler,
        ContainerId containerId, ComponentInstance instance,
        boolean cancelOnSuccess) {
      this.containerId = containerId;
      this.nodeId = instance.getNodeId();
      this.nmClient = scheduler.getNmClient().getClient();
      this.instance = instance;
      this.cancelOnSuccess = cancelOnSuccess;
    }

    @Override
    public void run() {
      ContainerStatus status = null;
      try {
        status = nmClient.getContainerStatus(containerId, nodeId);
      } catch (Exception e) {
        // YarnException is treated as unrecoverable for this retriever; any
        // other failure is retried on the next scheduled run.
        if (e instanceof YarnException) {
          throw new YarnRuntimeException(
              instance.compInstanceId + " Failed to get container status on "
                  + nodeId + " , cancelling.", e);
        }
        LOG.error(instance.compInstanceId + " Failed to get container status on "
            + nodeId + ", will try again", e);
        return;
      }
      // Keep polling until both IP(s) and hostname are known.
      if (ServiceUtils.isEmpty(status.getIPs()) || ServiceUtils
          .isUnset(status.getHost())) {
        return;
      }
      instance.updateContainerStatus(status);
      if (cancelOnSuccess) {
        LOG.info(
            instance.compInstanceId + " IP = " + status.getIPs() + ", host = "
                + status.getHost() + ", cancel container status retriever");
        instance.containerStatusFuture.cancel(false);
      }
    }
  }

  /** Cancels the periodic container-status poller, if still running. */
  private void cancelContainerStatusRetriever() {
    if (containerStatusFuture != null && !containerStatusFuture.isDone()) {
      containerStatusFuture.cancel(true);
    }
  }

  /**
   * Computes the registry-DNS hostname of this instance:
   * {@code instance.service.user[.domain]}.
   */
  public String getHostname() {
    String domain = getComponent().getScheduler().getConfig()
        .get(RegistryConstants.KEY_DNS_DOMAIN);
    String hostname;
    if (domain == null || domain.isEmpty()) {
      hostname = MessageFormat
          .format("{0}.{1}.{2}", getCompInstanceName(),
              getComponent().getContext().service.getName(),
              RegistryUtils.currentUser());
    } else {
      hostname = MessageFormat
          .format("{0}.{1}.{2}.{3}", getCompInstanceName(),
              getComponent().getContext().service.getName(),
              RegistryUtils.currentUser(), domain);
    }
    return hostname;
  }

  @Override
  public int compareTo(ComponentInstance to) {
    return getCompInstanceId().compareTo(to.getCompInstanceId());
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    ComponentInstance instance = (ComponentInstance) o;

    if (containerStartedTime != instance.containerStartedTime) {
      return false;
    }
    return compInstanceId.equals(instance.compInstanceId);
  }

  @Override
  public int hashCode() {
    int result = compInstanceId.hashCode();
    result = 31 * result + (int) (containerStartedTime ^ (containerStartedTime
        >>> 32));
    return result;
  }

  /**
   * Returns container spec.
   */
  public org.apache.hadoop.yarn.service.api.records
      .Container getContainerSpec() {
    readLock.lock();
    try {
      return containerSpec;
    } finally {
      readLock.unlock();
    }
  }
}
package com.example.amir.dhp;

import android.content.Context;
import android.graphics.PorterDuff;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Filter;
import android.widget.Filterable;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

import java.util.ArrayList;

/**
 * Created by amir on 2/23/16.
 *
 * Meal-composition screen: shows the FoCo menu in a filterable list, lets the
 * user select items (text search plus gluten-free/kosher/vegan toggles) and
 * displays a running nutritional summary of the current selection.
 */
public class FocoMealActivity extends AppCompatActivity {

    // List adapter backing the menu ListView (handles search + diet filters).
    private MenuListAdapter adapter;
    private ArrayList<MenuItem> menuItems;
    // Nutritional-summary labels, refreshed by updateMealDetails().
    private TextView totalItems;
    private TextView calories;
    private TextView fat;
    private TextView satFat;
    private TextView protein;
    private TextView sodium;
    private TextView potassium;
    private EditText mealName;
    private EditText searchBar;
    // Diet-filter toggle buttons; tint indicates active state.
    private Button gluten;
    private Button kosher;
    private Button vegan;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Initialize
        super.onCreate(savedInstanceState);
        setContentView(R.layout.foco_activity_meal);

        // Set up toolbar
        getSupportActionBar().setTitle("FoCo Meal Creation");

        // Set up all TextViews
        totalItems = (TextView) findViewById(R.id.foco_meal_items_count);
        calories = (TextView) findViewById(R.id.foco_meal_calories);
        fat = (TextView) findViewById(R.id.foco_meal_total_fat);
        satFat = (TextView) findViewById(R.id.foco_meal_sat_fat);
        protein = (TextView) findViewById(R.id.foco_meal_protein);
        sodium = (TextView) findViewById(R.id.foco_meal_sodium);
        potassium = (TextView) findViewById(R.id.foco_meal_potassium);
        mealName = (EditText) findViewById(R.id.foco_meal_name);
        searchBar = (EditText) findViewById(R.id.foco_meal_search);
        vegan = ((Button)
findViewById(R.id.foco_meal_vegan));
        kosher = ((Button) findViewById(R.id.foco_meal_kosher));
        gluten = ((Button) findViewById(R.id.foco_meal_gluten));

        // Set up list adapter
        menuItems = Util.getFocoMenu();
        ListView listView = (ListView) findViewById(R.id.food_item_list);

        // Check if loading: a previously saved meal is passed in via extras
        // ("name" plus an int[] of selected item indices).
        if (getIntent().getExtras() != null
                && getIntent().getExtras().containsKey("name")
                && getIntent().getExtras().containsKey("items")) {
            String name = getIntent().getExtras().getString("name");
            int [] items = getIntent().getExtras().getIntArray("items");
            mealName.setText(name);
            // Re-mark the previously selected menu items.
            for (int i = 0; i < items.length; i++) {
                menuItems.get(items[i]).selected = true;
            }
            adapter = new MenuListAdapter(this, menuItems);
            listView.setAdapter(adapter);
            updateMealDetails();
        } else {
            adapter = new MenuListAdapter(this, menuItems);
            listView.setAdapter(adapter);
        }

        // Live search: re-filter the list on every keystroke.
        searchBar.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                adapter.getFilter().filter(s);
            }

            @Override
            public void afterTextChanged(Editable s) {
            }
        });

        // Tapping a row toggles selection, re-applies the current filter and
        // refreshes the nutrition summary; background tint shows selection.
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                adapter.getMainItem(position).selected ^= true;
                adapter.getFilter().filter(searchBar.getText().toString());
                if (adapter.getItem(position).selected) {
                    view.setBackgroundColor(0x332B9927);
                } else {
                    view.setBackgroundColor(0x00000000);
                }
                updateMealDetails();
            }
        });

        // Toggle buttons: each flips its filter flag and re-filters; the tint
        // (translucent green vs. clear) mirrors the flag.
        gluten.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
        gluten.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                adapter.glutenOnly ^= true;
                adapter.getFilter().filter(searchBar.getText().toString());
                if (adapter.glutenOnly)
                    v.getBackground().setColorFilter(0x332B9927, PorterDuff.Mode.MULTIPLY);
                else
v.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
            }
        });
        kosher.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
        kosher.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                adapter.kosherOnly ^= true;
                adapter.getFilter().filter(searchBar.getText().toString());
                if (adapter.kosherOnly)
                    v.getBackground().setColorFilter(0x332B9927, PorterDuff.Mode.MULTIPLY);
                else
                    v.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
            }
        });
        vegan.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
        vegan.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                adapter.veganOnly ^= true;
                adapter.getFilter().filter(searchBar.getText().toString());
                if (adapter.veganOnly)
                    v.getBackground().setColorFilter(0x332B9927, PorterDuff.Mode.MULTIPLY);
                else
                    v.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
            }
        });

        // Generate a meal buttons (currently random, and cheat): pick random
        // items, select them, then overwrite the calories label with the
        // target value.
        ((Button) findViewById(R.id.foco_2000)).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Reset any searching / selecting
                adapter.unselectAll();
                adapter.getFilter().filter("");

                // Get count of available options
                int count = adapter.getCount();

                // Generate 3 random indices, select them, and fudge the numbers
                ArrayList<Integer> indexes = Util.pickOptions(count);
                for (Integer index : indexes) {
                    adapter.getMainItem(index).selected = true;
                }
                adapter.getFilter().filter("");
                updateMealDetails();
                calories.setText("Calories: 2000 cal");
            }
        });
        ((Button) findViewById(R.id.foco_2500)).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Reset any searching / selecting
                adapter.unselectAll();
                adapter.getFilter().filter("");

                // Get count of available options
                int count = adapter.getCount();

                // Generate 3 random indices, select them, and fudge the numbers
                ArrayList<Integer> indexes = Util.pickOptions(count);
                for
(Integer index : indexes) { adapter.getMainItem(index).selected = true; } adapter.getFilter().filter(""); updateMealDetails(); calories.setText("Calories: 2500"); } }); ((Button) findViewById(R.id.foco_3000)).setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // Reset any searching / selecting adapter.unselectAll(); adapter.getFilter().filter(""); // Get count of available options int count = adapter.getCount(); // Generate 3 random indices, select them, and fudge the numbers ArrayList<Integer> indexes = Util.pickOptions(count); for (Integer index : indexes) { adapter.getMainItem(index).selected = true; } adapter.getFilter().filter(""); updateMealDetails(); calories.setText("Calories: 3000"); } }); } private void updateMealDetails() { int count = 0; int calories = 0; int fat = 0; int satFat = 0; int protein = 0; int sodium = 0; int potassium = 0; for (MenuItem item : adapter.items) { if (item.selected) { count += 1; calories += item.calories; fat += item.totalFat; satFat += item.saturatedFat; protein += item.protein; sodium += item.sodium; potassium += item.potassium; } } this.totalItems.setText("Total Items: " + count); this.calories.setText("Calories: " + calories + " cal"); this.fat.setText("Total Fat: " + fat + "g"); this.satFat.setText("Saturated Fat: " + satFat + "g"); this.protein.setText("Protein: " + protein + "g"); this.sodium.setText("Sodium: " + sodium + "mg"); this.potassium.setText("Potassium: " + potassium + "mg"); } public void focoSaveMealClicked(View view) { String name = mealName.getText().toString(); if (name.length() == 0) { Toast.makeText(getApplicationContext(), "Name your meal!", Toast.LENGTH_SHORT).show(); } else { Toast.makeText(getApplicationContext(), "Meal '" + name + "' has been saved!", Toast.LENGTH_SHORT).show(); finish(); } } // Full reset public void focoClearMealClicked(View view) { adapter.unselectAll(); adapter.glutenOnly = false; adapter.kosherOnly = false; adapter.veganOnly = false; 
gluten.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
        kosher.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
        vegan.getBackground().setColorFilter(0x00000000, PorterDuff.Mode.MULTIPLY);
        adapter.getFilter().filter("");
    }

    /**
     * List adapter over the FoCo menu. Exposes the full item list ('items')
     * plus a filtered view ('filtered') produced by the text search combined
     * with the gluten-free/kosher/vegan toggles. ListView positions refer to
     * the filtered view.
     */
    private class MenuListAdapter extends ArrayAdapter<MenuItem> implements Filterable {

        // Complete menu; selection state lives on the items themselves.
        public ArrayList<MenuItem> items;
        // Currently visible subset after search/diet filtering.
        private ArrayList<MenuItem> filtered;
        // Active diet-filter flags, toggled by the activity's buttons.
        public boolean glutenOnly = false;
        public boolean kosherOnly = false;
        public boolean veganOnly = false;

        public MenuListAdapter(Context context, ArrayList<MenuItem> items) {
            super(context, 0, items);
            this.items = items;
            this.filtered = items;
        }

        @Override
        public int getCount() {
            return filtered == null ? 0 : filtered.size();
        }

        @Override
        public MenuItem getItem(int position) {
            return filtered == null ? null : filtered.get(position);
        }

        // Maps a filtered-view position back to the item in the full list.
        public MenuItem getMainItem(int position) {
            return items.get(items.indexOf(filtered.get(position)));
        }

        public void unselectAll() {
            for (MenuItem item : items)
                item.selected = false;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            MenuItem item = getItem(position);

            if (convertView == null) {
                convertView = LayoutInflater.from(getContext()).inflate(R.layout.food_item, parent, false);
            }

            // Name change
            TextView name = (TextView) convertView.findViewById(R.id.food_item_name);
            name.setText(item.name);

            // Color change: translucent green marks a selected row.
            if (item.selected) {
                convertView.setBackgroundColor(0x332B9927);
            } else {
                convertView.setBackgroundColor(0x00000000);
            }

            return convertView;
        }

        @Override
        public Filter getFilter() {
            return new Filter() {
                @Override
                protected FilterResults performFiltering(CharSequence constraint) {
                    FilterResults filterResults = new FilterResults();
                    ArrayList<MenuItem> filter = new ArrayList<>();
                    // Case-insensitive substring match on the item name,
                    // then apply each active diet toggle.
                    for (MenuItem item : items) {
                        if (item.name.toLowerCase().contains(constraint.toString().toLowerCase())) {
                            // text match
                            if ((item.glutenFree || !glutenOnly)
                                    && (item.kosher || !kosherOnly)
                                    && (item.vegan ||
!veganOnly)) {
                                filter.add(item);
                            }
                        }
                    }
                    filterResults.values = filter;
                    filterResults.count = filter.size();
                    return filterResults;
                }

                @Override
                @SuppressWarnings("unchecked") // FilterResults.values is untyped
                protected void publishResults(CharSequence constraint, FilterResults results) {
                    // FIX: adopt the filtered list here, on the UI thread.
                    // The original assigned 'filtered' inside
                    // performFiltering(), which android.widget.Filter runs on
                    // a background worker thread — racing with getView()/
                    // getCount() on the UI thread. publishResults() is the
                    // documented place to publish the results.
                    filtered = (ArrayList<MenuItem>) results.values;
                    notifyDataSetChanged();
                }
            };
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.core; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.NumberType; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperException; import 
org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.suggest.completion.AnalyzingCompletionLookupProvider;
import org.elasticsearch.search.suggest.completion.Completion090PostingsFormat;
import org.elasticsearch.search.suggest.completion.CompletionTokenStream;
import org.elasticsearch.search.suggest.context.ContextBuilder;
import org.elasticsearch.search.suggest.context.ContextMapping;
import org.elasticsearch.search.suggest.context.ContextMapping.ContextConfig;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;

import static org.elasticsearch.index.mapper.MapperBuilders.completionField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;

/**
 * Field mapper for the {@code completion} field type: indexes suggestion
 * inputs (with optional output, weight, payload and contexts) through a
 * suggest-specific postings format so they can be served by the completion
 * suggester.
 */
public class CompletionFieldMapper extends AbstractFieldMapper<String> {

    public static final String CONTENT_TYPE = "completion";

    // Default field type and mapping parameter values.
    public static class Defaults extends AbstractFieldMapper.Defaults {
        public static final FieldType FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);

        static {
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.freeze();
        }

        public static final boolean DEFAULT_PRESERVE_SEPARATORS = true;
        public static final boolean DEFAULT_POSITION_INCREMENTS = true;
        public static final boolean DEFAULT_HAS_PAYLOADS = false;
        public static final int DEFAULT_MAX_INPUT_LENGTH = 50;
    }

    // Names used both in the mapping definition and in document content.
    public static class Fields {
        // Mapping field names
        public static final String ANALYZER = "analyzer";
        public static final ParseField SEARCH_ANALYZER = new ParseField("search_analyzer");
        public static final ParseField PRESERVE_SEPARATORS = new ParseField("preserve_separators");
        public static final ParseField
PRESERVE_POSITION_INCREMENTS = new ParseField("preserve_position_increments");
        public static final String PAYLOADS = "payloads";
        public static final String TYPE = "type";
        // "max_input_len" is the deprecated alias of "max_input_length".
        public static final ParseField MAX_INPUT_LENGTH = new ParseField("max_input_length", "max_input_len");
        // Content field names
        public static final String CONTENT_FIELD_NAME_INPUT = "input";
        public static final String CONTENT_FIELD_NAME_OUTPUT = "output";
        public static final String CONTENT_FIELD_NAME_PAYLOAD = "payload";
        public static final String CONTENT_FIELD_NAME_WEIGHT = "weight";
        public static final String CONTEXT = "context";
    }

    // Field names accepted inside a completion object in a document.
    public static final Set<String> ALLOWED_CONTENT_FIELD_NAMES = Sets.newHashSet(Fields.CONTENT_FIELD_NAME_INPUT,
            Fields.CONTENT_FIELD_NAME_OUTPUT, Fields.CONTENT_FIELD_NAME_PAYLOAD, Fields.CONTENT_FIELD_NAME_WEIGHT,
            Fields.CONTEXT);

    /**
     * Builder collecting the completion-specific mapping options before
     * constructing the {@link CompletionFieldMapper}.
     */
    public static class Builder extends AbstractFieldMapper.Builder<Builder, CompletionFieldMapper> {

        private boolean preserveSeparators = Defaults.DEFAULT_PRESERVE_SEPARATORS;
        private boolean payloads = Defaults.DEFAULT_HAS_PAYLOADS;
        private boolean preservePositionIncrements = Defaults.DEFAULT_POSITION_INCREMENTS;
        private int maxInputLength = Defaults.DEFAULT_MAX_INPUT_LENGTH;
        private SortedMap<String, ContextMapping> contextMapping = ContextMapping.EMPTY_MAPPING;

        public Builder(String name) {
            super(name, new FieldType(Defaults.FIELD_TYPE));
            builder = this;
        }

        public Builder payloads(boolean payloads) {
            this.payloads = payloads;
            return this;
        }

        public Builder preserveSeparators(boolean preserveSeparators) {
            this.preserveSeparators = preserveSeparators;
            return this;
        }

        public Builder preservePositionIncrements(boolean preservePositionIncrements) {
            this.preservePositionIncrements = preservePositionIncrements;
            return this;
        }

        public Builder maxInputLength(int maxInputLength) {
            // Must be strictly positive; 0/negative would disable all input.
            if (maxInputLength <= 0) {
                throw new IllegalArgumentException(Fields.MAX_INPUT_LENGTH.getPreferredName() + " must be > 0 but was [" + maxInputLength + "]");
            }
            this.maxInputLength
= maxInputLength;
            return this;
        }

        public Builder contextMapping(SortedMap<String, ContextMapping> contextMapping) {
            this.contextMapping = contextMapping;
            return this;
        }

        @Override
        public CompletionFieldMapper build(Mapper.BuilderContext context) {
            // 'null' wrapped postings format: resolved lazily via
            // postingsFormat(PostingsFormat) from the codec default.
            return new CompletionFieldMapper(buildNames(context), indexAnalyzer, searchAnalyzer, null, similarity, payloads,
                    preserveSeparators, preservePositionIncrements, maxInputLength, context.indexSettings(),
                    multiFieldsBuilder.build(this, context), copyTo, this.contextMapping);
        }
    }

    /**
     * Parses the completion-field mapping definition into a {@link Builder}.
     * Consumed entries are removed from the node map so leftover keys can be
     * reported as unsupported by the caller.
     */
    public static class TypeParser implements Mapper.TypeParser {

        @Override
        public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            CompletionFieldMapper.Builder builder = completionField(name);
            NamedAnalyzer indexAnalyzer = null;
            NamedAnalyzer searchAnalyzer = null;
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                if (fieldName.equals("type")) {
                    continue;
                }
                if (Fields.ANALYZER.equals(fieldName) || // index_analyzer is for backcompat, remove for v3.0
                    fieldName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {

                    indexAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
                    iterator.remove();
                } else if (Fields.SEARCH_ANALYZER.match(fieldName)) {
                    searchAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString());
                    iterator.remove();
                } else if (fieldName.equals(Fields.PAYLOADS)) {
                    builder.payloads(Boolean.parseBoolean(fieldNode.toString()));
                    iterator.remove();
                } else if (Fields.PRESERVE_SEPARATORS.match(fieldName)) {
                    builder.preserveSeparators(Boolean.parseBoolean(fieldNode.toString()));
                    iterator.remove();
                } else if (Fields.PRESERVE_POSITION_INCREMENTS.match(fieldName)) {
                    builder.preservePositionIncrements(Boolean.parseBoolean(fieldNode.toString()));
iterator.remove();
                } else if (Fields.MAX_INPUT_LENGTH.match(fieldName)) {
                    builder.maxInputLength(Integer.parseInt(fieldNode.toString()));
                    iterator.remove();
                } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
                    iterator.remove();
                } else if (fieldName.equals(Fields.CONTEXT)) {
                    builder.contextMapping(ContextBuilder.loadMappings(fieldNode, parserContext.indexVersionCreated()));
                    iterator.remove();
                }
            }

            // Analyzer defaulting: "simple" when neither is set; a search
            // analyzer alone is rejected; index analyzer alone is used for
            // both.
            if (indexAnalyzer == null) {
                if (searchAnalyzer != null) {
                    throw new MapperParsingException("analyzer on completion field [" + name + "] must be set when search_analyzer is set");
                }
                indexAnalyzer = searchAnalyzer = parserContext.analysisService().analyzer("simple");
            } else if (searchAnalyzer == null) {
                searchAnalyzer = indexAnalyzer;
            }
            builder.indexAnalyzer(indexAnalyzer);
            builder.searchAnalyzer(searchAnalyzer);

            return builder;
        }

        private NamedAnalyzer getNamedAnalyzer(ParserContext parserContext, String name) {
            NamedAnalyzer analyzer = parserContext.analysisService().analyzer(name);
            if (analyzer == null) {
                throw new IllegalArgumentException("Can't find default or mapped analyzer with name [" + name + "]");
            }
            return analyzer;
        }
    }

    private static final BytesRef EMPTY = new BytesRef();

    // Lazily wrapped suggest postings format; see postingsFormat(PostingsFormat).
    private PostingsFormat postingsFormat;
    private final AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider;
    private final boolean payloads;
    private final boolean preservePositionIncrements;
    private final boolean preserveSeparators;
    private int maxInputLength;
    private final SortedMap<String, ContextMapping> contextMapping;

    /**
     * @param contextMappings Configuration of context type.
If none should be used set {@link ContextMapping#EMPTY_MAPPING}
     * @param wrappedPostingsFormat the postings format to wrap, or {@code null} to wrap the codec's default postings format
     */
    // Custom postings formats are deprecated but we still accept a postings format here to be able to test backward compatibility
    // with older postings formats such as Elasticsearch090
    public CompletionFieldMapper(Names names, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, PostingsFormat wrappedPostingsFormat, SimilarityProvider similarity, boolean payloads,
                                 boolean preserveSeparators, boolean preservePositionIncrements, int maxInputLength, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, SortedMap<String, ContextMapping> contextMappings) {
        super(names, 1.0f, Defaults.FIELD_TYPE, false, indexAnalyzer, searchAnalyzer, similarity, null, null, indexSettings, multiFields, copyTo);
        analyzingSuggestLookupProvider = new AnalyzingCompletionLookupProvider(preserveSeparators, false, preservePositionIncrements, payloads);
        if (wrappedPostingsFormat == null) {
            // delayed until postingsFormat() is called
            this.postingsFormat = null;
        } else {
            this.postingsFormat = new Completion090PostingsFormat(wrappedPostingsFormat, analyzingSuggestLookupProvider);
        }
        this.preserveSeparators = preserveSeparators;
        this.payloads = payloads;
        this.preservePositionIncrements = preservePositionIncrements;
        this.maxInputLength = maxInputLength;
        this.contextMapping = contextMappings;
    }

    /**
     * Returns the suggest postings format, lazily wrapping {@code in} (the
     * codec default) on first call when no explicit format was supplied at
     * construction time. Synchronized so only one wrapper is created.
     */
    public synchronized PostingsFormat postingsFormat(PostingsFormat in) {
        if (in instanceof Completion090PostingsFormat) {
            throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class);
        }
        if (postingsFormat == null) {
            postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider);
        }
        return postingsFormat;
    }

    /**
     * Parses a completion value from the document. Accepts either a bare
     * string (single input) or an object with input/output/weight/payload/
     * context fields, and adds one suggest field per input to the document.
     */
    @Override
    public Mapper parse(ParseContext context) throws IOException {
        XContentParser parser = context.parser();
        XContentParser.Token token =
parser.currentToken();
        String surfaceForm = null;
        BytesRef payload = null;
        long weight = -1;
        List<String> inputs = Lists.newArrayListWithExpectedSize(4);

        SortedMap<String, ContextConfig> contextConfig = null;

        if (token == XContentParser.Token.VALUE_STRING) {
            // Shorthand form: the whole value is a single input string.
            inputs.add(parser.text());
            multiFields.parse(this, context);
        } else {
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                    if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) {
                        throw new IllegalArgumentException("Unknown field name[" + currentFieldName + "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES);
                    }
                } else if (Fields.CONTEXT.equals(currentFieldName)) {
                    // Parse each supplied context against its mapping; any
                    // mapping not supplied falls back to its default below.
                    SortedMap<String, ContextConfig> configs = Maps.newTreeMap();

                    if (token == Token.START_OBJECT) {
                        while ((token = parser.nextToken()) != Token.END_OBJECT) {
                            String name = parser.text();
                            ContextMapping mapping = contextMapping.get(name);
                            if (mapping == null) {
                                throw new ElasticsearchParseException("context [" + name + "] is not defined");
                            } else {
                                token = parser.nextToken();
                                configs.put(name, mapping.parseContext(context, parser));
                            }
                        }
                        contextConfig = Maps.newTreeMap();
                        for (ContextMapping mapping : contextMapping.values()) {
                            ContextConfig config = configs.get(mapping.name());
                            contextConfig.put(mapping.name(), config == null ?
mapping.defaultConfig() : config);
                        }
                    } else {
                        throw new ElasticsearchParseException("context must be an object");
                    }
                } else if (Fields.CONTENT_FIELD_NAME_PAYLOAD.equals(currentFieldName)) {
                    if (!isStoringPayloads()) {
                        throw new MapperException("Payloads disabled in mapping");
                    }
                    if (token == XContentParser.Token.START_OBJECT) {
                        // Object payloads are re-serialized in the same
                        // content type and stored as raw bytes.
                        XContentBuilder payloadBuilder = XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser);
                        payload = payloadBuilder.bytes().toBytesRef();
                        payloadBuilder.close();
                    } else if (token.isValue()) {
                        payload = parser.utf8BytesOrNull();
                    } else {
                        throw new MapperException("payload doesn't support type " + token);
                    }
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    if (Fields.CONTENT_FIELD_NAME_OUTPUT.equals(currentFieldName)) {
                        surfaceForm = parser.text();
                    }
                    if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
                        inputs.add(parser.text());
                    }
                    if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
                        // Weight supplied as a string must still parse as a number.
                        Number weightValue;
                        try {
                            weightValue = Long.parseLong(parser.text());
                        } catch (NumberFormatException e) {
                            throw new IllegalArgumentException("Weight must be a string representing a numeric value, but was [" + parser.text() + "]");
                        }
                        weight = weightValue.longValue(); // always parse a long to make sure we don't get overflow
                        checkWeight(weight);
                    }
                } else if (token == XContentParser.Token.VALUE_NUMBER) {
                    if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) {
                        NumberType numberType = parser.numberType();
                        if (NumberType.LONG != numberType && NumberType.INT != numberType) {
                            throw new IllegalArgumentException("Weight must be an integer, but was [" + parser.numberValue() + "]");
                        }
                        weight = parser.longValue(); // always parse a long to make sure we don't get overflow
                        checkWeight(weight);
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) {
                        // Multiple inputs may be provided as an array.
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            inputs.add(parser.text());
                        }
                    }
                }
}
        }

        // No contexts in the document: fall back to each mapping's default.
        if (contextConfig == null) {
            contextConfig = Maps.newTreeMap();
            for (ContextMapping mapping : contextMapping.values()) {
                contextConfig.put(mapping.name(), mapping.defaultConfig());
            }
        }

        final ContextMapping.Context ctx = new ContextMapping.Context(contextConfig, context.doc());

        payload = payload == null ? EMPTY : payload;
        if (surfaceForm == null) { // no surface form use the input
            // Each input carries itself as the surface form in its payload.
            for (String input : inputs) {
                BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
                        input), weight, payload);
                context.doc().add(getCompletionField(ctx, input, suggestPayload));
            }
        } else {
            // Explicit output: every input maps to the same surface form.
            BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
                    surfaceForm), weight, payload);
            for (String input : inputs) {
                context.doc().add(getCompletionField(ctx, input, suggestPayload));
            }
        }
        return null;
    }

    // Weight must fit in a non-negative int (stored as such downstream).
    private void checkWeight(long weight) {
        if (weight < 0 || weight > Integer.MAX_VALUE) {
            throw new IllegalArgumentException("Weight must be in the interval [0..2147483647], but was [" + weight + "]");
        }
    }

    /**
     * Get the context mapping associated with this completion field.
*/
    public SortedMap<String, ContextMapping> getContextMapping() {
        return contextMapping;
    }

    /** @return true if a context mapping has been defined */
    public boolean requiresContext() {
        return !contextMapping.isEmpty();
    }

    public Field getCompletionField(String input, BytesRef payload) {
        return getCompletionField(ContextMapping.EMPTY_CONTEXT, input, payload);
    }

    /**
     * Builds the suggest field for one input: truncates the input to
     * maxInputLength (never splitting a surrogate pair) and rejects inputs
     * containing reserved suggester control characters.
     */
    public Field getCompletionField(ContextMapping.Context ctx, String input, BytesRef payload) {
        final String originalInput = input;
        if (input.length() > maxInputLength) {
            final int len = correctSubStringLen(input, Math.min(maxInputLength, input.length()));
            input = input.substring(0, len);
        }
        for (int i = 0; i < input.length(); i++) {
            if (isReservedChar(input.charAt(i))) {
                throw new IllegalArgumentException("Illegal input [" + originalInput + "] UTF-16 codepoint  [0x" + Integer.toHexString((int) input.charAt(i)).toUpperCase(Locale.ROOT)
                        + "] at position " + i + " is a reserved character");
            }
        }
        return new SuggestField(names.indexName(), ctx, input, this.fieldType, payload, analyzingSuggestLookupProvider);
    }

    // Extends the cut length by one when it would otherwise split a
    // high/low surrogate pair.
    public static int correctSubStringLen(String input, int len) {
        if (Character.isHighSurrogate(input.charAt(len - 1))) {
            assert input.length() >= len + 1 && Character.isLowSurrogate(input.charAt(len));
            return len + 1;
        }
        return len;
    }

    public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException {
        return analyzingSuggestLookupProvider.buildPayload(surfaceForm, weight, payload);
    }

    // Lucene field whose token stream is wrapped with context and completion
    // streams so the suggester payload is indexed alongside the input.
    private static final class SuggestField extends Field {
        private final BytesRef payload;
        private final CompletionTokenStream.ToFiniteStrings toFiniteStrings;
        private final ContextMapping.Context ctx;

        public SuggestField(String name, ContextMapping.Context ctx, String value, FieldType type, BytesRef payload, CompletionTokenStream.ToFiniteStrings toFiniteStrings) {
            super(name, value, type);
            this.payload = payload;
            this.toFiniteStrings = toFiniteStrings;
            this.ctx = ctx;
        }

        @Override
        public TokenStream
tokenStream(Analyzer analyzer, TokenStream previous) throws IOException {
            TokenStream ts = ctx.wrapTokenStream(super.tokenStream(analyzer, previous));
            return new CompletionTokenStream(ts, payload, toFiniteStrings);
        }
    }

    // Serializes the mapping back to XContent; the search analyzer is only
    // emitted when it differs from the index analyzer.
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(name())
                .field(Fields.TYPE, CONTENT_TYPE);

        builder.field(Fields.ANALYZER, indexAnalyzer.name());
        if (indexAnalyzer.name().equals(searchAnalyzer.name()) == false) {
            builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), searchAnalyzer.name());
        }
        builder.field(Fields.PAYLOADS, this.payloads);
        builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), this.preserveSeparators);
        builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), this.preservePositionIncrements);
        builder.field(Fields.MAX_INPUT_LENGTH.getPreferredName(), this.maxInputLength);
        multiFields.toXContent(builder, params);

        if (!contextMapping.isEmpty()) {
            builder.startObject(Fields.CONTEXT);
            for (ContextMapping mapping : contextMapping.values()) {
                builder.value(mapping);
            }
            builder.endObject();
        }

        return builder.endObject();
    }

    // Intentionally empty: field creation is handled entirely in parse().
    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public boolean isSortable() {
        return false;
    }

    @Override
    public boolean supportsNullValue() {
        return false;
    }

    @Override
    public FieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return null;
    }

    @Override
    public String value(Object value) {
        if (value == null) {
            return null;
        }
        return value.toString();
    }

    public boolean isStoringPayloads() {
        return payloads;
    }

    // Merge compatibility: payloads/position-increments/separators/context
    // mappings are fixed at creation; only max_input_length may change.
    @Override
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        super.merge(mergeWith, mergeResult);
        CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
        if (payloads !=
fieldMergeWith.payloads) {
            mergeResult.addConflict("mapper [" + names.fullName() + "] has different payload values");
        }
        if (preservePositionIncrements != fieldMergeWith.preservePositionIncrements) {
            mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_position_increments' values");
        }
        if (preserveSeparators != fieldMergeWith.preserveSeparators) {
            mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'preserve_separators' values");
        }
        if (!ContextMapping.mappingsAreEqual(getContextMapping(), fieldMergeWith.getContextMapping())) {
            mergeResult.addConflict("mapper [" + names.fullName() + "] has different 'context_mapping' values");
        }
        if (!mergeResult.simulate()) {
            // Only applied on a real (non-simulated) merge.
            this.maxInputLength = fieldMergeWith.maxInputLength;
        }
    }

    // this should be package private but our tests don't allow it.
    public static boolean isReservedChar(char character) {
        /* We use 0x001F as SEP_LABEL in the suggester; its UTF-16 code unit
         * is equivalent, so checking the char directly is safe. The 0x00 end
         * label is likewise safe to check as a UTF-16 code unit: only U+0000
         * produces the single byte 0x00 in UTF-8 (multi-byte sequences never
         * contain a zero byte). */
        assert XAnalyzingSuggester.PAYLOAD_SEP == XAnalyzingSuggester.SEP_LABEL; // ensure they are the same!
        switch (character) {
            case XAnalyzingSuggester.END_BYTE:
            case XAnalyzingSuggester.SEP_LABEL:
            case XAnalyzingSuggester.HOLE_CHARACTER:
            case ContextMapping.SEPARATOR:
                return true;
            default:
                return false;
        }
    }
}
/**
 * Code contributed to the Learning Layers project
 * http://www.learning-layers.eu
 * Development is partly funded by the FP7 Programme of the European Commission under
 * Grant Agreement FP7-ICT-318209.
 * Copyright (c) 2014, Graz University of Technology - KTI (Knowledge Technologies Institute).
 * For a list of contributors see the AUTHORS file at the top-level directory of this distribution.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package at.kc.tugraz.ss.serv.dataimport.serv;

import at.tugraz.sss.serv.util.SSDateU;
import at.tugraz.sss.serv.util.SSLogU;
import at.tugraz.sss.serv.util.*;
import at.tugraz.sss.serv.conf.api.SSCoreConfA;
import at.kc.tugraz.ss.serv.dataimport.api.SSDataImportClientI;
import at.kc.tugraz.ss.serv.dataimport.api.SSDataImportServerI;
import at.kc.tugraz.ss.serv.dataimport.conf.SSDataImportConf;
import at.kc.tugraz.ss.serv.dataimport.datatypes.pars.*;
import at.kc.tugraz.ss.serv.jobs.evernote.conf.SSEvernoteConf;
import at.tugraz.sss.servs.dataimport.impl.SSDataImportImpl;
import at.tugraz.sss.serv.conf.SSConf;
import at.tugraz.sss.serv.conf.api.SSConfA;
import at.tugraz.sss.serv.datatype.SSErr;
import at.tugraz.sss.serv.reg.*;
import at.tugraz.sss.serv.container.api.*;
import at.tugraz.sss.serv.datatype.enums.*;
import at.tugraz.sss.serv.datatype.par.*;
import at.tugraz.sss.serv.impl.api.SSServImplA;
import at.tugraz.sss.servs.conf.SSCoreConf;
import java.util.*;
import java.util.Date;

/**
 * Service container for the data-import service: lazily creates the service
 * implementation and registers the scheduled import tasks (MediaWiki user
 * import, Evernote/email "bits and pieces" import) configured in
 * SSDataImportConf.
 */
public class SSDataImportServ extends SSServContainerI{
public static final SSDataImportServ inst = new SSDataImportServ(SSDataImportClientI.class, SSDataImportServerI.class);

protected SSDataImportServ(
  final Class servImplClientInteraceClass,
  final Class servImplServerInteraceClass){

  super(servImplClientInteraceClass, servImplServerInteraceClass);
}

/**
 * Lazily creates and returns the shared data-import service implementation.
 *
 * @return the singleton {@link SSDataImportImpl} for this container
 * @throws SSErr with {@link SSErrE#servNotRunning} if the service is disabled in its configuration
 */
@Override
public SSServImplA getServImpl() throws SSErr{

  if(!conf.use){
    throw SSErr.get(SSErrE.servNotRunning);
  }

  //fast path: already initialized, no locking needed
  if(servImpl != null){
    return servImpl;
  }

  synchronized(this){

    //re-check under the lock: without this second check two racing threads
    //would each construct a fresh SSDataImportImpl (the original code
    //assigned unconditionally inside the monitor)
    if(servImpl == null){
      servImpl = new SSDataImportImpl((SSDataImportConf)conf);
    }
  }

  return servImpl;
}

/**
 * Stores the service configuration and registers this container with the
 * central service registry.
 *
 * @param conf configuration for the data-import service
 * @return this container, for chaining
 * @throws SSErr if registration fails
 */
@Override
public SSServContainerI regServ(final SSConfA conf) throws SSErr{

  this.conf = conf;

  SSServReg.inst.regServ(this);

  return this;
}

/**
 * Start-up hook; currently a no-op unless the service is both enabled and
 * configured to initialize at start-up (in which case there is still nothing
 * to do — kept as an explicit extension point).
 */
@Override
public void initServ() throws SSErr{

  final SSDataImportConf dataImportConf = (SSDataImportConf)conf;

  if(!dataImportConf.use){
    return;
  }

  if(!dataImportConf.initAtStartUp){
    return;
  }
}

/**
 * Registers the configured recurring import jobs (media-wiki user import and
 * Evernote / e-mail "bits and pieces" import) with the scheduler.
 *
 * Schedule operations and their intervals are paired by index, so both lists
 * must be non-empty and of equal size; otherwise a warning is logged and
 * nothing is scheduled.
 */
@Override
public void schedule() throws SSErr{

  final SSDataImportConf dataImportConf = (SSDataImportConf)conf;

  if(
    !dataImportConf.use ||
    !dataImportConf.schedule){
    return;
  }

  if(
    SSObjU.isNull    (dataImportConf.scheduleOps, dataImportConf.scheduleIntervals) ||
    dataImportConf.scheduleOps.isEmpty()                                            ||
    dataImportConf.scheduleIntervals.isEmpty()                                      ||
    dataImportConf.scheduleOps.size() != dataImportConf.scheduleIntervals.size()){

    SSLogU.warn(SSWarnE.scheduleConfigInvalid, null);
    return;
  }

  final SSEvernoteConf evernoteConf = SSCoreConf.instGet().getEvernote();
  final SSServPar      servPar      = new SSServPar(null);
  Date                 startDate;

  for(int scheduleOpsCounter = 0; scheduleOpsCounter < dataImportConf.scheduleOps.size(); scheduleOpsCounter++){

    if(SSStrU.isEqual(dataImportConf.scheduleOps.get(scheduleOpsCounter), SSVarNames.dataImportMediaWikiUser)){
      SSServReg.regScheduler(SSDateU.scheduleNow(new SSDataImportMediaWikiUserTask()));
      continue;
    }

    if(SSStrU.isEqual(dataImportConf.scheduleOps.get(scheduleOpsCounter), SSVarNames.dataImportBitsAndPieces)){

      if(dataImportConf.executeScheduleAtStartUp){
        startDate = new Date();
      }else{
        //consistency fix: use the already-cast dataImportConf (the original
        //accessed scheduleIntervals through the untyped 'conf' field here)
        startDate = SSDateU.getDatePlusMinutes(dataImportConf.scheduleIntervals.get(scheduleOpsCounter));
      }

      //one Evernote import task per configured auth token
      for(int counter = 0; counter < evernoteConf.getAuthTokens().size(); counter++){

        try{
          SSServReg.regScheduler(
            SSDateU.scheduleWithFixedDelay(
              new SSDataImportBitsAndPiecesTask(
                new SSDataImportBitsAndPiecesPar(
                  servPar,
                  SSConf.systemUserUri,
                  evernoteConf.getAuthTokens().get(counter), //authToken
                  evernoteConf.getAuthEmails().get(counter), //authEmail
                  null,  //emailInUser
                  null,  //emailInPassword
                  null,  //emailInEmail
                  true,  //importEvernote,
                  false, //importEmail,
                  true,  //withUserRestriction,
                  true)), //shouldCommit
              startDate,
              dataImportConf.scheduleIntervals.get(scheduleOpsCounter) * SSDateU.minuteInMilliSeconds));

        }catch(Exception error){
          SSLogU.err(error);
        }
      }

      //one e-mail import task per configured inbound e-mail account
      for(int counter = 0; counter < evernoteConf.getEmailInEmails().size(); counter++){

        try{
          SSServReg.regScheduler(
            SSDateU.scheduleWithFixedDelay(
              new SSDataImportBitsAndPiecesTask(
                new SSDataImportBitsAndPiecesPar(
                  servPar,
                  SSConf.systemUserUri,
                  null,                                            //authToken
                  evernoteConf.getAuthEmails().get(counter),       //authEmail
                  evernoteConf.getEmailInUsers().get(counter),     //emailInUser
                  evernoteConf.getEmailInPasswords().get(counter), //emailInPassword
                  evernoteConf.getEmailInEmails().get(counter),    //emailInEmail
                  false, //importEvernote,
                  true,  //importEmail,
                  true,  //withUserRestriction,
                  true)), //shouldCommit
              startDate,
              dataImportConf.scheduleIntervals.get(scheduleOpsCounter) * SSDateU.minuteInMilliSeconds));

        }catch(Exception error){
          SSLogU.err(error);
        }
      }
    }
  }
}

/**
 * Cloud deployment is not implemented for this service.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public SSCoreConfA getConfForCloudDeployment(
  final SSCoreConfA coreConfA,
  final List<Class> configuredServs) throws SSErr{

  throw new UnsupportedOperationException("Not supported yet.");
}
}
package us.deathmarine.luyten;

import java.io.StringWriter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.strobel.assembler.metadata.FieldDefinition;
import com.strobel.assembler.metadata.FieldReference;
import com.strobel.assembler.metadata.MetadataSystem;
import com.strobel.assembler.metadata.MethodDefinition;
import com.strobel.assembler.metadata.MethodReference;
import com.strobel.assembler.metadata.TypeDefinition;
import com.strobel.assembler.metadata.TypeReference;
import com.strobel.core.StringUtilities;
import com.strobel.decompiler.DecompilationOptions;
import com.strobel.decompiler.DecompilerSettings;
import com.strobel.decompiler.PlainTextOutput;

/**
 * {@link LinkProvider} backed by the Procyon decompiler: decompiles one type
 * into {@link #textContent} and, while doing so, records the text selections
 * of every definition and reference so the UI can render navigable links.
 *
 * Definitions and references are keyed by a unique string of the form
 * {@code kind|path|type[|member[|signature]]} built in
 * {@link #createUniqueStrForReference(Object)}.
 */
public class DecompilerLinkProvider implements LinkProvider {

    /** unique-string -> selection of the member's definition in textContent */
    private Map<String, Selection> definitionToSelectionMap = new HashMap<>();
    /** unique-string -> all selections referencing that member in textContent */
    private Map<String, Set<Selection>> referenceToSelectionsMap = new HashMap<>();
    private boolean isSelectionMapsPopulated = false;
    private MetadataSystem metadataSystem;
    private DecompilerSettings settings;
    private DecompilationOptions decompilationOptions;
    private TypeDefinition type;
    private String currentTypeQualifiedName;
    private String textContent = "";

    /**
     * Decompiles {@link #type} and populates {@link #textContent},
     * {@link #definitionToSelectionMap} and {@link #referenceToSelectionsMap}.
     * Any exception raised while recording a single link is reported via a
     * dialog and does not abort the decompilation.
     */
    @Override
    public void generateContent() {
        definitionToSelectionMap = new HashMap<>();
        referenceToSelectionsMap = new HashMap<>();
        currentTypeQualifiedName = type.getPackageName() + "." + type.getName();
        final StringWriter stringwriter = new StringWriter();
        PlainTextOutput plainTextOutput = new PlainTextOutput(stringwriter) {
            @Override
            public void writeDefinition(String text, Object definition, boolean isLocal) {
                super.writeDefinition(text, definition, isLocal);
                try {
                    if (text != null && definition != null) {
                        String uniqueStr = createUniqueStrForReference(definition);
                        if (uniqueStr != null) {
                            // fix link's underline length: _java.util.HashSet_
                            // -> _HashSet_
                            text = text.replaceAll("[^\\.]*\\.", "");
                            int from = stringwriter.getBuffer().length() - text.length();
                            int to = stringwriter.getBuffer().length();
                            definitionToSelectionMap.put(uniqueStr, new Selection(from, to));
                        }
                    }
                } catch (Exception e) {
                    Luyten.showExceptionDialog("Exception!", e);
                }
            }

            @Override
            public void writeReference(String text, Object reference, boolean isLocal) {
                super.writeReference(text, reference, isLocal);
                try {
                    if (text != null && reference != null) {
                        String uniqueStr = createUniqueStrForReference(reference);
                        if (uniqueStr != null) {
                            text = text.replaceAll("[^\\.]*\\.", "");
                            int from = stringwriter.getBuffer().length() - text.length();
                            int to = stringwriter.getBuffer().length();
                            if (reference instanceof FieldReference) {
                                // fix enum definition links (note: could not fix enum reference links)
                                if (((FieldReference) reference).isDefinition()) {
                                    definitionToSelectionMap.put(uniqueStr, new Selection(from, to));
                                    return;
                                }
                            }
                            // single lookup instead of the former containsKey()+get() pair
                            Set<Selection> selectionsSet = referenceToSelectionsMap.get(uniqueStr);
                            if (selectionsSet == null) {
                                selectionsSet = new HashSet<>();
                                referenceToSelectionsMap.put(uniqueStr, selectionsSet);
                            }
                            selectionsSet.add(new Selection(from, to));
                        }
                    }
                } catch (Exception e) {
                    Luyten.showExceptionDialog("Exception!", e);
                }
            }
        };
        plainTextOutput.setUnicodeOutputEnabled(decompilationOptions.getSettings().isUnicodeOutputEnabled());
        settings.getLanguage().decompileType(type, plainTextOutput, decompilationOptions);
        textContent = stringwriter.toString();
        isSelectionMapsPopulated = true;
    }

    /**
     * Builds the unique link key for a type, method or field reference, or
     * returns null when the reference is of an unsupported kind or its
     * declaring type cannot be resolved into a path.
     */
    private String createUniqueStrForReference(Object reference) {
        String uniqueStr = null;
        if (reference instanceof TypeReference) {
            TypeReference type = (TypeReference) reference;
            String pathAndTypeStr = getPathAndTypeStr(type);
            if (pathAndTypeStr != null) {
                uniqueStr = "type|" + pathAndTypeStr;
            }
        } else if (reference instanceof MethodReference) {
            MethodReference method = (MethodReference) reference;
            String pathAndTypeStr = getPathAndTypeStr(method.getDeclaringType());
            if (pathAndTypeStr != null) {
                uniqueStr = "method|" + pathAndTypeStr + "|" + method.getName() + "|" + method.getErasedSignature();
            }
        } else if (reference instanceof FieldReference) {
            FieldReference field = (FieldReference) reference;
            String pathAndTypeStr = getPathAndTypeStr(field.getDeclaringType());
            if (pathAndTypeStr != null) {
                uniqueStr = "field|" + pathAndTypeStr + "|" + field.getName();
            }
        }
        return uniqueStr;
    }

    /**
     * Returns "path|qualifiedType" for the given type reference, where path is
     * the most-outer type's class-file path and qualifiedType uses '$' for
     * nested types, or null when any required name part is missing/blank.
     */
    private String getPathAndTypeStr(TypeReference typeRef) {
        String name = typeRef.getName();
        String packageStr = typeRef.getPackageName();
        TypeReference mostOuterTypeRef = getMostOuterTypeRef(typeRef);
        String mostOuterTypeName = mostOuterTypeRef.getName();
        if (name != null && packageStr != null && mostOuterTypeName != null && name.trim().length() > 0
                && mostOuterTypeName.trim().length() > 0) {
            String pathStr = packageStr.replaceAll("\\.", "/") + "/" + mostOuterTypeName;
            String typeStr = packageStr + "." + name.replace(".", "$");
            return pathStr + "|" + typeStr;
        }
        return null;
    }

    /**
     * Walks the declaring-type chain up to the outermost type. If the result
     * still contains '$' (declaring chain not fully modeled), falls back to a
     * name-based lookup.
     */
    private TypeReference getMostOuterTypeRef(TypeReference typeRef) {
        // upper bound on chain length: number of '.'/'$'-separated name parts
        int maxDeclaringDepth = typeRef.getFullName().split("(\\.|\\$)").length;
        for (int i = 0; i < maxDeclaringDepth; i++) {
            TypeReference declaringTypeRef = typeRef.getDeclaringType();
            if (declaringTypeRef == null) {
                break;
            } else {
                typeRef = declaringTypeRef;
            }
        }
        if (typeRef.getName().contains("$")) {
            return getMostOuterTypeRefBySlowLookuping(typeRef);
        }
        return typeRef;
    }

    /**
     * Resolves the outermost type by trying successively longer '$'-joined
     * name prefixes against the metadata system; returns the first prefix that
     * resolves, or the input unchanged when none does.
     */
    private TypeReference getMostOuterTypeRefBySlowLookuping(TypeReference typeRef) {
        String name = typeRef.getName();
        if (name == null)
            return typeRef;
        String packageName = typeRef.getPackageName();
        if (packageName == null)
            return typeRef;
        String[] nameParts = name.split("\\$");
        String newName = "";
        String sep = "";
        for (int i = 0; i < nameParts.length - 1; i++) {
            newName = newName + sep + nameParts[i];
            sep = "$";
            String newInternalName = packageName.replaceAll("\\.", "/") + "/" + newName;
            TypeReference newTypeRef = metadataSystem.lookupType(newInternalName);
            if (newTypeRef != null) {
                TypeDefinition newTypeDef = newTypeRef.resolve();
                if (newTypeDef != null) {
                    return newTypeRef;
                }
            }
        }
        return typeRef;
    }

    @Override
    public String getTextContent() {
        return textContent;
    }

    @Override
    public void processLinks() {
    }

    @Override
    public Map<String, Selection> getDefinitionToSelectionMap() {
        return definitionToSelectionMap;
    }

    @Override
    public Map<String, Set<Selection>> getReferenceToSelectionsMap() {
        return referenceToSelectionsMap;
    }

    /**
     * Decides whether the link key points at something the UI can navigate to:
     * either a definition recorded in this very file, or a resolvable,
     * non-synthetic type/member outside the currently shown type.
     */
    @Override
    public boolean isLinkNavigable(String uniqueStr) {
        if (isSelectionMapsPopulated && definitionToSelectionMap.containsKey(uniqueStr))
            return true;
        if (uniqueStr == null)
            return false;
        String[] linkParts = uniqueStr.split("\\|");
        if (linkParts.length < 3)
            return false;
        String typeStr = linkParts[2];
        if (typeStr.trim().length() <= 0)
            return false;
        TypeReference typeRef = metadataSystem.lookupType(typeStr.replaceAll("\\.", "/"));
        if (typeRef == null)
            return false;
        TypeDefinition typeDef = typeRef.resolve();
        if (typeDef == null)
            return false;
        if (typeDef.isSynthetic())
            return false;
        if (isSelectionMapsPopulated) {
            // current type's navigable definitions checked already, now it's erroneous
            if (currentTypeQualifiedName == null || currentTypeQualifiedName.trim().length() <= 0)
                return false;
            if (typeStr.equals(currentTypeQualifiedName) || typeStr.startsWith(currentTypeQualifiedName + ".")
                    || typeStr.startsWith(currentTypeQualifiedName + "$"))
                return false;
        }
        // check linked field/method exists
        if (uniqueStr.startsWith("method")) {
            if (findMethodInType(typeDef, uniqueStr) == null) {
                return false;
            }
        } else if (uniqueStr.startsWith("field")) {
            if (findFieldInType(typeDef, uniqueStr) == null) {
                return false;
            }
        }
        return true;
    }

    /**
     * Finds the declared method named in the link key (matched by name and
     * erased signature); returns null when absent, malformed, or synthetic
     * while synthetic members are hidden by the settings.
     */
    private MethodDefinition findMethodInType(TypeDefinition typeDef, String uniqueStr) {
        String[] linkParts = uniqueStr.split("\\|");
        if (linkParts.length != 5)
            return null;
        String methodName = linkParts[3];
        String methodErasedSignature = linkParts[4];
        if (methodName.trim().length() <= 0 || methodErasedSignature.trim().length() <= 0)
            return null;
        List<MethodDefinition> declaredMethods = typeDef.getDeclaredMethods();
        if (declaredMethods == null)
            return null;
        boolean isFound = false;
        for (MethodDefinition declaredMethod : declaredMethods) {
            isFound = (declaredMethod != null && methodName.equals(declaredMethod.getName()));
            isFound = (isFound && methodErasedSignature.equals(declaredMethod.getErasedSignature()));
            if (isFound) {
                if (declaredMethod.isSynthetic() && !settings.getShowSyntheticMembers()) {
                    return null;
                } else {
                    return declaredMethod;
                }
            }
        }
        return null;
    }

    /**
     * Finds the declared field named in the link key; returns null when
     * absent, malformed, or synthetic (synthetic fields are never navigable,
     * regardless of the settings — presumably intentional; verify if changed).
     */
    private FieldDefinition findFieldInType(TypeDefinition typeDef, String uniqueStr) {
        String[] linkParts = uniqueStr.split("\\|");
        if (linkParts.length != 4)
            return null;
        String fieldName = linkParts[3];
        if (fieldName.trim().length() <= 0)
            return null;
        List<FieldDefinition> declaredFields = typeDef.getDeclaredFields();
        if (declaredFields == null)
            return null;
        boolean isFound = false;
        for (FieldDefinition declaredField : declaredFields) {
            isFound = (declaredField != null && fieldName.equals(declaredField.getName()));
            if (isFound) {
                if (declaredField.isSynthetic()) {
                    return null;
                } else {
                    return declaredField;
                }
            }
        }
        return null;
    }

    /**
     * Builds the human-readable tooltip for a link: the member's brief
     * description (constructors get their real name substituted for
     * "void &lt;init&gt;"), with package prefixes stripped and a
     * " - Declared: ..." suffix when the member lives in another file.
     * Returns null when the link cannot be resolved.
     */
    @Override
    public String getLinkDescription(String uniqueStr) {
        String readableLink = null;
        try {
            if (uniqueStr == null)
                return null;
            String[] linkParts = uniqueStr.split("\\|");
            if (linkParts.length < 3)
                return null;
            String typeStr = linkParts[2];
            TypeReference typeRef = metadataSystem.lookupType(typeStr.replaceAll("\\.", "/"));
            if (typeRef == null)
                return null;
            TypeDefinition typeDef = typeRef.resolve();
            if (typeDef == null)
                return null;

            String declaredSuffix = "";
            String mostOuterTypeStr = linkParts[1].replaceAll("/", ".");
            boolean isOwnFile = mostOuterTypeStr.equals(currentTypeQualifiedName);
            if (!isOwnFile) {
                declaredSuffix = " - Declared: " + mostOuterTypeStr;
            }

            if (uniqueStr.startsWith("type")) {
                String desc = typeDef.getBriefDescription();
                if (desc != null && desc.trim().length() > 0) {
                    readableLink = desc;
                }
            } else if (uniqueStr.startsWith("method")) {
                MethodDefinition methodDef = findMethodInType(typeDef, uniqueStr);
                if (methodDef == null)
                    return null;
                String desc = methodDef.getBriefDescription();
                if (desc != null && desc.trim().length() > 0) {
                    if (desc.contains("void <init>")) {
                        // replace the synthetic constructor name with the (nested) type's own name
                        String constructorName = typeDef.getName();
                        TypeReference declaringTypeRef = typeRef.getDeclaringType();
                        if (declaringTypeRef != null) {
                            TypeDefinition declaringTypeDef = declaringTypeRef.resolve();
                            if (declaringTypeDef != null) {
                                String declaringTypeName = declaringTypeDef.getName();
                                if (declaringTypeName != null) {
                                    constructorName = StringUtilities.removeLeft(constructorName, declaringTypeName);
                                    constructorName = constructorName.replaceAll("^(\\.|\\$)", "");
                                }
                            }
                        }
                        desc = desc.replace("void <init>", constructorName);
                        readableLink = "Constructor: " + erasePackageInfoFromDesc(desc) + declaredSuffix;
                    } else {
                        readableLink = erasePackageInfoFromDesc(desc) + declaredSuffix;
                    }
                }
            } else if (uniqueStr.startsWith("field")) {
                FieldDefinition fieldDef = findFieldInType(typeDef, uniqueStr);
                if (fieldDef == null)
                    return null;
                String desc = fieldDef.getBriefDescription();
                if (desc != null && desc.trim().length() > 0) {
                    readableLink = erasePackageInfoFromDesc(desc) + declaredSuffix;
                }
            }
            if (readableLink != null) {
                readableLink = readableLink.replace("$", ".");
            }
        } catch (Exception e) {
            readableLink = null;
            Luyten.showExceptionDialog("Exception!", e);
        }
        return readableLink;
    }

    /** Strips "package." prefixes from identifiers in a brief description. */
    private String erasePackageInfoFromDesc(String desc) {
        String limiters = "\\(\\)\\<\\>\\[\\]\\?\\s,";
        desc = desc.replaceAll("(?<=[^" + limiters + "]*)([^" + limiters + "]*)\\.", "");
        return desc;
    }

    public void setDecompilerReferences(MetadataSystem metadataSystem, DecompilerSettings settings,
            DecompilationOptions decompilationOptions) {
        this.metadataSystem = metadataSystem;
        this.settings = settings;
        this.decompilationOptions = decompilationOptions;
    }

    public void setType(TypeDefinition type) {
        this.type = type;
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.plugins; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.google.common.jimfs.Configuration; import com.google.common.jimfs.Jimfs; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.UserError; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.PosixPermissionsResetter; import org.junit.After; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.StringReader; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.DirectoryStream; import java.nio.file.FileAlreadyExistsException; import java.nio.file.FileSystem; import java.nio.file.FileVisitResult; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import 
java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.StandardCopyOption; import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.PosixFileAttributeView; import java.nio.file.attribute.PosixFileAttributes; import java.nio.file.attribute.PosixFilePermission; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsInAnyOrder; @LuceneTestCase.SuppressFileSystems("*") public class InstallPluginCommandTests extends ESTestCase { private final Function<String, Path> temp; private final FileSystem fs; private final boolean isPosix; private final boolean isReal; private final String javaIoTmpdir; @SuppressForbidden(reason = "sets java.io.tmpdir") public InstallPluginCommandTests(FileSystem fs, Function<String, Path> temp) { this.fs = fs; this.temp = temp; this.isPosix = fs.supportedFileAttributeViews().contains("posix"); this.isReal = fs == PathUtils.getDefaultFileSystem(); PathUtilsForTesting.installMock(fs); javaIoTmpdir = System.getProperty("java.io.tmpdir"); System.setProperty("java.io.tmpdir", temp.apply("tmpdir").toString()); } @After @SuppressForbidden(reason = "resets java.io.tmpdir") public void tearDown() throws Exception { System.setProperty("java.io.tmpdir", javaIoTmpdir); PathUtilsForTesting.teardown(); super.tearDown(); } @ParametersFactory public static Iterable<Object[]> parameters() { class Parameter { private final FileSystem fileSystem; private final Function<String, Path> temp; public Parameter(FileSystem fileSystem, String root) { this(fileSystem, s -> { try { return Files.createTempDirectory(fileSystem.getPath(root), s); } catch (IOException e) { throw new RuntimeException(e); } 
}); } public Parameter(FileSystem fileSystem, Function<String, Path> temp) { this.fileSystem = fileSystem; this.temp = temp; } } List<Parameter> parameters = new ArrayList<>(); parameters.add(new Parameter(Jimfs.newFileSystem(Configuration.windows()), "c:\\")); parameters.add(new Parameter(Jimfs.newFileSystem(toPosix(Configuration.osX())), "/")); parameters.add(new Parameter(Jimfs.newFileSystem(toPosix(Configuration.unix())), "/")); parameters.add(new Parameter(PathUtils.getDefaultFileSystem(), LuceneTestCase::createTempDir )); return parameters.stream().map(p -> new Object[] { p.fileSystem, p.temp }).collect(Collectors.toList()); } private static Configuration toPosix(Configuration configuration) { return configuration.toBuilder().setAttributeViews("basic", "owner", "posix", "unix").build(); } /** Creates a test environment with bin, config and plugins directories. */ static Tuple<Path, Environment> createEnv(FileSystem fs, Function<String, Path> temp) throws IOException { Path home = temp.apply("install-plugin-command-tests"); Files.createDirectories(home.resolve("bin")); Files.createFile(home.resolve("bin").resolve("elasticsearch")); Files.createDirectories(home.resolve("config")); Files.createFile(home.resolve("config").resolve("elasticsearch.yml")); Path plugins = Files.createDirectories(home.resolve("plugins")); assertTrue(Files.exists(plugins)); Settings settings = Settings.builder() .put("path.home", home) .build(); return Tuple.tuple(home, new Environment(settings)); } static Path createPluginDir(Function<String, Path> temp) throws IOException { return temp.apply("pluginDir"); } /** creates a fake jar file with empty class files */ static void writeJar(Path jar, String... 
classes) throws IOException { try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(jar))) { for (String clazz : classes) { stream.putNextEntry(new ZipEntry(clazz + ".class")); // no package names, just support simple classes } } } static String writeZip(Path structure, String prefix) throws IOException { Path zip = createTempDir().resolve(structure.getFileName() + ".zip"); try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { Files.walkFileTree(structure, new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String target = (prefix == null ? "" : prefix + "/") + structure.relativize(file).toString(); stream.putNextEntry(new ZipEntry(target)); Files.copy(file, stream); return FileVisitResult.CONTINUE; } }); } return zip.toUri().toURL().toString(); } /** creates a plugin .zip and returns the url for testing */ static String createPlugin(String name, Path structure) throws IOException { PluginTestUtil.writeProperties(structure, "description", "fake desc", "name", name, "version", "1.0", "elasticsearch.version", Version.CURRENT.toString(), "java.version", System.getProperty("java.specification.version"), "classname", "FakePlugin"); writeJar(structure.resolve("plugin.jar"), "FakePlugin"); return writeZip(structure, "elasticsearch"); } static MockTerminal installPlugin(String pluginUrl, Path home) throws Exception { return installPlugin(pluginUrl, home, false); } static MockTerminal installPlugin(String pluginUrl, Path home, boolean jarHellCheck) throws Exception { Map<String, String> settings = new HashMap<>(); settings.put("path.home", home.toString()); MockTerminal terminal = new MockTerminal(); new InstallPluginCommand() { @Override void jarHellCheck(Path candidate, Path pluginsDir) throws Exception { if (jarHellCheck) { super.jarHellCheck(candidate, pluginsDir); } } }.execute(terminal, pluginUrl, true, settings); return terminal; } void 
assertPlugin(String name, Path original, Environment env) throws IOException { Path got = env.pluginsFile().resolve(name); assertTrue("dir " + name + " exists", Files.exists(got)); if (isPosix) { Set<PosixFilePermission> perms = Files.getPosixFilePermissions(got); assertThat( perms, containsInAnyOrder( PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE, PosixFilePermission.OWNER_EXECUTE, PosixFilePermission.GROUP_READ, PosixFilePermission.GROUP_EXECUTE, PosixFilePermission.OTHERS_READ, PosixFilePermission.OTHERS_EXECUTE)); } assertTrue("jar was copied", Files.exists(got.resolve("plugin.jar"))); assertFalse("bin was not copied", Files.exists(got.resolve("bin"))); assertFalse("config was not copied", Files.exists(got.resolve("config"))); if (Files.exists(original.resolve("bin"))) { Path binDir = env.binFile().resolve(name); assertTrue("bin dir exists", Files.exists(binDir)); assertTrue("bin is a dir", Files.isDirectory(binDir)); PosixFileAttributes binAttributes = null; if (isPosix) { binAttributes = Files.readAttributes(env.binFile(), PosixFileAttributes.class); } try (DirectoryStream<Path> stream = Files.newDirectoryStream(binDir)) { for (Path file : stream) { assertFalse("not a dir", Files.isDirectory(file)); if (isPosix) { PosixFileAttributes attributes = Files.readAttributes(file, PosixFileAttributes.class); assertEquals(InstallPluginCommand.DIR_AND_EXECUTABLE_PERMS, attributes.permissions()); } } } } if (Files.exists(original.resolve("config"))) { Path configDir = env.configFile().resolve(name); assertTrue("config dir exists", Files.exists(configDir)); assertTrue("config is a dir", Files.isDirectory(configDir)); if (isPosix) { Path configRoot = env.configFile(); PosixFileAttributes configAttributes = Files.getFileAttributeView(configRoot, PosixFileAttributeView.class).readAttributes(); PosixFileAttributes attributes = Files.getFileAttributeView(configDir, PosixFileAttributeView.class).readAttributes(); assertThat(attributes.owner(), 
equalTo(configAttributes.owner())); assertThat(attributes.group(), equalTo(configAttributes.group())); } try (DirectoryStream<Path> stream = Files.newDirectoryStream(configDir)) { for (Path file : stream) { assertFalse("not a dir", Files.isDirectory(file)); } } } assertInstallCleaned(env); } void assertInstallCleaned(Environment env) throws IOException { try (DirectoryStream<Path> stream = Files.newDirectoryStream(env.pluginsFile())) { for (Path file : stream) { if (file.getFileName().toString().startsWith(".installing")) { fail("Installation dir still exists, " + file); } } } } public void testSomethingWorks() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); assertPlugin("fake", pluginDir, env.v2()); } public void testSpaceInUrl() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); Path pluginZipWithSpaces = createTempFile("foo bar", ".zip"); try (InputStream in = new URL(pluginZip).openStream()) { Files.copy(in, pluginZipWithSpaces, StandardCopyOption.REPLACE_EXISTING); } installPlugin(pluginZipWithSpaces.toUri().toURL().toString(), env.v1()); assertPlugin("fake", pluginDir, env.v2()); } public void testMalformedUrlNotMaven() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); // has two colons, so it appears similar to maven coordinates MalformedURLException e = expectThrows(MalformedURLException.class, () -> installPlugin("://host:1234", env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("no protocol")); } public void testPluginsDirMissing() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Files.delete(env.v2().pluginsFile()); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); 
assertPlugin("fake", pluginDir, env.v2()); } public void testPluginsDirReadOnly() throws Exception { assumeTrue("posix and filesystem", isPosix && isReal); Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); try (PosixPermissionsResetter pluginsAttrs = new PosixPermissionsResetter(env.v2().pluginsFile())) { pluginsAttrs.setPermissions(new HashSet<>()); String pluginZip = createPlugin("fake", pluginDir); IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains(env.v2().pluginsFile().toString())); } assertInstallCleaned(env.v2()); } public void testBuiltinModule() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("lang-groovy", pluginDir); UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("is a system module")); assertInstallCleaned(env.v2()); } public void testJarHell() throws Exception { // jar hell test needs a real filesystem assumeTrue("real filesystem", isReal); Tuple<Path, Environment> environment = createEnv(fs, temp); Path pluginDirectory = createPluginDir(temp); writeJar(pluginDirectory.resolve("other.jar"), "FakePlugin"); String pluginZip = createPlugin("fake", pluginDirectory); // adds plugin.jar with FakePlugin IllegalStateException e = expectThrows(IllegalStateException.class, () -> installPlugin(pluginZip, environment.v1(), true)); assertTrue(e.getMessage(), e.getMessage().contains("jar hell")); assertInstallCleaned(environment.v2()); } public void testIsolatedPlugins() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); // these both share the same FakePlugin class Path pluginDir1 = createPluginDir(temp); String pluginZip1 = createPlugin("fake1", pluginDir1); installPlugin(pluginZip1, env.v1()); Path pluginDir2 = 
createPluginDir(temp); String pluginZip2 = createPlugin("fake2", pluginDir2); installPlugin(pluginZip2, env.v1()); assertPlugin("fake1", pluginDir1, env.v2()); assertPlugin("fake2", pluginDir2, env.v2()); } public void testExistingPlugin() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("already exists")); assertInstallCleaned(env.v2()); } public void testBin() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); assertPlugin("fake", pluginDir, env.v2()); } public void testBinNotDir() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPlugin("fake", pluginDir); UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); } public void testBinContainsDir() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInBinDir = pluginDir.resolve("bin").resolve("foo"); Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for 
plugin")); assertInstallCleaned(env.v2()); } public void testBinConflict() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("elasticsearch", pluginDir); FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains(env.v2().binFile().resolve("elasticsearch").toString())); assertInstallCleaned(env.v2()); } public void testBinPermissions() throws Exception { assumeTrue("posix filesystem", isPosix); Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path binDir = pluginDir.resolve("bin"); Files.createDirectory(binDir); Files.createFile(binDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); try (PosixPermissionsResetter binAttrs = new PosixPermissionsResetter(env.v2().binFile())) { Set<PosixFilePermission> perms = binAttrs.getCopyPermissions(); // make sure at least one execute perm is missing, so we know we forced it during installation perms.remove(PosixFilePermission.GROUP_EXECUTE); binAttrs.setPermissions(perms); installPlugin(pluginZip, env.v1()); assertPlugin("fake", pluginDir, env.v2()); } } public void testConfig() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("custom.yaml")); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); assertPlugin("fake", pluginDir, env.v2()); } public void testExistingConfig() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path envConfigDir = env.v2().configFile().resolve("fake"); 
Files.createDirectories(envConfigDir); Files.write(envConfigDir.resolve("custom.yaml"), "existing config".getBytes(StandardCharsets.UTF_8)); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.write(configDir.resolve("custom.yaml"), "new config".getBytes(StandardCharsets.UTF_8)); Files.createFile(configDir.resolve("other.yaml")); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); assertPlugin("fake", pluginDir, env.v2()); List<String> configLines = Files.readAllLines(envConfigDir.resolve("custom.yaml"), StandardCharsets.UTF_8); assertEquals(1, configLines.size()); assertEquals("existing config", configLines.get(0)); assertTrue(Files.exists(envConfigDir.resolve("other.yaml"))); } public void testConfigNotDir() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); String pluginZip = createPlugin("fake", pluginDir); UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); } public void testConfigContainsDir() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path dirInConfigDir = pluginDir.resolve("config").resolve("foo"); Files.createDirectories(dirInConfigDir); Files.createFile(dirInConfigDir.resolve("myconfig.yml")); String pluginZip = createPlugin("fake", pluginDir); UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in config dir for plugin")); assertInstallCleaned(env.v2()); } public void testConfigConflict() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Path 
configDir = pluginDir.resolve("config"); Files.createDirectory(configDir); Files.createFile(configDir.resolve("myconfig.yml")); String pluginZip = createPlugin("elasticsearch.yml", pluginDir); FileAlreadyExistsException e = expectThrows(FileAlreadyExistsException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains(env.v2().configFile().resolve("elasticsearch.yml").toString())); assertInstallCleaned(env.v2()); } public void testMissingDescriptor() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve("fake.yml")); String pluginZip = writeZip(pluginDir, "elasticsearch"); NoSuchFileException e = expectThrows(NoSuchFileException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("plugin-descriptor.properties")); assertInstallCleaned(env.v2()); } public void testMissingDirectory() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES)); String pluginZip = writeZip(pluginDir, null); UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("`elasticsearch` directory is missing in the plugin zip")); assertInstallCleaned(env.v2()); } public void testZipRelativeOutsideEntryName() throws Exception { Tuple<Path, Environment> env = createEnv(fs, temp); Path zip = createTempDir().resolve("broken.zip"); try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { stream.putNextEntry(new ZipEntry("elasticsearch/../blah")); } String pluginZip = zip.toUri().toURL().toString(); IOException e = expectThrows(IOException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("resolving outside of plugin directory")); } public void 
testOfficialPluginsHelpSorted() throws Exception { MockTerminal terminal = new MockTerminal(); new InstallPluginCommand().main(new String[] { "--help" }, terminal); try (BufferedReader reader = new BufferedReader(new StringReader(terminal.getOutput()))) { String line = reader.readLine(); // first find the beginning of our list of official plugins while (line.endsWith("may be installed by name:") == false) { line = reader.readLine(); } // now check each line compares greater than the last, until we reach an empty line String prev = reader.readLine(); line = reader.readLine(); while (line != null && line.trim().isEmpty() == false) { assertTrue(prev + " < " + line, prev.compareTo(line) < 0); prev = line; line = reader.readLine(); } } } public void testOfficialPluginsIncludesXpack() throws Exception { MockTerminal terminal = new MockTerminal(); new InstallPluginCommand().main(new String[] { "--help" }, terminal); assertTrue(terminal.getOutput(), terminal.getOutput().contains("x-pack")); } // TODO: test batch flag? // TODO: test checksum (need maven/official below) // TODO: test maven, official, and staging install...need tests with fixtures... }
/* * Copyright 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.vitess.jdbc; import java.sql.DriverPropertyInfo; import java.sql.SQLException; import java.util.Arrays; import java.util.Properties; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; import io.vitess.proto.Query; import io.vitess.proto.Topodata; import io.vitess.util.Constants; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; import io.vitess.proto.Query; import io.vitess.proto.Topodata; import io.vitess.util.Constants; public class ConnectionPropertiesTest { private static final int NUM_PROPS = 32; @Test public void testReflection() throws Exception { ConnectionProperties props = new ConnectionProperties(); Properties info = Mockito.spy(Properties.class); Mockito.doReturn(info).when(info).clone(); props.initializeProperties(info); // Just testing that we are properly picking up all the fields defined in the properties // For each field we call initializeFrom, which should call getProperty and remove Mockito.verify(info, Mockito.times(NUM_PROPS)).getProperty(Mockito.anyString()); Mockito.verify(info, Mockito.times(NUM_PROPS)).remove(Mockito.anyString()); } @Test public void testDefaults() throws SQLException { ConnectionProperties props = new ConnectionProperties(); props.initializeProperties(new Properties()); Assert.assertEquals("blobsAreStrings", false, props.getBlobsAreStrings()); Assert.assertEquals("functionsNeverReturnBlobs", 
false, props.getFunctionsNeverReturnBlobs()); Assert.assertEquals("tinyInt1isBit", true, props.getTinyInt1isBit()); Assert.assertEquals("yearIsDateType", true, props.getYearIsDateType()); Assert.assertEquals("useBlobToStoreUTF8OutsideBMP", false, props.getUseBlobToStoreUTF8OutsideBMP()); Assert.assertEquals("utf8OutsideBmpIncludedColumnNamePattern", null, props.getUtf8OutsideBmpIncludedColumnNamePattern()); Assert.assertEquals("utf8OutsideBmpExcludedColumnNamePattern", null, props.getUtf8OutsideBmpExcludedColumnNamePattern()); Assert.assertEquals("characterEncoding", null, props.getEncoding()); Assert.assertEquals("executeType", Constants.DEFAULT_EXECUTE_TYPE, props.getExecuteType()); Assert.assertEquals("twopcEnabled", false, props.getTwopcEnabled()); Assert.assertEquals("includedFields", Constants.DEFAULT_INCLUDED_FIELDS, props.getIncludedFields()); Assert.assertEquals("includedFieldsCache", true, props.isIncludeAllFields()); Assert.assertEquals("tabletType", Constants.DEFAULT_TABLET_TYPE, props.getTabletType()); Assert.assertEquals("useSSL", false, props.getUseSSL()); } @Test public void testInitializeFromProperties() throws SQLException { ConnectionProperties props = new ConnectionProperties(); Properties info = new Properties(); info.setProperty("blobsAreStrings", "yes"); info.setProperty("functionsNeverReturnBlobs", "yes"); info.setProperty("tinyInt1isBit", "yes"); info.setProperty("yearIsDateType", "yes"); info.setProperty("useBlobToStoreUTF8OutsideBMP", "yes"); info.setProperty("utf8OutsideBmpIncludedColumnNamePattern", "(foo|bar)?baz"); info.setProperty("utf8OutsideBmpExcludedColumnNamePattern", "(foo|bar)?baz"); info.setProperty("characterEncoding", "utf-8"); info.setProperty("executeType", Constants.QueryExecuteType.STREAM.name()); info.setProperty("twopcEnabled", "yes"); info.setProperty("includedFields", Query.ExecuteOptions.IncludedFields.TYPE_ONLY.name()); info.setProperty(Constants.Property.TABLET_TYPE, Topodata.TabletType.BACKUP.name()); 
props.initializeProperties(info); Assert.assertEquals("blobsAreStrings", true, props.getBlobsAreStrings()); Assert.assertEquals("functionsNeverReturnBlobs", true, props.getFunctionsNeverReturnBlobs()); Assert.assertEquals("tinyInt1isBit", true, props.getTinyInt1isBit()); Assert.assertEquals("yearIsDateType", true, props.getYearIsDateType()); Assert.assertEquals("useBlobToStoreUTF8OutsideBMP", true, props.getUseBlobToStoreUTF8OutsideBMP()); Assert.assertEquals("utf8OutsideBmpIncludedColumnNamePattern", "(foo|bar)?baz", props.getUtf8OutsideBmpIncludedColumnNamePattern()); Assert.assertEquals("utf8OutsideBmpExcludedColumnNamePattern", "(foo|bar)?baz", props.getUtf8OutsideBmpExcludedColumnNamePattern()); Assert.assertEquals("characterEncoding", "utf-8", props.getEncoding()); Assert.assertEquals("executeType", Constants.QueryExecuteType.STREAM, props.getExecuteType()); Assert.assertEquals("twopcEnabled", true, props.getTwopcEnabled()); Assert.assertEquals("includedFields", Query.ExecuteOptions.IncludedFields.TYPE_ONLY, props.getIncludedFields()); Assert.assertEquals("includedFieldsCache", false, props.isIncludeAllFields()); Assert.assertEquals("tabletType", Topodata.TabletType.BACKUP, props.getTabletType()); } @Test(expected = SQLException.class) public void testEncodingValidation() throws SQLException { ConnectionProperties props = new ConnectionProperties(); Properties info = new Properties(); String fakeEncoding = "utf-12345"; info.setProperty("characterEncoding", fakeEncoding); try { props.initializeProperties(info); Assert.fail("should have failed to parse encoding " + fakeEncoding); } catch (SQLException e) { Assert.assertEquals("Unsupported character encoding: " + fakeEncoding, e.getMessage()); throw e; } } @Test public void testDriverPropertiesOutput() throws SQLException { Properties info = new Properties(); DriverPropertyInfo[] infos = ConnectionProperties.exposeAsDriverPropertyInfo(info, 0); Assert.assertEquals(NUM_PROPS, infos.length); // Test the expected 
fields for just 1 int indexForFullTest = 15; Assert.assertEquals("executeType", infos[indexForFullTest].name); Assert.assertEquals("Query execution type: simple or stream", infos[indexForFullTest].description); Assert.assertEquals(false, infos[indexForFullTest].required); Constants.QueryExecuteType[] enumConstants = Constants.QueryExecuteType.values(); String[] allowed = new String[enumConstants.length]; for (int i = 0; i < enumConstants.length; i++) { allowed[i] = enumConstants[i].toString(); } Assert.assertArrayEquals(allowed, infos[indexForFullTest].choices); // Test that name exists for the others, as a sanity check Assert.assertEquals("functionsNeverReturnBlobs", infos[1].name); Assert.assertEquals("tinyInt1isBit", infos[2].name); Assert.assertEquals("yearIsDateType", infos[3].name); Assert.assertEquals("useBlobToStoreUTF8OutsideBMP", infos[4].name); Assert.assertEquals("utf8OutsideBmpIncludedColumnNamePattern", infos[5].name); Assert.assertEquals("utf8OutsideBmpExcludedColumnNamePattern", infos[6].name); Assert.assertEquals("characterEncoding", infos[7].name); Assert.assertEquals(Constants.Property.TWOPC_ENABLED, infos[indexForFullTest+1].name); Assert.assertEquals(Constants.Property.INCLUDED_FIELDS, infos[indexForFullTest+2].name); Assert.assertEquals(Constants.Property.TABLET_TYPE, infos[indexForFullTest-2].name); } @Test public void testValidBooleanValues() throws SQLException { ConnectionProperties props = new ConnectionProperties(); Properties info = new Properties(); info.setProperty("blobsAreStrings", "true"); info.setProperty("functionsNeverReturnBlobs", "yes"); info.setProperty("tinyInt1isBit", "no"); props.initializeProperties(info); info.setProperty(Constants.Property.TWOPC_ENABLED, "false-ish"); try { props.initializeProperties(info); Assert.fail("should have thrown an exception on bad value false-ish"); } catch (IllegalArgumentException e) { Assert.assertEquals( "Property '" + Constants.Property.TWOPC_ENABLED + "' Value 'false-ish' not in the 
list of allowable values: " + Arrays.toString(new String[] { Boolean.toString(true), Boolean.toString(false), "yes", "no"}) , e.getMessage()); } } @Test public void testValidEnumValues() throws SQLException { ConnectionProperties props = new ConnectionProperties(); Properties info = new Properties(); info.setProperty("executeType", "foo"); try { props.initializeProperties(info); Assert.fail("should have thrown an exception on bad value foo"); } catch (IllegalArgumentException e) { Assert.assertEquals( "Property 'executeType' Value 'foo' not in the list of allowable values: " + Arrays.toString(Constants.QueryExecuteType.values()) , e.getMessage()); } } @Test public void testSettersUpdateCaches() throws SQLException { ConnectionProperties props = new ConnectionProperties(); props.initializeProperties(new Properties()); // included fields and all boolean cache Assert.assertEquals(Constants.DEFAULT_INCLUDED_FIELDS, props.getIncludedFields()); Assert.assertEquals(true, props.isIncludeAllFields()); // execute type and simple boolean cache Assert.assertEquals(Constants.DEFAULT_EXECUTE_TYPE, props.getExecuteType()); Assert.assertEquals(Constants.DEFAULT_EXECUTE_TYPE == Constants.QueryExecuteType.SIMPLE, props.isSimpleExecute()); // tablet type and twopc Assert.assertEquals(Constants.DEFAULT_TABLET_TYPE, props.getTabletType()); Assert.assertEquals(false, props.getTwopcEnabled()); props.setIncludedFields(Query.ExecuteOptions.IncludedFields.TYPE_AND_NAME); props.setExecuteType(Constants.QueryExecuteType.STREAM); props.setTabletType(Topodata.TabletType.BACKUP); props.setTwopcEnabled(true); // included fields and all boolean cache Assert.assertEquals(Query.ExecuteOptions.IncludedFields.TYPE_AND_NAME, props.getIncludedFields()); Assert.assertEquals(false, props.isIncludeAllFields()); // execute type and simple boolean cache Assert.assertEquals(Constants.QueryExecuteType.STREAM, props.getExecuteType()); Assert.assertEquals(Constants.DEFAULT_EXECUTE_TYPE != 
Constants.QueryExecuteType.SIMPLE, props.isSimpleExecute()); // tablet type and twopc Assert.assertEquals(Topodata.TabletType.BACKUP, props.getTabletType()); Assert.assertEquals(true, props.getTwopcEnabled()); } @Test public void testTarget() throws SQLException { ConnectionProperties props = new ConnectionProperties(); // Setting keyspace Properties info = new Properties(); info.setProperty(Constants.Property.KEYSPACE, "test_keyspace"); props.initializeProperties(info); Assert.assertEquals("target", "test_keyspace@master", props.getTarget()); // Setting keyspace and shard info = new Properties(); info.setProperty(Constants.Property.KEYSPACE, "test_keyspace"); info.setProperty(Constants.Property.SHARD, "80-c0"); props.initializeProperties(info); Assert.assertEquals("target", "test_keyspace:80-c0@master", props.getTarget()); // Setting tablet type info = new Properties(); info.setProperty(Constants.Property.TABLET_TYPE, "replica"); props.initializeProperties(info); Assert.assertEquals("target", "@replica", props.getTarget()); // Setting shard which will have no impact without keyspace info = new Properties(); info.setProperty(Constants.Property.SHARD, "80-c0"); props.initializeProperties(info); Assert.assertEquals("target", "@master", props.getTarget()); // Setting shard and tablet type. Shard will have no impact. info = new Properties(); info.setProperty(Constants.Property.SHARD, "80-c0"); info.setProperty(Constants.Property.TABLET_TYPE, "replica"); props.initializeProperties(info); Assert.assertEquals("target", "@replica", props.getTarget()); // Setting keyspace, shard and tablet type. info = new Properties(); info.setProperty(Constants.Property.KEYSPACE, "test_keyspace"); info.setProperty(Constants.Property.SHARD, "80-c0"); info.setProperty(Constants.Property.TABLET_TYPE, "rdonly"); props.initializeProperties(info); Assert.assertEquals("target", "test_keyspace:80-c0@rdonly", props.getTarget()); // Setting keyspace, shard, tablet type and target. 
Target supersede others. info = new Properties(); info.setProperty(Constants.Property.KEYSPACE, "test_keyspace"); info.setProperty(Constants.Property.SHARD, "80-c0"); info.setProperty(Constants.Property.TABLET_TYPE, "rdonly"); info.setProperty(Constants.Property.TARGET, "dummy"); props.initializeProperties(info); Assert.assertEquals("target", "dummy", props.getTarget()); } }
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.language.nativeplatform.tasks;

import org.gradle.api.DefaultTask;
import org.gradle.api.Incubating;
import org.gradle.api.Transformer;
import org.gradle.api.file.ConfigurableFileCollection;
import org.gradle.api.file.DirectoryProperty;
import org.gradle.api.file.FileCollection;
import org.gradle.api.internal.file.FileCollectionFactory;
import org.gradle.api.internal.file.TaskFileVarFactory;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.provider.ListProperty;
import org.gradle.api.provider.Property;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Nested;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.PathSensitive;
import org.gradle.api.tasks.PathSensitivity;
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.WorkResult;
import org.gradle.api.tasks.incremental.IncrementalTaskInputs;
import org.gradle.internal.Cast;
import org.gradle.internal.operations.logging.BuildOperationLogger;
import org.gradle.internal.operations.logging.BuildOperationLoggerFactory;
import org.gradle.language.base.internal.compile.Compiler;
import org.gradle.language.nativeplatform.internal.incremental.IncrementalCompilerBuilder;
import org.gradle.nativeplatform.internal.BuildOperationLoggingCompilerDecorator;
import org.gradle.nativeplatform.platform.NativePlatform;
import org.gradle.nativeplatform.platform.internal.NativePlatformInternal;
import org.gradle.nativeplatform.toolchain.Clang;
import org.gradle.nativeplatform.toolchain.Gcc;
import org.gradle.nativeplatform.toolchain.NativeToolChain;
import org.gradle.nativeplatform.toolchain.internal.NativeCompileSpec;
import org.gradle.nativeplatform.toolchain.internal.NativeToolChainInternal;
import org.gradle.nativeplatform.toolchain.internal.PlatformToolProvider;

import javax.inject.Inject;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * Compiles native source files into object files.
 */
@Incubating
public abstract class AbstractNativeCompileTask extends DefaultTask {
    // Configured lazily via Gradle's Provider API; resolved in compile().
    private final Property<NativePlatform> targetPlatform;
    private final Property<NativeToolChain> toolChain;
    private boolean positionIndependentCode;
    private boolean debug;
    private boolean optimize;
    private final DirectoryProperty objectFileDir;
    private final ConfigurableFileCollection includes;
    private final ConfigurableFileCollection systemIncludes;
    private final ConfigurableFileCollection source;
    // Insertion-ordered so macros reach the compiler command line in the order they were added.
    private final Map<String, String> macros = new LinkedHashMap<String, String>();
    private final ListProperty<String> compilerArgs;
    private final IncrementalCompilerBuilder.IncrementalCompiler incrementalCompiler;

    public AbstractNativeCompileTask() {
        ObjectFactory objectFactory = getProject().getObjects();
        this.includes = getProject().files();
        this.systemIncludes = getProject().files();
        // Make the task depend on the include collections so producer tasks run first.
        dependsOn(includes);
        dependsOn(systemIncludes);
        this.source = getTaskFileVarFactory().newInputFileCollection(this);
        this.objectFileDir = objectFactory.directoryProperty();
        this.compilerArgs = getProject().getObjects().listProperty(String.class).empty();
        this.targetPlatform = objectFactory.property(NativePlatform.class);
        this.toolChain = objectFactory.property(NativeToolChain.class);
        // The boolean provider tells the incremental compiler whether the tool chain is
        // GCC-like (Gcc or Clang); evaluated lazily when the tool chain is finally known.
        this.incrementalCompiler = getIncrementalCompilerBuilder().newCompiler(this, source, includes.plus(systemIncludes), macros, toolChain.map(new Transformer<Boolean, NativeToolChain>() {
            @Override
            public Boolean transform(NativeToolChain nativeToolChain) {
                return nativeToolChain instanceof Gcc || nativeToolChain instanceof Clang;
            }
        }));
    }

    // The @Inject service getters below are overridden by Gradle at runtime;
    // the throwing bodies are never executed in a real build.
    @Inject
    protected TaskFileVarFactory getTaskFileVarFactory() {
        throw new UnsupportedOperationException();
    }

    @Inject
    protected IncrementalCompilerBuilder getIncrementalCompilerBuilder() {
        throw new UnsupportedOperationException();
    }

    @Inject
    protected BuildOperationLoggerFactory getOperationLoggerFactory() {
        throw new UnsupportedOperationException();
    }

    @Inject
    protected FileCollectionFactory getFileCollectionFactory() {
        throw new UnsupportedOperationException();
    }

    /**
     * Task action: builds a {@link NativeCompileSpec} from this task's state,
     * lets subclasses customise it via {@link #configureSpec}, selects the
     * platform-specific tool provider, and runs the (incremental, logged)
     * compile, propagating the did-work flag back to Gradle.
     */
    @TaskAction
    public void compile(IncrementalTaskInputs inputs) {
        BuildOperationLogger operationLogger = getOperationLoggerFactory().newOperationLogger(getName(), getTemporaryDir());

        NativeCompileSpec spec = createCompileSpec();
        spec.setTargetPlatform(targetPlatform.get());
        spec.setTempDir(getTemporaryDir());
        spec.setObjectFileDir(objectFileDir.get().getAsFile());
        spec.include(includes);
        spec.systemInclude(systemIncludes);
        spec.source(getSource());
        spec.setMacros(getMacros());
        spec.args(getCompilerArgs().get());
        spec.setPositionIndependentCode(isPositionIndependentCode());
        spec.setDebuggable(isDebuggable());
        spec.setOptimized(isOptimized());
        spec.setIncrementalCompile(inputs.isIncremental());
        spec.setOperationLogger(operationLogger);

        configureSpec(spec);

        NativeToolChainInternal nativeToolChain = (NativeToolChainInternal) toolChain.get();
        NativePlatformInternal nativePlatform = (NativePlatformInternal) targetPlatform.get();
        PlatformToolProvider platformToolProvider = nativeToolChain.select(nativePlatform);
        setDidWork(doCompile(spec, platformToolProvider).getDidWork());
    }

    // Hook for subclasses to add language-specific settings to the spec; no-op by default.
    protected void configureSpec(NativeCompileSpec spec) {
    }

    // Wraps the base compiler in the incremental layer, then in build-operation logging,
    // and executes the spec. The unchecked cast is safe: spec's runtime class is T.
    private <T extends NativeCompileSpec> WorkResult doCompile(T spec, PlatformToolProvider platformToolProvider) {
        Class<T> specType = Cast.uncheckedCast(spec.getClass());
        Compiler<T> baseCompiler = platformToolProvider.newCompiler(specType);
        Compiler<T> incrementalCompiler = this.incrementalCompiler.createCompiler(baseCompiler);
        Compiler<T> loggingCompiler = BuildOperationLoggingCompilerDecorator.wrap(incrementalCompiler);
        return loggingCompiler.execute(spec);
    }

    // Subclasses supply the concrete spec type (e.g. C vs C++ compile spec).
    protected abstract NativeCompileSpec createCompileSpec();

    /**
     * The tool chain used for compilation.
     *
     * @since 4.7
     */
    @Internal
    public Property<NativeToolChain> getToolChain() {
        return toolChain;
    }

    /**
     * The platform being compiled for.
     *
     * @since 4.7
     */
    @Nested
    public Property<NativePlatform> getTargetPlatform() {
        return targetPlatform;
    }

    /**
     * Should the compiler generate position independent code?
     */
    @Input
    public boolean isPositionIndependentCode() {
        return positionIndependentCode;
    }

    public void setPositionIndependentCode(boolean positionIndependentCode) {
        this.positionIndependentCode = positionIndependentCode;
    }

    /**
     * Should the compiler generate debuggable code?
     *
     * @since 4.3
     */
    @Input
    public boolean isDebuggable() {
        return debug;
    }

    /**
     * Should the compiler generate debuggable code?
     *
     * @since 4.3
     */
    public void setDebuggable(boolean debug) {
        this.debug = debug;
    }

    /**
     * Should the compiler generate optimized code?
     *
     * @since 4.3
     */
    @Input
    public boolean isOptimized() {
        return optimize;
    }

    /**
     * Should the compiler generate optimized code?
     *
     * @since 4.3
     */
    public void setOptimized(boolean optimize) {
        this.optimize = optimize;
    }

    /**
     * The directory where object files will be generated.
     *
     * @since 4.3
     */
    @OutputDirectory
    public DirectoryProperty getObjectFileDir() {
        return objectFileDir;
    }

    /**
     * Returns the header directories to be used for compilation.
     */
    @Internal("The paths for include directories are tracked via the includePaths property, the contents are tracked via discovered inputs")
    public ConfigurableFileCollection getIncludes() {
        return includes;
    }

    /**
     * Add directories where the compiler should search for header files.
     */
    public void includes(Object includeRoots) {
        includes.from(includeRoots);
    }

    /**
     * Returns the system include directories to be used for compilation.
     *
     * @since 4.8
     */
    @Internal("The paths for include directories are tracked via the includePaths property, the contents are tracked via discovered inputs")
    public ConfigurableFileCollection getSystemIncludes() {
        return systemIncludes;
    }

    /**
     * Returns the source files to be compiled.
     */
    @InputFiles
    @PathSensitive(PathSensitivity.RELATIVE)
    public ConfigurableFileCollection getSource() {
        return source;
    }

    /**
     * Adds a set of source files to be compiled. The provided sourceFiles object is evaluated as per {@link org.gradle.api.Project#files(Object...)}.
     */
    public void source(Object sourceFiles) {
        source.from(sourceFiles);
    }

    /**
     * Macros that should be defined for the compiler.
     */
    @Input
    public Map<String, String> getMacros() {
        return macros;
    }

    // Replaces (not merges) the current macro set, preserving the map instance
    // the incremental compiler was constructed with.
    public void setMacros(Map<String, String> macros) {
        this.macros.clear();
        this.macros.putAll(macros);
    }

    /**
     * <em>Additional</em> arguments to provide to the compiler.
     *
     * @since 4.3
     */
    @Input
    public ListProperty<String> getCompilerArgs() {
        return compilerArgs;
    }

    /**
     * The set of dependent headers. This is used for up-to-date checks only.
     *
     * @since 4.3
     */
    @InputFiles
    @PathSensitive(PathSensitivity.NAME_ONLY)
    protected FileCollection getHeaderDependencies() {
        return incrementalCompiler.getHeaderFiles();
    }
}
/* * Copyright (c) 2017-2019 Arrow Electronics, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License 2.0 * which accompanies this distribution, and is available at * http://apache.org/licenses/LICENSE-2.0 * * Contributors: * Arrow Electronics, Inc. * Konexios, Inc. */ package com.konexios.sample; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.annotation.TargetApi; import android.content.Intent; import android.content.SharedPreferences; import android.os.Build; import android.os.Bundle; import android.preference.PreferenceManager; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import android.text.TextUtils; import android.util.Log; import android.view.KeyEvent; import android.view.View; import android.view.View.OnClickListener; import android.view.inputmethod.EditorInfo; import android.widget.AutoCompleteTextView; import android.widget.Button; import android.widget.EditText; import android.widget.TextView; import com.konexios.api.Api; import com.konexios.api.ApiService; import com.konexios.api.listeners.RegisterAccount2Listener; import com.konexios.api.models.AccountRequest2; import com.konexios.api.models.AccountResponse2; import com.konexios.api.models.ApiError; /** * A login screen that offers login via email/password. */ public class LoginActivity extends AppCompatActivity { public final static String ACCOUNT_RESPONSE_EXTRA = "account_response"; private final static String TAG = LoginActivity.class.getSimpleName(); private static final String ACCOUNT_LOGIN_SP_KEY = "com.arrow.kronos.sample.login_key"; private static final String ACCOUNT_PASSWORD_SP_KEY = "com.arrow.kronos.sample.password_key"; private static final String CODE_PASSWORD_SP_KEY = "com.arrow.kronos.sample.code_key"; // UI references. 
private AutoCompleteTextView mEmailView; private EditText mPasswordView; private EditText mCode; private View mProgressView; private View mLoginFormView; private ApiService mApiService; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_login); // Set up the login form. mEmailView = (AutoCompleteTextView) findViewById(R.id.email); mPasswordView = (EditText) findViewById(R.id.password); mCode = (EditText) findViewById(R.id.code); mPasswordView.setOnEditorActionListener(new TextView.OnEditorActionListener() { @Override public boolean onEditorAction(TextView textView, int id, KeyEvent keyEvent) { if (id == EditorInfo.IME_ACTION_GO || id == EditorInfo.IME_NULL) { attemptLogin(); return true; } return false; } }); Button mEmailSignInButton = (Button) findViewById(R.id.email_sign_in_button); mEmailSignInButton.setOnClickListener(new OnClickListener() { @Override public void onClick(View view) { attemptLogin(); } }); mLoginFormView = findViewById(R.id.login_form); mProgressView = findViewById(R.id.login_progress); //the first creation of acn lib mApiService = App.getAcnApiService(); initViews(); } /** * Attempts to sign in or register the account specified by the login form. * If there are form errors (invalid email, missing fields, etc.), the * errors are presented and no actual login attempt is made. */ private void attemptLogin() { // Reset errors. mEmailView.setError(null); mPasswordView.setError(null); // Store values at the time of the login attempt. final String email = mEmailView.getText().toString(); final String password = mPasswordView.getText().toString(); final String code = mCode.getText().toString(); boolean cancel = false; View focusView = null; // Check for a valid password, if the user entered one. 
if (!TextUtils.isEmpty(password) && !isPasswordValid(password)) { mPasswordView.setError(getString(R.string.error_invalid_password)); focusView = mPasswordView; cancel = true; } // Check for a valid email address. if (TextUtils.isEmpty(email)) { mEmailView.setError(getString(R.string.error_field_required)); focusView = mEmailView; cancel = true; } else if (!isEmailValid(email)) { mEmailView.setError(getString(R.string.error_invalid_email)); focusView = mEmailView; cancel = true; } if (cancel) { // There was an error; don't attempt login and focus the first // form field with an error. focusView.requestFocus(); } else { // Show a progress spinner, and kick off a background task to // perform the user login attempt. showProgress(true); /*AccountRequest model = new AccountRequest(); model.setName("Some Name"); model.setEmail(email.toLowerCase()); model.setPassword(password); if (!TextUtils.isEmpty(code)) { model.setCode(code); } mApiService.registerAccount(model, new RegisterAccountListener() { @Override public void onAccountRegistered(AccountResponse accountResponse) { Log.v(TAG, "onAccountRegistered"); saveCredentials(email, password, code); Intent intent = new Intent(LoginActivity.this, MainActivity.class); intent.putExtra(ACCOUNT_RESPONSE_EXTRA, accountResponse); startActivity(intent); finish(); showProgress(false); } @Override public void onAccountRegisterFailed(ApiError e) { Log.v(TAG, "onAccountRegisterFailed"); mPasswordView.setError(getString(R.string.error_fatal)); mPasswordView.requestFocus(); showProgress(false); } });*/ AccountRequest2 model = new AccountRequest2(); model.setUsername(email.trim()); model.setPassword(password.trim()); model.setApplicationCode(code.trim()); Api.Builder.resetDefaultAdminEndpoint(); mApiService.registerAccount2(model, new RegisterAccount2Listener() { @Override public void onAccountRegistered(AccountResponse2 response) { Log.v(TAG, "onAccountRegistered"); saveCredentials(email, password, code); Intent intent = new 
Intent(LoginActivity.this, MainActivity.class); intent.putExtra(ACCOUNT_RESPONSE_EXTRA, response); Api.Builder.resetDefaultGatewayEndpoint(); startActivity(intent); finish(); showProgress(false); } @Override public void onAccountRegisterFailed(ApiError error) { Log.v(TAG, "onAccountRegisterFailed"); mPasswordView.setError(getString(R.string.error_fatal)); mPasswordView.requestFocus(); showProgress(false); } }); } } private boolean isEmailValid(@NonNull String email) { //TODO: Replace this with your own logic return email.contains("@"); } private boolean isPasswordValid(@NonNull String password) { //TODO: Replace this with your own logic return password.length() > 4; } /** * Shows the progress UI and hides the login form. */ @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2) private void showProgress(final boolean show) { // On Honeycomb MR2 we have the ViewPropertyAnimator APIs, which allow // for very easy animations. If available, use these APIs to fade-in // the progress spinner. if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) { int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime); mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE); mLoginFormView.animate().setDuration(shortAnimTime).alpha( show ? 0 : 1).setListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE); } }); mProgressView.setVisibility(show ? View.VISIBLE : View.GONE); mProgressView.animate().setDuration(shortAnimTime).alpha( show ? 1 : 0).setListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { mProgressView.setVisibility(show ? View.VISIBLE : View.GONE); } }); } else { // The ViewPropertyAnimator APIs are not available, so simply show // and hide the relevant UI components. mProgressView.setVisibility(show ? View.VISIBLE : View.GONE); mLoginFormView.setVisibility(show ? 
View.GONE : View.VISIBLE); } } private void saveCredentials(String login, String password, String code) { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this); SharedPreferences.Editor editor = sp.edit(); editor.putString(ACCOUNT_LOGIN_SP_KEY, login); editor.putString(ACCOUNT_PASSWORD_SP_KEY, password); editor.putString(CODE_PASSWORD_SP_KEY, code); editor.commit(); } private void initViews() { SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this); String login = sp.getString(ACCOUNT_LOGIN_SP_KEY, ""); String password = sp.getString(ACCOUNT_PASSWORD_SP_KEY, ""); String code = sp.getString(CODE_PASSWORD_SP_KEY, ""); if (!TextUtils.isEmpty(login) && !TextUtils.isEmpty(password)) { mEmailView.setText(login); mPasswordView.setText(password); mCode.setText(code); attemptLogin(); } //todo: remove mEmailView.setText("ew2018-1011@arrowconnect.io"); mPasswordView.setText("X6c5o8P7J=Fb5f9~"); mCode.setText("JEZ546"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder.endpoint.dsl; import java.util.Map; import javax.annotation.Generated; import org.apache.camel.ExchangePattern; import org.apache.camel.builder.EndpointConsumerBuilder; import org.apache.camel.builder.EndpointProducerBuilder; import org.apache.camel.builder.endpoint.AbstractEndpointBuilder; import org.apache.camel.spi.ExceptionHandler; /** * The azure-eventhubs component that integrates Azure Event Hubs using AMQP * protocol. Azure EventHubs is a highly scalable publish-subscribe service that * can ingest millions of events per second and stream them to multiple * consumers. * * Generated by camel build tools - do NOT edit this file! */ @Generated("org.apache.camel.maven.packaging.EndpointDslMojo") public interface EventHubsEndpointBuilderFactory { /** * Builder for endpoint consumers for the Azure Event Hubs component. */ public interface EventHubsEndpointConsumerBuilder extends EndpointConsumerBuilder { default AdvancedEventHubsEndpointConsumerBuilder advanced() { return (AdvancedEventHubsEndpointConsumerBuilder) this; } /** * Sets the retry policy for EventHubAsyncClient. If not specified, the * default retry options are used. 
* * The option is a: <code>com.azure.core.amqp.AmqpRetryOptions</code> * type. * * Group: common */ default EventHubsEndpointConsumerBuilder amqpRetryOptions( Object amqpRetryOptions) { doSetProperty("amqpRetryOptions", amqpRetryOptions); return this; } /** * Sets the retry policy for EventHubAsyncClient. If not specified, the * default retry options are used. * * The option will be converted to a * <code>com.azure.core.amqp.AmqpRetryOptions</code> type. * * Group: common */ default EventHubsEndpointConsumerBuilder amqpRetryOptions( String amqpRetryOptions) { doSetProperty("amqpRetryOptions", amqpRetryOptions); return this; } /** * Sets the transport type by which all the communication with Azure * Event Hubs occurs. Default value is AmqpTransportType#AMQP. * * The option is a: <code>com.azure.core.amqp.AmqpTransportType</code> * type. * * Default: AMQP * Group: common */ default EventHubsEndpointConsumerBuilder amqpTransportType( AmqpTransportType amqpTransportType) { doSetProperty("amqpTransportType", amqpTransportType); return this; } /** * Sets the transport type by which all the communication with Azure * Event Hubs occurs. Default value is AmqpTransportType#AMQP. * * The option will be converted to a * <code>com.azure.core.amqp.AmqpTransportType</code> type. * * Default: AMQP * Group: common */ default EventHubsEndpointConsumerBuilder amqpTransportType( String amqpTransportType) { doSetProperty("amqpTransportType", amqpTransportType); return this; } /** * Setting the autoDiscoverClient mechanism, if true, the component will * look for a client instance in the registry automatically otherwise it * will skip that checking. * * The option is a: <code>boolean</code> type. 
* * Default: true * Group: common */ default EventHubsEndpointConsumerBuilder autoDiscoverClient( boolean autoDiscoverClient) { doSetProperty("autoDiscoverClient", autoDiscoverClient); return this; } /** * Setting the autoDiscoverClient mechanism, if true, the component will * look for a client instance in the registry automatically otherwise it * will skip that checking. * * The option will be converted to a <code>boolean</code> type. * * Default: true * Group: common */ default EventHubsEndpointConsumerBuilder autoDiscoverClient( String autoDiscoverClient) { doSetProperty("autoDiscoverClient", autoDiscoverClient); return this; } /** * In case you chose the default BlobCheckpointStore, this sets access * key for the associated azure account name to be used for * authentication with azure blob services. * * The option is a: <code>java.lang.String</code> type. * * Group: consumer */ default EventHubsEndpointConsumerBuilder blobAccessKey( String blobAccessKey) { doSetProperty("blobAccessKey", blobAccessKey); return this; } /** * In case you chose the default BlobCheckpointStore, this sets Azure * account name to be used for authentication with azure blob services. * * The option is a: <code>java.lang.String</code> type. * * Group: consumer */ default EventHubsEndpointConsumerBuilder blobAccountName( String blobAccountName) { doSetProperty("blobAccountName", blobAccountName); return this; } /** * In case you chose the default BlobCheckpointStore, this sets the blob * container that shall be used by the BlobCheckpointStore to store the * checkpoint offsets. * * The option is a: <code>java.lang.String</code> type. 
* * Group: consumer */ default EventHubsEndpointConsumerBuilder blobContainerName( String blobContainerName) { doSetProperty("blobContainerName", blobContainerName); return this; } /** * In case you chose the default BlobCheckpointStore, * StorageSharedKeyCredential can be injected to create the azure * client, this holds the important authentication information. * * The option is a: * <code>com.azure.storage.common.StorageSharedKeyCredential</code> * type. * * Group: consumer */ default EventHubsEndpointConsumerBuilder blobStorageSharedKeyCredential( Object blobStorageSharedKeyCredential) { doSetProperty("blobStorageSharedKeyCredential", blobStorageSharedKeyCredential); return this; } /** * In case you chose the default BlobCheckpointStore, * StorageSharedKeyCredential can be injected to create the azure * client, this holds the important authentication information. * * The option will be converted to a * <code>com.azure.storage.common.StorageSharedKeyCredential</code> * type. * * Group: consumer */ default EventHubsEndpointConsumerBuilder blobStorageSharedKeyCredential( String blobStorageSharedKeyCredential) { doSetProperty("blobStorageSharedKeyCredential", blobStorageSharedKeyCredential); return this; } /** * Allows for bridging the consumer to the Camel routing Error Handler, * which mean any exceptions occurred while the consumer is trying to * pickup incoming messages, or the likes, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer will use the org.apache.camel.spi.ExceptionHandler to deal * with exceptions, that will be logged at WARN or ERROR level and * ignored. * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: consumer */ default EventHubsEndpointConsumerBuilder bridgeErrorHandler( boolean bridgeErrorHandler) { doSetProperty("bridgeErrorHandler", bridgeErrorHandler); return this; } /** * Allows for bridging the consumer to the Camel routing Error Handler, * which mean any exceptions occurred while the consumer is trying to * pickup incoming messages, or the likes, will now be processed as a * message and handled by the routing Error Handler. By default the * consumer will use the org.apache.camel.spi.ExceptionHandler to deal * with exceptions, that will be logged at WARN or ERROR level and * ignored. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: consumer */ default EventHubsEndpointConsumerBuilder bridgeErrorHandler( String bridgeErrorHandler) { doSetProperty("bridgeErrorHandler", bridgeErrorHandler); return this; } /** * Sets the CheckpointStore the EventProcessorClient will use for * storing partition ownership and checkpoint information. Users can, * optionally, provide their own implementation of CheckpointStore which * will store ownership and checkpoint information. By default it set to * use * com.azure.messaging.eventhubs.checkpointstore.blob.BlobCheckpointStore which stores all checkpoint offsets into Azure Blob Storage. * * The option is a: * <code>com.azure.messaging.eventhubs.CheckpointStore</code> type. * * Default: BlobCheckpointStore * Group: consumer */ default EventHubsEndpointConsumerBuilder checkpointStore( Object checkpointStore) { doSetProperty("checkpointStore", checkpointStore); return this; } /** * Sets the CheckpointStore the EventProcessorClient will use for * storing partition ownership and checkpoint information. Users can, * optionally, provide their own implementation of CheckpointStore which * will store ownership and checkpoint information. 
By default it set to * use * com.azure.messaging.eventhubs.checkpointstore.blob.BlobCheckpointStore which stores all checkpoint offsets into Azure Blob Storage. * * The option will be converted to a * <code>com.azure.messaging.eventhubs.CheckpointStore</code> type. * * Default: BlobCheckpointStore * Group: consumer */ default EventHubsEndpointConsumerBuilder checkpointStore( String checkpointStore) { doSetProperty("checkpointStore", checkpointStore); return this; } /** * Sets the name of the consumer group this consumer is associated with. * Events are read in the context of this group. The name of the * consumer group that is created by default is {link * #DEFAULT_CONSUMER_GROUP_NAME $Default}. * * The option is a: <code>java.lang.String</code> type. * * Default: $Default * Group: consumer */ default EventHubsEndpointConsumerBuilder consumerGroupName( String consumerGroupName) { doSetProperty("consumerGroupName", consumerGroupName); return this; } /** * Sets the map containing the event position to use for each partition * if a checkpoint for the partition does not exist in CheckpointStore. * This map is keyed off of the partition id. If there is no checkpoint * in CheckpointStore and there is no entry in this map, the processing * of the partition will start from {link EventPosition#latest() latest} * position. * * The option is a: <code>java.util.Map&lt;java.lang.String, * com.azure.messaging.eventhubs.models.EventPosition&gt;</code> type. * * Group: consumer */ default EventHubsEndpointConsumerBuilder eventPosition( Map<String, Object> eventPosition) { doSetProperty("eventPosition", eventPosition); return this; } /** * Sets the map containing the event position to use for each partition * if a checkpoint for the partition does not exist in CheckpointStore. * This map is keyed off of the partition id. 
If there is no checkpoint * in CheckpointStore and there is no entry in this map, the processing * of the partition will start from {link EventPosition#latest() latest} * position. * * The option will be converted to a * <code>java.util.Map&lt;java.lang.String, * com.azure.messaging.eventhubs.models.EventPosition&gt;</code> type. * * Group: consumer */ default EventHubsEndpointConsumerBuilder eventPosition( String eventPosition) { doSetProperty("eventPosition", eventPosition); return this; } /** * Sets the count used by the receiver to control the number of events * the Event Hub consumer will actively receive and queue locally * without regard to whether a receive operation is currently active. * * The option is a: <code>int</code> type. * * Default: 500 * Group: consumer */ default EventHubsEndpointConsumerBuilder prefetchCount(int prefetchCount) { doSetProperty("prefetchCount", prefetchCount); return this; } /** * Sets the count used by the receiver to control the number of events * the Event Hub consumer will actively receive and queue locally * without regard to whether a receive operation is currently active. * * The option will be converted to a <code>int</code> type. * * Default: 500 * Group: consumer */ default EventHubsEndpointConsumerBuilder prefetchCount( String prefetchCount) { doSetProperty("prefetchCount", prefetchCount); return this; } /** * Instead of supplying namespace, sharedAccessKey, sharedAccessName ... * etc, you can just supply the connection string for your eventHub. The * connection string for EventHubs already include all the necessary * information to connection to your EventHub. To learn on how to * generate the connection string, take a look at this documentation: * https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-get-connection-string. * * The option is a: <code>java.lang.String</code> type. 
* * Group: security */ default EventHubsEndpointConsumerBuilder connectionString( String connectionString) { doSetProperty("connectionString", connectionString); return this; } /** * The generated value for the SharedAccessName. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointConsumerBuilder sharedAccessKey( String sharedAccessKey) { doSetProperty("sharedAccessKey", sharedAccessKey); return this; } /** * The name you chose for your EventHubs SAS keys. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointConsumerBuilder sharedAccessName( String sharedAccessName) { doSetProperty("sharedAccessName", sharedAccessName); return this; } } /** * Advanced builder for endpoint consumers for the Azure Event Hubs * component. */ public interface AdvancedEventHubsEndpointConsumerBuilder extends EndpointConsumerBuilder { default EventHubsEndpointConsumerBuilder basic() { return (EventHubsEndpointConsumerBuilder) this; } /** * To let the consumer use a custom ExceptionHandler. Notice if the * option bridgeErrorHandler is enabled then this option is not in use. * By default the consumer will deal with exceptions, that will be * logged at WARN or ERROR level and ignored. * * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code> * type. * * Group: consumer (advanced) */ default AdvancedEventHubsEndpointConsumerBuilder exceptionHandler( ExceptionHandler exceptionHandler) { doSetProperty("exceptionHandler", exceptionHandler); return this; } /** * To let the consumer use a custom ExceptionHandler. Notice if the * option bridgeErrorHandler is enabled then this option is not in use. * By default the consumer will deal with exceptions, that will be * logged at WARN or ERROR level and ignored. * * The option will be converted to a * <code>org.apache.camel.spi.ExceptionHandler</code> type. 
* * Group: consumer (advanced) */ default AdvancedEventHubsEndpointConsumerBuilder exceptionHandler( String exceptionHandler) { doSetProperty("exceptionHandler", exceptionHandler); return this; } /** * Sets the exchange pattern when the consumer creates an exchange. * * The option is a: <code>org.apache.camel.ExchangePattern</code> type. * * Group: consumer (advanced) */ default AdvancedEventHubsEndpointConsumerBuilder exchangePattern( ExchangePattern exchangePattern) { doSetProperty("exchangePattern", exchangePattern); return this; } /** * Sets the exchange pattern when the consumer creates an exchange. * * The option will be converted to a * <code>org.apache.camel.ExchangePattern</code> type. * * Group: consumer (advanced) */ default AdvancedEventHubsEndpointConsumerBuilder exchangePattern( String exchangePattern) { doSetProperty("exchangePattern", exchangePattern); return this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option is a: <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointConsumerBuilder basicPropertyBinding( boolean basicPropertyBinding) { doSetProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointConsumerBuilder basicPropertyBinding( String basicPropertyBinding) { doSetProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: advanced */ default AdvancedEventHubsEndpointConsumerBuilder synchronous( boolean synchronous) { doSetProperty("synchronous", synchronous); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointConsumerBuilder synchronous( String synchronous) { doSetProperty("synchronous", synchronous); return this; } } /** * Builder for endpoint producers for the Azure Event Hubs component. */ public interface EventHubsEndpointProducerBuilder extends EndpointProducerBuilder { default AdvancedEventHubsEndpointProducerBuilder advanced() { return (AdvancedEventHubsEndpointProducerBuilder) this; } /** * Sets the retry policy for EventHubAsyncClient. If not specified, the * default retry options are used. * * The option is a: <code>com.azure.core.amqp.AmqpRetryOptions</code> * type. * * Group: common */ default EventHubsEndpointProducerBuilder amqpRetryOptions( Object amqpRetryOptions) { doSetProperty("amqpRetryOptions", amqpRetryOptions); return this; } /** * Sets the retry policy for EventHubAsyncClient. If not specified, the * default retry options are used. * * The option will be converted to a * <code>com.azure.core.amqp.AmqpRetryOptions</code> type. * * Group: common */ default EventHubsEndpointProducerBuilder amqpRetryOptions( String amqpRetryOptions) { doSetProperty("amqpRetryOptions", amqpRetryOptions); return this; } /** * Sets the transport type by which all the communication with Azure * Event Hubs occurs. Default value is AmqpTransportType#AMQP. * * The option is a: <code>com.azure.core.amqp.AmqpTransportType</code> * type. 
* * Default: AMQP * Group: common */ default EventHubsEndpointProducerBuilder amqpTransportType( AmqpTransportType amqpTransportType) { doSetProperty("amqpTransportType", amqpTransportType); return this; } /** * Sets the transport type by which all the communication with Azure * Event Hubs occurs. Default value is AmqpTransportType#AMQP. * * The option will be converted to a * <code>com.azure.core.amqp.AmqpTransportType</code> type. * * Default: AMQP * Group: common */ default EventHubsEndpointProducerBuilder amqpTransportType( String amqpTransportType) { doSetProperty("amqpTransportType", amqpTransportType); return this; } /** * Setting the autoDiscoverClient mechanism, if true, the component will * look for a client instance in the registry automatically otherwise it * will skip that checking. * * The option is a: <code>boolean</code> type. * * Default: true * Group: common */ default EventHubsEndpointProducerBuilder autoDiscoverClient( boolean autoDiscoverClient) { doSetProperty("autoDiscoverClient", autoDiscoverClient); return this; } /** * Setting the autoDiscoverClient mechanism, if true, the component will * look for a client instance in the registry automatically otherwise it * will skip that checking. * * The option will be converted to a <code>boolean</code> type. * * Default: true * Group: common */ default EventHubsEndpointProducerBuilder autoDiscoverClient( String autoDiscoverClient) { doSetProperty("autoDiscoverClient", autoDiscoverClient); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. 
Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: <code>boolean</code> type. * * Default: false * Group: producer */ default EventHubsEndpointProducerBuilder lazyStartProducer( boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: producer */ default EventHubsEndpointProducerBuilder lazyStartProducer( String lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Sets the identifier of the Event Hub partition that the events will * be sent to. If the identifier is not specified, the Event Hubs * service will be responsible for routing events that are sent to an * available partition. * * The option is a: <code>java.lang.String</code> type. * * Group: producer */ default EventHubsEndpointProducerBuilder partitionId(String partitionId) { doSetProperty("partitionId", partitionId); return this; } /** * Sets a hashing key to be provided for the batch of events, which * instructs the Event Hubs service to map this key to a specific * partition. The selection of a partition is stable for a given * partition hashing key. 
Should any other batches of events be sent * using the same exact partition hashing key, the Event Hubs service * will route them all to the same partition. This should be specified * only when there is a need to group events by partition, but there is * flexibility into which partition they are routed. If ensuring that a * batch of events is sent only to a specific partition, it is * recommended that the {link #setPartitionId(String) identifier of the * position be specified directly} when sending the batch. * * The option is a: <code>java.lang.String</code> type. * * Group: producer */ default EventHubsEndpointProducerBuilder partitionKey( String partitionKey) { doSetProperty("partitionKey", partitionKey); return this; } /** * Sets the EventHubProducerAsyncClient.An asynchronous producer * responsible for transmitting EventData to a specific Event Hub, * grouped together in batches. Depending on the options specified when * creating an {linkEventDataBatch}, the events may be automatically * routed to an available partition or specific to a partition. Use by * this component to produce the data in camel producer. * * The option is a: * <code>com.azure.messaging.eventhubs.EventHubProducerAsyncClient</code> type. * * Group: producer */ default EventHubsEndpointProducerBuilder producerAsyncClient( Object producerAsyncClient) { doSetProperty("producerAsyncClient", producerAsyncClient); return this; } /** * Sets the EventHubProducerAsyncClient.An asynchronous producer * responsible for transmitting EventData to a specific Event Hub, * grouped together in batches. Depending on the options specified when * creating an {linkEventDataBatch}, the events may be automatically * routed to an available partition or specific to a partition. Use by * this component to produce the data in camel producer. * * The option will be converted to a * <code>com.azure.messaging.eventhubs.EventHubProducerAsyncClient</code> type. 
* * Group: producer */ default EventHubsEndpointProducerBuilder producerAsyncClient( String producerAsyncClient) { doSetProperty("producerAsyncClient", producerAsyncClient); return this; } /** * Instead of supplying namespace, sharedAccessKey, sharedAccessName ... * etc, you can just supply the connection string for your eventHub. The * connection string for EventHubs already include all the necessary * information to connection to your EventHub. To learn on how to * generate the connection string, take a look at this documentation: * https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-get-connection-string. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointProducerBuilder connectionString( String connectionString) { doSetProperty("connectionString", connectionString); return this; } /** * The generated value for the SharedAccessName. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointProducerBuilder sharedAccessKey( String sharedAccessKey) { doSetProperty("sharedAccessKey", sharedAccessKey); return this; } /** * The name you chose for your EventHubs SAS keys. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointProducerBuilder sharedAccessName( String sharedAccessName) { doSetProperty("sharedAccessName", sharedAccessName); return this; } } /** * Advanced builder for endpoint producers for the Azure Event Hubs * component. */ public interface AdvancedEventHubsEndpointProducerBuilder extends EndpointProducerBuilder { default EventHubsEndpointProducerBuilder basic() { return (EventHubsEndpointProducerBuilder) this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: advanced */ default AdvancedEventHubsEndpointProducerBuilder basicPropertyBinding( boolean basicPropertyBinding) { doSetProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointProducerBuilder basicPropertyBinding( String basicPropertyBinding) { doSetProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option is a: <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointProducerBuilder synchronous( boolean synchronous) { doSetProperty("synchronous", synchronous); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointProducerBuilder synchronous( String synchronous) { doSetProperty("synchronous", synchronous); return this; } } /** * Builder for endpoint for the Azure Event Hubs component. */ public interface EventHubsEndpointBuilder extends EventHubsEndpointConsumerBuilder, EventHubsEndpointProducerBuilder { default AdvancedEventHubsEndpointBuilder advanced() { return (AdvancedEventHubsEndpointBuilder) this; } /** * Sets the retry policy for EventHubAsyncClient. If not specified, the * default retry options are used. * * The option is a: <code>com.azure.core.amqp.AmqpRetryOptions</code> * type. 
* * Group: common */ default EventHubsEndpointBuilder amqpRetryOptions( Object amqpRetryOptions) { doSetProperty("amqpRetryOptions", amqpRetryOptions); return this; } /** * Sets the retry policy for EventHubAsyncClient. If not specified, the * default retry options are used. * * The option will be converted to a * <code>com.azure.core.amqp.AmqpRetryOptions</code> type. * * Group: common */ default EventHubsEndpointBuilder amqpRetryOptions( String amqpRetryOptions) { doSetProperty("amqpRetryOptions", amqpRetryOptions); return this; } /** * Sets the transport type by which all the communication with Azure * Event Hubs occurs. Default value is AmqpTransportType#AMQP. * * The option is a: <code>com.azure.core.amqp.AmqpTransportType</code> * type. * * Default: AMQP * Group: common */ default EventHubsEndpointBuilder amqpTransportType( AmqpTransportType amqpTransportType) { doSetProperty("amqpTransportType", amqpTransportType); return this; } /** * Sets the transport type by which all the communication with Azure * Event Hubs occurs. Default value is AmqpTransportType#AMQP. * * The option will be converted to a * <code>com.azure.core.amqp.AmqpTransportType</code> type. * * Default: AMQP * Group: common */ default EventHubsEndpointBuilder amqpTransportType( String amqpTransportType) { doSetProperty("amqpTransportType", amqpTransportType); return this; } /** * Setting the autoDiscoverClient mechanism, if true, the component will * look for a client instance in the registry automatically otherwise it * will skip that checking. * * The option is a: <code>boolean</code> type. * * Default: true * Group: common */ default EventHubsEndpointBuilder autoDiscoverClient( boolean autoDiscoverClient) { doSetProperty("autoDiscoverClient", autoDiscoverClient); return this; } /** * Setting the autoDiscoverClient mechanism, if true, the component will * look for a client instance in the registry automatically otherwise it * will skip that checking. 
* * The option will be converted to a <code>boolean</code> type. * * Default: true * Group: common */ default EventHubsEndpointBuilder autoDiscoverClient( String autoDiscoverClient) { doSetProperty("autoDiscoverClient", autoDiscoverClient); return this; } /** * Instead of supplying namespace, sharedAccessKey, sharedAccessName ... * etc, you can just supply the connection string for your eventHub. The * connection string for EventHubs already include all the necessary * information to connection to your EventHub. To learn on how to * generate the connection string, take a look at this documentation: * https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-get-connection-string. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointBuilder connectionString( String connectionString) { doSetProperty("connectionString", connectionString); return this; } /** * The generated value for the SharedAccessName. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointBuilder sharedAccessKey(String sharedAccessKey) { doSetProperty("sharedAccessKey", sharedAccessKey); return this; } /** * The name you chose for your EventHubs SAS keys. * * The option is a: <code>java.lang.String</code> type. * * Group: security */ default EventHubsEndpointBuilder sharedAccessName( String sharedAccessName) { doSetProperty("sharedAccessName", sharedAccessName); return this; } } /** * Advanced builder for endpoint for the Azure Event Hubs component. */ public interface AdvancedEventHubsEndpointBuilder extends AdvancedEventHubsEndpointConsumerBuilder, AdvancedEventHubsEndpointProducerBuilder { default EventHubsEndpointBuilder basic() { return (EventHubsEndpointBuilder) this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option is a: <code>boolean</code> type. 
* * Default: false * Group: advanced */ default AdvancedEventHubsEndpointBuilder basicPropertyBinding( boolean basicPropertyBinding) { doSetProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Whether the endpoint should use basic property binding (Camel 2.x) or * the newer property binding with additional capabilities. * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointBuilder basicPropertyBinding( String basicPropertyBinding) { doSetProperty("basicPropertyBinding", basicPropertyBinding); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option is a: <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointBuilder synchronous(boolean synchronous) { doSetProperty("synchronous", synchronous); return this; } /** * Sets whether synchronous processing should be strictly used, or Camel * is allowed to use asynchronous processing (if supported). * * The option will be converted to a <code>boolean</code> type. * * Default: false * Group: advanced */ default AdvancedEventHubsEndpointBuilder synchronous(String synchronous) { doSetProperty("synchronous", synchronous); return this; } } /** * Proxy enum for <code>com.azure.core.amqp.AmqpTransportType</code> enum. */ enum AmqpTransportType { AMQP, AMQP_WEB_SOCKETS; } public interface EventHubsBuilders { /** * Azure Event Hubs (camel-azure-eventhubs) * The azure-eventhubs component that integrates Azure Event Hubs using * AMQP protocol. Azure EventHubs is a highly scalable publish-subscribe * service that can ingest millions of events per second and stream them * to multiple consumers. 
* * Category: cloud,messaging * Since: 3.5 * Maven coordinates: org.apache.camel:camel-azure-eventhubs * * Syntax: <code>azure-eventhubs:namespace/eventHubName</code> * * Path parameter: namespace * EventHubs namespace created in Azure Portal * * Path parameter: eventHubName * EventHubs name under a specific namcespace * * @param path namespace/eventHubName */ default EventHubsEndpointBuilder azureEventhubs(String path) { return EventHubsEndpointBuilderFactory.endpointBuilder("azure-eventhubs", path); } /** * Azure Event Hubs (camel-azure-eventhubs) * The azure-eventhubs component that integrates Azure Event Hubs using * AMQP protocol. Azure EventHubs is a highly scalable publish-subscribe * service that can ingest millions of events per second and stream them * to multiple consumers. * * Category: cloud,messaging * Since: 3.5 * Maven coordinates: org.apache.camel:camel-azure-eventhubs * * Syntax: <code>azure-eventhubs:namespace/eventHubName</code> * * Path parameter: namespace * EventHubs namespace created in Azure Portal * * Path parameter: eventHubName * EventHubs name under a specific namcespace * * @param componentName to use a custom component name for the endpoint * instead of the default name * @param path namespace/eventHubName */ default EventHubsEndpointBuilder azureEventhubs( String componentName, String path) { return EventHubsEndpointBuilderFactory.endpointBuilder(componentName, path); } } static EventHubsEndpointBuilder endpointBuilder( String componentName, String path) { class EventHubsEndpointBuilderImpl extends AbstractEndpointBuilder implements EventHubsEndpointBuilder, AdvancedEventHubsEndpointBuilder { public EventHubsEndpointBuilderImpl(String path) { super(componentName, path); } } return new EventHubsEndpointBuilderImpl(path); } }
package com.draagon.meta.util;

import com.draagon.meta.loader.LoaderOptions;
import org.junit.Test;

import java.util.Date;

import static org.junit.Assert.*;

/**
 * Unit tests for {@link DataConverter}: verifies conversions between the supported
 * types (byte/short/int/long/float, Date, String) and that out-of-range or
 * unparseable values raise {@link NumberFormatException}.
 *
 * The original failure tests nested each conversion inside the previous one's
 * catch block (an 8-deep try/catch pyramid); they are flattened here into
 * independent sequential checks with identical pass/fail behavior.
 */
public class DataConverterTests {

    @Test
    public void testByteConvert() {
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte((byte) 5));
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte((short) 5));
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte(5));
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte((long) 5));
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte((float) 5));
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte((double) 5));
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte(new Date(5)));
        assertEquals(Byte.valueOf((byte) 5), DataConverter.toByte("5"));
        // An empty string converts to null rather than throwing.
        assertNull(DataConverter.toByte(""));
    }

    @Test
    public void testByteFails() {
        // Each out-of-range or unconvertible value must raise NumberFormatException.
        try {
            DataConverter.toByte(Short.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toByte(Integer.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toByte(Long.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toByte(Float.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toByte(Double.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toByte(new Date((long) Byte.MAX_VALUE + 1L));
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toByte("fail");
            fail("Convert should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toByte(new LoaderOptions());
            fail("Convert should fail");
        } catch (Exception e) {
            // Unsupported object types must also surface as NumberFormatException.
            assertTrue(e instanceof NumberFormatException);
        }
    }

    @Test
    public void testShortConvert() {
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort((byte) 5));
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort((short) 5));
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort(5));
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort((long) 5));
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort((float) 5));
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort((double) 5));
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort(new Date(5)));
        assertEquals(Short.valueOf((short) 5), DataConverter.toShort("5"));
        assertNull(DataConverter.toShort(""));
    }

    @Test
    public void testShortFails() {
        try {
            DataConverter.toShort(Integer.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toShort(Long.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toShort(Float.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toShort(Double.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toShort(new Date((long) Short.MAX_VALUE + 1L));
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toShort("fail");
            fail("Convert should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toShort(new LoaderOptions());
            fail("Convert should fail");
        } catch (Exception e) {
            assertTrue(e instanceof NumberFormatException);
        }
    }

    @Test
    public void testIntConvert() {
        assertEquals(Integer.valueOf(5), DataConverter.toInt((byte) 5));
        assertEquals(Integer.valueOf(5), DataConverter.toInt((short) 5));
        assertEquals(Integer.valueOf(5), DataConverter.toInt(5));
        assertEquals(Integer.valueOf(5), DataConverter.toInt((long) 5));
        assertEquals(Integer.valueOf(5), DataConverter.toInt((float) 5));
        assertEquals(Integer.valueOf(5), DataConverter.toInt((double) 5));
        // A Date whose epoch-millis value fits in an int converts losslessly.
        assertEquals(Integer.valueOf(Integer.MAX_VALUE - 100),
                DataConverter.toInt(new Date(Integer.MAX_VALUE - 100)));
        assertEquals(Integer.valueOf(5), DataConverter.toInt("5"));
        assertNull(DataConverter.toInt(""));
    }

    @Test
    public void testIntFails() {
        try {
            DataConverter.toInt(Long.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toInt(Float.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toInt(Double.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toInt(new Date((long) Integer.MAX_VALUE + 1L));
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toInt("fail");
            fail("Convert should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toInt(new LoaderOptions());
            fail("Convert should fail");
        } catch (Exception e) {
            assertTrue(e instanceof NumberFormatException);
        }
    }

    @Test
    public void testLongConvert() {
        assertEquals(Long.valueOf(5), DataConverter.toLong((byte) 5));
        assertEquals(Long.valueOf(5), DataConverter.toLong((short) 5));
        assertEquals(Long.valueOf(5), DataConverter.toLong(5));
        assertEquals(Long.valueOf(5), DataConverter.toLong((long) 5));
        assertEquals(Long.valueOf(5), DataConverter.toLong((float) 5));
        assertEquals(Long.valueOf(5), DataConverter.toLong((double) 5));
        // Floats/doubles near Long.MIN_VALUE should round-trip through toLong
        // without losing the (float/double-representable) value.
        assertEquals(Float.valueOf((float) Long.MIN_VALUE + 1f),
                Float.valueOf((float) DataConverter.toLong(Float.valueOf((float) Long.MIN_VALUE + 1f))));
        assertEquals(Float.valueOf((float) Long.MIN_VALUE - 1f),
                Float.valueOf((float) DataConverter.toLong(Float.valueOf((float) Long.MIN_VALUE - 1f))));
        assertEquals(Double.valueOf((double) Long.MIN_VALUE - 1d),
                Double.valueOf((double) DataConverter.toLong(Double.valueOf((double) Long.MIN_VALUE - 1d))));
        assertEquals(Long.valueOf(Long.MAX_VALUE - 100L),
                DataConverter.toLong(new Date(Long.MAX_VALUE - 100)));
        assertEquals(Long.valueOf(5), DataConverter.toLong("5"));
        assertNull(DataConverter.toLong(""));
    }

    @Test
    public void testLongFails() {
        try {
            DataConverter.toLong(Double.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toLong("fail");
            fail("Convert should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toLong(new LoaderOptions());
            fail("Convert should fail");
        } catch (Exception e) {
            assertTrue(e instanceof NumberFormatException);
        }
    }

    @Test
    public void testFloatConvert() {
        assertEquals(Float.valueOf(5), DataConverter.toFloat((byte) 5));
        assertEquals(Float.valueOf(5), DataConverter.toFloat((short) 5));
        assertEquals(Float.valueOf(5), DataConverter.toFloat(5));
        assertEquals(Float.valueOf(Long.MAX_VALUE), DataConverter.toFloat(Long.MAX_VALUE));
        assertEquals(Float.valueOf(5), DataConverter.toFloat((float) 5));
        assertEquals(Float.valueOf(5), DataConverter.toFloat((double) 5));
        assertEquals(Double.valueOf(-5.5d), Double.valueOf(DataConverter.toFloat((double) -5.5d)));
        assertEquals(Float.valueOf(Long.MAX_VALUE - 100L),
                Float.valueOf((float) DataConverter.toFloat(new Date(Long.MAX_VALUE - 100L))));
        assertEquals(Float.valueOf(Float.MAX_VALUE - 100f),
                Float.valueOf((float) DataConverter.toFloat(Float.MAX_VALUE - 100f)));
        assertEquals(Float.valueOf(5), DataConverter.toFloat("5"));
        assertEquals(Float.valueOf(5.5f), DataConverter.toFloat("5.5"));
        // Fixed copy-paste bug: the original asserted toInt("") in the float test.
        assertNull(DataConverter.toFloat(""));
    }

    @Test
    public void testFloatFails() {
        try {
            DataConverter.toFloat("fail");
            fail("Convert should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toFloat(new LoaderOptions());
            fail("Convert should fail");
        } catch (Exception e) {
            assertTrue(e instanceof NumberFormatException);
        }
    }

    @Test
    public void testDateConvert() {
        assertEquals(new Date(5), DataConverter.toDate((byte) 5));
        assertEquals(new Date(5), DataConverter.toDate((short) 5));
        assertEquals(new Date(5), DataConverter.toDate(5));
        assertEquals(new Date(5), DataConverter.toDate((long) 5));
        assertEquals(new Date(5), DataConverter.toDate((float) 5));
        assertEquals(new Date(5), DataConverter.toDate((double) 5));
        assertEquals(new Date(Long.MAX_VALUE - 100L),
                DataConverter.toDate(new Date(Long.MAX_VALUE - 100)));
        assertEquals(new Date(23412341234L), DataConverter.toDate("23412341234"));
        // Fixed copy-paste bug: the original asserted toLong("") in the date test.
        assertNull(DataConverter.toDate(""));
    }

    @Test
    public void testDateFails() {
        try {
            DataConverter.toDate(Double.MAX_VALUE);
            fail("Max value should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toDate("fail");
            fail("Convert should fail");
        } catch (NumberFormatException expected) {
            // expected
        }
        try {
            DataConverter.toDate(new LoaderOptions());
            fail("Convert should fail");
        } catch (Exception e) {
            assertTrue(e instanceof NumberFormatException);
        }
    }

    @Test
    public void testStringConvert() {
        assertEquals("5", DataConverter.toString((byte) 5));
        assertEquals("5", DataConverter.toString((short) 5));
        assertEquals("5", DataConverter.toString(5));
        assertEquals("5", DataConverter.toString((long) 5));
        assertEquals("" + Long.MAX_VALUE, DataConverter.toString(Long.MAX_VALUE));
        assertEquals("5.0", DataConverter.toString((float) 5));
        assertEquals("" + Float.MAX_VALUE, DataConverter.toString(Float.MAX_VALUE));
        assertEquals("5.0", DataConverter.toString((double) 5));
        assertEquals("" + Double.MAX_VALUE, DataConverter.toString(Double.MAX_VALUE));
        // Dates stringify as their epoch-millis value.
        assertEquals("" + (Long.MAX_VALUE - 100),
                DataConverter.toString(new Date(Long.MAX_VALUE - 100)));
        assertEquals("5", DataConverter.toString("5"));
        assertEquals("", DataConverter.toString(""));
        // Arbitrary objects fall back to their own toString().
        LoaderOptions o = new LoaderOptions();
        assertEquals("" + o, DataConverter.toString(o));
        assertNull(DataConverter.toString(null));
    }

    // TODO: Add ArrayList tests
}
/* $Id$ */

package com.zoho.books.model;

import org.json.JSONObject;

/**
 * Model object for a single line item as used by invoices, expenses, journals,
 * and sales orders in the Zoho Books API. Plain mutable bean: every field has a
 * getter and setter; {@link #toJSON()} serializes the subset of fields the API
 * accepts on write.
 */
public class LineItem {

    private String taxName = "";
    private String taxType = "";
    private String expenseId = "";
    private String expenseItemId = "";
    private String lineItemId = "";
    private double discount = 0.00;
    private String expenseReceiptName = "";
    private String unit = "";
    private int itemOrder = 0;
    private double rate = 0.00;
    private double bcyRate = 0.00;
    private String itemId = "";
    private String timeEntryIds = "";
    private String description = "";
    private String projectId = "";
    private String name = "";
    private String taxId = "";
    private double quantity = 0.00;
    private double taxPercentage = 0.00;
    private double itemTotal = 0.00;
    private String accountId = "";
    private String accountName = "";
    private String lineId = "";
    private String debitOrCredit = "";
    private double amount = 0.00;
    private double discountAmount;
    private String taxExemptionId;
    private String salesorderItemId;
    private String stockOnHand;
    private boolean isInvoiced;

    /**
     * Set the tax name.
     * @param taxName Name of the tax or tax group applied to the line item.
     */
    public void setTaxName(String taxName) {
        this.taxName = taxName;
    }

    /**
     * Get the tax name.
     * @return Returns the name of the tax or tax group applied to the line item.
     */
    public String getTaxName() {
        return taxName;
    }

    /**
     * Set the tax type.
     * @param taxType Type of the tax or tax group applied to the line item.
     */
    public void setTaxType(String taxType) {
        this.taxType = taxType;
    }

    /**
     * Get the tax type.
     * @return Returns the type of the tax or tax group applied to the line item.
     */
    public String getTaxType() {
        return taxType;
    }

    /**
     * Set the expense id.
     * @param expenseId ID of the expense which is invoiced.
     */
    public void setExpenseId(String expenseId) {
        this.expenseId = expenseId;
    }

    /**
     * Get the expense id.
     * @return Returns the id of the expense which is invoiced.
     */
    public String getExpenseId() {
        return expenseId;
    }

    /**
     * Set the expense item id.
     * @param expenseItemId ID of the expense item which is invoiced.
     */
    public void setExpenseItemId(String expenseItemId) {
        this.expenseItemId = expenseItemId;
    }

    /**
     * Get the expense item id.
     * @return Returns the id of the expense item which is invoiced.
     */
    public String getExpenseItemId() {
        return expenseItemId;
    }

    /**
     * Set the line item id.
     * @param lineItemId ID of the line item.
     */
    public void setLineItemId(String lineItemId) {
        this.lineItemId = lineItemId;
    }

    /**
     * Get the line item id.
     * @return Returns the id of the line item.
     */
    public String getLineItemId() {
        return lineItemId;
    }

    /**
     * Set the discount.
     * @param discount Discount applied to the line item, either in % or in amount (e.g. 12.5% or 190).
     */
    public void setDiscount(double discount) {
        this.discount = discount;
    }

    /**
     * Get the discount.
     * @return Returns the discount applied to the line item.
     */
    public double getDiscount() {
        return discount;
    }

    /**
     * Set the expense receipt name.
     * @param expenseReceiptName Expense receipt name for the line item.
     */
    public void setExpenseReceiptName(String expenseReceiptName) {
        this.expenseReceiptName = expenseReceiptName;
    }

    /**
     * Get the expense receipt name.
     * @return Returns the expense receipt name for the line item.
     */
    public String getExpenseReceiptName() {
        return expenseReceiptName;
    }

    /**
     * Set the unit.
     * @param unit Unit of the line item, e.g. kgs, Nos.
     */
    public void setUnit(String unit) {
        this.unit = unit;
    }

    /**
     * Get the unit.
     * @return Returns the unit of the line item.
     */
    public String getUnit() {
        return unit;
    }

    /**
     * Set the item order.
     * @param itemOrder Order of the line item.
     */
    public void setItemOrder(int itemOrder) {
        this.itemOrder = itemOrder;
    }

    /**
     * Get the item order.
     * @return Returns the order of the line item.
     */
    public int getItemOrder() {
        return itemOrder;
    }

    /**
     * Set the rate.
     * @param rate Rate of the line item.
     */
    public void setRate(double rate) {
        this.rate = rate;
    }

    /**
     * Get the rate.
     * @return Returns the rate of the line item.
     */
    public double getRate() {
        return rate;
    }

    /**
     * Set the bcy (base currency) rate.
     * @param bcyRate Bcy rate of the line item.
     */
    public void setBcyRate(double bcyRate) {
        this.bcyRate = bcyRate;
    }

    /**
     * Get the bcy (base currency) rate.
     * @return Returns the bcy rate of the line item.
     */
    public double getBcyRate() {
        return bcyRate;
    }

    /**
     * Set the item id.
     * @param itemId ID of the item.
     */
    public void setItemId(String itemId) {
        this.itemId = itemId;
    }

    /**
     * Get the item id.
     * @return Returns the id of the item.
     */
    public String getItemId() {
        return itemId;
    }

    /**
     * Set the time entry ids.
     * @param timeEntryIds IDs of the time entries associated with the project.
     */
    public void setTimeEntryIds(String timeEntryIds) {
        this.timeEntryIds = timeEntryIds;
    }

    /**
     * Get the time entry ids.
     * @return Returns the IDs of the time entries associated with the project.
     */
    public String getTimeEntryIds() {
        return timeEntryIds;
    }

    /**
     * Set the description.
     * @param description Description of the line item.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * Get the description.
     * @return Returns the description of the line item.
     */
    public String getDescription() {
        return description;
    }

    /**
     * Set the project id.
     * @param projectId ID of the project.
     */
    public void setProjectId(String projectId) {
        this.projectId = projectId;
    }

    /**
     * Get the project id.
     * @return Returns the ID of the project.
     */
    public String getProjectId() {
        return projectId;
    }

    /**
     * Set the name.
     * @param name Name of the line item.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Get the name.
     * @return Returns the name of the line item.
     */
    public String getName() {
        return name;
    }

    /**
     * Set the tax id.
     * @param taxId ID of the tax or tax group applied to the line item.
     */
    public void setTaxId(String taxId) {
        this.taxId = taxId;
    }

    /**
     * Get the tax id.
     * @return Returns the id of the tax or tax group applied to the line item.
     */
    public String getTaxId() {
        return taxId;
    }

    /**
     * Set the quantity.
     * @param quantity Quantity of the line item.
     */
    public void setQuantity(double quantity) {
        this.quantity = quantity;
    }

    /**
     * Get the quantity.
     * @return Returns the quantity of the line item.
     */
    public double getQuantity() {
        return quantity;
    }

    /**
     * Set the tax percentage.
     * @param taxPercentage Percentage of the tax or tax group applied to the line item.
     */
    public void setTaxPercentage(double taxPercentage) {
        this.taxPercentage = taxPercentage;
    }

    /**
     * Get the tax percentage.
     * @return Returns the percentage of the tax or tax group applied to the line item.
     */
    public double getTaxPercentage() {
        return taxPercentage;
    }

    /**
     * Set the item total.
     * @param itemTotal Total of the line item.
     */
    public void setItemTotal(double itemTotal) {
        this.itemTotal = itemTotal;
    }

    /**
     * Get the item total.
     * @return Returns the total of the line item.
     */
    public double getItemTotal() {
        return itemTotal;
    }

    /**
     * Set the account id.
     * @param accountId ID of the account associated with the line item.
     */
    public void setAccountId(String accountId) {
        this.accountId = accountId;
    }

    /**
     * Get the account id.
     * @return Returns the ID of the account associated with the line item.
     */
    public String getAccountId() {
        return accountId;
    }

    /**
     * Set the account name.
     * @param accountName Name of the account associated with the line item.
     */
    public void setAccountName(String accountName) {
        this.accountName = accountName;
    }

    /**
     * Get the account name.
     * @return Returns the name of the account associated with the line item.
     */
    public String getAccountName() {
        return accountName;
    }

    /**
     * Set the line id.
     * @param lineId ID of the line item.
     */
    public void setLineId(String lineId) {
        this.lineId = lineId;
    }

    /**
     * Get the line id.
     * @return Returns the ID of the line item.
     */
    public String getLineId() {
        return lineId;
    }

    /**
     * Set the debit or credit flag.
     * @param debitOrCredit Whether the account needs to be debited or credited.
     */
    public void setDebitOrCredit(String debitOrCredit) {
        this.debitOrCredit = debitOrCredit;
    }

    /**
     * Get the debit or credit flag.
     * @return Returns whether the account needs to be debited or credited.
     */
    public String getDebitOrCredit() {
        return debitOrCredit;
    }

    /**
     * Set the amount.
     * @param amount Amount to be recorded for the journal.
     */
    public void setAmount(double amount) {
        this.amount = amount;
    }

    /**
     * Get the amount.
     * @return Returns the amount to be recorded for the journal.
     */
    public double getAmount() {
        return amount;
    }

    /**
     * Get the discount amount.
     * @return Returns the discount amount.
     */
    public double getDiscountAmount() {
        return discountAmount;
    }

    /**
     * Set the discount amount.
     * @param discountAmount Discount amount of the item.
     */
    public void setDiscountAmount(double discountAmount) {
        this.discountAmount = discountAmount;
    }

    /**
     * Get the tax exemption id (US Edition only).
     * @return Returns the tax exemption id.
     */
    public String getTaxExemptionId() {
        return taxExemptionId;
    }

    /**
     * Set the tax exemption id (US Edition only).
     * @param taxExemptionId Tax exemption id for US Edition.
     */
    public void setTaxExemptionId(String taxExemptionId) {
        this.taxExemptionId = taxExemptionId;
    }

    /**
     * Get the sales order item id.
     * @return Returns the sales order item id.
     */
    public String getSalesorderItemId() {
        return salesorderItemId;
    }

    /**
     * Set the sales order item id.
     * @param salesorderItemId ID of the sales order item.
     */
    public void setSalesorderItemId(String salesorderItemId) {
        this.salesorderItemId = salesorderItemId;
    }

    /**
     * Get the stock on hand.
     * @return Returns the stock on hand.
     */
    public String getStockOnHand() {
        return stockOnHand;
    }

    /**
     * Set the stock on hand.
     * @param stockOnHand Stock on hand.
     */
    public void setStockOnHand(String stockOnHand) {
        this.stockOnHand = stockOnHand;
    }

    /**
     * Get whether the item is invoiced or not.
     * @return Returns true if the item is invoiced else returns false.
     */
    public boolean isInvoiced() {
        return isInvoiced;
    }

    /**
     * Set whether the item is invoiced or not.
     * @param isInvoiced True or false to set the item is invoiced or not.
     */
    public void setInvoiced(boolean isInvoiced) {
        this.isInvoiced = isInvoiced;
    }

    /**
     * Convert this line item into a JSONObject suitable for sending to the API.
     * String fields are included only when non-null and non-empty (except tax_id,
     * which is sent whenever non-null — preserved from the original behavior so
     * an empty tax id can still be transmitted).
     *
     * @return Returns a JSONObject with the writable fields of this line item.
     * @throws Exception if a value cannot be stored in the JSONObject.
     */
    public JSONObject toJSON() throws Exception {
        JSONObject jsonObject = new JSONObject();
        if (itemId != null && !itemId.equals("")) {
            jsonObject.put("item_id", itemId);
        }
        if (lineItemId != null && !lineItemId.equals("")) {
            jsonObject.put("line_item_id", lineItemId);
        }
        if (accountId != null && !accountId.equals("")) {
            jsonObject.put("account_id", accountId);
        }
        if (projectId != null && !projectId.equals("")) {
            jsonObject.put("project_id", projectId);
        }
        if (expenseId != null && !expenseId.equals("")) {
            jsonObject.put("expense_id", expenseId);
        }
        if (name != null && !name.equals("")) {
            jsonObject.put("name", name);
        }
        if (description != null && !description.equals("")) {
            jsonObject.put("description", description);
        }
        // item_order, rate, quantity and discount are primitives; the original
        // boxed-null checks (e.g. "(Integer)itemOrder != null") were always true,
        // so these fields are serialized unconditionally.
        jsonObject.put("item_order", itemOrder);
        jsonObject.put("rate", rate);
        if (unit != null && !unit.equals("")) {
            jsonObject.put("unit", unit);
        }
        jsonObject.put("quantity", quantity);
        jsonObject.put("discount", discount);
        if (taxId != null) {
            jsonObject.put("tax_id", taxId);
        }
        if (debitOrCredit != null && !debitOrCredit.equals("")) {
            jsonObject.put("debit_or_credit", debitOrCredit);
        }
        if (taxExemptionId != null && !taxExemptionId.equals("")) {
            jsonObject.put("tax_exemption_id", taxExemptionId);
        }
        return jsonObject;
    }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.jps.intellilang.instrumentation;

import com.intellij.compiler.instrumentation.InstrumentationClassFinder;
import com.intellij.openapi.util.Ref;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.asm4.*;
import org.jetbrains.jps.intellilang.model.InstrumentationException;

import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.regex.Pattern;

/**
 * ASM class visitor that rewrites classes for IntelliLang pattern validation.
 * It collects every regex pattern used by the class (via {@link #addPattern}),
 * stores the compiled patterns in a synthetic static field
 * ({@link #PATTERN_CACHE_NAME}) initialized from the class's static initializer,
 * and delegates per-method instrumentation to {@code InstrumentationAdapter}
 * (defined elsewhere). Exceptions raised during method instrumentation are
 * postponed and only thrown once the class is known to be instrumented.
 */
class PatternInstrumenter extends ClassVisitor implements Opcodes {
  @NonNls static final String PATTERN_CACHE_NAME = "$_PATTERN_CACHE_$";
  @NonNls static final String ASSERTIONS_DISABLED_NAME = "$assertionsDisabled";
  @NonNls static final String JAVA_LANG_STRING = "Ljava/lang/String;";
  // JVM descriptor of the synthetic cache field: Pattern[]
  @NonNls static final String JAVA_UTIL_REGEX_PATTERN = "[Ljava/util/regex/Pattern;";

  private boolean myHasAssertions;          // class already declares $assertionsDisabled
  private boolean myHasStaticInitializer;   // class already has a <clinit>
  // insertion-ordered so a pattern's index is stable; index is baked into bytecode
  private final LinkedHashSet<String> myPatterns = new LinkedHashSet<String>();

  private final String myPatternAnnotationClassName;
  final InstrumentationType myInstrumentationType;
  private final InstrumentationClassFinder myClassFinder;
  private final Map<String, String> myAnnotationNameToPatternMap = new HashMap<String, String>(); // can contain null values!
  private final Set<String> myProcessedAnnotations = new HashSet<String>(); // checked annotation classes

  String myClassName;
  private boolean myInstrumented;           // true once any method was actually instrumented
  private RuntimeException myPostponedError;
  boolean myIsNonStaticInnerClass;

  public PatternInstrumenter(@NotNull String patternAnnotationClassName,
                             ClassVisitor classvisitor,
                             InstrumentationType instrumentation,
                             InstrumentationClassFinder classFinder) {
    super(Opcodes.ASM4, classvisitor);
    myPatternAnnotationClassName = patternAnnotationClassName;
    myInstrumentationType = instrumentation;
    myClassFinder = classFinder;
    // initial setup: null value means we should discover the pattern string 'inplace'
    myAnnotationNameToPatternMap.put(patternAnnotationClassName, null);
    myProcessedAnnotations.add(patternAnnotationClassName);
  }

  /** @return true if at least one method of the class was instrumented. */
  public boolean instrumented() {
    return myInstrumented;
  }

  /** Marks the class as instrumented and flushes any error postponed so far. */
  void markInstrumented() {
    myInstrumented = true;
    processPostponedErrors();
  }

  public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
    super.visit(version, access, name, signature, superName, interfaces);
    myClassName = name;
  }

  public void visitInnerClass(String name, String outerName, String innerName, int access) {
    super.visitInnerClass(name, outerName, innerName, access);
    // remember whether the class being visited is itself a non-static inner class
    if (myClassName.equals(name)) {
      myIsNonStaticInnerClass = (access & ACC_STATIC) == 0;
    }
  }

  /**
   * Tracks existing fields; refuses to instrument twice (presence of the
   * synthetic pattern-cache field means the class was already processed).
   */
  public FieldVisitor visitField(final int access, final String name, final String desc, final String signature, final Object value) {
    if (name.equals(ASSERTIONS_DISABLED_NAME)) {
      myHasAssertions = true;
    }
    else if (name.equals(PATTERN_CACHE_NAME)) {
      throw new InstrumentationException("Error: Processing an already instrumented class: " + myClassName + ". Please recompile the affected class(es) or rebuild the project.");
    }
    return super.visitField(access, name, desc, signature, value);
  }

  /**
   * At the end of the class, adds the synthetic fields (pattern cache and,
   * for ASSERT mode, $assertionsDisabled) and a static initializer if the
   * class had none — but only when instrumentation actually happened.
   */
  public void visitEnd() {
    if (myInstrumented) {
      addField(PATTERN_CACHE_NAME, ACC_PRIVATE + ACC_FINAL + ACC_STATIC + ACC_SYNTHETIC, JAVA_UTIL_REGEX_PATTERN);

      if (myInstrumentationType == InstrumentationType.ASSERT) {
        if (!myHasAssertions) {
          addField(ASSERTIONS_DISABLED_NAME, ACC_FINAL + ACC_STATIC + ACC_SYNTHETIC, "Z");
        }
      }

      if (!myHasStaticInitializer) {
        createStaticInitializer();
      }
    }
    super.visitEnd();
  }

  private void addField(String name, int modifiers, String type) {
    final FieldVisitor fv = cv.visitField(modifiers, name, type, null, null);
    fv.visitEnd();
  }

  /** Emits a fresh {@code <clinit>} holding only our initialization code. */
  private void createStaticInitializer() {
    final MethodVisitor mv = cv.visitMethod(ACC_STATIC, "<clinit>", "()V", null, null);
    mv.visitCode();
    patchStaticInitializer(mv);
    mv.visitInsn(RETURN);
    mv.visitMaxs(0, 0);
    mv.visitEnd();
  }

  /** Injects assertion-status setup (if needed) and pattern-cache setup. */
  private void patchStaticInitializer(MethodVisitor mv) {
    if (!myHasAssertions && myInstrumentationType == InstrumentationType.ASSERT) {
      initAssertions(mv);
    }
    initPatterns(mv);
  }

  // verify pattern and add compiled pattern to static cache
  private void initPatterns(MethodVisitor mv) {
    // NOTE(review): BIPUSH carries a signed byte operand; classes with more
    // than 127 distinct patterns would need SIPUSH/LDC here — confirm the
    // pattern count stays small in practice.
    mv.visitIntInsn(BIPUSH, myPatterns.size());
    mv.visitTypeInsn(ANEWARRAY, "java/util/regex/Pattern");
    mv.visitFieldInsn(PUTSTATIC, myClassName, PATTERN_CACHE_NAME, JAVA_UTIL_REGEX_PATTERN);

    int i = 0;
    for (String pattern : myPatterns) {
      // check the pattern so we can rely on the pattern being valid at runtime
      try {
        Pattern.compile(pattern);
      }
      catch (Exception e) {
        throw new InstrumentationException("Illegal Pattern: " + pattern, e);
      }
      // CACHE[i] = Pattern.compile(pattern)
      mv.visitFieldInsn(GETSTATIC, myClassName, PATTERN_CACHE_NAME, JAVA_UTIL_REGEX_PATTERN);
      mv.visitIntInsn(BIPUSH, i++);
      mv.visitLdcInsn(pattern);
      mv.visitMethodInsn(INVOKESTATIC, "java/util/regex/Pattern", "compile", "(Ljava/lang/String;)Ljava/util/regex/Pattern;");
      mv.visitInsn(AASTORE);
    }
  }

  // add assert startup code
  private void initAssertions(MethodVisitor mv) {
    // $assertionsDisabled = !CurrentClass.class.desiredAssertionStatus()
    mv.visitLdcInsn(Type.getType("L" + myClassName + ";"));
    mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Class", "desiredAssertionStatus", "()Z");
    Label l0 = new Label();
    mv.visitJumpInsn(IFNE, l0);
    mv.visitInsn(ICONST_1);
    Label l1 = new Label();
    mv.visitJumpInsn(GOTO, l1);
    mv.visitLabel(l0);
    mv.visitInsn(ICONST_0);
    mv.visitLabel(l1);
    mv.visitFieldInsn(PUTSTATIC, myClassName, ASSERTIONS_DISABLED_NAME, "Z");
  }

  /**
   * Patches an existing {@code <clinit>} in place, and instruments only those
   * methods that take or return a String (the only values patterns can apply to).
   */
  public MethodVisitor visitMethod(final int access, final String name, String desc, String signature, String[] exceptions) {
    final MethodVisitor methodvisitor = cv.visitMethod(access, name, desc, signature, exceptions);

    // patch static initializer
    if ((access & ACC_STATIC) != 0 && name.equals("<clinit>")) {
      myHasStaticInitializer = true;
      return new ErrorPostponingMethodVisitor(this, name, methodvisitor) {
        public void visitCode() {
          super.visitCode();
          // inject our initialization at the very top of the existing <clinit>
          patchStaticInitializer(mv);
        }
      };
    }

    final Type[] argTypes = Type.getArgumentTypes(desc);
    final Type returnType = Type.getReturnType(desc);

    // don't dig through the whole method if there's nothing to do in it
    if (isStringType(returnType)) {
      return new InstrumentationAdapter(this, methodvisitor, argTypes, returnType, access, name);
    }
    else {
      for (Type type : argTypes) {
        if (isStringType(type)) {
          return new InstrumentationAdapter(this, methodvisitor, argTypes, returnType, access, name);
        }
      }
    }
    return new ErrorPostponingMethodVisitor(this, name, methodvisitor);
  }

  private static boolean isStringType(Type type) {
    return type.getSort() == Type.OBJECT && type.getDescriptor().equals(JAVA_LANG_STRING);
  }

  /**
   * Registers a pattern string and returns its stable index in the cache array.
   * Falls back to a linear lookup when the pattern was already registered.
   */
  public int addPattern(String s) {
    if (myPatterns.add(s)) {
      return myPatterns.size() - 1;
    }
    return Arrays.asList(myPatterns.toArray()).indexOf(s);
  }

  /** @return true if the given annotation is the pattern annotation or a known alias of it. */
  public boolean acceptAnnotation(String annotationClassName) {
    if (annotationClassName == null) {
      // unfortunately sometimes ASM may return null values
      return false;
    }
    processAnnotation(annotationClassName);
    return myAnnotationNameToPatternMap.containsKey(annotationClassName);
  }

  /**
   * @param annotationClassName fully qualified name of the annotation class
   * @return pattern string for 'alias' annotations, as specified in the 'base' annotation,
   * otherwise null, (for the 'base' annotation class name null is returned as well)
   */
  @Nullable
  public String getAnnotationPattern(String annotationClassName) {
    processAnnotation(annotationClassName);
    return myAnnotationNameToPatternMap.get(annotationClassName);
  }

  /**
   * Lazily inspects an annotation class: if it is itself annotated with the
   * pattern annotation, its pattern string is extracted and cached so the
   * annotation acts as an alias. Each class is inspected at most once.
   */
  private void processAnnotation(String annotationClassName) {
    if (!myProcessedAnnotations.add(annotationClassName)) {
      return;
    }
    try {
      final InputStream is = myClassFinder.getClassBytesAsStream(annotationClassName);
      if (is != null) {
        try {
          final Ref<String> patternString = new Ref<String>(null);
          // dig into annotation class and check if it is annotated with pattern annotation.
          // if yes, load the pattern string from the pattern annotation and associate it with this annotation
          final ClassVisitor visitor = new ClassVisitor(Opcodes.ASM4) {
            @Override
            public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
              if (patternString.get() != null || !myPatternAnnotationClassName.equals(Type.getType(desc).getClassName())) {
                return null; // already found or is not pattern annotation
              }
              // dig into pattern annotation in order to discover the pattern string
              return new AnnotationVisitor(Opcodes.ASM4) {
                public void visit(@NonNls String name, Object value) {
                  if ("value".equals(name) && value instanceof String) {
                    patternString.set((String)value);
                  }
                }
              };
            }
          };
          new ClassReader(is).accept(visitor, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
          final String pattern = patternString.get();
          if (pattern != null) {
            myAnnotationNameToPatternMap.put(annotationClassName, pattern);
          }
        }
        finally {
          is.close();
        }
      }
    }
    catch (IOException ignored) {
      // todo: annotation class not readable — alias silently ignored
    }
  }

  /**
   * Remembers the first instrumentation failure; it is thrown immediately if
   * the class is already marked instrumented, otherwise from markInstrumented().
   */
  void registerError(String methodName, String operationName, Throwable e) {
    if (myPostponedError == null) {
      // throw the first error that occurred
      Throwable err = e.getCause();
      if (err == null) {
        err = e;
      }
      myPostponedError = new RuntimeException("Operation '" + operationName + "' failed for " + myClassName + "." + methodName + "(): " + err.getMessage(), err);
    }
    if (myInstrumented) {
      processPostponedErrors();
    }
  }

  /** Throws the postponed error, if any. */
  private void processPostponedErrors() {
    final RuntimeException error = myPostponedError;
    if (error != null) {
      throw error;
    }
  }
}
/*
 * Copyright (c) 2015-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

package com.facebook.drawee.drawable;

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import com.facebook.common.internal.Preconditions;
import com.facebook.common.internal.VisibleForTesting;
import java.util.Arrays;
import javax.annotation.Nullable;

/**
 * Drawable that draws underlying drawable with rounded corners.
 */
public class RoundedCornersDrawable extends ForwardingDrawable implements Rounded {

  public enum Type {

    /**
     * Draws rounded corners on top of the underlying drawable by overlaying a solid color which
     * is specified by {@code setOverlayColor}. This option should only be used when the
     * background beneath the underlying drawable is static and of the same solid color.
     */
    OVERLAY_COLOR,

    /**
     * Clips the drawable to be rounded. This option is not supported right now but is expected to
     * be made available in the future.
     */
    CLIPPING
  }

  @VisibleForTesting Type mType = Type.OVERLAY_COLOR;
  private final RectF mBounds = new RectF();

  // lazily created in draw() when scale-down-inside-borders is enabled
  @Nullable private RectF mInsideBorderBounds;
  @Nullable private Matrix mInsideBorderTransform;

  // 8 values = 4 corners x [X radius, Y radius], ordered TL, TR, BR, BL
  private final float[] mRadii = new float[8];
  @VisibleForTesting final float[] mBorderRadii = new float[8];
  @VisibleForTesting final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
  private boolean mIsCircle = false;
  private float mBorderWidth = 0;
  private int mBorderColor = Color.TRANSPARENT;
  private int mOverlayColor = Color.TRANSPARENT;
  private float mPadding = 0;
  private boolean mScaleDownInsideBorders = false;
  // mPath: rounded outline used for the overlay/clip; mBorderPath: stroke outline
  private final Path mPath = new Path();
  private final Path mBorderPath = new Path();
  private final RectF mTempRectangle = new RectF();

  /**
   * Creates a new RoundedCornersDrawable with given underlying drawable.
   *
   * @param drawable underlying drawable
   */
  public RoundedCornersDrawable(Drawable drawable) {
    super(Preconditions.checkNotNull(drawable));
  }

  /**
   * Sets the type of rounding process
   *
   * @param type type of rounding process
   */
  public void setType(Type type) {
    mType = type;
    invalidateSelf();
  }

  /**
   * Sets whether to round as circle.
   *
   * @param isCircle whether or not to round as circle
   */
  @Override
  public void setCircle(boolean isCircle) {
    mIsCircle = isCircle;
    updatePath();
    invalidateSelf();
  }

  /** Returns whether or not this drawable rounds as circle. */
  @Override
  public boolean isCircle() {
    return mIsCircle;
  }

  /**
   * Sets radius to be used for rounding
   *
   * @param radius corner radius in pixels
   */
  @Override
  public void setRadius(float radius) {
    Arrays.fill(mRadii, radius);
    updatePath();
    invalidateSelf();
  }

  /**
   * Sets radii values to be used for rounding.
   * Each corner receives two radius values [X, Y]. The corners are ordered
   * top-left, top-right, bottom-right, bottom-left
   *
   * @param radii Array of 8 values, 4 pairs of [X,Y] radii; null resets all radii to 0
   */
  @Override
  public void setRadii(float[] radii) {
    if (radii == null) {
      Arrays.fill(mRadii, 0);
    } else {
      Preconditions.checkArgument(radii.length == 8, "radii should have exactly 8 values");
      System.arraycopy(radii, 0, mRadii, 0, 8);
    }
    updatePath();
    invalidateSelf();
  }

  /** Gets the radii. */
  @Override
  public float[] getRadii() {
    return mRadii;
  }

  /**
   * Sets the overlay color.
   *
   * @param overlayColor the color to filled outside the rounded corners
   */
  public void setOverlayColor(int overlayColor) {
    mOverlayColor = overlayColor;
    invalidateSelf();
  }

  /** Gets the overlay color. */
  public int getOverlayColor() {
    return mOverlayColor;
  }

  /**
   * Sets the border
   *
   * @param color of the border
   * @param width of the border
   */
  @Override
  public void setBorder(int color, float width) {
    mBorderColor = color;
    mBorderWidth = width;
    updatePath();
    invalidateSelf();
  }

  /** Gets the border color. */
  @Override
  public int getBorderColor() {
    return mBorderColor;
  }

  /** Gets the border width. */
  @Override
  public float getBorderWidth() {
    return mBorderWidth;
  }

  @Override
  public void setPadding(float padding) {
    mPadding = padding;
    updatePath();
    invalidateSelf();
  }

  /** Gets the padding. */
  @Override
  public float getPadding() {
    return mPadding;
  }

  /**
   * Sets whether image should be scaled down inside borders.
   *
   * @param scaleDownInsideBorders true to shrink the image so it fits inside the border stroke
   */
  @Override
  public void setScaleDownInsideBorders(boolean scaleDownInsideBorders) {
    mScaleDownInsideBorders = scaleDownInsideBorders;
    updatePath();
    invalidateSelf();
  }

  /** Gets whether image should be scaled down inside borders. */
  @Override
  public boolean getScaleDownInsideBorders() {
    return mScaleDownInsideBorders;
  }

  @Override
  protected void onBoundsChange(Rect bounds) {
    super.onBoundsChange(bounds);
    updatePath();
  }

  /**
   * Rebuilds mPath (overlay/clip outline, inset by padding) and mBorderPath
   * (stroke outline, inset by half the border width so the stroke is centered).
   * The insets on mTempRectangle are applied and then undone in pairs.
   */
  private void updatePath() {
    mPath.reset();
    mBorderPath.reset();
    mTempRectangle.set(getBounds());

    mTempRectangle.inset(mPadding, mPadding);
    // outer rect + inner rounded shape; with EVEN_ODD fill this paints the area
    // between them (the "corners") in draw()
    mPath.addRect(mTempRectangle, Path.Direction.CW);
    if (mIsCircle) {
      mPath.addCircle(
          mTempRectangle.centerX(),
          mTempRectangle.centerY(),
          Math.min(mTempRectangle.width(), mTempRectangle.height())/2,
          Path.Direction.CW);
    } else {
      mPath.addRoundRect(mTempRectangle, mRadii, Path.Direction.CW);
    }
    mTempRectangle.inset(-mPadding, -mPadding);

    mTempRectangle.inset(mBorderWidth/2, mBorderWidth/2);
    if (mIsCircle) {
      float radius = Math.min(mTempRectangle.width(), mTempRectangle.height())/2;
      mBorderPath.addCircle(
          mTempRectangle.centerX(), mTempRectangle.centerY(), radius, Path.Direction.CW);
    } else {
      for (int i = 0; i < mBorderRadii.length; i++) {
        // compensate the border inset so the stroke follows the rounded shape
        mBorderRadii[i] = mRadii[i] + mPadding - mBorderWidth/2;
      }
      mBorderPath.addRoundRect(mTempRectangle, mBorderRadii, Path.Direction.CW);
    }
    mTempRectangle.inset(-mBorderWidth/2, -mBorderWidth/2);
  }

  /**
   * Draws the wrapped drawable, then either clips it (CLIPPING) or covers the
   * corner area with the overlay color (OVERLAY_COLOR), and finally strokes
   * the border if one is set.
   */
  @Override
  public void draw(Canvas canvas) {
    mBounds.set(getBounds());
    switch (mType) {
      case CLIPPING:
        int saveCount = canvas.save();
        // clip, note: doesn't support anti-aliasing
        mPath.setFillType(Path.FillType.EVEN_ODD);
        canvas.clipPath(mPath);
        super.draw(canvas);
        canvas.restoreToCount(saveCount);
        break;
      case OVERLAY_COLOR:
        if (mScaleDownInsideBorders) {
          if (mInsideBorderBounds == null) {
            // created together with mInsideBorderTransform, so the transform is
            // non-null whenever this branch runs
            mInsideBorderBounds = new RectF(mBounds);
            mInsideBorderTransform = new Matrix();
          } else {
            mInsideBorderBounds.set(mBounds);
          }
          mInsideBorderBounds.inset(mBorderWidth, mBorderWidth);
          mInsideBorderTransform.setRectToRect(
              mBounds, mInsideBorderBounds, Matrix.ScaleToFit.FILL);

          saveCount = canvas.save();
          canvas.clipRect(mBounds);
          canvas.concat(mInsideBorderTransform);
          super.draw(canvas);
          canvas.restoreToCount(saveCount);
        } else {
          super.draw(canvas);
        }
        mPaint.setStyle(Paint.Style.FILL);
        mPaint.setColor(mOverlayColor);
        mPaint.setStrokeWidth(0f);
        mPath.setFillType(Path.FillType.EVEN_ODD);
        canvas.drawPath(mPath, mPaint);

        if (mIsCircle) {
          // INVERSE_EVEN_ODD will only draw inverse circle within its bounding box, so we need to
          // fill the rest manually if the bounds are not square.
          float paddingH = (mBounds.width() - mBounds.height() + mBorderWidth) / 2f;
          float paddingV = (mBounds.height() - mBounds.width() + mBorderWidth) / 2f;
          if (paddingH > 0) {
            canvas.drawRect(mBounds.left, mBounds.top, mBounds.left + paddingH, mBounds.bottom, mPaint);
            canvas.drawRect(
                mBounds.right - paddingH,
                mBounds.top,
                mBounds.right,
                mBounds.bottom,
                mPaint);
          }
          if (paddingV > 0) {
            canvas.drawRect(mBounds.left, mBounds.top, mBounds.right, mBounds.top + paddingV, mPaint);
            canvas.drawRect(
                mBounds.left,
                mBounds.bottom - paddingV,
                mBounds.right,
                mBounds.bottom,
                mPaint);
          }
        }
        break;
    }

    if (mBorderColor != Color.TRANSPARENT) {
      mPaint.setStyle(Paint.Style.STROKE);
      mPaint.setColor(mBorderColor);
      mPaint.setStrokeWidth(mBorderWidth);
      mPath.setFillType(Path.FillType.EVEN_ODD);
      canvas.drawPath(mBorderPath, mPaint);
    }
  }
}
package de.bmoth.parser.ast;

import de.bmoth.antlr.BMoThParser;
import de.bmoth.antlr.BMoThParser.*;
import de.bmoth.antlr.BMoThParserBaseVisitor;
import de.bmoth.parser.ast.nodes.*;
import de.bmoth.parser.ast.nodes.ExpressionOperatorNode.ExpressionOperator;
import de.bmoth.parser.ast.nodes.ltl.*;
import de.bmoth.parser.cst.BDefinition;
import de.bmoth.parser.cst.BDefinition.KIND;
import de.bmoth.parser.cst.FormulaAnalyser;
import de.bmoth.parser.cst.LTLFormulaAnalyser;
import de.bmoth.parser.cst.MachineAnalyser;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.RuleNode;
import org.antlr.v4.runtime.tree.TerminalNode;

import java.math.BigInteger;
import java.util.*;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static de.bmoth.parser.ast.nodes.FormulaNode.FormulaType.EXPRESSION_FORMULA;
import static de.bmoth.parser.ast.nodes.FormulaNode.FormulaType.PREDICATE_FORMULA;

/**
 * Builds the semantic AST from an analysed ANTLR parse tree. Three entry
 * points exist, one per analyser kind: whole machines, standalone formulas,
 * and standalone LTL formulas. The resulting root node is exposed via
 * {@link #getAstNode()}.
 */
public class SemanticAstCreator {

    // maps each identifier use to the terminal node of its declaration
    private final Map<TerminalNode, TerminalNode> declarationReferences;
    // declaration terminal -> the DeclarationNode created for it
    private final HashMap<TerminalNode, DeclarationNode> declarationMap = new HashMap<>();
    // parse-tree contexts that must be replaced by a DEFINITIONS body
    private final Map<ParserRuleContext, BDefinition> definitionCallReplacements;
    // formal definition parameter -> actual argument expression (for inlining)
    private final Map<TerminalNode, ExpressionContext> argumentReplacement = new HashMap<>();
    private final Map<EnumeratedSetContext, EnumeratedSetDeclarationNode> enumerations = new HashMap<>();

    private final Node semanticNode;

    /** @return the root of the created semantic AST (MachineNode, FormulaNode or LTLFormula). */
    public Node getAstNode() {
        return this.semanticNode;
    }

    /**
     * Creates the semantic AST for a complete machine: constants, variables,
     * sets, PROPERTIES, INVARIANT, INITIALISATION, LTL formulas and operations.
     */
    public SemanticAstCreator(MachineAnalyser machineAnalyser) {
        this.declarationReferences = machineAnalyser.getDeclarationReferences();
        this.definitionCallReplacements = machineAnalyser.getDefinitionCallReplacements();

        // NOTE(review): MachineNode is constructed with two null arguments here —
        // confirm against MachineNode's constructor what they stand for.
        MachineNode machineNode = new MachineNode(null, null);
        machineNode.setConstants(createDeclarationList(machineAnalyser.getConstants()));
        machineNode.setVariables(createDeclarationList(machineAnalyser.getVariables()));
        addEnumeratedSets(machineAnalyser.getEnumeratedSets(), machineNode);
        addDeferredSets(machineAnalyser.getDeferredSetContexts(), machineNode);
        FormulaVisitor formulaVisitor = new FormulaVisitor();
        if (null != machineAnalyser.getPropertiesClause()) {
            PredicateNode pred = (PredicateNode) machineAnalyser.getPropertiesClause().predicate()
                .accept(formulaVisitor);
            machineNode.setProperties(pred);
        }

        if (null != machineAnalyser.getInvariantClause()) {
            PredicateNode pred = (PredicateNode) machineAnalyser.getInvariantClause().predicate()
                .accept(formulaVisitor);
            machineNode.setInvariant(pred);
        }

        if (null != machineAnalyser.getInitialisationClause()) {
            SubstitutionNode substitution = (SubstitutionNode) machineAnalyser.getInitialisationClause().substitution()
                .accept(formulaVisitor);
            machineNode.setInitialisation(substitution);
        }

        // translate each named LTL formula of the machine
        Map<String, LtlStartContext> ltlFormulaMap = machineAnalyser.getLTLFormulaMap();
        for (Entry<String, LtlStartContext> entry : ltlFormulaMap.entrySet()) {
            String name = entry.getKey();
            LtlStartContext value = entry.getValue();
            LTLNode ltlNode = (LTLNode) value.accept(formulaVisitor);
            LTLFormula ltlFormula = new LTLFormula();
            ltlFormula.setName(name);
            ltlFormula.setFormula(ltlNode);
            machineNode.addLTLFormula(ltlFormula);
        }

        createOperations(machineAnalyser, machineNode, formulaVisitor);
        this.semanticNode = machineNode;
    }

    /** Translates every OPERATIONS entry into an OperationNode (params, output params, body). */
    private void createOperations(MachineAnalyser machineAnalyser, MachineNode machineNode,
            FormulaVisitor formulaVisitor) {
        List<OperationNode> operationNodes = new ArrayList<>();
        for (OperationContext operationContext : machineAnalyser.getOperations()) {
            List<DeclarationNode> outputParamNodes = new ArrayList<>();
            if (operationContext.outputParams != null) {
                outputParamNodes = createDeclarationNodeList(operationContext.outputParams.IDENTIFIER());
            }
            List<DeclarationNode> paramNodes = new ArrayList<>();
            if (operationContext.params != null) {
                paramNodes = createDeclarationNodeList(operationContext.params.IDENTIFIER());
            }
            SubstitutionNode sub = (SubstitutionNode) operationContext.substitution().accept(formulaVisitor);
            OperationNode operationNode = new OperationNode(operationContext.IDENTIFIER().getText(),outputParamNodes, sub, paramNodes);
            operationNodes.add(operationNode);
        }
        machineNode.setOperations(operationNodes);
    }

    /** Creates the semantic AST for a standalone formula (expression or predicate). */
    public SemanticAstCreator(FormulaAnalyser formulaAnalyser) {
        this.declarationReferences = formulaAnalyser.getDeclarationReferences();
        this.definitionCallReplacements = new LinkedHashMap<>();
        FormulaContext formulaContext = formulaAnalyser.getFormula();
        // presence of an expression child decides the formula type
        FormulaNode.FormulaType type = formulaContext.expression() != null ? EXPRESSION_FORMULA : PREDICATE_FORMULA;
        FormulaNode formulaNode = new FormulaNode(type);
        formulaNode.setImplicitDeclarations(createDeclarationList(formulaAnalyser.getImplicitDeclarations()));
        FormulaVisitor formulaVisitor = new FormulaVisitor();
        Node node;
        if (type == EXPRESSION_FORMULA) {
            node = formulaContext.expression().accept(formulaVisitor);
        } else {
            node = formulaContext.predicate().accept(formulaVisitor);
        }
        formulaNode.setFormula(node);
        this.semanticNode = formulaNode;
    }

    /** Creates the semantic AST for a standalone LTL formula. */
    public SemanticAstCreator(LTLFormulaAnalyser formulaAnalyser) {
        this.declarationReferences = formulaAnalyser.getDeclarationReferences();
        this.definitionCallReplacements = new LinkedHashMap<>();
        FormulaVisitor formulaVisitor = new FormulaVisitor();
        LTLNode node = (LTLNode) formulaAnalyser.getLTLStartContext().ltlFormula().accept(formulaVisitor);
        LTLFormula ltlFormula = new LTLFormula();
        ltlFormula.setFormula(node);
        ltlFormula.setImplicitDeclarations(createDeclarationList(formulaAnalyser.getImplicitDeclarations()));
        this.semanticNode = ltlFormula;
    }

    /** Registers every deferred set as a DeclarationNode on the machine. */
    private void addDeferredSets(List<DeferredSetContext> deferredSetContexts, MachineNode machineNode) {
        for (DeferredSetContext deferredSetContext : deferredSetContexts) {
            Token token = deferredSetContext.IDENTIFIER().getSymbol();
            DeclarationNode setDeclNode = new DeclarationNode(deferredSetContext.IDENTIFIER(), token.getText());
            declarationMap.put(deferredSetContext.IDENTIFIER(), setDeclNode);
            machineNode.addDeferredSet(setDeclNode);
        }
    }

    /** Wraps each terminal into a DeclarationNode and records it in declarationMap. */
    private List<DeclarationNode> createDeclarationList(List<TerminalNode> list) {
        List<DeclarationNode> declarationList = new ArrayList<>();
        for (TerminalNode terminalNode : list) {
            DeclarationNode declNode = new DeclarationNode(terminalNode, terminalNode.getSymbol().getText());
            declarationList.add(declNode);
            declarationMap.put(terminalNode, declNode);
        }
        return declarationList;
    }

    /** Same as above, but takes a name-to-terminal map (iteration order is the map's). */
    private List<DeclarationNode> createDeclarationList(Map<String, TerminalNode> map) {
        List<DeclarationNode> declarationList = new ArrayList<>();
        for (Entry<String, TerminalNode> entry : map.entrySet()) {
            TerminalNode terminalNode = entry.getValue();
            DeclarationNode declNode = new DeclarationNode(terminalNode, terminalNode.getSymbol().getText());
            declarationList.add(declNode);
            declarationMap.put(terminalNode, declNode);
        }
        return declarationList;
    }

    /** Wraps each identifier terminal into a DeclarationNode (text taken from the token). */
    private List<DeclarationNode> createDeclarationNodeList(List<TerminalNode> list) {
        List<DeclarationNode> declarationList = new ArrayList<>();
        for (TerminalNode terminalNode : list) {
            Token token = terminalNode.getSymbol();
            DeclarationNode declNode = new DeclarationNode(terminalNode, token.getText());
            declarationList.add(declNode);
            declarationMap.put(terminalNode, declNode);
        }
        return declarationList;
    }

    /** Registers every enumerated set (the set and its elements) on the machine. */
    private void addEnumeratedSets(List<EnumeratedSetContext> enumerationsContexts, MachineNode machineNode) {
        for (EnumeratedSetContext enumeratedSetContext : enumerationsContexts) {
            Token token = enumeratedSetContext.IDENTIFIER().getSymbol();
            DeclarationNode setDeclNode = new DeclarationNode(enumeratedSetContext.IDENTIFIER(), token.getText());
            declarationMap.put(enumeratedSetContext.IDENTIFIER(), setDeclNode);
            List<DeclarationNode> declarationList = createDeclarationNodeList(
                enumeratedSetContext.identifier_list().IDENTIFIER());
            EnumeratedSetDeclarationNode enumerationSet = new EnumeratedSetDeclarationNode(setDeclNode, declarationList);
            enumerations.put(enumeratedSetContext, enumerationSet);
            machineNode.addSetEnumeration(enumerationSet);
        }
    }

    /**
     * Parse-tree visitor that turns each context into its semantic AST node.
     * DEFINITIONS calls are inlined by visiting the definition body with the
     * actual arguments registered in argumentReplacement.
     */
    class FormulaVisitor extends BMoThParserBaseVisitor<Node> {

        // TODO refactor definitions handling
        // tracks whether the definition currently being inlined is used as an
        // expression or as a predicate (disambiguates ambiguous calls)
        BDefinition.KIND currentKind;

        @Override
        public Node visitChildren(RuleNode node) {
            // fail fast on any grammar rule without an explicit visit method
            throw new AssertionError(node.getClass() + " is not implemented yet in semantic Ast creator.");
        }

        @Override
        public Node visitQuantifiedPredicate(BMoThParser.QuantifiedPredicateContext ctx) {
            List<DeclarationNode> declarationList = createDeclarationNodeList(
                ctx.quantified_variables_list().identifier_list().IDENTIFIER());
            PredicateNode predNode = (PredicateNode) ctx.predicate().accept(this);
            return new QuantifiedPredicateNode(ctx, declarationList, predNode);
        }

        @Override
        public Node visitEmptySequenceExpression(BMoThParser.EmptySequenceExpressionContext ctx) {
            return new ExpressionOperatorNode(ctx, new ArrayList<>(), ExpressionOperator.EMPTY_SEQUENCE);
        }

        @Override
        public Node visitSequenceEnumerationExpression(BMoThParser.SequenceEnumerationExpressionContext ctx) {
            if (ctx.expression_list() == null) {
                return new ExpressionOperatorNode(ctx, new ArrayList<>(), ExpressionOperator.EMPTY_SEQUENCE);
            } else {
                return new ExpressionOperatorNode(ctx, createExprNodeList(ctx.expression_list().expression()),
                    ExpressionOperator.SEQ_ENUMERATION);
            }
        }

        @Override
        public Node visitFunctionCallExpression(BMoThParser.FunctionCallExpressionContext ctx) {
            if (definitionCallReplacements.containsKey(ctx)) {
                // the "function call" is really a DEFINITIONS call: inline it
                currentKind = KIND.EXPRESSION;
                return replaceByDefinitionBody(ctx, definitionCallReplacements.get(ctx));
            } else {
                return new ExpressionOperatorNode(ctx, createExprNodeList(ctx.expression()),
                    ExpressionOperator.FUNCTION_CALL);
            }
        }

        @Override
        public ExprNode visitIdentifierExpression(BMoThParser.IdentifierExpressionContext ctx) {
            TerminalNode declNode = SemanticAstCreator.this.declarationReferences.get(ctx.IDENTIFIER());
            return handleExpressionIdentifier(ctx, ctx.IDENTIFIER(), declNode);
        }

        /**
         * Resolves an identifier used as an expression, in priority order:
         * definition inlining, inlined definition argument, enumerated set,
         * deferred set, enumerated set element, plain identifier.
         */
        private ExprNode handleExpressionIdentifier(ParserRuleContext ctx, TerminalNode terminalNode,
                TerminalNode declNode) {
            if (definitionCallReplacements.containsKey(ctx)) {
                currentKind = KIND.EXPRESSION;
                return (ExprNode) definitionCallReplacements.get(ctx).getDefinitionContext().definition_body()
                    .accept(this);
            } else if (argumentReplacement.containsKey(declNode)) {
                // identifier is a formal parameter of an inlined definition:
                // substitute the actual argument expression
                ExpressionContext expressionContext = argumentReplacement.get(declNode);
                return (ExprNode) expressionContext.accept(this);
            } else if (declNode.getParent() instanceof EnumeratedSetContext) {
                EnumeratedSetDeclarationNode enumeratedSetDeclarationNode = enumerations.get(declNode.getParent());
                return new EnumerationSetNode(terminalNode, enumeratedSetDeclarationNode, terminalNode.getText());
            } else if (declNode.getParent() instanceof DeferredSetContext) {
                DeclarationNode declarationNode = declarationMap.get(declNode);
                return new DeferredSetNode(terminalNode, declarationNode, terminalNode.getText());
            } else if (declNode.getParent().getParent() instanceof EnumeratedSetContext) {
                // declared inside an enumerated set's identifier list -> set element
                EnumeratedSetDeclarationNode enumeratedSetDeclarationNode = enumerations
                    .get(declNode.getParent().getParent());
                DeclarationNode declarationNode = declarationMap.get(declNode);
                return new EnumeratedSetElementNode(terminalNode, enumeratedSetDeclarationNode, terminalNode.getText(),
                    declarationNode);
            } else {
                return createIdentifierExprNode(terminalNode);
            }
        }

        /**
         * An identifier followed by an optional argument list that may be either
         * an expression or a predicate; dispatched on currentKind.
         */
        @Override
        public Node visitDefinitionAmbiguousCall(BMoThParser.DefinitionAmbiguousCallContext ctx) {
            TerminalNode declToken = SemanticAstCreator.this.declarationReferences.get(ctx.IDENTIFIER());
            if (currentKind == KIND.EXPRESSION) {
                if (null != ctx.expression_list()) {
                    List<ExpressionContext> exprs = ctx.expression_list().exprs;
                    if (definitionCallReplacements.containsKey(ctx)) {
                        currentKind = KIND.EXPRESSION;
                        BDefinition bDefinition = definitionCallReplacements.get(ctx);
                        // NOTE(review): getDefinitionContext().IDENTIFIER() is the
                        // same terminal on every loop iteration, so with several
                        // arguments only the last one survives — confirm whether
                        // multi-parameter definitions reach this path.
                        for (ExpressionContext value : exprs) {
                            TerminalNode terminalNode = bDefinition.getDefinitionContext().IDENTIFIER();
                            argumentReplacement.put(terminalNode, value);
                        }
                        return bDefinition.getDefinitionContext().definition_body().accept(this);
                    } else {
                        List<ExprNode> exprNodes = new ArrayList<>();
                        exprNodes.add(createIdentifierExprNode(ctx.IDENTIFIER()));
                        return new ExpressionOperatorNode(ctx, exprNodes, ExpressionOperator.FUNCTION_CALL);
                    }
                } else {
                    return handleExpressionIdentifier(ctx, ctx.IDENTIFIER(), declToken);
                }
            } else if (currentKind == KIND.PREDICATE) {
                return handlePredicateIdentifier(ctx, ctx.IDENTIFIER());
            }
            return visitChildren(ctx);
        }

        @Override
        public ExprNode visitDefinitionExpression(BMoThParser.DefinitionExpressionContext ctx) {
            return (ExprNode) ctx.expression().accept(this);
        }

        @Override
        public PredicateNode visitDefinitionPredicate(BMoThParser.DefinitionPredicateContext ctx) {
            return (PredicateNode) ctx.predicate().accept(this);
        }

        @Override
        public SubstitutionNode visitDefinitionSubstitution(BMoThParser.DefinitionSubstitutionContext ctx) {
            return (SubstitutionNode) ctx.substitution().accept(this);
        }

        @Override
        public PredicateNode visitPredicateIdentifier(BMoThParser.PredicateIdentifierContext ctx) {
            currentKind = KIND.PREDICATE;
            return handlePredicateIdentifier(ctx, ctx.IDENTIFIER());
        }

        /** Resolves an identifier used as a predicate (definition inlining or plain identifier). */
        private PredicateNode handlePredicateIdentifier(ParserRuleContext ctx, TerminalNode terminalNode) {
            if (definitionCallReplacements.containsKey(ctx)) {
                BDefinition bDefinition = definitionCallReplacements.get(ctx);
                return (PredicateNode) bDefinition.getDefinitionContext().definition_body().accept(this);
            } else {
                return createIdentifierPredicateNode(terminalNode);
            }
        }

        /**
         * Inlines a definition call: binds each actual argument (exprs[1..]) to
         * the corresponding formal parameter, then visits the definition body.
         * exprs[0] is the called identifier itself, hence the i-1 offset.
         */
        private Node replaceByDefinitionBody(FunctionCallExpressionContext ctx, BDefinition bDefinition) {
            for (int i = 1; i < ctx.exprs.size(); i++) {
                ExpressionContext value = ctx.exprs.get(i);
                TerminalNode terminalNode = bDefinition.getDefinitionContext().identifier_list().IDENTIFIER()
                    .get(i - 1);
                argumentReplacement.put(terminalNode, value);
            }
            return bDefinition.getDefinitionContext().definition_body().accept(this);
        }
        /**
         * Visits a definition call used in predicate position, e.g. {@code def(a, b)}.
         * Records each actual argument under the definition's i-th formal-parameter
         * terminal, then inlines and visits the definition's predicate body.
         */
        @Override
        public PredicateNode visitPredicateDefinitionCall(BMoThParser.PredicateDefinitionCallContext ctx) {
            BDefinition bDefinition = definitionCallReplacements.get(ctx);
            // NOTE(review): the bound uses ctx.expression().size() while elements come
            // from ctx.exprs - presumably the same list; confirm against the grammar.
            for (int i = 0; i < ctx.expression().size(); i++) {
                ExpressionContext value = ctx.exprs.get(i);
                TerminalNode terminalNode = bDefinition.getDefinitionContext().identifier_list().IDENTIFIER(i);
                argumentReplacement.put(terminalNode, value);
            }
            DefinitionPredicateContext defContext = (DefinitionPredicateContext) bDefinition.getDefinitionContext()
                .definition_body();
            return (PredicateNode) defContext.predicate().accept(this);
        }

        /** Parentheses carry no semantics: visit the inner predicate directly. */
        @Override
        public Node visitParenthesesPredicate(BMoThParser.ParenthesesPredicateContext ctx) {
            return ctx.predicate().accept(this);
        }

        /** Parentheses carry no semantics: visit the inner expression directly. */
        @Override
        public Node visitParenthesesExpression(BMoThParser.ParenthesesExpressionContext ctx) {
            return ctx.expression().accept(this);
        }

        /** Wraps a predicate so it can be used where a (boolean) expression is expected. */
        @Override
        public Node visitCastPredicateExpression(BMoThParser.CastPredicateExpressionContext ctx) {
            // internally, we do not distinguish bools and predicates
            PredicateNode predicate = (PredicateNode) ctx.predicate().accept(this);
            return new CastPredicateExpressionNode(ctx, predicate);
        }

        /** Builds a quantified expression (e.g. SIGMA/PI) from variables, predicate and body. */
        @Override
        public Node visitQuantifiedExpression(BMoThParser.QuantifiedExpressionContext ctx) {
            List<DeclarationNode> declarationList = createDeclarationNodeList(
                ctx.quantified_variables_list().identifier_list().IDENTIFIER());
            PredicateNode predNode = (PredicateNode) ctx.predicate().accept(this);
            ExprNode exprNode = (ExprNode) ctx.expression().accept(this);
            return new QuantifiedExpressionNode(ctx, declarationList, predNode, exprNode, ctx.operator);
        }

        /** Builds a set comprehension {x | P} from its bound variables and predicate. */
        @Override
        public Node visitSetComprehensionExpression(BMoThParser.SetComprehensionExpressionContext ctx) {
            List<DeclarationNode> declarationList = createDeclarationNodeList(ctx.identifier_list().IDENTIFIER());
            PredicateNode predNode = (PredicateNode) ctx.predicate().accept(this);
            return new SetComprehensionNode(ctx, declarationList, predNode);
        }

        /**
         * Folds a flat tuple (a, b, c, ...) into nested left-associative couples:
         * ((a, b), c), ...
         */
        @Override
        public Node visitNestedCoupleAsTupleExpression(BMoThParser.NestedCoupleAsTupleExpressionContext ctx) {
            List<ExpressionContext> exprs = ctx.exprs;
            ExprNode left = (ExprNode) exprs.get(0).accept(this);
            for (int i = 1; i < exprs.size(); i++) {
                List<ExprNode> list = new ArrayList<>();
                list.add(left);
                list.add((ExprNode) exprs.get(i).accept(this));
                left = new ExpressionOperatorNode(ctx, list, ExpressionOperator.COUPLE);
            }
            return left;
        }

        /** Builds an operator node from the operator token text and visited operands. */
        @Override
        public ExpressionOperatorNode visitExpressionOperator(BMoThParser.ExpressionOperatorContext ctx) {
            String operator = ctx.operator.getText();
            return new ExpressionOperatorNode(ctx, createExprNodeList(ctx.expression()), operator);
        }

        /** Builds a set enumeration {a, b, ...} from its element expressions. */
        @Override
        public ExprNode visitSetEnumerationExpression(BMoThParser.SetEnumerationExpressionContext ctx) {
            return new ExpressionOperatorNode(ctx, createExprNodeList(ctx.expression_list().expression()),
                ExpressionOperator.SET_ENUMERATION);
        }

        /** Builds the empty-set literal {}. */
        @Override
        public ExprNode visitEmptySetExpression(BMoThParser.EmptySetExpressionContext ctx) {
            return new ExpressionOperatorNode(ctx, new ArrayList<>(), ExpressionOperator.EMPTY_SET);
        }

        /** Builds an arbitrary-precision integer literal node. */
        @Override
        public ExprNode visitNumberExpression(BMoThParser.NumberExpressionContext ctx) {
            BigInteger value = new BigInteger(ctx.Number().getText());
            return new NumberNode(ctx, value);
        }

        // Predicates

        /** Builds a logical-operator node (e.g. conjunction) over the visited sub-predicates. */
        @Override
        public PredicateNode visitPredicateOperator(BMoThParser.PredicateOperatorContext ctx) {
            List<PredicateNode> list = new ArrayList<>();
            List<PredicateContext> predicate = ctx.predicate();
            for (PredicateContext predicateContext : predicate) {
                PredicateNode predNode = (PredicateNode) predicateContext.accept(this);
                list.add(predNode);
            }
            return new PredicateOperatorNode(ctx, list);
        }

        /** Builds a predicate whose arguments are expressions (e.g. equality, membership). */
        @Override
        public PredicateNode visitPredicateOperatorWithExprArgs(BMoThParser.PredicateOperatorWithExprArgsContext ctx) {
            return new PredicateOperatorWithExprArgsNode(ctx, createExprNodeList(ctx.expression()));
        }

        // Visits each expression context in order and collects the resulting nodes.
        private List<ExprNode> createExprNodeList(List<ExpressionContext> list) {
            ArrayList<ExprNode> exprNodes = new ArrayList<>();
            for (ExpressionContext expressionContext : list) {
                ExprNode exprNode = (ExprNode) expressionContext.accept(this);
                exprNodes.add(exprNode);
            }
            return exprNodes;
        }

        // Substitutions

        /** BEGIN ... END blocks carry no semantics: visit the inner substitution. */
        @Override
        public Node visitBlockSubstitution(BMoThParser.BlockSubstitutionContext ctx) {
            return ctx.substitution().accept(this);
        }

        /**
         * Builds an assignment x, y := e1, e2 as pairwise single assignments;
         * multiple targets are wrapped in a parallel substitution.
         */
        @Override
        public SubstitutionNode visitAssignSubstitution(BMoThParser.AssignSubstitutionContext ctx) {
            List<IdentifierExprNode> idents = ctx.identifier_list().IDENTIFIER().stream()
                .map(this::createIdentifierExprNode).collect(Collectors.toList());
            List<ExprNode> expressions = ctx.expression_list().exprs.stream().map(t -> (ExprNode) t.accept(this))
                .collect(Collectors.toList());
            // Pair the i-th target with the i-th right-hand side.
            List<SubstitutionNode> sublist = IntStream.range(0, idents.size())
                .mapToObj(t -> new SingleAssignSubstitutionNode(idents.get(t), expressions.get(t)))
                .collect(Collectors.toList());
            if (sublist.size() == 1) {
                return sublist.get(0);
            } else {
                return new ParallelSubstitutionNode(sublist);
            }
        }

        /** Builds x :: S (becomes element of). */
        @Override
        public SubstitutionNode visitBecomesElementOfSubstitution(BMoThParser.BecomesElementOfSubstitutionContext ctx) {
            List<IdentifierExprNode> idents = ctx.identifier_list().IDENTIFIER().stream()
                .map(this::createIdentifierExprNode).collect(Collectors.toList());
            ExprNode expression = (ExprNode) ctx.expression().accept(this);
            return new BecomesElementOfSubstitutionNode(idents, expression);
        }

        /** Builds x : (P) (becomes such that). */
        @Override
        public SubstitutionNode visitBecomesSuchThatSubstitution(BMoThParser.BecomesSuchThatSubstitutionContext ctx) {
            List<IdentifierExprNode> idents = ctx.identifier_list().IDENTIFIER().stream()
                .map(this::createIdentifierExprNode).collect(Collectors.toList());
            PredicateNode predicate = (PredicateNode) ctx.predicate().accept(this);
            return new BecomesSuchThatSubstitutionNode(idents, predicate);
        }

        /** Builds ANY x WHERE P THEN S END. */
        @Override
        public SubstitutionNode visitAnySubstitution(BMoThParser.AnySubstitutionContext ctx) {
            List<DeclarationNode> declarationList = createDeclarationNodeList(ctx.identifier_list().IDENTIFIER());
            PredicateNode predNode = (PredicateNode) ctx.predicate().accept(this);
            SubstitutionNode sub = (SubstitutionNode) ctx.substitution().accept(this);
            return new AnySubstitutionNode(declarationList, predNode, sub);
        }

        /** Builds SELECT ... WHEN ... ELSE ... END; a missing ELSE defaults to skip. */
        @Override
        public SelectSubstitutionNode visitSelectSubstitution(BMoThParser.SelectSubstitutionContext ctx) {
            List<PredicateNode> predNodes = ctx.preds.stream().map(t -> (PredicateNode) t.accept(this))
                .collect(Collectors.toList());
            List<SubstitutionNode> subNodes = ctx.subs.stream().map(t -> (SubstitutionNode) t.accept(this))
                .collect(Collectors.toList());
            SubstitutionNode elseSubNode = null;
            if (ctx.elseSub != null) {
                elseSubNode = (SubstitutionNode) ctx.elseSub.accept(this);
            } else {
                elseSubNode = new SkipSubstitutionNode();
            }
            return new SelectSubstitutionNode(predNodes, subNodes, elseSubNode);
        }

        /** Builds IF ... ELSIF ... ELSE ... END; a missing ELSE defaults to skip. */
        @Override
        public SubstitutionNode visitIfSubstitution(BMoThParser.IfSubstitutionContext ctx) {
            List<PredicateNode> predNodes = ctx.preds.stream().map(t -> (PredicateNode) t.accept(this))
                .collect(Collectors.toList());
            List<SubstitutionNode> subNodes = ctx.subs.stream().map(t -> (SubstitutionNode) t.accept(this))
                .collect(Collectors.toList());
            SubstitutionNode elseSubNode = null;
            if (ctx.elseSub != null) {
                elseSubNode = (SubstitutionNode) ctx.elseSub.accept(this);
            } else {
                elseSubNode = new SkipSubstitutionNode();
            }
            return new IfSubstitutionNode(predNodes, subNodes, elseSubNode);
        }

        /** Builds PRE/ASSERT P THEN S END, distinguished by the keyword token type. */
        @Override
        public ConditionSubstitutionNode visitConditionSubstitution(BMoThParser.ConditionSubstitutionContext ctx) {
            PredicateNode predicate = (PredicateNode) ctx.predicate().accept(this);
            SubstitutionNode sub = (SubstitutionNode) ctx.substitution().accept(this);
            if (ctx.keyword.getType() == BMoThParser.PRE) {
                return new ConditionSubstitutionNode(ConditionSubstitutionNode.Kind.PRECONDITION, predicate, sub);
            } else {
                return new ConditionSubstitutionNode(ConditionSubstitutionNode.Kind.ASSERT, predicate, sub);
            }
        }

        // Looks up the declaration an identifier terminal refers to and wraps both in
        // an expression node. Fails hard if the reference was never resolved.
        private IdentifierExprNode createIdentifierExprNode(TerminalNode terminalNode) {
            Token token = terminalNode.getSymbol();
            TerminalNode declNode = SemanticAstCreator.this.declarationReferences.get(terminalNode);
            DeclarationNode declarationNode = declarationMap.get(declNode);
            if (declarationNode == null) {
                // Unresolved reference: indicates a scope-analysis bug, not user error.
                throw new AssertionError("Can not find declaration node of identifier '" + token.getText() + "' Line "
                    + token.getLine() + " Pos " + token.getCharPositionInLine());
            }
            return new IdentifierExprNode(terminalNode, declarationNode);
        }

        // Predicate-position counterpart of createIdentifierExprNode.
        private IdentifierPredicateNode createIdentifierPredicateNode(TerminalNode terminalNode) {
            Token token = terminalNode.getSymbol();
            TerminalNode declNode = SemanticAstCreator.this.declarationReferences.get(terminalNode);
            DeclarationNode declarationNode = declarationMap.get(declNode);
            if (declarationNode == null) {
                throw new AssertionError(token.getText() + " Line " + token.getLine());
            }
            return new IdentifierPredicateNode(terminalNode, declarationNode);
        }

        /** Builds the no-op substitution skip. */
        @Override
        public SubstitutionNode visitSkipSubstitution(BMoThParser.SkipSubstitutionContext ctx) {
            return new SkipSubstitutionNode();
        }

        /** Builds S1 || S2 || ... from the visited component substitutions. */
        @Override
        public SubstitutionNode visitParallelSubstitution(BMoThParser.ParallelSubstitutionContext ctx) {
            List<SubstitutionNode> result = new ArrayList<>();
            List<SubstitutionContext> substitution = ctx.substitution();
            for (SubstitutionContext substitutionContext : substitution) {
                SubstitutionNode sub = (SubstitutionNode) substitutionContext.accept(this);
                result.add(sub);
            }
            return new ParallelSubstitutionNode(result);
        }

        // LTL

        /** Entry point for an LTL formula parse tree. */
        @Override
        public LTLNode visitLtlStart(BMoThParser.LtlStartContext ctx) {
            return (LTLNode) ctx.ltlFormula().accept(this);
        }

        /** Maps a unary LTL operator token (G, F, X, not) to its AST kind. */
        @Override
        public Node visitLTLPrefixOperator(BMoThParser.LTLPrefixOperatorContext ctx) {
            LTLNode argument = (LTLNode) ctx.ltlFormula().accept(this);
            LTLPrefixOperatorNode.Kind kind = null;
            switch (ctx.operator.getType()) {
            case BMoThParser.LTL_GLOBALLY:
                kind = LTLPrefixOperatorNode.Kind.GLOBALLY;
                break;
            case BMoThParser.LTL_FINALLY:
                kind = LTLPrefixOperatorNode.Kind.FINALLY;
                break;
            case BMoThParser.LTL_NEXT:
                kind = LTLPrefixOperatorNode.Kind.NEXT;
                break;
            case BMoThParser.LTL_NOT:
                kind = LTLPrefixOperatorNode.Kind.NOT;
                break;
            default:
                // Grammar guarantees one of the above token types.
                throw new AssertionError();
            }
            return new LTLPrefixOperatorNode(kind, argument);
        }

        /** Maps the LTL literals true/false to keyword nodes. */
        @Override
        public Node visitLTLKeyword(BMoThParser.LTLKeywordContext ctx) {
            LTLKeywordNode.Kind kind = null;
            switch (ctx.keyword.getType()) {
            case BMoThParser.LTL_TRUE:
                kind = LTLKeywordNode.Kind.TRUE;
                break;
            case BMoThParser.LTL_FALSE:
                kind = LTLKeywordNode.Kind.FALSE;
                break;
            default:
                throw new AssertionError();
            }
            return new LTLKeywordNode(kind);
        }

        /** Wraps an embedded B predicate as an LTL atomic proposition. */
        @Override
        public Node visitLTLBPredicate(BMoThParser.LTLBPredicateContext ctx) {
            PredicateNode node = (PredicateNode) ctx.predicate().accept(this);
            return new LTLBPredicateNode(node);
        }

        /** Parentheses carry no semantics: visit the inner LTL formula. */
        @Override
        public Node visitLTLParentheses(BMoThParser.LTLParenthesesContext ctx) {
            return ctx.ltlFormula().accept(this);
        }

        /** Maps a binary LTL operator token to its AST kind and builds the node. */
        @Override
        public Node visitLTLInfixOperator(BMoThParser.LTLInfixOperatorContext ctx) {
            LTLNode left = (LTLNode) ctx.ltlFormula(0).accept(this);
            LTLNode right = (LTLNode) ctx.ltlFormula(1).accept(this);
            LTLInfixOperatorNode.Kind kind = null;
            switch (ctx.operator.getType()) {
            case BMoThParser.LTL_IMPLIES:
                kind = LTLInfixOperatorNode.Kind.IMPLICATION;
                break;
            case BMoThParser.LTL_UNTIL:
                kind = LTLInfixOperatorNode.Kind.UNTIL;
                break;
            case BMoThParser.LTL_WEAK_UNTIL:
                kind = LTLInfixOperatorNode.Kind.WEAK_UNTIL;
                break;
            case BMoThParser.LTL_RELEASE:
                kind = LTLInfixOperatorNode.Kind.RELEASE;
                break;
            case BMoThParser.LTL_AND:
                kind = LTLInfixOperatorNode.Kind.AND;
                break;
            case BMoThParser.LTL_OR:
                kind = LTLInfixOperatorNode.Kind.OR;
                break;
            default:
                throw new AssertionError();
            }
            return new LTLInfixOperatorNode(kind, left, right);
        }
    }
}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.externalSystem.service.project.autoimport; import com.intellij.ProjectTopics; import com.intellij.ide.file.BatchFileChangeListener; import com.intellij.notification.*; import com.intellij.openapi.application.*; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.event.*; import com.intellij.openapi.extensions.ExtensionPointName; import com.intellij.openapi.externalSystem.ExternalSystemAutoImportAware; import com.intellij.openapi.externalSystem.ExternalSystemManager; import com.intellij.openapi.externalSystem.model.DataNode; import com.intellij.openapi.externalSystem.model.ExternalSystemDataKeys; import com.intellij.openapi.externalSystem.model.ProjectSystemId; import com.intellij.openapi.externalSystem.model.project.ProjectData; import com.intellij.openapi.externalSystem.model.task.*; import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode; import com.intellij.openapi.externalSystem.service.internal.ExternalSystemProcessingManager; import com.intellij.openapi.externalSystem.service.notification.ExternalSystemProgressNotificationManager; import com.intellij.openapi.externalSystem.service.project.ExternalProjectRefreshCallback; import com.intellij.openapi.externalSystem.service.project.ProjectDataManager; 
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings; import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil; import com.intellij.openapi.externalSystem.util.ExternalSystemBundle; import com.intellij.openapi.externalSystem.util.ExternalSystemUtil; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl; import com.intellij.openapi.module.Module; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ModuleRootEvent; import com.intellij.openapi.roots.ModuleRootListener; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vfs.*; import com.intellij.openapi.vfs.newvfs.events.VFileContentChangeEvent; import com.intellij.openapi.vfs.newvfs.events.VFileEvent; import com.intellij.openapi.vfs.pointers.VirtualFilePointer; import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager; import com.intellij.psi.*; import com.intellij.psi.impl.source.tree.LeafElement; import com.intellij.util.PathUtil; import com.intellij.util.SmartList; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import com.intellij.util.messages.MessageBusConnection; import com.intellij.util.ui.update.MergingUpdateQueue; import com.intellij.util.ui.update.Update; import gnu.trove.THashSet; import org.jetbrains.annotations.ApiStatus; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.event.HyperlinkEvent; import java.io.File; import java.util.*; import java.util.stream.Collectors; import java.util.zip.CRC32; import static com.intellij.util.ui.update.MergingUpdateQueue.ANY_COMPONENT; /** * @author Vladislav.Soroka * @since 1/30/2017 */ public class ExternalSystemProjectsWatcherImpl extends 
ExternalSystemTaskNotificationListenerAdapter implements ExternalSystemProjectsWatcher { private static final ExtensionPointName<Contributor> EP_NAME = ExtensionPointName.create("com.intellij.externalProjectWatcherContributor"); private static final Key<Long> CRC_WITHOUT_SPACES_CURRENT = Key.create("ExternalSystemProjectsWatcher.CRC_WITHOUT_SPACES_CURRENT"); private static final Key<Long> CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT = Key.create("ExternalSystemProjectsWatcher.CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT"); private static final int DOCUMENT_SAVE_DELAY = 1000; private final Project myProject; private final Set<Document> myChangedDocuments = new THashSet<>(); private final MergingUpdateQueue myChangedDocumentsQueue; private final List<ExternalSystemAutoImportAware> myImportAwareManagers; private final MergingUpdateQueue myUpdatesQueue; private final Map<ProjectSystemId, MyNotification> myNotificationMap; private final MultiMap<String/* project path */, String /* files paths */> myKnownAffectedFiles = MultiMap.createConcurrentSet(); private final MultiMap<VirtualFilePointer, String /* project path */> myFilesPointers = MultiMap.createConcurrentSet(); private final List<LocalFileSystem.WatchRequest> myWatchedRoots = new ArrayList<>(); private final MergingUpdateQueue myRefreshRequestsQueue; public ExternalSystemProjectsWatcherImpl(Project project) { myProject = project; myChangedDocumentsQueue = new MergingUpdateQueue("ExternalSystemProjectsWatcher: Document changes queue", DOCUMENT_SAVE_DELAY, false, ANY_COMPONENT, myProject); myRefreshRequestsQueue = new MergingUpdateQueue("ExternalSystemProjectsWatcher: Refresh requests queue", 0, false, ANY_COMPONENT, myProject, null, false); myImportAwareManagers = ContainerUtil.newArrayList(); for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemApiUtil.getAllManagers()) { if (manager instanceof ExternalSystemAutoImportAware) { myImportAwareManagers.add((ExternalSystemAutoImportAware)manager); 
NotificationsConfiguration.getNotificationsConfiguration().register( manager.getSystemId().getReadableName() + " Import", NotificationDisplayType.STICKY_BALLOON, false); } } myUpdatesQueue = new MergingUpdateQueue("ExternalSystemProjectsWatcher: Notifier queue", 500, false, ANY_COMPONENT, myProject); myNotificationMap = ContainerUtil.newConcurrentMap(); ApplicationManager.getApplication().getMessageBus().connect(myProject).subscribe(BatchFileChangeListener.TOPIC, new BatchFileChangeListener() { @Override public void batchChangeStarted(Project project) { myRefreshRequestsQueue.suspend(); } @Override public void batchChangeCompleted(Project project) { myRefreshRequestsQueue.resume(); } }); } @Override public void markDirtyAllExternalProjects() { findLinkedProjectsSettings().forEach(this::scheduleUpdate); for (Contributor contributor : EP_NAME.getExtensions()) { contributor.markDirtyAllExternalProjects(myProject); } } @Override public void markDirty(Module module) { scheduleUpdate(ExternalSystemApiUtil.getExternalProjectPath(module)); for (Contributor contributor : EP_NAME.getExtensions()) { contributor.markDirty(module); } } @Override public void markDirty(String projectPath) { scheduleUpdate(projectPath); } public synchronized void start() { if (ExternalSystemUtil.isNoBackgroundMode()) { return; } myUpdatesQueue.activate(); final MessageBusConnection myBusConnection = myProject.getMessageBus().connect(myChangedDocumentsQueue); myBusConnection.subscribe(VirtualFileManager.VFS_CHANGES, new MyFileChangeListener(this)); makeUserAware(myChangedDocumentsQueue, myProject); myChangedDocumentsQueue.activate(); myRefreshRequestsQueue.activate(); DocumentListener myDocumentListener = new DocumentListener() { @Override public void documentChanged(DocumentEvent event) { Document doc = event.getDocument(); VirtualFile file = FileDocumentManager.getInstance().getFile(doc); if (file == null) return; String externalProjectPath = getRelatedExternalProjectPath(file); if 
(externalProjectPath == null) return; synchronized (myChangedDocuments) { myChangedDocuments.add(doc); } myChangedDocumentsQueue.queue(new Update(ExternalSystemProjectsWatcherImpl.this) { @Override public void run() { final Document[] copy; synchronized (myChangedDocuments) { copy = myChangedDocuments.toArray(new Document[myChangedDocuments.size()]); myChangedDocuments.clear(); } ExternalSystemUtil.invokeLater(myProject, () -> new WriteAction() { @Override protected void run(@NotNull Result result) { for (Document each : copy) { PsiDocumentManager.getInstance(myProject).commitDocument(each); ((FileDocumentManagerImpl)FileDocumentManager.getInstance()).saveDocument(each, false); } } }.execute()); } }); } }; EditorFactory.getInstance().getEventMulticaster().addDocumentListener(myDocumentListener, myBusConnection); ServiceManager.getService(ExternalSystemProgressNotificationManager.class).addNotificationListener(this); updateWatchedRoots(true); Disposer.register(myChangedDocumentsQueue, () -> myFilesPointers.clear()); } public synchronized void stop() { Disposer.dispose(myChangedDocumentsQueue); Disposer.dispose(myUpdatesQueue); Disposer.dispose(myRefreshRequestsQueue); myNotificationMap.clear(); ServiceManager.getService(ExternalSystemProgressNotificationManager.class).removeNotificationListener(this); } @Override public void onStart(@NotNull ExternalSystemTaskId id, String workingDir) { if (id.getType() == ExternalSystemTaskType.RESOLVE_PROJECT) { final ProjectSystemId systemId = id.getProjectSystemId(); for (String filePath : ContainerUtil.newArrayList(myKnownAffectedFiles.get(workingDir))) { VirtualFile file = VfsUtil.findFileByIoFile(new File(filePath), false); if (file != null && !file.isDirectory()) { file.putUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT, file.getUserData(CRC_WITHOUT_SPACES_CURRENT)); } } myUpdatesQueue.queue(new Update(Pair.create(systemId, workingDir)) { @Override public void run() { doUpdateNotifications(true, systemId, workingDir); } }); } 
} @Override public void onSuccess(@NotNull ExternalSystemTaskId id) { if (id.getType() == ExternalSystemTaskType.RESOLVE_PROJECT) { updateWatchedRoots(false); } } private void scheduleUpdate(String projectPath) { if (ExternalSystemUtil.isNoBackgroundMode()) { return; } Pair<ExternalSystemManager, ExternalProjectSettings> linkedProject = findLinkedProjectSettings(projectPath); if (linkedProject == null) return; scheduleUpdate(linkedProject); } private void scheduleUpdate(@NotNull Pair<ExternalSystemManager, ExternalProjectSettings> linkedProject) { if (ExternalSystemUtil.isNoBackgroundMode()) { return; } ExternalSystemManager<?, ?, ?, ?, ?> manager = linkedProject.first; String projectPath = linkedProject.second.getExternalProjectPath(); ProjectSystemId systemId = manager.getSystemId(); boolean useAutoImport = linkedProject.second.isUseAutoImport(); if (useAutoImport) { final ExternalSystemTask resolveTask = ServiceManager.getService(ExternalSystemProcessingManager.class) .findTask(ExternalSystemTaskType.RESOLVE_PROJECT, systemId, projectPath); final ExternalSystemTaskState taskState = resolveTask == null ? 
null : resolveTask.getState(); if (taskState == null || taskState.isStopped()) { addToRefreshQueue(projectPath, systemId); } else if (taskState != ExternalSystemTaskState.NOT_STARTED) { // re-schedule to wait for the active project import task end final ExternalSystemProgressNotificationManager progressManager = ServiceManager.getService(ExternalSystemProgressNotificationManager.class); final ExternalSystemTaskNotificationListenerAdapter taskListener = new ExternalSystemTaskNotificationListenerAdapter() { @Override public void onEnd(@NotNull ExternalSystemTaskId id) { progressManager.removeNotificationListener(this); addToRefreshQueue(projectPath, systemId); } }; progressManager.addNotificationListener(resolveTask.getId(), taskListener); } } else { myUpdatesQueue.queue(new Update(Pair.create(systemId, projectPath)) { @Override public void run() { doUpdateNotifications(false, systemId, projectPath); } }); } } private void addToRefreshQueue(String projectPath, ProjectSystemId systemId) { myRefreshRequestsQueue.queue(new Update(Pair.create(systemId, projectPath)) { @Override public void run() { scheduleRefresh(myProject, projectPath, systemId, false); } }); } private void updateWatchedRoots(boolean isProjectOpen) { List<String> pathsToWatch = new SmartList<>(); myFilesPointers.clear(); LocalFileSystem.getInstance().removeWatchedRoots(myWatchedRoots); Map<String, VirtualFilePointer> pointerMap = ContainerUtil.newHashMap(); for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemApiUtil.getAllManagers()) { if (!(manager instanceof ExternalSystemAutoImportAware)) continue; ExternalSystemAutoImportAware importAware = (ExternalSystemAutoImportAware)manager; for (ExternalProjectSettings settings : manager.getSettingsProvider().fun(myProject).getLinkedProjectsSettings()) { List<File> files = importAware.getAffectedExternalProjectFiles(settings.getExternalProjectPath(), myProject); long timeStamp = 0; for (File file : files) { timeStamp += file.lastModified(); } 
Map<String, Long> modificationStamps = manager.getLocalSettingsProvider().fun(myProject).getExternalConfigModificationStamps(); if (isProjectOpen && myProject.getUserData(ExternalSystemDataKeys.NEWLY_CREATED_PROJECT) != Boolean.TRUE) { Long affectedFilesTimestamp = modificationStamps.get(settings.getExternalProjectPath()); affectedFilesTimestamp = affectedFilesTimestamp == null ? -1L : affectedFilesTimestamp; if (timeStamp != affectedFilesTimestamp.longValue()) { scheduleUpdate(settings.getExternalProjectPath()); } } else { modificationStamps.put(settings.getExternalProjectPath(), timeStamp); } for (File file : files) { if (file == null) continue; String path = getNormalizedPath(file); if (path == null) continue; pathsToWatch.add(path); String url = VfsUtilCore.pathToUrl(path); VirtualFilePointer pointer = pointerMap.get(url); if (pointer == null) { pointer = VirtualFilePointerManager.getInstance().create(url, myChangedDocumentsQueue, null); pointerMap.put(url, pointer); // update timestamps based on file crc and local settings final VirtualFile virtualFile = pointer.getFile(); if (virtualFile != null) { Long crc = virtualFile.getUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT); if (crc != null) { modificationStamps.put(path, crc); } } } myFilesPointers.putValue(pointer, settings.getExternalProjectPath()); } } } myWatchedRoots.addAll(LocalFileSystem.getInstance().addRootsToWatch(pathsToWatch, false)); } @Nullable private String getRelatedExternalProjectPath(VirtualFile file) { String path = file.getPath(); return getRelatedExternalProjectPath(path); } @Nullable private String getRelatedExternalProjectPath(String path) { String externalProjectPath = null; for (ExternalSystemAutoImportAware importAware : myImportAwareManagers) { externalProjectPath = importAware.getAffectedExternalProjectPath(path, myProject); if (externalProjectPath != null) { break; } } if (externalProjectPath != null) { myKnownAffectedFiles.putValue(externalProjectPath, path); } return 
externalProjectPath; } private void doUpdateNotifications(boolean close, @NotNull ProjectSystemId systemId, @NotNull String projectPath) { MyNotification notification = myNotificationMap.get(systemId); if (close) { if (notification == null) return; notification.projectPaths.remove(projectPath); if (notification.projectPaths.isEmpty()) { notification.expire(); } } else { if (notification != null && !notification.isExpired()) { notification.projectPaths.add(projectPath); return; } notification = new MyNotification(myProject, myNotificationMap, systemId, projectPath); myNotificationMap.put(systemId, notification); Notifications.Bus.notify(notification, myProject); } } private static void scheduleRefresh(@NotNull final Project project, String projectPath, ProjectSystemId systemId, final boolean reportRefreshError) { ExternalSystemUtil.refreshProject( project, systemId, projectPath, new ExternalProjectRefreshCallback() { @Override public void onSuccess(@Nullable final DataNode<ProjectData> externalProject) { if (externalProject != null) { ServiceManager.getService(ProjectDataManager.class).importData(externalProject, project, true); } } @Override public void onFailure(@NotNull String errorMessage, @Nullable String errorDetails) { // Do nothing. 
} }, false, ProgressExecutionMode.IN_BACKGROUND_ASYNC, reportRefreshError); } private static void makeUserAware(final MergingUpdateQueue mergingUpdateQueue, final Project project) { AccessToken accessToken = ReadAction.start(); try { EditorEventMulticaster multicaster = EditorFactory.getInstance().getEventMulticaster(); multicaster.addCaretListener(new CaretListener() { @Override public void caretPositionChanged(CaretEvent e) { mergingUpdateQueue.restartTimer(); } }, mergingUpdateQueue); multicaster.addDocumentListener(new DocumentListener() { @Override public void documentChanged(DocumentEvent event) { mergingUpdateQueue.restartTimer(); } }, mergingUpdateQueue); project.getMessageBus().connect(mergingUpdateQueue).subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() { int beforeCalled; @Override public void beforeRootsChange(ModuleRootEvent event) { if (beforeCalled++ == 0) { mergingUpdateQueue.suspend(); } } @Override public void rootsChanged(ModuleRootEvent event) { if (beforeCalled == 0) { return; // This may occur if listener has been added between beforeRootsChange() and rootsChanged() calls. 
} if (--beforeCalled == 0) { mergingUpdateQueue.resume(); mergingUpdateQueue.restartTimer(); } } }); } finally { accessToken.finish(); } } private static class MyNotification extends Notification { private final ProjectSystemId mySystemId; private final Map<ProjectSystemId, MyNotification> myNotificationMap; private final Set<String> projectPaths; public MyNotification(Project project, Map<ProjectSystemId, MyNotification> notificationMap, ProjectSystemId systemId, String projectPath) { super(systemId.getReadableName() + " Import", ExternalSystemBundle.message("import.needed", systemId.getReadableName()), "<a href='reimport'>" + ExternalSystemBundle.message("import.importChanged") + "</a>" + " &nbsp;&nbsp;" + "<a href='autoImport'>" + ExternalSystemBundle.message("import.enableAutoImport") + "</a>", NotificationType.INFORMATION, null); mySystemId = systemId; myNotificationMap = notificationMap; projectPaths = ContainerUtil.newHashSet(projectPath); setListener(new NotificationListener.Adapter() { @Override protected void hyperlinkActivated(@NotNull Notification notification, @NotNull HyperlinkEvent event) { boolean isReimport = event.getDescription().equals("reimport"); boolean isAutoImport = event.getDescription().equals("autoImport"); projectPaths.stream() .map(path -> ExternalSystemApiUtil.getSettings(project, systemId).getLinkedProjectSettings(path)) .distinct() .filter(Objects::nonNull) .forEach(settings -> { if (isReimport) { scheduleRefresh(project, settings.getExternalProjectPath(), systemId, true); } if (isAutoImport) { settings.setUseAutoImport(true); scheduleRefresh(project, settings.getExternalProjectPath(), systemId, false); } }); notification.expire(); } }); } @Override public void expire() { super.expire(); projectPaths.clear(); myNotificationMap.remove(mySystemId); } } private class MyFileChangeListener extends FileChangeListenerBase { private final ExternalSystemProjectsWatcherImpl myWatcher; private MultiMap<String/* file path */, String /* project 
// NOTE(review): the head of this field declaration lies before the visible chunk; the fragment
// below is the tail of a MultiMap field (apparently file path -> external project paths — TODO
// confirm against the full file) and is preserved verbatim.
path */> myKnownFiles = MultiMap.createSet();
// Files batched up between VFS events and apply(); null whenever no batch is in progress
// (allocated lazily by init(), released by clear() — see areFileSetsInitialised()).
private List<VirtualFile> filesToUpdate;
private List<VirtualFile> filesToRemove;

public MyFileChangeListener(ExternalSystemProjectsWatcherImpl watcher) {
  myWatcher = watcher;
}

/**
 * Decides whether a VFS path is interesting to the watcher.
 * Side effect: populates the myKnownFiles / myKnownAffectedFiles caches so that
 * later events for the same path short-circuit on the first check.
 */
@Override
protected boolean isRelevant(String path) {
  // Already classified earlier in this session — cached positive answer.
  if (!myKnownFiles.get(path).isEmpty()) return true;
  // Check explicitly registered file pointers (settings files etc.).
  for (VirtualFilePointer pointer : myFilesPointers.keySet()) {
    VirtualFile f = pointer.getFile();
    if (f != null && FileUtil.pathsEqual(path, f.getPath())) {
      for (String projectPath : myFilesPointers.get(pointer)) {
        myKnownFiles.putValue(path, projectPath);
        myKnownAffectedFiles.putValue(projectPath, path);
      }
      return true;
    }
  }
  // Fall back to resolving the external project this path belongs to.
  String affectedProjectPath = getRelatedExternalProjectPath(path);
  if (affectedProjectPath != null) {
    myKnownFiles.putValue(path, affectedProjectPath);
  }
  return affectedProjectPath != null;
}

@Override
protected void updateFile(VirtualFile file, VFileEvent event) {
  doUpdateFile(file, event, false);
}

@Override
protected void deleteFile(VirtualFile file, VFileEvent event) {
  doUpdateFile(file, event, true);
}

/**
 * Routes a single VFS event into the current batch. Deletions always go to
 * filesToRemove; content changes are only queued when the file's checksum
 * actually differs from the one taken at the last import — otherwise the
 * change is treated as reverted.
 */
private void doUpdateFile(VirtualFile file, VFileEvent event, boolean remove) {
  init();
  if (remove) {
    filesToRemove.add(file);
  }
  else {
    if (fileWasChanged(file, event)) {
      filesToUpdate.add(file);
    }
    else {
      // Content is back to its imported state — maybe the whole project reverted.
      for (String externalProjectPath : myKnownFiles.get(file.getPath())) {
        handleRevertedChanges(externalProjectPath);
      }
    }
  }
}

/**
 * If every known affected file of the given external project now matches its
 * checksum from the last import, queue a notification update for that project
 * (effectively withdrawing the "needs re-import" state). Bails out as soon as
 * one file still differs.
 */
private void handleRevertedChanges(final String externalProjectPath) {
  for (String filePath : ContainerUtil.newArrayList(myKnownAffectedFiles.get(externalProjectPath))) {
    VirtualFile f = VfsUtil.findFileByIoFile(new File(filePath), false);
    if (f == null ||
        !Objects.equals(f.getUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT), f.getUserData(CRC_WITHOUT_SPACES_CURRENT))) {
      return;
    }
  }
  // Find which external system (if any) has this project linked.
  ProjectSystemId systemId = null;
  for (ExternalSystemManager<?, ?, ?, ?, ?> manager : ExternalSystemApiUtil.getAllManagers()) {
    if (manager.getSettingsProvider().fun(myProject).getLinkedProjectSettings(externalProjectPath) != null) {
      systemId = manager.getSystemId();
    }
  }
  if (systemId != null) {
    ProjectSystemId finalSystemId = systemId;
    myUpdatesQueue.queue(new Update(Pair.create(finalSystemId, externalProjectPath)) {
      @Override
      public void run() {
        doUpdateNotifications(true, finalSystemId, externalProjectPath);
      }
    });
  }
}

/**
 * Returns true when the event must be treated as a real change. Non-content
 * events and invalid files are conservatively considered changed. For content
 * changes, compares the whitespace-insensitive CRC against the one recorded at
 * the last import; the first-ever CRC is recorded as the import baseline.
 */
private boolean fileWasChanged(VirtualFile file, VFileEvent event) {
  if (!file.isValid() || !(event instanceof VFileContentChangeEvent)) return true;
  Long newCrc = calculateCrc(file);
  file.putUserData(CRC_WITHOUT_SPACES_CURRENT, newCrc);
  Long crc = file.getUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT);
  if (crc == null) {
    file.putUserData(CRC_WITHOUT_SPACES_BEFORE_LAST_IMPORT, newCrc);
    return true;
  }
  return !newCrc.equals(crc);
}

/** Flushes the current batch: removed files are excluded from updates, then both sets are scheduled. */
@Override
protected void apply() {
  // the save may occur during project close. in this case the background task
  // can not be started since the window has already been closed.
  if (areFileSetsInitialised()) {
    filesToUpdate.removeAll(filesToRemove);
    scheduleUpdate(ContainerUtil.concat(filesToUpdate, filesToRemove));
  }
  clear();
}

private boolean areFileSetsInitialised() {
  return filesToUpdate != null;
}

/** Maps each changed file to its external project path(s) and notifies the watcher once per distinct path. */
private void scheduleUpdate(List<VirtualFile> filesToUpdate) {
  filesToUpdate.stream()
    .flatMap(f -> myKnownFiles.get(f.getPath()).stream())
    .distinct()
    .forEach(path -> myWatcher.scheduleUpdate(path));
}

/** Lazily allocates the per-batch file lists; idempotent. */
private void init() {
  // Do not use before() method to initialize the lists
  // since the listener can be attached during the update
  // and before method can be skipped.
  // The better way to fix if, of course, is to do something with
  // subscription - add listener not during postStartupActivity
  // but on project initialization to avoid this situation.
  if (areFileSetsInitialised()) return;
  filesToUpdate = new ArrayList<>();
  filesToRemove = new ArrayList<>();
}

/** Resets all per-batch state after apply(). */
private void clear() {
  filesToUpdate = null;
  filesToRemove = null;
  myKnownFiles.clear();
}
}

/**
 * Finds the first external-system manager that has the given project path linked,
 * returning it paired with the matching settings, or null when no manager claims it.
 */
@Nullable
private Pair<ExternalSystemManager, ExternalProjectSettings> findLinkedProjectSettings(String projectPath) {
  // Single-element array used as a mutable slot captured by the lambda.
  final ExternalProjectSettings[] linkedProjectSettings = new ExternalProjectSettings[1];
  Optional<ExternalSystemManager<?, ?, ?, ?, ?>> systemManager = ExternalSystemApiUtil.getAllManagers().stream()
    .filter(m -> {
      linkedProjectSettings[0] = m.getSettingsProvider().fun(myProject).getLinkedProjectSettings(projectPath);
      return linkedProjectSettings[0] != null;
    }).findAny();
  if (!systemManager.isPresent()) return null;
  ExternalSystemManager<?, ?, ?, ?, ?> manager = systemManager.get();
  return Pair.create(manager, linkedProjectSettings[0]);
}

/** Collects (manager, settings) pairs for every linked project of every registered external system. */
@NotNull
private List<Pair<ExternalSystemManager, ExternalProjectSettings>> findLinkedProjectsSettings() {
  return ExternalSystemApiUtil.getAllManagers().stream()
    .flatMap(manager -> manager.getSettingsProvider().fun(myProject).getLinkedProjectsSettings().stream()
      .map(settings -> Pair.create((ExternalSystemManager)manager, (ExternalProjectSettings)settings)))
    .collect(Collectors.toList());
}

/** Canonicalizes a file path and converts it to system-independent ('/') form; null when canonicalization fails. */
@Nullable
private static String getNormalizedPath(@NotNull File file) {
  String canonized = PathUtil.getCanonicalPath(file.getAbsolutePath());
  return canonized == null ? null : FileUtil.toSystemIndependentName(canonized);
}

/**
 * Computes a CRC32 over the file's PSI leaf tokens, skipping whitespace and
 * comments, so pure reformatting does not alter the checksum. Falls back to
 * the VFS modification stamp when no PSI file is available.
 */
@NotNull
private Long calculateCrc(VirtualFile file) {
  Long newCrc;
  PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
  if (psiFile != null) {
    final CRC32 crc32 = new CRC32();
    // PSI access must happen under a read action.
    ApplicationManager.getApplication().runReadAction(() -> psiFile.acceptChildren(new PsiRecursiveElementVisitor() {
      @Override
      public void visitElement(PsiElement element) {
        if (element instanceof LeafElement && !(element instanceof PsiWhiteSpace) && !(element instanceof PsiComment)) {
          String text = element.getText();
          if (!text.trim().isEmpty()) {
            // Feed characters one by one; only non-blank leaf text contributes.
            for (int i = 0, end = text.length(); i < end; i++) {
              crc32.update(text.charAt(i));
            }
          }
        }
        super.visitElement(element);
      }
    }));
    newCrc = crc32.getValue();
  }
  else {
    newCrc = file.getModificationStamp();
  }
  return newCrc;
}

/** Callbacks for marking external projects as requiring re-import. */
@ApiStatus.Experimental
public interface Contributor {

  void markDirtyAllExternalProjects(@NotNull Project project);

  void markDirty(@NotNull Module module);
}
}
package org.handwerkszeug.mvnhack.repository.impl;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;

import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;

import org.handwerkszeug.common.util.Streams;
import org.handwerkszeug.common.util.StringUtil;
import org.handwerkszeug.common.util.XMLEventParser;
import org.handwerkszeug.common.util.XMLEventParser.DefaultHandler;
import org.handwerkszeug.mvnhack.Constants;
import org.handwerkszeug.mvnhack.repository.Artifact;
import org.handwerkszeug.mvnhack.repository.ArtifactBuilder;
import org.handwerkszeug.mvnhack.repository.Context;

/**
 * Builds an {@link Artifact} from a Maven POM read as an {@link InputStream},
 * using StAX event handlers registered on an {@link XMLEventParser}.
 * <p>
 * After parsing, the parent POM is resolved and property placeholders such as
 * {@code ${project.groupId}} / {@code ${parent.version}} are substituted into
 * the project and its dependencies; versions missing from a dependency are
 * filled in from {@code dependencyManagement}.
 */
public class StAXArtifactBuilder implements ArtifactBuilder {

	/**
	 * Parses the POM and returns the reconciled artifact, or {@code null} when
	 * parsing fails or the result lacks groupId/artifactId/version. The given
	 * stream is always closed. Any exception is logged at WARNING, not rethrown.
	 */
	@Override
	public Artifact build(Context context, InputStream pom) {
		DefaultArtifact result = new DefaultArtifact();
		try {
			XMLEventParser parser = new XMLEventParser(pom);
			addArtifactParseHandlers(parser, result);
			parser.add(new DefaultHandler("project"));
			// <packaging> maps onto the artifact "type".
			parser.add(new Type(result, "packaging"));
			Parent parent = new Parent();
			parser.add(parent);
			parser.add(new Dependencies(result));
			Set<Artifact> managed = new HashSet<Artifact>();
			parser.add(new DependencyManagement(managed));
			try {
				parser.parse();
			} finally {
				Streams.close(pom);
			}
			resolveParent(context, parent.getArtifact());
			reconcile(context, result, parent, managed);
			if (validate(result)) {
				return result;
			}
		} catch (Exception e) {
			Constants.LOG.log(Level.WARNING, e.getMessage(), e);
		}
		return null;
	}

	/** Triggers resolution of the parent POM when its coordinates are complete. */
	protected void resolveParent(Context context, Artifact parent) {
		if (validate(parent)) {
			context.resolve(parent.getGroupId(), parent.getArtifactId(), parent
					.getVersion());
		}
	}

	/**
	 * Substitutes parent/project property placeholders into the project and
	 * its dependencies, and applies managed-dependency versions.
	 */
	protected void reconcile(Context context, DefaultArtifact result,
			Parent parent, Set<Artifact> managed) {
		Map<String, String> m = new HashMap<String, String>();
		putContextValues(m, parent.getArtifact(), "parent.");
		reconcileProject(parent.getArtifact(), result, m);
		reconcileDependencies(context, result, managed, m);
	}

	/** Registers {@code <prefix>groupId/artifactId/version} substitution values for the artifact. */
	protected void putContextValues(Map<String, String> m, Artifact a,
			String prefix) {
		m.put(prefix + "groupId", a.getGroupId());
		m.put(prefix + "artifactId", a.getArtifactId());
		m.put(prefix + "version", a.getVersion());
	}

	/**
	 * Resolves the project's own coordinates, inheriting any missing part from
	 * the parent, then exposes them under the empty, {@code pom.} and
	 * {@code project.} prefixes for later substitution.
	 */
	protected void reconcileProject(Artifact parent, DefaultArtifact project,
			Map<String, String> m) {
		reconcile(project, project, m);
		if (StringUtil.isEmpty(project.getGroupId())) {
			project.setGroupId(parent.getGroupId());
		}
		if (StringUtil.isEmpty(project.getArtifactId())) {
			project.setArtifactId(parent.getArtifactId());
		}
		if (StringUtil.isEmpty(project.getVersion())) {
			project.setVersion(parent.getVersion());
		}
		putContextValues(m, project, "");
		putContextValues(m, project, "pom.");
		putContextValues(m, project, "project.");
	}

	/** Copies src's coordinates into dest with placeholder substitution applied. */
	protected void reconcile(Artifact src, DefaultArtifact dest,
			Map<String, String> m) {
		dest.setGroupId(StringUtil.replace(src.getGroupId(), m));
		dest.setArtifactId(StringUtil.replace(src.getArtifactId(), m));
		dest.setVersion(StringUtil.replace(src.getVersion(), m));
	}

	/**
	 * Substitutes placeholders into managed and declared dependencies, fills in
	 * versions from dependencyManagement, and re-adds only valid, supported
	 * dependencies to the project. RELEASE and range versions are logged and skipped.
	 */
	protected void reconcileDependencies(Context context,
			DefaultArtifact project, Set<Artifact> managed,
			Map<String, String> m) {
		for (Artifact a : managed) {
			DefaultArtifact newone = new DefaultArtifact();
			reconcile(a, newone, m);
			context.addManagedDependency(newone);
		}
		List<Artifact> copy = new ArrayList<Artifact>(project.getDependencies());
		project.dependencies.clear();
		for (Artifact a : copy) {
			DefaultArtifact newone = new DefaultArtifact();
			reconcile(a, newone, m);
			if (StringUtil.isEmpty(newone.getVersion())) {
				newone.setVersion(context.getManagedDependency(a));
			}
			// in the case of RELEASE ,
			// get maven-metadata.xml from repository and find <release>
			if (validate(newone)) {
				String v = newone.getVersion();
				if ("RELEASE".equals(v) || containsVersionRange(v)) {
					Constants.LOG
							.log(Level.INFO, "Unsupported Versioning " + v);
				} else {
					project.add(newone);
				}
			}
		}
	}

	/**
	 * True when the version string uses Maven range syntax, e.g. "[1.0,2.0)" or "(,1.0]".
	 * FIX: the previous check {@code 0 < v.indexOf('[')} missed ranges whose '['
	 * sits at index 0 (the common case) and never detected exclusive lower
	 * bounds starting with '('.
	 */
	protected boolean containsVersionRange(String v) {
		return v.indexOf('[') >= 0 || v.indexOf('(') >= 0;
	}

	/** Convenience overload validating an artifact's full coordinates. */
	protected boolean validate(Artifact a) {
		return validate(a.getGroupId(), a.getArtifactId(), a.getVersion());
	}

	/** Returns true only when every id is non-empty. */
	protected boolean validate(String... ids) {
		for (String s : ids) {
			if (StringUtil.isEmpty(s)) {
				return false;
			}
		}
		return true;
	}

	/** Registers the groupId/artifactId/version element handlers targeting the given artifact. */
	protected void addArtifactParseHandlers(XMLEventParser parser,
			DefaultArtifact a) {
		parser.add(new GroupId(a));
		parser.add(new ArtifactId(a));
		parser.add(new Version(a));
	}

	/** Handles {@code <groupId>} text. */
	protected class GroupId extends DefaultHandler {
		protected DefaultArtifact a;

		protected GroupId(DefaultArtifact a) {
			super("groupId");
			this.a = a;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			a.setGroupId(reader.getElementText());
		}
	}

	/** Handles {@code <artifactId>} text. */
	protected class ArtifactId extends DefaultHandler {
		protected DefaultArtifact a;

		protected ArtifactId(DefaultArtifact a) {
			super("artifactId");
			this.a = a;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			a.setArtifactId(reader.getElementText());
		}
	}

	/** Handles {@code <version>} text. */
	protected class Version extends DefaultHandler {
		protected DefaultArtifact a;

		protected Version(DefaultArtifact a) {
			super("version");
			this.a = a;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			a.setVersion(reader.getElementText());
		}
	}

	/** Handles {@code <type>} (or {@code <packaging>} at project level) text. */
	protected class Type extends DefaultHandler {
		protected DefaultArtifact a;

		protected Type(DefaultArtifact a) {
			this(a, "type");
		}

		protected Type(DefaultArtifact a, String tag) {
			super(tag);
			this.a = a;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			a.setType(reader.getElementText());
		}
	}

	/** Parses the {@code <parent>} section into its own artifact. */
	protected class Parent extends DefaultHandler {
		protected DefaultArtifact a;

		protected Parent() {
			super("parent");
			this.a = new DefaultArtifact();
		}

		public Artifact getArtifact() {
			return this.a;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			// Nested parser scoped to the <parent> element.
			XMLEventParser parser = new XMLEventParser(reader);
			addArtifactParseHandlers(parser, a);
			parser.parse(getTagName());
		}
	}

	/** Parses a {@code <dependencies>} section, adding each dependency to the project. */
	protected class Dependencies extends DefaultHandler {
		protected DefaultArtifact project;

		protected Dependencies(DefaultArtifact project) {
			super("dependencies");
			this.project = project;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			XMLEventParser parser = new XMLEventParser(reader);
			parser.add(new Dependency(this.project));
			parser.parse(getTagName());
		}
	}

	/**
	 * Parses one {@code <dependency>}; only non-test-scoped, non-optional
	 * dependencies are added to the project.
	 */
	protected class Dependency extends DefaultHandler {
		protected DefaultArtifact project;

		protected Dependency(DefaultArtifact a) {
			super("dependency");
			this.project = a;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			DefaultArtifact newone = new DefaultArtifact();
			XMLEventParser parser = new XMLEventParser(reader);
			addArtifactParseHandlers(parser, newone);
			parser.add(new Type(newone));
			Scope scope = new Scope();
			parser.add(scope);
			Optional optional = new Optional(newone);
			parser.add(optional);
			parser.parse(getTagName());
			if (scope.isNotTest() && newone.isOptional() == false) {
				this.project.add(newone);
			}
		}
	}

	/** Captures {@code <scope>}; an absent scope counts as "not test" (i.e. compile). */
	protected class Scope extends DefaultHandler {
		protected String scope;

		protected Scope() {
			super("scope");
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			this.scope = reader.getElementText();
		}

		protected boolean isNotTest() {
			return StringUtil.isEmpty(scope)
					|| "test".equalsIgnoreCase(scope) == false;
		}
	}

	/** Captures {@code <optional>}; empty or unparseable text counts as not optional. */
	protected class Optional extends DefaultHandler {
		protected DefaultArtifact a;

		protected Optional(DefaultArtifact a) {
			super("optional");
			this.a = a;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			String optional = reader.getElementText();
			a.setOptional(isOptional(optional));
		}

		protected boolean isOptional(String optional) {
			return StringUtil.isEmpty(optional) == false
					&& Boolean.parseBoolean(optional);
		}
	}

	/** Parses {@code <dependencyManagement>}, collecting valid managed artifacts into the given set. */
	protected class DependencyManagement extends DefaultHandler {
		protected Set<Artifact> managed;

		protected DependencyManagement(Set<Artifact> managed) {
			super("dependencyManagement");
			this.managed = managed;
		}

		@Override
		public void handle(XMLStreamReader reader) throws XMLStreamException {
			XMLEventParser parser = new XMLEventParser(reader);
			// Reuse the <dependencies> handler against a scratch artifact.
			DefaultArtifact newone = new DefaultArtifact();
			parser.add(new Dependencies(newone));
			parser.parse(getTagName());
			for (Artifact a : newone.getDependencies()) {
				if (validate(a)) {
					this.managed.add(a);
				}
			}
		}
	}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.search; import com.carrotsearch.hppc.IntArrayList; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.query.QuerySearchResult; import 
org.elasticsearch.search.query.QuerySearchResultProvider;

import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;

/**
 * Base class driving the first (per-shard) phase of a search: it fans out one
 * request per shard group, retries failures on the next shard copy, tracks
 * progress with lock-free counters, and moves to the second phase exactly once
 * when all expected operations have completed.
 * <p>
 * Concurrency note: progress is tracked by two AtomicIntegers. {@code totalOps}
 * counts every terminal attempt (success or exhausted-failure) and gates the
 * transition to the second phase; {@code successfulOps} counts successes only.
 * The ordering of updates in the result handlers below is load-bearing — see
 * the inline comments.
 */
abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult> extends AbstractAsyncAction {

    protected final Logger logger;
    protected final SearchTransportService searchTransportService;
    private final Executor executor;
    protected final ActionListener<SearchResponse> listener;
    private final GroupShardsIterator shardsIts;
    protected final SearchRequest request;
    /** Used by subclasses to resolve node ids to DiscoveryNodes. **/
    protected final Function<String, DiscoveryNode> nodeIdToDiscoveryNode;
    protected final int expectedSuccessfulOps;
    private final int expectedTotalOps;
    protected final AtomicInteger successfulOps = new AtomicInteger();
    private final AtomicInteger totalOps = new AtomicInteger();
    protected final AtomicArray<FirstResult> firstResults;
    private final Map<String, AliasFilter> aliasFilter;
    private final long clusterStateVersion;
    // Lazily allocated on first failure (double-checked under shardFailuresMutex)
    // so the common no-failure case never pays for the array.
    private volatile AtomicArray<ShardSearchFailure> shardFailures;
    private final Object shardFailuresMutex = new Object();
    protected volatile ScoreDoc[] sortedShardDocs;

    protected AbstractSearchAsyncAction(Logger logger, SearchTransportService searchTransportService,
                                        Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
                                        Map<String, AliasFilter> aliasFilter, Executor executor, SearchRequest request,
                                        ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts, long startTime,
                                        long clusterStateVersion) {
        super(startTime);
        this.logger = logger;
        this.searchTransportService = searchTransportService;
        this.executor = executor;
        this.request = request;
        this.listener = listener;
        this.nodeIdToDiscoveryNode = nodeIdToDiscoveryNode;
        this.clusterStateVersion = clusterStateVersion;
        this.shardsIts = shardsIts;
        expectedSuccessfulOps = shardsIts.size();
        // we need to add 1 for non active partition, since we count it in the total!
        expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
        firstResults = new AtomicArray<>(shardsIts.size());
        this.aliasFilter = aliasFilter;
    }

    /** Kicks off the first phase: one attempt per shard group, or an empty response when there is nothing to search. */
    public void start() {
        if (expectedSuccessfulOps == 0) {
            //no search shards to search on, bail with empty response
            //(it happens with search across _all with no indices around and consistent with broadcast operations)
            listener.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, buildTookInMillis(),
                ShardSearchFailure.EMPTY_ARRAY));
            return;
        }
        int shardIndex = -1;
        for (final ShardIterator shardIt : shardsIts) {
            shardIndex++;
            final ShardRouting shard = shardIt.nextOrNull();
            if (shard != null) {
                performFirstPhase(shardIndex, shardIt, shard);
            } else {
                // really, no shards active in this group
                onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
            }
        }
    }

    /** Sends the first-phase request to one shard copy, routing success/failure back into the result handlers. */
    void performFirstPhase(final int shardIndex, final ShardIterator shardIt, final ShardRouting shard) {
        if (shard == null) {
            // no more active shards... (we should not really get here, but just for safety)
            onFirstPhaseResult(shardIndex, null, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
        } else {
            final DiscoveryNode node = nodeIdToDiscoveryNode.apply(shard.currentNodeId());
            if (node == null) {
                onFirstPhaseResult(shardIndex, shard, null, shardIt, new NoShardAvailableActionException(shardIt.shardId()));
            } else {
                AliasFilter filter = this.aliasFilter.get(shard.index().getName());
                ShardSearchTransportRequest transportRequest = new ShardSearchTransportRequest(request, shard, shardsIts.size(),
                    filter, startTime());
                sendExecuteFirstPhase(node, transportRequest, new ActionListener<FirstResult>() {
                    @Override
                    public void onResponse(FirstResult result) {
                        onFirstPhaseResult(shardIndex, shard, result, shardIt);
                    }

                    @Override
                    public void onFailure(Exception t) {
                        onFirstPhaseResult(shardIndex, shard, node.getId(), shardIt, t);
                    }
                });
            }
        }
    }

    /** Success path for one shard attempt; may trigger the transition to the second phase. */
    void onFirstPhaseResult(int shardIndex, ShardRouting shard, FirstResult result, ShardIterator shardIt) {
        result.shardTarget(new SearchShardTarget(shard.currentNodeId(), shard.index(), shard.id()));
        processFirstPhaseResult(shardIndex, result);
        // we need to increment successful ops first before we compare the exit condition otherwise if we
        // are fast we could concurrently update totalOps but then preempt one of the threads which can
        // cause the successor to read a wrong value from successfulOps if second phase is very fast ie. count etc.
        successfulOps.incrementAndGet();
        // increment all the "future" shards to update the total ops since we some may work and some may not...
        // and when that happens, we break on total ops, so we must maintain them
        final int xTotalOps = totalOps.addAndGet(shardIt.remaining() + 1);
        if (xTotalOps == expectedTotalOps) {
            try {
                innerMoveToSecondPhase();
            } catch (Exception e) {
                if (logger.isDebugEnabled()) {
                    logger.debug(
                        (Supplier<?>) () -> new ParameterizedMessage(
                            "{}: Failed to execute [{}] while moving to second phase",
                            shardIt.shardId(),
                            request), e);
                }
                raiseEarlyFailure(new ReduceSearchPhaseException(firstPhaseName(), "", e, buildShardFailures()));
            }
        } else if (xTotalOps > expectedTotalOps) {
            raiseEarlyFailure(new IllegalStateException("unexpected higher total ops [" + xTotalOps + "] compared " +
                "to expected [" + expectedTotalOps + "]"));
        }
    }

    /**
     * Failure path for one shard attempt: records the failure, then either
     * retries on the next copy, finishes the phase, or fails the whole search
     * when no shard succeeded at all.
     */
    void onFirstPhaseResult(final int shardIndex, @Nullable ShardRouting shard, @Nullable String nodeId,
                            final ShardIterator shardIt, Exception e) {
        // we always add the shard failure for a specific shard instance
        // we do make sure to clean it on a successful response from a shard
        SearchShardTarget shardTarget = new SearchShardTarget(nodeId, shardIt.shardId().getIndex(), shardIt.shardId().getId());
        addShardFailure(shardIndex, shardTarget, e);
        if (totalOps.incrementAndGet() == expectedTotalOps) {
            // This failure was the last outstanding op: finish the phase now.
            if (logger.isDebugEnabled()) {
                if (e != null && !TransportActions.isShardNotAvailableException(e)) {
                    logger.debug(
                        (Supplier<?>) () -> new ParameterizedMessage(
                            "{}: Failed to execute [{}]",
                            shard != null ? shard.shortSummary() : shardIt.shardId(),
                            request), e);
                } else if (logger.isTraceEnabled()) {
                    logger.trace((Supplier<?>) () -> new ParameterizedMessage("{}: Failed to execute [{}]", shard, request), e);
                }
            }
            final ShardSearchFailure[] shardSearchFailures = buildShardFailures();
            if (successfulOps.get() == 0) {
                if (logger.isDebugEnabled()) {
                    logger.debug((Supplier<?>) () -> new ParameterizedMessage("All shards failed for phase: [{}]", firstPhaseName()), e);
                }
                // no successful ops, raise an exception
                raiseEarlyFailure(new SearchPhaseExecutionException(firstPhaseName(), "all shards failed", e, shardSearchFailures));
            } else {
                try {
                    innerMoveToSecondPhase();
                } catch (Exception inner) {
                    inner.addSuppressed(e);
                    raiseEarlyFailure(new ReduceSearchPhaseException(firstPhaseName(), "", inner, shardSearchFailures));
                }
            }
        } else {
            // More ops outstanding: try the next shard copy, if any.
            final ShardRouting nextShard = shardIt.nextOrNull();
            final boolean lastShard = nextShard == null;
            // trace log this exception
            logger.trace(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "{}: Failed to execute [{}] lastShard [{}]",
                    shard != null ? shard.shortSummary() : shardIt.shardId(),
                    request,
                    lastShard), e);
            if (!lastShard) {
                try {
                    performFirstPhase(shardIndex, shardIt, nextShard);
                } catch (Exception inner) {
                    inner.addSuppressed(e);
                    onFirstPhaseResult(shardIndex, shard, shard.currentNodeId(), shardIt, inner);
                }
            } else {
                // no more shards active, add a failure
                if (logger.isDebugEnabled() && !logger.isTraceEnabled()) { // do not double log this exception
                    if (e != null && !TransportActions.isShardNotAvailableException(e)) {
                        logger.debug(
                            (Supplier<?>) () -> new ParameterizedMessage(
                                "{}: Failed to execute [{}] lastShard [{}]",
                                shard != null ? shard.shortSummary() : shardIt.shardId(),
                                request,
                                lastShard), e);
                    }
                }
            }
        }
    }

    /** Snapshots the collected shard failures into an array; empty when none were recorded. */
    protected final ShardSearchFailure[] buildShardFailures() {
        AtomicArray<ShardSearchFailure> shardFailures = this.shardFailures;
        if (shardFailures == null) {
            return ShardSearchFailure.EMPTY_ARRAY;
        }
        List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
        ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
        for (int i = 0; i < failures.length; i++) {
            failures[i] = entries.get(i).value;
        }
        return failures;
    }

    /** Records a failure for a shard slot, preferring the more meaningful exception when one is already present. */
    protected final void addShardFailure(final int shardIndex, @Nullable SearchShardTarget shardTarget, Exception e) {
        // we don't aggregate shard failures on non active shards (but do keep the header counts right)
        if (TransportActions.isShardNotAvailableException(e)) {
            return;
        }
        // lazily create shard failures, so we can early build the empty shard failure list in most cases (no failures)
        if (shardFailures == null) {
            synchronized (shardFailuresMutex) {
                if (shardFailures == null) {
                    shardFailures = new AtomicArray<>(shardsIts.size());
                }
            }
        }
        ShardSearchFailure failure = shardFailures.get(shardIndex);
        if (failure == null) {
            shardFailures.set(shardIndex, new ShardSearchFailure(e, shardTarget));
        } else {
            // the failure is already present, try and not override it with an exception that is less meaningless
            // for example, getting illegal shard state
            if (TransportActions.isReadOverrideException(e)) {
                shardFailures.set(shardIndex, new ShardSearchFailure(e, shardTarget));
            }
        }
    }

    /** Frees every already-acquired search context, then fails the listener with the given exception. */
    private void raiseEarlyFailure(Exception e) {
        for (AtomicArray.Entry<FirstResult> entry : firstResults.asList()) {
            try {
                DiscoveryNode node = nodeIdToDiscoveryNode.apply(entry.value.shardTarget().nodeId());
                sendReleaseSearchContext(entry.value.id(), node);
            } catch (Exception inner) {
                inner.addSuppressed(e);
                logger.trace("failed to release context", inner);
            }
        }
        listener.onFailure(e);
    }

    /**
     * Releases shard targets that are not used in the docsIdsToLoad.
     */
    protected void releaseIrrelevantSearchContexts(AtomicArray<? extends QuerySearchResultProvider> queryResults,
                                                   AtomicArray<IntArrayList> docIdsToLoad) {
        if (docIdsToLoad == null) {
            return;
        }
        // we only release search context that we did not fetch from if we are not scrolling
        if (request.scroll() == null) {
            for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults.asList()) {
                QuerySearchResult queryResult = entry.value.queryResult();
                if (queryResult.hasHits()
                    && docIdsToLoad.get(entry.index) == null) { // but none of them made it to the global top docs
                    try {
                        DiscoveryNode node = nodeIdToDiscoveryNode.apply(entry.value.queryResult().shardTarget().nodeId());
                        sendReleaseSearchContext(entry.value.queryResult().id(), node);
                    } catch (Exception e) {
                        logger.trace("failed to release context", e);
                    }
                }
            }
        }
    }

    /** Best-effort free of a remote search context; no-op when the node is unknown. */
    protected void sendReleaseSearchContext(long contextId, DiscoveryNode node) {
        if (node != null) {
            searchTransportService.sendFreeContext(node, contextId, request);
        }
    }

    /** Builds the fetch-phase request for one shard, carrying the last emitted doc for pagination when available. */
    protected ShardFetchSearchRequest createFetchRequest(QuerySearchResult queryResult, AtomicArray.Entry<IntArrayList> entry,
                                                         ScoreDoc[] lastEmittedDocPerShard) {
        final ScoreDoc lastEmittedDoc = (lastEmittedDocPerShard != null) ? lastEmittedDocPerShard[entry.index] : null;
        return new ShardFetchSearchRequest(request, queryResult.id(), entry.value, lastEmittedDoc);
    }

    protected abstract void sendExecuteFirstPhase(DiscoveryNode node, ShardSearchTransportRequest request,
                                                  ActionListener<FirstResult> listener);

    /** Stores a successful per-shard result and clears any earlier failure recorded for that slot. */
    protected final void processFirstPhaseResult(int shardIndex, FirstResult result) {
        firstResults.set(shardIndex, result);

        if (logger.isTraceEnabled()) {
            logger.trace("got first-phase result from {}", result != null ? result.shardTarget() : null);
        }

        // clean a previous error on this shard group (note, this code will be serialized on the same shardIndex value level
        // so its ok concurrency wise to miss potentially the shard failures being created because of another failure
        // in the #addShardFailure, because by definition, it will happen on *another* shardIndex
        AtomicArray<ShardSearchFailure> shardFailures = this.shardFailures;
        if (shardFailures != null) {
            shardFailures.set(shardIndex, null);
        }
    }

    /** Trace-logs the surviving results and delegates to the subclass's second phase. */
    final void innerMoveToSecondPhase() throws Exception {
        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder();
            boolean hadOne = false;
            for (int i = 0; i < firstResults.length(); i++) {
                FirstResult result = firstResults.get(i);
                if (result == null) {
                    continue; // failure
                }
                if (hadOne) {
                    sb.append(",");
                } else {
                    hadOne = true;
                }
                sb.append(result.shardTarget());
            }

            logger.trace("Moving to second phase, based on results from: {} (cluster state version: {})", sb, clusterStateVersion);
        }
        moveToSecondPhase();
    }

    protected abstract void moveToSecondPhase() throws Exception;

    protected abstract String firstPhaseName();

    protected Executor getExecutor() {
        return executor;
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.seqno; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.BrokenBarrierException; import 
java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongConsumer; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import static java.util.Collections.emptySet; import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; public class ReplicationTrackerTests extends ReplicationTrackerTestCase { public void testEmptyShards() { final ReplicationTracker tracker = newTracker(AllocationId.newInitializing()); assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO)); } private Map<AllocationId, Long> randomAllocationsWithLocalCheckpoints(int min, int max) { Map<AllocationId, Long> allocations = new HashMap<>(); for (int i = randomIntBetween(min, max); i > 0; i--) { allocations.put(AllocationId.newInitializing(), (long) randomInt(1000)); } return allocations; } private static Set<String> ids(Set<AllocationId> allocationIds) { return allocationIds.stream().map(AllocationId::getId).collect(Collectors.toSet()); } private void updateLocalCheckpoint(final ReplicationTracker tracker, final String allocationId, final long localCheckpoint) { tracker.updateLocalCheckpoint(allocationId, localCheckpoint); assertThat(updatedGlobalCheckpoint.get(), equalTo(tracker.getGlobalCheckpoint())); } public void testGlobalCheckpointUpdate() { final long initialClusterStateVersion = randomNonNegativeLong(); Map<AllocationId, Long> 
allocations = new HashMap<>();
    // continuation of testGlobalCheckpointUpdate (the method header is on the previous chunk line)
    Map<AllocationId, Long> activeWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 5);
    Set<AllocationId> active = new HashSet<>(activeWithCheckpoints.keySet());
    allocations.putAll(activeWithCheckpoints);
    Map<AllocationId, Long> initializingWithCheckpoints = randomAllocationsWithLocalCheckpoints(0, 5);
    Set<AllocationId> initializing = new HashSet<>(initializingWithCheckpoints.keySet());
    allocations.putAll(initializingWithCheckpoints);
    assertThat(allocations.size(), equalTo(active.size() + initializing.size()));

    // note: allocations can never be empty in practice as we always have at least one primary shard active/in sync
    // it is however nice not to assume this on this level and check we do the right thing.
    final long minLocalCheckpoint = allocations.values().stream().min(Long::compare).orElse(UNASSIGNED_SEQ_NO);

    final AllocationId primaryId = active.iterator().next();
    final ReplicationTracker tracker = newTracker(primaryId);
    assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));

    logger.info("--> using allocations");
    allocations.keySet().forEach(aId -> {
        final String type;
        if (active.contains(aId)) {
            type = "active";
        } else if (initializing.contains(aId)) {
            type = "init";
        } else {
            throw new IllegalStateException(aId + " not found in any map");
        }
        logger.info(" - [{}], local checkpoint [{}], [{}]", aId, allocations.get(aId), type);
    });

    tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId));
    tracker.activatePrimaryMode(NO_OPS_PERFORMED);
    assertThat(tracker.getReplicationGroup().getReplicationTargets().size(), equalTo(1));
    initializing.forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED));
    assertThat(tracker.getReplicationGroup().getReplicationTargets().size(), equalTo(1 + initializing.size()));
    allocations.keySet().forEach(aId -> updateLocalCheckpoint(tracker, aId.getId(), allocations.get(aId)));
    // once every copy has reported, the global checkpoint equals the minimum local checkpoint
    assertThat(tracker.getGlobalCheckpoint(), equalTo(minLocalCheckpoint));

    // increment checkpoints
    active.forEach(aId -> allocations.put(aId, allocations.get(aId) + 1 + randomInt(4)));
    initializing.forEach(aId -> allocations.put(aId, allocations.get(aId) + 1 + randomInt(4)));
    allocations.keySet().forEach(aId -> updateLocalCheckpoint(tracker, aId.getId(), allocations.get(aId)));

    final long minLocalCheckpointAfterUpdates =
        allocations.entrySet().stream().map(Map.Entry::getValue).min(Long::compareTo).orElse(UNASSIGNED_SEQ_NO);

    // now insert an unknown active/insync id , the checkpoint shouldn't change but a refresh should be requested.
    final AllocationId extraId = AllocationId.newInitializing();

    // first check that adding it without the master blessing doesn't change anything.
    updateLocalCheckpoint(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
    assertNull(tracker.checkpoints.get(extraId.getId()));
    expectThrows(IllegalStateException.class, () -> tracker.initiateTracking(extraId.getId()));

    Set<AllocationId> newInitializing = new HashSet<>(initializing);
    newInitializing.add(extraId);
    tracker.updateFromMaster(initialClusterStateVersion + 1, ids(active), routingTable(newInitializing, primaryId));

    addPeerRecoveryRetentionLease(tracker, extraId);
    tracker.initiateTracking(extraId.getId());

    // now notify for the new id
    if (randomBoolean()) {
        updateLocalCheckpoint(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
        markAsTrackingAndInSyncQuietly(tracker, extraId.getId(), randomInt((int) minLocalCheckpointAfterUpdates));
    } else {
        markAsTrackingAndInSyncQuietly(tracker, extraId.getId(), minLocalCheckpointAfterUpdates + 1 + randomInt(4));
    }

    // now it should be incremented
    assertThat(tracker.getGlobalCheckpoint(), greaterThan(minLocalCheckpoint));
}

// A replica-mode tracker only moves its global checkpoint forward, never backward.
public void testUpdateGlobalCheckpointOnReplica() {
    final AllocationId active = AllocationId.newInitializing();
    final ReplicationTracker tracker = newTracker(active);
    final long
globalCheckpoint = randomLongBetween(NO_OPS_PERFORMED, Long.MAX_VALUE - 1);
    // continuation of testUpdateGlobalCheckpointOnReplica (header on the previous chunk line)
    tracker.updateGlobalCheckpointOnReplica(globalCheckpoint, "test");
    assertThat(updatedGlobalCheckpoint.get(), equalTo(globalCheckpoint));
    // a value at or below the current global checkpoint must not trigger the listener
    final long nonUpdate = randomLongBetween(NO_OPS_PERFORMED, globalCheckpoint);
    updatedGlobalCheckpoint.set(UNASSIGNED_SEQ_NO);
    tracker.updateGlobalCheckpointOnReplica(nonUpdate, "test");
    assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
    // a strictly higher value advances the global checkpoint
    final long update = randomLongBetween(globalCheckpoint, Long.MAX_VALUE);
    tracker.updateGlobalCheckpointOnReplica(update, "test");
    assertThat(updatedGlobalCheckpoint.get(), equalTo(update));
}

// Marking a lagging replica as in-sync blocks until it catches up; while the copy is
// pending in-sync the global checkpoint must not advance.
public void testMarkAllocationIdAsInSync() throws Exception {
    final long initialClusterStateVersion = randomNonNegativeLong();
    Map<AllocationId, Long> activeWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 1);
    Set<AllocationId> active = new HashSet<>(activeWithCheckpoints.keySet());
    Map<AllocationId, Long> initializingWithCheckpoints = randomAllocationsWithLocalCheckpoints(1, 1);
    Set<AllocationId> initializing = new HashSet<>(initializingWithCheckpoints.keySet());
    final AllocationId primaryId = active.iterator().next();
    final AllocationId replicaId = initializing.iterator().next();
    final ReplicationTracker tracker = newTracker(primaryId);
    tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId));
    final long localCheckpoint = randomLongBetween(0, Long.MAX_VALUE - 1);
    tracker.activatePrimaryMode(localCheckpoint);
    addPeerRecoveryRetentionLease(tracker, replicaId);
    tracker.initiateTracking(replicaId.getId());
    final CyclicBarrier barrier = new CyclicBarrier(2);
    // background thread blocks in markAllocationIdAsInSync because the replica's
    // checkpoint is below the primary's local checkpoint
    final Thread thread = new Thread(() -> {
        try {
            barrier.await();
            tracker.markAllocationIdAsInSync(
                replicaId.getId(),
                randomLongBetween(NO_OPS_PERFORMED, localCheckpoint - 1));
            barrier.await();
        } catch (BrokenBarrierException | InterruptedException e) {
            throw new AssertionError(e);
        }
    });
    thread.start();
    barrier.await();
    assertBusy(() -> assertTrue(tracker.pendingInSync()));
    final long updatedLocalCheckpoint = randomLongBetween(1 + localCheckpoint, Long.MAX_VALUE);
    // there is a shard copy pending in sync, the global checkpoint can not advance
    updatedGlobalCheckpoint.set(UNASSIGNED_SEQ_NO);
    tracker.updateLocalCheckpoint(primaryId.getId(), updatedLocalCheckpoint);
    assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
    // we are implicitly marking the pending in sync copy as in sync with the current global checkpoint, no advancement should occur
    tracker.updateLocalCheckpoint(replicaId.getId(), localCheckpoint);
    assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
    barrier.await();
    thread.join();
    // now we expect that the global checkpoint would advance
    tracker.markAllocationIdAsInSync(replicaId.getId(), updatedLocalCheckpoint);
    assertThat(updatedGlobalCheckpoint.get(), equalTo(updatedLocalCheckpoint));
}

// While any in-sync (active) copy has not reported a local checkpoint, the
// global checkpoint must stay at UNASSIGNED_SEQ_NO.
public void testMissingActiveIdsPreventAdvance() {
    final Map<AllocationId, Long> active = randomAllocationsWithLocalCheckpoints(2, 5);
    final Map<AllocationId, Long> initializing = randomAllocationsWithLocalCheckpoints(0, 5);
    final Map<AllocationId, Long> assigned = new HashMap<>();
    assigned.putAll(active);
    assigned.putAll(initializing);
    AllocationId primaryId = active.keySet().iterator().next();
    final ReplicationTracker tracker = newTracker(primaryId);
    tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId));
    tracker.activatePrimaryMode(NO_OPS_PERFORMED);
    randomSubsetOf(initializing.keySet()).forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED));
    final AllocationId missingActiveID = randomFrom(active.keySet());
    // report checkpoints for everything except one randomly-chosen active copy
    assigned
        .entrySet()
        .stream()
        .filter(e -> !e.getKey().equals(missingActiveID))
        .forEach(e -> updateLocalCheckpoint(tracker, e.getKey().getId(), e.getValue()));
    if (missingActiveID.equals(primaryId) == false) {
assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
        // continuation of testMissingActiveIdsPreventAdvance: the withheld copy keeps the checkpoint unassigned
        assertThat(updatedGlobalCheckpoint.get(), equalTo(UNASSIGNED_SEQ_NO));
    }
    // now update all knowledge of all shards
    assigned.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));
    assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO)));
    assertThat(updatedGlobalCheckpoint.get(), not(equalTo(UNASSIGNED_SEQ_NO)));
}

// Initializing copies that were marked in-sync participate in the global checkpoint:
// until they report, the checkpoint stays at NO_OPS_PERFORMED.
public void testMissingInSyncIdsPreventAdvance() {
    final Map<AllocationId, Long> active = randomAllocationsWithLocalCheckpoints(1, 5);
    final Map<AllocationId, Long> initializing = randomAllocationsWithLocalCheckpoints(2, 5);
    logger.info("active: {}, initializing: {}", active, initializing);
    AllocationId primaryId = active.keySet().iterator().next();
    final ReplicationTracker tracker = newTracker(primaryId);
    tracker.updateFromMaster(randomNonNegativeLong(), ids(active.keySet()), routingTable(initializing.keySet(), primaryId));
    tracker.activatePrimaryMode(NO_OPS_PERFORMED);
    randomSubsetOf(randomIntBetween(1, initializing.size() - 1), initializing.keySet())
        .forEach(aId -> markAsTrackingAndInSyncQuietly(tracker, aId.getId(), NO_OPS_PERFORMED));

    active.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));

    assertThat(tracker.getGlobalCheckpoint(), equalTo(NO_OPS_PERFORMED));
    assertThat(updatedGlobalCheckpoint.get(), equalTo(NO_OPS_PERFORMED));

    // update again
    initializing.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));
    assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO)));
    assertThat(updatedGlobalCheckpoint.get(), not(equalTo(UNASSIGNED_SEQ_NO)));
}

// Copies never blessed by the master cannot be marked in-sync and do not affect the checkpoint.
public void testInSyncIdsAreIgnoredIfNotValidatedByMaster() {
    final Map<AllocationId, Long> active = randomAllocationsWithLocalCheckpoints(1, 5);
    final Map<AllocationId, Long> initializing = randomAllocationsWithLocalCheckpoints(1, 5);
    final Map<AllocationId, Long> nonApproved = randomAllocationsWithLocalCheckpoints(1, 5);
allocations.putAll(initializingToBeRemoved);
    }
    // continuation of testInSyncIdsAreRemovedIfNotValidatedByMaster (header on a previous chunk line)
    final ReplicationTracker tracker = newTracker(primaryId);
    tracker.updateFromMaster(initialClusterStateVersion, ids(active), routingTable(initializing, primaryId));
    tracker.activatePrimaryMode(NO_OPS_PERFORMED);
    if (randomBoolean()) {
        initializingToStay.keySet().forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED));
    } else {
        initializing.forEach(k -> markAsTrackingAndInSyncQuietly(tracker, k.getId(), NO_OPS_PERFORMED));
    }
    if (randomBoolean()) {
        allocations.forEach((aid, localCP) -> updateLocalCheckpoint(tracker, aid.getId(), localCP));
    }

    // now remove shards
    if (randomBoolean()) {
        tracker.updateFromMaster(
            initialClusterStateVersion + 1, ids(activeToStay.keySet()), routingTable(initializingToStay.keySet(), primaryId));
        allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L));
    } else {
        allocations.forEach((aid, ckp) -> updateLocalCheckpoint(tracker, aid.getId(), ckp + 10L));
        tracker.updateFromMaster(
            initialClusterStateVersion + 2, ids(activeToStay.keySet()), routingTable(initializingToStay.keySet(), primaryId));
    }

    final long checkpoint = Stream.concat(activeToStay.values().stream(), initializingToStay.values().stream())
        .min(Long::compare).get() + 10; // we added 10 to make sure it's advanced in the second time

    assertThat(tracker.getGlobalCheckpoint(), equalTo(checkpoint));
}

// A tracking shard only becomes in-sync once its local checkpoint reaches the global
// checkpoint; markAllocationIdAsInSync blocks on a background thread until then, and a
// master update that removes the copy also releases the waiter.
public void testWaitForAllocationIdToBeInSync() throws Exception {
    final int localCheckpoint = randomIntBetween(1, 32);
    final int globalCheckpoint = randomIntBetween(localCheckpoint + 1, 64);
    final CyclicBarrier barrier = new CyclicBarrier(2);
    final AtomicBoolean complete = new AtomicBoolean();
    final AllocationId inSyncAllocationId = AllocationId.newInitializing();
    final AllocationId trackingAllocationId = AllocationId.newInitializing();
    final ReplicationTracker tracker = newTracker(inSyncAllocationId);
    final long clusterStateVersion = randomNonNegativeLong();
    tracker.updateFromMaster(clusterStateVersion, Collections.singleton(inSyncAllocationId.getId()),
        routingTable(Collections.singleton(trackingAllocationId), inSyncAllocationId));
    tracker.activatePrimaryMode(globalCheckpoint);
    addPeerRecoveryRetentionLease(tracker, trackingAllocationId);
    final Thread thread = new Thread(() -> {
        try {
            // synchronize starting with the test thread
            barrier.await();
            tracker.initiateTracking(trackingAllocationId.getId());
            tracker.markAllocationIdAsInSync(trackingAllocationId.getId(), localCheckpoint);
            complete.set(true);
            // synchronize with the test thread checking if we are no longer waiting
            barrier.await();
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    });
    thread.start();

    // synchronize starting with the waiting thread
    barrier.await();

    final List<Integer> elements = IntStream.rangeClosed(0, globalCheckpoint - 1).boxed().collect(Collectors.toList());
    Randomness.shuffle(elements);
    // feed checkpoints below the global checkpoint: the waiter must stay blocked
    for (int i = 0; i < elements.size(); i++) {
        updateLocalCheckpoint(tracker, trackingAllocationId.getId(), elements.get(i));
        assertFalse(complete.get());
        assertFalse(tracker.getTrackedLocalCheckpointForShard(trackingAllocationId.getId()).inSync);
        assertBusy(() -> assertTrue(tracker.pendingInSync.contains(trackingAllocationId.getId())));
    }

    if (randomBoolean()) {
        // normal path, shard catches up
        updateLocalCheckpoint(tracker, trackingAllocationId.getId(), randomIntBetween(globalCheckpoint, 64));
        // synchronize with the waiting thread to mark that it is complete
        barrier.await();
        assertTrue(complete.get());
        assertTrue(tracker.getTrackedLocalCheckpointForShard(trackingAllocationId.getId()).inSync);
    } else {
        // master changes its mind and cancels the allocation
        tracker.updateFromMaster(clusterStateVersion + 1, Collections.singleton(inSyncAllocationId.getId()),
            routingTable(emptySet(), inSyncAllocationId));
        barrier.await();
        assertTrue(complete.get());
        assertNull(tracker.getTrackedLocalCheckpointForShard(trackingAllocationId.getId()));
    }
    assertFalse(tracker.pendingInSync.contains(trackingAllocationId.getId()));
    thread.join();
}

// Listener target used by updateLocalCheckpoint(...) assertions; records the last
// global checkpoint the tracker published.
private AtomicLong updatedGlobalCheckpoint = new AtomicLong(UNASSIGNED_SEQ_NO);

// Convenience factory wiring the shared listener and a zero current-time supplier.
private ReplicationTracker newTracker(final AllocationId allocationId) {
    return newTracker(allocationId, updatedGlobalCheckpoint::set, () -> 0L);
}

// Interrupting a thread blocked in markAllocationIdAsInSync must surface as InterruptedException.
public void testWaitForAllocationIdToBeInSyncCanBeInterrupted() throws BrokenBarrierException, InterruptedException {
    final int localCheckpoint = randomIntBetween(1, 32);
    final int globalCheckpoint = randomIntBetween(localCheckpoint + 1, 64);
    final CyclicBarrier barrier = new CyclicBarrier(2);
    final AtomicBoolean interrupted = new AtomicBoolean();
    final AllocationId inSyncAllocationId = AllocationId.newInitializing();
    final AllocationId trackingAllocationId = AllocationId.newInitializing();
    final ReplicationTracker tracker = newTracker(inSyncAllocationId);
    tracker.updateFromMaster(randomNonNegativeLong(), Collections.singleton(inSyncAllocationId.getId()),
        routingTable(Collections.singleton(trackingAllocationId), inSyncAllocationId));
    tracker.activatePrimaryMode(globalCheckpoint);
    addPeerRecoveryRetentionLease(tracker, trackingAllocationId);
    final Thread thread = new Thread(() -> {
        try {
            // synchronize starting with the test thread
            barrier.await();
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
        try {
            tracker.initiateTracking(trackingAllocationId.getId());
            tracker.markAllocationIdAsInSync(trackingAllocationId.getId(), localCheckpoint);
        } catch (final InterruptedException e) {
            interrupted.set(true);
            // synchronize with the test thread checking if we are interrupted
        }
        try {
            barrier.await();
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    });
    thread.start();

    // synchronize starting with the waiting thread
    barrier.await();

    thread.interrupt();

    // synchronize with the
// waiting thread to mark that it is complete
    // (the words above are the tail of a comment cut at the previous chunk line; re-marked as a comment)
    barrier.await();

    assertTrue(interrupted.get());

    thread.join();
}

// Drives updateFromMaster through additions and removals of active/initializing
// allocation ids and asserts the in-sync set, routing table, tracked checkpoints,
// and global checkpoint all follow the master's view.
public void testUpdateAllocationIdsFromMaster() throws Exception {
    final long initialClusterStateVersion = randomNonNegativeLong();
    final int numberOfActiveAllocationsIds = randomIntBetween(2, 16);
    final int numberOfInitializingIds = randomIntBetween(2, 16);
    final Tuple<Set<AllocationId>, Set<AllocationId>> activeAndInitializingAllocationIds =
        randomActiveAndInitializingAllocationIds(numberOfActiveAllocationsIds, numberOfInitializingIds);
    final Set<AllocationId> activeAllocationIds = activeAndInitializingAllocationIds.v1();
    final Set<AllocationId> initializingIds = activeAndInitializingAllocationIds.v2();
    AllocationId primaryId = activeAllocationIds.iterator().next();
    IndexShardRoutingTable routingTable = routingTable(initializingIds, primaryId);
    final ReplicationTracker tracker = newTracker(primaryId);
    tracker.updateFromMaster(initialClusterStateVersion, ids(activeAllocationIds), routingTable);
    tracker.activatePrimaryMode(NO_OPS_PERFORMED);
    assertThat(tracker.getReplicationGroup().getInSyncAllocationIds(), equalTo(ids(activeAllocationIds)));
    assertThat(tracker.getReplicationGroup().getRoutingTable(), equalTo(routingTable));

    // first we assert that the in-sync and tracking sets are set up correctly
    assertTrue(activeAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync));
    assertTrue(
        activeAllocationIds
            .stream()
            .filter(a -> a.equals(primaryId) == false)
            .allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).getLocalCheckpoint()
                == SequenceNumbers.UNASSIGNED_SEQ_NO));
    assertTrue(initializingIds.stream().noneMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync));
    assertTrue(
        initializingIds
            .stream()
            .filter(a -> a.equals(primaryId) == false)
            .allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).getLocalCheckpoint()
                == SequenceNumbers.UNASSIGNED_SEQ_NO));

    // now we will remove some allocation IDs from these and ensure that they propagate through
    final Set<AllocationId> removingActiveAllocationIds = new HashSet<>(randomSubsetOf(activeAllocationIds));
    removingActiveAllocationIds.remove(primaryId);
    final Set<AllocationId> newActiveAllocationIds =
        activeAllocationIds.stream().filter(a -> !removingActiveAllocationIds.contains(a)).collect(Collectors.toSet());
    final List<AllocationId> removingInitializingAllocationIds = randomSubsetOf(initializingIds);
    final Set<AllocationId> newInitializingAllocationIds =
        initializingIds.stream().filter(a -> !removingInitializingAllocationIds.contains(a)).collect(Collectors.toSet());
    routingTable = routingTable(newInitializingAllocationIds, primaryId);
    tracker.updateFromMaster(initialClusterStateVersion + 1, ids(newActiveAllocationIds), routingTable);
    assertTrue(newActiveAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync));
    assertTrue(removingActiveAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()) == null));
    assertTrue(newInitializingAllocationIds.stream().noneMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync));
    assertTrue(removingInitializingAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()) == null));
    assertThat(tracker.getReplicationGroup().getInSyncAllocationIds(), equalTo(
        ids(Sets.difference(Sets.union(activeAllocationIds, newActiveAllocationIds), removingActiveAllocationIds))));
    assertThat(tracker.getReplicationGroup().getRoutingTable(), equalTo(routingTable));

    /*
     * Now we will add an allocation ID to each of active and initializing and ensure they propagate through. Using different lengths
     * than we have been using above ensures that we can not collide with a previous allocation ID
     */
    newInitializingAllocationIds.add(AllocationId.newInitializing());
    tracker.updateFromMaster(
        initialClusterStateVersion + 2, ids(newActiveAllocationIds), routingTable(newInitializingAllocationIds, primaryId));
    assertTrue(newActiveAllocationIds.stream().allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync));
    assertTrue(
        newActiveAllocationIds
            .stream()
            .filter(a -> a.equals(primaryId) == false)
            .allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).getLocalCheckpoint()
                == SequenceNumbers.UNASSIGNED_SEQ_NO));
    assertTrue(newInitializingAllocationIds.stream().noneMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).inSync));
    assertTrue(
        newInitializingAllocationIds
            .stream()
            .allMatch(a -> tracker.getTrackedLocalCheckpointForShard(a.getId()).getLocalCheckpoint()
                == SequenceNumbers.UNASSIGNED_SEQ_NO));

    // the tracking allocation IDs should play no role in determining the global checkpoint
    final Map<AllocationId, Integer> activeLocalCheckpoints =
        newActiveAllocationIds.stream().collect(Collectors.toMap(Function.identity(), a -> randomIntBetween(1, 1024)));
    activeLocalCheckpoints.forEach((a, l) -> updateLocalCheckpoint(tracker, a.getId(), l));
    final Map<AllocationId, Integer> initializingLocalCheckpoints =
        newInitializingAllocationIds.stream().collect(Collectors.toMap(Function.identity(), a -> randomIntBetween(1, 1024)));
    initializingLocalCheckpoints.forEach((a, l) -> updateLocalCheckpoint(tracker, a.getId(), l));
    assertTrue(
        activeLocalCheckpoints
            .entrySet()
            .stream()
            .allMatch(e -> tracker.getTrackedLocalCheckpointForShard(e.getKey().getId()).getLocalCheckpoint() == e.getValue()));
    assertTrue(
        initializingLocalCheckpoints
            .entrySet()
            .stream()
            .allMatch(e -> tracker.getTrackedLocalCheckpointForShard(e.getKey().getId()).getLocalCheckpoint() == e.getValue()));
    final long minimumActiveLocalCheckpoint = activeLocalCheckpoints.values().stream().min(Integer::compareTo).get();
    assertThat(tracker.getGlobalCheckpoint(), equalTo(minimumActiveLocalCheckpoint));
    assertThat(updatedGlobalCheckpoint.get(), equalTo(minimumActiveLocalCheckpoint));
    final long minimumInitailizingLocalCheckpoint = initializingLocalCheckpoints.values().stream().min(Integer::compareTo).get();

    // now we are going to add a new allocation ID and bring it in sync which should move it to the in-sync allocation IDs
    final long localCheckpoint =
        randomIntBetween(0, Math.toIntExact(Math.min(minimumActiveLocalCheckpoint, minimumInitailizingLocalCheckpoint) - 1));

    // using a different length than we have been using above ensures that we can not collide with a previous allocation ID
    final AllocationId newSyncingAllocationId = AllocationId.newInitializing();
    newInitializingAllocationIds.add(newSyncingAllocationId);
    tracker.updateFromMaster(
        initialClusterStateVersion + 3, ids(newActiveAllocationIds), routingTable(newInitializingAllocationIds, primaryId));
    addPeerRecoveryRetentionLease(tracker, newSyncingAllocationId);
    final CyclicBarrier barrier = new CyclicBarrier(2);
    final Thread thread = new Thread(() -> {
        try {
            barrier.await();
            tracker.initiateTracking(newSyncingAllocationId.getId());
            tracker.markAllocationIdAsInSync(newSyncingAllocationId.getId(), localCheckpoint);
            barrier.await();
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    });

    thread.start();

    barrier.await();

    assertBusy(() -> {
        assertTrue(tracker.pendingInSync.contains(newSyncingAllocationId.getId()));
        assertFalse(tracker.getTrackedLocalCheckpointForShard(newSyncingAllocationId.getId()).inSync);
    });

    tracker.updateLocalCheckpoint(newSyncingAllocationId.getId(),
        randomIntBetween(Math.toIntExact(minimumActiveLocalCheckpoint), 1024));

    barrier.await();

    assertFalse(tracker.pendingInSync.contains(newSyncingAllocationId.getId()));
assertTrue(tracker.getTrackedLocalCheckpointForShard(newSyncingAllocationId.getId()).inSync);
    // continuation of testUpdateAllocationIdsFromMaster (header on a previous chunk line)

    /*
     * The new in-sync allocation ID is in the in-sync set now yet the master does not know this; the allocation ID should still be in
     * the in-sync set even if we receive a cluster state update that does not reflect this.
     *
     */
    tracker.updateFromMaster(
        initialClusterStateVersion + 4, ids(newActiveAllocationIds), routingTable(newInitializingAllocationIds, primaryId));
    assertTrue(tracker.getTrackedLocalCheckpointForShard(newSyncingAllocationId.getId()).inSync);
    assertFalse(tracker.pendingInSync.contains(newSyncingAllocationId.getId()));
}

/**
 * If we do not update the global checkpoint in {@link ReplicationTracker#markAllocationIdAsInSync(String, long)} after adding the
 * allocation ID to the in-sync set and removing it from pending, the local checkpoint update that freed the thread waiting for the
 * local checkpoint to advance could miss updating the global checkpoint in a race if the waiting thread did not add the allocation
 * ID to the in-sync set and remove it from the pending set before the local checkpoint updating thread executed the global checkpoint
 * update. This test fails without an additional call to {@code ReplicationTracker#updateGlobalCheckpointOnPrimary()} after
 * removing the allocation ID from the pending set in {@link ReplicationTracker#markAllocationIdAsInSync(String, long)} (even if a
 * call is added after notifying all waiters in {@link ReplicationTracker#updateLocalCheckpoint(String, long)}).
 *
 * @throws InterruptedException if the main test thread was interrupted while waiting
 * @throws BrokenBarrierException if the barrier was broken while the main test thread was waiting
 */
public void testRaceUpdatingGlobalCheckpoint() throws InterruptedException, BrokenBarrierException {

    final AllocationId active = AllocationId.newInitializing();
    final AllocationId initializing = AllocationId.newInitializing();
    final CyclicBarrier barrier = new CyclicBarrier(4);

    final int activeLocalCheckpoint = randomIntBetween(0, Integer.MAX_VALUE - 1);
    final ReplicationTracker tracker = newTracker(active);
    tracker.updateFromMaster(
        randomNonNegativeLong(),
        Collections.singleton(active.getId()),
        routingTable(Collections.singleton(initializing), active));
    tracker.activatePrimaryMode(activeLocalCheckpoint);
    addPeerRecoveryRetentionLease(tracker, initializing);
    final int nextActiveLocalCheckpoint = randomIntBetween(activeLocalCheckpoint + 1, Integer.MAX_VALUE);
    final Thread activeThread = new Thread(() -> {
        try {
            barrier.await();
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
        tracker.updateLocalCheckpoint(active.getId(), nextActiveLocalCheckpoint);
    });

    final int initializingLocalCheckpoint = randomIntBetween(0, nextActiveLocalCheckpoint - 1);
    final Thread initializingThread = new Thread(() -> {
        try {
            barrier.await();
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
        tracker.updateLocalCheckpoint(initializing.getId(), nextActiveLocalCheckpoint);
    });

    final Thread markingThread = new Thread(() -> {
        try {
            barrier.await();
            tracker.initiateTracking(initializing.getId());
            tracker.markAllocationIdAsInSync(initializing.getId(), initializingLocalCheckpoint - 1);
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    });

    activeThread.start();
    initializingThread.start();
    markingThread.start();
    barrier.await();

    activeThread.join();
    initializingThread.join();
    markingThread.join();

    assertThat(tracker.getGlobalCheckpoint(), equalTo((long) nextActiveLocalCheckpoint));
}

// Simulates a primary relocation: hands the primary context from an old primary tracker
// to a new one (including a serialization round-trip) and asserts both trackers agree.
public void testPrimaryContextHandoff() throws IOException {
    final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY);
    final ShardId shardId = new ShardId("test", "_na_", 0);

    FakeClusterState clusterState = initialState();
    final AllocationId aId = clusterState.routingTable.primaryShard().allocationId();
    final LongConsumer onUpdate = updatedGlobalCheckpoint -> {};
    final long primaryTerm = randomNonNegativeLong();
    final long globalCheckpoint = UNASSIGNED_SEQ_NO;
    final BiConsumer<RetentionLeases, ActionListener<ReplicationResponse>> onNewRetentionLease = (leases, listener) -> {};
    ReplicationTracker oldPrimary = new ReplicationTracker(shardId, aId.getId(), indexSettings, primaryTerm, globalCheckpoint,
        onUpdate, () -> 0L, onNewRetentionLease, OPS_BASED_RECOVERY_ALWAYS_REASONABLE);
    ReplicationTracker newPrimary = new ReplicationTracker(shardId, aId.getRelocationId(), indexSettings, primaryTerm,
        globalCheckpoint, onUpdate, () -> 0L, onNewRetentionLease, OPS_BASED_RECOVERY_ALWAYS_REASONABLE);

    Set<String> allocationIds = new HashSet<>(Arrays.asList(oldPrimary.shardAllocationId, newPrimary.shardAllocationId));

    clusterState.apply(oldPrimary);
    clusterState.apply(newPrimary);

    oldPrimary.activatePrimaryMode(randomIntBetween(Math.toIntExact(NO_OPS_PERFORMED), 10));
    addPeerRecoveryRetentionLease(oldPrimary, newPrimary.shardAllocationId);
    newPrimary.updateRetentionLeasesOnReplica(oldPrimary.getRetentionLeases());

    final int numUpdates = randomInt(10);
    for (int i = 0; i < numUpdates; i++) {
        if (rarely()) {
            clusterState = randomUpdateClusterState(allocationIds, clusterState);
            clusterState.apply(oldPrimary);
            clusterState.apply(newPrimary);
        }
        if (randomBoolean()) {
            randomLocalCheckpointUpdate(oldPrimary);
        }
        if (randomBoolean()) {
            randomMarkInSync(oldPrimary, newPrimary);
        }
    }

    // simulate transferring the global
checkpoint to the new primary after finalizing recovery before the handoff markAsTrackingAndInSyncQuietly( oldPrimary, newPrimary.shardAllocationId, Math.max(SequenceNumbers.NO_OPS_PERFORMED, oldPrimary.getGlobalCheckpoint() + randomInt(5))); oldPrimary.updateGlobalCheckpointForShard(newPrimary.shardAllocationId, oldPrimary.getGlobalCheckpoint()); ReplicationTracker.PrimaryContext primaryContext = oldPrimary.startRelocationHandoff(newPrimary.shardAllocationId); if (randomBoolean()) { // cluster state update after primary context handoff if (randomBoolean()) { clusterState = randomUpdateClusterState(allocationIds, clusterState); clusterState.apply(oldPrimary); clusterState.apply(newPrimary); } // abort handoff, check that we can continue updates and retry handoff oldPrimary.abortRelocationHandoff(); if (rarely()) { clusterState = randomUpdateClusterState(allocationIds, clusterState); clusterState.apply(oldPrimary); clusterState.apply(newPrimary); } if (randomBoolean()) { randomLocalCheckpointUpdate(oldPrimary); } if (randomBoolean()) { randomMarkInSync(oldPrimary, newPrimary); } // do another handoff primaryContext = oldPrimary.startRelocationHandoff(newPrimary.shardAllocationId); } // send primary context through the wire BytesStreamOutput output = new BytesStreamOutput(); primaryContext.writeTo(output); StreamInput streamInput = output.bytes().streamInput(); primaryContext = new ReplicationTracker.PrimaryContext(streamInput); switch (randomInt(3)) { case 0: { // apply cluster state update on old primary while primary context is being transferred clusterState = randomUpdateClusterState(allocationIds, clusterState); clusterState.apply(oldPrimary); // activate new primary newPrimary.activateWithPrimaryContext(primaryContext); // apply cluster state update on new primary so that the states on old and new primary are comparable clusterState.apply(newPrimary); break; } case 1: { // apply cluster state update on new primary while primary context is being transferred 
clusterState = randomUpdateClusterState(allocationIds, clusterState); clusterState.apply(newPrimary); // activate new primary newPrimary.activateWithPrimaryContext(primaryContext); // apply cluster state update on old primary so that the states on old and new primary are comparable clusterState.apply(oldPrimary); break; } case 2: { // apply cluster state update on both copies while primary context is being transferred clusterState = randomUpdateClusterState(allocationIds, clusterState); clusterState.apply(oldPrimary); clusterState.apply(newPrimary); newPrimary.activateWithPrimaryContext(primaryContext); break; } case 3: { // no cluster state update newPrimary.activateWithPrimaryContext(primaryContext); break; } } assertTrue(oldPrimary.primaryMode); assertTrue(newPrimary.primaryMode); assertThat(newPrimary.appliedClusterStateVersion, equalTo(oldPrimary.appliedClusterStateVersion)); /* * We can not assert on shared knowledge of the global checkpoint between the old primary and the new primary as the new primary * will update its global checkpoint state without the old primary learning of it, and the old primary could have updated its * global checkpoint state after the primary context was transferred. 
*/ Map<String, ReplicationTracker.CheckpointState> oldPrimaryCheckpointsCopy = new HashMap<>(oldPrimary.checkpoints); oldPrimaryCheckpointsCopy.remove(oldPrimary.shardAllocationId); oldPrimaryCheckpointsCopy.remove(newPrimary.shardAllocationId); Map<String, ReplicationTracker.CheckpointState> newPrimaryCheckpointsCopy = new HashMap<>(newPrimary.checkpoints); newPrimaryCheckpointsCopy.remove(oldPrimary.shardAllocationId); newPrimaryCheckpointsCopy.remove(newPrimary.shardAllocationId); assertThat(newPrimaryCheckpointsCopy, equalTo(oldPrimaryCheckpointsCopy)); // we can however assert that shared knowledge of the local checkpoint and in-sync status is equal assertThat( oldPrimary.checkpoints.get(oldPrimary.shardAllocationId).localCheckpoint, equalTo(newPrimary.checkpoints.get(oldPrimary.shardAllocationId).localCheckpoint)); assertThat( oldPrimary.checkpoints.get(newPrimary.shardAllocationId).localCheckpoint, equalTo(newPrimary.checkpoints.get(newPrimary.shardAllocationId).localCheckpoint)); assertThat( oldPrimary.checkpoints.get(oldPrimary.shardAllocationId).inSync, equalTo(newPrimary.checkpoints.get(oldPrimary.shardAllocationId).inSync)); assertThat( oldPrimary.checkpoints.get(newPrimary.shardAllocationId).inSync, equalTo(newPrimary.checkpoints.get(newPrimary.shardAllocationId).inSync)); assertThat(newPrimary.getGlobalCheckpoint(), equalTo(oldPrimary.getGlobalCheckpoint())); assertThat(newPrimary.routingTable, equalTo(oldPrimary.routingTable)); assertThat(newPrimary.replicationGroup, equalTo(oldPrimary.replicationGroup)); assertFalse(oldPrimary.relocated); oldPrimary.completeRelocationHandoff(); assertFalse(oldPrimary.primaryMode); assertTrue(oldPrimary.relocated); } public void testIllegalStateExceptionIfUnknownAllocationId() { final AllocationId active = AllocationId.newInitializing(); final AllocationId initializing = AllocationId.newInitializing(); final ReplicationTracker tracker = newTracker(active); tracker.updateFromMaster(randomNonNegativeLong(), 
            Collections.singleton(active.getId()), routingTable(Collections.singleton(initializing), active));
        tracker.activatePrimaryMode(NO_OPS_PERFORMED);
        // The tracker must reject allocation ids the master never announced to it.
        expectThrows(IllegalStateException.class, () -> tracker.initiateTracking(randomAlphaOfLength(10)));
        expectThrows(IllegalStateException.class,
            () -> tracker.markAllocationIdAsInSync(randomAlphaOfLength(10), randomNonNegativeLong()));
    }

    /**
     * Minimal stand-in for the slice of cluster state the tracker consumes: a version,
     * the set of in-sync allocation ids, and a routing table. Instances are immutable
     * (the in-sync set is wrapped unmodifiable in the constructor).
     */
    private static class FakeClusterState {
        // Cluster state version; applied states must be comparable by this number.
        final long version;
        // Allocation ids the master considers in-sync.
        final Set<AllocationId> inSyncIds;
        final IndexShardRoutingTable routingTable;

        private FakeClusterState(long version, Set<AllocationId> inSyncIds, IndexShardRoutingTable routingTable) {
            this.version = version;
            this.inSyncIds = Collections.unmodifiableSet(inSyncIds);
            this.routingTable = routingTable;
        }

        /** All allocation ids known to this state: initializing plus in-sync. */
        public Set<AllocationId> allIds() {
            return Sets.union(initializingIds(), inSyncIds);
        }

        /** Allocation ids of shards the routing table reports as initializing. */
        public Set<AllocationId> initializingIds() {
            return routingTable.getAllInitializingShards().stream()
                .map(ShardRouting::allocationId).collect(Collectors.toSet());
        }

        /** Pushes this fake state into the given tracker as if it came from the master. */
        public void apply(ReplicationTracker gcp) {
            gcp.updateFromMaster(version, ids(inSyncIds), routingTable);
        }
    }

    /**
     * Builds a random initial cluster state whose primary is relocating: the chosen
     * primary id is swapped for a relocation id in the active set, and the routing
     * table is built around the relocating primary shard.
     */
    private static FakeClusterState initialState() {
        final long initialClusterStateVersion = randomIntBetween(1, Integer.MAX_VALUE);
        final int numberOfActiveAllocationsIds = randomIntBetween(1, 8);
        final int numberOfInitializingIds = randomIntBetween(0, 8);
        final Tuple<Set<AllocationId>, Set<AllocationId>> activeAndInitializingAllocationIds =
            randomActiveAndInitializingAllocationIds(numberOfActiveAllocationsIds, numberOfInitializingIds);
        final Set<AllocationId> activeAllocationIds = activeAndInitializingAllocationIds.v1();
        final Set<AllocationId> initializingAllocationIds = activeAndInitializingAllocationIds.v2();
        final AllocationId primaryId = randomFrom(activeAllocationIds);
        final AllocationId relocatingId = AllocationId.newRelocation(primaryId);
        // Replace the plain primary id with its relocation variant in the active set.
        activeAllocationIds.remove(primaryId);
        activeAllocationIds.add(relocatingId);
        final ShardId shardId = new ShardId("test", "_na_", 0);
        final ShardRouting primaryShard = TestShardRouting.newShardRouting(
            shardId, nodeIdFromAllocationId(relocatingId),
            nodeIdFromAllocationId(AllocationId.newInitializing(relocatingId.getRelocationId())),
            true, ShardRoutingState.RELOCATING, relocatingId);
        return new FakeClusterState(
            initialClusterStateVersion,
            activeAllocationIds,
            routingTable(initializingAllocationIds, primaryShard));
    }

    /** Advances the local checkpoint of a randomly chosen tracked copy by 0..5 (never below NO_OPS_PERFORMED). */
    private static void randomLocalCheckpointUpdate(ReplicationTracker gcp) {
        String allocationId = randomFrom(gcp.checkpoints.keySet());
        long currentLocalCheckpoint = gcp.checkpoints.get(allocationId).getLocalCheckpoint();
        gcp.updateLocalCheckpoint(allocationId, Math.max(SequenceNumbers.NO_OPS_PERFORMED, currentLocalCheckpoint + randomInt(5)));
    }

    /**
     * Marks a random copy of the old primary as in-sync at (or just above) the global
     * checkpoint, and mirrors the resulting retention leases onto the new primary.
     */
    private static void randomMarkInSync(ReplicationTracker oldPrimary, ReplicationTracker newPrimary) {
        final String allocationId = randomFrom(oldPrimary.checkpoints.keySet());
        final long newLocalCheckpoint = Math.max(NO_OPS_PERFORMED, oldPrimary.getGlobalCheckpoint() + randomInt(5));
        markAsTrackingAndInSyncQuietly(oldPrimary, allocationId, newLocalCheckpoint);
        newPrimary.updateRetentionLeasesOnReplica(oldPrimary.getRetentionLeases());
    }

    /**
     * Produces a random successor cluster state: bumps the version, adds/removes random
     * initializing ids and removes random in-sync ids, while never touching the ids in
     * {@code allocationIds} (the copies under test) or relocation-target ids.
     */
    private static FakeClusterState randomUpdateClusterState(Set<String> allocationIds, FakeClusterState clusterState) {
        final Set<AllocationId> initializingIdsToAdd =
            randomAllocationIdsExcludingExistingIds(exclude(clusterState.allIds(), allocationIds), randomInt(2));
        final Set<AllocationId> initializingIdsToRemove = new HashSet<>(
            exclude(randomSubsetOf(randomInt(clusterState.initializingIds().size()), clusterState.initializingIds()), allocationIds));
        final Set<AllocationId> inSyncIdsToRemove = new HashSet<>(
            exclude(randomSubsetOf(randomInt(clusterState.inSyncIds.size()), clusterState.inSyncIds), allocationIds));
        final Set<AllocationId> remainingInSyncIds = Sets.difference(clusterState.inSyncIds, inSyncIdsToRemove);
        // Relocation targets are driven by their sources; keep them out of the random churn.
        final Set<AllocationId> initializingIdsExceptRelocationTargets = exclude(clusterState.initializingIds(),
            clusterState.routingTable.activeShards().stream().filter(ShardRouting::relocating)
                .map(s -> s.allocationId().getRelocationId()).collect(Collectors.toSet()));
        return new FakeClusterState(
            clusterState.version + randomIntBetween(1, 5),
            // Never hand the tracker an empty in-sync set; fall back to the previous one.
            remainingInSyncIds.isEmpty() ? clusterState.inSyncIds : remainingInSyncIds,
            routingTable(
                Sets.difference(Sets.union(initializingIdsExceptRelocationTargets, initializingIdsToAdd), initializingIdsToRemove),
                clusterState.routingTable.primaryShard()));
    }

    /** Filters out every allocation id whose string id appears in {@code excludeIds}. */
    private static Set<AllocationId> exclude(Collection<AllocationId> allocationIds, Set<String> excludeIds) {
        return allocationIds.stream().filter(aId -> !excludeIds.contains(aId.getId())).collect(Collectors.toSet());
    }

    /** Generates disjoint random sets of active and initializing allocation ids of the requested sizes. */
    private static Tuple<Set<AllocationId>, Set<AllocationId>> randomActiveAndInitializingAllocationIds(
            final int numberOfActiveAllocationsIds, final int numberOfInitializingIds) {
        final Set<AllocationId> activeAllocationIds =
            IntStream.range(0, numberOfActiveAllocationsIds).mapToObj(i -> AllocationId.newInitializing()).collect(Collectors.toSet());
        final Set<AllocationId> initializingIds = randomAllocationIdsExcludingExistingIds(activeAllocationIds, numberOfInitializingIds);
        return Tuple.tuple(activeAllocationIds, initializingIds);
    }

    /** Generates {@code numberOfAllocationIds} fresh allocation ids, retrying on (unlikely) collisions. */
    private static Set<AllocationId> randomAllocationIdsExcludingExistingIds(final Set<AllocationId> existingAllocationIds,
            final int numberOfAllocationIds) {
        return IntStream.range(0, numberOfAllocationIds).mapToObj(i -> {
            do {
                final AllocationId newAllocationId = AllocationId.newInitializing();
                // ensure we do not duplicate an allocation ID
                if (!existingAllocationIds.contains(newAllocationId)) {
                    return newAllocationId;
                }
            } while (true);
        }).collect(Collectors.toSet());
    }

    /**
     * Runs the full track-and-mark-in-sync handshake for one copy, converting the
     * checked InterruptedException into a RuntimeException for test convenience.
     */
    private static void markAsTrackingAndInSyncQuietly(
            final ReplicationTracker tracker, final String allocationId, final long localCheckpoint) {
        try {
            // A peer-recovery retention lease must exist before tracking is initiated.
            addPeerRecoveryRetentionLease(tracker, allocationId);
            tracker.initiateTracking(allocationId);
tracker.markAllocationIdAsInSync(allocationId, localCheckpoint); } catch (final InterruptedException e) { throw new RuntimeException(e); } } private static void addPeerRecoveryRetentionLease(final ReplicationTracker tracker, final AllocationId allocationId) { final String nodeId = nodeIdFromAllocationId(allocationId); if (tracker.getRetentionLeases().contains(ReplicationTracker.getPeerRecoveryRetentionLeaseId(nodeId)) == false) { tracker.addPeerRecoveryRetentionLease(nodeId, NO_OPS_PERFORMED, ActionListener.wrap(() -> { })); } } private static void addPeerRecoveryRetentionLease(final ReplicationTracker tracker, final String allocationId) { addPeerRecoveryRetentionLease(tracker, AllocationId.newInitializing(allocationId)); } public void testPeerRecoveryRetentionLeaseCreationAndRenewal() { final int numberOfActiveAllocationsIds = randomIntBetween(1, 8); final int numberOfInitializingIds = randomIntBetween(0, 8); final Tuple<Set<AllocationId>, Set<AllocationId>> activeAndInitializingAllocationIds = randomActiveAndInitializingAllocationIds(numberOfActiveAllocationsIds, numberOfInitializingIds); final Set<AllocationId> activeAllocationIds = activeAndInitializingAllocationIds.v1(); final Set<AllocationId> initializingAllocationIds = activeAndInitializingAllocationIds.v2(); final AllocationId primaryId = activeAllocationIds.iterator().next(); final long initialClusterStateVersion = randomNonNegativeLong(); final AtomicLong currentTimeMillis = new AtomicLong(0L); final ReplicationTracker tracker = newTracker(primaryId, updatedGlobalCheckpoint::set, currentTimeMillis::get); final long retentionLeaseExpiryTimeMillis = tracker.indexSettings().getRetentionLeaseMillis(); final long peerRecoveryRetentionLeaseRenewalTimeMillis = retentionLeaseExpiryTimeMillis / 2; final long maximumTestTimeMillis = 13 * retentionLeaseExpiryTimeMillis; final long testStartTimeMillis = randomLongBetween(0L, Long.MAX_VALUE - maximumTestTimeMillis); currentTimeMillis.set(testStartTimeMillis); final 
Function<AllocationId, RetentionLease> retentionLeaseFromAllocationId = allocationId -> new RetentionLease(ReplicationTracker.getPeerRecoveryRetentionLeaseId(nodeIdFromAllocationId(allocationId)), 0L, currentTimeMillis.get(), ReplicationTracker.PEER_RECOVERY_RETENTION_LEASE_SOURCE); final List<RetentionLease> initialLeases = new ArrayList<>(); if (randomBoolean()) { initialLeases.add(retentionLeaseFromAllocationId.apply(primaryId)); } for (final AllocationId replicaId : initializingAllocationIds) { if (randomBoolean()) { initialLeases.add(retentionLeaseFromAllocationId.apply(replicaId)); } } for (int i = randomIntBetween(0, 5); i > 0; i--) { initialLeases.add(retentionLeaseFromAllocationId.apply(AllocationId.newInitializing())); } tracker.updateRetentionLeasesOnReplica(new RetentionLeases(randomNonNegativeLong(), randomNonNegativeLong(), initialLeases)); IndexShardRoutingTable routingTable = routingTable(initializingAllocationIds, primaryId); tracker.updateFromMaster(initialClusterStateVersion, ids(activeAllocationIds), routingTable); tracker.activatePrimaryMode(NO_OPS_PERFORMED); assertTrue("primary's retention lease should exist", tracker.getRetentionLeases().contains(ReplicationTracker.getPeerRecoveryRetentionLeaseId(routingTable.primaryShard()))); final Consumer<Runnable> assertAsTimePasses = assertion -> { final long startTime = currentTimeMillis.get(); while (currentTimeMillis.get() < startTime + retentionLeaseExpiryTimeMillis * 2) { currentTimeMillis.addAndGet(randomLongBetween(0L, retentionLeaseExpiryTimeMillis * 2)); tracker.renewPeerRecoveryRetentionLeases(); tracker.getRetentionLeases(true); assertion.run(); } }; assertAsTimePasses.accept(() -> { // Leases for assigned replicas do not expire final RetentionLeases retentionLeases = tracker.getRetentionLeases(); for (final AllocationId replicaId : initializingAllocationIds) { final String leaseId = retentionLeaseFromAllocationId.apply(replicaId).id(); assertTrue("should not have removed lease for " + 
replicaId + " in " + retentionLeases, initialLeases.stream().noneMatch(l -> l.id().equals(leaseId)) || retentionLeases.contains(leaseId)); } }); // Leases that don't correspond to assigned replicas, however, are expired by this time. final Set<String> expectedLeaseIds = Stream.concat(Stream.of(primaryId), initializingAllocationIds.stream()) .map(allocationId -> retentionLeaseFromAllocationId.apply(allocationId).id()).collect(Collectors.toSet()); for (final RetentionLease retentionLease : tracker.getRetentionLeases().leases()) { assertThat(expectedLeaseIds, hasItem(retentionLease.id())); } for (AllocationId replicaId : initializingAllocationIds) { markAsTrackingAndInSyncQuietly(tracker, replicaId.getId(), NO_OPS_PERFORMED); } assertThat(tracker.getRetentionLeases().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()), equalTo(expectedLeaseIds)); assertAsTimePasses.accept(() -> { // Leases still don't expire assertThat(tracker.getRetentionLeases().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()), equalTo(expectedLeaseIds)); // Also leases are renewed before reaching half the expiry time //noinspection OptionalGetWithoutIsPresent assertThat(tracker.getRetentionLeases() + " renewed before too long", tracker.getRetentionLeases().leases().stream().mapToLong(RetentionLease::timestamp).min().getAsLong(), greaterThanOrEqualTo(currentTimeMillis.get() - peerRecoveryRetentionLeaseRenewalTimeMillis)); }); IndexShardRoutingTable.Builder routingTableBuilder = new IndexShardRoutingTable.Builder(routingTable); for (ShardRouting replicaShard : routingTable.replicaShards()) { routingTableBuilder.removeShard(replicaShard); routingTableBuilder.addShard(replicaShard.moveToStarted()); } routingTable = routingTableBuilder.build(); activeAllocationIds.addAll(initializingAllocationIds); tracker.updateFromMaster(initialClusterStateVersion + randomLongBetween(1, 10), ids(activeAllocationIds), routingTable); assertAsTimePasses.accept(() -> { // Leases 
still don't expire assertThat(tracker.getRetentionLeases().leases().stream().map(RetentionLease::id).collect(Collectors.toSet()), equalTo(expectedLeaseIds)); // ... and any extra peer recovery retention leases are expired immediately since the shard is fully active tracker.addPeerRecoveryRetentionLease(randomAlphaOfLength(10), randomNonNegativeLong(), ActionListener.wrap(() -> {})); }); tracker.renewPeerRecoveryRetentionLeases(); assertTrue("expired extra lease", tracker.getRetentionLeases(true).v1()); final AllocationId advancingAllocationId = initializingAllocationIds.isEmpty() || rarely() ? primaryId : randomFrom(initializingAllocationIds); final String advancingLeaseId = retentionLeaseFromAllocationId.apply(advancingAllocationId).id(); final long initialGlobalCheckpoint = Math.max(NO_OPS_PERFORMED, tracker.getTrackedLocalCheckpointForShard(advancingAllocationId.getId()).globalCheckpoint); assertThat(tracker.getRetentionLeases().get(advancingLeaseId).retainingSequenceNumber(), equalTo(initialGlobalCheckpoint + 1)); final long newGlobalCheckpoint = initialGlobalCheckpoint + randomLongBetween(1, 1000); tracker.updateGlobalCheckpointForShard(advancingAllocationId.getId(), newGlobalCheckpoint); tracker.renewPeerRecoveryRetentionLeases(); assertThat("lease was renewed because the shard advanced its global checkpoint", tracker.getRetentionLeases().get(advancingLeaseId).retainingSequenceNumber(), equalTo(newGlobalCheckpoint + 1)); final long initialVersion = tracker.getRetentionLeases().version(); tracker.renewPeerRecoveryRetentionLeases(); assertThat("immediate renewal is a no-op", tracker.getRetentionLeases().version(), equalTo(initialVersion)); //noinspection OptionalGetWithoutIsPresent final long millisUntilFirstRenewal = tracker.getRetentionLeases().leases().stream().mapToLong(RetentionLease::timestamp).min().getAsLong() + peerRecoveryRetentionLeaseRenewalTimeMillis - currentTimeMillis.get(); if (millisUntilFirstRenewal != 0) { final long shorterThanRenewalTime = 
randomLongBetween(0L, millisUntilFirstRenewal - 1); currentTimeMillis.addAndGet(shorterThanRenewalTime); tracker.renewPeerRecoveryRetentionLeases(); assertThat("renewal is a no-op after a short time", tracker.getRetentionLeases().version(), equalTo(initialVersion)); currentTimeMillis.addAndGet(millisUntilFirstRenewal - shorterThanRenewalTime); } tracker.renewPeerRecoveryRetentionLeases(); assertThat("renewal happens after a sufficiently long time", tracker.getRetentionLeases().version(), greaterThan(initialVersion)); assertTrue("all leases were renewed", tracker.getRetentionLeases().leases().stream().allMatch(l -> l.timestamp() == currentTimeMillis.get())); assertThat("test ran for too long, potentially leading to overflow", currentTimeMillis.get(), lessThanOrEqualTo(testStartTimeMillis + maximumTestTimeMillis)); } }
/** * Most of the code in the Qalingo project is copyrighted Hoteia and licensed * under the Apache License Version 2.0 (release version 0.8.0) * http://www.apache.org/licenses/LICENSE-2.0 * * Copyright (c) Hoteia, 2012-2014 * http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com * */ package org.hoteia.qalingo.core.domain; import java.util.Arrays; import java.util.Date; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.Lob; import javax.persistence.ManyToOne; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Version; import org.hoteia.qalingo.core.domain.impl.DomainEntity; @Entity @Table(name="TECO_CUSTOMER_PAYMENT_ATTRIBUTE") public class CustomerPaymentAttribute extends AbstractAttribute<CustomerPaymentAttribute> implements DomainEntity { /** * Generated UID */ private static final long serialVersionUID = 1567080325619993090L; @Id @GeneratedValue(strategy = GenerationType.AUTO) @Column(name="ID", nullable=false) private Long id; @Version @Column(name="VERSION", nullable=false) // , columnDefinition="int(11) default 1" private int version; @ManyToOne(fetch = FetchType.EAGER) @JoinColumn(name="ATTRIBUTE_DEFINITION_ID", insertable = true, updatable = true) private AttributeDefinition attributeDefinition; @Column(name = "SHORT_STRING_VALUE") private String shortStringValue; @Column(name = "LONG_STRING_VALUE") @Lob private String longStringValue; @Column(name="INTEGER_VALUE") private Integer integerValue; @Column(name="DOUBLE_VALUE") private Double doubleValue; @Column(name="FLOAT_VALUE") private Float floatValue; @Column(name="BLOB_VALUE") @Lob private byte[] blobValue; @Column(name="BOOLEAN_VALUE") private Boolean booleanValue; 
@Temporal(TemporalType.TIMESTAMP) @Column(name = "DATE_VALUE") private Date dateValue; @Column(name = "LOCALIZATION_CODE") private String localizationCode; @Column(name="MARKET_AREA_ID") private Long marketAreaId; @Temporal(TemporalType.TIMESTAMP) @Column(name="START_DATE") private Date startDate; @Temporal(TemporalType.TIMESTAMP) @Column(name="END_DATE") private Date endDate; @Temporal(TemporalType.TIMESTAMP) @Column(name="DATE_CREATE") private Date dateCreate; @Temporal(TemporalType.TIMESTAMP) @Column(name="DATE_UPDATE") private Date dateUpdate; public CustomerPaymentAttribute() { this.dateCreate = new Date(); this.dateUpdate = new Date(); } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public int getVersion() { return version; } public void setVersion(int version) { this.version = version; } @Override public AttributeDefinition getAttributeDefinition() { return attributeDefinition; } public void setAttributeDefinition(AttributeDefinition attributeDefinition) { this.attributeDefinition = attributeDefinition; } public String getShortStringValue() { return shortStringValue; } public void setShortStringValue(String shortStringValue) { this.shortStringValue = shortStringValue; } public String getLongStringValue() { return longStringValue; } public void setLongStringValue(String longStringValue) { this.longStringValue = longStringValue; } @Override public Integer getIntegerValue() { return integerValue; } public void setIntegerValue(Integer integerValue) { this.integerValue = integerValue; } @Override public Double getDoubleValue() { return doubleValue; } public void setDoubleValue(Double doubleValue) { this.doubleValue = doubleValue; } @Override public Float getFloatValue() { return floatValue; } public void setFloatValue(Float floatValue) { this.floatValue = floatValue; } @Override public byte[] getBlobValue() { return blobValue; } public void setBlobValue(byte[] blobValue) { this.blobValue = blobValue; } @Override public Boolean 
getBooleanValue() { return booleanValue; } public void setBooleanValue(Boolean booleanValue) { this.booleanValue = booleanValue; } public Date getDateValue() { return dateValue; } public void setDateValue(Date dateValue) { this.dateValue = dateValue; } public String getLocalizationCode() { return localizationCode; } public void setLocalizationCode(String localizationCode) { this.localizationCode = localizationCode; } public Long getMarketAreaId() { return marketAreaId; } public void setMarketAreaId(Long marketAreaId) { this.marketAreaId = marketAreaId; } public Date getStartDate() { return startDate; } public void setStartDate(Date startDate) { this.startDate = startDate; } public Date getEndDate() { return endDate; } public void setEndDate(Date endDate) { this.endDate = endDate; } public Date getDateCreate() { return dateCreate; } public void setDateCreate(Date dateCreate) { this.dateCreate = dateCreate; } public Date getDateUpdate() { return dateUpdate; } public void setDateUpdate(Date dateUpdate) { this.dateUpdate = dateUpdate; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((dateCreate == null) ? 0 : dateCreate.hashCode()); result = prime * result + ((id == null) ? 
0 : id.hashCode()); return result; } @Override public boolean equals(Object sourceObj) { Object obj = deproxy(sourceObj); if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; CustomerPaymentAttribute other = (CustomerPaymentAttribute) obj; if (dateCreate == null) { if (other.dateCreate != null) return false; } else if (!dateCreate.equals(other.dateCreate)) return false; if (id == null) { if (other.id != null) return false; } else if (!id.equals(other.id)) return false; return true; } @Override public String toString() { return "OrderAttribute [id=" + id + ", version=" + version + ", shortStringValue=" + shortStringValue + ", longStringValue=" + longStringValue + ", integerValue=" + integerValue + ", doubleValue=" + doubleValue + ", floatValue=" + floatValue + ", blobValue=" + Arrays.toString(blobValue) + ", booleanValue=" + booleanValue + ", localizationCode=" + localizationCode + ", marketAreaId=" + marketAreaId + ", startDate=" + startDate + ", endDate=" + endDate + ", dateCreate=" + dateCreate + ", dateUpdate=" + dateUpdate + "]"; } }
// ASM: a very small and fast Java bytecode manipulation framework // Copyright (c) 2000-2011 INRIA, France Telecom // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. Neither the name of the copyright holders nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. package org.objectweb.asm; /** * The path to a type argument, wildcard bound, array element type, or static inner type within an * enclosing type. * * @author Eric Bruneton */ public final class TypePath { /** A type path step that steps into the element type of an array type. See {@link #getStep}. 
*/ public static final int ARRAY_ELEMENT = 0; /** A type path step that steps into the nested type of a class type. See {@link #getStep}. */ public static final int INNER_TYPE = 1; /** A type path step that steps into the bound of a wildcard type. See {@link #getStep}. */ public static final int WILDCARD_BOUND = 2; /** A type path step that steps into a type argument of a generic type. See {@link #getStep}. */ public static final int TYPE_ARGUMENT = 3; /** * The byte array where the 'type_path' structure - as defined in the Java Virtual Machine * Specification (JVMS) - corresponding to this TypePath is stored. The first byte of the * structure in this array is given by {@link #typePathOffset}. * * @see <a * href="https://docs.oracle.com/javase/specs/jvms/se9/html/jvms-4.html#jvms-4.7.20.2">JVMS * 4.7.20.2</a> */ private final byte[] typePathContainer; /** The offset of the first byte of the type_path JVMS structure in {@link #typePathContainer}. */ private final int typePathOffset; /** * Constructs a new TypePath. * * @param typePathContainer a byte array containing a type_path JVMS structure. * @param typePathOffset the offset of the first byte of the type_path structure in * typePathContainer. */ TypePath(final byte[] typePathContainer, final int typePathOffset) { this.typePathContainer = typePathContainer; this.typePathOffset = typePathOffset; } /** * Returns the length of this path, i.e. its number of steps. * * @return the length of this path. */ public int getLength() { // path_length is stored in the first byte of a type_path. return typePathContainer[typePathOffset]; } /** * Returns the value of the given step of this path. * * @param index an index between 0 and {@link #getLength()}, exclusive. * @return one of {@link #ARRAY_ELEMENT}, {@link #INNER_TYPE}, {@link #WILDCARD_BOUND}, or {@link * #TYPE_ARGUMENT}. */ public int getStep(final int index) { // Returns the type_path_kind of the path element of the given index. 
return typePathContainer[typePathOffset + 2 * index + 1]; } /** * Returns the index of the type argument that the given step is stepping into. This method should * only be used for steps whose value is {@link #TYPE_ARGUMENT}. * * @param index an index between 0 and {@link #getLength()}, exclusive. * @return the index of the type argument that the given step is stepping into. */ public int getStepArgument(final int index) { // Returns the type_argument_index of the path element of the given index. return typePathContainer[typePathOffset + 2 * index + 2]; } /** * Converts a type path in string form, in the format used by {@link #toString()}, into a TypePath * object. * * @param typePath a type path in string form, in the format used by {@link #toString()}. May be * {@literal null} or empty. * @return the corresponding TypePath object, or {@literal null} if the path is empty. */ public static TypePath fromString(final String typePath) { if (typePath == null || typePath.length() == 0) { return null; } int typePathLength = typePath.length(); ByteVector output = new ByteVector(typePathLength); output.putByte(0); int typePathIndex = 0; while (typePathIndex < typePathLength) { char c = typePath.charAt(typePathIndex++); if (c == '[') { output.put11(ARRAY_ELEMENT, 0); } else if (c == '.') { output.put11(INNER_TYPE, 0); } else if (c == '*') { output.put11(WILDCARD_BOUND, 0); } else if (c >= '0' && c <= '9') { int typeArg = c - '0'; while (typePathIndex < typePathLength) { c = typePath.charAt(typePathIndex++); if (c >= '0' && c <= '9') { typeArg = typeArg * 10 + c - '0'; } else if (c == ';') { break; } else { throw new IllegalArgumentException(); } } output.put11(TYPE_ARGUMENT, typeArg); } else { throw new IllegalArgumentException(); } } output.data[0] = (byte) (output.length / 2); return new TypePath(output.data, 0); } /** * Returns a string representation of this type path. 
{@link #ARRAY_ELEMENT} steps are represented * with '[', {@link #INNER_TYPE} steps with '.', {@link #WILDCARD_BOUND} steps with '*' and {@link * #TYPE_ARGUMENT} steps with their type argument index in decimal form followed by ';'. */ @Override public String toString() { int length = getLength(); StringBuilder result = new StringBuilder(length * 2); for (int i = 0; i < length; ++i) { switch (getStep(i)) { case ARRAY_ELEMENT: result.append('['); break; case INNER_TYPE: result.append('.'); break; case WILDCARD_BOUND: result.append('*'); break; case TYPE_ARGUMENT: result.append(getStepArgument(i)).append(';'); break; default: throw new AssertionError(); } } return result.toString(); } /** * Puts the type_path JVMS structure corresponding to the given TypePath into the given * ByteVector. * * @param typePath a TypePath instance, or {@literal null} for empty paths. * @param output where the type path must be put. */ static void put(final TypePath typePath, final ByteVector output) { if (typePath == null) { output.putByte(0); } else { int length = typePath.typePathContainer[typePath.typePathOffset] * 2 + 1; output.putByteArray(typePath.typePathContainer, typePath.typePathOffset, length); } } }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.doris.task; import org.apache.doris.analysis.LiteralExpr; import org.apache.doris.catalog.AggregateType; import org.apache.doris.catalog.Column; import org.apache.doris.catalog.DistributionInfo; import org.apache.doris.catalog.HashDistributionInfo; import org.apache.doris.catalog.KeysType; import org.apache.doris.catalog.OlapTable; import org.apache.doris.catalog.Partition; import org.apache.doris.catalog.PartitionInfo; import org.apache.doris.catalog.PartitionKey; import org.apache.doris.catalog.PrimitiveType; import org.apache.doris.catalog.RangePartitionInfo; import org.apache.doris.common.LoadException; import org.apache.doris.common.Pair; import org.apache.doris.load.DppConfig; import org.apache.doris.load.DppScheduler; import org.apache.doris.load.EtlSubmitResult; import org.apache.doris.load.LoadErrorHub; import org.apache.doris.load.LoadErrorHub.HubType; import org.apache.doris.load.LoadJob; import org.apache.doris.load.PartitionLoadInfo; import org.apache.doris.load.Source; import org.apache.doris.load.TableLoadInfo; import org.apache.doris.thrift.TStatusCode; import com.google.common.base.Function; import com.google.common.base.Preconditions; import 
com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Range; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; public class HadoopLoadPendingTask extends LoadPendingTask { private static final Logger LOG = LogManager.getLogger(HadoopLoadPendingTask.class); private Map<String, Object> etlTaskConf; public HadoopLoadPendingTask(LoadJob job) { super(job); } @Override protected void createEtlRequest() throws Exception { // yiguolei: add a db read lock here? because the schema maybe changed during create etl task db.readLock(); try { EtlTaskConf taskConf = new EtlTaskConf(); // output path taskConf.setOutputPath(getOutputPath()); // output file pattern taskConf.setOutputFilePattern(job.getLabel() + ".%(table)s.%(view)s.%(bucket)s"); // tables (partitions) Map<String, EtlPartitionConf> etlPartitions = createEtlPartitions(); Preconditions.checkNotNull(etlPartitions); taskConf.setEtlPartitions(etlPartitions); LoadErrorHub.Param info = load.getLoadErrorHubInfo(); // hadoop load only support mysql load error hub if (info != null && info.getType() == HubType.MYSQL_TYPE) { taskConf.setHubInfo(new EtlErrorHubInfo(this.job.getId(), info)); } etlTaskConf = taskConf.toDppTaskConf(); Preconditions.checkNotNull(etlTaskConf); } finally { db.readUnlock(); } } @Override protected EtlSubmitResult submitEtlJob(int retry) { LOG.info("begin submit hadoop etl job: {}", job); // retry different output path etlTaskConf.put("output_path", getOutputPath()); DppScheduler dppScheduler = new DppScheduler(job.getHadoopDppConfig()); EtlSubmitResult result = dppScheduler.submitEtlJob(job.getId(), job.getLabel(), job.getHadoopCluster(), db.getFullName(), etlTaskConf, retry); if (result != null && result.getStatus().getStatus_code() == TStatusCode.OK) { job.setHadoopEtlJobId(result.getEtlJobId()); } return result; } 
private Map<String, EtlPartitionConf> createEtlPartitions() throws LoadException { Map<String, EtlPartitionConf> etlPartitions = Maps.newHashMap(); for (Entry<Long, TableLoadInfo> tableEntry : job.getIdToTableLoadInfo().entrySet()) { long tableId = tableEntry.getKey(); TableLoadInfo tableLoadInfo = tableEntry.getValue(); OlapTable table = (OlapTable) db.getTable(tableId); if (table == null) { throw new LoadException("table does not exist. id: " + tableId); } // columns Map<String, EtlColumn> etlColumns = createEtlColumns(table); // partitions Map<Long, PartitionLoadInfo> idToPartitionLoadInfo = tableLoadInfo.getIdToPartitionLoadInfo(); for (Entry<Long, PartitionLoadInfo> partitionEntry : idToPartitionLoadInfo.entrySet()) { long partitionId = partitionEntry.getKey(); PartitionLoadInfo partitionLoadInfo = partitionEntry.getValue(); EtlPartitionConf etlPartitionConf = new EtlPartitionConf(); // columns etlPartitionConf.setColumns(etlColumns); // indices (views) Map<String, EtlIndex> etlIndices = createEtlIndices(table, partitionId); Preconditions.checkNotNull(etlIndices); etlPartitionConf.setIndices(etlIndices); // source file schema etlPartitionConf.setSources(createSources(partitionLoadInfo)); // partition info etlPartitionConf.setPartitionInfo(createPartitionInfo(table, partitionId)); etlPartitions.put(String.valueOf(partitionId), etlPartitionConf); } } return etlPartitions; } private Map<String, EtlColumn> createEtlColumns(OlapTable table) { Map<String, EtlColumn> etlColumns = Maps.newHashMap(); for (Column column : table.getBaseSchema()) { etlColumns.put(column.getName(), new EtlColumn(column)); } return etlColumns; } private Map<String, EtlIndex> createEtlIndices(OlapTable table, long partitionId) throws LoadException { Map<String, EtlIndex> etlIndices = Maps.newHashMap(); TableLoadInfo tableLoadInfo = job.getTableLoadInfo(table.getId()); for (Entry<Long, List<Column>> entry : table.getIndexIdToSchema().entrySet()) { long indexId = entry.getKey(); List<Column> 
indexColumns = entry.getValue(); Partition partition = table.getPartition(partitionId); if (partition == null) { throw new LoadException("partition does not exist. id: " + partitionId); } EtlIndex etlIndex = new EtlIndex(); etlIndex.setKeysType(table.getKeysType()); // index id etlIndex.setIndexId(indexId); // schema hash int schemaHash = table.getSchemaHashByIndexId(indexId); etlIndex.setSchemaHash(schemaHash); tableLoadInfo.addIndexSchemaHash(indexId, schemaHash); int keySize = 0; List<Map<String, Object>> columnRefs = Lists.newArrayList(); for (Column column : indexColumns) { Map<String, Object> dppColumn = Maps.newHashMap(); dppColumn.put("name", column.getName()); if (column.isKey()) { dppColumn.put("is_key", true); ++keySize; } else { dppColumn.put("is_key", false); String aggregation = "none"; if ("AGG_KEYS" == table.getKeysType().name()) { AggregateType aggregateType = column.getAggregationType(); if (AggregateType.SUM == aggregateType) { aggregation = "ADD"; } else { aggregation = aggregateType.name(); } } else if ("UNIQUE_KEYS" == table.getKeysType().name()) { aggregation = "REPLACE"; } dppColumn.put("aggregation_method", aggregation); } columnRefs.add(dppColumn); } // distribution infos DistributionInfo distributionInfo = partition.getDistributionInfo(); List<String> distributionColumnRefs = Lists.newArrayList(); etlIndex.setDistributionColumnRefs(distributionColumnRefs); etlIndex.setPartitionMethod("hash"); etlIndex.setHashMod(distributionInfo.getBucketNum()); switch (distributionInfo.getType()) { case RANDOM: etlIndex.setHashMethod("CRC32"); for (int i = 0; i < keySize; ++i) { distributionColumnRefs.add(columnRefs.get(i).get("name").toString()); } break; case HASH: etlIndex.setHashMethod("CRC32"); HashDistributionInfo hashDistributionInfo = (HashDistributionInfo) distributionInfo; for (Column column : hashDistributionInfo.getDistributionColumns()) { distributionColumnRefs.add(column.getName()); boolean isImplicit = true; Iterator<Map<String, Object>> 
iter = columnRefs.iterator(); while (iter.hasNext()) { if (iter.next().get("name").equals(column.getName())) { isImplicit = false; break; } } if (isImplicit) { Map<String, Object> dppColumn = Maps.newHashMap(); dppColumn.put("name", column.getName()); dppColumn.put("is_key", true); dppColumn.put("is_implicit", true); columnRefs.add(keySize, dppColumn); ++keySize; } } break; default: LOG.warn("unknown distribution type. type: {}", distributionInfo.getType().name()); throw new LoadException("unknown distribution type. type: " + distributionInfo.getType().name()); } etlIndex.setPidKeyCount(keySize); etlIndex.setColumnRefs(columnRefs); etlIndices.put(String.valueOf(indexId), etlIndex); } return etlIndices; } private Map<String, EtlSource> createSources(PartitionLoadInfo partitionLoadInfo) { Map<String, EtlSource> sources = Maps.newHashMap(); int sourceIndex = 0; for (Source source : partitionLoadInfo.getSources()) { sources.put("schema" + sourceIndex, new EtlSource(source)); ++sourceIndex; } return sources; } private EtlPartitionInfo createPartitionInfo(OlapTable table, long partitionId) { PartitionInfo partitionInfo = table.getPartitionInfo(); switch (partitionInfo.getType()) { case RANGE: RangePartitionInfo rangePartitionInfo = (RangePartitionInfo) partitionInfo; // partition columns List<String> partitionColumnNames = Lists.newArrayList(); for (Column column : rangePartitionInfo.getPartitionColumns()) { partitionColumnNames.add(column.getName()); } // begin keys // is max partition Range<PartitionKey> range = rangePartitionInfo.getRange(partitionId); boolean isMaxPartition = range.upperEndpoint().isMaxValue(); // start keys List<LiteralExpr> rangeKeyExprs = range.lowerEndpoint().getKeys(); List<Object> startKeys = Lists.newArrayList(); for (int i = 0; i < rangeKeyExprs.size(); ++i) { LiteralExpr literalExpr = rangeKeyExprs.get(i); Object keyValue = literalExpr.getRealValue(); startKeys.add(keyValue); } // end keys // is empty list when max partition List<Object> 
endKeys = Lists.newArrayList(); if (!isMaxPartition) { rangeKeyExprs = range.upperEndpoint().getKeys(); for (int i = 0; i < rangeKeyExprs.size(); ++i) { LiteralExpr literalExpr = rangeKeyExprs.get(i); Object keyValue = literalExpr.getRealValue(); endKeys.add(keyValue); } } return new EtlPartitionInfo(table.getId(), partitionColumnNames, "range", startKeys, endKeys, isMaxPartition); case UNPARTITIONED: break; default: LOG.error("unknown partition type. type: {}", partitionInfo.getType().name()); break; } return null; } private String getOutputPath() { job.setHadoopEtlOutputDir(String.valueOf(System.currentTimeMillis())); String loadLabel = job.getLabel(); DppConfig dppConfig = job.getHadoopDppConfig(); String outputPath = DppScheduler.getEtlOutputPath(dppConfig.getFsDefaultName(), dppConfig.getOutputPath(), job.getDbId(), loadLabel, job.getHadoopEtlOutputDir()); return outputPath; } private class EtlTaskConf { private static final String JOB_TYPE = "palo"; private String outputFilePattern; private String outputPath; private Map<String, EtlPartitionConf> etlPartitions; private EtlErrorHubInfo hubInfo; public void setOutputFilePattern(String outputFilePattern) { this.outputFilePattern = outputFilePattern; } public void setOutputPath(String outputPath) { this.outputPath = outputPath; } public void setEtlPartitions(Map<String, EtlPartitionConf> etlPartitions) { this.etlPartitions = etlPartitions; } public void setHubInfo(EtlErrorHubInfo info) { this.hubInfo = info; } public Map<String, Object> toDppTaskConf() { // dpp group -> tables // dpp tables -> partitions // dpp views -> indices Map<String, Object> taskConf = Maps.newHashMap(); taskConf.put("job_type", JOB_TYPE); taskConf.put("output_file_pattern", outputFilePattern); taskConf.put("output_path", outputPath); taskConf.put("tables", Maps.transformValues(etlPartitions, new Function<EtlPartitionConf, Map<String, Object>>() { @Override public Map<String, Object> apply(EtlPartitionConf partition) { return 
partition.toDppPartitionConf(); } })); if (hubInfo != null) { taskConf.put("hub_info", hubInfo.toDppHubInfo()); } return taskConf; } } private class EtlErrorHubInfo { LoadErrorHub.Param hubInfo; long jobId; private static final int MAX_EXPORT_LINE_NUM = 10; private static final int MAX_EXPORT_LINE_SIZE = 500; public EtlErrorHubInfo(long jobId, LoadErrorHub.Param info) { this.jobId = jobId; this.hubInfo = info; } public Map<String, Object> toDppHubInfo() { Map<String, Object> dppHubInfo = hubInfo.toDppConfigInfo(); dppHubInfo.put("job_id", jobId); dppHubInfo.put("max_export_line_num", MAX_EXPORT_LINE_NUM); dppHubInfo.put("max_export_line_size", MAX_EXPORT_LINE_SIZE); return dppHubInfo; } } private class EtlPartitionConf { private Map<String, EtlSource> sources; private Map<String, EtlColumn> columns; private Map<String, EtlIndex> indices; private EtlPartitionInfo partitionInfo; public void setSources(Map<String, EtlSource> sources) { this.sources = sources; } public void setColumns(Map<String, EtlColumn> columns) { this.columns = columns; } public void setIndices(Map<String, EtlIndex> indices) { this.indices = indices; } public void setPartitionInfo(EtlPartitionInfo partitionInfo) { this.partitionInfo = partitionInfo; } public Map<String, Object> toDppPartitionConf() { Map<String, Object> partitionConf = Maps.newHashMap(); partitionConf.put("source_file_schema", Maps.transformValues(sources, new Function<EtlSource, Map<String, Object>>() { @Override public Map<String, Object> apply(EtlSource source) { return source.toDppSource(); } })); partitionConf.put("columns", Maps.transformValues(columns, new Function<EtlColumn, Map<String, Object>>() { @Override public Map<String, Object> apply(EtlColumn column) { return column.toDppColumn(); } })); partitionConf.put("views", Maps.transformValues(indices, new Function<EtlIndex, Map<String, Object>>() { @Override public Map<String, Object> apply(EtlIndex index) { return index.toDppView(); } })); if (partitionInfo != null) { 
partitionConf.put("partition_info", partitionInfo.toDppPartitionInfo()); } return partitionConf; } } private class EtlSource { private final Source source; public EtlSource(Source source) { this.source = source; } public Map<String, Object> toDppSource() { Map<String, Object> dppSource = Maps.newHashMap(); dppSource.put("file_urls", source.getFileUrls()); dppSource.put("columns", source.getColumnNames()); dppSource.put("column_separator", source.getColumnSeparator()); dppSource.put("is_negative", source.isNegative()); Map<String, Pair<String, List<String>>> columnToFunction = source.getColumnToFunction(); if (columnToFunction != null && !columnToFunction.isEmpty()) { Map<String, Map<String, Object>> columnMapping = Maps.newHashMap(); for (Entry<String, Pair<String, List<String>>> entry : columnToFunction.entrySet()) { Pair<String, List<String>> functionPair = entry.getValue(); Map<String, Object> functionMap = Maps.newHashMap(); functionMap.put("function_name", functionPair.first); functionMap.put("args", functionPair.second); columnMapping.put(entry.getKey(), functionMap); } dppSource.put("column_mappings", columnMapping); } return dppSource; } } private class EtlColumn { private final Column column; public EtlColumn(Column column) { this.column = column; } public Map<String, Object> toDppColumn() { Map<String, Object> dppColumn = Maps.newHashMap(); // column type PrimitiveType type = column.getDataType(); String columnType = null; switch (type) { case TINYINT: columnType = "TINY"; break; case SMALLINT: columnType = "SHORT"; break; case INT: columnType = "INT"; break; case BIGINT: columnType = "LONG"; break; case LARGEINT: columnType = "LARGEINT"; break; case FLOAT: columnType = "FLOAT"; break; case DOUBLE: columnType = "DOUBLE"; break; case DATE: columnType = "DATE"; break; case DATETIME: columnType = "DATETIME"; break; case CHAR: columnType = "STRING"; break; case VARCHAR: columnType = "VARCHAR"; break; case HLL: columnType = "HLL"; break; case DECIMAL: 
columnType = "DECIMAL"; break; case DECIMALV2: columnType = "DECIMAL"; break; default: columnType = type.toString(); break; } dppColumn.put("column_type", columnType); // is allow null if (column.isAllowNull()) { dppColumn.put("is_allow_null", column.isAllowNull()); } // default value if (column.getDefaultValue() != null) { dppColumn.put("default_value", column.getDefaultValue()); } if (column.isAllowNull() && null == column.getDefaultValue()) { dppColumn.put("default_value", "\\N"); } // string length if (type == PrimitiveType.CHAR || type == PrimitiveType.VARCHAR || type == PrimitiveType.HLL) { dppColumn.put("string_length", column.getStrLen()); } // decimal precision scale if (type == PrimitiveType.DECIMAL || type == PrimitiveType.DECIMALV2) { dppColumn.put("precision", column.getPrecision()); dppColumn.put("scale", column.getScale()); } return dppColumn; } public boolean isKey() { return column.isKey(); } public AggregateType getAggregationType() { return column.getAggregationType(); } } private class EtlIndex { private static final String OUTPUT_FORMAT = "palo"; private static final String BE_INDEX_NAME = "PRIMARY"; private KeysType keysType; private long indexId; private List<Map<String, Object>> columnRefs; private int schemaHash; private String partitionMethod; private String hashMethod; private int hashMod; private List<String> distributionColumnRefs; private int pidKeyCount; public void setKeysType(KeysType keysType) { this.keysType = keysType; } public void setIndexId(long indexId) { this.indexId = indexId; } public void setColumnRefs(List<Map<String, Object>> columnRefs) { this.columnRefs = columnRefs; } public void setSchemaHash(int schemaHash) { this.schemaHash = schemaHash; } public void setPartitionMethod(String partitionMethod) { this.partitionMethod = partitionMethod; } public void setHashMethod(String hashMethod) { this.hashMethod = hashMethod; } public void setHashMod(int hashMod) { this.hashMod = hashMod; } public void 
setDistributionColumnRefs(List<String> distributionColumnRefs) { this.distributionColumnRefs = distributionColumnRefs; } public void setPidKeyCount(int pidKeyCount) { this.pidKeyCount = pidKeyCount; } public Map<String, Object> toDppView() { Map<String, Object> index = Maps.newHashMap(); index.put("keys_type", keysType.name()); index.put("output_format", OUTPUT_FORMAT); index.put("index_name", BE_INDEX_NAME); index.put("table_name", String.valueOf(indexId)); index.put("table_id", indexId); index.put("column_refs", columnRefs); index.put("schema_hash", schemaHash); index.put("partition_method", partitionMethod); index.put("hash_method", hashMethod); index.put("hash_mod", hashMod); index.put("partition_column_refs", distributionColumnRefs); index.put("pid_key_count", pidKeyCount); return index; } } private class EtlPartitionInfo { private long tableId; private List<String> tablePartitionColumnRefs; private String tablePartitionMethod; private List<Object> startKeys; private List<Object> endKeys; private boolean isMaxPartition; public EtlPartitionInfo(long tableId, List<String> tablePartitionColumnRefs, String tablePartitionMethod, List<Object> startKeys, List<Object> endKeys, boolean isMaxPartition) { this.tableId = tableId; this.tablePartitionColumnRefs = tablePartitionColumnRefs; this.tablePartitionMethod = tablePartitionMethod; this.startKeys = startKeys; this.endKeys = endKeys; this.isMaxPartition = isMaxPartition; } public Map<String, Object> toDppPartitionInfo() { Map<String, Object> partitionInfo = Maps.newHashMap(); partitionInfo.put("group_id", tableId); partitionInfo.put("group_partition_column_refs", tablePartitionColumnRefs); partitionInfo.put("group_partition_method", tablePartitionMethod); partitionInfo.put("start_keys", startKeys); partitionInfo.put("end_keys", endKeys); partitionInfo.put("is_max_partition", isMaxPartition); return partitionInfo; } } }
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.core.util;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;

import javax.xml.parsers.ParserConfigurationException;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.core.xml.XMLParserFactoryProducer;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * Reflection-based XML (de)serialization helper for Kettle step/job metadata.
 */
public class SerializationHelper {

  private static final String INDENT_STRING = "    ";

  /**
   * This method will perform the work that used to be done by hand in each kettle input meta for: readData(Node
   * stepnode). We handle all primitive types, complex user types, arrays, lists and any number of nested object
   * levels, via recursion of this method.
   *
   * @param object
   *          The object to be persisted
   * @param node
   *          The node to 'attach' our XML to
   */
  public static void read( Object object, Node node ) {
    // NOTE(review): the comment claims "public, private, protected, package,
    // everything", but getFields() returns only PUBLIC fields (including
    // inherited ones) — confirm whether getDeclaredFields() was intended.
    // get this classes fields, public, private, protected, package, everything
    Field[] fields = object.getClass().getFields();
    for ( Field field : fields ) {
      // ignore fields which are final, static or transient
      if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() )
          || Modifier.isTransient( field.getModifiers() ) ) {
        continue;
      }
      // if the field is not accessible (private), we'll open it up so we can operate on it
      if ( !field.isAccessible() ) {
        field.setAccessible( true );
      }
      // check if we're going to try to read an array
      if ( field.getType().isArray() ) {
        try {
          // get the node (if available) for the field
          Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
          if ( fieldNode == null ) {
            // doesn't exist (this is possible if fields were empty/null when persisted)
            continue;
          }
          // get the Java classname for the array elements
          String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
          Class<?> clazz = null;
          // primitive types require special handling: Class.forName can't resolve them
          if ( fieldClassName.equals( "boolean" ) ) {
            clazz = boolean.class;
          } else if ( fieldClassName.equals( "int" ) ) {
            clazz = int.class;
          } else if ( fieldClassName.equals( "float" ) ) {
            clazz = float.class;
          } else if ( fieldClassName.equals( "double" ) ) {
            clazz = double.class;
          } else if ( fieldClassName.equals( "long" ) ) {
            clazz = long.class;
          } else {
            // normal, non primitive array class
            clazz = Class.forName( fieldClassName );
          }
          // get the child nodes for the field
          NodeList childrenNodes = fieldNode.getChildNodes();
          // create a new, appropriately sized array
          int arrayLength = 0;
          for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
            Node child = childrenNodes.item( i );
            // ignore TEXT_NODE, they'll cause us to have a larger count than reality, even if they are empty
            if ( child.getNodeType() != Node.TEXT_NODE ) {
              arrayLength++;
            }
          }
          // create a new instance of our array
          Object array = Array.newInstance( clazz, arrayLength );
          // set the new array on the field (on object, passed in)
          field.set( object, array );
          int arrayIndex = 0;
          for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
            Node child = childrenNodes.item( i );
            if ( child.getNodeType() == Node.TEXT_NODE ) {
              continue;
            }
            // roll through all of our array elements setting them as encountered;
            // Strings/Numbers are rebuilt via their (String) constructor
            if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz ) ) {
              Constructor<?> constructor = clazz.getConstructor( String.class );
              Object instance = constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) );
              Array.set( array, arrayIndex++, instance );
            } else if ( Boolean.class.isAssignableFrom( clazz ) || boolean.class.isAssignableFrom( clazz ) ) {
              Object value = Boolean.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
              Array.set( array, arrayIndex++, value );
            } else if ( Integer.class.isAssignableFrom( clazz ) || int.class.isAssignableFrom( clazz ) ) {
              Object value = Integer.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
              Array.set( array, arrayIndex++, value );
            } else if ( Float.class.isAssignableFrom( clazz ) || float.class.isAssignableFrom( clazz ) ) {
              Object value = Float.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
              Array.set( array, arrayIndex++, value );
            } else if ( Double.class.isAssignableFrom( clazz ) || double.class.isAssignableFrom( clazz ) ) {
              Object value = Double.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
              Array.set( array, arrayIndex++, value );
            } else if ( Long.class.isAssignableFrom( clazz ) || long.class.isAssignableFrom( clazz ) ) {
              Object value = Long.valueOf( XMLHandler.getTagAttribute( child, "value" ) );
              Array.set( array, arrayIndex++, value );
            } else {
              // create an instance of 'fieldClassName' (requires a no-arg constructor)
              Object instance = clazz.newInstance();
              // add the instance to the array
              Array.set( array, arrayIndex++, instance );
              // read child, the same way as the parent
              read( instance, child );
            }
          }
        } catch ( Throwable t ) {
          // best-effort: a bad field is skipped, not fatal
          t.printStackTrace(); // TODO: log this
        }
      } else if ( List.class.isAssignableFrom( field.getType() ) ) {
        // handle lists
        try {
          // get the node (if available) for the field
          Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
          if ( fieldNode == null ) {
            // doesn't exist (this is possible if fields were empty/null when persisted)
            continue;
          }
          // get the Java classname for the list elements (one class per list)
          String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
          Class<?> clazz = Class.forName( fieldClassName );
          // create a new list and install it on the field before populating
          List<Object> list = new ArrayList<Object>();
          field.set( object, list );
          // iterate over all of the array elements and add them one by one as encountered
          NodeList childrenNodes = fieldNode.getChildNodes();
          for ( int i = 0; i < childrenNodes.getLength(); i++ ) {
            Node child = childrenNodes.item( i );
            if ( child.getNodeType() == Node.TEXT_NODE ) {
              continue;
            }
            // create an instance of 'fieldClassName'
            if ( String.class.isAssignableFrom( clazz ) || Number.class.isAssignableFrom( clazz )
                || Boolean.class.isAssignableFrom( clazz ) ) {
              Constructor<?> constructor = clazz.getConstructor( String.class );
              Object instance = constructor.newInstance( XMLHandler.getTagAttribute( child, "value" ) );
              list.add( instance );
            } else {
              // read child, the same way as the parent
              Object instance = clazz.newInstance();
              // add the instance to the array
              list.add( instance );
              read( instance, child );
            }
          }
        } catch ( Throwable t ) {
          t.printStackTrace(); // TODO: log this
        }
      } else {
        // we're handling a regular field (not an array or list)
        try {
          Object value = XMLHandler.getTagValue( node, field.getName() );
          if ( value == null ) {
            continue;
          }
          if ( !( field.getType().isPrimitive() && "".equals( value ) ) ) {
            // skip setting of primitives if we see null
            if ( "".equals( value ) ) {
              field.set( object, value );
            } else if ( field.getType().isPrimitive() ) {
              // special primitive handling
              if ( double.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Double.parseDouble( value.toString() ) );
              } else if ( float.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Float.parseFloat( value.toString() ) );
              } else if ( long.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Long.parseLong( value.toString() ) );
              } else if ( int.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, Integer.parseInt( value.toString() ) );
              } else if ( byte.class.isAssignableFrom( field.getType() ) ) {
                // NOTE(review): this sets a byte[] (getBytes()) into a byte field,
                // which would throw IllegalArgumentException (swallowed by the
                // catch below) — confirm whether a single byte was intended.
                field.set( object, value.toString().getBytes() );
              } else if ( boolean.class.isAssignableFrom( field.getType() ) ) {
                field.set( object, "true".equalsIgnoreCase( value.toString() ) );
              }
            } else if ( String.class.isAssignableFrom( field.getType() )
                || Number.class.isAssignableFrom( field.getType() ) ) {
              Constructor<?> constructor = field.getType().getConstructor( String.class );
              Object instance = constructor.newInstance( value );
              field.set( object, instance );
            } else {
              // we don't know what we're handling, but we'll give it a shot
              Node fieldNode = XMLHandler.getSubNode( node, field.getName() );
              if ( fieldNode == null ) {
                // doesn't exist (this is possible if fields were empty/null when persisted)
                continue;
              }
              // get the Java classname for the nested object and recurse into it
              String fieldClassName = XMLHandler.getTagAttribute( fieldNode, "class" );
              Class<?> clazz = Class.forName( fieldClassName );
              Object instance = clazz.newInstance();
              field.set( object, instance );
              read( instance, fieldNode );
            }
          }
        } catch ( Throwable t ) {
          // TODO: log this
          t.printStackTrace();
        }
      }
    }
  }

  /**
   * This method will perform the work that used to be done by hand in each kettle
   * input meta for: getXML(). We handle
   * all primitive types, complex user types, arrays, lists and any number of nested object levels, via recursion of
   * this method.
   *
   * @param object the object whose public, non-final/static/transient fields are serialized
   * @param indentLevel current indentation depth for the emitted XML
   * @param buffer the buffer the XML is appended to
   */
  @SuppressWarnings( "unchecked" )
  public static void write( Object object, int indentLevel, StringBuilder buffer ) {
    // don't even attempt to persist
    if ( object == null ) {
      return;
    }
    // NOTE(review): getFields() returns only PUBLIC fields, despite the
    // original comment claiming otherwise (mirrors read()).
    // get this classes fields, public, private, protected, package, everything
    Field[] fields = object.getClass().getFields();
    for ( Field field : fields ) {
      // ignore fields which are final, static or transient
      if ( Modifier.isFinal( field.getModifiers() ) || Modifier.isStatic( field.getModifiers() )
          || Modifier.isTransient( field.getModifiers() ) ) {
        continue;
      }
      // if the field is not accessible (private), we'll open it up so we can operate on it
      if ( !field.isAccessible() ) {
        field.setAccessible( true );
      }
      try {
        Object fieldValue = field.get( object );
        // no value? null? skip it!
        if ( fieldValue == null || "".equals( fieldValue ) ) {
          continue;
        }
        if ( field.getType().isPrimitive() || String.class.isAssignableFrom( field.getType() )
            || Number.class.isAssignableFrom( field.getType() ) ) {
          indent( buffer, indentLevel );
          buffer.append( XMLHandler.addTagValue( field.getName(), fieldValue.toString() ) );
        } else if ( field.getType().isArray() ) {
          // write array values
          int length = Array.getLength( fieldValue );
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer
            .append( "<" + field.getName() + " class=\"" + fieldValue.getClass().getComponentType().getName()
              + "\">" ).append( Const.CR );
          for ( int i = 0; i < length; i++ ) {
            Object childObject = Array.get( fieldValue, i );
            // handle all strings/numbers
            if ( String.class.isAssignableFrom( childObject.getClass() )
                || Number.class.isAssignableFrom( childObject.getClass() ) ) {
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else if ( Boolean.class.isAssignableFrom( childObject.getClass() )
                || boolean.class.isAssignableFrom( childObject.getClass() ) ) {
              // handle booleans (special case)
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( fieldValue.getClass().getComponentType().getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else {
              // array element is a user defined/complex type, recurse into it
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" + fieldValue.getClass().getComponentType().getSimpleName() + ">" ).append( Const.CR );
              write( childObject, indentLevel + 1, buffer );
              indent( buffer, indentLevel + 1 );
              buffer
                .append( "</" + fieldValue.getClass().getComponentType().getSimpleName() + ">" ).append( Const.CR );
            }
          }
          // close node
          // NOTE(review): uses a hard-coded single space instead of
          // indent(buffer, indentLevel) like the List branch below — the
          // closing tag is mis-indented, though the XML stays well-formed.
          buffer.append( " </" + field.getName() + ">" ).append( Const.CR );
        } else if ( List.class.isAssignableFrom( field.getType() ) ) {
          // write list values
          List<Object> list = (List<Object>) fieldValue;
          // empty lists are skipped entirely (element class is taken from the first item)
          if ( list.size() == 0 ) {
            continue;
          }
          Class<?> listClass = list.get( 0 ).getClass();
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append( "<" + field.getName() + " class=\"" + listClass.getName() + "\">" ).append( Const.CR );
          for ( Object childObject : list ) {
            // handle all strings/numbers
            if ( String.class.isAssignableFrom( childObject.getClass() )
                || Number.class.isAssignableFrom( childObject.getClass() ) ) {
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( listClass.getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else if ( Boolean.class.isAssignableFrom( childObject.getClass() )
                || boolean.class.isAssignableFrom( childObject.getClass() ) ) {
              // handle booleans (special case)
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" ).append( listClass.getSimpleName() );
              buffer.append( " value=\"" + childObject.toString() + "\"/>" ).append( Const.CR );
            } else {
              // array element is a user defined/complex type, recurse into it
              indent( buffer, indentLevel + 1 );
              buffer.append( "<" + listClass.getSimpleName() + ">" ).append( Const.CR );
              write( childObject, indentLevel + 1, buffer );
              indent( buffer, indentLevel + 1 );
              buffer.append( "</" + listClass.getSimpleName() + ">" ).append( Const.CR );
            }
          }
          // close node
          indent( buffer, indentLevel );
          buffer.append( "</" + field.getName() + ">" ).append( Const.CR );
        } else {
          // if we don't now what it is, let's treat it like a first class citizen and try to write it out
          // open node (add class name attribute)
          indent( buffer, indentLevel );
          buffer.append( "<" + field.getName() + " class=\"" + fieldValue.getClass().getName() + "\">" )
            .append( Const.CR );
          write( fieldValue, indentLevel + 1, buffer );
          // close node
          indent( buffer, indentLevel );
          buffer.append( "</" + field.getName() + ">" ).append( Const.CR );
        }
      } catch ( Throwable t ) {
        // best-effort: a bad field is skipped, not fatal
        t.printStackTrace(); // TODO: log this
      }
    }
  }

  /**
   * Handle saving of the input (object) to the kettle repository using the most simple method available, by calling
   * write and then saving the job-xml as a job attribute.
   *
   * @param object the job entry to serialize
   * @param rep the repository to save into
   * @param id_job the owning job's id
   * @param id_job_entry the job entry's id
   * @throws KettleException if the repository write fails
   */
  public static void saveJobRep( Object object, Repository rep, ObjectId id_job, ObjectId id_job_entry )
    throws KettleException {
    StringBuilder sb = new StringBuilder( 1024 );
    write( object, 0, sb );
    rep.saveJobEntryAttribute( id_job, id_job_entry, "job-xml", sb.toString() );
  }

  /**
   * Handle reading of the input (object) from the kettle repository by getting the job-xml from the repository step
   * attribute string and then re-hydrate the job entry (object) with our already existing read method.
* * @param object * @param rep * @param id_step * @param databases * @throws KettleException */ public static void readJobRep( Object object, Repository rep, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { String jobXML = rep.getJobEntryAttributeString( id_step, "job-xml" ); ByteArrayInputStream bais = new ByteArrayInputStream( jobXML.getBytes() ); Document doc = XMLParserFactoryProducer.createSecureDocBuilderFactory().newDocumentBuilder().parse( bais ); read( object, doc.getDocumentElement() ); } catch ( ParserConfigurationException ex ) { throw new KettleException( ex.getMessage(), ex ); } catch ( SAXException ex ) { throw new KettleException( ex.getMessage(), ex ); } catch ( IOException ex ) { throw new KettleException( ex.getMessage(), ex ); } } /** * Handle saving of the input (object) to the kettle repository using the most simple method available, by calling * write and then saving the step-xml as a step attribute. * * @param object * @param rep * @param id_transformation * @param id_step * @throws KettleException */ public static void saveStepRep( Object object, Repository rep, ObjectId id_transformation, ObjectId id_step ) throws KettleException { StringBuilder sb = new StringBuilder( 1024 ); write( object, 0, sb ); rep.saveStepAttribute( id_transformation, id_step, "step-xml", sb.toString() ); } /** * Handle reading of the input (object) from the kettle repository by getting the step-xml from the repository step * attribute string and then re-hydrate the step (object) with our already existing read method. 
* * @param object * @param rep * @param id_step * @param databases * @param counters * @throws KettleException */ public static void readStepRep( Object object, Repository rep, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { String stepXML = rep.getStepAttributeString( id_step, "step-xml" ); ByteArrayInputStream bais = new ByteArrayInputStream( stepXML.getBytes() ); Document doc = XMLParserFactoryProducer.createSecureDocBuilderFactory().newDocumentBuilder().parse( bais ); read( object, doc.getDocumentElement() ); } catch ( ParserConfigurationException ex ) { throw new KettleException( ex.getMessage(), ex ); } catch ( SAXException ex ) { throw new KettleException( ex.getMessage(), ex ); } catch ( IOException ex ) { throw new KettleException( ex.getMessage(), ex ); } } private static void indent( StringBuilder sb, int indentLevel ) { for ( int i = 0; i < indentLevel; i++ ) { sb.append( INDENT_STRING ); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.service;

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.management.MBeanServer;
import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.sun.management.GarbageCollectionNotificationInfo;
import org.apache.cassandra.io.sstable.SSTableDeletingTask;
import org.apache.cassandra.utils.StatusLogger;

/**
 * Listens for JMX garbage-collection notifications, logs each GC (at a level
 * chosen by how long it took), and accumulates per-GC statistics into an
 * immutable {@link State} snapshot that can be read-and-reset atomically via
 * {@link #getAndResetStats()}.
 *
 * Instances also register themselves as an MBean under {@link #MBEAN_NAME};
 * {@link #register()} wires a singleton up to every GC bean in the JVM.
 */
public class GCInspector implements NotificationListener, GCInspectorMXBean
{
    public static final String MBEAN_NAME = "org.apache.cassandra.service:type=GCInspector";
    private static final Logger logger = LoggerFactory.getLogger(GCInspector.class);

    // GCs shorter than this (ms) are logged at DEBUG rather than INFO.
    final static long MIN_LOG_DURATION = 200;
    // GCs longer than this (ms) additionally trigger a StatusLogger thread-pool dump.
    final static long MIN_LOG_DURATION_TPSTATS = 1000;

    /*
     * The field from java.nio.Bits that tracks the total number of allocated
     * bytes of direct memory acquired via ByteBuffer.allocateDirect that have not been GCed.
     */
    final static Field BITS_TOTAL_CAPACITY;

    static
    {
        // Best-effort reflective access; on failure BITS_TOTAL_CAPACITY stays null
        // and getAllocatedDirectMemory() reports the sentinel -1.
        Field temp = null;
        try
        {
            Class<?> bitsClass = Class.forName("java.nio.Bits");
            Field f = bitsClass.getDeclaredField("totalCapacity");
            f.setAccessible(true);
            temp = f;
        }
        catch (Throwable t)
        {
            logger.debug("Error accessing field of java.nio.Bits", t);
            //Don't care, will just return the dummy value -1 if we can't get at the field in this JVM
        }
        BITS_TOTAL_CAPACITY = temp;
    }

    /**
     * Immutable accumulator of GC statistics since the last reset.  A new State
     * is derived from the previous one on every GC event (see the CAS loop in
     * handleNotification), so readers always observe a consistent snapshot.
     */
    static final class State
    {
        final double maxRealTimeElapsed;          // longest single GC (ms) seen in this window
        final double totalRealTimeElapsed;        // sum of GC durations (ms)
        final double sumSquaresRealTimeElapsed;   // sum of squared durations, for variance/stddev
        final double totalBytesReclaimed;         // sum of (used-before - used-after) across pools
        final double count;                       // number of GC events folded in
        final long startNanos;                    // nanoTime when this window began (carried across derivations)

        /** Derive a new snapshot by folding one GC event into {@code prev}. */
        State(double extraElapsed, double extraBytes, State prev)
        {
            this.totalRealTimeElapsed = prev.totalRealTimeElapsed + extraElapsed;
            this.totalBytesReclaimed = prev.totalBytesReclaimed + extraBytes;
            this.sumSquaresRealTimeElapsed = prev.sumSquaresRealTimeElapsed + (extraElapsed * extraElapsed);
            this.startNanos = prev.startNanos;
            this.count = prev.count + 1;
            this.maxRealTimeElapsed = Math.max(prev.maxRealTimeElapsed, extraElapsed);
        }

        /** Fresh, empty window starting now. */
        State()
        {
            count = maxRealTimeElapsed = sumSquaresRealTimeElapsed = totalRealTimeElapsed = totalBytesReclaimed = 0;
            startNanos = System.nanoTime();
        }
    }

    // Current statistics window; swapped atomically (CAS on update, getAndSet on reset).
    final AtomicReference<State> state = new AtomicReference<>(new State());

    /**
     * Registers this instance as an MXBean.  A name collision or other JMX
     * failure is fatal (wrapped in RuntimeException).
     */
    public GCInspector()
    {
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        try
        {
            mbs.registerMBean(this, new ObjectName(MBEAN_NAME));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates a singleton inspector and subscribes it to notifications from
     * every garbage-collector MXBean currently registered in the platform
     * MBean server.
     *
     * @throws Exception on any JMX failure
     */
    public static void register() throws Exception
    {
        GCInspector inspector = new GCInspector();
        MBeanServer server = ManagementFactory.getPlatformMBeanServer();
        ObjectName gcName = new ObjectName(ManagementFactory.GARBAGE_COLLECTOR_MXBEAN_DOMAIN_TYPE + ",*");
        for (ObjectName name : server.queryNames(gcName, null))
        {
            server.addNotificationListener(name, inspector, null, null);
        }
    }

    /**
     * JMX callback invoked after each garbage collection.  Builds a per-pool
     * usage summary, folds the event into the statistics window, logs at a
     * level scaled to the GC duration, and reschedules failed sstable
     * deletions after a full (ConcurrentMarkSweep) collection.
     *
     * @param notification the GC notification (other types are ignored)
     * @param handback     unused JMX handback object
     */
    public void handleNotification(Notification notification, Object handback)
    {
        String type = notification.getType();
        if (type.equals(GarbageCollectionNotificationInfo.GARBAGE_COLLECTION_NOTIFICATION))
        {
            // retrieve the garbage collection notification information
            CompositeData cd = (CompositeData) notification.getUserData();
            GarbageCollectionNotificationInfo info = GarbageCollectionNotificationInfo.from(cd);

            long duration = info.getGcInfo().getDuration();

            StringBuilder sb = new StringBuilder();
            sb.append(info.getGcName()).append(" GC in ").append(duration).append("ms. ");
            long bytes = 0;
            // Sort pool names so the log line is deterministic across GCs.
            List<String> keys = new ArrayList<>(info.getGcInfo().getMemoryUsageBeforeGc().keySet());
            Collections.sort(keys);
            for (String key : keys)
            {
                MemoryUsage before = info.getGcInfo().getMemoryUsageBeforeGc().get(key);
                MemoryUsage after = info.getGcInfo().getMemoryUsageAfterGc().get(key);
                // Only report pools whose usage actually changed.
                if (after != null && after.getUsed() != before.getUsed())
                {
                    sb.append(key).append(": ").append(before.getUsed());
                    sb.append(" -> ");
                    sb.append(after.getUsed());
                    // NOTE(review): separator is suppressed only when the changed pool is the
                    // last key overall, so a trailing "; " can appear if the final pool was
                    // unchanged — cosmetic log quirk, left as-is.
                    if (!key.equals(keys.get(keys.size() - 1)))
                        sb.append("; ");
                    // bytes can go negative for a pool that grew; it is summed as reclaimed anyway.
                    bytes += before.getUsed() - after.getUsed();
                }
            }

            // Lock-free fold of this event into the current window: retry the CAS
            // until our derived snapshot replaces the one we read.
            while (true)
            {
                State prev = state.get();
                if (state.compareAndSet(prev, new State(duration, bytes, prev)))
                    break;
            }

            String st = sb.toString();
            if (duration > MIN_LOG_DURATION)
                logger.info(st);
            else if (logger.isDebugEnabled())
                logger.debug(st);

            // Long pauses also dump thread-pool status to help diagnose the stall.
            if (duration > MIN_LOG_DURATION_TPSTATS)
                StatusLogger.log();

            // if we just finished a full collection and we're still using a lot of memory, try to reduce the pressure
            if (info.getGcName().equals("ConcurrentMarkSweep"))
                SSTableDeletingTask.rescheduleFailedTasks();
        }
    }

    /**
     * Atomically returns the accumulated statistics and resets the window to a
     * fresh, empty State starting now.
     */
    public State getTotalSinceLastCheck()
    {
        return state.getAndSet(new State());
    }

    /**
     * Snapshots and resets the statistics window, flattened into a double[7]:
     * [0] window length (ms), [1] max GC (ms), [2] total GC (ms),
     * [3] sum of squared GC durations, [4] bytes reclaimed, [5] GC count,
     * [6] allocated direct memory in bytes (-1 if unavailable).
     */
    public double[] getAndResetStats()
    {
        State state = getTotalSinceLastCheck();
        double[] r = new double[7];
        r[0] = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - state.startNanos);
        r[1] = state.maxRealTimeElapsed;
        r[2] = state.totalRealTimeElapsed;
        r[3] = state.sumSquaresRealTimeElapsed;
        r[4] = state.totalBytesReclaimed;
        r[5] = state.count;
        r[6] = getAllocatedDirectMemory();
        return r;
    }

    /**
     * Reads java.nio.Bits.totalCapacity (live direct-memory bytes) via the
     * reflective handle captured at class load; returns -1 if the field could
     * not be accessed in this JVM.
     */
    private static long getAllocatedDirectMemory()
    {
        if (BITS_TOTAL_CAPACITY == null)
            return -1;
        try
        {
            return BITS_TOTAL_CAPACITY.getLong(null);
        }
        catch (Throwable t)
        {
            logger.trace("Error accessing field of java.nio.Bits", t);
            //Don't care how or why we failed to get the value in this JVM. Return -1 to indicate failure
            return -1;
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.metadata; import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.ImmutableSet; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; import org.elasticsearch.cluster.DiffableUtils; import org.elasticsearch.cluster.DiffableUtils.KeyedReader; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider; import org.elasticsearch.cluster.service.InternalClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLogger; import 
org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.FromXContentBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import java.io.IOException; import java.util.*; import java.util.stream.Collectors; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, FromXContentBuilder<MetaData>, ToXContent { public static final MetaData PROTO = builder().build(); public static final String ALL = "_all"; public enum XContentContext { /* Custom metadata should be returns as part of API call */ API, /* Custom metadata should be stored as part of the persistent cluster state */ GATEWAY, /* Custom metadata should be stored as part of a snapshot */ SNAPSHOT } public static EnumSet<XContentContext> API_ONLY = EnumSet.of(XContentContext.API); public static EnumSet<XContentContext> API_AND_GATEWAY = EnumSet.of(XContentContext.API, XContentContext.GATEWAY); public static EnumSet<XContentContext> API_AND_SNAPSHOT = EnumSet.of(XContentContext.API, XContentContext.SNAPSHOT); public 
interface Custom extends Diffable<Custom>, ToXContent { String type(); Custom fromXContent(XContentParser parser) throws IOException; EnumSet<XContentContext> context(); } public static Map<String, Custom> customPrototypes = new HashMap<>(); static { // register non plugin custom metadata registerPrototype(RepositoriesMetaData.TYPE, RepositoriesMetaData.PROTO); } /** * Register a custom index meta data factory. Make sure to call it from a static block. */ public static void registerPrototype(String type, Custom proto) { customPrototypes.put(type, proto); } @Nullable public static <T extends Custom> T lookupPrototype(String type) { //noinspection unchecked return (T) customPrototypes.get(type); } public static <T extends Custom> T lookupPrototypeSafe(String type) { //noinspection unchecked T proto = (T) customPrototypes.get(type); if (proto == null) { throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "]"); } return proto; } public static final String SETTING_READ_ONLY = "cluster.blocks.read_only"; public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE)); public static final MetaData EMPTY_META_DATA = builder().build(); public static final String CONTEXT_MODE_PARAM = "context_mode"; public static final String CONTEXT_MODE_SNAPSHOT = XContentContext.SNAPSHOT.toString(); public static final String CONTEXT_MODE_GATEWAY = XContentContext.GATEWAY.toString(); private final String clusterUUID; private final long version; private final Settings transientSettings; private final Settings persistentSettings; private final Settings settings; private final ImmutableOpenMap<String, IndexMetaData> indices; private final ImmutableOpenMap<String, IndexTemplateMetaData> templates; private final ImmutableOpenMap<String, Custom> customs; private final transient int totalNumberOfShards; 
// Transient ? not serializable anyway? private final int numberOfShards; private final String[] allIndices; private final String[] allOpenIndices; private final String[] allClosedIndices; private final SortedMap<String, AliasOrIndex> aliasAndIndexLookup; @SuppressWarnings("unchecked") MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings, ImmutableOpenMap<String, IndexMetaData> indices, ImmutableOpenMap<String, IndexTemplateMetaData> templates, ImmutableOpenMap<String, Custom> customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices, SortedMap<String, AliasOrIndex> aliasAndIndexLookup) { this.clusterUUID = clusterUUID; this.version = version; this.transientSettings = transientSettings; this.persistentSettings = persistentSettings; this.settings = Settings.settingsBuilder().put(persistentSettings).put(transientSettings).build(); this.indices = indices; this.customs = customs; this.templates = templates; int totalNumberOfShards = 0; int numberOfShards = 0; for (ObjectCursor<IndexMetaData> cursor : indices.values()) { totalNumberOfShards += cursor.value.totalNumberOfShards(); numberOfShards += cursor.value.numberOfShards(); } this.totalNumberOfShards = totalNumberOfShards; this.numberOfShards = numberOfShards; this.allIndices = allIndices; this.allOpenIndices = allOpenIndices; this.allClosedIndices = allClosedIndices; this.aliasAndIndexLookup = aliasAndIndexLookup; } public long version() { return this.version; } public String clusterUUID() { return this.clusterUUID; } /** * Returns the merged transient and persistent settings. 
*/ public Settings settings() { return this.settings; } public Settings transientSettings() { return this.transientSettings; } public Settings persistentSettings() { return this.persistentSettings; } public boolean hasAlias(String alias) { AliasOrIndex aliasOrIndex = getAliasAndIndexLookup().get(alias); if (aliasOrIndex != null) { return aliasOrIndex.isAlias(); } else { return false; } } public boolean equalsAliases(MetaData other) { for (ObjectCursor<IndexMetaData> cursor : other.indices().values()) { IndexMetaData otherIndex = cursor.value; IndexMetaData thisIndex= indices().get(otherIndex.getIndex()); if (thisIndex == null) { return false; } if (otherIndex.getAliases().equals(thisIndex.getAliases()) == false) { return false; } } return true; } public SortedMap<String, AliasOrIndex> getAliasAndIndexLookup() { return aliasAndIndexLookup; } /** * Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and * that point to the specified concrete indices or match partially with the indices via wildcards. * * @param aliases The names of the index aliases to find * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. 
* @return the found index aliases grouped by index */ public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final String[] aliases, String[] concreteIndices) { assert aliases != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } boolean matchAllAliases = matchAllAliases(aliases); ImmutableOpenMap.Builder<String, List<AliasMetaData>> mapBuilder = ImmutableOpenMap.builder(); Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetaData indexMetaData = indices.get(index); List<AliasMetaData> filteredValues = new ArrayList<>(); for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) { AliasMetaData value = cursor.value; if (matchAllAliases || Regex.simpleMatch(aliases, value.alias())) { filteredValues.add(value); } } if (!filteredValues.isEmpty()) { // Make the list order deterministic CollectionUtil.timSort(filteredValues, new Comparator<AliasMetaData>() { @Override public int compare(AliasMetaData o1, AliasMetaData o2) { return o1.alias().compareTo(o2.alias()); } }); mapBuilder.put(index, Collections.unmodifiableList(filteredValues)); } } return mapBuilder.build(); } private static boolean matchAllAliases(final String[] aliases) { for (String alias : aliases) { if (alias.equals(ALL)) { return true; } } return aliases.length == 0; } /** * Checks if at least one of the specified aliases exists in the specified concrete indices. Wildcards are supported in the * alias names for partial matches. * * @param aliases The names of the index aliases to find * @param concreteIndices The concrete indexes the index aliases must point to order to be returned. * @return whether at least one of the specified aliases exists in one of the specified concrete indices. 
*/ public boolean hasAliases(final String[] aliases, String[] concreteIndices) { assert aliases != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return false; } Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetaData indexMetaData = indices.get(index); List<AliasMetaData> filteredValues = new ArrayList<>(); for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) { AliasMetaData value = cursor.value; if (Regex.simpleMatch(aliases, value.alias())) { filteredValues.add(value); } } if (!filteredValues.isEmpty()) { return true; } } return false; } /* * Finds all mappings for types and concrete indices. Types are expanded to * include all types that match the glob patterns in the types array. Empty * types array, null or {"_all"} will be expanded to all types available for * the given indices. */ public ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> findMappings(String[] concreteIndices, final String[] types) { assert types != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> indexMapBuilder = ImmutableOpenMap.builder(); Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetaData indexMetaData = indices.get(index); ImmutableOpenMap.Builder<String, MappingMetaData> filteredMappings; if (isAllTypes(types)) { indexMapBuilder.put(index, indexMetaData.getMappings()); // No types specified means get it all } else { filteredMappings = ImmutableOpenMap.builder(); for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.mappings()) { if (Regex.simpleMatch(types, cursor.key)) { filteredMappings.put(cursor.key, cursor.value); } } if (!filteredMappings.isEmpty()) { 
indexMapBuilder.put(index, filteredMappings.build()); } } } return indexMapBuilder.build(); } public ImmutableOpenMap<String, List<IndexWarmersMetaData.Entry>> findWarmers(String[] concreteIndices, final String[] types, final String[] uncheckedWarmers) { assert uncheckedWarmers != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } // special _all check to behave the same like not specifying anything for the warmers (not for the indices) final String[] warmers = Strings.isAllOrWildcard(uncheckedWarmers) ? Strings.EMPTY_ARRAY : uncheckedWarmers; ImmutableOpenMap.Builder<String, List<IndexWarmersMetaData.Entry>> mapBuilder = ImmutableOpenMap.builder(); Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetaData indexMetaData = indices.get(index); IndexWarmersMetaData indexWarmersMetaData = indexMetaData.custom(IndexWarmersMetaData.TYPE); if (indexWarmersMetaData == null || indexWarmersMetaData.entries().isEmpty()) { continue; } // TODO: make this a List so we don't have to copy below Collection<IndexWarmersMetaData.Entry> filteredWarmers = indexWarmersMetaData .entries() .stream() .filter(warmer -> { if (warmers.length != 0 && types.length != 0) { return Regex.simpleMatch(warmers, warmer.name()) && Regex.simpleMatch(types, warmer.types()); } else if (warmers.length != 0) { return Regex.simpleMatch(warmers, warmer.name()); } else if (types.length != 0) { return Regex.simpleMatch(types, warmer.types()); } else { return true; } }) .collect(Collectors.toCollection(ArrayList::new)); if (!filteredWarmers.isEmpty()) { mapBuilder.put(index, Collections.unmodifiableList(new ArrayList<>(filteredWarmers))); } } return mapBuilder.build(); } /** * Returns all the concrete indices. 
*/ public String[] concreteAllIndices() { return allIndices; } public String[] getConcreteAllIndices() { return concreteAllIndices(); } public String[] concreteAllOpenIndices() { return allOpenIndices; } public String[] getConcreteAllOpenIndices() { return allOpenIndices; } public String[] concreteAllClosedIndices() { return allClosedIndices; } public String[] getConcreteAllClosedIndices() { return allClosedIndices; } /** * Returns indexing routing for the given index. */ // TODO: This can be moved to IndexNameExpressionResolver too, but this means that we will support wildcards and other expressions // in the index,bulk,update and delete apis. public String resolveIndexRouting(@Nullable String routing, String aliasOrIndex) { if (aliasOrIndex == null) { return routing; } AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex); if (result == null || result.isAlias() == false) { return routing; } AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result; if (result.getIndices().size() > 1) { String[] indexNames = new String[result.getIndices().size()]; int i = 0; for (IndexMetaData indexMetaData : result.getIndices()) { indexNames[i++] = indexMetaData.getIndex(); } throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has more than one index associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op"); } AliasMetaData aliasMd = alias.getFirstAliasMetaData(); if (aliasMd.indexRouting() != null) { if (routing != null) { if (!routing.equals(aliasMd.indexRouting())) { throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation"); } } routing = aliasMd.indexRouting(); } if (routing != null) { if (routing.indexOf(',') != -1) { throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + routing + "] that resolved to several routing 
values, rejecting operation"); } } return routing; } public boolean hasIndex(String index) { return indices.containsKey(index); } public boolean hasConcreteIndex(String index) { return getAliasAndIndexLookup().containsKey(index); } public IndexMetaData index(String index) { return indices.get(index); } public ImmutableOpenMap<String, IndexMetaData> indices() { return this.indices; } public ImmutableOpenMap<String, IndexMetaData> getIndices() { return indices(); } public ImmutableOpenMap<String, IndexTemplateMetaData> templates() { return this.templates; } public ImmutableOpenMap<String, IndexTemplateMetaData> getTemplates() { return this.templates; } public ImmutableOpenMap<String, Custom> customs() { return this.customs; } public ImmutableOpenMap<String, Custom> getCustoms() { return this.customs; } public <T extends Custom> T custom(String type) { return (T) customs.get(type); } public int totalNumberOfShards() { return this.totalNumberOfShards; } public int getTotalNumberOfShards() { return totalNumberOfShards(); } public int numberOfShards() { return this.numberOfShards; } public int getNumberOfShards() { return numberOfShards(); } /** * Identifies whether the array containing type names given as argument refers to all types * The empty or null array identifies all types * * @param types the array containing types * @return true if the provided array maps to all types, false otherwise */ public static boolean isAllTypes(String[] types) { return types == null || types.length == 0 || isExplicitAllType(types); } /** * Identifies whether the array containing type names given as argument explicitly refers to all types * The empty or null array doesn't explicitly map to all types * * @param types the array containing index names * @return true if the provided array explicitly maps to all types, false otherwise */ public static boolean isExplicitAllType(String[] types) { return types != null && types.length == 1 && ALL.equals(types[0]); } /** * @param concreteIndex 
The concrete index to check if routing is required * @param type The type to check if routing is required * @return Whether routing is required according to the mapping for the specified index and type */ public boolean routingRequired(String concreteIndex, String type) { IndexMetaData indexMetaData = indices.get(concreteIndex); if (indexMetaData != null) { MappingMetaData mappingMetaData = indexMetaData.getMappings().get(type); if (mappingMetaData != null) { return mappingMetaData.routing().required(); } } return false; } @Override public Iterator<IndexMetaData> iterator() { return indices.valuesIt(); } public static boolean isGlobalStateEquals(MetaData metaData1, MetaData metaData2) { if (!metaData1.persistentSettings.equals(metaData2.persistentSettings)) { return false; } if (!metaData1.templates.equals(metaData2.templates())) { return false; } // Check if any persistent metadata needs to be saved int customCount1 = 0; for (ObjectObjectCursor<String, Custom> cursor : metaData1.customs) { if (customPrototypes.get(cursor.key).context().contains(XContentContext.GATEWAY)) { if (!cursor.value.equals(metaData2.custom(cursor.key))) return false; customCount1++; } } int customCount2 = 0; for (ObjectObjectCursor<String, Custom> cursor : metaData2.customs) { if (customPrototypes.get(cursor.key).context().contains(XContentContext.GATEWAY)) { customCount2++; } } if (customCount1 != customCount2) return false; return true; } @Override public Diff<MetaData> diff(MetaData previousState) { return new MetaDataDiff(previousState, this); } @Override public Diff<MetaData> readDiffFrom(StreamInput in) throws IOException { return new MetaDataDiff(in); } @Override public MetaData fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { return Builder.fromXContent(parser); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { Builder.toXContent(this, builder, params); return builder; } private 
static class MetaDataDiff implements Diff<MetaData> { private long version; private String clusterUUID; private Settings transientSettings; private Settings persistentSettings; private Diff<ImmutableOpenMap<String, IndexMetaData>> indices; private Diff<ImmutableOpenMap<String, IndexTemplateMetaData>> templates; private Diff<ImmutableOpenMap<String, Custom>> customs; public MetaDataDiff(MetaData before, MetaData after) { clusterUUID = after.clusterUUID; version = after.version; transientSettings = after.transientSettings; persistentSettings = after.persistentSettings; indices = DiffableUtils.diff(before.indices, after.indices); templates = DiffableUtils.diff(before.templates, after.templates); customs = DiffableUtils.diff(before.customs, after.customs); } public MetaDataDiff(StreamInput in) throws IOException { clusterUUID = in.readString(); version = in.readLong(); transientSettings = Settings.readSettingsFromStream(in); persistentSettings = Settings.readSettingsFromStream(in); indices = DiffableUtils.readImmutableOpenMapDiff(in, IndexMetaData.PROTO); templates = DiffableUtils.readImmutableOpenMapDiff(in, IndexTemplateMetaData.PROTO); customs = DiffableUtils.readImmutableOpenMapDiff(in, new KeyedReader<Custom>() { @Override public Custom readFrom(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readFrom(in); } @Override public Diff<Custom> readDiffFrom(StreamInput in, String key) throws IOException { return lookupPrototypeSafe(key).readDiffFrom(in); } }); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(clusterUUID); out.writeLong(version); Settings.writeSettingsToStream(transientSettings, out); Settings.writeSettingsToStream(persistentSettings, out); indices.writeTo(out); templates.writeTo(out); customs.writeTo(out); } @Override public MetaData apply(MetaData part) { Builder builder = builder(); builder.clusterUUID(clusterUUID); builder.version(version); 
builder.transientSettings(transientSettings); builder.persistentSettings(persistentSettings); builder.indices(indices.apply(part.indices)); builder.templates(templates.apply(part.templates)); builder.customs(customs.apply(part.customs)); return builder.build(); } } @Override public MetaData readFrom(StreamInput in) throws IOException { Builder builder = new Builder(); builder.version = in.readLong(); builder.clusterUUID = in.readString(); builder.transientSettings(readSettingsFromStream(in)); builder.persistentSettings(readSettingsFromStream(in)); int size = in.readVInt(); for (int i = 0; i < size; i++) { builder.put(IndexMetaData.Builder.readFrom(in), false); } size = in.readVInt(); for (int i = 0; i < size; i++) { builder.put(IndexTemplateMetaData.Builder.readFrom(in)); } int customSize = in.readVInt(); for (int i = 0; i < customSize; i++) { String type = in.readString(); Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in); builder.putCustom(type, customIndexMetaData); } return builder.build(); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeLong(version); out.writeString(clusterUUID); writeSettingsToStream(transientSettings, out); writeSettingsToStream(persistentSettings, out); out.writeVInt(indices.size()); for (IndexMetaData indexMetaData : this) { indexMetaData.writeTo(out); } out.writeVInt(templates.size()); for (ObjectCursor<IndexTemplateMetaData> cursor : templates.values()) { cursor.value.writeTo(out); } out.writeVInt(customs.size()); for (ObjectObjectCursor<String, Custom> cursor : customs) { out.writeString(cursor.key); cursor.value.writeTo(out); } } public static Builder builder() { return new Builder(); } public static Builder builder(MetaData metaData) { return new Builder(metaData); } /** All known byte-sized cluster settings. 
*/ public static final Set<String> CLUSTER_BYTES_SIZE_SETTINGS = ImmutableSet.of( IndicesStore.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC, RecoverySettings.INDICES_RECOVERY_FILE_CHUNK_SIZE, RecoverySettings.INDICES_RECOVERY_TRANSLOG_SIZE, RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC, RecoverySettings.INDICES_RECOVERY_MAX_SIZE_PER_SEC); /** All known time cluster settings. */ public static final Set<String> CLUSTER_TIME_SETTINGS = ImmutableSet.of( IndicesTTLService.INDICES_TTL_INTERVAL, RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC, RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK, RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT, RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT, RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT, DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL, InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT, DiscoverySettings.PUBLISH_TIMEOUT, InternalClusterService.SETTING_CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD); /** As of 2.0 we require units for time and byte-sized settings. This methods adds default units to any cluster settings that don't * specify a unit. 
*/ public static MetaData addDefaultUnitsIfNeeded(ESLogger logger, MetaData metaData) { Settings.Builder newPersistentSettings = null; for(Map.Entry<String,String> ent : metaData.persistentSettings().getAsMap().entrySet()) { String settingName = ent.getKey(); String settingValue = ent.getValue(); if (CLUSTER_BYTES_SIZE_SETTINGS.contains(settingName)) { try { Long.parseLong(settingValue); } catch (NumberFormatException nfe) { continue; } // It's a naked number that previously would be interpreted as default unit (bytes); now we add it: logger.warn("byte-sized cluster setting [{}] with value [{}] is missing units; assuming default units (b) but in future versions this will be a hard error", settingName, settingValue); if (newPersistentSettings == null) { newPersistentSettings = Settings.builder(); newPersistentSettings.put(metaData.persistentSettings()); } newPersistentSettings.put(settingName, settingValue + "b"); } if (CLUSTER_TIME_SETTINGS.contains(settingName)) { try { Long.parseLong(settingValue); } catch (NumberFormatException nfe) { continue; } // It's a naked number that previously would be interpreted as default unit (ms); now we add it: logger.warn("time cluster setting [{}] with value [{}] is missing units; assuming default units (ms) but in future versions this will be a hard error", settingName, settingValue); if (newPersistentSettings == null) { newPersistentSettings = Settings.builder(); newPersistentSettings.put(metaData.persistentSettings()); } newPersistentSettings.put(settingName, settingValue + "ms"); } } if (newPersistentSettings != null) { return new MetaData( metaData.clusterUUID(), metaData.version(), metaData.transientSettings(), newPersistentSettings.build(), metaData.getIndices(), metaData.getTemplates(), metaData.getCustoms(), metaData.concreteAllIndices(), metaData.concreteAllOpenIndices(), metaData.concreteAllClosedIndices(), metaData.getAliasAndIndexLookup()); } else { // No changes: return metaData; } } public static class Builder { 
private String clusterUUID; private long version; private Settings transientSettings = Settings.Builder.EMPTY_SETTINGS; private Settings persistentSettings = Settings.Builder.EMPTY_SETTINGS; private final ImmutableOpenMap.Builder<String, IndexMetaData> indices; private final ImmutableOpenMap.Builder<String, IndexTemplateMetaData> templates; private final ImmutableOpenMap.Builder<String, Custom> customs; public Builder() { clusterUUID = "_na_"; indices = ImmutableOpenMap.builder(); templates = ImmutableOpenMap.builder(); customs = ImmutableOpenMap.builder(); } public Builder(MetaData metaData) { this.clusterUUID = metaData.clusterUUID; this.transientSettings = metaData.transientSettings; this.persistentSettings = metaData.persistentSettings; this.version = metaData.version; this.indices = ImmutableOpenMap.builder(metaData.indices); this.templates = ImmutableOpenMap.builder(metaData.templates); this.customs = ImmutableOpenMap.builder(metaData.customs); } public Builder put(IndexMetaData.Builder indexMetaDataBuilder) { // we know its a new one, increment the version and store indexMetaDataBuilder.version(indexMetaDataBuilder.version() + 1); IndexMetaData indexMetaData = indexMetaDataBuilder.build(); indices.put(indexMetaData.index(), indexMetaData); return this; } public Builder put(IndexMetaData indexMetaData, boolean incrementVersion) { if (indices.get(indexMetaData.index()) == indexMetaData) { return this; } // if we put a new index metadata, increment its version if (incrementVersion) { indexMetaData = IndexMetaData.builder(indexMetaData).version(indexMetaData.version() + 1).build(); } indices.put(indexMetaData.index(), indexMetaData); return this; } public IndexMetaData get(String index) { return indices.get(index); } public Builder remove(String index) { indices.remove(index); return this; } public Builder removeAllIndices() { indices.clear(); return this; } public Builder indices(ImmutableOpenMap<String, IndexMetaData> indices) { this.indices.putAll(indices); 
return this; } public Builder put(IndexTemplateMetaData.Builder template) { return put(template.build()); } public Builder put(IndexTemplateMetaData template) { templates.put(template.name(), template); return this; } public Builder removeTemplate(String templateName) { templates.remove(templateName); return this; } public Builder templates(ImmutableOpenMap<String, IndexTemplateMetaData> templates) { this.templates.putAll(templates); return this; } public Custom getCustom(String type) { return customs.get(type); } public Builder putCustom(String type, Custom custom) { customs.put(type, custom); return this; } public Builder removeCustom(String type) { customs.remove(type); return this; } public Builder customs(ImmutableOpenMap<String, Custom> customs) { this.customs.putAll(customs); return this; } public Builder updateSettings(Settings settings, String... indices) { if (indices == null || indices.length == 0) { indices = this.indices.keys().toArray(String.class); } for (String index : indices) { IndexMetaData indexMetaData = this.indices.get(index); if (indexMetaData == null) { throw new IndexNotFoundException(index); } put(IndexMetaData.builder(indexMetaData) .settings(settingsBuilder().put(indexMetaData.settings()).put(settings))); } return this; } public Builder updateNumberOfReplicas(int numberOfReplicas, String... 
indices) { if (indices == null || indices.length == 0) { indices = this.indices.keys().toArray(String.class); } for (String index : indices) { IndexMetaData indexMetaData = this.indices.get(index); if (indexMetaData == null) { throw new IndexNotFoundException(index); } put(IndexMetaData.builder(indexMetaData).numberOfReplicas(numberOfReplicas)); } return this; } public Settings transientSettings() { return this.transientSettings; } public Builder transientSettings(Settings settings) { this.transientSettings = settings; return this; } public Settings persistentSettings() { return this.persistentSettings; } public Builder persistentSettings(Settings settings) { this.persistentSettings = settings; return this; } public Builder version(long version) { this.version = version; return this; } public Builder clusterUUID(String clusterUUID) { this.clusterUUID = clusterUUID; return this; } public Builder generateClusterUuidIfNeeded() { if (clusterUUID.equals("_na_")) { clusterUUID = Strings.randomBase64UUID(); } return this; } public MetaData build() { // TODO: We should move these datastructures to IndexNameExpressionResolver, this will give the following benefits: // 1) The datastructures will only be rebuilded when needed. Now during serailizing we rebuild these datastructures // while these datastructures aren't even used. // 2) The aliasAndIndexLookup can be updated instead of rebuilding it all the time. // build all concrete indices arrays: // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices. // When doing an operation across all indices, most of the time is spent on actually going to all shards and // do the required operations, the bottleneck isn't resolving expressions into concrete indices. 
List<String> allIndicesLst = new ArrayList<>(); for (ObjectCursor<IndexMetaData> cursor : indices.values()) { allIndicesLst.add(cursor.value.index()); } String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]); List<String> allOpenIndicesLst = new ArrayList<>(); List<String> allClosedIndicesLst = new ArrayList<>(); for (ObjectCursor<IndexMetaData> cursor : indices.values()) { IndexMetaData indexMetaData = cursor.value; if (indexMetaData.state() == IndexMetaData.State.OPEN) { allOpenIndicesLst.add(indexMetaData.index()); } else if (indexMetaData.state() == IndexMetaData.State.CLOSE) { allClosedIndicesLst.add(indexMetaData.index()); } } String[] allOpenIndices = allOpenIndicesLst.toArray(new String[allOpenIndicesLst.size()]); String[] allClosedIndices = allClosedIndicesLst.toArray(new String[allClosedIndicesLst.size()]); // build all indices map SortedMap<String, AliasOrIndex> aliasAndIndexLookup = new TreeMap<>(); for (ObjectCursor<IndexMetaData> cursor : indices.values()) { IndexMetaData indexMetaData = cursor.value; aliasAndIndexLookup.put(indexMetaData.getIndex(), new AliasOrIndex.Index(indexMetaData)); for (ObjectObjectCursor<String, AliasMetaData> aliasCursor : indexMetaData.getAliases()) { AliasMetaData aliasMetaData = aliasCursor.value; AliasOrIndex.Alias aliasOrIndex = (AliasOrIndex.Alias) aliasAndIndexLookup.get(aliasMetaData.getAlias()); if (aliasOrIndex == null) { aliasOrIndex = new AliasOrIndex.Alias(aliasMetaData, indexMetaData); aliasAndIndexLookup.put(aliasMetaData.getAlias(), aliasOrIndex); } else { aliasOrIndex.addIndex(indexMetaData); } } } aliasAndIndexLookup = Collections.unmodifiableSortedMap(aliasAndIndexLookup); return new MetaData(clusterUUID, version, transientSettings, persistentSettings, indices.build(), templates.build(), customs.build(), allIndices, allOpenIndices, allClosedIndices, aliasAndIndexLookup); } public static String toXContent(MetaData metaData) throws IOException { XContentBuilder builder = 
XContentFactory.contentBuilder(XContentType.JSON); builder.startObject(); toXContent(metaData, builder, ToXContent.EMPTY_PARAMS); builder.endObject(); return builder.string(); } public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params params) throws IOException { XContentContext context = XContentContext.valueOf(params.param(CONTEXT_MODE_PARAM, "API")); builder.startObject("meta-data"); builder.field("version", metaData.version()); builder.field("cluster_uuid", metaData.clusterUUID); if (!metaData.persistentSettings().getAsMap().isEmpty()) { builder.startObject("settings"); for (Map.Entry<String, String> entry : metaData.persistentSettings().getAsMap().entrySet()) { builder.field(entry.getKey(), entry.getValue()); } builder.endObject(); } if (context == XContentContext.API && !metaData.transientSettings().getAsMap().isEmpty()) { builder.startObject("transient_settings"); for (Map.Entry<String, String> entry : metaData.transientSettings().getAsMap().entrySet()) { builder.field(entry.getKey(), entry.getValue()); } builder.endObject(); } builder.startObject("templates"); for (ObjectCursor<IndexTemplateMetaData> cursor : metaData.templates().values()) { IndexTemplateMetaData.Builder.toXContent(cursor.value, builder, params); } builder.endObject(); if (context == XContentContext.API && !metaData.indices().isEmpty()) { builder.startObject("indices"); for (IndexMetaData indexMetaData : metaData) { IndexMetaData.Builder.toXContent(indexMetaData, builder, params); } builder.endObject(); } for (ObjectObjectCursor<String, Custom> cursor : metaData.customs()) { Custom proto = lookupPrototypeSafe(cursor.key); if (proto.context().contains(context)) { builder.startObject(cursor.key); cursor.value.toXContent(builder, params); builder.endObject(); } } builder.endObject(); } public static MetaData fromXContent(XContentParser parser) throws IOException { Builder builder = new Builder(); // we might get here after the meta-data element, or on a fresh 
parser XContentParser.Token token = parser.currentToken(); String currentFieldName = parser.currentName(); if (!"meta-data".equals(currentFieldName)) { token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { // move to the field name (meta-data) token = parser.nextToken(); // move to the next object token = parser.nextToken(); } currentFieldName = parser.currentName(); if (token == null) { // no data... return builder.build(); } } while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { if ("settings".equals(currentFieldName)) { builder.persistentSettings(Settings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).build()); } else if ("indices".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { builder.put(IndexMetaData.Builder.fromXContent(parser), false); } } else if ("templates".equals(currentFieldName)) { while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { builder.put(IndexTemplateMetaData.Builder.fromXContent(parser, parser.currentName())); } } else { // check if its a custom index metadata Custom proto = lookupPrototype(currentFieldName); if (proto == null) { //TODO warn parser.skipChildren(); } else { Custom custom = proto.fromXContent(parser); builder.putCustom(custom.type(), custom); } } } else if (token.isValue()) { if ("version".equals(currentFieldName)) { builder.version = parser.longValue(); } else if ("cluster_uuid".equals(currentFieldName) || "uuid".equals(currentFieldName)) { builder.clusterUUID = parser.text(); } } } return builder.build(); } public static MetaData readFrom(StreamInput in) throws IOException { return PROTO.readFrom(in); } } }
/** * Copyright 2016 LinkedIn Corp. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package com.github.ambry.utils; import java.io.BufferedReader; import java.io.DataInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.RandomAccessFile; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Properties; import java.util.Random; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A set of utility methods */ public class Utils { /** * Constant to define "infinite" time. * <p/> * Currently used in lieu of either an epoch based ms expiration time or a seconds based TTL (relative to creation * time). 
*/ public static final long Infinite_Time = -1; private static final Logger logger = LoggerFactory.getLogger(Utils.class); // The read*String methods assume that the underlying stream is blocking /** * Reads a String whose length is a short from the given input stream * @param input The input stream from which to read the String from * @return The String read from the stream * @throws IOException */ public static String readShortString(DataInputStream input) throws IOException { Short size = input.readShort(); if (size < 0) { throw new IllegalArgumentException("readShortString : the size cannot be negative"); } byte[] bytes = new byte[size]; int read = 0; while (read < size) { int readBytes = input.read(bytes, read, size - read); if (readBytes == -1 || readBytes == 0) { break; } read += readBytes; } if (read != size) { throw new IllegalArgumentException("readShortString : the size of the input does not match the actual data size"); } return new String(bytes, "UTF-8"); } /** * Gets the size of the string in serialized form * @param value the string of interest to be serialized * @return the size of the string in serialized form */ public static int getIntStringLength(String value) { return value == null ? Integer.BYTES : Integer.BYTES + value.length(); } /** * Reads a String whose length is an int from the given input stream * @param input The input stream from which to read the String from * @return The String read from the stream * @throws IOException */ public static String readIntString(DataInputStream input) throws IOException { return readIntString(input, StandardCharsets.UTF_8); } /** * Reads a String whose length is an int from the given input stream * @param input The input stream from which to read the String from * @param charset the charset to use. 
* @return The String read from the stream * @throws IOException */ public static String readIntString(DataInputStream input, Charset charset) throws IOException { int size = input.readInt(); if (size < 0) { throw new IllegalArgumentException("readIntString : the size cannot be negative"); } byte[] bytes = new byte[size]; int read = 0; while (read < size) { int readBytes = input.read(bytes, read, size - read); if (readBytes == -1 || readBytes == 0) { break; } read += readBytes; } if (read != size) { throw new IllegalArgumentException("readIntString : the size of the input does not match the actual data size"); } return new String(bytes, charset); } /** * * @param input * @return * @throws IOException */ public static ByteBuffer readIntBuffer(DataInputStream input) throws IOException { int size = input.readInt(); if (size < 0) { throw new IllegalArgumentException("readIntBuffer : the size cannot be negative"); } ByteBuffer buffer = ByteBuffer.allocate(size); int read = 0; while (read < size) { int readBytes = input.read(buffer.array()); if (readBytes == -1 || readBytes == 0) { break; } read += readBytes; } if (read != size) { throw new IllegalArgumentException("readIntBuffer : the size of the input does not match the actual data size"); } return buffer; } /** * * @param input * @return * @throws IOException */ public static ByteBuffer readShortBuffer(DataInputStream input) throws IOException { short size = input.readShort(); if (size < 0) { throw new IllegalArgumentException("readShortBuffer : the size cannot be negative"); } ByteBuffer buffer = ByteBuffer.allocate(size); int read = 0; while (read < size) { int readBytes = input.read(buffer.array()); if (readBytes == -1 || readBytes == 0) { break; } read += readBytes; } if (read != size) { throw new IllegalArgumentException("readShortBuffer the size of the input does not match the actual data size"); } return buffer; } /** * Create a new thread * * @param runnable The work for the thread to do * @param daemon Should 
the thread block JVM shutdown? * @return The unstarted thread */ public static Thread newThread(Runnable runnable, boolean daemon) { Thread thread = new Thread(runnable); thread.setDaemon(daemon); thread.setUncaughtExceptionHandler((t, e) -> { logger.error("Encountered throwable in {}", t, e); }); return thread; } /** * Create a new thread * * @param name The name of the thread * @param runnable The work for the thread to do * @param daemon Should the thread block JVM shutdown? * @return The unstarted thread */ public static Thread newThread(String name, Runnable runnable, boolean daemon) { Thread thread = new Thread(runnable, name); thread.setDaemon(daemon); thread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { public void uncaughtException(Thread t, Throwable e) { e.printStackTrace(); } }); return thread; } /** * Create a daemon thread * * @param runnable The runnable to execute in the background * @return The unstarted thread */ public static Thread daemonThread(Runnable runnable) { return newThread(runnable, true); } /** * Create a daemon thread * * @param name The name of the thread * @param runnable The runnable to execute in the background * @return The unstarted thread */ public static Thread daemonThread(String name, Runnable runnable) { return newThread(name, runnable, true); } /** * Create a {@link ScheduledExecutorService} with the given properties. * @param numThreads The number of threads in the scheduler's thread pool. * @param threadNamePrefix The prefix string for thread names in this thread pool. * @param isDaemon {@code true} if the threads in this scheduler's should be daemon threads. * @return A {@link ScheduledExecutorService}. 
*/ public static ScheduledExecutorService newScheduler(int numThreads, String threadNamePrefix, boolean isDaemon) { ScheduledThreadPoolExecutor scheduler = new ScheduledThreadPoolExecutor(numThreads, new SchedulerThreadFactory(threadNamePrefix, isDaemon)); scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); return scheduler; } /** * Create a {@link ScheduledExecutorService} with the given properties. * @param numThreads The number of threads in the scheduler's thread pool. * @param isDaemon {@code true} if the threads in this scheduler's should be daemon threads. * @return A {@link ScheduledExecutorService}. */ public static ScheduledExecutorService newScheduler(int numThreads, boolean isDaemon) { return newScheduler(numThreads, "ambry-scheduler-", isDaemon); } /** * Open a channel for the given file * @param file * @param mutable * @return * @throws FileNotFoundException */ public static FileChannel openChannel(File file, boolean mutable) throws FileNotFoundException { if (mutable) { return new RandomAccessFile(file, "rw").getChannel(); } else { return new FileInputStream(file).getChannel(); } } /** * Instantiate a class instance from a given className. 
* @param className * @param <T> * @return * @throws ClassNotFoundException * @throws InstantiationException * @throws IllegalAccessException */ public static <T> T getObj(String className) throws ClassNotFoundException, InstantiationException, IllegalAccessException { return (T) Class.forName(className).newInstance(); } /** * Instantiate a class instance from a given className with an arg * @param className * @param arg * @param <T> * @return * @throws ClassNotFoundException * @throws InstantiationException * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public static <T> T getObj(String className, Object arg) throws ClassNotFoundException, InstantiationException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { for (Constructor<?> ctor : Class.forName(className).getDeclaredConstructors()) { if (ctor.getParameterTypes().length == 1 && checkAssignable(ctor.getParameterTypes()[0], arg)) { return (T) ctor.newInstance(arg); } } return null; } /** * Instantiate a class instance from a given className with two args * @param className * @param arg1 * @param arg2 * @param <T> * @return * @throws ClassNotFoundException * @throws InstantiationException * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public static <T> T getObj(String className, Object arg1, Object arg2) throws ClassNotFoundException, InstantiationException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { for (Constructor<?> ctor : Class.forName(className).getDeclaredConstructors()) { if (ctor.getParameterTypes().length == 2 && checkAssignable(ctor.getParameterTypes()[0], arg1) && checkAssignable( ctor.getParameterTypes()[1], arg2)) { return (T) ctor.newInstance(arg1, arg2); } } return null; } /** * Instantiate a class instance from a given className with three args * @param className * @param arg1 * @param arg2 * @param arg3 * @param <T> * @return 
* @throws ClassNotFoundException * @throws InstantiationException * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public static <T> T getObj(String className, Object arg1, Object arg2, Object arg3) throws ClassNotFoundException, InstantiationException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { for (Constructor<?> ctor : Class.forName(className).getDeclaredConstructors()) { if (ctor.getParameterTypes().length == 3 && checkAssignable(ctor.getParameterTypes()[0], arg1) && checkAssignable( ctor.getParameterTypes()[1], arg2) && checkAssignable(ctor.getParameterTypes()[2], arg3)) { return (T) ctor.newInstance(arg1, arg2, arg3); } } return null; } /** * Instantiate a class instance from a given className with variable number of args * @param className * @param objects * @param <T> * @return * @throws ClassNotFoundException * @throws InstantiationException * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public static <T> T getObj(String className, Object... objects) throws ClassNotFoundException, InstantiationException, IllegalAccessException, NoSuchMethodException, InvocationTargetException { for (Constructor<?> ctor : Class.forName(className).getDeclaredConstructors()) { if (ctor.getParameterTypes().length == objects.length) { int i = 0; for (; i < objects.length; i++) { if (!checkAssignable(ctor.getParameterTypes()[i], objects[i])) { break; } } if (i == objects.length) { return (T) ctor.newInstance(objects); } } } return null; } /** * Check if the given constructor parameter type is assignable from the provided argument object. * @param parameterType the {@link Class} of the constructor parameter. * @param arg the argument to test. * @return {@code true} if it is assignable. Note: this will return true if {@code arg} is {@code null}. 
*/ private static boolean checkAssignable(Class<?> parameterType, Object arg) { return arg == null || parameterType.isAssignableFrom(arg.getClass()); } /** * Compute the hash code for the given items * @param items * @return */ public static int hashcode(Object[] items) { if (items == null) { return 0; } int h = 1; int i = 0; while (i < items.length) { if (items[i] != null) { h = 31 * h + items[i].hashCode(); i += 1; } } return h; } /** * Compute the CRC32 of the byte array * * @param bytes The array to compute the checksum for * @return The CRC32 */ public static long crc32(byte[] bytes) { return crc32(bytes, 0, bytes.length); } /** * Compute the CRC32 of the segment of the byte array given by the specificed size and offset * * @param bytes The bytes to checksum * @param offset the offset at which to begin checksumming * @param size the number of bytes to checksum * @return The CRC32 */ public static long crc32(byte[] bytes, int offset, int size) { Crc32 crc = new Crc32(); crc.update(bytes, offset, size); return crc.getValue(); } /** * Read a properties file from the given path * * @param filename The path of the file to read */ public static Properties loadProps(String filename) throws FileNotFoundException, IOException { InputStream propStream = new FileInputStream(filename); Properties props = new Properties(); props.load(propStream); return props; } /** * Serializes a nullable string into byte buffer * * @param outputBuffer The output buffer to serialize the value to * @param value The value to serialize */ public static void serializeNullableString(ByteBuffer outputBuffer, String value) { if (value == null) { outputBuffer.putInt(0); } else { outputBuffer.putInt(value.length()); outputBuffer.put(value.getBytes()); } } /** * Serializes a string into byte buffer * @param outputBuffer The output buffer to serialize the value to * @param value The value to serialize * @param charset {@link Charset} to be used to encode */ public static void 
serializeString(ByteBuffer outputBuffer, String value, Charset charset) { outputBuffer.putInt(value.length()); outputBuffer.put(value.getBytes(charset)); } /** * Deserializes a string from byte buffer * @param inputBuffer The input buffer to deserialize the value from * @param charset {@link Charset} to be used to decode * @return the deserialized string */ public static String deserializeString(ByteBuffer inputBuffer, Charset charset) { int size = inputBuffer.getInt(); byte[] value = new byte[size]; inputBuffer.get(value); return new String(value, charset); } /** * Returns the length of a nullable string * * @param value The string whose length is needed * @return The length of the string. 0 if null. */ public static int getNullableStringLength(String value) { return value == null ? 0 : value.length(); } /** * Writes specified string to specified file path. * * @param string to write * @param path file path * @throws IOException */ public static void writeStringToFile(String string, String path) throws IOException { FileWriter fileWriter = null; try { File file = new File(path); fileWriter = new FileWriter(file); fileWriter.write(string); } finally { if (fileWriter != null) { fileWriter.close(); } } } /** * Pretty prints specified jsonObject to specified file path. * * @param jsonObject to pretty print * @param path file path * @throws IOException * @throws JSONException */ public static void writeJsonObjectToFile(JSONObject jsonObject, String path) throws IOException, JSONException { writeStringToFile(jsonObject.toString(2), path); } /** * Pretty prints specified {@link JSONArray} to specified file path. * * @param jsonArray to pretty print * @param path file path * @throws IOException * @throws JSONException */ public static void writeJsonArrayToFile(JSONArray jsonArray, String path) throws IOException, JSONException { writeStringToFile(jsonArray.toString(2), path); } /** * Reads entire contents of specified file as a string. 
* * @param path file path to read * @return string read from specified file * @throws IOException */ public static String readStringFromFile(String path) throws IOException { File file = new File(path); byte[] encoded = new byte[(int) file.length()]; DataInputStream ds = null; try { ds = new DataInputStream(new FileInputStream(file)); ds.readFully(encoded); } finally { if (ds != null) { ds.close(); } } return Charset.defaultCharset().decode(ByteBuffer.wrap(encoded)).toString(); } /** * Reads JSON object (in string format) from specified file. * * @param path file path to read * @return JSONObject read from specified file * @throws IOException * @throws JSONException */ public static JSONObject readJsonFromFile(String path) throws IOException, JSONException { return new JSONObject(readStringFromFile(path)); } /** * Ensures that a given File is present. The file is pre-allocated with a given capacity using fallocate on linux * @param file file path to create and allocate * @param capacityBytes the number of bytes to pre-allocate * @throws IOException */ public static void preAllocateFileIfNeeded(File file, long capacityBytes) throws IOException { if (!file.exists()) { file.createNewFile(); } if (System.getProperty("os.name").toLowerCase().startsWith("linux")) { Runtime runtime = Runtime.getRuntime(); Process process = runtime.exec("fallocate --keep-size -l " + capacityBytes + " " + file.getAbsolutePath()); try { process.waitFor(); } catch (InterruptedException e) { // ignore the interruption and check the exit value to be sure } if (process.exitValue() != 0) { throw new IOException( "error while trying to preallocate file " + file.getAbsolutePath() + " exitvalue " + process.exitValue() + " error string " + new BufferedReader(new InputStreamReader(process.getErrorStream())).lines() .collect(Collectors.joining("/n"))); } } } /** * Get a pseudo-random long uniformly between 0 and n-1. Stolen from {@link java.util.Random#nextInt()}. 
 *
 * @param random random object used to generate the random number so that we generate
 *               uniformly random values between 0 and n-1
 * @param n the bound (exclusive); must be positive
 * @return a value selected uniformly at random from the range {@code [0..n)}.
 */
public static long getRandomLong(Random random, long n) {
  if (n <= 0) {
    throw new IllegalArgumentException("Cannot generate random long in range [0,n) for n<=0.");
  }

  // Mask off the sign bit so 'bits' is a uniform 63-bit non-negative value.
  final int BITS_PER_LONG = 63;
  long bits, val;
  do {
    bits = random.nextLong() & (~(1L << BITS_PER_LONG));
    val = bits % n;
    // Rejection sampling: retry when 'bits' falls in the partial bucket at the top of
    // the 63-bit range, which would otherwise bias small remainders (same technique as
    // java.util.Random#nextInt(int)).
  } while (bits - val + (n - 1) < 0L);
  return val;
}

/**
 * Returns a random short using the {@code Random} passed as arg
 * @param random the {@link Random} object that needs to be used to generate the random short
 * @return a random short in {@code [0..Short.MAX_VALUE]}
 */
public static short getRandomShort(Random random) {
  return (short) random.nextInt(Short.MAX_VALUE + 1);
}

/**
 * Adds some number of seconds to an epoch time in ms.
 *
 * @param epochTimeInMs Epoch time in ms to which {@code deltaTimeInSeconds} needs to be added
 * @param deltaTimeInSeconds delta time in seconds which needs to be added to {@code epochTimeInMs}
 * @return epoch time in milliseconds after adding {@code deltaTimeInSeconds} to {@code epochTimeInMs} or
 * {@link Utils#Infinite_Time} if either of them is {@link Utils#Infinite_Time}
 */
public static long addSecondsToEpochTime(long epochTimeInMs, long deltaTimeInSeconds) {
  // "Infinite" is absorbing: adding anything to an infinite time stays infinite.
  if (deltaTimeInSeconds == Infinite_Time || epochTimeInMs == Infinite_Time) {
    return Infinite_Time;
  }
  return epochTimeInMs + (TimeUnit.SECONDS.toMillis(deltaTimeInSeconds));
}

/**
 * Read "size" length of bytes from stream to a byte array. If "size" length of bytes can't be read because the end of
 * the stream has been reached, IOException is thrown. This method blocks until input data is available, the end of
 * the stream is detected, or an exception is thrown.
 * @param stream from which data to be read from
 * @param size max length of bytes to be read from the stream.
 * @return byte[] which has the data that is read from the stream
 * @throws IOException if the stream ends before {@code size} bytes could be read
 */
public static byte[] readBytesFromStream(InputStream stream, int size) throws IOException {
  // Convenience overload: allocate the destination array and fill it from offset 0.
  return readBytesFromStream(stream, new byte[size], 0, size);
}

/**
 * Read "size" length of bytes from stream to a byte array starting at the given offset in the byte[]. If "size"
 * length of bytes can't be read because the end of the stream has been reached, IOException is thrown. This method
 * blocks until input data is available, the end of the stream is detected, or an exception is thrown.
 * @param stream from which data to be read from
 * @param data byte[] into which the data has to be written
 * @param offset starting offset in the byte[] at which the data has to be written to
 * @param size length of bytes to be read from the stream
 * @return byte[] which has the data that is read from the stream. Same as @param data
 * @throws IOException if the stream ends before {@code size} bytes could be read
 */
public static byte[] readBytesFromStream(InputStream stream, byte[] data, int offset, int size) throws IOException {
  int read = 0;
  // Loop because InputStream.read may return fewer bytes than requested per call.
  while (read < size) {
    int sizeRead = stream.read(data, offset, size - read);
    // -1 signals end of stream; 0 is treated the same way so the loop cannot spin.
    if (sizeRead == 0 || sizeRead == -1) {
      throw new IOException("Total size read " + read + " is less than the size to be read " + size);
    }
    read += sizeRead;
    offset += sizeRead;
  }
  return data;
}

/**
 * Split the input string "data" using the delimiter and return as list of strings for the slices obtained
 * @param data the string to split; must not be {@code null}
 * @param delimiter regex on which to split (passed to {@link String#split(String)})
 * @return the slices as a mutable {@link ArrayList}
 */
public static ArrayList<String> splitString(String data, String delimiter) {
  if (data == null) {
    throw new IllegalArgumentException("Passed in string is null ");
  }
  ArrayList<String> toReturn = new ArrayList<String>();
  String[] slices = data.split(delimiter);
  toReturn.addAll(Arrays.asList(slices));
  return toReturn;
}

/**
 * Merge/Concatenate the input list of strings using the delimiter and return the new string
 * @param data List of strings to be merged/concatenated
 * @param delimiter using which the
list of strings need to be merged/concatenated
 * @return the obtained string after merging/concatenating
 */
public static String concatenateString(ArrayList<String> data, String delimiter) {
  if (data == null) {
    throw new IllegalArgumentException("Passed in List is null ");
  }
  // FIX: the previous implementation only appended anything when data.size() > 1, so a
  // single-element list incorrectly produced "". String.join covers every size:
  // empty -> "", one element -> that element, otherwise delimiter-separated values.
  return String.join(delimiter, data);
}

/**
 * Make sure that the ByteBuffer capacity is equal to or greater than the expected length.
 * If not, create a new ByteBuffer of expected length and copy contents from previous ByteBuffer to the new one.
 * Note: when a copy is made, the original buffer is flipped (its position/limit are mutated).
 * @param existingBuffer ByteBuffer capacity to check
 * @param newLength new length for the ByteBuffer.
 * @return ByteBuffer with a minimum capacity of new length
 */
public static ByteBuffer ensureCapacity(ByteBuffer existingBuffer, int newLength) {
  if (newLength > existingBuffer.capacity()) {
    ByteBuffer newBuffer = ByteBuffer.allocate(newLength);
    existingBuffer.flip();
    newBuffer.put(existingBuffer);
    return newBuffer;
  }
  return existingBuffer;
}

/**
 * Gets the root cause for {@code t}.
 * @param t the {@link Throwable} whose root cause is required.
 * @return the root cause for {@code t}; {@code t} itself if it has no cause, or {@code null} if {@code t} is null.
 */
public static Throwable getRootCause(Throwable t) {
  Throwable throwable = t;
  // Walk the cause chain until the deepest throwable is reached.
  while (throwable != null && throwable.getCause() != null) {
    throwable = throwable.getCause();
  }
  return throwable;
}

/**
 * Convert ms to nearest second(floor) and back to ms to get the approx value in ms if not for
 * {@link Utils#Infinite_Time}.
 * @param timeInMs the time in ms that needs to be converted
 * @return the time in ms to the nearest second(floored) for the given time in ms
 */
public static long getTimeInMsToTheNearestSec(long timeInMs) {
  long timeInSecs = timeInMs / Time.MsPerSec;
  return timeInMs != Utils.Infinite_Time ? (timeInSecs * Time.MsPerSec) : Utils.Infinite_Time;
}

/**
 * A thread factory to use for {@link ScheduledExecutorService}s instantiated using
 * {@link #newScheduler(int, String, boolean)}.
 */
private static class SchedulerThreadFactory implements ThreadFactory {
  // Monotonic suffix so each thread created by this factory gets a unique name.
  private final AtomicInteger schedulerThreadId = new AtomicInteger(0);
  private final String threadNamePrefix;
  private final boolean isDaemon;

  /**
   * Create a {@link SchedulerThreadFactory}
   * @param threadNamePrefix the prefix string for threads in this scheduler's thread pool.
   * @param isDaemon {@code true} if the created threads should be daemon threads.
   */
  SchedulerThreadFactory(String threadNamePrefix, boolean isDaemon) {
    this.threadNamePrefix = threadNamePrefix;
    this.isDaemon = isDaemon;
  }

  @Override
  public Thread newThread(Runnable r) {
    return Utils.newThread(threadNamePrefix + schedulerThreadId.getAndIncrement(), r, isDaemon);
  }
}
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.idea.maven.embedder; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.JarFileSystem; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import org.apache.maven.settings.MavenSettingsBuilder; import org.apache.maven.settings.RuntimeInfo; import org.apache.maven.settings.Settings; import org.codehaus.classworlds.ClassWorld; import org.codehaus.plexus.DefaultPlexusContainer; import org.codehaus.plexus.PlexusContainer; import org.codehaus.plexus.PlexusContainerException; import org.codehaus.plexus.component.repository.exception.ComponentLookupException; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.idea.maven.project.MavenGeneralSettings; import org.jetbrains.idea.maven.utils.JDOMReader; import org.jetbrains.idea.maven.utils.MavenConstants; import org.jetbrains.idea.maven.utils.MavenLog; import org.jetbrains.idea.maven.utils.MavenUtil; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.net.URL; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.regex.Pattern; public class 
MavenEmbedderFactory {
  // System property / environment variable names used to locate a Maven installation.
  @NonNls private static final String PROP_MAVEN_HOME = "maven.home";
  @NonNls private static final String PROP_USER_HOME = "user.home";
  @NonNls private static final String ENV_M2_HOME = "M2_HOME";
  // Well-known directory / file names inside a Maven installation or user home.
  @NonNls private static final String M2_DIR = "m2";
  @NonNls private static final String BIN_DIR = "bin";
  @NonNls private static final String DOT_M2_DIR = ".m2";
  @NonNls private static final String CONF_DIR = "conf";
  @NonNls private static final String LIB_DIR = "lib";
  @NonNls private static final String M2_CONF_FILE = "m2.conf";
  @NonNls private static final String REPOSITORY_DIR = "repository";
  @NonNls private static final String LOCAL_REPOSITORY_TAG = "localRepository";

  // Commonly-used subset of the Maven lifecycle phases, in lifecycle order.
  @NonNls private static final String[] basicPhases =
      {"clean", "validate", "compile", "test", "package", "install", "deploy", "site"};
  // Full list of Maven lifecycle phases, in lifecycle order.
  @NonNls private static final String[] phases =
      {"clean", "validate", "generate-sources", "process-sources", "generate-resources", "process-resources",
       "compile", "process-classes", "generate-test-sources", "process-test-sources", "generate-test-resources",
       "process-test-resources", "test-compile", "test", "package", "pre-integration-test", "integration-test",
       "post-integration-test", "verify", "install", "site", "deploy"};

  // Lazily-built cache of system + env properties; volatile for cross-thread visibility.
  private static volatile Properties mySystemPropertiesCache;

  private static final String SUPER_POM_PATH = "org/apache/maven/project/" + MavenConstants.SUPER_POM_XML;

  /**
   * Locates the Maven installation directory: explicit override, then $M2_HOME, then
   * {user.home}/m2; returns null when no valid installation is found.
   */
  @Nullable
  public static File resolveMavenHomeDirectory(@Nullable String overrideMavenHome) {
    if (!StringUtil.isEmptyOrSpaces(overrideMavenHome)) {
      // The override is trusted as-is; it is not validated with isValidMavenHome.
      return new File(overrideMavenHome);
    }

    final String m2home = System.getenv(ENV_M2_HOME);
    if (!StringUtil.isEmptyOrSpaces(m2home)) {
      final File homeFromEnv = new File(m2home);
      if (isValidMavenHome(homeFromEnv)) {
        return homeFromEnv;
      }
    }

    String userHome = System.getProperty(PROP_USER_HOME);
    if (!StringUtil.isEmptyOrSpaces(userHome)) {
      final File underUserHome = new File(userHome, M2_DIR);
      if (isValidMavenHome(underUserHome)) {
        return underUserHome;
      }
    }

    return null;
  }

  /** A directory is considered a Maven home when it contains bin/m2.conf. */
  public static boolean isValidMavenHome(File home) {
    return getMavenConfFile(home).exists();
  }

  public static File getMavenConfFile(File mavenHome) {
    return new File(new File(mavenHome, BIN_DIR), M2_CONF_FILE);
  }

  /** Path to the installation-wide settings.xml (conf/settings.xml), or null if no Maven home. */
  @Nullable
  public static File resolveGlobalSettingsFile(@Nullable String overrideMavenHome) {
    File directory = resolveMavenHomeDirectory(overrideMavenHome);
    if (directory == null) return null;
    return new File(new File(directory, CONF_DIR), MavenConstants.SETTINGS_XML);
  }

  /**
   * Resolves the Maven super-POM, preferring the one inside the installation's uber jar
   * and falling back to the copy bundled on this class's classpath.
   * NOTE(review): annotated @NotNull, but VfsUtil.findFileByURL could presumably return
   * null if the bundled resource is missing — confirm the fallback resource always exists.
   */
  @NotNull
  public static VirtualFile resolveSuperPomFile(@Nullable String overrideMavenHome) {
    VirtualFile result = doResolveSuperPomFile(overrideMavenHome);
    if (result == null) {
      URL resource = MavenEmbedderFactory.class.getResource("/" + SUPER_POM_PATH);
      return VfsUtil.findFileByURL(resource);
    }
    return result;
  }

  /** Looks up the super-POM inside the installation's maven-x.y.z-uber.jar, if present. */
  @Nullable
  private static VirtualFile doResolveSuperPomFile(String overrideMavenHome) {
    File lib = resolveMavenLib(overrideMavenHome);
    if (lib == null) return null;

    VirtualFile file = LocalFileSystem.getInstance().findFileByIoFile(lib);
    if (file == null) return null;

    VirtualFile root = JarFileSystem.getInstance().getJarRootForLocalFile(file);
    if (root == null) return null;

    return root.findFileByRelativePath(SUPER_POM_PATH);
  }

  /** Finds lib/maven-<version>-uber.jar inside the Maven home, or null. */
  private static File resolveMavenLib(String overrideMavenHome) {
    File directory = resolveMavenHomeDirectory(overrideMavenHome);
    if (directory == null) return null;
    File libs = new File(directory, LIB_DIR);
    File[] files = libs.listFiles();
    if (files != null) {
      Pattern pattern = Pattern.compile("maven-\\d+\\.\\d+\\.\\d+-uber\\.jar");
      for (File each : files) {
        if (pattern.matcher(each.getName()).matches()) {
          return each;
        }
      }
    }
    return null;
  }

  /** Path to the per-user settings.xml: explicit override, else {user.home}/.m2/settings.xml. */
  @Nullable
  public static File resolveUserSettingsFile(@Nullable String overrideSettingsFile) {
    if (!StringUtil.isEmptyOrSpaces(overrideSettingsFile)) return new File(overrideSettingsFile);

    String userHome = System.getProperty(PROP_USER_HOME);
    if (StringUtil.isEmptyOrSpaces(userHome)) return null;

    return new File(new File(userHome, DOT_M2_DIR), MavenConstants.SETTINGS_XML);
  }

  /**
   * Resolves the local repository path in precedence order: explicit override,
   * user settings.xml, global settings.xml, then the {user.home}/.m2/repository default.
   */
  @Nullable
  public static File resolveLocalRepository(@Nullable String mavenHome, @Nullable String userSettings, @Nullable String override) {
    if (!StringUtil.isEmpty(override)) {
      return new File(override);
    }

    final File userSettingsFile = resolveUserSettingsFile(userSettings);
    if (userSettingsFile != null) {
      final String fromUserSettings = getRepositoryFromSettings(userSettingsFile);
      if (!StringUtil.isEmpty(fromUserSettings)) {
        return new File(fromUserSettings);
      }
    }

    final File globalSettingsFile = resolveGlobalSettingsFile(mavenHome);
    if (globalSettingsFile != null) {
      final String fromGlobalSettings = getRepositoryFromSettings(globalSettingsFile);
      if (!StringUtil.isEmpty(fromGlobalSettings)) {
        return new File(fromGlobalSettings);
      }
    }

    return new File(new File(System.getProperty(PROP_USER_HOME), DOT_M2_DIR), REPOSITORY_DIR);
  }

  /** Reads the &lt;localRepository&gt; element from a settings.xml; null on any I/O failure. */
  private static String getRepositoryFromSettings(File file) {
    try {
      FileInputStream is = new FileInputStream(file);
      try {
        JDOMReader reader = new JDOMReader(is);
        return reader.getChildText(reader.getRootElement(), LOCAL_REPOSITORY_TAG);
      } finally {
        is.close();
      }
    } catch (IOException ignore) {
      // Best-effort lookup: a missing/unreadable settings file simply yields no repository.
      return null;
    }
  }

  public static List<String> getBasicPhasesList() {
    return Arrays.asList(basicPhases);
  }

  public static List<String> getPhasesList() {
    return Arrays.asList(phases);
  }

  /**
   * Boots a Plexus container, wires in a logger for the requested level, and wraps the
   * resulting embedder. Note the side effect: sets the "maven.home" system property.
   */
  public static MavenEmbedderWrapper createEmbedder(MavenGeneralSettings generalSettings) {
    DefaultPlexusContainer container = new DefaultPlexusContainer();
    container.setClassWorld(new ClassWorld("plexus.core", generalSettings.getClass().getClassLoader()));
    CustomLoggerManager loggerManager = new CustomLoggerManager(generalSettings.getLoggingLevel());
    container.setLoggerManager(loggerManager);

    try {
      container.initialize();
      container.start();
    } catch (PlexusContainerException e) {
      MavenLog.LOG.error(e);
      throw new RuntimeException(e);
    }

    File mavenHome = generalSettings.getEffectiveMavenHome();
    if (mavenHome != null) {
      System.setProperty(PROP_MAVEN_HOME, mavenHome.getPath());
    }

    Settings settings = buildSettings(container, generalSettings);

    return new MavenEmbedderWrapper(container, settings, loggerManager.getLogger(), generalSettings);
  }

  /**
   * Builds effective Maven Settings from user/global settings files plus the IDE-level
   * overrides (local repository, offline mode, plugin registry, update policy).
   * Side effect: sets the alternate global settings location system property.
   */
  private static Settings buildSettings(PlexusContainer container, MavenGeneralSettings generalSettings) {
    File file = generalSettings.getEffectiveGlobalSettingsIoFile();
    if (file != null) {
      System.setProperty(MavenSettingsBuilder.ALT_GLOBAL_SETTINGS_XML_LOCATION, file.getPath());
    }

    Settings settings = null;

    try {
      MavenSettingsBuilder builder = (MavenSettingsBuilder)container.lookup(MavenSettingsBuilder.ROLE);

      File userSettingsFile = generalSettings.getEffectiveUserSettingsIoFile();
      if (userSettingsFile != null && userSettingsFile.exists() && !userSettingsFile.isDirectory()) {
        settings = builder.buildSettings(userSettingsFile, false);
      }

      if (settings == null) {
        settings = builder.buildSettings();
      }
    } catch (ComponentLookupException e) {
      MavenLog.LOG.error(e);
    } catch (IOException e) {
      MavenLog.LOG.warn(e);
    } catch (XmlPullParserException e) {
      MavenLog.LOG.warn(e);
    }

    if (settings == null) {
      // Fall back to defaults when the settings files could not be parsed/looked up.
      settings = new Settings();
    }

    File localRepository = generalSettings.getEffectiveLocalRepository();
    if (localRepository != null) {
      settings.setLocalRepository(localRepository.getPath());
    }

    settings.setOffline(generalSettings.isWorkOffline());
    settings.setInteractiveMode(false);
    settings.setUsePluginRegistry(generalSettings.isUsePluginRegistry());

    RuntimeInfo runtimeInfo = new RuntimeInfo(settings);
    runtimeInfo.setPluginUpdateOverride(generalSettings.getPluginUpdatePolicy() == MavenExecutionOptions.PluginUpdatePolicy.UPDATE);
    settings.setRuntimeInfo(runtimeInfo);

    return settings;
  }

  /**
   * Returns system properties plus env vars exposed as "env.NAME" entries, cached after
   * the first call. Benign race: concurrent first calls may each build the map; the last
   * write to the volatile field wins.
   */
  public static Properties collectSystemProperties() {
    if (mySystemPropertiesCache == null) {
      Properties result = new Properties();
      result.putAll(MavenUtil.getSystemProperties());

      Properties envVars = MavenUtil.getEnvProperties();
      for (Map.Entry<Object, Object> each : envVars.entrySet()) {
        result.setProperty("env." + each.getKey().toString(), each.getValue().toString());
      }

      mySystemPropertiesCache = result;
    }

    return mySystemPropertiesCache;
  }

  public static void resetSystemPropertiesCacheInTests() {
    mySystemPropertiesCache = null;
  }
}
package cz.habarta.typescript.generator.maven;

import cz.habarta.typescript.generator.*;
import java.io.*;
import java.net.*;
import java.util.*;
import org.apache.maven.artifact.*;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.*;
import org.apache.maven.project.MavenProject;


/**
 * Generates TypeScript declaration file from specified java classes.
 * For more information see README and Wiki on GitHub.
 */
@Mojo(name = "generate", defaultPhase = LifecyclePhase.PROCESS_CLASSES, requiresDependencyResolution = ResolutionScope.COMPILE)
public class GenerateMojo extends AbstractMojo {

    /**
     * Path and name of generated TypeScript file.
     * Required parameter.
     */
    @Parameter(required = true)
    private File outputFile;

    /**
     * Output file format, can be 'declarationFile' (.d.ts) or 'implementationFile' (.ts).
     * Setting this parameter to 'implementationFile' allows extensions to generate runnable TypeScript code.
     * Default value is 'declarationFile'.
     */
    @Parameter
    private TypeScriptFileType outputFileType;

    /**
     * Kind of generated TypeScript output, allowed values are 'global', 'module' or 'ambientModule'.
     * Value 'global' means that declarations will be in global scope or namespace (no module).
     * Value 'module' means that generated file will contain top-level 'export' declarations.
     * Value 'ambientModule' means that generated declarations will be wrapped in 'declare module "mod" { }' declaration.
     * Required parameter.
     * For more information see Wiki page 'http://vojtechhabarta.github.io/typescript-generator/doc/ModulesAndNamespaces.html'.
     */
    @Parameter(required = true)
    private TypeScriptOutputKind outputKind;

    /**
     * Name of generated ambient module.
     * Used when 'outputKind' is set to 'ambientModule'.
     */
    @Parameter
    private String module;

    /**
     * Generates specified namespace. Not recommended to combine with modules. Default is no namespace.
     */
    @Parameter
    private String namespace;

    /**
     * JSON classes to process.
     */
    @Parameter
    private List<String> classes;

    /**
     * JSON classes to process specified using glob pattern
     * so it is possible to specify package or class name suffix.
     * Glob patterns support two wildcards:
     * Single "*" wildcard matches any character except for "." and "$".
     * Double "**" wildcard matches any character.
     * For more information and examples see Wiki page 'https://github.com/vojtechhabarta/typescript-generator/wiki/Class-Names-Glob-Patterns'.
     */
    @Parameter
    private List<String> classPatterns;

    /**
     * Scans specified JAX-RS {@link javax.ws.rs.core.Application} for JSON classes to process.
     * Parameter contains fully-qualified class name.
     * It is possible to exclude particular REST resource classes using {@link #excludeClasses} parameter.
     */
    @Parameter
    private String classesFromJaxrsApplication;

    /**
     * Scans JAX-RS resources for JSON classes to process.
     * It is possible to exclude particular REST resource classes using {@link #excludeClasses} parameter.
     */
    @Parameter
    private boolean classesFromAutomaticJaxrsApplication;

    /**
     * List of classes excluded from processing.
     */
    @Parameter
    private List<String> excludeClasses;

    /**
     * If this list is not empty then TypeScript will only be generated for
     * methods with one of the annotations defined in this list
     */
    @Parameter
    private List<String> includePropertyAnnotations;

    /**
     * Library used in JSON classes.
     * Supported values are 'jackson1', 'jackson2'.
     * Required parameter, recommended value is 'jackson2'.
     */
    @Parameter(required = true)
    private JsonLibrary jsonLibrary;

    /**
     * If true declared properties will be optional.
     */
    @Parameter
    private boolean declarePropertiesAsOptional;

    /**
     * Prefix which will be removed from names of classes, interfaces, enums.
     * For example if set to "Json" then mapping for "JsonData" will be "Data".
     */
    @Parameter
    private String removeTypeNamePrefix;

    /**
     * Suffix which will be removed from names of classes, interfaces, enums.
     * For example if set to "JSON" then mapping for "DataJSON" will be "Data".
     */
    @Parameter
    private String removeTypeNameSuffix;

    /**
     * Prefix which will be added to names of classes, interfaces, enums.
     * For example if set to "I" then mapping for "Data" will be "IData".
     */
    @Parameter
    private String addTypeNamePrefix;

    /**
     * Suffix which will be added to names of classes, interfaces, enums.
     * For example if set to "Data" then mapping for "Person" will be "PersonData".
     */
    @Parameter
    private String addTypeNameSuffix;

    /**
     * Specifies custom TypeScript name for Java classes.
     * Multiple mappings can be specified, each using this format: "javaClassName:typescriptName".
     * This takes precedence over other naming settings.
     */
    @Parameter
    private List<String> customTypeNaming;

    /**
     * List of files which will be referenced using triple-slash directive: /// &lt;reference path="file" />.
     * This can be used with "customTypeMappings" to provide needed TypeScript types.
     */
    @Parameter
    private List<String> referencedFiles;

    /**
     * List of import declarations which will be added to generated output.
     * This can be used with "customTypeMappings" to provide needed TypeScript types.
     */
    @Parameter
    private List<String> importDeclarations;

    /**
     * List of custom mappings.
     * Each item specifies TypeScript type which will be used for particular Java class.
     * Item format is: "javaClass:typescriptType".
     * For example mapping "ZonedDateTime" to "string" would be added as "java.time.ZonedDateTime:string".
     */
    @Parameter
    private List<String> customTypeMappings;

    /**
     * Specifies how {@link java.util.Date} will be mapped.
     * Supported values are 'asDate', 'asNumber', 'asString'.
     * Default value is 'asDate'.
     */
    @Parameter
    private DateMapping mapDate;

    /**
     * Specifies custom class implementing {@link cz.habarta.typescript.generator.TypeProcessor}.
     * This allows to customize how Java types are mapped to TypeScript.
     * For example it is possible to implement TypeProcessor
     * for {@link com.google.common.base.Optional} from guava or for Java 8 date/time classes.
     */
    @Parameter
    private String customTypeProcessor;

    /**
     * If true TypeScript declarations (interfaces, properties) will be sorted alphabetically.
     */
    @Parameter
    private boolean sortDeclarations;

    /**
     * If true TypeScript type declarations (interfaces) will be sorted alphabetically.
     */
    @Parameter
    private boolean sortTypeDeclarations;

    /**
     * If true generated file will not contain comment at the top.
     * By default there is a comment with timestamp and typescript-generator version.
     * So it might be useful to suppress this comment if the file is in source control and is regenerated in build.
     */
    @Parameter
    private boolean noFileComment;

    /**
     * List of Javadoc XML files to search for documentation comments.
     * These files should be created using "com.github.markusbernhardt.xmldoclet.XmlDoclet" (com.github.markusbernhardt:xml-doclet).
     * Javadoc comments are added to output declarations as JSDoc comments.
     * For more information see Wiki page 'https://github.com/vojtechhabarta/typescript-generator/wiki/Javadoc'.
     */
    @Parameter
    private List<File> javadocXmlFiles;

    /**
     * List of extensions specified as fully qualified class name.
     * Known extensions:
     * cz.habarta.typescript.generator.ext.TypeGuardsForJackson2PolymorphismExtension
     */
    @Parameter
    private List<String> extensions;

    /**
     * The presence of any annotation in this list on a JSON property will cause
     * the typescript-generator to treat that property as optional when generating
     * the corresponding TypeScript interface.
     * Example optional annotation: @javax.annotation.Nullable
     */
    @Parameter
    private List<String> optionalAnnotations;

    /**
     * Defines enum type on places where the enum is used (inline).
     * (Without this flag enum type is created once as type alias and is referenced from places where the enum is used.)
     */
    @Parameter
    private boolean experimentalInlineEnums;

    /**
     * Display warnings when bean serializer is not found.
     */
    @Parameter(defaultValue = "true")
    private boolean displaySerializerWarning;

    @Parameter(defaultValue = "${project}", readonly = true, required = true)
    private MavenProject project;

    /**
     * Builds a class loader over the project's compile classpath, translates all mojo
     * parameters into typescript-generator {@link Settings} and runs the generation.
     * @throws MojoExecutionException if classpath resolution or generation fails
     */
    @Override
    public void execute() throws MojoExecutionException {
        try {
            TypeScriptGenerator.printVersion();

            // class loader over the compile classpath so user classes and extensions resolve
            final List<URL> urls = new ArrayList<>();
            for (String element : project.getCompileClasspathElements()) {
                urls.add(new File(element).toURI().toURL());
            }
            final URLClassLoader classLoader = new URLClassLoader(urls.toArray(new URL[0]), Thread.currentThread().getContextClassLoader());

            // Settings: copy each mojo parameter into the generator settings object
            final Settings settings = new Settings();
            if (outputFileType != null) {
                settings.outputFileType = outputFileType;
            }
            settings.outputKind = outputKind;
            settings.module = module;
            settings.namespace = namespace;
            settings.excludedClassNames = excludeClasses;
            settings.jsonLibrary = jsonLibrary;
            settings.declarePropertiesAsOptional = declarePropertiesAsOptional;
            settings.removeTypeNamePrefix = removeTypeNamePrefix;
            settings.removeTypeNameSuffix = removeTypeNameSuffix;
            settings.addTypeNamePrefix = addTypeNamePrefix;
            settings.addTypeNameSuffix = addTypeNameSuffix;
            settings.customTypeNaming = Settings.convertToMap(customTypeNaming);
            settings.referencedFiles = referencedFiles;
            settings.importDeclarations = importDeclarations;
            settings.customTypeMappings = Settings.convertToMap(customTypeMappings);
            settings.mapDate = mapDate;
            settings.loadCustomTypeProcessor(classLoader, customTypeProcessor);
            settings.sortDeclarations = sortDeclarations;
            settings.sortTypeDeclarations = sortTypeDeclarations;
            settings.noFileComment = noFileComment;
            settings.javadocXmlFiles = javadocXmlFiles;
            settings.loadExtensions(classLoader, extensions);
            settings.loadIncludePropertyAnnotations(classLoader, includePropertyAnnotations);
            settings.loadOptionalAnnotations(classLoader, optionalAnnotations);
            settings.experimentalInlineEnums = experimentalInlineEnums;
            settings.displaySerializerWarning = displaySerializerWarning;
            settings.validateFileName(outputFile);

            // TypeScriptGenerator
            new TypeScriptGenerator(settings).generateTypeScript(
                    Input.fromClassNamesAndJaxrsApplication(classes, classPatterns, classesFromJaxrsApplication,
                            classesFromAutomaticJaxrsApplication, excludeClasses, classLoader),
                    Output.to(outputFile)
            );

        } catch (DependencyResolutionRequiredException | IOException e) {
            // FIX: report failures through the Maven plugin contract instead of a bare
            // RuntimeException, so Maven presents a proper build error for this mojo.
            throw new MojoExecutionException("TypeScript generation failed.", e);
        }
    }

}
/**
 * Copyright 2015-2017 Red Hat, Inc, and individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wildfly.swarm.container.config;

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.wildfly.swarm.spi.api.config.ConfigKey;
import org.wildfly.swarm.spi.api.config.ConfigTree;
import org.wildfly.swarm.spi.api.config.SimpleKey;

/**
 * A configuration node capable of having a direct value in addition to key/value children.
 *
 * @author Bob McWhirter
 */
public class ConfigNode implements ConfigTree {

    public ConfigNode() {

    }

    // Leaf constructor: a node that starts out holding only a direct value.
    ConfigNode(Object value) {
        this.value = value;
    }

    /**
     * Set the value of an immediate child.
     *
     * @param key The simple child key.
     * @param value The value to set.
     */
    public void child(SimpleKey key, Object value) {
        if (value instanceof ConfigNode) {
            // A subtree replaces whatever child was stored under this key.
            this.children.put(key, (ConfigNode) value);
        } else {
            if (this.children.containsKey(key)) {
                // Keep the existing child (and its own children); only update its value.
                this.children.get(key).value = value;
            } else {
                this.children.put(key, new ConfigNode(value));
            }
        }
    }

    /**
     * Set the value of an immediate child.
     *
     * @param key The simple child key.
     * @param value The value to set.
     */
    public void child(String key, Object value) {
        child(new SimpleKey(key), value);
    }

    /**
     * Set the value of a descendant.
     *
     * <p>Any intermediate leafs will be created as-needed.</p>
     *
     * @param key The possibly-complex key to a descendant.
     * @param value The value to set.
     */
    public void recursiveChild(String key, Object value) {
        recursiveChild(ConfigKey.parse(key), value);
    }

    /**
     * Set the value of a descendant.
     *
     * <p>Any intermediate leafs will be created as-needed.</p>
     *
     * @param key The possibly-complex key to a descendant.
     * @param value The value to set.
     */
    public void recursiveChild(ConfigKey key, Object value) {
        SimpleKey head = key.head();

        // NOTE(review): when head is EMPTY the value is set on this node, but execution
        // then falls through to the subkey logic rather than returning — confirm
        // ConfigKey.subkey(1) on an empty key yields EMPTY so the fall-through is benign.
        if (head == ConfigKey.EMPTY) {
            value(value);
        }

        ConfigKey rest = key.subkey(1);

        if (rest == ConfigKey.EMPTY) {
            // Single-segment key: set on the immediate child.
            child(head, value);
        } else {
            // Multi-segment key: create the intermediate node if absent, then recurse.
            ConfigNode child = child(head);
            if (child == null) {
                child = new ConfigNode();
                child(head, child);
            }
            child.recursiveChild(rest, value);
        }
    }

    // Walks the key one segment at a time; null when any segment is missing.
    ConfigNode descendant(ConfigKey key) {
        SimpleKey head = key.head();
        if (head == ConfigKey.EMPTY) {
            return this;
        }

        ConfigKey rest = key.subkey(1);
        ConfigNode child = child(head);
        if (child == null) {
            return null;
        }
        return child.descendant(rest);
    }

    /**
     * Retrieve the immediate child node.
     *
     * @param key The child's key.
     * @return The node or {@code null} is none present.
     */
    ConfigNode child(SimpleKey key) {
        ConfigNode child = this.children.get(key);
        return child;
    }

    /**
     * Retrieve the immediate child node.
     *
     * @param key The child's key.
     * @return The node or {@code null} is none present.
     */
    ConfigNode child(String key) {
        return child(new SimpleKey(key));
    }

    /**
     * Retrieve all immediate children keys.
     *
     * @return All immediate children keys.
     */
    public Set<SimpleKey> childrenKeys() {
        return this.children.keySet();
    }

    /**
     * Retrieve all descendent keys.
     *
     * @return A stream of all descendent keys.
*/ public Stream<ConfigKey> allKeysRecursively() { Stream<ConfigKey> str = Stream.empty(); if (this.value != null) { str = Stream.of(ConfigKey.EMPTY); } str = Stream.concat(str, this.children.entrySet() .stream() .flatMap((kv) -> { ConfigKey key = kv.getKey(); Object value = kv.getValue(); if (value instanceof ConfigNode) { return ((ConfigNode) value).allKeysRecursively() .map(childKey -> key.append(childKey)); } return Stream.empty(); })); return str; } /** * Set the value on this node. * * @param value The value. */ void value(Object value) { if (value instanceof ConfigNode) { throw new RuntimeException("Cannot set config-node as a value of a tree config-node"); } this.value = value; } /** * Retrieve a value. * * @param key The possibly-complex key of the value to retrieve. * @return The value of {@code null} if none. */ public Object valueOf(ConfigKey key) { SimpleKey head = key.head(); if (head == ConfigKey.EMPTY) { if (this.value == null && this.children != null) { return this; } return this.value; } ConfigNode child = child(head); if (child != null) { ConfigKey rest = key.subkey(1); return child.valueOf(rest); } return null; } protected boolean isListLike() { return this.children.keySet().stream() .allMatch(e -> e.toString().matches("^[0-9]*$")); } public Object asObject() { if (this.value != null) { return this.value; } if (isListLike()) { return asList(); } return asMap(); } public List asList() { return this.children.values().stream() .map(e -> e.asObject()) .collect(Collectors.toList()); } public Map asMap() { Map<String,Object> map = new HashMap<>(); this.children.entrySet() .forEach(entry -> { map.put(entry.getKey().toString(), entry.getValue().asObject()); }); return map; } @Override public Properties asProperties() { Properties properties = new Properties(); this.children.entrySet() .stream() .filter(entry -> entry.getValue().value != null) .forEach(entry -> { properties.setProperty(entry.getKey().toString(), entry.getValue().value.toString()); }); 
return properties; } public String toString() { return "[ConfigNode: (" + System.identityHashCode(this.children) + ") children=" + this.children + "; value=" + this.value + "]"; } private Map<SimpleKey, ConfigNode> children = new LinkedHashMap<>(); private Object value; }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.bazel.rules; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.BlazeDirectories; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider; import com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.bazel.rules.cpp.BazelCppRuleClasses; import com.google.devtools.build.lib.bazel.rules.sh.BazelShRuleClasses; import com.google.devtools.build.lib.remote.options.RemoteOptions; import com.google.devtools.build.lib.rules.cpp.CcSkyframeFdoSupportFunction; import com.google.devtools.build.lib.rules.cpp.CcSkyframeFdoSupportValue; import com.google.devtools.build.lib.rules.cpp.CppOptions; import com.google.devtools.build.lib.rules.java.JavaOptions; import com.google.devtools.build.lib.runtime.BlazeModule; import com.google.devtools.build.lib.runtime.BlazeRuntime; import com.google.devtools.build.lib.runtime.Command; import com.google.devtools.build.lib.runtime.CommandEnvironment; import com.google.devtools.build.lib.runtime.WorkspaceBuilder; import com.google.devtools.build.lib.server.FailureDetails.FailureDetail; import com.google.devtools.build.lib.server.FailureDetails.RemoteExecution; import com.google.devtools.build.lib.server.FailureDetails.RemoteExecution.Code; import com.google.devtools.build.lib.util.AbruptExitException; 
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.ResourceFileLoader;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionMetadataTag;
import com.google.devtools.common.options.OptionsBase;
import java.io.IOException;

/** Module implementing the rule set of Bazel. */
public class BazelRulesModule extends BlazeModule {

  /**
   * This is where deprecated options go to die.
   *
   * <p>Every field below is declared {@code help = "...no-op..."}: the flags are retained only so
   * that command lines and rc-files still mentioning them keep parsing instead of failing.
   */
  public static class GraveyardOptions extends OptionsBase {
    @Option(
        name = "incompatible_load_python_rules_from_bzl",
        defaultValue = "false",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
        },
        help = "Deprecated no-op.")
    public boolean loadPythonRulesFromBzl;

    @Option(
        name = "incompatible_load_proto_rules_from_bzl",
        defaultValue = "false",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean loadProtoRulesFromBzl;

    @Option(
        name = "incompatible_load_java_rules_from_bzl",
        defaultValue = "false",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean loadJavaRulesFromBzl;

    @Option(
        name = "incompatible_disable_legacy_proto_provider",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.NO_OP},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
        },
        help = "Deprecated no-op.")
    public boolean disableLegacyProtoProvider;

    @Option(
        name = "incompatible_disable_proto_source_root",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.NO_OP},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
        },
        help = "Deprecated no-op.")
    public boolean disableProtoSourceRoot;

    @Option(
        name = "incompatible_do_not_emit_buggy_external_repo_import",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.NO_OP},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean doNotUseBuggyImportPath;

    @Option(
        name = "incompatible_disable_crosstool_file",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
          OptionMetadataTag.INCOMPATIBLE_CHANGE
        },
        help = "Deprecated no-op.")
    public boolean disableCrosstool;

    @Option(
        name = "incompatible_disable_legacy_crosstool_fields",
        oldName = "experimental_disable_legacy_crosstool_fields",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean disableLegacyCrosstoolFields;

    @Option(
        name = "incompatible_require_feature_configuration_for_pic",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean requireFeatureConfigurationForPic;

    @Option(
        name = "incompatible_disable_depset_in_cc_user_flags",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean disableDepsetInUserFlags;

    @Option(
        name = "incompatible_dont_emit_static_libgcc",
        oldName = "experimental_dont_emit_static_libgcc",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.ACTION_COMMAND_LINES, OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean disableEmittingStaticLibgcc;

    @Option(
        name = "incompatible_linkopts_in_user_link_flags",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.ACTION_COMMAND_LINES, OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean enableLinkoptsInUserLinkFlags;

    @Option(
        name = "incompatible_disable_runtimes_filegroups",
        defaultValue = "false",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.ACTION_COMMAND_LINES, OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean disableRuntimesFilegroups;

    @Option(
        name = "incompatible_disable_tools_defaults_package",
        defaultValue = "false",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.AFFECTS_OUTPUTS, OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
          OptionMetadataTag.INCOMPATIBLE_CHANGE
        },
        help = "Deprecated no-op.")
    public boolean incompatibleDisableInMemoryToolsDefaultsPackage;

    @Option(
        name = "experimental_enable_cc_toolchain_config_info",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.NO_OP},
        metadataTags = {OptionMetadataTag.DEPRECATED},
        help = "No-op")
    public boolean enableCcToolchainConfigInfoFromStarlark;

    @Option(
        name = "output_symbol_counts",
        defaultValue = "false",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.ACTION_COMMAND_LINES, OptionEffectTag.AFFECTS_OUTPUTS},
        metadataTags = {OptionMetadataTag.HIDDEN, OptionMetadataTag.DEPRECATED},
        help = "Deprecated no-op.")
    public boolean symbolCounts;

    @Option(
        name = "incompatible_disable_sysroot_from_configuration",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean disableSysrootFromConfiguration;

    @Option(
        name = "incompatible_provide_cc_toolchain_info_from_cc_toolchain_suite",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean provideCcToolchainInfoFromCcToolchainSuite;

    @Option(
        name = "incompatible_disable_cc_toolchain_label_from_crosstool_proto",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.EAGERNESS_TO_EXIT},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "Deprecated no-op.")
    public boolean disableCcToolchainFromCrosstool;

    @Option(
        name = "incompatible_disable_cc_configuration_make_variables",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
          OptionMetadataTag.DEPRECATED,
        },
        help = "Deprecated no-op.")
    public boolean disableMakeVariables;

    @Option(
        name = "make_variables_source",
        defaultValue = "configuration",
        metadataTags = {OptionMetadataTag.HIDDEN, OptionMetadataTag.DEPRECATED},
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN})
    public String makeVariableSource;

    @Option(
        name = "incompatible_disable_legacy_flags_cc_toolchain_api",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
          OptionMetadataTag.DEPRECATED
        },
        help =
            "Flag for disabling the legacy cc_toolchain Starlark API for accessing legacy "
                + "CROSSTOOL fields.")
    public boolean disableLegacyFlagsCcToolchainApi;

    @Option(
        name = "incompatible_enable_legacy_cpp_toolchain_skylark_api",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
          OptionMetadataTag.DEPRECATED
        },
        help = "Obsolete, no effect.")
    public boolean enableLegacyToolchainStarlarkApi;

    @Option(
        name = "incompatible_disable_legacy_cpp_toolchain_skylark_api",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
          OptionMetadataTag.DEPRECATED
        },
        help = "Obsolete, no effect.")
    public boolean disableLegacyToolchainStarlarkApi;

    @Option(
        name = "incompatible_cc_coverage",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {
          OptionEffectTag.UNKNOWN,
        },
        oldName = "experimental_cc_coverage",
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES,
          OptionMetadataTag.DEPRECATED
        },
        help = "Obsolete, no effect.")
    public boolean useGcovCoverage;

    @Deprecated
    @Option(
        name = "direct_run",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        metadataTags = {OptionMetadataTag.DEPRECATED},
        help = "Deprecated no-op.")
    public boolean directRun;

    @Deprecated
    @Option(
        name = "glibc",
        defaultValue = "null",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        metadataTags = {OptionMetadataTag.DEPRECATED},
        help = "Deprecated no-op.")
    public String glibc;

    @Deprecated
    @Option(
        name = "experimental_shortened_obj_file_path",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.EXECUTION},
        defaultValue = "true",
        help = "This option is deprecated and has no effect.")
    public boolean shortenObjFilePath;

    @Option(
        name = "force_ignore_dash_static",
        defaultValue = "false",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.LOADING_AND_ANALYSIS, OptionEffectTag.AFFECTS_OUTPUTS},
        help = "noop")
    public boolean forceIgnoreDashStatic;

    @Option(
        name = "incompatible_disable_late_bound_option_defaults",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.NO_OP},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "This option is deprecated and has no effect.")
    public boolean incompatibleDisableLateBoundOptionDefaults;

    @Option(
        name = "incompatible_use_native_patch",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.NO_OP},
        metadataTags = {
          OptionMetadataTag.DEPRECATED,
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "This option is deprecated and has no effect.")
    public boolean useNativePatch;

    @Deprecated
    @Option(
        name = "ui",
        oldName = "experimental_ui",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "No-op.")
    public boolean experimentalUi;

    @Option(
        name = "experimental_profile_action_counts",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "No-op.")
    public boolean enableActionCountProfile;

    @Option(
        name = "incompatible_remove_binary_profile",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "No-op.")
    public boolean removeBinaryProfile;

    @Option(
        name = "experimental_post_profile_started_event",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        help = "No-op.")
    public boolean postProfileStartedEvent;

    @Option(
        name = "incompatible_enable_profile_by_default",
        defaultValue = "true",
        documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
        effectTags = {OptionEffectTag.UNKNOWN},
        metadataTags = {
          OptionMetadataTag.INCOMPATIBLE_CHANGE,
          OptionMetadataTag.TRIGGERED_BY_ALL_INCOMPATIBLE_CHANGES
        },
        help = "No-op.")
    public boolean enableProfileByDefault;
  }

  /**
   * Registers the Bazel rule classes and appends the cc/xcode/sh auto-configuration
   * WORKSPACE suffixes loaded from classpath resources.
   */
  @Override
  public void initializeRuleClasses(ConfiguredRuleClassProvider.Builder builder) {
    BazelRuleClassProvider.setup(builder);

    try {
      // Load auto-configuration files, it is made outside of the rule class provider so that it
      // will not be loaded for our Java tests.
      builder.addWorkspaceFileSuffix(
          ResourceFileLoader.loadResource(BazelCppRuleClasses.class, "cc_configure.WORKSPACE"));
      builder.addWorkspaceFileSuffix(
          ResourceFileLoader.loadResource(BazelRulesModule.class, "xcode_configure.WORKSPACE"));
      builder.addWorkspaceFileSuffix(
          ResourceFileLoader.loadResource(BazelShRuleClasses.class, "sh_configure.WORKSPACE"));
    } catch (IOException e) {
      // Missing bundled resources mean a broken build of Bazel itself, so fail hard.
      throw new IllegalStateException(e);
    }
  }

  /** Rejects remote-output-mode / in-memory-file flag combinations before each command runs. */
  @Override
  public void beforeCommand(CommandEnvironment env) throws AbruptExitException {
    validateRemoteOutputsMode(env);
  }

  /** Installs the FDO-support Skyframe function into the workspace. */
  @Override
  public void workspaceInit(
      BlazeRuntime runtime, BlazeDirectories directories, WorkspaceBuilder builder) {
    builder.addSkyFunction(
        CcSkyframeFdoSupportValue.SKYFUNCTION, new CcSkyframeFdoSupportFunction(directories));
  }

  /** Derives default build options from the rule class provider's configuration fragments. */
  @Override
  public BuildOptions getDefaultBuildOptions(BlazeRuntime blazeRuntime) {
    return BuildOptions.getDefaultBuildOptionsForFragments(
        blazeRuntime.getRuleClassProvider().getConfigurationOptions());
  }

  /** The graveyard flags are only registered for the "build" command. */
  @Override
  public Iterable<Class<? extends OptionsBase>> getCommandOptions(Command command) {
    return "build".equals(command.name())
        ? ImmutableList.of(GraveyardOptions.class)
        : ImmutableList.of();
  }

  /**
   * When not downloading all remote outputs, both in-memory jdeps and in-memory .d files must be
   * enabled; otherwise the command is aborted with a remote-execution failure detail.
   */
  private static void validateRemoteOutputsMode(CommandEnvironment env)
      throws AbruptExitException {
    RemoteOptions remoteOptions = env.getOptions().getOptions(RemoteOptions.class);
    if (remoteOptions == null) {
      // Command does not carry remote options; nothing to validate.
      return;
    }
    if (!remoteOptions.remoteOutputsMode.downloadAllOutputs()) {
      JavaOptions javaOptions = env.getOptions().getOptions(JavaOptions.class);
      if (javaOptions != null && !javaOptions.inmemoryJdepsFiles) {
        throw createRemoteExecutionExitException(
            "--experimental_remote_download_outputs=minimal requires"
                + " --experimental_inmemory_jdeps_files to be enabled",
            Code.REMOTE_DOWNLOAD_OUTPUTS_MINIMAL_WITHOUT_INMEMORY_JDEPS);
      }
      CppOptions cppOptions = env.getOptions().getOptions(CppOptions.class);
      if (cppOptions != null && !cppOptions.inmemoryDotdFiles) {
        throw createRemoteExecutionExitException(
            "--experimental_remote_download_outputs=minimal requires"
                + " --experimental_inmemory_dotd_files to be enabled",
            Code.REMOTE_DOWNLOAD_OUTPUTS_MINIMAL_WITHOUT_INMEMORY_DOTD);
      }
    }
  }

  /** Wraps a message and remote-execution failure code into an {@link AbruptExitException}. */
  private static AbruptExitException createRemoteExecutionExitException(
      String message, Code remoteExecutionCode) {
    return new AbruptExitException(
        DetailedExitCode.of(
            FailureDetail.newBuilder()
                .setMessage(message)
                .setRemoteExecution(RemoteExecution.newBuilder().setCode(remoteExecutionCode))
                .build()));
  }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.app.service.idm;

import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import javax.annotation.PostConstruct;

import org.apache.commons.codec.binary.Base64;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLContextBuilder;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.flowable.app.model.common.RemoteGroup;
import org.flowable.app.model.common.RemoteToken;
import org.flowable.app.model.common.RemoteUser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

/**
 * {@link RemoteIdmService} backed by the REST API of a remote IDM application.
 *
 * <p>The IDM base URL and the admin credentials used for service-to-service calls are read
 * from the environment at startup ({@code idm.app.url}, {@code idm.admin.user},
 * {@code idm.admin.password}).</p>
 */
@Service
public class RemoteIdmServiceImpl implements RemoteIdmService {

    // Fixed: logger was created against the interface (RemoteIdmService.class), which mislabels
    // the log category for this implementation.
    private static final Logger logger = LoggerFactory.getLogger(RemoteIdmServiceImpl.class);

    private static final String PROPERTY_URL = "idm.app.url";
    private static final String PROPERTY_ADMIN_USER = "idm.admin.user";
    private static final String PROPERTY_ADMIN_PASSWORD = "idm.admin.password";

    @Autowired
    protected Environment environment;

    @Autowired
    protected ObjectMapper objectMapper;

    protected String url;
    protected String adminUser;
    protected String adminPassword;

    /** Resolves the remote IDM endpoint and admin credentials; fails fast if any is missing. */
    @PostConstruct
    protected void init() {
        url = environment.getRequiredProperty(PROPERTY_URL);
        adminUser = environment.getRequiredProperty(PROPERTY_ADMIN_USER);
        adminPassword = environment.getRequiredProperty(PROPERTY_ADMIN_PASSWORD);
    }

    /**
     * Authenticates by fetching the user's own record with the supplied credentials.
     *
     * @return the user, or {@code null} when authentication or the remote call fails.
     */
    @Override
    public RemoteUser authenticateUser(String username, String password) {
        JsonNode json = callRemoteIdmService(url + "/api/idm/users/" + encode(username), username, password);
        if (json != null) {
            return parseUserInfo(json);
        }
        return null;
    }

    /**
     * Looks up a token by value using the admin credentials.
     *
     * @return the token, or {@code null} when not found or the remote call fails.
     */
    @Override
    public RemoteToken getToken(String tokenValue) {
        JsonNode json = callRemoteIdmService(url + "/api/idm/tokens/" + encode(tokenValue), adminUser, adminPassword);
        if (json != null) {
            RemoteToken token = new RemoteToken();
            token.setId(json.get("id").asText());
            token.setValue(json.get("value").asText());
            token.setUserId(json.get("userId").asText());
            return token;
        }
        return null;
    }

    /**
     * Fetches a single user by id using the admin credentials.
     *
     * @return the user, or {@code null} when not found or the remote call fails.
     */
    @Override
    public RemoteUser getUser(String userId) {
        JsonNode json = callRemoteIdmService(url + "/api/idm/users/" + encode(userId), adminUser, adminPassword);
        if (json != null) {
            return parseUserInfo(json);
        }
        return null;
    }

    /** @return matching users, or an empty list when the remote call fails. */
    @Override
    public List<RemoteUser> findUsersByNameFilter(String filter) {
        JsonNode json = callRemoteIdmService(url + "/api/idm/users?filter=" + encode(filter), adminUser, adminPassword);
        if (json != null) {
            return parseUsersInfo(json);
        }
        return new ArrayList<RemoteUser>();
    }

    /** @return members of the group, or an empty list when the remote call fails. */
    @Override
    public List<RemoteUser> findUsersByGroup(String groupId) {
        JsonNode json = callRemoteIdmService(url + "/api/idm/groups/" + encode(groupId) + "/users", adminUser, adminPassword);
        if (json != null) {
            return parseUsersInfo(json);
        }
        return new ArrayList<RemoteUser>();
    }

    /** @return matching groups, or an empty list when the remote call fails. */
    @Override
    public List<RemoteGroup> findGroupsByNameFilter(String filter) {
        JsonNode json = callRemoteIdmService(url + "/api/idm/groups?filter=" + encode(filter), adminUser, adminPassword);
        if (json != null) {
            return parseGroupsInfo(json);
        }
        return new ArrayList<RemoteGroup>();
    }

    /**
     * Performs a Basic-auth GET against the remote IDM service.
     *
     * <p>Any failure (SSL setup, transport, non-200 status, parse error) is logged and results
     * in {@code null} — callers treat that as "not found / unavailable".</p>
     *
     * @return the parsed JSON body on HTTP 200, otherwise {@code null}.
     */
    protected JsonNode callRemoteIdmService(String url, String username, String password) {
        HttpGet httpGet = new HttpGet(url);
        httpGet.setHeader(HttpHeaders.AUTHORIZATION, "Basic " + new String(
                Base64.encodeBase64((username + ":" + password).getBytes(StandardCharsets.UTF_8))));

        HttpClientBuilder clientBuilder = HttpClientBuilder.create();
        SSLConnectionSocketFactory sslsf = null;
        try {
            SSLContextBuilder builder = new SSLContextBuilder();
            builder.loadTrustMaterial(null, new TrustSelfSignedStrategy());
            // SECURITY NOTE(review): trusting self-signed certs and disabling hostname
            // verification makes this call vulnerable to MITM; acceptable only for trusted
            // internal networks — consider making this configurable.
            sslsf = new SSLConnectionSocketFactory(builder.build(),
                    SSLConnectionSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
            clientBuilder.setSSLSocketFactory(sslsf);
        } catch (Exception e) {
            // Best-effort: fall back to the default socket factory rather than failing the call.
            logger.warn("Could not configure SSL for http client", e);
        }

        CloseableHttpClient client = clientBuilder.build();
        try {
            HttpResponse response = client.execute(httpGet);
            if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                return objectMapper.readTree(response.getEntity().getContent());
            }
        } catch (Exception e) {
            logger.warn("Exception while getting token", e);
        } finally {
            if (client != null) {
                try {
                    client.close();
                } catch (IOException e) {
                    logger.warn("Exception while closing http client", e);
                }
            }
        }
        return null;
    }

    /** Parses a JSON array of users; non-array or null input yields an empty list. */
    protected List<RemoteUser> parseUsersInfo(JsonNode json) {
        List<RemoteUser> result = new ArrayList<RemoteUser>();
        if (json != null && json.isArray()) {
            ArrayNode array = (ArrayNode) json;
            for (JsonNode userJson : array) {
                result.add(parseUserInfo(userJson));
            }
        }
        return result;
    }

    /** Maps a single user JSON object (with optional groups/privileges) to a {@link RemoteUser}. */
    protected RemoteUser parseUserInfo(JsonNode json) {
        RemoteUser user = new RemoteUser();
        user.setId(json.get("id").asText());
        user.setFirstName(json.get("firstName").asText());
        user.setLastName(json.get("lastName").asText());
        user.setEmail(json.get("email").asText());
        user.setFullName(json.get("fullName").asText());

        if (json.has("groups")) {
            for (JsonNode groupNode : ((ArrayNode) json.get("groups"))) {
                user.getGroups().add(new RemoteGroup(groupNode.get("id").asText(), groupNode.get("name").asText()));
            }
        }

        if (json.has("privileges")) {
            for (JsonNode privilegeNode : ((ArrayNode) json.get("privileges"))) {
                user.getPrivileges().add(privilegeNode.asText());
            }
        }

        return user;
    }

    /** Parses a JSON array of groups; non-array or null input yields an empty list. */
    protected List<RemoteGroup> parseGroupsInfo(JsonNode json) {
        List<RemoteGroup> result = new ArrayList<RemoteGroup>();
        if (json != null && json.isArray()) {
            ArrayNode array = (ArrayNode) json;
            for (JsonNode userJson : array) {
                result.add(parseGroupInfo(userJson));
            }
        }
        return result;
    }

    /** Maps a single group JSON object to a {@link RemoteGroup}. */
    protected RemoteGroup parseGroupInfo(JsonNode json) {
        RemoteGroup group = new RemoteGroup();
        group.setId(json.get("id").asText());
        group.setName(json.get("name").asText());
        return group;
    }

    /**
     * URL-encodes a path/query segment.
     *
     * <p>Fixed: previously returned {@code null} on encoding failure while returning {@code ""}
     * for null input; callers concatenate the result directly into URLs, so the failure path now
     * also returns {@code ""} (UTF-8 is guaranteed by the JVM, so this path is unreachable in
     * practice).</p>
     */
    protected String encode(String s) {
        if (s == null) {
            return "";
        }
        try {
            return URLEncoder.encode(s, "UTF-8");
        } catch (Exception e) {
            logger.warn("Could not encode url param", e);
            return "";
        }
    }
}
package com.safecharge.biz;

import javax.validation.ConstraintViolationException;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import com.safecharge.exception.SafechargeConfigurationException;
import com.safecharge.exception.SafechargeException;
import com.safecharge.request.AccountCaptureRequest;
import com.safecharge.request.Authorize3dRequest;
import com.safecharge.request.CardDetailsRequest;
import com.safecharge.request.DccDetailsRequest;
import com.safecharge.request.GetPaymentStatusRequest;
import com.safecharge.request.GetSessionTokenRequest;
import com.safecharge.request.InitPaymentRequest;
import com.safecharge.request.McpRatesRequest;
import com.safecharge.request.OpenOrderRequest;
import com.safecharge.request.PaymentRequest;
import com.safecharge.request.RefundTransactionRequest;
import com.safecharge.request.SettleTransactionRequest;
import com.safecharge.request.Verify3dRequest;
import com.safecharge.request.VoidTransactionRequest;
import com.safecharge.response.AccountCaptureResponse;
import com.safecharge.response.Authorize3dResponse;
import com.safecharge.response.CardDetailsResponse;
import com.safecharge.response.DccDetailsResponse;
import com.safecharge.response.GetPaymentStatusResponse;
import com.safecharge.response.InitPaymentResponse;
import com.safecharge.response.McpRatesResponse;
import com.safecharge.response.OpenOrderResponse;
import com.safecharge.response.PaymentResponse;
import com.safecharge.response.RefundTransactionResponse;
import com.safecharge.response.SafechargeResponse;
import com.safecharge.response.SettleTransactionResponse;
import com.safecharge.response.Verify3dResponse;
import com.safecharge.response.VoidTransactionResponse;
import com.safecharge.util.Constants;

import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

/*
 * Copyright (c) 2007-2020 SafeCharge International Group Limited.
 */

/**
 * Unit tests for the {@code Safecharge} facade.
 * <p>
 * Each API operation is exercised by a pair of tests:
 * <ul>
 *   <li>a positive test that initializes the facade (which triggers a
 *       {@link GetSessionTokenRequest}), stubs the executor to return the operation's
 *       response type, and verifies exactly one operation request was executed;</li>
 *   <li>a negative test that calls the operation without prior initialization and
 *       expects a {@link SafechargeConfigurationException} with a fixed message.</li>
 * </ul>
 * The long runs of {@code null} arguments mirror the facade's positional-parameter
 * API; argument positions are significant — edit with care.
 */
public class SafechargeTest {

    // Request executor mock; injected into the system under test.
    @Mock
    private SafechargeRequestExecutor executor;

    @Rule
    public ExpectedException exception = ExpectedException.none();

    // System under test.
    @InjectMocks
    private Safecharge sut;

    @Before
    public void setUp() throws Exception {
        // A fresh instance per test so initialization state never leaks between tests.
        sut = new Safecharge();
        MockitoAnnotations.initMocks(this);
    }

    @Test
    public void shouldCorrectlyInitializeMerchantInfo() throws SafechargeException {
        SafechargeResponse response = mock(SafechargeResponse.class);
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(response);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);

        // Initialization must request a session token and inspect the response status.
        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(response).getStatus();
    }

    @Test
    public void shouldThrowExceptionWithGivenReasonWhenResponseStatusIsError() throws SafechargeException {
        SafechargeResponse response = mock(SafechargeResponse.class);
        when(response.getStatus()).thenReturn(Constants.APIResponseStatus.ERROR);
        when(response.getReason()).thenReturn("Invalid data.");
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(response);

        // The gateway-provided reason must be surfaced in the exception message.
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Invalid data.");

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
    }

    @Test
    public void shouldExecutePaymentRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse paymentResponse = new PaymentResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(PaymentRequest.class))).thenReturn(paymentResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        PaymentResponse response = sut.payment("userTokenId", "clientUniqueId", "clientRequestId", null, null, "BGN", "11", null, null,
                null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
                null, null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(PaymentRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionIfPaymentRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.payment("userTokenId", "clientUniqueId", "clientRequestId", null, null, "BGN", "11", null, null,
                null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
                null, null);
    }

    @Test
    public void shouldExecuteInitPaymentRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse initPaymentResponse = new InitPaymentResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(InitPaymentRequest.class))).thenReturn(initPaymentResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        InitPaymentResponse response = sut.initPayment("userTokenId", "clientUniqueId", "clientRequestId", "BGN", "11",
                null, null, null, null, null, null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(InitPaymentRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenInitPaymentRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.initPayment("userTokenId", "clientUniqueId", "clientRequestId", "BGN", "11", null, null, null, null, null, null);
    }

    @Test
    public void shouldExecuteOpenOrderRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse openOrderResponse = new OpenOrderResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(OpenOrderRequest.class))).thenReturn(openOrderResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        OpenOrderResponse response = sut.openOrder("userTokenId", "clientRequestId", "clientUniqueId", null, null, null, null, "BGN", "11",
                null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
                null, null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(OpenOrderRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenOpenOrderRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.openOrder("userTokenId", "clientRequestId", "clientUniqueId", null, null, null, null, "BGN", "11",
                null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null,
                null, null);
    }

    @Test
    public void shouldExecuteGetPaymentStatusRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse getPaymentStatusResponse = new GetPaymentStatusResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(GetPaymentStatusRequest.class))).thenReturn(getPaymentStatusResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        GetPaymentStatusResponse response = sut.getPaymentStatus();

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(GetPaymentStatusRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenGetPaymentStatusRequestIsExecutedWithoutInitializeRequstBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.getPaymentStatus();
    }

    @Test
    public void shouldExecuteVoidTransactionRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse voidTransactionResponse = new VoidTransactionResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(VoidTransactionRequest.class))).thenReturn(voidTransactionResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        VoidTransactionResponse response = sut.voidTransaction("clientRequestId", "relatedTransactionId", "11", "BGN", "authCode",
                null, null, null, null, null, null, null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(VoidTransactionRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenVoidTransactionRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.voidTransaction("clientRequestId", "relatedTransactionId", "11", "BGN", "authCode",
                null, null, null, null, null, null, null);
    }

    @Test
    public void shouldExecuteSettleTransactionRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse settleTransactionResponse = new SettleTransactionResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(SettleTransactionRequest.class))).thenReturn(settleTransactionResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        SettleTransactionResponse response = sut.settleTransaction("clientUniqueId", "clientRequestId", null, null, null, null, "11",
                "authCode", null, null, "BGN", null, null, "relatedTransactionId", null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(SettleTransactionRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenSettleTransactionRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.settleTransaction("clientUniqueId", "clientRequestId", null, null, null, null, "11",
                "authCode", null, null, "BGN", null, null, "relatedTransactionId", null);
    }

    @Test
    public void shouldExecuteRefundTransactionRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse refundTransactionResponse = new RefundTransactionResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(RefundTransactionRequest.class))).thenReturn(refundTransactionResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        RefundTransactionResponse response = sut.refundTransaction("clientUniqueId", "clientRequestId", null, "11", "authCode",
                null, "BGN", null, null, null, "relatedTransactionId", null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(RefundTransactionRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenRefundTransactionRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.refundTransaction("clientUniqueId", "clientRequestId", null, "11", "authCode",
                null, "BGN", null, null, null, "relatedTransactionId", null);
    }

    @Test
    public void shouldExecuteVerify3dRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse verify3dResponse = new Verify3dResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(Verify3dRequest.class))).thenReturn(verify3dResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        // NOTE(review): "clienRequestId" is a typo in the test fixture value; harmless, as the
        // value is opaque to the SDK, but inconsistent with the other tests.
        Verify3dResponse response = sut.verify3d("clientUniqueId", "clienRequestId", "11", "BGN", null, null, null, null,
                "relatedTransaction", null, null, null, null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(Verify3dRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenVerify3dRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.verify3d("clientUniqueId", "clienRequestId", "11", "BGN", null, null, null, null,
                "relatedTransaction", null, null, null, null);
    }

    @Test
    public void shouldExecuteAuthorize3dRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse authorize3dResponse = new Authorize3dResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(Authorize3dRequest.class))).thenReturn(authorize3dResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        Authorize3dResponse response = sut.authorize3d("usertTokenId", "clientUniqueId", "clientRequestId", null, null, "BGN", "11",
                null, null, null, null, null, null, null, null, null, null, null, null, null, "relatedTransaction",
                null, null, null, null, null, null, null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(Authorize3dRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenAuthorize3dRequestIsExecutedWithouthInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.authorize3d("usertTokenId", "clientUniqueId", "clientRequestId", null, null, "BGN", "11",
                null, null, null, null, null, null, null, null, null, null, null, null, null, "relatedTransaction",
                null, null, null, null, null, null, null);
    }

    // NOTE(review): "GetGard" below is a typo for "GetCard" in the test method name; left as-is
    // here (renaming is a code change, and some CI tooling keys off test names — confirm first).
    @Test
    public void shouldExecuteGetGardDetailsRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse getCardDetailsResponse = new CardDetailsResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(CardDetailsRequest.class))).thenReturn(getCardDetailsResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        CardDetailsResponse response = sut.getCardDetails(null, null, "123456");

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(CardDetailsRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenGetCardDetailsRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.getCardDetails(null, null, "123456");
    }

    @Test
    public void shouldExecuteGetDccDetailsRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse dccDetailsResponse = new DccDetailsResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(DccDetailsRequest.class))).thenReturn(dccDetailsResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        DccDetailsResponse response = sut.getDccDetails("clientUniqueId", "clentRequestId", "123456", null, "10", "BGN", "EUR", null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(DccDetailsRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenGetDccDetailsRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.getDccDetails("clientUniqueId", "clentRequestId", "123456", null, "10", "BGN", "EUR", null);
    }

    @Test
    public void shouldExecuteGetMcpRatesRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse mcpRatesRequest = new McpRatesResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(McpRatesRequest.class))).thenReturn(mcpRatesRequest);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        McpRatesResponse response = sut.getMcpRates("clientUniqueId", "clentRequestId", "BGN", null, null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(McpRatesRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenGetMcpRatesRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        sut.getMcpRates("clientUniqueId", "clentRequestId", "BGN", null, null);
    }

    @Test
    public void shouldExecuteAccountCaptureRequestAndReturnResponse() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        SafechargeResponse accountCaptureResponse = new AccountCaptureResponse();
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);
        when(executor.execute(any(AccountCaptureRequest.class))).thenReturn(accountCaptureResponse);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        AccountCaptureResponse response = sut.accountCapture("clientRequestId", "userTokenId", "paymentMethod", "BGN", "BG", "BG", null);

        verify(executor).execute(any(GetSessionTokenRequest.class));
        verify(executor).execute(any(AccountCaptureRequest.class));
        verifyNoMoreInteractions(executor);
        verify(sessionResponse).getStatus();
        verify(sessionResponse).getSessionToken();
        assertNotNull(response);
    }

    @Test
    public void shouldThrowExceptionWhenAccountCaptureRequestIsExecutedWithoutInitializeRequestBeforehand() throws SafechargeException {
        exception.expect(SafechargeConfigurationException.class);
        exception.expectMessage("Missing mandatory info for execution of payments! Please run initialization method before creating payments.");

        // NOTE(review): argument list differs from the positive test above ("BG", null vs
        // "BG", "BG") — harmless for this exception path, but verify it was intentional.
        sut.accountCapture("clientRequestId", "userTokenId", "paymentMethod", "BGN", "BG", null, null);
    }

    @Test
    public void shouldThrowExceptionIfARequestIsExecutedWithoutSomeMandatoryField() throws SafechargeException {
        SafechargeResponse sessionResponse = mock(SafechargeResponse.class);
        when(sessionResponse.getSessionToken()).thenReturn("sessionToken");
        when(executor.execute(any(GetSessionTokenRequest.class))).thenReturn(sessionResponse);

        // Bean-validation constraints on the request must reject an all-null card lookup.
        exception.expect(ConstraintViolationException.class);

        sut.initialize("merchantKey", "id", "siteId", "localhost", Constants.HashAlgorithm.SHA256);
        sut.getCardDetails(null, null, null);
    }
}
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.server.display;

import android.graphics.Rect;
import android.view.Display;
import android.view.DisplayInfo;
import android.view.Surface;

import java.io.PrintWriter;
import java.util.Arrays;
import java.util.List;

import libcore.util.Objects;

/**
 * Describes how a logical display is configured.
 * <p>
 * At this time, we only support logical displays that are coupled to a particular
 * primary display device from which the logical display derives its basic properties
 * such as its size, density and refresh rate.
 * </p><p>
 * A logical display may be mirrored onto multiple display devices in addition to its
 * primary display device.  Note that the contents of a logical display may not
 * always be visible, even on its primary display device, such as in the case where
 * the primary display device is currently mirroring content from a different
 * logical display.
 * </p><p>
 * This object is designed to encapsulate as much of the policy of logical
 * displays as possible.  The idea is to make it easy to implement new kinds of
 * logical displays mostly by making local changes to this class.
 * </p><p>
 * Note: The display manager architecture does not actually require logical displays
 * to be associated with any individual display device.  Logical displays and
 * display devices are orthogonal concepts.  Some mapping will exist between
 * logical displays and display devices but it can be many-to-many and
 * some might have no relation at all.
 * </p><p>
 * Logical displays are guarded by the {@link DisplayManagerService.SyncRoot} lock.
 * </p>
 */
final class LogicalDisplay {
    // Base display metrics derived from the primary display device; the window manager's
    // overrides are layered on top of this in getDisplayInfoLocked().
    private final DisplayInfo mBaseDisplayInfo = new DisplayInfo();

    // The layer stack we use when the display has been blanked to prevent any
    // of its content from appearing.
    private static final int BLANK_LAYER_STACK = -1;

    private final int mDisplayId;
    private final int mLayerStack;
    private DisplayInfo mOverrideDisplayInfo; // set by the window manager
    // Cached merge of base + override info; null means it must be recomputed.
    private DisplayInfo mInfo;

    // The display device that this logical display is based on and which
    // determines the base metrics that it uses.
    private DisplayDevice mPrimaryDisplayDevice;
    private DisplayDeviceInfo mPrimaryDisplayDeviceInfo;

    // True if the logical display has unique content.
    private boolean mHasContent;

    // Pending mode / color transform requests, applied in
    // configureDisplayInTransactionLocked() for the primary device.
    private int mRequestedModeId;
    private int mRequestedColorTransformId;

    // The display offsets to apply to the display projection.
    private int mDisplayOffsetX;
    private int mDisplayOffsetY;

    // Temporary rectangle used when needed.
    private final Rect mTempLayerStackRect = new Rect();
    private final Rect mTempDisplayRect = new Rect();

    public LogicalDisplay(int displayId, int layerStack, DisplayDevice primaryDisplayDevice) {
        mDisplayId = displayId;
        mLayerStack = layerStack;
        mPrimaryDisplayDevice = primaryDisplayDevice;
    }

    /**
     * Gets the logical display id of this logical display.
     *
     * @return The logical display id.
     */
    public int getDisplayIdLocked() {
        return mDisplayId;
    }

    /**
     * Gets the primary display device associated with this logical display.
     *
     * @return The primary display device.
     */
    public DisplayDevice getPrimaryDisplayDeviceLocked() {
        return mPrimaryDisplayDevice;
    }

    /**
     * Gets information about the logical display.
     *
     * @return The device info, which should be treated as immutable by the caller.
     * The logical display should allocate a new display info object whenever
     * the data changes.
     */
    public DisplayInfo getDisplayInfoLocked() {
        if (mInfo == null) {
            // Rebuild the cached info: start from the device-derived base, then apply
            // the subset of fields the window manager is allowed to override.
            mInfo = new DisplayInfo();
            mInfo.copyFrom(mBaseDisplayInfo);
            if (mOverrideDisplayInfo != null) {
                mInfo.appWidth = mOverrideDisplayInfo.appWidth;
                mInfo.appHeight = mOverrideDisplayInfo.appHeight;
                mInfo.smallestNominalAppWidth = mOverrideDisplayInfo.smallestNominalAppWidth;
                mInfo.smallestNominalAppHeight = mOverrideDisplayInfo.smallestNominalAppHeight;
                mInfo.largestNominalAppWidth = mOverrideDisplayInfo.largestNominalAppWidth;
                mInfo.largestNominalAppHeight = mOverrideDisplayInfo.largestNominalAppHeight;
                mInfo.logicalWidth = mOverrideDisplayInfo.logicalWidth;
                mInfo.logicalHeight = mOverrideDisplayInfo.logicalHeight;
                mInfo.overscanLeft = mOverrideDisplayInfo.overscanLeft;
                mInfo.overscanTop = mOverrideDisplayInfo.overscanTop;
                mInfo.overscanRight = mOverrideDisplayInfo.overscanRight;
                mInfo.overscanBottom = mOverrideDisplayInfo.overscanBottom;
                mInfo.rotation = mOverrideDisplayInfo.rotation;
                mInfo.logicalDensityDpi = mOverrideDisplayInfo.logicalDensityDpi;
                mInfo.physicalXDpi = mOverrideDisplayInfo.physicalXDpi;
                mInfo.physicalYDpi = mOverrideDisplayInfo.physicalYDpi;
            }
        }
        return mInfo;
    }

    /**
     * Sets overridden logical display information from the window manager.
     * This method can be used to adjust application insets, rotation, and other
     * properties that the window manager takes care of.
     *
     * @param info The logical display information, may be null.
     * @return True if the overridden info actually changed (and the cached
     * display info was invalidated), false otherwise.
     */
    public boolean setDisplayInfoOverrideFromWindowManagerLocked(DisplayInfo info) {
        if (info != null) {
            if (mOverrideDisplayInfo == null) {
                mOverrideDisplayInfo = new DisplayInfo(info);
                mInfo = null;
                return true;
            }
            if (!mOverrideDisplayInfo.equals(info)) {
                mOverrideDisplayInfo.copyFrom(info);
                mInfo = null;
                return true;
            }
        } else if (mOverrideDisplayInfo != null) {
            mOverrideDisplayInfo = null;
            mInfo = null;
            return true;
        }
        return false;
    }

    /**
     * Returns true if the logical display is in a valid state.
     * This method should be checked after calling {@link #updateLocked} to handle the
     * case where a logical display should be removed because all of its associated
     * display devices are gone or if it is otherwise no longer needed.
     *
     * @return True if the logical display is still valid.
     */
    public boolean isValidLocked() {
        return mPrimaryDisplayDevice != null;
    }

    /**
     * Updates the state of the logical display based on the available display devices.
     * The logical display might become invalid if it is attached to a display device
     * that no longer exists.
     *
     * @param devices The list of all connected display devices.
     */
    public void updateLocked(List<DisplayDevice> devices) {
        // Nothing to update if already invalid.
        if (mPrimaryDisplayDevice == null) {
            return;
        }

        // Check whether logical display has become invalid.
        if (!devices.contains(mPrimaryDisplayDevice)) {
            mPrimaryDisplayDevice = null;
            return;
        }

        // Bootstrap the logical display using its associated primary physical display.
        // We might use more elaborate configurations later.  It's possible that the
        // configuration of several physical displays might be used to determine the
        // logical display that they are sharing.  (eg. Adjust size for pixel-perfect
        // mirroring over HDMI.)
        DisplayDeviceInfo deviceInfo = mPrimaryDisplayDevice.getDisplayDeviceInfoLocked();
        if (!Objects.equal(mPrimaryDisplayDeviceInfo, deviceInfo)) {
            // Device info changed: recompute the base display info from scratch.
            mBaseDisplayInfo.layerStack = mLayerStack;
            mBaseDisplayInfo.flags = 0;
            if ((deviceInfo.flags & DisplayDeviceInfo.FLAG_SUPPORTS_PROTECTED_BUFFERS) != 0) {
                mBaseDisplayInfo.flags |= Display.FLAG_SUPPORTS_PROTECTED_BUFFERS;
            }
            if ((deviceInfo.flags & DisplayDeviceInfo.FLAG_SECURE) != 0) {
                mBaseDisplayInfo.flags |= Display.FLAG_SECURE;
            }
            if ((deviceInfo.flags & DisplayDeviceInfo.FLAG_PRIVATE) != 0) {
                mBaseDisplayInfo.flags |= Display.FLAG_PRIVATE;
            }
            if ((deviceInfo.flags & DisplayDeviceInfo.FLAG_PRESENTATION) != 0) {
                mBaseDisplayInfo.flags |= Display.FLAG_PRESENTATION;
            }
            if ((deviceInfo.flags & DisplayDeviceInfo.FLAG_ROUND) != 0) {
                mBaseDisplayInfo.flags |= Display.FLAG_ROUND;
            }
            mBaseDisplayInfo.type = deviceInfo.type;
            mBaseDisplayInfo.address = deviceInfo.address;
            mBaseDisplayInfo.name = deviceInfo.name;
            mBaseDisplayInfo.uniqueId = deviceInfo.uniqueId;
            mBaseDisplayInfo.appWidth = deviceInfo.width;
            mBaseDisplayInfo.appHeight = deviceInfo.height;
            mBaseDisplayInfo.logicalWidth = deviceInfo.width;
            mBaseDisplayInfo.logicalHeight = deviceInfo.height;
            mBaseDisplayInfo.rotation = Surface.ROTATION_0;
            mBaseDisplayInfo.modeId = deviceInfo.modeId;
            mBaseDisplayInfo.defaultModeId = deviceInfo.defaultModeId;
            // Defensive copies: the arrays must not alias the device's own arrays.
            mBaseDisplayInfo.supportedModes = Arrays.copyOf(
                    deviceInfo.supportedModes, deviceInfo.supportedModes.length);
            mBaseDisplayInfo.colorTransformId = deviceInfo.colorTransformId;
            mBaseDisplayInfo.defaultColorTransformId = deviceInfo.defaultColorTransformId;
            mBaseDisplayInfo.supportedColorTransforms = Arrays.copyOf(
                    deviceInfo.supportedColorTransforms,
                    deviceInfo.supportedColorTransforms.length);
            mBaseDisplayInfo.logicalDensityDpi = deviceInfo.densityDpi;
            mBaseDisplayInfo.physicalXDpi = deviceInfo.xDpi;
            mBaseDisplayInfo.physicalYDpi = deviceInfo.yDpi;
            mBaseDisplayInfo.appVsyncOffsetNanos = deviceInfo.appVsyncOffsetNanos;
            mBaseDisplayInfo.presentationDeadlineNanos = deviceInfo.presentationDeadlineNanos;
            mBaseDisplayInfo.state = deviceInfo.state;
            mBaseDisplayInfo.smallestNominalAppWidth = deviceInfo.width;
            mBaseDisplayInfo.smallestNominalAppHeight = deviceInfo.height;
            mBaseDisplayInfo.largestNominalAppWidth = deviceInfo.width;
            mBaseDisplayInfo.largestNominalAppHeight = deviceInfo.height;
            mBaseDisplayInfo.ownerUid = deviceInfo.ownerUid;
            mBaseDisplayInfo.ownerPackageName = deviceInfo.ownerPackageName;

            mPrimaryDisplayDeviceInfo = deviceInfo;
            mInfo = null; // invalidate the cached merged info
        }
    }

    /**
     * Applies the layer stack and transformation to the given display device
     * so that it shows the contents of this logical display.
     *
     * We know that the given display device is only ever showing the contents of
     * a single logical display, so this method is expected to blow away all of its
     * transformation properties to make it happen regardless of what the
     * display device was previously showing.
     *
     * The caller must have an open Surface transaction.
     *
     * The display device may not be the primary display device, in the case
     * where the display is being mirrored.
     *
     * @param device The display device to modify.
     * @param isBlanked True if the device is being blanked.
     */
    public void configureDisplayInTransactionLocked(DisplayDevice device,
            boolean isBlanked) {
        // Set the layer stack.
        device.setLayerStackInTransactionLocked(isBlanked ? BLANK_LAYER_STACK : mLayerStack);

        // Set the color transform and mode.
        if (device == mPrimaryDisplayDevice) {
            device.requestColorTransformAndModeInTransactionLocked(
                    mRequestedColorTransformId, mRequestedModeId);
        } else {
            device.requestColorTransformAndModeInTransactionLocked(0, 0);  // Revert to default.
        }

        // Only grab the display info now as it may have been changed based on the requests above.
        final DisplayInfo displayInfo = getDisplayInfoLocked();
        final DisplayDeviceInfo displayDeviceInfo = device.getDisplayDeviceInfoLocked();

        // Set the viewport.
        // This is the area of the logical display that we intend to show on the
        // display device.  For now, it is always the full size of the logical display.
        mTempLayerStackRect.set(0, 0, displayInfo.logicalWidth, displayInfo.logicalHeight);

        // Set the orientation.
        // The orientation specifies how the physical coordinate system of the display
        // is rotated when the contents of the logical display are rendered.
        int orientation = Surface.ROTATION_0;
        if ((displayDeviceInfo.flags & DisplayDeviceInfo.FLAG_ROTATES_WITH_CONTENT) != 0) {
            orientation = displayInfo.rotation;
        }

        // Apply the physical rotation of the display device itself.
        // (The four Surface.ROTATION_* constants are 0..3, hence the mod 4.)
        orientation = (orientation + displayDeviceInfo.rotation) % 4;

        // Set the frame.
        // The frame specifies the rotated physical coordinates into which the viewport
        // is mapped.  We need to take care to preserve the aspect ratio of the viewport.
        // Currently we maximize the area to fill the display, but we could try to be
        // more clever and match resolutions.
        boolean rotated = (orientation == Surface.ROTATION_90
                || orientation == Surface.ROTATION_270);
        int physWidth = rotated ? displayDeviceInfo.height : displayDeviceInfo.width;
        int physHeight = rotated ? displayDeviceInfo.width : displayDeviceInfo.height;

        // Determine whether the width or height is more constrained to be scaled.
        //    physWidth / displayInfo.logicalWidth    => letter box
        // or physHeight / displayInfo.logicalHeight  => pillar box
        //
        // We avoid a division (and possible floating point imprecision) here by
        // multiplying the fractions by the product of their denominators before
        // comparing them.
        int displayRectWidth, displayRectHeight;
        if ((displayInfo.flags & Display.FLAG_SCALING_DISABLED) != 0) {
            displayRectWidth = displayInfo.logicalWidth;
            displayRectHeight = displayInfo.logicalHeight;
        } else if (physWidth * displayInfo.logicalHeight
                < physHeight * displayInfo.logicalWidth) {
            // Letter box.
            displayRectWidth = physWidth;
            displayRectHeight = displayInfo.logicalHeight * physWidth / displayInfo.logicalWidth;
        } else {
            // Pillar box.
            displayRectWidth = displayInfo.logicalWidth * physHeight / displayInfo.logicalHeight;
            displayRectHeight = physHeight;
        }
        // Center the frame within the physical display, then apply the configured offsets.
        int displayRectTop = (physHeight - displayRectHeight) / 2;
        int displayRectLeft = (physWidth - displayRectWidth) / 2;
        mTempDisplayRect.set(displayRectLeft, displayRectTop,
                displayRectLeft + displayRectWidth, displayRectTop + displayRectHeight);

        mTempDisplayRect.left += mDisplayOffsetX;
        mTempDisplayRect.right += mDisplayOffsetX;
        mTempDisplayRect.top += mDisplayOffsetY;
        mTempDisplayRect.bottom += mDisplayOffsetY;
        device.setProjectionInTransactionLocked(orientation, mTempLayerStackRect, mTempDisplayRect);
    }

    /**
     * Returns true if the logical display has unique content.
     * <p>
     * If the display has unique content then we will try to ensure that it is
     * visible on at least its primary display device.  Otherwise we will ignore the
     * logical display and perhaps show mirrored content on the primary display device.
     * </p>
     *
     * @return True if the display has unique content.
     */
    public boolean hasContentLocked() {
        return mHasContent;
    }

    /**
     * Sets whether the logical display has unique content.
     *
     * @param hasContent True if the display has unique content.
     */
    public void setHasContentLocked(boolean hasContent) {
        mHasContent = hasContent;
    }

    /**
     * Requests the given mode.
     */
    public void setRequestedModeIdLocked(int modeId) {
        mRequestedModeId = modeId;
    }

    /**
     * Returns the pending requested mode.
     */
    public int getRequestedModeIdLocked() {
        return mRequestedModeId;
    }

    /**
     * Requests the given color transform.
     */
    public void setRequestedColorTransformIdLocked(int colorTransformId) {
        mRequestedColorTransformId = colorTransformId;
    }

    /** Returns the pending requested color transform. */
    public int getRequestedColorTransformIdLocked() {
        return mRequestedColorTransformId;
    }

    /**
     * Gets the burn-in offset in X.
*/ public int getDisplayOffsetXLocked() { return mDisplayOffsetX; } /** * Gets the burn-in offset in Y. */ public int getDisplayOffsetYLocked() { return mDisplayOffsetY; } /** * Sets the burn-in offsets. */ public void setDisplayOffsetsLocked(int x, int y) { mDisplayOffsetX = x; mDisplayOffsetY = y; } public void dumpLocked(PrintWriter pw) { pw.println("mDisplayId=" + mDisplayId); pw.println("mLayerStack=" + mLayerStack); pw.println("mHasContent=" + mHasContent); pw.println("mRequestedMode=" + mRequestedModeId); pw.println("mRequestedColorTransformId=" + mRequestedColorTransformId); pw.println("mDisplayOffset=(" + mDisplayOffsetX + ", " + mDisplayOffsetY + ")"); pw.println("mPrimaryDisplayDevice=" + (mPrimaryDisplayDevice != null ? mPrimaryDisplayDevice.getNameLocked() : "null")); pw.println("mBaseDisplayInfo=" + mBaseDisplayInfo); pw.println("mOverrideDisplayInfo=" + mOverrideDisplayInfo); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.adapter.druid; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.Pair; import org.apache.calcite.util.TimestampString; import com.fasterxml.jackson.core.JsonGenerator; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.List; import java.util.Locale; import java.util.TimeZone; import javax.annotation.Nullable; /** * Filter element of a Druid "groupBy" or "topN" query. 
*/ abstract class DruidJsonFilter implements DruidJson { /** * @param rexNode rexNode to translate to Druid Json Filter * @param rowType rowType associated to rexNode * @param druidQuery druid query * * @return Druid Json filter or null if it can not translate */ @Nullable private static DruidJsonFilter toEqualityKindDruidFilter(RexNode rexNode, RelDataType rowType, DruidQuery druidQuery) { if (rexNode.getKind() != SqlKind.EQUALS && rexNode.getKind() != SqlKind.NOT_EQUALS) { throw new AssertionError( DruidQuery.format("Expecting EQUALS or NOT_EQUALS but got [%s]", rexNode.getKind())); } final RexCall rexCall = (RexCall) rexNode; if (rexCall.getOperands().size() < 2) { return null; } final RexLiteral rexLiteral; final RexNode refNode; final RexNode lhs = rexCall.getOperands().get(0); final RexNode rhs = rexCall.getOperands().get(1); if (lhs.getKind() == SqlKind.LITERAL && rhs.getKind() != SqlKind.LITERAL) { rexLiteral = (RexLiteral) lhs; refNode = rhs; } else if (rhs.getKind() == SqlKind.LITERAL && lhs.getKind() != SqlKind.LITERAL) { rexLiteral = (RexLiteral) rhs; refNode = lhs; } else { // must have at least one literal return null; } if (RexLiteral.isNullLiteral(rexLiteral)) { // we are not handling is NULL filter here thus we bail out if Literal is null return null; } final String literalValue = toDruidLiteral(rexLiteral, rowType, druidQuery); if (literalValue == null) { // can not translate literal better bail out return null; } final boolean isNumeric = refNode.getType().getFamily() == SqlTypeFamily.NUMERIC || rexLiteral.getType().getFamily() == SqlTypeFamily.NUMERIC; final Pair<String, ExtractionFunction> druidColumn = DruidQuery.toDruidColumn(refNode, rowType, druidQuery); final String columnName = druidColumn.left; final ExtractionFunction extractionFunction = druidColumn.right; if (columnName == null) { // no column name better bail out. 
return null; } final DruidJsonFilter partialFilter; if (isNumeric) { //need bound filter since it one of operands is numeric partialFilter = new JsonBound(columnName, literalValue, false, literalValue, false, true, extractionFunction); } else { partialFilter = new JsonSelector(columnName, literalValue, extractionFunction); } if (rexNode.getKind() == SqlKind.EQUALS) { return partialFilter; } return toNotDruidFilter(partialFilter); } /** * @param rexNode rexNode to translate * @param rowType row type associated to Filter * @param druidQuery druid query * * @return valid Druid Json Bound Filter or null if it can not translate the rexNode. */ @Nullable private static DruidJsonFilter toBoundDruidFilter(RexNode rexNode, RelDataType rowType, DruidQuery druidQuery) { final RexCall rexCall = (RexCall) rexNode; final RexLiteral rexLiteral; if (rexCall.getOperands().size() < 2) { return null; } final RexNode refNode; final RexNode lhs = rexCall.getOperands().get(0); final RexNode rhs = rexCall.getOperands().get(1); final boolean lhsIsRef; if (lhs.getKind() == SqlKind.LITERAL && rhs.getKind() != SqlKind.LITERAL) { rexLiteral = (RexLiteral) lhs; refNode = rhs; lhsIsRef = false; } else if (rhs.getKind() == SqlKind.LITERAL && lhs.getKind() != SqlKind.LITERAL) { rexLiteral = (RexLiteral) rhs; refNode = lhs; lhsIsRef = true; } else { // must have at least one literal return null; } if (RexLiteral.isNullLiteral(rexLiteral)) { // we are not handling is NULL filter here thus we bail out if Literal is null return null; } final String literalValue = DruidJsonFilter.toDruidLiteral(rexLiteral, rowType, druidQuery); if (literalValue == null) { // can not translate literal better bail out return null; } final boolean isNumeric = refNode.getType().getFamily() == SqlTypeFamily.NUMERIC || rexLiteral.getType().getFamily() == SqlTypeFamily.NUMERIC; final Pair<String, ExtractionFunction> druidColumn = DruidQuery.toDruidColumn(refNode, rowType, druidQuery); final String columnName = 
druidColumn.left; final ExtractionFunction extractionFunction = druidColumn.right; if (columnName == null) { // no column name better bail out. return null; } switch (rexCall.getKind()) { case LESS_THAN_OR_EQUAL: case LESS_THAN: if (lhsIsRef) { return new JsonBound(columnName, null, false, literalValue, rexCall.getKind() == SqlKind.LESS_THAN, isNumeric, extractionFunction); } else { return new JsonBound(columnName, literalValue, rexCall.getKind() == SqlKind.LESS_THAN, null, false, isNumeric, extractionFunction); } case GREATER_THAN_OR_EQUAL: case GREATER_THAN: if (!lhsIsRef) { return new JsonBound(columnName, null, false, literalValue, rexCall.getKind() == SqlKind.GREATER_THAN, isNumeric, extractionFunction); } else { return new JsonBound(columnName, literalValue, rexCall.getKind() == SqlKind.GREATER_THAN, null, false, isNumeric, extractionFunction); } default: return null; } } /** * @param rexNode rexNode to translate to Druid literal equivalante * @param rowType rowType associated to rexNode * @param druidQuery druid Query * * @return non null string or null if it can not translate to valid Druid equivalent */ @Nullable private static String toDruidLiteral(RexNode rexNode, RelDataType rowType, DruidQuery druidQuery) { final SimpleDateFormat dateFormatter = new SimpleDateFormat( TimeExtractionFunction.ISO_TIME_FORMAT, Locale.ROOT); final String timeZone = druidQuery.getConnectionConfig().timeZone(); if (timeZone != null) { dateFormatter.setTimeZone(TimeZone.getTimeZone(timeZone)); } final String val; final RexLiteral rhsLiteral = (RexLiteral) rexNode; if (SqlTypeName.NUMERIC_TYPES.contains(rhsLiteral.getTypeName())) { val = String.valueOf(RexLiteral.value(rhsLiteral)); } else if (SqlTypeName.CHAR_TYPES.contains(rhsLiteral.getTypeName())) { val = String.valueOf(RexLiteral.stringValue(rhsLiteral)); } else if (SqlTypeName.TIMESTAMP == rhsLiteral.getTypeName() || SqlTypeName.DATE == rhsLiteral .getTypeName() || SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE == 
rhsLiteral.getTypeName()) { TimestampString timestampString = DruidDateTimeUtils .literalValue(rexNode, TimeZone.getTimeZone(timeZone)); if (timestampString == null) { throw new AssertionError( "Cannot translate Literal" + rexNode + " of type " + rhsLiteral.getTypeName() + " to TimestampString"); } //@TODO this is unnecessary we can send time as Long (eg millis since epoch) to druid val = dateFormatter.format(timestampString.getMillisSinceEpoch()); } else { // Don't know how to filter on this kind of literal. val = null; } return val; } @Nullable private static DruidJsonFilter toIsNullKindDruidFilter(RexNode rexNode, RelDataType rowType, DruidQuery druidQuery) { if (rexNode.getKind() != SqlKind.IS_NULL && rexNode.getKind() != SqlKind.IS_NOT_NULL) { throw new AssertionError( DruidQuery.format("Expecting IS_NULL or IS_NOT_NULL but got [%s]", rexNode.getKind())); } final RexCall rexCall = (RexCall) rexNode; final RexNode refNode = rexCall.getOperands().get(0); Pair<String, ExtractionFunction> druidColumn = DruidQuery .toDruidColumn(refNode, rowType, druidQuery); final String columnName = druidColumn.left; final ExtractionFunction extractionFunction = druidColumn.right; if (columnName == null) { return null; } if (rexNode.getKind() == SqlKind.IS_NOT_NULL) { return toNotDruidFilter(new JsonSelector(columnName, null, extractionFunction)); } return new JsonSelector(columnName, null, extractionFunction); } @Nullable private static DruidJsonFilter toInKindDruidFilter(RexNode e, RelDataType rowType, DruidQuery druidQuery) { if (e.getKind() != SqlKind.IN && e.getKind() != SqlKind.NOT_IN) { throw new AssertionError( DruidQuery.format("Expecting IN or NOT IN but got [%s]", e.getKind())); } ImmutableList.Builder<String> listBuilder = ImmutableList.builder(); for (RexNode rexNode : ((RexCall) e).getOperands()) { if (rexNode.getKind() == SqlKind.LITERAL) { String value = toDruidLiteral(rexNode, rowType, druidQuery); if (value == null) { return null; } listBuilder.add(value); } } 
Pair<String, ExtractionFunction> druidColumn = DruidQuery .toDruidColumn(((RexCall) e).getOperands().get(0), rowType, druidQuery); final String columnName = druidColumn.left; final ExtractionFunction extractionFunction = druidColumn.right; if (columnName == null) { return null; } if (e.getKind() != SqlKind.NOT_IN) { return new DruidJsonFilter.JsonInFilter(columnName, listBuilder.build(), extractionFunction); } else { return toNotDruidFilter( new DruidJsonFilter.JsonInFilter(columnName, listBuilder.build(), extractionFunction)); } } @Nullable protected static DruidJsonFilter toNotDruidFilter(DruidJsonFilter druidJsonFilter) { if (druidJsonFilter == null) { return null; } return new JsonCompositeFilter(Type.NOT, druidJsonFilter); } @Nullable private static DruidJsonFilter toBetweenDruidFilter(RexNode rexNode, RelDataType rowType, DruidQuery query) { if (rexNode.getKind() != SqlKind.BETWEEN) { return null; } final RexCall rexCall = (RexCall) rexNode; if (rexCall.getOperands().size() < 4) { return null; } // BETWEEN (ASYMMETRIC, REF, 'lower-bound', 'upper-bound') final RexNode refNode = rexCall.getOperands().get(1); final RexNode lhs = rexCall.getOperands().get(2); final RexNode rhs = rexCall.getOperands().get(3); final String lhsLiteralValue = toDruidLiteral(lhs, rowType, query); final String rhsLiteralValue = toDruidLiteral(rhs, rowType, query); if (lhsLiteralValue == null || rhsLiteralValue == null) { return null; } final boolean isNumeric = lhs.getType().getFamily() == SqlTypeFamily.NUMERIC || lhs.getType().getFamily() == SqlTypeFamily.NUMERIC; final Pair<String, ExtractionFunction> druidColumn = DruidQuery .toDruidColumn(refNode, rowType, query); final String columnName = druidColumn.left; final ExtractionFunction extractionFunction = druidColumn.right; if (columnName == null) { return null; } return new JsonBound(columnName, lhsLiteralValue, false, rhsLiteralValue, false, isNumeric, extractionFunction); } @Nullable private static DruidJsonFilter 
toSimpleDruidFilter(RexNode e, RelDataType rowType, DruidQuery druidQuery) { switch (e.getKind()) { case EQUALS: case NOT_EQUALS: return toEqualityKindDruidFilter(e, rowType, druidQuery); case GREATER_THAN: case GREATER_THAN_OR_EQUAL: case LESS_THAN: case LESS_THAN_OR_EQUAL: return toBoundDruidFilter(e, rowType, druidQuery); case BETWEEN: return toBetweenDruidFilter(e, rowType, druidQuery); case IN: case NOT_IN: return toInKindDruidFilter(e, rowType, druidQuery); case IS_NULL: case IS_NOT_NULL: return toIsNullKindDruidFilter(e, rowType, druidQuery); default: return null; } } /** * @param rexNode rexNode to translate to Druid Filter * @param rowType rowType of filter input * @param druidQuery Druid query * * @return Druid Json Filters or null when can not translate to valid Druid Filters. */ @Nullable static DruidJsonFilter toDruidFilters(final RexNode rexNode, RelDataType rowType, DruidQuery druidQuery) { if (rexNode.isAlwaysTrue()) { return JsonExpressionFilter.alwaysTrue(); } if (rexNode.isAlwaysFalse()) { return JsonExpressionFilter.alwaysFalse(); } switch (rexNode.getKind()) { case IS_TRUE: case IS_NOT_FALSE: return toDruidFilters(Iterables.getOnlyElement(((RexCall) rexNode).getOperands()), rowType, druidQuery); case IS_NOT_TRUE: case IS_FALSE: final DruidJsonFilter simpleFilter = toDruidFilters(Iterables .getOnlyElement(((RexCall) rexNode).getOperands()), rowType, druidQuery); return simpleFilter != null ? 
new JsonCompositeFilter(Type.NOT, simpleFilter) : simpleFilter; case AND: case OR: case NOT: final RexCall call = (RexCall) rexNode; final List<DruidJsonFilter> jsonFilters = Lists.newArrayList(); for (final RexNode e : call.getOperands()) { final DruidJsonFilter druidFilter = toDruidFilters(e, rowType, druidQuery); if (druidFilter == null) { return null; } jsonFilters.add(druidFilter); } return new JsonCompositeFilter(Type.valueOf(rexNode.getKind().name()), jsonFilters); } final DruidJsonFilter simpleLeafFilter = toSimpleDruidFilter(rexNode, rowType, druidQuery); return simpleLeafFilter == null ? toDruidExpressionFilter(rexNode, rowType, druidQuery) : simpleLeafFilter; } @Nullable private static DruidJsonFilter toDruidExpressionFilter(RexNode rexNode, RelDataType rowType, DruidQuery query) { final String expression = DruidExpressions.toDruidExpression(rexNode, rowType, query); return expression == null ? null : new JsonExpressionFilter(expression); } /** * Supported filter types */ protected enum Type { AND, OR, NOT, SELECTOR, IN, BOUND, EXPRESSION; public String lowercase() { return name().toLowerCase(Locale.ROOT); } } protected final Type type; private DruidJsonFilter(Type type) { this.type = type; } /** * Druid Expression filter. */ public static class JsonExpressionFilter extends DruidJsonFilter { private final String expression; JsonExpressionFilter(String expression) { super(Type.EXPRESSION); this.expression = Preconditions.checkNotNull(expression); } @Override public void write(JsonGenerator generator) throws IOException { generator.writeStartObject(); generator.writeStringField("type", type.lowercase()); generator.writeStringField("expression", expression); generator.writeEndObject(); } /** * We need to push to Druid an expression that always evaluates to true. */ private static JsonExpressionFilter alwaysTrue() { return new JsonExpressionFilter("1 == 1"); } /** * We need to push to Druid an expression that always evaluates to false. 
*/ private static JsonExpressionFilter alwaysFalse() { return new JsonExpressionFilter("1 == 2"); } } /** * Equality filter. */ private static class JsonSelector extends DruidJsonFilter { private final String dimension; private final String value; private final ExtractionFunction extractionFunction; private JsonSelector(String dimension, String value, ExtractionFunction extractionFunction) { super(Type.SELECTOR); this.dimension = dimension; this.value = value; this.extractionFunction = extractionFunction; } public void write(JsonGenerator generator) throws IOException { generator.writeStartObject(); generator.writeStringField("type", type.lowercase()); generator.writeStringField("dimension", dimension); generator.writeStringField("value", value); DruidQuery.writeFieldIf(generator, "extractionFn", extractionFunction); generator.writeEndObject(); } } /** * Bound filter. */ @VisibleForTesting protected static class JsonBound extends DruidJsonFilter { private final String dimension; private final String lower; private final boolean lowerStrict; private final String upper; private final boolean upperStrict; private final boolean alphaNumeric; private final ExtractionFunction extractionFunction; protected JsonBound(String dimension, String lower, boolean lowerStrict, String upper, boolean upperStrict, boolean alphaNumeric, ExtractionFunction extractionFunction) { super(Type.BOUND); this.dimension = dimension; this.lower = lower; this.lowerStrict = lowerStrict; this.upper = upper; this.upperStrict = upperStrict; this.alphaNumeric = alphaNumeric; this.extractionFunction = extractionFunction; } public void write(JsonGenerator generator) throws IOException { generator.writeStartObject(); generator.writeStringField("type", type.lowercase()); generator.writeStringField("dimension", dimension); if (lower != null) { generator.writeStringField("lower", lower); generator.writeBooleanField("lowerStrict", lowerStrict); } if (upper != null) { generator.writeStringField("upper", 
upper); generator.writeBooleanField("upperStrict", upperStrict); } if (alphaNumeric) { generator.writeStringField("ordering", "numeric"); } else { generator.writeStringField("ordering", "lexicographic"); } DruidQuery.writeFieldIf(generator, "extractionFn", extractionFunction); generator.writeEndObject(); } } /** * Filter that combines other filters using a boolean operator. */ private static class JsonCompositeFilter extends DruidJsonFilter { private final List<? extends DruidJsonFilter> fields; private JsonCompositeFilter(Type type, Iterable<? extends DruidJsonFilter> fields) { super(type); this.fields = ImmutableList.copyOf(fields); } private JsonCompositeFilter(Type type, DruidJsonFilter... fields) { this(type, ImmutableList.copyOf(fields)); } public void write(JsonGenerator generator) throws IOException { generator.writeStartObject(); generator.writeStringField("type", type.lowercase()); switch (type) { case NOT: DruidQuery.writeField(generator, "field", fields.get(0)); break; default: DruidQuery.writeField(generator, "fields", fields); } generator.writeEndObject(); } } /** * IN filter. 
*/ protected static class JsonInFilter extends DruidJsonFilter { private final String dimension; private final List<String> values; private final ExtractionFunction extractionFunction; protected JsonInFilter(String dimension, List<String> values, ExtractionFunction extractionFunction) { super(Type.IN); this.dimension = dimension; this.values = values; this.extractionFunction = extractionFunction; } public void write(JsonGenerator generator) throws IOException { generator.writeStartObject(); generator.writeStringField("type", type.lowercase()); generator.writeStringField("dimension", dimension); DruidQuery.writeField(generator, "values", values); DruidQuery.writeFieldIf(generator, "extractionFn", extractionFunction); generator.writeEndObject(); } } public static DruidJsonFilter getSelectorFilter(String column, String value, ExtractionFunction extractionFunction) { Preconditions.checkNotNull(column); return new JsonSelector(column, value, extractionFunction); } /** * Druid Having Filter spec */ protected static class JsonDimHavingFilter implements DruidJson { private final DruidJsonFilter filter; public JsonDimHavingFilter(DruidJsonFilter filter) { this.filter = filter; } @Override public void write(JsonGenerator generator) throws IOException { generator.writeStartObject(); generator.writeStringField("type", "filter"); DruidQuery.writeField(generator, "filter", filter); generator.writeEndObject(); } } } // End DruidJsonFilter.java
package org.inaturalist.android;

import java.util.ArrayList;
import java.util.List;

import android.annotation.SuppressLint;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Handler;
import androidx.fragment.app.Fragment;
import androidx.viewpager.widget.ViewPager;
import androidx.viewpager.widget.ViewPager.OnPageChangeListener;
import androidx.appcompat.app.ActionBar;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.TabHost;
import android.widget.TabHost.OnTabChangeListener;
import android.widget.TabWidget;
import android.widget.TextView;

/**
 * Screen that shows the three project lists (joined / nearby / featured) as a
 * ViewPager whose pages are kept in sync with a TabHost.
 */
public class ProjectsActivity extends BaseFragmentActivity implements OnTabChangeListener, OnPageChangeListener {
    // Adapter backing the ViewPager with the three tab fragments.
    MyPageAdapter mPageAdapter;
    private ViewPager mViewPager;
    private TabHost mTabHost;
    // Order: 0 = joined, 1 = nearby, 2 = featured (see getFragments()).
    private List<Fragment> mFragments;
    private INaturalistApp mApp;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        setTheme(R.style.NoActionBarShadowTheme);
        super.onCreate(savedInstanceState);

        mApp = (INaturalistApp) getApplicationContext();
        mApp.applyLocaleSettings(getBaseContext());

        setContentView(R.layout.projects);
        onDrawerCreate(savedInstanceState);

        getSupportActionBar().setElevation(0);

        mViewPager = (ViewPager) findViewById(R.id.viewpager);
        // Keep all three pages alive so switching tabs never recreates them.
        mViewPager.setOffscreenPageLimit(3);

        // Tab Initialization
        initialiseTabHost();

        // Fragments and ViewPager Initialization
        if (savedInstanceState == null) {
            mFragments = getFragments();
        } else {
            // Restore the fragment instances saved in onSaveInstanceState.
            mFragments = new ArrayList<Fragment>();
            mFragments.add(getSupportFragmentManager().getFragment(savedInstanceState, "joined_projects"));
            mFragments.add(getSupportFragmentManager().getFragment(savedInstanceState, "nearby_projects"));
            mFragments.add(getSupportFragmentManager().getFragment(savedInstanceState, "featured_projects"));
        }
        mPageAdapter = new MyPageAdapter(getSupportFragmentManager(), mFragments);
        mViewPager.setAdapter(mPageAdapter);
        mViewPager.setOnPageChangeListener(this);

        // Delay the initial load so the current tab's fragment view exists.
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                ((BaseTab) mPageAdapter.getItem(mViewPager.getCurrentItem())).loadProjects();
            }
        }, 1000);
    }

    // Method to add a TabHost
    private static void AddTab(ProjectsActivity activity, TabHost tabHost, TabHost.TabSpec tabSpec) {
        tabSpec.setContent(new MyTabFactory(activity));
        tabHost.addTab(tabSpec);
    }

    // Inflates a custom tab view with a localized title (falling back when the
    // localized resource is missing).
    private View createTabContent(String titleRes, String fallbackRes) {
        View view = LayoutInflater.from(this).inflate(R.layout.tab, null);
        TextView tabTitle = (TextView) view.findViewById(R.id.tab_title);
        tabTitle.setText(mApp.getStringResourceByName(titleRes, fallbackRes));
        return view;
    }

    // Manages the Tab changes, synchronizing it with Pages
    public void onTabChanged(String tag) {
        int pos = this.mTabHost.getCurrentTab();
        this.mViewPager.setCurrentItem(pos);
        refreshTabs(pos);
    }

    // Updates the tab highlight state and triggers a load of the selected tab.
    private void refreshTabs(int pos) {
        TabWidget tabWidget = mTabHost.getTabWidget();
        for (int i = 0; i < 3; i++) {
            tabWidget.getChildAt(i).findViewById(R.id.bottom_line).setVisibility(View.GONE);
            ((TextView) tabWidget.getChildAt(i).findViewById(R.id.tab_title)).setTextColor(Color.parseColor("#84000000"));
        }
        tabWidget.getChildAt(pos).findViewById(R.id.bottom_line).setVisibility(View.VISIBLE);
        ((TextView)tabWidget.getChildAt(pos).findViewById(R.id.tab_title)).setTextColor(Color.parseColor("#000000"));

        if (mPageAdapter != null) {
            ((BaseTab) mPageAdapter.getItem(pos)).loadProjects();
        }
    }

    @Override
    public void onPageScrollStateChanged(int arg0) {
    }

    // Manages the Page changes, synchronizing it with Tabs
    @Override
    public void onPageScrolled(int arg0, float arg1, int arg2) {
        int pos = this.mViewPager.getCurrentItem();
        this.mTabHost.setCurrentTab(pos);
    }

    @Override
    public void onPageSelected(int arg0) {
    }

    // Builds the three tab fragments in display order.
    private List<Fragment> getFragments(){
        List<Fragment> fList = new ArrayList<Fragment>();

        JoinedProjectsTab f1 = new JoinedProjectsTab();
        NearByProjectsTab f2 = new NearByProjectsTab();
        FeaturedProjectsTab f3 = new FeaturedProjectsTab();
        fList.add(f1);
        fList.add(f2);
        fList.add(f3);

        return fList;
    }

    // Tabs Creation
    @SuppressLint("NewApi")
    private void initialiseTabHost() {
        mTabHost = (TabHost) findViewById(android.R.id.tabhost);
        mTabHost.setup();

        ProjectsActivity.AddTab(this, this.mTabHost, this.mTabHost.newTabSpec("joined_projects").setIndicator(createTabContent("joined_projects_all_caps", "joined_projects")));
        ProjectsActivity.AddTab(this, this.mTabHost, this.mTabHost.newTabSpec("nearby_projects").setIndicator(createTabContent("nearby_projects_all_caps", "nearby_projects")));
        ProjectsActivity.AddTab(this, this.mTabHost, this.mTabHost.newTabSpec("featured_projects").setIndicator(createTabContent("featured_projects_all_caps", "featured_projects")));

        mTabHost.getTabWidget().setDividerDrawable(null);

        mTabHost.setOnTabChangedListener(this);

        refreshTabs(0);
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        // Persist the fragment instances so onCreate can re-attach them.
        getSupportFragmentManager().putFragment(outState, "joined_projects", mFragments.get(0));
        getSupportFragmentManager().putFragment(outState, "nearby_projects", mFragments.get(1));
        getSupportFragmentManager().putFragment(outState, "featured_projects", mFragments.get(2));
        super.onSaveInstanceState(outState);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.search:
                // Launch the project search screen; results open ProjectDetails.
                Intent intent = new Intent(this, ItemSearchActivity.class);
                intent.putExtra(ItemSearchActivity.RESULT_VIEWER_ACTIVITY, ProjectDetails.class);
                intent.putExtra(ItemSearchActivity.RESULT_VIEWER_ACTIVITY_PARAM_NAME, "project");
                intent.putExtra(ItemSearchActivity.SEARCH_HINT_TEXT, BaseProjectsTab.getSearchFilterTextHint(this));
                intent.putExtra(ItemSearchActivity.SEARCH_URL, BaseProjectsTab.getSearchUrl((INaturalistApp) getApplicationContext()));
                startActivity(intent);
                return true;
        }

        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.search_menu, menu);
        return true;
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);

        if (resultCode == ProjectDetails.RESULT_REFRESH_RESULTS) {
            // Refresh all projects result
            for (int i = 0; i < mFragments.size(); i++) {
                BaseTab tab = (BaseTab) mFragments.get(i);
                tab.refresh();
            }
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        // Delegate permission results to the app-wide handler.
        ((INaturalistApp) getApplicationContext()).onRequestPermissionsResult(requestCode, permissions, grantResults);
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.testing;

import com.google.common.collect.ImmutableMap;
import io.airlift.units.Duration;
import io.prestosql.Session;
import io.prestosql.client.ClientSelectedRole;
import io.prestosql.client.ClientSession;
import io.prestosql.client.Column;
import io.prestosql.client.QueryError;
import io.prestosql.client.QueryStatusInfo;
import io.prestosql.client.StatementClient;
import io.prestosql.connector.CatalogName;
import io.prestosql.metadata.MetadataUtil;
import io.prestosql.metadata.QualifiedObjectName;
import io.prestosql.metadata.QualifiedTablePrefix;
import io.prestosql.server.testing.TestingPrestoServer;
import io.prestosql.spi.QueryId;
import io.prestosql.spi.session.ResourceEstimates;
import io.prestosql.spi.type.Type;
import okhttp3.OkHttpClient;
import org.intellij.lang.annotations.Language;

import java.io.Closeable;
import java.net.URI;
import java.time.ZoneId;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static io.prestosql.client.StatementClientFactory.newStatementClient;
import static io.prestosql.spi.session.ResourceEstimates.CPU_TIME;
import static io.prestosql.spi.session.ResourceEstimates.EXECUTION_TIME;
import static io.prestosql.spi.session.ResourceEstimates.PEAK_MEMORY;
import static io.prestosql.transaction.TransactionBuilder.transaction;
import static java.util.Objects.requireNonNull;

/**
 * Base class for test clients that run SQL against an in-process
 * TestingPrestoServer over HTTP and collect results via a ResultsSession.
 */
public abstract class AbstractTestingPrestoClient<T>
        implements Closeable
{
    private final TestingPrestoServer prestoServer;
    private final Session defaultSession;

    // Shared HTTP client for all queries; shut down in close().
    private final OkHttpClient httpClient = new OkHttpClient();

    protected AbstractTestingPrestoClient(TestingPrestoServer prestoServer,
            Session defaultSession)
    {
        this.prestoServer = requireNonNull(prestoServer, "prestoServer is null");
        this.defaultSession = requireNonNull(defaultSession, "defaultSession is null");
    }

    @Override
    public void close()
    {
        // Release OkHttp's executor threads and pooled connections.
        httpClient.dispatcher().executorService().shutdown();
        httpClient.connectionPool().evictAll();
    }

    // Subclasses supply how rows returned by the server are accumulated into T.
    protected abstract ResultsSession<T> getResultSession(Session session);

    /** Executes SQL with the default session. */
    public ResultWithQueryId<T> execute(@Language("SQL") String sql)
    {
        return execute(defaultSession, sql);
    }

    /**
     * Executes SQL and drains all result pages into a ResultsSession.
     * Throws a RuntimeException (with the remote failure as cause when
     * available) if the query fails.
     */
    public ResultWithQueryId<T> execute(Session session, @Language("SQL") String sql)
    {
        ResultsSession<T> resultsSession = getResultSession(session);

        ClientSession clientSession = toClientSession(session, prestoServer.getBaseUrl(), new Duration(2, TimeUnit.MINUTES));

        try (StatementClient client = newStatementClient(httpClient, clientSession, sql)) {
            // Pump all result pages into the results session.
            while (client.isRunning()) {
                resultsSession.addResults(client.currentStatusInfo(), client.currentData());
                client.advance();
            }

            checkState(client.isFinished());
            QueryError error = client.finalStatusInfo().getError();

            if (error == null) {
                QueryStatusInfo results = client.finalStatusInfo();
                if (results.getUpdateType() != null) {
                    resultsSession.setUpdateType(results.getUpdateType());
                }
                if (results.getUpdateCount() != null) {
                    resultsSession.setUpdateCount(results.getUpdateCount());
                }

                resultsSession.setWarnings(results.getWarnings());

                T result = resultsSession.build(client.getSetSessionProperties(), client.getResetSessionProperties());
                return new ResultWithQueryId<>(new QueryId(results.getId()), result);
            }

            if (error.getFailureInfo() != null) {
                RuntimeException remoteException = error.getFailureInfo().toException();
                throw new RuntimeException(Optional.ofNullable(remoteException.getMessage()).orElseGet(remoteException::toString), remoteException);
            }
            throw new RuntimeException("Query failed: " + error.getMessage());

            // dump query info to console for debugging (NOTE: not pretty printed)
            // JsonCodec<QueryInfo> queryInfoJsonCodec = createCodecFactory().prettyPrint().jsonCodec(QueryInfo.class);
            // log.info("\n" + queryInfoJsonCodec.toJson(queryInfo));
        }
    }

    // Flattens a server-side Session into the flat key/value ClientSession the
    // HTTP client protocol expects (catalog-qualified property names).
    private static ClientSession toClientSession(Session session, URI server, Duration clientRequestTimeout)
    {
        ImmutableMap.Builder<String, String> properties = ImmutableMap.builder();
        properties.putAll(session.getSystemProperties());
        for (Entry<CatalogName, Map<String, String>> catalogAndConnectorProperties : session.getConnectorProperties().entrySet()) {
            for (Entry<String, String> connectorProperties : catalogAndConnectorProperties.getValue().entrySet()) {
                // Qualify connector properties as "<catalog>.<property>".
                properties.put(catalogAndConnectorProperties.getKey() + "." + connectorProperties.getKey(), connectorProperties.getValue());
            }
        }
        for (Entry<String, Map<String, String>> connectorProperties : session.getUnprocessedCatalogProperties().entrySet()) {
            for (Entry<String, String> entry : connectorProperties.getValue().entrySet()) {
                properties.put(connectorProperties.getKey() + "."
+ entry.getKey(), entry.getValue()); } } ImmutableMap.Builder<String, String> resourceEstimates = ImmutableMap.builder(); ResourceEstimates estimates = session.getResourceEstimates(); estimates.getExecutionTime().ifPresent(e -> resourceEstimates.put(EXECUTION_TIME, e.toString())); estimates.getCpuTime().ifPresent(e -> resourceEstimates.put(CPU_TIME, e.toString())); estimates.getPeakMemoryBytes().ifPresent(e -> resourceEstimates.put(PEAK_MEMORY, e.toString())); return new ClientSession( server, session.getIdentity().getUser(), session.getSource().orElse(null), session.getTraceToken(), session.getClientTags(), session.getClientInfo().orElse(null), session.getCatalog().orElse(null), session.getSchema().orElse(null), session.getPath().toString(), ZoneId.of(session.getTimeZoneKey().getId()), session.getLocale(), resourceEstimates.build(), properties.build(), session.getPreparedStatements(), session.getIdentity().getRoles().entrySet().stream() .collect(toImmutableMap(Entry::getKey, entry -> new ClientSelectedRole( ClientSelectedRole.Type.valueOf(entry.getValue().getType().toString()), entry.getValue().getRole()))), session.getIdentity().getExtraCredentials(), session.getTransactionId().map(Object::toString).orElse(null), clientRequestTimeout, true); } public List<QualifiedObjectName> listTables(Session session, String catalog, String schema) { return transaction(prestoServer.getTransactionManager(), prestoServer.getAccessControl()) .readOnly() .execute(session, transactionSession -> { return prestoServer.getMetadata().listTables(transactionSession, new QualifiedTablePrefix(catalog, schema)); }); } public boolean tableExists(Session session, String table) { return transaction(prestoServer.getTransactionManager(), prestoServer.getAccessControl()) .readOnly() .execute(session, transactionSession -> { return MetadataUtil.tableExists(prestoServer.getMetadata(), transactionSession, table); }); } public Session getDefaultSession() { return defaultSession; } public 
TestingPrestoServer getServer() { return prestoServer; } protected List<Type> getTypes(List<Column> columns) { return columns.stream() .map(Column::getType) .map(prestoServer.getMetadata()::fromSqlType) .collect(toImmutableList()); } }