repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
orientechnologies/orientdb
graphdb/src/test/java/com/orientechnologies/orient/graph/GraphNonBlockingQueryRemoteTest.java
5112
package com.orientechnologies.orient.graph;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.command.OCommandResultListener;
import com.orientechnologies.orient.core.db.OrientDB;
import com.orientechnologies.orient.core.db.OrientDBConfig;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.query.OSQLNonBlockingQuery;
import com.orientechnologies.orient.server.OServer;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import com.tinkerpop.blueprints.impls.orient.OrientGraphRemoteTest;
import com.tinkerpop.blueprints.impls.orient.OrientVertex;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Remote integration test for {@link OSQLNonBlockingQuery}: boots an embedded OrientDB server,
 * runs a non-blocking SQL query over a remote connection and verifies that all result callbacks
 * (plus the end-of-results callback) arrive before the database is shut down.
 *
 * <p>Created by tglman on 01/07/16.
 */
public class GraphNonBlockingQueryRemoteTest {
  // Embedded server instance started in before() and stopped in after().
  private OServer server;
  // Scratch directory used as ORIENTDB_HOME for the embedded server.
  private String serverHome;
  // Previous ORIENTDB_HOME value; restored in after() so other tests are unaffected.
  private String oldOrientDBHome;

  /**
   * Starts an embedded OrientDB server in a fresh scratch directory and creates the in-memory
   * test database over a remote admin connection.
   */
  @Before
  public void before()
      throws ClassNotFoundException, MalformedObjectNameException, InstanceAlreadyExistsException,
          NotCompliantMBeanException, MBeanRegistrationException, InvocationTargetException,
          NoSuchMethodException, InstantiationException, IOException, IllegalAccessException {
    final String buildDirectory = System.getProperty("buildDirectory", ".");
    serverHome = buildDirectory + "/" + GraphNonBlockingQueryRemoteTest.class.getSimpleName();
    // Make sure a previous run's server home does not survive into this run.
    deleteDirectory(new File(serverHome));
    final File file = new File(serverHome);
    Assert.assertTrue(file.mkdir());
    oldOrientDBHome = System.getProperty("ORIENTDB_HOME");
    System.setProperty("ORIENTDB_HOME", serverHome);
    server = new OServer(false);
    server.startup(
        OrientGraphRemoteTest.class.getResourceAsStream("/embedded-server-config-single-run.xml"));
    server.activate();
    // Create the test database remotely; try-with-resources closes the admin connection.
    try (OrientDB orientDB =
        new OrientDB("remote:localhost:3064", "root", "root", OrientDBConfig.defaultConfig())) {
      orientDB.execute(
          "create database ? memory users(admin identified by 'admin' role admin)",
          GraphNonBlockingQueryRemoteTest.class.getSimpleName());
    }
  }

  /** Stops the server and restores the previous ORIENTDB_HOME system property. */
  @After
  public void after() {
    server.shutdown();
    if (oldOrientDBHome != null) System.setProperty("ORIENTDB_HOME", oldOrientDBHome);
    else System.clearProperty("ORIENTDB_HOME");
  }

  /** Restarts the Orient engine so state from the embedded server does not leak to other tests. */
  @AfterClass
  public static void afterClass() {
    Orient.instance().shutdown();
    Orient.instance().startup();
  }

  /**
   * Inserts 21 vertices, queries them with a non-blocking query and waits (with a timeout)
   * until all expected callbacks have been delivered.
   */
  @Test
  public void testNonBlockingClose() throws ExecutionException, InterruptedException {
    OrientGraph database =
        new OrientGraph(
            "remote:localhost:3064/" + GraphNonBlockingQueryRemoteTest.class.getSimpleName());
    database.createVertexType("Prod").createProperty("something", OType.STRING);
    for (int i = 0; i < 21; i++) {
      OrientVertex vertex = database.addVertex("class:Prod");
      vertex.setProperty("something", "value");
      vertex.save();
    }
    database.commit();
    // 21 counts in total: result() counts down once per row and end() once more --
    // NOTE(review): looks like the 21st count comes from end(), confirm against the listener.
    final CountDownLatch ended = new CountDownLatch(21);
    try {
      OSQLNonBlockingQuery<Object> test =
          new OSQLNonBlockingQuery<Object>(
              "select * from Prod ",
              new OCommandResultListener() {
                int resultCount = 0;

                @Override
                public boolean result(Object iRecord) {
                  resultCount++;
                  ODocument odoc = ((ODocument) iRecord);
                  for (String name : odoc.fieldNames()) { // <----------- PROBLEM
                    assertEquals("something", name);
                  }
                  ended.countDown();
                  // Stop fetching once more than 20 rows have been seen.
                  return resultCount > 20 ? false : true;
                }

                @Override
                public void end() {
                  ended.countDown();
                }

                @Override
                public Object getResult() {
                  return resultCount;
                }
              });
      database.command(test).execute();
      // Fail rather than hang forever if callbacks never arrive.
      assertTrue(ended.await(10, TimeUnit.SECONDS));
    } finally {
      database.shutdown();
    }
  }

  /** Recursively deletes a file tree; throws if any entry cannot be removed. */
  private static void deleteDirectory(File f) throws IOException {
    if (f.isDirectory()) {
      final File[] files = f.listFiles();
      if (files != null) {
        for (File c : files) deleteDirectory(c);
      }
    }
    if (f.exists() && !f.delete()) throw new FileNotFoundException("Failed to delete file: " + f);
  }
}
apache-2.0
tientq/jhipster-microservices
authorization-service/src/main/java/net/tinyset/authorization/repository/AddressRepository.java
338
package net.tinyset.authorization.repository;

import net.tinyset.authorization.domain.Address;
import org.springframework.data.jpa.repository.*;

import java.util.List;

/**
 * Spring Data JPA repository for {@link Address} entities.
 *
 * <p>All CRUD, paging and sorting operations are inherited from
 * {@link JpaRepository}; no custom query methods are declared yet.
 */
@SuppressWarnings("unused")
public interface AddressRepository extends JpaRepository<Address, Long> {

}
apache-2.0
airbnb/elephant-bird
core/src/main/java/com/twitter/elephantbird/mapreduce/output/LzoThriftBlockOutputFormat.java
1746
package com.twitter.elephantbird.mapreduce.output; import java.io.IOException; import com.twitter.elephantbird.mapreduce.io.ThriftBlockWriter; import com.twitter.elephantbird.mapreduce.io.ThriftWritable; import com.twitter.elephantbird.util.ThriftUtils; import com.twitter.elephantbird.util.TypeRef; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.thrift.TBase; /** * Data is written as one base64 encoded serialized thrift per line. <br><br> * * Do not forget to set Thrift class using setClassConf(). */ public class LzoThriftBlockOutputFormat<M extends TBase<?, ?>> extends LzoOutputFormat<M, ThriftWritable<M>> { protected TypeRef<M> typeRef_; public LzoThriftBlockOutputFormat() {} public LzoThriftBlockOutputFormat(TypeRef<M> typeRef) { typeRef_ = typeRef; } /** * Sets an internal configuration in jobConf so that remote Tasks * instantiate appropriate object for this generic class based on thriftClass */ public static <M extends TBase<?, ?>> void setClassConf(Class<M> thriftClass, Configuration jobConf) { ThriftUtils.setClassConf(jobConf, LzoThriftBlockOutputFormat.class, thriftClass); } public RecordWriter<M, ThriftWritable<M>> getRecordWriter(TaskAttemptContext job) throws IOException, InterruptedException { if (typeRef_ == null) { typeRef_ = ThriftUtils.getTypeRef(job.getConfiguration(), LzoThriftBlockOutputFormat.class); } return new LzoBinaryBlockRecordWriter<M, ThriftWritable<M>>( new ThriftBlockWriter<M>(getOutputStream(job), typeRef_.getRawClass())); } }
apache-2.0
yehchilai/IQ
Easy/223 Rectangle Area.java
907
/*
 * https://leetcode.com/problems/rectangle-area/
 *
 * Find the total area covered by two rectilinear rectangles in a 2D plane.
 * Each rectangle is defined by its bottom-left and top-right corners.
 * The total area is guaranteed to fit in an int.
 *
 * Time Complexity: O(1)
 */
public class Solution {

    /**
     * Returns the total area covered by rectangle (A,B)-(C,D) and
     * rectangle (E,F)-(G,H), counting any overlap only once.
     */
    public int computeArea(int A, int B, int C, int D, int E, int F, int G, int H) {
        // Area of each rectangle on its own.
        int firstArea = (C - A) * (D - B);
        int secondArea = (G - E) * (H - F);

        // Intersection box: max of lower-left corners, min of upper-right corners.
        int overlapWidth = Math.min(C, G) - Math.max(A, E);
        int overlapHeight = Math.min(D, H) - Math.max(B, F);

        // A non-positive width or height means the rectangles do not intersect.
        int overlapArea = 0;
        if (overlapWidth > 0 && overlapHeight > 0) {
            overlapArea = overlapWidth * overlapHeight;
        }

        return firstArea + secondArea - overlapArea;
    }
}
apache-2.0
STRiDGE/dozer
core/src/main/java/org/dozer/classmap/generator/BeanMappingGenerator.java
3318
/* * Copyright 2005-2017 Dozer Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.dozer.classmap.generator; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import org.dozer.classmap.ClassMap; import org.dozer.classmap.ClassMapBuilder; import org.dozer.classmap.Configuration; import org.dozer.util.CollectionUtils; /** * @author Dmitry Spikhalskiy */ public class BeanMappingGenerator implements ClassMapBuilder.ClassMappingGenerator { static final List<BeanFieldsDetector> pluggedFieldDetectors = new ArrayList<BeanFieldsDetector>(); static final List<BeanFieldsDetector> availableFieldDetectors = new ArrayList<BeanFieldsDetector>() {{ add(new JavaBeanFieldsDetector()); }}; public boolean accepts(ClassMap classMap) { return true; } public boolean apply(ClassMap classMap, Configuration configuration) { Class<?> srcClass = classMap.getSrcClassToMap(); Class<?> destClass = classMap.getDestClassToMap(); Set<String> destFieldNames = getAcceptsFieldsDetector(destClass).getWritableFieldNames(destClass); Set<String> srcFieldNames = getAcceptsFieldsDetector(srcClass).getReadableFieldNames(srcClass); Set<String> commonFieldNames = CollectionUtils.intersection(srcFieldNames, destFieldNames); for (String fieldName : commonFieldNames) { if (GeneratorUtils.shouldIgnoreField(fieldName, srcClass, destClass)) { continue; } // If field has already been accounted for, then skip if 
(classMap.getFieldMapUsingDest(fieldName) != null || classMap.getFieldMapUsingSrc(fieldName) != null) { continue; } GeneratorUtils.addGenericMapping(MappingType.GETTER_TO_SETTER, classMap, configuration, fieldName, fieldName); } return false; } private static BeanFieldsDetector getAcceptsFieldsDetector(Class<?> clazz) { BeanFieldsDetector detector = getAcceptsFieldDetector(clazz, pluggedFieldDetectors); if (detector == null) { detector = getAcceptsFieldDetector(clazz, availableFieldDetectors); } return detector; } private static BeanFieldsDetector getAcceptsFieldDetector(Class<?> clazz, List<BeanFieldsDetector> detectors) { for (BeanFieldsDetector detector : new CopyOnWriteArrayList<BeanFieldsDetector>(detectors)) { if (detector.accepts(clazz)) { return detector; } } return null; } public static void addPluggedFieldDetector(BeanFieldsDetector protobufBeanFieldsDetector) { pluggedFieldDetectors.add(protobufBeanFieldsDetector); } protected interface BeanFieldsDetector { boolean accepts(Class<?> clazz); Set<String> getReadableFieldNames(Class<?> clazz); Set<String> getWritableFieldNames(Class<?> clazz); } }
apache-2.0
google/caliper
caliper-runner/src/main/java/com/google/caliper/runner/CaliperRunnerFactory.java
780
/*
 * Copyright (C) 2017 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.caliper.runner;

/**
 * Creates {@link CaliperRunner} instances.
 *
 * <p>Implementations decide how the runner is constructed and wired together.
 */
interface CaliperRunnerFactory {

  /** Supplies the Caliper runner to use. */
  CaliperRunner getRunner();
}
apache-2.0
AKSW/SmartDataWebKG
KnowledgeGraphUtils/test/aksw/org/kg/handler/solr/TestSolrHandler.java
5683
package aksw.org.kg.handler.solr;

import static org.junit.Assert.*;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import aksw.org.kg.KgException;
import aksw.org.sdw.kg.handler.solr.KgSolrResultDocument;
import aksw.org.sdw.kg.handler.solr.SolrHandler;
import aksw.org.sdw.kg.handler.solr.SolrHandler.AnnotationInfo;
import aksw.org.sdw.kg.handler.solr.SolrHandler.TAGGER_ANNOTATION_OVERLAP;
import aksw.org.sdw.kg.handler.solr.SolrHandler.TAGGER_LANGUAGE;

/**
 * Integration tests for {@link SolrHandler}. They require a Solr instance with
 * a "companies" core running on localhost:8983, so they are environment-dependent
 * and will fail without that external service.
 */
public class TestSolrHandler {

	// NOTE(review): hardcoded endpoint; setUp() and several tests repeat the
	// literal URL instead of using this field -- consider consolidating.
	static String solrUrl = "http://localhost:8983/solr/companies";

	// Fresh handler per test, created in setUp() and closed in tearDown().
	SolrHandler solrHandler;

	@BeforeClass
	public static void setUpBeforeClass() throws Exception {
	}

	@AfterClass
	public static void tearDownAfterClass() throws Exception {
	}

	@Before
	public void setUp() throws Exception {
		this.solrHandler = new SolrHandler("http://localhost:8983/solr/companies");
	}

	@After
	public void tearDown() throws Exception {
		this.solrHandler.close();
	}

	/**
	 * This method can be used to check whether we can connect with SOLR
	 * (the connection itself is made in setUp()).
	 *
	 * @throws IOException
	 */
	@Test
	public void contactSolr() throws IOException {
		//SolrHandler solrHandler = new SolrHandler("http://localhost:8983/solr/companies");
		//solrHandler.close();
	}

	/**
	 * This method can be used to execute solr search queries
	 *
	 * @throws IOException
	 * @throws KgException
	 */
	@Test
	public void testQueryExecutionSolr() throws IOException, KgException {
		//solrHandler = new SolrHandler(solrUrl);
		List<KgSolrResultDocument> resuts = solrHandler.executeQuery("nameEn:\"Germany\"", null);
		assertNotNull("Got results", resuts);
		assertFalse("Got results", resuts.isEmpty());
		// NOTE(review): this closes the shared handler which tearDown() closes
		// again -- verify SolrHandler.close() is idempotent.
		solrHandler.close();
	}

	/**
	 * This method can be used to execute solr search queries with filters
	 *
	 * @throws IOException
	 * @throws KgException
	 */
	@Test
	public void testQueryFilterExecutionSolr() throws IOException, KgException {
		// NOTE(review): a second handler is created although setUp() already
		// provides one; it leaks if an assertion fails before close() (no try/finally).
		SolrHandler solrHandler = new SolrHandler(solrUrl);
		List<KgSolrResultDocument> resuts = solrHandler.executeQuery("nameEn:\"Germany\"",
				Arrays.asList("type:\"http://dbpedia.org/ontology/Country\""));
		assertNotNull("Got results", resuts);
		assertFalse("Got results", resuts.isEmpty());
		solrHandler.close();
	}

	/**
	 * Check whether basic annotations can be found and are returned.
	 *
	 * @throws Exception
	 */
	@Test
	public void testQueryFromText() throws Exception {
		// NOTE(review): same local-handler pattern as above; leaks on assertion failure.
		SolrHandler solrHandler = new SolrHandler(solrUrl);
		Map<AnnotationInfo, List<KgSolrResultDocument>> result =
				solrHandler.getNamedEntitiesFromText("Berlin is a great city in Germany",
						null, null, TAGGER_LANGUAGE.ENGLISH, TAGGER_ANNOTATION_OVERLAP.ALL);
		solrHandler.close();
		assertNotNull("Got a result", result);
		assertEquals("Got two resutls", 2, result.size());
		// Expect one annotation each for "Berlin" and "Germany".
		List<String> matchingTexts = Arrays.asList("Berlin", "Germany");
		for (AnnotationInfo info : result.keySet()) {
			assertTrue("Found correct match", matchingTexts.contains(info.matchText));
		}
	}

	/**
	 * Check whether it is possible to filter by type.
	 *
	 * @throws Exception
	 */
	@Test
	public void testQueryFromTextWithFilter() throws Exception {
		SolrHandler solrHandler = new SolrHandler(solrUrl);
		// Restricting to countries should drop the "Berlin" annotation.
		List<String> filterQuery = Arrays.asList("type:\"http://dbpedia.org/ontology/Country\"");
		Map<AnnotationInfo, List<KgSolrResultDocument>> result =
				solrHandler.getNamedEntitiesFromText("Berlin is a great city in Germany",
						filterQuery, null, TAGGER_LANGUAGE.ENGLISH, TAGGER_ANNOTATION_OVERLAP.ALL);
		solrHandler.close();
		assertNotNull("Got a result", result);
		assertEquals("Got two resutls", 1, result.size());
		List<String> matchingTexts = Arrays.asList("Germany");
		for (AnnotationInfo info : result.keySet()) {
			assertTrue("Found correct match", matchingTexts.contains(info.matchText));
		}
	}

	/**
	 * Check whether it is possible to use multiple filters.
	 *
	 * @throws Exception
	 */
	@Test
	public void testQueryFromTextWithFilter2() throws Exception {
		SolrHandler solrHandler = new SolrHandler(solrUrl);
		// Contradictory filters (Country AND nameEn:England for a text about
		// Germany) are expected to produce no result at all.
		List<String> filterQuery = Arrays.asList("type:\"http://dbpedia.org/ontology/Country\"", "nameEn:\"England\"");
		//Map<AnnotationInfo, List<KgSolrResultDocument>> result = solrHandler.getNamedEntitiesFromTextEn("Berlin is a great city in Germany", filterQuery, null);
		Map<AnnotationInfo, List<KgSolrResultDocument>> result =
				solrHandler.getNamedEntitiesFromText("Berlin is a great city in Germany",
						filterQuery, null, TAGGER_LANGUAGE.ENGLISH, TAGGER_ANNOTATION_OVERLAP.ALL);
		solrHandler.close();
		assertNull("Got no result", result);
	}

	/**
	 * Check whether it is possible to filter by type.
	 *
	 * @throws Exception
	 */
	@Test
	public void testQueryFromTextWithFilter3() throws Exception {
		SolrHandler solrHandler = new SolrHandler(solrUrl);
		// Filters on a different-language field (nameDe) combined with the type filter.
		List<String> filterQuery = Arrays.asList("type:\"http://dbpedia.org/ontology/Country\"", "nameDe:Deutschland");
		Map<AnnotationInfo, List<KgSolrResultDocument>> result =
				solrHandler.getNamedEntitiesFromText("Berlin is a great city in Germany",
						filterQuery, null, TAGGER_LANGUAGE.ENGLISH, TAGGER_ANNOTATION_OVERLAP.ALL);
		solrHandler.close();
		assertNotNull("Got a result", result);
		assertEquals("Got two resutls", 1, result.size());
		List<String> matchingTexts = Arrays.asList("Germany");
		for (AnnotationInfo info : result.keySet()) {
			assertTrue("Found correct match", matchingTexts.contains(info.matchText));
		}
	}
}
apache-2.0
kavehg/raider
raider-core/src/main/java/com/kg/raider/NotificationSocket.java
2143
package com.kg.raider; import com.google.protobuf.InvalidProtocolBufferException; import com.kg.raider.pb.MetricPB; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.zeromq.ZContext; import org.zeromq.ZMQ; /** * User: kaveh * Date: 8/14/13 * Time: 10:24 PM */ public class NotificationSocket { private int number_of_events_published = 0; public int getNumber_of_events_published() { return number_of_events_published; } private Logger logger = LoggerFactory.getLogger(NotificationSocket.class); /** * The port to which the send socket will bind */ private static final Integer PUBLISH_PORT = 6000; /** * zero-mq context */ private ZContext ctx; /** * zero-mq outgoing notification/subscription socket */ private ZMQ.Socket publisher; /** * The notification socket has no external dependencies */ public NotificationSocket() { ctx = new ZContext(); publisher = ctx.createSocket(ZMQ.PUB); // bind to port publisher.bind("tcp://*:" + PUBLISH_PORT); } /** * Any metrics passed to this method will be published out * to subscribing clients - IF the byte array converts to * metric PB pojo * * @param metricToPublish byte array representation of the * metric to be published */ public void publish(byte[] metricToPublish) { try { // convert the byte array to PB pojo MetricPB metricToReport = MetricPB.parseFrom(metricToPublish); // publish the key - used for filtering client subscriptions publisher.send(metricToReport.getKey().getBytes(), ZMQ.SNDMORE); // publish the body publisher.send(metricToReport.toByteArray(), 0); number_of_events_published++; } catch (InvalidProtocolBufferException e) { logger.error(e.getMessage()); } } public void close() { logger.info("closing notification socket"); publisher.close(); ctx.close(); ctx.destroy(); } }
apache-2.0
Otaka/mydifferentprojects
MacroProcessor/src/com/MacroProcessorMain.java
1376
package com;

import com.macro.tokenizer.Token;
import com.macro.tokenizer.Tokenizer;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.commons.io.IOUtils;

/**
 * Round-trip sanity check for the macro tokenizer: every file in the target
 * directory is tokenized and the concatenation of the token values is compared
 * with the original file content.
 *
 * @author sad
 */
public class MacroProcessorMain {

    /** Directory scanned when no command-line argument is supplied (original hard-coded path). */
    private static final String DEFAULT_SCRIPTS_DIR =
            "d:\\java\\comscore\\XPLORE_FRM_9.0_RC2_VZWCPMC_INT\\Xplore\\Professional Services\\Storm\\tools\\metadata\\scripts\\";

    /**
     * @param args the command line arguments; args[0] may name the directory to
     *             scan (generalized -- previously the path was hard-coded only)
     */
    public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
        String directory = args.length > 0 ? args[0] : DEFAULT_SCRIPTS_DIR;
        File[] entries = new File(directory).listFiles();
        if (entries == null) {
            // Robustness fix: listFiles() returns null for a missing or
            // non-directory path, which previously caused a NullPointerException.
            throw new FileNotFoundException("Not a readable directory: " + directory);
        }
        for (File f : entries) {
            if (f.isFile()) {
                fileCheck(f);
            }
        }
    }

    /**
     * Tokenizes one file and prints whether re-joining the token values
     * reproduces the original content exactly.
     */
    private static void fileCheck(File f) throws Exception {
        // Fix: the FileInputStream was never closed (file-handle leak).
        String fileContent;
        FileInputStream in = new FileInputStream(f);
        try {
            fileContent = IOUtils.toString(in);
        } finally {
            in.close();
        }
        Tokenizer tokenizer = new Tokenizer(fileContent);
        StringBuilder sb = new StringBuilder();
        while (true) {
            Token t = tokenizer.nextToken();
            if (t == null) {
                break;
            }
            sb.append(t.getValue());
        }
        if (fileContent.equals(sb.toString())) {
            System.out.println("Strings equal");
        } else {
            System.out.println("Strings different");
        }
    }
}
apache-2.0
jihunhamm/Crowd-ML
client/android/Crowd-ML/app/src/osu/crowd_ml/trainers/TensorFlowTrainer.java
6969
package osu.crowd_ml.trainers;

/*
Copyright 2017 Crowd-ML team
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License
*/

import android.content.res.AssetManager;
import android.os.Trace;
import android.util.Log;

import org.tensorflow.contrib.android.TensorFlowTrainingInterface;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

import osu.crowd_ml.CrowdMLApplication;
import osu.crowd_ml.Parameters;
import osu.crowd_ml.R;
import osu.crowd_ml.TrainingDataIO;

/**
 * Singleton {@link Trainer} backed by the TensorFlow Android training interface.
 * Feeds random mini-batches to the graph, periodically evaluates accuracy, and
 * returns the updated model weights.
 */
public class TensorFlowTrainer implements Trainer {

    // Run an accuracy evaluation every this many training steps.
    final static private int stepsToTest = 10;
    final static private int testN = 1000; // TODO: hardcoded for MNIST 10 class

    private Parameters params;
    // Current model weights; D*K entries -- assumed flattened row-major, TODO confirm.
    private List<Double> weights;
    // Iteration counter supplied via setIter(); stored but not read in this class.
    private int t;
    // True until the first train() call; triggers graph variable initialization.
    private boolean first;

    // NOTE(review): lazy singleton without synchronization -- not thread-safe.
    private static TensorFlowTrainer instance = null;

    // Lazily created TF training session wrapper; see getTrainingInterface().
    private TensorFlowTrainingInterface trainingInterface = null;

    private TensorFlowTrainer(){
        this.first = true;
    }

    public static Trainer getInstance() {
        if (instance == null){
            instance = new TensorFlowTrainer();
        }
        return instance;
    }

    /** Lazily creates the TF training interface from the bundled model asset. */
    private TensorFlowTrainingInterface getTrainingInterface(){
        if (trainingInterface == null){
            AssetManager am = CrowdMLApplication.getAppContext().getAssets();
            String modelName = CrowdMLApplication.getAppContext().getString(R.string.model_name_TF);
            trainingInterface = new TensorFlowTrainingInterface(am, modelName);
        }
        return trainingInterface;
    }

    /**
     * Runs numIterations SGD steps against the TF graph and returns the updated
     * weights. Every stepsToTest iterations (and on the first) accuracy is
     * evaluated on a test batch and logged.
     */
    @Override
    public List<Double> train(final int numIterations) {
        int batchSize = params.getClientBatchSize();
        int N = params.getN();
        int K = params.getK();
        int D = params.getD();

        // Graph node names come from string resources so they can vary per model.
        String initName = CrowdMLApplication.getAppContext().getString(R.string.init_name_TF);
        String trainName = CrowdMLApplication.getAppContext().getString(R.string.train_name_TF);
        String testName = CrowdMLApplication.getAppContext().getString(R.string.test_name_TF);
        String feedName = CrowdMLApplication.getAppContext().getString(R.string.input_name_TF);
        String fetchName = CrowdMLApplication.getAppContext().getString(R.string.label_name_TF);
        String weightsIn = CrowdMLApplication.getAppContext().getString(R.string.weights_in_TF);
        String weightsOp = CrowdMLApplication.getAppContext().getString(R.string.weights_op_TF);

        // Down-convert the Double weights into the float buffer TensorFlow consumes.
        float[] w = new float[D * K];
        for (int j = 0; j < weights.size(); j++) {
            w[j] = (float)(double)weights.get(j);
        }

        // Initialize the training interface if this is the first round of training
        trainingInterface = getTrainingInterface();

        // Log this method so that it can be analyzed with systrace.
        Trace.beginSection("beginTraining");
        if (first) {
            Trace.beginSection("init_vars");
            first = false;
            trainingInterface.run(new String[]{}, new String[]{initName});
        }
        //trainingInterface.feed(initName, new float[0], 0);
        // NOTE(review): this endSection closes "init_vars" on the first call but
        // "beginTraining" on later calls -- the begin/end pairing looks unbalanced.
        Trace.endSection();

        for (int i = 0; i < numIterations; i++) {
            float[] trainFeatureBatch;
            float[] trainLabelBatch;

            // Sample a random mini-batch of indices (with replacement).
            // NOTE(review): allocates a new Random per sample; one reusable
            // instance would be cheaper and statistically cleaner.
            int[] indices = new int[batchSize];
            for (int j = 0; j < batchSize; j++){
                indices[j] = new Random().nextInt(N);
            }

            // Get the training feature
            trainFeatureBatch = TrainingDataIO.getInstance().getTFFeatureBatch(indices, params);

            // Get the training label
            trainLabelBatch = TrainingDataIO.getInstance().getTFLabelBatch(indices, params);

            // Copy the training data into TensorFlow.
            Trace.beginSection("feed");
            trainingInterface.feed(feedName, trainFeatureBatch, batchSize, D);
            trainingInterface.feed(weightsIn, w, D, K);
            trainingInterface.feed(fetchName, trainLabelBatch, batchSize, K);
            Trace.endSection();

            // Run a single step of training
            Trace.beginSection("train");
            trainingInterface.run(new String[]{weightsOp}, new String[]{trainName});
            Trace.endSection();

            // Copy the weights Tensor into the weights array.
            Trace.beginSection("fetch");
            trainingInterface.fetch(weightsOp, w);
            Trace.endSection();

            Log.d("TFTrainingInterface", i + " iteration");

            // Periodically evaluate accuracy on a held-out test batch.
            if (i == 0 || (i+1) % stepsToTest == 0){
                float[] testFeatures = TrainingDataIO.getInstance().getTFTestFeatures(testN, params);
                float[] testLabels = TrainingDataIO.getInstance().getTFTestingLabels(testN, params);

                // Copy the test data into TensorFlow.
                Trace.beginSection("feed");
                trainingInterface.feed(feedName, testFeatures, testN, D);
                trainingInterface.feed(fetchName, testLabels, testN, K);
                Trace.endSection();

                // Run the inference call.
                Trace.beginSection("test");
                trainingInterface.run(new String[]{testName}, new String[]{});
                Trace.endSection();

                // Copy the accuracy Tensor back into the output array.
                float[] outputs = new float[1];
                Trace.beginSection("fetch");
                trainingInterface.fetch(testName, outputs);
                Trace.endSection();

                Log.d("TFTrainingAccuracy", (outputs[0] * 100) + "%");

                // NOTE(review): ends the "beginTraining" section only when this
                // test branch runs, and potentially several times per train() call.
                Trace.endSection(); // "beginTraining"
            }
        }

        // Convert the trained float weights back into the List<Double> contract.
        List<Double> newWeights = new ArrayList<>(D * K);
        for (int j = 0; j < D*K; j++){
            newWeights.add(j, (double)w[j]);
        }
        return newWeights;
    }

    /** Always returns null; gradient extraction is not implemented for TF. */
    @Override
    public List<Double> getNoisyGrad() {
        // TODO(tylermzeller): How to get gradients from tensorflow?
        return null;
    }

    @Override
    public Trainer setIter(int t) {
        this.t = t;
        return getInstance();
    }

    @Override
    public Trainer setWeights(List<Double> weights) {
        this.weights = weights;
        return getInstance();
    }

    @Override
    public Trainer setParams(Parameters params) {
        this.params = params;
        return getInstance();
    }

    /** Closes the TF session and drops all state, including the singleton itself. */
    @Override
    public void destroy() {
        trainingInterface.close();
        weights = null;
        params = null;
        instance = null;
    }
}
apache-2.0
XillioQA/xill-platform-3.4
xill-ide-core/src/main/java/nl/xillio/xill/util/settings/SettingsHandler.java
4320
/**
 * Copyright (C) 2014 Xillio (support@xillio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package nl.xillio.xill.util.settings;

import me.biesaart.utils.FileUtils;
import me.biesaart.utils.Log;
import nl.xillio.util.XillioHomeFolder;
import org.slf4j.Logger;

import java.io.File;
import java.io.IOException;

/**
 * Class that is main point for dealing with settings in Xill IDE
 * It encapsulates all settings handlers.
 *
 * @author Zbynek Hochmann
 */
public class SettingsHandler {

    // Location of the live settings file and its backup copy.
    private final static File SETTINGS_FILE = new File(XillioHomeFolder.forXillIDE(), "settings.cfg");
    private final static File SETTINGS_FILE_BACKUP = new File(XillioHomeFolder.forXillIDE(), "settings.cfg.bak");

    // Backing store shared by all sub-handlers.
    private ContentHandlerImpl content;

    // Singleton instance; assigned only by loadSettings().
    private static SettingsHandler settings;

    private SimpleVariableHandler simple;
    private ProjectSettingsHandler project;

    private static final Logger LOGGER = Log.get();

    /**
     * @return The instance of settings handler, or null if {@link #loadSettings()}
     *         has not been called yet
     */
    public static SettingsHandler getSettingsHandler() {
        return settings;
    }

    private SettingsHandler() throws IOException {// singleton class
        this.content = new ContentHandlerImpl(SETTINGS_FILE);
        this.content.init();
        // Sub-handlers share the single content store.
        this.simple = new SimpleVariableHandler(this.content);
        this.project = new ProjectSettingsHandler(this.content);
    }

    /**
     * @return The implementation of simple variable settings
     */
    public SimpleVariableHandler simple() {
        return this.simple;
    }

    /**
     * @return The implementation of project settings
     */
    public ProjectSettingsHandler project() {
        return this.project;
    }

    /**
     * Set the save mechanism (see {@link nl.xillio.xill.util.settings.ContentHandler#setManualCommit(boolean)})
     *
     * @param manual true = manual commit, false = auto commit (default)
     */
    public void setManualCommit(boolean manual) {
        this.content.setManualCommit(manual);
    }

    /**
     * Save all changes from last commit() if manual commit is on (see {@link nl.xillio.xill.util.settings.ContentHandler#commit()})
     */
    public void commit() {
        this.content.commit();
    }

    /**
     * Load settings from the settings file.
     * <p>
     * This method should be called once before the settings are used and can be called when the settings need to be reloaded.
     * <p>
     * When loading of settings succeeds, a backup is written. This backup can be recovered using {@link SettingsHandler#recoverSettings()}
     *
     * @throws IOException When the settings file cannot be parsed
     */
    public static void loadSettings() throws IOException {
        settings = new SettingsHandler();

        // Write a backup copy of the settings
        // Exceptions thrown here should not be propagated (the IDE should just continue loading)
        try {
            FileUtils.copyFile(SETTINGS_FILE, SETTINGS_FILE_BACKUP);
        } catch(IOException e) {
            LOGGER.error("Could not write settings backup", e);
        }
    }

    /**
     * Overwrite the settings file by a previously created backup
     * @throws IOException When recovery fails
     */
    public static void recoverSettings() throws IOException{
        // Note: only restores the file on disk; call loadSettings() afterwards to re-read it.
        FileUtils.copyFile(SETTINGS_FILE_BACKUP, SETTINGS_FILE);
    }

    /**
     * Overwrite the current settings by the defaults and load them
     * @throws IOException When writing default settings or loading them fails
     */
    public static void forceDefaultSettings() throws IOException {
        // Deleting the file makes the content handler recreate defaults on init.
        FileUtils.forceDelete(SETTINGS_FILE);
        loadSettings();
    }
}
apache-2.0
travelersun/common
common-utils/src/main/java/com/travelersun/utils/exception/BaseRuntimeException.java
1126
package com.travelersun.utils.exception; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.time.DateFormatUtils; import org.springframework.core.NestedRuntimeException; public abstract class BaseRuntimeException extends NestedRuntimeException { private static final long serialVersionUID = -23347847086757165L; private String errorCode; public BaseRuntimeException(String message) { super(message); } public BaseRuntimeException(String errorCode, String message) { super(message); this.errorCode = errorCode; } public BaseRuntimeException(String msg, Throwable cause) { super(msg, cause); } public String getErrorCode() { return errorCode; } //生成一个异常流水号,追加到错误消息上显示到前端用户,用户反馈问题时给出此流水号给运维或开发人员快速定位对应具体异常细节 public static String buildExceptionCode() { return "ERR" + DateFormatUtils.format(new java.util.Date(), "yyMMddHHmmss") + RandomStringUtils.randomNumeric(3); } }
apache-2.0
phax/ph-ubl
ph-ubltr/src/main/java/com/helger/ubltr/UBLTRWriterBuilder.java
2186
/*
 * Copyright (C) 2014-2022 Philip Helger (www.helger.com)
 * philip[at]helger[dot]com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.helger.ubltr;

import javax.annotation.Nonnull;
import javax.annotation.concurrent.NotThreadSafe;

import com.helger.jaxb.builder.JAXBWriterBuilder;
import com.helger.xml.namespace.MapBasedNamespaceContext;

/**
 * A writer builder for UBLTR documents.
 *
 * @author Philip Helger
 * @param <JAXBTYPE>
 *        The UBLTR implementation class to be written
 */
@NotThreadSafe
public class UBLTRWriterBuilder <JAXBTYPE> extends JAXBWriterBuilder <JAXBTYPE, UBLTRWriterBuilder <JAXBTYPE>>
{
  /**
   * Constructor for an explicit document type.
   *
   * @param eDocType
   *        The UBLTR document type to be written. May not be <code>null</code>.
   */
  public UBLTRWriterBuilder (@Nonnull final EUBLTRDocumentType eDocType)
  {
    super (eDocType);

    // Create a special namespace context for the passed document type
    final MapBasedNamespaceContext aNSContext = new UBLTRNamespaceContext ();
    // For backwards compatibility we leave this potential override (from "hr"
    // to "")
    aNSContext.addDefaultNamespaceURI (m_aDocType.getNamespaceURI ());
    setNamespaceContext (aNSContext);
  }

  /**
   * Constructor that derives the document type from the implementation class.
   *
   * @param aClass
   *        The UBLTR implementation class to be written. May not be <code>null</code>.
   */
  public UBLTRWriterBuilder (@Nonnull final Class <JAXBTYPE> aClass)
  {
    this (UBLTRDocumentTypes.getDocumentTypeOfImplementationClass (aClass));
  }

  /**
   * Create a new writer builder.
   *
   * @param aClass
   *        The UBL class to be written. May not be <code>null</code>.
   * @return The new writer builder. Never <code>null</code>.
   * @param <T>
   *        The UBLTR document implementation type
   */
  @Nonnull
  public static <T> UBLTRWriterBuilder <T> create (@Nonnull final Class <T> aClass)
  {
    return new UBLTRWriterBuilder <> (aClass);
  }
}
apache-2.0
Blazebit/blaze-security
entity/api/src/main/java/com/blazebit/security/entity/EntityIdAccessor.java
155
package com.blazebit.security.entity;

import java.io.Serializable;

/**
 * Strategy interface for extracting the identifier of a persistent entity.
 * Implementations decide how an entity object maps to its {@link Serializable}
 * id (e.g. via reflection, a JPA metamodel, or a known base class).
 */
public interface EntityIdAccessor {

    /**
     * Returns the identifier of the given entity.
     *
     * @param entity the entity whose identifier should be resolved
     * @return the entity's identifier
     */
    // The redundant "public" modifier was dropped: interface members are
    // implicitly public (Java Language Specification §9.4).
    Serializable getId(Object entity);
}
apache-2.0
nms-htc/eip-vnm
src/main/java/com/nms/vnm/eip/web/util/MessageUtil.java
3124
/**
 * Copyright (C) 2014 Next Generation Mobile Service JSC., (NMS). All rights
 * reserved.
 */
package com.nms.vnm.eip.web.util;

import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.faces.application.FacesMessage;
import javax.faces.context.FacesContext;

/**
 * Static helper for adding localized global JSF messages. Message keys are
 * resolved against the application resource bundle; unknown keys fall back to
 * the key itself (with a warning logged).
 *
 * @author Nguyen Trong Cuong
 * @since 08/26/2014
 * @version 1.0
 */
public class MessageUtil {

    public static final String APP_RESOURCE_BUNDLE_NAME = "com.nms.i18n.Bundle";
    private static final Logger LOGGER = Logger.getLogger(MessageUtil.class.getName());
    public static final String REQUEST_SUCCESS_MESSAGE = "your-request-has-been-successfully-implemented";
    public static final String REQUEST_FAIL_MESSAGE = "your-request-fails";

    /** Utility class — not meant to be instantiated. */
    private MessageUtil() {
    }

    /**
     * Returns the application resource bundle for the current default locale.
     * ResourceBundle caches bundles internally, so this is cheap to call.
     */
    public static ResourceBundle getResourceBundle() {
        return ResourceBundle.getBundle(APP_RESOURCE_BUNDLE_NAME);
    }

    /**
     * Get a string for given key from application resource bundle.
     *
     * @param key the key for the desired string
     * @return the string for the given key, or the key itself if missing
     */
    public static String getBundleMessage(String key) {
        String ret = key;
        try {
            ret = getResourceBundle().getString(key);
        } catch (MissingResourceException e) {
            // Deliberate fallback: show the raw key rather than failing the request.
            LOGGER.log(Level.WARNING,
                    "[MessageUtil] resource with key {0} not found in application "
                    + "resource bundle, exception message : {1}",
                    new Object[]{key, e.toString()});
        }
        return ret;
    }

    /** Adds a global INFO message resolved from the bundle. */
    public static void addGlobalInfoMessage(String message) {
        addGlobalMessage(FacesMessage.SEVERITY_INFO, message);
    }

    /** Adds a global ERROR message resolved from the bundle. */
    public static void addGlobalErrorMessage(String message) {
        addGlobalMessage(FacesMessage.SEVERITY_ERROR, message);
    }

    /** Adds a global ERROR message with the root-cause text as detail. */
    public static void addGlobalErrorMessage(String summary, Throwable t) {
        addGlobalMessage(FacesMessage.SEVERITY_ERROR, summary,
                JsfUtil.getRootCause(t).getLocalizedMessage());
    }

    /** Adds a global ERROR message whose summary is the root-cause text. */
    @SuppressWarnings("ThrowableResultIgnored")
    public static void addGlobalErrorMessage(Throwable t) {
        addGlobalMessage(FacesMessage.SEVERITY_ERROR,
                JsfUtil.getRootCause(t).getLocalizedMessage());
    }

    /** Adds a global WARN message resolved from the bundle. */
    public static void addGlobalWarnMessage(String message) {
        addGlobalMessage(FacesMessage.SEVERITY_WARN, message);
    }

    /** Adds a global FATAL message resolved from the bundle. */
    public static void addGlobalFatalMessage(String message) {
        addGlobalMessage(FacesMessage.SEVERITY_FATAL, message);
    }

    /**
     * Adds a global message (client id {@code null}) with an empty detail.
     * The summary is first resolved through the application bundle.
     */
    public static void addGlobalMessage(FacesMessage.Severity severity, String message) {
        FacesMessage msg = new FacesMessage(severity, getBundleMessage(message), "");
        FacesContext.getCurrentInstance().addMessage(null, msg);
    }

    /**
     * Adds a global message; both summary and detail are resolved through the
     * application bundle.
     */
    public static void addGlobalMessage(FacesMessage.Severity severity, String summary, String detail) {
        FacesMessage msg = new FacesMessage(severity, getBundleMessage(summary), getBundleMessage(detail));
        FacesContext.getCurrentInstance().addMessage(null, msg);
    }
}
apache-2.0
wjsl/jaredcumulo
server/src/main/java/org/apache/accumulo/server/client/HdfsZooInstance.java
7644
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.accumulo.server.client;

import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.List;
import java.util.UUID;

import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.impl.ConnectorImpl;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.conf.AccumuloConfiguration;
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.security.CredentialHelper;
import org.apache.accumulo.core.security.thrift.TCredentials;
import org.apache.accumulo.core.util.ByteBufferUtil;
import org.apache.accumulo.core.util.OpTimer;
import org.apache.accumulo.core.util.StringUtil;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.accumulo.core.zookeeper.ZooUtil;
import org.apache.accumulo.fate.zookeeper.ZooCache;
import org.apache.accumulo.server.ServerConstants;
import org.apache.accumulo.server.conf.ServerConfiguration;
import org.apache.accumulo.server.zookeeper.ZooLock;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

/**
 * An implementation of Instance that looks in HDFS and ZooKeeper to find the master and root tablet location.
 *
 * <p>Server-side singleton: obtain via {@link #getInstance()}. Configuration
 * comes from the site configuration rather than from client properties.
 */
public class HdfsZooInstance implements Instance {

  public static class AccumuloNotInitializedException extends RuntimeException {
    private static final long serialVersionUID = 1L;

    public AccumuloNotInitializedException(String string) {
      super(string);
    }
  }

  // NOTE(review): this instance constructor assigns the *static* zooCache
  // field. That is only safe because construction happens exactly once,
  // under the synchronized getInstance() below — confirm before refactoring.
  private HdfsZooInstance() {
    AccumuloConfiguration acuConf = ServerConfiguration.getSiteConfiguration();
    zooCache = new ZooCache(acuConf.get(Property.INSTANCE_ZK_HOST), (int) acuConf.getTimeInMillis(Property.INSTANCE_ZK_TIMEOUT));
  }

  private static HdfsZooInstance cachedHdfsZooInstance = null;

  /** Lazily creates and returns the process-wide singleton instance. */
  public static synchronized Instance getInstance() {
    if (cachedHdfsZooInstance == null)
      cachedHdfsZooInstance = new HdfsZooInstance();
    return cachedHdfsZooInstance;
  }

  private static ZooCache zooCache;
  private static String instanceId = null;
  private static final Logger log = Logger.getLogger(HdfsZooInstance.class);

  /**
   * Looks up the root tablet location in the ZooKeeper cache.
   *
   * @return "host:port" of the root tablet server, or null if not set in ZK
   */
  @Override
  public String getRootTabletLocation() {
    String zRootLocPath = ZooUtil.getRoot(this) + Constants.ZROOT_TABLET_LOCATION;

    OpTimer opTimer = new OpTimer(log, Level.TRACE).start("Looking up root tablet location in zoocache.");

    byte[] loc = zooCache.get(zRootLocPath);

    opTimer.stop("Found root tablet at " + (loc == null ? null : new String(loc)) + " in %DURATION%");

    if (loc == null) {
      return null;
    }

    // Stored value is "location|session"; callers only need the location.
    // NOTE(review): new String(byte[]) uses the platform charset — presumably
    // the writer used the same; confirm if this ever runs cross-platform.
    return new String(loc).split("\\|")[0];
  }

  /**
   * Returns the current master location read from the master ZooKeeper lock,
   * or an empty list when no master holds the lock.
   */
  @Override
  public List<String> getMasterLocations() {
    String masterLocPath = ZooUtil.getRoot(this) + Constants.ZMASTER_LOCK;

    OpTimer opTimer = new OpTimer(log, Level.TRACE).start("Looking up master location in zoocache.");

    byte[] loc = ZooLock.getLockData(zooCache, masterLocPath, null);

    opTimer.stop("Found master at " + (loc == null ? null : new String(loc)) + " in %DURATION%");

    if (loc == null) {
      return Collections.emptyList();
    }

    return Collections.singletonList(new String(loc));
  }

  /** Returns the instance id, reading it from HDFS on first access. */
  @Override
  public String getInstanceID() {
    if (instanceId == null)
      _getInstanceID();
    return instanceId;
  }

  // Synchronized double-check companion of getInstanceID(): reads the id from
  // the instance-id file stored in HDFS exactly once per JVM.
  private static synchronized void _getInstanceID() {
    if (instanceId == null) {
      @SuppressWarnings("deprecation")
      String instanceIdFromFile = ZooKeeperInstance.getInstanceIDFromHdfs(ServerConstants.getInstanceIdLocation());
      instanceId = instanceIdFromFile;
    }
  }

  /** Resolves the human-readable instance name from the instance UUID. */
  @Override
  public String getInstanceName() {
    return ZooKeeperInstance.lookupInstanceName(zooCache, UUID.fromString(getInstanceID()));
  }

  /** Comma-separated ZooKeeper hosts from the site configuration. */
  @Override
  public String getZooKeepers() {
    return ServerConfiguration.getSiteConfiguration().get(Property.INSTANCE_ZK_HOST);
  }

  /** ZooKeeper session timeout (ms) from the site configuration. */
  @Override
  public int getZooKeepersSessionTimeOut() {
    return (int) ServerConfiguration.getSiteConfiguration().getTimeInMillis(Property.INSTANCE_ZK_TIMEOUT);
  }

  @Override
  // Not really deprecated, just not for client use
  public Connector getConnector(String principal, AuthenticationToken token) throws AccumuloException, AccumuloSecurityException {
    return getConnector(CredentialHelper.create(principal, token, getInstanceID()));
  }

  @SuppressWarnings("deprecation")
  private Connector getConnector(TCredentials cred) throws AccumuloException, AccumuloSecurityException {
    return new ConnectorImpl(this, cred);
  }

  @Override
  // Not really deprecated, just not for client use
  public Connector getConnector(String user, byte[] pass) throws AccumuloException, AccumuloSecurityException {
    return getConnector(user, new PasswordToken(pass));
  }

  @Override
  // Not really deprecated, just not for client use
  public Connector getConnector(String user, ByteBuffer pass) throws AccumuloException, AccumuloSecurityException {
    return getConnector(user, ByteBufferUtil.toBytes(pass));
  }

  @Override
  public Connector getConnector(String user, CharSequence pass) throws AccumuloException, AccumuloSecurityException {
    return getConnector(user, TextUtil.getBytes(new Text(pass.toString())));
  }

  private AccumuloConfiguration conf = null;

  /** Lazily built server configuration; overridable via setConfiguration(). */
  @Override
  public AccumuloConfiguration getConfiguration() {
    if (conf == null)
      conf = new ServerConfiguration(this).getConfiguration();
    return conf;
  }

  @Override
  public void setConfiguration(AccumuloConfiguration conf) {
    this.conf = conf;
  }

  // Diagnostic entry point: prints instance name/id, ZooKeepers and masters.
  public static void main(String[] args) {
    Instance instance = HdfsZooInstance.getInstance();
    System.out.println("Instance Name: " + instance.getInstanceName());
    System.out.println("Instance ID: " + instance.getInstanceID());
    System.out.println("ZooKeepers: " + instance.getZooKeepers());
    System.out.println("Masters: " + StringUtil.join(instance.getMasterLocations(), ", "));
  }

  /**
   * Closes the shared ZooKeeper cache.
   *
   * @throws AccumuloException if interrupted while closing
   */
  // NOTE(review): the InterruptedException is wrapped without re-interrupting
  // the thread (Thread.currentThread().interrupt()) — flagged, left as-is.
  @Override
  public void close() throws AccumuloException {
    try {
      zooCache.close();
    } catch (InterruptedException e) {
      throw new AccumuloException("Issues closing ZooKeeper, try again");
    }
  }

  @Deprecated
  @Override
  public Connector getConnector(org.apache.accumulo.core.security.thrift.AuthInfo auth) throws AccumuloException, AccumuloSecurityException {
    return getConnector(auth.user, auth.getPassword());
  }
}
apache-2.0
fedevelatec/asic-core
src/main/java/com/fedevela/core/cod/model/DataSourceConnection.java
4617
package com.fedevela.core.cod.model;

/**
 * Created by Federico on 20/04/14.
 */

import com.fedevela.core.cod.CDCException;
import com.fedevela.core.cod.ResourceBundleHandler;

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import javax.naming.Context;
import javax.naming.InitialContext;
import javax.sql.DataSource;

/**
 * Wrapper around a JDBC {@link Connection} obtained from a JNDI data source.
 * The data source name is read from a resource bundle under the key
 * {@code <prefix>.DSLookUp}. All JDBC failures are wrapped in
 * {@link CDCException}.
 */
public class DataSourceConnection {

    private Connection connection = null;
    private ResourceBundleHandler resourceBundleHandler = null;
    private String prefix = null;

    /**
     * @param source name of the resource bundle holding the configuration
     * @param prefix key prefix; {@code prefix + ".DSLookUp"} must name the data source
     * @throws CDCException if either argument is null or the bundle cannot be loaded
     */
    public DataSourceConnection(String source, String prefix) throws CDCException {
        if ((source == null) || (prefix == null)) {
            throw new CDCException("lng.msg.error.datasourceconnection");
        }
        this.prefix = prefix;
        try {
            resourceBundleHandler = new ResourceBundleHandler(source);
        } catch (Exception e) {
            throw new CDCException(e);
        }
    }

    /**
     * Rolls back the current transaction.
     *
     * @throws CDCException if the connection is closed or the rollback fails
     */
    public void rollback() throws CDCException {
        if (!isOpen()) {
            throw new CDCException("Connection is close.");
        }
        try {
            connection.rollback();
        } catch (SQLException e) {
            // Catch narrowed from Exception for consistency with commit():
            // the "Connection is close." CDCException above is no longer
            // re-wrapped inside another CDCException.
            throw new CDCException(e);
        }
    }

    /**
     * Looks up the configured data source in JNDI and opens a connection.
     *
     * <p>BUG FIX: the original returned from a {@code finally} block, which
     * silently discarded the CDCException thrown on lookup/connect failure
     * (abrupt completion in try-finally, JLS §14.20.2) — callers saw
     * {@code false} instead of the cause. Failures now propagate as declared.
     * The dead {@code ctx == null} check was also removed: the
     * InitialContext constructor never returns null.
     *
     * @return true if a connection was obtained and is open
     * @throws CDCException if the JNDI lookup or connection attempt fails
     */
    public boolean open() throws CDCException {
        try {
            Context ctx = new InitialContext();
            DataSource ds = (DataSource) ctx.lookup("java:comp/env/"
                    + resourceBundleHandler.getValue(prefix + ".DSLookUp"));
            connection = ((ds != null) ? ds.getConnection() : null);
        } catch (Exception e) {
            throw new CDCException(e);
        }
        return isOpen();
    }

    /**
     * @return true if the wrapped connection exists and is not closed
     * @throws CDCException if the JDBC driver fails to report the state
     */
    public boolean isOpen() throws CDCException {
        try {
            return (connection != null) && (!connection.isClosed());
        } catch (SQLException ex) {
            throw new CDCException(ex);
        }
    }

    /**
     * @return the underlying open connection
     * @throws CDCException if the connection is closed
     */
    public Connection getConnection() throws CDCException {
        if (isOpen()) {
            return connection;
        } else {
            throw new CDCException("Connection is close.");
        }
    }

    /**
     * Closes the connection (if open) and clears all internal state so the
     * wrapper cannot be reused afterwards.
     */
    public void close() throws CDCException {
        try {
            if (isOpen()) {
                connection.close();
            }
        } catch (SQLException e) {
            throw new CDCException(e);
        } finally {
            connection = null;
            resourceBundleHandler = null;
            prefix = null;
        }
    }

    /**
     * Prepares a callable statement on the open connection.
     *
     * @throws CDCException if the connection is closed or preparation fails
     */
    public CallableStatement prepareCall(String sql) throws CDCException {
        if (!isOpen()) {
            throw new CDCException("Connection is close.");
        }
        try {
            return connection.prepareCall(sql);
        } catch (SQLException e) {
            throw new CDCException(e);
        }
    }

    /**
     * Prepares a statement on the open connection.
     *
     * @throws CDCException if the connection is closed or preparation fails
     */
    public PreparedStatement prepareStatement(String sql) throws CDCException {
        if (!isOpen()) {
            throw new CDCException("Connection is close.");
        }
        try {
            return connection.prepareStatement(sql);
        } catch (SQLException ex) {
            throw new CDCException(ex);
        }
    }

    /**
     * @return the database metadata of the open connection
     * @throws CDCException if the connection is closed or the call fails
     */
    public DatabaseMetaData getMetaData() throws CDCException {
        if (!isOpen()) {
            throw new CDCException("Connection is close.");
        }
        try {
            return connection.getMetaData();
        } catch (SQLException e) {
            throw new CDCException(e);
        }
    }

    /**
     * Commits the current transaction.
     *
     * @throws CDCException if the connection is closed or the commit fails
     */
    public void commit() throws CDCException {
        if (!isOpen()) {
            throw new CDCException("Connection is close.");
        }
        try {
            connection.commit();
        } catch (SQLException ex) {
            throw new CDCException(ex);
        }
    }

    /**
     * Switches auto-commit mode on the open connection.
     *
     * @throws CDCException if the connection is closed or the call fails
     */
    public void setAutoCommit(boolean b) throws CDCException {
        if (!isOpen()) {
            throw new CDCException("Connection is close.");
        }
        try {
            connection.setAutoCommit(b);
        } catch (SQLException e) {
            throw new CDCException(e);
        }
    }
}
apache-2.0
Buble1981/MyDroolsFork
drools-core/src/main/java/org/drools/core/reteoo/MemoryVisitor.java
11809
/*
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.reteoo;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.Field;

import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.util.AbstractHashTable;
import org.drools.core.util.Entry;
import org.drools.core.util.FastIterator;
import org.drools.core.util.LinkedList;
import org.drools.core.util.ObjectHashSet;
import org.drools.core.util.ReflectiveVisitor;
import org.drools.core.util.index.RightTupleIndexHashTable;
import org.drools.core.util.index.RightTupleList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Diagnostic visitor that walks a Rete network (dispatch is by method name
 * via ReflectiveVisitor) and cross-checks the node memories of a working
 * memory: for each hash-based memory it re-counts the entries by iteration
 * and logs "error" whenever the recount disagrees with the memory's own
 * size() bookkeeping. Output goes to the logger at INFO level, indented to
 * mirror network depth.
 */
public class MemoryVisitor extends ReflectiveVisitor
    implements
    Externalizable {
    // NOTE(review): 'transient' has no effect on a static field.
    protected static transient Logger logger = LoggerFactory.getLogger(MemoryVisitor.class);

    private InternalWorkingMemory workingMemory;
    // Current nesting depth, used only for log indentation.
    private int                   indent = 0;

    /**
     * Constructor.
     */
    public MemoryVisitor() {
    }

    public MemoryVisitor(final InternalWorkingMemory workingMemory) {
        this.workingMemory = workingMemory;
    }

    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        workingMemory = (InternalWorkingMemory) in.readObject();
        indent = in.readInt();
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject( workingMemory );
        out.writeInt( indent );
    }

    /**
     * Entry point: a RuleBaseImpl is visited through its Rete.
     */
    public void visitReteooRuleBase(final ReteooRuleBase ruleBase) {
        visit( (ruleBase).getRete() );
    }

    /**
     * Rete visits each of its ObjectTypeNodes.
     */
    public void visitRete(final Rete rete) {
        for ( ObjectTypeNode node : rete.getObjectTypeNodes() ) {
            visit( node );
        }
    }

    // Checks the ObjectTypeNode's memory, then recurses into its sinks.
    // The private "sink" field has no public accessor, hence the reflection;
    // any reflective failure is logged and the subtree is skipped.
    public void visitObjectTypeNode(final ObjectTypeNode node) {
        logger.info( indent() + node );

        ObjectHashSet memory = (ObjectHashSet) workingMemory.getNodeMemory( node );
        checkObjectHashSet( memory );

        this.indent++;
        try {
            final Field field = ObjectSource.class.getDeclaredField( "sink" );
            field.setAccessible( true );
            final ObjectSinkPropagator sink = (ObjectSinkPropagator) field.get( node );
            final ObjectSink[] sinks = sink.getSinks();
            for ( int i = 0, length = sinks.length; i < length; i++ ) {
                visit( sinks[i] );
            }
        } catch ( final Exception e ) {
            e.printStackTrace();
        }
        this.indent--;
    }

    // AlphaNodes have no own memory to check; only recurse into sinks.
    public void visitAlphaNode(final AlphaNode node) {
        logger.info( indent() + node );

        this.indent++;
        try {
            final Field field = ObjectSource.class.getDeclaredField( "sink" );
            field.setAccessible( true );
            final ObjectSinkPropagator sink = (ObjectSinkPropagator) field.get( node );
            final ObjectSink[] sinks = sink.getSinks();
            for ( int i = 0, length = sinks.length; i < length; i++ ) {
                visit( sinks[i] );
            }
        } catch ( final Exception e ) {
            e.printStackTrace();
        }
        this.indent--;
    }

    // Transition from the object network to the tuple network; recurse into
    // the LeftTupleSink children (again via the private "sink" field).
    public void visitLeftInputAdapterNode(final LeftInputAdapterNode node) {
        logger.info( indent() + node );

        this.indent++;
        try {
            final Field field = LeftTupleSource.class.getDeclaredField( "sink" );
            field.setAccessible( true );
            final LeftTupleSinkPropagator sink = (LeftTupleSinkPropagator) field.get( node );
            final LeftTupleSink[] sinks = sink.getSinks();
            for ( int i = 0, length = sinks.length; i < length; i++ ) {
                visit( sinks[i] );
            }
        } catch ( final Exception e ) {
            e.printStackTrace();
        }
        this.indent--;
    }

    // Beta node: verify both sides of its BetaMemory, then recurse.
    public void visitJoinNode(final JoinNode node) {
        logger.info( indent() + node );

        try {
            final BetaMemory memory = (BetaMemory) this.workingMemory.getNodeMemory( node );
            checkObjectHashTable( memory.getRightTupleMemory() );
            checkLeftTupleMemory( memory.getLeftTupleMemory() );
        } catch ( final Exception e ) {
            e.printStackTrace();
        }

        this.indent++;
        try {
            final Field field = LeftTupleSource.class.getDeclaredField( "sink" );
            field.setAccessible( true );
            final LeftTupleSinkPropagator sink = (LeftTupleSinkPropagator) field.get( node );
            final LeftTupleSink[] sinks = sink.getSinks();
            for ( int i = 0, length = sinks.length; i < length; i++ ) {
                visit( sinks[i] );
            }
        } catch ( final Exception e ) {
            e.printStackTrace();
        }
        this.indent--;
    }

    // Same shape as visitJoinNode, for NotNodes.
    public void visitNotNode(final NotNode node) {
        logger.info( indent() + node );
        try {
            final BetaMemory memory = (BetaMemory) this.workingMemory.getNodeMemory( node );
            checkObjectHashTable( memory.getRightTupleMemory() );
            checkLeftTupleMemory( memory.getLeftTupleMemory() );
        } catch ( final Exception e ) {
            e.printStackTrace();
        }

        this.indent++;
        try {
            final Field field = LeftTupleSource.class.getDeclaredField( "sink" );
            field.setAccessible( true );
            final LeftTupleSinkPropagator sink = (LeftTupleSinkPropagator) field.get( node );
            final LeftTupleSink[] sinks = sink.getSinks();
            for ( int i = 0, length = sinks.length; i < length; i++ ) {
                visit( sinks[i] );
            }
        } catch ( final Exception e ) {
            e.printStackTrace();
        }
        this.indent--;
    }

    // Leaf of the walk: terminal node memory checking is currently disabled.
    public void visitRuleTerminalNode(final RuleTerminalNode node) {
        logger.info( indent() + node );
        //        final TerminalNodeMemory memory = (TerminalNodeMemory) this.workingMemory.getNodeMemory( node );
        //        checkLeftTupleMemory( memory.getTupleMemory() );
    }

    // (A commented-out checkObjectHashMap(ObjectHashMap) variant used to live
    // here; removed as dead code.)

    // Recounts the set's entries by walking each bucket chain and compares
    // the total with memory.size().
    private void checkObjectHashSet(ObjectHashSet memory) {
        FastIterator it = LinkedList.fastIterator;
        final Entry[] entries = memory.getTable();
        int factCount = 0;
        // NOTE(review): bucketCount is never incremented or logged here —
        // apparently leftover from checkRightTupleIndexHashTable below.
        int bucketCount = 0;
        for ( int i = 0, length = entries.length; i < length; i++ ) {
            if ( entries[i] != null ) {
                Entry entry = (Entry ) entries[i];
                while ( entry != null ) {
                    entry = it.next( entry );
                    factCount++;
                }
            }
        }

        logger.info( indent() + "ObjectHashSet: " + memory.size() + ":" + factCount );
        if( factCount != memory.size() ) {
            logger.info( indent() + "error" );
        }
    }

    // Dispatches to the concrete right-tuple memory checker; unknown
    // implementations are a programming error.
    private void checkObjectHashTable(final RightTupleMemory memory) {
        if ( memory instanceof RightTupleList ) {
            checkRightTupleList( (RightTupleList) memory );
        } else if ( memory instanceof RightTupleIndexHashTable ) {
            checkRightTupleIndexHashTable( (RightTupleIndexHashTable) memory );
        } else {
            throw new RuntimeException( memory.getClass() + " should not be here" );
        }
    }

    // Counts tuples in a flat (unindexed) list and compares with size().
    private void checkRightTupleList(final RightTupleList memory) {
        int count = 0;

        FastIterator rightIt = memory.fastIterator();

        for ( RightTuple rightTuple = memory.getFirst( ); rightTuple != null; rightTuple = (RightTuple) rightIt.next( rightTuple ) ) {
            count++;
        }

        logger.info( indent() + "FactHashTable: " + memory.size() + ":" + count );
        if ( memory.size() != count ) {
            logger.info( indent() + "error" );
        }
    }

    // Counts both facts and buckets of an indexed table; the private "size"
    // field of AbstractHashTable (bucket count) is read reflectively because
    // it has no accessor, and both counts are compared to the bookkeeping.
    private void checkRightTupleIndexHashTable(final RightTupleIndexHashTable memory) {
        final Entry[] entries = memory.getTable();
        int factCount = 0;
        int bucketCount = 0;
        FastIterator it = LinkedList.fastIterator;
        for ( int i = 0, length = entries.length; i < length; i++ ) {
            if ( entries[i] != null ) {
                RightTupleList rightTupleList = (RightTupleList) entries[i];
                while ( rightTupleList != null ) {
                    if ( rightTupleList.first != null ) {
                        Entry entry = rightTupleList.first;
                        while ( entry != null ) {
                            entry = it.next( entry );
                            factCount++;
                        }
                    } else {
                        logger.info( "error : fieldIndexHashTable cannot have empty FieldIndexEntry objects" );
                    }
                    rightTupleList = (RightTupleList) rightTupleList.getNext();
                    bucketCount++;
                }
            }
        }

        try {
            final Field field = AbstractHashTable.class.getDeclaredField( "size" );
            field.setAccessible( true );
            logger.info( indent() + "FieldIndexBuckets: " + ((Integer) field.get( memory )).intValue() + ":" + bucketCount );
            if ( ((Integer) field.get( memory )).intValue() != bucketCount ) {
                logger.info( indent() + "error" );
            }
        } catch ( final Exception e ) {
            e.printStackTrace();
        }

        logger.info( indent() + "FieldIndexFacts: " + memory.size() + ":" + factCount );
        if ( memory.size() != factCount ) {
            logger.info( indent() + "error" );
        }
    }

    // Intentionally a no-op for now.
    private void checkLeftTupleMemory(final LeftTupleMemory memory) {
        // @todo need to implement this correctly, as we now have indexed and none indxed tuple memories.
    }

    // Two spaces of indentation per network level, for log readability.
    private String indent() {
        final StringBuilder buffer = new StringBuilder();
        for ( int i = 0; i < this.indent; i++ ) {
            buffer.append( "  " );
        }
        return buffer.toString();
    }
}
apache-2.0
zhangtianqiu/githubandroid
app/src/test/java/com/github/pockethub/ui/MainActivityTest.java
4646
package com.github.pockethub.ui;

import android.accounts.Account;
import android.accounts.AccountManager;
import android.accounts.AccountManagerCallback;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.Fragment;
import android.view.MenuItem;

import com.alorma.github.sdk.bean.dto.response.Organization;
import com.github.pockethub.BuildConfig;
import com.github.pockethub.R;
import com.github.pockethub.ui.gist.GistsPagerFragment;
import com.github.pockethub.ui.issue.FilterListFragment;
import com.github.pockethub.ui.issue.IssueDashboardPagerFragment;
import com.github.pockethub.ui.user.HomePagerFragment;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;

import java.util.ArrayList;
import java.util.List;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Robolectric tests for {@link MainActivity}: verifies that each navigation
 * drawer item switches to the expected fragment, and that logging out removes
 * every GitHub account from the AccountManager.
 */
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class, sdk = 21)
public class MainActivityTest {

    private MockMainActivity mockMainActivity;
    // Static on purpose: the MockMainActivity subclass below reports the
    // fragment it switched to through this field, and reads the mocked
    // AccountManager from mockManager. Both are re-initialized per test via
    // setup()/switchFragment(), so no state leaks between tests.
    static Fragment fragment;
    static AccountManager mockManager;
    private ArgumentCaptor<Account> argumentCaptor;
    private Account[] accounts;

    /**
     * Builds the activity under test, seeds it with one Organization (as if
     * the loader finished), and installs an AccountManager mock that reports
     * two GitHub accounts.
     */
    @Before
    public void setup() {
        mockMainActivity = Robolectric.buildActivity(MockMainActivity.class).create().get();
        List<Organization> org = new ArrayList<>();
        org.add(new Organization());
        Account firstGitHubAccount = new Account("GitHubAccount", "com.github");
        Account secondGitHubAccount = new Account("GitHubAccount2", "com.github");
        accounts = new Account[]{firstGitHubAccount, secondGitHubAccount};
        mockManager = mock(AccountManager.class);
        when(mockManager.getAccountsByType(RuntimeEnvironment.application.getString(R.string.account_type))).thenReturn(accounts);
        // null loader is acceptable here: onLoadFinished only consumes the data.
        mockMainActivity.onLoadFinished(null, org);
        argumentCaptor = ArgumentCaptor.forClass(Account.class);
    }

    // Builds a MenuItem mock that reports the given navigation item id.
    private MenuItem getMockMenuItem(int id) {
        MenuItem mockedMenuItem = mock(MenuItem.class);
        when(mockedMenuItem.getItemId()).thenReturn(id);
        return mockedMenuItem;
    }

    /** Home drawer item must switch to the HomePagerFragment. */
    @Test
    public void testNavigationDrawerClickListenerPos1_ShouldReplaceHomePagerFragmentToContainer() {
        mockMainActivity.onNavigationItemSelected(getMockMenuItem(R.id.navigation_home));
        assertThat(fragment, is(instanceOf(HomePagerFragment.class)));
    }

    /** Gists drawer item must switch to the GistsPagerFragment. */
    @Test
    public void testNavigationDrawerClickListenerPos2_ShouldReplaceGistsPagerFragmentToContainer() {
        mockMainActivity.onNavigationItemSelected(getMockMenuItem(R.id.navigation_gists));
        assertThat(fragment, is(instanceOf(GistsPagerFragment.class)));
    }

    /** Issue dashboard drawer item must switch to the IssueDashboardPagerFragment. */
    @Test
    public void testNavigationDrawerClickListenerPos3_ShouldReplaceIssueDashboardPagerFragmentToContainer() {
        mockMainActivity.onNavigationItemSelected(getMockMenuItem(R.id.navigation_issue_dashboard));
        assertThat(fragment, is(instanceOf(IssueDashboardPagerFragment.class)));
    }

    /** Bookmarks drawer item must switch to the FilterListFragment. */
    @Test
    public void testNavigationDrawerClickListenerPos4_ShouldReplaceFilterListFragmentToContainer() {
        mockMainActivity.onNavigationItemSelected(getMockMenuItem(R.id.navigation_bookmarks));
        assertThat(fragment, is(instanceOf(FilterListFragment.class)));
    }

    /**
     * Log-out must call removeAccount once per stored GitHub account, in
     * order. (Name kept for compatibility; "testLogOutRemovesAllAccounts"
     * would describe it better.)
     */
    @Test
    public void test() {
        mockMainActivity.onNavigationItemSelected(getMockMenuItem(R.id.navigation_log_out));
        verify(mockManager, times(2)).removeAccount(argumentCaptor.capture(), (AccountManagerCallback<Boolean>) anyObject(), (Handler) anyObject());
        List<Account> values = argumentCaptor.getAllValues();
        assertThat(values.get(0), is(equalTo(accounts[0])));
        assertThat(values.get(1), is(equalTo(accounts[1])));
    }

    /**
     * Test double that exposes which fragment MainActivity switched to and
     * substitutes the mocked AccountManager for the real one.
     */
    public static class MockMainActivity extends MainActivity {
        @Override
        void switchFragment(Fragment frag, Organization org) {
            super.switchFragment(frag, org);
            fragment = frag;
        }

        @Override
        AccountManager getAccountManager() {
            return mockManager;
        }
    }
}
apache-2.0
arvis/formhelper
src/test/java/com/viestards/formhelper/InputFieldTest.java
2671
package com.viestards.formhelper; import static org.junit.Assert.*; import org.junit.Before; import org.junit.Test; import com.viestards.formhelper.InputField; public class InputFieldTest { private static InputField input; @Before public void setUp(){ input=new InputField("testname", ""); } @Test public void testGetField(){ String result =input.getField(); assertEquals("<input type='text' name='testname' id='testname' value='' />", result); } @Test public void testGetFieldLabel(){ String result =input.getFieldLabel(); assertEquals("<label for='testname' >testname</label>", result); } @Test public void testError(){ String result =input.getError(""); assertEquals(" ", result); } @Test public void testCustomLabel(){ input.setLabel("Custum one"); String result =input.getFieldLabel(); assertEquals("<label for='testname' >Custum one</label>", result); } @Test public void testGenerate() { String result =input.generate(); assertEquals("<label for='testname' >testname</label><input name='testname' id='testname' value='' /> ", result); } /** * tests if html is correctly escaped and harmfull scripts can not be executed * */ @Test public void testGenerateHtmlEscape() { InputField harmfullInput=new InputField("simple", "value"); harmfullInput.setLabel("harmfull imput<script>alert('All your base are belong to us!')</script> "); String result =harmfullInput.generate(); //System.out.println(result); assertEquals("<label for='simple' >" + "harmfull imput&lt;script&gt;" + "alert('All your base are belong to us!')&lt;/script&gt; </label>" + "<input type='text' name='simple' id='simple' value='value' /> ", result); } /** * testing if different characters (russian,baltic etc) are displayed correctly * */ @Test public void testEscapingAccents(){ input.setLabel("glāžšķūņu Преве́д" ); final String result=input.getFieldLabel(); //System.out.println(result); assertEquals("<label for='testname' >glāž&scaron;ķūņu Преве́д</label>", result); } /** * testing that value data should not be escaped 
* */ @Test public void testEscpapingValue(){ input.setValue("glāžšķūņu Преве́д" ); final String result=input.getField(); assertEquals("<input type='text' name='testname' id='testname' value='glāž&scaron;ķūņu Преве́д' />", result); } /** * Test if display is correct if value is null * */ @Test public void testForNullValue(){ input.setValue(null); String result =input.getField(); assertEquals("<input type='text' name='testname' id='testname' value='' />", result); } }
apache-2.0
gdgjodhpur/gdgapp
app/src/main/java/org/gdg/frisbee/android/event/EventActivity.java
4087
/* * Copyright 2013 The GDG Frisbee Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gdg.frisbee.android.event; import android.content.Context; import android.os.Bundle; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentStatePagerAdapter; import android.support.v4.view.ViewPager; import android.view.MenuItem; import com.viewpagerindicator.TitlePageIndicator; import org.gdg.frisbee.android.Const; import org.gdg.frisbee.android.R; import org.gdg.frisbee.android.activity.GdgActivity; import org.gdg.frisbee.android.api.GroupDirectory; import butterknife.InjectView; /** * GDG Aachen * org.gdg.frisbee.android.activity * <p/> * User: maui * Date: 22.04.13 * Time: 23:03 */ public class EventActivity extends GdgActivity { private static String LOG_TAG = "GDG-EventActivity"; @InjectView(R.id.pager) ViewPager mViewPager; @InjectView(R.id.titles) TitlePageIndicator mIndicator; private EventPagerAdapter mViewPagerAdapter; private String mEventId; private GroupDirectory mClient; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_event); getSupportActionBar().setDisplayHomeAsUpEnabled(true); getSupportActionBar().setDisplayShowTitleEnabled(true); getSupportActionBar().setDisplayUseLogoEnabled(true); getSupportActionBar().setTitle(R.string.event); mIndicator.setOnPageChangeListener(this); mViewPagerAdapter = new 
EventPagerAdapter(this, getSupportFragmentManager()); mViewPager.setAdapter(mViewPagerAdapter); mIndicator.setViewPager(mViewPager); mEventId = getIntent().getStringExtra(Const.EXTRA_EVENT_ID); String section = getIntent().getStringExtra(Const.EXTRA_SECTION); if (EventPagerAdapter.SECTION_OVERVIEW.equals(section)){ mIndicator.setCurrentItem(0); } } protected String getTrackedViewName() { return "Event/"+getResources().getStringArray(R.array.about_tabs)[getCurrentPage()]; } @Override public boolean onOptionsItemSelected(MenuItem item) { if(item.getItemId() == android.R.id.home) { finish(); return true; } return super.onOptionsItemSelected(item); } public class EventPagerAdapter extends FragmentStatePagerAdapter { public static final String SECTION_OVERVIEW = "overview"; private Context mContext; public EventPagerAdapter(Context ctx, FragmentManager fm) { super(fm); mContext = ctx; } @Override public int getItemPosition(Object object) { return POSITION_NONE; } @Override public int getCount() { int count = mContext.getResources().getStringArray(R.array.event_tabs).length; return 1; } @Override public Fragment getItem(int position) { switch(position) { case 0: return EventOverviewFragment.createFor(mEventId); case 1: return new AgendaFragment(); case 2: return new MoreFragment(); } return null; } @Override public CharSequence getPageTitle(int position) { return mContext.getResources().getStringArray(R.array.event_tabs)[position]; } } }
apache-2.0
ibissource/iaf
core/src/test/java/nl/nn/adapterframework/jdbc/dbms/ConcurrentManagedTransactionTester.java
1052
package nl.nn.adapterframework.jdbc.dbms; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionDefinition; import nl.nn.adapterframework.core.IbisTransaction; import nl.nn.adapterframework.jta.SpringTxManagerProxy; import nl.nn.adapterframework.testutil.ConcurrentActionTester; public abstract class ConcurrentManagedTransactionTester extends ConcurrentActionTester { private PlatformTransactionManager txManager; private IbisTransaction mainItx; public ConcurrentManagedTransactionTester(PlatformTransactionManager txManager) { super(); this.txManager=txManager; } @Override public void initAction() throws Exception { TransactionDefinition txDef = SpringTxManagerProxy.getTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRES_NEW,20); mainItx = IbisTransaction.getTransaction(txManager, txDef, "ConcurrentManagedTransactionTester"); } @Override public void finalizeAction() throws Exception { if(mainItx != null) { mainItx.commit(); } } }
apache-2.0
shevek/simple-xml-serializers
simple-xml-serializers-spring/src/test/java/org/anarres/simplexml/serializers/spring/SimpleXmlSerializersConfigurationTest.java
2207
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.anarres.simplexml.serializers.spring;

import java.net.InetAddress;
import java.net.URI;
import org.anarres.simplexml.factory.PersisterFactory;
import org.anarres.simplexml.serializers.test.PersisterTestUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.simpleframework.xml.Attribute;
import org.simpleframework.xml.Element;
import org.simpleframework.xml.core.Persister;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Checks that a {@link PersisterFactory} wired by
 * {@link SimpleXmlSerializersConfiguration} round-trips arrays, addresses and
 * URIs, both stand-alone and as bean attributes/elements.
 *
 * @author shevek
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = SimpleXmlSerializersConfiguration.class)
// @ActiveProfiles({})
// @DirtiesContext
// @TestPropertySource()
public class SimpleXmlSerializersConfigurationTest {

    private static final Logger LOG = LoggerFactory.getLogger(SimpleXmlSerializersConfigurationTest.class);

    @Autowired
    private PersisterFactory factory;

    /** Mixes attribute- and element-mapped fields of the custom-converted types. */
    private static class Bean {

        @Attribute
        InetAddress aAddress;
        @Element
        InetAddress eAddress;
        @Attribute
        URI aUri;
        @Element
        URI eUri;
    }

    @Test
    public void testPersister() throws Exception {
        final Persister serializer = factory.newPersister();
        LOG.info("Serializer is " + serializer);

        // Bare values first: a primitive array and an InetAddress.
        PersisterTestUtils.testSerialization(serializer, new int[]{1, 2, 3, 4, 5});
        PersisterTestUtils.testSerialization(serializer, InetAddress.getByAddress(new byte[]{1, 2, 3, 4}));

        // Then a composite bean exercising both attribute and element converters.
        final Bean bean = new Bean();
        bean.aUri = URI.create("mailto:nobody@localhost");
        bean.eUri = URI.create("mailto:somebody@localhost");
        bean.aAddress = InetAddress.getByAddress(new byte[]{3, 4, 5, 6});
        bean.eAddress = InetAddress.getByAddress(new byte[]{3, 4, 5, 7});
        PersisterTestUtils.testSerialization(serializer, bean);
    }
}
apache-2.0
mkhutornenko/incubator-aurora
src/test/java/org/apache/aurora/scheduler/async/TaskSchedulerTest.java
22454
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.aurora.scheduler.async;

import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.RateLimiter;
import com.twitter.common.quantity.Amount;
import com.twitter.common.quantity.Time;
import com.twitter.common.testing.easymock.EasyMockTest;
import com.twitter.common.util.BackoffStrategy;
import com.twitter.common.util.testing.FakeClock;

import org.apache.aurora.gen.AssignedTask;
import org.apache.aurora.gen.HostStatus;
import org.apache.aurora.gen.Identity;
import org.apache.aurora.gen.MaintenanceMode;
import org.apache.aurora.gen.ScheduleStatus;
import org.apache.aurora.gen.ScheduledTask;
import org.apache.aurora.gen.TaskConfig;
import org.apache.aurora.scheduler.Driver;
import org.apache.aurora.scheduler.async.OfferQueue.OfferQueueImpl;
import org.apache.aurora.scheduler.async.OfferQueue.OfferReturnDelay;
import org.apache.aurora.scheduler.async.TaskScheduler.TaskSchedulerImpl;
import org.apache.aurora.scheduler.base.Query;
import org.apache.aurora.scheduler.base.Tasks;
import org.apache.aurora.scheduler.events.PubsubEvent.HostMaintenanceStateChange;
import org.apache.aurora.scheduler.events.PubsubEvent.TaskStateChange;
import org.apache.aurora.scheduler.events.PubsubEvent.TasksDeleted;
import org.apache.aurora.scheduler.filter.AttributeAggregate;
import org.apache.aurora.scheduler.state.MaintenanceController;
import org.apache.aurora.scheduler.state.StateManager;
import org.apache.aurora.scheduler.state.TaskAssigner;
import org.apache.aurora.scheduler.storage.AttributeStore;
import org.apache.aurora.scheduler.storage.Storage;
import org.apache.aurora.scheduler.storage.Storage.MutableStoreProvider;
import org.apache.aurora.scheduler.storage.Storage.MutateWork;
import org.apache.aurora.scheduler.storage.Storage.StorageException;
import org.apache.aurora.scheduler.storage.TaskStore;
import org.apache.aurora.scheduler.storage.entities.IScheduledTask;
import org.apache.aurora.scheduler.storage.mem.MemStorage;
import org.apache.mesos.Protos.Offer;
import org.apache.mesos.Protos.OfferID;
import org.apache.mesos.Protos.SlaveID;
import org.apache.mesos.Protos.TaskID;
import org.apache.mesos.Protos.TaskInfo;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;

import static org.apache.aurora.gen.ScheduleStatus.FINISHED;
import static org.apache.aurora.gen.ScheduleStatus.INIT;
import static org.apache.aurora.gen.ScheduleStatus.KILLED;
import static org.apache.aurora.gen.ScheduleStatus.LOST;
import static org.apache.aurora.gen.ScheduleStatus.PENDING;
import static org.apache.aurora.gen.ScheduleStatus.RUNNING;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.isA;
import static org.junit.Assert.assertEquals;

/**
 * Exercises TaskSchedulerImpl together with a real OfferQueueImpl, using a mocked
 * executor so the scheduled retry/decline Runnables are captured and run manually.
 *
 * TODO(wfarner): Break this test up to independently test TaskSchedulerImpl and OfferQueueImpl.
 */
public class TaskSchedulerTest extends EasyMockTest {

  private static final Offer OFFER_A = Offers.makeOffer("OFFER_A", "HOST_A");
  private static final Offer OFFER_B = Offers.makeOffer("OFFER_B", "HOST_B");
  private static final Offer OFFER_C = Offers.makeOffer("OFFER_C", "HOST_C");
  private static final Offer OFFER_D = Offers.makeOffer("OFFER_D", "HOST_D");

  private Storage storage;
  private MaintenanceController maintenance;
  private StateManager stateManager;
  private TaskAssigner assigner;
  private BackoffStrategy retryStrategy;
  private Driver driver;
  private ScheduledExecutorService executor;
  private ScheduledFuture<?> future;
  private OfferReturnDelay returnDelay;
  private OfferQueue offerQueue;
  private TaskGroups taskGroups;
  private FakeClock clock;
  private RescheduleCalculator rescheduleCalculator;
  private Preemptor preemptor;
  private AttributeAggregate emptyJob;
  private Amount<Long, Time> reservationDuration = Amount.of(1L, Time.MINUTES);

  @Before
  public void setUp() {
    // Real in-memory storage; everything else is mocked.
    storage = MemStorage.newEmptyStorage();
    maintenance = createMock(MaintenanceController.class);
    stateManager = createMock(StateManager.class);
    assigner = createMock(TaskAssigner.class);
    retryStrategy = createMock(BackoffStrategy.class);
    driver = createMock(Driver.class);
    executor = createMock(ScheduledExecutorService.class);
    future = createMock(ScheduledFuture.class);
    returnDelay = createMock(OfferReturnDelay.class);
    clock = new FakeClock();
    clock.setNowMillis(0);
    rescheduleCalculator = createMock(RescheduleCalculator.class);
    preemptor = createMock(Preemptor.class);
    emptyJob = new AttributeAggregate(
        Suppliers.ofInstance(ImmutableSet.<IScheduledTask>of()),
        createMock(AttributeStore.class));
  }

  // Switches mocks to replay and wires the real offer queue / scheduler /
  // task groups under test. Call after all expectations are recorded.
  private void replayAndCreateScheduler() {
    control.replay();
    offerQueue = new OfferQueueImpl(driver, returnDelay, executor, maintenance);
    TaskScheduler scheduler = new TaskSchedulerImpl(
        storage,
        stateManager,
        assigner,
        offerQueue,
        preemptor,
        reservationDuration,
        clock);
    taskGroups = new TaskGroups(
        executor,
        retryStrategy,
        RateLimiter.create(100),
        scheduler,
        rescheduleCalculator);
  }

  // Convenience: expect one offer to be enqueued with the default 10 ms decline delay.
  private Capture<Runnable> expectOffer() {
    return expectOfferDeclineIn(10);
  }

  // Expects the offer queue to schedule a decline after delayMillis; the captured
  // Runnable is the decline action, to be fired manually by the test.
  private Capture<Runnable> expectOfferDeclineIn(int delayMillis) {
    expect(returnDelay.get()).andReturn(Amount.of(delayMillis, Time.MILLISECONDS));
    Capture<Runnable> runnable = createCapture();
    executor.schedule(capture(runnable), eq((long) delayMillis), eq(TimeUnit.MILLISECONDS));
    expectLastCall().andReturn(createMock(ScheduledFuture.class));
    return runnable;
  }

  // Saves the task (if absent) with newState and notifies the task groups of the
  // oldState -> newState transition.
  private void changeState(
      IScheduledTask task,
      ScheduleStatus oldState,
      ScheduleStatus newState) {

    final IScheduledTask copy = IScheduledTask.build(task.newBuilder().setStatus(newState));
    // Insert the task if it doesn't already exist.
    storage.write(new MutateWork.NoResult.Quiet() {
      @Override
      protected void execute(MutableStoreProvider storeProvider) {
        TaskStore.Mutable taskStore = storeProvider.getUnsafeTaskStore();
        if (taskStore.fetchTasks(Query.taskScoped(Tasks.id(copy))).isEmpty()) {
          taskStore.saveTasks(ImmutableSet.of(copy));
        }
      }
    });
    taskGroups.taskChangedState(TaskStateChange.transition(copy, oldState));
  }

  // Expects a retry of the task group to be scheduled after penaltyMs; captures the
  // scheduled Runnable so the test can fire it.
  private Capture<Runnable> expectTaskRetryIn(long penaltyMs) {
    Capture<Runnable> capture = createCapture();
    executor.schedule(
        capture(capture),
        eq(penaltyMs),
        eq(TimeUnit.MILLISECONDS));
    expectLastCall().andReturn(future);
    return capture;
  }

  // Expects the backoff strategy to be consulted with previousPenaltyMs and a retry
  // scheduled at the resulting nextPenaltyMs.
  private Capture<Runnable> expectTaskGroupBackoff(long previousPenaltyMs, long nextPenaltyMs) {
    expect(retryStrategy.calculateBackoffMs(previousPenaltyMs)).andReturn(nextPenaltyMs);
    return expectTaskRetryIn(nextPenaltyMs);
  }

  // First backoff in a sequence (previous penalty is zero).
  private Capture<Runnable> expectTaskGroupBackoff(long nextPenaltyMs) {
    return expectTaskGroupBackoff(0, nextPenaltyMs);
  }

  // With no pending tasks, offers are simply queued for later decline.
  @Test
  public void testNoTasks() {
    expectAnyMaintenanceCalls();
    expectOfferDeclineIn(10);
    expectOfferDeclineIn(10);

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_A);
    offerQueue.addOffer(OFFER_B);
  }

  // With no offers, a pending task backs off and the preemptor is consulted.
  @Test
  public void testNoOffers() {
    Capture<Runnable> timeoutCapture = expectTaskGroupBackoff(10);
    expectTaskGroupBackoff(10, 20);
    expect(preemptor.findPreemptionSlotFor("a", emptyJob)).andReturn(Optional.<String>absent());

    replayAndCreateScheduler();

    changeState(makeTask("a"), INIT, PENDING);
    timeoutCapture.getValue().run();
  }

  // Builds a minimal scheduled task whose job/owner/env are derived from taskId.
  private IScheduledTask makeTask(String taskId) {
    return IScheduledTask.build(new ScheduledTask()
        .setAssignedTask(new AssignedTask()
            .setInstanceId(0)
            .setTaskId(taskId)
            .setTask(new TaskConfig()
                .setJobName("job-" + taskId)
                .setOwner(new Identity().setRole("role-" + taskId).setUser("user-" + taskId))
                .setEnvironment("env-" + taskId))));
  }

  private IScheduledTask makeTask(String taskId, ScheduleStatus status) {
    return IScheduledTask.build(makeTask(taskId).newBuilder().setStatus(status));
  }

  // Tasks already in storage at startup: only the PENDING one gets a delayed retry.
  @Test
  public void testLoadFromStorage() {
    final IScheduledTask a = makeTask("a", KILLED);
    final IScheduledTask b = makeTask("b", PENDING);
    final IScheduledTask c = makeTask("c", RUNNING);

    expect(rescheduleCalculator.getStartupScheduleDelayMs(b)).andReturn(10L);
    expectTaskRetryIn(10);

    replayAndCreateScheduler();

    storage.write(new MutateWork.NoResult.Quiet() {
      @Override
      protected void execute(MutableStoreProvider store) {
        store.getUnsafeTaskStore().saveTasks(ImmutableSet.of(a, b, c));
      }
    });
    for (IScheduledTask task : ImmutableList.of(a, b, c)) {
      taskGroups.taskChangedState(TaskStateChange.initialized(task));
    }
    changeState(c, RUNNING, FINISHED);
  }

  // A PENDING transition for a task that is not in storage still schedules a retry,
  // which must run cleanly.
  @Test
  public void testTaskMissing() {
    Capture<Runnable> timeoutCapture = expectTaskGroupBackoff(10);

    replayAndCreateScheduler();

    taskGroups.taskChangedState(TaskStateChange.transition(makeTask("a", PENDING), INIT));
    timeoutCapture.getValue().run();
  }

  // First assignment attempt fails, second succeeds and launches; the offer is then
  // gone for subsequent tasks.
  @Test
  public void testTaskAssigned() {
    expectAnyMaintenanceCalls();
    expectOfferDeclineIn(10);

    IScheduledTask task = makeTask("a", PENDING);
    TaskInfo mesosTask = makeTaskInfo(task);

    Capture<Runnable> timeoutCapture = expectTaskGroupBackoff(10);
    expect(assigner.maybeAssign(OFFER_A, task, emptyJob)).andReturn(Optional.<TaskInfo>absent());
    expect(preemptor.findPreemptionSlotFor("a", emptyJob)).andReturn(Optional.<String>absent());

    Capture<Runnable> timeoutCapture2 = expectTaskGroupBackoff(10, 20);
    expect(assigner.maybeAssign(OFFER_A, task, emptyJob)).andReturn(Optional.of(mesosTask));
    driver.launchTask(OFFER_A.getId(), mesosTask);

    Capture<Runnable> timeoutCapture3 = expectTaskGroupBackoff(10);
    expectTaskGroupBackoff(10, 20);
    expect(preemptor.findPreemptionSlotFor("b", emptyJob)).andReturn(Optional.<String>absent());

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_A);
    changeState(task, INIT, PENDING);
    timeoutCapture.getValue().run();
    timeoutCapture2.getValue().run();

    // Ensure the offer was consumed.
    changeState(makeTask("b"), INIT, PENDING);
    timeoutCapture3.getValue().run();
  }

  // A launch failure from the driver moves the task to LOST.
  @Test
  public void testDriverNotReady() {
    IScheduledTask task = makeTask("a", PENDING);
    TaskInfo mesosTask = TaskInfo.newBuilder()
        .setName(Tasks.id(task))
        .setTaskId(TaskID.newBuilder().setValue(Tasks.id(task)))
        .setSlaveId(SlaveID.newBuilder().setValue("slaveId"))
        .build();

    Capture<Runnable> timeoutCapture = expectTaskGroupBackoff(10);
    expectAnyMaintenanceCalls();
    expectOfferDeclineIn(10);
    expect(assigner.maybeAssign(OFFER_A, task, emptyJob)).andReturn(Optional.of(mesosTask));
    driver.launchTask(OFFER_A.getId(), mesosTask);
    expectLastCall().andThrow(new IllegalStateException("Driver not ready."));
    expect(stateManager.changeState(
        "a",
        Optional.of(PENDING),
        LOST,
        TaskSchedulerImpl.LAUNCH_FAILED_MSG))
        .andReturn(true);

    replayAndCreateScheduler();

    changeState(task, INIT, PENDING);
    offerQueue.addOffer(OFFER_A);
    timeoutCapture.getValue().run();
  }

  // A StorageException during assignment is retried; the retry succeeds.
  @Test
  public void testStorageException() {
    IScheduledTask task = makeTask("a", PENDING);
    TaskInfo mesosTask = TaskInfo.newBuilder()
        .setName(Tasks.id(task))
        .setTaskId(TaskID.newBuilder().setValue(Tasks.id(task)))
        .setSlaveId(SlaveID.newBuilder().setValue("slaveId"))
        .build();

    Capture<Runnable> timeoutCapture = expectTaskGroupBackoff(10);
    expectAnyMaintenanceCalls();
    expectOfferDeclineIn(10);
    expect(assigner.maybeAssign(OFFER_A, task, emptyJob))
        .andThrow(new StorageException("Injected failure."));

    Capture<Runnable> timeoutCapture2 = expectTaskGroupBackoff(10, 20);
    expect(assigner.maybeAssign(OFFER_A, task, emptyJob)).andReturn(Optional.of(mesosTask));
    driver.launchTask(OFFER_A.getId(), mesosTask);
    expectLastCall();

    replayAndCreateScheduler();

    changeState(task, INIT, PENDING);
    offerQueue.addOffer(OFFER_A);
    timeoutCapture.getValue().run();
    timeoutCapture2.getValue().run();
  }

  // When the decline delay elapses the offer is returned to the driver, and the
  // task keeps backing off without it.
  @Test
  public void testExpiration() {
    IScheduledTask task = makeTask("a", PENDING);

    Capture<Runnable> timeoutCapture = expectTaskGroupBackoff(10);
    Capture<Runnable> offerExpirationCapture = expectOfferDeclineIn(10);
    expectAnyMaintenanceCalls();
    expect(assigner.maybeAssign(OFFER_A, task, emptyJob)).andReturn(Optional.<TaskInfo>absent());
    Capture<Runnable> timeoutCapture2 = expectTaskGroupBackoff(10, 20);
    expect(preemptor.findPreemptionSlotFor("a", emptyJob)).andReturn(Optional.<String>absent());
    driver.declineOffer(OFFER_A.getId());
    expectTaskGroupBackoff(20, 30);
    expect(preemptor.findPreemptionSlotFor("a", emptyJob)).andReturn(Optional.<String>absent());

    replayAndCreateScheduler();

    changeState(task, INIT, PENDING);
    offerQueue.addOffer(OFFER_A);
    timeoutCapture.getValue().run();
    offerExpirationCapture.getValue().run();
    timeoutCapture2.getValue().run();
  }

  // A second offer for a slave that already has one queued is declined immediately
  // along with the first when the delay elapses.
  @Test
  public void testOneOfferPerSlave() {
    expectAnyMaintenanceCalls();
    Capture<Runnable> offerExpirationCapture = expectOfferDeclineIn(10);

    Offer offerAB =
        Offers.makeOffer("OFFER_B").toBuilder().setSlaveId(OFFER_A.getSlaveId()).build();

    driver.declineOffer(OFFER_A.getId());
    driver.declineOffer(offerAB.getId());

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_A);
    offerQueue.addOffer(offerAB);
    offerExpirationCapture.getValue().run();
  }

  // An offer consumed via launchFirst() must not be declined when the delay fires.
  @Test
  public void testDontDeclineAcceptedOffer() throws OfferQueue.LaunchException {
    expectAnyMaintenanceCalls();
    Capture<Runnable> offerExpirationCapture = expectOfferDeclineIn(10);

    Function<Offer, Optional<TaskInfo>> offerAcceptor =
        createMock(new Clazz<Function<Offer, Optional<TaskInfo>>>() { });
    final TaskInfo taskInfo = TaskInfo.getDefaultInstance();
    expect(offerAcceptor.apply(OFFER_A)).andReturn(Optional.of(taskInfo));
    driver.launchTask(OFFER_A.getId(), taskInfo);

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_A);
    offerQueue.launchFirst(offerAcceptor);
    offerExpirationCapture.getValue().run();
  }

  // Offers from hosts in NONE mode are preferred over SCHEDULED/DRAINING/DRAINED.
  @Test
  public void testBasicMaintenancePreferences() {
    expectOffer();
    expect(maintenance.getMode("HOST_D")).andReturn(MaintenanceMode.DRAINED);
    expectOffer();
    expect(maintenance.getMode("HOST_C")).andReturn(MaintenanceMode.DRAINING);
    expectOffer();
    expect(maintenance.getMode("HOST_B")).andReturn(MaintenanceMode.SCHEDULED);
    expectOffer();
    expect(maintenance.getMode("HOST_A")).andReturn(MaintenanceMode.NONE);

    IScheduledTask taskA = makeTask("A", PENDING);
    TaskInfo mesosTaskA = makeTaskInfo(taskA);
    expect(assigner.maybeAssign(OFFER_A, taskA, emptyJob)).andReturn(Optional.of(mesosTaskA));
    driver.launchTask(OFFER_A.getId(), mesosTaskA);
    Capture<Runnable> captureA = expectTaskGroupBackoff(10);

    IScheduledTask taskB = makeTask("B", PENDING);
    TaskInfo mesosTaskB = makeTaskInfo(taskB);
    expect(assigner.maybeAssign(OFFER_B, taskB, emptyJob)).andReturn(Optional.of(mesosTaskB));
    driver.launchTask(OFFER_B.getId(), mesosTaskB);
    Capture<Runnable> captureB = expectTaskGroupBackoff(10);

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_D);
    offerQueue.addOffer(OFFER_C);
    offerQueue.addOffer(OFFER_B);
    offerQueue.addOffer(OFFER_A);

    changeState(taskA, INIT, PENDING);
    captureA.getValue().run();

    changeState(taskB, INIT, PENDING);
    captureB.getValue().run();
  }

  // Maintenance-mode changes re-rank queued offers before tasks are matched.
  @Test
  public void testChangingMaintenancePreferences() {
    expectOffer();
    expect(maintenance.getMode("HOST_A")).andReturn(MaintenanceMode.NONE);
    expectOffer();
    expect(maintenance.getMode("HOST_B")).andReturn(MaintenanceMode.SCHEDULED);
    expectOffer();
    expect(maintenance.getMode("HOST_C")).andReturn(MaintenanceMode.DRAINED);

    IScheduledTask taskA = makeTask("A", PENDING);
    TaskInfo mesosTaskA = makeTaskInfo(taskA);
    expect(assigner.maybeAssign(OFFER_B, taskA, emptyJob)).andReturn(Optional.of(mesosTaskA));
    driver.launchTask(OFFER_B.getId(), mesosTaskA);
    Capture<Runnable> captureA = expectTaskGroupBackoff(10);

    IScheduledTask taskB = makeTask("B", PENDING);
    TaskInfo mesosTaskB = makeTaskInfo(taskB);
    expect(assigner.maybeAssign(OFFER_C, taskB, emptyJob)).andReturn(Optional.of(mesosTaskB));
    driver.launchTask(OFFER_C.getId(), mesosTaskB);
    Capture<Runnable> captureB = expectTaskGroupBackoff(10);

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_A);
    offerQueue.addOffer(OFFER_B);
    offerQueue.addOffer(OFFER_C);

    // Initially, we'd expect the offers to be consumed in order (A, B), with (C) unschedulable

    // Expected order now (B), with (C, A) unschedulable
    changeHostMaintenanceState("HOST_A", MaintenanceMode.DRAINING);
    changeState(taskA, INIT, PENDING);
    captureA.getValue().run();

    // Expected order now (C), with (A) unschedulable and (B) already consumed
    changeHostMaintenanceState("HOST_C", MaintenanceMode.NONE);
    changeState(taskB, INIT, PENDING);
    captureB.getValue().run();
  }

  // Expects task to be assigned to any offer and launched; returns a capture of the
  // task actually passed to the assigner.
  private Capture<IScheduledTask> expectTaskScheduled(IScheduledTask task) {
    TaskInfo mesosTask = makeTaskInfo(task);
    Capture<IScheduledTask> taskScheduled = createCapture();
    expect(assigner.maybeAssign(
        EasyMock.<Offer>anyObject(),
        capture(taskScheduled),
        EasyMock.eq(emptyJob)))
        .andReturn(Optional.of(mesosTask));
    driver.launchTask(EasyMock.<OfferID>anyObject(), eq(mesosTask));
    return taskScheduled;
  }

  // One instance of job A and job B must each get scheduled — job A's three
  // instances must not starve job B.
  @Test
  public void testResistsStarvation() {
    // TODO(wfarner): This test requires intimate knowledge of the way futures are used inside
    // TaskScheduler.  It's time to test using a real ScheduledExecutorService.

    expectAnyMaintenanceCalls();

    IScheduledTask jobA0 = makeTask("a0", PENDING);

    ScheduledTask jobA1Builder = jobA0.newBuilder();
    jobA1Builder.getAssignedTask().setTaskId("a1");
    jobA1Builder.getAssignedTask().setInstanceId(1);
    IScheduledTask jobA1 = IScheduledTask.build(jobA1Builder);

    ScheduledTask jobA2Builder = jobA0.newBuilder();
    jobA2Builder.getAssignedTask().setTaskId("a2");
    jobA2Builder.getAssignedTask().setInstanceId(2);
    IScheduledTask jobA2 = IScheduledTask.build(jobA2Builder);

    IScheduledTask jobB0 = makeTask("b0", PENDING);

    expectOfferDeclineIn(10);
    expectOfferDeclineIn(10);
    expectOfferDeclineIn(10);
    expectOfferDeclineIn(10);

    Capture<Runnable> timeoutA = expectTaskGroupBackoff(10);
    Capture<Runnable> timeoutB = expectTaskGroupBackoff(10);

    Capture<IScheduledTask> firstScheduled = expectTaskScheduled(jobA0);
    Capture<IScheduledTask> secondScheduled = expectTaskScheduled(jobB0);

    // Expect another watch of the task group for job A.
    expectTaskGroupBackoff(10);

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_A);
    offerQueue.addOffer(OFFER_B);
    offerQueue.addOffer(OFFER_C);
    offerQueue.addOffer(OFFER_D);
    changeState(jobA0, INIT, PENDING);
    changeState(jobA1, INIT, PENDING);
    changeState(jobA2, INIT, PENDING);
    changeState(jobB0, INIT, PENDING);
    timeoutA.getValue().run();
    timeoutB.getValue().run();
    assertEquals(
        ImmutableSet.of(jobA0, jobB0),
        ImmutableSet.of(firstScheduled.getValue(), secondScheduled.getValue()));
  }

  // Once a task is deleted, firing its pending retry again must be a no-op.
  @Test
  public void testTaskDeleted() {
    expectAnyMaintenanceCalls();
    expectOfferDeclineIn(10);

    final IScheduledTask task = makeTask("a", PENDING);

    Capture<Runnable> timeoutCapture = expectTaskGroupBackoff(10);
    expect(assigner.maybeAssign(OFFER_A, task, emptyJob)).andReturn(Optional.<TaskInfo>absent());
    expectTaskGroupBackoff(10, 20);
    expect(preemptor.findPreemptionSlotFor("a", emptyJob)).andReturn(Optional.<String>absent());

    replayAndCreateScheduler();

    offerQueue.addOffer(OFFER_A);
    changeState(task, INIT, PENDING);
    timeoutCapture.getValue().run();

    // Ensure the offer was consumed.
    changeState(task, INIT, PENDING);
    storage.write(new MutateWork.NoResult.Quiet() {
      @Override
      protected void execute(MutableStoreProvider storeProvider) {
        storeProvider.getUnsafeTaskStore().deleteTasks(Tasks.ids(task));
      }
    });
    taskGroups.tasksDeleted(new TasksDeleted(ImmutableSet.of(task)));
    timeoutCapture.getValue().run();
  }

  // Builds a TaskInfo whose name/id mirror the scheduled task.
  private TaskInfo makeTaskInfo(IScheduledTask task) {
    return TaskInfo.newBuilder()
        .setName(Tasks.id(task))
        .setTaskId(TaskID.newBuilder().setValue(Tasks.id(task)))
        .setSlaveId(SlaveID.newBuilder().setValue("slave-id" + task.toString()))
        .build();
  }

  // Tests that don't care about maintenance: every host reports mode NONE.
  private void expectAnyMaintenanceCalls() {
    expect(maintenance.getMode(isA(String.class))).andReturn(MaintenanceMode.NONE).anyTimes();
  }

  private void changeHostMaintenanceState(String hostName, MaintenanceMode mode) {
    offerQueue.hostChangedState(new HostMaintenanceStateChange(new HostStatus(hostName, mode)));
  }
}
apache-2.0
mikewalch/accumulo
test/src/main/java/org/apache/accumulo/test/mapreduce/AccumuloRowInputFormatIT.java
7531
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.accumulo.test.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;

import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.AccumuloRowInputFormat;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.KeyValue;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.core.util.PeekingIterator;
import org.apache.accumulo.harness.AccumuloClusterHarness;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Integration test for {@link AccumuloRowInputFormat}: writes three rows to a
 * table and verifies, via a local MapReduce job, that each mapper invocation
 * receives exactly one row with all of its key/value entries in order.
 */
public class AccumuloRowInputFormatIT extends AccumuloClusterHarness {

  private static final String ROW1 = "row1";
  private static final String ROW2 = "row2";
  private static final String ROW3 = "row3";
  private static final String COLF1 = "colf1";
  // Expected entries per row, populated once in prepareRows().
  private static List<Entry<Key,Value>> row1;
  private static List<Entry<Key,Value>> row2;
  private static List<Entry<Key,Value>> row3;
  // Assertion failures raised inside the mapper cannot propagate out of the MR
  // framework, so they are stashed in these static fields and re-checked in test().
  private static AssertionError e1 = null;
  private static AssertionError e2 = null;

  // Builds the fixture rows: row1 has three entries across two families,
  // row2 and row3 have one entry each.
  @BeforeClass
  public static void prepareRows() {
    row1 = new ArrayList<>();
    row1.add(new KeyValue(new Key(ROW1, COLF1, "colq1"), "v1".getBytes()));
    row1.add(new KeyValue(new Key(ROW1, COLF1, "colq2"), "v2".getBytes()));
    row1.add(new KeyValue(new Key(ROW1, "colf2", "colq3"), "v3".getBytes()));
    row2 = new ArrayList<>();
    row2.add(new KeyValue(new Key(ROW2, COLF1, "colq4"), "v4".getBytes()));
    row3 = new ArrayList<>();
    row3.add(new KeyValue(new Key(ROW3, COLF1, "colq5"), "v5".getBytes()));
  }

  // Asserts that the entries produced by 'second' match 'first' element-wise.
  // NOTE(review): does not assert that all of 'first' was consumed — a shorter
  // 'second' passes silently, a longer one throws IndexOutOfBoundsException.
  private static void checkLists(final List<Entry<Key,Value>> first,
      final Iterator<Entry<Key,Value>> second) {
    int entryIndex = 0;
    while (second.hasNext()) {
      final Entry<Key,Value> entry = second.next();
      assertEquals("Keys should be equal", first.get(entryIndex).getKey(), entry.getKey());
      assertEquals("Values should be equal", first.get(entryIndex).getValue(), entry.getValue());
      entryIndex++;
    }
  }

  // Writes each entry of 'list' as a mutation, preserving visibility and timestamp.
  private static void insertList(final BatchWriter writer, final List<Entry<Key,Value>> list)
      throws MutationsRejectedException {
    for (Entry<Key,Value> e : list) {
      final Key key = e.getKey();
      final Mutation mutation = new Mutation(key.getRow());
      ColumnVisibility colVisibility = new ColumnVisibility(key.getColumnVisibility());
      mutation.put(key.getColumnFamily(), key.getColumnQualifier(), colVisibility,
          key.getTimestamp(), e.getValue());
      writer.addMutation(mutation);
    }
  }

  // Tool that runs a local MR job over the table using AccumuloRowInputFormat.
  private static class MRTester extends Configured implements Tool {

    private static class TestMapper
        extends Mapper<Text,PeekingIterator<Entry<Key,Value>>,Key,Value> {

      // Number of rows seen so far; also selects which fixture row to compare against.
      int count = 0;

      @Override
      protected void map(Text k, PeekingIterator<Entry<Key,Value>> v, Context context)
          throws IOException, InterruptedException {
        try {
          switch (count) {
            case 0:
              assertEquals("Current key should be " + ROW1, new Text(ROW1), k);
              checkLists(row1, v);
              break;
            case 1:
              assertEquals("Current key should be " + ROW2, new Text(ROW2), k);
              checkLists(row2, v);
              break;
            case 2:
              assertEquals("Current key should be " + ROW3, new Text(ROW3), k);
              checkLists(row3, v);
              break;
            default:
              // More than three rows: fail.
              assertTrue(false);
          }
        } catch (AssertionError e) {
          // Stash for re-assertion in test(); see field comment.
          e1 = e;
        }
        count++;
      }

      @Override
      protected void cleanup(Context context) throws IOException, InterruptedException {
        try {
          // Exactly three rows must have been mapped.
          assertEquals(3, count);
        } catch (AssertionError e) {
          e2 = e;
        }
      }
    }

    @Override
    public int run(String[] args) throws Exception {
      if (args.length != 1) {
        throw new IllegalArgumentException("Usage : " + MRTester.class.getName() + " <table>");
      }

      String user = getAdminPrincipal();
      AuthenticationToken pass = getAdminToken();
      String table = args[0];

      Job job = Job.getInstance(getConf(),
          this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
      job.setJarByClass(this.getClass());

      job.setInputFormatClass(AccumuloRowInputFormat.class);

      AccumuloInputFormat.setConnectorInfo(job, user, pass);
      AccumuloInputFormat.setInputTableName(job, table);
      AccumuloRowInputFormat.setZooKeeperInstance(job, getCluster().getClientConfig());

      job.setMapperClass(TestMapper.class);
      job.setMapOutputKeyClass(Key.class);
      job.setMapOutputValueClass(Value.class);
      // Map-only job; output is discarded.
      job.setOutputFormatClass(NullOutputFormat.class);

      job.setNumReduceTasks(0);

      job.waitForCompletion(true);

      return job.isSuccessful() ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      // Run the MR job in-process with a local working directory.
      conf.set("mapreduce.framework.name", "local");
      conf.set("mapreduce.cluster.local.dir",
          new File(System.getProperty("user.dir"), "target/mapreduce-tmp").getAbsolutePath());
      assertEquals(0, ToolRunner.run(conf, new MRTester(), args));
    }
  }

  // Writes the fixture rows, runs the MR job, then re-raises any assertion
  // failures captured inside the mapper.
  @Test
  public void test() throws Exception {
    final Connector conn = getConnector();
    String tableName = getUniqueNames(1)[0];
    conn.tableOperations().create(tableName);
    BatchWriter writer = null;
    try {
      writer = conn.createBatchWriter(tableName, new BatchWriterConfig());
      insertList(writer, row1);
      insertList(writer, row2);
      insertList(writer, row3);
    } finally {
      if (writer != null) {
        writer.close();
      }
    }
    MRTester.main(new String[] {tableName});
    assertNull(e1);
    assertNull(e2);
  }
}
apache-2.0
Catherine22/DesignPattern
src/com/catherine/business_delegate/BusinessLookUp.java
235
package com.catherine.business_delegate;

/**
 * Lookup service of the Business Delegate pattern: maps a requested
 * {@link ServiceType} to a concrete {@link BusinessService} implementation.
 */
public class BusinessLookUp {

    /**
     * @param type the kind of backing service requested
     * @return an {@code EJBService} for {@code ServiceType.EJB}; a
     *         {@code JMSService} for anything else (including {@code null})
     */
    public BusinessService getBusinessService(ServiceType type) {
        return type == ServiceType.EJB ? new EJBService() : new JMSService();
    }
}
apache-2.0
gxa/atlas
sc/src/test/java/uk/ac/ebi/atlas/metadata/CellMetadataDaoIT.java
11033
package uk.ac.ebi.atlas.metadata;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.ClassPathResource;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.util.StringUtils;
import uk.ac.ebi.atlas.commons.readers.TsvStreamer;
import uk.ac.ebi.atlas.experimentimport.idf.IdfParser;
import uk.ac.ebi.atlas.resource.DataFileHub;
import uk.ac.ebi.atlas.configuration.TestConfig;
import uk.ac.ebi.atlas.solr.cloud.SolrCloudCollectionProxyFactory;
import uk.ac.ebi.atlas.solr.cloud.collections.SingleCellAnalyticsCollectionProxy;
import uk.ac.ebi.atlas.solr.cloud.collections.SingleCellAnalyticsCollectionProxy.SingleCellAnalyticsSchemaField;
import uk.ac.ebi.atlas.testutils.JdbcUtils;
import uk.ac.ebi.atlas.testutils.RandomDataTestUtils;

import javax.inject.Inject;
import java.util.Arrays;
import javax.sql.DataSource;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.assertj.core.api.Assertions.assertThat;
import static uk.ac.ebi.atlas.metadata.CellMetadataDao.MISSING_METADATA_VALUE_PLACEHOLDER;

/**
 * Integration test for {@code CellMetadataDao}, exercising cell-level metadata and
 * factor lookups against a live Solr collection and a Postgres database populated
 * from SQL fixtures.
 *
 * Lifecycle: PER_CLASS is required so that the non-static @BeforeAll/@AfterAll
 * methods below can populate and clean the fixture tables once per class run.
 * NOTE(review): several tests query live experiment accessions via jdbcUtils, so
 * results depend on the fixture data loaded in populateDatabaseTables().
 */
@ExtendWith(SpringExtension.class)
@WebAppConfiguration
@ContextConfiguration(classes = TestConfig.class)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
class CellMetadataDaoIT {
    private static final Logger LOGGER = LoggerFactory.getLogger(CellMetadataDaoIT.class);

    @Inject
    private DataSource dataSource;

    @Inject
    private SolrCloudCollectionProxyFactory solrCloudCollectionProxyFactory;

    @Inject
    private JdbcUtils jdbcUtils;

    @Inject
    private IdfParser idfParser;

    @Inject
    private DataFileHub dataFileHub;

    // IDF header that marks the "EAAdditionalAttributes" comment line; upper-cased
    // once here so hasAdditionalAttributesInIdf() can compare case-insensitively.
    private static final String IDF_ADDITIONAL_ATTRIBUTES_ID = "Comment[EAAdditionalAttributes]".toUpperCase();

    // Recreated before each test in setUp(); the DAO under test.
    private CellMetadataDao subject;

    // Loads the scxa_experiment and scxa_analytics fixture tables once for the class.
    @BeforeAll
    void populateDatabaseTables() {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        populator.addScripts(
                new ClassPathResource("fixtures/scxa_experiment-fixture.sql"),
                new ClassPathResource("fixtures/scxa_analytics-fixture.sql"));
        populator.execute(dataSource);
    }

    // Removes the fixture rows after all tests so the database is left clean.
    @AfterAll
    void cleanDatabaseTables() {
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator();
        populator.addScripts(
                new ClassPathResource("fixtures/scxa_experiment-delete.sql"),
                new ClassPathResource("fixtures/scxa_analytics-delete.sql"));
        populator.execute(dataSource);
    }

    // Fresh DAO per test so no state leaks between test methods.
    @BeforeEach
    void setUp() {
        this.subject = new CellMetadataDao(solrCloudCollectionProxyFactory, idfParser);
    }

    // Any experiment known to have metadata should expose at least one metadata field.
    @ParameterizedTest
    @MethodSource("experimentsWithMetadataProvider")
    void validExperimentAccessionHasMetadataFields(String experimentAccession) {
        assertThat(subject.getMetadataFieldNames(experimentAccession)).isNotEmpty();
    }

    // A randomly generated (hence unknown) accession should yield no metadata fields.
    @Test
    void invalidExperimentAccessionHasNoMetadata() {
        String experimentAccession = RandomDataTestUtils.generateRandomExperimentAccession();

        assertThat(subject.getMetadataFieldNames(experimentAccession)).isEmpty();
    }

    // For a bogus cell ID, every metadata field lookup should come back empty.
    @ParameterizedTest
    @MethodSource("experimentsWithMetadataProvider")
    void invalidCellIdHasNoMetadata(String experimentAccession) {
        String cellId = "FOOBAR";

        List<SingleCellAnalyticsSchemaField> metadataFieldNames = subject.getMetadataFieldNames(experimentAccession);

        assertThat(metadataFieldNames)
                .isNotEmpty()
                .allSatisfy(field ->
                        assertThat(subject.getMetadataValueForCellId(experimentAccession, field, cellId))
                                .isNotPresent());
    }

    // A real cell ID sampled from the database should have a value for every metadata field.
    @ParameterizedTest
    @MethodSource("experimentsWithMetadataProvider")
    void validCellIdHasMetadataValues(String experimentAccession) {
        String cellId = jdbcUtils.fetchRandomCellFromExperiment(experimentAccession);

        LOGGER.info("Retrieving metadata field names for experiment {}", experimentAccession);
        List<SingleCellAnalyticsSchemaField> metadataFieldNames = subject.getMetadataFieldNames(experimentAccession);

        assertThat(metadataFieldNames)
                .isNotEmpty()
                .allSatisfy(field -> {
                    LOGGER.info(
                            "Retrieving values for {} metadata for cell ID {} from experiment {}",
                            field.displayName(), cellId, experimentAccession);
                    assertThat(subject.getMetadataValueForCellId(experimentAccession, field, cellId)).isPresent();
                });
    }

    // Experiments with experimental factors should report factor fields for a real cell.
    @ParameterizedTest
    @MethodSource("experimentsWithFactorsProvider")
    void validExperimentAccessionHasFactorFields(String experimentAccession) {
        String cellId = jdbcUtils.fetchRandomCellFromExperiment(experimentAccession);

        LOGGER.info("Retrieving factor fields for cell ID {} from experiment {}", cellId, experimentAccession);
        assertThat(subject.getFactorFieldNames(experimentAccession, cellId)).isNotEmpty();
    }

    // Unknown experiment + unknown cell ID should yield no factor fields.
    @Test
    void invalidCellIdAndExperimentAccessionHasNoFactorFields() {
        String experimentAccession = RandomDataTestUtils.generateRandomExperimentAccession();
        String cellId = "FOO";

        assertThat(subject.getFactorFieldNames(experimentAccession, cellId)).isEmpty();
    }

    // Cells with no inferred cell type must map to the DAO's placeholder value, not be dropped.
    @Test
    void experimentWithMissingValuesReturnsNotAvailable() {
        String experimentAccession = "E-GEOD-71585";
        // TODO: Retrieve randomly sampled cell IDs from Solr
        // These four cells are known to lack an inferred cell type in the fixture data.
        ImmutableList<String> cellIdsWithMissingValues = ImmutableList.of(
                "SRR2138737", "SRR2140225", "SRR2139550", "SRR2139566");

        Map<String, String> result =
                subject.getMetadataValueForCellIds(experimentAccession,
                        SingleCellAnalyticsCollectionProxy.CHARACTERISTIC_INFERRED_CELL_TYPE,
                        cellIdsWithMissingValues);

        assertThat(result.values()).containsOnly(MISSING_METADATA_VALUE_PLACEHOLDER);
    }

    // Batch lookup: every requested cell ID must appear as a key in the result map.
    @ParameterizedTest
    @MethodSource("experimentsWithMetadataProvider")
    void validCellIdsHaveMetadataValues(String experimentAccession) {
        // Random batch size in [1, 2000) to exercise differently sized Solr queries.
        List<String> cellIds =
                jdbcUtils.fetchRandomListOfCellsFromExperiment(
                        experimentAccession,
                        ThreadLocalRandom.current().nextInt(1, 2000));

        LOGGER.info("Retrieving metadata field names for experiment {}", experimentAccession);
        List<SingleCellAnalyticsSchemaField> metadataFieldNames = subject.getMetadataFieldNames(experimentAccession);

        assertThat(metadataFieldNames)
                .isNotEmpty()
                .allSatisfy(field -> {
                    LOGGER.info(
                            "Retrieving values for {} metadata for {} random cell IDs from experiment {}",
                            field.displayName(), cellIds.size(), experimentAccession);
                    assertThat(subject.getMetadataValueForCellIds(experimentAccession, field, cellIds))
                            .isNotEmpty()
                            .containsKeys(cellIds.toArray(new String[0]));
                });
    }

    // Querying with no fields at all should return an empty result rather than fail.
    @Test
    void getFieldValuesForNoFieldsReturnsEmpty() {
        String experimentAccession = jdbcUtils.fetchRandomSingleCellExperimentAccession();
        assertThat(subject.getQueryResultForMultiValueFields(experimentAccession, Optional.empty(), ImmutableSet.of()))
                .isEmpty();
    }

    // Experiments whose IDF declares EAAdditionalAttributes should report those field names.
    @ParameterizedTest
    @MethodSource("experimentsWithAdditionalAttributesProvider")
    void validExperimentIdHasAdditionalAttributes(String experimentAccession) {
        assertThat(subject.getAdditionalAttributesFieldNames(experimentAccession)).isNotEmpty();
    }

    // Experiments without the IDF attribute line should report no additional attributes.
    @ParameterizedTest
    @MethodSource("experimentsWithoutAdditionalAttributesProvider")
    void invalidExperimentAccessionHasNoAdditionalAttributes(String experimentAccession) {
        assertThat(subject.getAdditionalAttributesFieldNames(experimentAccession)).isEmpty();
    }

    // Provider: all public single-cell experiments that are expected to have metadata.
    private Stream<String> experimentsWithMetadataProvider() {
        // E-GEOD-99058 does not have any metadata (factors or inferred cell types)
        // E-GEOD-71585 has missing inferred cell types for some cells
        return jdbcUtils.fetchPublicSingleCellExperimentAccessions()
                .stream()
                .filter(accession ->
                        !accession.equalsIgnoreCase("E-GEOD-99058") && !accession.equalsIgnoreCase("E-GEOD-71585"));
    }

    // Provider: all public single-cell experiments that are expected to have factors.
    private Stream<String> experimentsWithFactorsProvider() {
        // E-GEOD-99058 and E-ENAD-13 do not have any factors
        return jdbcUtils.fetchPublicSingleCellExperimentAccessions()
                .stream()
                .filter(accession ->
                        !accession.equalsIgnoreCase("E-GEOD-99058") && !accession.equalsIgnoreCase("E-ENAD-13"));
    }

    // Provider: experiments whose IDF file declares additional attributes.
    private Stream<String> experimentsWithAdditionalAttributesProvider() {
        return jdbcUtils.fetchPublicSingleCellExperimentAccessions()
                .stream()
                .filter(accession -> hasAdditionalAttributesInIdf(accession));
    }

    // Provider: complement of the above.
    private Stream<String> experimentsWithoutAdditionalAttributesProvider() {
        return jdbcUtils.fetchPublicSingleCellExperimentAccessions()
                .stream()
                .filter(accession -> !hasAdditionalAttributesInIdf(accession));
    }

    /**
     * Scans the experiment's IDF file for a Comment[EAAdditionalAttributes] line that
     * carries at least one non-empty value after the header column.
     * The TsvStreamer is closed via try-with-resources.
     */
    private boolean hasAdditionalAttributesInIdf(String experimentAccession) {
        try (TsvStreamer idfStreamer = dataFileHub.getExperimentFiles(experimentAccession).idf.get()) {
            Optional<List<String>> additionalAttributesLine =
                    idfStreamer
                            .get()
                            // Match the header in column 0, ignoring whitespace and case.
                            .filter(line ->
                                    StringUtils.trimAllWhitespace(line[0]).equalsIgnoreCase(IDF_ADDITIONAL_ATTRIBUTES_ID))
                            // Keep only the non-empty values after the header column.
                            .map(line ->
                                    Arrays.stream(line)
                                            .skip(1)
                                            .filter(item -> !item.isEmpty())
                                            .collect(Collectors.toList()))
                            .filter(x -> !x.isEmpty())
                            .findFirst();

            return additionalAttributesLine.isPresent();
        }
    }
}
apache-2.0
unlimitedggames/gdxjam-ugg
core/src/com/ugg/gdxjam/model/systems/LockOnSystem.java
1806
package com.ugg.gdxjam.model.systems; import com.badlogic.ashley.core.Entity; import com.badlogic.ashley.core.Family; import com.badlogic.ashley.systems.IteratingSystem; import com.badlogic.gdx.math.Vector2; import com.ugg.gdxjam.model.Mappers; import com.ugg.gdxjam.model.SteerLocation; import com.ugg.gdxjam.model.components.CollisionComponent; import com.ugg.gdxjam.model.components.LockOnComponent; import com.ugg.gdxjam.model.components.PhysicsComponent; import com.ugg.gdxjam.model.components.SteeringBehaviorComponent; import com.ugg.gdxjam.model.utils.BehaviorUtils; public class LockOnSystem extends IteratingSystem { public LockOnSystem() { super(Family.all(LockOnComponent.class).get()); } public void processEntity(Entity entity, float deltaTime) { LockOnComponent lockOnC = Mappers.lockOn.get(entity); if(lockOnC.lockOnTime.limit > 0 && lockOnC.lockOnTime.lapse(deltaTime)) { entity.remove(LockOnComponent.class); return; } if(lockOnC.updateTargetLocation) { SteeringBehaviorComponent behaviorC = Mappers.steeringBehavior.get(entity); //Get target physicsComponent, so we know the position /*if(lockOnC.target == null || lockOnC.target.entity == null) { lockOnC.origin.entity.remove(LockOnComponent.class); return; }*/ PhysicsComponent physicsComponent = Mappers.physics.get(lockOnC.target.entity); Vector2 targetPosition = physicsComponent.body.getPosition(); //Set the new target position SteerLocation targetLocation = BehaviorUtils.getTarget(behaviorC.behavior); if (targetLocation != null) targetLocation.getPosition().set(targetPosition); } } }
apache-2.0
yongbeam/Android-Y-PhotoPicker
app/src/main/java/com/yongbeam/navi/MainActivity.java
2205
package com.yongbeam.navi; import android.content.Intent; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.util.Log; import android.view.View; import android.widget.Button; import com.yongbeam.y_photopicker.util.photopicker.PhotoPagerActivity; import com.yongbeam.y_photopicker.util.photopicker.PhotoPickerActivity; import com.yongbeam.y_photopicker.util.photopicker.utils.YPhotoPickerIntent; import java.util.ArrayList; import java.util.List; public class MainActivity extends AppCompatActivity { public final static int REQUEST_CODE = 1; private Button button3; public static ArrayList<String> selectedPhotos = new ArrayList<>(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); button3 = (Button) findViewById(R.id.button3); button3.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { YPhotoPickerIntent intent = new YPhotoPickerIntent(MainActivity.this); intent.setMaxSelectCount(20); intent.setShowCamera(true); intent.setShowGif(true); intent.setSelectCheckBox(false); intent.setMaxGrideItemCount(3); startActivityForResult(intent, REQUEST_CODE); } }); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); List<String> photos = null; if (resultCode == RESULT_OK && requestCode == REQUEST_CODE) { if (data != null) { photos = data.getStringArrayListExtra(PhotoPickerActivity.KEY_SELECTED_PHOTOS); } if (photos != null) { selectedPhotos.addAll(photos); } // start image viewr Intent startActivity = new Intent(this , PhotoPagerActivity.class); startActivity.putStringArrayListExtra("photos" , selectedPhotos); startActivity(startActivity); } } }
apache-2.0
harfalm/Sakai-10.1
assignment/assignment-impl/impl/src/java/org/sakaiproject/assignment/impl/conversion/impl/RemoveDuplicateSubmissionsConversionHandler.java
2387
/********************************************************************************** * $URL: https://source.sakaiproject.org/svn/assignment/tags/sakai-10.1/assignment-impl/impl/src/java/org/sakaiproject/assignment/impl/conversion/impl/RemoveDuplicateSubmissionsConversionHandler.java $ * $Id: RemoveDuplicateSubmissionsConversionHandler.java 105078 2012-02-24 23:00:38Z ottenhoff@longsight.com $ *********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* **********************************************************************************/ package org.sakaiproject.assignment.impl.conversion.impl; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import org.sakaiproject.util.conversion.SchemaConversionHandler; /** * * * */ public class RemoveDuplicateSubmissionsConversionHandler implements SchemaConversionHandler { // db driver private String m_dbDriver = null; /** * {@inheritDoc} */ public String getDbDriver() { return m_dbDriver; } /** * {@inheritDoc} */ public void setDbDriver(String dbDriver) { m_dbDriver = dbDriver; } public boolean convertSource(String id, Object source, PreparedStatement updateRecord) throws SQLException { updateRecord.setString(1, id); // TODO Auto-generated method stub return true; } public Object getSource(String id, ResultSet rs) throws SQLException { return rs.next()?rs.getString(1):null; } public Object getValidateSource(String id, ResultSet rs) throws SQLException { // TODO Auto-generated method stub return null; } public void validate(String id, Object source, Object result) throws Exception { // TODO Auto-generated method stub } }
apache-2.0
CypherCove/gdx-cclibs
gdxtokryo/src/main/java/com/cyphercove/gdx/gdxtokryo/gdxserializers/math/GridPoint3Serializer.java
1669
/******************************************************************************* * Copyright 2017 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.cyphercove.gdx.gdxtokryo.gdxserializers.math; import com.badlogic.gdx.math.GridPoint3; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.Serializer; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; public class GridPoint3Serializer extends Serializer<GridPoint3> { @Override public void write(Kryo kryo, Output output, GridPoint3 gridPoint3) { output.writeInt(gridPoint3.x); output.writeInt(gridPoint3.y); output.writeInt(gridPoint3.z); } @Override public GridPoint3 read(Kryo kryo, Input input, Class<GridPoint3> type) { int x = input.readInt(); int y = input.readInt(); int z = input.readInt(); return new GridPoint3(x, y, z); } @Override public GridPoint3 copy (Kryo kryo, GridPoint3 original) { return new GridPoint3(original); } }
apache-2.0
chsimon99/myDouNiu
app/src/main/java/com/zfxf/douniu/bean/address/County.java
359
package com.zfxf.douniu.bean.address; public class County { private String areaId; private String areaName; public String getAreaId() { return areaId; } public void setAreaId(String areaId) { this.areaId = areaId; } public String getAreaName() { return areaName; } public void setAreaName(String areaName) { this.areaName = areaName; } }
apache-2.0
jishenghua/JSH_ERP
jshERP-boot/src/main/java/com/jsh/erp/datasource/entities/AccountHeadExample.java
40126
package com.jsh.erp.datasource.entities; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Date; import java.util.List; public class AccountHeadExample { protected String orderByClause; protected boolean distinct; protected List<Criteria> oredCriteria; public AccountHeadExample() { oredCriteria = new ArrayList<>(); } public void setOrderByClause(String orderByClause) { this.orderByClause = orderByClause; } public String getOrderByClause() { return orderByClause; } public void setDistinct(boolean distinct) { this.distinct = distinct; } public boolean isDistinct() { return distinct; } public List<Criteria> getOredCriteria() { return oredCriteria; } public void or(Criteria criteria) { oredCriteria.add(criteria); } public Criteria or() { Criteria criteria = createCriteriaInternal(); oredCriteria.add(criteria); return criteria; } public Criteria createCriteria() { Criteria criteria = createCriteriaInternal(); if (oredCriteria.size() == 0) { oredCriteria.add(criteria); } return criteria; } protected Criteria createCriteriaInternal() { Criteria criteria = new Criteria(); return criteria; } public void clear() { oredCriteria.clear(); orderByClause = null; distinct = false; } protected abstract static class GeneratedCriteria { protected List<Criterion> criteria; protected GeneratedCriteria() { super(); criteria = new ArrayList<>(); } public boolean isValid() { return criteria.size() > 0; } public List<Criterion> getAllCriteria() { return criteria; } public List<Criterion> getCriteria() { return criteria; } protected void addCriterion(String condition) { if (condition == null) { throw new RuntimeException("Value for condition cannot be null"); } criteria.add(new Criterion(condition)); } protected void addCriterion(String condition, Object value, String property) { if (value == null) { throw new RuntimeException("Value for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value)); } protected void addCriterion(String condition, 
Object value1, Object value2, String property) { if (value1 == null || value2 == null) { throw new RuntimeException("Between values for " + property + " cannot be null"); } criteria.add(new Criterion(condition, value1, value2)); } public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; } public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; } public Criteria andIdEqualTo(Long value) { addCriterion("id =", value, "id"); return (Criteria) this; } public Criteria andIdNotEqualTo(Long value) { addCriterion("id <>", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThan(Long value) { addCriterion("id >", value, "id"); return (Criteria) this; } public Criteria andIdGreaterThanOrEqualTo(Long value) { addCriterion("id >=", value, "id"); return (Criteria) this; } public Criteria andIdLessThan(Long value) { addCriterion("id <", value, "id"); return (Criteria) this; } public Criteria andIdLessThanOrEqualTo(Long value) { addCriterion("id <=", value, "id"); return (Criteria) this; } public Criteria andIdIn(List<Long> values) { addCriterion("id in", values, "id"); return (Criteria) this; } public Criteria andIdNotIn(List<Long> values) { addCriterion("id not in", values, "id"); return (Criteria) this; } public Criteria andIdBetween(Long value1, Long value2) { addCriterion("id between", value1, value2, "id"); return (Criteria) this; } public Criteria andIdNotBetween(Long value1, Long value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; } public Criteria andTypeIsNull() { addCriterion("type is null"); return (Criteria) this; } public Criteria andTypeIsNotNull() { addCriterion("type is not null"); return (Criteria) this; } public Criteria andTypeEqualTo(String value) { addCriterion("type =", value, "type"); return (Criteria) this; } public Criteria andTypeNotEqualTo(String value) { addCriterion("type <>", value, "type"); return (Criteria) this; } public Criteria 
andTypeGreaterThan(String value) { addCriterion("type >", value, "type"); return (Criteria) this; } public Criteria andTypeGreaterThanOrEqualTo(String value) { addCriterion("type >=", value, "type"); return (Criteria) this; } public Criteria andTypeLessThan(String value) { addCriterion("type <", value, "type"); return (Criteria) this; } public Criteria andTypeLessThanOrEqualTo(String value) { addCriterion("type <=", value, "type"); return (Criteria) this; } public Criteria andTypeLike(String value) { addCriterion("type like", value, "type"); return (Criteria) this; } public Criteria andTypeNotLike(String value) { addCriterion("type not like", value, "type"); return (Criteria) this; } public Criteria andTypeIn(List<String> values) { addCriterion("type in", values, "type"); return (Criteria) this; } public Criteria andTypeNotIn(List<String> values) { addCriterion("type not in", values, "type"); return (Criteria) this; } public Criteria andTypeBetween(String value1, String value2) { addCriterion("type between", value1, value2, "type"); return (Criteria) this; } public Criteria andTypeNotBetween(String value1, String value2) { addCriterion("type not between", value1, value2, "type"); return (Criteria) this; } public Criteria andOrganIdIsNull() { addCriterion("organ_id is null"); return (Criteria) this; } public Criteria andOrganIdIsNotNull() { addCriterion("organ_id is not null"); return (Criteria) this; } public Criteria andOrganIdEqualTo(Long value) { addCriterion("organ_id =", value, "organId"); return (Criteria) this; } public Criteria andOrganIdNotEqualTo(Long value) { addCriterion("organ_id <>", value, "organId"); return (Criteria) this; } public Criteria andOrganIdGreaterThan(Long value) { addCriterion("organ_id >", value, "organId"); return (Criteria) this; } public Criteria andOrganIdGreaterThanOrEqualTo(Long value) { addCriterion("organ_id >=", value, "organId"); return (Criteria) this; } public Criteria andOrganIdLessThan(Long value) { addCriterion("organ_id 
<", value, "organId"); return (Criteria) this; } public Criteria andOrganIdLessThanOrEqualTo(Long value) { addCriterion("organ_id <=", value, "organId"); return (Criteria) this; } public Criteria andOrganIdIn(List<Long> values) { addCriterion("organ_id in", values, "organId"); return (Criteria) this; } public Criteria andOrganIdNotIn(List<Long> values) { addCriterion("organ_id not in", values, "organId"); return (Criteria) this; } public Criteria andOrganIdBetween(Long value1, Long value2) { addCriterion("organ_id between", value1, value2, "organId"); return (Criteria) this; } public Criteria andOrganIdNotBetween(Long value1, Long value2) { addCriterion("organ_id not between", value1, value2, "organId"); return (Criteria) this; } public Criteria andHandsPersonIdIsNull() { addCriterion("hands_person_id is null"); return (Criteria) this; } public Criteria andHandsPersonIdIsNotNull() { addCriterion("hands_person_id is not null"); return (Criteria) this; } public Criteria andHandsPersonIdEqualTo(Long value) { addCriterion("hands_person_id =", value, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdNotEqualTo(Long value) { addCriterion("hands_person_id <>", value, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdGreaterThan(Long value) { addCriterion("hands_person_id >", value, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdGreaterThanOrEqualTo(Long value) { addCriterion("hands_person_id >=", value, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdLessThan(Long value) { addCriterion("hands_person_id <", value, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdLessThanOrEqualTo(Long value) { addCriterion("hands_person_id <=", value, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdIn(List<Long> values) { addCriterion("hands_person_id in", values, "handsPersonId"); return (Criteria) this; } public Criteria 
andHandsPersonIdNotIn(List<Long> values) { addCriterion("hands_person_id not in", values, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdBetween(Long value1, Long value2) { addCriterion("hands_person_id between", value1, value2, "handsPersonId"); return (Criteria) this; } public Criteria andHandsPersonIdNotBetween(Long value1, Long value2) { addCriterion("hands_person_id not between", value1, value2, "handsPersonId"); return (Criteria) this; } public Criteria andCreatorIsNull() { addCriterion("creator is null"); return (Criteria) this; } public Criteria andCreatorIsNotNull() { addCriterion("creator is not null"); return (Criteria) this; } public Criteria andCreatorEqualTo(Long value) { addCriterion("creator =", value, "creator"); return (Criteria) this; } public Criteria andCreatorNotEqualTo(Long value) { addCriterion("creator <>", value, "creator"); return (Criteria) this; } public Criteria andCreatorGreaterThan(Long value) { addCriterion("creator >", value, "creator"); return (Criteria) this; } public Criteria andCreatorGreaterThanOrEqualTo(Long value) { addCriterion("creator >=", value, "creator"); return (Criteria) this; } public Criteria andCreatorLessThan(Long value) { addCriterion("creator <", value, "creator"); return (Criteria) this; } public Criteria andCreatorLessThanOrEqualTo(Long value) { addCriterion("creator <=", value, "creator"); return (Criteria) this; } public Criteria andCreatorIn(List<Long> values) { addCriterion("creator in", values, "creator"); return (Criteria) this; } public Criteria andCreatorNotIn(List<Long> values) { addCriterion("creator not in", values, "creator"); return (Criteria) this; } public Criteria andCreatorBetween(Long value1, Long value2) { addCriterion("creator between", value1, value2, "creator"); return (Criteria) this; } public Criteria andCreatorNotBetween(Long value1, Long value2) { addCriterion("creator not between", value1, value2, "creator"); return (Criteria) this; } public Criteria 
andChangeAmountIsNull() { addCriterion("change_amount is null"); return (Criteria) this; } public Criteria andChangeAmountIsNotNull() { addCriterion("change_amount is not null"); return (Criteria) this; } public Criteria andChangeAmountEqualTo(BigDecimal value) { addCriterion("change_amount =", value, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountNotEqualTo(BigDecimal value) { addCriterion("change_amount <>", value, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountGreaterThan(BigDecimal value) { addCriterion("change_amount >", value, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountGreaterThanOrEqualTo(BigDecimal value) { addCriterion("change_amount >=", value, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountLessThan(BigDecimal value) { addCriterion("change_amount <", value, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountLessThanOrEqualTo(BigDecimal value) { addCriterion("change_amount <=", value, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountIn(List<BigDecimal> values) { addCriterion("change_amount in", values, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountNotIn(List<BigDecimal> values) { addCriterion("change_amount not in", values, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountBetween(BigDecimal value1, BigDecimal value2) { addCriterion("change_amount between", value1, value2, "changeAmount"); return (Criteria) this; } public Criteria andChangeAmountNotBetween(BigDecimal value1, BigDecimal value2) { addCriterion("change_amount not between", value1, value2, "changeAmount"); return (Criteria) this; } public Criteria andDiscountMoneyIsNull() { addCriterion("discount_money is null"); return (Criteria) this; } public Criteria andDiscountMoneyIsNotNull() { addCriterion("discount_money is not null"); return (Criteria) this; } public Criteria 
andDiscountMoneyEqualTo(BigDecimal value) { addCriterion("discount_money =", value, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyNotEqualTo(BigDecimal value) { addCriterion("discount_money <>", value, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyGreaterThan(BigDecimal value) { addCriterion("discount_money >", value, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyGreaterThanOrEqualTo(BigDecimal value) { addCriterion("discount_money >=", value, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyLessThan(BigDecimal value) { addCriterion("discount_money <", value, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyLessThanOrEqualTo(BigDecimal value) { addCriterion("discount_money <=", value, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyIn(List<BigDecimal> values) { addCriterion("discount_money in", values, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyNotIn(List<BigDecimal> values) { addCriterion("discount_money not in", values, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyBetween(BigDecimal value1, BigDecimal value2) { addCriterion("discount_money between", value1, value2, "discountMoney"); return (Criteria) this; } public Criteria andDiscountMoneyNotBetween(BigDecimal value1, BigDecimal value2) { addCriterion("discount_money not between", value1, value2, "discountMoney"); return (Criteria) this; } public Criteria andTotalPriceIsNull() { addCriterion("total_price is null"); return (Criteria) this; } public Criteria andTotalPriceIsNotNull() { addCriterion("total_price is not null"); return (Criteria) this; } public Criteria andTotalPriceEqualTo(BigDecimal value) { addCriterion("total_price =", value, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceNotEqualTo(BigDecimal value) { addCriterion("total_price <>", value, 
"totalPrice"); return (Criteria) this; } public Criteria andTotalPriceGreaterThan(BigDecimal value) { addCriterion("total_price >", value, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceGreaterThanOrEqualTo(BigDecimal value) { addCriterion("total_price >=", value, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceLessThan(BigDecimal value) { addCriterion("total_price <", value, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceLessThanOrEqualTo(BigDecimal value) { addCriterion("total_price <=", value, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceIn(List<BigDecimal> values) { addCriterion("total_price in", values, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceNotIn(List<BigDecimal> values) { addCriterion("total_price not in", values, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceBetween(BigDecimal value1, BigDecimal value2) { addCriterion("total_price between", value1, value2, "totalPrice"); return (Criteria) this; } public Criteria andTotalPriceNotBetween(BigDecimal value1, BigDecimal value2) { addCriterion("total_price not between", value1, value2, "totalPrice"); return (Criteria) this; } public Criteria andAccountIdIsNull() { addCriterion("account_id is null"); return (Criteria) this; } public Criteria andAccountIdIsNotNull() { addCriterion("account_id is not null"); return (Criteria) this; } public Criteria andAccountIdEqualTo(Long value) { addCriterion("account_id =", value, "accountId"); return (Criteria) this; } public Criteria andAccountIdNotEqualTo(Long value) { addCriterion("account_id <>", value, "accountId"); return (Criteria) this; } public Criteria andAccountIdGreaterThan(Long value) { addCriterion("account_id >", value, "accountId"); return (Criteria) this; } public Criteria andAccountIdGreaterThanOrEqualTo(Long value) { addCriterion("account_id >=", value, "accountId"); return (Criteria) this; } public Criteria 
andAccountIdLessThan(Long value) { addCriterion("account_id <", value, "accountId"); return (Criteria) this; } public Criteria andAccountIdLessThanOrEqualTo(Long value) { addCriterion("account_id <=", value, "accountId"); return (Criteria) this; } public Criteria andAccountIdIn(List<Long> values) { addCriterion("account_id in", values, "accountId"); return (Criteria) this; } public Criteria andAccountIdNotIn(List<Long> values) { addCriterion("account_id not in", values, "accountId"); return (Criteria) this; } public Criteria andAccountIdBetween(Long value1, Long value2) { addCriterion("account_id between", value1, value2, "accountId"); return (Criteria) this; } public Criteria andAccountIdNotBetween(Long value1, Long value2) { addCriterion("account_id not between", value1, value2, "accountId"); return (Criteria) this; } public Criteria andBillNoIsNull() { addCriterion("bill_no is null"); return (Criteria) this; } public Criteria andBillNoIsNotNull() { addCriterion("bill_no is not null"); return (Criteria) this; } public Criteria andBillNoEqualTo(String value) { addCriterion("bill_no =", value, "billNo"); return (Criteria) this; } public Criteria andBillNoNotEqualTo(String value) { addCriterion("bill_no <>", value, "billNo"); return (Criteria) this; } public Criteria andBillNoGreaterThan(String value) { addCriterion("bill_no >", value, "billNo"); return (Criteria) this; } public Criteria andBillNoGreaterThanOrEqualTo(String value) { addCriterion("bill_no >=", value, "billNo"); return (Criteria) this; } public Criteria andBillNoLessThan(String value) { addCriterion("bill_no <", value, "billNo"); return (Criteria) this; } public Criteria andBillNoLessThanOrEqualTo(String value) { addCriterion("bill_no <=", value, "billNo"); return (Criteria) this; } public Criteria andBillNoLike(String value) { addCriterion("bill_no like", value, "billNo"); return (Criteria) this; } public Criteria andBillNoNotLike(String value) { addCriterion("bill_no not like", value, "billNo"); 
return (Criteria) this; } public Criteria andBillNoIn(List<String> values) { addCriterion("bill_no in", values, "billNo"); return (Criteria) this; } public Criteria andBillNoNotIn(List<String> values) { addCriterion("bill_no not in", values, "billNo"); return (Criteria) this; } public Criteria andBillNoBetween(String value1, String value2) { addCriterion("bill_no between", value1, value2, "billNo"); return (Criteria) this; } public Criteria andBillNoNotBetween(String value1, String value2) { addCriterion("bill_no not between", value1, value2, "billNo"); return (Criteria) this; } public Criteria andBillTimeIsNull() { addCriterion("bill_time is null"); return (Criteria) this; } public Criteria andBillTimeIsNotNull() { addCriterion("bill_time is not null"); return (Criteria) this; } public Criteria andBillTimeEqualTo(Date value) { addCriterion("bill_time =", value, "billTime"); return (Criteria) this; } public Criteria andBillTimeNotEqualTo(Date value) { addCriterion("bill_time <>", value, "billTime"); return (Criteria) this; } public Criteria andBillTimeGreaterThan(Date value) { addCriterion("bill_time >", value, "billTime"); return (Criteria) this; } public Criteria andBillTimeGreaterThanOrEqualTo(Date value) { addCriterion("bill_time >=", value, "billTime"); return (Criteria) this; } public Criteria andBillTimeLessThan(Date value) { addCriterion("bill_time <", value, "billTime"); return (Criteria) this; } public Criteria andBillTimeLessThanOrEqualTo(Date value) { addCriterion("bill_time <=", value, "billTime"); return (Criteria) this; } public Criteria andBillTimeIn(List<Date> values) { addCriterion("bill_time in", values, "billTime"); return (Criteria) this; } public Criteria andBillTimeNotIn(List<Date> values) { addCriterion("bill_time not in", values, "billTime"); return (Criteria) this; } public Criteria andBillTimeBetween(Date value1, Date value2) { addCriterion("bill_time between", value1, value2, "billTime"); return (Criteria) this; } public Criteria 
andBillTimeNotBetween(Date value1, Date value2) { addCriterion("bill_time not between", value1, value2, "billTime"); return (Criteria) this; } public Criteria andRemarkIsNull() { addCriterion("remark is null"); return (Criteria) this; } public Criteria andRemarkIsNotNull() { addCriterion("remark is not null"); return (Criteria) this; } public Criteria andRemarkEqualTo(String value) { addCriterion("remark =", value, "remark"); return (Criteria) this; } public Criteria andRemarkNotEqualTo(String value) { addCriterion("remark <>", value, "remark"); return (Criteria) this; } public Criteria andRemarkGreaterThan(String value) { addCriterion("remark >", value, "remark"); return (Criteria) this; } public Criteria andRemarkGreaterThanOrEqualTo(String value) { addCriterion("remark >=", value, "remark"); return (Criteria) this; } public Criteria andRemarkLessThan(String value) { addCriterion("remark <", value, "remark"); return (Criteria) this; } public Criteria andRemarkLessThanOrEqualTo(String value) { addCriterion("remark <=", value, "remark"); return (Criteria) this; } public Criteria andRemarkLike(String value) { addCriterion("remark like", value, "remark"); return (Criteria) this; } public Criteria andRemarkNotLike(String value) { addCriterion("remark not like", value, "remark"); return (Criteria) this; } public Criteria andRemarkIn(List<String> values) { addCriterion("remark in", values, "remark"); return (Criteria) this; } public Criteria andRemarkNotIn(List<String> values) { addCriterion("remark not in", values, "remark"); return (Criteria) this; } public Criteria andRemarkBetween(String value1, String value2) { addCriterion("remark between", value1, value2, "remark"); return (Criteria) this; } public Criteria andRemarkNotBetween(String value1, String value2) { addCriterion("remark not between", value1, value2, "remark"); return (Criteria) this; } public Criteria andFileNameIsNull() { addCriterion("file_name is null"); return (Criteria) this; } public Criteria 
andFileNameIsNotNull() { addCriterion("file_name is not null"); return (Criteria) this; } public Criteria andFileNameEqualTo(String value) { addCriterion("file_name =", value, "fileName"); return (Criteria) this; } public Criteria andFileNameNotEqualTo(String value) { addCriterion("file_name <>", value, "fileName"); return (Criteria) this; } public Criteria andFileNameGreaterThan(String value) { addCriterion("file_name >", value, "fileName"); return (Criteria) this; } public Criteria andFileNameGreaterThanOrEqualTo(String value) { addCriterion("file_name >=", value, "fileName"); return (Criteria) this; } public Criteria andFileNameLessThan(String value) { addCriterion("file_name <", value, "fileName"); return (Criteria) this; } public Criteria andFileNameLessThanOrEqualTo(String value) { addCriterion("file_name <=", value, "fileName"); return (Criteria) this; } public Criteria andFileNameLike(String value) { addCriterion("file_name like", value, "fileName"); return (Criteria) this; } public Criteria andFileNameNotLike(String value) { addCriterion("file_name not like", value, "fileName"); return (Criteria) this; } public Criteria andFileNameIn(List<String> values) { addCriterion("file_name in", values, "fileName"); return (Criteria) this; } public Criteria andFileNameNotIn(List<String> values) { addCriterion("file_name not in", values, "fileName"); return (Criteria) this; } public Criteria andFileNameBetween(String value1, String value2) { addCriterion("file_name between", value1, value2, "fileName"); return (Criteria) this; } public Criteria andFileNameNotBetween(String value1, String value2) { addCriterion("file_name not between", value1, value2, "fileName"); return (Criteria) this; } public Criteria andStatusIsNull() { addCriterion("status is null"); return (Criteria) this; } public Criteria andStatusIsNotNull() { addCriterion("status is not null"); return (Criteria) this; } public Criteria andStatusEqualTo(String value) { addCriterion("status =", value, 
"status"); return (Criteria) this; } public Criteria andStatusNotEqualTo(String value) { addCriterion("status <>", value, "status"); return (Criteria) this; } public Criteria andStatusGreaterThan(String value) { addCriterion("status >", value, "status"); return (Criteria) this; } public Criteria andStatusGreaterThanOrEqualTo(String value) { addCriterion("status >=", value, "status"); return (Criteria) this; } public Criteria andStatusLessThan(String value) { addCriterion("status <", value, "status"); return (Criteria) this; } public Criteria andStatusLessThanOrEqualTo(String value) { addCriterion("status <=", value, "status"); return (Criteria) this; } public Criteria andStatusLike(String value) { addCriterion("status like", value, "status"); return (Criteria) this; } public Criteria andStatusNotLike(String value) { addCriterion("status not like", value, "status"); return (Criteria) this; } public Criteria andStatusIn(List<String> values) { addCriterion("status in", values, "status"); return (Criteria) this; } public Criteria andStatusNotIn(List<String> values) { addCriterion("status not in", values, "status"); return (Criteria) this; } public Criteria andStatusBetween(String value1, String value2) { addCriterion("status between", value1, value2, "status"); return (Criteria) this; } public Criteria andStatusNotBetween(String value1, String value2) { addCriterion("status not between", value1, value2, "status"); return (Criteria) this; } public Criteria andTenantIdIsNull() { addCriterion("tenant_id is null"); return (Criteria) this; } public Criteria andTenantIdIsNotNull() { addCriterion("tenant_id is not null"); return (Criteria) this; } public Criteria andTenantIdEqualTo(Long value) { addCriterion("tenant_id =", value, "tenantId"); return (Criteria) this; } public Criteria andTenantIdNotEqualTo(Long value) { addCriterion("tenant_id <>", value, "tenantId"); return (Criteria) this; } public Criteria andTenantIdGreaterThan(Long value) { addCriterion("tenant_id >", 
value, "tenantId"); return (Criteria) this; } public Criteria andTenantIdGreaterThanOrEqualTo(Long value) { addCriterion("tenant_id >=", value, "tenantId"); return (Criteria) this; } public Criteria andTenantIdLessThan(Long value) { addCriterion("tenant_id <", value, "tenantId"); return (Criteria) this; } public Criteria andTenantIdLessThanOrEqualTo(Long value) { addCriterion("tenant_id <=", value, "tenantId"); return (Criteria) this; } public Criteria andTenantIdIn(List<Long> values) { addCriterion("tenant_id in", values, "tenantId"); return (Criteria) this; } public Criteria andTenantIdNotIn(List<Long> values) { addCriterion("tenant_id not in", values, "tenantId"); return (Criteria) this; } public Criteria andTenantIdBetween(Long value1, Long value2) { addCriterion("tenant_id between", value1, value2, "tenantId"); return (Criteria) this; } public Criteria andTenantIdNotBetween(Long value1, Long value2) { addCriterion("tenant_id not between", value1, value2, "tenantId"); return (Criteria) this; } public Criteria andDeleteFlagIsNull() { addCriterion("delete_flag is null"); return (Criteria) this; } public Criteria andDeleteFlagIsNotNull() { addCriterion("delete_flag is not null"); return (Criteria) this; } public Criteria andDeleteFlagEqualTo(String value) { addCriterion("delete_flag =", value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagNotEqualTo(String value) { addCriterion("delete_flag <>", value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagGreaterThan(String value) { addCriterion("delete_flag >", value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagGreaterThanOrEqualTo(String value) { addCriterion("delete_flag >=", value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagLessThan(String value) { addCriterion("delete_flag <", value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagLessThanOrEqualTo(String value) { addCriterion("delete_flag <=", 
value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagLike(String value) { addCriterion("delete_flag like", value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagNotLike(String value) { addCriterion("delete_flag not like", value, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagIn(List<String> values) { addCriterion("delete_flag in", values, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagNotIn(List<String> values) { addCriterion("delete_flag not in", values, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagBetween(String value1, String value2) { addCriterion("delete_flag between", value1, value2, "deleteFlag"); return (Criteria) this; } public Criteria andDeleteFlagNotBetween(String value1, String value2) { addCriterion("delete_flag not between", value1, value2, "deleteFlag"); return (Criteria) this; } } public static class Criteria extends GeneratedCriteria { protected Criteria() { super(); } } public static class Criterion { private String condition; private Object value; private Object secondValue; private boolean noValue; private boolean singleValue; private boolean betweenValue; private boolean listValue; private String typeHandler; public String getCondition() { return condition; } public Object getValue() { return value; } public Object getSecondValue() { return secondValue; } public boolean isNoValue() { return noValue; } public boolean isSingleValue() { return singleValue; } public boolean isBetweenValue() { return betweenValue; } public boolean isListValue() { return listValue; } public String getTypeHandler() { return typeHandler; } protected Criterion(String condition) { super(); this.condition = condition; this.typeHandler = null; this.noValue = true; } protected Criterion(String condition, Object value, String typeHandler) { super(); this.condition = condition; this.value = value; this.typeHandler = typeHandler; if (value instanceof List<?>) 
{ this.listValue = true; } else { this.singleValue = true; } } protected Criterion(String condition, Object value) { this(condition, value, null); } protected Criterion(String condition, Object value, Object secondValue, String typeHandler) { super(); this.condition = condition; this.value = value; this.secondValue = secondValue; this.typeHandler = typeHandler; this.betweenValue = true; } protected Criterion(String condition, Object value, Object secondValue) { this(condition, value, secondValue, null); } } }
apache-2.0
ThalKod/Miwok-App
app/src/main/java/com/example/android/miwok/PhrasesFragment.java
5580
package com.example.android.miwok; import android.media.AudioManager; import android.media.MediaPlayer; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ListView; import java.util.ArrayList; /** * Created by Thal Marc on 1/16/2017. */ public class PhrasesFragment extends Fragment { private MediaPlayer mediaPlayer; private AudioManager am; AudioManager.OnAudioFocusChangeListener audioListener = new AudioManager.OnAudioFocusChangeListener() { @Override public void onAudioFocusChange(int i) { //In case audio focus is loss if(i == AudioManager.AUDIOFOCUS_GAIN){ mediaPlayer.start(); //Listener to wait the music stop playing mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() { @Override public void onCompletion(MediaPlayer mediaPlayer) { releaseMediaplayer(); } }); } if(i== AudioManager.AUDIOFOCUS_LOSS_TRANSIENT){ mediaPlayer.pause(); } if(i==AudioManager.AUDIOFOCUS_LOSS){ mediaPlayer.stop(); releaseMediaplayer(); am.abandonAudioFocus(this); } } }; @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.display,container,false); am = (AudioManager) getActivity().getSystemService(getActivity().AUDIO_SERVICE); final ArrayList<Word> PhrasesArrayList = new ArrayList<Word>(); PhrasesArrayList.add(new Word("Where are you going ?", "minto wuksus",R.raw.phrase_where_are_you_going)); PhrasesArrayList.add(new Word("What is your name ?", "tinnә oyaase'nә",R.raw.phrase_what_is_your_name)); PhrasesArrayList.add(new Word("My name is...", "oyaaset...",R.raw.phrase_my_name_is)); PhrasesArrayList.add(new Word("How are you feeling?", "michәksәs?",R.raw.phrase_how_are_you_feeling)); PhrasesArrayList.add(new Word("i'm felling good", "kuchi 
achit",R.raw.phrase_im_feeling_good)); PhrasesArrayList.add(new Word("Are you Comming?", "әәnәs'aa?",R.raw.phrase_are_you_coming)); PhrasesArrayList.add(new Word("Yes, I'm coming", "hәә’ әәnәm",R.raw.phrase_yes_im_coming)); PhrasesArrayList.add(new Word("I'm coming", "әәnәm",R.raw.phrase_im_coming)); PhrasesArrayList.add(new Word("Let's Go", "yoowutis",R.raw.phrase_lets_go)); PhrasesArrayList.add(new Word("Come here", "әnni'nem",R.raw.phrase_come_here)); // Create an {@link ArrayAdapter}, whose data source is a list of Strings. The // adapter knows how to create layouts for each item in the list, using the // simple_list_item_1.xml layout resource defined in the Android framework. // This list item layout contains a single {@link TextView}, which the adapter will set to // display a single word. CustomArrayAdapter itemsAdapter = new CustomArrayAdapter(getActivity(),PhrasesArrayList,R.color.category_phrases); // Find the {@link ListView} object in the view hierarchy of the {@link Activity}. // There should be a {@link ListView} with the view ID called list, which is declared in the // displayut file. 
ListView listView = (ListView) rootView.findViewById(R.id.List); listView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) { final Word word = PhrasesArrayList.get(i); releaseMediaplayer(); // Request an Audio Focus Listener and handle different state of audio focus int okAudio = am.requestAudioFocus(audioListener,AudioManager.STREAM_MUSIC,AudioManager.AUDIOFOCUS_GAIN_TRANSIENT); if(okAudio == AudioManager.AUDIOFOCUS_REQUEST_GRANTED){ //Play the music mediaPlayer = MediaPlayer.create(getActivity(),word.getmAudioId()); mediaPlayer.start(); mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() { @Override public void onCompletion(MediaPlayer mediaPlayer) { releaseMediaplayer(); } }); } } }); // Make the {@link ListView} use the {@link ArrayAdapter} we created above, so that the // {@link ListView} will display list items for each word in the list of words. // Do this by calling the setAdapter method on the {@link ListView} object and pass in // 1 argument, which is the {@link ArrayAdapter} with the variable name itemsAdapter. listView.setAdapter(itemsAdapter); return rootView; } public void releaseMediaplayer(){ if(mediaPlayer !=null){ mediaPlayer.release(); am.abandonAudioFocus(audioListener); mediaPlayer = null; } } @Override public void onStop() { super.onStop(); releaseMediaplayer(); } }
apache-2.0
bluefoot/springsecurity
servicelayersecurity/src/main/java/info/gewton/slsecurity/dao/support/PostDao.java
1212
/* * Copyright 2011 Gewton Jhames <gewtonj@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package info.gewton.slsecurity.dao.support; import java.util.List; import info.gewton.slsecurity.dao.Dao; import info.gewton.slsecurity.model.Post; import info.gewton.slsecurity.model.PostUser; /** * Interface com métodos padrões de acesso a dados e métodos exclusivos de * manipulação da entidade {@link Post} * @author gewton */ public interface PostDao extends Dao<Post> { /** * Carrega todos os posts de um {@link PostUser} * @param u usuário que postou * @return lista com todos os posts de <tt>u</tt> */ List<Post> retrieveByUser(PostUser u); }
apache-2.0
PkayJava/pluggable
framework/src/main/java/com/googlecode/wickedcharts/highcharts/options/Center.java
1528
/** * Copyright 2012-2013 Wicked Charts (http://wicked-charts.googlecode.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.wickedcharts.highcharts.options; import java.io.Serializable; /** * Defines the location pf a pie chart * * @see <a * href="http://api.highcharts.com/highcharts#plotOptions.pie.center">http://api.highcharts.com/highcharts#plotOptions.pie.center</a> * @author Tom Hombergs (tom.hombergs@gmail.com) * */ public class Center implements Serializable { public enum Unit { PERCENT, PIXELS; } private static final long serialVersionUID = 1L; private final int x; private final int y; private final Unit unit;; public Center(final int x, final int y, final Unit unit) { this.x = x; this.y = y; this.unit = unit; } public Unit getUnit() { return this.unit; } public int getX() { return this.x; } public int getY() { return this.y; } }
apache-2.0
gejiaheng/Protein
app/src/main/java/com/ge/protein/data/api/service/FollowersService.java
1044
/* * Copyright 2017 Jiaheng Ge * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ge.protein.data.api.service; import com.ge.protein.data.model.Shot; import java.util.List; import io.reactivex.Observable; import retrofit2.Response; import retrofit2.http.GET; import retrofit2.http.Query; /** * http://developer.dribbble.com/v1/users/followers/ */ public interface FollowersService { @GET("/v1/user/following/shots") Observable<Response<List<Shot>>> listFollowingShots(@Query("per_page") int perPage); }
apache-2.0
SergeyI88/InduikovS
chapter_004/src/main/java/tree/SimpleTree.java
429
package tree; import java.util.Iterator; /** * Created by admin on 11.07.2017. */ public interface SimpleTree<E extends Comparable<E>> extends Iterable<E>{ /** * Добавить элемент child в parent. * Parent может иметь список child. * @param parent parent. * @param child child. * @return */ boolean add(E parent, E child); }
apache-2.0
oehme/analysing-gradle-performance
my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p193/Test3872.java
2111
package org.gradle.test.performance.mediummonolithicjavaproject.p193; import org.junit.Test; import static org.junit.Assert.*; public class Test3872 { Production3872 objectUnderTest = new Production3872(); @Test public void testProperty0() { String value = "value"; objectUnderTest.setProperty0(value); assertEquals(value, objectUnderTest.getProperty0()); } @Test public void testProperty1() { String value = "value"; objectUnderTest.setProperty1(value); assertEquals(value, objectUnderTest.getProperty1()); } @Test public void testProperty2() { String value = "value"; objectUnderTest.setProperty2(value); assertEquals(value, objectUnderTest.getProperty2()); } @Test public void testProperty3() { String value = "value"; objectUnderTest.setProperty3(value); assertEquals(value, objectUnderTest.getProperty3()); } @Test public void testProperty4() { String value = "value"; objectUnderTest.setProperty4(value); assertEquals(value, objectUnderTest.getProperty4()); } @Test public void testProperty5() { String value = "value"; objectUnderTest.setProperty5(value); assertEquals(value, objectUnderTest.getProperty5()); } @Test public void testProperty6() { String value = "value"; objectUnderTest.setProperty6(value); assertEquals(value, objectUnderTest.getProperty6()); } @Test public void testProperty7() { String value = "value"; objectUnderTest.setProperty7(value); assertEquals(value, objectUnderTest.getProperty7()); } @Test public void testProperty8() { String value = "value"; objectUnderTest.setProperty8(value); assertEquals(value, objectUnderTest.getProperty8()); } @Test public void testProperty9() { String value = "value"; objectUnderTest.setProperty9(value); assertEquals(value, objectUnderTest.getProperty9()); } }
apache-2.0
jituo666/CrazyPic
src/com/xjt/crazypic/common/OrientationSource.java
144
package com.xjt.crazypic.common; public interface OrientationSource { public int getDisplayRotation(); public int getCompensation(); }
apache-2.0
cchacin/codegen-tester
src/test/java/gentester/RunCukesTest.java
281
package gentester; import org.junit.runner.RunWith; import cucumber.api.CucumberOptions; import cucumber.api.junit.Cucumber; @RunWith(Cucumber.class) @CucumberOptions(plugin = "pretty", glue = "gentester", features = "src/test/resources/features") public class RunCukesTest { }
apache-2.0
salomax/machine-learning
src/test/java/org/salomax/ml/GradientDescentTest.java
763
package org.salomax.ml; import org.junit.Test; import org.salomax.ml.data.DataSet; import org.salomax.ml.data.SimpleDataSet; /** * Created by marcos.salomao on 25/12/16. */ public class GradientDescentTest { @Test public void minimize() { DataSet dataSet = new SimpleDataSet(); dataSet.add(1.0, 1.0); dataSet.add(2.0, 2.0); dataSet.add(3.0, 3.0); Hypothesis h = (thetas, features) -> thetas.get(0) * features.get(0) + thetas.get(1) * features.get(1); GradientDescent gradientDescent = new GradientDescent(); Minimum minimum = gradientDescent.minimize(dataSet, h, 0.1, 1000); // Assert.assertThat(Precision.round(minimum.getCostFunction(), 2), CoreMatchers.equalTo(0.00)); } }
apache-2.0
tanhaichao/leopard
leopard-biz/src/main/java/io/leopard/biz/sortedset/SortedSetBizImpl.java
2208
package io.leopard.biz.sortedset;

import io.leopard.data4j.pubsub.IPubSub;
import io.leopard.data4j.pubsub.Publisher;
import io.leopard.redis.Redis;

import java.util.Set;

import redis.clients.jedis.Tuple;

/**
 * Sorted-set service backed by two stores: Redis (durable) and an in-process
 * memory copy (used for reads — see {@link #zscore} and {@link #listAll}).
 * Additions are broadcast over Redis pub/sub so other instances can update
 * their memory copies (see {@link #subscribe}).
 */
public class SortedSetBizImpl implements SortedSetBiz, IPubSub {

	// Delimiter joining element and score in pub/sub messages.
	// NOTE(review): an element that itself contains ",,," would corrupt the
	// message format — confirm callers never pass such elements.
	protected static final String SPLIT = ",,,";

	private SortedSetBizRedisImpl sortedSetBizRedisImpl;

	private SortedSetBizMemoryImpl sortedSetBizMemoryImpl;

	/**
	 * Builds both stores, registers this instance as a pub/sub listener and
	 * warms the memory copy from Redis via {@link #load()}.
	 *
	 * @param redis Redis connection used for persistence and pub/sub
	 * @param key   Redis key of the underlying sorted set
	 */
	public SortedSetBizImpl(Redis redis, String key) {
		this.sortedSetBizRedisImpl = new SortedSetBizRedisImpl(redis, key);
		this.sortedSetBizMemoryImpl = new SortedSetBizMemoryImpl();
		// Subscribes before load() runs; see subscribe() for the message format.
		Publisher.listen(this, redis);
		this.load();
	}

	public SortedSetBizRedisImpl getSortedSetBizRedisImpl() {
		return sortedSetBizRedisImpl;
	}

	public SortedSetBizMemoryImpl getSortedSetBizMemoryImpl() {
		return sortedSetBizMemoryImpl;
	}

	/**
	 * Adds (or updates) an element: memory first, then Redis, then publishes
	 * "element,,,score" so peer instances update their memory copies.
	 *
	 * NOTE(review): zadd writes memory before Redis while zrem does the
	 * reverse order — confirm this asymmetry is intentional.
	 *
	 * @return the Redis write result
	 */
	@Override
	public boolean zadd(String element, double score) {
		sortedSetBizMemoryImpl.zadd(element, score);
		boolean success = sortedSetBizRedisImpl.zadd(element, score);
		String message = element + SPLIT + score;
		Publisher.publish(this, message);
		return success;
	}

	/** Reads the score from the memory copy only (no Redis round trip). */
	@Override
	public Double zscore(String element) {
		return sortedSetBizMemoryImpl.zscore(element);
	}

	/**
	 * Removes an element from Redis, then from the memory copy.
	 * Note: unlike zadd, no pub/sub message is published for removals.
	 *
	 * @return the Redis removal result
	 */
	@Override
	public boolean zrem(String element) {
		boolean success = sortedSetBizRedisImpl.zrem(element);
		sortedSetBizMemoryImpl.zrem(element);
		return success;
	}

	/** Copies every (element, score) tuple from Redis into the memory store. */
	protected void load() {
		Set<Tuple> set = this.sortedSetBizRedisImpl.listAll();
		if (set == null || set.isEmpty()) {
			return;
		}
		for (Tuple tuple : set) {
			String element = tuple.getElement();
			double score = tuple.getScore();
			this.sortedSetBizMemoryImpl.zadd(element, score);
		}
	}

	/** Lists all tuples from the memory copy only. */
	@Override
	public Set<Tuple> listAll() {
		return this.sortedSetBizMemoryImpl.listAll();
	}

	/**
	 * Pub/sub callback: applies an "element,,,score" message from a peer to
	 * the local memory copy. Messages from this instance are skipped because
	 * zadd already updated local memory before publishing.
	 *
	 * @param message  payload in the "element,,,score" format produced by zadd
	 * @param isMySelf true when this instance published the message
	 */
	@Override
	public void subscribe(String message, boolean isMySelf) {
		if (isMySelf) {
			return;
		}
		String[] list = message.split(SPLIT);
		String element = list[0];
		double score = Double.parseDouble(list[1]);
		this.sortedSetBizMemoryImpl.zadd(element, score);
	}
}
apache-2.0
quarkusio/quarkus
independent-projects/resteasy-reactive/server/processor/src/main/java/org/jboss/resteasy/reactive/server/processor/generation/multipart/MultipartTransformer.java
3968
package org.jboss.resteasy.reactive.server.processor.generation.multipart;

import io.quarkus.gizmo.Gizmo;
import java.util.function.BiFunction;
import org.jboss.resteasy.reactive.server.injection.ResteasyReactiveInjectionContext;
import org.jboss.resteasy.reactive.server.injection.ResteasyReactiveInjectionTarget;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;

/**
 * Bytecode transformation that adapts a multipart form POJO for field injection:
 * the class is made public, its non-static non-final fields are made public, it is
 * made to implement {@link ResteasyReactiveInjectionTarget}, and a
 * {@code __quarkus_rest_inject(ResteasyReactiveInjectionContext)} method is added
 * that delegates to a generated static populator class.
 */
public class MultipartTransformer implements BiFunction<String, ClassVisitor, ClassVisitor> {

    /** Name of the static method on the generated populator class. */
    static final String POPULATE_METHOD_NAME = "populate";

    private static final String INJECTION_TARGET_BINARY_NAME = ResteasyReactiveInjectionTarget.class.getName()
            .replace('.', '/');
    private static final String INJECTION_CONTEXT_BINARY_NAME = ResteasyReactiveInjectionContext.class.getName()
            .replace('.', '/');
    private static final String INJECTION_CONTEXT_DESCRIPTOR = "L" + INJECTION_CONTEXT_BINARY_NAME + ";";

    private static final String INJECT_METHOD_NAME = "__quarkus_rest_inject";
    private static final String INJECT_METHOD_DESCRIPTOR = "(" + INJECTION_CONTEXT_DESCRIPTOR + ")V";

    /** Binary or dotted name of the generated populator class holding {@code populate}. */
    private final String populatorName;

    public MultipartTransformer(String populatorName) {
        this.populatorName = populatorName;
    }

    @Override
    public ClassVisitor apply(String s, ClassVisitor visitor) {
        return new MultipartClassVisitor(Gizmo.ASM_API_VERSION, visitor, populatorName);
    }

    static class MultipartClassVisitor extends ClassVisitor {

        /** Field descriptor of the class being visited; captured in {@link #visit}. */
        private String thisDescriptor;
        private final String populatorName;

        public MultipartClassVisitor(int api, ClassVisitor classVisitor, String populatorName) {
            super(api, classVisitor);
            this.populatorName = populatorName;
        }

        @Override
        public void visit(int version, int access, String name, String signature, String superName,
                String[] interfaces) {
            thisDescriptor = "L" + name + ";";
            // make the class public so generated code can reference it
            access &= ~(Opcodes.ACC_PRIVATE | Opcodes.ACC_PROTECTED);
            access |= Opcodes.ACC_PUBLIC;
            // prepend ResteasyReactiveInjectionTarget to the implemented interfaces
            String[] newInterfaces = new String[interfaces.length + 1];
            newInterfaces[0] = INJECTION_TARGET_BINARY_NAME;
            System.arraycopy(interfaces, 0, newInterfaces, 1, interfaces.length);
            super.visit(version, access, name, signature, superName, newInterfaces);
        }

        @Override
        public FieldVisitor visitField(int access, String name, String descriptor, String signature, Object value) {
            if (((access & Opcodes.ACC_FINAL) == 0) && ((access & Opcodes.ACC_STATIC) == 0)) {
                // convert non-final, non-static fields to public so our generated code can always access it
                access &= ~(Opcodes.ACC_PRIVATE | Opcodes.ACC_PROTECTED);
                access |= Opcodes.ACC_PUBLIC;
            }
            return super.visitField(access, name, descriptor, signature, value);
        }

        @Override
        public void visitEnd() {
            // generate: public void __quarkus_rest_inject(ResteasyReactiveInjectionContext ctx) {
            //               <Populator>.populate(this, ctx);
            //           }
            MethodVisitor injectMethod = visitMethod(Opcodes.ACC_PUBLIC, INJECT_METHOD_NAME, INJECT_METHOD_DESCRIPTOR,
                    null, null);
            injectMethod.visitParameter("ctx", 0 /* modifiers */);
            injectMethod.visitCode();
            // 'this' — ALOAD is a local-variable instruction, so it must be emitted
            // via visitVarInsn (visitIntInsn is only for BIPUSH/SIPUSH/NEWARRAY)
            injectMethod.visitVarInsn(Opcodes.ALOAD, 0);
            // ctx param
            injectMethod.visitVarInsn(Opcodes.ALOAD, 1);
            // call the populator
            injectMethod.visitMethodInsn(Opcodes.INVOKESTATIC, populatorName.replace('.', '/'), POPULATE_METHOD_NAME,
                    String.format("(%s%s)V", thisDescriptor, INJECTION_CONTEXT_DESCRIPTOR),
                    false);
            injectMethod.visitInsn(Opcodes.RETURN);
            // Per the ASM MethodVisitor contract, visitMaxs must be called after the
            // instructions and BEFORE visitEnd. The (0, 0) arguments are placeholders,
            // assuming the backing ClassWriter uses COMPUTE_MAXS/COMPUTE_FRAMES —
            // TODO confirm the writer flags used by the caller.
            injectMethod.visitMaxs(0, 0);
            injectMethod.visitEnd();
            // forward the event so chained visitors / the ClassWriter see the end of class
            super.visitEnd();
        }
    }
}
apache-2.0
huazhouwang/Synapse
app/src/main/java/io/whz/synapse/transition/FabTransform.java
12107
/*
 * Copyright 2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.whz.synapse.transition;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Outline;
import android.graphics.Rect;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.support.annotation.ColorInt;
import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.transition.Transition;
import android.transition.TransitionValues;
import android.view.View;
import android.view.ViewAnimationUtils;
import android.view.ViewGroup;
import android.view.ViewOutlineProvider;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;

import java.util.ArrayList;
import java.util.List;

import static android.view.View.MeasureSpec.makeMeasureSpec;

/**
 * Thanks https://github.com/nickbutcher/plaid
 * A transition between a FAB &amp; another surface using a circular reveal moving along an arc.
 * <p>
 * See: https://www.google.com/design/spec/motion/transforming-material.html#transforming-material-radial-transformation
 * <p>
 * The FAB's color and icon are passed via intent extras ({@link #addExtras}) and faked on the
 * target surface with overlay drawables; the transition direction (expand from FAB vs. shrink
 * back to it) is inferred from which captured bounds are larger.
 */
public class FabTransform extends Transition {

    private static final String EXTRA_FAB_COLOR = "EXTRA_FAB_COLOR";
    private static final String EXTRA_FAB_ICON_RES_ID = "EXTRA_FAB_ICON_RES_ID";
    private static final long DEFAULT_DURATION = 240L;
    // key under which captured view bounds are stored in TransitionValues
    private static final String PROP_BOUNDS = "plaid:fabTransform:bounds";
    private static final String[] TRANSITION_PROPERTIES = {
            PROP_BOUNDS
    };

    private final int color; // FAB background color, faked via a ColorDrawable overlay
    private final int icon;  // FAB icon drawable resource, faked via an overlay drawable

    private FabTransform(@ColorInt int fabColor, @DrawableRes int fabIconResId) {
        color = fabColor;
        icon = fabIconResId;
        // move along a curved arc rather than a straight line
        setPathMotion(new GravityArcMotion());
        setDuration(DEFAULT_DURATION);
    }

    /**
     * Configure {@code intent} with the extras needed to initialize this transition.
     *
     * @param intent       launch intent of the target activity
     * @param fabColor     background color of the FAB being transformed
     * @param fabIconResId drawable resource of the FAB's icon
     */
    public static void addExtras(@NonNull Intent intent, @ColorInt int fabColor,
                                 @DrawableRes int fabIconResId) {
        intent.putExtra(EXTRA_FAB_COLOR, fabColor);
        intent.putExtra(EXTRA_FAB_ICON_RES_ID, fabIconResId);
    }

    /**
     * Create a {@link FabTransform} from the supplied {@code activity} extras and set as its
     * shared element enter/return transition.
     *
     * @param activity activity whose intent carries the extras written by {@link #addExtras}
     * @param target   optional view to restrict the transition to; may be null
     * @return true if both extras were present and the transition was installed
     */
    public static boolean setup(@NonNull AppCompatActivity activity, @Nullable View target) {
        final Intent intent = activity.getIntent();
        if (!intent.hasExtra(EXTRA_FAB_COLOR) || !intent.hasExtra(EXTRA_FAB_ICON_RES_ID)) {
            return false;
        }
        final int color = intent.getIntExtra(EXTRA_FAB_COLOR, Color.TRANSPARENT);
        final int icon = intent.getIntExtra(EXTRA_FAB_ICON_RES_ID, -1);
        final FabTransform sharedEnter = new FabTransform(color, icon);
        if (target != null) {
            sharedEnter.addTarget(target);
        }
        activity.getWindow().setSharedElementEnterTransition(sharedEnter);
        return true;
    }

    @Override
    public String[] getTransitionProperties() {
        return TRANSITION_PROPERTIES;
    }

    @Override
    public void captureStartValues(TransitionValues transitionValues) {
        captureValues(transitionValues);
    }

    @Override
    public void captureEndValues(TransitionValues transitionValues) {
        captureValues(transitionValues);
    }

    /**
     * Builds the combined reveal + arc-translate + cross-fade animator.
     * Direction is derived from the captured bounds: if the end bounds are wider
     * than the start bounds we are expanding FROM the FAB, otherwise returning to it.
     */
    @Override
    public Animator createAnimator(final ViewGroup sceneRoot, final TransitionValues startValues,
                                   final TransitionValues endValues) {
        if (startValues == null || endValues == null) return null;

        final Rect startBounds = (Rect) startValues.values.get(PROP_BOUNDS);
        final Rect endBounds = (Rect) endValues.values.get(PROP_BOUNDS);

        final boolean fromFab = endBounds.width() > startBounds.width();
        final View view = endValues.view;
        final Rect dialogBounds = fromFab ? endBounds : startBounds;
        final Rect fabBounds = fromFab ? startBounds : endBounds;
        // NOTE(review): variable is named "fastOutSlowIn" but loads the
        // fast_out_linear_in resource (plaid's original uses fast_out_slow_in) —
        // confirm whether the deviation is intentional.
        final Interpolator fastOutSlowInInterpolator =
                AnimationUtils.loadInterpolator(sceneRoot.getContext(), android.R.interpolator.fast_out_linear_in);
        final long duration = getDuration();
        final long halfDuration = duration / 2;
        final long twoThirdsDuration = duration * 2 / 3;

        if (!fromFab) {
            // Force measure / layout the dialog back to it's original bounds
            view.measure(
                    makeMeasureSpec(startBounds.width(), View.MeasureSpec.EXACTLY),
                    makeMeasureSpec(startBounds.height(), View.MeasureSpec.EXACTLY));
            view.layout(startBounds.left, startBounds.top, startBounds.right, startBounds.bottom);
        }

        final int translationX = startBounds.centerX() - endBounds.centerX();
        final int translationY = startBounds.centerY() - endBounds.centerY();
        if (fromFab) {
            // start offset at the FAB's position; the translate animator moves it to 0
            view.setTranslationX(translationX);
            view.setTranslationY(translationY);
        }

        // Add a color overlay to fake appearance of the FAB
        final ColorDrawable fabColor = new ColorDrawable(color);
        fabColor.setBounds(0, 0, dialogBounds.width(), dialogBounds.height());
        if (!fromFab) fabColor.setAlpha(0); // returning: overlay fades IN later
        view.getOverlay().add(fabColor);

        // Add an icon overlay again to fake the appearance of the FAB
        final Drawable fabIcon =
                ContextCompat.getDrawable(sceneRoot.getContext(), icon).mutate();
        final int iconLeft = (dialogBounds.width() - fabIcon.getIntrinsicWidth()) / 2;
        final int iconTop = (dialogBounds.height() - fabIcon.getIntrinsicHeight()) / 2;
        fabIcon.setBounds(iconLeft, iconTop,
                iconLeft + fabIcon.getIntrinsicWidth(),
                iconTop + fabIcon.getIntrinsicHeight());
        if (!fromFab) fabIcon.setAlpha(0);
        view.getOverlay().add(fabIcon);

        // Circular clip from/to the FAB size
        // (int division before Math.hypot truncates half-pixels; presumably acceptable here)
        final Animator circularReveal;
        if (fromFab) {
            circularReveal = ViewAnimationUtils.createCircularReveal(view,
                    view.getWidth() / 2,
                    view.getHeight() / 2,
                    startBounds.width() / 2,
                    (float) Math.hypot(endBounds.width() / 2, endBounds.height() / 2));
            circularReveal.setInterpolator(fastOutSlowInInterpolator);
        } else {
            circularReveal = ViewAnimationUtils.createCircularReveal(view,
                    view.getWidth() / 2,
                    view.getHeight() / 2,
                    (float) Math.hypot(startBounds.width() / 2, startBounds.height() / 2),
                    endBounds.width() / 2);
            circularReveal.setInterpolator(
                    AnimationUtils.loadInterpolator(sceneRoot.getContext(),
                            android.R.interpolator.linear_out_slow_in));

            // Persist the end clip i.e. stay at FAB size after the reveal has run
            circularReveal.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    view.setOutlineProvider(new ViewOutlineProvider() {
                        @Override
                        public void getOutline(View view, Outline outline) {
                            final int left = (view.getWidth() - fabBounds.width()) / 2;
                            final int top = (view.getHeight() - fabBounds.height()) / 2;
                            outline.setOval(
                                    left, top, left + fabBounds.width(), top + fabBounds.height());
                            view.setClipToOutline(true);
                        }
                    });
                }
            });
        }
        circularReveal.setDuration(duration);

        // Translate to end position along an arc
        final Animator translate = ObjectAnimator.ofFloat(
                view,
                View.TRANSLATION_X,
                View.TRANSLATION_Y,
                fromFab ? getPathMotion().getPath(translationX, translationY, 0, 0)
                        : getPathMotion().getPath(0, 0, -translationX, -translationY));
        translate.setDuration(duration);
        translate.setInterpolator(fastOutSlowInInterpolator);

        // Fade contents of non-FAB view in/out (iterated in reverse child order)
        List<Animator> fadeContents = null;
        if (view instanceof ViewGroup) {
            final ViewGroup vg = ((ViewGroup) view);
            fadeContents = new ArrayList<>(vg.getChildCount());
            for (int i = vg.getChildCount() - 1; i >= 0; i--) {
                final View child = vg.getChildAt(i);
                final Animator fade =
                        ObjectAnimator.ofFloat(child, View.ALPHA, fromFab ? 1f : 0f);
                if (fromFab) {
                    child.setAlpha(0f);
                }
                fade.setDuration(twoThirdsDuration);
                fade.setInterpolator(fastOutSlowInInterpolator);
                fadeContents.add(fade);
            }
        }

        // Fade in/out the fab color & icon overlays
        final Animator colorFade = ObjectAnimator.ofInt(fabColor, "alpha", fromFab ? 0 : 255);
        final Animator iconFade = ObjectAnimator.ofInt(fabIcon, "alpha", fromFab ? 0 : 255);
        if (!fromFab) {
            // returning: delay the overlay fade-in to the second half of the transition
            colorFade.setStartDelay(halfDuration);
            iconFade.setStartDelay(halfDuration);
        }
        colorFade.setDuration(halfDuration);
        iconFade.setDuration(halfDuration);
        colorFade.setInterpolator(fastOutSlowInInterpolator);
        iconFade.setInterpolator(fastOutSlowInInterpolator);

        // Work around issue with elevation shadows. At the end of the return transition the shared
        // element's shadow is drawn twice (by each activity) which is jarring. This workaround
        // still causes the shadow to snap, but it's better than seeing it double drawn.
        Animator elevation = null;
        if (!fromFab) {
            elevation = ObjectAnimator.ofFloat(view, View.TRANSLATION_Z, -view.getElevation());
            elevation.setDuration(duration);
            elevation.setInterpolator(fastOutSlowInInterpolator);
        }

        // Run all animations together
        final AnimatorSet transition = new AnimatorSet();
        transition.playTogether(circularReveal, translate, colorFade, iconFade);
        transition.playTogether(fadeContents);
        if (elevation != null) transition.play(elevation);
        if (fromFab) {
            transition.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    // Clean up
                    view.getOverlay().clear();
                }
            });
        }
        return transition;
    }

    /** Stores the view's current layout bounds; skipped for unlaid-out (zero-size) views. */
    private void captureValues(TransitionValues transitionValues) {
        final View view = transitionValues.view;
        if (view == null || view.getWidth() <= 0 || view.getHeight() <= 0) return;

        transitionValues.values.put(PROP_BOUNDS, new Rect(view.getLeft(), view.getTop(),
                view.getRight(), view.getBottom()));
    }
}
apache-2.0
CyberEagle/AndroidTestLibrary
library/src/main/java/br/com/cybereagle/androidtestlibrary/shadow/ShadowContentObservable.java
1663
/* * Copyright 2013 Cyber Eagle * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package br.com.cybereagle.androidtestlibrary.shadow; import android.database.ContentObservable; import android.database.ContentObserver; import org.robolectric.annotation.Implementation; import org.robolectric.annotation.Implements; @Implements(ContentObservable.class) public class ShadowContentObservable extends ShadowObservable<ContentObserver> { @Override public void registerObserver(ContentObserver observer) { super.registerObserver(observer); } @Implementation public void dispatchChange(boolean selfChange) { synchronized(mObservers) { for (ContentObserver observer : mObservers) { if (!selfChange || observer.deliverSelfNotifications()) { observer.dispatchChange(selfChange); } } } } @Implementation public void notifyChange(boolean selfChange) { synchronized(mObservers) { for (ContentObserver observer : mObservers) { observer.onChange(selfChange); } } } }
apache-2.0
luiz158/bookstore_javaee
bookstore/src/main/java/com/sivalabs/bookstore/entities/Inventory.java
2919
package com.sivalabs.bookstore.entities;

import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JPA entity mapping the {@code inventory} table: stock quantity for a product,
 * with creation/update timestamps. Identity is based solely on the database id.
 *
 * @author Siva
 */
@Entity
@Table(name = "inventory")
@XmlRootElement
public class Inventory implements Serializable {

    private static final long serialVersionUID = 1L;

    // surrogate primary key, database-generated
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "inv_id")
    private Integer id;

    // units in stock
    @NotNull
    @Column(name = "quantity")
    private long quantity;

    @Column(name = "created_on")
    @Temporal(TemporalType.TIMESTAMP)
    private Date createdOn;

    @Column(name = "updated_on")
    @Temporal(TemporalType.TIMESTAMP)
    private Date updatedOn;

    // owning product row (inventory.product_id -> product.product_id)
    @JoinColumn(name = "product_id", referencedColumnName = "product_id")
    @ManyToOne()
    private Product product;

    /** No-arg constructor required by JPA. */
    public Inventory() {
    }

    public Inventory(Integer id) {
        this.id = id;
    }

    public Inventory(Integer id, long quantity) {
        this.id = id;
        this.quantity = quantity;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Date getCreatedOn() {
        return createdOn;
    }

    public void setCreatedOn(Date createdOn) {
        this.createdOn = createdOn;
    }

    public Date getUpdatedOn() {
        return updatedOn;
    }

    public void setUpdatedOn(Date updatedOn) {
        this.updatedOn = updatedOn;
    }

    public long getQuantity() {
        return quantity;
    }

    public void setQuantity(long quantity) {
        this.quantity = quantity;
    }

    public Product getProduct() {
        return product;
    }

    public void setProduct(Product product) {
        this.product = product;
    }

    /** Hash derived from the id only, matching {@link #equals(Object)}; 0 when unpersisted. */
    @Override
    public int hashCode() {
        return id == null ? 0 : id.hashCode();
    }

    /**
     * Two Inventory instances are equal when both ids are null or both ids match.
     * (Note: unpersisted instances therefore all compare equal — a common
     * generated-entity convention kept here unchanged.)
     */
    @Override
    public boolean equals(Object object) {
        if (!(object instanceof Inventory)) {
            return false;
        }
        Inventory that = (Inventory) object;
        return this.id == null ? that.id == null : this.id.equals(that.id);
    }
}
apache-2.0
ibmsoe/cassandra
src/java/org/apache/cassandra/cql3/statements/SelectStatement.java
97070
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.cql3.statements; import java.nio.ByteBuffer; import java.util.*; import com.google.common.base.Objects; import com.google.common.base.Predicate; import com.google.common.collect.AbstractIterator; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import org.github.jamm.MemoryMeter; import org.apache.cassandra.auth.Permission; import org.apache.cassandra.cql3.*; import org.apache.cassandra.db.composites.*; import org.apache.cassandra.transport.messages.ResultMessage; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.ColumnDefinition; import org.apache.cassandra.db.*; import org.apache.cassandra.db.filter.*; import org.apache.cassandra.db.marshal.*; import org.apache.cassandra.dht.*; import org.apache.cassandra.exceptions.*; import org.apache.cassandra.service.ClientState; import org.apache.cassandra.service.QueryState; import org.apache.cassandra.service.StorageProxy; import org.apache.cassandra.service.StorageService; import org.apache.cassandra.service.pager.*; import org.apache.cassandra.db.ConsistencyLevel; import org.apache.cassandra.thrift.ThriftValidation; import 
org.apache.cassandra.serializers.MarshalException; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.FBUtilities; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Encapsulates a completely parsed SELECT query, including the target * column family, expression, result count, and ordering clause. * */ public class SelectStatement implements CQLStatement, MeasurableForPreparedCache { private static final Logger logger = LoggerFactory.getLogger(SelectStatement.class); private static final int DEFAULT_COUNT_PAGE_SIZE = 10000; private final int boundTerms; public final CFMetaData cfm; public final Parameters parameters; private final Selection selection; private final Term limit; /** Restrictions on partitioning columns */ private final Restriction[] keyRestrictions; /** Restrictions on clustering columns */ private final Restriction[] columnRestrictions; /** Restrictions on non-primary key columns (i.e. secondary index restrictions) */ private final Map<ColumnIdentifier, Restriction> metadataRestrictions = new HashMap<ColumnIdentifier, Restriction>(); // All restricted columns not covered by the key or index filter private final Set<ColumnDefinition> restrictedColumns = new HashSet<ColumnDefinition>(); private Restriction.Slice sliceRestriction; private boolean isReversed; private boolean onToken; private boolean isKeyRange; private boolean keyIsInRelation; private boolean usesSecondaryIndexing; private Map<ColumnIdentifier, Integer> orderingIndexes; private boolean selectsStaticColumns; private boolean selectsOnlyStaticColumns; // Used by forSelection below private static final Parameters defaultParameters = new Parameters(Collections.<ColumnIdentifier, Boolean>emptyMap(), false, false, null, false); private static final Predicate<ColumnDefinition> isStaticFilter = new Predicate<ColumnDefinition>() { public boolean apply(ColumnDefinition def) { return def.isStatic(); } }; public SelectStatement(CFMetaData cfm, int 
boundTerms, Parameters parameters, Selection selection, Term limit) { this.cfm = cfm; this.boundTerms = boundTerms; this.selection = selection; this.keyRestrictions = new Restriction[cfm.partitionKeyColumns().size()]; this.columnRestrictions = new Restriction[cfm.clusteringColumns().size()]; this.parameters = parameters; this.limit = limit; // Now gather a few info on whether we should bother with static columns or not for this statement initStaticColumnsInfo(); } private void initStaticColumnsInfo() { if (!cfm.hasStaticColumns()) return; // If it's a wildcard, we do select static but not only them if (selection.isWildcard()) { selectsStaticColumns = true; return; } // Otherwise, check the selected columns selectsStaticColumns = !Iterables.isEmpty(Iterables.filter(selection.getColumns(), isStaticFilter)); selectsOnlyStaticColumns = true; for (ColumnDefinition def : selection.getColumns()) { if (def.kind != ColumnDefinition.Kind.PARTITION_KEY && def.kind != ColumnDefinition.Kind.STATIC) { selectsOnlyStaticColumns = false; break; } } } // Creates a simple select based on the given selection. // Note that the results select statement should not be used for actual queries, but only for processing already // queried data through processColumnFamily. static SelectStatement forSelection(CFMetaData cfm, Selection selection) { return new SelectStatement(cfm, 0, defaultParameters, selection, null); } public ResultSet.Metadata getResultMetadata() { return parameters.isCount ? ResultSet.makeCountMetadata(keyspace(), columnFamily(), parameters.countAlias) : selection.getResultMetadata(); } public long measureForPreparedCache(MemoryMeter meter) { return meter.measure(this) + meter.measureDeep(parameters) + meter.measureDeep(selection) + (limit == null ? 0 : meter.measureDeep(limit)) + meter.measureDeep(keyRestrictions) + meter.measureDeep(columnRestrictions) + meter.measureDeep(metadataRestrictions) + meter.measureDeep(restrictedColumns) + (sliceRestriction == null ? 
0 : meter.measureDeep(sliceRestriction)) + (orderingIndexes == null ? 0 : meter.measureDeep(orderingIndexes)); } public int getBoundTerms() { return boundTerms; } public void checkAccess(ClientState state) throws InvalidRequestException, UnauthorizedException { state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.SELECT); } public void validate(ClientState state) throws InvalidRequestException { // Nothing to do, all validation has been done by RawStatement.prepare() } public ResultMessage.Rows execute(QueryState state, QueryOptions options) throws RequestExecutionException, RequestValidationException { ConsistencyLevel cl = options.getConsistency(); if (cl == null) throw new InvalidRequestException("Invalid empty consistency level"); cl.validateForRead(keyspace()); int limit = getLimit(options); long now = System.currentTimeMillis(); Pageable command = getPageableCommand(options, limit, now); int pageSize = options.getPageSize(); // A count query will never be paged for the user, but we always page it internally to avoid OOM. // If we user provided a pageSize we'll use that to page internally (because why not), otherwise we use our default // Note that if there are some nodes in the cluster with a version less than 2.0, we can't use paging (CASSANDRA-6707). 
if (parameters.isCount && pageSize <= 0) pageSize = DEFAULT_COUNT_PAGE_SIZE; if (pageSize <= 0 || command == null || !QueryPagers.mayNeedPaging(command, pageSize)) { return execute(command, options, limit, now); } else { QueryPager pager = QueryPagers.pager(command, cl, options.getPagingState()); if (parameters.isCount) return pageCountQuery(pager, options, pageSize, now, limit); // We can't properly do post-query ordering if we page (see #6722) if (needsPostQueryOrdering()) throw new InvalidRequestException("Cannot page queries with both ORDER BY and a IN restriction on the partition key; you must either remove the " + "ORDER BY or the IN and sort client side, or disable paging for this query"); List<Row> page = pager.fetchPage(pageSize); ResultMessage.Rows msg = processResults(page, options, limit, now); return pager.isExhausted() ? msg : msg.withPagingState(pager.state()); } } private Pageable getPageableCommand(QueryOptions options, int limit, long now) throws RequestValidationException { int limitForQuery = updateLimitForQuery(limit); if (isKeyRange || usesSecondaryIndexing) return getRangeCommand(options, limitForQuery, now); List<ReadCommand> commands = getSliceCommands(options, limitForQuery, now); return commands == null ? null : new Pageable.ReadCommands(commands); } public Pageable getPageableCommand(QueryOptions options) throws RequestValidationException { return getPageableCommand(options, getLimit(options), System.currentTimeMillis()); } private ResultMessage.Rows execute(Pageable command, QueryOptions options, int limit, long now) throws RequestValidationException, RequestExecutionException { List<Row> rows; if (command == null) { rows = Collections.<Row>emptyList(); } else { rows = command instanceof Pageable.ReadCommands ? 
StorageProxy.read(((Pageable.ReadCommands)command).commands, options.getConsistency()) : StorageProxy.getRangeSlice((RangeSliceCommand)command, options.getConsistency()); } return processResults(rows, options, limit, now); } private ResultMessage.Rows pageCountQuery(QueryPager pager, QueryOptions options, int pageSize, long now, int limit) throws RequestValidationException, RequestExecutionException { int count = 0; while (!pager.isExhausted()) { int maxLimit = pager.maxRemaining(); logger.debug("New maxLimit for paged count query is {}", maxLimit); ResultSet rset = process(pager.fetchPage(pageSize), options, maxLimit, now); count += rset.rows.size(); } // We sometimes query one more result than the user limit asks to handle exclusive bounds with compact tables (see updateLimitForQuery). // So do make sure the count is not greater than what the user asked for. ResultSet result = ResultSet.makeCountResult(keyspace(), columnFamily(), Math.min(count, limit), parameters.countAlias); return new ResultMessage.Rows(result); } public ResultMessage.Rows processResults(List<Row> rows, QueryOptions options, int limit, long now) throws RequestValidationException { // Even for count, we need to process the result as it'll group some column together in sparse column families ResultSet rset = process(rows, options, limit, now); rset = parameters.isCount ? 
rset.makeCountResult(parameters.countAlias) : rset; return new ResultMessage.Rows(rset); } static List<Row> readLocally(String keyspaceName, List<ReadCommand> cmds) { Keyspace keyspace = Keyspace.open(keyspaceName); List<Row> rows = new ArrayList<Row>(cmds.size()); for (ReadCommand cmd : cmds) rows.add(cmd.getRow(keyspace)); return rows; } public ResultMessage.Rows executeInternal(QueryState state, QueryOptions options) throws RequestExecutionException, RequestValidationException { int limit = getLimit(options); long now = System.currentTimeMillis(); Pageable command = getPageableCommand(options, limit, now); List<Row> rows = command == null ? Collections.<Row>emptyList() : (command instanceof Pageable.ReadCommands ? readLocally(keyspace(), ((Pageable.ReadCommands)command).commands) : ((RangeSliceCommand)command).executeLocally()); return processResults(rows, options, limit, now); } public ResultSet process(List<Row> rows) throws InvalidRequestException { assert !parameters.isCount; // not yet needed QueryOptions options = QueryOptions.DEFAULT; return process(rows, options, getLimit(options), System.currentTimeMillis()); } public String keyspace() { return cfm.ksName; } public String columnFamily() { return cfm.cfName; } private List<ReadCommand> getSliceCommands(QueryOptions options, int limit, long now) throws RequestValidationException { Collection<ByteBuffer> keys = getKeys(options); if (keys.isEmpty()) // in case of IN () for (the last column of) the partition key. return null; List<ReadCommand> commands = new ArrayList<>(keys.size()); IDiskAtomFilter filter = makeFilter(options, limit); if (filter == null) return null; // Note that we use the total limit for every key, which is potentially inefficient. // However, IN + LIMIT is not a very sensible choice. 
for (ByteBuffer key : keys) { QueryProcessor.validateKey(key); // We should not share the slice filter amongst the commands (hence the cloneShallow), due to // SliceQueryFilter not being immutable due to its columnCounter used by the lastCounted() method // (this is fairly ugly and we should change that but that's probably not a tiny refactor to do that cleanly) commands.add(ReadCommand.create(keyspace(), key, columnFamily(), now, filter.cloneShallow())); } return commands; } private RangeSliceCommand getRangeCommand(QueryOptions options, int limit, long now) throws RequestValidationException { IDiskAtomFilter filter = makeFilter(options, limit); if (filter == null) return null; List<IndexExpression> expressions = getIndexExpressions(options); // The LIMIT provided by the user is the number of CQL row he wants returned. // We want to have getRangeSlice to count the number of columns, not the number of keys. AbstractBounds<RowPosition> keyBounds = getKeyBounds(options); return keyBounds == null ? null : new RangeSliceCommand(keyspace(), columnFamily(), now, filter, keyBounds, expressions, limit, !parameters.isDistinct, false); } private AbstractBounds<RowPosition> getKeyBounds(QueryOptions options) throws InvalidRequestException { IPartitioner<?> p = StorageService.getPartitioner(); if (onToken) { Token startToken = getTokenBound(Bound.START, options, p); Token endToken = getTokenBound(Bound.END, options, p); boolean includeStart = includeKeyBound(Bound.START); boolean includeEnd = includeKeyBound(Bound.END); /* * If we ask SP.getRangeSlice() for (token(200), token(200)], it will happily return the whole ring. * However, wrapping range doesn't really make sense for CQL, and we want to return an empty result * in that case (CASSANDRA-5573). So special case to create a range that is guaranteed to be empty. 
* * In practice, we want to return an empty result set if either startToken > endToken, or both are * equal but one of the bound is excluded (since [a, a] can contains something, but not (a, a], [a, a) * or (a, a)). Note though that in the case where startToken or endToken is the minimum token, then * this special case rule should not apply. */ int cmp = startToken.compareTo(endToken); if (!startToken.isMinimum() && !endToken.isMinimum() && (cmp > 0 || (cmp == 0 && (!includeStart || !includeEnd)))) return null; RowPosition start = includeStart ? startToken.minKeyBound() : startToken.maxKeyBound(); RowPosition end = includeEnd ? endToken.maxKeyBound() : endToken.minKeyBound(); return new Range<RowPosition>(start, end); } else { ByteBuffer startKeyBytes = getKeyBound(Bound.START, options); ByteBuffer finishKeyBytes = getKeyBound(Bound.END, options); RowPosition startKey = RowPosition.ForKey.get(startKeyBytes, p); RowPosition finishKey = RowPosition.ForKey.get(finishKeyBytes, p); if (startKey.compareTo(finishKey) > 0 && !finishKey.isMinimum(p)) return null; if (includeKeyBound(Bound.START)) { return includeKeyBound(Bound.END) ? new Bounds<RowPosition>(startKey, finishKey) : new IncludingExcludingBounds<RowPosition>(startKey, finishKey); } else { return includeKeyBound(Bound.END) ? new Range<RowPosition>(startKey, finishKey) : new ExcludingBounds<RowPosition>(startKey, finishKey); } } } private IDiskAtomFilter makeFilter(QueryOptions options, int limit) throws InvalidRequestException { if (parameters.isDistinct) { return new SliceQueryFilter(ColumnSlice.ALL_COLUMNS_ARRAY, false, 1, -1); } else if (isColumnRange()) { int toGroup = cfm.comparator.isDense() ? -1 : cfm.clusteringColumns().size(); List<Composite> startBounds = getRequestedBound(Bound.START, options); List<Composite> endBounds = getRequestedBound(Bound.END, options); assert startBounds.size() == endBounds.size(); // Handles fetching static columns. 
            // Note that for 2i, the filter is just used to restrict
            // the part of the index to query so adding the static slice would be useless and confusing.
            // For 2i, static columns are retrieved in CompositesSearcher with each index hit.
            ColumnSlice staticSlice = null;
            if (selectsStaticColumns && !usesSecondaryIndexing)
            {
                // Note: we could use staticPrefix.start() for the start bound, but EMPTY gives us the
                // same effect while saving a few CPU cycles.
                staticSlice = isReversed
                            ? new ColumnSlice(cfm.comparator.staticPrefix().end(), Composites.EMPTY)
                            : new ColumnSlice(Composites.EMPTY, cfm.comparator.staticPrefix().end());

                // In the case where we only select static columns, we want to really only check the static columns.
                // So we return early as the rest of that method would actually make us query everything
                if (selectsOnlyStaticColumns)
                    return sliceFilter(staticSlice, limit, toGroup);
            }

            // The case where startBounds == 1 is common enough that it's worth optimizing
            if (startBounds.size() == 1)
            {
                ColumnSlice slice = new ColumnSlice(startBounds.get(0), endBounds.get(0));
                if (slice.isAlwaysEmpty(cfm.comparator, isReversed))
                    return staticSlice == null ? null : sliceFilter(staticSlice, limit, toGroup);

                return staticSlice == null
                     ? sliceFilter(slice, limit, toGroup)
                     : (slice.includes(cfm.comparator, staticSlice.finish)
                        ? sliceFilter(new ColumnSlice(staticSlice.start, slice.finish), limit, toGroup)
                        : sliceFilter(new ColumnSlice[]{ staticSlice, slice }, limit, toGroup));
            }

            // Drop always-empty slices up front.
            List<ColumnSlice> l = new ArrayList<ColumnSlice>(startBounds.size());
            for (int i = 0; i < startBounds.size(); i++)
            {
                ColumnSlice slice = new ColumnSlice(startBounds.get(i), endBounds.get(i));
                if (!slice.isAlwaysEmpty(cfm.comparator, isReversed))
                    l.add(slice);
            }

            if (l.isEmpty())
                return staticSlice == null ? null : sliceFilter(staticSlice, limit, toGroup);
            if (staticSlice == null)
                return sliceFilter(l.toArray(new ColumnSlice[l.size()]), limit, toGroup);

            // The slices should not overlap.
            // We know the slices built from startBounds/endBounds don't, but if there is
            // a static slice, it could overlap with the 2nd slice. Check for it and correct if that's the case
            ColumnSlice[] slices;
            if (isReversed)
            {
                if (l.get(l.size() - 1).includes(cfm.comparator, staticSlice.start))
                {
                    // Last slice (in reversed order) already covers the static columns: extend it to EMPTY.
                    slices = l.toArray(new ColumnSlice[l.size()]);
                    slices[slices.length-1] = new ColumnSlice(slices[slices.length-1].start, Composites.EMPTY);
                }
                else
                {
                    slices = l.toArray(new ColumnSlice[l.size()+1]);
                    slices[slices.length-1] = staticSlice;
                }
            }
            else
            {
                if (l.get(0).includes(cfm.comparator, staticSlice.finish))
                {
                    slices = new ColumnSlice[l.size()];
                    slices[0] = new ColumnSlice(Composites.EMPTY, l.get(0).finish);
                    for (int i = 1; i < l.size(); i++)
                        slices[i] = l.get(i);
                }
                else
                {
                    slices = new ColumnSlice[l.size()+1];
                    slices[0] = staticSlice;
                    for (int i = 0; i < l.size(); i++)
                        slices[i+1] = l.get(i);
                }
            }
            return sliceFilter(slices, limit, toGroup);
        }
        else
        {
            SortedSet<CellName> cellNames = getRequestedColumns(options);
            if (cellNames == null) // in case of IN () for the last column of the key
                return null;
            QueryProcessor.validateCellNames(cellNames, cfm.comparator);
            return new NamesQueryFilter(cellNames, true);
        }
    }

    // Convenience overload for a single slice.
    private SliceQueryFilter sliceFilter(ColumnSlice slice, int limit, int toGroup)
    {
        return sliceFilter(new ColumnSlice[]{ slice }, limit, toGroup);
    }

    private SliceQueryFilter sliceFilter(ColumnSlice[] slices, int limit, int toGroup)
    {
        assert ColumnSlice.validateSlices(slices, cfm.comparator, isReversed) : String.format("Invalid slices: " + Arrays.toString(slices) + (isReversed ?
 " (reversed)" : ""));
        return new SliceQueryFilter(slices, isReversed, limit, toGroup);
    }

    /**
     * Evaluates the user-provided LIMIT term, defaulting to Integer.MAX_VALUE when absent.
     * @throws InvalidRequestException if the bound value is null, unparseable or not strictly positive.
     */
    private int getLimit(QueryOptions options) throws InvalidRequestException
    {
        int l = Integer.MAX_VALUE;
        if (limit != null)
        {
            ByteBuffer b = limit.bindAndGet(options);
            if (b == null)
                throw new InvalidRequestException("Invalid null value of limit");

            try
            {
                Int32Type.instance.validate(b);
                l = Int32Type.instance.compose(b);
            }
            catch (MarshalException e)
            {
                throw new InvalidRequestException("Invalid limit value");
            }
        }

        if (l <= 0)
            throw new InvalidRequestException("LIMIT must be strictly positive");

        return l;
    }

    private int updateLimitForQuery(int limit)
    {
        // Internally, we don't support exclusive bounds for slices. Instead, we query one more element if necessary
        // and exclude it later (in processColumnFamily)
        return sliceRestriction != null && (!sliceRestriction.isInclusive(Bound.START) || !sliceRestriction.isInclusive(Bound.END)) && limit != Integer.MAX_VALUE
             ? limit + 1
             : limit;
    }

    /**
     * Materializes every partition key selected by the key restrictions
     * (handles IN on the last partition-key component).
     */
    private Collection<ByteBuffer> getKeys(final QueryOptions options) throws InvalidRequestException
    {
        List<ByteBuffer> keys = new ArrayList<ByteBuffer>();
        CBuilder builder = cfm.getKeyValidatorAsCType().builder();
        for (ColumnDefinition def : cfm.partitionKeyColumns())
        {
            Restriction r = keyRestrictions[def.position()];
            assert r != null && !r.isSlice();

            List<ByteBuffer> values = r.values(options);

            if (builder.remainingCount() == 1)
            {
                // Last component: each value yields one complete partition key.
                for (ByteBuffer val : values)
                {
                    if (val == null)
                        throw new InvalidRequestException(String.format("Invalid null value for partition key part %s", def.name));
                    keys.add(builder.buildWith(val).toByteBuffer());
                }
            }
            else
            {
                // Note: for backward compatibility reasons, we let INs with 1 value slide
                if (values.size() != 1)
                    throw new InvalidRequestException("IN is only supported on the last column of the partition key");
                ByteBuffer val = values.get(0);
                if (val == null)
                    throw new InvalidRequestException(String.format("Invalid null value for partition key part %s", def.name));
                builder.add(val);
            }
        }
        return keys;
    }

    /**
     * Returns bound b of the partition-key range as raw bytes; EMPTY when any key
     * component is unrestricted.
     */
    private ByteBuffer getKeyBound(Bound b, QueryOptions options) throws InvalidRequestException
    {
        // Deal with unrestricted partition key components (special-casing is required to deal with 2i queries on the first
        // component of a composite partition key).
        for (int i = 0; i < keyRestrictions.length; i++)
            if (keyRestrictions[i] == null)
                return ByteBufferUtil.EMPTY_BYTE_BUFFER;

        // We deal with IN queries for keys in other places, so we know buildBound will return only one result
        return buildBound(b, cfm.partitionKeyColumns(), keyRestrictions, false, cfm.getKeyValidatorAsCType(), options).get(0).toByteBuffer();
    }

    // Resolves bound b of a token(...) restriction; min token when the bound is absent.
    private Token getTokenBound(Bound b, QueryOptions options, IPartitioner<?> p) throws InvalidRequestException
    {
        assert onToken;

        Restriction restriction = keyRestrictions[0];

        assert !restriction.isMultiColumn() : "Unexpectedly got a multi-column restriction on a partition key for a range query";
        SingleColumnRestriction keyRestriction = (SingleColumnRestriction)restriction;

        ByteBuffer value;
        if (keyRestriction.isEQ())
        {
            value = keyRestriction.values(options).get(0);
        }
        else
        {
            SingleColumnRestriction.Slice slice = (SingleColumnRestriction.Slice)keyRestriction;
            if (!slice.hasBound(b))
                return p.getMinimumToken();

            value = slice.bound(b, options);
        }

        if (value == null)
            throw new InvalidRequestException("Invalid null token value");
        return p.getTokenFactory().fromByteArray(value);
    }

    // Whether bound b of the partition-key range is inclusive.
    private boolean includeKeyBound(Bound b)
    {
        for (Restriction r : keyRestrictions)
        {
            if (r == null)
                return true;
            else if (r.isSlice())
            {
                assert !r.isMultiColumn() : "Unexpectedly got multi-column restriction on partition key";
                return ((SingleColumnRestriction.Slice)r).isInclusive(b);
            }
        }
        // All equality
        return true;
    }

    private boolean isColumnRange()
    {
        // Due to CASSANDRA-5762, we always do a slice for CQL3 tables (not dense, composite).
// Static CF (non dense but non composite) never entails a column slice however if (!cfm.comparator.isDense()) return cfm.comparator.isCompound(); // Otherwise (i.e. for compact table where we don't have a row marker anyway and thus don't care about CASSANDRA-5762), // it is a range query if it has at least one the column alias for which no relation is defined or is not EQ. for (Restriction r : columnRestrictions) { if (r == null || r.isSlice()) return true; } return false; } private SortedSet<CellName> getRequestedColumns(QueryOptions options) throws InvalidRequestException { // Note: getRequestedColumns don't handle static columns, but due to CASSANDRA-5762 // we always do a slice for CQL3 tables, so it's ok to ignore them here assert !isColumnRange(); CBuilder builder = cfm.comparator.prefixBuilder(); Iterator<ColumnDefinition> idIter = cfm.clusteringColumns().iterator(); for (Restriction r : columnRestrictions) { ColumnDefinition def = idIter.next(); assert r != null && !r.isSlice(); List<ByteBuffer> values = r.values(options); if (values.size() == 1) { ByteBuffer val = values.get(0); if (val == null) throw new InvalidRequestException(String.format("Invalid null value for clustering key part %s", def.name)); builder.add(val); } else { // We have a IN, which we only support for the last column. // If compact, just add all values and we're done. Otherwise, // for each value of the IN, creates all the columns corresponding to the selection. 
                if (values.isEmpty())
                    return null;
                SortedSet<CellName> columns = new TreeSet<CellName>(cfm.comparator);
                Iterator<ByteBuffer> iter = values.iterator();
                while (iter.hasNext())
                {
                    ByteBuffer val = iter.next();
                    if (val == null)
                        throw new InvalidRequestException(String.format("Invalid null value for clustering key part %s", def.name));

                    Composite prefix = builder.buildWith(val);
                    columns.addAll(addSelectedColumns(prefix));
                }
                return columns;
            }
        }

        return addSelectedColumns(builder.build());
    }

    /**
     * Expands a clustering prefix into the concrete cell names to fetch (row marker plus
     * selected regular/static columns, or all regular columns for non-compound CFs).
     */
    private SortedSet<CellName> addSelectedColumns(Composite prefix)
    {
        if (cfm.comparator.isDense())
        {
            return FBUtilities.singleton(cfm.comparator.create(prefix, null), cfm.comparator);
        }
        else
        {
            // Collections require doing a slice query because a given collection is a
            // non-known set of columns, so we shouldn't get there
            assert !selectACollection();

            SortedSet<CellName> columns = new TreeSet<CellName>(cfm.comparator);

            // We need to query the selected column as well as the marker
            // column (for the case where the row exists but has no columns outside the PK)
            // Two exceptions are "static CF" (non-composite non-compact CF) and "super CF"
            // that don't have marker and for which we must query all columns instead
            if (cfm.comparator.isCompound() && !cfm.isSuper())
            {
                // marker
                columns.add(cfm.comparator.rowMarker(prefix));

                // selected columns
                for (ColumnDefinition def : selection.getColumns())
                    if (def.kind == ColumnDefinition.Kind.REGULAR || def.kind == ColumnDefinition.Kind.STATIC)
                        columns.add(cfm.comparator.create(prefix, def));
            }
            else
            {
                // We know that we're not composite so we can ignore static columns
                for (ColumnDefinition def : cfm.regularColumns())
                    columns.add(cfm.comparator.create(prefix, def));
            }
            return columns;
        }
    }

    // Whether the selection includes at least one collection-typed column.
    private boolean selectACollection()
    {
        if (!cfm.comparator.hasCollections())
            return false;

        for (ColumnDefinition def : selection.getColumns())
        {
            if (def.type instanceof CollectionType)
                return true;
        }

        return false;
    }

    private static List<Composite> buildBound(Bound bound,
                                              List<ColumnDefinition> defs,
                                              Restriction[] restrictions,
                                              boolean isReversed,
                                              CType type,
                                              QueryOptions options) throws InvalidRequestException
    {
        CBuilder builder = type.builder();

        // check the first restriction to see if we're dealing with a multi-column restriction
        if (!defs.isEmpty())
        {
            Restriction firstRestriction = restrictions[0];
            if (firstRestriction != null && firstRestriction.isMultiColumn())
            {
                if (firstRestriction.isSlice())
                    return buildMultiColumnSliceBound(bound, defs, (MultiColumnRestriction.Slice) firstRestriction, isReversed, builder, options);
                else if (firstRestriction.isIN())
                    return buildMultiColumnInBound(bound, defs, (MultiColumnRestriction.IN) firstRestriction, isReversed, builder, type, options);
                else
                    return buildMultiColumnEQBound(bound, defs, (MultiColumnRestriction.EQ) firstRestriction, isReversed, builder, options);
            }
        }

        // The end-of-component of composite doesn't depend on whether the
        // component type is reversed or not (i.e. the ReversedType is applied
        // to the component comparator but not to the end-of-component itself),
        // it only depends on whether the slice is reversed
        Bound eocBound = isReversed ? Bound.reverse(bound) : bound;
        for (Iterator<ColumnDefinition> iter = defs.iterator(); iter.hasNext();)
        {
            ColumnDefinition def = iter.next();

            // In a restriction, we always have Bound.START < Bound.END for the "base" comparator.
            // So if we're doing a reverse slice, we must inverse the bounds when giving them as start and end of the slice filter.
            // But if the actual comparator itself is reversed, we must invert the bounds too.
            Bound b = isReversed == isReversedType(def) ? bound : Bound.reverse(bound);
            Restriction r = restrictions[def.position()];
            if (isNullRestriction(r, b))
            {
                // There wasn't any non EQ relation on that key, we select all records having the preceding component as prefix.
// For composites, if there was preceding component and we're computing the end, we must change the last component // End-Of-Component, otherwise we would be selecting only one record. Composite prefix = builder.build(); return Collections.singletonList(!prefix.isEmpty() && eocBound == Bound.END ? prefix.end() : prefix); } if (r.isSlice()) { builder.add(getSliceValue(r, b, options)); Relation.Type relType = ((Restriction.Slice)r).getRelation(eocBound, b); return Collections.singletonList(builder.build().withEOC(eocForRelation(relType))); } else { List<ByteBuffer> values = r.values(options); if (values.size() != 1) { // IN query, we only support it on the clustering columns assert def.position() == defs.size() - 1; // The IN query might not have listed the values in comparator order, so we need to re-sort // the bounds lists to make sure the slices works correctly (also, to avoid duplicates). TreeSet<Composite> s = new TreeSet<>(isReversed ? type.reverseComparator() : type); for (ByteBuffer val : values) { if (val == null) throw new InvalidRequestException(String.format("Invalid null clustering key part %s", def.name)); Composite prefix = builder.buildWith(val); // See below for why this s.add((eocBound == Bound.END && builder.remainingCount() > 0) ? prefix.end() : prefix); } return new ArrayList<>(s); } ByteBuffer val = values.get(0); if (val == null) throw new InvalidRequestException(String.format("Invalid null clustering key part %s", def.name)); builder.add(val); } } // Means no relation at all or everything was an equal // Note: if the builder is "full", there is no need to use the end-of-component bit. For columns selection, // it would be harmless to do it. However, we use this method got the partition key too. 
        // And when a query
        // with 2ndary index is done, and with the partition provided with an EQ, we'll end up here, and in that
        // case using the eoc would be bad, since for the random partitioner we have no guarantee that
        // prefix.end() will sort after prefix (see #5240).
        Composite prefix = builder.build();
        return Collections.singletonList(eocBound == Bound.END && builder.remainingCount() > 0 ? prefix.end() : prefix);
    }

    // Maps a relational operator to the end-of-component marker implementing it as a slice bound.
    private static Composite.EOC eocForRelation(Relation.Type op)
    {
        switch (op)
        {
            case LT:
                // < X => using startOf(X) as finish bound
                return Composite.EOC.START;
            case GT:
            case LTE:
                // > X => using endOf(X) as start bound
                // <= X => using endOf(X) as finish bound
                return Composite.EOC.END;
            default:
                // >= X => using X as start bound (could use START_OF too)
                // = X => using X
                return Composite.EOC.NONE;
        }
    }

    private static List<Composite> buildMultiColumnSliceBound(Bound bound,
                                                              List<ColumnDefinition> defs,
                                                              MultiColumnRestriction.Slice slice,
                                                              boolean isReversed,
                                                              CBuilder builder,
                                                              QueryOptions options) throws InvalidRequestException
    {
        Bound eocBound = isReversed ? Bound.reverse(bound) : bound;

        Iterator<ColumnDefinition> iter = defs.iterator();
        ColumnDefinition firstName = iter.next();
        // A hack to preserve pre-6875 behavior for tuple-notation slices where the comparator mixes ASCENDING
        // and DESCENDING orders. This stores the bound for the first component; we will re-use it for all following
        // components, even if they don't match the first component's reversal/non-reversal. Note that this does *not*
        // guarantee correct query results, it just preserves the previous behavior.
        Bound firstComponentBound = isReversed == isReversedType(firstName) ? bound : Bound.reverse(bound);

        if (!slice.hasBound(firstComponentBound))
        {
            Composite prefix = builder.build();
            return Collections.singletonList(builder.remainingCount() > 0 && eocBound == Bound.END ?
 prefix.end() : prefix);
        }

        List<ByteBuffer> vals = slice.componentBounds(firstComponentBound, options);

        ByteBuffer v = vals.get(firstName.position());
        if (v == null)
            throw new InvalidRequestException("Invalid null value in condition for column " + firstName.name);
        builder.add(v);

        // Add the remaining contiguous components supplied by the tuple bound.
        while (iter.hasNext())
        {
            ColumnDefinition def = iter.next();
            if (def.position() >= vals.size())
                break;

            v = vals.get(def.position());
            if (v == null)
                throw new InvalidRequestException("Invalid null value in condition for column " + def.name);
            builder.add(v);
        }
        Relation.Type relType = slice.getRelation(eocBound, firstComponentBound);
        return Collections.singletonList(builder.build().withEOC(eocForRelation(relType)));
    }

    private static List<Composite> buildMultiColumnInBound(Bound bound,
                                                           List<ColumnDefinition> defs,
                                                           MultiColumnRestriction.IN restriction,
                                                           boolean isReversed,
                                                           CBuilder builder,
                                                           CType type,
                                                           QueryOptions options) throws InvalidRequestException
    {
        List<List<ByteBuffer>> splitInValues = restriction.splitValues(options);
        Bound eocBound = isReversed ? Bound.reverse(bound) : bound;

        // The IN query might not have listed the values in comparator order, so we need to re-sort
        // the bounds lists to make sure the slices works correctly (also, to avoid duplicates).
        TreeSet<Composite> inValues = new TreeSet<>(isReversed ? type.reverseComparator() : type);
        for (List<ByteBuffer> components : splitInValues)
        {
            for (int i = 0; i < components.size(); i++)
                if (components.get(i) == null)
                    throw new InvalidRequestException("Invalid null value in condition for column " + defs.get(i));

            Composite prefix = builder.buildWith(components);
            inValues.add(eocBound == Bound.END && builder.remainingCount() - components.size() > 0 ?
 prefix.end() : prefix);
        }
        return new ArrayList<>(inValues);
    }

    private static List<Composite> buildMultiColumnEQBound(Bound bound,
                                                           List<ColumnDefinition> defs,
                                                           MultiColumnRestriction.EQ restriction,
                                                           boolean isReversed,
                                                           CBuilder builder,
                                                           QueryOptions options) throws InvalidRequestException
    {
        Bound eocBound = isReversed ? Bound.reverse(bound) : bound;
        List<ByteBuffer> values = restriction.values(options);
        for (int i = 0; i < values.size(); i++)
        {
            ByteBuffer component = values.get(i);
            if (component == null)
                throw new InvalidRequestException("Invalid null value in condition for column " + defs.get(i));
            builder.add(component);
        }

        Composite prefix = builder.build();
        return Collections.singletonList(builder.remainingCount() > 0 && eocBound == Bound.END ? prefix.end() : prefix);
    }

    // True when the restriction puts no constraint on bound b (absent, or a one-sided slice).
    private static boolean isNullRestriction(Restriction r, Bound b)
    {
        return r == null || (r.isSlice() && !((Restriction.Slice)r).hasBound(b));
    }

    // Extracts (and null-checks) the value of bound b from a slice restriction.
    private static ByteBuffer getSliceValue(Restriction r, Bound b, QueryOptions options) throws InvalidRequestException
    {
        Restriction.Slice slice = (Restriction.Slice)r;
        assert slice.hasBound(b);
        ByteBuffer val = slice.bound(b, options);
        if (val == null)
            throw new InvalidRequestException(String.format("Invalid null clustering key part %s", r));
        return val;
    }

    private List<Composite> getRequestedBound(Bound b, QueryOptions options) throws InvalidRequestException
    {
        assert isColumnRange();
        return buildBound(b, cfm.clusteringColumns(), columnRestrictions, isReversed, cfm.comparator, options);
    }

    /**
     * Translates the non-PK (secondary-index backed) restrictions into IndexExpressions.
     */
    public List<IndexExpression> getIndexExpressions(QueryOptions options) throws InvalidRequestException
    {
        if (!usesSecondaryIndexing || restrictedColumns.isEmpty())
            return Collections.emptyList();

        List<IndexExpression> expressions = new ArrayList<IndexExpression>();
        for (ColumnDefinition def : restrictedColumns)
        {
            Restriction restriction;
            switch (def.kind)
            {
                case PARTITION_KEY:
                    restriction = keyRestrictions[def.position()];
                    break;
                case CLUSTERING_COLUMN:
                    restriction =
 columnRestrictions[def.position()];
                    break;
                case REGULAR:
                case STATIC:
                    restriction = metadataRestrictions.get(def.name);
                    break;
                default:
                    // We don't allow restricting a COMPACT_VALUE for now in prepare.
                    throw new AssertionError();
            }

            if (restriction.isSlice())
            {
                Restriction.Slice slice = (Restriction.Slice)restriction;
                for (Bound b : Bound.values())
                {
                    if (slice.hasBound(b))
                    {
                        ByteBuffer value = validateIndexedValue(def, slice.bound(b, options));
                        IndexExpression.Operator op = slice.getIndexOperator(b);
                        // If the underlying comparator for name is reversed, we need to reverse the IndexOperator: user operation
                        // always refer to the "forward" sorting even if the clustering order is reversed, but the 2ndary code does
                        // use the underlying comparator as is.
                        if (def.type instanceof ReversedType)
                            op = reverse(op);
                        expressions.add(new IndexExpression(def.name.bytes, op, value));
                    }
                }
            }
            else if (restriction.isContains())
            {
                SingleColumnRestriction.Contains contains = (SingleColumnRestriction.Contains)restriction;
                for (ByteBuffer value : contains.values(options))
                {
                    validateIndexedValue(def, value);
                    expressions.add(new IndexExpression(def.name.bytes, IndexExpression.Operator.CONTAINS, value));
                }
                for (ByteBuffer key : contains.keys(options))
                {
                    validateIndexedValue(def, key);
                    expressions.add(new IndexExpression(def.name.bytes, IndexExpression.Operator.CONTAINS_KEY, key));
                }
            }
            else
            {
                List<ByteBuffer> values = restriction.values(options);

                if (values.size() != 1)
                    throw new InvalidRequestException("IN restrictions are not supported on indexed columns");

                ByteBuffer value = validateIndexedValue(def, values.get(0));
                expressions.add(new IndexExpression(def.name.bytes, IndexExpression.Operator.EQ, value));
            }
        }

        return expressions;
    }

    // Rejects null or oversized (> 64K) values for indexed columns.
    private static ByteBuffer validateIndexedValue(ColumnDefinition def, ByteBuffer value) throws InvalidRequestException
    {
        if (value == null)
            throw new InvalidRequestException(String.format("Unsupported null value for indexed column %s", def.name));
        if
 (value.remaining() > 0xFFFF)
            throw new InvalidRequestException("Index expression values may not be larger than 64K");
        return value;
    }

    /**
     * Wraps the cell iterator to drop cells matching an excluded (strict) slice bound,
     * since internal slices are always inclusive (see updateLimitForQuery).
     */
    private Iterator<Cell> applySliceRestriction(final Iterator<Cell> cells, final QueryOptions options) throws InvalidRequestException
    {
        assert sliceRestriction != null;

        final CellNameType type = cfm.comparator;
        final CellName excludedStart = sliceRestriction.isInclusive(Bound.START) ? null : type.makeCellName(sliceRestriction.bound(Bound.START, options));
        final CellName excludedEnd = sliceRestriction.isInclusive(Bound.END) ? null : type.makeCellName(sliceRestriction.bound(Bound.END, options));

        return new AbstractIterator<Cell>()
        {
            protected Cell computeNext()
            {
                while (cells.hasNext())
                {
                    Cell c = cells.next();

                    // For dynamic CF, the column could be out of the requested bounds (because we don't support strict bounds internally (unless
                    // the comparator is composite that is)), filter here
                    if ( (excludedStart != null && type.compare(c.name(), excludedStart) == 0)
                      || (excludedEnd != null && type.compare(c.name(), excludedEnd) == 0) )
                        continue;

                    return c;
                }
                return endOfData();
            }
        };
    }

    // Mirrors an index operator for reversed comparators (LT <-> GT, LTE <-> GTE).
    private static IndexExpression.Operator reverse(IndexExpression.Operator op)
    {
        switch (op)
        {
            case LT:  return IndexExpression.Operator.GT;
            case LTE: return IndexExpression.Operator.GTE;
            case GT:  return IndexExpression.Operator.LT;
            case GTE: return IndexExpression.Operator.LTE;
            default: return op;
        }
    }

    /**
     * Converts raw storage rows into the CQL ResultSet, applying ordering, reversal and LIMIT.
     */
    private ResultSet process(List<Row> rows, QueryOptions options, int limit, long now) throws InvalidRequestException
    {
        Selection.ResultSetBuilder result = selection.resultSetBuilder(now);
        for (org.apache.cassandra.db.Row row : rows)
        {
            // No columns match the query, skip
            if (row.cf == null)
                continue;

            processColumnFamily(row.key.getKey(), row.cf, options, now, result);
        }

        ResultSet cqlRows = result.build();

        orderResults(cqlRows);

        // Internal calls always return columns in the comparator order, even when reverse was set
        if (isReversed)
            cqlRows.reverse();

        // Trim result if
        // needed to respect the user limit
        cqlRows.trim(limit);
        return cqlRows;
    }

    // Used by ModificationStatement for CAS operations
    void processColumnFamily(ByteBuffer key, ColumnFamily cf, QueryOptions options, long now, Selection.ResultSetBuilder result)
    throws InvalidRequestException
    {
        CFMetaData cfm = cf.metadata();
        // Split a composite partition key into its components; otherwise single-element array.
        ByteBuffer[] keyComponents = null;
        if (cfm.getKeyValidator() instanceof CompositeType)
        {
            keyComponents = ((CompositeType)cfm.getKeyValidator()).split(key);
        }
        else
        {
            keyComponents = new ByteBuffer[]{ key };
        }

        Iterator<Cell> cells = cf.getSortedColumns().iterator();
        if (sliceRestriction != null)
            cells = applySliceRestriction(cells, options);

        CQL3Row.RowIterator iter = cfm.comparator.CQL3RowBuilder(cfm, now).group(cells);

        // If there is static columns but there is no non-static row, then provided the select was a full
        // partition selection (i.e. not a 2ndary index search and there was no condition on clustering columns)
        // then we want to include the static columns in the result set (and we're done).
        CQL3Row staticRow = iter.getStaticRow();
        if (staticRow != null && !iter.hasNext() && !usesSecondaryIndexing && hasNoClusteringColumnsRestriction())
        {
            result.newRow();
            for (ColumnDefinition def : selection.getColumns())
            {
                switch (def.kind)
                {
                    case PARTITION_KEY:
                        result.add(keyComponents[def.position()]);
                        break;
                    case STATIC:
                        addValue(result, def, staticRow, options);
                        break;
                    default:
                        // no live clustering row: non-key, non-static columns are null
                        result.add((ByteBuffer)null);
                }
            }
            return;
        }

        while (iter.hasNext())
        {
            CQL3Row cql3Row = iter.next();

            // Respect requested order
            result.newRow();
            // Respect selection order
            for (ColumnDefinition def : selection.getColumns())
            {
                switch (def.kind)
                {
                    case PARTITION_KEY:
                        result.add(keyComponents[def.position()]);
                        break;
                    case CLUSTERING_COLUMN:
                        result.add(cql3Row.getClusteringColumn(def.position()));
                        break;
                    case COMPACT_VALUE:
                        result.add(cql3Row.getColumn(null));
                        break;
                    case REGULAR:
                        addValue(result, def, cql3Row, options);
                        break;
                    case STATIC:
                        addValue(result, def, staticRow, options);
                        break;
                }
            }
        }
    }

    // Appends the value of def from row (serializing collections), or null when row is absent.
    private static void addValue(Selection.ResultSetBuilder result, ColumnDefinition def, CQL3Row row, QueryOptions options)
    {
        if (row == null)
        {
            result.add((ByteBuffer)null);
            return;
        }

        if (def.type.isCollection())
        {
            List<Cell> collection = row.getCollection(def.name);
            ByteBuffer value = collection == null
                             ? null
                             : ((CollectionType)def.type).serializeForNativeProtocol(collection, options.getProtocolVersion());
            result.add(value);
            return;
        }

        result.add(row.getColumn(def.name));
    }

    private boolean hasNoClusteringColumnsRestriction()
    {
        for (int i = 0; i < columnRestrictions.length; i++)
            if (columnRestrictions[i] != null)
                return false;
        return true;
    }

    private boolean needsPostQueryOrdering()
    {
        // We need post-query ordering only for queries with IN on the partition key and an ORDER BY.
        return keyIsInRelation && !parameters.orderings.isEmpty();
    }

    /**
     * Orders results when multiple keys are selected (using IN)
     */
    private void orderResults(ResultSet cqlRows) throws InvalidRequestException
    {
        if (cqlRows.size() == 0 || !needsPostQueryOrdering())
            return;

        assert orderingIndexes != null;

        List<Integer> idToSort = new ArrayList<Integer>();
        List<Comparator<ByteBuffer>> sorters = new ArrayList<Comparator<ByteBuffer>>();

        for (ColumnIdentifier identifier : parameters.orderings.keySet())
        {
            ColumnDefinition orderingColumn = cfm.getColumnDefinition(identifier);
            idToSort.add(orderingIndexes.get(orderingColumn.name));
            sorters.add(orderingColumn.type);
        }

        Comparator<List<ByteBuffer>> comparator = idToSort.size() == 1
                                                ? new SingleColumnComparator(idToSort.get(0), sorters.get(0))
                                                : new CompositeComparator(sorters, idToSort);
        Collections.sort(cqlRows.rows, comparator);
    }

    private static boolean isReversedType(ColumnDefinition def)
    {
        return def.type instanceof ReversedType;
    }

    // True when there is no clustering-column restriction (the column filter selects everything).
    private boolean columnFilterIsIdentity()
    {
        for (Restriction r : columnRestrictions)
        {
            if (r != null)
                return false;
        }
        return true;
    }

    private boolean hasClusteringColumnsRestriction()
    {
        for (int i = 0; i < columnRestrictions.length; i++)
            if (columnRestrictions[i] != null)
                return true;
        return false;
    }

    /**
     * Raw (unprepared) form of a SELECT statement, as produced by the parser.
     */
    public static class RawStatement extends CFStatement
    {
        private final Parameters parameters;
        private final List<RawSelector> selectClause;
        private final List<Relation> whereClause;
        private final Term.Raw limit;

        public RawStatement(CFName cfName, Parameters parameters, List<RawSelector> selectClause, List<Relation> whereClause, Term.Raw limit)
        {
            super(cfName);
            this.parameters = parameters;
            this.selectClause = selectClause;
            this.whereClause = whereClause == null ?
 Collections.<Relation>emptyList() : whereClause;
            this.limit = limit;
        }

        /**
         * Validates the statement against the schema and binds its restrictions,
         * producing the prepared SelectStatement.
         */
        public ParsedStatement.Prepared prepare() throws InvalidRequestException
        {
            CFMetaData cfm = ThriftValidation.validateColumnFamily(keyspace(), columnFamily());
            VariableSpecifications boundNames = getBoundVariables();

            // Select clause
            if (parameters.isCount && !selectClause.isEmpty())
                throw new InvalidRequestException("Only COUNT(*) and COUNT(1) operations are currently supported.");

            Selection selection = selectClause.isEmpty()
                                ? Selection.wildcard(cfm)
                                : Selection.fromSelectors(cfm, selectClause);

            if (parameters.isDistinct)
                validateDistinctSelection(selection.getColumns(), cfm.partitionKeyColumns());

            SelectStatement stmt = new SelectStatement(cfm, boundNames.size(), parameters, selection, prepareLimit(boundNames));

            /*
             * WHERE clause. For a given entity, rules are:
             *   - EQ relation conflicts with anything else (including a 2nd EQ)
             *   - Can't have more than one LT(E) relation (resp. GT(E) relation)
             *   - IN relation are restricted to row keys (for now) and conflicts with anything else
             *     (we could allow two IN for the same entity but that doesn't seem very useful)
             *   - The value_alias cannot be restricted in any way (we don't support wide rows with indexed value in CQL so far)
             */
            boolean hasQueriableIndex = false;
            boolean hasQueriableClusteringColumnIndex = false;
            for (Relation relation : whereClause)
            {
                if (relation.isMultiColumn())
                {
                    MultiColumnRelation rel = (MultiColumnRelation) relation;
                    List<ColumnDefinition> names = new ArrayList<>(rel.getEntities().size());
                    for (ColumnIdentifier entity : rel.getEntities())
                    {
                        ColumnDefinition def = cfm.getColumnDefinition(entity);
                        boolean[] queriable = processRelationEntity(stmt, relation, entity, def);
                        hasQueriableIndex |= queriable[0];
                        hasQueriableClusteringColumnIndex |= queriable[1];
                        names.add(def);
                    }
                    updateRestrictionsForRelation(stmt, names, rel, boundNames);
                }
                else
                {
                    SingleColumnRelation rel = (SingleColumnRelation) relation;
                    ColumnIdentifier entity =
 rel.getEntity();
                    ColumnDefinition def = cfm.getColumnDefinition(entity);
                    boolean[] queriable = processRelationEntity(stmt, relation, entity, def);
                    hasQueriableIndex |= queriable[0];
                    hasQueriableClusteringColumnIndex |= queriable[1];
                    updateRestrictionsForRelation(stmt, def, rel, boundNames);
                }
            }

            // At this point, the select statement is fully constructed, but we still have a few things to validate
            processPartitionKeyRestrictions(stmt, hasQueriableIndex, cfm);

            // All (or none) of the partition key columns have been specified;
            // hence there is no need to turn these restrictions into index expressions.
            if (!stmt.usesSecondaryIndexing)
                stmt.restrictedColumns.removeAll(cfm.partitionKeyColumns());

            if (stmt.selectsOnlyStaticColumns && stmt.hasClusteringColumnsRestriction())
                throw new InvalidRequestException("Cannot restrict clustering columns when selecting only static columns");

            processColumnRestrictions(stmt, hasQueriableIndex, cfm);

            // Covers indexes on the first clustering column (among others).
            if (stmt.isKeyRange && hasQueriableClusteringColumnIndex)
                stmt.usesSecondaryIndexing = true;

            if (!stmt.usesSecondaryIndexing)
                stmt.restrictedColumns.removeAll(cfm.clusteringColumns());

            // Even if usesSecondaryIndexing is false at this point, we'll still have to use one if
            // there are restrictions not covered by the PK.
            if (!stmt.metadataRestrictions.isEmpty())
            {
                if (!hasQueriableIndex)
                    throw new InvalidRequestException("No indexed columns present in by-columns clause with Equal operator");
                stmt.usesSecondaryIndexing = true;
            }

            if (stmt.usesSecondaryIndexing)
                validateSecondaryIndexSelections(stmt);

            if (!stmt.parameters.orderings.isEmpty())
                processOrderingClause(stmt, cfm);

            checkNeedsFiltering(stmt);

            return new ParsedStatement.Prepared(stmt, boundNames);
        }

        /** Returns a pair of (hasQueriableIndex, hasQueriableClusteringColumnIndex) */
        private boolean[] processRelationEntity(SelectStatement stmt, Relation relation, ColumnIdentifier entity, ColumnDefinition def) throws InvalidRequestException
        {
            if (def == null)
                handleUnrecognizedEntity(entity, relation);

            stmt.restrictedColumns.add(def);
            if (def.isIndexed() && relation.operator().allowsIndexQuery())
                return new boolean[]{true, def.kind == ColumnDefinition.Kind.CLUSTERING_COLUMN};
            return new boolean[]{false, false};
        }

        /** Throws an InvalidRequestException for an unrecognized identifier in the WHERE clause */
        private void handleUnrecognizedEntity(ColumnIdentifier entity, Relation relation) throws InvalidRequestException
        {
            if (containsAlias(entity))
                throw new InvalidRequestException(String.format("Aliases aren't allowed in the where clause ('%s')", relation));
            else
                throw new InvalidRequestException(String.format("Undefined name %s in where clause ('%s')", entity, relation));
        }

        /** Returns a Term for the limit or null if no limit is set */
        private Term prepareLimit(VariableSpecifications boundNames) throws InvalidRequestException
        {
            if (limit == null)
                return null;

            Term prepLimit = limit.prepare(keyspace(), limitReceiver());
            prepLimit.collectMarkerSpecification(boundNames);
            return prepLimit;
        }

        /**
         * Validates a multi-column (tuple-notation) relation and records the resulting
         * restriction on every clustering column it covers.
         */
        private void updateRestrictionsForRelation(SelectStatement stmt, List<ColumnDefinition> defs, MultiColumnRelation relation, VariableSpecifications boundNames) throws InvalidRequestException
        {
            List<ColumnDefinition> restrictedColumns = new ArrayList<>();
Set<ColumnDefinition> seen = new HashSet<>(); int previousPosition = -1; for (ColumnDefinition def : defs) { // ensure multi-column restriction only applies to clustering columns if (def.kind != ColumnDefinition.Kind.CLUSTERING_COLUMN) throw new InvalidRequestException(String.format("Multi-column relations can only be applied to clustering columns: %s", def)); if (seen.contains(def)) throw new InvalidRequestException(String.format("Column \"%s\" appeared twice in a relation: %s", def, relation)); seen.add(def); // check that no clustering columns were skipped if (def.position() != previousPosition + 1) { if (previousPosition == -1) throw new InvalidRequestException(String.format( "Clustering columns may not be skipped in multi-column relations. " + "They should appear in the PRIMARY KEY order. Got %s", relation)); else throw new InvalidRequestException(String.format( "Clustering columns must appear in the PRIMARY KEY order in multi-column relations: %s", relation)); } previousPosition++; Restriction existing = getExistingRestriction(stmt, def); Relation.Type operator = relation.operator(); if (existing != null) { if (operator == Relation.Type.EQ || operator == Relation.Type.IN) throw new InvalidRequestException(String.format("Column \"%s\" cannot be restricted by more than one relation if it is in an %s relation", def, relation.operator())); else if (!existing.isSlice()) throw new InvalidRequestException(String.format("Column \"%s\" cannot be restricted by an equality relation and an inequality relation", def)); } restrictedColumns.add(def); } switch (relation.operator()) { case EQ: { Term t = relation.getValue().prepare(keyspace(), defs); t.collectMarkerSpecification(boundNames); Restriction restriction = new MultiColumnRestriction.EQ(t, false); for (ColumnDefinition def : restrictedColumns) stmt.columnRestrictions[def.position()] = restriction; break; } case IN: { Restriction restriction; List<? 
extends Term.MultiColumnRaw> inValues = relation.getInValues(); if (inValues != null) { // we have something like "(a, b, c) IN ((1, 2, 3), (4, 5, 6), ...) or // "(a, b, c) IN (?, ?, ?) List<Term> terms = new ArrayList<>(inValues.size()); for (Term.MultiColumnRaw tuple : inValues) { Term t = tuple.prepare(keyspace(), defs); t.collectMarkerSpecification(boundNames); terms.add(t); } restriction = new MultiColumnRestriction.InWithValues(terms); } else { Tuples.INRaw rawMarker = relation.getInMarker(); AbstractMarker t = rawMarker.prepare(keyspace(), defs); t.collectMarkerSpecification(boundNames); restriction = new MultiColumnRestriction.InWithMarker(t); } for (ColumnDefinition def : restrictedColumns) stmt.columnRestrictions[def.position()] = restriction; break; } case LT: case LTE: case GT: case GTE: { Term t = relation.getValue().prepare(keyspace(), defs); t.collectMarkerSpecification(boundNames); for (ColumnDefinition def : defs) { Restriction.Slice restriction = (Restriction.Slice)getExistingRestriction(stmt, def); if (restriction == null) restriction = new MultiColumnRestriction.Slice(false); else if (!restriction.isMultiColumn()) throw new InvalidRequestException(String.format("Column \"%s\" cannot have both tuple-notation inequalities and single-column inequalities: %s", def.name, relation)); restriction.setBound(def.name, relation.operator(), t); stmt.columnRestrictions[def.position()] = restriction; } } } } private Restriction getExistingRestriction(SelectStatement stmt, ColumnDefinition def) { switch (def.kind) { case PARTITION_KEY: return stmt.keyRestrictions[def.position()]; case CLUSTERING_COLUMN: return stmt.columnRestrictions[def.position()]; case REGULAR: case STATIC: return stmt.metadataRestrictions.get(def.name); default: throw new AssertionError(); } } private void updateRestrictionsForRelation(SelectStatement stmt, ColumnDefinition def, SingleColumnRelation relation, VariableSpecifications names) throws InvalidRequestException { switch (def.kind) 
{ case PARTITION_KEY: stmt.keyRestrictions[def.position()] = updateSingleColumnRestriction(def, stmt.keyRestrictions[def.position()], relation, names); break; case CLUSTERING_COLUMN: stmt.columnRestrictions[def.position()] = updateSingleColumnRestriction(def, stmt.columnRestrictions[def.position()], relation, names); break; case COMPACT_VALUE: throw new InvalidRequestException(String.format("Predicates on the non-primary-key column (%s) of a COMPACT table are not yet supported", def.name)); case REGULAR: case STATIC: // We only all IN on the row key and last clustering key so far, never on non-PK columns, and this even if there's an index Restriction r = updateSingleColumnRestriction(def, stmt.metadataRestrictions.get(def.name), relation, names); if (r.isIN() && !((Restriction.IN)r).canHaveOnlyOneValue()) // Note: for backward compatibility reason, we conside a IN of 1 value the same as a EQ, so we let that slide. throw new InvalidRequestException(String.format("IN predicates on non-primary-key columns (%s) is not yet supported", def.name)); stmt.metadataRestrictions.put(def.name, r); break; } } Restriction updateSingleColumnRestriction(ColumnDefinition def, Restriction existingRestriction, SingleColumnRelation newRel, VariableSpecifications boundNames) throws InvalidRequestException { ColumnSpecification receiver = def; if (newRel.onToken) { if (def.kind != ColumnDefinition.Kind.PARTITION_KEY) throw new InvalidRequestException(String.format("The token() function is only supported on the partition key, found on %s", def.name)); receiver = new ColumnSpecification(def.ksName, def.cfName, new ColumnIdentifier("partition key token", true), StorageService.getPartitioner().getTokenValidator()); } switch (newRel.operator()) { case EQ: { if (existingRestriction != null) throw new InvalidRequestException(String.format("%s cannot be restricted by more than one relation if it includes an Equal", def.name)); Term t = newRel.getValue().prepare(keyspace(), receiver); 
t.collectMarkerSpecification(boundNames); existingRestriction = new SingleColumnRestriction.EQ(t, newRel.onToken); } break; case IN: if (existingRestriction != null) throw new InvalidRequestException(String.format("%s cannot be restricted by more than one relation if it includes a IN", def.name)); if (newRel.getInValues() == null) { // Means we have a "SELECT ... IN ?" assert newRel.getValue() != null; Term t = newRel.getValue().prepare(keyspace(), receiver); t.collectMarkerSpecification(boundNames); existingRestriction = new SingleColumnRestriction.InWithMarker((Lists.Marker)t); } else { List<Term> inValues = new ArrayList<>(newRel.getInValues().size()); for (Term.Raw raw : newRel.getInValues()) { Term t = raw.prepare(keyspace(), receiver); t.collectMarkerSpecification(boundNames); inValues.add(t); } existingRestriction = new SingleColumnRestriction.InWithValues(inValues); } break; case GT: case GTE: case LT: case LTE: { if (existingRestriction == null) existingRestriction = new SingleColumnRestriction.Slice(newRel.onToken); else if (!existingRestriction.isSlice()) throw new InvalidRequestException(String.format("Column \"%s\" cannot be restricted by both an equality and an inequality relation", def.name)); else if (existingRestriction.isMultiColumn()) throw new InvalidRequestException(String.format("Column \"%s\" cannot be restricted by both a tuple notation inequality and a single column inequality (%s)", def.name, newRel)); Term t = newRel.getValue().prepare(keyspace(), receiver); t.collectMarkerSpecification(boundNames); ((SingleColumnRestriction.Slice)existingRestriction).setBound(def.name, newRel.operator(), t); } break; case CONTAINS_KEY: if (!(receiver.type instanceof MapType)) throw new InvalidRequestException(String.format("Cannot use CONTAINS_KEY on non-map column %s", def.name)); // Fallthrough on purpose case CONTAINS: { if (!receiver.type.isCollection()) throw new InvalidRequestException(String.format("Cannot use %s relation on non collection column 
%s", newRel.operator(), def.name)); if (existingRestriction == null) existingRestriction = new SingleColumnRestriction.Contains(); else if (!existingRestriction.isContains()) throw new InvalidRequestException(String.format("Collection column %s can only be restricted by CONTAINS or CONTAINS KEY", def.name)); boolean isKey = newRel.operator() == Relation.Type.CONTAINS_KEY; receiver = makeCollectionReceiver(receiver, isKey); Term t = newRel.getValue().prepare(keyspace(), receiver); ((SingleColumnRestriction.Contains)existingRestriction).add(t, isKey); } } return existingRestriction; } private void processPartitionKeyRestrictions(SelectStatement stmt, boolean hasQueriableIndex, CFMetaData cfm) throws InvalidRequestException { // If there is a queriable index, no special condition are required on the other restrictions. // But we still need to know 2 things: // - If we don't have a queriable index, is the query ok // - Is it queriable without 2ndary index, which is always more efficient // If a component of the partition key is restricted by a relation, all preceding // components must have a EQ. Only the last partition key component can be in IN relation. boolean canRestrictFurtherComponents = true; ColumnDefinition previous = null; stmt.keyIsInRelation = false; Iterator<ColumnDefinition> iter = cfm.partitionKeyColumns().iterator(); for (int i = 0; i < stmt.keyRestrictions.length; i++) { ColumnDefinition cdef = iter.next(); Restriction restriction = stmt.keyRestrictions[i]; if (restriction == null) { if (stmt.onToken) throw new InvalidRequestException("The token() function must be applied to all partition key components or none of them"); // The only time not restricting a key part is allowed is if none are restricted or an index is used. 
if (i > 0 && stmt.keyRestrictions[i - 1] != null) { if (hasQueriableIndex) { stmt.usesSecondaryIndexing = true; stmt.isKeyRange = true; break; } throw new InvalidRequestException(String.format("Partition key part %s must be restricted since preceding part is", cdef.name)); } stmt.isKeyRange = true; canRestrictFurtherComponents = false; } else if (!canRestrictFurtherComponents) { if (hasQueriableIndex) { stmt.usesSecondaryIndexing = true; break; } throw new InvalidRequestException(String.format( "Partitioning column \"%s\" cannot be restricted because the preceding column (\"%s\") is " + "either not restricted or is restricted by a non-EQ relation", cdef.name, previous)); } else if (restriction.isOnToken()) { // If this is a query on tokens, it's necessarily a range query (there can be more than one key per token). stmt.isKeyRange = true; stmt.onToken = true; } else if (stmt.onToken) { throw new InvalidRequestException(String.format("The token() function must be applied to all partition key components or none of them")); } else if (!restriction.isSlice()) { if (restriction.isIN()) { // We only support IN for the last name so far if (i != stmt.keyRestrictions.length - 1) throw new InvalidRequestException(String.format("Partition KEY part %s cannot be restricted by IN relation (only the last part of the partition key can)", cdef.name)); stmt.keyIsInRelation = true; } } else { // Non EQ relation is not supported without token(), even if we have a 2ndary index (since even those are ordered by partitioner). // Note: In theory we could allow it for 2ndary index queries with ALLOW FILTERING, but that would probably require some special casing // Note bis: This is also why we don't bother handling the 'tuple' notation of #4851 for keys. If we lift the limitation for 2ndary // index with filtering, we'll need to handle it though. 
throw new InvalidRequestException("Only EQ and IN relation are supported on the partition key (unless you use the token() function)"); } previous = cdef; } } private void processColumnRestrictions(SelectStatement stmt, boolean hasQueriableIndex, CFMetaData cfm) throws InvalidRequestException { // If a clustering key column is restricted by a non-EQ relation, all preceding // columns must have a EQ, and all following must have no restriction. Unless // the column is indexed that is. boolean canRestrictFurtherComponents = true; ColumnDefinition previous = null; boolean previousIsSlice = false; Iterator<ColumnDefinition> iter = cfm.clusteringColumns().iterator(); for (int i = 0; i < stmt.columnRestrictions.length; i++) { ColumnDefinition cdef = iter.next(); Restriction restriction = stmt.columnRestrictions[i]; if (restriction == null) { canRestrictFurtherComponents = false; previousIsSlice = false; } else if (!canRestrictFurtherComponents) { // We're here if the previous clustering column was either not restricted or was a slice. // We can't restrict the current column unless: // 1) we're in the special case of the 'tuple' notation from #4851 which we expand as multiple // consecutive slices: in which case we're good with this restriction and we continue // 2) we have a 2ndary index, in which case we have to use it but can skip more validation if (!(previousIsSlice && restriction.isSlice() && restriction.isMultiColumn())) { if (hasQueriableIndex) { stmt.usesSecondaryIndexing = true; // handle gaps and non-keyrange cases. 
break; } throw new InvalidRequestException(String.format( "PRIMARY KEY column \"%s\" cannot be restricted (preceding column \"%s\" is either not restricted or by a non-EQ relation)", cdef.name, previous)); } } else if (restriction.isSlice()) { canRestrictFurtherComponents = false; previousIsSlice = true; Restriction.Slice slice = (Restriction.Slice)restriction; // For non-composite slices, we don't support internally the difference between exclusive and // inclusive bounds, so we deal with it manually. if (!cfm.comparator.isCompound() && (!slice.isInclusive(Bound.START) || !slice.isInclusive(Bound.END))) stmt.sliceRestriction = slice; } else if (restriction.isIN()) { if (!restriction.isMultiColumn() && i != stmt.columnRestrictions.length - 1) throw new InvalidRequestException(String.format("Clustering column \"%s\" cannot be restricted by an IN relation", cdef.name)); else if (stmt.selectACollection()) throw new InvalidRequestException(String.format("Cannot restrict column \"%s\" by IN relation as a collection is selected by the query", cdef.name)); } previous = cdef; } } private void validateSecondaryIndexSelections(SelectStatement stmt) throws InvalidRequestException { if (stmt.keyIsInRelation) throw new InvalidRequestException("Select on indexed columns and with IN clause for the PRIMARY KEY are not supported"); // When the user only select static columns, the intent is that we don't query the whole partition but just // the static parts. But 1) we don't have an easy way to do that with 2i and 2) since we don't support index on static columns // so far, 2i means that you've restricted a non static column, so the query is somewhat non-sensical. 
if (stmt.selectsOnlyStaticColumns) throw new InvalidRequestException("Queries using 2ndary indexes don't support selecting only static columns"); } private void verifyOrderingIsAllowed(SelectStatement stmt) throws InvalidRequestException { if (stmt.usesSecondaryIndexing) throw new InvalidRequestException("ORDER BY with 2ndary indexes is not supported."); if (stmt.isKeyRange) throw new InvalidRequestException("ORDER BY is only supported when the partition key is restricted by an EQ or an IN."); } private void handleUnrecognizedOrderingColumn(ColumnIdentifier column) throws InvalidRequestException { if (containsAlias(column)) throw new InvalidRequestException(String.format("Aliases are not allowed in order by clause ('%s')", column)); else throw new InvalidRequestException(String.format("Order by on unknown column %s", column)); } private void processOrderingClause(SelectStatement stmt, CFMetaData cfm) throws InvalidRequestException { verifyOrderingIsAllowed(stmt); // If we order post-query (see orderResults), the sorted column needs to be in the ResultSet for sorting, even if we don't // ultimately ship them to the client (CASSANDRA-4911). 
if (stmt.keyIsInRelation) { stmt.orderingIndexes = new HashMap<>(); for (ColumnIdentifier column : stmt.parameters.orderings.keySet()) { final ColumnDefinition def = cfm.getColumnDefinition(column); if (def == null) handleUnrecognizedOrderingColumn(column); int index = indexOf(def, stmt.selection); if (index < 0) index = stmt.selection.addColumnForOrdering(def); stmt.orderingIndexes.put(def.name, index); } } stmt.isReversed = isReversed(stmt, cfm); } private boolean isReversed(SelectStatement stmt, CFMetaData cfm) throws InvalidRequestException { Boolean[] reversedMap = new Boolean[cfm.clusteringColumns().size()]; int i = 0; for (Map.Entry<ColumnIdentifier, Boolean> entry : stmt.parameters.orderings.entrySet()) { ColumnIdentifier column = entry.getKey(); boolean reversed = entry.getValue(); ColumnDefinition def = cfm.getColumnDefinition(column); if (def == null) handleUnrecognizedOrderingColumn(column); if (def.kind != ColumnDefinition.Kind.CLUSTERING_COLUMN) throw new InvalidRequestException(String.format("Order by is currently only supported on the clustered columns of the PRIMARY KEY, got %s", column)); if (i++ != def.position()) throw new InvalidRequestException(String.format("Order by currently only support the ordering of columns following their declared order in the PRIMARY KEY")); reversedMap[def.position()] = (reversed != isReversedType(def)); } // Check that all boolean in reversedMap, if set, agrees Boolean isReversed = null; for (Boolean b : reversedMap) { // Column on which order is specified can be in any order if (b == null) continue; if (isReversed == null) { isReversed = b; continue; } if (!isReversed.equals(b)) throw new InvalidRequestException(String.format("Unsupported order by relation")); } assert isReversed != null; return isReversed; } /** If ALLOW FILTERING was not specified, this verifies that it is not needed */ private void checkNeedsFiltering(SelectStatement stmt) throws InvalidRequestException { // non-key-range non-indexed queries 
cannot involve filtering underneath if (!parameters.allowFiltering && (stmt.isKeyRange || stmt.usesSecondaryIndexing)) { // We will potentially filter data if either: // - Have more than one IndexExpression // - Have no index expression and the column filter is not the identity if (stmt.restrictedColumns.size() > 1 || (stmt.restrictedColumns.isEmpty() && !stmt.columnFilterIsIdentity())) throw new InvalidRequestException("Cannot execute this query as it might involve data filtering and " + "thus may have unpredictable performance. If you want to execute " + "this query despite the performance unpredictability, use ALLOW FILTERING"); } } private int indexOf(ColumnDefinition def, Selection selection) { return indexOf(def, selection.getColumns().iterator()); } private int indexOf(final ColumnDefinition def, Iterator<ColumnDefinition> defs) { return Iterators.indexOf(defs, new Predicate<ColumnDefinition>() { public boolean apply(ColumnDefinition n) { return def.name.equals(n.name); } }); } private void validateDistinctSelection(Collection<ColumnDefinition> requestedColumns, Collection<ColumnDefinition> partitionKey) throws InvalidRequestException { for (ColumnDefinition def : requestedColumns) if (!partitionKey.contains(def)) throw new InvalidRequestException(String.format("SELECT DISTINCT queries must only request partition key columns (not %s)", def.name)); for (ColumnDefinition def : partitionKey) if (!requestedColumns.contains(def)) throw new InvalidRequestException(String.format("SELECT DISTINCT queries must request all the partition key columns (missing %s)", def.name)); } private boolean containsAlias(final ColumnIdentifier name) { return Iterables.any(selectClause, new Predicate<RawSelector>() { public boolean apply(RawSelector raw) { return name.equals(raw.alias); } }); } private ColumnSpecification limitReceiver() { return new ColumnSpecification(keyspace(), columnFamily(), new ColumnIdentifier("[limit]", true), Int32Type.instance); } private static 
ColumnSpecification makeCollectionReceiver(ColumnSpecification collection, boolean isKey) { assert collection.type.isCollection(); switch (((CollectionType)collection.type).kind) { case LIST: assert !isKey; return Lists.valueSpecOf(collection); case SET: assert !isKey; return Sets.valueSpecOf(collection); case MAP: return isKey ? Maps.keySpecOf(collection) : Maps.valueSpecOf(collection); } throw new AssertionError(); } @Override public String toString() { return Objects.toStringHelper(this) .add("name", cfName) .add("selectClause", selectClause) .add("whereClause", whereClause) .add("isDistinct", parameters.isDistinct) .add("isCount", parameters.isCount) .toString(); } } public static class Parameters { private final Map<ColumnIdentifier, Boolean> orderings; private final boolean isDistinct; private final boolean isCount; private final ColumnIdentifier countAlias; private final boolean allowFiltering; public Parameters(Map<ColumnIdentifier, Boolean> orderings, boolean isDistinct, boolean isCount, ColumnIdentifier countAlias, boolean allowFiltering) { this.orderings = orderings; this.isDistinct = isDistinct; this.isCount = isCount; this.countAlias = countAlias; this.allowFiltering = allowFiltering; } } /** * Used in orderResults(...) method when single 'ORDER BY' condition where given */ private static class SingleColumnComparator implements Comparator<List<ByteBuffer>> { private final int index; private final Comparator<ByteBuffer> comparator; public SingleColumnComparator(int columnIndex, Comparator<ByteBuffer> orderer) { index = columnIndex; comparator = orderer; } public int compare(List<ByteBuffer> a, List<ByteBuffer> b) { return comparator.compare(a.get(index), b.get(index)); } } /** * Used in orderResults(...) 
method when multiple 'ORDER BY' conditions where given */ private static class CompositeComparator implements Comparator<List<ByteBuffer>> { private final List<Comparator<ByteBuffer>> orderTypes; private final List<Integer> positions; private CompositeComparator(List<Comparator<ByteBuffer>> orderTypes, List<Integer> positions) { this.orderTypes = orderTypes; this.positions = positions; } public int compare(List<ByteBuffer> a, List<ByteBuffer> b) { for (int i = 0; i < positions.size(); i++) { Comparator<ByteBuffer> type = orderTypes.get(i); int columnPos = positions.get(i); ByteBuffer aValue = a.get(columnPos); ByteBuffer bValue = b.get(columnPos); int comparison = type.compare(aValue, bValue); if (comparison != 0) return comparison; } return 0; } } }
apache-2.0
ibm-wearables-sdk-for-mobile/ibm-wearables-android-sdk
mobileedge-mobile/src/main/java/com/ibm/mobilefirst/mobileedge/translators/ICharacteristicDataTranslator.java
368
package com.ibm.mobilefirst.mobileedge.translators;

import android.bluetooth.BluetoothGattCharacteristic;

import com.ibm.mobilefirst.mobileedge.abstractmodel.BaseSensorData;

/**
 * Converts the raw payload of a BLE GATT characteristic into a
 * {@link BaseSensorData} instance.
 */
public interface ICharacteristicDataTranslator {

    /**
     * Translates the given characteristic into sensor data.
     *
     * @param characteristic the GATT characteristic whose value is translated
     * @return the translated sensor data
     */
    BaseSensorData translate(BluetoothGattCharacteristic characteristic);

    /**
     * Translates the given characteristic into sensor data using additional
     * integer values.
     *
     * @param characteristic the GATT characteristic whose value is translated
     * @param values         extra integer inputs for the translation
     *                       (semantics are implementation specific — confirm
     *                       against the concrete translators)
     * @return the translated sensor data
     */
    BaseSensorData translate(BluetoothGattCharacteristic characteristic, int[] values);
}
apache-2.0
capitalone/Hydrograph
hydrograph.ui/hydrograph.ui.model/src/main/java/hydrograph/ui/graph/model/components/ORedshift.java
1190
/*******************************************************************************
 * Copyright 2017 Capital One Services, LLC and Bitwise, Inc.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package hydrograph.ui.graph.model.components;

import hydrograph.ui.graph.model.categories.OutputCategory;

/**
 * Graph-model component for the Redshift output.
 */
public class ORedshift extends OutputCategory {

    private static final long serialVersionUID = 4823508502009659559L;

    /**
     * Instantiates a new Output Redshift component.
     */
    // Fixed: the previous javadoc said "Output Oracle" — a copy-paste leftover
    // from a sibling component; this class models the Redshift output.
    public ORedshift() {
        super();
    }

    /**
     * Returns the fully qualified class name of the UI-to-engine converter
     * used for this component.
     */
    public String getConverter() {
        return "hydrograph.ui.engine.converter.impl.OutputRedshiftConverter";
    }
}
apache-2.0
leapframework/framework
web/api/src/main/java/leap/web/api/config/model/OAuthConfig.java
1325
/*
 * Copyright 2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package leap.web.api.config.model;

/**
 * The configuration of OAuth2.
 */
public interface OAuthConfig {

    /**
     * Returns {@code true} only when OAuth2 security was explicitly enabled.
     *
     * <p>Equivalent to checking that {@link #getEnabled()} is non-null and
     * {@code true}; an absent ({@code null}) setting counts as disabled.
     */
    default boolean isEnabled() {
        Boolean enabled = getEnabled();
        return enabled != null && enabled.booleanValue();
    }

    /**
     * Returns whether OAuth2 security is enabled, or {@code null} if it was
     * not configured at all.
     */
    Boolean getEnabled();

    /**
     * Optional. Returns the OAuth2 flow.
     */
    String getFlow();

    /**
     * Required. Returns the authorization endpoint url of the auth server.
     */
    String getAuthorizationUrl();

    /**
     * Required. Returns the token endpoint url of the auth server.
     */
    String getTokenUrl();
}
apache-2.0
kohsah/akomantoso-lib
src/main/java/org/akomantoso/schema/v3/release/ComponentInfo.java
2254
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2017.05.20 at 03:05:24 PM IST
//


package org.akomantoso.schema.v3.release;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;


/**
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence maxOccurs="unbounded" minOccurs="0">
 *         &lt;element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0}componentData"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "componentData"
})
@XmlRootElement(name = "componentInfo")
public class ComponentInfo {

    // Populated by JAXB via field access (see @XmlAccessorType above);
    // lazily initialized in the getter below.
    protected List<ComponentData> componentData;

    /**
     * Gets the value of the componentData property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the componentData property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getComponentData().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link ComponentData }
     *
     *
     */
    public List<ComponentData> getComponentData() {
        // Lazy init so JAXB unmarshalling and caller code share one live list.
        if (componentData == null) {
            componentData = new ArrayList<ComponentData>();
        }
        return this.componentData;
    }

}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-eks/src/main/java/com/amazonaws/services/eks/model/DescribeAddonVersionsRequest.java
13021
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.eks.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/eks-2017-11-01/DescribeAddonVersions" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeAddonVersionsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The Kubernetes versions that the add-on can be used with. * </p> */ private String kubernetesVersion; /** * <p> * The maximum number of results to return. * </p> */ private Integer maxResults; /** * <p> * The <code>nextToken</code> value returned from a previous paginated <code>DescribeAddonVersionsRequest</code> * where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination continues * from the end of the previous results that returned the <code>nextToken</code> value. * </p> * <note> * <p> * This token should be treated as an opaque identifier that is used only to retrieve the next items in a list and * not for other programmatic purposes. * </p> * </note> */ private String nextToken; /** * <p> * The name of the add-on. The name must match one of the names returned by <a * href="https://docs.aws.amazon.com/eks/latest/APIReference/API_ListAddons.html"> <code>ListAddons</code> </a>. 
     * </p>
     */
    private String addonName;
    // NOTE(review): this is the interior of an AWS-SDK-generated request class;
    // the class header and the kubernetesVersion/maxResults/nextToken field
    // declarations sit above this view. Generated code — edit upstream model,
    // not this file, if the shapes are wrong.

    /**
     * <p>
     * The Kubernetes versions that the add-on can be used with.
     * </p>
     * 
     * @param kubernetesVersion
     *        The Kubernetes versions that the add-on can be used with.
     */
    public void setKubernetesVersion(String kubernetesVersion) {
        this.kubernetesVersion = kubernetesVersion;
    }

    /**
     * <p>
     * The Kubernetes versions that the add-on can be used with.
     * </p>
     * 
     * @return The Kubernetes versions that the add-on can be used with.
     */
    public String getKubernetesVersion() {
        return this.kubernetesVersion;
    }

    /**
     * <p>
     * The Kubernetes versions that the add-on can be used with.
     * </p>
     * 
     * @param kubernetesVersion
     *        The Kubernetes versions that the add-on can be used with.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeAddonVersionsRequest withKubernetesVersion(String kubernetesVersion) {
        setKubernetesVersion(kubernetesVersion);
        return this;
    }

    /**
     * <p>
     * The maximum number of results to return.
     * </p>
     * 
     * @param maxResults
     *        The maximum number of results to return.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * <p>
     * The maximum number of results to return.
     * </p>
     * 
     * @return The maximum number of results to return.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * <p>
     * The maximum number of results to return.
     * </p>
     * 
     * @param maxResults
     *        The maximum number of results to return.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeAddonVersionsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * <p>
     * The <code>nextToken</code> value returned from a previous paginated <code>DescribeAddonVersionsRequest</code>
     * where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination continues
     * from the end of the previous results that returned the <code>nextToken</code> value.
     * </p>
     * <note>
     * <p>
     * This token should be treated as an opaque identifier that is used only to retrieve the next items in a list and
     * not for other programmatic purposes.
     * </p>
     * </note>
     * 
     * @param nextToken
     *        The <code>nextToken</code> value returned from a previous paginated
     *        <code>DescribeAddonVersionsRequest</code> where <code>maxResults</code> was used and the results exceeded
     *        the value of that parameter. Pagination continues from the end of the previous results that returned the
     *        <code>nextToken</code> value.</p> <note>
     *        <p>
     *        This token should be treated as an opaque identifier that is used only to retrieve the next items in a
     *        list and not for other programmatic purposes.
     *        </p>
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The <code>nextToken</code> value returned from a previous paginated <code>DescribeAddonVersionsRequest</code>
     * where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination continues
     * from the end of the previous results that returned the <code>nextToken</code> value.
     * </p>
     * <note>
     * <p>
     * This token should be treated as an opaque identifier that is used only to retrieve the next items in a list and
     * not for other programmatic purposes.
     * </p>
     * </note>
     * 
     * @return The <code>nextToken</code> value returned from a previous paginated
     *         <code>DescribeAddonVersionsRequest</code> where <code>maxResults</code> was used and the results exceeded
     *         the value of that parameter. Pagination continues from the end of the previous results that returned the
     *         <code>nextToken</code> value.</p> <note>
     *         <p>
     *         This token should be treated as an opaque identifier that is used only to retrieve the next items in a
     *         list and not for other programmatic purposes.
     *         </p>
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The <code>nextToken</code> value returned from a previous paginated <code>DescribeAddonVersionsRequest</code>
     * where <code>maxResults</code> was used and the results exceeded the value of that parameter. Pagination continues
     * from the end of the previous results that returned the <code>nextToken</code> value.
     * </p>
     * <note>
     * <p>
     * This token should be treated as an opaque identifier that is used only to retrieve the next items in a list and
     * not for other programmatic purposes.
     * </p>
     * </note>
     * 
     * @param nextToken
     *        The <code>nextToken</code> value returned from a previous paginated
     *        <code>DescribeAddonVersionsRequest</code> where <code>maxResults</code> was used and the results exceeded
     *        the value of that parameter. Pagination continues from the end of the previous results that returned the
     *        <code>nextToken</code> value.</p> <note>
     *        <p>
     *        This token should be treated as an opaque identifier that is used only to retrieve the next items in a
     *        list and not for other programmatic purposes.
     *        </p>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeAddonVersionsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * <p>
     * The name of the add-on. The name must match one of the names returned by <a
     * href="https://docs.aws.amazon.com/eks/latest/APIReference/API_ListAddons.html"> <code>ListAddons</code> </a>.
     * </p>
     * 
     * @param addonName
     *        The name of the add-on. The name must match one of the names returned by <a
     *        href="https://docs.aws.amazon.com/eks/latest/APIReference/API_ListAddons.html"> <code>ListAddons</code>
     *        </a>.
     */
    public void setAddonName(String addonName) {
        this.addonName = addonName;
    }

    /**
     * <p>
     * The name of the add-on. The name must match one of the names returned by <a
     * href="https://docs.aws.amazon.com/eks/latest/APIReference/API_ListAddons.html"> <code>ListAddons</code> </a>.
     * </p>
     * 
     * @return The name of the add-on. The name must match one of the names returned by <a
     *         href="https://docs.aws.amazon.com/eks/latest/APIReference/API_ListAddons.html"> <code>ListAddons</code>
     *         </a>.
     */
    public String getAddonName() {
        return this.addonName;
    }

    /**
     * <p>
     * The name of the add-on. The name must match one of the names returned by <a
     * href="https://docs.aws.amazon.com/eks/latest/APIReference/API_ListAddons.html"> <code>ListAddons</code> </a>.
     * </p>
     * 
     * @param addonName
     *        The name of the add-on. The name must match one of the names returned by <a
     *        href="https://docs.aws.amazon.com/eks/latest/APIReference/API_ListAddons.html"> <code>ListAddons</code>
     *        </a>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeAddonVersionsRequest withAddonName(String addonName) {
        setAddonName(addonName);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     * 
     * @return A string representation of this object.
     * 
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getKubernetesVersion() != null)
            sb.append("KubernetesVersion: ").append(getKubernetesVersion()).append(",");
        if (getMaxResults() != null)
            sb.append("MaxResults: ").append(getMaxResults()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken()).append(",");
        if (getAddonName() != null)
            sb.append("AddonName: ").append(getAddonName());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof DescribeAddonVersionsRequest == false)
            return false;
        DescribeAddonVersionsRequest other = (DescribeAddonVersionsRequest) obj;
        // Per field: "a == null ^ b == null" is true when exactly one side is
        // null (not equal); the second check compares non-null values.
        if (other.getKubernetesVersion() == null ^ this.getKubernetesVersion() == null)
            return false;
        if (other.getKubernetesVersion() != null && other.getKubernetesVersion().equals(this.getKubernetesVersion()) == false)
            return false;
        if (other.getMaxResults() == null ^ this.getMaxResults() == null)
            return false;
        if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        if (other.getAddonName() == null ^ this.getAddonName() == null)
            return false;
        if (other.getAddonName() != null && other.getAddonName().equals(this.getAddonName()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-prime accumulation over the same fields equals() checks,
        // keeping equals/hashCode consistent.
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getKubernetesVersion() == null) ? 0 : getKubernetesVersion().hashCode());
        hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        hashCode = prime * hashCode + ((getAddonName() == null) ? 0 : getAddonName().hashCode());
        return hashCode;
    }

    @Override
    public DescribeAddonVersionsRequest clone() {
        return (DescribeAddonVersionsRequest) super.clone();
    }

}
apache-2.0
mattprecious/notisync
NotiSync/src/com/mattprecious/notisync/wizardpager/model/AbstractWizardModel.java
3338
/* * Copyright 2012 Roman Nurik * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mattprecious.notisync.wizardpager.model; import android.content.Context; import android.os.Bundle; import java.util.ArrayList; import java.util.List; /** * Represents a wizard model, including the pages/steps in the wizard, their * dependencies, and their currently populated choices/values/selections. To * create an actual wizard model, extend this class and implement * {@link #onNewRootPageList()}. */ public abstract class AbstractWizardModel implements ModelCallbacks { protected Context mContext; private List<ModelCallbacks> mListeners = new ArrayList<ModelCallbacks>(); private PageList mRootPageList; public AbstractWizardModel(Context context) { mContext = context; mRootPageList = onNewRootPageList(); } /** * Override this to define a new wizard model. 
*/ protected abstract PageList onNewRootPageList(); @Override public void onPageDataChanged(Page page) { // can't use for each because of concurrent modification (review // fragment // can get added or removed and will register itself as a listener) for (int i = 0; i < mListeners.size(); i++) { mListeners.get(i).onPageDataChanged(page); } } @Override public void onPageTreeChanged() { // can't use for each because of concurrent modification (review // fragment // can get added or removed and will register itself as a listener) for (int i = 0; i < mListeners.size(); i++) { mListeners.get(i).onPageTreeChanged(); } } public Page findByKey(String key) { return mRootPageList.findByKey(key); } public void load(Bundle savedValues) { for (String key : savedValues.keySet()) { mRootPageList.findByKey(key).resetData(savedValues.getBundle(key)); } } public void registerListener(ModelCallbacks listener) { mListeners.add(listener); } public Bundle save() { Bundle bundle = new Bundle(); for (Page page : getCurrentPageSequence()) { bundle.putBundle(page.getKey(), page.getData()); } return bundle; } /** * Gets the current list of wizard steps, flattening nested (dependent) * pages based on the user's choices. */ public List<Page> getCurrentPageSequence() { ArrayList<Page> flattened = new ArrayList<Page>(); mRootPageList.flattenCurrentPageSequence(flattened); return flattened; } public void unregisterListener(ModelCallbacks listener) { mListeners.remove(listener); } }
apache-2.0
progosch/dashboard-maven-plugin
src/main/java/org/codehaus/mojo/dashboard/report/plugin/DashBoardMaven1ReportGenerator.java
31617
package org.codehaus.mojo.dashboard.report.plugin;

/*
 * Copyright 2007 David Vicente
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;

import org.apache.maven.plugin.logging.Log;
import org.codehaus.doxia.sink.Sink;
import org.codehaus.mojo.dashboard.report.plugin.beans.CheckstyleReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.CloverReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.CoberturaReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.CpdReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.DashBoardMavenProject;
import org.codehaus.mojo.dashboard.report.plugin.beans.FindBugsReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.IDashBoardReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.PmdReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.SurefireReportBean;
import org.codehaus.mojo.dashboard.report.plugin.beans.TagListReportBean;

/**
 * Renders the aggregated dashboard as one HTML table via the Doxia Sink API.
 * Each supported report type (Cobertura, Clover, Surefire, Checkstyle, PMD,
 * CPD, FindBugs, Taglist) contributes a collapsible column group; sections
 * are emitted only when the project actually carries a bean of that type.
 *
 * NOTE(review): the ordering of every sink.*() call below IS the output —
 * do not reorder calls without diffing the generated report.
 *
 * @author <a href="dvicente72@gmail.com">David Vicente</a>
 */
public class DashBoardMaven1ReportGenerator extends AbstractDashBoardGenerator
{
    // Suffix appended to every per-project link target.
    private String dashboardAnchorLink = "/dashboard-report.html";

    private DashBoardMavenProject mavenProject;

    // Report beans indexed by their concrete class, so each section can cheaply
    // test "is this report type present at all?". Raw Hashtable — pre-generics
    // era code, kept as-is.
    private Map map = new Hashtable();

    // When true, createHeader() adds a link to the historic page.
    private boolean isDBAvailable = false;

    /**
     * Builds the generator and indexes the project's report beans by class.
     *
     * @param mavenProject  aggregated dashboard data for the whole project
     * @param isDBAvailable whether the history database is available
     * @param log           Maven plugin logger, handed to the superclass
     */
    public DashBoardMaven1ReportGenerator( DashBoardMavenProject mavenProject, boolean isDBAvailable, Log log )
    {
        super( log );
        this.mavenProject = mavenProject;
        this.isDBAvailable = isDBAvailable;
        Set reports = mavenProject.getReports();
        Iterator iter = reports.iterator();
        while ( iter.hasNext() )
        {
            IDashBoardReportBean report = (IDashBoardReportBean) iter.next();
            if ( report != null )
            {
                map.put( report.getClass(), report );
            }
        }
    }

    /**
     * Entry point: emits the full page (title, CSS, collapse-column
     * javascript, header, body) and closes the sink.
     */
    public void doGenerateReport( ResourceBundle bundle, Sink sink )
    {
        createTitle( bundle, sink );
        addDashboardCss(sink);
        this.sinkJavascriptCode( sink );
        sink.body();
        sink.anchor( "top" );
        sink.anchor_();
        createHeader( bundle, sink );
        createBodyReport( bundle, sink );

        sink.body_();
        sink.flush();
        sink.close();
    }

    /** Writes the HTML head with the localized report title. */
    public void createTitle( ResourceBundle bundle, Sink sink )
    {
        sink.head();
        sink.title();
        sink.text( bundle.getString( "dashboard.multireport.name" ) );
        sink.title_();
        sink.head_();
    }

    /**
     * Writes the page header: title, generation date (JVM-default
     * locale/format), and — when the history DB is available — a link to the
     * historic page.
     */
    public void createHeader( ResourceBundle bundle, Sink sink )
    {
        sink.section1();

        sink.sectionTitle1();
        sink.text( bundle.getString( "dashboard.multireport.name" ) + " : " + this.mavenProject.getProjectName() );
        sink.sectionTitle1_();

        sink.text( "Date Generated: " + new SimpleDateFormat().format( new Date( System.currentTimeMillis() ) ) );
        sink.horizontalRule();

        if ( this.isDBAvailable )
        {
            sink.sectionTitle3();
            sink.bold();
            sink.text( "[" );
            sink.link( "dashboard-report-historic.html" );
            sink.text( "Go to Historic page" );
            sink.link_();
            sink.text( "]" );
            sink.bold_();
            sink.sectionTitle3_();
            sink.horizontalRule();
        }

        sink.lineBreak();

        sink.section1_();
    }

    /** Delegates to createAllSection to render the report body. */
    public void createBodyReport( ResourceBundle bundle, Sink sink )
    {
        // NOTE(review): leftover debug output on stdout, and the message names
        // the wrong class ("DashBoardMultiReportGenerator"). Consider routing
        // through the plugin Log from the superclass instead — confirm the
        // accessor before changing.
        System.out.println( "DashBoardMultiReportGenerator createBodyByReport(...)" );
        createAllSection( bundle, sink );
    }

    /**
     * Renders the whole dashboard table: group header + column header on top,
     * one row per leaf module, a totals row, then the headers mirrored again
     * at the bottom of the table.
     */
    public void createAllSection( ResourceBundle bundle, Sink sink )
    {
        sink.table();
        writeSuperHeader( sink );
        writeHeader( bundle, sink, true );
        createAllLineByReport( bundle, sink, mavenProject, true, "" );
        createTotalLine( bundle, sink, mavenProject );
        writeHeader( bundle, sink, false );
        writeSuperHeader( sink );
        sink.table_();
        sink.lineBreak();
    }

    /**
     * Recursive walk of the module tree. Multi-module nodes get a bold
     * "section" row (except the root) and recurse into their children; leaf
     * modules get one data row with a cell group per enabled report type.
     * Missing beans still emit empty cells so the column count stays aligned;
     * the bold "|" cells are visual separators between column groups.
     *
     * @param prefix slash-separated module path accumulated so far; its depth
     *               drives row indentation and link targets
     */
    public void createAllLineByReport( ResourceBundle bundle, Sink sink, DashBoardMavenProject mavenProject,
                                       boolean isRoot, String prefix )
    {
        if ( mavenProject.getModules() != null && !mavenProject.getModules().isEmpty() )
        {
            Iterator iter = mavenProject.getModules().iterator();
            if ( !isRoot )
            {
                prefix = writeMultiProjectRow( sink, mavenProject, prefix, dashboardAnchorLink );
            }
            while ( iter.hasNext() )
            {
                DashBoardMavenProject subproject = (DashBoardMavenProject) iter.next();
                createAllLineByReport( bundle, sink, subproject, false, prefix );
            }
        }
        else
        {
            sink.tableRow();
            writeProjectCell( sink, mavenProject, prefix, dashboardAnchorLink );
            if ( map.get( CoberturaReportBean.class ) != null )
            {
                CoberturaReportBean coberReportBean =
                    (CoberturaReportBean) mavenProject.getReportsByType( CoberturaReportBean.class );
                if ( coberReportBean != null )
                {
                    sinkCellClass( sink, Integer.toString( coberReportBean.getNbClasses() ), "cobertura" );
                    // sinkCellClass( sink, getPercentValue( coberReportBean.getLineCoverRate() ) ,"cobertura");
                    sinkCellPercentGraphic( sink, coberReportBean.getLineCoverRate(), "cobertura" );
                    // sinkCellClass( sink, getPercentValue( coberReportBean.getBranchCoverRate() ),"cobertura" );
                    sinkCellPercentGraphic( sink, coberReportBean.getBranchCoverRate(), "cobertura" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    // No bean for this module: emit blanks to keep alignment.
                    sinkCellClass( sink, "", "cobertura" );
                    sinkCellClass( sink, "", "cobertura" );
                    sinkCellClass( sink, "", "cobertura" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            if ( map.get( CloverReportBean.class ) != null )
            {
                CloverReportBean cloverReportBean =
                    (CloverReportBean) mavenProject.getReportsByType( CloverReportBean.class );
                if ( cloverReportBean != null )
                {
                    sinkCellPercentGraphic( sink, cloverReportBean.getPercentCoveredConditionals(), "clover",
                                            "(" + cloverReportBean.getCoveredElements() + " / "
                                                + cloverReportBean.getElements() + ")" );
                    this.sinkCellClass( sink, cloverReportBean.getConditionalsLabel(), "clover" );
                    this.sinkCellClass( sink, cloverReportBean.getStatementsLabel(), "clover" );
                    this.sinkCellClass( sink, cloverReportBean.getMethodsLabel(), "clover" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    sinkCellClass( sink, "", "clover" );
                    sinkCellClass( sink, "", "clover" );
                    sinkCellClass( sink, "", "clover" );
                    sinkCellClass( sink, "", "clover" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            if ( map.get( SurefireReportBean.class ) != null )
            {
                SurefireReportBean fireReportBean =
                    (SurefireReportBean) mavenProject.getReportsByType( SurefireReportBean.class );
                if ( fireReportBean != null )
                {
                    // sinkCellClass( sink, Double.toString( fireReportBean.getSucessRate() ) + "%" , "surefire");
                    // Success rate is a 0-100 value; the graphic helper takes a ratio.
                    sinkCellPercentGraphic( sink, fireReportBean.getSucessRate() / 100, "surefire" );
                    sinkCellClass( sink, Integer.toString( fireReportBean.getNbTests() ), "surefire" );
                    sinkCellClass( sink, Integer.toString( fireReportBean.getNbErrors() ), "surefire" );
                    sinkCellClass( sink, Integer.toString( fireReportBean.getNbFailures() ), "surefire" );
                    sinkCellClass( sink, Integer.toString( fireReportBean.getNbSkipped() ), "surefire" );
                    sinkCellClass( sink, Double.toString( fireReportBean.getElapsedTime() ), "surefire" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    sinkCellClass( sink, "", "surefire" );
                    sinkCellClass( sink, "", "surefire" );
                    sinkCellClass( sink, "", "surefire" );
                    sinkCellClass( sink, "", "surefire" );
                    sinkCellClass( sink, "", "surefire" );
                    sinkCellClass( sink, "", "surefire" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            if ( map.get( CheckstyleReportBean.class ) != null )
            {
                CheckstyleReportBean checkStyleReport =
                    (CheckstyleReportBean) mavenProject.getReportsByType( CheckstyleReportBean.class );
                if ( checkStyleReport != null )
                {
                    sinkCellClass( sink, Integer.toString( checkStyleReport.getNbClasses() ), "checkstyle" );
                    sinkCellClass( sink, Integer.toString( checkStyleReport.getNbTotal() ), "checkstyle" );
                    sinkCellClass( sink, Integer.toString( checkStyleReport.getNbInfos() ), "checkstyle" );
                    sinkCellClass( sink, Integer.toString( checkStyleReport.getNbWarnings() ), "checkstyle" );
                    sinkCellClass( sink, Integer.toString( checkStyleReport.getNbErrors() ), "checkstyle" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    sinkCellClass( sink, "", "checkstyle" );
                    sinkCellClass( sink, "", "checkstyle" );
                    sinkCellClass( sink, "", "checkstyle" );
                    sinkCellClass( sink, "", "checkstyle" );
                    sinkCellClass( sink, "", "checkstyle" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            if ( map.get( PmdReportBean.class ) != null )
            {
                PmdReportBean pmdReportBean = (PmdReportBean) mavenProject.getReportsByType( PmdReportBean.class );
                if ( pmdReportBean != null )
                {
                    sinkCellClass( sink, Integer.toString( pmdReportBean.getNbClasses() ), "pmd" );
                    sinkCellClass( sink, Integer.toString( pmdReportBean.getNbViolations() ), "pmd" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    sinkCellClass( sink, "", "pmd" );
                    sinkCellClass( sink, "", "pmd" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            if ( map.get( CpdReportBean.class ) != null )
            {
                CpdReportBean cpdReportBean = (CpdReportBean) mavenProject.getReportsByType( CpdReportBean.class );
                if ( cpdReportBean != null )
                {
                    sinkCellClass( sink, Integer.toString( cpdReportBean.getNbClasses() ), "cpd" );
                    sinkCellClass( sink, Integer.toString( cpdReportBean.getNbDuplicate() ), "cpd" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    sinkCellClass( sink, "", "cpd" );
                    sinkCellClass( sink, "", "cpd" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            if ( map.get( FindBugsReportBean.class ) != null )
            {
                FindBugsReportBean findBugsReportBean =
                    (FindBugsReportBean) mavenProject.getReportsByType( FindBugsReportBean.class );
                if ( findBugsReportBean != null )
                {
                    sinkCellClass( sink, Integer.toString( findBugsReportBean.getNbClasses() ), "findbugs" );
                    sinkCellClass( sink, Integer.toString( findBugsReportBean.getNbBugs() ), "findbugs" );
                    sinkCellClass( sink, Integer.toString( findBugsReportBean.getNbErrors() ), "findbugs" );
                    sinkCellClass( sink, Integer.toString( findBugsReportBean.getNbMissingClasses() ), "findbugs" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    sinkCellClass( sink, "", "findbugs" );
                    sinkCellClass( sink, "", "findbugs" );
                    sinkCellClass( sink, "", "findbugs" );
                    sinkCellClass( sink, "", "findbugs" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            if ( map.get( TagListReportBean.class ) != null )
            {
                TagListReportBean taglistReportBean =
                    (TagListReportBean) mavenProject.getReportsByType( TagListReportBean.class );
                if ( taglistReportBean != null )
                {
                    sinkCellClass( sink, Integer.toString( taglistReportBean.getNbClasses() ), "taglist" );
                    sinkCellClass( sink, Integer.toString( taglistReportBean.getNbTotal() ), "taglist" );
                    sinkHeaderBold( sink, "|" );
                }
                else
                {
                    sinkCellClass( sink, "", "taglist" );
                    sinkCellClass( sink, "", "taglist" );
                    sinkHeaderBold( sink, "|" );
                }
            }
            sink.tableRow_();
        }
    }

    /**
     * Writes the leading project-name cell for a leaf row. The link target is
     * the artifactId segment after the last '.' appended to {@code suffix};
     * {@code prefix} depth (slash count) sets the indentation level.
     */
    private void writeProjectCell( Sink sink, DashBoardMavenProject mavenProject, String prefix, String suffix )
    {
        if ( prefix == null || prefix.length() == 0 )
        {
            String artefactId = mavenProject.getArtifactId();
            String link = artefactId.substring( artefactId.lastIndexOf( "." ) + 1, artefactId.length() );
            sinkCellWithLink( sink, mavenProject.getProjectName(), link + suffix );
        }
        else
        {
            int nbTab = prefix.split( "/" ).length;
            String artefactId = mavenProject.getArtifactId();
            String link = prefix + "/" + artefactId.substring( artefactId.lastIndexOf( "." ) + 1, artefactId.length() );
            sinkCellTabWithLink( sink, mavenProject.getProjectName(), nbTab, link + suffix );
        }
    }

    /**
     * Writes a bold one-cell row for a multi-module node and returns the new
     * prefix (old prefix + "/" + artifactId tail) used by its children.
     */
    private String writeMultiProjectRow( Sink sink, DashBoardMavenProject mavenProject, String prefix, String suffix )
    {
        if ( prefix == null || prefix.length() == 0 )
        {
            String artefactId = mavenProject.getArtifactId();
            prefix = artefactId.substring( artefactId.lastIndexOf( "." ) + 1, artefactId.length() );
            sink.tableRow();
            sinkCellBoldWithLink( sink, mavenProject.getProjectName(), prefix + suffix );
            sink.tableRow_();
        }
        else
        {
            sink.tableRow();
            int nbTab = prefix.split( "/" ).length;
            String artefactId = mavenProject.getArtifactId();
            prefix = prefix + "/" + artefactId.substring( artefactId.lastIndexOf( "." ) + 1, artefactId.length() );
            sinkCellTabBoldWithLink( sink, mavenProject.getProjectName(), nbTab, prefix + suffix );
            sink.tableRow_();
        }
        return prefix;
    }

    /**
     * Writes the group-header row: one colspan header per enabled report type
     * (the int is the number of data columns in the group) followed by the
     * clickable collapse/expand icon cell for that group.
     */
    private void writeSuperHeader( Sink sink )
    {
        sink.tableRow();
        sink.tableHeaderCell();
        sink.nonBreakingSpace();
        sink.tableHeaderCell_();
        if ( map.get( CoberturaReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "Cobertura", 3, "cobertura" );
            this.sinkHeaderCollapsedIcon( sink, "cobertura" );
        }
        if ( map.get( CloverReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "Clover", 4, "clover" );
            this.sinkHeaderCollapsedIcon( sink, "clover" );
        }
        if ( map.get( SurefireReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "Surefire", 6, "surefire" );
            this.sinkHeaderCollapsedIcon( sink, "surefire" );
        }
        if ( map.get( CheckstyleReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "Checkstyle", 5, "checkstyle" );
            this.sinkHeaderCollapsedIcon( sink, "checkstyle" );
        }
        if ( map.get( PmdReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "PMD", 2, "pmd" );
            this.sinkHeaderCollapsedIcon( sink, "pmd" );
        }
        if ( map.get( CpdReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "CPD", 2, "cpd" );
            this.sinkHeaderCollapsedIcon( sink, "cpd" );
        }
        if ( map.get( FindBugsReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "FindBugs", 4, "findbugs" );
            this.sinkHeaderCollapsedIcon( sink, "findbugs" );
        }
        if ( map.get( TagListReportBean.class ) != null )
        {
            sinkSuperHeaderClass( sink, "Tag List", 2, "taglist" );
            this.sinkHeaderCollapsedIcon( sink, "taglist" );
        }
        sink.tableRow_();
    }

    /**
     * Writes the per-column label row. {@code upside} is true for the header
     * above the data (shows the project-name label) and false for the mirrored
     * footer (blank first cell).
     */
    private void writeHeader( ResourceBundle bundle, Sink sink, boolean upside )
    {
        sink.tableRow();
        if ( upside )
        {
            sinkHeader( sink, bundle.getString( "report.project.name.header" ) );
        }
        else
        {
            sinkHeader( sink, "" );
        }
        if ( map.get( CoberturaReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.cobertura.label.nbclasses" ), "cobertura" );
            sinkHeaderClass( sink, bundle.getString( "report.cobertura.label.linecover" ), "cobertura" );
            sinkHeaderClass( sink, bundle.getString( "report.cobertura.label.branchcover" ), "cobertura" );
            sinkHeaderBold( sink, "" );
        }
        if ( map.get( CloverReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.clover.label.total" ), "clover" );
            sinkHeaderClass( sink, bundle.getString( "report.clover.label.conditionals" ), "clover" );
            sinkHeaderClass( sink, bundle.getString( "report.clover.label.statements" ), "clover" );
            sinkHeaderClass( sink, bundle.getString( "report.clover.label.methods" ), "clover" );
            sinkHeaderBold( sink, "" );
        }
        if ( map.get( SurefireReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.surefire.label.successrate" ), "surefire" );
            sinkHeaderClass( sink, bundle.getString( "report.surefire.label.tests" ), "surefire" );
            sinkHeaderClass( sink, bundle.getString( "report.surefire.label.errors" ), "surefire" );
            sinkHeaderClass( sink, bundle.getString( "report.surefire.label.failures" ), "surefire" );
            sinkHeaderClass( sink, bundle.getString( "report.surefire.label.skipped" ), "surefire" );
            sinkHeaderClass( sink, bundle.getString( "report.surefire.label.time" ), "surefire" );
            sinkHeaderBold( sink, "" );
        }
        if ( map.get( CheckstyleReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.checkstyle.files" ), "checkstyle" );
            sinkHeaderClass( sink, bundle.getString( "report.checkstyle.column.total" ), "checkstyle" );
            // Raw <th> markup: the Sink API has no "header cell with icon"
            // primitive, so the cell is emitted by hand for the three severity
            // columns.
            sink.rawText( "<th class=\"checkstyle\">" + bundle.getString( "report.checkstyle.column.infos" ) );
            sink.nonBreakingSpace();
            iconInfo( sink );
            sink.rawText( "</th>" );
            sink.rawText( "<th class=\"checkstyle\">" + bundle.getString( "report.checkstyle.column.warnings" ) );
            sink.nonBreakingSpace();
            iconWarning( sink );
            sink.rawText( "</th>" );
            sink.rawText( "<th class=\"checkstyle\">" + bundle.getString( "report.checkstyle.column.errors" ) );
            sink.nonBreakingSpace();
            iconError( sink );
            sink.rawText( "</th>" );
            sinkHeaderBold( sink, "" );
        }
        if ( map.get( PmdReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.pmd.label.nbclasses" ), "pmd" );
            sinkHeaderClass( sink, bundle.getString( "report.pmd.label.nbviolations" ), "pmd" );
            sinkHeaderBold( sink, "" );
        }
        if ( map.get( CpdReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.cpd.label.nbclasses" ), "cpd" );
            sinkHeaderClass( sink, bundle.getString( "report.cpd.label.nbduplicate" ), "cpd" );
            sinkHeaderBold( sink, "" );
        }
        if ( map.get( FindBugsReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.findbugs.label.nbclasses" ), "findbugs" );
            sinkHeaderClass( sink, bundle.getString( "report.findbugs.label.nbbugs" ), "findbugs" );
            sinkHeaderClass( sink, bundle.getString( "report.findbugs.label.nberrors" ), "findbugs" );
            sinkHeaderClass( sink, bundle.getString( "report.findbugs.label.nbMissingClasses" ), "findbugs" );
            sinkHeaderBold( sink, "" );
        }
        if ( map.get( TagListReportBean.class ) != null )
        {
            sinkHeaderClass( sink, bundle.getString( "report.taglist.label.nbclasses" ), "taglist" );
            sinkHeaderClass( sink, bundle.getString( "report.taglist.column.nboccurs" ), "taglist" );
            sinkHeaderBold( sink, "" );
        }
        sink.tableRow_();
    }

    /**
     * Writes the aggregate "Total" row using the root project's own beans.
     * Note this keys off the beans directly (not the presence map), so its
     * columns mirror the data rows only when the two agree.
     */
    public void createTotalLine( ResourceBundle bundle, Sink sink, DashBoardMavenProject mavenProject )
    {
        sink.tableRow();
        sinkHeader( sink, "Total" );
        CoberturaReportBean reportBean =
            (CoberturaReportBean) mavenProject.getReportsByType( CoberturaReportBean.class );
        if ( reportBean != null )
        {
            sinkHeaderClass( sink, Integer.toString( reportBean.getNbClasses() ), "cobertura" );
            // sinkHeaderClass( sink, getPercentValue( reportBean.getLineCoverRate() ) , "cobertura");
            sinkHeaderCellPercentGraphic( sink, reportBean.getLineCoverRate(), "cobertura" );
            // sinkHeaderClass( sink, getPercentValue( reportBean.getBranchCoverRate() ) , "cobertura");
            sinkHeaderCellPercentGraphic( sink, reportBean.getBranchCoverRate(), "cobertura" );
            sinkHeaderBold( sink, "|" );
        }
        CloverReportBean cloverReportBean =
            (CloverReportBean) mavenProject.getReportsByType( CloverReportBean.class );
        if ( cloverReportBean != null )
        {
            // NOTE(review): the first ("total") cell reuses
            // getPercentCoveredConditionals() with the elements counts — looks
            // like a copy-paste of the conditionals cell; confirm against the
            // CloverReportBean API before changing.
            sinkHeaderCellPercentGraphic( sink, cloverReportBean.getPercentCoveredConditionals(), "clover",
                                          "(" + cloverReportBean.getCoveredElements() + " / "
                                              + cloverReportBean.getElements() + ")" );
            sinkHeaderCellPercentGraphic( sink, cloverReportBean.getPercentCoveredConditionals(), "clover",
                                          "(" + cloverReportBean.getCoveredConditionals() + " / "
                                              + cloverReportBean.getConditionals() + ")" );
            sinkHeaderCellPercentGraphic( sink, cloverReportBean.getPercentCoveredStatements(), "clover",
                                          "(" + cloverReportBean.getCoveredStatements() + " / "
                                              + cloverReportBean.getStatements() + ")" );
            sinkHeaderCellPercentGraphic( sink, cloverReportBean.getPercentCoveredMethods(), "clover",
                                          "(" + cloverReportBean.getCoveredMethods() + " / "
                                              + cloverReportBean.getMethods() + ")" );
            sinkHeaderBold( sink, "|" );
        }
        SurefireReportBean fireReportBean =
            (SurefireReportBean) mavenProject.getReportsByType( SurefireReportBean.class );
        if ( fireReportBean != null )
        {
            sinkHeaderCellPercentGraphic( sink, fireReportBean.getSucessRate() / 100, "surefire" );
            sinkHeaderClass( sink, Integer.toString( fireReportBean.getNbTests() ), "surefire" );
            sinkHeaderClass( sink, Integer.toString( fireReportBean.getNbErrors() ), "surefire" );
            sinkHeaderClass( sink, Integer.toString( fireReportBean.getNbFailures() ), "surefire" );
            sinkHeaderClass( sink, Integer.toString( fireReportBean.getNbSkipped() ), "surefire" );
            sinkHeaderClass( sink, Double.toString( fireReportBean.getElapsedTime() ), "surefire" );
            sinkHeaderBold( sink, "|" );
        }
        CheckstyleReportBean checkstyleReportBean =
            (CheckstyleReportBean) mavenProject.getReportsByType( CheckstyleReportBean.class );
        if ( checkstyleReportBean != null )
        {
            sinkHeaderClass( sink, Integer.toString( checkstyleReportBean.getNbClasses() ), "checkstyle" );
            sinkHeaderClass( sink, Integer.toString( checkstyleReportBean.getNbTotal() ), "checkstyle" );
            // Count + inverted percent bar in the same header cell, per severity.
            tableHeaderCellClass( sink, "checkstyle" );
            sink.text( Integer.toString( checkstyleReportBean.getNbInfos() ) );
            sinkInvertPercentGraphic( sink, checkstyleReportBean.getPercentInfos() );
            tableHeaderCell_( sink );
            tableHeaderCellClass( sink, "checkstyle" );
            sink.text( Integer.toString( checkstyleReportBean.getNbWarnings() ) );
            sinkInvertPercentGraphic( sink, checkstyleReportBean.getPercentWarnings() );
            tableHeaderCell_( sink );
            tableHeaderCellClass( sink, "checkstyle" );
            sink.text( Integer.toString( checkstyleReportBean.getNbErrors() ) );
            sinkInvertPercentGraphic( sink, checkstyleReportBean.getPercentErrors() );
            tableHeaderCell_( sink );
            sinkHeaderBold( sink, "|" );
        }
        PmdReportBean pmdReportBean = (PmdReportBean) mavenProject.getReportsByType( PmdReportBean.class );
        if ( pmdReportBean != null )
        {
            sinkHeaderClass( sink, Integer.toString( pmdReportBean.getNbClasses() ), "pmd" );
            sinkHeaderClass( sink, Integer.toString( pmdReportBean.getNbViolations() ), "pmd" );
            sinkHeaderBold( sink, "|" );
        }
        CpdReportBean cpdReportBean = (CpdReportBean) mavenProject.getReportsByType( CpdReportBean.class );
        if ( cpdReportBean != null )
        {
            sinkHeaderClass( sink, Integer.toString( cpdReportBean.getNbClasses() ), "cpd" );
            sinkHeaderClass( sink, Integer.toString( cpdReportBean.getNbDuplicate() ), "cpd" );
            sinkHeaderBold( sink, "|" );
        }
        FindBugsReportBean findBugsReportBean =
            (FindBugsReportBean) mavenProject.getReportsByType( FindBugsReportBean.class );
        if ( findBugsReportBean != null )
        {
            sinkHeaderClass( sink, Integer.toString( findBugsReportBean.getNbClasses() ), "findbugs" );
            sinkHeaderClass( sink, Integer.toString( findBugsReportBean.getNbBugs() ), "findbugs" );
            sinkHeaderClass( sink, Integer.toString( findBugsReportBean.getNbErrors() ), "findbugs" );
            sinkHeaderClass( sink, Integer.toString( findBugsReportBean.getNbMissingClasses() ), "findbugs" );
            sinkHeaderBold( sink, "|" );
        }
        TagListReportBean taglistReportBean =
            (TagListReportBean) mavenProject.getReportsByType( TagListReportBean.class );
        if ( taglistReportBean != null )
        {
            sinkHeaderClass( sink, Integer.toString( taglistReportBean.getNbClasses() ), "taglist" );
            sinkHeaderClass( sink, Integer.toString( taglistReportBean.getNbTotal() ), "taglist" );
            sinkHeaderBold( sink, "|" );
        }
        sink.tableRow_();
    }

    /**
     * Writes the header cell holding the collapse/expand toggle icon for one
     * column group; the image name "Collapsed&lt;id&gt;" is what the
     * javascript uses to flip the arrow graphic.
     */
    private void sinkHeaderCollapsedIcon( Sink sink, String id )
    {
        sink.tableHeaderCell();
        String idImg = "Collapsed" + id;
        sink.rawText( "<IMG SRC=\"./images/previous.gif\" ALT=\"" + id + "\" name=\"" + idImg
            + "\" onclick=\"javascript:toggleCol('" + idImg + "','" + id + "');\">" );
        sink.tableHeaderCell_();
    }

    /**
     * Emits the inline toggleCol(imageID, strCol) script: hides/shows every
     * th/td whose CSS class equals the column-group name and swaps the arrow
     * image between previous.gif and next.gif.
     */
    private void sinkJavascriptCode( Sink sink )
    {
        StringBuffer buff = new StringBuffer();
        buff.append( "<script type=\"text/javascript\">" );
        buff.append( " function toggleCol(imageID,strCol){" );
        buff.append( " var ths = document.getElementsByTagName(\"th\");" );
        buff.append( " var tds = document.getElementsByTagName(\"td\");" );
        buff.append( " var mesimages = document.getElementsByName(imageID);" );
        buff.append( " for (idx in ths) {" );
        buff.append( " if (ths[idx].className == strCol)" );
        buff.append( " {" );
        buff.append( " if (ths[idx].style.display == \"none\") {" );
        buff.append( " ths[idx].style.display = \"\";" );
        buff.append( " for (var i = 0; i < mesimages.length; i++) {" );
        buff.append( " mesimages[i].src = './images/previous.gif';" );
        buff.append( " }" );
        buff.append( " }" );
        buff.append( " else {" );
        buff.append( " ths[idx].style.display = \"none\";" );
        buff.append( " for (var i = 0; i < mesimages.length; i++) {" );
        buff.append( " mesimages[i].src = './images/next.gif';" );
        buff.append( " }" );
        buff.append( " }" );
        buff.append( " }" );
        buff.append( " }" );
        buff.append( " for (idx in tds) {" );
        buff.append( " if (tds[idx].className == strCol)" );
        buff.append( " {" );
        buff.append( " if (tds[idx].style.display == \"none\") " );
        buff.append( " tds[idx].style.display = \"\";" );
        buff.append( " else{" );
        buff.append( " tds[idx].style.display = \"none\";" );
        buff.append( " }" );
        buff.append( " }" );
        buff.append( " }" );
        buff.append( " }" );
        buff.append( "</script>" );
        sink.rawText( buff.toString() );
    }
}
apache-2.0
cfung/Android_App_ud851-Exercises
Lesson02-GitHub-Repo-Search/T02.01-Exercise-CreateLayout/app/src/main/java/com/example/android/datafrominternet/MainActivity.java
1893
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.datafrominternet; import android.os.Bundle; //import android.support.v7.app.AppCompatActivity; import android.app.Activity; import android.widget.EditText; import android.widget.TextView; import org.w3c.dom.Text; public class MainActivity extends Activity { // TODO (26) Create an EditText variable called mSearchBoxEditText EditText mSearchBoxEditText; // TODO (27) Create a TextView variable called mUrlDisplayTextView TextView mUrlDisplayTextView; // TODO (28) Create a TextView variable called mSearchResultsTextView TextView mSearchResultsTextView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // TODO (29) Use findViewById to get a reference to mSearchBoxEditText mSearchBoxEditText = (EditText) findViewById(R.id.et_search_box); // TODO (30) Use findViewById to get a reference to mUrlDisplayTextView mUrlDisplayTextView = (TextView) findViewById(R.id.tv_url_display); // TODO (31) Use findViewById to get a reference to mSearchResultsTextView mSearchResultsTextView = (TextView) findViewById(R.id.tv_github_search_results_json); } }
apache-2.0
chenxyzy/cms
src/com/lerx/web/util/WebThreadUcenter.java
3708
package com.lerx.web.util;

import java.io.IOException;

import com.lerx.style.site.vo.SiteStyle;
import com.lerx.sys.util.FileUtil;
import com.lerx.sys.util.StringUtil;
import com.lerx.sys.util.vo.FormatElements;
import com.lerx.sys.util.vo.ReadFileArg;
import com.lerx.sys.util.vo.UserCookie;
import com.lerx.user.dao.IInterconnectionDao;
import com.lerx.user.util.UserUtil;
import com.lerx.user.vo.User;
import com.lerx.web.util.camp.PubUtil;
import com.lerx.web.util.camp.ResultPage;
import com.lerx.web.util.camp.SiteInit;
import com.lerx.web.util.camp.SiteUtil;
import com.lerx.web.vo.LoginCheckEl;
import com.lerx.web.vo.ResultEl;
import com.lerx.web.vo.WebElements;
import com.opensymphony.xwork2.ActionSupport;

/**
 * Renders the front-end "user center" (ucenter) page: initializes the site
 * template for the user-center style, substitutes the page-title/location
 * placeholders, then — depending on login state — either fills in the
 * user-specific template markers or renders the standard "authentication
 * failed" result page.
 */
public class WebThreadUcenter {

    /**
     * Builds the HTML for the user-center page of the current request.
     *
     * @param wel aggregate of request, session, style and DAO state for this render
     * @return the finished HTML page (either the user center or an auth-failure page)
     * @throws IOException if a template fragment cannot be read from disk
     */
    public static String show(WebElements wel) throws IOException{
        wel.setRefererRec(false);
        // Read values out of wel up front, to avoid calling getters repeatedly.
        // (translated from the original Chinese comment)
//      IArticleThreadDao articleThreadDaoImp=wel.getArticleThreadDaoImp();
//      IArticleGroupDao articleGroupDaoImp=wel.getArticleGroupDaoImp();
//      IUserDao userDaoImp=wel.getUserDaoImp();
        ActionSupport as=wel.getAs();
//      HttpServletRequest request=wel.getRequest();
//      SiteInfo site=wel.getSite();
        SiteStyle curSiteStyle=wel.getCurSiteStyle();
//      int pageSize=wel.getPageSize();
//      int page=wel.getPage();
//      int gid=wel.getGid();
        // End of value extraction. (translated)

        // The order of the following five lines must not be changed. (translated)
        wel=SiteUtil.initSiteElement(wel, curSiteStyle.getUserCenterStyle());
        wel.setTitleFormat(StringUtil.strReplace(wel.getTitleFormat(), "{$$app$$}", as.getText("lerx.ucenterTitle")));
        wel=SiteUtil.endSiteService(wel);
        FormatElements fel=wel.getFel();
        String htmlTemplate = wel.getHtmlTemplate();

        IInterconnectionDao interconnectionDaoImp=wel.getInterconnectionDaoImp();
        wel=SiteInit.reInit(wel);
        ResultEl re=wel.getRe();

        // Substitute the location placeholder with the localized page title.
        htmlTemplate = StringUtil.strReplace(htmlTemplate, "{$$location$$}", as.getText("lerx.ucenterTitle"));

        // Resolve the login state for this request and mirror it onto wel.
        LoginCheckEl lcel=PubUtil.logincheck(wel);
        wel.setCdm(lcel.getCdm());
        wel.setUc(lcel.getUc());
        wel.setUserLogined(lcel.isLogined());
        UserCookie uc=wel.getUc();
        // NOTE(review): the return value is discarded — this call has no effect
        // and looks like leftover timing/debug code.
        System.currentTimeMillis();
        if (uc != null) {
            // Logged in: expand the user-specific hrefs in the template.
            fel.setLf(htmlTemplate);
            htmlTemplate = UserUtil.formatHref(fel, uc.getUserId());
            User u = wel.getUserDaoImp().findUserById(uc.getUserId());
            // Reflect the avatar-lock flag as a checkbox "checked" state.
            if (u.isAvatarFileLock()){
                htmlTemplate = StringUtil.strReplace(htmlTemplate, "{$$avatarFileLock$$}", "checked");
            }else{
                htmlTemplate = StringUtil.strReplace(htmlTemplate, "{$$avatarFileLock$$}", "");
            }
            // If the account is linked to a third-party (type 1 = QQ) identity,
            // inject the "clear interconnection" snippet; otherwise blank it.
            if (interconnectionDaoImp.findUserByUid(uc.getUserId(), 1)!=null){
//              System.out.println("aaa");
                String rootFolder;
                rootFolder=curSiteStyle.getRootResFolder();
                ReadFileArg rfv=new ReadFileArg();
                rfv.setAs(as);
                rfv.setRequest(wel.getRequest());
                rfv.setRootFolder(rootFolder);
                rfv.setFileName("icClear.txt");
                rfv.setSubFolder("act");
                String txt = FileUtil.readFile(rfv);
//              System.out.println("txt:"+txt);
                txt = StringUtil.strReplace(txt, "{$$href$$}", wel.getRequest().getContextPath()+"/qq_clear.action?user.id="+uc.getUserId()+"&f=fore");
                htmlTemplate = StringUtil.strReplace(htmlTemplate, "{$$icClear$$}", txt);
            }else{
                htmlTemplate = StringUtil.strReplace(htmlTemplate, "{$$icClear$$}", "");
            }
        }else{
            // Not logged in: render the standard "authentication failed" result
            // page and point the user at the login action.
            re.setMes(as.getText("lerx.fail.auth"));
            re.setMod(2);
            re.setSiteStyleDaoImp(wel.getSiteStyleDaoImp());
//          re.setRefererUrl(SiteInit.refCheck(wel,0));
            re.setRefererUrl(wel.getRequest().getContextPath()+"/login.action");
//          ResultEl re=reInit(refCheck(0),"loginForbidden",0,0);
            htmlTemplate=ResultPage.init(re);
        }
        return htmlTemplate;
    }
}
apache-2.0
Donnerbart/hazelcast-simulator
simulator/src/main/java/com/hazelcast/simulator/worker/testcontainer/IllegalTestException.java
1227
/* * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.simulator.worker.testcontainer; /** * Exception thrown when a test is not valid, e.g. it has no method with a {@link com.hazelcast.simulator.test.annotations.Run} * or {@link com.hazelcast.simulator.test.annotations.RunWithWorker} annotation. */ public class IllegalTestException extends RuntimeException { public IllegalTestException(String message) { super(message); } public IllegalTestException(String message, Throwable cause) { super(message, cause); } public IllegalTestException(Throwable cause) { super(cause); } }
apache-2.0
griffon-plugins/griffon-domain-plugin
subprojects/griffon-domain-core/src/main/java/org/codehaus/griffon/runtime/domain/methods/AbstractCreatePersistentMethod.java
3120
/*
 * Copyright 2014-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.codehaus.griffon.runtime.domain.methods;

import griffon.exceptions.StaticMethodInvocationException;
import griffon.plugins.domain.GriffonDomain;
import griffon.plugins.domain.GriffonDomainClass;
import griffon.plugins.domain.GriffonDomainHandler;
import griffon.plugins.domain.GriffonDomainProperty;
import griffon.plugins.domain.methods.CreateMethod;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;

import static griffon.util.GriffonClassUtils.requireState;
import static java.lang.reflect.Modifier.isAbstract;

/**
 * Base implementation of the static {@code create} persistent method: builds a new
 * domain instance, optionally seeding its properties from a {@code Map} argument.
 *
 * @author Andres Almiray
 */
public abstract class AbstractCreatePersistentMethod extends AbstractPersistentStaticMethodInvocation implements CreateMethod {
    public AbstractCreatePersistentMethod(@Nonnull GriffonDomainHandler griffonDomainHandler) {
        super(griffonDomainHandler);
    }

    /**
     * Dispatches the {@code create} invocation: no arguments yields a blank
     * instance, a single {@code Map} argument yields a pre-populated instance,
     * anything else is rejected with a {@code StaticMethodInvocationException}.
     */
    @Nullable
    @Override
    @SuppressWarnings("unchecked")
    protected final <T extends GriffonDomain> Object invokeInternal(@Nonnull GriffonDomainClass<T> domainClass,
                                                                    @Nonnull String methodName,
                                                                    @Nonnull Object[] arguments) {
        // create() makes no sense on an abstract domain class
        requireState(!isAbstract(domainClass.getClazz().getModifiers()),
            "Cannot invoke " + METHOD_NAME + " on an abstract class!");

        if (arguments.length > 0) {
            if (arguments[0] instanceof Map) {
                return create(domainClass, (Map) arguments[0]);
            }
            throw new StaticMethodInvocationException(domainClass.getClazz(), methodName, arguments);
        }
        return newDomainInstance(domainClass);
    }

    /**
     * Builds a fresh instance of the domain class and copies the supplied
     * property values onto it.
     */
    @Nonnull
    protected <T extends GriffonDomain> GriffonDomain create(@Nonnull GriffonDomainClass<T> domainClass,
                                                             @Nonnull Map<String, Object> props) {
        GriffonDomain domainInstance = newDomainInstance(domainClass);
        copyProperties(domainClass, domainInstance, props);
        return domainInstance;
    }

    /** Asks the application's artifact manager for a fresh instance of the domain class. */
    @Nonnull
    private <T extends GriffonDomain> GriffonDomain newDomainInstance(@Nonnull GriffonDomainClass<T> domainClass) {
        return (GriffonDomain) getGriffonDomainHandler().getApplication().getArtifactManager().newInstance(domainClass);
    }

    /** Copies every non-null value from {@code props} onto the matching domain property. */
    private <T extends GriffonDomain> void copyProperties(@Nonnull GriffonDomainClass<T> domainClass,
                                                          @Nonnull GriffonDomain target,
                                                          @Nonnull Map<String, Object> props) {
        for (GriffonDomainProperty property : domainClass.getProperties()) {
            Object value = props.get(property.getName());
            if (value == null) {
                continue;
            }
            property.setValue(target, value);
        }
    }
}
apache-2.0
estatio/estatio
estatioapp/app/src/test/java/org/estatio/module/budget/integtests/budget/BudgetItemValueRepository_IntegTest.java
5239
package org.estatio.module.budget.integtests.budget;

import java.math.BigDecimal;
import java.util.List;

import javax.inject.Inject;

import org.joda.time.LocalDate;
import org.junit.Before;
import org.junit.Test;

import org.apache.isis.applib.fixturescripts.FixtureScript;

import org.estatio.module.asset.dom.Property;
import org.estatio.module.asset.dom.PropertyRepository;
import org.estatio.module.asset.fixtures.property.enums.Property_enum;
import org.estatio.module.budget.dom.budget.Budget;
import org.estatio.module.budget.dom.budget.BudgetRepository;
import org.estatio.module.budget.dom.budgetcalculation.BudgetCalculationType;
import org.estatio.module.budget.dom.budgetitem.BudgetItem;
import org.estatio.module.budget.dom.budgetitem.BudgetItemValue;
import org.estatio.module.budget.dom.budgetitem.BudgetItemValueRepository;
import org.estatio.module.budget.fixtures.budgets.enums.Budget_enum;
import org.estatio.module.budget.integtests.BudgetModuleIntegTestAbstract;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for {@link BudgetItemValueRepository}: finder by item/type,
 * the unique (item, date, type) finder, and upsert's update-vs-create behavior.
 *
 * The fixture installs the Oxford (OxfGb) budgets for 2015 and 2016; the 2015
 * budget's first item carries a single BUDGETED value of 30000.55.
 */
public class BudgetItemValueRepository_IntegTest extends BudgetModuleIntegTestAbstract {

    @Inject
    PropertyRepository propertyRepository;

    @Inject
    BudgetRepository budgetRepository;

    @Inject
    BudgetItemValueRepository budgetItemValueRepository;

    // Installs the 2015 and 2016 Oxford budget fixtures before each test.
    @Before
    public void setupData() {
        runFixtureScript(new FixtureScript() {
            @Override
            protected void execute(final ExecutionContext executionContext) {
                executionContext.executeChild(this, Budget_enum.OxfBudget2015.builder());
                executionContext.executeChild(this, Budget_enum.OxfBudget2016.builder());
            }
        });
    }

    /** Finder should return exactly the values of the requested calculation type. */
    @Test
    public void findByBudgetItemAndType() {
        // given
        Property property = Property_enum.OxfGb.findUsing(serviceRegistry);
        Budget budget = budgetRepository.findByPropertyAndStartDate(property, new LocalDate(2015, 01, 01));
        BudgetItem budgetItem = budget.getItems().first();
        assertThat(budgetItem.getValues().size()).isEqualTo(1);
        assertThat(budgetItem.getValues().first().getType()).isEqualTo(BudgetCalculationType.BUDGETED);

        // when
        List<BudgetItemValue> results = budgetItemValueRepository.findByBudgetItemAndType(budgetItem, BudgetCalculationType.BUDGETED);

        // then
        assertThat(results.size()).isEqualTo(1);
    }

    /** findUnique matches on the exact (item, date, type) triple; otherwise null. */
    @Test
    public void findUniqueTest(){
        // given
        Property property = Property_enum.OxfGb.findUsing(serviceRegistry);
        Budget budget = budgetRepository.findByPropertyAndStartDate(property, new LocalDate(2015, 01, 01));
        BudgetItem budgetItem = budget.getItems().first();

        // when
        BudgetItemValue result = budgetItemValueRepository.findUnique(budgetItem, new LocalDate(2015,01,01), BudgetCalculationType.BUDGETED);

        // then
        assertThat(result.getDate()).isEqualTo(new LocalDate(2015, 01, 01));

        // and when (one day off — no match expected)
        result = budgetItemValueRepository.findUnique(budgetItem, new LocalDate(2015,01,02), BudgetCalculationType.BUDGETED);

        // then
        assertThat(result).isNull();
    }

    /** upsert with an existing (item, date, type) updates the value in place. */
    @Test
    public void updateOrCreateTest_Update(){
        // given
        LocalDate budgetStart = new LocalDate(2015, 01, 01);
        Property property = Property_enum.OxfGb.findUsing(serviceRegistry);
        Budget budget = budgetRepository.findByPropertyAndStartDate(property, budgetStart);
        BudgetItem budgetItem = budget.getItems().first();
        assertThat(budgetItem.getValues().size()).isEqualTo(1);
        assertThat(budgetItem.getValues().first().getType()).isEqualTo(BudgetCalculationType.BUDGETED);
        assertThat(budgetItem.getValues().first().getValue()).isEqualTo(new BigDecimal("30000.55"));

        // when
        BudgetItemValue result = budgetItemValueRepository.upsert(budgetItem, new BigDecimal("33333.00"), budgetStart, BudgetCalculationType.BUDGETED);

        // then (still one value — it was updated, not duplicated)
        assertThat(budgetItem.getValues().size()).isEqualTo(1);
        assertThat(result.getValue()).isEqualTo(new BigDecimal("33333.00"));
    }

    /** upsert with a new calculation type creates a second value for the item. */
    @Test
    public void updateOrCreateTest_Create(){
        // given
        LocalDate budgetStart = new LocalDate(2015, 1, 1);
        Property property = Property_enum.OxfGb.findUsing(serviceRegistry);
        Budget budget = budgetRepository.findByPropertyAndStartDate(property, budgetStart);
        BudgetItem budgetItem = budget.getItems().first();
        assertThat(budgetItem.getValues().size()).isEqualTo(1);
        assertThat(budgetItem.getValues().first().getType()).isEqualTo(BudgetCalculationType.BUDGETED);
        assertThat(budgetItem.getValues().first().getValue()).isEqualTo(new BigDecimal("30000.55"));

        // when (AUDITED does not exist yet for this item/date)
        BudgetItemValue result = budgetItemValueRepository.upsert(budgetItem, new BigDecimal("33333.00"), budgetStart, BudgetCalculationType.AUDITED);
        transactionService.flushTransaction();

        // then
        assertThat(budgetItem.getValues().size()).isEqualTo(2);
        assertThat(result.getValue()).isEqualTo(new BigDecimal("33333.00"));
        assertThat(result.getType()).isEqualTo(BudgetCalculationType.AUDITED);
    }
}
apache-2.0
trasukg/river-qa-2.2
src/com/sun/jini/thread/WakeupManager.java
20252
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.sun.jini.thread; import com.sun.jini.config.Config; import java.text.DateFormat; import java.util.SortedSet; import java.util.logging.Level; import java.util.logging.Logger; import net.jini.config.Configuration; import net.jini.config.ConfigurationException; /** * A Queue of timed tasks. Each task implements {@link Runnable}. * Events can either be executed in the queue's thread or in their own thread. * <p> * A task is an object that implements <code>Runnable</code>. It is * scheduled by invoking {@link #schedule(long, Runnable, WakeupManager.ThreadDesc) * schedule} with a time at which it should be run. When that time * arrives (approximately) the task will be pulled off the queue and * have its {@link Runnable#run run} method invoked. <p> * * A <code>schedule</code> request can specify a * {@link WakeupManager.ThreadDesc}, which will define the parameters * of a thread to be created to run the <code>Runnable</code>. You can * specify the group, whether the thread is a daemon thread, and the priority. 
* Additionally you can use a subclass of <code>WakeupManager.ThreadDesc</code> * and override the {@link WakeupManager.ThreadDesc#thread thread} method * to further customize thread creation. * <p> * * When a task is scheduled, a {@link WakeupManager.Ticket} is returned * that can be used to cancel the event if desired. * <p> * * The queue requires its own thread, whose parameters can be defined * via a <code>ThreadDesc</code> if desired. The queue's thread * will be started when the first task is scheduled. If the queue * becomes empty the thread will be terminated after a * <a href=#queueThreadTimeout>configurable delay</a>. The thread * will be re-started if a new task is scheduled. * <p> * * While it is theoretically possible to obtain the queue's thread and * interrupt it, the results of doing so are undefined. If a client * wishes to stop the queue's thread the client should either remove * all the tasks or call {@link #stop}. Note, calling * <code>stop</code> will cause future <code>schedule</code> calls to * fail with an <code>IllegalStateException</code>. <p> * * <a name="ConfigEntries"> * <code>WakeupManager</code> supports the <code>queueThreadTimeout</code> * configuration entry, with the component * <code>com.sun.jini.thread.WakeupManager</code>. * * <a name="queueThreadTimeout"> * <table summary="Describes the queueThreadTimeout configuration entry" * border="0" cellpadding="2"> * <tr valign="top"> * <th scope="col" summary="layout"> <font size="+1">&#X2022;</font> * <th scope="col" align="left" colspan="2"> <font size="+1"> * <code>queueThreadTimeout</code></font> * * <tr valign="top"> <td> &nbsp <th scope="row" align="right"> * Type: <td> <code>long</code> * * <tr valign="top"> <td> &nbsp <th scope="row" align="right"> * Default: <td> 30,000 milliseconds * * <tr valign="top"> <td> &nbsp <th scope="row" align="right"> * Description: * <td> How long, in milliseconds, the queue's thread will be * left running if there are no scheduled tasks. 
Must be
 * a non-negative long value. This configuration entry is
 * consulted when the <code>WakeupManager</code> is initially created.
 *
 * </table>
 * <p>
 *
 * This class uses the {@link Logger} named
 * <code>com.sun.jini.thread.WakeupManager</code> to log information at
 * the following logging levels: <p>
 *
 * <table border=1 cellpadding=5
 *        summary="Describes logging performed by WakeupManager at different
 *        logging levels">
 *
 * <tr> <th> Level <th> Description
 *
 * <tr> <td> SEVERE <td> exceptions thrown when we attempt to
 *        create the queue's thread
 *
 * <tr> <td> WARNING <td> exceptions thrown by the run methods of tasks,
 *        by the <code>ThreadDesc</code>'s of tasks, or
 *        if the queue's thread is interrupted
 *
 * <tr> <td> FINEST <td> how many milliseconds until the next event
 *        and when the queue's thread is stopped or started
 *
 * </table>
 *
 * @author Sun Microsystems, Inc.
 *
 * @see java.lang.Runnable
 */
public class WakeupManager {
    /** Component we pull configuration entries from and our logger name */
    private final static String COMPONENT_NAME = "com.sun.jini.thread.WakeupManager";

    /** Default value for <code>queueThreadTimeout</code> */
    private final static long DEFAULT_QUEUE_THREAD_TIMEOUT = 30000;

    /**
     * If there are no registered tasks, the number of
     * milliseconds to wait before killing the kicker thread.
     */
    private final long queueThreadTimeout;

    /**
     * The queue. Also the object we use for locking; multi-threaded
     * access to all the other fields is arbitrated by synchronizing
     * on this object.
     */
    private final SortedSet contents = new java.util.TreeSet();

    /** <code>ThreadDesc</code> we use to create kicker threads */
    private final ThreadDesc kickerDesc;

    /** The Runnable for the queue's thread */
    private final Kicker kicker = new Kicker();

    /** Next tie-breaker ticket value (protected by the contents lock) */
    private long nextBreaker = 0;

    /** First item in contents (cached by checkHead; null when queue empty) */
    private Ticket head = null;

    /** The queue's thread; null when no queue thread is currently running */
    private Thread kickerThread;

    /** <code>true</code> if we have been stopped. */
    private boolean dead = false;

    /**
     * <code>DateFormat</code> used by {@link Ticket} to format its
     * <code>toString</code> return value.
     */
    private static DateFormat dateFmt = DateFormat.getTimeInstance(DateFormat.LONG);

    /** Logger for this class and nested classes */
    private static final Logger logger = Logger.getLogger(COMPONENT_NAME);

    /**
     * Description of a future thread.
     *
     * @see WakeupManager#schedule
     * @see WakeupManager#WakeupManager(WakeupManager.ThreadDesc)
     */
    public static class ThreadDesc {
        private final ThreadGroup group; // group to create in
        private final boolean daemon;    // create as daemon?
        private final int priority;      // priority

        /**
         * Equivalent to
         * <pre>
         *     ThreadDesc(null, false)
         * </pre>
         */
        public ThreadDesc() {
            this(null, false);
        }

        /**
         * Equivalent to
         * <pre>
         *     ThreadDesc(group, daemon, Thread.NORM_PRIORITY)
         * </pre>
         */
        public ThreadDesc(ThreadGroup group, boolean daemon) {
            this(group, daemon, Thread.NORM_PRIORITY);
        }

        /**
         * Describe a future thread that will be created in the given group,
         * daemon status, and priority.
         *
         * @param group The group to be created in. If <code>null</code>,
         *        the thread will be created in the default group.
         * @param daemon The thread will be a daemon thread if this is
         *        <code>true</code>.
         * @param priority The thread's priority.
         * @throws IllegalArgumentException if priority is not
         *         in between {@link Thread#MIN_PRIORITY} and
         *         {@link Thread#MAX_PRIORITY}
         */
        public ThreadDesc(ThreadGroup group, boolean daemon, int priority) {
            if (priority < Thread.MIN_PRIORITY || priority > Thread.MAX_PRIORITY) {
                throw new IllegalArgumentException("bad value for priority:" + priority);
            }
            this.group = group;
            this.daemon = daemon;
            this.priority = priority;
        }

        /**
         * The {@link ThreadGroup} the thread will be created in.
         * @return the {@link ThreadGroup} the thread will be created in.
         */
        public ThreadGroup getGroup() {
            return group;
        }

        /**
         * Returns <code>true</code> if the thread will be a daemon
         * thread, returns <code>false</code> otherwise.
         * @return <code>true</code> if the thread will be a daemon thread
         */
        public boolean isDaemon() {
            return daemon;
        }

        /**
         * The priority the thread should be created with.
         * @return the priority the thread should be created with.
         */
        public int getPriority() {
            return priority;
        }

        /**
         * Create a thread for the given runnable based on the values in this
         * object. May be overridden to give full control over creation
         * of the thread.
         * @return a thread to run <code>r</code>, unstarted
         */
        public Thread thread(Runnable r) {
            Thread thr;
            if (getGroup() == null)
                thr = new Thread(r);
            else
                thr = new Thread(getGroup(), r);
            thr.setDaemon(isDaemon());
            thr.setPriority(getPriority());
            return thr;
        }

        public String toString() {
            return "[" + getGroup() + ", " + isDaemon() + ", " + getPriority() + "]";
        }
    }

    /**
     * A ticket that can be used for cancelling a future task. It
     * describes the task itself as well. The {@link
     * WakeupManager#newTicket WakeupManager.newTicket} method
     * can be used by subclasses of <code>WakeupManager</code> to
     * create new <code>Ticket</code> instances.
     */
    public static class Ticket implements Comparable {
        /** When the task should occur. */
        public final long when;

        /** The task object to be executed */
        public final Runnable task;

        /** The <code>ThreadDesc</code>, or <code>null</code> if none. */
        public final ThreadDesc desc;

        /** Tie breaker used when two tickets have the same value for when */
        private final long breaker;

        private Ticket(long when, Runnable task, ThreadDesc threadDesc, long breaker) {
            if (task == null)
                throw new NullPointerException("task not specified");
            this.when = when;
            this.task = task;
            this.desc = threadDesc;
            this.breaker = breaker;
        }

        public String toString() {
            return dateFmt.format(new Long(when)) + "(" + when + ")" + ", " +
                task.getClass().getName() + ", " + desc;
        }

        public boolean equals(Object o) {
            if (!(o instanceof Ticket))
                return false;
            final Ticket that = (Ticket)o;
            return that.when == when && that.breaker == breaker;
        }

        public int hashCode() {
            // breaker values are unique within a manager, so this is a fine hash
            return (int)breaker;
        }

        public int compareTo(Object o) {
            // Order by time, then by creation order (breaker) for equal times.
            final Ticket that = (Ticket)o;
            final long whenDiff = when - that.when;
            if (whenDiff > 0)
                return 1;
            else if (whenDiff < 0)
                return -1;
            else {
                final long breakerDiff = breaker - that.breaker;
                if (breakerDiff > 0)
                    return 1;
                else if (breakerDiff < 0)
                    return -1;
                else
                    return 0;
            }
        }
    }

    /**
     * Create a new <code>WakeupManager</code>. Equivalent to:
     * <pre>
     *     WakeupManager(new ThreadDesc())
     * </pre>
     *
     * @see WakeupManager.ThreadDesc
     */
    public WakeupManager() {
        this(new ThreadDesc());
    }

    /**
     * Create a new <code>WakeupManager</code>. The thread used for
     * timing will be created according to the provided <code>ThreadDesc</code>.
     * @throws NullPointerException if desc is null
     */
    public WakeupManager(ThreadDesc desc) {
        if (desc == null)
            throw new NullPointerException("desc must be non-null");
        kickerDesc = desc;
        queueThreadTimeout = DEFAULT_QUEUE_THREAD_TIMEOUT;
    }

    /**
     * Create a new <code>WakeupManager</code>. The thread used for
     * timing will be created according to the provided <code>ThreadDesc</code>.
     * Optionally pass a configuration to control various implementation
     * specific behaviors.
     *
     * @throws ConfigurationException if an exception
     *         occurs while retrieving an item from the given
     *         <code>Configuration</code> object
     * @throws NullPointerException if either argument is null
     */
    public WakeupManager(ThreadDesc desc, Configuration config)
        throws ConfigurationException
    {
        if (desc == null)
            throw new NullPointerException("desc must be non-null");
        kickerDesc = desc;
        queueThreadTimeout = Config.getLongEntry(config, COMPONENT_NAME,
            "queueThreadTimeout", DEFAULT_QUEUE_THREAD_TIMEOUT, 0, Long.MAX_VALUE);
    }

    /**
     * Create a new ticket with the specified values for when the task
     * should be run, what task should be run, and what sort of
     * thread the task should be run in.
     *
     * @param when when the task should run, an absolute time
     * @param task what task should be run
     * @param threadDesc if non-<code>null</code> the object to use to
     *        create the thread the task should be run in, if
     *        <code>null</code> the task should be run in the
     *        manager's thread.
     * @throws NullPointerException if task is <code>null</code>
     */
    protected Ticket newTicket(long when, Runnable task, ThreadDesc threadDesc) {
        synchronized (contents) {
            // nextBreaker is guarded by the contents lock
            return new Ticket(when, task, threadDesc, nextBreaker++);
        }
    }

    /**
     * Schedule the given task for the given time. The task's <code>run</code>
     * method will be executed synchronously in the queue's own thread, so it
     * should be brief or it will affect whether future events will be executed
     * at an appropriate time.
     * @throws NullPointerException if <code>task</code> is <code>null</code>
     * @throws IllegalStateException if the manager has been stopped
     */
    public Ticket schedule(long when, Runnable task) {
        return schedule(when, task, null);
    }

    /**
     * Schedule the given task for the given time, to be run in a thread.
     * When the time comes, a new thread will be created according to the
     * <code>ThreadDesc</code> object provided. If <code>threadDesc</code> is
     * <code>null</code>, this is equivalent to the other form of
     * <code>schedule</code>.
     * @throws NullPointerException if <code>task</code> is <code>null</code>
     * @throws IllegalStateException if the manager has been stopped
     */
    public Ticket schedule(long when, Runnable task, ThreadDesc threadDesc) {
        synchronized (contents) {
            if (dead)
                throw new IllegalStateException(
                    "trying to add task to stopped WakeupManager");
            Ticket t = newTicket(when, task, threadDesc);
            contents.add(t);
            if (kickerThread == null) {
                logger.log(Level.FINEST, "starting queue's thread");
                try {
                    final Thread thread = kickerDesc.thread(kicker);
                    thread.start();
                    // Only set once we know start worked
                    kickerThread = thread;
                } catch (Throwable tt) {
                    try {
                        logger.log(Level.SEVERE, "queue thread creation exception",tt);
                    } catch (Throwable ttt) {
                        // don't let a problem in logging kill the thread
                    }
                }
            }
            // need to call checkHead (even if we just (re)created the
            // kickerThread), because that is how head gets set (note,
            // this is ok to call even if thread creation failed)
            checkHead();
            return t;
        }
    }

    /**
     * Cancel the given ticket.
     */
    public void cancel(Ticket t) {
        synchronized (contents) {
            if (dead)
                return;
            contents.remove(t);
            checkHead();
        }
    }

    /**
     * Cancel all tickets.
     */
    public void cancelAll() {
        synchronized (contents) {
            if (dead)
                return;
            contents.clear();
            checkHead();
        }
    }

    /**
     * Called whenever we change contents to update head
     * and see if we need to wake up the queue thread.
     * Assumes the caller holds the lock on contents.
     */
    private void checkHead() {
        assert Thread.holdsLock(contents);

        final Ticket oldHead = head;
        if (contents.isEmpty())
            head = null;
        else
            head = (Ticket)contents.first();

        if (head == oldHead)
            return;

        // New first event (including possibly no events), run
        // needs to wake up and change its sleep time.
        contents.notifyAll();
    }

    /**
     * Return whether the queue is currently empty.
     */
    public boolean isEmpty() {
        synchronized (contents) {
            return (contents.isEmpty());
        }
    }

    /**
     * Stop executing.
     */
    public void stop() {
        synchronized (contents) {
            contents.clear();
            kickerThread = null;
            head = null;
            dead = true;
            contents.notifyAll();
        }
    }

    /**
     * The kicker work. This is what sleeps until the time of
     * the next event.
     */
    private class Kicker implements Runnable {
        public void run() {
            /* Set when contents is empty to control when the kicker will
             * exit. Long.MIN_VALUE used as flag value to indicate
             * kickerExitTime is invalid
             */
            long kickerExitTime = Long.MIN_VALUE;

            while (true) {
                final Ticket ticketToRun;
                synchronized (contents) {
                    while (true) {
                        if (dead)
                            return;

                        final long now = System.currentTimeMillis();
                        final long timeToNextEvent;
                        if (contents.isEmpty()) {
                            if (kickerExitTime == Long.MIN_VALUE) {
                                kickerExitTime = now + queueThreadTimeout;
                                if (kickerExitTime < 0) {
                                    // overflow
                                    kickerExitTime = Long.MAX_VALUE;
                                }
                            }

                            // Since contents is empty the next event is exit
                            timeToNextEvent = kickerExitTime - now;
                            if (timeToNextEvent <= 0) {
                                // been idle long enough, depart
                                /* $$$ Do this in a finally block for the run?
                                 * so no matter how this thread ends kickerThread
                                 * gets set to null?
                                 */
                                kickerThread = null;
                                logger.log(Level.FINEST, "stopping queue's thread");
                                return;
                            }
                        } else {
                            // contents is non-empty
                            kickerExitTime = Long.MIN_VALUE;
                            timeToNextEvent = head.when - now;
                            if (timeToNextEvent <= 0) {
                                // The head's time has come, consume and
                                // break out of inner loop to run it.
                                ticketToRun = head;
                                contents.remove(head);
                                checkHead();
                                break;
                            }
                        }

                        if (logger.isLoggable(Level.FINEST)) {
                            logger.log(Level.FINEST, "timeToNextEvent:{0}",
                                (timeToNextEvent == Long.MAX_VALUE ?
                                    "Long.MAX_VALUE" :
                                    Long.toString(timeToNextEvent)));
                        }

                        assert timeToNextEvent > 0;
                        try {
                            contents.wait(timeToNextEvent);
                        } catch (InterruptedException e) {
                            /* This should never happen, our thread is
                             * private to WakeupManager and tasks
                             * calling Thread.currentThread().interrupt() is
                             * decidedly anti-social. Log, but keep on
                             * going.
                             */
                            try {
                                logger.log(Level.WARNING,
                                    "Attempt to interrupt Queue's thread");
                            } catch (Throwable t) {
                                // ignore
                            }

                            /* This loop already deals with wait returning
                             * early for no reason, so going to the top
                             * of the loop is ok here - if there are no
                             * new tasks and we are not dead we will
                             * just calc a new value for timeToNextEvent
                             */
                        }

                        /* Something has changed or the time has arrived
                         * for action, don't know which, go back to the
                         * top of the inner loop to figure out what to
                         * do next
                         */
                    }
                }

                // Run the task outside of the lock
                if (ticketToRun.desc == null) {
                    // ... in this thread
                    try {
                        ticketToRun.task.run();
                    } catch (Throwable e) {
                        try {
                            logger.log(Level.WARNING, "Runnable.run exception", e);
                        } catch (Throwable t) {
                            // don't let a problem in logging kill the thread
                        }
                    }
                } else {
                    // ... in its own thread
                    try {
                        ticketToRun.desc.thread(ticketToRun.task).start();
                    } catch (Throwable t) {
                        try {
                            logger.log(Level.WARNING, "task thread creation exception", t);
                        } catch (Throwable tt) {
                            // don't let a problem in logging kill the thread
                        }
                    }
                }
            }
        }
    }
}
apache-2.0
bablumon/EasyJobsMain
app/src/main/java/me/toptas/jobseasy/parser/RssParser.java
5028
package me.toptas.jobseasy.parser; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; import java.util.ArrayList; import me.toptas.jobseasy.model.RssItem; public class RssParser extends DefaultHandler { private String elementValue = null; private boolean elementOn = false; private RssItem rssItem; private String tempTitle = ""; private String tempLink; private String tempImage; private String tempPubdate; private String tempDescription; private boolean parsingTitle = false; private boolean parsingDesc = false; private boolean parsingLink = false; private final ArrayList<RssItem> items; public RssParser() { super(); items = new ArrayList<>(); } public ArrayList<RssItem> getItems() { return items; } @Override public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException { elementOn = true; if (localName.equals("item")) { rssItem = new RssItem(); } else if (localName.equalsIgnoreCase("title") && !qName.contains("media")) { parsingTitle = true; tempTitle = ""; } else if (localName.equalsIgnoreCase("description")) { parsingDesc = true; tempDescription = ""; } else if (localName.equalsIgnoreCase("link") && !qName.equals("atom:link")) { parsingLink = true; tempLink = ""; } if (attributes != null) { String url = attributes.getValue("url"); if (url != null && !url.isEmpty()) { tempImage = url; } } } @Override public void endElement(String uri, String localName, String qName) throws SAXException { elementOn = false; /** * Sets the values after retrieving the values from the XML tags * */ if (rssItem != null) { if (localName.equalsIgnoreCase("item")) { rssItem = new RssItem(); rssItem.setTitle(tempTitle.trim()); rssItem.setUrl(tempLink); rssItem.setImageUrl(tempImage); rssItem.setPubDate(tempPubdate); rssItem.setDescription((tempDescription)); if (tempImage == null && tempDescription != null && getImageSourceFromDescription(tempDescription) != null) { 
rssItem.setImageUrl(getImageSourceFromDescription(tempDescription)); } items.add(rssItem); tempLink = ""; tempImage = null; tempPubdate = ""; // Log.v("asd","pended: " + tempTitle); } else if (localName.equalsIgnoreCase("title") && !qName.contains("media")) { // tempTitle = elementValue; parsingTitle = false; elementValue = ""; tempTitle = tempTitle.replace("\n", ""); } else if (localName.equalsIgnoreCase("link") && !elementValue.isEmpty()) { // tempLink = elementValue; parsingLink = false; elementValue = ""; tempLink = tempLink.replace("\n", ""); } else if (localName.equalsIgnoreCase("image") || localName.equalsIgnoreCase("url")) { if (elementValue != null && !elementValue.isEmpty()) { tempImage = elementValue; } } else if (localName.equals("pubDate")) { tempPubdate = elementValue; } else if (localName.equals("description")) { parsingDesc = false; elementValue = ""; } } } @Override public void characters(char[] ch, int start, int length) throws SAXException { String buff = new String(ch, start, length); if (elementOn) { if (buff.length() > 2) { elementValue = buff; elementOn = false; } } if (parsingTitle) { tempTitle = tempTitle + buff; } if (parsingDesc) { tempDescription = tempDescription + buff; } if (parsingLink) { tempLink = tempLink + buff; } } private String getImageSourceFromDescription(String description) { if (description.contains("<img") && description.contains("src")) { String[] parts = description.split("src=\""); if (parts.length == 2 && parts[1].length() > 0) { String src = parts[1].substring(0, parts[1].indexOf("\"")); String[] srcParts = src.split("http"); if (srcParts.length > 2) { src = "http" + srcParts[2]; } return src; } } return null; } }
apache-2.0
androidx/media
libraries/exoplayer/src/main/java/androidx/media3/exoplayer/video/MediaCodecVideoRenderer.java
80768
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.media3.exoplayer.video; import static androidx.media3.exoplayer.DecoderReuseEvaluation.DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED; import static androidx.media3.exoplayer.DecoderReuseEvaluation.DISCARD_REASON_VIDEO_MAX_RESOLUTION_EXCEEDED; import static androidx.media3.exoplayer.DecoderReuseEvaluation.REUSE_RESULT_NO; import static java.lang.Math.max; import static java.lang.Math.min; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Context; import android.graphics.Point; import android.media.MediaCodec; import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecProfileLevel; import android.media.MediaCrypto; import android.media.MediaFormat; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.os.SystemClock; import android.util.Pair; import android.view.Surface; import androidx.annotation.CallSuper; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import androidx.media3.common.C; import androidx.media3.common.DrmInitData; import androidx.media3.common.Format; import androidx.media3.common.MimeTypes; import androidx.media3.common.VideoSize; import androidx.media3.common.util.Assertions; import androidx.media3.common.util.Log; import androidx.media3.common.util.MediaFormatUtil; import 
androidx.media3.common.util.TraceUtil; import androidx.media3.common.util.UnstableApi; import androidx.media3.common.util.Util; import androidx.media3.decoder.DecoderInputBuffer; import androidx.media3.exoplayer.DecoderCounters; import androidx.media3.exoplayer.DecoderReuseEvaluation; import androidx.media3.exoplayer.DecoderReuseEvaluation.DecoderDiscardReasons; import androidx.media3.exoplayer.ExoPlaybackException; import androidx.media3.exoplayer.ExoPlayer; import androidx.media3.exoplayer.FormatHolder; import androidx.media3.exoplayer.PlayerMessage.Target; import androidx.media3.exoplayer.RendererCapabilities; import androidx.media3.exoplayer.mediacodec.MediaCodecAdapter; import androidx.media3.exoplayer.mediacodec.MediaCodecDecoderException; import androidx.media3.exoplayer.mediacodec.MediaCodecInfo; import androidx.media3.exoplayer.mediacodec.MediaCodecRenderer; import androidx.media3.exoplayer.mediacodec.MediaCodecSelector; import androidx.media3.exoplayer.mediacodec.MediaCodecUtil; import androidx.media3.exoplayer.mediacodec.MediaCodecUtil.DecoderQueryException; import androidx.media3.exoplayer.video.VideoRendererEventListener.EventDispatcher; import com.google.common.collect.ImmutableList; import java.nio.ByteBuffer; import java.util.List; /** * Decodes and renders video using {@link MediaCodec}. * * <p>This renderer accepts the following messages sent via {@link ExoPlayer#createMessage(Target)} * on the playback thread: * * <ul> * <li>Message with type {@link #MSG_SET_VIDEO_OUTPUT} to set the output. The message payload * should be the target {@link Surface}, or null to clear the output. Other non-null payloads * have the effect of clearing the output. * <li>Message with type {@link #MSG_SET_SCALING_MODE} to set the video scaling mode. The message * payload should be one of the integer scaling modes in {@link C.VideoScalingMode}. 
Note that * the scaling mode only applies if the {@link Surface} targeted by this renderer is owned by * a {@link android.view.SurfaceView}. * <li>Message with type {@link #MSG_SET_CHANGE_FRAME_RATE_STRATEGY} to set the strategy used to * call {@link Surface#setFrameRate}. * <li>Message with type {@link #MSG_SET_VIDEO_FRAME_METADATA_LISTENER} to set a listener for * metadata associated with frames being rendered. The message payload should be the {@link * VideoFrameMetadataListener}, or null. * </ul> */ @UnstableApi public class MediaCodecVideoRenderer extends MediaCodecRenderer { private static final String TAG = "MediaCodecVideoRenderer"; private static final String KEY_CROP_LEFT = "crop-left"; private static final String KEY_CROP_RIGHT = "crop-right"; private static final String KEY_CROP_BOTTOM = "crop-bottom"; private static final String KEY_CROP_TOP = "crop-top"; // Long edge length in pixels for standard video formats, in decreasing in order. private static final int[] STANDARD_LONG_EDGE_VIDEO_PX = new int[] {1920, 1600, 1440, 1280, 960, 854, 640, 540, 480}; /** * Scale factor for the initial maximum input size used to configure the codec in non-adaptive * playbacks. See {@link #getCodecMaxValues(MediaCodecInfo, Format, Format[])}. */ private static final float INITIAL_FORMAT_MAX_INPUT_SIZE_SCALE_FACTOR = 1.5f; /** Magic frame render timestamp that indicates the EOS in tunneling mode. 
*/ private static final long TUNNELING_EOS_PRESENTATION_TIME_US = Long.MAX_VALUE; private static boolean evaluatedDeviceNeedsSetOutputSurfaceWorkaround; private static boolean deviceNeedsSetOutputSurfaceWorkaround; private final Context context; private final VideoFrameReleaseHelper frameReleaseHelper; private final EventDispatcher eventDispatcher; private final long allowedJoiningTimeMs; private final int maxDroppedFramesToNotify; private final boolean deviceNeedsNoPostProcessWorkaround; private CodecMaxValues codecMaxValues; private boolean codecNeedsSetOutputSurfaceWorkaround; private boolean codecHandlesHdr10PlusOutOfBandMetadata; @Nullable private Surface surface; @Nullable private DummySurface dummySurface; private boolean haveReportedFirstFrameRenderedForCurrentSurface; private @C.VideoScalingMode int scalingMode; private boolean renderedFirstFrameAfterReset; private boolean mayRenderFirstFrameAfterEnableIfNotStarted; private boolean renderedFirstFrameAfterEnable; private long initialPositionUs; private long joiningDeadlineMs; private long droppedFrameAccumulationStartTimeMs; private int droppedFrames; private int consecutiveDroppedFrameCount; private int buffersInCodecCount; private long lastBufferPresentationTimeUs; private long lastRenderRealtimeUs; private long totalVideoFrameProcessingOffsetUs; private int videoFrameProcessingOffsetCount; private int currentWidth; private int currentHeight; private int currentUnappliedRotationDegrees; private float currentPixelWidthHeightRatio; @Nullable private VideoSize reportedVideoSize; private boolean tunneling; private int tunnelingAudioSessionId; /* package */ @Nullable OnFrameRenderedListenerV23 tunnelingOnFrameRenderedListener; @Nullable private VideoFrameMetadataListener frameMetadataListener; /** * @param context A context. * @param mediaCodecSelector A decoder selector. 
*/ public MediaCodecVideoRenderer(Context context, MediaCodecSelector mediaCodecSelector) { this(context, mediaCodecSelector, 0); } /** * @param context A context. * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. */ public MediaCodecVideoRenderer( Context context, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs) { this( context, mediaCodecSelector, allowedJoiningTimeMs, /* eventHandler= */ null, /* eventListener= */ null, /* maxDroppedFramesToNotify= */ 0); } /** * @param context A context. * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. */ public MediaCodecVideoRenderer( Context context, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs, @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) { this( context, MediaCodecAdapter.Factory.DEFAULT, mediaCodecSelector, allowedJoiningTimeMs, /* enableDecoderFallback= */ false, eventHandler, eventListener, maxDroppedFramesToNotify, /* assumedMinimumCodecOperatingRate= */ 30); } /** * @param context A context. * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. 
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is slower/less efficient than * the primary decoder. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. */ public MediaCodecVideoRenderer( Context context, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs, boolean enableDecoderFallback, @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) { this( context, MediaCodecAdapter.Factory.DEFAULT, mediaCodecSelector, allowedJoiningTimeMs, enableDecoderFallback, eventHandler, eventListener, maxDroppedFramesToNotify, /* assumedMinimumCodecOperatingRate= */ 30); } /** * @param context A context. * @param codecAdapterFactory The {@link MediaCodecAdapter.Factory} used to create {@link * MediaCodecAdapter} instances. * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is slower/less efficient than * the primary decoder. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. 
* @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. */ public MediaCodecVideoRenderer( Context context, MediaCodecAdapter.Factory codecAdapterFactory, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs, boolean enableDecoderFallback, @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, int maxDroppedFramesToNotify) { this( context, codecAdapterFactory, mediaCodecSelector, allowedJoiningTimeMs, enableDecoderFallback, eventHandler, eventListener, maxDroppedFramesToNotify, /* assumedMinimumCodecOperatingRate= */ 30); } /** * Creates a new instance. * * @param context A context. * @param codecAdapterFactory The {@link MediaCodecAdapter.Factory} used to create {@link * MediaCodecAdapter} instances. * @param mediaCodecSelector A decoder selector. * @param allowedJoiningTimeMs The maximum duration in milliseconds for which this video renderer * can attempt to seamlessly join an ongoing playback. * @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder * initialization fails. This may result in using a decoder that is slower/less efficient than * the primary decoder. * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required. * @param maxDroppedFramesToNotify The maximum number of frames that can be dropped between * invocations of {@link VideoRendererEventListener#onDroppedFrames(int, long)}. * @param assumedMinimumCodecOperatingRate A codec operating rate that all codecs instantiated by * this renderer are assumed to meet implicitly (i.e. without the operating rate being set * explicitly using {@link MediaFormat#KEY_OPERATING_RATE}). 
*/ public MediaCodecVideoRenderer( Context context, MediaCodecAdapter.Factory codecAdapterFactory, MediaCodecSelector mediaCodecSelector, long allowedJoiningTimeMs, boolean enableDecoderFallback, @Nullable Handler eventHandler, @Nullable VideoRendererEventListener eventListener, int maxDroppedFramesToNotify, float assumedMinimumCodecOperatingRate) { super( C.TRACK_TYPE_VIDEO, codecAdapterFactory, mediaCodecSelector, enableDecoderFallback, assumedMinimumCodecOperatingRate); this.allowedJoiningTimeMs = allowedJoiningTimeMs; this.maxDroppedFramesToNotify = maxDroppedFramesToNotify; this.context = context.getApplicationContext(); frameReleaseHelper = new VideoFrameReleaseHelper(this.context); eventDispatcher = new EventDispatcher(eventHandler, eventListener); deviceNeedsNoPostProcessWorkaround = deviceNeedsNoPostProcessWorkaround(); joiningDeadlineMs = C.TIME_UNSET; currentWidth = Format.NO_VALUE; currentHeight = Format.NO_VALUE; currentPixelWidthHeightRatio = Format.NO_VALUE; scalingMode = C.VIDEO_SCALING_MODE_DEFAULT; tunnelingAudioSessionId = C.AUDIO_SESSION_ID_UNSET; clearReportedVideoSize(); } @Override public String getName() { return TAG; } @Override protected @Capabilities int supportsFormat(MediaCodecSelector mediaCodecSelector, Format format) throws DecoderQueryException { String mimeType = format.sampleMimeType; if (!MimeTypes.isVideo(mimeType)) { return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_TYPE); } @Nullable DrmInitData drmInitData = format.drmInitData; // Assume encrypted content requires secure decoders. boolean requiresSecureDecryption = drmInitData != null; List<MediaCodecInfo> decoderInfos = getDecoderInfos( mediaCodecSelector, format, requiresSecureDecryption, /* requiresTunnelingDecoder= */ false); if (requiresSecureDecryption && decoderInfos.isEmpty()) { // No secure decoders are available. Fall back to non-secure decoders. 
decoderInfos = getDecoderInfos( mediaCodecSelector, format, /* requiresSecureDecoder= */ false, /* requiresTunnelingDecoder= */ false); } if (decoderInfos.isEmpty()) { return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_SUBTYPE); } if (!supportsFormatDrm(format)) { return RendererCapabilities.create(C.FORMAT_UNSUPPORTED_DRM); } // Check whether the first decoder supports the format. This is the preferred decoder for the // format's MIME type, according to the MediaCodecSelector. MediaCodecInfo decoderInfo = decoderInfos.get(0); boolean isFormatSupported = decoderInfo.isFormatSupported(format); boolean isPreferredDecoder = true; if (!isFormatSupported) { // Check whether any of the other decoders support the format. for (int i = 1; i < decoderInfos.size(); i++) { MediaCodecInfo otherDecoderInfo = decoderInfos.get(i); if (otherDecoderInfo.isFormatSupported(format)) { decoderInfo = otherDecoderInfo; isFormatSupported = true; isPreferredDecoder = false; break; } } } @C.FormatSupport int formatSupport = isFormatSupported ? C.FORMAT_HANDLED : C.FORMAT_EXCEEDS_CAPABILITIES; @AdaptiveSupport int adaptiveSupport = decoderInfo.isSeamlessAdaptationSupported(format) ? ADAPTIVE_SEAMLESS : ADAPTIVE_NOT_SEAMLESS; @HardwareAccelerationSupport int hardwareAccelerationSupport = decoderInfo.hardwareAccelerated ? HARDWARE_ACCELERATION_SUPPORTED : HARDWARE_ACCELERATION_NOT_SUPPORTED; @DecoderSupport int decoderSupport = isPreferredDecoder ? 
DECODER_SUPPORT_PRIMARY : DECODER_SUPPORT_FALLBACK; @TunnelingSupport int tunnelingSupport = TUNNELING_NOT_SUPPORTED; if (isFormatSupported) { List<MediaCodecInfo> tunnelingDecoderInfos = getDecoderInfos( mediaCodecSelector, format, requiresSecureDecryption, /* requiresTunnelingDecoder= */ true); if (!tunnelingDecoderInfos.isEmpty()) { MediaCodecInfo tunnelingDecoderInfo = MediaCodecUtil.getDecoderInfosSortedByFormatSupport(tunnelingDecoderInfos, format) .get(0); if (tunnelingDecoderInfo.isFormatSupported(format) && tunnelingDecoderInfo.isSeamlessAdaptationSupported(format)) { tunnelingSupport = TUNNELING_SUPPORTED; } } } return RendererCapabilities.create( formatSupport, adaptiveSupport, tunnelingSupport, hardwareAccelerationSupport, decoderSupport); } @Override protected List<MediaCodecInfo> getDecoderInfos( MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder) throws DecoderQueryException { return MediaCodecUtil.getDecoderInfosSortedByFormatSupport( getDecoderInfos(mediaCodecSelector, format, requiresSecureDecoder, tunneling), format); } /** * Returns a list of decoders that can decode media in the specified format, in the priority order * specified by the {@link MediaCodecSelector}. Note that since the {@link MediaCodecSelector} * only has access to {@link Format#sampleMimeType}, the list is not ordered to account for * whether each decoder supports the details of the format (e.g., taking into account the format's * profile, level, resolution and so on). {@link * MediaCodecUtil#getDecoderInfosSortedByFormatSupport} can be used to further sort the list into * an order where decoders that fully support the format come first. * * @param mediaCodecSelector The decoder selector. * @param format The {@link Format} for which a decoder is required. * @param requiresSecureDecoder Whether a secure decoder is required. * @param requiresTunnelingDecoder Whether a tunneling decoder is required. 
* @return A list of {@link MediaCodecInfo}s corresponding to decoders. May be empty. * @throws DecoderQueryException Thrown if there was an error querying decoders. */ private static List<MediaCodecInfo> getDecoderInfos( MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder, boolean requiresTunnelingDecoder) throws DecoderQueryException { @Nullable String mimeType = format.sampleMimeType; if (mimeType == null) { return ImmutableList.of(); } List<MediaCodecInfo> decoderInfos = mediaCodecSelector.getDecoderInfos( mimeType, requiresSecureDecoder, requiresTunnelingDecoder); @Nullable String alternativeMimeType = MediaCodecUtil.getAlternativeCodecMimeType(format); if (alternativeMimeType == null) { return ImmutableList.copyOf(decoderInfos); } List<MediaCodecInfo> alternativeDecoderInfos = mediaCodecSelector.getDecoderInfos( alternativeMimeType, requiresSecureDecoder, requiresTunnelingDecoder); return ImmutableList.<MediaCodecInfo>builder() .addAll(decoderInfos) .addAll(alternativeDecoderInfos) .build(); } @Override protected void onEnabled(boolean joining, boolean mayRenderStartOfStream) throws ExoPlaybackException { super.onEnabled(joining, mayRenderStartOfStream); boolean tunneling = getConfiguration().tunneling; Assertions.checkState(!tunneling || tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET); if (this.tunneling != tunneling) { this.tunneling = tunneling; releaseCodec(); } eventDispatcher.enabled(decoderCounters); mayRenderFirstFrameAfterEnableIfNotStarted = mayRenderStartOfStream; renderedFirstFrameAfterEnable = false; } @Override protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException { super.onPositionReset(positionUs, joining); clearRenderedFirstFrame(); frameReleaseHelper.onPositionReset(); lastBufferPresentationTimeUs = C.TIME_UNSET; initialPositionUs = C.TIME_UNSET; consecutiveDroppedFrameCount = 0; if (joining) { setJoiningDeadlineMs(); } else { joiningDeadlineMs = C.TIME_UNSET; } } 
@Override public boolean isReady() { if (super.isReady() && (renderedFirstFrameAfterReset || (dummySurface != null && surface == dummySurface) || getCodec() == null || tunneling)) { // Ready. If we were joining then we've now joined, so clear the joining deadline. joiningDeadlineMs = C.TIME_UNSET; return true; } else if (joiningDeadlineMs == C.TIME_UNSET) { // Not joining. return false; } else if (SystemClock.elapsedRealtime() < joiningDeadlineMs) { // Joining and still within the joining deadline. return true; } else { // The joining deadline has been exceeded. Give up and clear the deadline. joiningDeadlineMs = C.TIME_UNSET; return false; } } @Override protected void onStarted() { super.onStarted(); droppedFrames = 0; droppedFrameAccumulationStartTimeMs = SystemClock.elapsedRealtime(); lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; totalVideoFrameProcessingOffsetUs = 0; videoFrameProcessingOffsetCount = 0; frameReleaseHelper.onStarted(); } @Override protected void onStopped() { joiningDeadlineMs = C.TIME_UNSET; maybeNotifyDroppedFrames(); maybeNotifyVideoFrameProcessingOffset(); frameReleaseHelper.onStopped(); super.onStopped(); } @Override protected void onDisabled() { clearReportedVideoSize(); clearRenderedFirstFrame(); haveReportedFirstFrameRenderedForCurrentSurface = false; tunnelingOnFrameRenderedListener = null; try { super.onDisabled(); } finally { eventDispatcher.disabled(decoderCounters); } } @TargetApi(17) // Needed for dummySurface usage. dummySurface is always null on API level 16. 
@Override protected void onReset() { try { super.onReset(); } finally { if (dummySurface != null) { releaseDummySurface(); } } } @Override public void handleMessage(@MessageType int messageType, @Nullable Object message) throws ExoPlaybackException { switch (messageType) { case MSG_SET_VIDEO_OUTPUT: setOutput(message); break; case MSG_SET_SCALING_MODE: scalingMode = (Integer) message; @Nullable MediaCodecAdapter codec = getCodec(); if (codec != null) { codec.setVideoScalingMode(scalingMode); } break; case MSG_SET_CHANGE_FRAME_RATE_STRATEGY: frameReleaseHelper.setChangeFrameRateStrategy((int) message); break; case MSG_SET_VIDEO_FRAME_METADATA_LISTENER: frameMetadataListener = (VideoFrameMetadataListener) message; break; case MSG_SET_AUDIO_SESSION_ID: int tunnelingAudioSessionId = (int) message; if (this.tunnelingAudioSessionId != tunnelingAudioSessionId) { this.tunnelingAudioSessionId = tunnelingAudioSessionId; if (tunneling) { releaseCodec(); } } break; case MSG_SET_AUDIO_ATTRIBUTES: case MSG_SET_AUX_EFFECT_INFO: case MSG_SET_CAMERA_MOTION_LISTENER: case MSG_SET_SKIP_SILENCE_ENABLED: case MSG_SET_VOLUME: case MSG_SET_WAKEUP_LISTENER: default: super.handleMessage(messageType, message); } } private void setOutput(@Nullable Object output) throws ExoPlaybackException { // Handle unsupported (i.e., non-Surface) outputs by clearing the surface. @Nullable Surface surface = output instanceof Surface ? (Surface) output : null; if (surface == null) { // Use a dummy surface if possible. if (dummySurface != null) { surface = dummySurface; } else { MediaCodecInfo codecInfo = getCodecInfo(); if (codecInfo != null && shouldUseDummySurface(codecInfo)) { dummySurface = DummySurface.newInstanceV17(context, codecInfo.secure); surface = dummySurface; } } } // We only need to update the codec if the surface has changed. 
if (this.surface != surface) { this.surface = surface; frameReleaseHelper.onSurfaceChanged(surface); haveReportedFirstFrameRenderedForCurrentSurface = false; @State int state = getState(); @Nullable MediaCodecAdapter codec = getCodec(); if (codec != null) { if (Util.SDK_INT >= 23 && surface != null && !codecNeedsSetOutputSurfaceWorkaround) { setOutputSurfaceV23(codec, surface); } else { releaseCodec(); maybeInitCodecOrBypass(); } } if (surface != null && surface != dummySurface) { // If we know the video size, report it again immediately. maybeRenotifyVideoSizeChanged(); // We haven't rendered to the new surface yet. clearRenderedFirstFrame(); if (state == STATE_STARTED) { setJoiningDeadlineMs(); } } else { // The surface has been removed. clearReportedVideoSize(); clearRenderedFirstFrame(); } } else if (surface != null && surface != dummySurface) { // The surface is set and unchanged. If we know the video size and/or have already rendered to // the surface, report these again immediately. maybeRenotifyVideoSizeChanged(); maybeRenotifyRenderedFirstFrame(); } } @Override protected boolean shouldInitCodec(MediaCodecInfo codecInfo) { return surface != null || shouldUseDummySurface(codecInfo); } @Override protected boolean getCodecNeedsEosPropagation() { // Since API 23, onFrameRenderedListener allows for detection of the renderer EOS. return tunneling && Util.SDK_INT < 23; } @TargetApi(17) // Needed for dummySurface usage. dummySurface is always null on API level 16. @Override protected MediaCodecAdapter.Configuration getMediaCodecConfiguration( MediaCodecInfo codecInfo, Format format, @Nullable MediaCrypto crypto, float codecOperatingRate) { if (dummySurface != null && dummySurface.secure != codecInfo.secure) { // We can't re-use the current DummySurface instance with the new decoder. 
releaseDummySurface(); } String codecMimeType = codecInfo.codecMimeType; codecMaxValues = getCodecMaxValues(codecInfo, format, getStreamFormats()); MediaFormat mediaFormat = getMediaFormat( format, codecMimeType, codecMaxValues, codecOperatingRate, deviceNeedsNoPostProcessWorkaround, tunneling ? tunnelingAudioSessionId : C.AUDIO_SESSION_ID_UNSET); if (surface == null) { if (!shouldUseDummySurface(codecInfo)) { throw new IllegalStateException(); } if (dummySurface == null) { dummySurface = DummySurface.newInstanceV17(context, codecInfo.secure); } surface = dummySurface; } return MediaCodecAdapter.Configuration.createForVideoDecoding( codecInfo, mediaFormat, format, surface, crypto); } @Override protected DecoderReuseEvaluation canReuseCodec( MediaCodecInfo codecInfo, Format oldFormat, Format newFormat) { DecoderReuseEvaluation evaluation = codecInfo.canReuseCodec(oldFormat, newFormat); @DecoderDiscardReasons int discardReasons = evaluation.discardReasons; if (newFormat.width > codecMaxValues.width || newFormat.height > codecMaxValues.height) { discardReasons |= DISCARD_REASON_VIDEO_MAX_RESOLUTION_EXCEEDED; } if (getMaxInputSize(codecInfo, newFormat) > codecMaxValues.inputSize) { discardReasons |= DISCARD_REASON_MAX_INPUT_SIZE_EXCEEDED; } return new DecoderReuseEvaluation( codecInfo.name, oldFormat, newFormat, discardReasons != 0 ? 
// NOTE(review): tail of a decoder-reuse evaluation method that begins before this chunk — a
// result is built from evaluation.result and discardReasons. Left untouched.
REUSE_RESULT_NO : evaluation.result, discardReasons);
}

@CallSuper
@Override
protected void resetCodecStateForFlush() {
  super.resetCodecStateForFlush();
  // A flush releases all buffers queued in the codec, so reset the in-codec count.
  buffersInCodecCount = 0;
}

@Override
public void setPlaybackSpeed(float currentPlaybackSpeed, float targetPlaybackSpeed)
    throws ExoPlaybackException {
  super.setPlaybackSpeed(currentPlaybackSpeed, targetPlaybackSpeed);
  // Keep the frame release helper in sync with the active playback speed.
  frameReleaseHelper.onPlaybackSpeed(currentPlaybackSpeed);
}

@Override
protected float getCodecOperatingRateV23(
    float targetPlaybackSpeed, Format format, Format[] streamFormats) {
  // Use the highest known stream frame-rate up front, to avoid having to reconfigure the codec
  // should an adaptive switch to that stream occur.
  float maxFrameRate = -1;
  for (Format streamFormat : streamFormats) {
    float streamFrameRate = streamFormat.frameRate;
    if (streamFrameRate != Format.NO_VALUE) {
      maxFrameRate = max(maxFrameRate, streamFrameRate);
    }
  }
  // No stream declared a frame rate: leave the operating rate unset.
  return maxFrameRate == -1 ? CODEC_OPERATING_RATE_UNSET : (maxFrameRate * targetPlaybackSpeed);
}

@Override
protected void onCodecInitialized(
    String name,
    MediaCodecAdapter.Configuration configuration,
    long initializedTimestampMs,
    long initializationDurationMs) {
  eventDispatcher.decoderInitialized(name, initializedTimestampMs, initializationDurationMs);
  // Cache per-codec capabilities/workarounds that depend on the codec name.
  codecNeedsSetOutputSurfaceWorkaround = codecNeedsSetOutputSurfaceWorkaround(name);
  codecHandlesHdr10PlusOutOfBandMetadata =
      Assertions.checkNotNull(getCodecInfo()).isHdr10PlusOutOfBandMetadataSupported();
  if (Util.SDK_INT >= 23 && tunneling) {
    // In tunneled playback on API 23+, first-frame notification comes from this listener.
    tunnelingOnFrameRenderedListener =
        new OnFrameRenderedListenerV23(Assertions.checkNotNull(getCodec()));
  }
}

@Override
protected void onCodecReleased(String name) {
  eventDispatcher.decoderReleased(name);
}

@Override
protected void onCodecError(Exception codecError) {
  Log.e(TAG, "Video codec error", codecError);
  eventDispatcher.videoCodecError(codecError);
}

@Override
@Nullable
protected DecoderReuseEvaluation onInputFormatChanged(FormatHolder formatHolder)
    throws ExoPlaybackException {
  @Nullable DecoderReuseEvaluation evaluation = super.onInputFormatChanged(formatHolder);
  eventDispatcher.inputFormatChanged(formatHolder.format, evaluation);
  return evaluation;
}

/**
 * Called immediately before an input buffer is queued into the codec.
 *
 * <p>In tunneling mode for pre Marshmallow, the buffer is treated as if immediately output.
 *
 * @param buffer The buffer to be queued.
 * @throws ExoPlaybackException Thrown if an error occurs handling the input buffer.
 */
@CallSuper
@Override
protected void onQueueInputBuffer(DecoderInputBuffer buffer) throws ExoPlaybackException {
  // In tunneling mode the device may do frame rate conversion, so in general we can't keep track
  // of the number of buffers in the codec.
  if (!tunneling) {
    buffersInCodecCount++;
  }
  if (Util.SDK_INT < 23 && tunneling) {
    // In tunneled mode before API 23 we don't have a way to know when the buffer is output, so
    // treat it as if it were output immediately.
    onProcessedTunneledBuffer(buffer.timeUs);
  }
}

@Override
protected void onOutputFormatChanged(Format format, @Nullable MediaFormat mediaFormat) {
  @Nullable MediaCodecAdapter codec = getCodec();
  if (codec != null) {
    // Must be applied each time the output format changes.
    codec.setVideoScalingMode(scalingMode);
  }
  if (tunneling) {
    // In tunneling mode there is no output MediaFormat; use the stream format's dimensions.
    currentWidth = format.width;
    currentHeight = format.height;
  } else {
    Assertions.checkNotNull(mediaFormat);
    // Prefer the crop rectangle (inclusive bounds, hence the +1) when the codec reports one.
    boolean hasCrop =
        mediaFormat.containsKey(KEY_CROP_RIGHT)
            && mediaFormat.containsKey(KEY_CROP_LEFT)
            && mediaFormat.containsKey(KEY_CROP_BOTTOM)
            && mediaFormat.containsKey(KEY_CROP_TOP);
    currentWidth =
        hasCrop
            ? mediaFormat.getInteger(KEY_CROP_RIGHT) - mediaFormat.getInteger(KEY_CROP_LEFT) + 1
            : mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
    currentHeight =
        hasCrop
            ? mediaFormat.getInteger(KEY_CROP_BOTTOM) - mediaFormat.getInteger(KEY_CROP_TOP) + 1
            : mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
  }
  currentPixelWidthHeightRatio = format.pixelWidthHeightRatio;
  if (Util.SDK_INT >= 21) {
    // On API level 21 and above the decoder applies the rotation when rendering to the surface.
    // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need
    // to flip the width, height and pixel aspect ratio to reflect the rotation that was applied.
    if (format.rotationDegrees == 90 || format.rotationDegrees == 270) {
      int rotatedHeight = currentWidth;
      currentWidth = currentHeight;
      currentHeight = rotatedHeight;
      currentPixelWidthHeightRatio = 1 / currentPixelWidthHeightRatio;
    }
  } else {
    // On API level 20 and below the decoder does not apply the rotation.
    currentUnappliedRotationDegrees = format.rotationDegrees;
  }
  frameReleaseHelper.onFormatChanged(format.frameRate);
}

@Override
@TargetApi(29) // codecHandlesHdr10PlusOutOfBandMetadata is false if Util.SDK_INT < 29
protected void handleInputBufferSupplementalData(DecoderInputBuffer buffer)
    throws ExoPlaybackException {
  if (!codecHandlesHdr10PlusOutOfBandMetadata) {
    return;
  }
  ByteBuffer data = Assertions.checkNotNull(buffer.supplementalData);
  if (data.remaining() >= 7) {
    // Check for HDR10+ out-of-band metadata. See User_data_registered_itu_t_t35 in ST 2094-40.
    byte ituTT35CountryCode = data.get();
    int ituTT35TerminalProviderCode = data.getShort();
    int ituTT35TerminalProviderOrientedCode = data.getShort();
    byte applicationIdentifier = data.get();
    byte applicationVersion = data.get();
    // Rewind so the full payload (including the 7-byte header just read) can be copied below.
    data.position(0);
    if (ituTT35CountryCode == (byte) 0xB5
        && ituTT35TerminalProviderCode == 0x003C
        && ituTT35TerminalProviderOrientedCode == 0x0001
        && applicationIdentifier == 4
        && applicationVersion == 0) {
      // The metadata size may vary so allocate a new array every time. This is not too
      // inefficient because the metadata is only a few tens of bytes.
      byte[] hdr10PlusInfo = new byte[data.remaining()];
      data.get(hdr10PlusInfo);
      data.position(0);
      setHdr10PlusInfoV29(getCodec(), hdr10PlusInfo);
    }
  }
}

@Override
protected boolean processOutputBuffer(
    long positionUs,
    long elapsedRealtimeUs,
    @Nullable MediaCodecAdapter codec,
    @Nullable ByteBuffer buffer,
    int bufferIndex,
    int bufferFlags,
    int sampleCount,
    long bufferPresentationTimeUs,
    boolean isDecodeOnlyBuffer,
    boolean isLastBuffer,
    Format format)
    throws ExoPlaybackException {
  Assertions.checkNotNull(codec); // Can not render video without codec
  if (initialPositionUs == C.TIME_UNSET) {
    initialPositionUs = positionUs;
  }
  if (bufferPresentationTimeUs != lastBufferPresentationTimeUs) {
    frameReleaseHelper.onNextFrame(bufferPresentationTimeUs);
    this.lastBufferPresentationTimeUs = bufferPresentationTimeUs;
  }
  long outputStreamOffsetUs = getOutputStreamOffsetUs();
  long presentationTimeUs = bufferPresentationTimeUs - outputStreamOffsetUs;
  if (isDecodeOnlyBuffer && !isLastBuffer) {
    skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
    return true;
  }
  // Note: Use of double rather than float is intentional for accuracy in the calculations below.
  double playbackSpeed = getPlaybackSpeed();
  boolean isStarted = getState() == STATE_STARTED;
  long elapsedRealtimeNowUs = SystemClock.elapsedRealtime() * 1000;
  // Calculate how early we are. In other words, the realtime duration that needs to elapse whilst
  // the renderer is started before the frame should be rendered. A negative value means that
  // we're already late.
  long earlyUs = (long) ((bufferPresentationTimeUs - positionUs) / playbackSpeed);
  if (isStarted) {
    // Account for the elapsed time since the start of this iteration of the rendering loop.
    earlyUs -= elapsedRealtimeNowUs - elapsedRealtimeUs;
  }
  if (surface == dummySurface) {
    // Skip frames in sync with playback, so we'll be at the right frame if the mode changes.
    if (isBufferLate(earlyUs)) {
      skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
      updateVideoFrameProcessingOffsetCounters(earlyUs);
      return true;
    }
    return false;
  }
  long elapsedSinceLastRenderUs = elapsedRealtimeNowUs - lastRenderRealtimeUs;
  boolean shouldRenderFirstFrame =
      !renderedFirstFrameAfterEnable
          ? (isStarted || mayRenderFirstFrameAfterEnableIfNotStarted)
          : !renderedFirstFrameAfterReset;
  // Don't force output until we joined and the position reached the current stream.
  boolean forceRenderOutputBuffer =
      joiningDeadlineMs == C.TIME_UNSET
          && positionUs >= outputStreamOffsetUs
          && (shouldRenderFirstFrame
              || (isStarted && shouldForceRenderOutputBuffer(earlyUs, elapsedSinceLastRenderUs)));
  if (forceRenderOutputBuffer) {
    long releaseTimeNs = System.nanoTime();
    notifyFrameMetadataListener(presentationTimeUs, releaseTimeNs, format);
    if (Util.SDK_INT >= 21) {
      renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, releaseTimeNs);
    } else {
      renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
    }
    updateVideoFrameProcessingOffsetCounters(earlyUs);
    return true;
  }
  if (!isStarted || positionUs == initialPositionUs) {
    return false;
  }
  // Compute the buffer's desired release time in nanoseconds.
  long systemTimeNs = System.nanoTime();
  long unadjustedFrameReleaseTimeNs = systemTimeNs + (earlyUs * 1000);
  // Apply a timestamp adjustment, if there is one.
  long adjustedReleaseTimeNs = frameReleaseHelper.adjustReleaseTime(unadjustedFrameReleaseTimeNs);
  earlyUs = (adjustedReleaseTimeNs - systemTimeNs) / 1000;
  boolean treatDroppedBuffersAsSkipped = joiningDeadlineMs != C.TIME_UNSET;
  if (shouldDropBuffersToKeyframe(earlyUs, elapsedRealtimeUs, isLastBuffer)
      && maybeDropBuffersToKeyframe(positionUs, treatDroppedBuffersAsSkipped)) {
    return false;
  } else if (shouldDropOutputBuffer(earlyUs, elapsedRealtimeUs, isLastBuffer)) {
    if (treatDroppedBuffersAsSkipped) {
      skipOutputBuffer(codec, bufferIndex, presentationTimeUs);
    } else {
      dropOutputBuffer(codec, bufferIndex, presentationTimeUs);
    }
    updateVideoFrameProcessingOffsetCounters(earlyUs);
    return true;
  }
  if (Util.SDK_INT >= 21) {
    // Let the underlying framework time the release.
    if (earlyUs < 50000) {
      notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
      renderOutputBufferV21(codec, bufferIndex, presentationTimeUs, adjustedReleaseTimeNs);
      updateVideoFrameProcessingOffsetCounters(earlyUs);
      return true;
    }
  } else {
    // We need to time the release ourselves.
    if (earlyUs < 30000) {
      if (earlyUs > 11000) {
        // We're a little too early to render the frame. Sleep until the frame can be rendered.
        // Note: The 11ms threshold was chosen fairly arbitrarily.
        try {
          // Subtracting 10000 rather than 11000 ensures the sleep time will be at least 1ms.
          Thread.sleep((earlyUs - 10000) / 1000);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          return false;
        }
      }
      notifyFrameMetadataListener(presentationTimeUs, adjustedReleaseTimeNs, format);
      renderOutputBuffer(codec, bufferIndex, presentationTimeUs);
      updateVideoFrameProcessingOffsetCounters(earlyUs);
      return true;
    }
  }
  // We're either not playing, or it's not time to render the frame yet.
  return false;
}

// Forwards frame metadata to the registered listener, if any, just before rendering.
private void notifyFrameMetadataListener(
    long presentationTimeUs, long releaseTimeNs, Format format) {
  if (frameMetadataListener != null) {
    frameMetadataListener.onVideoFrameAboutToBeRendered(
        presentationTimeUs, releaseTimeNs, format, getCodecOutputMediaFormat());
  }
}

/** Called when a buffer was processed in tunneling mode. */
protected void onProcessedTunneledBuffer(long presentationTimeUs) throws ExoPlaybackException {
  updateOutputFormatForTime(presentationTimeUs);
  maybeNotifyVideoSizeChanged();
  decoderCounters.renderedOutputBufferCount++;
  maybeNotifyRenderedFirstFrame();
  onProcessedOutputBuffer(presentationTimeUs);
}

/** Called when an output EOS was received in tunneling mode. */
private void onProcessedTunneledEndOfStream() {
  setPendingOutputEndOfStream();
}

@CallSuper
@Override
protected void onProcessedOutputBuffer(long presentationTimeUs) {
  super.onProcessedOutputBuffer(presentationTimeUs);
  // In tunneling mode the in-codec buffer count isn't tracked (see onQueueInputBuffer).
  if (!tunneling) {
    buffersInCodecCount--;
  }
}

@Override
protected void onProcessedStreamChange() {
  super.onProcessedStreamChange();
  // A new stream should report its own first rendered frame.
  clearRenderedFirstFrame();
}

/**
 * Returns whether the buffer being processed should be dropped.
 *
 * @param earlyUs The time until the buffer should be presented in microseconds. A negative value
 *     indicates that the buffer is late.
 * @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds,
 *     measured at the start of the current iteration of the rendering loop.
 * @param isLastBuffer Whether the buffer is the last buffer in the current stream.
 */
protected boolean shouldDropOutputBuffer(
    long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) {
  return isBufferLate(earlyUs) && !isLastBuffer;
}

/**
 * Returns whether to drop all buffers from the buffer being processed to the keyframe at or after
 * the current playback position, if possible.
 *
 * @param earlyUs The time until the current buffer should be presented in microseconds. A
 *     negative value indicates that the buffer is late.
* @param elapsedRealtimeUs {@link android.os.SystemClock#elapsedRealtime()} in microseconds, * measured at the start of the current iteration of the rendering loop. * @param isLastBuffer Whether the buffer is the last buffer in the current stream. */ protected boolean shouldDropBuffersToKeyframe( long earlyUs, long elapsedRealtimeUs, boolean isLastBuffer) { return isBufferVeryLate(earlyUs) && !isLastBuffer; } /** * Returns whether to force rendering an output buffer. * * @param earlyUs The time until the current buffer should be presented in microseconds. A * negative value indicates that the buffer is late. * @param elapsedSinceLastRenderUs The elapsed time since the last output buffer was rendered, in * microseconds. * @return Returns whether to force rendering an output buffer. */ protected boolean shouldForceRenderOutputBuffer(long earlyUs, long elapsedSinceLastRenderUs) { // Force render late buffers every 100ms to avoid frozen video effect. return isBufferLate(earlyUs) && elapsedSinceLastRenderUs > 100000; } /** * Skips the output buffer with the specified index. * * @param codec The codec that owns the output buffer. * @param index The index of the output buffer to skip. * @param presentationTimeUs The presentation time of the output buffer, in microseconds. */ protected void skipOutputBuffer(MediaCodecAdapter codec, int index, long presentationTimeUs) { TraceUtil.beginSection("skipVideoBuffer"); codec.releaseOutputBuffer(index, false); TraceUtil.endSection(); decoderCounters.skippedOutputBufferCount++; } /** * Drops the output buffer with the specified index. * * @param codec The codec that owns the output buffer. * @param index The index of the output buffer to drop. * @param presentationTimeUs The presentation time of the output buffer, in microseconds. 
*/ protected void dropOutputBuffer(MediaCodecAdapter codec, int index, long presentationTimeUs) { TraceUtil.beginSection("dropVideoBuffer"); codec.releaseOutputBuffer(index, false); TraceUtil.endSection(); updateDroppedBufferCounters( /* droppedInputBufferCount= */ 0, /* droppedDecoderBufferCount= */ 1); } /** * Drops frames from the current output buffer to the next keyframe at or before the playback * position. If no such keyframe exists, as the playback position is inside the same group of * pictures as the buffer being processed, returns {@code false}. Returns {@code true} otherwise. * * @param positionUs The current playback position, in microseconds. * @param treatDroppedBuffersAsSkipped Whether dropped buffers should be treated as intentionally * skipped. * @return Whether any buffers were dropped. * @throws ExoPlaybackException If an error occurs flushing the codec. */ protected boolean maybeDropBuffersToKeyframe( long positionUs, boolean treatDroppedBuffersAsSkipped) throws ExoPlaybackException { int droppedSourceBufferCount = skipSource(positionUs); if (droppedSourceBufferCount == 0) { return false; } // We dropped some buffers to catch up, so update the decoder counters and flush the codec, // which releases all pending buffers buffers including the current output buffer. if (treatDroppedBuffersAsSkipped) { decoderCounters.skippedInputBufferCount += droppedSourceBufferCount; decoderCounters.skippedOutputBufferCount += buffersInCodecCount; } else { decoderCounters.droppedToKeyframeCount++; updateDroppedBufferCounters( droppedSourceBufferCount, /* droppedDecoderBufferCount= */ buffersInCodecCount); } flushOrReinitializeCodec(); return true; } /** * Updates local counters and {@link #decoderCounters} to reflect that buffers were dropped. * * @param droppedInputBufferCount The number of buffers dropped from the source before being * passed to the decoder. * @param droppedDecoderBufferCount The number of buffers dropped after being passed to the * decoder. 
*/ protected void updateDroppedBufferCounters( int droppedInputBufferCount, int droppedDecoderBufferCount) { decoderCounters.droppedInputBufferCount += droppedInputBufferCount; int totalDroppedBufferCount = droppedInputBufferCount + droppedDecoderBufferCount; decoderCounters.droppedBufferCount += totalDroppedBufferCount; droppedFrames += totalDroppedBufferCount; consecutiveDroppedFrameCount += totalDroppedBufferCount; decoderCounters.maxConsecutiveDroppedBufferCount = max(consecutiveDroppedFrameCount, decoderCounters.maxConsecutiveDroppedBufferCount); if (maxDroppedFramesToNotify > 0 && droppedFrames >= maxDroppedFramesToNotify) { maybeNotifyDroppedFrames(); } } /** * Updates local counters and {@link DecoderCounters} with a new video frame processing offset. * * @param processingOffsetUs The video frame processing offset. */ protected void updateVideoFrameProcessingOffsetCounters(long processingOffsetUs) { decoderCounters.addVideoFrameProcessingOffset(processingOffsetUs); totalVideoFrameProcessingOffsetUs += processingOffsetUs; videoFrameProcessingOffsetCount++; } /** * Renders the output buffer with the specified index. This method is only called if the platform * API version of the device is less than 21. * * @param codec The codec that owns the output buffer. * @param index The index of the output buffer to drop. * @param presentationTimeUs The presentation time of the output buffer, in microseconds. */ protected void renderOutputBuffer(MediaCodecAdapter codec, int index, long presentationTimeUs) { maybeNotifyVideoSizeChanged(); TraceUtil.beginSection("releaseOutputBuffer"); codec.releaseOutputBuffer(index, true); TraceUtil.endSection(); lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000; decoderCounters.renderedOutputBufferCount++; consecutiveDroppedFrameCount = 0; maybeNotifyRenderedFirstFrame(); } /** * Renders the output buffer with the specified index. This method is only called if the platform * API version of the device is 21 or later. 
 *
 * @param codec The codec that owns the output buffer.
 * @param index The index of the output buffer to render.
 * @param presentationTimeUs The presentation time of the output buffer, in microseconds.
 * @param releaseTimeNs The wallclock time at which the frame should be displayed, in nanoseconds.
 */
@RequiresApi(21)
protected void renderOutputBufferV21(
    MediaCodecAdapter codec, int index, long presentationTimeUs, long releaseTimeNs) {
  maybeNotifyVideoSizeChanged();
  TraceUtil.beginSection("releaseOutputBuffer");
  // On API 21+ the framework times the release using releaseTimeNs.
  codec.releaseOutputBuffer(index, releaseTimeNs);
  TraceUtil.endSection();
  lastRenderRealtimeUs = SystemClock.elapsedRealtime() * 1000;
  decoderCounters.renderedOutputBufferCount++;
  consecutiveDroppedFrameCount = 0;
  maybeNotifyRenderedFirstFrame();
}

// Whether a DummySurface can be used for codec output when no real surface is set.
private boolean shouldUseDummySurface(MediaCodecInfo codecInfo) {
  return Util.SDK_INT >= 23
      && !tunneling
      && !codecNeedsSetOutputSurfaceWorkaround(codecInfo.name)
      && (!codecInfo.secure || DummySurface.isSecureSupported(context));
}

@RequiresApi(17)
private void releaseDummySurface() {
  // Clear the active surface reference first if it points at the dummy surface.
  if (surface == dummySurface) {
    surface = null;
  }
  dummySurface.release();
  dummySurface = null;
}

private void setJoiningDeadlineMs() {
  // A non-positive allowed joining time disables the joining deadline.
  joiningDeadlineMs =
      allowedJoiningTimeMs > 0
          ? (SystemClock.elapsedRealtime() + allowedJoiningTimeMs)
          : C.TIME_UNSET;
}

private void clearRenderedFirstFrame() {
  renderedFirstFrameAfterReset = false;
  // The first frame notification is triggered by renderOutputBuffer or renderOutputBufferV21 for
  // non-tunneled playback, onQueueInputBuffer for tunneled playback prior to API level 23, and
  // OnFrameRenderedListenerV23.onFrameRenderedListener for tunneled playback on API level 23 and
  // above.
  if (Util.SDK_INT >= 23 && tunneling) {
    @Nullable MediaCodecAdapter codec = getCodec();
    // If codec is null then the listener will be instantiated in configureCodec.
    if (codec != null) {
      tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(codec);
    }
  }
}

// Reports the first rendered frame (once per reset) to the event dispatcher.
/* package */ void maybeNotifyRenderedFirstFrame() {
  renderedFirstFrameAfterEnable = true;
  if (!renderedFirstFrameAfterReset) {
    renderedFirstFrameAfterReset = true;
    eventDispatcher.renderedFirstFrame(surface);
    haveReportedFirstFrameRenderedForCurrentSurface = true;
  }
}

// Re-reports the first-frame event for the current surface, if one was already reported.
private void maybeRenotifyRenderedFirstFrame() {
  if (haveReportedFirstFrameRenderedForCurrentSurface) {
    eventDispatcher.renderedFirstFrame(surface);
  }
}

private void clearReportedVideoSize() {
  reportedVideoSize = null;
}

// Reports the current video size, but only when it's known and differs from the last report.
private void maybeNotifyVideoSizeChanged() {
  if ((currentWidth != Format.NO_VALUE || currentHeight != Format.NO_VALUE)
      && (reportedVideoSize == null
          || reportedVideoSize.width != currentWidth
          || reportedVideoSize.height != currentHeight
          || reportedVideoSize.unappliedRotationDegrees != currentUnappliedRotationDegrees
          || reportedVideoSize.pixelWidthHeightRatio != currentPixelWidthHeightRatio)) {
    reportedVideoSize =
        new VideoSize(
            currentWidth,
            currentHeight,
            currentUnappliedRotationDegrees,
            currentPixelWidthHeightRatio);
    eventDispatcher.videoSizeChanged(reportedVideoSize);
  }
}

private void maybeRenotifyVideoSizeChanged() {
  if (reportedVideoSize != null) {
    eventDispatcher.videoSizeChanged(reportedVideoSize);
  }
}

// Reports accumulated dropped frames and resets the accumulation window.
private void maybeNotifyDroppedFrames() {
  if (droppedFrames > 0) {
    long now = SystemClock.elapsedRealtime();
    long elapsedMs = now - droppedFrameAccumulationStartTimeMs;
    eventDispatcher.droppedFrames(droppedFrames, elapsedMs);
    droppedFrames = 0;
    droppedFrameAccumulationStartTimeMs = now;
  }
}

// Reports and resets the accumulated frame processing offset totals.
private void maybeNotifyVideoFrameProcessingOffset() {
  if (videoFrameProcessingOffsetCount != 0) {
    eventDispatcher.reportVideoFrameProcessingOffset(
        totalVideoFrameProcessingOffsetUs, videoFrameProcessingOffsetCount);
    totalVideoFrameProcessingOffsetUs = 0;
    videoFrameProcessingOffsetCount = 0;
  }
}

private static boolean isBufferLate(long earlyUs) {
  // Class a buffer as late if it should have been presented more than 30 ms ago.
  return earlyUs < -30000;
}

private static boolean isBufferVeryLate(long earlyUs) {
  // Class a buffer as very late if it should have been presented more than 500 ms ago.
  return earlyUs < -500000;
}

@RequiresApi(29)
private static void setHdr10PlusInfoV29(MediaCodecAdapter codec, byte[] hdr10PlusInfo) {
  // Pass HDR10+ dynamic metadata to the codec via codec parameters.
  Bundle codecParameters = new Bundle();
  codecParameters.putByteArray(MediaCodec.PARAMETER_KEY_HDR10_PLUS_INFO, hdr10PlusInfo);
  codec.setParameters(codecParameters);
}

@RequiresApi(23)
protected void setOutputSurfaceV23(MediaCodecAdapter codec, Surface surface) {
  codec.setOutputSurface(surface);
}

@RequiresApi(21)
private static void configureTunnelingV21(MediaFormat mediaFormat, int tunnelingAudioSessionId) {
  mediaFormat.setFeatureEnabled(CodecCapabilities.FEATURE_TunneledPlayback, true);
  mediaFormat.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, tunnelingAudioSessionId);
}

/**
 * Returns the framework {@link MediaFormat} that should be used to configure the decoder.
 *
 * @param format The {@link Format} of media.
 * @param codecMimeType The MIME type handled by the codec.
 * @param codecMaxValues Codec max values that should be used when configuring the decoder.
 * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if
 *     no codec operating rate should be set.
 * @param deviceNeedsNoPostProcessWorkaround Whether the device is known to do post processing by
 *     default that isn't compatible with ExoPlayer.
 * @param tunnelingAudioSessionId The audio session id to use for tunneling, or {@link
 *     C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled.
 * @return The framework {@link MediaFormat} that should be used to configure the decoder.
 */
@SuppressLint("InlinedApi")
@TargetApi(21) // tunnelingAudioSessionId is unset if Util.SDK_INT < 21
protected MediaFormat getMediaFormat(
    Format format,
    String codecMimeType,
    CodecMaxValues codecMaxValues,
    float codecOperatingRate,
    boolean deviceNeedsNoPostProcessWorkaround,
    int tunnelingAudioSessionId) {
  MediaFormat mediaFormat = new MediaFormat();
  // Set format parameters that should always be set.
  mediaFormat.setString(MediaFormat.KEY_MIME, codecMimeType);
  mediaFormat.setInteger(MediaFormat.KEY_WIDTH, format.width);
  mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, format.height);
  MediaFormatUtil.setCsdBuffers(mediaFormat, format.initializationData);
  // Set format parameters that may be unset.
  MediaFormatUtil.maybeSetFloat(mediaFormat, MediaFormat.KEY_FRAME_RATE, format.frameRate);
  MediaFormatUtil.maybeSetInteger(mediaFormat, MediaFormat.KEY_ROTATION, format.rotationDegrees);
  MediaFormatUtil.maybeSetColorInfo(mediaFormat, format.colorInfo);
  if (MimeTypes.VIDEO_DOLBY_VISION.equals(format.sampleMimeType)) {
    // Some phones require the profile to be set on the codec.
    // See https://github.com/google/ExoPlayer/pull/5438.
    Pair<Integer, Integer> codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format);
    if (codecProfileAndLevel != null) {
      MediaFormatUtil.maybeSetInteger(
          mediaFormat, MediaFormat.KEY_PROFILE, codecProfileAndLevel.first);
    }
  }
  // Set codec max values.
  mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, codecMaxValues.width);
  mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, codecMaxValues.height);
  MediaFormatUtil.maybeSetInteger(
      mediaFormat, MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxValues.inputSize);
  // Set codec configuration values.
  if (Util.SDK_INT >= 23) {
    mediaFormat.setInteger(MediaFormat.KEY_PRIORITY, 0 /* realtime priority */);
    if (codecOperatingRate != CODEC_OPERATING_RATE_UNSET) {
      mediaFormat.setFloat(MediaFormat.KEY_OPERATING_RATE, codecOperatingRate);
    }
  }
  if (deviceNeedsNoPostProcessWorkaround) {
    mediaFormat.setInteger("no-post-process", 1);
    mediaFormat.setInteger("auto-frc", 0);
  }
  if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
    configureTunnelingV21(mediaFormat, tunnelingAudioSessionId);
  }
  return mediaFormat;
}

/**
 * Returns {@link CodecMaxValues} suitable for configuring a codec for {@code format} in a way
 * that will allow possible adaptation to other compatible formats in {@code streamFormats}.
 *
 * @param codecInfo Information about the {@link MediaCodec} being configured.
 * @param format The {@link Format} for which the codec is being configured.
 * @param streamFormats The possible stream formats.
 * @return Suitable {@link CodecMaxValues}.
 */
protected CodecMaxValues getCodecMaxValues(
    MediaCodecInfo codecInfo, Format format, Format[] streamFormats) {
  int maxWidth = format.width;
  int maxHeight = format.height;
  int maxInputSize = getMaxInputSize(codecInfo, format);
  if (streamFormats.length == 1) {
    // The single entry in streamFormats must correspond to the format for which the codec is
    // being configured.
    if (maxInputSize != Format.NO_VALUE) {
      int codecMaxInputSize = getCodecMaxInputSize(codecInfo, format);
      if (codecMaxInputSize != Format.NO_VALUE) {
        // Scale up the initial video decoder maximum input size so playlist item transitions with
        // small increases in maximum sample size don't require reinitialization. This only makes
        // a difference if the exact maximum sample sizes are known from the container.
        int scaledMaxInputSize =
            (int) (maxInputSize * INITIAL_FORMAT_MAX_INPUT_SIZE_SCALE_FACTOR);
        // Avoid exceeding the maximum expected for the codec.
        maxInputSize = min(scaledMaxInputSize, codecMaxInputSize);
      }
    }
    return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
  }
  boolean haveUnknownDimensions = false;
  for (Format streamFormat : streamFormats) {
    if (format.colorInfo != null && streamFormat.colorInfo == null) {
      // streamFormat likely has incomplete color information. Copy the complete color information
      // from format to avoid codec re-use being ruled out for only this reason.
      streamFormat = streamFormat.buildUpon().setColorInfo(format.colorInfo).build();
    }
    if (codecInfo.canReuseCodec(format, streamFormat).result != REUSE_RESULT_NO) {
      haveUnknownDimensions |=
          (streamFormat.width == Format.NO_VALUE || streamFormat.height == Format.NO_VALUE);
      maxWidth = max(maxWidth, streamFormat.width);
      maxHeight = max(maxHeight, streamFormat.height);
      maxInputSize = max(maxInputSize, getMaxInputSize(codecInfo, streamFormat));
    }
  }
  if (haveUnknownDimensions) {
    Log.w(TAG, "Resolutions unknown. Codec max resolution: " + maxWidth + "x" + maxHeight);
    Point codecMaxSize = getCodecMaxSize(codecInfo, format);
    if (codecMaxSize != null) {
      maxWidth = max(maxWidth, codecMaxSize.x);
      maxHeight = max(maxHeight, codecMaxSize.y);
      maxInputSize =
          max(
              maxInputSize,
              getCodecMaxInputSize(
                  codecInfo, format.buildUpon().setWidth(maxWidth).setHeight(maxHeight).build()));
      Log.w(TAG, "Codec max resolution adjusted to: " + maxWidth + "x" + maxHeight);
    }
  }
  return new CodecMaxValues(maxWidth, maxHeight, maxInputSize);
}

@Override
protected MediaCodecDecoderException createDecoderException(
    Throwable cause, @Nullable MediaCodecInfo codecInfo) {
  // Include the current surface in the exception for video-specific diagnostics.
  return new MediaCodecVideoDecoderException(cause, codecInfo, surface);
}

/**
 * Returns a maximum video size to use when configuring a codec for {@code format} in a way that
 * will allow possible adaptation to other compatible formats that are expected to have the same
 * aspect ratio, but whose sizes are unknown.
 *
 * @param codecInfo Information about the {@link MediaCodec} being configured.
 * @param format The {@link Format} for which the codec is being configured.
 * @return The maximum video size to use, or null if the size of {@code format} should be used.
 */
private static Point getCodecMaxSize(MediaCodecInfo codecInfo, Format format) {
  boolean isVerticalVideo = format.height > format.width;
  int formatLongEdgePx = isVerticalVideo ? format.height : format.width;
  int formatShortEdgePx = isVerticalVideo ? format.width : format.height;
  float aspectRatio = (float) formatShortEdgePx / formatLongEdgePx;
  for (int longEdgePx : STANDARD_LONG_EDGE_VIDEO_PX) {
    int shortEdgePx = (int) (longEdgePx * aspectRatio);
    if (longEdgePx <= formatLongEdgePx || shortEdgePx <= formatShortEdgePx) {
      // Don't return a size not larger than the format for which the codec is being configured.
      return null;
    } else if (Util.SDK_INT >= 21) {
      Point alignedSize =
          codecInfo.alignVideoSizeV21(
              isVerticalVideo ? shortEdgePx : longEdgePx,
              isVerticalVideo ? longEdgePx : shortEdgePx);
      float frameRate = format.frameRate;
      if (codecInfo.isVideoSizeAndRateSupportedV21(alignedSize.x, alignedSize.y, frameRate)) {
        return alignedSize;
      }
    } else {
      try {
        // Conservatively assume the codec requires 16px width and height alignment.
        longEdgePx = Util.ceilDivide(longEdgePx, 16) * 16;
        shortEdgePx = Util.ceilDivide(shortEdgePx, 16) * 16;
        if (longEdgePx * shortEdgePx <= MediaCodecUtil.maxH264DecodableFrameSize()) {
          return new Point(
              isVerticalVideo ? shortEdgePx : longEdgePx,
              isVerticalVideo ? longEdgePx : shortEdgePx);
        }
      } catch (DecoderQueryException e) {
        // We tried our best. Give up!
        return null;
      }
    }
  }
  return null;
}

/**
 * Returns a maximum input buffer size for a given {@link MediaCodec} and {@link Format}.
 *
 * @param codecInfo Information about the {@link MediaCodec} being configured.
 * @param format The format.
 * @return A maximum input buffer size in bytes, or {@link Format#NO_VALUE} if a maximum could not
 *     be determined.
 */
protected static int getMaxInputSize(MediaCodecInfo codecInfo, Format format) {
  if (format.maxInputSize != Format.NO_VALUE) {
    // The format defines an explicit maximum input size. Add the total size of initialization
    // data buffers, as they may need to be queued in the same input buffer as the largest sample.
    int totalInitializationDataSize = 0;
    int initializationDataCount = format.initializationData.size();
    for (int i = 0; i < initializationDataCount; i++) {
      totalInitializationDataSize += format.initializationData.get(i).length;
    }
    return format.maxInputSize + totalInitializationDataSize;
  } else {
    return getCodecMaxInputSize(codecInfo, format);
  }
}

/**
 * Returns a maximum input size for a given codec and format.
 *
 * @param codecInfo Information about the {@link MediaCodec} being configured.
 * @param format The format.
 * @return A maximum input size in bytes, or {@link Format#NO_VALUE} if a maximum could not be
 *     determined.
 */
private static int getCodecMaxInputSize(MediaCodecInfo codecInfo, Format format) {
  int width = format.width;
  int height = format.height;
  if (width == Format.NO_VALUE || height == Format.NO_VALUE) {
    // We can't infer a maximum input size without video dimensions.
    return Format.NO_VALUE;
  }
  String sampleMimeType = format.sampleMimeType;
  if (MimeTypes.VIDEO_DOLBY_VISION.equals(sampleMimeType)) {
    // Dolby vision can be a wrapper around H264 or H265. We assume it's wrapping H265 by default
    // because it's the common case, and because some devices may fail to allocate the codec when
    // the larger buffer size required for H264 is requested. We size buffers for H264 only if the
    // format contains sufficient information for us to determine unambiguously that it's a H264
    // profile.
    sampleMimeType = MimeTypes.VIDEO_H265;
    @Nullable
    Pair<Integer, Integer> codecProfileAndLevel = MediaCodecUtil.getCodecProfileAndLevel(format);
    if (codecProfileAndLevel != null) {
      int profile = codecProfileAndLevel.first;
      if (profile == CodecProfileLevel.DolbyVisionProfileDvavSe
          || profile == CodecProfileLevel.DolbyVisionProfileDvavPer
          || profile == CodecProfileLevel.DolbyVisionProfileDvavPen) {
        sampleMimeType = MimeTypes.VIDEO_H264;
      }
    }
  }
  // Attempt to infer a maximum input size from the format.
  int maxPixels;
  int minCompressionRatio;
  switch (sampleMimeType) {
    case MimeTypes.VIDEO_H263:
    case MimeTypes.VIDEO_MP4V:
      maxPixels = width * height;
      minCompressionRatio = 2;
      break;
    case MimeTypes.VIDEO_H264:
      if ("BRAVIA 4K 2015".equals(Util.MODEL) // Sony Bravia 4K
          || ("Amazon".equals(Util.MANUFACTURER)
              && ("KFSOWI".equals(Util.MODEL) // Kindle Soho
                  || ("AFTS".equals(Util.MODEL) && codecInfo.secure)))) { // Fire TV Gen 2
        // Use the default value for cases where platform limitations may prevent buffers of the
        // calculated maximum input size from being allocated.
        return Format.NO_VALUE;
      }
      // Round up width/height to an integer number of macroblocks.
      maxPixels = Util.ceilDivide(width, 16) * Util.ceilDivide(height, 16) * 16 * 16;
      minCompressionRatio = 2;
      break;
    case MimeTypes.VIDEO_VP8:
      // VPX does not specify a ratio so use the values from the platform's SoftVPX.cpp.
      maxPixels = width * height;
      minCompressionRatio = 2;
      break;
    case MimeTypes.VIDEO_H265:
    case MimeTypes.VIDEO_VP9:
      maxPixels = width * height;
      minCompressionRatio = 4;
      break;
    default:
      // Leave the default max input size.
      return Format.NO_VALUE;
  }
  // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames.
  return (maxPixels * 3) / (2 * minCompressionRatio);
}

/**
 * Returns whether the device is known to do post processing by default that isn't compatible with
 * ExoPlayer.
 *
 * @return Whether the device is known to do post processing by default that isn't compatible with
 *     ExoPlayer.
*/ private static boolean deviceNeedsNoPostProcessWorkaround() { // Nvidia devices prior to M try to adjust the playback rate to better map the frame-rate of // content to the refresh rate of the display. For example playback of 23.976fps content is // adjusted to play at 1.001x speed when the output display is 60Hz. Unfortunately the // implementation causes ExoPlayer's reported playback position to drift out of sync. Captions // also lose sync [Internal: b/26453592]. Even after M, the devices may apply post processing // operations that can modify frame output timestamps, which is incompatible with ExoPlayer's // logic for skipping decode-only frames. return "NVIDIA".equals(Util.MANUFACTURER); } /* * TODO: * * 1. Validate that Android device certification now ensures correct behavior, and add a * corresponding SDK_INT upper bound for applying the workaround (probably SDK_INT < 26). * 2. Determine a complete list of affected devices. * 3. Some of the devices in this list only fail to support setOutputSurface when switching from * a SurfaceView provided Surface to a Surface of another type (e.g. TextureView/DummySurface), * and vice versa. One hypothesis is that setOutputSurface fails when the surfaces have * different pixel formats. If we can find a way to query the Surface instances to determine * whether this case applies, then we'll be able to provide a more targeted workaround. */ /** * Returns whether the codec is known to implement {@link MediaCodec#setOutputSurface(Surface)} * incorrectly. * * <p>If true is returned then we fall back to releasing and re-instantiating the codec instead. * * @param name The name of the codec. * @return True if the device is known to implement {@link MediaCodec#setOutputSurface(Surface)} * incorrectly. */ protected boolean codecNeedsSetOutputSurfaceWorkaround(String name) { if (name.startsWith("OMX.google")) { // Google OMX decoders are not known to have this issue on any API level. 
return false; } synchronized (MediaCodecVideoRenderer.class) { if (!evaluatedDeviceNeedsSetOutputSurfaceWorkaround) { deviceNeedsSetOutputSurfaceWorkaround = evaluateDeviceNeedsSetOutputSurfaceWorkaround(); evaluatedDeviceNeedsSetOutputSurfaceWorkaround = true; } } return deviceNeedsSetOutputSurfaceWorkaround; } protected Surface getSurface() { return surface; } protected static final class CodecMaxValues { public final int width; public final int height; public final int inputSize; public CodecMaxValues(int width, int height, int inputSize) { this.width = width; this.height = height; this.inputSize = inputSize; } } private static boolean evaluateDeviceNeedsSetOutputSurfaceWorkaround() { if (Util.SDK_INT <= 28) { // Workaround for MiTV and MiBox devices which have been observed broken up to API 28. // https://github.com/google/ExoPlayer/issues/5169, // https://github.com/google/ExoPlayer/issues/6899. // https://github.com/google/ExoPlayer/issues/8014. // https://github.com/google/ExoPlayer/issues/8329. // https://github.com/google/ExoPlayer/issues/9710. switch (Util.DEVICE) { case "aquaman": case "dangal": case "dangalUHD": case "dangalFHD": case "magnolia": case "machuca": case "once": case "oneday": return true; default: break; // Do nothing. } } if (Util.SDK_INT <= 27 && "HWEML".equals(Util.DEVICE)) { // Workaround for Huawei P20: // https://github.com/google/ExoPlayer/issues/4468#issuecomment-459291645. return true; } if (Util.SDK_INT <= 26) { // In general, devices running API level 27 or later should be unaffected unless observed // otherwise. Enable the workaround on a per-device basis. 
Works around: // https://github.com/google/ExoPlayer/issues/3236, // https://github.com/google/ExoPlayer/issues/3355, // https://github.com/google/ExoPlayer/issues/3439, // https://github.com/google/ExoPlayer/issues/3724, // https://github.com/google/ExoPlayer/issues/3835, // https://github.com/google/ExoPlayer/issues/4006, // https://github.com/google/ExoPlayer/issues/4084, // https://github.com/google/ExoPlayer/issues/4104, // https://github.com/google/ExoPlayer/issues/4134, // https://github.com/google/ExoPlayer/issues/4315, // https://github.com/google/ExoPlayer/issues/4419, // https://github.com/google/ExoPlayer/issues/4460, // https://github.com/google/ExoPlayer/issues/4468, // https://github.com/google/ExoPlayer/issues/5312, // https://github.com/google/ExoPlayer/issues/6503. // https://github.com/google/ExoPlayer/issues/8014, // https://github.com/google/ExoPlayer/pull/8030. switch (Util.DEVICE) { case "1601": case "1713": case "1714": case "601LV": case "602LV": case "A10-70F": case "A10-70L": case "A1601": case "A2016a40": case "A7000-a": case "A7000plus": case "A7010a48": case "A7020a48": case "AquaPowerM": case "ASUS_X00AD_2": case "Aura_Note_2": case "b5": case "BLACK-1X": case "BRAVIA_ATV2": case "BRAVIA_ATV3_4K": case "C1": case "ComioS1": case "CP8676_I02": case "CPH1609": case "CPH1715": case "CPY83_I00": case "cv1": case "cv3": case "deb": case "DM-01K": case "E5643": case "ELUGA_A3_Pro": case "ELUGA_Note": case "ELUGA_Prim": case "ELUGA_Ray_X": case "EverStar_S": case "F01H": case "F01J": case "F02H": case "F03H": case "F04H": case "F04J": case "F3111": case "F3113": case "F3116": case "F3211": case "F3213": case "F3215": case "F3311": case "flo": case "fugu": case "GiONEE_CBL7513": case "GiONEE_GBL7319": case "GIONEE_GBL7360": case "GIONEE_SWW1609": case "GIONEE_SWW1627": case "GIONEE_SWW1631": case "GIONEE_WBL5708": case "GIONEE_WBL7365": case "GIONEE_WBL7519": case "griffin": case "htc_e56ml_dtul": case "hwALE-H": case "HWBLN-H": case 
"HWCAM-H": case "HWVNS-H": case "HWWAS-H": case "i9031": case "iball8735_9806": case "Infinix-X572": case "iris60": case "itel_S41": case "j2xlteins": case "JGZ": case "K50a40": case "kate": case "l5460": case "le_x6": case "LS-5017": case "M04": case "M5c": case "manning": case "marino_f": case "MEIZU_M5": case "mh": case "mido": case "MX6": case "namath": case "nicklaus_f": case "NX541J": case "NX573J": case "OnePlus5T": case "p212": case "P681": case "P85": case "pacificrim": case "panell_d": case "panell_dl": case "panell_ds": case "panell_dt": case "PB2-670M": case "PGN528": case "PGN610": case "PGN611": case "Phantom6": case "Pixi4-7_3G": case "Pixi5-10_4G": case "PLE": case "PRO7S": case "Q350": case "Q4260": case "Q427": case "Q4310": case "Q5": case "QM16XE_U": case "QX1": case "RAIJIN": case "santoni": case "Slate_Pro": case "SVP-DTV15": case "s905x018": case "taido_row": case "TB3-730F": case "TB3-730X": case "TB3-850F": case "TB3-850M": case "tcl_eu": case "V1": case "V23GB": case "V5": case "vernee_M5": case "watson": case "whyred": case "woods_f": case "woods_fn": case "X3_HK": case "XE2X": case "XT1663": case "Z12_PRO": case "Z80": return true; default: break; // Do nothing. } switch (Util.MODEL) { case "AFTA": case "AFTN": case "JSN-L21": return true; default: break; // Do nothing. } } return false; } @RequiresApi(23) private final class OnFrameRenderedListenerV23 implements MediaCodecAdapter.OnFrameRenderedListener, Handler.Callback { private static final int HANDLE_FRAME_RENDERED = 0; private final Handler handler; public OnFrameRenderedListenerV23(MediaCodecAdapter codec) { handler = Util.createHandlerForCurrentLooper(/* callback= */ this); codec.setOnFrameRenderedListener(/* listener= */ this, handler); } @Override public void onFrameRendered(MediaCodecAdapter codec, long presentationTimeUs, long nanoTime) { // Workaround bug in MediaCodec that causes deadlock if you call directly back into the // MediaCodec from this listener method. 
// Deadlock occurs because MediaCodec calls this listener method holding a lock, // which may also be required by calls made back into the MediaCodec. // This was fixed in https://android-review.googlesource.com/1156807. // // The workaround queues the event for subsequent processing, where the lock will not be held. if (Util.SDK_INT < 30) { Message message = Message.obtain( handler, /* what= */ HANDLE_FRAME_RENDERED, /* arg1= */ (int) (presentationTimeUs >> 32), /* arg2= */ (int) presentationTimeUs); handler.sendMessageAtFrontOfQueue(message); } else { handleFrameRendered(presentationTimeUs); } } @Override public boolean handleMessage(Message message) { switch (message.what) { case HANDLE_FRAME_RENDERED: handleFrameRendered(Util.toLong(message.arg1, message.arg2)); return true; default: return false; } } private void handleFrameRendered(long presentationTimeUs) { if (this != tunnelingOnFrameRenderedListener) { // Stale event. return; } if (presentationTimeUs == TUNNELING_EOS_PRESENTATION_TIME_US) { onProcessedTunneledEndOfStream(); } else { try { onProcessedTunneledBuffer(presentationTimeUs); } catch (ExoPlaybackException e) { setPendingPlaybackException(e); } } } } }
apache-2.0
jaivox/gui
work/src/com/jaivox/ui/gengram/WordnetUtils.java
3640
/* Jaivox Application Generator (JAG) version 0.2 March 2014 Copyright 2010-2014 by Bits and Pixels, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Please see work/licenses for licenses to other components included with this package. */ /* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.jaivox.ui.gengram; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import net.didion.jwnl.JWNL; import net.didion.jwnl.JWNLException; import net.didion.jwnl.data.IndexWord; import net.didion.jwnl.data.POS; import net.didion.jwnl.data.Pointer; import net.didion.jwnl.data.PointerType; import net.didion.jwnl.data.PointerUtils; import net.didion.jwnl.data.Synset; import net.didion.jwnl.dictionary.Dictionary; import net.didion.jwnl.data.Word; /** * * @author rj */ public class WordnetUtils { static PointerUtils ptUtils = PointerUtils.getInstance (); static Dictionary dictionary = Dictionary.getInstance (); public static String[] getSynonyms (String word, String postype) throws JWNLException { String sy[] = getSynonyms1 (word, postype); String ss = ""; if (sy != null) { for (String s : sy) { ss += s + ", "; } } //System.out.println("[WordnetUtils]:getSynonyms: " + word +"--"+postype+"---"+ ss); return sy; } public static String[] getSynonyms1 (String word, String postype) throws 
JWNLException { String[] ts = null; List<POS> allPos = POS.getAllPOS (); Set<String> synonyms = new HashSet<String> (); if (postype != null) { POS pos = POS.getPOSForLabel (postype); if (pos != null) { allPos = new ArrayList<POS> (); allPos.add (pos); } } for (POS pos : allPos) { IndexWord iw = Dictionary.getInstance ().getIndexWord (pos, word); if (iw != null) { Synset[] senses = iw.getSenses (); for (Synset syns : senses) { Word[] words = syns.getWords (); for (Word w : words) { if (!w.getLemma ().trim ().equalsIgnoreCase (word)) { synonyms.add (w.getLemma ().trim ()); } } PointerType pt[] = {PointerType.HYPONYM, PointerType.HYPERNYM}; for (PointerType type : pt) { Pointer[] ptrs = syns.getPointers (type); for (Pointer p : ptrs) { Synset s = p.getTargetSynset (); Word[] ws = s.getWords (); for (Word w : ws) { //System.out.println("----" + w.getLemma().trim() +"----"+ pos.getLabel() +", "+ type.getLabel()); //if(!w.getLemma().trim().equalsIgnoreCase(word)) synonyms.add(w.getLemma().trim()); } } } } } } if (synonyms.size () > 0) { ts = synonyms.toArray (new String[synonyms.size ()]); } return ts; } public static void initialize (String conffile) throws JWNLException { try { JWNL.initialize (new FileInputStream (conffile)); } catch (FileNotFoundException ex) { JWNL.initialize (WordnetUtils.class.getClassLoader ().getResourceAsStream (conffile)); } } }
apache-2.0
nafae/developer
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201402/cm/MediaService.java
2680
package com.google.api.ads.adwords.jaxws.v201402.cm; import java.net.MalformedURLException; import java.net.URL; import javax.xml.namespace.QName; import javax.xml.ws.Service; import javax.xml.ws.WebEndpoint; import javax.xml.ws.WebServiceClient; import javax.xml.ws.WebServiceException; import javax.xml.ws.WebServiceFeature; /** * This class was generated by the JAX-WS RI. * JAX-WS RI 2.2.4-b01 * Generated source version: 2.1 * */ @WebServiceClient(name = "MediaService", targetNamespace = "https://adwords.google.com/api/adwords/cm/v201402", wsdlLocation = "https://adwords.google.com/api/adwords/cm/v201402/MediaService?wsdl") public class MediaService extends Service { private final static URL MEDIASERVICE_WSDL_LOCATION; private final static WebServiceException MEDIASERVICE_EXCEPTION; private final static QName MEDIASERVICE_QNAME = new QName("https://adwords.google.com/api/adwords/cm/v201402", "MediaService"); static { URL url = null; WebServiceException e = null; try { url = new URL("https://adwords.google.com/api/adwords/cm/v201402/MediaService?wsdl"); } catch (MalformedURLException ex) { e = new WebServiceException(ex); } MEDIASERVICE_WSDL_LOCATION = url; MEDIASERVICE_EXCEPTION = e; } public MediaService() { super(__getWsdlLocation(), MEDIASERVICE_QNAME); } public MediaService(URL wsdlLocation, QName serviceName) { super(wsdlLocation, serviceName); } /** * * @return * returns MediaServiceInterface */ @WebEndpoint(name = "MediaServiceInterfacePort") public MediaServiceInterface getMediaServiceInterfacePort() { return super.getPort(new QName("https://adwords.google.com/api/adwords/cm/v201402", "MediaServiceInterfacePort"), MediaServiceInterface.class); } /** * * @param features * A list of {@link javax.xml.ws.WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values. 
* @return * returns MediaServiceInterface */ @WebEndpoint(name = "MediaServiceInterfacePort") public MediaServiceInterface getMediaServiceInterfacePort(WebServiceFeature... features) { return super.getPort(new QName("https://adwords.google.com/api/adwords/cm/v201402", "MediaServiceInterfacePort"), MediaServiceInterface.class, features); } private static URL __getWsdlLocation() { if (MEDIASERVICE_EXCEPTION!= null) { throw MEDIASERVICE_EXCEPTION; } return MEDIASERVICE_WSDL_LOCATION; } }
apache-2.0
YoungDigitalPlanet/empiria.player
src/main/java/eu/ydp/empiria/player/client/module/info/ContentFieldInfoListProvider.java
7604
/* * Copyright 2017 Young Digital Planet S.A. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.ydp.empiria.player.client.module.info; import com.google.common.collect.Lists; import com.google.inject.Inject; import eu.ydp.empiria.player.client.controller.data.DataSourceDataSupplier; import eu.ydp.empiria.player.client.controller.session.datasupplier.SessionDataSupplier; import eu.ydp.empiria.player.client.controller.variables.ResultExtractorsFactory; import eu.ydp.empiria.player.client.module.info.handler.*; import java.util.List; public class ContentFieldInfoListProvider { @Inject private ContentFieldInfoFactory contentFieldInfoFactory; @Inject private ProviderAssessmentValueHandler providerAssessmentValueHandler; @Inject private AssessmentResultValueHandler assessmentResultValueHandler; @Inject private ResultValueHandler resultValueHandler; @Inject private PageCountValueHandler pageCountValueHandler; @Inject private ItemIndexValueHandler itemIndexValueHandler; @Inject private TitleValueHandler titleValueHandler; @Inject private ItemValueHandler itemValueHandler; @Inject private FeedbackValueHandler feedbackValueHandler; private static final String TEST_RESULT = "test.result"; private static final String TEST_TITLE = "test.title"; private static final String TEST_RESET = "test.reset"; private static final String TEST_SHOW_ANSWERS = "test.show_answers"; private static final String TEST_MISTAKES = "test.mistakes"; private static final String TEST_CHECKS = "test.checks"; 
private static final String TEST_DONE = "test.done"; private static final String TEST_TODO = "test.todo"; private static final String ITEM_RESULT = "item.result"; private static final String ITEM_PAGE_COUNT = "item.page_count"; private static final String ITEM_PAGE_NUM = "item.page_num"; private static final String ITEM_INDEX = "item.index"; private static final String ITEM_TITLE = "item.title"; private static final String ITEM_RESET = "item.reset"; private static final String ITEM_SHOW_ANSWERS = "item.show_answers"; private static final String ITEM_MISTAKES = "item.mistakes"; private static final String ITEM_CHECKS = "item.checks"; private static final String ITEM_DONE = "item.done"; private static final String ITEM_TODO = "item.todo"; private static final String ITEM_FEEDBACK = "item.feedback"; public List<ContentFieldInfo> get() { List<ContentFieldInfo> fieldInfos = Lists.newArrayList(); List<ContentFieldInfo> itemInfos = getItemInfos(); List<ContentFieldInfo> assessmentInfos = getAssessmentInfos(); List<ContentFieldInfo> titleInfos = getTitleInfos(); List<ContentFieldInfo> itemIndexInfos = getItemIndexInfos(); List<ContentFieldInfo> pageCountInfos = getPageCountInfos(); List<ContentFieldInfo> resultInfos = getResultInfos(); List<ContentFieldInfo> assessmentResultInfos = getAssessmentResultInfos(); List<ContentFieldInfo> reportFeedbackInfos = getReportFeedbackInfos(); fieldInfos.addAll(itemInfos); fieldInfos.addAll(assessmentInfos); fieldInfos.addAll(titleInfos); fieldInfos.addAll(itemIndexInfos); fieldInfos.addAll(pageCountInfos); fieldInfos.addAll(resultInfos); fieldInfos.addAll(assessmentResultInfos); fieldInfos.addAll(reportFeedbackInfos); return fieldInfos; } private List<ContentFieldInfo> getAssessmentResultInfos() { List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(TEST_RESULT, assessmentResultValueHandler)); return contentFieldInfos; } private List<ContentFieldInfo> getResultInfos() { 
List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_RESULT, resultValueHandler)); return contentFieldInfos; } private List<ContentFieldInfo> getPageCountInfos() { List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_PAGE_COUNT, pageCountValueHandler)); return contentFieldInfos; } private List<ContentFieldInfo> getItemIndexInfos() { List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_INDEX, itemIndexValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_PAGE_NUM, itemIndexValueHandler)); return contentFieldInfos; } private List<ContentFieldInfo> getTitleInfos() { List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_TITLE, titleValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(TEST_TITLE, titleValueHandler)); return contentFieldInfos; } private List<ContentFieldInfo> getItemInfos() { List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_TODO, itemValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_DONE, itemValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_CHECKS, itemValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_MISTAKES, itemValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_SHOW_ANSWERS, itemValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_RESET, itemValueHandler)); return contentFieldInfos; } private List<ContentFieldInfo> getAssessmentInfos() { List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(TEST_TODO, providerAssessmentValueHandler)); 
contentFieldInfos.add(contentFieldInfoFactory.create(TEST_DONE, providerAssessmentValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(TEST_CHECKS, providerAssessmentValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(TEST_MISTAKES, providerAssessmentValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(TEST_SHOW_ANSWERS, providerAssessmentValueHandler)); contentFieldInfos.add(contentFieldInfoFactory.create(TEST_RESET, providerAssessmentValueHandler)); return contentFieldInfos; } private List<ContentFieldInfo> getReportFeedbackInfos() { List<ContentFieldInfo> contentFieldInfos = Lists.newArrayList(); contentFieldInfos.add(contentFieldInfoFactory.create(ITEM_FEEDBACK, feedbackValueHandler)); return contentFieldInfos; } }
apache-2.0
AadithyaU/Daily_Price
src/com/androidbegin/menuviewpagertutorial/Splash.java
2488
package com.androidbegin.menuviewpagertutorial; import java.util.ArrayList; import com.parse.ParseException; import com.parse.ParseFile; import com.parse.ParseObject; import com.parse.ParseQuery; import android.app.Activity; import android.app.ProgressDialog; import android.content.Intent; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.util.Log; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.widget.ProgressBar; public class Splash extends Activity { private final int SPLASH_DISPLAY_LENGHT = 3000; //ProgressBar a; @Override public void onCreate(Bundle icicle) { super.onCreate(icicle); requestWindowFeature(Window.FEATURE_NO_TITLE); getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,WindowManager.LayoutParams.FLAG_FULLSCREEN); setContentView(R.layout.splash); new Handler().postDelayed(new Runnable(){ @Override public void run() { Intent mainIntent = new Intent(Splash.this,MainActivity.class); //a = new ProgressBar(Splash.this); //a.setIndeterminate(false); //a.animate(); Splash.this.startActivity(mainIntent); Splash.this.finish(); } }, SPLASH_DISPLAY_LENGHT); }} /* public class Splash extends AsyncTask<Void, Integer, Integer> { ProgressBar ProgressBar1; @Override protected void onPreExecute() { } @Override protected Integer doInBackground(Void... args) { download(); return 1; } private void download() { // We are just imitating some process thats takes a bit of time // (loading of resources / downloading) int count = 10; for (int i = 0; i < count; i++) { // Update the progress bar after every step int progress = (int) ((i / (float) count) * 100); publishProgress(progress); // Do some long loading things try { Thread.sleep(2000); } catch (InterruptedException ignore) { } } } @Override protected void onProgressUpdate(Integer... 
values) { super.onProgressUpdate(values); ProgressBar1.setProgress(values[0]); } @Override protected void onPostExecute(Integer a) { ProgressBar1.setVisibility(View.GONE); } } */
apache-2.0
nafae/developer
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201306/AdSenseSettings.java
30839
/** * AdSenseSettings.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.dfp.axis.v201306; /** * Contains the AdSense configuration for an {@link AdUnit}. */ public class AdSenseSettings implements java.io.Serializable { /* Specifies whether or not the {@link AdUnit} is enabled for * serving ads from * the AdSense content network. This attribute is optional * and defaults to the * ad unit's parent or ancestor's setting if one has * been set. If no ancestor * of the ad unit has set {@code adSenseEnabled}, the * attribute is * defaulted to {@code true}. */ private java.lang.Boolean adSenseEnabled; /* Specifies the Hexadecimal border color, from {@code 000000} * to {@code * FFFFFF}. This attribute is optional and defaults to * the ad unit's parent or * ancestor's setting if one has been set. If no ancestor * of the ad unit has * set {@code borderColor}, the attribute is defaulted * to {@code FFFFFF}. */ private java.lang.String borderColor; /* Specifies the Hexadecimal title color of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the * ad unit has set {@code titleColor}, the attribute * is defaulted to {@code * 0000FF}. */ private java.lang.String titleColor; /* Specifies the Hexadecimal background color of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the ad * unit has set {@code backgroundColor}, the attribute * is defaulted to {@code * FFFFFF}. */ private java.lang.String backgroundColor; /* Specifies the Hexadecimal color of the text of an ad, from * {@code 000000} * to {@code FFFFFF}. This attribute is optional and * defaults to the ad unit's * parent or ancestor's setting if one has been set. 
* If no ancestor of the ad * unit has set {@code textColor}, the attribute is defaulted * to {@code * 000000}. */ private java.lang.String textColor; /* Specifies the Hexadecimal color of the URL of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the ad * unit has set {@code urlColor}, the attribute is defaulted * to {@code 008000} * . */ private java.lang.String urlColor; /* Specifies what kind of ad can be served by this {@link AdUnit} * from the * AdSense Content Network. This attribute is optional * and defaults to the ad * unit's parent or ancestor's setting if one has been * set. If no ancestor of * the ad unit has set {@code adType}, the attribute * is defaulted to {@link * AdType#TEXT_AND_IMAGE}. */ private com.google.api.ads.dfp.axis.v201306.AdSenseSettingsAdType adType; /* Specifies the border-style of the {@link AdUnit}. This attribute * is * optional and defaults to the ad unit's parent or ancestor's * setting if one * has been set. If no ancestor of the ad unit has set * {@code borderStyle}, * the attribute is defaulted to {@link BorderStyle#DEFAULT}. */ private com.google.api.ads.dfp.axis.v201306.AdSenseSettingsBorderStyle borderStyle; /* Specifies the font family of the {@link AdUnit}. This attribute * is optional * and defaults to the ad unit's parent or ancestor's * setting if one has been * set. If no ancestor of the ad unit has set {@code * fontFamily}, the * attribute is defaulted to {@link FontFamily#DEFAULT}. */ private com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontFamily fontFamily; /* Specifies the font size of the {@link AdUnit}. This attribute * is optional * and defaults to the ad unit's parent or ancestor's * setting if one has been * set. If no ancestor of the ad unit has set {@code * fontSize}, the * attribute is defaulted to {@link FontSize#DEFAULT}. 
*/ private com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontSize fontSize; /* Maps ad unit sizes to AdSense-For-Content Size Formats. This * attribute is * optional. */ private com.google.api.ads.dfp.axis.v201306.Size_StringMapEntry[] afcFormats; public AdSenseSettings() { } public AdSenseSettings( java.lang.Boolean adSenseEnabled, java.lang.String borderColor, java.lang.String titleColor, java.lang.String backgroundColor, java.lang.String textColor, java.lang.String urlColor, com.google.api.ads.dfp.axis.v201306.AdSenseSettingsAdType adType, com.google.api.ads.dfp.axis.v201306.AdSenseSettingsBorderStyle borderStyle, com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontFamily fontFamily, com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontSize fontSize, com.google.api.ads.dfp.axis.v201306.Size_StringMapEntry[] afcFormats) { this.adSenseEnabled = adSenseEnabled; this.borderColor = borderColor; this.titleColor = titleColor; this.backgroundColor = backgroundColor; this.textColor = textColor; this.urlColor = urlColor; this.adType = adType; this.borderStyle = borderStyle; this.fontFamily = fontFamily; this.fontSize = fontSize; this.afcFormats = afcFormats; } /** * Gets the adSenseEnabled value for this AdSenseSettings. * * @return adSenseEnabled * Specifies whether or not the {@link AdUnit} is enabled for * serving ads from * the AdSense content network. This attribute is optional * and defaults to the * ad unit's parent or ancestor's setting if one has * been set. If no ancestor * of the ad unit has set {@code adSenseEnabled}, the * attribute is * defaulted to {@code true}. */ public java.lang.Boolean getAdSenseEnabled() { return adSenseEnabled; } /** * Sets the adSenseEnabled value for this AdSenseSettings. * * @param adSenseEnabled * Specifies whether or not the {@link AdUnit} is enabled for * serving ads from * the AdSense content network. This attribute is optional * and defaults to the * ad unit's parent or ancestor's setting if one has * been set. 
If no ancestor * of the ad unit has set {@code adSenseEnabled}, the * attribute is * defaulted to {@code true}. */ public void setAdSenseEnabled(java.lang.Boolean adSenseEnabled) { this.adSenseEnabled = adSenseEnabled; } /** * Gets the borderColor value for this AdSenseSettings. * * @return borderColor * Specifies the Hexadecimal border color, from {@code 000000} * to {@code * FFFFFF}. This attribute is optional and defaults to * the ad unit's parent or * ancestor's setting if one has been set. If no ancestor * of the ad unit has * set {@code borderColor}, the attribute is defaulted * to {@code FFFFFF}. */ public java.lang.String getBorderColor() { return borderColor; } /** * Sets the borderColor value for this AdSenseSettings. * * @param borderColor * Specifies the Hexadecimal border color, from {@code 000000} * to {@code * FFFFFF}. This attribute is optional and defaults to * the ad unit's parent or * ancestor's setting if one has been set. If no ancestor * of the ad unit has * set {@code borderColor}, the attribute is defaulted * to {@code FFFFFF}. */ public void setBorderColor(java.lang.String borderColor) { this.borderColor = borderColor; } /** * Gets the titleColor value for this AdSenseSettings. * * @return titleColor * Specifies the Hexadecimal title color of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the * ad unit has set {@code titleColor}, the attribute * is defaulted to {@code * 0000FF}. */ public java.lang.String getTitleColor() { return titleColor; } /** * Sets the titleColor value for this AdSenseSettings. * * @param titleColor * Specifies the Hexadecimal title color of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. 
* If no ancestor of the * ad unit has set {@code titleColor}, the attribute * is defaulted to {@code * 0000FF}. */ public void setTitleColor(java.lang.String titleColor) { this.titleColor = titleColor; } /** * Gets the backgroundColor value for this AdSenseSettings. * * @return backgroundColor * Specifies the Hexadecimal background color of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the ad * unit has set {@code backgroundColor}, the attribute * is defaulted to {@code * FFFFFF}. */ public java.lang.String getBackgroundColor() { return backgroundColor; } /** * Sets the backgroundColor value for this AdSenseSettings. * * @param backgroundColor * Specifies the Hexadecimal background color of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the ad * unit has set {@code backgroundColor}, the attribute * is defaulted to {@code * FFFFFF}. */ public void setBackgroundColor(java.lang.String backgroundColor) { this.backgroundColor = backgroundColor; } /** * Gets the textColor value for this AdSenseSettings. * * @return textColor * Specifies the Hexadecimal color of the text of an ad, from * {@code 000000} * to {@code FFFFFF}. This attribute is optional and * defaults to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the ad * unit has set {@code textColor}, the attribute is defaulted * to {@code * 000000}. */ public java.lang.String getTextColor() { return textColor; } /** * Sets the textColor value for this AdSenseSettings. * * @param textColor * Specifies the Hexadecimal color of the text of an ad, from * {@code 000000} * to {@code FFFFFF}. This attribute is optional and * defaults to the ad unit's * parent or ancestor's setting if one has been set. 
* If no ancestor of the ad * unit has set {@code textColor}, the attribute is defaulted * to {@code * 000000}. */ public void setTextColor(java.lang.String textColor) { this.textColor = textColor; } /** * Gets the urlColor value for this AdSenseSettings. * * @return urlColor * Specifies the Hexadecimal color of the URL of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the ad * unit has set {@code urlColor}, the attribute is defaulted * to {@code 008000} * . */ public java.lang.String getUrlColor() { return urlColor; } /** * Sets the urlColor value for this AdSenseSettings. * * @param urlColor * Specifies the Hexadecimal color of the URL of an ad, from {@code * 000000} to * {@code FFFFFF}. This attribute is optional and defaults * to the ad unit's * parent or ancestor's setting if one has been set. * If no ancestor of the ad * unit has set {@code urlColor}, the attribute is defaulted * to {@code 008000} * . */ public void setUrlColor(java.lang.String urlColor) { this.urlColor = urlColor; } /** * Gets the adType value for this AdSenseSettings. * * @return adType * Specifies what kind of ad can be served by this {@link AdUnit} * from the * AdSense Content Network. This attribute is optional * and defaults to the ad * unit's parent or ancestor's setting if one has been * set. If no ancestor of * the ad unit has set {@code adType}, the attribute * is defaulted to {@link * AdType#TEXT_AND_IMAGE}. */ public com.google.api.ads.dfp.axis.v201306.AdSenseSettingsAdType getAdType() { return adType; } /** * Sets the adType value for this AdSenseSettings. * * @param adType * Specifies what kind of ad can be served by this {@link AdUnit} * from the * AdSense Content Network. This attribute is optional * and defaults to the ad * unit's parent or ancestor's setting if one has been * set. 
If no ancestor of * the ad unit has set {@code adType}, the attribute * is defaulted to {@link * AdType#TEXT_AND_IMAGE}. */ public void setAdType(com.google.api.ads.dfp.axis.v201306.AdSenseSettingsAdType adType) { this.adType = adType; } /** * Gets the borderStyle value for this AdSenseSettings. * * @return borderStyle * Specifies the border-style of the {@link AdUnit}. This attribute * is * optional and defaults to the ad unit's parent or ancestor's * setting if one * has been set. If no ancestor of the ad unit has set * {@code borderStyle}, * the attribute is defaulted to {@link BorderStyle#DEFAULT}. */ public com.google.api.ads.dfp.axis.v201306.AdSenseSettingsBorderStyle getBorderStyle() { return borderStyle; } /** * Sets the borderStyle value for this AdSenseSettings. * * @param borderStyle * Specifies the border-style of the {@link AdUnit}. This attribute * is * optional and defaults to the ad unit's parent or ancestor's * setting if one * has been set. If no ancestor of the ad unit has set * {@code borderStyle}, * the attribute is defaulted to {@link BorderStyle#DEFAULT}. */ public void setBorderStyle(com.google.api.ads.dfp.axis.v201306.AdSenseSettingsBorderStyle borderStyle) { this.borderStyle = borderStyle; } /** * Gets the fontFamily value for this AdSenseSettings. * * @return fontFamily * Specifies the font family of the {@link AdUnit}. This attribute * is optional * and defaults to the ad unit's parent or ancestor's * setting if one has been * set. If no ancestor of the ad unit has set {@code * fontFamily}, the * attribute is defaulted to {@link FontFamily#DEFAULT}. */ public com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontFamily getFontFamily() { return fontFamily; } /** * Sets the fontFamily value for this AdSenseSettings. * * @param fontFamily * Specifies the font family of the {@link AdUnit}. This attribute * is optional * and defaults to the ad unit's parent or ancestor's * setting if one has been * set. 
If no ancestor of the ad unit has set {@code * fontFamily}, the * attribute is defaulted to {@link FontFamily#DEFAULT}. */ public void setFontFamily(com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontFamily fontFamily) { this.fontFamily = fontFamily; } /** * Gets the fontSize value for this AdSenseSettings. * * @return fontSize * Specifies the font size of the {@link AdUnit}. This attribute * is optional * and defaults to the ad unit's parent or ancestor's * setting if one has been * set. If no ancestor of the ad unit has set {@code * fontSize}, the * attribute is defaulted to {@link FontSize#DEFAULT}. */ public com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontSize getFontSize() { return fontSize; } /** * Sets the fontSize value for this AdSenseSettings. * * @param fontSize * Specifies the font size of the {@link AdUnit}. This attribute * is optional * and defaults to the ad unit's parent or ancestor's * setting if one has been * set. If no ancestor of the ad unit has set {@code * fontSize}, the * attribute is defaulted to {@link FontSize#DEFAULT}. */ public void setFontSize(com.google.api.ads.dfp.axis.v201306.AdSenseSettingsFontSize fontSize) { this.fontSize = fontSize; } /** * Gets the afcFormats value for this AdSenseSettings. * * @return afcFormats * Maps ad unit sizes to AdSense-For-Content Size Formats. This * attribute is * optional. */ public com.google.api.ads.dfp.axis.v201306.Size_StringMapEntry[] getAfcFormats() { return afcFormats; } /** * Sets the afcFormats value for this AdSenseSettings. * * @param afcFormats * Maps ad unit sizes to AdSense-For-Content Size Formats. This * attribute is * optional. 
*/ public void setAfcFormats(com.google.api.ads.dfp.axis.v201306.Size_StringMapEntry[] afcFormats) { this.afcFormats = afcFormats; } public com.google.api.ads.dfp.axis.v201306.Size_StringMapEntry getAfcFormats(int i) { return this.afcFormats[i]; } public void setAfcFormats(int i, com.google.api.ads.dfp.axis.v201306.Size_StringMapEntry _value) { this.afcFormats[i] = _value; } private java.lang.Object __equalsCalc = null; public synchronized boolean equals(java.lang.Object obj) { if (!(obj instanceof AdSenseSettings)) return false; AdSenseSettings other = (AdSenseSettings) obj; if (obj == null) return false; if (this == obj) return true; if (__equalsCalc != null) { return (__equalsCalc == obj); } __equalsCalc = obj; boolean _equals; _equals = true && ((this.adSenseEnabled==null && other.getAdSenseEnabled()==null) || (this.adSenseEnabled!=null && this.adSenseEnabled.equals(other.getAdSenseEnabled()))) && ((this.borderColor==null && other.getBorderColor()==null) || (this.borderColor!=null && this.borderColor.equals(other.getBorderColor()))) && ((this.titleColor==null && other.getTitleColor()==null) || (this.titleColor!=null && this.titleColor.equals(other.getTitleColor()))) && ((this.backgroundColor==null && other.getBackgroundColor()==null) || (this.backgroundColor!=null && this.backgroundColor.equals(other.getBackgroundColor()))) && ((this.textColor==null && other.getTextColor()==null) || (this.textColor!=null && this.textColor.equals(other.getTextColor()))) && ((this.urlColor==null && other.getUrlColor()==null) || (this.urlColor!=null && this.urlColor.equals(other.getUrlColor()))) && ((this.adType==null && other.getAdType()==null) || (this.adType!=null && this.adType.equals(other.getAdType()))) && ((this.borderStyle==null && other.getBorderStyle()==null) || (this.borderStyle!=null && this.borderStyle.equals(other.getBorderStyle()))) && ((this.fontFamily==null && other.getFontFamily()==null) || (this.fontFamily!=null && this.fontFamily.equals(other.getFontFamily()))) 
&& ((this.fontSize==null && other.getFontSize()==null) || (this.fontSize!=null && this.fontSize.equals(other.getFontSize()))) && ((this.afcFormats==null && other.getAfcFormats()==null) || (this.afcFormats!=null && java.util.Arrays.equals(this.afcFormats, other.getAfcFormats()))); __equalsCalc = null; return _equals; } private boolean __hashCodeCalc = false; public synchronized int hashCode() { if (__hashCodeCalc) { return 0; } __hashCodeCalc = true; int _hashCode = 1; if (getAdSenseEnabled() != null) { _hashCode += getAdSenseEnabled().hashCode(); } if (getBorderColor() != null) { _hashCode += getBorderColor().hashCode(); } if (getTitleColor() != null) { _hashCode += getTitleColor().hashCode(); } if (getBackgroundColor() != null) { _hashCode += getBackgroundColor().hashCode(); } if (getTextColor() != null) { _hashCode += getTextColor().hashCode(); } if (getUrlColor() != null) { _hashCode += getUrlColor().hashCode(); } if (getAdType() != null) { _hashCode += getAdType().hashCode(); } if (getBorderStyle() != null) { _hashCode += getBorderStyle().hashCode(); } if (getFontFamily() != null) { _hashCode += getFontFamily().hashCode(); } if (getFontSize() != null) { _hashCode += getFontSize().hashCode(); } if (getAfcFormats() != null) { for (int i=0; i<java.lang.reflect.Array.getLength(getAfcFormats()); i++) { java.lang.Object obj = java.lang.reflect.Array.get(getAfcFormats(), i); if (obj != null && !obj.getClass().isArray()) { _hashCode += obj.hashCode(); } } } __hashCodeCalc = false; return _hashCode; } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(AdSenseSettings.class, true); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "AdSenseSettings")); org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("adSenseEnabled"); elemField.setXmlName(new 
javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "adSenseEnabled")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "boolean")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("borderColor"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "borderColor")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("titleColor"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "titleColor")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("backgroundColor"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "backgroundColor")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("textColor"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "textColor")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); 
elemField.setFieldName("urlColor"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "urlColor")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("adType"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "adType")); elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "AdSenseSettings.AdType")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("borderStyle"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "borderStyle")); elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "AdSenseSettings.BorderStyle")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("fontFamily"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "fontFamily")); elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "AdSenseSettings.FontFamily")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("fontSize"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "fontSize")); elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", 
"AdSenseSettings.FontSize")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("afcFormats"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "afcFormats")); elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "Size_StringMapEntry")); elemField.setMinOccurs(0); elemField.setNillable(false); elemField.setMaxOccursUnbounded(true); typeDesc.addFieldDesc(elemField); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } /** * Get Custom Serializer */ public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanSerializer( _javaType, _xmlType, typeDesc); } /** * Get Custom Deserializer */ public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanDeserializer( _javaType, _xmlType, typeDesc); } }
apache-2.0
cristiani/encuestame
enme-business/src/test/java/org/encuestame/test/business/csv/TestCSVParser.java
1649
package org.encuestame.test.business.csv; import java.io.FileNotFoundException; import java.io.IOException; import org.encuestame.business.setup.install.demo.CSVParser; import org.encuestame.persistence.exception.EnMeNoResultsFoundException; import org.encuestame.test.business.security.AbstractSpringSecurityContext; import org.encuestame.utils.categories.test.DefaultTest; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.categories.Category; import org.springframework.beans.factory.annotation.Autowired; @Category(DefaultTest.class) public class TestCSVParser extends AbstractSpringSecurityContext{ @Autowired CSVParser csvParser; /** * Test csv parser. * @throws IOException * @throws FileNotFoundException * @throws EnMeNoResultsFoundException */ @Test public void testCSVParser() throws FileNotFoundException, IOException, EnMeNoResultsFoundException { Assert.assertNotNull(this.csvParser); this.csvParser.executeCSVDemoInstall(2, 2, 2); Assert.assertEquals("Questions should be", 2, getHibernateTemplate().find("from Question").size()); Assert.assertEquals("Users should be", 4, getHibernateTemplate().find("from UserAccount").size()); Assert.assertEquals("TweetPoll should be", 1, getHibernateTemplate().find("from TweetPoll").size()); Assert.assertEquals("Poll should be", 1, getHibernateTemplate().find("from Poll").size()); } /** * @return the csvParser */ public CSVParser getCsvParser() { return csvParser; } /** * @param csvParser the csvParser to set */ public void setCsvParser(CSVParser csvParser) { this.csvParser = csvParser; } }
apache-2.0
jramsden/yamlconfig
src/main/java/ramsdenj/yamlconfig/aws/YamlConfigAWSCredentialsProvider.java
2910
package ramsdenj.yamlconfig.aws; import ramsdenj.yamlconfig.YamlConfig; import ramsdenj.yamlconfig.model.ConfigurationKeyNotFoundException; import ramsdenj.yamlconfig.model.ConfigurationValueConversionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.auth.ClasspathPropertiesFileCredentialsProvider; import com.amazonaws.auth.EnvironmentVariableCredentialsProvider; import com.amazonaws.auth.InstanceProfileCredentialsProvider; public class YamlConfigAWSCredentialsProvider implements AWSCredentialsProvider { private static final Logger LOG = LoggerFactory.getLogger(YamlConfigAWSCredentialsProvider.class); private YamlConfig yamlConfig; private String credentialsConfigKey; private volatile AWSCredentialsProvider provider; public YamlConfigAWSCredentialsProvider(YamlConfig yamlConfig, String credentialsConfigKey) { this.yamlConfig = yamlConfig; this.credentialsConfigKey = credentialsConfigKey; refresh(); } public AWSCredentials getCredentials() { return provider.getCredentials(); } public void refresh() { AwsCredentialsConfig credentialsConfig = null; try { credentialsConfig = yamlConfig.getSetting(credentialsConfigKey, AwsCredentialsConfig.class); } catch (ConfigurationKeyNotFoundException e) { LOG.error("Error loading credentials from config.", e); provider = null; } catch (ConfigurationValueConversionException e) { LOG.error("Error converting configuration type from config."); provider = null; } AWSCredentialsProvider tmpProvider = null; switch (credentialsConfig.getCredentialsSource()) { case ENVIRONMENT: tmpProvider = new EnvironmentVariableCredentialsProvider(); break; case PROPERTIES_FILE: tmpProvider = new ClasspathPropertiesFileCredentialsProvider(); break; case INSTANCE_PROFILE: tmpProvider = new InstanceProfileCredentialsProvider(); break; case CONFIGURATION: final String accessKey = 
credentialsConfig.getAccessKey(); final String secretKey = credentialsConfig.getSecretKey(); tmpProvider = new AWSCredentialsProvider() { @Override public void refresh() { } @Override public AWSCredentials getCredentials() { return new BasicAWSCredentials(accessKey, secretKey); } }; break; default: break; } provider = tmpProvider; } }
apache-2.0
pravinmhaske/SeloMartApp
server/src/main/java/com/example/common/LatLongDetails.java
2359
package com.example.common;

import java.net.URLConnection;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import javax.xml.xpath.XPathConstants;
import org.w3c.dom.Document;
import java.io.ByteArrayInputStream;
import java.io.IOException;

/**
 * Resolves an address/postcode to a latitude/longitude string via the Google
 * Geocoding XML API.
 */
public class LatLongDetails {

    /**
     * Reads the first line of {@code args} as a postcode and returns the
     * geocoded lat/long string, or {@code ""} if the lookup fails
     * (best-effort behavior of the original, preserved).
     *
     * @param args text whose first line is the postcode to geocode
     * @return concatenated latitude/longitude, or {@code ""} on failure
     */
    public String getZipCode(String args) {
        String latLongs = "";
        // try-with-resources closes the reader and underlying stream,
        // fixing a resource leak; UTF-8 is explicit instead of the
        // platform-default charset.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                new ByteArrayInputStream(args.getBytes(StandardCharsets.UTF_8)),
                StandardCharsets.UTF_8))) {
            String postcode = reader.readLine();
            latLongs = getLatLongPositions(postcode);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return latLongs;
    }

    /**
     * Calls the Google Geocoding API for the given address.
     *
     * @param address free-form address or postcode (URL-encoded here)
     * @return latitude and longitude concatenated, or {@code null} if the HTTP
     *         response code is not 200
     * @throws Exception if the API reports a non-OK status, or on any
     *         network/parse failure
     */
    public static String getLatLongPositions(String address) throws Exception {
        String api = "http://maps.googleapis.com/maps/api/geocode/xml?address="
                + URLEncoder.encode(address, "UTF-8") + "&sensor=true";
        URL url = new URL(api);
        HttpURLConnection httpConnection = (HttpURLConnection) url.openConnection();
        try {
            httpConnection.connect();
            if (httpConnection.getResponseCode() == 200) {
                DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                // XXE hardening: the response arrives over plain HTTP, so
                // refuse DOCTYPE declarations entirely.
                factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
                DocumentBuilder builder = factory.newDocumentBuilder();
                Document document = builder.parse(httpConnection.getInputStream());
                XPathFactory xPathfactory = XPathFactory.newInstance();
                XPath xpath = xPathfactory.newXPath();
                XPathExpression expr = xpath.compile("/GeocodeResponse/status");
                String status = (String) expr.evaluate(document, XPathConstants.STRING);
                if (!status.equals("OK")) {
                    throw new Exception("Error from the API - response status: " + status);
                }
                expr = xpath.compile("//geometry/location/lat");
                String latitude = (String) expr.evaluate(document, XPathConstants.STRING);
                expr = xpath.compile("//geometry/location/lng");
                String longitude = (String) expr.evaluate(document, XPathConstants.STRING);
                // NOTE(review): lat and lng are concatenated with NO separator
                // (e.g. "12.3445.67"). Preserved as-is because callers may parse
                // this exact format — confirm and add a delimiter if not.
                return latitude + longitude;
            }
            return null;
        } finally {
            // Release the connection even on exceptions (original leaked it).
            httpConnection.disconnect();
        }
    }
}
apache-2.0
mesutcelik/hazelcast
hazelcast/src/main/java/com/hazelcast/internal/serialization/PortableContext.java
1579
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.serialization;

import com.hazelcast.core.ManagedContext;
import com.hazelcast.nio.serialization.ClassDefinition;
import com.hazelcast.nio.serialization.FieldDefinition;
import com.hazelcast.nio.serialization.Portable;

import java.io.IOException;
import java.nio.ByteOrder;

/**
 * Lookup/registration context for {@link Portable} {@link ClassDefinition}s,
 * keyed by factory id, class id and version.
 */
public interface PortableContext {

    /** Returns the context-wide portable version. */
    int getVersion();

    /** Returns the version registered for the given factory/class pair. */
    int getClassVersion(int factoryId, int classId);

    /** Records the version to use for the given factory/class pair. */
    void setClassVersion(int factoryId, int classId, int version);

    /**
     * Looks up a previously registered class definition.
     * NOTE(review): presumably returns {@code null} when absent — confirm
     * against the implementation.
     */
    ClassDefinition lookupClassDefinition(int factoryId, int classId, int version);

    /**
     * Looks up the class definition for serialized {@code data}.
     *
     * @throws IOException if the data cannot be read
     */
    ClassDefinition lookupClassDefinition(Data data) throws IOException;

    /** Registers {@code cd} and returns the definition to use afterwards. */
    ClassDefinition registerClassDefinition(ClassDefinition cd);

    /**
     * Returns the class definition for {@code portable}, registering one first
     * if none exists yet.
     *
     * @throws IOException if building the definition fails
     */
    ClassDefinition lookupOrRegisterClassDefinition(Portable portable) throws IOException;

    /** Returns the field named {@code name} within {@code cd}. */
    FieldDefinition getFieldDefinition(ClassDefinition cd, String name);

    /** Returns the {@link ManagedContext} associated with this context. */
    ManagedContext getManagedContext();

    /** Returns the byte order used for portable serialization. */
    ByteOrder getByteOrder();
}
apache-2.0
gravitee-io/gravitee-definition
jackson/src/main/java/io/gravitee/definition/jackson/datatype/api/ser/ssl/KeyStoreSerializer.java
2128
/** * Copyright (C) 2015 The Gravitee team (http://gravitee.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gravitee.definition.jackson.datatype.api.ser.ssl; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.jsontype.TypeSerializer; import com.fasterxml.jackson.databind.ser.std.StdScalarSerializer; import io.gravitee.definition.model.ssl.KeyStore; import java.io.IOException; /** * @author David BRASSELY (david.brassely at graviteesource.com) * @author GraviteeSource Team */ public abstract class KeyStoreSerializer<T extends KeyStore> extends StdScalarSerializer<T> { public KeyStoreSerializer(Class<T> t) { super(t); } @Override public void serialize(T keyStore, JsonGenerator jgen, SerializerProvider provider) throws IOException { jgen.writeStartObject(); doSerialize(keyStore, jgen, provider); jgen.writeEndObject(); } @Override public void serializeWithType(T value, JsonGenerator g, SerializerProvider provider, TypeSerializer typeSer) throws IOException { serialize(value, g, provider); } protected void doSerialize(T keyStore, JsonGenerator jgen, SerializerProvider serializerProvider) throws IOException { jgen.writeStringField("type", keyStore.getType().name()); } protected void writeStringField(JsonGenerator jgen, String field, String value) throws IOException { if (value != null && !value.isEmpty()) { jgen.writeStringField(field, value); } } }
apache-2.0
ArseniyJ4J/akulikov
chapter_007/src/main/java/ru/job4j/multithreading/threads/jmmtrouble/package-info.java
179
/**
 * Package for the JMM (Java Memory Model) trouble demonstration task.
 * (The previous Javadoc said "TxtCalculator task", which does not match this
 * package — apparent copy-paste from another package-info.)
 *
 * @author Arseniy Kulikov (mailto:arsmail424@gmail.com)
 * @version 1.0
 * @since 1.0
 */
package ru.job4j.multithreading.threads.jmmtrouble;
apache-2.0
GIGAMOLE/NEON
app/src/main/java/com/gigamole/neon/utils/Utilities.java
2527
package com.gigamole.neon.utils; /* * Copyright (C) 2015 Basil Miller * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Created by GIGAMOLE on 17.05.2015. */ public class Utilities { // Function to convert milliseconds time to Timer Format public static String milliSecondsToTimer(long milliseconds) { String finalTimerString = ""; String secondsString = ""; // Convert total duration into time int hours = (int) (milliseconds / (1000 * 60 * 60)); int minutes = (int) (milliseconds % (1000 * 60 * 60)) / (1000 * 60); int seconds = (int) ((milliseconds % (1000 * 60 * 60)) % (1000 * 60) / 1000); // Add hours if there if (hours > 0) { finalTimerString = hours + ":"; } // Prepending 0 to seconds if it is one digit if (seconds < 10) { secondsString = "0" + seconds; } else { secondsString = "" + seconds; } finalTimerString = finalTimerString + minutes + ":" + secondsString; // return timer string return finalTimerString; } // Function to get Progress percentage public static int getProgressPercentage(long currentDuration, long totalDuration) { Double percentage = (double) 0; long currentSeconds = (int) (currentDuration / 1000); long totalSeconds = (int) (totalDuration / 1000); // calculating percentage percentage = (((double) currentSeconds) / totalSeconds) * 100; // return percentage return percentage.intValue(); } // Function to change progress to timer returns current duration in milliseconds public static int progressToTimer(int progress, int totalDuration) { int currentDuration = 
0; totalDuration = (int) (totalDuration / 1000); currentDuration = (int) ((((double) progress) / 100) * totalDuration); // return current duration in milliseconds return currentDuration * 1000; } }
apache-2.0
leafclick/intellij-community
platform/diff-impl/src/com/intellij/diff/util/DiffGutterOperation.java
3772
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.diff.util; import com.intellij.diff.tools.util.KeyboardModifierListener; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.markup.GutterIconRenderer; import com.intellij.openapi.editor.markup.HighlighterLayer; import com.intellij.openapi.editor.markup.HighlighterTargetArea; import com.intellij.openapi.editor.markup.RangeHighlighter; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public abstract class DiffGutterOperation { @NotNull private final RangeHighlighter myHighlighter; public DiffGutterOperation(@NotNull Editor editor, int offset) { myHighlighter = editor.getMarkupModel().addRangeHighlighter(offset, offset, HighlighterLayer.ADDITIONAL_SYNTAX, null, HighlighterTargetArea.LINES_IN_RANGE); } public void dispose() { myHighlighter.dispose(); } public void update(boolean force) { if (!myHighlighter.isValid()) return; myHighlighter.setGutterIconRenderer(createRenderer()); } protected abstract GutterIconRenderer createRenderer(); public static int lineToOffset(@NotNull Editor editor, int line) { Document document = editor.getDocument(); return line == DiffUtil.getLineCount(document) ? 
document.getTextLength() : document.getLineStartOffset(line); } public static final class Simple extends DiffGutterOperation { @NotNull private final RendererBuilder myBuilder; public Simple(@NotNull Editor editor, int offset, @NotNull RendererBuilder builder) { super(editor, offset); myBuilder = builder; update(true); } @Override protected GutterIconRenderer createRenderer() { return myBuilder.createRenderer(); } } public static final class WithModifiers extends DiffGutterOperation { @NotNull private final ModifiersRendererBuilder myBuilder; @NotNull private final KeyboardModifierListener myModifierProvider; private boolean myCtrlPressed; private boolean myShiftPressed; private boolean myAltPressed; public WithModifiers(@NotNull Editor editor, int offset, @NotNull KeyboardModifierListener modifierProvider, @NotNull ModifiersRendererBuilder builder) { super(editor, offset); myBuilder = builder; myModifierProvider = modifierProvider; update(true); } @Override public void update(boolean force) { boolean shouldUpdate = force || myCtrlPressed == myModifierProvider.isCtrlPressed() || myShiftPressed == myModifierProvider.isShiftPressed() || myAltPressed == myModifierProvider.isAltPressed(); if (!shouldUpdate) return; myCtrlPressed = myModifierProvider.isCtrlPressed(); myShiftPressed = myModifierProvider.isShiftPressed(); myAltPressed = myModifierProvider.isAltPressed(); super.update(force); } @Override protected GutterIconRenderer createRenderer() { return myBuilder.createRenderer(myCtrlPressed, myShiftPressed, myAltPressed); } } public interface RendererBuilder { @Nullable GutterIconRenderer createRenderer(); } public interface ModifiersRendererBuilder { @Nullable GutterIconRenderer createRenderer(boolean ctrlPressed, boolean shiftPressed, boolean altPressed); } }
apache-2.0
alexryndin/ambari
ambari-server/src/test/java/org/apache/ambari/server/controller/AuthToLocalBuilderTest.java
24626
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ambari.server.controller;

import org.apache.ambari.server.utils.CollectionPresentationUtils;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.junit.Assert.*;

/**
 * Tests for {@code AuthToLocalBuilder}: generation of Kerberos auth-to-local
 * rules from (principal, local-user) pairs, merging with pre-existing rule
 * strings, de-duplication, ordering, case-insensitivity support ("/L" suffix),
 * additional realms, and input normalization (whitespace, line-ending noise).
 */
public class AuthToLocalBuilderTest {

  // Baseline: rules are de-duplicated and emitted sorted (1-component rules,
  // then the default-realm strip rule, then 2-component rules, then DEFAULT).
  @Test
  public void testRuleGeneration() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    // Duplicate principal for secondary namenode, should be filtered out...
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("foobar@EXAMPLE.COM", "hdfs");

    assertEquals(
        "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // With case-insensitivity enabled, the default realm rule gains a "/L" suffix.
  @Test
  public void testRuleGeneration_caseInsensitiveSupport() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), true);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    // Duplicate principal for secondary namenode, should be filtered out...
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("foobar@EXAMPLE.COM", "hdfs");

    assertEquals(
        "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*///L\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // Re-ingesting case-sensitive rules into a case-insensitive builder replaces
  // the default realm rule with the "/L" variant.
  @Test
  public void testRuleGeneration_changeToCaseInsensitiveSupport() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    // Duplicate principal for secondary namenode, should be filtered out...
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    String existingRules = builder.generate();

    builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), true);
    builder.addRules(existingRules);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    assertEquals(
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*///L\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // The reverse transition: case-insensitive rules fed into a case-sensitive builder.
  @Test
  public void testRuleGeneration_changeToCaseSensitiveSupport() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), true);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    // Duplicate principal for secondary namenode, should be filtered out...
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    String existingRules = builder.generate();

    builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules(existingRules);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    assertEquals(
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // Previously generated rules survive a round trip through addRules() and are
  // merged with newly added rules without duplication.
  @Test
  public void testRuleGeneration_ExistingRules() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);

    // previously generated non-host specific rules
    builder.addRule("foobar@EXAMPLE.COM", "hdfs");
    // doesn't exist in latter generation
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");

    String existingRules = builder.generate();

    builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    // set previously existing rules
    builder.addRules(existingRules);

    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    // Duplicate of existing rule should not result in duplicate rule generation
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    // duplicated again in this builder should not result in duplicate rule generation
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    assertEquals(
        "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // Host-qualified existing rules are preserved and ordered before the
  // equivalent non-host-specific rules; extra-realm rules are kept.
  @Test
  public void testRuleGeneration_ExistingRules_existingMoreSpecificRule() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);

    // previously generated non-host specific rules
    builder.addRule("foobar@EXAMPLE.COM", "hdfs");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");

    String existingRules = builder.generate();
    // prepend host specific rule
    existingRules = "RULE:[2:$1/$2@$0](dn/somehost.com@EXAMPLE.COM)s/.*/hdfs/\n" + existingRules;
    // append default realm rule for additional realm
    existingRules += "\nRULE:[1:$1@$0](.*@OTHER_REALM.COM)s/@.*//";

    builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    // set previously existing rules
    builder.addRules(existingRules);

    // more specific host qualifed rule exists for dn
    // non-host specific rule should still be generated but occur later in generated string
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    // Duplicate principal for secondary namenode, should be filtered out...
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    // duplicate of existing rule
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    assertEquals(
        "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[1:$1@$0](.*@OTHER_REALM.COM)s/@.*//\n" +
            "RULE:[2:$1/$2@$0](dn/somehost.com@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // addRules(null) is a no-op; only the default realm rule and DEFAULT remain.
  @Test
  public void testAddNullExistingRule() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules(null);

    assertEquals(
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "DEFAULT",
        builder.generate()
    );
  }

  // Stray backslashes, line-continuation noise and missing newlines in the
  // input rule string are tolerated and normalized away.
  @Test
  public void testRuleRegexWithDifferentEnding() {
    String rules = "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\\\\\n" +
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\ntext\\\\" +
        "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
        "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/" +
        "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\\\\\\" +
        "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\\/\\";

    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules(rules);

    assertEquals(
        "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "DEFAULT",
        builder.generate());
  }

  // Existing rules using \Q...\E quoting and custom replacements are preserved
  // verbatim and ordered among generated rules.
  @Test
  public void testRuleRegexWithComplexReplacements() {
    String rules = "RULE:[1:$1@$0](foobar@\\QEXAMPLE1.COM\\E$)s/.*@\\QEXAMPLE1.COM\\E$/hdfs/\n" +
        "RULE:[1:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//\n" +
        "RULE:[2:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//";

    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules(rules);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("ambari-qa-c1@EXAMPLE.COM", "ambari-qa");

    assertEquals(
        "RULE:[1:$1@$0](ambari-qa-c1@EXAMPLE.COM)s/.*/ambari-qa/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[1:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//\n" +
            "RULE:[1:$1@$0](foobar@\\QEXAMPLE1.COM\\E$)s/.*@\\QEXAMPLE1.COM\\E$/hdfs/\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](.*@\\QEXAMPLE1.COM\\E)s/@\\QEXAMPLE1.COM\\E//\n" +
            "DEFAULT",
        builder.generate());
  }

  // Whitespace inside the RULE header sections is stripped during parsing.
  @Test
  public void testRulesWithWhitespace() {
    String rulesWithWhitespace =
        "RULE: [1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[ 1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2: $1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0 ](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0] (jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM) s/.*/hdfs/\n";

    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules(rulesWithWhitespace);

    assertEquals(
        "RULE:[1:$1@$0](foobar@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "DEFAULT",
        builder.generate());
  }

  // Realm-less rules are accepted when supplied via addRules()...
  @Test
  public void testExistingRuleWithNoRealm() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules("RULE:[1:$1](foobar)s/.*/hdfs/");

    assertEquals(
        "RULE:[1:$1](foobar)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "DEFAULT",
        builder.generate());
  }

  @Test
  public void testExistingRuleWithNoRealm2() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules("RULE:[1:$1/$2](foobar/someHost)s/.*/hdfs/");

    assertEquals(
        "RULE:[1:$1/$2](foobar/someHost)s/.*/hdfs/\n" +
            "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "DEFAULT",
        builder.generate());
  }

  // ...but addRule() rejects principals without a realm.
  @Test(expected = IllegalArgumentException.class)
  public void testAddNewRuleWithNoRealm() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRule("someUser", "hdfs");
  }

  @Test(expected = IllegalArgumentException.class)
  public void testAddNewRuleWithNoRealm2() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRule("someUser/someHost", "hdfs");
  }

  // Wildcard (".*") realm rules: no extra default realm rule is emitted for
  // them, and they sort after same-arity concrete-realm rules.
  @Test
  public void testExistingWildcardRealm() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);
    builder.addRules(
        "RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n" +
            "RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](.*@EXAMPLE.COM)s/.*/yarn/\n" +
            "DEFAULT");
    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");

    // ensure that no default realm rule is generated for .* realm and
    // also that that .* realm rules are ordered last in relation to
    // other rules with the same number of expected principal components
    assertEquals(
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](.*@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n" +
            "DEFAULT",
        builder.generate());
  }

  // clone() must produce an independent deep copy: equal output at clone time,
  // divergent output after mutating the original.
  @Test
  public void testClone() throws CloneNotSupportedException {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("foobar@EXAMPLE.COM", "hdfs");

    AuthToLocalBuilder copy = (AuthToLocalBuilder) builder.clone();

    assertNotSame(builder, copy);
    assertEquals(builder.generate(), copy.generate());

    // Ensure that mutable fields do not change the copy when changed in the original
    builder.addRule("user@EXAMPLE.COM", "hdfs");
    assertTrue(!copy.generate().equals(builder.generate()));
  }

  // Additional realms (comma-separated, whitespace tolerated) each get a
  // default realm-strip rule; exact ordering is hash-dependent, so compare as
  // a permutation.
  @Test
  public void testAdditionalRealms() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", "REALM2,REALM3, REALM1 ", false);

    builder.addRules(
        "RULE:[1:$1@$0](.*@FOOBAR.COM)s/@.*//\n" +
            "DEFAULT");

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    // Depends on hashing, string representation can be different
    List<String> rules = Arrays.asList("RULE:[1:$1@$0](.*@FOOBAR.COM)s/@.*//",
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//",
        "RULE:[1:$1@$0](.*@REALM2)s/@.*//",
        "RULE:[1:$1@$0](.*@REALM1)s/@.*//",
        "RULE:[1:$1@$0](.*@REALM3)s/@.*//",
        "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/",
        "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/",
        "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/",
        "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/",
        "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/",
        "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/",
        "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/",
        "DEFAULT");

    assertTrue(CollectionPresentationUtils.isStringPermutationOfCollection(builder.generate(), rules, "\n", 0, 0));
  }

  // No additional realms: only the primary realm's default rule is emitted.
  @Test
  public void testAdditionalRealms_Null() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", Collections.<String>emptyList(), false);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    assertEquals(
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // Empty additional-realms string behaves the same as no additional realms.
  @Test
  public void testAdditionalRealms_Empty() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", "", false);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("jn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("jhs/_HOST@EXAMPLE.COM", "mapred");
    builder.addRule("hm/_HOST@EXAMPLE.COM", "hbase");
    builder.addRule("rs/_HOST@EXAMPLE.COM", "hbase");

    assertEquals(
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
            "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](hm@EXAMPLE.COM)s/.*/hbase/\n" +
            "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
            "RULE:[2:$1@$0](jn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
            "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
            "RULE:[2:$1@$0](rs@EXAMPLE.COM)s/.*/hbase/\n" +
            "DEFAULT",
        builder.generate());
  }

  // End-to-end: a realistic existing rule set round-trips unchanged when all
  // re-added rules are already present (including a null local-user rule).
  @Test
  public void testUseCase() {
    AuthToLocalBuilder builder = new AuthToLocalBuilder("EXAMPLE.COM", "FOOBAR.COM,HW.HDP,BAZ.NET", false);

    String existingRules = "RULE:[1:$1@$0](.*@BAZ.NET)s/@.*//\n" +
        "RULE:[1:$1@$0](accumulo-c1@EXAMPLE.COM)s/.*/accumulo/\n" +
        "RULE:[1:$1@$0](ambari-qa-c1@EXAMPLE.COM)s/.*/ambari-qa/\n" +
        "RULE:[1:$1@$0](hbase-c1@EXAMPLE.COM)s/.*/hbase/\n" +
        "RULE:[1:$1@$0](hdfs-c1@EXAMPLE.COM)s/.*/hdfs/\n" +
        "RULE:[1:$1@$0](spark-c1@EXAMPLE.COM)s/.*/spark/\n" +
        "RULE:[1:$1@$0](tracer-c1@EXAMPLE.COM)s/.*/accumulo/\n" +
        "RULE:[1:$1@$0](.*@EXAMPLE.COM)s/@.*//\n" +
        "RULE:[1:$1@$0](.*@FOOBAR.COM)s/@.*//\n" +
        "RULE:[1:$1@$0](.*@HW.HDP)s/@.*//\n" +
        "RULE:[2:$1@$0](accumulo@EXAMPLE.COM)s/.*/accumulo/\n" +
        "RULE:[2:$1@$0](amshbase@EXAMPLE.COM)s/.*/ams/\n" +
        "RULE:[2:$1@$0](amszk@EXAMPLE.COM)s/.*/ams/\n" +
        "RULE:[2:$1@$0](dn@EXAMPLE.COM)s/.*/hdfs/\n" +
        "RULE:[2:$1@$0](falcon@EXAMPLE.COM)s/.*/falcon/\n" +
        "RULE:[2:$1@$0](hbase@EXAMPLE.COM)s/.*/hbase/\n" +
        "RULE:[2:$1@$0](hive@EXAMPLE.COM)s/.*/hive/\n" +
        "RULE:[2:$1@$0](jhs@EXAMPLE.COM)s/.*/mapred/\n" +
        "RULE:[2:$1@$0](nm@EXAMPLE.COM)s/.*/yarn/\n" +
        "RULE:[2:$1@$0](nn@EXAMPLE.COM)s/.*/hdfs/\n" +
        "RULE:[2:$1@$0](oozie@EXAMPLE.COM)s/.*/oozie/\n" +
        "RULE:[2:$1@$0](rm@EXAMPLE.COM)s/.*/yarn/\n" +
        "RULE:[2:$1@$0](yarn@EXAMPLE.COM)s/.*/yarn/\n" +
        "DEFAULT";

    builder.addRules(existingRules);

    builder.addRule("nn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("dn/_HOST@EXAMPLE.COM", "hdfs");
    builder.addRule("rm/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("yarn/_HOST@EXAMPLE.COM", "yarn");
    builder.addRule("kafka/_HOST@EXAMPLE.COM", null);
    builder.addRule("hdfs-c1@EXAMPLE.COM", "hdfs");

    assertEquals(existingRules, builder.generate());
  }
}
apache-2.0
reallyinsane/trainsimulator-controller
trainsimulator-core/src/main/java/io/mathan/trainsimulator/service/Event.java
1061
/*
 * Copyright 2019 Matthias Hanisch (reallyinsane)
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.mathan.trainsimulator.service;

import io.mathan.trainsimulator.model.ControlData;

/**
 * Immutable value pairing a control identifier with the {@link ControlData}
 * captured for it at the moment the event occurred.
 */
public class Event {

  /** Identifier of the control this event refers to. */
  private final String controlId;
  /** Data snapshot associated with the control. */
  private final ControlData payload;

  /**
   * Creates an event for the given control.
   *
   * @param control identifier of the affected control
   * @param controlData the control's data at event time
   */
  public Event(String control, ControlData controlData) {
    this.controlId = control;
    this.payload = controlData;
  }

  /** Returns the identifier of the control this event refers to. */
  public String getControl() {
    return this.controlId;
  }

  /** Returns the control data captured with this event. */
  public ControlData getData() {
    return this.payload;
  }
}
apache-2.0
kexinrong/macrobase
lib/src/main/java/edu/stanford/futuredata/macrobase/util/MacrobaseException.java
181
package edu.stanford.futuredata.macrobase.util; public class MacrobaseException extends Exception { public MacrobaseException(String message) { super(message); } }
apache-2.0
palantir/atlasdb
atlasdb-config/src/test/java/com/palantir/atlasdb/memory/AsyncInitializeableInMemoryTimestampService.java
2920
/*
 * (c) Copyright 2021 Palantir Technologies Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.palantir.atlasdb.memory;

import com.palantir.async.initializer.AsyncInitializer;
import com.palantir.atlasdb.keyvalue.api.KeyValueService;
import com.palantir.logsafe.Preconditions;
import com.palantir.timestamp.AutoDelegate_TimestampService;
import com.palantir.timestamp.InMemoryTimestampService;
import com.palantir.timestamp.ManagedTimestampService;
import com.palantir.timestamp.TimestampService;
import java.time.Duration;

/**
 * Test-only {@link TimestampService} that delegates to an {@link InMemoryTimestampService}
 * but only becomes usable once the supplied {@link KeyValueService} reports itself
 * initialized; initialization is retried asynchronously via {@link AsyncInitializer}.
 * The {@link ManagedTimestampService} management operations are deliberately unsupported.
 */
public final class AsyncInitializeableInMemoryTimestampService extends AsyncInitializer
        implements AutoDelegate_TimestampService, ManagedTimestampService {
    // KVS whose readiness gates initialization of this service.
    private final KeyValueService kvs;
    // Actual timestamp source; all TimestampService calls are delegated here.
    private final InMemoryTimestampService timestampService = new InMemoryTimestampService();

    // Private: instances must be created via initializeWhenKvsIsReady so that
    // async initialization is always kicked off.
    private AsyncInitializeableInMemoryTimestampService(KeyValueService kvs) {
        this.kvs = kvs;
    }

    /**
     * Creates an {@link InMemoryTimestampService} that is asynchronously initialized and ready to use once the kvs is
     * ready. This should only be useful for tests.
     *
     * @param kvs KeyValueService that must be ready before the returned TimestampService can be initialized.
     * @return the asynchronously initialized PersistentTimestampService
     */
    public static AsyncInitializeableInMemoryTimestampService initializeWhenKvsIsReady(KeyValueService kvs) {
        AsyncInitializeableInMemoryTimestampService service = new AsyncInitializeableInMemoryTimestampService(kvs);
        // true -> initialize asynchronously (retry in the background until tryInitialize succeeds)
        service.initialize(true);
        return service;
    }

    @Override
    public TimestampService delegate() {
        // Throws if async initialization has not completed yet.
        checkInitialized();
        return timestampService;
    }

    @Override
    protected void tryInitialize() {
        // Initialization succeeds only once the KVS is up; otherwise the
        // AsyncInitializer machinery retries after sleepInterval().
        Preconditions.checkState(kvs.isInitialized());
    }

    @Override
    protected String getInitializingClassName() {
        return "AsyncInitializeableInMemoryTimestampService";
    }

    @Override
    protected Duration sleepInterval() {
        // Delay between initialization retries.
        return Duration.ofSeconds(1);
    }

    @Override
    public void fastForwardTimestamp(long currentTimestamp) {
        throw new UnsupportedOperationException("Not implemented in test class");
    }

    @Override
    public String ping() {
        throw new UnsupportedOperationException("Not implemented in test class");
    }
}
apache-2.0
jamesdbloom/mockserver
mockserver-core/src/main/java/org/mockserver/mock/action/HttpResponseClassCallbackActionHandler.java
4677
package org.mockserver.mock.action;

import org.mockserver.log.model.LogEntry;
import org.mockserver.logging.MockServerLogger;
import org.mockserver.model.HttpClassCallback;
import org.mockserver.model.HttpRequest;
import org.mockserver.model.HttpResponse;
import org.slf4j.event.Level;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;

import static org.mockserver.log.model.LogEntry.LogMessageType.WARN;
import static org.mockserver.model.HttpResponse.notFoundResponse;

/**
 * Handles {@link HttpClassCallback} actions by reflectively instantiating the configured
 * {@link ExpectationResponseCallback} class and invoking its handle method. Any failure to
 * instantiate or invoke the callback is logged and answered with a 404 response.
 *
 * @author jamesdbloom
 */
public class HttpResponseClassCallbackActionHandler {
    private final MockServerLogger mockServerLogger;

    public HttpResponseClassCallbackActionHandler(MockServerLogger mockServerLogger) {
        this.mockServerLogger = mockServerLogger;
    }

    /**
     * Resolves the callback class named by {@code httpClassCallback} and returns the response it
     * produces for {@code request}, or a 404 response if the callback cannot be run.
     */
    public HttpResponse handle(HttpClassCallback httpClassCallback, HttpRequest request) {
        return invokeCallbackMethod(httpClassCallback, request);
    }

    /**
     * Instantiates the configured callback class via its no-argument constructor.
     * Returns {@code null} (after logging) when the class is missing, of the wrong type,
     * or cannot be constructed.
     */
    private ExpectationResponseCallback instantiateCallback(HttpClassCallback httpClassCallback) {
        try {
            Class<?> expectationResponseCallbackClass = Class.forName(httpClassCallback.getCallbackClass());
            if (ExpectationResponseCallback.class.isAssignableFrom(expectationResponseCallbackClass)) {
                // asSubclass avoids the raw type and unchecked cast of calling getConstructor on Class
                Constructor<? extends ExpectationResponseCallback> constructor =
                    expectationResponseCallbackClass.asSubclass(ExpectationResponseCallback.class).getConstructor();
                return constructor.newInstance();
            } else {
                mockServerLogger.logEvent(
                    new LogEntry()
                        .setType(LogEntry.LogMessageType.EXCEPTION)
                        .setLogLevel(Level.ERROR)
                        .setHttpRequest(null)
                        // was previously (incorrectly) naming ExpectationForwardCallback and "forwarded requests"
                        .setMessageFormat(httpClassCallback.getCallbackClass() + " does not implement " + ExpectationResponseCallback.class.getCanonicalName() + " which is required for responses generated from a class callback")
                );
            }
        } catch (ClassNotFoundException e) {
            mockServerLogger.logEvent(
                new LogEntry()
                    .setType(LogEntry.LogMessageType.EXCEPTION)
                    .setLogLevel(Level.ERROR)
                    .setMessageFormat("ClassNotFoundException - while trying to instantiate ExpectationResponseCallback class \"" + httpClassCallback.getCallbackClass() + "\"")
                    .setThrowable(e)
            );
        } catch (NoSuchMethodException e) {
            mockServerLogger.logEvent(
                new LogEntry()
                    .setType(LogEntry.LogMessageType.EXCEPTION)
                    .setLogLevel(Level.ERROR)
                    .setMessageFormat("NoSuchMethodException - while trying to create default constructor on ExpectationResponseCallback class \"" + httpClassCallback.getCallbackClass() + "\"")
                    .setThrowable(e)
            );
        } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) {
            mockServerLogger.logEvent(
                new LogEntry()
                    .setType(LogEntry.LogMessageType.EXCEPTION)
                    .setLogLevel(Level.ERROR)
                    // report the actual exception type instead of always claiming InvocationTargetException
                    .setMessageFormat(e.getClass().getSimpleName() + " - while trying to execute default constructor on ExpectationResponseCallback class \"" + httpClassCallback.getCallbackClass() + "\"")
                    .setThrowable(e)
            );
        }
        return null;
    }

    /**
     * Runs the callback for the given request; any throwable from the callback, a failed
     * instantiation, or a null request yields a 404 response.
     */
    private HttpResponse invokeCallbackMethod(HttpClassCallback httpClassCallback, HttpRequest httpRequest) {
        if (httpRequest != null) {
            ExpectationResponseCallback expectationResponseCallback = instantiateCallback(httpClassCallback);
            if (expectationResponseCallback != null) {
                try {
                    return expectationResponseCallback.handle(httpRequest);
                } catch (Throwable throwable) {
                    mockServerLogger.logEvent(
                        new LogEntry()
                            .setType(LogEntry.LogMessageType.EXCEPTION)
                            .setLogLevel(Level.ERROR)
                            .setHttpRequest(httpRequest)
                            .setMessageFormat(httpClassCallback.getCallbackClass() + " throw exception while executing handle callback method - " + throwable.getMessage())
                            .setThrowable(throwable)
                    );
                    return notFoundResponse();
                }
            } else {
                return notFoundResponse();
            }
        } else {
            return notFoundResponse();
        }
    }
}
apache-2.0
Juangel29/Appability
app/src/main/java/grability/com/appability/ui/adapters/ApplicationsViewHolder.java
1184
package grability.com.appability.ui.adapters; import android.content.Context; import android.support.v7.widget.RecyclerView; import android.view.View; import android.widget.ImageView; import android.widget.TextView; import com.squareup.picasso.Picasso; import butterknife.Bind; import butterknife.ButterKnife; import grability.com.appability.R; import grability.com.appability.entities.Application; /** * * Created by juanangelardila on 4/2/16. */ public class ApplicationsViewHolder extends RecyclerView.ViewHolder { @Bind(R.id.imvImage) ImageView imvApplicationImage; @Bind(R.id.txvName) TextView txvName; @Bind(R.id.txvArtist) TextView txvArtist; @Bind(R.id.txvRights) TextView txvRights; public ApplicationsViewHolder(View itemView) { super(itemView); ButterKnife.bind(this, itemView); } public void bindCategory (Context context, Application application) { Picasso.with(context).load(application.getImageUrl()).into(imvApplicationImage); txvName.setText(application.getName()); txvArtist.setText(application.getArtist()); txvRights.setText(application.getRights()); } }
apache-2.0
hethune/leetCode
singleNumber/leetCode/src/Test.java
469
/** * Created by wenhang on 2/9/14. */ public class Test { public static void main(String[] args) { test(); } private static void test() { int [] a = {1, 1, 2, 2, 3}; int [] b = {1}; Solution solution = new Solution(); int repeatedNumber = solution.singleNumber(a); System.out.println(repeatedNumber); repeatedNumber = solution.singleNumber(b); System.out.println(repeatedNumber); } }
apache-2.0
master-bob/attendance
api/src/java/org/sakaiproject/attendance/model/AttendanceRecord.java
2190
/* * Copyright (c) 2016, University of Dayton * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/ecl2 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.attendance.model; import lombok.*; import java.io.Serializable; import java.util.Objects; /** * An AttendanceRecord for a specific user for a specific AttendanceEvent * * @author Leonardo Canessa [lcanessa1 (at) udayton (dot) edu] * @author David Bauer [dbauer1 (at) udayton (dot) edu] * @author Steve Swinsburg (steve.swinsburg@gmail.com) */ @NoArgsConstructor @AllArgsConstructor public class AttendanceRecord implements Serializable { private static final long serialVersionUID = 1L; @Getter @Setter private Long id; @Getter @Setter private AttendanceEvent attendanceEvent; @Getter @Setter private String userID; @Getter @Setter private Status status; @Getter @Setter private String comment; public AttendanceRecord(AttendanceEvent e, String uId, Status s) { this.attendanceEvent = e; this.userID = uId; this.status = s; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; AttendanceRecord that = (AttendanceRecord) o; return Objects.equals(id, that.id) && Objects.equals(attendanceEvent, that.attendanceEvent) && Objects.equals(userID, that.userID) && status == that.status && Objects.equals(comment, that.comment); } @Override public int hashCode() { return Objects.hash(id); } }
apache-2.0
sakai-mirror/k2
kernel/src/main/java/org/sakaiproject/kernel/webapp/RestServlet.java
4051
/*
 * Licensed to the Sakai Foundation (SF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The SF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package org.sakaiproject.kernel.webapp;

import org.apache.commons.lang.StringUtils;
import org.sakaiproject.kernel.api.KernelManager;
import org.sakaiproject.kernel.api.Registry;
import org.sakaiproject.kernel.api.RegistryService;
import org.sakaiproject.kernel.api.rest.RestProvider;
import org.sakaiproject.kernel.util.rest.RestDescription;

import java.io.IOException;
import java.util.Map;

import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Front-controller servlet for REST providers: the first path element selects a
 * {@link RestProvider} from the kernel registry, a trailing {@code __describe__}
 * returns the provider's self-description (html, xml or json via {@code fmt}),
 * and everything else is dispatched to the provider.
 */
public class RestServlet extends HttpServlet {

  /**
   *
   */
  private static final long serialVersionUID = -172232497404083238L;
  private transient Registry<String, RestProvider> registry;

  /**
   * Looks up the REST provider registry from the kernel on startup.
   *
   * @see javax.servlet.GenericServlet#init(javax.servlet.ServletConfig)
   */
  @Override
  public void init(ServletConfig config) throws ServletException {
    super.init(config);
    KernelManager km = new KernelManager();
    RegistryService registryService = km.getService(RegistryService.class);
    registry = registryService.getRegistry(RestProvider.REST_REGISTRY);
  }

  /**
   * Routes every HTTP method to the selected provider.
   *
   * {@inheritDoc}
   *
   * @see javax.servlet.http.HttpServlet#service(javax.servlet.http.HttpServletRequest,
   *      javax.servlet.http.HttpServletResponse)
   */
  @Override
  protected void service(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    String requestPath = request.getPathInfo();
    if (requestPath == null) {
      requestPath = "";
    }
    String[] elements = StringUtils.split(requestPath, '/');
    // locator defaults to "default" and is never null after this point
    // (the previous implementation carried a dead `locator == null` re-check).
    String locator = "default";
    if (elements != null && elements.length > 0) {
      locator = elements[0];
    }
    Map<String, RestProvider> restProviders = registry.getMap();
    // a bare /__describe__ describes the default provider
    if ("__describe__".equals(locator)) {
      locator = "default";
    }
    RestProvider restProvider = restProviders.get(locator);
    if (restProvider == null) {
      response.sendError(HttpServletResponse.SC_NOT_FOUND);
    } else {
      try {
        if (requestPath.endsWith("__describe__")) {
          RestDescription description = restProvider.getDescription();
          String format = request.getParameter("fmt");
          if ("xml".equals(format)) {
            response.setContentType("text/xml");
            response.getWriter().print(description.toXml());
          } else if ("json".equals(format)) {
            response.setContentType(RestProvider.CONTENT_TYPE);
            response.getWriter().print(description.toJson());
          } else {
            response.setContentType("text/html");
            response.getWriter().print(description.toHtml());
          }
        } else {
          restProvider.dispatch(elements, request, response);
        }
      } catch (SecurityException ex) {
        // reset any partially-written body before sending the error status
        response.reset();
        response.sendError(HttpServletResponse.SC_FORBIDDEN, ex.getMessage());
      } catch (RestServiceFaultException ex) {
        // NOTE(review): no logger is in scope in this file; stack trace goes to stderr
        ex.printStackTrace();
        response.reset();
        response.sendError(ex.getStatusCode(), ex.getMessage());
      }
    }
  }
}
apache-2.0
ayaseruri/TorrFM
app/src/main/java/ayaseruri/torr/torrfm/adaptar/MainViewPagerAdaptar.java
684
package ayaseruri.torr.torrfm.adaptar; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import java.util.List; /** * Created by ayaseruri on 15/12/17. */ public class MainViewPagerAdaptar extends FragmentPagerAdapter { private List<Fragment> fragments; public MainViewPagerAdaptar(FragmentManager fm, List<Fragment> fragments) { super(fm); this.fragments = fragments; } @Override public Fragment getItem(int position) { return fragments.get(position); } @Override public int getCount() { return fragments.size(); } }
apache-2.0
DenverM80/ds3_java_sdk
ds3-sdk/src/main/java/com/spectralogic/ds3client/commands/spectrads3/GetTapePartitionSpectraS3Response.java
1531
/* * ****************************************************************************** * Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"). You may not use * this file except in compliance with the License. A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * **************************************************************************** */ // This code is auto-generated, do not modify package com.spectralogic.ds3client.commands.spectrads3; import com.spectralogic.ds3client.models.TapePartition; import com.spectralogic.ds3client.models.ChecksumType; import com.spectralogic.ds3client.commands.interfaces.AbstractResponse; public class GetTapePartitionSpectraS3Response extends AbstractResponse { private final TapePartition tapePartitionResult; public GetTapePartitionSpectraS3Response(final TapePartition tapePartitionResult, final String checksum, final ChecksumType.Type checksumType) { super(checksum, checksumType); this.tapePartitionResult = tapePartitionResult; } public TapePartition getTapePartitionResult() { return this.tapePartitionResult; } }
apache-2.0
Unicon/cas
core/cas-server-core-authentication/src/test/java/org/apereo/cas/authentication/principal/ChainingPrincipalResolverTests.java
2838
package org.apereo.cas.authentication.principal;

import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.Credential;
import org.apereo.cas.authentication.handler.support.SimpleTestUsernamePasswordAuthenticationHandler;
import org.apereo.cas.authentication.principal.resolvers.ChainingPrincipalResolver;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collections;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

/**
 * Unit test for {@link ChainingPrincipalResolver}.
 *
 * @author Marvin S. Addison
 * @since 4.0.0
 */
public class ChainingPrincipalResolverTests {

    private final PrincipalFactory principalFactory = new DefaultPrincipalFactory();

    @Test
    public void examineSupports() throws Exception {
        // A chain supports a credential as soon as any member resolver does.
        final Credential credential = mock(Credential.class);
        when(credential.getId()).thenReturn("a");

        final PrincipalResolver supporting = mock(PrincipalResolver.class);
        when(supporting.supports(eq(credential))).thenReturn(true);

        final PrincipalResolver nonSupporting = mock(PrincipalResolver.class);
        when(nonSupporting.supports(eq(credential))).thenReturn(false);

        final ChainingPrincipalResolver chain = new ChainingPrincipalResolver();
        chain.setChain(Arrays.asList(supporting, nonSupporting));
        assertTrue(chain.supports(credential));
    }

    @Test
    public void examineResolve() throws Exception {
        // The second resolver enriches the principal produced by the first;
        // the chained result must carry both the id and the added attribute.
        final Principal intermediate = principalFactory.createPrincipal("output");
        final Credential credential = mock(Credential.class);
        when(credential.getId()).thenReturn("input");

        final PrincipalResolver first = mock(PrincipalResolver.class);
        when(first.supports(eq(credential))).thenReturn(true);
        when(first.resolve(eq(credential), any(Principal.class), any(AuthenticationHandler.class)))
                .thenReturn(intermediate);

        final PrincipalResolver second = mock(PrincipalResolver.class);
        when(second.supports(any(Credential.class))).thenReturn(true);
        when(second.resolve(any(Credential.class), any(Principal.class), any(AuthenticationHandler.class)))
                .thenReturn(principalFactory.createPrincipal("output",
                        Collections.singletonMap("mail", "final@example.com")));

        final ChainingPrincipalResolver chain = new ChainingPrincipalResolver();
        chain.setChain(Arrays.asList(first, second));

        final Principal resolved =
                chain.resolve(credential, intermediate, new SimpleTestUsernamePasswordAuthenticationHandler());
        assertEquals("output", resolved.getId());
        assertEquals("final@example.com", resolved.getAttributes().get("mail"));
    }
}
apache-2.0
mesutcelik/hazelcast
hazelcast/src/main/java/com/hazelcast/cache/impl/AbstractCacheService.java
38636
/* * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.impl; import com.hazelcast.cache.CacheNotExistsException; import com.hazelcast.cache.HazelcastCacheManager; import com.hazelcast.cache.impl.event.CachePartitionLostEventFilter; import com.hazelcast.cache.impl.eviction.CacheClearExpiredRecordsTask; import com.hazelcast.cache.impl.journal.CacheEventJournal; import com.hazelcast.cache.impl.journal.RingbufferCacheEventJournalImpl; import com.hazelcast.cache.impl.operation.AddCacheConfigOperationSupplier; import com.hazelcast.cache.impl.operation.OnJoinCacheOperation; import com.hazelcast.cache.impl.tenantcontrol.CacheDestroyEventContext; import com.hazelcast.cluster.ClusterState; import com.hazelcast.cluster.Member; import com.hazelcast.config.CacheConfig; import com.hazelcast.config.CacheConfigAccessor; import com.hazelcast.config.CacheSimpleConfig; import com.hazelcast.config.InMemoryFormat; import com.hazelcast.core.DistributedObject; import com.hazelcast.internal.cluster.ClusterStateListener; import com.hazelcast.internal.eviction.ExpirationManager; import com.hazelcast.internal.metrics.MetricDescriptor; import com.hazelcast.internal.metrics.MetricsCollectionContext; import com.hazelcast.internal.monitor.LocalCacheStats; import com.hazelcast.internal.monitor.impl.LocalCacheStatsImpl; import com.hazelcast.internal.nio.IOUtil; import com.hazelcast.internal.partition.IPartitionLostEvent; 
import com.hazelcast.internal.partition.MigrationEndpoint; import com.hazelcast.internal.partition.PartitionAwareService; import com.hazelcast.internal.partition.PartitionMigrationEvent; import com.hazelcast.internal.serialization.Data; import com.hazelcast.internal.services.PreJoinAwareService; import com.hazelcast.internal.services.SplitBrainHandlerService; import com.hazelcast.internal.services.SplitBrainProtectionAwareService; import com.hazelcast.internal.util.Clock; import com.hazelcast.internal.util.ConcurrencyUtil; import com.hazelcast.internal.util.ConstructorFunction; import com.hazelcast.internal.util.ContextMutexFactory; import com.hazelcast.internal.util.FutureUtil; import com.hazelcast.internal.util.InvocationUtil; import com.hazelcast.internal.util.MapUtil; import com.hazelcast.internal.util.ServiceLoader; import com.hazelcast.logging.ILogger; import com.hazelcast.spi.impl.InternalCompletableFuture; import com.hazelcast.spi.impl.NodeEngine; import com.hazelcast.spi.impl.NodeEngineImpl; import com.hazelcast.spi.impl.eventservice.EventFilter; import com.hazelcast.spi.impl.eventservice.EventRegistration; import com.hazelcast.spi.impl.eventservice.EventService; import com.hazelcast.spi.impl.operationservice.Operation; import com.hazelcast.spi.merge.SplitBrainMergePolicy; import com.hazelcast.spi.merge.SplitBrainMergePolicyProvider; import com.hazelcast.spi.properties.ClusterProperty; import com.hazelcast.spi.tenantcontrol.TenantControlFactory; import com.hazelcast.wan.impl.WanReplicationService; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import javax.cache.CacheException; import javax.cache.configuration.CacheEntryListenerConfiguration; import javax.cache.event.CacheEntryListener; import java.io.Closeable; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.UUID; import 
java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import static com.hazelcast.cache.impl.AbstractCacheRecordStore.SOURCE_NOT_AVAILABLE; import static com.hazelcast.cache.impl.PreJoinCacheConfig.asCacheConfig; import static com.hazelcast.config.CacheConfigAccessor.getTenantControl; import static com.hazelcast.internal.config.ConfigValidator.checkCacheConfig; import static com.hazelcast.internal.metrics.MetricDescriptorConstants.CACHE_PREFIX; import static com.hazelcast.internal.metrics.impl.ProviderHelper.provide; import static com.hazelcast.internal.util.ConcurrencyUtil.CALLER_RUNS; import static com.hazelcast.internal.util.ExceptionUtil.rethrow; import static com.hazelcast.internal.util.FutureUtil.RETHROW_EVERYTHING; import static com.hazelcast.internal.util.MapUtil.createHashMap; import static com.hazelcast.spi.tenantcontrol.TenantControl.NOOP_TENANT_CONTROL; import static com.hazelcast.spi.tenantcontrol.TenantControlFactory.NOOP_TENANT_CONTROL_FACTORY; import static java.util.Collections.newSetFromMap; import static java.util.Collections.singleton; @SuppressWarnings("checkstyle:classdataabstractioncoupling") public abstract class AbstractCacheService implements ICacheService, PreJoinAwareService, PartitionAwareService, SplitBrainProtectionAwareService, SplitBrainHandlerService, ClusterStateListener { public static final String TENANT_CONTROL_FACTORY = "com.hazelcast.spi.tenantcontrol.TenantControlFactory"; /** * Map from full prefixed cache name to {@link CacheConfig} */ protected final ConcurrentMap<String, CompletableFuture<CacheConfig>> configs = new ConcurrentHashMap<>(); /** * Map from full prefixed cache name to {@link CacheContext} */ protected final ConcurrentMap<String, CacheContext> cacheContexts = new ConcurrentHashMap<>(); /** * Map from full prefixed cache name to {@link CacheStatisticsImpl} */ protected final ConcurrentMap<String, CacheStatisticsImpl> statistics = new 
ConcurrentHashMap<>(); /** * Map from full prefixed cache name to set of {@link Closeable} resources */ protected final ConcurrentMap<String, Set<Closeable>> resources = new ConcurrentHashMap<>(); protected final ConcurrentMap<UUID, Closeable> closeableListeners = new ConcurrentHashMap<>(); protected final ConcurrentMap<String, CacheOperationProvider> operationProviderCache = new ConcurrentHashMap<>(); protected final ConstructorFunction<String, CacheContext> cacheContextsConstructorFunction = name -> new CacheContext(); protected final ConstructorFunction<String, CacheStatisticsImpl> cacheStatisticsConstructorFunction = name -> new CacheStatisticsImpl( Clock.currentTimeMillis(), CacheEntryCountResolver.createEntryCountResolver(getOrCreateCacheContext(name))); protected final ConstructorFunction<String, Set<Closeable>> cacheResourcesConstructorFunction = name -> newSetFromMap(new ConcurrentHashMap<Closeable, Boolean>()); // mutex factory ensures each Set<Closeable> of cache resources is only constructed and inserted in resources map once protected final ContextMutexFactory cacheResourcesMutexFactory = new ContextMutexFactory(); protected ILogger logger; protected NodeEngine nodeEngine; protected CachePartitionSegment[] segments; protected CacheEventHandler cacheEventHandler; protected RingbufferCacheEventJournalImpl eventJournal; protected SplitBrainMergePolicyProvider mergePolicyProvider; protected CacheSplitBrainHandlerService splitBrainHandlerService; protected CacheClearExpiredRecordsTask clearExpiredRecordsTask; protected ExpirationManager expirationManager; @Override public final void init(NodeEngine nodeEngine, Properties properties) { this.nodeEngine = nodeEngine; int partitionCount = nodeEngine.getPartitionService().getPartitionCount(); this.segments = new CachePartitionSegment[partitionCount]; for (int i = 0; i < partitionCount; i++) { segments[i] = newPartitionSegment(i); } this.clearExpiredRecordsTask = new CacheClearExpiredRecordsTask(this.segments, 
nodeEngine); this.expirationManager = new ExpirationManager(this.clearExpiredRecordsTask, nodeEngine); this.cacheEventHandler = new CacheEventHandler(nodeEngine); this.splitBrainHandlerService = new CacheSplitBrainHandlerService(nodeEngine, segments); this.logger = nodeEngine.getLogger(getClass()); this.eventJournal = new RingbufferCacheEventJournalImpl(nodeEngine); this.mergePolicyProvider = nodeEngine.getSplitBrainMergePolicyProvider(); boolean dsMetricsEnabled = nodeEngine.getProperties().getBoolean(ClusterProperty.METRICS_DATASTRUCTURES); postInit(nodeEngine, properties, dsMetricsEnabled); } public SplitBrainMergePolicyProvider getMergePolicyProvider() { return mergePolicyProvider; } public SplitBrainMergePolicy getMergePolicy(String dataStructureName) { CacheConfig cacheConfig = getCacheConfig(dataStructureName); String mergePolicyName = cacheConfig.getMergePolicyConfig().getPolicy(); return mergePolicyProvider.getMergePolicy(mergePolicyName); } public ConcurrentMap<String, CacheConfig> getConfigs() { ConcurrentMap<String, CacheConfig> cacheConfigs = MapUtil.createConcurrentHashMap(configs.size()); for (Map.Entry<String, CompletableFuture<CacheConfig>> config : configs.entrySet()) { cacheConfigs.put(config.getKey(), config.getValue().join()); } return cacheConfigs; } protected void postInit(NodeEngine nodeEngine, Properties properties, boolean metricsEnabled) { if (metricsEnabled) { ((NodeEngineImpl) nodeEngine).getMetricsRegistry().registerDynamicMetricsProvider(this); } } protected abstract CachePartitionSegment newPartitionSegment(int partitionId); protected abstract ICacheRecordStore createNewRecordStore(String cacheNameWithPrefix, int partitionId); @Override public void reset() { reset(false); } private void reset(boolean onShutdown) { for (String objectName : configs.keySet()) { deleteCache(objectName, null, false); } CachePartitionSegment[] partitionSegments = segments; for (CachePartitionSegment partitionSegment : partitionSegments) { if 
(partitionSegment != null) { if (onShutdown) { partitionSegment.shutdown(); } else { partitionSegment.reset(); partitionSegment.init(); } } } for (String objectName : configs.keySet()) { sendInvalidationEvent(objectName, null, SOURCE_NOT_AVAILABLE); } } @Override public void shutdown(boolean terminate) { if (!terminate) { expirationManager.onShutdown(); cacheEventHandler.shutdown(); reset(true); } } @Override @SuppressFBWarnings({"EI_EXPOSE_REP"}) public CachePartitionSegment[] getPartitionSegments() { return segments; } @Override public DistributedObject createDistributedObject(String cacheNameWithPrefix, UUID source, boolean local) { try { /* * In here, cacheNameWithPrefix is the full cache name. * Full cache name contains, Hazelcast prefix, cache name prefix and pure cache name. */ // At first, lookup cache name in the created cache configs. CacheConfig cacheConfig = getCacheConfig(cacheNameWithPrefix); if (cacheConfig == null) { /* * Prefixed cache name contains cache name prefix and pure cache name, but not Hazelcast prefix (`/hz/`). * Cache name prefix is generated by using specified URI and classloader scopes. * This means, if there is no specified URI and classloader, prefixed cache name is pure cache name. * This means, if there is no specified URI and classloader, prefixed cache name is pure cache name. */ // If cache config is not created yet, remove Hazelcast prefix and get prefixed cache name. String cacheName = cacheNameWithPrefix.substring(HazelcastCacheManager.CACHE_MANAGER_PREFIX.length()); // Lookup prefixed cache name in the config. 
cacheConfig = findCacheConfig(cacheName); if (cacheConfig == null) { throw new CacheNotExistsException("Couldn't find cache config with name " + cacheNameWithPrefix); } cacheConfig.setManagerPrefix(HazelcastCacheManager.CACHE_MANAGER_PREFIX); } checkCacheConfig(cacheConfig, mergePolicyProvider); if (putCacheConfigIfAbsent(cacheConfig) == null && !local) { // if the cache config was not previously known, ensure the new cache config // becomes available on all members before the proxy is returned to the caller createCacheConfigOnAllMembers(PreJoinCacheConfig.of(cacheConfig)); } return new CacheProxy(cacheConfig, nodeEngine, this); } catch (Throwable t) { throw rethrow(t); } } @Override public void destroyDistributedObject(String objectName, boolean local) { deleteCache(objectName, null, true); } @Override public void beforeMigration(PartitionMigrationEvent event) { } @Override public void commitMigration(PartitionMigrationEvent event) { if (event.getMigrationEndpoint() == MigrationEndpoint.SOURCE) { clearCachesHavingLesserBackupCountThan(event.getPartitionId(), event.getNewReplicaIndex()); } initPartitionReplica(event.getPartitionId()); } @Override public void rollbackMigration(PartitionMigrationEvent event) { if (event.getMigrationEndpoint() == MigrationEndpoint.DESTINATION) { clearCachesHavingLesserBackupCountThan(event.getPartitionId(), event.getCurrentReplicaIndex()); } initPartitionReplica(event.getPartitionId()); } private void clearCachesHavingLesserBackupCountThan(int partitionId, int thresholdReplicaIndex) { if (thresholdReplicaIndex == -1) { clearPartitionReplica(partitionId); return; } CachePartitionSegment segment = segments[partitionId]; segment.clearHavingLesserBackupCountThan(thresholdReplicaIndex); } private void initPartitionReplica(int partitionId) { segments[partitionId].init(); } private void clearPartitionReplica(int partitionId) { segments[partitionId].reset(); } @Override public ICacheRecordStore getOrCreateRecordStore(String 
cacheNameWithPrefix, int partitionId) { return segments[partitionId].getOrCreateRecordStore(cacheNameWithPrefix); } @Override public ICacheRecordStore getRecordStore(String cacheNameWithPrefix, int partitionId) { return segments[partitionId].getRecordStore(cacheNameWithPrefix); } @Override public CachePartitionSegment getSegment(int partitionId) { return segments[partitionId]; } protected void destroySegments(CacheConfig cacheConfig) { String name = cacheConfig.getNameWithPrefix(); for (CachePartitionSegment segment : segments) { segment.deleteRecordStore(name, true); } } protected void closeSegments(String name) { for (CachePartitionSegment segment : segments) { segment.deleteRecordStore(name, false); } } @Override public void deleteCache(String cacheNameWithPrefix, UUID callerUuid, boolean destroy) { CacheConfig config = deleteCacheConfig(cacheNameWithPrefix); if (config == null) { // Cache is already cleaned up return; } if (destroy) { cacheEventHandler.destroy(cacheNameWithPrefix, SOURCE_NOT_AVAILABLE); destroySegments(config); } else { closeSegments(cacheNameWithPrefix); } WanReplicationService wanService = nodeEngine.getWanReplicationService(); wanService.removeWanEventCounters(ICacheService.SERVICE_NAME, cacheNameWithPrefix); cacheContexts.remove(cacheNameWithPrefix); operationProviderCache.remove(cacheNameWithPrefix); deregisterAllListener(cacheNameWithPrefix); setStatisticsEnabled(config, cacheNameWithPrefix, false); setManagementEnabled(config, cacheNameWithPrefix, false); deleteCacheStat(cacheNameWithPrefix); deleteCacheResources(cacheNameWithPrefix); } @Override public CacheConfig putCacheConfigIfAbsent(CacheConfig config) { // ensure all configs registered in CacheService are not PreJoinCacheConfig's CacheConfig cacheConfig = asCacheConfig(config); CompletableFuture<CacheConfig> future = new CompletableFuture<>(); CompletableFuture<CacheConfig> localConfigFuture = configs.putIfAbsent(cacheConfig.getNameWithPrefix(), future); // if the existing cache 
config future is not yet fully configured, we block here CacheConfig localConfig = localConfigFuture == null ? null : localConfigFuture.join(); if (localConfigFuture == null) { try { if (cacheConfig.isStatisticsEnabled()) { setStatisticsEnabled(cacheConfig, cacheConfig.getNameWithPrefix(), true); } if (cacheConfig.isManagementEnabled()) { setManagementEnabled(cacheConfig, cacheConfig.getNameWithPrefix(), true); } logger.info("Added cache config: " + cacheConfig); additionalCacheConfigSetup(config, false); // now it is safe for others to obtain the new cache config future.complete(cacheConfig); } catch (Throwable e) { configs.remove(cacheConfig.getNameWithPrefix(), future); future.completeExceptionally(e); throw rethrow(e); } } else { additionalCacheConfigSetup(localConfig, true); } return localConfig; } protected void additionalCacheConfigSetup(CacheConfig config, boolean existingConfig) { // overridden in other context } @Override public CacheConfig deleteCacheConfig(String cacheNameWithPrefix) { CompletableFuture<CacheConfig> cacheConfigFuture = configs.remove(cacheNameWithPrefix); CacheConfig cacheConfig = null; if (cacheConfigFuture != null) { // decouple this cache from the tenant // the tenant will unregister it's event listeners so the tenant itself // can be garbage collected cacheConfig = cacheConfigFuture.join(); getTenantControl(cacheConfig).unregister(); logger.info("Removed cache config: " + cacheConfig); } return cacheConfig; } @Override public ExpirationManager getExpirationManager() { return expirationManager; } @Override public CacheStatisticsImpl createCacheStatIfAbsent(String cacheNameWithPrefix) { return ConcurrencyUtil.getOrPutIfAbsent(statistics, cacheNameWithPrefix, cacheStatisticsConstructorFunction); } public CacheContext getCacheContext(String name) { return cacheContexts.get(name); } @Override public CacheContext getOrCreateCacheContext(String cacheNameWithPrefix) { return ConcurrencyUtil.getOrPutIfAbsent(cacheContexts, 
cacheNameWithPrefix, cacheContextsConstructorFunction); } @Override public void deleteCacheStat(String cacheNameWithPrefix) { statistics.remove(cacheNameWithPrefix); } @Override public void setStatisticsEnabled(CacheConfig cacheConfig, String cacheNameWithPrefix, boolean enabled) { cacheConfig = cacheConfig != null ? cacheConfig : getCacheConfig(cacheNameWithPrefix); if (cacheConfig != null) { String cacheManagerName = cacheConfig.getUriString(); cacheConfig.setStatisticsEnabled(enabled); if (enabled) { CacheStatisticsImpl cacheStatistics = createCacheStatIfAbsent(cacheNameWithPrefix); CacheStatisticsMXBeanImpl mxBean = new CacheStatisticsMXBeanImpl(cacheStatistics); MXBeanUtil.registerCacheObject(mxBean, cacheManagerName, cacheConfig.getName(), true); } else { MXBeanUtil.unregisterCacheObject(cacheManagerName, cacheConfig.getName(), true); deleteCacheStat(cacheNameWithPrefix); } } } @Override public void setManagementEnabled(CacheConfig cacheConfig, String cacheNameWithPrefix, boolean enabled) { cacheConfig = cacheConfig != null ? cacheConfig : getCacheConfig(cacheNameWithPrefix); if (cacheConfig != null) { String cacheManagerName = cacheConfig.getUriString(); cacheConfig.setManagementEnabled(enabled); if (enabled) { CacheMXBeanImpl mxBean = new CacheMXBeanImpl(cacheConfig); MXBeanUtil.registerCacheObject(mxBean, cacheManagerName, cacheConfig.getName(), false); } else { MXBeanUtil.unregisterCacheObject(cacheManagerName, cacheConfig.getName(), false); deleteCacheStat(cacheNameWithPrefix); } } } @Override public CacheConfig getCacheConfig(String cacheNameWithPrefix) { CompletableFuture<CacheConfig> future = configs.get(cacheNameWithPrefix); return future == null ? 
null : future.join(); } @Override public CacheConfig findCacheConfig(String simpleName) { if (simpleName == null) { return null; } CacheSimpleConfig cacheSimpleConfig = nodeEngine.getConfig().findCacheConfigOrNull(simpleName); if (cacheSimpleConfig == null) { return null; } try { // Set name explicitly, because found config might have a wildcard name. CacheConfig cacheConfig = new CacheConfig(cacheSimpleConfig).setName(simpleName); setTenantControl(cacheConfig); return cacheConfig; } catch (Exception e) { throw new CacheException(e); } } @Override public void setTenantControl(CacheConfig cacheConfig) { if (!NOOP_TENANT_CONTROL.equals(getTenantControl(cacheConfig))) { // a tenant control has already been explicitly set for the cache config return; } // associate cache config with the current thread's tenant // and add hook so when the tenant is destroyed, so is the cache config TenantControlFactory tenantControlFactory = null; try { tenantControlFactory = ServiceLoader.load(TenantControlFactory.class, TENANT_CONTROL_FACTORY, nodeEngine.getConfigClassLoader()); } catch (Exception e) { if (logger.isFinestEnabled()) { logger.finest("Could not load service provider for TenantControl", e); } } if (tenantControlFactory == null) { tenantControlFactory = NOOP_TENANT_CONTROL_FACTORY; } CacheConfigAccessor.setTenantControl(cacheConfig, tenantControlFactory.saveCurrentTenant( new CacheDestroyEventContext(cacheConfig.getName()))); } @Override public Collection<CacheConfig> getCacheConfigs() { List<CacheConfig> cacheConfigs = new ArrayList<>(configs.size()); for (CompletableFuture<CacheConfig> future : configs.values()) { cacheConfigs.add(future.join()); } return cacheConfigs; } public Object toObject(Object data) { if (data == null) { return null; } if (data instanceof Data) { return nodeEngine.toObject(data); } else { return data; } } public Data toData(Object object) { if (object == null) { return null; } if (object instanceof Data) { return (Data) object; } else { return 
nodeEngine.getSerializationService().toData(object); } } @Override public void publishEvent(CacheEventContext cacheEventContext) { cacheEventHandler.publishEvent(cacheEventContext); } @Override public void publishEvent(String cacheNameWithPrefix, CacheEventSet eventSet, int orderKey) { cacheEventHandler.publishEvent(cacheNameWithPrefix, eventSet, orderKey); } @Override public NodeEngine getNodeEngine() { return nodeEngine; } @Override public void dispatchEvent(Object event, CacheEventListener listener) { listener.handleEvent(event); } @Override public UUID registerLocalListener(String cacheNameWithPrefix, CacheEventListener listener) { EventService eventService = getNodeEngine().getEventService(); EventRegistration registration = eventService .registerLocalListener(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, listener); if (registration == null) { return null; } return updateRegisteredListeners(listener, registration); } @Override public UUID registerLocalListener(String cacheNameWithPrefix, CacheEventListener listener, EventFilter eventFilter) { EventService eventService = getNodeEngine().getEventService(); EventRegistration registration = eventService .registerLocalListener(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, eventFilter, listener); if (registration == null) { return null; } return updateRegisteredListeners(listener, registration); } @Override public CompletableFuture<UUID> registerListenerAsync(String cacheNameWithPrefix, CacheEventListener listener) { EventService eventService = getNodeEngine().getEventService(); return eventService.registerListenerAsync(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, listener) .thenApplyAsync((eventRegistration) -> updateRegisteredListeners(listener, eventRegistration), CALLER_RUNS); } @Override public CompletableFuture<UUID> registerListenerAsync(String cacheNameWithPrefix, CacheEventListener listener, EventFilter eventFilter) { EventService eventService = 
getNodeEngine().getEventService(); return eventService.registerListenerAsync(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, eventFilter, listener) .thenApplyAsync((eventRegistration) -> updateRegisteredListeners(listener, eventRegistration), CALLER_RUNS); } private UUID updateRegisteredListeners(CacheEventListener listener, EventRegistration eventRegistration) { UUID id = eventRegistration.getId(); if (listener instanceof Closeable) { closeableListeners.put(id, (Closeable) listener); } else if (listener instanceof CacheEntryListenerProvider) { CacheEntryListener cacheEntryListener = ((CacheEntryListenerProvider) listener) .getCacheEntryListener(); if (cacheEntryListener instanceof Closeable) { closeableListeners.put(id, (Closeable) cacheEntryListener); } } return id; } @Override public UUID registerListener(String cacheNameWithPrefix, CacheEventListener listener) { EventService eventService = getNodeEngine().getEventService(); EventRegistration registration = eventService .registerListener(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, listener); return updateRegisteredListeners(listener, registration); } @Override public UUID registerListener(String cacheNameWithPrefix, CacheEventListener listener, EventFilter eventFilter) { EventService eventService = getNodeEngine().getEventService(); EventRegistration registration = eventService .registerListener(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, eventFilter, listener); return updateRegisteredListeners(listener, registration); } @Override public CompletableFuture<Boolean> deregisterListenerAsync(String cacheNameWithPrefix, UUID registrationId) { EventService eventService = getNodeEngine().getEventService(); return eventService.deregisterListenerAsync(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, registrationId) .thenApplyAsync(result -> { removeFromLocalResources(registrationId); return result; }, CALLER_RUNS); } private void removeFromLocalResources(UUID registrationId) { 
Closeable listener = closeableListeners.remove(registrationId); if (listener != null) { IOUtil.closeResource(listener); } } @Override public boolean deregisterListener(String cacheNameWithPrefix, UUID registrationId) { EventService eventService = getNodeEngine().getEventService(); if (eventService.deregisterListener(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix, registrationId)) { removeFromLocalResources(registrationId); return true; } return false; } @Override public void deregisterAllListener(String cacheNameWithPrefix) { EventService eventService = getNodeEngine().getEventService(); Collection<EventRegistration> registrations = eventService.getRegistrations(SERVICE_NAME, cacheNameWithPrefix); if (registrations != null) { for (EventRegistration registration : registrations) { removeFromLocalResources(registration.getId()); } } eventService.deregisterAllListeners(AbstractCacheService.SERVICE_NAME, cacheNameWithPrefix); CacheContext cacheContext = cacheContexts.get(cacheNameWithPrefix); if (cacheContext != null) { cacheContext.resetCacheEntryListenerCount(); cacheContext.resetInvalidationListenerCount(); } } @Override public Map<String, LocalCacheStats> getStats() { Map<String, LocalCacheStats> stats = createHashMap(statistics.size()); for (Map.Entry<String, CacheStatisticsImpl> entry : statistics.entrySet()) { stats.put(entry.getKey(), new LocalCacheStatsImpl(entry.getValue())); } return stats; } @Override public CacheOperationProvider getCacheOperationProvider(String cacheNameWithPrefix, InMemoryFormat inMemoryFormat) { if (InMemoryFormat.NATIVE.equals(inMemoryFormat)) { throw new IllegalArgumentException("Native memory is available only in Hazelcast Enterprise." 
+ "Make sure you have Hazelcast Enterprise JARs on your classpath!"); } CacheOperationProvider cacheOperationProvider = operationProviderCache.get(cacheNameWithPrefix); if (cacheOperationProvider != null) { return cacheOperationProvider; } cacheOperationProvider = createOperationProvider(cacheNameWithPrefix, inMemoryFormat); CacheOperationProvider current = operationProviderCache.putIfAbsent(cacheNameWithPrefix, cacheOperationProvider); return current == null ? cacheOperationProvider : current; } protected abstract CacheOperationProvider createOperationProvider(String nameWithPrefix, InMemoryFormat inMemoryFormat); public void addCacheResource(String cacheNameWithPrefix, Closeable resource) { Set<Closeable> cacheResources = ConcurrencyUtil.getOrPutSynchronized( resources, cacheNameWithPrefix, cacheResourcesMutexFactory, cacheResourcesConstructorFunction); cacheResources.add(resource); } protected void deleteCacheResources(String name) { Set<Closeable> cacheResources; try (ContextMutexFactory.Mutex mutex = cacheResourcesMutexFactory.mutexFor(name)) { synchronized (mutex) { cacheResources = resources.remove(name); } } if (cacheResources != null) { for (Closeable resource : cacheResources) { IOUtil.closeResource(resource); } cacheResources.clear(); } } @Override public Operation getPreJoinOperation() { OnJoinCacheOperation preJoinCacheOperation; preJoinCacheOperation = new OnJoinCacheOperation(); for (Map.Entry<String, CompletableFuture<CacheConfig>> cacheConfigEntry : configs.entrySet()) { CacheConfig cacheConfig = new PreJoinCacheConfig(cacheConfigEntry.getValue().join()); preJoinCacheOperation.addCacheConfig(cacheConfig); } return preJoinCacheOperation; } protected void publishCachePartitionLostEvent(String cacheName, int partitionId) { Collection<EventRegistration> registrations = new LinkedList<>(); for (EventRegistration registration : getRegistrations(cacheName)) { if (registration.getFilter() instanceof CachePartitionLostEventFilter) { 
registrations.add(registration); } } if (registrations.isEmpty()) { return; } Member member = nodeEngine.getLocalMember(); CacheEventData eventData = new CachePartitionEventData(cacheName, partitionId, member); EventService eventService = nodeEngine.getEventService(); eventService.publishEvent(SERVICE_NAME, registrations, eventData, partitionId); } Collection<EventRegistration> getRegistrations(String cacheName) { EventService eventService = nodeEngine.getEventService(); return eventService.getRegistrations(SERVICE_NAME, cacheName); } @Override public void onPartitionLost(IPartitionLostEvent partitionLostEvent) { int partitionId = partitionLostEvent.getPartitionId(); for (CacheConfig config : getCacheConfigs()) { final String cacheName = config.getName(); if (config.getTotalBackupCount() <= partitionLostEvent.getLostReplicaIndex()) { publishCachePartitionLostEvent(cacheName, partitionId); } } } public void cacheEntryListenerRegistered(String name, CacheEntryListenerConfiguration cacheEntryListenerConfiguration) { CacheConfig cacheConfig = getCacheConfig(name); if (cacheConfig == null) { throw new IllegalStateException("CacheConfig does not exist for cache " + name); } cacheConfig.addCacheEntryListenerConfiguration(cacheEntryListenerConfiguration); } public void cacheEntryListenerDeregistered(String name, CacheEntryListenerConfiguration cacheEntryListenerConfiguration) { CacheConfig cacheConfig = getCacheConfig(name); if (cacheConfig == null) { throw new IllegalStateException("CacheConfig does not exist for cache " + name); } cacheConfig.removeCacheEntryListenerConfiguration(cacheEntryListenerConfiguration); } /** * Gets the name of the split brain protection associated with specified cache * * @param cacheName name of the cache * @return name of the associated split brain protection * null if there is no associated split brain protection */ @Override public String getSplitBrainProtectionName(String cacheName) { CacheConfig cacheConfig = getCacheConfig(cacheName); 
if (cacheConfig == null) { return null; } return cacheConfig.getSplitBrainProtectionName(); } /** * Sends an invalidation event for given <code>cacheName</code> with specified <code>key</code> * from mentioned source with <code>sourceUuid</code>. * * @param cacheNameWithPrefix the name of the cache that invalidation event is sent for * @param key the {@link Data} represents the invalidation event * @param sourceUuid an ID that represents the source for invalidation event */ @Override public void sendInvalidationEvent(String cacheNameWithPrefix, Data key, UUID sourceUuid) { cacheEventHandler.sendInvalidationEvent(cacheNameWithPrefix, key, sourceUuid); } @Override public Runnable prepareMergeRunnable() { return splitBrainHandlerService.prepareMergeRunnable(); } public CacheEventHandler getCacheEventHandler() { return cacheEventHandler; } @Override public CacheEventJournal getEventJournal() { return eventJournal; } @Override public <K, V> void createCacheConfigOnAllMembers(PreJoinCacheConfig<K, V> cacheConfig) { InternalCompletableFuture future = createCacheConfigOnAllMembersAsync(cacheConfig); FutureUtil.waitForever(singleton(future), RETHROW_EVERYTHING); } public <K, V> InternalCompletableFuture<Object> createCacheConfigOnAllMembersAsync(PreJoinCacheConfig<K, V> cacheConfig) { return InvocationUtil.invokeOnStableClusterSerial(getNodeEngine(), new AddCacheConfigOperationSupplier(cacheConfig), MAX_ADD_CACHE_CONFIG_RETRIES); } @Override public void onClusterStateChange(ClusterState newState) { ExpirationManager expManager = expirationManager; if (expManager != null) { expManager.onClusterStateChange(newState); } } @Override public void provideDynamicMetrics(MetricDescriptor descriptor, MetricsCollectionContext context) { provide(descriptor, context, CACHE_PREFIX, getStats()); } }
apache-2.0
ahmedaljazzar/edx-app-android
OpenEdXMobile/src/main/java/org/edx/mobile/view/my_videos/MyRecentVideosFragment.java
25263
package org.edx.mobile.view.my_videos;

import android.content.res.Configuration;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.FragmentManager;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.ListView;

import com.google.inject.Inject;

import org.edx.mobile.R;
import org.edx.mobile.base.BaseFragment;
import org.edx.mobile.core.IEdxEnvironment;
import org.edx.mobile.interfaces.NetworkSubject;
import org.edx.mobile.interfaces.SectionItemInterface;
import org.edx.mobile.logger.Logger;
import org.edx.mobile.model.api.TranscriptModel;
import org.edx.mobile.model.api.VideoResponseModel;
import org.edx.mobile.model.db.DownloadEntry;
import org.edx.mobile.module.analytics.ISegment;
import org.edx.mobile.module.db.DataCallback;
import org.edx.mobile.module.storage.DownloadCompletedEvent;
import org.edx.mobile.module.storage.DownloadedVideoDeletedEvent;
import org.edx.mobile.player.IPlayerEventCallback;
import org.edx.mobile.player.PlayerFragment;
import org.edx.mobile.services.LastAccessManager;
import org.edx.mobile.task.GetRecentDownloadedVideosTask;
import org.edx.mobile.util.AppConstants;
import org.edx.mobile.util.CheckboxDrawableUtil;
import org.edx.mobile.util.ResourceUtil;
import org.edx.mobile.util.UiUtil;
import org.edx.mobile.view.adapters.MyRecentVideoAdapter;
import org.edx.mobile.view.dialog.DeleteVideoDialogFragment;
import org.edx.mobile.view.dialog.IDialogCallback;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import de.greenrobot.event.EventBus;

/**
 * Fragment that shows the list of recently downloaded videos and an embedded
 * {@link PlayerFragment} to play them. Supports a delete mode (edit/cancel/delete
 * buttons plus a "select all" checkbox in the action bar) and reacts to download
 * lifecycle events posted on the {@link EventBus}.
 * <p>
 * Implements {@link IPlayerEventCallback} to persist playback state
 * (watched/partially-watched, last playback offset) as the player reports events.
 */
public class MyRecentVideosFragment extends BaseFragment implements IPlayerEventCallback {

    // Adapter backing the recent-videos list; also tracks selection state.
    private MyRecentVideoAdapter adapter;
    private ListView videoListView;
    // Non-null only while a video is being played; gates delete-panel visibility.
    private PlayerFragment playerFragment;
    private DeleteVideoDialogFragment deleteDialogFragment;
    // Index of the currently playing entry in the adapter; -1 means "none".
    private int playingVideoIndex = -1;
    // The download entry currently being played (restored across config changes).
    private DownloadEntry videoModel;
    private Button deleteButton;
    // "Select all" checkbox in the action bar. Only non-null while the delete-mode
    // options menu exists (portrait + delete mode); see setSelectAllChecked().
    private MenuItem selectAllMenuItem;
    private final Logger logger = new Logger(getClass().getName());
    // Kept as a field so an in-flight reload can be cancelled in onStop()/on reload.
    private GetRecentDownloadedVideosTask getRecentDownloadedVideosTask;

    @Inject
    LastAccessManager lastAccessManager;

    @Inject
    protected IEdxEnvironment environment;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        environment.getSegment().trackScreenView(ISegment.Screens.MY_VIDEOS_RECENT);
        // The options menu (select-all checkbox) is only shown in portrait.
        setHasOptionsMenu(!isLandscape());
        EventBus.getDefault().register(this);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        View view = inflater.inflate(R.layout.fragment_video_list_with_player_container, null);
        return view;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (savedInstanceState != null) {
            restore(savedInstanceState);
        }
        // Player container is only visible when a player fragment was restored.
        getView().findViewById(R.id.container_player).setVisibility(
                playerFragment == null ? View.GONE : View.VISIBLE);
        videoListView = (ListView) getView().findViewById(R.id.list_video);
        if (videoListView != null) {
            adapter = new MyRecentVideoAdapter(getActivity(), environment) {
                @Override
                protected void onItemClick(SectionItemInterface model, int position) {
                    showPlayer();
                    // initialize index for this model
                    playingVideoIndex = position;
                    videoModel = (DownloadEntry) model;
                    playVideoModel();
                    notifyAdapter();
                }

                @Override
                public void onSelectItem() {
                    int selectedItemsCount = adapter.getSelectedVideoItemsCount();
                    int totalVideos = adapter.getTotalVideoItemsCount();
                    deleteButton.setEnabled(selectedItemsCount > 0);
                    // Keep the action-bar "select all" checkbox in sync (no-op
                    // when the options menu is not present).
                    setSelectAllChecked(selectedItemsCount == totalVideos);
                }
            };
            if (videoModel != null) {
                adapter.setVideoId(videoModel.videoId);
            }
            adapter.setSelectedPosition(playingVideoIndex);
            videoListView.setEmptyView(getView().findViewById(R.id.empty_list_view));
            videoListView.setAdapter(adapter);
            showDeletePanel(getView());
            videoListView.setOnItemClickListener(adapter);
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        showDeletePanel(getView());
        setDeleteMode(false);
    }

    @Override
    public void onResume() {
        super.onResume();
        addToRecentAdapter();
    }

    @Override
    public void onStop() {
        super.onStop();
        // Cancel any in-flight reload so its callback doesn't fire on a stopped UI.
        if (getRecentDownloadedVideosTask != null) {
            getRecentDownloadedVideosTask.cancel(true);
            getRecentDownloadedVideosTask = null;
        }
        hideConfirmDeleteDialog();
        AppConstants.myVideosDeleteMode = false;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (playerFragment != null) {
            ((NetworkSubject) getActivity()).unregisterNetworkObserver(playerFragment);
        }
        EventBus.getDefault().unregister(this);
    }

    @Override
    public void setUserVisibleHint(boolean isVisibleToUser) {
        super.setUserVisibleHint(isVisibleToUser);
        if (playerFragment != null) {
            playerFragment.setUserVisibleHint(isVisibleToUser);
            // Only the visible tab's player may rotate the screen.
            if (isVisibleToUser) {
                playerFragment.unlockOrientation();
            } else {
                playerFragment.lockOrientation();
            }
        }
    }

    @Override
    public void onDestroyOptionsMenu() {
        super.onDestroyOptionsMenu();
        // Drop the stale reference; the menu item is recreated in onCreateOptionsMenu.
        selectAllMenuItem = null;
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // The select-all checkbox only exists while in delete mode.
        if (AppConstants.myVideosDeleteMode) {
            inflater.inflate(R.menu.video_list, menu);
            selectAllMenuItem = menu.findItem(R.id.delete_checkbox);
            setSelectAllChecked(adapter.getSelectedVideoItemsCount() == adapter.getTotalVideoItemsCount());
        }
    }

    /**
     * Updates the checked state and icon of the action-bar "select all" item.
     * Safe to call when the options menu does not exist (no-op in that case).
     */
    private void setSelectAllChecked(boolean isChecked) {
        // FIX: this is invoked from the adapter's onSelectItem() callback, but
        // selectAllMenuItem is null whenever the delete-mode options menu is not
        // present (landscape disables the menu in onCreate, the menu is only
        // inflated while in delete mode, and onDestroyOptionsMenu nulls the
        // reference) — guard to avoid a NullPointerException.
        if (selectAllMenuItem == null) {
            return;
        }
        selectAllMenuItem.setChecked(isChecked);
        selectAllMenuItem.setIcon(CheckboxDrawableUtil.createActionBarDrawable(getActivity(), isChecked));
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.delete_checkbox: {
                // Toggle select-all and mirror the new state onto the adapter.
                selectAllMenuItem.setChecked(!selectAllMenuItem.isChecked());
                if (selectAllMenuItem.isChecked()) {
                    adapter.selectAll();
                } else {
                    adapter.unselectAll();
                }
                notifyAdapter();
                deleteButton.setEnabled(selectAllMenuItem.isChecked());
                return true;
            }
            default: {
                return super.onOptionsItemSelected(item);
            }
        }
    }

    /** Enters or leaves delete mode and refreshes the options menu accordingly. */
    private void setDeleteMode(boolean inDeleteMode) {
        AppConstants.myVideosDeleteMode = inDeleteMode;
        getActivity().supportInvalidateOptionsMenu();
    }

    /**
     * Asynchronously (re)loads the list of recently downloaded videos into the
     * adapter, cancelling any reload already in flight. Also toggles the delete
     * panel based on whether any videos exist, and refreshes the player's
     * next/previous buttons against the new data set.
     */
    private void addToRecentAdapter() {
        if (adapter == null) {
            return;
        }
        if (getRecentDownloadedVideosTask != null) {
            getRecentDownloadedVideosTask.cancel(true);
        }
        logger.debug("MyRecentVideoAdapter reloading");
        // Preserve the highlighted video across the reload.
        final String selectedId = adapter.getVideoId();
        getRecentDownloadedVideosTask = new GetRecentDownloadedVideosTask(getActivity()) {
            @Override
            protected void onSuccess(List<SectionItemInterface> list) throws Exception {
                super.onSuccess(list);
                if (list != null) {
                    adapter.clear();
                    adapter.addAll(list);
                    logger.debug("MyRecentVideoAdapter reloaded.");
                }
                if (adapter.getCount() <= 0) {
                    hideDeletePanel(getView());
                } else {
                    showDeletePanel(getView());
                }
                videoListView.setOnItemClickListener(adapter);
                if (selectedId != null) {
                    adapter.setVideoId(selectedId);
                }
                notifyAdapter();
                // Refresh the previous and next buttons visibility on the video
                // player if a video is playing, based on the new data set.
                if (playerFragment != null) {
                    playerFragment.setNextPreviousListeners(getNextListener(), getPreviousListener());
                }
            }
        };
        getRecentDownloadedVideosTask.execute();
    }

    /**
     * Starts playback of {@link #videoModel} in the embedded player, preferring
     * a local file when one is downloaded and falling back to streaming online.
     * Also records "last accessed" for the video's section and loads transcripts.
     */
    private void playVideoModel() {
        if (playerFragment == null) {
            return;
        }
        if (playerFragment.isPlaying() && videoModel.getVideoId().equals(
                playerFragment.getPlayingVideo().getVideoId())) {
            logger.debug("this video is already being played, skipping play event");
            return;
        }
        VideoResponseModel vrm;
        try {
            vrm = environment.getServiceManager().getVideoById(videoModel.eid, videoModel.videoId);
        } catch (Exception e) {
            logger.error(e);
            return;
        }
        lastAccessManager.setLastAccessed(videoModel.eid, vrm.getSection().getId());
        // reload this model
        environment.getStorage().reloadDownloadEntry(videoModel);
        logger.debug("Resumed= " + playerFragment.isResumed());
        TranscriptModel transcript = null;
        try {
            transcript = environment.getServiceManager().getTranscriptsOfVideo(videoModel.eid, videoModel.videoId);
        } catch (Exception e) {
            logger.error(e);
        }
        String filepath = null;
        // check if file available on local
        if (!TextUtils.isEmpty(videoModel.filepath)) {
            if (videoModel.isDownloaded()) {
                File f = new File(videoModel.filepath);
                if (f.exists()) {
                    // play from local
                    filepath = videoModel.filepath;
                    logger.debug("Playing from local file");
                }
            }
        } else {
            // No path on this entry; another entry for the same URL may have the file.
            DownloadEntry de = (DownloadEntry) environment.getDatabase()
                    .getIVideoModelByVideoUrl(videoModel.url, null);
            if (de != null && de.filepath != null) {
                File f = new File(de.filepath);
                if (f.exists()) {
                    // play from local
                    filepath = de.filepath;
                    logger.debug("Playing from local file for " + "another Download Entry");
                }
            }
        }
        if (filepath == null || filepath.length() <= 0) {
            // not available on local, so play online
            logger.warn("Local file path not available");
            filepath = videoModel.getBestEncodingUrl(getContext());
        }
        playerFragment.play(filepath, videoModel.lastPlayedOffset, videoModel.getTitle(),
                transcript, videoModel);
        adapter.setVideoId(this.videoModel.videoId);
    }

    /** Removes the embedded player (if any) and restores the delete panel. */
    private void destroyPlayer() {
        if (playerFragment == null) {
            return;
        }
        showDeletePanel(getView());
        playerFragment.lockOrientation();
        getChildFragmentManager().beginTransaction().remove(playerFragment).commitAllowingStateLoss();
        View container = getView().findViewById(R.id.container_player);
        container.setVisibility(View.GONE);
        playerFragment = null;
    }

    /** Creates and shows the embedded player fragment if it is not already shown. */
    private void showPlayer() {
        if (playerFragment != null) {
            return;
        }
        hideDeletePanel(getView());
        View container = getView().findViewById(R.id.container_player);
        if (container.getVisibility() != View.VISIBLE) {
            container.setVisibility(View.VISIBLE);
        }
        // add and display player fragment
        playerFragment = new PlayerFragment();
        // set callback for player events
        playerFragment.setCallback(this);
        playerFragment.setNextPreviousListeners(getNextListener(), getPreviousListener());
        FragmentManager childManager = getChildFragmentManager();
        childManager.beginTransaction().add(R.id.container_player, playerFragment).commit();
        // the fragment needs to be added immediately in order to be playable
        childManager.executePendingTransactions();
        ((NetworkSubject) getActivity()).registerNetworkObserver(playerFragment);
        playerFragment.unlockOrientation();
    }

    /** Hides the bottom delete panel and exits delete mode. */
    private void hideDeletePanel(View view) {
        // hide delete button panel at bottom
        view.findViewById(R.id.delete_button_panel).setVisibility(View.GONE);
        // hide checkbox in action bar
        setDeleteMode(false);
        // hide checkboxes in list
        notifyAdapter();
    }

    /**
     * Shows the bottom delete panel (unless the player is showing) and wires up
     * the edit/cancel/delete buttons for delete mode.
     */
    private void showDeletePanel(View view) {
        if (playerFragment != null) {
            return;
        }
        LinearLayout deletePanel = (LinearLayout) view
                .findViewById(R.id.delete_button_panel);
        deletePanel.setVisibility(View.VISIBLE);
        deleteButton = (Button) view
                .findViewById(R.id.delete_btn);
        final Button editButton = (Button) view
                .findViewById(R.id.edit_btn);
        editButton.setVisibility(View.VISIBLE);
        final Button cancelButton = (Button) view
                .findViewById(R.id.cancel_btn);
        if (AppConstants.myVideosDeleteMode) {
            deleteButton.setVisibility(View.VISIBLE);
            cancelButton.setVisibility(View.VISIBLE);
            editButton.setVisibility(View.GONE);
        } else {
            deleteButton.setVisibility(View.GONE);
            cancelButton.setVisibility(View.GONE);
            editButton.setVisibility(View.VISIBLE);
        }
        deleteButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                ArrayList<SectionItemInterface> list = adapter
                        .getSelectedItems();
                if (list != null && list.size() > 0) {
                    showConfirmDeleteDialog(list.size());
                }
            }
        });
        cancelButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Leave delete mode, clearing any selection.
                editButton.setVisibility(View.VISIBLE);
                videoListView.setOnItemClickListener(adapter);
                setDeleteMode(false);
                adapter.unselectAll();
                notifyAdapter();
                deleteButton.setVisibility(View.GONE);
                cancelButton.setVisibility(View.GONE);
            }
        });
        editButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Enter delete mode; list clicks are disabled while selecting.
                editButton.setVisibility(View.GONE);
                setDeleteMode(true);
                notifyAdapter();
                videoListView.setOnItemClickListener(null);
                deleteButton.setEnabled(false);
                deleteButton.setVisibility(View.VISIBLE);
                cancelButton.setVisibility(View.VISIBLE);
            }
        });
    }

    /**
     * Shows a confirmation dialog before deleting the selected videos.
     *
     * @param itemCount number of videos selected, used for the plural message
     */
    protected void showConfirmDeleteDialog(int itemCount) {
        Map<String, String> dialogMap = new HashMap<>();
        dialogMap.put("title", getString(R.string.delete_dialog_title_help));
        dialogMap.put("message_1", getResources().getQuantityString(R.plurals.delete_video_dialog_msg, itemCount));
        dialogMap.put("yes_button", getString(R.string.label_delete));
        dialogMap.put("no_button", getString(R.string.label_cancel));
        deleteDialogFragment = DeleteVideoDialogFragment.newInstance(dialogMap,
                new IDialogCallback() {
                    @Override
                    public void onPositiveClicked() {
                        onConfirmDelete();
                        deleteDialogFragment.dismiss();
                    }

                    @Override
                    public void onNegativeClicked() {
                        deleteDialogFragment.dismiss();
                    }
                });
        deleteDialogFragment.setStyle(DialogFragment.STYLE_NO_TITLE, 0);
        deleteDialogFragment.show(getFragmentManager(), "dialog");
        deleteDialogFragment.setCancelable(false);
    }

    protected void hideConfirmDeleteDialog() {
        if (deleteDialogFragment != null) {
            deleteDialogFragment.dismissAllowingStateLoss();
        }
    }

    // Deleting Downloaded videos on getting confirmation
    private void onConfirmDelete() {
        int deletedVideoCount = 0;
        ArrayList<SectionItemInterface> list = adapter.getSelectedItems();
        if (list != null) {
            for (SectionItemInterface section : list) {
                if (section.isDownload()) {
                    // TODO The removeDownload() triggers a callback upon video deletion.
                    // Would be better if removeDownload() could take a list of videos to delete.
                    DownloadEntry de = (DownloadEntry) section;
                    environment.getStorage().removeDownload(de);
                    deletedVideoCount++;
                    // Although the adapter is refreshed below, we update the adapter here to
                    // prevent a user from being able to click a deleted video while the adapter is
                    // refreshing.
                    adapter.remove(section);
                }
            }
        }
        // Although the videos are removed from the adapter above, the section the videos are in
        // is not available so we refresh the adapter here.
        addToRecentAdapter();
        notifyAdapter();
        videoListView.setOnItemClickListener(adapter);
        setDeleteMode(false);
        if (deletedVideoCount > 0) {
            UiUtil.showMessage(getView(), ResourceUtil.getFormattedStringForQuantity(getResources(),
                    R.plurals.deleted_video, "video_count", deletedVideoCount).toString());
        }
        getView().findViewById(R.id.delete_btn).setVisibility(View.GONE);
        getView().findViewById(R.id.edit_btn).setVisibility(View.VISIBLE);
        getView().findViewById(R.id.cancel_btn).setVisibility(View.GONE);
    }

    @Override
    public void onError() {
    }

    @Override
    public void onPlaybackStarted() {
        // Mark the video as (partially) watched as soon as playback begins.
        environment.getStorage().markVideoPlaying(videoModel, watchedStateCallback);
        notifyAdapter();
    }

    @Override
    public void saveCurrentPlaybackPosition(int offset) {
        DownloadEntry v = videoModel;
        if (v != null) {
            // mark this as partially watches, as playing has started
            environment.getDatabase().updateVideoLastPlayedOffset(v.videoId, offset,
                    setCurrentPositionCallback);
        }
        notifyAdapter();
    }

    @Override
    public void onPlaybackComplete() {
        DownloadEntry v = videoModel;
        if (v != null) {
            if (v.watched == DownloadEntry.WatchedState.PARTIALLY_WATCHED) {
                videoModel.watched = DownloadEntry.WatchedState.WATCHED;
                // mark this as partially watches, as playing has started
                environment.getDatabase().updateVideoWatchedState(v.videoId,
                        DownloadEntry.WatchedState.WATCHED, watchedStateCallback);
            }
        }
        notifyAdapter();
    }

    /** Refreshes the list, keeping the playing row highlighted. */
    private void notifyAdapter() {
        if (adapter == null) {
            return;
        }
        adapter.setSelectedPosition(playingVideoIndex);
        adapter.notifyDataSetChanged();
    }

    /**
     * @return true if current orientation is LANDSCAPE, false otherwise.
     */
    protected boolean isLandscape() {
        return (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        logger.debug("In onSaveInstance State");
        outState.putInt("playingVideoIndex", playingVideoIndex);
        outState.putSerializable("model", videoModel);
        super.onSaveInstanceState(outState);
    }

    /**
     * Container tab will call this method to restore the saved data
     */
    protected void restore(Bundle savedInstanceState) {
        playingVideoIndex = savedInstanceState.getInt("playingVideoIndex", -1);
        videoModel = (DownloadEntry) savedInstanceState.getSerializable("model");
        // Re-attach to a player fragment that survived the configuration change.
        playerFragment = (PlayerFragment) getChildFragmentManager()
                .findFragmentById(R.id.container_player);
        if (playerFragment != null) {
            playerFragment.setCallback(this);
            ((NetworkSubject) getActivity()).registerNetworkObserver(playerFragment);
        }
    }

    // Play the nextVideo if user selects next from Dialog
    private void playNext() {
        playingVideoIndex++;
        // check next playable video entry (skip non-video section rows)
        while (playingVideoIndex < adapter.getCount()) {
            SectionItemInterface i = adapter.getItem(playingVideoIndex);
            if (i != null && i instanceof DownloadEntry) {
                videoModel = (DownloadEntry) i;
                adapter.setSelectedPosition(playingVideoIndex);
                adapter.setVideoId(videoModel.videoId);
                playVideoModel();
                break;
            }
            // try next
            playingVideoIndex++;
        }
    }

    // Check if next video is available in the Video List
    private boolean hasNextVideo(int index) {
        if (index == -1) {
            index = playingVideoIndex;
        }
        for (int i = index + 1; i < adapter.getCount(); i++) {
            SectionItemInterface d = adapter.getItem(i);
            if (d != null && d instanceof DownloadEntry) {
                return true;
            }
        }
        return false;
    }

    private void playPrevious() {
        playingVideoIndex--;
        // check next playable video entry (skip non-video section rows)
        while (playingVideoIndex >= 0) {
            SectionItemInterface i = adapter.getItem(playingVideoIndex);
            if (i != null && i instanceof DownloadEntry) {
                videoModel = (DownloadEntry) i;
                adapter.setSelectedPosition(playingVideoIndex);
                adapter.setVideoId(videoModel.videoId);
                playVideoModel();
                break;
            }
            // try next
            playingVideoIndex--;
        }
    }

    private boolean hasPreviousVideo(int playingIndex) {
        for (int i = playingIndex - 1; i >= 0; i--) {
            SectionItemInterface d = adapter.getItem(i);
            if (d != null && d instanceof DownloadEntry) {
                return true;
            }
        }
        return false;
    }

    /** @return a click listener for the player's "next" button, or null if no next video. */
    private View.OnClickListener getNextListener() {
        if (hasNextVideo(playingVideoIndex)) {
            return new NextClickListener();
        }
        return null;
    }

    /** @return a click listener for the player's "previous" button, or null if none. */
    private View.OnClickListener getPreviousListener() {
        if (hasPreviousVideo(playingVideoIndex)) {
            return new PreviousClickListener();
        }
        return null;
    }

    private class NextClickListener implements OnClickListener {
        @Override
        public void onClick(View v) {
            playNext();
        }
    }

    private class PreviousClickListener implements OnClickListener {
        @Override
        public void onClick(View v) {
            playPrevious();
        }
    }

    // DB callback for watched-state updates; results are only logged.
    private DataCallback<Integer> watchedStateCallback = new DataCallback<Integer>() {
        @Override
        public void onResult(Integer result) {
            logger.debug("Watched State Updated");
        }

        @Override
        public void onFail(Exception ex) {
            logger.error(ex);
        }
    };

    // DB callback for last-played-offset updates; results are only logged.
    private DataCallback<Integer> setCurrentPositionCallback = new DataCallback<Integer>() {
        @Override
        public void onResult(Integer result) {
            logger.debug("Current Playback Position Updated");
        }

        @Override
        public void onFail(Exception ex) {
            logger.error(ex);
        }
    };

    // EventBus: a download finished — refresh the list.
    public void onEventMainThread(DownloadCompletedEvent e) {
        addToRecentAdapter();
    }

    // EventBus: a downloaded video was deleted — tear down the player.
    public void onEventMainThread(DownloadedVideoDeletedEvent e) {
        destroyPlayer();
    }
}
apache-2.0
nafae/developer
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201408/Dimension.java
31338
package com.google.api.ads.dfp.jaxws.v201408; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for Dimension. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="Dimension"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="MONTH_AND_YEAR"/> * &lt;enumeration value="WEEK"/> * &lt;enumeration value="DATE"/> * &lt;enumeration value="DAY"/> * &lt;enumeration value="HOUR"/> * &lt;enumeration value="LINE_ITEM_ID"/> * &lt;enumeration value="LINE_ITEM_NAME"/> * &lt;enumeration value="LINE_ITEM_TYPE"/> * &lt;enumeration value="ORDER_ID"/> * &lt;enumeration value="ORDER_NAME"/> * &lt;enumeration value="ADVERTISER_ID"/> * &lt;enumeration value="ADVERTISER_NAME"/> * &lt;enumeration value="AD_NETWORK_ID"/> * &lt;enumeration value="AD_NETWORK_NAME"/> * &lt;enumeration value="SALESPERSON_ID"/> * &lt;enumeration value="SALESPERSON_NAME"/> * &lt;enumeration value="CREATIVE_ID"/> * &lt;enumeration value="CREATIVE_NAME"/> * &lt;enumeration value="CREATIVE_TYPE"/> * &lt;enumeration value="CUSTOM_EVENT_ID"/> * &lt;enumeration value="CUSTOM_EVENT_NAME"/> * &lt;enumeration value="CUSTOM_EVENT_TYPE"/> * &lt;enumeration value="CREATIVE_SIZE"/> * &lt;enumeration value="AD_UNIT_ID"/> * &lt;enumeration value="AD_UNIT_NAME"/> * &lt;enumeration value="PARENT_AD_UNIT_ID"/> * &lt;enumeration value="PARENT_AD_UNIT_NAME"/> * &lt;enumeration value="PLACEMENT_ID"/> * &lt;enumeration value="PLACEMENT_NAME"/> * &lt;enumeration value="TARGETING"/> * &lt;enumeration value="DEVICE_CATEGORY_ID"/> * &lt;enumeration value="DEVICE_CATEGORY_NAME"/> * &lt;enumeration value="COUNTRY_CRITERIA_ID"/> * &lt;enumeration value="COUNTRY_NAME"/> * &lt;enumeration value="REGION_CRITERIA_ID"/> * &lt;enumeration value="REGION_NAME"/> * &lt;enumeration value="CITY_CRITERIA_ID"/> * &lt;enumeration value="CITY_NAME"/> * &lt;enumeration 
value="METRO_CRITERIA_ID"/> * &lt;enumeration value="METRO_NAME"/> * &lt;enumeration value="POSTAL_CODE_CRITERIA_ID"/> * &lt;enumeration value="POSTAL_CODE"/> * &lt;enumeration value="CUSTOM_TARGETING_VALUE_ID"/> * &lt;enumeration value="CUSTOM_CRITERIA"/> * &lt;enumeration value="ACTIVITY_ID"/> * &lt;enumeration value="ACTIVITY_NAME"/> * &lt;enumeration value="ACTIVITY_GROUP_ID"/> * &lt;enumeration value="ACTIVITY_GROUP_NAME"/> * &lt;enumeration value="CONTENT_ID"/> * &lt;enumeration value="CONTENT_NAME"/> * &lt;enumeration value="CONTENT_BUNDLE_ID"/> * &lt;enumeration value="CONTENT_BUNDLE_NAME"/> * &lt;enumeration value="CONTENT_HIERARCHY"/> * &lt;enumeration value="VIDEO_FALLBACK_POSITION"/> * &lt;enumeration value="POSITION_OF_POD"/> * &lt;enumeration value="POSITION_IN_POD"/> * &lt;enumeration value="GRP_DEMOGRAPHICS"/> * &lt;enumeration value="AD_REQUEST_SIZE"/> * &lt;enumeration value="AD_REQUEST_AD_UNIT_SIZES"/> * &lt;enumeration value="AD_REQUEST_CUSTOM_CRITERIA"/> * &lt;enumeration value="BUYER_ID"/> * &lt;enumeration value="BUYER_NAME"/> * &lt;enumeration value="VERIFIED_ADVERTISER_ID"/> * &lt;enumeration value="VERIFIED_ADVERTISER_NAME"/> * &lt;enumeration value="MASTER_COMPANION_CREATIVE_ID"/> * &lt;enumeration value="MASTER_COMPANION_CREATIVE_NAME"/> * &lt;enumeration value="DISTRIBUTION_PARTNER_ID"/> * &lt;enumeration value="DISTRIBUTION_PARTNER_NAME"/> * &lt;enumeration value="CONTENT_PARTNER_ID"/> * &lt;enumeration value="CONTENT_PARTNER_NAME"/> * &lt;enumeration value="RIGHTS_HOLDER_ID"/> * &lt;enumeration value="RIGHTS_HOLDER_NAME"/> * &lt;enumeration value="PROPOSAL_LINE_ITEM_ID"/> * &lt;enumeration value="PROPOSAL_LINE_ITEM_NAME"/> * &lt;enumeration value="PROPOSAL_ID"/> * &lt;enumeration value="PROPOSAL_NAME"/> * &lt;enumeration value="ALL_SALESPEOPLE_ID"/> * &lt;enumeration value="ALL_SALESPEOPLE_NAME"/> * &lt;enumeration value="PROPOSAL_AGENCY_ID"/> * &lt;enumeration value="PROPOSAL_AGENCY_NAME"/> * &lt;enumeration value="PRODUCT_ID"/> * 
&lt;enumeration value="PRODUCT_NAME"/> * &lt;enumeration value="PRODUCT_TEMPLATE_ID"/> * &lt;enumeration value="PRODUCT_TEMPLATE_NAME"/> * &lt;enumeration value="RATE_CARD_ID"/> * &lt;enumeration value="RATE_CARD_NAME"/> * &lt;enumeration value="WORKFLOW_ID"/> * &lt;enumeration value="WORKFLOW_NAME"/> * &lt;enumeration value="AUDIENCE_SEGMENT_ID"/> * &lt;enumeration value="AUDIENCE_SEGMENT_NAME"/> * &lt;enumeration value="AUDIENCE_SEGMENT_DATA_PROVIDER_NAME"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "Dimension") @XmlEnum public enum Dimension { /** * * Breaks down reporting data by month and year in the network time zone. Can * be used to filter on month using ISO 4601 format 'YYYY-MM'. * * */ MONTH_AND_YEAR, /** * * Breaks down reporting data by week of the year in the network time zone. * Cannot be used for filtering. * * */ WEEK, /** * * Breaks down reporting data by date in the network time zone. Can be used to * filter by date using ISO 8601's format 'YYYY-MM-DD'". * * */ DATE, /** * * Breaks down reporting data by day of the week in the network time zone. Can * be used to filter by day of the week using the index of the day (from 1 for * Monday is 1 to 7 for Sunday). * * */ DAY, /** * * Breaks down reporting data by hour of the day in the network time zone. Can * be used to filter by hour of the day (from 0 to 23). * * */ HOUR, /** * * Breaks down reporting data by {@link LineItem#id}. Can be used to * filter by {@link LineItem#id}. * * */ LINE_ITEM_ID, /** * * Breaks down reporting data by line item. {@link LineItem#name} and * {@link LineItem#id} are automatically included as columns in the report. * Can be used to filter by {@link LineItem#name}. * * */ LINE_ITEM_NAME, /** * * Breaks down reporting data by {@link LineItem#lineItemType}. Can be used * to filter by line item type using {@link LineItemType} enumeration names. * * */ LINE_ITEM_TYPE, /** * * Breaks down reporting data by {@link Order#id}. 
Can be used to filter by * {@link Order#id}. * * */ ORDER_ID, /** * * Breaks down reporting data by order. {@link Order#name} and * {@link Order#id} are automatically included as columns in the report. Can * be used to filter by {@link Order#name}. * * */ ORDER_NAME, /** * * Breaks down reporting data by advertising company {@link Company#id}. Can * be used to filter by {@link Company#id}. * * */ ADVERTISER_ID, /** * * Breaks down reporting data by advertising company. {@link Company#name} and * {@link Company#id} are automatically included as columns in the report. * Can be used to filter by {@link Company#name}. * * */ ADVERTISER_NAME, /** * * The network that provided the ad for SDK ad mediation. * * <p> * If selected for a report, that report will include only SDK mediation ads and will not contain * non-SDK mediation ads. * </p> * * <p> * SDK mediation ads are ads for mobile devices. They have a list of ad networks which can provide * ads to serve. Not every ad network will have an ad to serve so the device will try each network * one-by-one until it finds an ad network with an ad to serve. The ad network that ends up * serving the ad will appear here. Note that this id does not correlate to anything in the * companies table and is not the same id as is served by {@link #ADVERTISER_ID}. * </p> * * */ AD_NETWORK_ID, /** * * The name of the network defined in {@link #AD_NETWORK_ID}. * * */ AD_NETWORK_NAME, /** * * Breaks down reporting data by salesperson {@link User#id}. Can be used to * filter by {@link User#id}. * * */ SALESPERSON_ID, /** * * Breaks down reporting data by salesperson. {@link User#name} and * {@link User#id} of the salesperson are automatically included as columns in * the report. Can be used to filter by {@link User#name}. * * */ SALESPERSON_NAME, /** * * Breaks down reporting data by {@link Creative#id} or creative set id * (master's {@link Creative#id}) if the creative is part of a creative set. 
* Can be used to filter by {@link Creative#id}. * * */ CREATIVE_ID, /** * * Breaks down reporting data by creative. {@link Creative#name} and * {@link Creative#id} are automatically included as columns in the report. * Can be used to filter by {@link Creative#name}. * * */ CREATIVE_NAME, /** * * Breaks down reporting data by creative type. * * */ CREATIVE_TYPE, /** * * Breaks down reporting data by custom event ID. * * */ CUSTOM_EVENT_ID, /** * * Breaks down reporting data by custom event name. * * */ CUSTOM_EVENT_NAME, /** * * Breaks down reporting data by custom event type (timer/exit/counter). * * */ CUSTOM_EVENT_TYPE, /** * * Breaks down reporting data by {@link Creative#size}. Cannot be used for * filtering. * * */ CREATIVE_SIZE, /** * * Breaks down reporting data by {@link AdUnit#id}. Can be used to filter by * {@link AdUnit#id}. {@link #AD_UNIT_NAME}, i.e. {@link AdUnit#name}, is * automatically included as a dimension in the report. * * */ AD_UNIT_ID, /** * * Breaks down reporting data by ad unit. {@link AdUnit#name} and * {@link AdUnit#id} are automatically included as columns in the report. Can * be used to filter by {@link AdUnit#name}. * * */ AD_UNIT_NAME, /** * * Used to filter on all the descendants of an ad unit by {@link AdUnit#id}. * Not available as a dimension to report on. * * */ PARENT_AD_UNIT_ID, /** * * Used to filter on all the descendants of an ad unit by {@link AdUnit#name}. * Not available as a dimension to report on. * * */ PARENT_AD_UNIT_NAME, /** * * Breaks down reporting data by {@link Placement#id}. Can be used to filter * by {@link Placement#id}. * * */ PLACEMENT_ID, /** * * Breaks down reporting data by placement. {@link Placement#name} and * {@link Placement#id} are automatically included as columns in the report. * Can be used to filter by {@link Placement#name}. * * */ PLACEMENT_NAME, /** * * Breaks down reporting data by criteria predefined by DoubleClick For * Publishers like the operating system, browser etc. 
Cannot be used for * filtering. * * */ TARGETING, /** * * The ID of the device category to which an ad is being targeted. * * Can be used to filter by device category ID. * * */ DEVICE_CATEGORY_ID, /** * * The category of device (smartphone, feature phone, tablet, or desktop) to which an ad is being * targeted. * * Can be used to filter by device category name. * * */ DEVICE_CATEGORY_NAME, /** * * Breaks down reporting data by country criteria ID. Can be used to filter by * country criteria ID. * * */ COUNTRY_CRITERIA_ID, /** * * Breaks down reporting data by country name. The country name and the * country criteria ID are automatically included as columns in the report. * Can be used to filter by country name using the US English name. * * */ COUNTRY_NAME, /** * * Breaks down reporting data by region criteria ID. Can be used to filter by * region criteria ID. * * */ REGION_CRITERIA_ID, /** * * Breaks down reporting data by region name. The region name and the region * criteria ID are automatically included as columns in the report. Can be * used to filter by region name using the US English name. * * */ REGION_NAME, /** * * Breaks down reporting data by city criteria ID. Can be used to filter by * city criteria ID. * * */ CITY_CRITERIA_ID, /** * * Breaks down reporting data by city name. The city name and the city * criteria ID are automatically included as columns in the report. Can be * used to filter by city name using the US English name. * * */ CITY_NAME, /** * * Breaks down reporting data by metro criteria ID. Can be used to filter by * metro criteria ID. * * */ METRO_CRITERIA_ID, /** * * Breaks down reporting data by metro name. The metro name and the metro * criteria ID are automatically included as columns in the report. Can be * used to filter by metro name using the US English name. * * */ METRO_NAME, /** * * Breaks down reporting data by postal code criteria ID. Can be used to * filter by postal code criteria ID. 
* * */ POSTAL_CODE_CRITERIA_ID, /** * * Breaks down reporting data by postal code. The postal code and the postal * code criteria ID are automatically included as columns in the report. Can * be used to filter by postal code. * * */ POSTAL_CODE, /** * * Breaks down reporting data by {@link CustomTargetingValue#id}. Can be used * to filter by {@link CustomTargetingValue#id}. * * */ CUSTOM_TARGETING_VALUE_ID, /** * * Breaks down reporting data by custom criteria. The {@link CustomTargetingValue} is * displayed in the form: * <ul> * <li> * car=honda when value match type is * {@link CustomTargetingValue.MatchType#EXACT} * </li> * <li> * car~honda when value match type is * {@link CustomTargetingValue.MatchType#BROAD} * </li> * <li> * car=*honda when value match type is * {@link CustomTargetingValue.MatchType#PREFIX} * </li> * <li> * car~*honda when value match type is * {@link CustomTargetingValue.MatchType#BROAD_PREFIX} * </li> * </ul> * {@link #CUSTOM_TARGETING_VALUE_ID}, i.e. {@link CustomTargetingValue#id} is * automatically included as a column in the report. * Cannot be used for filtering; use {@link #CUSTOM_TARGETING_VALUE_ID} instead. * <p> * When using this {@code Dimension}, metrics for freeform key values are only * reported on when they are registered with {@code CustomTargetingService}. * * */ CUSTOM_CRITERIA, /** * * Breaks down reporting data by activity ID. Can be used to filter by * activity ID. * * */ ACTIVITY_ID, /** * * Breaks down reporting data by activity. The activity name and the activity * ID are automatically included as columns in the report. Can be used to * filter by activity name. * * */ ACTIVITY_NAME, /** * * Breaks down reporting data by activity group ID. Can be used to filter by * activity group ID. * * */ ACTIVITY_GROUP_ID, /** * * Breaks down reporting data by activity group. The activity group name and * the activity group ID are automatically included as columns in the report. * Can be used to filter by activity group name. 
* * */ ACTIVITY_GROUP_NAME, /** * * Breaks down reporting data by {@link Content#id}. Can be used to filter by * {@link Content#id}. * * */ CONTENT_ID, /** * * Breaks down reporting data by content. {@link Content#name} and * {@link Content#id} are automatically included as columns in the report. Can * be used to filter by {@link Content#name}. * * */ CONTENT_NAME, /** * * Breaks down reporting data by {@link ContentBundle#id}. Can be used to filter * by {@link ContentBundle#id}. * * */ CONTENT_BUNDLE_ID, /** * * Breaks down reporting data by content bundle. {@link ContentBundle#name} and * {@link ContentBundle#id} are automatically included as columns in the * report. Can be used to filter by {@link ContentBundle#name}. * * */ CONTENT_BUNDLE_NAME, /** * * Breaks down reporting data by the content hierarchy. To use this dimension, a list of custom * targeting key IDs must be specified in * {@link ReportQuery#contentMetadataKeyHierarchyCustomTargetingKeyIds}. * <p> * This dimension can be used as a filter in the {@link Statement} in PQL syntax: * CONTENT_HIERARCHY_CUSTOM_TARGETING_KEY[contentMetadataKeyHierarchyCustomTargetingKeyId]_ID = * {@link CustomTargetingValue#id custom targeting value ID} * <p> * For example: WHERE CONTENT_HIERARCHY_CUSTOM_TARGETING_KEY[4242]_ID = 53423 * * */ CONTENT_HIERARCHY, /** * * Breaks down reporting data by the fallback position of the video ad, i.e., * {@code NON_FALLBACK}, {@code FALLBACK_POSITION_1}, {@code FALLBACK_POSITION_2}, etc. Can be * used for filtering. * * */ VIDEO_FALLBACK_POSITION, /** * * Breaks down reporting data by the position of the video ad within the video stream, i.e., * {@code UNKNOWN_POSITION}, {@code PREROLL}, {@code POSTROLL}, {@code UNKNOWN_MIDROLL}, * {@code MIDROLL_1}, {@code MIDROLL_2}, etc. {@code UNKNOWN_MIDROLL} represents a midroll, but * which specific midroll is unknown. Can be used for filtering. 
* * */ POSITION_OF_POD, /** * * Breaks down reporting data by the position of the video ad within the pod, i.e., * {@code UNKNOWN_POSITION}, {@code POSITION_1}, {@code POSITION_2}, etc. * Can be used for filtering. * * */ POSITION_IN_POD, /** * * Breaks down reporting data by gender and age group, i.e., MALE_13_TO_17, MALE_18_TO_24, * MALE_25_TO_34, MALE_35_TO_44, MALE_45_TO_54, MALE_55_TO_64, MALE_65_PLUS, FEMALE_13_TO_17, * FEMALE_18_TO_24, FEMALE_25_TO_34, FEMALE_35_TO_44, FEMALE_45_TO_54, FEMALE_55_TO_64, * FEMALE_65_PLUS, UNKNOWN_0_TO_17 and UNKNOWN. * Whenever this dimension is selected, {@link #COUNTRY_NAME} must be selected. * * <p> * This dimension is supported only for GRP columns. * * */ GRP_DEMOGRAPHICS, /** * * Size of the creative requested for an ad. * * */ AD_REQUEST_SIZE, /** * * Breaks down reporting data by the ad unit sizes specified in ad requests. * * <p>Formatted as comma separated values, e.g. "300x250,300x250v,300x60". * * <p>This dimension is supported only for sell-through columns. * * */ AD_REQUEST_AD_UNIT_SIZES, /** * * Breaks down reporting data by the custom criteria specified in ad requests. * * <p>Formatted as comma separated * {@link CustomTargetingKey key}-{@link CustomTargetingValue values}, where a key-value is * formatted as {@code key=value_1,...,value_n}. * * <p>This dimension is supported only for sell-through columns. * * */ AD_REQUEST_CUSTOM_CRITERIA, /** * * The unique identifier used for an ad network that is associated with the * company that the ad is served for. * * */ BUYER_ID, /** * * The name of the ad network that is associated with the company that the ad is served for. * * */ BUYER_NAME, /** * * ID of the advertiser that filled the ad either directly (through DFP) or indirectly via * Google Ad Exchange or another ad network or exchange. 
* * */ VERIFIED_ADVERTISER_ID, /** * * Name of the advertiser that filled the ad either directly (through DFP) or indirectly * via Google Ad Exchange or another ad network or exchange. * * */ VERIFIED_ADVERTISER_NAME, /** * * Breaks down reporting data by {@link Creative#id}. This includes regular creatives, * and master and companions in case of creative sets. * * */ MASTER_COMPANION_CREATIVE_ID, /** * * Breaks down reporting data by creative. This includes regular creatives, * and master and companions in case of creative sets. * * */ MASTER_COMPANION_CREATIVE_NAME, /** * * Breaks down reporting data by ID of the distribution partner. * * */ DISTRIBUTION_PARTNER_ID, /** * * Breaks down reporting data by name of the distribution partner. * * */ DISTRIBUTION_PARTNER_NAME, /** * * Breaks down reporting data by ID of the content partner. * * */ CONTENT_PARTNER_ID, /** * * Breaks down reporting data by name of the content partner. * * */ CONTENT_PARTNER_NAME, /** * * Breaks down reporting data by ID of the rights holder. * * */ RIGHTS_HOLDER_ID, /** * * Breaks down reporting data by name of the rights holder. * * */ RIGHTS_HOLDER_NAME, /** * * Breaks down reporting data by {@link ProposalLineItem#id}. Can be used to filter by * {@link ProposalLineItem#id}. * * */ PROPOSAL_LINE_ITEM_ID, /** * * Breaks down reporting data by {@link ProposalLineItem#name}. Can be used to filter by * {@link ProposalLineItem#name}. * * */ PROPOSAL_LINE_ITEM_NAME, /** * * Breaks down reporting data by {@link Proposal#id}. Can be used to filter by * {@link Proposal#id}. * * */ PROPOSAL_ID, /** * * Breaks down reporting data by {@link Proposal#name}. Can be used to filter by * {@link Proposal#name}. * * */ PROPOSAL_NAME, /** * * Breaks down reporting data by salesperson {@link User#id}, including both salesperson and * secondary salespeople. Can be used to filter by all salespeople {@link User#id}. 
* * */ ALL_SALESPEOPLE_ID, /** * * Breaks down reporting data by salesperson {@link User#name}, including both salesperson and * secondary salespeople. Can be used to filter by all salespeople {@link User#name}. * * */ ALL_SALESPEOPLE_NAME, /** * * Breaks down reporting data by proposal agency {@link Company#id}. Can be used to filter by * proposal agency {@link Company#id}. * * */ PROPOSAL_AGENCY_ID, /** * * Breaks down reporting data by proposal agency {@link Company#name}. Can be used to filter by * proposal agency {@link Company#name}. * * */ PROPOSAL_AGENCY_NAME, /** * * Breaks down reporting data by {@link Product#id}. Can be used to filter by {@link Product#id}. * * */ PRODUCT_ID, /** * * Breaks down reporting data by {@link Product#name}. * * */ PRODUCT_NAME, /** * * Breaks down reporting data by {@link ProductTemplate#id}. Can be used to filter by * {@link ProductTemplate#id}. * * */ PRODUCT_TEMPLATE_ID, /** * * Breaks down reporting data by {@link ProductTemplate#name}. Can be used to filter by * {@link ProductTemplate#name}. * * */ PRODUCT_TEMPLATE_NAME, /** * * Breaks down reporting data by {@link RateCard#id}. Can be used to filter by * {@link RateCard#id}. * * */ RATE_CARD_ID, /** * * Breaks down reporting data by {@link RateCard#name}. Can be used to filter by * {@link RateCard#name}. * * */ RATE_CARD_NAME, /** * * Used to filter by {@link Workflow#id}. Not available as a dimension to report on. * * */ WORKFLOW_ID, /** * * Used to filter by {@link Workflow#name}. Not available as a dimension to report on. * * */ WORKFLOW_NAME, /** * * Breaks down reporting data by billable audience segment ID. * * */ AUDIENCE_SEGMENT_ID, /** * * Breaks down reporting data by billable audience segment name. * * */ AUDIENCE_SEGMENT_NAME, /** * * Breaks down reporting data by audience segment data provider name. * * */ AUDIENCE_SEGMENT_DATA_PROVIDER_NAME; public String value() { return name(); } public static Dimension fromValue(String v) { return valueOf(v); } }
apache-2.0
jeche21/SaleWeb
src/main/java/es/sidelab/SaleWeb/SecurityConfiguration.java
1805
package es.sidelab.SaleWeb; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; @Configuration @EnableGlobalMethodSecurity(securedEnabled = true) public class SecurityConfiguration extends WebSecurityConfigurerAdapter { @Autowired public UsuarioRepositoryAuthenticationProvider authenticationProvider; @Override protected void configure(HttpSecurity http) throws Exception{ //Paginas Publicas http.authorizeRequests().antMatchers("/").permitAll(); http.authorizeRequests().antMatchers("/loggin").permitAll(); http.authorizeRequests().antMatchers("/registrar_usuario").permitAll(); http.authorizeRequests().antMatchers("/usuario/nuevo").permitAll(); //Paginas Privadas http.authorizeRequests().anyRequest().authenticated(); //Loggin http.formLogin().loginPage("/"); http.formLogin().usernameParameter("email"); http.formLogin().passwordParameter("contraseña"); http.formLogin().defaultSuccessUrl("/tienda"); http.formLogin().failureUrl("/loggin"); //Logout http.logout().logoutUrl("/logout"); http.logout().logoutSuccessUrl("/"); } //Si queremos poner los administradores @Override protected void configure(AuthenticationManagerBuilder auth) throws Exception { auth.authenticationProvider(authenticationProvider); } }
apache-2.0
jpaulm/javafbp
src/main/java/com/jpaulmorrison/fbp/resourcekit/examples/components/GenerateFixedSizeArray.java
2680
/* * JavaFBP - A Java Implementation of Flow-Based Programming (FBP) * Copyright (C) 2009, 2016 J. Paul Morrison * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 3.0 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, see the GNU Library General Public License v3 * at https://www.gnu.org/licenses/lgpl-3.0.en.html for more details. */ package com.jpaulmorrison.fbp.resourcekit.examples.components; import com.jpaulmorrison.fbp.core.engine.Component; import com.jpaulmorrison.fbp.core.engine.ComponentDescription; import com.jpaulmorrison.fbp.core.engine.InPort; import com.jpaulmorrison.fbp.core.engine.InputPort; import com.jpaulmorrison.fbp.core.engine.OutPort; import com.jpaulmorrison.fbp.core.engine.OutputPort; import com.jpaulmorrison.fbp.core.engine.Packet; /** * Component to generate stream of 'n' packets to each element of an array output port, * where 'n' is specified in an InitializationConnection. 
*/ @ComponentDescription("Generates stream of packets under control of a counter") @OutPort(value = "OUT", fixedSize = true, description = "Generated stream", type = String.class, arrayPort = true /*, optional = true */) @InPort(value = "COUNT", description = "Count of packets to be generated", type = String.class) public class GenerateFixedSizeArray extends Component { OutputPort outPortArray[]; InputPort count; @Override protected void openPorts() { outPortArray = openOutputArray("OUT", 3); count = openInput("COUNT"); } @Override protected void execute() { Packet ctp = count.receive(); if (ctp == null) { return; } count.close(); String cti = (String) ctp.getContent(); cti = cti.trim(); int ct = 0; try { ct = Integer.parseInt(cti); } catch (NumberFormatException e) { e.printStackTrace(); } drop(ctp); int no = outPortArray.length; for (int k = 0; k < no; k++) { // int k2 = k; for (int i = ct; i > 0; i--) { String s = String.format("%1$06d", i) + "abcd"; Packet p = create(s); // if (outPortArray[k].isConnected()) { outPortArray[k].send(p); // } else { // drop(p); // } } } } }
artistic-2.0
Snowy1013/BabyCare
app/src/main/java/com/snowy/babycare/bean/Result.java
662
package com.snowy.babycare.bean;

import java.util.Map;
import java.util.Objects;

/**
 * API response wrapper: pairs a status {@code code} with the returned
 * {@code Member} payload.
 *
 * Created by snowy on 16/3/3.
 */
public class Result {

    private Member result;  // payload returned by the server
    private int code;       // status code accompanying the payload

    public Result() {
    }

    public Member getResult() {
        return result;
    }

    public int getCode() {
        return code;
    }

    public void setResult(Member result) {
        this.result = result;
    }

    public void setCode(int code) {
        this.code = code;
    }

    @Override
    public String toString() {
        return "Result{" + "result=" + result + ", code=" + code + '}';
    }
}
artistic-2.0
glacier0315/study
activemq-example/hello-world/src/main/java/com/glacier/bean/Student.java
1709
package com.glacier.bean;

import java.io.Serializable;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Serializable student bean exchanged over the message queue.
 *
 * @author glacier
 * @version v1.0.0
 * @Date 2017-09-24 20:29:59
 */
public class Student implements Serializable {

    private static final long serialVersionUID = 6598540986256127247L;

    private String name;              // student name
    private int sex;                  // sex flag (encoding defined by caller — TODO confirm)
    private Date birthDay;            // date of birth
    private Map<String, String> map;  // arbitrary key/value attributes
    private Room room;                // associated room
    private List<String> list;        // arbitrary string payload

    /** Exposes the serialization version for callers that log or compare it. */
    public static long getSerialVersionUID() {
        return serialVersionUID;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getSex() {
        return sex;
    }

    public void setSex(int sex) {
        this.sex = sex;
    }

    public Date getBirthDay() {
        return birthDay;
    }

    public void setBirthDay(Date birthDay) {
        this.birthDay = birthDay;
    }

    public Map<String, String> getMap() {
        return map;
    }

    public void setMap(Map<String, String> map) {
        this.map = map;
    }

    public Room getRoom() {
        return room;
    }

    public void setRoom(Room room) {
        this.room = room;
    }

    public List<String> getList() {
        return list;
    }

    public void setList(List<String> list) {
        this.list = list;
    }

    @Override
    public String toString() {
        return "Student{" + "name='" + name + '\'' + ", sex=" + sex + ", birthDay=" + birthDay + ", map=" + map + ", room=" + room + ", list=" + list + '}';
    }
}
artistic-2.0
javafunk/funk
funk-core/src/test/java/org/javafunk/funk/EagerlyFilterRejectPartitionTest.java
5559
/*
 * Copyright (C) 2011-Present Funk committers.
 * All rights reserved.
 *
 * The software in this package is published under the terms of the BSD
 * style license a copy of which has been included with this distribution in
 * the LICENSE.txt file.
 */
package org.javafunk.funk;

import org.javafunk.funk.datastructures.tuples.Pair;
import org.javafunk.funk.functors.Predicate;
import org.junit.Test;

import java.util.Collection;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.javafunk.funk.Literals.collectionWith;
import static org.javafunk.funk.Literals.iterableWith;
import static org.javafunk.matchbox.Matchers.hasOnlyItemsInOrder;

/**
 * Tests for {@code Eagerly.filter}, {@code Eagerly.reject} and
 * {@code Eagerly.partition}: the happy paths (matching / non-matching /
 * split results preserve input order) and the null-argument contracts
 * (a null iterable or predicate raises {@link NullPointerException}).
 */
public class EagerlyFilterRejectPartitionTest {
    @Test
    public void shouldOnlyReturnThoseElementsMatchingTheSuppliedPredicate() {
        // Given
        Iterable<Integer> inputs = iterableWith(1, 2, 3, 4, 5, 6);
        Collection<Integer> expectedOutput = collectionWith(2, 4, 6);

        // When
        Collection<Integer> actualOutput = Eagerly.filter(inputs, new Predicate<Integer>() {
            @Override
            public boolean evaluate(Integer item) {
                return isEven(item);
            }

            private boolean isEven(Integer item) {
                return item % 2 == 0;
            }
        });

        // Then
        assertThat(actualOutput, hasOnlyItemsInOrder(expectedOutput));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerExceptionIfPredicateSuppliedToFilterIsNull() throws Exception {
        // Given
        Iterable<Integer> inputs = iterableWith(1, 2, 3, 4, 5, 6);
        Predicate<? super Integer> predicate = null;

        // When
        Eagerly.filter(inputs, predicate);

        // Then a NullPointerException is thrown.
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowANullPointerExceptionIfIterablePassedToFilterIsNull() throws Exception {
        // Given
        Iterable<Integer> inputs = null;
        Predicate<Integer> predicate = Predicates.alwaysTrue();

        // When
        Eagerly.filter(inputs, predicate);

        // Then a NullPointerException is thrown
    }

    @Test
    public void shouldOnlyReturnThoseElementsThatDoNotMatchTheSuppliedPredicate() {
        // Given
        Iterable<Integer> inputs = iterableWith(1, 2, 3, 4, 5, 6);
        Collection<Integer> expectedOutput = collectionWith(1, 3, 5);

        // When
        Collection<Integer> actualOutput = Eagerly.reject(inputs, new Predicate<Integer>() {
            @Override
            public boolean evaluate(Integer item) {
                return isEven(item);
            }

            private boolean isEven(Integer item) {
                return item % 2 == 0;
            }
        });

        // Then
        assertThat(actualOutput, hasOnlyItemsInOrder(expectedOutput));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerExceptionIfPredicateSuppliedToRejectIsNull() throws Exception {
        // Given
        Iterable<Integer> inputs = iterableWith(1, 2, 3, 4, 5, 6);
        Predicate<? super Integer> predicate = null;

        // When
        Eagerly.reject(inputs, predicate);

        // Then a NullPointerException is thrown.
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowANullPointerExceptionIfIterablePassedToRejectIsNull() throws Exception {
        // Given
        Iterable<Integer> inputs = null;
        Predicate<Integer> predicate = Predicates.alwaysTrue();

        // When
        Eagerly.reject(inputs, predicate);

        // Then a NullPointerException is thrown
    }

    @Test
    public void shouldReturnTheMatchingElementsFirstAndTheNonMatchingElementsSecond() {
        // Given
        Iterable<String> input = iterableWith("a", "b", "c", "d", "e", "f", "g", "h");
        Collection<String> expectedMatchingItems = collectionWith("a", "b", "c", "d");
        Collection<String> expectedNonMatchingItems = collectionWith("e", "f", "g", "h");

        // When
        Pair<Collection<String>, Collection<String>> partitionResults = Eagerly.partition(input,
                new Predicate<String>() {
                    public boolean evaluate(String item) {
                        return item.compareTo("e") < 0;
                    }
                });

        // Then
        Collection<String> actualMatchingItems = partitionResults.getFirst();
        Collection<String> actualNonMatchingItems = partitionResults.getSecond();

        assertThat(actualMatchingItems, hasOnlyItemsInOrder(expectedMatchingItems));
        assertThat(actualNonMatchingItems, hasOnlyItemsInOrder(expectedNonMatchingItems));
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowNullPointerExceptionIfPredicateSuppliedToPartitionIsNull() throws Exception {
        // Given
        Iterable<String> input = iterableWith("a", "b", "c", "d", "e", "f", "g", "h");
        Predicate<? super String> predicate = null;

        // When
        Eagerly.partition(input, predicate);

        // Then a NullPointerException is thrown.
    }

    @Test(expected = NullPointerException.class)
    public void shouldThrowANullPointerExceptionIfIterablePassedToPartitionIsNull() throws Exception {
        // Given
        Iterable<String> input = null;
        Predicate<Object> predicate = Predicates.alwaysTrue();

        // When
        Eagerly.partition(input, predicate);

        // Then a NullPointerException is thrown
    }
}
bsd-2-clause
aparo/elasticsearch-cookbook-third-edition
chapter_17/analysis_plugin/src/main/java/org/elasticsearch/plugin/analysis/AnalysisPlugin.java
865
package org.elasticsearch.plugin.analysis; import org.apache.lucene.analysis.Analyzer; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CustomEnglishAnalyzerProvider; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.plugins.Plugin; import java.util.HashMap; import java.util.Map; public class AnalysisPlugin extends Plugin implements org.elasticsearch.plugins.AnalysisPlugin { @Override public Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() { Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> analyzers = new HashMap(); analyzers.put(CustomEnglishAnalyzerProvider.NAME, CustomEnglishAnalyzerProvider::getCustomEnglishAnalyzerProvider); return analyzers; } }
bsd-2-clause
Pushjet/Pushjet-Android
gradle/wrapper/dists/gradle-2.2.1-all/c64ydeuardnfqctvr1gm30w53/gradle-2.2.1/src/platform-native/org/gradle/nativeplatform/test/internal/NativeTestSuiteBinarySpecInternal.java
916
/*
 * Copyright 2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.nativeplatform.test.internal;

import org.gradle.nativeplatform.internal.NativeBinarySpecInternal;
import org.gradle.nativeplatform.test.NativeTestSuiteBinarySpec;

/**
 * Internal view of a native test-suite binary: combines the public
 * {@link NativeTestSuiteBinarySpec} contract with the internal
 * {@link NativeBinarySpecInternal} one. Marker interface only — it
 * declares no members of its own.
 */
public interface NativeTestSuiteBinarySpecInternal extends NativeTestSuiteBinarySpec, NativeBinarySpecInternal {
}
bsd-2-clause
kephale/imagej-ops
src/main/java/net/imagej/ops/math/add/AddConstantToImageFunctional.java
2595
/* * #%L * ImageJ software for multidimensional image processing and analysis. * %% * Copyright (C) 2014 - 2015 Board of Regents of the University of * Wisconsin-Madison, University of Konstanz and Brian Northan. * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* #L%
 */
package net.imagej.ops.math.add;

import net.imagej.ops.AbstractComputerOp;
import net.imagej.ops.Ops;
import net.imglib2.Cursor;
import net.imglib2.IterableInterval;
import net.imglib2.RandomAccess;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.type.numeric.NumericType;

import org.scijava.Priority;
import org.scijava.plugin.Parameter;
import org.scijava.plugin.Plugin;

/**
 * Op that writes {@code input + value} into {@code output}, pixel by pixel,
 * where {@code value} is a constant supplied as a plugin parameter.
 */
@Plugin(type = Ops.Math.Add.class, name = Ops.Math.Add.NAME, priority = Priority.VERY_LOW_PRIORITY)
public class AddConstantToImageFunctional<T extends NumericType<T>> extends
	AbstractComputerOp<IterableInterval<T>, RandomAccessibleInterval<T>> implements Ops.Math.Add
{

	// Constant added to every pixel of the input.
	@Parameter
	private T value;

	@Override
	public void compute(final IterableInterval<T> input,
		final RandomAccessibleInterval<T> output)
	{
		final Cursor<T> inCursor = input.localizingCursor();
		final RandomAccess<T> outAccess = output.randomAccess();
		while (inCursor.hasNext()) {
			final T pixel = inCursor.next();
			// Position the output accessor at the same coordinates as the cursor.
			outAccess.setPosition(inCursor);
			final T target = outAccess.get();
			target.set(pixel); // copy the input value...
			target.add(value); // ...then add the constant in place
		}
	}
}
bsd-2-clause
rsling/cow
src/de/BerkeleyTopoCOW/edu/berkeley/nlp/util/CounterMap.java
6574
package edu.berkeley.nlp.util;

import java.util.Collection;
import java.util.Map;
import java.util.Set;

/**
 * Maintains counts of (key, value) pairs. The map is structured so that for
 * every key, one can get a counter over values. Example usage: keys might be
 * words with values being POS tags, and the count being the number of
 * occurrences of that word/tag pair. The sub-counters returned by
 * getCounter(word) would be count distributions over tags for that word.
 *
 * @author Dan Klein
 */
public class CounterMap<K, V> implements java.io.Serializable {
  private static final long serialVersionUID = 4230378533059209021L;

  // Factory used to build the inner (value -> count) maps.
  MapFactory<V, Double> mf;
  // Outer map: key -> counter of values.
  Map<K, Counter<V>> counterMap;

  // Modification counters backing the totalCount() cache: the cached total is
  // valid only while currentModCount == cacheModCount.
  int currentModCount = 0;
  int cacheModCount = -1;
  double cacheTotalCount = 0.0;

  /** Returns the counter for {@code key}, creating and installing one if absent. */
  protected Counter<V> ensureCounter(K key) {
    Counter<V> valueCounter = counterMap.get(key);
    if (valueCounter == null) {
      valueCounter = new Counter<V>(mf);
      counterMap.put(key, valueCounter);
    }
    return valueCounter;
  }

  /**
   * Returns the keys that have been inserted into this CounterMap.
   */
  public Set<K> keySet() {
    return counterMap.keySet();
  }

  /**
   * Sets the count for a particular (key, value) pair.
   */
  public void setCount(K key, V value, double count) {
    Counter<V> valueCounter = ensureCounter(key);
    valueCounter.setCount(value, count);
    currentModCount++;
  }

  /**
   * Increments the count for a particular (key, value) pair.
   */
  public void incrementCount(K key, V value, double count) {
    Counter<V> valueCounter = ensureCounter(key);
    valueCounter.incrementCount(value, count);
    currentModCount++;
  }

  /** Increments each (key, value) entry of {@code map} by {@code count}. */
  public void incrementAll(Map<K, V> map, double count) {
    for (Map.Entry<K, V> entry : map.entrySet()) {
      incrementCount(entry.getKey(), entry.getValue(), count);
    }
  }

  /** Increments each (key, value) pair in {@code entries} by {@code count}. */
  public void incrementAll(Collection<Pair<K, V>> entries, double count) {
    for (Pair<K, V> entry : entries) {
      incrementCount(entry.getFirst(), entry.getSecond(), count);
    }
  }

  /**
   * Gets the count of the given (key, value) entry, or zero if that entry is
   * not present. Does not create any objects.
   */
  public double getCount(K key, V value) {
    Counter<V> valueCounter = counterMap.get(key);
    if (valueCounter == null) return 0.0;
    return valueCounter.getCount(value);
  }

  /**
   * Gets the sub-counter for the given key. If there is none, a counter is
   * created for that key, and installed in the CounterMap. You can, for
   * example, add to the returned empty counter directly (though you shouldn't).
   * This is so whether the key is present or not, modifying the returned
   * counter has the same effect (but don't do it).
   */
  public Counter<V> getCounter(K key) {
    return ensureCounter(key);
  }

  /**
   * Returns whether or not the <code>CounterMap</code> contains any
   * entries for the given key.
   *
   * @author Aria Haghighi
   * @param key
   * @return
   */
  public boolean containsKey(K key) {
    return counterMap.containsKey(key);
  }

  /**
   * Returns the total of all counts in sub-counters. This implementation is
   * caches the result -- it can get out of sync if the entries get modified externally.
   */
  public double totalCount() {
    if (currentModCount != cacheModCount) {
      // Cache is stale: recompute the grand total and remember the mod count.
      double total = 0.0;
      for (Map.Entry<K, Counter<V>> entry : counterMap.entrySet()) {
        Counter<V> counter = entry.getValue();
        total += counter.totalCount();
      }
      cacheTotalCount = total;
      cacheModCount = currentModCount;
    }
    return cacheTotalCount;
  }

  /**
   * Returns the total number of (key, value) entries in the CounterMap (not
   * their total counts).
   */
  public int totalSize() {
    int total = 0;
    for (Map.Entry<K, Counter<V>> entry : counterMap.entrySet()) {
      Counter<V> counter = entry.getValue();
      total += counter.size();
    }
    return total;
  }

  /**
   * Normalizes the maps inside this CounterMap -- not the CounterMap itself.
   */
  public void normalize() {
    for (Map.Entry<K, Counter<V>> entry : counterMap.entrySet()) {
      Counter<V> counter = entry.getValue();
      counter.normalize();
    }
    currentModCount++;
  }

  /**
   * The number of keys in this CounterMap (not the number of key-value entries
   * -- use totalSize() for that)
   */
  public int size() {
    return counterMap.size();
  }

  /**
   * True if there are no entries in the CounterMap (false does not mean
   * totalCount > 0)
   */
  public boolean isEmpty() {
    return size() == 0;
  }

  // @Override added: toString overrides Object.toString and should be marked
  // so the compiler verifies the signature.
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("[\n");
    for (Map.Entry<K, Counter<V>> entry : counterMap.entrySet()) {
      sb.append("  ");
      sb.append(entry.getKey());
      sb.append(" -> ");
      sb.append(entry.getValue());
      sb.append("\n");
    }
    sb.append("]");
    return sb.toString();
  }

  public CounterMap() {
    this(new MapFactory.HashMapFactory<K, Counter<V>>(), new MapFactory.HashMapFactory<V, Double>());
  }

  public CounterMap(MapFactory<K, Counter<V>> outerMF, MapFactory<V, Double> innerMF) {
    mf = innerMF;
    counterMap = outerMF.newMap();
  }

  public static void main(String[] args) {
    CounterMap<String, String> bigramCounterMap = new CounterMap<String, String>();
    bigramCounterMap.incrementCount("people", "run", 1);
    bigramCounterMap.incrementCount("cats", "growl", 2);
    bigramCounterMap.incrementCount("cats", "scamper", 3);
    System.out.println(bigramCounterMap);
    System.out.println("Entries for cats: " + bigramCounterMap.getCounter("cats"));
    System.out.println("Entries for dogs: " + bigramCounterMap.getCounter("dogs"));
    System.out.println("Count of cats scamper: " + bigramCounterMap.getCount("cats", "scamper"));
    System.out.println("Count of snakes slither: " + bigramCounterMap.getCount("snakes", "slither"));
    System.out.println("Total size: " + bigramCounterMap.totalSize());
    System.out.println("Total count: " + bigramCounterMap.totalCount());
    System.out.println(bigramCounterMap);
  }

  /**
   * Returns the (key, value) pair with the largest count, or null if the map
   * is empty.
   */
  public Pair<K, V> argMax() {
    double maxCount = Double.NEGATIVE_INFINITY;
    Pair<K, V> maxKey = null;
    for (Map.Entry<K, Counter<V>> entry : counterMap.entrySet()) {
      Counter<V> counter = entry.getValue();
      V localMax = counter.argMax();
      if (counter.getCount(localMax) > maxCount || maxKey == null) {
        maxKey = new Pair<K, V>(entry.getKey(), localMax);
        maxCount = counter.getCount(localMax);
      }
    }
    return maxKey;
  }
}
bsd-2-clause