repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
smanvi-pivotal/geode
geode-core/src/test/java/util/TestException.java
1103
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package util;

/**
 * Throwable used by test utilities to signal a test failure.
 *
 * <p>It extends {@link Error} — presumably so that generic {@code catch (Exception e)} handlers in
 * code under test cannot swallow it; confirm against callers before changing the superclass.
 */
public class TestException extends Error {

  // Error is Serializable; pin the serialized form explicitly instead of relying on the
  // compiler-generated default. (The redundant "import java.lang.Error;" was removed —
  // java.lang is imported implicitly.)
  private static final long serialVersionUID = 1L;

  /**
   * @param message description of the failure
   * @param cause underlying throwable that triggered the failure
   */
  public TestException(String message, Throwable cause) {
    super(message, cause);
  }

  /** @param name description of the failure */
  public TestException(String name) {
    super(name);
  }

  /** @param name description of the failure; converted via {@link StringBuffer#toString()} */
  public TestException(StringBuffer name) {
    super(name.toString());
  }
}
apache-2.0
q474818917/solr-5.2.0
lucene/highlighter/src/java/org/apache/lucene/search/postingshighlight/Passage.java
4876
package org.apache.lucene.search.postingshighlight;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.RamUsageEstimator;

/**
 * Represents a passage (typically a sentence of the document).
 * <p>
 * A passage contains {@link #getNumMatches} highlights from the query,
 * and the offsets and query terms that correspond with each match.
 *
 * @lucene.experimental
 */
public final class Passage {
  // Absolute character offsets of this passage in the original content; -1 until set.
  int startOffset = -1;
  int endOffset = -1;
  // Relevance score of this passage, as assigned by the highlighter.
  float score = 0.0f;

  // Three parallel arrays holding per-match data; only the first numMatches entries
  // are valid. They are grown in lock-step so indices stay aligned across all three.
  int matchStarts[] = new int[8];
  int matchEnds[] = new int[8];
  BytesRef matchTerms[] = new BytesRef[8];
  int numMatches = 0;

  /**
   * Records one query-term match inside this passage.
   * The match's start must lie within [this.startOffset, this.endOffset].
   */
  void addMatch(int startOffset, int endOffset, BytesRef term) {
    assert startOffset >= this.startOffset && startOffset <= this.endOffset;
    if (numMatches == matchStarts.length) {
      // Grow all three parallel arrays together to the same oversized length.
      int newLength = ArrayUtil.oversize(numMatches+1, RamUsageEstimator.NUM_BYTES_OBJECT_REF);
      int newMatchStarts[] = new int[newLength];
      int newMatchEnds[] = new int[newLength];
      BytesRef newMatchTerms[] = new BytesRef[newLength];
      System.arraycopy(matchStarts, 0, newMatchStarts, 0, numMatches);
      System.arraycopy(matchEnds, 0, newMatchEnds, 0, numMatches);
      System.arraycopy(matchTerms, 0, newMatchTerms, 0, numMatches);
      matchStarts = newMatchStarts;
      matchEnds = newMatchEnds;
      matchTerms = newMatchTerms;
    }
    assert matchStarts.length == matchEnds.length && matchEnds.length == matchTerms.length;
    matchStarts[numMatches] = startOffset;
    matchEnds[numMatches] = endOffset;
    matchTerms[numMatches] = term;
    numMatches++;
  }

  /** Sorts matches by start offset, swapping entries of all three parallel arrays together. */
  void sort() {
    final int starts[] = matchStarts;
    final int ends[] = matchEnds;
    final BytesRef terms[] = matchTerms;
    new InPlaceMergeSorter() {
      @Override
      protected void swap(int i, int j) {
        // Swap the i-th and j-th match in each parallel array.
        int temp = starts[i];
        starts[i] = starts[j];
        starts[j] = temp;

        temp = ends[i];
        ends[i] = ends[j];
        ends[j] = temp;

        BytesRef tempTerm = terms[i];
        terms[i] = terms[j];
        terms[j] = tempTerm;
      }

      @Override
      protected int compare(int i, int j) {
        return Integer.compare(starts[i], starts[j]);
      }
    }.sort(0, numMatches);
  }

  /** Resets this passage for reuse; the match arrays are retained, numMatches marks validity. */
  void reset() {
    startOffset = endOffset = -1;
    score = 0.0f;
    numMatches = 0;
  }

  /**
   * Start offset of this passage.
   * @return start index (inclusive) of the passage in the
   *         original content: always &gt;= 0.
   */
  public int getStartOffset() {
    return startOffset;
  }

  /**
   * End offset of this passage.
   * @return end index (exclusive) of the passage in the
   *         original content: always &gt;= {@link #getStartOffset()}
   */
  public int getEndOffset() {
    return endOffset;
  }

  /**
   * Passage's score.
   */
  public float getScore() {
    return score;
  }

  /**
   * Number of term matches available in
   * {@link #getMatchStarts}, {@link #getMatchEnds},
   * {@link #getMatchTerms}
   */
  public int getNumMatches() {
    return numMatches;
  }

  /**
   * Start offsets of the term matches, in increasing order.
   * <p>
   * Only {@link #getNumMatches} are valid. Note that these
   * offsets are absolute (not relative to {@link #getStartOffset()}).
   */
  public int[] getMatchStarts() {
    return matchStarts;
  }

  /**
   * End offsets of the term matches, corresponding with {@link #getMatchStarts}.
   * <p>
   * Only {@link #getNumMatches} are valid. Note that it's possible that an end offset
   * could exceed beyond the bounds of the passage ({@link #getEndOffset()}), if the
   * Analyzer produced a term which spans a passage boundary.
   */
  public int[] getMatchEnds() {
    return matchEnds;
  }

  /**
   * BytesRef (term text) of the matches, corresponding with {@link #getMatchStarts()}.
   * <p>
   * Only {@link #getNumMatches()} are valid.
   */
  public BytesRef[] getMatchTerms() {
    return matchTerms;
  }
}
apache-2.0
guobingkun/druid
services/src/main/java/io/druid/cli/CreateTables.java
3843
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.cli;

import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import io.airlift.airline.Command;
import io.airlift.airline.Option;
import io.druid.guice.JsonConfigProvider;
import io.druid.guice.annotations.Self;
import io.druid.java.util.common.logger.Logger;
import io.druid.metadata.MetadataStorageConnector;
import io.druid.metadata.MetadataStorageConnectorConfig;
import io.druid.server.DruidNode;
import io.druid.metadata.MetadataStorageTablesConfig;

import java.util.List;

/**
 * CLI command that connects to the configured metadata store and creates every
 * table Druid needs (segments, rules, config, task, audit, supervisor tables).
 */
@Command(
    name = "metadata-init",
    description = "Initialize Metadata Storage"
)
public class CreateTables extends GuiceRunnable
{
  // Connection parameters supplied on the command line; they override any
  // file-based configuration because they are bound directly into Guice below.
  @Option(name = "--connectURI", description = "Database JDBC connection string", required = true)
  private String connectURI;

  @Option(name = "--user", description = "Database username", required = true)
  private String user;

  @Option(name = "--password", description = "Database password", required = true)
  private String password;

  // Optional prefix for generated table names (see MetadataStorageTablesConfig.fromBase).
  @Option(name = "--base", description = "Base table name")
  private String base;

  private static final Logger log = new Logger(CreateTables.class);

  public CreateTables()
  {
    super(log);
  }

  @Override
  protected List<? extends Module> getModules()
  {
    return ImmutableList.<Module>of(
        new Module()
        {
          @Override
          public void configure(Binder binder)
          {
            // Bind a connector config backed by the CLI options. The anonymous subclass
            // captures the enclosing command's fields, so the values the user typed win.
            JsonConfigProvider.bindInstance(
                binder, Key.get(MetadataStorageConnectorConfig.class), new MetadataStorageConnectorConfig()
                {
                  @Override
                  public String getConnectURI()
                  {
                    return connectURI;
                  }

                  @Override
                  public String getUser()
                  {
                    return user;
                  }

                  @Override
                  public String getPassword()
                  {
                    return password;
                  }
                }
            );
            // Derive all table names from the optional --base prefix.
            JsonConfigProvider.bindInstance(
                binder, Key.get(MetadataStorageTablesConfig.class), MetadataStorageTablesConfig.fromBase(base)
            );
            // A self node is required by the injector; port -1 because this tool serves nothing.
            JsonConfigProvider.bindInstance(
                binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1)
            );
          }
        }
    );
  }

  @Override
  public void run()
  {
    final Injector injector = makeInjector();
    MetadataStorageConnector dbConnector = injector.getInstance(MetadataStorageConnector.class);
    // Each call is expected to be idempotent-ish table creation; order mirrors upstream usage.
    dbConnector.createDataSourceTable();
    dbConnector.createPendingSegmentsTable();
    dbConnector.createSegmentTable();
    dbConnector.createRulesTable();
    dbConnector.createConfigTable();
    dbConnector.createTaskTables();
    dbConnector.createAuditTable();
    dbConnector.createSupervisorsTable();
  }
}
apache-2.0
dalaro/incubator-tinkerpop
gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/step/map/TreeStep.java
7018
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.process.traversal.step.map;

import org.apache.tinkerpop.gremlin.process.computer.KeyValue;
import org.apache.tinkerpop.gremlin.process.computer.MapReduce;
import org.apache.tinkerpop.gremlin.process.computer.traversal.TraversalVertexProgram;
import org.apache.tinkerpop.gremlin.process.computer.util.StaticMapReduce;
import org.apache.tinkerpop.gremlin.process.traversal.Path;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.step.MapReducer;
import org.apache.tinkerpop.gremlin.process.traversal.step.PathProcessor;
import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalParent;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.ReducingBarrierStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.TraverserRequirement;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.util.TraverserSet;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalRing;
import org.apache.tinkerpop.gremlin.process.traversal.util.TraversalUtil;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.util.StringFactory;
import org.apache.tinkerpop.gremlin.util.function.TreeSupplier;

import java.io.Serializable;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Supplier;

/**
 * Reducing barrier step that folds every incoming traverser's path into a single
 * {@link Tree}: each path element becomes one level of nesting in the tree.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public final class TreeStep<S> extends ReducingBarrierStep<S, Tree> implements MapReducer, TraversalParent, PathProcessor {

    // Child traversals applied round-robin to successive path elements (see TreeBiFunction.apply);
    // reset after each path so every path starts at the first traversal in the ring.
    private TraversalRing<Object, Object> traversalRing = new TraversalRing<>();

    public TreeStep(final Traversal.Admin traversal) {
        super(traversal);
        // Seed the reduction with an empty Tree; TreeBiFunction mutates and returns that seed.
        this.setSeedSupplier((Supplier) TreeSupplier.instance());
        this.setBiFunction(new TreeBiFunction(this));
    }

    @Override
    public List<Traversal.Admin<Object, Object>> getLocalChildren() {
        return this.traversalRing.getTraversals();
    }

    @Override
    public void addLocalChild(final Traversal.Admin<?, ?> treeTraversal) {
        this.traversalRing.addTraversal(this.integrateChild(treeTraversal));
    }

    @Override
    public Set<TraverserRequirement> getRequirements() {
        // PATH is required because the reduction walks traverser.path() element by element.
        return this.getSelfAndChildRequirements(TraverserRequirement.PATH, TraverserRequirement.SIDE_EFFECTS);
    }

    @Override
    public MapReduce<MapReduce.NullObject, Tree, MapReduce.NullObject, Tree, Tree> getMapReduce() {
        return TreeMapReduce.instance();
    }

    @Override
    public TreeStep<S> clone() {
        final TreeStep<S> clone = (TreeStep<S>) super.clone();
        // Deep-copy the ring, re-integrate its children, and rebind the bi-function so the
        // clone's reduction references the clone's ring rather than the original's.
        clone.traversalRing = this.traversalRing.clone();
        clone.getLocalChildren().forEach(clone::integrateChild);
        clone.setBiFunction(new TreeBiFunction<>(clone));
        return clone;
    }

    @Override
    public int hashCode() {
        return super.hashCode() ^ this.traversalRing.hashCode();
    }

    @Override
    public Traverser<Tree> processNextStart() {
        if (this.byPass) {
            // byPass mode (inherited field): emit one single-path Tree per traverser instead
            // of barriering — presumably used when the OLAP MapReduce performs the final merge.
            final Traverser.Admin<S> traverser = this.starts.next();
            return traverser.split(this.reducingBiFunction.apply(new Tree(), traverser), this);
        } else {
            return super.processNextStart();
        }
    }

    @Override
    public String toString() {
        return StringFactory.stepString(this, this.traversalRing);
    }

    @Override
    public void reset() {
        super.reset();
        this.traversalRing.reset();
    }

    ///////////

    /**
     * Folds one traverser into the mutable seed Tree by descending/creating one nested
     * Tree node per path element, after mapping each element through the traversal ring.
     */
    private static class TreeBiFunction<S> implements BiFunction<Tree, Traverser<S>, Tree>, Serializable {

        private final TreeStep<S> treeStep;

        private TreeBiFunction(final TreeStep<S> treeStep) {
            this.treeStep = treeStep;
        }

        @Override
        public Tree apply(final Tree mutatingSeed, final Traverser<S> traverser) {
            Tree depth = mutatingSeed;
            final Path path = traverser.path();
            for (int i = 0; i < path.size(); i++) {
                // Map the i-th path element through the next by()-style traversal in the ring.
                final Object object = TraversalUtil.apply(path.<Object>get(i), this.treeStep.traversalRing.next());
                if (!depth.containsKey(object)) depth.put(object, new Tree<>());
                depth = (Tree) depth.get(object);
            }
            // Rewind the ring so the next path starts from the first traversal again.
            this.treeStep.traversalRing.reset();
            return mutatingSeed;
        }
    }

    ///////////

    /**
     * MapReduce companion: emits each halted traverser's Tree and merges them with
     * {@link Tree#addTree} into a single result. Stateless singleton.
     */
    public static final class TreeMapReduce extends StaticMapReduce<MapReduce.NullObject, Tree, MapReduce.NullObject, Tree, Tree> {

        private static final TreeMapReduce INSTANCE = new TreeMapReduce();

        private TreeMapReduce() {

        }

        @Override
        public boolean doStage(final Stage stage) {
            return true;
        }

        @Override
        public void map(final Vertex vertex, final MapEmitter<NullObject, Tree> emitter) {
            // Only vertices holding halted traversers contribute; each halted traverser
            // carries a (byPass-produced) Tree payload.
            vertex.<TraverserSet<Tree>>property(TraversalVertexProgram.HALTED_TRAVERSERS).ifPresent(traverserSet -> traverserSet.forEach(traverser -> emitter.emit(traverser.get())));
        }

        @Override
        public void combine(final NullObject key, final Iterator<Tree> values, final ReduceEmitter<NullObject, Tree> emitter) {
            this.reduce(key, values, emitter);
        }

        @Override
        public void reduce(final NullObject key, final Iterator<Tree> values, final ReduceEmitter<NullObject, Tree> emitter) {
            final Tree tree = new Tree();
            values.forEachRemaining(tree::addTree);
            emitter.emit(tree);
        }

        @Override
        public Tree generateFinalResult(final Iterator<KeyValue<NullObject, Tree>> keyValues) {
            // After reduce there is at most one merged Tree; fall back to an empty one.
            return keyValues.hasNext() ? keyValues.next().getValue() : new Tree();
        }

        @Override
        public String getMemoryKey() {
            return REDUCING;
        }

        public static final TreeMapReduce instance() {
            return INSTANCE;
        }
    }
}
apache-2.0
shehzaadn/phoenix
phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/PhoenixScrutinyJobCounters.java
1381
/** /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.mapreduce.index; /** * Counters used for Index Scrutiny MR job */ public enum PhoenixScrutinyJobCounters { /** * number of rows in data table with a valid index row (or vice-versa) */ VALID_ROW_COUNT, /** * number of rows in data table with an invalid index row (or vice-versa) */ INVALID_ROW_COUNT, /** * Number of rows in the index table with an incorrect covered column value */ BAD_COVERED_COL_VAL_COUNT, /** * Number of batches processed */ BATCHES_PROCESSED_COUNT; }
apache-2.0
wangcy6/storm_app
frame/kafka-0.11.0/kafka-0.11.0.1-src/clients/src/main/java/org/apache/kafka/common/requests/CreateTopicsRequest.java
12817
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.common.requests;

import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.types.Struct;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Wire representation of a CreateTopics request. Holds per-topic creation details
 * and (de)serializes them to/from the protocol {@link Struct} for versions 0-2.
 */
public class CreateTopicsRequest extends AbstractRequest {
    // Field names in the request schema; these must match the protocol definition exactly.
    private static final String REQUESTS_KEY_NAME = "create_topic_requests";
    private static final String TIMEOUT_KEY_NAME = "timeout";
    private static final String VALIDATE_ONLY_KEY_NAME = "validate_only";
    private static final String TOPIC_KEY_NAME = "topic";
    private static final String NUM_PARTITIONS_KEY_NAME = "num_partitions";
    private static final String REPLICATION_FACTOR_KEY_NAME = "replication_factor";
    private static final String REPLICA_ASSIGNMENT_KEY_NAME = "replica_assignment";
    private static final String REPLICA_ASSIGNMENT_PARTITION_ID_KEY_NAME = "partition_id";
    private static final String REPLICA_ASSIGNMENT_REPLICAS_KEY_NAME = "replicas";
    private static final String CONFIG_KEY_KEY_NAME = "config_name";
    private static final String CONFIG_VALUE_KEY_NAME = "config_value";
    private static final String CONFIGS_KEY_NAME = "config_entries";

    /**
     * Creation parameters for a single topic. Either numPartitions/replicationFactor
     * are set, or an explicit replica assignment is given (with the sentinel values
     * NO_NUM_PARTITIONS / NO_REPLICATION_FACTOR filling the unused pair).
     */
    public static final class TopicDetails {
        public final int numPartitions;
        public final short replicationFactor;
        public final Map<Integer, List<Integer>> replicasAssignments;
        public final Map<String, String> configs;

        private TopicDetails(int numPartitions,
                             short replicationFactor,
                             Map<Integer, List<Integer>> replicasAssignments,
                             Map<String, String> configs) {
            this.numPartitions = numPartitions;
            this.replicationFactor = replicationFactor;
            this.replicasAssignments = replicasAssignments;
            this.configs = configs;
        }

        public TopicDetails(int partitions,
                            short replicationFactor,
                            Map<String, String> configs) {
            this(partitions, replicationFactor, Collections.<Integer, List<Integer>>emptyMap(), configs);
        }

        public TopicDetails(int partitions,
                            short replicationFactor) {
            this(partitions, replicationFactor, Collections.<String, String>emptyMap());
        }

        public TopicDetails(Map<Integer, List<Integer>> replicasAssignments,
                            Map<String, String> configs) {
            this(NO_NUM_PARTITIONS, NO_REPLICATION_FACTOR, replicasAssignments, configs);
        }

        public TopicDetails(Map<Integer, List<Integer>> replicasAssignments) {
            this(replicasAssignments, Collections.<String, String>emptyMap());
        }

        @Override
        public String toString() {
            StringBuilder bld = new StringBuilder();
            bld.append("(numPartitions=").append(numPartitions).
                append(", replicationFactor=").append(replicationFactor).
                append(", replicasAssignments=").append(replicasAssignments).
                append(", configs=").append(configs).
                append(")");
            return bld.toString();
        }
    }

    /** Builder for CreateTopicsRequest; enforces that validateOnly requires version >= 1. */
    public static class Builder extends AbstractRequest.Builder<CreateTopicsRequest> {
        private final Map<String, TopicDetails> topics;
        private final int timeout;
        private final boolean validateOnly; // introduced in V1

        public Builder(Map<String, TopicDetails> topics, int timeout) {
            this(topics, timeout, false);
        }

        public Builder(Map<String, TopicDetails> topics, int timeout, boolean validateOnly) {
            super(ApiKeys.CREATE_TOPICS);
            this.topics = topics;
            this.timeout = timeout;
            this.validateOnly = validateOnly;
        }

        @Override
        public CreateTopicsRequest build(short version) {
            // validate_only does not exist in the v0 schema, so fail fast rather than
            // silently dropping the flag on serialization.
            if (validateOnly && version == 0)
                throw new UnsupportedVersionException("validateOnly is not supported in version 0 of " +
                        "CreateTopicsRequest");
            return new CreateTopicsRequest(topics, timeout, validateOnly, version);
        }

        @Override
        public String toString() {
            StringBuilder bld = new StringBuilder();
            bld.append("(type=CreateTopicsRequest").
                append(", topics=").append(topics).
                append(", timeout=").append(timeout).
                append(", validateOnly=").append(validateOnly).
                append(")");
            return bld.toString();
        }
    }

    private final Map<String, TopicDetails> topics;
    private final Integer timeout;
    private final boolean validateOnly; // introduced in V1

    // Set to handle special case where 2 requests for the same topic exist on the wire.
    // This allows the broker to return an error code for these topics.
    private final Set<String> duplicateTopics;

    // Sentinels meaning "not specified" when an explicit replica assignment is used instead.
    public static final int NO_NUM_PARTITIONS = -1;
    public static final short NO_REPLICATION_FACTOR = -1;

    private CreateTopicsRequest(Map<String, TopicDetails> topics, Integer timeout, boolean validateOnly, short version) {
        super(version);
        this.topics = topics;
        this.timeout = timeout;
        this.validateOnly = validateOnly;
        this.duplicateTopics = Collections.emptySet();
    }

    /** Deserializes a request from its wire-format Struct, recording duplicate topic names. */
    public CreateTopicsRequest(Struct struct, short version) {
        super(version);
        Object[] requestStructs = struct.getArray(REQUESTS_KEY_NAME);
        Map<String, TopicDetails> topics = new HashMap<>();
        Set<String> duplicateTopics = new HashSet<>();
        for (Object requestStructObj : requestStructs) {
            Struct singleRequestStruct = (Struct) requestStructObj;
            String topic = singleRequestStruct.getString(TOPIC_KEY_NAME);

            // Later entries overwrite earlier ones in the map, but the topic is remembered
            // as a duplicate so the broker can answer it with an error.
            if (topics.containsKey(topic))
                duplicateTopics.add(topic);

            int numPartitions = singleRequestStruct.getInt(NUM_PARTITIONS_KEY_NAME);
            short replicationFactor = singleRequestStruct.getShort(REPLICATION_FACTOR_KEY_NAME);

            //replica assignment
            Object[] assignmentsArray = singleRequestStruct.getArray(REPLICA_ASSIGNMENT_KEY_NAME);
            Map<Integer, List<Integer>> partitionReplicaAssignments = new HashMap<>(assignmentsArray.length);
            for (Object assignmentStructObj : assignmentsArray) {
                Struct assignmentStruct = (Struct) assignmentStructObj;

                Integer partitionId = assignmentStruct.getInt(REPLICA_ASSIGNMENT_PARTITION_ID_KEY_NAME);

                Object[] replicasArray = assignmentStruct.getArray(REPLICA_ASSIGNMENT_REPLICAS_KEY_NAME);
                List<Integer> replicas = new ArrayList<>(replicasArray.length);
                for (Object replica : replicasArray) {
                    replicas.add((Integer) replica);
                }

                partitionReplicaAssignments.put(partitionId, replicas);
            }

            Object[] configArray = singleRequestStruct.getArray(CONFIGS_KEY_NAME);
            Map<String, String> configs = new HashMap<>(configArray.length);
            for (Object configStructObj : configArray) {
                Struct configStruct = (Struct) configStructObj;

                String key = configStruct.getString(CONFIG_KEY_KEY_NAME);
                String value = configStruct.getString(CONFIG_VALUE_KEY_NAME);

                configs.put(key, value);
            }

            TopicDetails args = new TopicDetails(numPartitions, replicationFactor, partitionReplicaAssignments, configs);

            topics.put(topic, args);
        }

        this.topics = topics;
        this.timeout = struct.getInt(TIMEOUT_KEY_NAME);
        // validate_only is absent from the v0 schema; default to false there.
        if (struct.hasField(VALIDATE_ONLY_KEY_NAME))
            this.validateOnly = struct.getBoolean(VALIDATE_ONLY_KEY_NAME);
        else
            this.validateOnly = false;
        this.duplicateTopics = duplicateTopics;
    }

    /** Builds a per-topic error response for {@code e}, version-aware w.r.t. throttle time. */
    @Override
    public AbstractResponse getErrorResponse(int throttleTimeMs, Throwable e) {
        Map<String, ApiError> topicErrors = new HashMap<>();
        for (String topic : topics.keySet()) {
            topicErrors.put(topic, ApiError.fromThrowable(e));
        }

        short versionId = version();
        switch (versionId) {
            case 0:
            case 1:
                return new CreateTopicsResponse(topicErrors);
            case 2:
                // throttle_time_ms was added to the response in v2.
                return new CreateTopicsResponse(throttleTimeMs, topicErrors);
            default:
                throw new IllegalArgumentException(String.format("Version %d is not valid. Valid versions for %s are 0 to %d",
                        versionId, this.getClass().getSimpleName(), ApiKeys.CREATE_TOPICS.latestVersion()));
        }
    }

    public Map<String, TopicDetails> topics() {
        return this.topics;
    }

    public int timeout() {
        return this.timeout;
    }

    public boolean validateOnly() {
        return validateOnly;
    }

    public Set<String> duplicateTopics() {
        return this.duplicateTopics;
    }

    public static CreateTopicsRequest parse(ByteBuffer buffer, short version) {
        return new CreateTopicsRequest(ApiKeys.CREATE_TOPICS.parseRequest(version, buffer), version);
    }

    /**
     * Visible for testing.
     */
    @Override
    public Struct toStruct() {
        short version = version();
        Struct struct = new Struct(ApiKeys.CREATE_TOPICS.requestSchema(version));

        List<Struct> createTopicRequestStructs = new ArrayList<>(topics.size());
        for (Map.Entry<String, TopicDetails> entry : topics.entrySet()) {

            Struct singleRequestStruct = struct.instance(REQUESTS_KEY_NAME);
            String topic = entry.getKey();
            TopicDetails args = entry.getValue();

            singleRequestStruct.set(TOPIC_KEY_NAME, topic);
            singleRequestStruct.set(NUM_PARTITIONS_KEY_NAME, args.numPartitions);
            singleRequestStruct.set(REPLICATION_FACTOR_KEY_NAME, args.replicationFactor);

            // replica assignment
            List<Struct> replicaAssignmentsStructs = new ArrayList<>(args.replicasAssignments.size());
            for (Map.Entry<Integer, List<Integer>> partitionReplicaAssignment : args.replicasAssignments.entrySet()) {
                Struct replicaAssignmentStruct = singleRequestStruct.instance(REPLICA_ASSIGNMENT_KEY_NAME);
                replicaAssignmentStruct.set(REPLICA_ASSIGNMENT_PARTITION_ID_KEY_NAME, partitionReplicaAssignment.getKey());
                replicaAssignmentStruct.set(REPLICA_ASSIGNMENT_REPLICAS_KEY_NAME, partitionReplicaAssignment.getValue().toArray());
                replicaAssignmentsStructs.add(replicaAssignmentStruct);
            }
            singleRequestStruct.set(REPLICA_ASSIGNMENT_KEY_NAME, replicaAssignmentsStructs.toArray());

            // configs
            List<Struct> configsStructs = new ArrayList<>(args.configs.size());
            for (Map.Entry<String, String> configEntry : args.configs.entrySet()) {
                Struct configStruct = singleRequestStruct.instance(CONFIGS_KEY_NAME);
                configStruct.set(CONFIG_KEY_KEY_NAME, configEntry.getKey());
                configStruct.set(CONFIG_VALUE_KEY_NAME, configEntry.getValue());
                configsStructs.add(configStruct);
            }
            singleRequestStruct.set(CONFIGS_KEY_NAME, configsStructs.toArray());
            createTopicRequestStructs.add(singleRequestStruct);
        }
        struct.set(REQUESTS_KEY_NAME, createTopicRequestStructs.toArray());
        struct.set(TIMEOUT_KEY_NAME, timeout);
        // Only emit validate_only on schemas that define it (v1+).
        if (version >= 1)
            struct.set(VALIDATE_ONLY_KEY_NAME, validateOnly);
        return struct;
    }
}
apache-2.0
ullgren/camel
components/camel-csv/src/test/java/org/apache/camel/dataformat/csv/CsvUnmarshalPipeDelimiterTest.java
3081
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.dataformat.csv;

import java.util.List;

import org.apache.camel.EndpointInject;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;

/**
 * Spring based integration test for the <code>CsvDataFormat</code>,
 * exercising unmarshalling with '|' as the field delimiter.
 */
public class CsvUnmarshalPipeDelimiterTest extends CamelTestSupport {

    @EndpointInject("mock:result")
    private MockEndpoint result;

    @SuppressWarnings("unchecked")
    @Test
    public void testCsvUnMarshal() throws Exception {
        result.expectedMessageCount(1);

        template.sendBody("direct:start", "123|Camel in Action|1\n124|ActiveMQ in Action|2");

        assertMockEndpointsSatisfied();

        // Two input lines should yield two parsed rows of three fields each.
        List<List<String>> rows = result.getReceivedExchanges().get(0).getIn().getBody(List.class);
        assertEquals(2, rows.size());

        List<String> firstRow = rows.get(0);
        assertEquals("123", firstRow.get(0));
        assertEquals("Camel in Action", firstRow.get(1));
        assertEquals("1", firstRow.get(2));

        List<String> secondRow = rows.get(1);
        assertEquals("124", secondRow.get(0));
        assertEquals("ActiveMQ in Action", secondRow.get(1));
        assertEquals("2", secondRow.get(2));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testCsvUnMarshalSingleLine() throws Exception {
        result.expectedMessageCount(1);

        template.sendBody("direct:start", "123|Camel in Action|1");

        assertMockEndpointsSatisfied();

        // A single input line should yield exactly one parsed row.
        List<List<String>> rows = result.getReceivedExchanges().get(0).getIn().getBody(List.class);
        assertEquals(1, rows.size());

        List<String> row = rows.get(0);
        assertEquals("123", row.get(0));
        assertEquals("Camel in Action", row.get(1));
        assertEquals("1", row.get(2));
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Unmarshal pipe-delimited CSV from direct:start and forward to the mock.
                CsvDataFormat format = new CsvDataFormat().setDelimiter('|');
                from("direct:start").unmarshal(format).to("mock:result");
            }
        };
    }
}
apache-2.0
lbendig/gobblin
gobblin-utility/src/main/java/gobblin/util/filesystem/InstrumentedHDFSFileSystem.java
1242
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package gobblin.util.filesystem;

import org.apache.hadoop.hdfs.DistributedFileSystem;

/**
 * A {@link InstrumentedFileSystem} for the hdfs scheme.
 *
 * Usage:
 * FileSystem.get("instrumented-hdfs://...")
 */
public class InstrumentedHDFSFileSystem extends InstrumentedFileSystem {

  // URI scheme this wrapper registers under; the superclass maps it onto the
  // wrapped DistributedFileSystem below.
  public static final String SCHEME = "instrumented-hdfs";

  public InstrumentedHDFSFileSystem() {
    // Delegate all operations to a fresh DistributedFileSystem; instrumentation
    // behavior comes entirely from InstrumentedFileSystem.
    super(SCHEME, new DistributedFileSystem());
  }
}
apache-2.0
punkhorn/camel-upstream
core/camel-core/src/test/java/org/apache/camel/language/BeanAnnotationParameterTwoTest.java
4226
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.language; import org.apache.camel.ContextTestSupport; import org.apache.camel.Exchange; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.impl.JndiRegistry; import org.junit.Test; /** * */ public class BeanAnnotationParameterTwoTest extends ContextTestSupport { @Test public void testBeanAnnotationOne() throws Exception { getMockEndpoint("mock:result").expectedBodiesReceived("Hello/Bonjour World"); template.sendBody("direct:one", "World"); assertMockEndpointsSatisfied(); } @Test public void testBeanAnnotationTwo() throws Exception { getMockEndpoint("mock:result").expectedBodiesReceived("Hello/Bonjour World"); template.sendBody("direct:two", "World"); assertMockEndpointsSatisfied(); } @Test public void testBeanAnnotationThree() throws Exception { getMockEndpoint("mock:result").expectedBodiesReceived("Hello/Bonjour World"); template.sendBody("direct:three", "World"); assertMockEndpointsSatisfied(); } @Test public void testBeanAnnotationFour() throws Exception { getMockEndpoint("mock:middle").expectedBodiesReceived("Hello/Bonjour World"); getMockEndpoint("mock:result").expectedBodiesReceived("Bye World"); template.sendBody("direct:four", "World"); 
assertMockEndpointsSatisfied(); } @Override protected JndiRegistry createRegistry() throws Exception { JndiRegistry jndi = super.createRegistry(); jndi.bind("GreetingService", new GreetingService()); return jndi; } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:one") .bean(MyBean.class) .to("mock:result"); from("direct:two") .bean(MyBean.class, "callA") .to("mock:result"); from("direct:three") .setHeader(Exchange.BEAN_METHOD_NAME, constant("callA")) .bean(MyBean.class) .to("mock:result"); from("direct:four") .bean(MyBean.class, "callA") .to("mock:middle") .bean(MyBean.class, "callB") .to("mock:result"); } }; } public static final class MyBean { public String callA(@Bean(ref = "GreetingService", method = "english") String greeting, @Bean(ref = "GreetingService", method = "french") String french, String body) { return greeting + "/" + french + " " + body; } public String callB() { return "Bye World"; } } public static final class GreetingService { public String callA() { throw new IllegalArgumentException("Should not callA"); } public String callB() { throw new IllegalArgumentException("Should not callB"); } public String english() { return "Hello"; } public String french() { return "Bonjour"; } public String german() { return "Hallo"; } } }
apache-2.0
juhalindfors/bazel-patches
src/main/java/com/google/devtools/build/lib/query2/engine/SomeFunction.java
2931
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.query2.engine; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.query2.engine.QueryEnvironment.Argument; import com.google.devtools.build.lib.query2.engine.QueryEnvironment.ArgumentType; import com.google.devtools.build.lib.query2.engine.QueryEnvironment.QueryFunction; import com.google.devtools.build.lib.query2.engine.QueryEnvironment.QueryTaskCallable; import com.google.devtools.build.lib.query2.engine.QueryEnvironment.QueryTaskFuture; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; /** * A some(x) filter expression, which returns an arbitrary node in set x, or * fails if x is empty. 
* * <pre>expr ::= SOME '(' expr ')'</pre> */ class SomeFunction implements QueryFunction { SomeFunction() { } @Override public String getName() { return "some"; } @Override public int getMandatoryArguments() { return 1; } @Override public List<ArgumentType> getArgumentTypes() { return ImmutableList.of(ArgumentType.EXPRESSION); } @Override public <T> QueryTaskFuture<Void> eval( QueryEnvironment<T> env, VariableContext<T> context, final QueryExpression expression, List<Argument> args, final Callback<T> callback) { final AtomicBoolean someFound = new AtomicBoolean(false); QueryTaskFuture<Void> operandEvalFuture = env.eval( args.get(0).getExpression(), context, new Callback<T>() { @Override public void process(Iterable<T> partialResult) throws QueryException, InterruptedException { if (someFound.get() || Iterables.isEmpty(partialResult)) { return; } callback.process(ImmutableSet.of(partialResult.iterator().next())); someFound.set(true); } }); return env.whenSucceedsCall( operandEvalFuture, new QueryTaskCallable<Void>() { @Override public Void call() throws QueryException { if (!someFound.get()) { throw new QueryException(expression, "argument set is empty"); } return null; } }); } }
apache-2.0
mirego/j2objc
jre_emul/android/platform/libcore/ojluni/src/main/java/java/lang/StringBuffer.java
20026
/* * Copyright (c) 1994, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.lang; /** * A thread-safe, mutable sequence of characters. * A string buffer is like a {@link String}, but can be modified. At any * point in time it contains some particular sequence of characters, but * the length and content of the sequence can be changed through certain * method calls. * <p> * String buffers are safe for use by multiple threads. The methods * are synchronized where necessary so that all the operations on any * particular instance behave as if they occur in some serial order * that is consistent with the order of the method calls made by each of * the individual threads involved. * <p> * The principal operations on a {@code StringBuffer} are the * {@code append} and {@code insert} methods, which are * overloaded so as to accept data of any type. 
Each effectively * converts a given datum to a string and then appends or inserts the * characters of that string to the string buffer. The * {@code append} method always adds these characters at the end * of the buffer; the {@code insert} method adds the characters at * a specified point. * <p> * For example, if {@code z} refers to a string buffer object * whose current contents are {@code "start"}, then * the method call {@code z.append("le")} would cause the string * buffer to contain {@code "startle"}, whereas * {@code z.insert(4, "le")} would alter the string buffer to * contain {@code "starlet"}. * <p> * In general, if sb refers to an instance of a {@code StringBuffer}, * then {@code sb.append(x)} has the same effect as * {@code sb.insert(sb.length(), x)}. * <p> * Whenever an operation occurs involving a source sequence (such as * appending or inserting from a source sequence), this class synchronizes * only on the string buffer performing the operation, not on the source. * Note that while {@code StringBuffer} is designed to be safe to use * concurrently from multiple threads, if the constructor or the * {@code append} or {@code insert} operation is passed a source sequence * that is shared across threads, the calling code must ensure * that the operation has a consistent and unchanging view of the source * sequence for the duration of the operation. * This could be satisfied by the caller holding a lock during the * operation's call, by using an immutable source sequence, or by not * sharing the source sequence across threads. * <p> * Every string buffer has a capacity. As long as the length of the * character sequence contained in the string buffer does not exceed * the capacity, it is not necessary to allocate a new internal * buffer array. If the internal buffer overflows, it is * automatically made larger. 
* <p> * Unless otherwise noted, passing a {@code null} argument to a constructor * or method in this class will cause a {@link NullPointerException} to be * thrown. * <p> * As of release JDK 5, this class has been supplemented with an equivalent * class designed for use by a single thread, {@link StringBuilder}. The * {@code StringBuilder} class should generally be used in preference to * this one, as it supports all of the same operations but it is faster, as * it performs no synchronization. * * @author Arthur van Hoff * @see java.lang.StringBuilder * @see java.lang.String * @since JDK1.0 */ public final class StringBuffer extends AbstractStringBuilder implements java.io.Serializable, CharSequence { /** use serialVersionUID from JDK 1.0.2 for interoperability */ static final long serialVersionUID = 3388685877147921107L; /** * Constructs a string buffer with no characters in it and an * initial capacity of 16 characters. */ public StringBuffer() { super(16); } /** * Constructs a string buffer with no characters in it and * the specified initial capacity. * * @param capacity the initial capacity. * @exception NegativeArraySizeException if the {@code capacity} * argument is less than {@code 0}. */ public StringBuffer(int capacity) { super(capacity); } /** * Constructs a string buffer initialized to the contents of the * specified string. The initial capacity of the string buffer is * {@code 16} plus the length of the string argument. * * @param str the initial contents of the buffer. */ public StringBuffer(String str) { super(str.length() + 16); append(str); } /** * Constructs a string buffer that contains the same characters * as the specified {@code CharSequence}. The initial capacity of * the string buffer is {@code 16} plus the length of the * {@code CharSequence} argument. * <p> * If the length of the specified {@code CharSequence} is * less than or equal to zero, then an empty buffer of capacity * {@code 16} is returned. * * @param seq the sequence to copy. 
* @since 1.5 */ public StringBuffer(CharSequence seq) { this(seq.length() + 16); append(seq); } @Override public synchronized native int length() /*-[ return self->delegate_.count_; ]-*/; @Override public synchronized native int capacity() /*-[ return self->delegate_.bufferSize_; ]-*/; @Override public synchronized void ensureCapacity(int minimumCapacity) { super.ensureCapacity(minimumCapacity); } /** * @since 1.5 */ @Override public synchronized void trimToSize() { super.trimToSize(); } /** * @throws IndexOutOfBoundsException {@inheritDoc} * @see #length() */ @Override public synchronized void setLength(int newLength) { super.setLength(newLength); } /** * @throws IndexOutOfBoundsException {@inheritDoc} * @see #length() */ @Override public synchronized char charAt(int index) { return super.charAt(index); } /** * @since 1.5 */ @Override public synchronized int codePointAt(int index) { return super.codePointAt(index); } /** * @since 1.5 */ @Override public synchronized int codePointBefore(int index) { return super.codePointBefore(index); } /** * @since 1.5 */ @Override public synchronized int codePointCount(int beginIndex, int endIndex) { return super.codePointCount(beginIndex, endIndex); } /** * @since 1.5 */ @Override public synchronized int offsetByCodePoints(int index, int codePointOffset) { return super.offsetByCodePoints(index, codePointOffset); } /** * @throws IndexOutOfBoundsException {@inheritDoc} */ @Override public synchronized void getChars(int srcBegin, int srcEnd, char[] dst, int dstBegin) { super.getChars(srcBegin, srcEnd, dst, dstBegin); } /** * @throws IndexOutOfBoundsException {@inheritDoc} * @see #length() */ @Override public synchronized void setCharAt(int index, char ch) { super.setCharAt(index, ch); } public StringBuffer append(Object obj) { return append(String.valueOf(obj)); } public synchronized native StringBuffer append(String str) /*-[ JreStringBuilder_appendString(&self->delegate_, str); return self; ]-*/; /** * Appends the specified 
{@code StringBuffer} to this sequence. * <p> * The characters of the {@code StringBuffer} argument are appended, * in order, to the contents of this {@code StringBuffer}, increasing the * length of this {@code StringBuffer} by the length of the argument. * If {@code sb} is {@code null}, then the four characters * {@code "null"} are appended to this {@code StringBuffer}. * <p> * Let <i>n</i> be the length of the old character sequence, the one * contained in the {@code StringBuffer} just prior to execution of the * {@code append} method. Then the character at index <i>k</i> in * the new character sequence is equal to the character at index <i>k</i> * in the old character sequence, if <i>k</i> is less than <i>n</i>; * otherwise, it is equal to the character at index <i>k-n</i> in the * argument {@code sb}. * <p> * This method synchronizes on {@code this}, the destination * object, but does not synchronize on the source ({@code sb}). * * @param sb the {@code StringBuffer} to append. * @return a reference to this object. * @since 1.4 */ public synchronized native StringBuffer append(StringBuffer sb) /*-[ JreStringBuilder_appendStringBuffer(&self->delegate_, sb); return self; ]-*/; /** * @since 1.8 * J2ObjC - Unused. synchronized StringBuffer append(AbstractStringBuilder asb) { super.append(asb); return this; }*/ /** * Appends the specified {@code CharSequence} to this * sequence. * <p> * The characters of the {@code CharSequence} argument are appended, * in order, increasing the length of this sequence by the length of the * argument. * * <p>The result of this method is exactly the same as if it were an * invocation of this.append(s, 0, s.length()); * * <p>This method synchronizes on {@code this}, the destination * object, but does not synchronize on the source ({@code s}). * * <p>If {@code s} is {@code null}, then the four characters * {@code "null"} are appended. * * @param s the {@code CharSequence} to append. * @return a reference to this object. 
* @since 1.5 */ @Override public synchronized native StringBuffer append(CharSequence s) /*-[ JreStringBuilder_appendCharSequence(&self->delegate_, s); return self; ]-*/; /** * @throws IndexOutOfBoundsException {@inheritDoc} * @since 1.5 */ @Override public synchronized native StringBuffer append(CharSequence s, int start, int end) /*-[ JreStringBuilder_appendCharSequenceSubset(&self->delegate_, s, start, end); return self; ]-*/; public synchronized native StringBuffer append(char[] str) /*-[ JreStringBuilder_appendCharArray(&self->delegate_, str); return self; ]-*/; /** * @throws IndexOutOfBoundsException {@inheritDoc} */ public synchronized native StringBuffer append(char[] str, int offset, int len) /*-[ JreStringBuilder_appendCharArraySubset(&self->delegate_, str, offset, len); return self; ]-*/; public StringBuffer append(boolean b) { return append(Boolean.toString(b)); } @Override public synchronized native StringBuffer append(char c) /*-[ JreStringBuilder_appendChar(&self->delegate_, c); return self; ]-*/; public synchronized native StringBuffer append(int i) /*-[ JreStringBuilder_appendInt(&self->delegate_, i); return self; ]-*/; /** * @since 1.5 */ public StringBuffer appendCodePoint(int codePoint) { return append(Character.toChars(codePoint)); } public synchronized native StringBuffer append(long lng) /*-[ JreStringBuilder_appendLong(&self->delegate_, lng); return self; ]-*/; public synchronized native StringBuffer append(float f) /*-[ JreStringBuilder_appendFloat(&self->delegate_, f); return self; ]-*/; public synchronized native StringBuffer append(double d) /*-[ JreStringBuilder_appendDouble(&self->delegate_, d); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} * @since 1.2 */ public synchronized native StringBuffer delete(int start, int end) /*-[ JreStringBuilder_delete(&self->delegate_, start, end); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} * @since 1.2 */ public synchronized native 
StringBuffer deleteCharAt(int index) /*-[ JreStringBuilder_deleteCharAt(&self->delegate_, index); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} * @since 1.2 */ public synchronized native StringBuffer replace(int start, int end, String str) /*-[ JreStringBuilder_replace(&self->delegate_, start, end, str); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} * @since 1.2 */ @Override public synchronized String substring(int start) { return super.substring(start); } /** * @throws IndexOutOfBoundsException {@inheritDoc} * @since 1.4 */ @Override public synchronized CharSequence subSequence(int start, int end) { return super.substring(start, end); } /** * @throws StringIndexOutOfBoundsException {@inheritDoc} * @since 1.2 */ @Override public synchronized String substring(int start, int end) { return super.substring(start, end); } /** * @throws StringIndexOutOfBoundsException {@inheritDoc} * @since 1.2 */ public synchronized native StringBuffer insert(int index, char[] str, int offset, int len) /*-[ JreStringBuilder_insertCharArraySubset(&self->delegate_, index, str, offset, len); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public StringBuffer insert(int offset, Object obj) { return insert(offset, String.valueOf(obj)); } /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public synchronized native StringBuffer insert(int offset, String str) /*-[ JreStringBuilder_insertString(&self->delegate_, offset, str); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public synchronized native StringBuffer insert(int offset, char[] str) /*-[ JreStringBuilder_insertCharArray(&self->delegate_, offset, str); return self; ]-*/; /** * @throws IndexOutOfBoundsException {@inheritDoc} * @since 1.5 */ public StringBuffer insert(int dstOffset, CharSequence s) { return insert(dstOffset, String.valueOf(s)); } /** * @throws IndexOutOfBoundsException 
{@inheritDoc} * @since 1.5 */ public synchronized native StringBuffer insert(int dstOffset, CharSequence s, int start, int end) /*-[ JreStringBuilder_insertCharSequence(&self->delegate_, dstOffset, s, start, end); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public StringBuffer insert(int offset, boolean b) { return insert(offset, Boolean.toString(b)); } /** * @throws IndexOutOfBoundsException {@inheritDoc} */ public synchronized native StringBuffer insert(int offset, char c) /*-[ JreStringBuilder_insertChar(&self->delegate_, offset, c); return self; ]-*/; /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public StringBuffer insert(int offset, int i) { return insert(offset, Integer.toString(i)); } /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public StringBuffer insert(int offset, long l) { return insert(offset, Long.toString(l)); } /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public StringBuffer insert(int offset, float f) { return insert(offset, Float.toString(f)); } /** * @throws StringIndexOutOfBoundsException {@inheritDoc} */ public StringBuffer insert(int offset, double d) { return insert(offset, Double.toString(d)); } /** * @since 1.4 */ @Override public int indexOf(String str) { // Note, synchronization achieved via invocations of other StringBuffer methods return super.indexOf(str); } /** * @since 1.4 */ @Override public synchronized int indexOf(String str, int fromIndex) { return super.indexOf(str, fromIndex); } /** * @since 1.4 * J2ObjC - Provided by superclass. Synchronization achieved via call into * lastIndexOf(String,int). 
@Override public int lastIndexOf(String str) { // Note, synchronization achieved via invocations of other StringBuffer methods return lastIndexOf(str, count); }*/ /** * @since 1.4 */ @Override public synchronized int lastIndexOf(String str, int fromIndex) { return super.lastIndexOf(str, fromIndex); } /** * @since JDK1.0.2 */ public synchronized native StringBuffer reverse() /*-[ JreStringBuilder_reverse(&self->delegate_); return self; ]-*/; @Override public synchronized native String toString() /*-[ return JreStringBuilder_toString(&self->delegate_); ]-*/; /** * Serializable fields for StringBuffer. * * @serialField value char[] * The backing character array of this StringBuffer. * @serialField count int * The number of characters in this StringBuffer. * @serialField shared boolean * A flag indicating whether the backing array is shared. * The value is ignored upon deserialization. */ private static final java.io.ObjectStreamField[] serialPersistentFields = { new java.io.ObjectStreamField("value", char[].class), new java.io.ObjectStreamField("count", Integer.TYPE), new java.io.ObjectStreamField("shared", Boolean.TYPE), }; /** * readObject is called to restore the state of the StringBuffer from * a stream. */ private synchronized void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException { java.io.ObjectOutputStream.PutField fields = s.putFields(); fields.put("value", getValue()); fields.put("count", length()); fields.put("shared", false); s.writeFields(); } /** * readObject is called to restore the state of the StringBuffer from * a stream. */ private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException { java.io.ObjectInputStream.GetField fields = s.readFields(); char[] value = (char[])fields.get("value", null); int count = fields.get("count", 0); append(value, 0, count); } }
apache-2.0
eug48/hapi-fhir
hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/terminologies/ValueSetChecker.java
315
package org.hl7.fhir.dstu3.terminologies; import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ETooCostly; import org.hl7.fhir.dstu3.utils.EOperationOutcome; public interface ValueSetChecker { boolean codeInValueSet(String system, String code) throws ETooCostly, EOperationOutcome, Exception; }
apache-2.0
Allive1/pinpoint
profiler/src/main/java/com/navercorp/pinpoint/test/TestTcpDataSender.java
6517
/** * Copyright 2014 NAVER Corp. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.test; import com.navercorp.pinpoint.profiler.sender.EnhancedDataSender; import com.navercorp.pinpoint.rpc.FutureListener; import com.navercorp.pinpoint.rpc.ResponseMessage; import com.navercorp.pinpoint.rpc.client.PinpointClientReconnectEventListener; import com.navercorp.pinpoint.thrift.dto.TApiMetaData; import com.navercorp.pinpoint.thrift.dto.TSqlMetaData; import com.navercorp.pinpoint.thrift.dto.TStringMetaData; import org.apache.thrift.TBase; import java.io.PrintStream; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.NoSuchElementException; /** * @author Jongho Moon * */ public class TestTcpDataSender implements EnhancedDataSender { private final List<TBase<?, ?>> datas = new ArrayList<TBase<?, ?>>(); private final Map<Integer, String> apiIdMap = new HashMap<Integer, String>(); private final Map<String, Integer> apiDescriptionMap = new HashMap<String, Integer>(); private final Map<String, Integer> sqlMap = new HashMap<String, Integer>(); private final Map<Integer, String> sqlIdMap = new HashMap<Integer, String>(); private final Map<String, Integer> stringMap = new HashMap<String, Integer>(); private final Map<Integer, String> stringIdMap = new HashMap<Integer, String>(); private static final 
Comparator<Map.Entry<Integer, String>> COMPARATOR = new Comparator<Map.Entry<Integer, String>>() { @Override public int compare(Entry<Integer, String> o1, Entry<Integer, String> o2) { return o1.getKey() > o2.getKey() ? 1 : (o1.getKey() < o2.getKey() ? -1 : 0); } }; @Override public boolean send(TBase<?, ?> data) { addData(data); return false; } private void addData(TBase<?, ?> data) { if (data instanceof TApiMetaData) { TApiMetaData md = (TApiMetaData)data; String api = md.getApiInfo(); if (md.getLine() != -1) { api += ":" + md.getLine(); } apiIdMap.put(md.getApiId(), api); apiDescriptionMap.put(api, md.getApiId()); } else if (data instanceof TSqlMetaData) { TSqlMetaData md = (TSqlMetaData)data; int id = md.getSqlId(); String sql = md.getSql(); sqlMap.put(sql, id); sqlIdMap.put(id, sql); } else if (data instanceof TStringMetaData) { TStringMetaData md = (TStringMetaData)data; int id = md.getStringId(); String string = md.getStringValue(); stringMap.put(string, id); stringIdMap.put(id, string); } datas.add(data); } @Override public void stop() { // do nothing } @Override public boolean request(TBase<?, ?> data) { addData(data); return true; } @Override public boolean request(TBase<?, ?> data, int retry) { addData(data); return true; } @Override public boolean request(TBase<?, ?> data, FutureListener<ResponseMessage> listener) { addData(data); return true; } @Override public boolean addReconnectEventListener(PinpointClientReconnectEventListener eventListener) { return false; } @Override public boolean removeReconnectEventListener(PinpointClientReconnectEventListener eventListener) { return false; } public String getApiDescription(int id) { return apiIdMap.get(id); } public int getApiId(String description) { Integer id = apiDescriptionMap.get(description); if (id == null) { throw new NoSuchElementException(description); } return id; } public String getString(int id) { return stringIdMap.get(id); } public int getStringId(String string) { Integer id = 
stringMap.get(string); if (id == null) { throw new NoSuchElementException(string); } return id; } public String getSql(int id) { return sqlIdMap.get(id); } public int getSqlId(String sql) { Integer id = sqlMap.get(sql); if (id == null) { throw new NoSuchElementException(sql); } return id; } public List<TBase<?, ?>> getDatas() { return datas; } public void clear() { datas.clear(); } public void printDatas(PrintStream out) { out.println("API(" + apiIdMap.size() + "):"); printApis(out); out.println("SQL(" + sqlIdMap.size() + "):"); printSqls(out); out.println("STRING(" + stringIdMap.size() + "):"); printStrings(out); } public void printApis(PrintStream out) { List<Map.Entry<Integer, String>> apis = new ArrayList<Map.Entry<Integer, String>>(apiIdMap.entrySet()); printEntries(out, apis); } public void printStrings(PrintStream out) { List<Map.Entry<Integer, String>> strings = new ArrayList<Map.Entry<Integer, String>>(stringIdMap.entrySet()); printEntries(out, strings); } public void printSqls(PrintStream out) { List<Map.Entry<Integer, String>> sqls = new ArrayList<Map.Entry<Integer, String>>(sqlIdMap.entrySet()); printEntries(out, sqls); } private void printEntries(PrintStream out, List<Map.Entry<Integer, String>> entries) { Collections.sort(entries, COMPARATOR); for (Map.Entry<Integer, String> e : entries) { out.println(e.getKey() + ": " + e.getValue()); } } }
apache-2.0
robzor92/hops
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
79289
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.v2.app; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.atLeast; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.common.base.Supplier; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.TimeoutException; import org.junit.Assert; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.OutputCommitter; import org.apache.hadoop.mapreduce.OutputFormat; import 
org.apache.hadoop.mapreduce.RecordWriter; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.jobhistory.Event; import org.apache.hadoop.mapreduce.jobhistory.EventType; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier; import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.hadoop.mapreduce.v2.api.records.AMInfo; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobState; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.Task; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt; import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType; import 
org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskRecoverEvent; import org.apache.hadoop.mapreduce.v2.app.job.impl.MapTaskImpl; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher; import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent; import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics; import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.yarn.util.Clock; import org.apache.hadoop.yarn.util.SystemClock; import org.junit.Test; import org.mockito.ArgumentCaptor; @SuppressWarnings({"unchecked", "rawtypes"}) public class TestRecovery { private static final Log LOG = LogFactory.getLog(TestRecovery.class); private static Path outputDir = new Path(new File("target", TestRecovery.class.getName()).getAbsolutePath() + Path.SEPARATOR + "out"); private static String partFile = "part-r-00000"; private Text key1 = new Text("key1"); private Text key2 = new Text("key2"); private Text val1 = new Text("val1"); private Text val2 = new Text("val2"); /** * AM with 2 maps and 1 reduce. For 1st map, one attempt fails, one attempt * completely disappears because of failed launch, one attempt gets killed and * one attempt succeeds. AM crashes after the first tasks finishes and * recovers completely and succeeds in the second generation. 
   *
   * @throws Exception
   */
  @Test
  public void testCrashed() throws Exception {

    int runCount = 0;
    long am1StartTimeEst = System.currentTimeMillis();
    // First-generation AM: 2 maps, 1 reduce, with job-history logging enabled.
    MRApp app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), true, ++runCount);
    Configuration conf = new Configuration();
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    long jobStartTime = job.getReport().getStartTime();
    //all maps would be running
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task mapTask2 = it.next();
    Task reduceTask = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);
    app.waitForState(mapTask2, TaskState.RUNNING);

    TaskAttempt task1Attempt1 = mapTask1.getAttempts().values().iterator().next();
    TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator().next();

    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task1Attempt1, TaskAttemptState.RUNNING);
    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);

    // the reduce task has already transitioned to RUNNING at this point
    // (NOTE(review): a previous comment here said "NEW", but the assertion
    // below checks RUNNING)
    Assert.assertEquals("Reduce Task state not correct",
        TaskState.RUNNING, reduceTask.getReport().getTaskState());

    /////////// Play some games with the TaskAttempts of the first task //////
    //send the fail signal to the 1st map task attempt
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task1Attempt1.getID(),
            TaskAttemptEventType.TA_FAILMSG));

    app.waitForState(task1Attempt1, TaskAttemptState.FAILED);

    // poll (2s intervals, up to ~20s) for the replacement attempt to appear
    int timeOut = 0;
    while (mapTask1.getAttempts().size() != 2 && timeOut++ < 10) {
      Thread.sleep(2000);
      LOG.info("Waiting for next attempt to start");
    }
    Assert.assertEquals(2, mapTask1.getAttempts().size());
    Iterator<TaskAttempt> itr = mapTask1.getAttempts().values().iterator();
    itr.next();
    TaskAttempt task1Attempt2 = itr.next();

    // wait for the second task attempt to be assigned.
    waitForContainerAssignment(task1Attempt2);

    // This attempt will automatically fail because of the way ContainerLauncher
    // is setup
    // This attempt 'disappears' from JobHistory and so causes MAPREDUCE-3846
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(task1Attempt2.getID(),
            TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED));
    app.waitForState(task1Attempt2, TaskAttemptState.FAILED);

    // wait for the third attempt to be spawned after the launch failure
    timeOut = 0;
    while (mapTask1.getAttempts().size() != 3 && timeOut++ < 10) {
      Thread.sleep(2000);
      LOG.info("Waiting for next attempt to start");
    }
    Assert.assertEquals(3, mapTask1.getAttempts().size());
    itr = mapTask1.getAttempts().values().iterator();
    itr.next();
    itr.next();
    TaskAttempt task1Attempt3 = itr.next();

    app.waitForState(task1Attempt3, TaskAttemptState.RUNNING);

    //send the kill signal to the 1st map 3rd attempt
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task1Attempt3.getID(),
            TaskAttemptEventType.TA_KILL));

    app.waitForState(task1Attempt3, TaskAttemptState.KILLED);

    // wait for the fourth attempt to be spawned after the kill
    timeOut = 0;
    while (mapTask1.getAttempts().size() != 4 && timeOut++ < 10) {
      Thread.sleep(2000);
      LOG.info("Waiting for next attempt to start");
    }
    Assert.assertEquals(4, mapTask1.getAttempts().size());
    itr = mapTask1.getAttempts().values().iterator();
    itr.next();
    itr.next();
    itr.next();
    TaskAttempt task1Attempt4 = itr.next();

    app.waitForState(task1Attempt4, TaskAttemptState.RUNNING);

    //send the done signal to the 1st map 4th attempt
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task1Attempt4.getID(),
            TaskAttemptEventType.TA_DONE));

    /////////// End of games with the TaskAttempts of the first task //////

    //wait for first map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);
    long task1StartTime = mapTask1.getReport().getStartTime();
    long task1FinishTime = mapTask1.getReport().getFinishTime();

    //stop the app
    app.stop();

    //rerun
    //in rerun the 1st map will be recovered from previous run
    long am2StartTimeEst = System.currentTimeMillis();
    app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), false, ++runCount);
    conf = new Configuration();
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    //all maps would be running
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    mapTask2 = it.next();
    reduceTask = it.next();

    // first map will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    app.waitForState(mapTask2, TaskState.RUNNING);

    task2Attempt = mapTask2.getAttempts().values().iterator().next();
    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);

    //send the done signal to the 2nd map task
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            mapTask2.getAttempts().values().iterator().next().getID(),
            TaskAttemptEventType.TA_DONE));

    //wait to get it completed
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    //wait for reduce to be running before sending done
    app.waitForState(reduceTask, TaskState.RUNNING);
    //send the done signal to the reduce
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            reduceTask.getAttempts().values().iterator().next().getID(),
            TaskAttemptEventType.TA_DONE));

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
    // recovered job/task times must match the first generation exactly
    Assert.assertEquals("Job Start time not correct",
        jobStartTime, job.getReport().getStartTime());
    Assert.assertEquals("Task Start time not correct",
        task1StartTime, mapTask1.getReport().getStartTime());
    Assert.assertEquals("Task Finish time not correct",
        task1FinishTime, mapTask1.getReport().getFinishTime());
    Assert.assertEquals(2, job.getAMInfos().size());
    int attemptNum = 1;
    // Verify AMInfo
    for (AMInfo amInfo : job.getAMInfos()) {
      Assert.assertEquals(attemptNum++, amInfo.getAppAttemptId()
          .getAttemptId());
      Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId()
          .getApplicationAttemptId());
      Assert.assertEquals(MRApp.NM_HOST, amInfo.getNodeManagerHost());
      Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort());
      Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort());
    }
    // each AM's recorded start time must fall inside the window estimated
    // around its construction
    long am1StartTimeReal = job.getAMInfos().get(0).getStartTime();
    long am2StartTimeReal = job.getAMInfos().get(1).getStartTime();
    Assert.assertTrue(am1StartTimeReal >= am1StartTimeEst
        && am1StartTimeReal <= am2StartTimeEst);
    Assert.assertTrue(am2StartTimeReal >= am2StartTimeEst
        && am2StartTimeReal <= System.currentTimeMillis());
    // TODO Add verification of additional data from jobHistory - whatever was
    // available in the failed attempt should be available here
  }

  /**
   * Wait for a task attempt to be assigned a container.
   *
   * @param task1Attempt2 the task attempt to wait for its container assignment
   * @throws TimeoutException if times out
   * @throws InterruptedException if interrupted
   */
  public static void waitForContainerAssignment(final TaskAttempt task1Attempt2)
      throws TimeoutException, InterruptedException {
    // poll every 10ms, give up after 10s
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        return task1Attempt2.getAssignedContainerID() != null;
      }
    }, 10, 10000);
  }

  /**
   * AM with 3 maps and 0 reduce. AM crashes after the first two tasks finishes
   * and recovers completely and succeeds in the second generation.
   *
   * @throws Exception
   */
  @Test
  public void testCrashOfMapsOnlyJob() throws Exception {
    int runCount = 0;
    // First generation: 3 maps, 0 reduces, job-history logging on.
    MRApp app =
        new MRAppWithHistory(3, 0, false, this.getClass().getName(), true,
            ++runCount);
    Configuration conf = new Configuration();
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);

    // all maps would be running
    Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task mapTask2 = it.next();
    Task mapTask3 = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);
    app.waitForState(mapTask2, TaskState.RUNNING);
    app.waitForState(mapTask3, TaskState.RUNNING);

    TaskAttempt task1Attempt = mapTask1.getAttempts().values().iterator().next();
    TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator().next();
    TaskAttempt task3Attempt = mapTask3.getAttempts().values().iterator().next();

    // before sending the TA_DONE, event make sure attempt has come to
    // RUNNING state
    app.waitForState(task1Attempt, TaskAttemptState.RUNNING);
    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);
    app.waitForState(task3Attempt, TaskAttemptState.RUNNING);

    // send the done signal to the 1st two maps
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(task1Attempt.getID(),
                TaskAttemptEventType.TA_DONE));
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(task2Attempt.getID(),
                TaskAttemptEventType.TA_DONE));

    // wait for first two map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    // stop the app
    app.stop();

    // rerun
    // in rerun the 1st two map will be recovered from previous run
    app =
        new MRAppWithHistory(2, 1, false, this.getClass().getName(), false,
            ++runCount);
    conf = new Configuration();
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    // Set num-reduces explicitly in conf as recovery logic depends on it.
    conf.setInt(MRJobConfig.NUM_REDUCES, 0);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    mapTask2 = it.next();
    mapTask3 = it.next();

    // first two maps will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    app.waitForState(mapTask3, TaskState.RUNNING);

    task3Attempt = mapTask3.getAttempts().values().iterator().next();
    // before sending the TA_DONE, event make sure attempt has come to
    // RUNNING state
    app.waitForState(task3Attempt, TaskAttemptState.RUNNING);

    // send the done signal to the 3rd map task
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(mapTask3.getAttempts().values().iterator().next()
                .getID(), TaskAttemptEventType.TA_DONE));

    // wait to get it completed
    app.waitForState(mapTask3, TaskState.SUCCEEDED);

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
  }

  /**
   * The class provides a custom implementation of output committer setupTask
   * and isRecoverySupported methods, which determines if recovery supported
   * based on config property.
   */
  public static class TestFileOutputCommitter extends
      org.apache.hadoop.mapred.FileOutputCommitter {

    // Recovery support is toggled purely by the "want.am.recovery" config key;
    // defaults to false when the key, the context, or its config is absent.
    @Override
    public boolean isRecoverySupported(
        org.apache.hadoop.mapred.JobContext jobContext) {
      boolean isRecoverySupported = false;
      if (jobContext != null && jobContext.getConfiguration() != null) {
        isRecoverySupported = jobContext.getConfiguration().getBoolean(
            "want.am.recovery", false);
      }
      return isRecoverySupported;
    }
  }

  /**
   * This test case primarily verifies if the recovery is controlled through config
   * property. In this case, recover is turned ON. AM with 3 maps and 0 reduce.
   * AM crashes after the first two tasks finishes and recovers completely and
   * succeeds in the second generation.
   *
   * @throws Exception
   */
  @Test
  public void testRecoverySuccessUsingCustomOutputCommitter() throws Exception {
    int runCount = 0;
    MRApp app = new MRAppWithHistory(3, 0, false, this.getClass().getName(),
        true, ++runCount);
    Configuration conf = new Configuration();
    // install the committer whose recovery support is config-driven
    conf.setClass("mapred.output.committer.class",
        TestFileOutputCommitter.class,
        org.apache.hadoop.mapred.OutputCommitter.class);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean("want.am.recovery", true);
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);

    // all maps would be running
    Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task mapTask2 = it.next();
    Task mapTask3 = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);
    app.waitForState(mapTask2, TaskState.RUNNING);
    app.waitForState(mapTask3, TaskState.RUNNING);

    TaskAttempt task1Attempt = mapTask1.getAttempts().values().iterator()
        .next();
    TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator()
        .next();
    TaskAttempt task3Attempt = mapTask3.getAttempts().values().iterator()
        .next();

    // before sending the TA_DONE, event make sure attempt has come to
    // RUNNING state
    app.waitForState(task1Attempt, TaskAttemptState.RUNNING);
    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);
    app.waitForState(task3Attempt, TaskAttemptState.RUNNING);

    // send the done signal to the 1st two maps
    app.getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(task1Attempt.getID(),
                TaskAttemptEventType.TA_DONE));
    app.getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(task2Attempt.getID(),
                TaskAttemptEventType.TA_DONE));

    // wait for first two map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    // stop the app
    app.stop();

    // rerun
    // in rerun the 1st two map will be recovered from previous run
    app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), false,
        ++runCount);
    conf = new Configuration();
    conf.setClass("mapred.output.committer.class",
        TestFileOutputCommitter.class,
        org.apache.hadoop.mapred.OutputCommitter.class);
    conf.setBoolean("want.am.recovery", true);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    // Set num-reduces explicitly in conf as recovery logic depends on it.
    conf.setInt(MRJobConfig.NUM_REDUCES, 0);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    mapTask2 = it.next();
    mapTask3 = it.next();

    // first two maps will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    app.waitForState(mapTask3, TaskState.RUNNING);

    task3Attempt = mapTask3.getAttempts().values().iterator().next();
    // before sending the TA_DONE, event make sure attempt has come to
    // RUNNING state
    app.waitForState(task3Attempt, TaskAttemptState.RUNNING);

    // send the done signal to the 3rd map task
    app.getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(mapTask3.getAttempts().values().iterator()
                .next().getID(), TaskAttemptEventType.TA_DONE));

    // wait to get it completed
    app.waitForState(mapTask3, TaskState.SUCCEEDED);

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
  }

  /**
   * This test case primarily verifies if the recovery is controlled through config
   * property. In this case, recover is turned OFF. AM with 3 maps and 0 reduce.
   * AM crashes after the first two tasks finishes and recovery fails and have
   * to rerun fully in the second generation and succeeds.
   *
   * @throws Exception
   */
  @Test
  public void testRecoveryFailsUsingCustomOutputCommitter() throws Exception {
    int runCount = 0;
    MRApp app =
        new MRAppWithHistory(3, 0, false, this.getClass().getName(), true,
            ++runCount);
    Configuration conf = new Configuration();
    // committer whose recovery support is config-driven; turned OFF here
    conf.setClass("mapred.output.committer.class",
        TestFileOutputCommitter.class,
        org.apache.hadoop.mapred.OutputCommitter.class);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean("want.am.recovery", false);
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);

    // all maps would be running
    Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task mapTask2 = it.next();
    Task mapTask3 = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);
    app.waitForState(mapTask2, TaskState.RUNNING);
    app.waitForState(mapTask3, TaskState.RUNNING);

    TaskAttempt task1Attempt = mapTask1.getAttempts().values().iterator().next();
    TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator().next();
    TaskAttempt task3Attempt = mapTask3.getAttempts().values().iterator().next();

    // before sending the TA_DONE, event make sure attempt has come to
    // RUNNING state
    app.waitForState(task1Attempt, TaskAttemptState.RUNNING);
    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);
    app.waitForState(task3Attempt, TaskAttemptState.RUNNING);

    // send the done signal to the 1st two maps
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(task1Attempt.getID(),
                TaskAttemptEventType.TA_DONE));
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(task2Attempt.getID(),
                TaskAttemptEventType.TA_DONE));

    // wait for first two map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    // stop the app
    app.stop();

    // rerun
    // in rerun the 1st two map will be recovered from previous run
    app =
        new MRAppWithHistory(2, 1, false, this.getClass().getName(), false,
            ++runCount);
    conf = new Configuration();
    conf.setClass("mapred.output.committer.class",
        TestFileOutputCommitter.class,
        org.apache.hadoop.mapred.OutputCommitter.class);
    conf.setBoolean("want.am.recovery", false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    // Set num-reduces explicitly in conf as recovery logic depends on it.
    conf.setInt(MRJobConfig.NUM_REDUCES, 0);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    mapTask2 = it.next();
    mapTask3 = it.next();

    // first two maps will NOT be recovered, need to send done from them
    app.waitForState(mapTask1, TaskState.RUNNING);
    app.waitForState(mapTask2, TaskState.RUNNING);

    app.waitForState(mapTask3, TaskState.RUNNING);

    task3Attempt = mapTask3.getAttempts().values().iterator().next();
    // before sending the TA_DONE, event make sure attempt has come to
    // RUNNING state
    app.waitForState(task3Attempt, TaskAttemptState.RUNNING);

    // send the done signal to all 3 tasks map task
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(mapTask1.getAttempts().values().iterator().next()
                .getID(), TaskAttemptEventType.TA_DONE));
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(mapTask2.getAttempts().values().iterator().next()
                .getID(), TaskAttemptEventType.TA_DONE));
    app
        .getContext()
        .getEventHandler()
        .handle(
            new TaskAttemptEvent(mapTask3.getAttempts().values().iterator().next()
                .getID(), TaskAttemptEventType.TA_DONE));

    // wait to get it completed
    app.waitForState(mapTask3, TaskState.SUCCEEDED);

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
  }

  /**
   * AM with 2 maps and 1 reduce, crashed twice: once after the first map
   * succeeds and once after the second map succeeds. Each new generation must
   * recover all previously completed tasks from history.
   */
  @Test
  public void testMultipleCrashes() throws Exception {

    int runCount = 0;
    MRApp app =
        new MRAppWithHistory(2, 1, false, this.getClass().getName(), true,
            ++runCount);
    Configuration conf = new Configuration();
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    //all maps would be running
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task mapTask2 = it.next();
    Task reduceTask = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);
    app.waitForState(mapTask2, TaskState.RUNNING);

    TaskAttempt task1Attempt1 = mapTask1.getAttempts().values().iterator().next();
    TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator().next();

    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task1Attempt1, TaskAttemptState.RUNNING);
    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);

    // the reduce task has already transitioned to RUNNING at this point
    // (NOTE(review): a previous comment here said "NEW", but the assertion
    // below checks RUNNING)
    Assert.assertEquals("Reduce Task state not correct",
        TaskState.RUNNING, reduceTask.getReport().getTaskState());

    //send the done signal to the 1st map
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task1Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for first map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    // Crash the app
    app.stop();

    //rerun
    //in rerun the 1st map will be recovered from previous run
    app =
        new MRAppWithHistory(2, 1, false, this.getClass().getName(), false,
            ++runCount);
    conf = new Configuration();
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    //all maps would be running
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    mapTask2 = it.next();
    reduceTask = it.next();

    // first map will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    app.waitForState(mapTask2, TaskState.RUNNING);

    task2Attempt = mapTask2.getAttempts().values().iterator().next();
    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task2Attempt, TaskAttemptState.RUNNING);

    //send the done signal to the 2nd map task
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            mapTask2.getAttempts().values().iterator().next().getID(),
            TaskAttemptEventType.TA_DONE));

    //wait to get it completed
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    // Crash the app again.
    app.stop();

    //rerun
    //in rerun the 1st and 2nd map will be recovered from previous run
    app =
        new MRAppWithHistory(2, 1, false, this.getClass().getName(), false,
            ++runCount);
    conf = new Configuration();
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    //all maps would be running
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    mapTask2 = it.next();
    reduceTask = it.next();

    // The maps will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    //wait for reduce to be running before sending done
    app.waitForState(reduceTask, TaskState.RUNNING);
    //send the done signal to the reduce
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            reduceTask.getAttempts().values().iterator().next().getID(),
            TaskAttemptEventType.TA_DONE));

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
  }

  /**
   * AM with 1 map and 2 reduces. The map and first reduce complete (including
   * committed output) before the AM is stopped; the second generation must
   * recover them, run only the second reduce, and produce valid final output.
   */
  @Test
  public void testOutputRecovery() throws Exception {
    int runCount = 0;
    MRApp app = new MRAppWithHistory(1, 2, false, this.getClass().getName(),
        true, ++runCount);
    Configuration conf = new Configuration();
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task reduceTask1 = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);

    TaskAttempt task1Attempt1 = mapTask1.getAttempts().values().iterator()
        .next();

    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task1Attempt1, TaskAttemptState.RUNNING);

    //send the done signal to the map
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task1Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    // Verify the shuffle-port
    Assert.assertEquals(5467, task1Attempt1.getShufflePort());

    app.waitForState(reduceTask1, TaskState.RUNNING);
    TaskAttempt reduce1Attempt1 = reduceTask1.getAttempts().values().iterator().next();

    // write output corresponding to reduce1
    writeOutput(reduce1Attempt1, conf);

    //send the done signal to the 1st reduce
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            reduce1Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for first reduce task to complete
    app.waitForState(reduceTask1, TaskState.SUCCEEDED);

    //stop the app before the job completes.
    app.stop();

    //rerun
    //in rerun the map will be recovered from previous run
    app = new MRAppWithHistory(1, 2, false, this.getClass().getName(), false,
        ++runCount);
    conf = new Configuration();
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    reduceTask1 = it.next();
    Task reduceTask2 = it.next();

    // map will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    // Verify the shuffle-port after recovery
    task1Attempt1 = mapTask1.getAttempts().values().iterator().next();
    Assert.assertEquals(5467, task1Attempt1.getShufflePort());

    // first reduce will be recovered, no need to send done
    app.waitForState(reduceTask1, TaskState.SUCCEEDED);

    app.waitForState(reduceTask2, TaskState.RUNNING);

    TaskAttempt reduce2Attempt = reduceTask2.getAttempts().values()
        .iterator().next();
    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(reduce2Attempt, TaskAttemptState.RUNNING);

    //send the done signal to the 2nd reduce task
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            reduce2Attempt.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait to get it completed
    app.waitForState(reduceTask2, TaskState.SUCCEEDED);

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
    validateOutput();
  }

  /**
   * AM with 2 maps and 1 reduce. The first map succeeds (after writing output
   * that must NOT survive into the final result) before the AM is stopped; the
   * second generation recovers that map, finishes the rest, and the final
   * output is validated.
   */
  @Test
  public void testOutputRecoveryMapsOnly() throws Exception {
    int runCount = 0;
    MRApp app = new MRAppWithHistory(2, 1, false, this.getClass().getName(),
        true, ++runCount);
    Configuration conf = new Configuration();
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task mapTask2 = it.next();
    Task reduceTask1 = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);

    TaskAttempt task1Attempt1 = mapTask1.getAttempts().values().iterator()
        .next();

    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task1Attempt1, TaskAttemptState.RUNNING);

    // write output corresponding to map1 (This is just to validate that it is
    //no included in the output)
    writeBadOutput(task1Attempt1, conf);

    //send the done signal to the map
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task1Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    // Verify the shuffle-port
    Assert.assertEquals(5467, task1Attempt1.getShufflePort());

    //stop the app before the job completes.
    app.stop();

    //rerun
    //in rerun the map will be recovered from previous run
    app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), false,
        ++runCount);
    conf = new Configuration();
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
    conf.setBoolean("mapred.mapper.new-api", true);
    conf.setBoolean("mapred.reducer.new-api", true);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    mapTask2 = it.next();
    reduceTask1 = it.next();

    // map will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    // Verify the shuffle-port after recovery
    task1Attempt1 = mapTask1.getAttempts().values().iterator().next();
    Assert.assertEquals(5467, task1Attempt1.getShufflePort());

    app.waitForState(mapTask2, TaskState.RUNNING);

    TaskAttempt task2Attempt1 = mapTask2.getAttempts().values().iterator()
        .next();

    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task2Attempt1, TaskAttemptState.RUNNING);

    //send the done signal to the map
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task2Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for map task to complete
    app.waitForState(mapTask2, TaskState.SUCCEEDED);

    // Verify the shuffle-port
    Assert.assertEquals(5467, task2Attempt1.getShufflePort());

    app.waitForState(reduceTask1, TaskState.RUNNING);
    TaskAttempt reduce1Attempt1 = reduceTask1.getAttempts().values().iterator().next();

    // write output corresponding to reduce1
    writeOutput(reduce1Attempt1, conf);

    //send the done signal to the 1st reduce
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            reduce1Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for first reduce task to complete
    app.waitForState(reduceTask1, TaskState.SUCCEEDED);

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
    validateOutput();
  }

  /**
   * Same output-recovery scenario as testOutputRecovery, but driven through
   * the old (mapred, non-new-api) committer path.
   */
  @Test
  public void testRecoveryWithOldCommiter() throws Exception {
    int runCount = 0;
    MRApp app = new MRAppWithHistory(1, 2, false, this.getClass().getName(),
        true, ++runCount);
    Configuration conf = new Configuration();
    // old API on both sides exercises the legacy committer code path
    conf.setBoolean("mapred.mapper.new-api", false);
    conf.setBoolean("mapred.reducer.new-api", false);
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    Job job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    Iterator<Task> it = job.getTasks().values().iterator();
    Task mapTask1 = it.next();
    Task reduceTask1 = it.next();

    // all maps must be running
    app.waitForState(mapTask1, TaskState.RUNNING);

    TaskAttempt task1Attempt1 = mapTask1.getAttempts().values().iterator()
        .next();

    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(task1Attempt1, TaskAttemptState.RUNNING);

    //send the done signal to the map
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            task1Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for map task to complete
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    // Verify the shuffle-port
    Assert.assertEquals(5467, task1Attempt1.getShufflePort());

    app.waitForState(reduceTask1, TaskState.RUNNING);
    TaskAttempt reduce1Attempt1 = reduceTask1.getAttempts().values().iterator().next();

    // write output corresponding to reduce1
    writeOutput(reduce1Attempt1, conf);

    //send the done signal to the 1st reduce
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            reduce1Attempt1.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait for first reduce task to complete
    app.waitForState(reduceTask1, TaskState.SUCCEEDED);

    //stop the app before the job completes.
    app.stop();

    //rerun
    //in rerun the map will be recovered from previous run
    app = new MRAppWithHistory(1, 2, false, this.getClass().getName(), false,
        ++runCount);
    conf = new Configuration();
    conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
    conf.setBoolean("mapred.mapper.new-api", false);
    conf.setBoolean("mapred.reducer.new-api", false);
    conf.set(FileOutputFormat.OUTDIR, outputDir.toString());
    conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
    job = app.submit(conf);
    app.waitForState(job, JobState.RUNNING);
    Assert.assertEquals("No of tasks not correct",
        3, job.getTasks().size());
    it = job.getTasks().values().iterator();
    mapTask1 = it.next();
    reduceTask1 = it.next();
    Task reduceTask2 = it.next();

    // map will be recovered, no need to send done
    app.waitForState(mapTask1, TaskState.SUCCEEDED);

    // Verify the shuffle-port after recovery
    task1Attempt1 = mapTask1.getAttempts().values().iterator().next();
    Assert.assertEquals(5467, task1Attempt1.getShufflePort());

    // first reduce will be recovered, no need to send done
    app.waitForState(reduceTask1, TaskState.SUCCEEDED);

    app.waitForState(reduceTask2, TaskState.RUNNING);

    TaskAttempt reduce2Attempt = reduceTask2.getAttempts().values()
        .iterator().next();
    //before sending the TA_DONE, event make sure attempt has come to
    //RUNNING state
    app.waitForState(reduce2Attempt, TaskAttemptState.RUNNING);

    //send the done signal to the 2nd reduce task
    app.getContext().getEventHandler().handle(
        new TaskAttemptEvent(
            reduce2Attempt.getID(),
            TaskAttemptEventType.TA_DONE));

    //wait to get it completed
    app.waitForState(reduceTask2, TaskState.SUCCEEDED);

    app.waitForState(job, JobState.SUCCEEDED);
    app.verifyCompleted();
    validateOutput();
  }

  /**
   * AM with 2 maps and 1 reduce. For 1st map, one attempt fails, one attempt
   * completely disappears because of failed launch, one attempt gets killed and
   * one attempt succeeds. AM crashes after the first tasks finishes and
   * recovers completely and succeeds in the second generation.
* * @throws Exception */ @Test public void testSpeculative() throws Exception { int runCount = 0; long am1StartTimeEst = System.currentTimeMillis(); MRApp app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), true, ++runCount); Configuration conf = new Configuration(); conf.setBoolean("mapred.mapper.new-api", true); conf.setBoolean("mapred.reducer.new-api", true); conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false); conf.set(FileOutputFormat.OUTDIR, outputDir.toString()); Job job = app.submit(conf); app.waitForState(job, JobState.RUNNING); long jobStartTime = job.getReport().getStartTime(); //all maps would be running Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size()); Iterator<Task> it = job.getTasks().values().iterator(); Task mapTask1 = it.next(); Task mapTask2 = it.next(); Task reduceTask = it.next(); // all maps must be running app.waitForState(mapTask1, TaskState.RUNNING); app.waitForState(mapTask2, TaskState.RUNNING); // Launch a Speculative Task for the first Task app.getContext().getEventHandler().handle( new TaskEvent(mapTask1.getID(), TaskEventType.T_ADD_SPEC_ATTEMPT)); int timeOut = 0; while (mapTask1.getAttempts().size() != 2 && timeOut++ < 10) { Thread.sleep(1000); LOG.info("Waiting for next attempt to start"); } Iterator<TaskAttempt> t1it = mapTask1.getAttempts().values().iterator(); TaskAttempt task1Attempt1 = t1it.next(); TaskAttempt task1Attempt2 = t1it.next(); TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator().next(); // wait for the second task attempt to be assigned. 
waitForContainerAssignment(task1Attempt2); ContainerId t1a2contId = task1Attempt2.getAssignedContainerID(); LOG.info(t1a2contId.toString()); LOG.info(task1Attempt1.getID().toString()); LOG.info(task1Attempt2.getID().toString()); // Launch container for speculative attempt app.getContext().getEventHandler().handle( new TaskAttemptContainerLaunchedEvent(task1Attempt2.getID(), runCount)); //before sending the TA_DONE, event make sure attempt has come to //RUNNING state app.waitForState(task1Attempt1, TaskAttemptState.RUNNING); app.waitForState(task1Attempt2, TaskAttemptState.RUNNING); app.waitForState(task2Attempt, TaskAttemptState.RUNNING); // reduces must be in NEW state Assert.assertEquals("Reduce Task state not correct", TaskState.RUNNING, reduceTask.getReport().getTaskState()); //send the done signal to the map 1 attempt 1 app.getContext().getEventHandler().handle( new TaskAttemptEvent( task1Attempt1.getID(), TaskAttemptEventType.TA_DONE)); app.waitForState(task1Attempt1, TaskAttemptState.SUCCEEDED); //wait for first map task to complete app.waitForState(mapTask1, TaskState.SUCCEEDED); long task1StartTime = mapTask1.getReport().getStartTime(); long task1FinishTime = mapTask1.getReport().getFinishTime(); //stop the app app.stop(); //rerun //in rerun the 1st map will be recovered from previous run long am2StartTimeEst = System.currentTimeMillis(); app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), false, ++runCount); conf = new Configuration(); conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true); conf.setBoolean("mapred.mapper.new-api", true); conf.setBoolean("mapred.reducer.new-api", true); conf.set(FileOutputFormat.OUTDIR, outputDir.toString()); conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false); job = app.submit(conf); app.waitForState(job, JobState.RUNNING); //all maps would be running Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size()); it = job.getTasks().values().iterator(); mapTask1 = it.next(); mapTask2 = 
it.next(); reduceTask = it.next(); // first map will be recovered, no need to send done app.waitForState(mapTask1, TaskState.SUCCEEDED); app.waitForState(mapTask2, TaskState.RUNNING); task2Attempt = mapTask2.getAttempts().values().iterator().next(); //before sending the TA_DONE, event make sure attempt has come to //RUNNING state app.waitForState(task2Attempt, TaskAttemptState.RUNNING); //send the done signal to the 2nd map task app.getContext().getEventHandler().handle( new TaskAttemptEvent( mapTask2.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE)); //wait to get it completed app.waitForState(mapTask2, TaskState.SUCCEEDED); //wait for reduce to be running before sending done app.waitForState(reduceTask, TaskState.RUNNING); //send the done signal to the reduce app.getContext().getEventHandler().handle( new TaskAttemptEvent( reduceTask.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE)); app.waitForState(job, JobState.SUCCEEDED); app.verifyCompleted(); Assert.assertEquals("Job Start time not correct", jobStartTime, job.getReport().getStartTime()); Assert.assertEquals("Task Start time not correct", task1StartTime, mapTask1.getReport().getStartTime()); Assert.assertEquals("Task Finish time not correct", task1FinishTime, mapTask1.getReport().getFinishTime()); Assert.assertEquals(2, job.getAMInfos().size()); int attemptNum = 1; // Verify AMInfo for (AMInfo amInfo : job.getAMInfos()) { Assert.assertEquals(attemptNum++, amInfo.getAppAttemptId() .getAttemptId()); Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId() .getApplicationAttemptId()); Assert.assertEquals(MRApp.NM_HOST, amInfo.getNodeManagerHost()); Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort()); Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort()); } long am1StartTimeReal = job.getAMInfos().get(0).getStartTime(); long am2StartTimeReal = job.getAMInfos().get(1).getStartTime(); 
Assert.assertTrue(am1StartTimeReal >= am1StartTimeEst && am1StartTimeReal <= am2StartTimeEst); Assert.assertTrue(am2StartTimeReal >= am2StartTimeEst && am2StartTimeReal <= System.currentTimeMillis()); } @Test(timeout=30000) public void testRecoveryWithoutShuffleSecret() throws Exception { int runCount = 0; MRApp app = new MRAppNoShuffleSecret(2, 1, false, this.getClass().getName(), true, ++runCount); Configuration conf = new Configuration(); conf.setBoolean("mapred.mapper.new-api", true); conf.setBoolean("mapred.reducer.new-api", true); conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false); conf.set(FileOutputFormat.OUTDIR, outputDir.toString()); Job job = app.submit(conf); app.waitForState(job, JobState.RUNNING); //all maps would be running Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size()); Iterator<Task> it = job.getTasks().values().iterator(); Task mapTask1 = it.next(); Task mapTask2 = it.next(); Task reduceTask = it.next(); // all maps must be running app.waitForState(mapTask1, TaskState.RUNNING); app.waitForState(mapTask2, TaskState.RUNNING); TaskAttempt task1Attempt = mapTask1.getAttempts().values().iterator().next(); TaskAttempt task2Attempt = mapTask2.getAttempts().values().iterator().next(); //before sending the TA_DONE, event make sure attempt has come to //RUNNING state app.waitForState(task1Attempt, TaskAttemptState.RUNNING); app.waitForState(task2Attempt, TaskAttemptState.RUNNING); // reduces must be in NEW state Assert.assertEquals("Reduce Task state not correct", TaskState.RUNNING, reduceTask.getReport().getTaskState()); //send the done signal to the 1st map attempt app.getContext().getEventHandler().handle( new TaskAttemptEvent( task1Attempt.getID(), TaskAttemptEventType.TA_DONE)); //wait for first map task to complete app.waitForState(mapTask1, TaskState.SUCCEEDED); //stop the app app.stop(); //in recovery the 1st map should NOT be recovered from previous run //since the shuffle secret was not provided with the job 
credentials //and had to be rolled per app attempt app = new MRAppNoShuffleSecret(2, 1, false, this.getClass().getName(), false, ++runCount); conf = new Configuration(); conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true); conf.setBoolean("mapred.mapper.new-api", true); conf.setBoolean("mapred.reducer.new-api", true); conf.set(FileOutputFormat.OUTDIR, outputDir.toString()); conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false); job = app.submit(conf); app.waitForState(job, JobState.RUNNING); //all maps would be running Assert.assertEquals("No of tasks not correct", 3, job.getTasks().size()); it = job.getTasks().values().iterator(); mapTask1 = it.next(); mapTask2 = it.next(); reduceTask = it.next(); app.waitForState(mapTask1, TaskState.RUNNING); app.waitForState(mapTask2, TaskState.RUNNING); task2Attempt = mapTask2.getAttempts().values().iterator().next(); //before sending the TA_DONE, event make sure attempt has come to //RUNNING state app.waitForState(task2Attempt, TaskAttemptState.RUNNING); //send the done signal to the 2nd map task app.getContext().getEventHandler().handle( new TaskAttemptEvent( mapTask2.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE)); //wait to get it completed app.waitForState(mapTask2, TaskState.SUCCEEDED); //verify first map task is still running app.waitForState(mapTask1, TaskState.RUNNING); //send the done signal to the 2nd map task app.getContext().getEventHandler().handle( new TaskAttemptEvent( mapTask1.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE)); //wait to get it completed app.waitForState(mapTask1, TaskState.SUCCEEDED); //wait for reduce to be running before sending done app.waitForState(reduceTask, TaskState.RUNNING); //send the done signal to the reduce app.getContext().getEventHandler().handle( new TaskAttemptEvent( reduceTask.getAttempts().values().iterator().next().getID(), TaskAttemptEventType.TA_DONE)); app.waitForState(job, JobState.SUCCEEDED); 
app.verifyCompleted(); } @Test public void testRecoverySuccessAttempt() { LOG.info("--- START: testRecoverySuccessAttempt ---"); long clusterTimestamp = System.currentTimeMillis(); EventHandler mockEventHandler = mock(EventHandler.class); MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler); TaskId taskId = recoverMapTask.getID(); JobID jobID = new JobID(Long.toString(clusterTimestamp), 1); TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId()); //Mock up the TaskAttempts Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>(); TaskAttemptID taId1 = new TaskAttemptID(taskID, 2); TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.SUCCEEDED); mockTaskAttempts.put(taId1, mockTAinfo1); TaskAttemptID taId2 = new TaskAttemptID(taskID, 1); TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.FAILED); mockTaskAttempts.put(taId2, mockTAinfo2); OutputCommitter mockCommitter = mock (OutputCommitter.class); TaskInfo mockTaskInfo = mock(TaskInfo.class); when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED"); when(mockTaskInfo.getTaskId()).thenReturn(taskID); when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts); recoverMapTask.handle( new TaskRecoverEvent(taskId, mockTaskInfo,mockCommitter, true)); ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class); verify(mockEventHandler,atLeast(1)).handle( (org.apache.hadoop.yarn.event.Event) arg.capture()); Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>(); finalAttemptStates.put(taId1, TaskAttemptState.SUCCEEDED); finalAttemptStates.put(taId2, TaskAttemptState.FAILED); List<EventType> jobHistoryEvents = new ArrayList<EventType>(); jobHistoryEvents.add(EventType.TASK_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED); 
jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED); jobHistoryEvents.add(EventType.TASK_FINISHED); recoveryChecker(recoverMapTask, TaskState.SUCCEEDED, finalAttemptStates, arg, jobHistoryEvents, 2L, 1L); } @Test public void testRecoveryAllFailAttempts() { LOG.info("--- START: testRecoveryAllFailAttempts ---"); long clusterTimestamp = System.currentTimeMillis(); EventHandler mockEventHandler = mock(EventHandler.class); MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler); TaskId taskId = recoverMapTask.getID(); JobID jobID = new JobID(Long.toString(clusterTimestamp), 1); TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId()); //Mock up the TaskAttempts Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>(); TaskAttemptID taId1 = new TaskAttemptID(taskID, 2); TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.FAILED); mockTaskAttempts.put(taId1, mockTAinfo1); TaskAttemptID taId2 = new TaskAttemptID(taskID, 1); TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.FAILED); mockTaskAttempts.put(taId2, mockTAinfo2); OutputCommitter mockCommitter = mock (OutputCommitter.class); TaskInfo mockTaskInfo = mock(TaskInfo.class); when(mockTaskInfo.getTaskStatus()).thenReturn("FAILED"); when(mockTaskInfo.getTaskId()).thenReturn(taskID); when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts); recoverMapTask.handle( new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true)); ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class); verify(mockEventHandler,atLeast(1)).handle( (org.apache.hadoop.yarn.event.Event) arg.capture()); Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>(); finalAttemptStates.put(taId1, TaskAttemptState.FAILED); finalAttemptStates.put(taId2, TaskAttemptState.FAILED); 
List<EventType> jobHistoryEvents = new ArrayList<EventType>(); jobHistoryEvents.add(EventType.TASK_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED); jobHistoryEvents.add(EventType.TASK_FAILED); recoveryChecker(recoverMapTask, TaskState.FAILED, finalAttemptStates, arg, jobHistoryEvents, 2L, 2L); } @Test public void testRecoveryTaskSuccessAllAttemptsFail() { LOG.info("--- START: testRecoveryTaskSuccessAllAttemptsFail ---"); long clusterTimestamp = System.currentTimeMillis(); EventHandler mockEventHandler = mock(EventHandler.class); MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler); TaskId taskId = recoverMapTask.getID(); JobID jobID = new JobID(Long.toString(clusterTimestamp), 1); TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId()); //Mock up the TaskAttempts Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>(); TaskAttemptID taId1 = new TaskAttemptID(taskID, 2); TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.FAILED); mockTaskAttempts.put(taId1, mockTAinfo1); TaskAttemptID taId2 = new TaskAttemptID(taskID, 1); TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.FAILED); mockTaskAttempts.put(taId2, mockTAinfo2); OutputCommitter mockCommitter = mock (OutputCommitter.class); TaskInfo mockTaskInfo = mock(TaskInfo.class); when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED"); when(mockTaskInfo.getTaskId()).thenReturn(taskID); when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts); recoverMapTask.handle( new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true)); ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class); verify(mockEventHandler,atLeast(1)).handle( (org.apache.hadoop.yarn.event.Event) 
arg.capture()); Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>(); finalAttemptStates.put(taId1, TaskAttemptState.FAILED); finalAttemptStates.put(taId2, TaskAttemptState.FAILED); // check for one new attempt launched since successful attempt not found TaskAttemptID taId3 = new TaskAttemptID(taskID, 2000); finalAttemptStates.put(taId3, TaskAttemptState.NEW); List<EventType> jobHistoryEvents = new ArrayList<EventType>(); jobHistoryEvents.add(EventType.TASK_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FAILED); recoveryChecker(recoverMapTask, TaskState.RUNNING, finalAttemptStates, arg, jobHistoryEvents, 2L, 2L); } @Test public void testRecoveryTaskSuccessAllAttemptsSucceed() { LOG.info("--- START: testRecoveryTaskSuccessAllAttemptsFail ---"); long clusterTimestamp = System.currentTimeMillis(); EventHandler mockEventHandler = mock(EventHandler.class); MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler); TaskId taskId = recoverMapTask.getID(); JobID jobID = new JobID(Long.toString(clusterTimestamp), 1); TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId()); //Mock up the TaskAttempts Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>(); TaskAttemptID taId1 = new TaskAttemptID(taskID, 2); TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.SUCCEEDED); mockTaskAttempts.put(taId1, mockTAinfo1); TaskAttemptID taId2 = new TaskAttemptID(taskID, 1); TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.SUCCEEDED); mockTaskAttempts.put(taId2, mockTAinfo2); OutputCommitter mockCommitter = mock (OutputCommitter.class); TaskInfo mockTaskInfo = mock(TaskInfo.class); 
when(mockTaskInfo.getTaskStatus()).thenReturn("SUCCEEDED"); when(mockTaskInfo.getTaskId()).thenReturn(taskID); when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts); recoverMapTask.handle( new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true)); ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class); verify(mockEventHandler,atLeast(1)).handle( (org.apache.hadoop.yarn.event.Event) arg.capture()); Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>(); finalAttemptStates.put(taId1, TaskAttemptState.SUCCEEDED); finalAttemptStates.put(taId2, TaskAttemptState.SUCCEEDED); List<EventType> jobHistoryEvents = new ArrayList<EventType>(); jobHistoryEvents.add(EventType.TASK_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_FINISHED); jobHistoryEvents.add(EventType.TASK_FINISHED); recoveryChecker(recoverMapTask, TaskState.SUCCEEDED, finalAttemptStates, arg, jobHistoryEvents, 2L, 0L); } @Test public void testRecoveryAllAttemptsKilled() { LOG.info("--- START: testRecoveryAllAttemptsKilled ---"); long clusterTimestamp = System.currentTimeMillis(); EventHandler mockEventHandler = mock(EventHandler.class); MapTaskImpl recoverMapTask = getMockMapTask(clusterTimestamp, mockEventHandler); TaskId taskId = recoverMapTask.getID(); JobID jobID = new JobID(Long.toString(clusterTimestamp), 1); TaskID taskID = new TaskID(jobID, org.apache.hadoop.mapreduce.TaskType.MAP, taskId.getId()); //Mock up the TaskAttempts Map<TaskAttemptID, TaskAttemptInfo> mockTaskAttempts = new HashMap<TaskAttemptID, TaskAttemptInfo>(); TaskAttemptID taId1 = new TaskAttemptID(taskID, 2); TaskAttemptInfo mockTAinfo1 = getMockTaskAttemptInfo(taId1, TaskAttemptState.KILLED); mockTaskAttempts.put(taId1, mockTAinfo1); TaskAttemptID taId2 = new TaskAttemptID(taskID, 1); 
TaskAttemptInfo mockTAinfo2 = getMockTaskAttemptInfo(taId2, TaskAttemptState.KILLED); mockTaskAttempts.put(taId2, mockTAinfo2); OutputCommitter mockCommitter = mock (OutputCommitter.class); TaskInfo mockTaskInfo = mock(TaskInfo.class); when(mockTaskInfo.getTaskStatus()).thenReturn("KILLED"); when(mockTaskInfo.getTaskId()).thenReturn(taskID); when(mockTaskInfo.getAllTaskAttempts()).thenReturn(mockTaskAttempts); recoverMapTask.handle( new TaskRecoverEvent(taskId, mockTaskInfo, mockCommitter, true)); ArgumentCaptor<Event> arg = ArgumentCaptor.forClass(Event.class); verify(mockEventHandler,atLeast(1)).handle( (org.apache.hadoop.yarn.event.Event) arg.capture()); Map<TaskAttemptID, TaskAttemptState> finalAttemptStates = new HashMap<TaskAttemptID, TaskAttemptState>(); finalAttemptStates.put(taId1, TaskAttemptState.KILLED); finalAttemptStates.put(taId2, TaskAttemptState.KILLED); List<EventType> jobHistoryEvents = new ArrayList<EventType>(); jobHistoryEvents.add(EventType.TASK_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_KILLED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_STARTED); jobHistoryEvents.add(EventType.MAP_ATTEMPT_KILLED); jobHistoryEvents.add(EventType.TASK_FAILED); recoveryChecker(recoverMapTask, TaskState.KILLED, finalAttemptStates, arg, jobHistoryEvents, 2L, 0L); } private void recoveryChecker(MapTaskImpl checkTask, TaskState finalState, Map<TaskAttemptID, TaskAttemptState> finalAttemptStates, ArgumentCaptor<Event> arg, List<EventType> expectedJobHistoryEvents, long expectedMapLaunches, long expectedFailedMaps) { assertEquals("Final State of Task", finalState, checkTask.getState()); Map<TaskAttemptId, TaskAttempt> recoveredAttempts = checkTask.getAttempts(); assertEquals("Expected Number of Task Attempts", finalAttemptStates.size(), recoveredAttempts.size()); for (TaskAttemptID taID : finalAttemptStates.keySet()) { assertEquals("Expected Task Attempt State", finalAttemptStates.get(taID), 
recoveredAttempts.get(TypeConverter.toYarn(taID)).getState()); } Iterator<Event> ie = arg.getAllValues().iterator(); int eventNum = 0; long totalLaunchedMaps = 0; long totalFailedMaps = 0; boolean jobTaskEventReceived = false; while (ie.hasNext()) { Object current = ie.next(); ++eventNum; LOG.info(eventNum + " " + current.getClass().getName()); if (current instanceof JobHistoryEvent) { JobHistoryEvent jhe = (JobHistoryEvent) current; LOG.info(expectedJobHistoryEvents.get(0).toString() + " " + jhe.getHistoryEvent().getEventType().toString() + " " + jhe.getJobID()); assertEquals(expectedJobHistoryEvents.get(0), jhe.getHistoryEvent().getEventType()); expectedJobHistoryEvents.remove(0); } else if (current instanceof JobCounterUpdateEvent) { JobCounterUpdateEvent jcue = (JobCounterUpdateEvent) current; LOG.info("JobCounterUpdateEvent " + jcue.getCounterUpdates().get(0).getCounterKey() + " " + jcue.getCounterUpdates().get(0).getIncrementValue()); if (jcue.getCounterUpdates().get(0).getCounterKey() == JobCounter.NUM_FAILED_MAPS) { totalFailedMaps += jcue.getCounterUpdates().get(0) .getIncrementValue(); } else if (jcue.getCounterUpdates().get(0).getCounterKey() == JobCounter.TOTAL_LAUNCHED_MAPS) { totalLaunchedMaps += jcue.getCounterUpdates().get(0) .getIncrementValue(); } } else if (current instanceof JobTaskEvent) { JobTaskEvent jte = (JobTaskEvent) current; assertEquals(jte.getState(), finalState); jobTaskEventReceived = true; } } assertTrue(jobTaskEventReceived || (finalState == TaskState.RUNNING)); assertEquals("Did not process all expected JobHistoryEvents", 0, expectedJobHistoryEvents.size()); assertEquals("Expected Map Launches", expectedMapLaunches, totalLaunchedMaps); assertEquals("Expected Failed Maps", expectedFailedMaps, totalFailedMaps); } private MapTaskImpl getMockMapTask(long clusterTimestamp, EventHandler eh) { ApplicationId appId = ApplicationId.newInstance(clusterTimestamp, 1); JobId jobId = MRBuilderUtils.newJobId(appId, 1); int partitions = 2; Path 
remoteJobConfFile = mock(Path.class); JobConf conf = new JobConf(); TaskAttemptListener taskAttemptListener = mock(TaskAttemptListener.class); Token<JobTokenIdentifier> jobToken = (Token<JobTokenIdentifier>) mock(Token.class); Credentials credentials = null; Clock clock = new SystemClock(); int appAttemptId = 3; MRAppMetrics metrics = mock(MRAppMetrics.class); Resource minContainerRequirements = mock(Resource.class); when(minContainerRequirements.getMemorySize()).thenReturn(1000L); ClusterInfo clusterInfo = mock(ClusterInfo.class); AppContext appContext = mock(AppContext.class); when(appContext.getClusterInfo()).thenReturn(clusterInfo); TaskSplitMetaInfo taskSplitMetaInfo = mock(TaskSplitMetaInfo.class); MapTaskImpl mapTask = new MapTaskImpl(jobId, partitions, eh, remoteJobConfFile, conf, taskSplitMetaInfo, taskAttemptListener, jobToken, credentials, clock, appAttemptId, metrics, appContext); return mapTask; } private TaskAttemptInfo getMockTaskAttemptInfo(TaskAttemptID tai, TaskAttemptState tas) { ContainerId ci = mock(ContainerId.class); Counters counters = mock(Counters.class); TaskType tt = TaskType.MAP; long finishTime = System.currentTimeMillis(); TaskAttemptInfo mockTAinfo = mock(TaskAttemptInfo.class); when(mockTAinfo.getAttemptId()).thenReturn(tai); when(mockTAinfo.getContainerId()).thenReturn(ci); when(mockTAinfo.getCounters()).thenReturn(counters); when(mockTAinfo.getError()).thenReturn(""); when(mockTAinfo.getFinishTime()).thenReturn(finishTime); when(mockTAinfo.getHostname()).thenReturn("localhost"); when(mockTAinfo.getHttpPort()).thenReturn(23); when(mockTAinfo.getMapFinishTime()).thenReturn(finishTime - 1000L); when(mockTAinfo.getPort()).thenReturn(24); when(mockTAinfo.getRackname()).thenReturn("defaultRack"); when(mockTAinfo.getShuffleFinishTime()).thenReturn(finishTime - 2000L); when(mockTAinfo.getShufflePort()).thenReturn(25); when(mockTAinfo.getSortFinishTime()).thenReturn(finishTime - 3000L); when(mockTAinfo.getStartTime()).thenReturn(finishTime 
-10000); when(mockTAinfo.getState()).thenReturn("task in progress"); when(mockTAinfo.getTaskStatus()).thenReturn(tas.toString()); when(mockTAinfo.getTaskType()).thenReturn(tt); when(mockTAinfo.getTrackerName()).thenReturn("TrackerName"); return mockTAinfo; } private void writeBadOutput(TaskAttempt attempt, Configuration conf) throws Exception { TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, TypeConverter.fromYarn(attempt.getID())); TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat(); RecordWriter theRecordWriter = theOutputFormat .getRecordWriter(tContext); NullWritable nullWritable = NullWritable.get(); try { theRecordWriter.write(key2, val2); theRecordWriter.write(null, nullWritable); theRecordWriter.write(null, val2); theRecordWriter.write(nullWritable, val1); theRecordWriter.write(key1, nullWritable); theRecordWriter.write(key2, null); theRecordWriter.write(null, null); theRecordWriter.write(key1, val1); } finally { theRecordWriter.close(tContext); } OutputFormat outputFormat = ReflectionUtils.newInstance( tContext.getOutputFormatClass(), conf); OutputCommitter committer = outputFormat.getOutputCommitter(tContext); committer.commitTask(tContext); } private void writeOutput(TaskAttempt attempt, Configuration conf) throws Exception { TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, TypeConverter.fromYarn(attempt.getID())); TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat(); RecordWriter theRecordWriter = theOutputFormat .getRecordWriter(tContext); NullWritable nullWritable = NullWritable.get(); try { theRecordWriter.write(key1, val1); theRecordWriter.write(null, nullWritable); theRecordWriter.write(null, val1); theRecordWriter.write(nullWritable, val2); theRecordWriter.write(key2, nullWritable); theRecordWriter.write(key1, null); theRecordWriter.write(null, null); theRecordWriter.write(key2, val2); } finally { theRecordWriter.close(tContext); } OutputFormat outputFormat = ReflectionUtils.newInstance( 
tContext.getOutputFormatClass(), conf); OutputCommitter committer = outputFormat.getOutputCommitter(tContext); committer.commitTask(tContext); } private void validateOutput() throws IOException { File expectedFile = new File(new Path(outputDir, partFile).toString()); StringBuffer expectedOutput = new StringBuffer(); expectedOutput.append(key1).append('\t').append(val1).append("\n"); expectedOutput.append(val1).append("\n"); expectedOutput.append(val2).append("\n"); expectedOutput.append(key2).append("\n"); expectedOutput.append(key1).append("\n"); expectedOutput.append(key2).append('\t').append(val2).append("\n"); String output = slurp(expectedFile); Assert.assertEquals(output, expectedOutput.toString()); } public static String slurp(File f) throws IOException { int len = (int) f.length(); byte[] buf = new byte[len]; FileInputStream in = new FileInputStream(f); String contents = null; try { in.read(buf, 0, len); contents = new String(buf, "UTF-8"); } finally { in.close(); } return contents; } static class MRAppWithHistory extends MRApp { public MRAppWithHistory(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount) { super(maps, reduces, autoComplete, testName, cleanOnStart, startCount); } @Override protected ContainerLauncher createContainerLauncher(AppContext context) { MockContainerLauncher launcher = new MockContainerLauncher() { @Override public void handle(ContainerLauncherEvent event) { TaskAttemptId taskAttemptID = event.getTaskAttemptID(); // Pass everything except the 2nd attempt of the first task. 
if (taskAttemptID.getId() != 1 || taskAttemptID.getTaskId().getId() != 0) { super.handle(event); } } }; launcher.shufflePort = 5467; return launcher; } @Override protected EventHandler<JobHistoryEvent> createJobHistoryHandler( AppContext context) { JobHistoryEventHandler eventHandler = new JobHistoryEventHandler(context, getStartCount()); return eventHandler; } } static class MRAppNoShuffleSecret extends MRAppWithHistory { public MRAppNoShuffleSecret(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart, int startCount) { super(maps, reduces, autoComplete, testName, cleanOnStart, startCount); } @Override protected void initJobCredentialsAndUGI(Configuration conf) { // do NOT put a shuffle secret in the job credentials } } public static void main(String[] arg) throws Exception { TestRecovery test = new TestRecovery(); test.testCrashed(); test.testMultipleCrashes(); test.testOutputRecovery(); test.testOutputRecoveryMapsOnly(); test.testRecoveryWithOldCommiter(); test.testSpeculative(); test.testRecoveryWithoutShuffleSecret(); test.testRecoverySuccessAttempt(); test.testRecoveryAllFailAttempts(); test.testRecoveryTaskSuccessAllAttemptsFail(); test.testRecoveryTaskSuccessAllAttemptsSucceed(); test.testRecoveryAllAttemptsKilled(); } }
apache-2.0
flydream2046/azure-sdk-for-java
resource-management/azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/models/RouteNextHopType.java
1908
/** * * Copyright (c) Microsoft and contributors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. package com.microsoft.azure.management.network.models; /** * The type of Azure hop the packet should be sent to. */ public abstract class RouteNextHopType { /** * Represents an Azure S2S VPN Gateway. */ public static final String VIRTUALNETWORKGATEWAY = "VirtualNetworkGateway"; /** * Represents the local virtual network. For instance, if you have two * subnets, 10.1.0.0/16 and 10.2.0.0/16 in the same virtual network, the * route for each subnet in the route table will have a next hop value of * Local. */ public static final String VNETLOCAL = "VnetLocal"; /** * Represents the default Internet gateway provided by the Azure * Infrastructure */ public static final String INTERNET = "Internet"; /** * Represents a virtual appliance you added to your Azure virtual network. */ public static final String VIRTUALAPPLIANCE = "VirtualAppliance"; /** * Represents a black hole. Packets forwarded to a black hole will not be * forwarded at all. */ public static final String NONE = "None"; }
apache-2.0
jglobus/JGlobus
ssl-proxies/src/main/java/org/globus/util/http/HttpResponse.java
4078
/*
 * Copyright 1999-2010 University of Chicago
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS,WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied.
 *
 * See the License for the specific language governing permissions and limitations under the License.
 */
package org.globus.util.http;

import java.io.InputStream;
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Minimal parser for the status line and headers of an HTTP response read
 * from an {@link InputStream}. Parsing happens eagerly in the constructor;
 * the parsed values are exposed through the public fields. The stream is
 * left positioned at the start of the message body.
 */
public class HttpResponse {

    private static Log logger =
        LogFactory.getLog(HttpResponse.class.getName());

    /** The stream the response is read from; positioned after the headers once parsed. */
    protected InputStream input;
    /** Number of characters consumed from the stream so far (status line + headers). */
    protected long charsRead = 0;

    public String httpType = null;      // e.g. "HTTP/1.1"
    public String httpMsg = null;       // reason phrase, e.g. "OK" (null if absent)
    public int httpCode = -1;           // numeric status code
    public long contentLength = -1;     // Content-Length header value, -1 if absent
    public String connection = null;    // Connection header value
    public String contentType = null;   // Content-Type header value
    public String server = null;        // Server header value
    public boolean chunked = false;     // true if Transfer-Encoding: chunked was seen
    public String location = null;      // Location header value (redirects)

    /**
     * Reads and parses the status line and all headers from the stream.
     *
     * @param in stream positioned at the start of an HTTP response
     * @throws IOException if reading from the stream fails
     */
    public HttpResponse(InputStream in) throws IOException {
        input = in;
        parse();
    }

    /**
     * Read a line of text from the given Stream and return it
     * as a String. Assumes lines end in CRLF.
     *
     * @param in the stream to read from
     * @return the line without its terminator; truncated at 512 characters
     * @throws IOException if reading fails
     */
    protected String readLine(InputStream in) throws IOException {
        StringBuilder buf = new StringBuilder();
        int c, length = 0;

        while (true) {
            c = in.read();
            if (c == -1 || c == '\n' || length > 512) {
                charsRead++;
                break;
            } else if (c == '\r') {
                // swallow the LF that is assumed to follow the CR
                in.read();
                charsRead += 2;
                break;
            } else {
                buf.append((char) c);
                length++;
            }
        }

        charsRead += length;
        return buf.toString();
    }

    /**
     * Returns the trimmed text after the first ':' in a header line,
     * or {@code null} if the line contains no ':'.
     */
    public static String getRest(String line) {
        int pos = line.indexOf(":");
        if (pos == -1) {
            return null;
        } else {
            return line.substring(pos + 1).trim();
        }
    }

    /**
     * Parses an HTTP status line of the form
     * {@code HTTP-version SP status-code [SP reason-phrase]} into
     * {@link #httpType}, {@link #httpCode} and {@link #httpMsg}.
     * The reason phrase is optional; {@link #httpMsg} stays null without it.
     */
    public void parseHttp(String line) {
        int p1 = line.indexOf(" ");
        if (p1 == -1) {
            return;
        }
        httpType = line.substring(0, p1);

        int p2 = line.indexOf(" ", p1 + 1);
        String tmp;
        if (p2 == -1) {
            // No reason phrase, e.g. "HTTP/1.0 200".
            // BUG FIX: the original used line.substring(p2) here, i.e.
            // substring(-1), which always threw StringIndexOutOfBoundsException.
            tmp = line.substring(p1);
        } else {
            tmp = line.substring(p1, p2);
            httpMsg = line.substring(p2).trim();
        }
        httpCode = Integer.parseInt(tmp.trim());
    }

    // Reads the status line and then headers until the blank line that
    // terminates the header section, populating the public fields.
    private void parse() throws IOException {
        String line, tmp;

        line = readLine(input);
        if (logger.isTraceEnabled()) {
            logger.trace(line);
        }
        parseHttp(line);

        while ((line = readLine(input)).length() != 0) {
            if (logger.isTraceEnabled()) {
                logger.trace(line);
            }
            tmp = getRest(line);
            if (line.startsWith(HTTPProtocol.CONNECTION)) {
                connection = tmp;
            } else if (line.startsWith(HTTPProtocol.SERVER)) {
                server = tmp;
            } else if (line.startsWith(HTTPProtocol.CONTENT_TYPE)) {
                contentType = tmp;
            } else if (line.startsWith(HTTPProtocol.CONTENT_LENGTH)) {
                contentLength = Long.parseLong(tmp.trim());
            } else if (line.startsWith(HTTPProtocol.CHUNKED)) {
                chunked = true;
            } else if (line.startsWith(HTTPProtocol.LOCATION)) {
                location = tmp;
            }
        }
    }

    /** Generates a string representation of the http header
     *
     * @return <code>String</code> a string representation of the http header
     */
    public String toString() {
        StringBuilder buf = new StringBuilder();
        buf.append("Http    : " + httpType + "\n");
        buf.append("Message : " + httpMsg + "\n");
        buf.append("Code    : " + httpCode + "\n");
        if (server != null) {
            buf.append("Server  : " + server + "\n");
        }
        buf.append("Length  : " + contentLength + "\n");
        buf.append("Chunked : " + chunked + "\n");
        buf.append("Type    : " + contentType + "\n");
        if (connection != null) {
            buf.append("Connection : " + connection + "\n");
        }
        if (location != null) {
            buf.append("Location : " + location + "\n");
        }
        return buf.toString();
    }
}
apache-2.0
hawkular/hawkular-btm
tests/instrumentation-framework/src/main/java/org/hawkular/apm/tests/dockerized/TestScenarioRunner.java
8706
/*
 * Copyright 2015-2017 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.apm.tests.dockerized;

import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.logging.Logger;

import org.hawkular.apm.tests.common.ApmMockServer;
import org.hawkular.apm.tests.dockerized.environment.DockerComposeExecutor;
import org.hawkular.apm.tests.dockerized.environment.DockerImageExecutor;
import org.hawkular.apm.tests.dockerized.environment.TestEnvironmentExecutor;
import org.hawkular.apm.tests.dockerized.exception.TestFailException;
import org.hawkular.apm.tests.dockerized.model.JsonPathVerify;
import org.hawkular.apm.tests.dockerized.model.TestCase;
import org.hawkular.apm.tests.dockerized.model.TestScenario;
import org.hawkular.apm.tests.dockerized.model.Type;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Executes dockerized test scenarios: boots a mock APM server, brings up the
 * scenario's docker environment, triggers the test action inside the
 * container and verifies the data captured by the server.
 *
 * @author Pavol Loffay
 */
public class TestScenarioRunner {

    private static final Logger log = Logger.getLogger(TestScenarioRunner.class.getName());

    private final ObjectMapper objectMapper;

    private int apmServerPort;

    /**
     * @param apmServerPort - port at which Hawkular APM server will be started
     */
    public TestScenarioRunner(int apmServerPort) {
        this.objectMapper = new ObjectMapper();
        this.apmServerPort = apmServerPort;
    }

    /**
     * Runs every non-skipped test case of the scenario, each in a freshly
     * created environment executor that is closed when the case finishes.
     *
     * <p>Per-case life cycle: create network (when an APM address is set),
     * start the mock APM server, start the environment, wait for app start,
     * execute the action script, wait for data to propagate, verify, tear down.
     *
     * @param testScenario the scenario to run
     * @return number of successful test cases
     */
    public int run(TestScenario testScenario) {
        if (testScenario.getEnvironment().getDockerCompose() != null &&
                testScenario.getEnvironment().getImage() != null) {
            throw new IllegalArgumentException("Ambiguous environment: defined docker image and docker-compose" +
                    ", but we expect only one of them to be defined!");
        }

        log.info(String.format("========================= Starting test scenario: %s", testScenario));

        int passed = 0;

        for (TestCase testCase : testScenario.getTests()) {
            if (testCase.isSkip()) {
                continue;
            }

            TestEnvironmentExecutor environment = createTestEnvironmentExecutor(testScenario);
            try {
                runTestCase(testScenario, testCase, environment);
                passed++;
            } catch (TestFailException ex) {
                log.severe(String.format("Test case failed: %s\n%s", ex.toString(), ex.getMessage()));
                ex.printStackTrace();
            } finally {
                environment.close();
            }
        }

        log.info(String.format("========================= Closing test scenario : %s", testScenario));
        return passed;
    }

    /**
     * Drives one test case through its full life cycle and verifies the
     * captured data, cleaning up the environment and server afterwards.
     *
     * @param testScenario scenario the case belongs to
     * @param testCase case to execute
     * @param environment initialized environment executor
     * @throws TestFailException when verification fails or the run is interrupted
     */
    private void runTestCase(TestScenario testScenario, TestCase testCase,
                             TestEnvironmentExecutor environment) throws TestFailException {

        log.info(String.format("Executing test case: %s", testCase));

        List<String> serviceIds = null;

        ApmMockServer mockServer = new ApmMockServer();
        mockServer.setHost("0.0.0.0");
        mockServer.setPort(apmServerPort);
        // disable shut down timer
        mockServer.setShutdownTimer(60 * 60 * 1000);

        try {
            // A dedicated network is only required when the scenario
            // addresses the APM server explicitly.
            if (testScenario.getEnvironment().getApmAddress() != null) {
                environment.createNetwork();
            }

            // Start the mock APM server before the containers that report to it.
            mockServer.run();

            // Bring up the environment and give the app time to start.
            serviceIds = environment.run(testScenario.getEnvironment());
            Thread.sleep(testScenario.getEnvironment().getInitWaitSeconds() * 1000);

            // Trigger the test action, then wait for data to reach the server.
            environment.execScript(serviceIds, testCase.getScriptServiceName(), testCase.getAction());
            Thread.sleep(testCase.getAfterActionWaitSeconds() * 1000);

            Collection<JsonPathVerify> failures = verifyResults(testScenario, testCase, mockServer);
            if (!failures.isEmpty()) {
                throw new TestFailException(testCase, failures);
            }
        } catch (InterruptedException ex) {
            log.severe("Interruption exception");
            log.severe(ex.toString());
            throw new TestFailException(testCase, ex);
        } finally {
            if (serviceIds != null) {
                environment.stopAndRemove(serviceIds);
            }
            if (mockServer != null) {
                mockServer.shutdown();
            }
        }
    }

    // Serializes the captured objects to JSON and returns the json-path
    // verifications that did NOT match (empty list means success).
    private List<JsonPathVerify> verifyResults(TestScenario testScenario, TestCase testCase,
                                               ApmMockServer mockServer) {

        Collection<?> captured = getCapturedData(testScenario.getEnvironment().getType(), mockServer);

        String json;
        try {
            json = serialize(captured);
        } catch (IOException ex) {
            log.severe(String.format("Failed to serialize traces: %s", captured));
            throw new RuntimeException("Failed to serialize traces = " + captured, ex);
        }

        log.info(String.format("Captured objects:\n%s", json));

        List<JsonPathVerify> failed = new ArrayList<>();
        for (JsonPathVerify jsonPathVerify : testCase.getVerify().getJsonPath()) {
            if (!JsonPathVerifier.verify(json, jsonPathVerify)) {
                failed.add(jsonPathVerify);
            }
        }
        return failed;
    }

    // Selects the captured collection appropriate for the environment type:
    // traces for the APM variants, spans for Zipkin.
    private Collection<?> getCapturedData(Type type, ApmMockServer apmMockServer) {
        if (type == Type.ZIPKIN) {
            return apmMockServer.getSpans();
        }
        if (type == Type.APM || type == Type.APMAGENT || type == Type.APMOTAGENT) {
            return apmMockServer.getTraces();
        }
        return null;
    }

    // Picks the single-image executor when an image is configured,
    // otherwise falls back to docker-compose.
    private TestEnvironmentExecutor createTestEnvironmentExecutor(TestScenario testScenario) {
        return testScenario.getEnvironment().getImage() != null
                ? DockerImageExecutor.getInstance(testScenario.getScenarioDirectory(),
                        testScenario.getEnvironment().getApmAddress())
                : DockerComposeExecutor.getInstance(testScenario.getScenarioDirectory(),
                        testScenario.getEnvironment().getApmAddress());
    }

    // Writes the object as JSON using the shared ObjectMapper's factory.
    private String serialize(Object object) throws IOException {
        StringWriter writer = new StringWriter();

        JsonGenerator generator = objectMapper.getFactory().createGenerator(writer);
        generator.writeObject(object);
        generator.close();
        writer.close();

        return writer.toString();
    }
}
apache-2.0
ydai1124/gobblin-1
gobblin-data-management/src/main/java/gobblin/data/management/conversion/hive/watermarker/HiveSourceWatermarkerFactory.java
1160
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gobblin.data.management.conversion.hive.watermarker;

import gobblin.configuration.State;

/**
 * An interface for creating new {@link HiveSourceWatermarker}s.
 */
public interface HiveSourceWatermarkerFactory {

  /**
   * Create a new {@link HiveSourceWatermarker} from {@link State}.
   *
   * @param state configuration state used to construct the watermarker
   * @return a newly created {@link HiveSourceWatermarker}
   */
  public HiveSourceWatermarker createFromState(State state);
}
apache-2.0
goodwinnk/intellij-community
platform/platform-impl/src/com/intellij/openapi/fileTypes/ex/FileTypeManagerEx.java
1597
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.fileTypes.ex;

import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeFactory;
import com.intellij.openapi.fileTypes.FileTypeManager;
import org.jetbrains.annotations.NotNull;

/**
 * Extended file-type manager API: adds dynamic (un)registration of file
 * types and change-event firing on top of {@link FileTypeManager}.
 *
 * @author max
 */
public abstract class FileTypeManagerEx extends FileTypeManager{
  /** Convenience accessor that casts the application-wide instance to this extended interface. */
  public static FileTypeManagerEx getInstanceEx(){
    return (FileTypeManagerEx)getInstance();
  }

  /**
   * Registers a file type at runtime.
   *
   * @deprecated use {@link FileTypeFactory} instead
   */
  @Deprecated
  public abstract void registerFileType(@NotNull FileType fileType);

  /**
   * Removes a previously registered file type.
   *
   * @deprecated use {@link FileTypeFactory} instead
   */
  @Deprecated
  public abstract void unregisterFileType(@NotNull FileType fileType);

  /** Returns true if the given semicolon-separated ignore list matches the current one. */
  public abstract boolean isIgnoredFilesListEqualToCurrent(@NotNull String list);

  /** Returns the extension portion of the given file name. */
  @NotNull
  public abstract String getExtension(@NotNull String fileName);

  /** Notifies listeners that the set of registered file types has changed. */
  public abstract void fireFileTypesChanged();

  /** Notifies listeners that the set of registered file types is about to change. */
  public abstract void fireBeforeFileTypesChanged();
}
apache-2.0
wisebaldone/incubator-wave
wave/src/test/java/org/waveprotocol/wave/model/document/indexed/LocationMapperTest.java
2952
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.model.document.indexed; import junit.framework.TestCase; import org.waveprotocol.wave.model.document.operation.Attributes; import org.waveprotocol.wave.model.document.operation.DocOp; import org.waveprotocol.wave.model.document.operation.impl.DocOpBuilder; import org.waveprotocol.wave.model.document.raw.impl.Element; import org.waveprotocol.wave.model.document.raw.impl.Node; import org.waveprotocol.wave.model.document.raw.impl.Text; import org.waveprotocol.wave.model.document.util.DocProviders; import org.waveprotocol.wave.model.operation.OperationException; /** * Tests for the point-locating capabilities of IndexedDocumentImpl. * */ public class LocationMapperTest extends TestCase { /** * Test that sample locations are being mapped correctly. 
*/ public void testSampleLocations1() { IndexedDocument<Node, Element, Text> document = createEmptyDocument(); try { document.consume(element("p")); document.consume(characters("a", 1, 1)); Node testNode = document.getDocumentElement().getFirstChild(); assertNotNull(testNode); testNode = testNode.getFirstChild(); assertNotNull(testNode); assertEquals(1, document.getLocation(testNode)); assertEquals(1, testNode.getIndexingContainer().size()); document.consume(characters("b", 2, 1)); assertEquals(1, document.getLocation(testNode)); assertEquals(2, testNode.getIndexingContainer().size()); document.consume(characters("c", 3, 1)); assertEquals(1, document.getLocation(testNode)); assertEquals(3, testNode.getIndexingContainer().size()); } catch (OperationException e) { fail(e.toString()); } } private static DocOp element(String tag) { return new DocOpBuilder().elementStart(tag, Attributes.EMPTY_MAP).elementEnd().build(); } private static DocOp characters(String text, int location, int trail) { return new DocOpBuilder().retain(location).characters(text).retain(trail).build(); } private static IndexedDocument<Node, Element, Text> createEmptyDocument() { return DocProviders.POJO.parse(""); } }
apache-2.0
leleuj/cas
core/cas-server-core-tickets-api/src/main/java/org/apereo/cas/ticket/DefaultTicketDefinitionProperties.java
1054
package org.apereo.cas.ticket;

import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

/**
 * This is {@link DefaultTicketDefinitionProperties}.
 *
 * <p>Plain mutable holder for per-ticket-definition storage settings;
 * accessors, equality and string form are generated by Lombok.
 *
 * @author Misagh Moayyed
 * @since 5.1.0
 */
@ToString
@Getter
@EqualsAndHashCode
@Setter
public class DefaultTicketDefinitionProperties implements TicketDefinitionProperties {

    /**
     * Whether ticket operations require cascading down in the storage.
     */
    private boolean cascadeRemovals;

    /**
     * Storage/cache name that holds this ticket.
     */
    private String storageName;

    /**
     * Timeout for this ticket.
     */
    private long storageTimeout;

    /**
     * Password for this ticket storage, if any.
     */
    private String storagePassword;

    /**
     * If a ticket definition is going to be removed
     * as part of a cascade operation, should this definition
     * be excluded from removals, allowing the ticket
     * to hang around without its parent?
     */
    private boolean excludeFromCascade;
}
apache-2.0
jeorme/OG-Platform
projects/OG-Engine/src/test/java/com/opengamma/engine/depgraph/DepGraphInputMergingTest.java
17710
/** * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.engine.depgraph; import static org.testng.Assert.assertEquals; import static org.testng.AssertJUnit.assertNotNull; import java.util.Collections; import java.util.HashSet; import java.util.Map; import java.util.Set; import org.testng.annotations.Test; import com.google.common.collect.ImmutableSet; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.depgraph.impl.DependencyGraphImpl; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.util.test.TestGroup; import com.opengamma.util.test.TestLifecycle; /** * Tests the dependency graph building with a multiple output function whose inputs vary with its output set */ @Test(groups = TestGroup.UNIT) public class DepGraphInputMergingTest extends AbstractDependencyGraphBuilderTest { private static class VariantInputFunction extends TestFunction { private final ValueSpecification _spec1; private final ValueSpecification _spec2; private final ValueRequirement _req1; private final ValueRequirement _req2; public VariantInputFunction(final DepGraphTestHelper helper, final String in, final String out) { _spec1 = new ValueSpecification(helper.getSpec1().getValueName(), helper.getSpec1().getTargetSpecification(), helper.getSpec1().getProperties().copy().with("AUX", "X", "Y") .with("TEST", out).get()); _req1 = new ValueRequirement(helper.getRequirement1().getValueName(), helper.getRequirement1().getTargetReference(), ValueProperties.with("TEST", in).get()); _spec2 = new 
ValueSpecification(helper.getSpec2().getValueName(), helper.getSpec2().getTargetSpecification(), helper.getSpec2().getProperties().copy().with("AUX", "X", "Y") .with("TEST", out).get()); _req2 = new ValueRequirement(helper.getRequirement2().getValueName(), helper.getRequirement2().getTargetReference(), ValueProperties.with("TEST", in).get()); } @Override public Set<ComputedValue> execute(FunctionExecutionContext executionContext, FunctionInputs inputs, ComputationTarget target, Set<ValueRequirement> desiredValues) { return null; } private static Set<ValueRequirement> createRequirements(final ValueRequirement req, final String aux) { return Collections.singleton(aux != null ? new ValueRequirement(req.getValueName(), req.getTargetReference(), req.getConstraints().copy().with("AUX", aux).get()) : req); } @Override public Set<ValueRequirement> getRequirements(FunctionCompilationContext context, ComputationTarget target, ValueRequirement desiredValue) { final String aux = desiredValue.getConstraints().getSingleValue("AUX'"); if (desiredValue.getValueName() == _spec1.getValueName()) { return createRequirements(_req1, aux); } else { return createRequirements(_req2, aux); } } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) { final Set<ValueSpecification> result = new HashSet<ValueSpecification>(); for (ValueRequirement input : inputs.values()) { if (input.getValueName() == _req1.getValueName()) { result.add(_spec1); } else { result.add(_spec2); } } return result; } @Override public Set<ValueSpecification> getResults(FunctionCompilationContext context, ComputationTarget target) { return ImmutableSet.of(_spec1, _spec2); } } public void req1() { TestLifecycle.begin(); try { final DepGraphTestHelper helper = new DepGraphTestHelper(); helper.addFunctionProducing(new ComputedValue(helper.getSpecification1Bar(), null)); 
helper.addFunctionProducing(new ComputedValue(helper.getSpec2Bar(), null)); final VariantInputFunction v = new VariantInputFunction(helper, "Bar", "Foo"); helper.getFunctionRepository().addFunction(v); final DependencyGraphBuilder builder = helper.createBuilder(null); builder.addTarget(helper.getRequirement1Foo()); DependencyGraph graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 2); // VariantFunction & NodeProducing1 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v._spec1, helper.getSpecification1Bar())); final ValueRequirement req = new ValueRequirement(helper.getRequirement1().getValueName(), helper.getRequirement1().getTargetReference(), ValueProperties.with("TEST", "Foo") .with("AUX", "X").get()); builder.addTarget(req); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 2); // VariantFunction & NodeProducing1 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v._spec1.compose(req), helper.getSpecification1Bar())); } finally { TestLifecycle.end(); } } public void req1_twoLevel() { TestLifecycle.begin(); try { final DepGraphTestHelper helper = new DepGraphTestHelper(); helper.addFunctionProducing(new ComputedValue(helper.getSpecification1Bar(), null)); helper.addFunctionProducing(new ComputedValue(helper.getSpec2Bar(), null)); final VariantInputFunction v1 = new VariantInputFunction(helper, "Bar", "Cow"); final VariantInputFunction v2 = new VariantInputFunction(helper, "Cow", "Foo"); helper.getFunctionRepository().addFunction(v1); helper.getFunctionRepository().addFunction(v2); final DependencyGraphBuilder builder = helper.createBuilder(null); builder.addTarget(helper.getRequirement1Foo()); DependencyGraph graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 3); // VariantFunction1, VariantFunction2 & NodeProducing1 
assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1, v2._spec1, helper.getSpecification1Bar())); final ValueRequirement req = new ValueRequirement(helper.getRequirement1().getValueName(), helper.getRequirement1().getTargetReference(), ValueProperties.with("TEST", "Cow") .with("AUX", "X").get()); builder.addTarget(req); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 3); // VariantFunction1, VariantFunction2 & NodeProducing1 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1.compose(req), v2._spec1, helper.getSpecification1Bar())); } finally { TestLifecycle.end(); } } public void req2() { TestLifecycle.begin(); try { final DepGraphTestHelper helper = new DepGraphTestHelper(); helper.addFunctionProducing(new ComputedValue(helper.getSpecification1Bar(), null)); helper.addFunctionProducing(new ComputedValue(helper.getSpec2Bar(), null)); final VariantInputFunction v = new VariantInputFunction(helper, "Bar", "Foo"); helper.getFunctionRepository().addFunction(v); final DependencyGraphBuilder builder = helper.createBuilder(null); builder.addTarget(helper.getRequirement2Foo()); DependencyGraph graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 2); // VariantFunction & NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v._spec2, helper.getSpec2Bar())); final ValueRequirement req = new ValueRequirement(helper.getRequirement2().getValueName(), helper.getRequirement2().getTargetReference(), ValueProperties.with("TEST", "Foo") .with("AUX", "X").get()); builder.addTarget(req); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 2); // VariantFunction & NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v._spec2.compose(req), helper.getSpec2Bar())); } finally { TestLifecycle.end(); } } 
public void req2_twoLevel() { TestLifecycle.begin(); try { final DepGraphTestHelper helper = new DepGraphTestHelper(); helper.addFunctionProducing(new ComputedValue(helper.getSpecification1Bar(), null)); helper.addFunctionProducing(new ComputedValue(helper.getSpec2Bar(), null)); final VariantInputFunction v1 = new VariantInputFunction(helper, "Bar", "Cow"); final VariantInputFunction v2 = new VariantInputFunction(helper, "Cow", "Foo"); helper.getFunctionRepository().addFunction(v1); helper.getFunctionRepository().addFunction(v2); final DependencyGraphBuilder builder = helper.createBuilder(null); builder.addTarget(helper.getRequirement2Foo()); DependencyGraph graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 3); // VariantFunction1, VariantFunction2 & NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec2, v2._spec2, helper.getSpec2Bar())); final ValueRequirement req = new ValueRequirement(helper.getRequirement2().getValueName(), helper.getRequirement2().getTargetReference(), ValueProperties.with("TEST", "Cow") .with("AUX", "X").get()); builder.addTarget(req); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 3); // VariantFunction1, VariantFunction2 & NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec2.compose(req), v2._spec2, helper.getSpec2Bar())); } finally { TestLifecycle.end(); } } public void req1And2() { TestLifecycle.begin(); try { final DepGraphTestHelper helper = new DepGraphTestHelper(); helper.addFunctionProducing(new ComputedValue(helper.getSpecification1Bar(), null)); helper.addFunctionProducing(new ComputedValue(helper.getSpec2Bar(), null)); final VariantInputFunction v = new VariantInputFunction(helper, "Bar", "Foo"); helper.getFunctionRepository().addFunction(v); final DependencyGraphBuilder builder = helper.createBuilder(null); 
builder.addTarget(helper.getRequirement1Foo()); builder.addTarget(helper.getRequirement2Foo()); DependencyGraph graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 3); // VariantFunction (with two outputs), NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v._spec1, v._spec2, helper.getSpecification1Bar(), helper.getSpec2Bar())); final ValueRequirement req = new ValueRequirement(helper.getRequirement1().getValueName(), helper.getRequirement1().getTargetReference(), ValueProperties.with("TEST", "Foo") .with("AUX", "X").get()); builder.addTarget(req); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 3); // VariantFunction (with two outputs), NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v._spec1.compose(req), v._spec2, helper.getSpecification1Bar(), helper.getSpec2Bar())); } finally { TestLifecycle.end(); } } public void req1And2_threeLevel() { TestLifecycle.begin(); try { final DepGraphTestHelper helper = new DepGraphTestHelper(); helper.addFunctionProducing(new ComputedValue(helper.getSpecification1Bar(), null)); helper.addFunctionProducing(new ComputedValue(helper.getSpec2Bar(), null)); final VariantInputFunction v1 = new VariantInputFunction(helper, "Bar", "Cow"); final VariantInputFunction v2 = new VariantInputFunction(helper, "Cow", "Dog"); final VariantInputFunction v3 = new VariantInputFunction(helper, "Dog", "Foo"); helper.getFunctionRepository().addFunction(v1); helper.getFunctionRepository().addFunction(v2); helper.getFunctionRepository().addFunction(v3); final DependencyGraphBuilder builder = helper.createBuilder(null); builder.addTarget(helper.getRequirement1Foo()); builder.addTarget(helper.getRequirement2Foo()); DependencyGraph graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 5); // v1, v2, v3, 
NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1, v1._spec2, v2._spec1, v2._spec2, v3._spec1, v3._spec2, helper.getSpecification1Bar(), helper.getSpec2Bar())); final ValueRequirement req1 = new ValueRequirement(helper.getRequirement1().getValueName(), helper.getRequirement1().getTargetReference(), ValueProperties.with("TEST", "Cow") .with("AUX", "X").get()); builder.addTarget(req1); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 5); // v1, v2, v3, NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1.compose(req1), v1._spec2, v2._spec1, v2._spec2, v3._spec1, v3._spec2, helper.getSpecification1Bar(), helper.getSpec2Bar())); final ValueRequirement req2 = new ValueRequirement(helper.getRequirement2().getValueName(), helper.getRequirement2().getTargetReference(), ValueProperties.with("TEST", "Cow") .with("AUX", "X").get()); builder.addTarget(req2); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 5); // v1, v2, v3, NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1.compose(req1), v1._spec2.compose(req2), v2._spec1, v2._spec2, v3._spec1, v3._spec2, helper.getSpecification1Bar(), helper.getSpec2Bar())); final ValueRequirement req3 = new ValueRequirement(helper.getRequirement2().getValueName(), helper.getRequirement2().getTargetReference(), ValueProperties.with("TEST", "Dog") .with("AUX", "Y").get()); builder.addTarget(req3); graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 5); // v1, v2, v3, NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1.compose(req1), v1._spec2.compose(req2), v2._spec1, v2._spec2.compose(req3), v3._spec1, v3._spec2, 
helper.getSpecification1Bar(), helper.getSpec2Bar())); } finally { TestLifecycle.end(); } } public void req1And2_insitu_rewrite() { TestLifecycle.begin(); try { final DepGraphTestHelper helper = new DepGraphTestHelper(); helper.addFunctionProducing(new ComputedValue(helper.getSpecification1Bar(), null)); helper.addFunctionProducing(new ComputedValue(helper.getSpec2Bar(), null)); final VariantInputFunction v1 = new VariantInputFunction(helper, "Bar", "Cow"); final VariantInputFunction v2 = new VariantInputFunction(helper, "Cow", "Dog"); final VariantInputFunction v3 = new VariantInputFunction(helper, "Dog", "Foo"); helper.getFunctionRepository().addFunction(v1); helper.getFunctionRepository().addFunction(v2); helper.getFunctionRepository().addFunction(v3); final DependencyGraphBuilder builder = helper.createBuilder(null); builder.addTarget(helper.getRequirement1Foo()); builder.addTarget(helper.getRequirement2Foo()); DependencyGraph graph = builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 5); // v1, v2, v3, NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1, v1._spec2, v2._spec1, v2._spec2, v3._spec1, v3._spec2, helper.getSpecification1Bar(), helper.getSpec2Bar())); // Asking for this will cause a rewrite of the v1->v2 edge during the graph build ([PLAT-6321] error) final ValueRequirement req = new ValueRequirement(helper.getRequirement2().getValueName(), helper.getRequirement2().getTargetReference(), ValueProperties.with("TEST", "Dog") .with("AUX", "Y").withOptional("AUX'").with("AUX'", "X").get()); // This is the implied requirement that will be requested and used for the edge re-write above (needed to check the graph below) final ValueRequirement ireq = new ValueRequirement(helper.getRequirement2().getValueName(), helper.getRequirement2().getTargetReference(), ValueProperties.with("AUX", "X").get()); builder.addTarget(req); graph = 
builder.getDependencyGraph(); assertNotNull(graph); assertEquals(graph.getSize(), 5); // v1, v2, v3, NodeProducing1 and NodeProducing2 assertEquals(DependencyGraphImpl.getAllOutputSpecifications(graph), ImmutableSet.of(v1._spec1, v1._spec2.compose(ireq), v2._spec1, v2._spec2.compose(req), v3._spec1, v3._spec2, helper.getSpecification1Bar(), helper.getSpec2Bar())); } finally { TestLifecycle.end(); } } }
apache-2.0
AlienQueen/wicket
wicket-core/src/main/java/org/apache/wicket/protocol/http/request/UserAgent.java
3177
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.protocol.http.request; import java.util.Arrays; import java.util.List; import org.apache.wicket.util.string.Strings; /** * UserAgent */ enum UserAgent { MOZILLA("Opera,AppleWebKit,Konqueror,Trident", Arrays.asList("Mozilla", "Gecko")), FIREFOX("Opera,AppleWebKit,Konqueror,Trident", Arrays.asList("Mozilla", "Gecko", "Firefox")), INTERNET_EXPLORER("Opera", Arrays.asList("Mozilla", "MSIE", "Windows"), Arrays.asList("Mozilla", "MSIE", "Trident"), Arrays.asList("Mozilla", "MSIE", "Mac_PowerPC"), Arrays.asList("Mozilla", "Windows", "Trident", "like Gecko")), OPERA(Arrays.asList("Opera")), CHROME(Arrays.asList("Mozilla", "Chrome", "AppleWebKit", "Safari")), SAFARI("Chrome", Arrays.asList("Mozilla", "AppleWebKit", "Safari")), KONQUEROR(Arrays.asList("Konqueror")); /** * The values which are not allowed in the user agent. */ private final String[] notAllowedList; /** * A list with strings which has to be in the user agent string. */ private final List<String>[] detectionStrings; /** * Construct. 
* * @param notAllowed * comma separated list with values which are not allowed in the user agent * @param detectionStrings * a list with strings which has to be in the user agent string */ UserAgent(String notAllowed, List<String>... detectionStrings) { notAllowedList = Strings.split(notAllowed, ','); this.detectionStrings = detectionStrings; } /** * Construct. * * @param detectionStrings * list with string which has to be in the user agent string */ UserAgent(List<String>... detectionStrings) { this(null, detectionStrings); } /** * @param userAgent * The user agent string * @return Whether the user agent matches this enum or not */ public boolean matches(String userAgent) { if (userAgent == null) { return false; } if (notAllowedList != null) { for (String value : notAllowedList) { if (userAgent.contains(value)) { return false; } } } for (List<String> detectionGroup : detectionStrings) { boolean groupPassed = true; for (String detectionString : detectionGroup) { if (!userAgent.contains(detectionString)) { groupPassed = false; break; } } if (groupPassed) { return true; } } return false; } }
apache-2.0
jeorme/OG-Platform
projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/LabelledObjectMatrix1D.java
5706
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics; import java.util.Arrays; import org.apache.commons.lang.Validate; import com.opengamma.financial.analytics.QuickSorter.ArrayQuickSorter; /** * @param <TKey> the type of the keys * @param <TValue> the type of the values * @param <TTolerance> the type of the tolerance */ public abstract class LabelledObjectMatrix1D<TKey extends Comparable<? super TKey>, TValue, TTolerance> { private final String _labelsTitle; private final String _valuesTitle; private final TKey[] _keys; private final Object[] _labels; private final TValue[] _values; private final TTolerance _defaultTolerance; public LabelledObjectMatrix1D(final TKey[] keys, final TValue[] values, final TTolerance defaultTolerance) { this(keys, LabelledMatrixUtils.toString(keys), values, defaultTolerance); } public LabelledObjectMatrix1D(final TKey[] keys, final Object[] labels, final TValue[] values, final TTolerance defaultTolerance) { this(keys, labels, null, values, null, defaultTolerance); } public LabelledObjectMatrix1D(final TKey[] keys, final String labelsTitle, final TValue[] values, final String valuesTitle, final TTolerance defaultTolerance) { this(keys, LabelledMatrixUtils.toString(keys), labelsTitle, values, valuesTitle, defaultTolerance); } public LabelledObjectMatrix1D(TKey[] keys, Object[] labels, String labelsTitle, TValue[] values, String valuesTitle, TTolerance defaultTolerance) { Validate.notNull(keys, "labels"); Validate.notNull(labels, "label names"); Validate.notNull(values, "values"); final int n = keys.length; Validate.isTrue(n == labels.length, "length of keys array must match length of label names array"); Validate.isTrue(n == values.length, "length of keys array must match length of values array"); _keys = Arrays.copyOf(keys, n); _labels = Arrays.copyOf(labels, n); _labelsTitle = labelsTitle; _values = 
Arrays.copyOf(values, n); _valuesTitle = valuesTitle; _defaultTolerance = defaultTolerance; quickSort(); } public TKey[] getKeys() { return _keys; } public Object[] getLabels() { return _labels; } public String getLabelsTitle() { return _labelsTitle; } public TValue[] getValues() { return _values; } public String getValuesTitle() { return _valuesTitle; } public int size() { return _keys.length; } protected TTolerance getDefaultTolerance() { return _defaultTolerance; } /** * Compares two keys and indicates whether the first would be considered less than, equal to or greater than the * second. * * @param key1 the first key to compare, not null * @param key2 the second key to compare, not null * @param tolerance the tolerance for equality of the keys * @return the value 0 if {@code key1} is equal to {@code key2}; a value less than 0 if {@code key1} is less than * {@code key2}; and a value greater than 0 if {@code key1} is greater than {@code key2}. */ public abstract int compare(TKey key1, TKey key2, TTolerance tolerance); /** * Compares two keys using the default equality tolerance, and indicates whether the first would be considered less * than, equal to or greater than the second. * * @param key1 the first key to compare, not null * @param key2 the second key to compare, not null * @return the value 0 if {@code key1} is equal to {@code key2}; a value less than 0 if {@code key1} is less than * {@code key2}; and a value greater than 0 if {@code key1} is greater than {@code key2}. 
*/ public int compare(final TKey key1, final TKey key2) { return compare(key1, key2, getDefaultTolerance()); } private void quickSort() { (new ArrayQuickSorter<TKey>(_keys) { @Override protected int compare(final TKey first, final TKey second) { return LabelledObjectMatrix1D.this.compare(first, second); } @Override protected void swap(final int first, final int second) { super.swap(first, second); swap(_labels, first, second); swap(_values, first, second); } }).sort(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(_keys); result = prime * result + Arrays.hashCode(_labels); result = prime * result + ((_labelsTitle == null) ? 0 : _labelsTitle.hashCode()); result = prime * result + Arrays.hashCode(_values); result = prime * result + ((_valuesTitle == null) ? 0 : _valuesTitle.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final LabelledObjectMatrix1D<?, ?, ?> other = (LabelledObjectMatrix1D<?, ?, ?>) obj; if (!Arrays.equals(_keys, other._keys)) { return false; } if (!Arrays.equals(_labels, other._labels)) { return false; } if (_labelsTitle == null) { if (other._labelsTitle != null) { return false; } } else if (!_labelsTitle.equals(other._labelsTitle)) { return false; } if (!Arrays.equals(_values, other._values)) { return false; } if (_valuesTitle == null) { if (other._valuesTitle != null) { return false; } } else if (!_valuesTitle.equals(other._valuesTitle)) { return false; } return true; } }
apache-2.0
deepnarsay/JGroups
src/org/jgroups/demos/applets/DrawApplet.java
7660
package org.jgroups.demos.applets; import java.applet.Applet; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionListener; import java.io.*; import java.util.*; import java.util.List; import org.jgroups.*; import org.jgroups.logging.Log; import org.jgroups.logging.LogFactory; import org.jgroups.util.Util; public class DrawApplet extends Applet implements MouseMotionListener, ActionListener { private Graphics graphics=null; private Panel panel=null, sub_panel=null; private final ByteArrayOutputStream out=new ByteArrayOutputStream(); private DataOutputStream outstream; private DataInputStream instream; private final Random random=new Random(System.currentTimeMillis()); private Button clear_button, leave_button; private Label mbr_label; private final Font default_font=new Font("Helvetica", Font.PLAIN, 12); private static final String groupname="DrawGroup"; private Channel channel=null; private int member_size=1; private int red=0, green=0, blue=0; private Color default_color=null; private String props="tunnel.xml"; private final List<Address> members=new ArrayList<>(); private boolean fl=true; Log log=LogFactory.getLog(getClass()); public void init() { System.out.println("INIT"); setLayout(new BorderLayout()); String tmp_props=getParameter("properties"); if(tmp_props != null) { System.out.println("Setting parameters " + tmp_props); props=tmp_props; } try { channel=new JChannel(props); channel.setReceiver(new ReceiverAdapter() { public void viewAccepted(View v) { List<Address> mbrs=v.getMembers(); System.out.println("View accepted: " + v); member_size=v.size(); if(mbr_label != null) mbr_label.setText(member_size + " mbr(s)"); members.clear(); members.addAll(mbrs); } public void receive(Message msg) { if(msg == null || msg.getLength() == 0) { log.error("DrawApplet.run(): msg or msg.buffer is null !"); return; } instream=new DataInputStream(new 
ByteArrayInputStream(msg.getRawBuffer(), msg.getOffset(), msg.getLength())); int r=0; try { r=instream.readInt(); if(r == -13) { clearPanel(); return; } int g=instream.readInt(); int b=instream.readInt(); int my_x=instream.readInt(); int my_y=instream.readInt(); if(graphics != null) { graphics.setColor(new Color(r, g, b)); graphics.fillOval(my_x, my_y, 10, 10); graphics.setColor(default_color); } } catch(Exception ex) { ex.printStackTrace(); } } }); showStatus("Connecting to group " + groupname); channel.connect(groupname); } catch(Exception e) { e.printStackTrace(); } go(); } public void start() { System.out.println("------- START"); } public void destroy() { System.out.println("------- DESTROY"); showStatus("Disconnecting from " + groupname); channel.close(); showStatus("Disconnected"); } public void paint(Graphics g) { Rectangle bounds=panel.getBounds(); Color old=graphics.getColor(); if(bounds == null || graphics == null) return; graphics.setColor(Color.black); graphics.drawRect(0, 0, bounds.width - 1, bounds.height - 1); graphics.setColor(old); } private void selectColor() { red=Math.abs(random.nextInt()) % 255; green=Math.abs(random.nextInt()) % 255; blue=Math.abs(random.nextInt()) % 255; default_color=new Color(red, green, blue); } public void go() { try { panel=new Panel(); sub_panel=new Panel(); resize(200, 200); add("Center", panel); clear_button=new Button("Clear"); clear_button.setFont(default_font); clear_button.addActionListener(this); leave_button=new Button("Exit"); leave_button.setFont(default_font); leave_button.addActionListener(this); mbr_label=new Label("0 mbr(s)"); mbr_label.setFont(default_font); sub_panel.add("South", clear_button); sub_panel.add("South", leave_button); sub_panel.add("South", mbr_label); add("South", sub_panel); panel.addMouseMotionListener(this); setVisible(true); mbr_label.setText(member_size + " mbrs"); graphics=panel.getGraphics(); selectColor(); graphics.setColor(default_color); panel.setBackground(Color.white); 
clear_button.setForeground(Color.blue); leave_button.setForeground(Color.blue); } catch(Exception e) { log.error(e.toString()); } } /* --------------- Callbacks --------------- */ public void mouseMoved(MouseEvent e) { } public void mouseDragged(MouseEvent e) { int tmp[]=new int[1], x, y; tmp[0]=0; x=e.getX(); y=e.getY(); graphics.fillOval(x, y, 10, 10); try { out.reset(); outstream=new DataOutputStream(out); outstream.writeInt(red); outstream.writeInt(green); outstream.writeInt(blue); outstream.writeInt(x); outstream.writeInt(y); channel.send(new Message(null, null, out.toByteArray())); out.reset(); } catch(Exception ex) { log.error(ex.toString()); } } public void clearPanel() { Rectangle bounds=null; if(panel == null || graphics == null) return; bounds=panel.getBounds(); graphics.clearRect(1, 1, bounds.width - 2, bounds.height - 2); } public void sendClearPanelMsg() { int tmp[]=new int[1]; tmp[0]=0; clearPanel(); try { out.reset(); outstream=new DataOutputStream(out); outstream.writeInt(-13); channel.send(new Message(null, null, out.toByteArray())); outstream.flush(); } catch(Exception ex) { log.error(ex.toString()); } } public void actionPerformed(ActionEvent e) { String command=e.getActionCommand(); if(command == "Clear") { System.out.println("Members are " + members); sendClearPanelMsg(); } else if(command == "Exit") { try { destroy(); setVisible(false); } catch(Exception ex) { log.error(ex.toString()); } } else System.out.println("Unknown action"); } }
apache-2.0
jomarko/drools
drools-serialization-protobuf/src/test/java/org/drools/serialization/protobuf/MarshallerTest.java
18160
/* * Copyright (c) 2020. Red Hat, Inc. and/or its affiliates. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.serialization.protobuf; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.UUID; import org.drools.core.common.InternalFactHandle; import org.drools.core.impl.EnvironmentFactory; import org.drools.core.marshalling.impl.ClassObjectMarshallingStrategyAcceptor; import org.drools.core.marshalling.impl.JavaSerializableResolverStrategy; import org.drools.core.marshalling.impl.SerializablePlaceholderResolverStrategy; import org.drools.mvel.compiler.Person; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.kie.api.KieBase; import org.kie.api.conf.EqualityBehaviorOption; import org.kie.api.io.ResourceType; import org.kie.api.marshalling.ObjectMarshallingStrategy; import org.kie.api.runtime.Environment; import org.kie.api.runtime.EnvironmentName; import org.kie.api.runtime.KieSession; import org.kie.internal.utils.KieHelper; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @RunWith(Parameterized.class) public class MarshallerTest { private Environment env; @Parameterized.Parameters(name = "{0}") public static Object[] params() { return new Object[] { new JavaSerializableResolverStrategy( ClassObjectMarshallingStrategyAcceptor.DEFAULT ), new SerializablePlaceholderResolverStrategy( ClassObjectMarshallingStrategyAcceptor.DEFAULT ) }; } 
public MarshallerTest(ObjectMarshallingStrategy strategy) { this.env = EnvironmentFactory.newEnvironment(); this.env.set( EnvironmentName.OBJECT_MARSHALLING_STRATEGIES, new ObjectMarshallingStrategy[]{ strategy } ); } @Test public void testAgendaDoNotSerializeObject() throws Exception { KieSession ksession = null; try { String str = "import java.util.Collection\n" + "rule R1 when\n" + " String(this == \"x\" || this == \"y\" || this == \"z\")\n" + "then\n" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); ksession = kbase.newKieSession(null, env); ksession.insert("x"); ksession.insert("y"); ksession.insert("z"); assertEquals(3, ksession.fireAllRules()); ReadSessionResult serialisedStatefulKnowledgeSession = SerializationHelper.getSerialisedStatefulKnowledgeSessionWithMessage(ksession, ksession.getKieBase(), true); ksession = serialisedStatefulKnowledgeSession.getSession(); ProtobufMessages.KnowledgeSession deserializedMessage = serialisedStatefulKnowledgeSession.getDeserializedMessage(); assertEquals(0, ksession.fireAllRules()); assertFalse(deserializedMessage.getRuleData().getAgenda().getMatchList().stream().anyMatch(ml -> { return ml.getTuple().getObjectList().size() > 0; })); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testFromWithFireBeforeSerialization() throws Exception { String str = "import java.util.Collection\n" + "rule R1 when\n" + " String() from [ \"x\", \"y\", \"z\" ]\n" + "then\n" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); assertEquals(3, ksession.fireAllRules()); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(0, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testFromWithFireAfterSerialization() throws Exception { String str = "import 
java.util.Collection\n" + "rule R1 when\n" + " String() from [ \"x\", \"y\", \"z\" ]\n" + "then\n" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(3, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testFromWithPartialFiring() throws Exception { String str = "import java.util.Collection\n" + "rule R1 when\n" + " String() from [ \"x\", \"y\", \"z\" ]\n" + "then\n" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); assertEquals(2, ksession.fireAllRules(2)); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(1, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void test2FromsWithPartialFiring() throws Exception { String str = "import java.util.Collection\n" + "rule R1 when\n" + " String() from [ \"x\", \"y\", \"z\" ]\n" + " String() from [ \"a\", \"b\", \"c\" ]\n" + "then\n" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); assertEquals(5, ksession.fireAllRules(5)); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(4, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testFromAndJoinWithPartialFiring() throws Exception { String str = "import java.util.Collection\n" + "rule R1 when\n" + " String() from [ \"x\", \"y\", \"z\" ]\n" + " Integer()\n" + "then\n" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession 
= kbase.newKieSession(null, env); ksession.insert( 42 ); assertEquals(2, ksession.fireAllRules(2)); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(1, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testAgendaReconciliationAccumulate() throws Exception { String str = "import " + Person.class.getCanonicalName() + ";" + "rule X when\n" + " accumulate ( $p: Person ( getName().startsWith(\"M\")); \n" + " $sum : sum($p.getAge()) \n" + " ) \n" + "then\n" + " insert($sum);\n" + "end"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); ksession.insert(new Person("Mark", 37)); ksession.insert(new Person("Edson", 35)); ksession.insert(new Person("Mario", 40)); assertEquals(1, ksession.fireAllRules()); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(0, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testAgendaReconciliationAccumulate2() throws Exception { String str = "import " + Person.class.getCanonicalName() + ";" + "rule X when\n" + " accumulate ( $p: Person ( getName().startsWith(\"M\")); \n" + " $sum : sum($p.getAge()) \n" + " ) \n" + "then\n" + " insert($sum);\n" + "end"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); ksession.insert(new Person("Mark", 37)); ksession.insert(new Person("Edson", 35)); ksession.insert(new Person("Mario", 40)); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(1, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testMultiAccumulate() throws Exception { // DROOLS-5579 String str = "import " + 
Person.class.getCanonicalName() + ";" + "rule X when\n" + " accumulate ( Person ( getName().startsWith(\"M\"), $age : age ); \n" + " $sum : sum( $age ), $max : max( $age ) \n" + " ) \n" + "then\n" + " insert($sum);\n" + "end"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); ksession.insert(new Person("Mark", 37)); ksession.insert(new Person("Edson", 35)); ksession.insert(new Person("Mario", 40)); assertEquals(1, ksession.fireAllRules()); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(0, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testSubnetwork() throws Exception { final String str = "rule R1 when\n" + " String()\n" + " Long()\n" + " not( Long() and Integer() )\n" + "then end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL) .build(EqualityBehaviorOption.EQUALITY); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); ksession.insert("Luca"); ksession.insert(2L); ksession.insert(10); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(0, ksession.fireAllRules()); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); ksession.delete(ksession.getFactHandle(10)); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(1, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } @Test public void testSubnetwork2() throws Exception { final String str = "rule R1 when\n" + " String()\n" + " Long()\n" + " not( Long() and Integer() )\n" + "then end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL) .build(EqualityBehaviorOption.EQUALITY); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); ksession.insert("Luca"); 
ksession.insert(2L); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(1, ksession.fireAllRules()); ksession.insert("Mario"); ksession.insert(11); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(0, ksession.fireAllRules()); } finally { ksession.dispose(); } } @Test public void testFromJoinWithPartialFiring() throws Exception { String str = "import java.util.Collection\n" + "rule R1 when\n" + " Integer()\n" + " String() from [ \"x\", \"y\", \"z\" ]\n" + "then\n" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); InternalFactHandle fh1 = ( InternalFactHandle ) ksession.insert( 1 ); assertEquals(2, ksession.fireAllRules(2)); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(1, ksession.fireAllRules()); // old FH should keep its id InternalFactHandle intFH = ( InternalFactHandle ) ksession.getFactHandles().iterator().next(); assertEquals( fh1.getId(), intFH.getId() ); // serialization/deserialization of derived FHs shouldn't consume more FH ids assertEquals( fh1.getId() + 4, (( InternalFactHandle ) ksession.insert( 2 )).getId() ); } finally { if (ksession != null) { ksession.dispose(); } } } public static class LongFact implements Serializable { private long value; public LongFact() { } public LongFact(long value) { this.value = value; } public long getValue() { return value; } public void setValue(long value) { this.value = value; } @Override public boolean equals( Object o ) { if ( this == o ) return true; if ( o == null || getClass() != o.getClass() ) return false; LongFact longFact = ( LongFact ) o; return value == longFact.value; } @Override public int hashCode() { return Objects.hash( value ); } } public static class LongFacts implements Serializable { private List<LongFact> facts; public LongFacts() { 
this.facts = new ArrayList<>(); } public List<LongFact> getFacts() { return facts; } public void setFacts(List<LongFact> facts) { this.facts = facts; } @Override public boolean equals( Object o ) { if ( this == o ) return true; if ( o == null || getClass() != o.getClass() ) return false; LongFacts longFacts = ( LongFacts ) o; return Objects.equals( facts, longFacts.facts ); } @Override public int hashCode() { return Objects.hash( facts ); } } @Test public void testFromWithInsertLogical() throws Exception { // DROOLS-5713 String str = "import " + LongFact.class.getCanonicalName() + "\n" + "import " + LongFacts.class.getCanonicalName() + "\n" + "rule R1 when\n" + " LongFacts($lfs: facts)\n" + " $lf: LongFact() from $lfs\n" + "then\n" + " insertLogical($lf);" + "end\n"; KieBase kbase = new KieHelper().addContent(str, ResourceType.DRL).build(); KieSession ksession = null; try { ksession = kbase.newKieSession(null, env); String id = UUID.randomUUID().toString(); LongFacts longFacts = new LongFacts(); longFacts.getFacts().add(new LongFact(123456)); ksession.insert( longFacts ); assertEquals(1, ksession.fireAllRules()); ksession = SerializationHelper.getSerialisedStatefulKnowledgeSession(ksession, true); assertEquals(0, ksession.fireAllRules()); } finally { if (ksession != null) { ksession.dispose(); } } } }
apache-2.0
q474818917/solr-5.2.0
lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestKeepFilterFactory.java
3620
package org.apache.lucene.analysis.miscellaneous; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.Reader; import java.io.StringReader; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.util.BaseTokenStreamFactoryTestCase; import org.apache.lucene.analysis.util.CharArraySet; import org.apache.lucene.analysis.util.ClasspathResourceLoader; import org.apache.lucene.analysis.util.ResourceLoader; import org.apache.lucene.util.Version; public class TestKeepFilterFactory extends BaseTokenStreamFactoryTestCase { public void testInform() throws Exception { ResourceLoader loader = new ClasspathResourceLoader(getClass()); assertTrue("loader is null and it shouldn't be", loader != null); KeepWordFilterFactory factory = (KeepWordFilterFactory) tokenFilterFactory("KeepWord", "words", "keep-1.txt", "ignoreCase", "true"); CharArraySet words = factory.getWords(); assertTrue("words is null and it shouldn't be", words != null); assertTrue("words Size: " + words.size() + " is not: " + 2, words.size() == 2); factory = (KeepWordFilterFactory) tokenFilterFactory("KeepWord", "words", "keep-1.txt, keep-2.txt", "ignoreCase", "true"); words = factory.getWords(); assertTrue("words is null and it shouldn't be", words != null); 
assertTrue("words Size: " + words.size() + " is not: " + 4, words.size() == 4); } /** Test that bogus arguments result in exception */ public void testBogusArguments() throws Exception { try { tokenFilterFactory("KeepWord", "bogusArg", "bogusValue"); fail(); } catch (IllegalArgumentException expected) { assertTrue(expected.getMessage().contains("Unknown parameters")); } } public void test43Backcompat() throws Exception { Reader reader = new StringReader("a foo bar"); TokenStream stream = whitespaceMockTokenizer(reader); stream = tokenFilterFactory("KeepWord", Version.LUCENE_4_3_1, "enablePositionIncrements", "false", "words", "keep-1.txt").create(stream); assertTrue(stream instanceof Lucene43KeepWordFilter); assertTokenStreamContents(stream, new String[] {"foo", "bar"}, new int[] {2, 6}, new int[] {5, 9}, new int[] {1, 1}); try { tokenFilterFactory("KeepWord", Version.LUCENE_4_4_0, "enablePositionIncrements", "false"); fail(); } catch (IllegalArgumentException expected) { assertTrue(expected.getMessage().contains("enablePositionIncrements=false is not supported")); } tokenFilterFactory("KeepWord", Version.LUCENE_4_4_0, "enablePositionIncrements", "true"); try { tokenFilterFactory("KeepWord", "enablePositionIncrements", "true"); fail(); } catch (IllegalArgumentException expected) { assertTrue(expected.getMessage().contains("not a valid option")); } } }
apache-2.0
meteorcloudy/bazel
third_party/java/proguard/proguard6.2.2/src/proguard/classfile/util/StringReferenceInitializer.java
2943
/* * ProGuard -- shrinking, optimization, obfuscation, and preverification * of Java bytecode. * * Copyright (c) 2002-2019 Guardsquare NV * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package proguard.classfile.util; import proguard.classfile.*; import proguard.classfile.constant.*; import proguard.classfile.constant.visitor.ConstantVisitor; /** * This ConstantVisitor initializes any class references of all string constants * it visits. More specifically, it fills out the references of string constant * pool entries that happen to refer to a class in the program class pool or in * the library class pool. * * @author Eric Lafortune */ public class StringReferenceInitializer extends SimplifiedVisitor implements ConstantVisitor { private final ClassPool programClassPool; private final ClassPool libraryClassPool; /** * Creates a new StringReferenceInitializer. */ public StringReferenceInitializer(ClassPool programClassPool, ClassPool libraryClassPool) { this.programClassPool = programClassPool; this.libraryClassPool = libraryClassPool; } // Implementations for ConstantVisitor. public void visitAnyConstant(Clazz clazz, Constant constant) {} public void visitStringConstant(Clazz clazz, StringConstant stringConstant) { if (stringConstant.referencedClass == null) { // See if we can find the referenced class. 
stringConstant.referencedClass = findClass(ClassUtil.internalClassName( ClassUtil.externalBaseType(stringConstant.getString(clazz)))); } } // Small utility methods. /** * Returns the class with the given name, either for the program class pool * or from the library class pool, or <code>null</code> if it can't be found. */ private Clazz findClass(String name) { // First look for the class in the program class pool. Clazz clazz = programClassPool.getClass(name); // Otherwise look for the class in the library class pool. if (clazz == null) { clazz = libraryClassPool.getClass(name); } return clazz; } }
apache-2.0
genericDataCompany/hsandbox
common/mahout-distribution-0.7-hadoop1/math/src/main/java/org/apache/mahout/math/DiagonalMatrix.java
4145
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.mahout.math; public class DiagonalMatrix extends AbstractMatrix { private final Vector diagonal; public DiagonalMatrix(Vector values) { super(values.size(), values.size()); this.diagonal = values; } public DiagonalMatrix(Matrix values) { this(values.viewDiagonal()); } public DiagonalMatrix(double value, int size) { this(new ConstantVector(value, size)); } public DiagonalMatrix(double[] values) { super(values.length, values.length); this.diagonal = new DenseVector(values); } public static DiagonalMatrix identity(int size) { return new DiagonalMatrix(1, size); } @Override public Matrix assignColumn(int column, Vector other) { throw new UnsupportedOperationException("Can't assign a column to a diagonal matrix"); } /** * Assign the other vector values to the row of the receiver * * @param row the int row to assign * @param other a Vector * @return the modified receiver * @throws CardinalityException if the cardinalities differ */ @Override public Matrix assignRow(int row, Vector other) { throw new UnsupportedOperationException("Can't assign a row to a diagonal matrix"); } /** * Provides a view of the diagonal of a matrix. 
*/ @Override public Vector viewDiagonal() { return this.diagonal; } /** * Return the value at the given location, without checking bounds * * @param row an int row index * @param column an int column index * @return the double at the index */ @Override public double getQuick(int row, int column) { if (row == column) { return diagonal.get(row); } else { return 0; } } /** * Return an empty matrix of the same underlying class as the receiver * * @return a Matrix */ @Override public Matrix like() { return new SparseRowMatrix(rowSize(), columnSize()); } /** * Returns an empty matrix of the same underlying class as the receiver and of the specified * size. * * @param rows the int number of rows * @param columns the int number of columns */ @Override public Matrix like(int rows, int columns) { return new SparseRowMatrix(rows, columns); } @Override public void setQuick(int row, int column, double value) { if (row == column) { diagonal.set(row, value); } else { throw new UnsupportedOperationException("Can't set off-diagonal element"); } } /** * Return the number of values in the recipient * * @return an int[2] containing [row, column] count */ @Override public int[] getNumNondefaultElements() { throw new UnsupportedOperationException("Don't understand how to implement this"); } /** * Return a new matrix containing the subset of the recipient * * @param offset an int[2] offset into the receiver * @param size the int[2] size of the desired result * @return a new Matrix that is a view of the original * @throws CardinalityException if the length is greater than the cardinality of the receiver * @throws IndexException if the offset is negative or the offset+length is outside of the * receiver */ @Override public Matrix viewPart(int[] offset, int[] size) { return new MatrixView(this, offset, size); } }
apache-2.0
wiztools/rest-client
restclient-ui/src/main/java/org/wiztools/restclient/ui/SessionTableModel.java
2860
package org.wiztools.restclient.ui; import java.util.Collections; import java.util.Map; import javax.swing.table.AbstractTableModel; import org.wiztools.restclient.bean.Request; import org.wiztools.restclient.bean.Response; /** * * @author subwiz */ class SessionTableModel extends AbstractTableModel implements ISessionView{ private final String[] colNames; private Object[][] data = new String[0][0]; public SessionTableModel(final String[] colNames){ if(colNames.length != 2){ throw new IllegalArgumentException("The length of array should be equal to 2."); } this.colNames = colNames; } public int getRowCount() { return data.length; } public int getColumnCount() { return 2; } public Object getValueAt(int row, int col) { return data[row][col]; } @Override public String getColumnName(int col) { return colNames[col]; } @Override public void setValueAt(Object value, int row, int col) { data[row][col] = value; fireTableCellUpdated(row, col); } public void insertRow(Object key, Object value){ int len = data.length; Object[][] t = new Object[len+1][2]; t[0][0] = key; t[0][1] = value; for(int i=1; i<len+1; i++){ for(int j=0; j<2; j++){ t[i][j] = data[i-1][j]; } } data = null; data = t; fireTableDataChanged(); } /*public void deleteRow(int row){ int len = data.length; Object[][] t = new Object[len-1][2]; boolean passedDeletionRow = false; for(int i=0; i<len; i++){ if(i == row){ passedDeletionRow = true; continue; } for(int j=0; j<2; j++){ if(!passedDeletionRow){ t[i][j] = data[i][j]; } else{ t[i-1][j] = data[i][j]; } } } data = null; data = t; fireTableDataChanged(); }*/ public void setData(final Map<String, String> dataMap){ int size = dataMap.size(); Object[] keys = dataMap.keySet().toArray(); Object[][] o = new Object[size][2]; for(int i=0; i<size; i++){ o[i][0] = keys[i]; o[i][1] = dataMap.get(keys[i]); } data = null; data = o; fireTableDataChanged(); } public Object[][] getData(){ return data; } @Override public void add(final Request request, final Response response) { 
insertRow(request.toString(), response.toString()); } @Override public void clear() { setData(Collections.<String, String>emptyMap()); } }
apache-2.0
elkingtonmcb/oryx
rdf-common/src/main/java/com/cloudera/oryx/rdf/common/example/ExampleSet.java
5805
/*
 * Copyright (c) 2013, Cloudera, Inc. All Rights Reserved.
 *
 * Cloudera, Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"). You may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for
 * the specific language governing permissions and limitations under the
 * License.
 */

package com.cloudera.oryx.rdf.common.example;

import com.google.common.base.Preconditions;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import com.cloudera.oryx.common.settings.ConfigUtils;
import com.cloudera.oryx.common.settings.InboundSettings;
import com.cloudera.oryx.rdf.common.rule.Decision;

/**
 * Encapsulates a set of {@link Example}s, including derived information about the data set, such
 * as the type of each feature and the type of the target.
 *
 * @author Sean Owen
 */
public final class ExampleSet implements Iterable<Example> {

  private final List<Example> examples;
  private final FeatureType[] featureTypes;
  // Per-feature number of distinct category IDs (0 for non-categorical features).
  private final int[] categoryCounts;
  private final FeatureType targetType;
  private final int targetCategoryCount;

  /**
   * Builds a set whose feature/target types come from the default inbound configuration.
   *
   * @param examples non-null, non-empty list of examples
   */
  public ExampleSet(List<Example> examples) {
    Preconditions.checkNotNull(examples);
    Preconditions.checkArgument(!examples.isEmpty());
    this.examples = examples;
    InboundSettings inbound = InboundSettings.create(ConfigUtils.getDefaultConfig());
    int numFeatures = inbound.getColumnNames().size();
    featureTypes = new FeatureType[numFeatures];
    for (int i = 0; i < numFeatures; i++) {
      FeatureType type;
      if (inbound.isNumeric(i)) {
        type = FeatureType.NUMERIC;
      } else if (inbound.isCategorical(i)) {
        type = FeatureType.CATEGORICAL;
      } else {
        type = FeatureType.IGNORED;
      }
      featureTypes[i] = type;
    }
    int targetColumn = inbound.getTargetColumn();
    // The target is tracked separately, never as an ordinary feature.
    featureTypes[targetColumn] = FeatureType.IGNORED;
    targetType = inbound.isNumeric(targetColumn) ? FeatureType.NUMERIC : FeatureType.CATEGORICAL;
    categoryCounts = new int[numFeatures];
    this.targetCategoryCount = scanCategoryCounts();
  }

  /**
   * For testing.
   */
  public ExampleSet(List<Example> examples, FeatureType[] featureTypes, FeatureType targetType) {
    Preconditions.checkNotNull(examples);
    Preconditions.checkArgument(!examples.isEmpty());
    this.examples = examples;
    this.featureTypes = featureTypes;
    this.targetType = targetType;
    categoryCounts = new int[featureTypes.length];
    this.targetCategoryCount = scanCategoryCounts();
  }

  private ExampleSet(List<Example> subset, ExampleSet of) {
    this.examples = subset;
    this.featureTypes = of.featureTypes;
    this.categoryCounts = of.categoryCounts;
    this.targetType = of.targetType;
    this.targetCategoryCount = of.targetCategoryCount;
  }

  /**
   * Scans all examples once, filling {@link #categoryCounts} for categorical features and
   * returning the number of distinct target categories (0 for a numeric target).
   * Shared by both public constructors; requires {@code examples}, {@code featureTypes},
   * {@code targetType} and {@code categoryCounts} to be assigned already.
   */
  private int scanCategoryCounts() {
    int numFeatures = featureTypes.length;
    int theTargetCategoryCount = 0;
    for (Example example : examples) {
      for (int i = 0; i < numFeatures; i++) {
        if (featureTypes[i] == FeatureType.CATEGORICAL) {
          CategoricalFeature feature = (CategoricalFeature) example.getFeature(i);
          if (feature != null) {
            // Category IDs are 0-based, so count = max ID + 1.
            categoryCounts[i] = Math.max(categoryCounts[i], feature.getValueID() + 1);
          }
        }
      }
      if (targetType == FeatureType.CATEGORICAL) {
        theTargetCategoryCount = Math.max(theTargetCategoryCount,
                                          ((CategoricalFeature) example.getTarget()).getValueID() + 1);
      }
    }
    return theTargetCategoryCount;
  }

  public List<Example> getExamples() {
    return examples;
  }

  public int getNumFeatures() {
    return featureTypes.length;
  }

  public FeatureType getFeatureType(int featureNumber) {
    return featureTypes[featureNumber];
  }

  public int getCategoryCount(int featureNumber) {
    return categoryCounts[featureNumber];
  }

  public FeatureType getTargetType() {
    return targetType;
  }

  public int getTargetCategoryCount() {
    return targetCategoryCount;
  }

  @Override
  public Iterator<Example> iterator() {
    return examples.iterator();
  }

  /** Returns a view over the given examples that shares this set's type metadata. */
  public ExampleSet subset(List<Example> explicitSubset) {
    return new ExampleSet(explicitSubset, this);
  }

  /**
   * Partitions the examples by the given decision.
   *
   * @return a two-element array: [negative subset, positive subset]
   */
  public ExampleSet[] split(Decision decision) {
    List<Example> positive = new ArrayList<>();
    List<Example> negative = new ArrayList<>();
    for (Example example : examples) {
      if (decision.isPositive(example)) {
        positive.add(example);
      } else {
        negative.add(example);
      }
    }
    return new ExampleSet[] { subset(negative), subset(positive) };
  }

  @Override
  public String toString() {
    StringBuilder result = new StringBuilder();
    result.append(Arrays.toString(featureTypes)).append(" -> ").append(targetType).append('\n');
    for (Example example : examples) {
      result.append(example).append('\n');
    }
    return result.toString();
  }
}
apache-2.0
droolsjbpm/jbpm
jbpm-query-jpa/src/main/java/org/jbpm/query/jpa/impl/QueryAndParameterAppender.java
14416
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.query.jpa.impl; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import org.kie.internal.query.QueryParameterIdentifiers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is a utility class for dynamically creating JPA queries. * </p> * See the jbpm-human-task-core and jbpm-audit *query() method logic. * </p> * This class is <em>not</em> thread-safe and should only be used locally in a method. */ public class QueryAndParameterAppender { private static Logger logger = LoggerFactory.getLogger(QueryAndParameterAppender.class); private boolean noWhereClauseYet = true; private boolean noClauseAddedYet = true; private int nestedParentheses = 0; private boolean alreadyUsed = false; private final StringBuilder queryBuilder; private final Map<String, Object> queryParams; private int queryParamId; private int queryParamIndex; public QueryAndParameterAppender(StringBuilder queryBuilder, Map<String, Object> params, int queryParamId) { this.queryBuilder = queryBuilder; this.queryParams = params; this.noWhereClauseYet = ! 
queryBuilder.toString().contains("WHERE"); this.queryParamId = queryParamId; } public QueryAndParameterAppender(StringBuilder queryBuilder, Map<String, Object> params) { this(queryBuilder, params, 0); } public boolean hasBeenUsed() { return ! this.noClauseAddedYet; } public void markAsUsed() { this.noClauseAddedYet = false; } public void addNamedQueryParam(String name, Object value) { queryParams.put(name, value); } public void openParentheses() { ++nestedParentheses; queryBuilder.append(" ( "); } public void closeParentheses() { queryBuilder.append(" ) "); --nestedParentheses; } public int getParenthesesNesting() { return nestedParentheses; } // "Normal" query parameters -------------------------------------------------------------------------------------------------- public <T> void addQueryParameters( List<? extends Object> paramList, String listId, Class<T> type, String fieldName, String joinClause, boolean union ) { List<T> listIdParams; if( paramList != null && paramList.size() > 0 ) { Object inputObject = paramList.get(0); listIdParams = checkAndConvertListToType(paramList, inputObject, listId, type); } else { return; } String paramName = generateParamName(); StringBuilder queryClause = new StringBuilder("( " + fieldName + " IN (:" + paramName + ")"); if( joinClause != null ) { queryClause.append(" AND " + joinClause); } queryClause.append(" )"); addToQueryBuilder(queryClause.toString(), union, paramName, listIdParams ); } public <T> void addQueryParameters( Map<String, List<? extends Object>> inputParamsMap, String listId, Class<T> type, String fieldName, boolean union, String joinClause ) { List<? extends Object> inputParams = inputParamsMap.get(listId); addQueryParameters(inputParams, listId, type, fieldName, joinClause, union ); } public <T> void addQueryParameters( List<? 
extends Object> inputParams, String listId, Class<T> type, String fieldName, boolean union ) { addQueryParameters(inputParams, listId, type, fieldName, null, union ); } public <T> void addQueryParameters( Map<String, List<? extends Object>> inputParamsMap, String listId, Class<T> type, String fieldName, boolean union ) { List<? extends Object> inputParams = inputParamsMap.get(listId); addQueryParameters(inputParams, listId, type, fieldName, null, union ); } // Range query parameters ----------------------------------------------------------------------------------------------------- public <T> void addRangeQueryParameters(List<? extends Object> paramList, String listId, Class<T> type, String fieldName, String joinClause, boolean union ) { List<T> listIdParams; if( paramList != null && paramList.size() > 0 ) { Object inputObject = paramList.get(0); if( inputObject == null ) { inputObject = paramList.get(1); if( inputObject == null ) { return; } } listIdParams = checkAndConvertListToType(paramList, inputObject, listId, type); } else { return; } T min = listIdParams.get(0); T max = listIdParams.get(1); Map<String, T> paramNameMinMaxMap = new HashMap<String, T>(2); StringBuilder queryClause = new StringBuilder("( " ); if( joinClause != null ) { queryClause.append("( "); } queryClause.append(fieldName); if( min == null ) { if( max == null ) { return; } else { // only max String maxParamName = generateParamName(); queryClause.append(" <= :" + maxParamName + " " ); paramNameMinMaxMap.put(maxParamName, max); } } else if( max == null ) { // only min String minParamName = generateParamName(); queryClause.append(" >= :" + minParamName + " "); paramNameMinMaxMap.put(minParamName, min); } else { // both min and max String minParamName = generateParamName(); String maxParamName = generateParamName(); if( union ) { queryClause.append(" >= :" + minParamName + " OR " + fieldName + " <= :" + maxParamName + " " ); } else { queryClause.append(" BETWEEN :" + minParamName + " AND :" + 
maxParamName + " " ); } paramNameMinMaxMap.put(minParamName, min); paramNameMinMaxMap.put(maxParamName, max); } if( joinClause != null ) { queryClause.append(") and " + joinClause.trim() + " "); } queryClause.append(")"); // add query string to query builder and fill params map internalAddToQueryBuilder(queryClause.toString(), union); for( Entry<String, T> nameMinMaxEntry : paramNameMinMaxMap.entrySet() ) { addNamedQueryParam(nameMinMaxEntry.getKey(), nameMinMaxEntry.getValue()); } queryBuilderModificationCleanup(); } public <T> void addRangeQueryParameters( Map<String, List<? extends Object>> inputParamsMap, String listId, Class<T> type, String fieldName, boolean union, String joinClause ) { List<? extends Object> inputParams = inputParamsMap.get(listId); addRangeQueryParameters(inputParams, listId, type, fieldName, joinClause, union ); } public <T> void addRangeQueryParameters( List<? extends Object> inputParams, String listId, Class<T> type, String fieldName, boolean union ) { addRangeQueryParameters(inputParams, listId, type, fieldName, null, union); } public <T> void addRangeQueryParameters( Map<String, List<? extends Object>> inputParamsMap, String listId, Class<T> type, String fieldName, boolean union ) { List<? 
extends Object> inputParams = inputParamsMap.get(listId); addRangeQueryParameters(inputParams, listId, type, fieldName, null, union); } // Regex query parameters ----------------------------------------------------------------------------------------------------- public void addRegexQueryParameters( List<String> inputParams, String listId, String fieldName, boolean union ) { addRegexQueryParameters(inputParams, listId, fieldName, null, union); } public void addRegexQueryParameters( List<String> paramValList, String listId, String fieldName, String joinClause, boolean union) { // setup if( paramValList == null || paramValList.isEmpty() ) { return; } List<String> regexList = new ArrayList<String>(paramValList.size()); for( String input : paramValList ) { if( input == null || input.isEmpty() ) { continue; } String regex = input.replace('*', '%').replace('.', '_'); regexList.add(regex); } // build query string Map<String, String> paramNameRegexMap = new HashMap<String, String>(); StringBuilder queryClause = new StringBuilder("( "); if( joinClause != null ) { queryClause.append("( "); } for( int i = 0; i < regexList.size(); ++i ) { String paramName = generateParamName(); queryClause.append(fieldName + " LIKE :" + paramName + " " ); paramNameRegexMap.put(paramName, regexList.get(i)); if( i + 1 < regexList.size() ) { queryClause.append(union ? 
"OR" : "AND").append(" "); } } if( joinClause != null ) { queryClause.append(") AND " + joinClause.trim() + " "); } queryClause.append(")"); // add query string to query builder and fill params map internalAddToQueryBuilder(queryClause.toString(), union); for( Entry<String, String> nameRegexEntry : paramNameRegexMap.entrySet() ) { addNamedQueryParam(nameRegexEntry.getKey(), nameRegexEntry.getValue()); } queryBuilderModificationCleanup(); } public void addToQueryBuilder( String query, boolean union ) { // modify query builder internalAddToQueryBuilder(query, union); // cleanup queryBuilderModificationCleanup(); } public <T> void addToQueryBuilder( String query, boolean union, String paramName, List<T> paramValList ) { // modify query builder internalAddToQueryBuilder(query, union); // add query parameters Set<T> paramVals = new HashSet<T>(paramValList); addNamedQueryParam(paramName, paramVals); // cleanup queryBuilderModificationCleanup(); } private void internalAddToQueryBuilder( String query, boolean union ) { if( this.noClauseAddedYet ) { if( noWhereClauseYet ) { queryBuilder.append(" WHERE "); } else { queryBuilder.append(" AND "); } this.noClauseAddedYet = false; } else if( this.alreadyUsed ) { queryBuilder.append(union ? "\nOR " : "\nAND "); } queryBuilder.append(query); } public void queryBuilderModificationCleanup() { this.alreadyUsed = true; } public boolean whereClausePresent() { return ! 
noWhereClauseYet; } @SuppressWarnings("unchecked") private <T> List<T> checkAndConvertListToType( List<?> inputList, Object inputObject, String listId, Class<T> type ) { if( logger.isDebugEnabled() ) { debugQueryParametersIdentifiers(); } assert type != null : listId + ": type is null!"; assert inputObject != null : listId + ": input object is null!"; if( type.equals(inputObject.getClass()) ) { return (List<T>) inputList; } else { throw new IllegalArgumentException(listId + " parameter is an instance of " + "List<" + inputObject.getClass().getSimpleName() + "> instead of " + "List<" + type.getSimpleName() + ">"); } } public String generateParamName() { // it generates A1, A2.... A10, A11.... Integer id = ++queryParamIndex; char first = (char) ('A' + queryParamId); return new String(first + id.toString()); } public StringBuilder getQueryBuilder() { return queryBuilder; } public static void debugQueryParametersIdentifiers() { try { Field [] fields = QueryParameterIdentifiers.class.getDeclaredFields(); Map<String, String> fieldValueMap = new TreeMap<String, String>(new Comparator<String>() { @Override public int compare( String o1, String o2 ) { int int1 = -1; try { int1 = Integer.parseInt(o1); } catch(Exception e) { // no op } int int2 = -1; try { int2 = Integer.parseInt(o2); } catch(Exception e) { // no op } if( int1 > -1 && int2 > -1 ) { return new Integer(int1).compareTo(int2); } if( int1 > -1 && int2 == -1 ) { return -1; } if( int1 == -1 && int2 > -1 ) { return 1; } return o1.compareTo(o2); } }); for( Field field : fields ) { fieldValueMap.put(field.get(null).toString(), field.getName()); } for( Entry<String, String> entry : fieldValueMap.entrySet() ) { logger.debug(String.format("%-12s : %s", entry.getKey(), entry.getValue())); } } catch( Exception e ) { // ignore } } public Map<String, Object> getQueryParams() { return queryParams; } public String toSQL() { return queryBuilder.toString(); } }
apache-2.0
android-ia/platform_tools_idea
plugins/InspectionGadgets/testsrc/com/siyeh/ig/performance/MethodMayBeStaticInspectionTest.java
390
package com.siyeh.ig.performance;

import com.siyeh.ig.IGInspectionTestCase;

/**
 * Runs the MethodMayBeStatic inspection over its fixture directory,
 * with empty methods included in the analysis.
 */
public class MethodMayBeStaticInspectionTest extends IGInspectionTestCase {

    public void test() throws Exception {
        final MethodMayBeStaticInspection inspection = new MethodMayBeStaticInspection();
        // Also report empty methods, not just methods with bodies.
        inspection.m_ignoreEmptyMethods = false;
        doTest("com/siyeh/igtest/performance/method_may_be_static", inspection);
    }
}
apache-2.0
js0701/chromium-crosswalk
chrome/android/java/src/org/chromium/chrome/browser/snackbar/SnackbarManager.java
13183
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.snackbar;

import android.graphics.Rect;
import android.os.Handler;
import android.view.Gravity;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewTreeObserver.OnGlobalLayoutListener;
import android.view.Window;

import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.VisibleForTesting;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.device.DeviceClassManager;
import org.chromium.ui.UiUtils;
import org.chromium.ui.base.DeviceFormFactor;

import java.util.HashSet;
import java.util.Stack;

/**
 * Manager for the snackbar showing at the bottom of activity.
 * <p/>
 * There should be only one SnackbarManager and one snackbar in the activity. The manager maintains
 * a stack to store all entries that should be displayed. When showing a new snackbar, old entry
 * will be pushed to stack and text/button will be updated to the newest entry.
 * <p/>
 * When action button is clicked, this manager will call
 * {@link SnackbarController#onAction(Object)} in corresponding listener, and show the next
 * entry in stack. Otherwise if no action is taken by user during
 * {@link #DEFAULT_SNACKBAR_DURATION_MS} milliseconds, it will clear the stack and call
 * {@link SnackbarController#onDismissNoAction(Object)} to all listeners.
 */
public class SnackbarManager implements OnClickListener, OnGlobalLayoutListener {

    /**
     * Interface that shows the ability to provide a snackbar manager. Activities implementing this
     * interface must call {@link SnackbarManager#onStart()} and {@link SnackbarManager#onStop()} in
     * corresponding lifecycle events.
     */
    public interface SnackbarManageable {
        /**
         * @return The snackbar manager that has a proper anchor view.
         */
        SnackbarManager getSnackbarManager();
    }

    /**
     * Controller that post entries to snackbar manager and interact with snackbar manager during
     * dismissal and action click event.
     */
    public static interface SnackbarController {
        /**
         * Callback triggered when user clicks on button at end of snackbar. This method is only
         * called for controller having posted the entry the user clicked on; other controllers are
         * not notified. Also once this {@link #onAction(Object)} is called,
         * {@link #onDismissNoAction(Object)} and {@link #onDismissForEachType(boolean)} will not be
         * called.
         * @param actionData Data object passed when showing this specific snackbar.
         */
        void onAction(Object actionData);

        /**
         * Callback triggered when the snackbar is dismissed by either timeout or UI environment
         * change. This callback will be called for each entry a controller has posted, _except_ for
         * entries which the user has done action with, by clicking the action button.
         * @param actionData Data object associated with the dismissed snackbar entry.
         */
        void onDismissNoAction(Object actionData);

        /**
         * Notify each SnackbarControllers instance only once immediately before the snackbar is
         * dismissed. This function is likely to be used for controllers to do user metrics for
         * dismissal.
         * @param isTimeout Whether this dismissal is triggered by timeout.
         */
        void onDismissForEachType(boolean isTimeout);
    }

    private static final int DEFAULT_SNACKBAR_DURATION_MS = 3000;
    private static final int ACCESSIBILITY_MODE_SNACKBAR_DURATION_MS = 6000;

    // Used instead of the constant so tests can override the value.
    private static int sSnackbarDurationMs = DEFAULT_SNACKBAR_DURATION_MS;
    private static int sAccessibilitySnackbarDurationMs = ACCESSIBILITY_MODE_SNACKBAR_DURATION_MS;

    private final boolean mIsTablet;
    private View mDecor;
    private final Handler mUIThreadHandler;
    // Entries waiting to be shown; the top of the stack is the snackbar currently displayed.
    private Stack<Snackbar> mStack = new Stack<Snackbar>();
    private SnackbarPopupWindow mPopup;
    private boolean mActivityInForeground;
    // Posted with a delay on every show; dismisses everything when the timeout elapses.
    private final Runnable mHideRunnable = new Runnable() {
        @Override
        public void run() {
            dismissAllSnackbars(true);
        }
    };

    // Variables used and reused in local calculations.
    private int[] mTempDecorPosition = new int[2];
    private Rect mTempVisibleDisplayFrame = new Rect();

    /**
     * Constructs a SnackbarManager to show snackbars in the given window.
     */
    public SnackbarManager(Window window) {
        mDecor = window.getDecorView();
        mUIThreadHandler = new Handler();
        mIsTablet = DeviceFormFactor.isTablet(mDecor.getContext());
    }

    /**
     * Notifies the snackbar manager that the activity is running in foreground now.
     */
    public void onStart() {
        mActivityInForeground = true;
    }

    /**
     * Notifies the snackbar manager that the activity has been pushed to background.
     */
    public void onStop() {
        dismissAllSnackbars(false);
        mActivityInForeground = false;
    }

    /**
     * Shows a snackbar at the bottom of the screen, or above the keyboard if the keyboard is
     * visible. If the currently displayed snackbar is forcing display, the new snackbar is added as
     * the next to be displayed on the stack.
     */
    public void showSnackbar(Snackbar snackbar) {
        if (!mActivityInForeground) return;

        // A force-display snackbar stays visible: queue the new one just below the top.
        if (mPopup != null && !mStack.empty() && mStack.peek().getForceDisplay()) {
            mStack.add(mStack.size() - 1, snackbar);
            return;
        }

        int durationMs = snackbar.getDuration();
        if (durationMs == 0) {
            // Duration 0 means "use the default"; accessibility mode gets a longer timeout.
            durationMs = DeviceClassManager.isAccessibilityModeEnabled(mDecor.getContext())
                    ? sAccessibilitySnackbarDurationMs : sSnackbarDurationMs;
        }

        // Restart the timeout for the newly shown entry.
        mUIThreadHandler.removeCallbacks(mHideRunnable);
        mUIThreadHandler.postDelayed(mHideRunnable, durationMs);

        mStack.push(snackbar);
        if (mPopup == null) {
            mPopup = new SnackbarPopupWindow(mDecor, this, snackbar);
            showPopupAtBottom();
            mDecor.getViewTreeObserver().addOnGlobalLayoutListener(this);
        } else {
            mPopup.update(snackbar, true);
        }

        mPopup.announceforAccessibility();
    }

    /**
     * Warning: Calling this method might cause cascading destroy loop, because you might trigger
     * callbacks for other {@link SnackbarController}. This method is only meant to be used during
     * {@link ChromeActivity}'s destruction routine. For other purposes, use
     * {@link #dismissSnackbars(SnackbarController)} instead.
     * <p>
     * Dismisses all snackbars in stack. This will call
     * {@link SnackbarController#onDismissNoAction(Object)} for every closing snackbar.
     *
     * @param isTimeout Whether dismissal was triggered by timeout.
     */
    public void dismissAllSnackbars(boolean isTimeout) {
        mUIThreadHandler.removeCallbacks(mHideRunnable);

        if (!mActivityInForeground) return;

        if (mPopup != null) {
            mPopup.dismiss();
            mPopup = null;
        }

        // Each controller gets onDismissForEachType() at most once per dismissal pass.
        HashSet<SnackbarController> controllers = new HashSet<SnackbarController>();

        while (!mStack.isEmpty()) {
            Snackbar snackbar = mStack.pop();
            if (!controllers.contains(snackbar.getController())) {
                snackbar.getController().onDismissForEachType(isTimeout);
                controllers.add(snackbar.getController());
            }
            snackbar.getController().onDismissNoAction(snackbar.getActionData());
            // A force-display entry below a timed-out one is re-shown instead of dismissed.
            if (isTimeout && !mStack.isEmpty() && mStack.peek().getForceDisplay()) {
                showSnackbar(mStack.pop());
                return;
            }
        }
        mDecor.getViewTreeObserver().removeOnGlobalLayoutListener(this);
    }

    /**
     * Dismisses snackbars that are associated with the given {@link SnackbarController}.
     *
     * @param controller Only snackbars with this controller will be removed.
     */
    public void dismissSnackbars(SnackbarController controller) {
        boolean isFound = false;
        // Copy to an array so removal does not interfere with iteration.
        Snackbar[] snackbars = new Snackbar[mStack.size()];
        mStack.toArray(snackbars);
        for (Snackbar snackbar : snackbars) {
            if (snackbar.getController() == controller) {
                mStack.remove(snackbar);
                isFound = true;
            }
        }
        if (!isFound) return;

        finishSnackbarRemoval(controller);
    }

    /**
     * Dismisses snackbars that have a certain controller and action data.
     *
     * @param controller Only snackbars with this controller will be removed.
     * @param actionData Only snackbars whose action data is equal to actionData will be removed.
     */
    public void dismissSnackbars(SnackbarController controller, Object actionData) {
        boolean isFound = false;
        for (Snackbar snackbar : mStack) {
            if (snackbar.getActionData() != null && snackbar.getActionData().equals(actionData)
                    && snackbar.getController() == controller) {
                mStack.remove(snackbar);
                isFound = true;
                break;
            }
        }
        if (!isFound) return;

        finishSnackbarRemoval(controller);
    }

    // Notifies the controller and either re-shows the next stacked entry or tears down the popup.
    private void finishSnackbarRemoval(SnackbarController controller) {
        controller.onDismissForEachType(false);

        if (mStack.isEmpty()) {
            dismissAllSnackbars(false);
        } else {
            // Refresh the snackbar to let it show top of stack and have full timeout.
            showSnackbar(mStack.pop());
        }
    }

    /**
     * Handles click event for action button at end of snackbar.
     */
    @Override
    public void onClick(View v) {
        assert !mStack.isEmpty();

        Snackbar snackbar = mStack.pop();
        snackbar.getController().onAction(snackbar.getActionData());
        if (!mStack.isEmpty()) {
            showSnackbar(mStack.pop());
        } else {
            dismissAllSnackbars(false);
        }
    }

    private void showPopupAtBottom() {
        // When the keyboard is showing, translating the snackbar upwards looks bad because it
        // overlaps the keyboard. In this case, use an alternative animation without translation.
        boolean isKeyboardShowing = UiUtils.isKeyboardShowing(mDecor.getContext(), mDecor);
        mPopup.setAnimationStyle(isKeyboardShowing ? R.style.SnackbarAnimationWithKeyboard
                : R.style.SnackbarAnimation);

        mDecor.getLocationInWindow(mTempDecorPosition);
        mDecor.getWindowVisibleDisplayFrame(mTempVisibleDisplayFrame);
        int decorBottom = mTempDecorPosition[1] + mDecor.getHeight();
        int visibleBottom = Math.min(mTempVisibleDisplayFrame.bottom, decorBottom);
        int margin = mIsTablet ? mDecor.getResources().getDimensionPixelSize(
                R.dimen.snackbar_tablet_margin) : 0;

        mPopup.showAtLocation(mDecor, Gravity.START | Gravity.BOTTOM, margin,
                decorBottom - visibleBottom + margin);
    }

    /**
     * Resize and re-position popup window when the device orientation changes or the software
     * keyboard appears. Be careful not to let the snackbar overlap the Android navigation bar:
     * http://b/17789629.
     */
    @Override
    public void onGlobalLayout() {
        if (mPopup == null) return;

        mDecor.getLocationInWindow(mTempDecorPosition);
        mDecor.getWindowVisibleDisplayFrame(mTempVisibleDisplayFrame);
        int decorBottom = mTempDecorPosition[1] + mDecor.getHeight();
        int visibleBottom = Math.min(mTempVisibleDisplayFrame.bottom, decorBottom);

        if (mIsTablet) {
            int margin = mDecor.getResources().getDimensionPixelOffset(
                    R.dimen.snackbar_tablet_margin);
            int width = mDecor.getResources().getDimensionPixelSize(R.dimen.snackbar_tablet_width);
            boolean isRtl = ApiCompatibilityUtils.isLayoutRtl(mDecor);
            int startPosition = isRtl ? mDecor.getRight() - width - margin
                    : mDecor.getLeft() + margin;
            mPopup.update(startPosition, decorBottom - visibleBottom + margin, width, -1);
        } else {
            mPopup.update(mDecor.getLeft(), decorBottom - visibleBottom, mDecor.getWidth(), -1);
        }
    }

    /**
     * @return Whether there is a snackbar on screen.
     */
    public boolean isShowing() {
        if (mPopup == null) return false;
        return mPopup.isShowing();
    }

    /**
     * Overrides the default snackbar duration with a custom value for testing.
     * @param durationMs The duration to use in ms.
     */
    @VisibleForTesting
    public static void setDurationForTesting(int durationMs) {
        sSnackbarDurationMs = durationMs;
        sAccessibilitySnackbarDurationMs = durationMs;
    }
}
bsd-3-clause
danielmt/vshard
vendor/github.com/youtube/vitess/java/client/src/main/java/com/youtube/vitess/client/cursor/StreamCursor.java
3526
package com.youtube.vitess.client.cursor; import com.youtube.vitess.client.StreamIterator; import com.youtube.vitess.proto.Query; import com.youtube.vitess.proto.Query.Field; import com.youtube.vitess.proto.Query.QueryResult; import java.sql.SQLDataException; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.Iterator; import java.util.List; import javax.annotation.concurrent.NotThreadSafe; /** * A {@link Cursor} that serves records from the sequence of {@link QueryResult} objects * represented by a {@link StreamIterator}. */ @NotThreadSafe public class StreamCursor extends Cursor { private StreamIterator<QueryResult> streamIterator; private Iterator<Query.Row> rowIterator; private List<Field> fields; public StreamCursor(StreamIterator<QueryResult> streamIterator) { this.streamIterator = streamIterator; } @Override public long getRowsAffected() throws SQLException { throw new SQLFeatureNotSupportedException("getRowsAffected() is not supported on StreamCursor"); } @Override public long getInsertId() throws SQLException { throw new SQLFeatureNotSupportedException("getInsertId() is not supported on StreamCursor"); } @Override public List<Field> getFields() throws SQLException { if (streamIterator == null) { throw new SQLDataException("getFields() called on closed Cursor"); } if (fields == null) { // The first QueryResult should have the fields. if (!nextQueryResult()) { throw new SQLDataException("stream ended before fields were received"); } } return fields; } @Override public void close() throws Exception { streamIterator.close(); streamIterator = null; } @Override public Row next() throws SQLException { if (streamIterator == null) { throw new SQLDataException("next() called on closed Cursor"); } // Get the next Row from the current QueryResult. if (rowIterator != null && rowIterator.hasNext()) { return new Row(getFieldMap(), rowIterator.next()); } // Get the next QueryResult. 
Loop in case we get a QueryResult with no Rows (e.g. only Fields). while (nextQueryResult()) { // Get the first Row from the new QueryResult. if (rowIterator.hasNext()) { return new Row(getFieldMap(), rowIterator.next()); } } // No more Rows and no more QueryResults. return null; } /** * Fetches the next {@link QueryResult} from the stream. * * <p>Whereas the public {@link #next()} method advances the {@link Cursor} state to the next * {@link Row}, this method advances the internal state to the next {@link QueryResult}, which * contains a batch of rows. Specifically, we get the next {@link QueryResult} from * {@link #streamIterator}, and then set {@link #rowIterator} accordingly. * * <p>If {@link #fields} is null, we assume the next {@link QueryResult} must contain the fields, * and set {@link #fields} from it. * * @return false if there are no more results in the stream. */ private boolean nextQueryResult() throws SQLException { if (streamIterator.hasNext()) { QueryResult queryResult = streamIterator.next(); if (fields == null) { // The first QueryResult should have the fields. fields = queryResult.getFieldsList(); } rowIterator = queryResult.getRowsList().iterator(); return true; } else { rowIterator = null; return false; } } }
mit
Snickermicker/smarthome
bundles/core/org.eclipse.smarthome.core/src/main/java/org/eclipse/smarthome/core/scheduler/ScheduledCompletableFuture.java
1065
/**
 * Copyright (c) 2014,2019 Contributors to the Eclipse Foundation
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information regarding copyright ownership.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 */
package org.eclipse.smarthome.core.scheduler;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ScheduledFuture;

import org.eclipse.jdt.annotation.NonNullByDefault;

/**
 * Interface returned by all scheduled jobs. It can be used to wait for the value,
 * cancel the job or check how much time till the scheduled job will run.
 *
 * @author Hilbrand Bouwkamp - initial contribution
 *
 * @param <T> the type of the value produced by the scheduled job
 */
@NonNullByDefault
public interface ScheduledCompletableFuture<T> extends ScheduledFuture<T> {

    /**
     * @return Returns the {@link CompletableFuture} associated with the scheduled job.
     */
    CompletableFuture<T> getPromise();
}
epl-1.0
davidfestal/che
wsmaster/che-core-api-ssh/src/main/java/org/eclipse/che/api/ssh/server/spi/SshDao.java
3267
/*
 * Copyright (c) 2012-2018 Red Hat, Inc.
 * This program and the accompanying materials are made
 * available under the terms of the Eclipse Public License 2.0
 * which is available at https://www.eclipse.org/legal/epl-2.0/
 *
 * SPDX-License-Identifier: EPL-2.0
 *
 * Contributors:
 *   Red Hat, Inc. - initial API and implementation
 */
package org.eclipse.che.api.ssh.server.spi;

import java.util.List;
import org.eclipse.che.api.core.ConflictException;
import org.eclipse.che.api.core.NotFoundException;
import org.eclipse.che.api.core.ServerException;
import org.eclipse.che.api.ssh.server.model.impl.SshPairImpl;

/**
 * Defines data access object contract for {@link SshPairImpl}.
 *
 * <p>Ssh pairs are identified by the combination of owner, service and name.
 *
 * @author Sergii Leschenko
 */
public interface SshDao {

  /**
   * Creates new ssh pair for specified user.
   *
   * @param sshPair ssh pair to create
   * @throws ConflictException when specified user already has ssh pair with given service and name
   * @throws NullPointerException when {@code sshPair} is null
   * @throws ServerException when any other error occurs during ssh pair creating
   */
  void create(SshPairImpl sshPair) throws ServerException, ConflictException;

  /**
   * Returns ssh pairs by owner and service.
   *
   * @param owner the id of the user who is the owner of the ssh pairs
   * @param service service name of ssh pair
   * @return list of ssh pair with given service and owned by given service.
   * @throws NullPointerException when {@code owner} or {@code service} is null
   * @throws ServerException when any other error occurs during ssh pair fetching
   */
  List<SshPairImpl> get(String owner, String service) throws ServerException;

  /**
   * Returns ssh pair by owner, service and name.
   *
   * @param owner the id of the user who is the owner of the ssh pair
   * @param service service name of ssh pair
   * @param name name of ssh pair
   * @return ssh pair instance
   * @throws NullPointerException when {@code owner} or {@code service} or {@code name} is null
   * @throws NotFoundException when ssh pair is not found
   * @throws ServerException when any other error occurs during ssh pair fetching
   */
  SshPairImpl get(String owner, String service, String name)
      throws ServerException, NotFoundException;

  /**
   * Removes ssh pair by owner, service and name.
   *
   * @param owner the id of the user who is the owner of the ssh pair
   * @param service service name of ssh pair
   * @param name of ssh pair
   * @throws NullPointerException when {@code owner} or {@code service} or {@code name} is null
   * @throws NotFoundException when ssh pair is not found
   * @throws ServerException when any other error occurs during ssh pair removing
   */
  void remove(String owner, String service, String name)
      throws ServerException, NotFoundException;

  /**
   * Gets ssh pairs by owner.
   *
   * @param owner the owner of the ssh key
   * @return the list of the ssh key pairs owned by the {@code owner}, or empty list if there are no
   *     ssh key pairs by the given {@code owner}
   * @throws NullPointerException when {@code owner} is null
   * @throws ServerException when any error occurs(e.g. database connection error)
   */
  List<SshPairImpl> get(String owner) throws ServerException;
}
epl-1.0
rutgers-apl/Atomicity-Violation-Detector
tbb-lib/examples/parallel_for/tachyon/android/src/com/intel/tbb/example/tachyon/tachyon.java
11966
/* Copyright 2005-2014 Intel Corporation. All Rights Reserved. This file is part of Threading Building Blocks. Threading Building Blocks is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation. Threading Building Blocks is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Threading Building Blocks; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA As a special exception, you may use this file as part of a free software library without restriction. Specifically, if other files instantiate templates or use macros or inline functions from this file, or you compile this file and link it with other files to produce an executable, this file does not by itself cause the resulting executable to be covered by the GNU General Public License. This exception does not however invalidate any other reasons why the executable file might be covered by the GNU General Public License. */ /* The original source for this example is Copyright (c) 1994-2008 John E. Stone All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. 
The name of the author may not be used to endorse or promote products derived
from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package com.intel.tbb.example.tachyon;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Timer;
import java.util.TimerTask;

import android.app.ActionBar;
import android.app.Activity;
import android.os.Bundle;
import android.os.Environment;
import android.content.Context;
import android.content.res.AssetManager;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;

/**
 * Main activity of the TBB "tachyon" ray-tracing sample.
 * <p>
 * On creation it copies the bundled scene file (assets/data.dat) to external
 * storage so native code can open it by path, derives the scene size from the
 * display metrics, hosts the rendering {@link tachyonView}, and overlays two
 * labels (thread count, elapsed time). The worker-thread count is chosen from
 * the options menu; the actual rendering happens in native code loaded in the
 * static initializer below.
 */
public class tachyon extends Activity {

    /** View that owns the render bitmap and drives the native renderer. */
    private tachyonView myView;

    /** Scene width in pixels (taken from the display, then overridden in onCreate). */
    private int W;

    /** Scene height in pixels (kept proportional to the display aspect ratio). */
    private int H;

    /** Absolute path of the scene data file copied to external storage. */
    private String fileOnSDcard;

    // Y coordinates of the current and previous ACTION_UP touches, compared
    // in onTouchEvent to decide whether to show or hide the action bar.
    // Starts at a large value so the first gesture comparison hides the bar.
    private float currentYMenuPosition = (float) 1e5;
    private float previousYMenuPosition = 0;

    /** Thread count chosen from the menu; 0 selects automatic HW concurrency. */
    public int number_of_threads = 0;

    // Overlay labels updated from tachyonView.onDraw with the current thread
    // count and elapsed render time.
    public static TextView txtThreadNumber;
    public static TextView txtElapsedTime;

    /** Pauses/resumes the native renderer; called from onPause/onResume. */
    private static native void setPaused(boolean paused);

    @SuppressWarnings("deprecation")
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        try {
            // Copy the scene file from the APK assets onto external storage
            // so the native renderer can open it through a plain file path.
            fileOnSDcard = Environment.getExternalStorageDirectory()
                    .getPath() + "/tachyon/data.dat";
            Log.i("tachyon", "Data file name is " + fileOnSDcard);
            File dataFile = new File(fileOnSDcard);
            // Always refresh the copy: delete any stale file first.
            if (dataFile.exists()) {
                dataFile.delete();
            }
            if (!dataFile.exists()) {
                AssetManager assetManager = getAssets();
                InputStream inputFile = assetManager.open("data.dat");
                dataFile.getParentFile().mkdirs();
                dataFile.createNewFile();
                OutputStream outputFile = new FileOutputStream(fileOnSDcard);
                byte[] buffer = new byte[10000];
                int bytesRead;
                while ((bytesRead = inputFile.read(buffer)) != -1)
                    outputFile.write(buffer, 0, bytesRead);
                inputFile.close();
                inputFile = null;
                outputFile.flush();
                outputFile.close();
                outputFile = null;
            }

            // Size the scene from the physical display, then hide the action
            // bar (it is brought back by a downward swipe, see onTouchEvent).
            DisplayMetrics displayMetrics = new DisplayMetrics();
            getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
            ActionBar actionBar = getActionBar();
            actionBar.hide();
            H = displayMetrics.heightPixels;
            W = displayMetrics.widthPixels;
            Log.i("tachyon", "displayMetrics.heightPixels: " + H);
            Log.i("tachyon", "displayMetrics.widthPixels: " + W);
            // Scene-size override (active code): render at a fixed 400-pixel
            // width, keeping the display's aspect ratio; the bitmap is scaled
            // up to the full canvas in tachyonView.onDraw.
            int sceneWidth = 400;
            float ratio = W > H ? (float) (W) / H : (float) (H) / W;
            W = sceneWidth;
            H = (int) (W / ratio);
            Log.i("tachyon", "Scene size is " + W + "*" + H);
        } catch (Exception e) {
            Log.e("tachyon", "Exception in file copy: " + e.getMessage());
        }

        // Rendering surface.
        myView = new tachyonView(this, W, H, fileOnSDcard);
        setContentView(myView);

        // Green top-center label: current thread count.
        LinearLayout llThreadNumber = new LinearLayout(this);
        txtThreadNumber = new TextView(this);
        txtThreadNumber.setText("");
        txtThreadNumber.setTextColor(0xFF00FF00);
        txtThreadNumber.setScaleX(1);
        txtThreadNumber.setScaleY(1);
        txtThreadNumber.setPadding(10, 10, 10, 10);
        llThreadNumber.setGravity(Gravity.TOP | Gravity.CENTER);
        llThreadNumber.addView(txtThreadNumber);
        this.addContentView(llThreadNumber,
                new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));

        // Red top-right label: elapsed render time.
        LinearLayout llElapsedTime = new LinearLayout(this);
        txtElapsedTime = new TextView(this);
        txtElapsedTime.setText("");
        txtElapsedTime.setTextColor(0xFFFF0000);
        txtElapsedTime.setScaleX(2);
        txtElapsedTime.setScaleY(2);
        txtElapsedTime.setPadding(10, 10, 40, 10);
        llElapsedTime.setGravity(Gravity.TOP | Gravity.RIGHT);
        llElapsedTime.addView(txtElapsedTime);
        this.addContentView(llElapsedTime,
                new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    }

    /** Inflates the thread-count/exit menu shown in the action bar. */
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.main_screen_menu, menu);
        return true;
    }

    /**
     * Shows the action bar on a downward swipe and hides it on an upward one,
     * decided by comparing the Y of this ACTION_UP against the Y remembered
     * from the previous gesture.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        currentYMenuPosition = event.getY();
        if (event.getAction() == MotionEvent.ACTION_UP) {
            ActionBar actionBar = getActionBar();
            if (previousYMenuPosition < currentYMenuPosition) {
                actionBar.show();
            } else {
                actionBar.hide();
            }
            previousYMenuPosition = currentYMenuPosition;
            return true;
        }
        return super.onTouchEvent(event);
    }

    /** Pauses the native renderer while the activity is in the background. */
    @Override
    public void onPause() {
        super.onPause();
        Log.i("tachyon", "onPause working");
        setPaused(true);
    }

    /** Resumes the native renderer when the activity returns to the foreground. */
    @Override
    public void onResume() {
        super.onResume();
        Log.i("tachyon", "onResume working");
        setPaused(false);
    }

    /**
     * Applies the thread count selected in the menu (0 = automatic) and
     * restarts the native renderer, or exits the process entirely.
     */
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.thread0: {
                number_of_threads = 0;
                break;
            }
            case R.id.thread1: {
                number_of_threads = 1;
                break;
            }
            case R.id.thread2: {
                number_of_threads = 2;
                break;
            }
            case R.id.thread4: {
                number_of_threads = 4;
                break;
            }
            case R.id.thread8: {
                number_of_threads = 8;
                break;
            }
            case R.id.exit: {
                Log.i("tachyon", "Exiting...");
                // No break needed: System.exit does not return.
                System.exit(0);
            }
        }
        Log.i("tachyon", "Starting in " + number_of_threads + " Thread(s)");
        myView.initNative(number_of_threads);
        return true;
    }

    // Native libraries used by the sample. Presumably listed in dependency
    // order (C++ runtime, then TBB, then the renderer) -- confirm against
    // the NDK build files.
    static {
        System.loadLibrary("gnustl_shared");
        System.loadLibrary("tbb");
        System.loadLibrary("jni-engine");
    }
}

/**
 * View backed by an ARGB_8888 bitmap that the native ray tracer fills in.
 * <p>
 * onDraw pulls the latest frame from native code, blits it scaled to the
 * canvas, mirrors status into the activity's overlay labels, and immediately
 * invalidates itself again -- a simple continuous render loop.
 */
class tachyonView extends View {

    /** Bitmap the native renderer writes pixels into. */
    private Bitmap myBitmap;

    /** Destination rectangle, resized to the canvas on every draw. */
    private Rect targetRect;

    // NOTE(review): the two timer fields below are never referenced in this
    // file; they look like leftovers from an earlier refresh mechanism.
    private TimerTask myRefreshTask;
    private static Timer myRefreshTimer;

    /** Scene width in pixels (normalized to landscape: W >= H). */
    private int W;

    /** Scene height in pixels. */
    private int H;

    /** Path of the scene data file passed to the native renderer. */
    private String filename;

    // NOTE(review): never referenced in this file.
    public static String strCntDisplay;
    public static String strFpsDisplay;

    /**
     * Copies the current native frame into {@code bitmap}; {@code size} is
     * the buffer size in bytes (4 * W * H below). onDraw only blits when the
     * return value is 0 -- assumed to mean "frame ready"; confirm in the
     * native jni-engine sources.
     */
    private static native int renderBitmap(Bitmap bitmap, int size);

    /** (Re)initializes the native renderer for the given scene and thread count. */
    private static native void initBitmap(Bitmap bitmap, int x_size, int y_size,
            int number_of_threads, String fileOnSDcard);

    /** Declared but never called anywhere in this file. */
    private static native void pressButton(int x, int y);

    /** Elapsed render time; displayed as seconds by the overlay labels. */
    private static native long getElapsedTime();

    /** Restarts the native renderer with a new thread count (0 = auto). */
    public void initNative(int number_of_threads) {
        initBitmap(myBitmap, W, H, number_of_threads, filename);
    }

    public tachyonView(Context context, int widthPixels, int heightPixels, String fileOnSDcard) {
        super(context);
        // Landscape support only: H must be less than W. If the application
        // is started on a locked phone, a portrait layout can reach this
        // constructor even though the manifest requests landscape, so the
        // two dimensions are normalized here.
        W = widthPixels > heightPixels ? widthPixels : heightPixels;
        H = widthPixels > heightPixels ? heightPixels : widthPixels;
        filename = fileOnSDcard;
        myBitmap = Bitmap.createBitmap(W, H, Bitmap.Config.ARGB_8888);
        targetRect = new Rect();
        initBitmap(myBitmap, W, H, 0, filename);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        // Write bitmap buffer: pull the current frame from native code.
        if (renderBitmap(myBitmap, 4 * H * W) == 0) {
            targetRect.right = canvas.getWidth();
            targetRect.bottom = canvas.getHeight();
            // Draw bitmap buffer scaled to fill the whole canvas.
            canvas.drawBitmap(myBitmap, null, targetRect, null);

            // Mirror thread count and elapsed time into the window title and
            // the overlay labels owned by the activity.
            tachyon parent = (tachyon) getContext();
            long elapsedTime = getElapsedTime();
            if (parent.number_of_threads > 0) {
                parent.getWindow().setTitle(parent.number_of_threads + " Thread(s): " + elapsedTime + " s.");
                tachyon.txtThreadNumber.setText(parent.number_of_threads + " thread(s)");
                tachyon.txtElapsedTime.setText(elapsedTime + " secs");
            } else {
                parent.getWindow().setTitle("HW concurrency: " + elapsedTime + " s.");
                tachyon.txtThreadNumber.setText("Auto HW concurrency");
                tachyon.txtElapsedTime.setText(elapsedTime + " secs");
            }
        }
        // Schedule another draw immediately: continuous render loop.
        invalidate();
        return;
    }
}
gpl-2.0
nuest/SOS
hibernate/common/src/main/java/org/n52/sos/ds/hibernate/entities/i18n/HibernateI18NFeatureOfInterestMetadata.java
1754
/**
 * Copyright (C) 2012-2015 52°North Initiative for Geospatial Open Source
 * Software GmbH
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 *
 * If the program is linked with libraries which are licensed under one of
 * the following licenses, the combination of the program with the linked
 * library is not considered a "derivative work" of the program:
 *
 *     - Apache License, version 2.0
 *     - Apache Software License, version 1.0
 *     - GNU Lesser General Public License, version 3
 *     - Mozilla Public License, versions 1.0, 1.1 and 2.0
 *     - Common Development and Distribution License (CDDL), version 1.0
 *
 * Therefore the distribution of the program linked with libraries licensed
 * under the aforementioned licenses, is permitted by the copyright holders
 * if the distribution is compliant with both the GNU General Public
 * License version 2 and the aforementioned licenses.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
 * Public License for more details.
 */
package org.n52.sos.ds.hibernate.entities.i18n;

import org.n52.sos.ds.hibernate.entities.FeatureOfInterest;

/**
 * Implementation of {@link AbstractHibernateI18NMetadata} for
 * {@link FeatureOfInterest}.
 * <p>
 * The class adds no state or behaviour of its own; everything is inherited
 * from {@link AbstractHibernateI18NMetadata}. It exists so that i18n
 * metadata for features of interest can be mapped as a distinct entity.
 *
 * @author Carsten Hollmann <c.hollmann@52north.org>
 * @since 4.0.0
 *
 */
public class HibernateI18NFeatureOfInterestMetadata extends AbstractHibernateI18NMetadata {

    // Fixed UID so serialized instances remain compatible across releases.
    private static final long serialVersionUID = 1783692932017341427L;

}
gpl-2.0
robertoandrade/cyclos
src/nl/strohalm/cyclos/entities/customization/documents/MemberDocument.java
2268
/*
 This file is part of Cyclos (www.cyclos.org).
 A project of the Social Trade Organisation (www.socialtrade.org).

 Cyclos is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation; either version 2 of the License, or
 (at your option) any later version.

 Cyclos is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 GNU General Public License for more details.

 You should have received a copy of the GNU General Public License
 along with Cyclos; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package nl.strohalm.cyclos.entities.customization.documents;

import nl.strohalm.cyclos.entities.Relationship;
import nl.strohalm.cyclos.entities.members.Member;
import nl.strohalm.cyclos.utils.StringValuedEnum;

/**
 * A static document that belongs to a single {@link Member}. In addition to
 * the owning member it carries a {@link Visibility} flag indicating which
 * kind of user (member, broker or admin) may see the document. Its
 * {@link #getNature()} is always {@code Nature.MEMBER}.
 */
public class MemberDocument extends StaticDocument {

    /** Entity relationships that may be fetched together with the document. */
    public static enum Relationships implements Relationship {
        MEMBER("member");

        private final String name;

        private Relationships(final String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }
    }

    /** Who may see the document; each constant is backed by a one-letter code. */
    public static enum Visibility implements StringValuedEnum {
        MEMBER("M"), BROKER("B"), ADMIN("A");

        private String value;

        private Visibility(final String value) {
            this.value = value;
        }

        public String getValue() {
            return value;
        }
    }

    private static final long serialVersionUID = -788736495660804107L;

    // The member that owns this document.
    private Member member;

    // Visibility level controlling who may see the document.
    private Visibility visibility;

    public Member getMember() {
        return member;
    }

    /** Always {@code Nature.MEMBER} for this subclass. */
    @Override
    public Nature getNature() {
        return Nature.MEMBER;
    }

    public Visibility getVisibility() {
        return visibility;
    }

    public void setMember(final Member member) {
        this.member = member;
    }

    public void setVisibility(final Visibility visibility) {
        this.visibility = visibility;
    }

}
gpl-2.0
ekummerfeld/tetrad
tetrad-lib/src/test/java/edu/cmu/tetrad/test/TestChoiceGenerator.java
4133
/////////////////////////////////////////////////////////////////////////////// // For information as to what this class does, see the Javadoc, below. // // Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, // // 2007, 2008, 2009, 2010, 2014, 2015 by Peter Spirtes, Richard Scheines, Joseph // // Ramsey, and Clark Glymour. // // // // This program is free software; you can redistribute it and/or modify // // it under the terms of the GNU General Public License as published by // // the Free Software Foundation; either version 2 of the License, or // // (at your option) any later version. // // // // This program is distributed in the hope that it will be useful, // // but WITHOUT ANY WARRANTY; without even the implied warranty of // // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // // GNU General Public License for more details. // // // // You should have received a copy of the GNU General Public License // // along with this program; if not, write to the Free Software // // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA // /////////////////////////////////////////////////////////////////////////////// package edu.cmu.tetrad.test; import edu.cmu.tetrad.util.ChoiceGenerator; import edu.cmu.tetrad.util.DepthChoiceGenerator; import edu.cmu.tetrad.util.PermutationGenerator; import edu.cmu.tetrad.util.SelectionGenerator; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; /** * Implements basic tests of the choice generator. The choice generator should visit every * choice in a choose b exactly once, and then return null. * * @author Joseph Ramsey */ public class TestChoiceGenerator { /** * Prints all of the choices for the given a and b. 
*/ @Test public void testPrintChoiceGenerator() { int a = 10; int b = 3; int numCombinations = ChoiceGenerator.getNumCombinations(a, b); assertEquals(120, numCombinations); } @Test public void testPrintDepthChoiceGenerator() { int a = 4; int b = 2; int numCombinations = DepthChoiceGenerator.getNumCombinations(a, b); assertEquals(11, numCombinations); } @Test public void testPrintPermutationGenerator() { PermutationGenerator gen = new PermutationGenerator(4); int count = 0; while (gen.next() != null) { count++; } assertEquals(24, count); } @Test public void testPrintSelectionGenerator() { SelectionGenerator gen = new SelectionGenerator(4); int count = 0; while (gen.next() != null) { count++; } assertEquals(256, count); } /** * Tests to make sure the ChoiceGenerator is output the correct number of choices * for various values of and b. */ @Test public void testChoiceGeneratorCounts() { for (int a = 0; a <= 20; a++) { for (int b = 0; b <= a; b++) { ChoiceGenerator generator = new ChoiceGenerator(a, b); int n = 0; while ((generator.next()) != null) { n++; } long numerator = 1; long denominator = 1; for (int k = a; k - b > 0; k--) { numerator *= k; denominator *= k - b; } long numChoices = numerator / denominator; if (n != numChoices) { fail("a = " + a + " b = " + b + " numChoices = " + numChoices + " n = " + n); } } } } }
gpl-2.0
jtux270/translate
ovirt/3.6_source/frontend/webadmin/modules/userportal-gwtp/src/main/java/org/ovirt/engine/ui/userportal/widget/tab/VerticalTabPanel.java
1265
package org.ovirt.engine.ui.userportal.widget.tab;

import org.ovirt.engine.ui.common.widget.tab.AbstractTabPanel;
import org.ovirt.engine.ui.common.widget.tab.TabDefinition;

import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.IsWidget;
import com.google.gwt.user.client.ui.Widget;
import com.gwtplatform.mvp.client.TabData;

/**
 * Tab panel that keeps its tab widgets in a {@link FlowPanel} container
 * bound from a UiBinder template. New tab headers are created as
 * {@link VerticalTab} instances.
 */
public class VerticalTabPanel extends AbstractTabPanel {

    /** UiBinder interface binding the XML template to this widget. */
    interface WidgetUiBinder extends UiBinder<Widget, VerticalTabPanel> {
        WidgetUiBinder uiBinder = GWT.create(WidgetUiBinder.class);
    }

    /** Container holding the tab widgets; injected from the template. */
    @UiField
    FlowPanel tabContainer;

    public VerticalTabPanel() {
        initWidget(WidgetUiBinder.uiBinder.createAndBindUi(this));
    }

    /** Inserts the tab widget at the given position in the container. */
    @Override
    public void addTabWidget(IsWidget tabWidget, int index) {
        tabContainer.insert(tabWidget, index);
    }

    /**
     * Removes the tab widget by detaching its DOM element directly.
     * NOTE(review): unlike the logical insert in addTabWidget, this bypasses
     * FlowPanel's logical child list -- presumably intentional, but worth
     * confirming it does not leave a stale logical child behind.
     */
    @Override
    public void removeTabWidget(IsWidget tabWidget) {
        tabContainer.getElement().removeChild(tabWidget.asWidget().getElement());
    }

    /** Creates a new vertical tab header for the given tab data. */
    @Override
    protected TabDefinition createNewTab(TabData tabData) {
        return new VerticalTab(tabData, this);
    }
}
gpl-3.0
Jackkal/jpexs-decompiler
src/com/jpexs/decompiler/flash/gui/pipes/PipeInputStream.java
3305
/*
 * Copyright (C) 2015 JPEXS
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package com.jpexs.decompiler.flash.gui.pipes;

import com.sun.jna.Platform;
import com.sun.jna.platform.win32.Kernel32;
import com.sun.jna.platform.win32.WinNT.HANDLE;
import com.sun.jna.ptr.IntByReference;
import java.io.IOException;
import java.io.InputStream;

/**
 * Blocking {@link InputStream} over a Windows named pipe (via JNA Kernel32).
 * Either creates and serves a new inbound pipe ({@code newpipe == true}) or
 * opens an existing one for reading. Windows only.
 *
 * @author JPEXS
 */
public class PipeInputStream extends InputStream {

    /** Win32 handle of the pipe; closed exactly once in {@link #close()}. */
    protected HANDLE pipe;

    /** Guards against closing the handle twice (close() + shutdown hook). */
    private boolean closed = false;

    /**
     * Opens the named pipe {@code \\.\pipe\<pipeName>}.
     *
     * @param pipeName short pipe name (without the \\.\pipe\ prefix)
     * @param newpipe true to create the pipe and wait for a client to
     *        connect; false to open an already existing pipe for reading
     * @throws IOException on non-Windows platforms or if the pipe cannot be
     *         created/opened/connected
     */
    public PipeInputStream(String pipeName, boolean newpipe) throws IOException {
        if (!Platform.isWindows()) {
            throw new IOException("Cannot create Pipe on nonWindows OS");
        }
        String fullPipePath = "\\\\.\\pipe\\" + pipeName;
        if (newpipe) {
            pipe = Kernel32.INSTANCE.CreateNamedPipe(fullPipePath, Kernel32.PIPE_ACCESS_INBOUND, Kernel32.PIPE_TYPE_BYTE, 1, 4096, 4096, 0, null);
            // ConnectNamedPipe blocks until a client attaches.
            if (pipe == null || !Kernel32.INSTANCE.ConnectNamedPipe(pipe, null)) {
                throw new IOException("Cannot connect to the pipe");
            }
        } else {
            pipe = Kernel32.INSTANCE.CreateFile(fullPipePath, Kernel32.GENERIC_READ, Kernel32.FILE_SHARE_READ, null, Kernel32.OPEN_EXISTING, Kernel32.FILE_ATTRIBUTE_NORMAL, null);
        }
        if (pipe == null) {
            throw new IOException("Cannot connect to the pipe");
        }

        // Make sure the OS handle is released even if the caller never
        // closes this stream before the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    close();
                } catch (IOException ex) {
                    //ignore - nothing useful can be done during shutdown
                }
            }
        });
    }

    /** Closes the underlying pipe handle; safe to call more than once. */
    @Override
    public synchronized void close() throws IOException {
        if (!closed) {
            Kernel32.INSTANCE.CloseHandle(pipe);
            closed = true;
        }
    }

    /**
     * Reads a single byte from the pipe, blocking until it arrives.
     *
     * @return the byte as an int in the range 0-255, or -1 at end of stream
     * @throws IOException if the underlying ReadFile call fails
     */
    @Override
    public synchronized int read() throws IOException {
        byte[] d = new byte[1];
        if (readPipe(d) == 0) {
            return -1;
        }
        // BUGFIX: mask to 0..255. Returning d[0] directly sign-extended
        // bytes >= 0x80 into negative values, which violates the
        // InputStream.read() contract (only -1 may be negative, meaning
        // EOF) and corrupted binary data for callers.
        return d[0] & 0xFF;
    }

    /**
     * Blocks until {@code res} has been completely filled from the pipe.
     *
     * @param res destination buffer, filled entirely before returning
     * @return the number of bytes read (always {@code res.length} on success)
     * @throws IOException if the underlying ReadFile call fails
     */
    private int readPipe(byte[] res) throws IOException {
        final IntByReference ibr = new IntByReference();
        int read = 0;
        while (read < res.length) {
            byte[] data = new byte[res.length - read];
            boolean result = Kernel32.INSTANCE.ReadFile(pipe, data, data.length, ibr, null);
            if (!result) {
                // NOTE(review): a broken/closed pipe surfaces here as an
                // IOException rather than as end-of-stream (-1) from
                // read() -- confirm that callers expect that behavior.
                throw new IOException("Cannot read pipe. Error " + Kernel32.INSTANCE.GetLastError());
            }
            int readNow = ibr.getValue();
            System.arraycopy(data, 0, res, read, readNow);
            read += readNow;
        }
        return read;
    }
}
gpl-3.0
jvanz/core
qadevOOo/tests/java/mod/_sc/ScFilterDescriptorBase.java
4522
/*
 * This file is part of the LibreOffice project.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 *
 * This file incorporates work covered by the following license notice:
 *
 *   Licensed to the Apache Software Foundation (ASF) under one or more
 *   contributor license agreements. See the NOTICE file distributed
 *   with this work for additional information regarding copyright
 *   ownership. The ASF licenses this file to you under the Apache
 *   License, Version 2.0 (the "License"); you may not use this file
 *   except in compliance with the License. You may obtain a copy of
 *   the License at http://www.apache.org/licenses/LICENSE-2.0 .
 */

package mod._sc;

import java.io.PrintWriter;

import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
import util.SOfficeFactory;

import com.sun.star.container.XIndexAccess;
import com.sun.star.lang.XComponent;
import com.sun.star.sheet.XSheetFilterable;
import com.sun.star.sheet.XSpreadsheet;
import com.sun.star.sheet.XSpreadsheetDocument;
import com.sun.star.sheet.XSpreadsheets;
import com.sun.star.uno.AnyConverter;
import com.sun.star.uno.Type;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;

/**
 * Test for object which is represented by service
 * <code>com.sun.star.sheet.SheetFilterDescriptor</code>. <p>
 * Object implements the following interfaces :
 * <ul>
 *  <li> <code>com::sun::star::sheet::XSheetFilterDescriptor</code></li>
 *  <li> <code>com::sun::star::sheet::SheetFilterDescriptor</code></li>
 *  <li> <code>com::sun::star::beans::XPropertySet</code></li>
 * </ul>
 * @see com.sun.star.sheet.SheetFilterDescriptor
 * @see com.sun.star.sheet.XSheetFilterDescriptor
 * @see com.sun.star.beans.XPropertySet
 * @see ifc.sheet._XSheetFilterDescriptor
 * @see ifc.sheet._SheetFilterDescriptor
 * @see ifc.beans._XPropertySet
 */
public class ScFilterDescriptorBase extends TestCase {

    // Calc document created in initialize() and disposed in cleanup().
    private XSpreadsheetDocument xSheetDoc = null;

    /**
     * Creates Spreadsheet document.
     */
    @Override
    protected void initialize( TestParameters tParam, PrintWriter log ) throws Exception {
        // get a soffice factory object
        SOfficeFactory SOF = SOfficeFactory.getFactory( tParam.getMSF());
        log.println( "creating a sheetdocument" );
        xSheetDoc = SOF.createCalcDoc(null);
    }

    /**
     * Disposes Spreadsheet document.
     */
    @Override
    protected void cleanup( TestParameters tParam, PrintWriter log ) {
        log.println( " disposing xSheetDoc " );
        XComponent oComp = UnoRuntime.queryInterface (XComponent.class, xSheetDoc);
        util.DesktopTools.closeDoc(oComp);
    }

    /**
     * Creating a TestEnvironment for the interfaces to be tested.
     * Retrieves a collection of spreadsheets from a document
     * and takes one of them. Fills some cells in the spreadsheet.
     * Creates the filter descriptor using the interface
     * <code>XSheetFilterable</code>. This filter descriptor is the instance
     * of the service <code>com.sun.star.sheet.SheetFilterDescriptor</code>.
     * @see com.sun.star.sheet.XSheetFilterable
     * @see com.sun.star.sheet.SheetFilterDescriptor
     */
    @Override
    protected TestEnvironment createTestEnvironment(TestParameters Param,
                                                    PrintWriter log) throws Exception {
        XInterface oObj = null;
        XSheetFilterable SF = null;

        log.println("getting sheets");
        XSpreadsheets xSpreadsheets = xSheetDoc.getSheets();

        log.println("getting a sheet");
        XSpreadsheet oSheet = null;
        XIndexAccess oIndexAccess = UnoRuntime.queryInterface(XIndexAccess.class, xSpreadsheets);
        // take the first sheet of the document
        oSheet = (XSpreadsheet) AnyConverter.toObject(
                new Type(XSpreadsheet.class),oIndexAccess.getByIndex(0));

        log.println("filling some cells");
        // a few sample values (column, row) so the filter has data to act on
        oSheet.getCellByPosition(5, 5).setValue(15);
        oSheet.getCellByPosition(1, 4).setValue(10);
        oSheet.getCellByPosition(2, 0).setValue(-5.15);

        SF = UnoRuntime.queryInterface(XSheetFilterable.class, oSheet);
        // NOTE(review): the boolean argument presumably selects an empty
        // descriptor -- confirm against the XSheetFilterable IDL docs.
        oObj = SF.createFilterDescriptor(true);

        log.println("creating a new environment for object");
        TestEnvironment tEnv = new TestEnvironment(oObj);

        return tEnv;
    } // finish method getTestEnvironment

} // finish class ScFilterDescriptorBase
gpl-3.0
mateor/pdroid
android-2.3.4_r1/tags/1.32/frameworks/base/media/java/android/media/MediaRecorder.java
28594
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.media; import android.media.CamcorderProfile; import android.hardware.Camera; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.util.Log; import android.view.Surface; import java.io.IOException; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileDescriptor; import java.lang.ref.WeakReference; /** * Used to record audio and video. The recording control is based on a * simple state machine (see below). * * <p><img src="{@docRoot}images/mediarecorder_state_diagram.gif" border="0" /> * </p> * * <p>A common case of using MediaRecorder to record audio works as follows: * * <pre>MediaRecorder recorder = new MediaRecorder(); * recorder.setAudioSource(MediaRecorder.AudioSource.MIC); * recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); * recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); * recorder.setOutputFile(PATH_NAME); * recorder.prepare(); * recorder.start(); // Recording is now started * ... * recorder.stop(); * recorder.reset(); // You can reuse the object by going back to setAudioSource() step * recorder.release(); // Now the object cannot be reused * </pre> * * <p>See the <a href="{@docRoot}guide/topics/media/index.html">Audio and Video</a> * documentation for additional help with using MediaRecorder. 
* <p>Note: Currently, MediaRecorder does not work on the emulator. */ public class MediaRecorder { static { System.loadLibrary("media_jni"); native_init(); } private final static String TAG = "MediaRecorder"; // The two fields below are accessed by native methods @SuppressWarnings("unused") private int mNativeContext; @SuppressWarnings("unused") private Surface mSurface; private String mPath; private FileDescriptor mFd; private EventHandler mEventHandler; private OnErrorListener mOnErrorListener; private OnInfoListener mOnInfoListener; /** * Default constructor. */ public MediaRecorder() { Looper looper; if ((looper = Looper.myLooper()) != null) { mEventHandler = new EventHandler(this, looper); } else if ((looper = Looper.getMainLooper()) != null) { mEventHandler = new EventHandler(this, looper); } else { mEventHandler = null; } /* Native setup requires a weak reference to our object. * It's easier to create it here than in C++. */ native_setup(new WeakReference<MediaRecorder>(this)); } /** * Sets a Camera to use for recording. Use this function to switch * quickly between preview and capture mode without a teardown of * the camera object. Must call before prepare(). * * @param c the Camera to use for recording */ public native void setCamera(Camera c); /** * Sets a Surface to show a preview of recorded media (video). Calls this * before prepare() to make sure that the desirable preview display is * set. * * @param sv the Surface to use for the preview */ public void setPreviewDisplay(Surface sv) { mSurface = sv; } /** * Defines the audio source. These constants are used with * {@link MediaRecorder#setAudioSource(int)}. */ public final class AudioSource { /* Do not change these values without updating their counterparts * in include/media/mediarecorder.h! 
*/ private AudioSource() {} public static final int DEFAULT = 0; /** Microphone audio source */ public static final int MIC = 1; /** Voice call uplink (Tx) audio source */ public static final int VOICE_UPLINK = 2; /** Voice call downlink (Rx) audio source */ public static final int VOICE_DOWNLINK = 3; /** Voice call uplink + downlink audio source */ public static final int VOICE_CALL = 4; /** Microphone audio source with same orientation as camera if available, the main * device microphone otherwise */ public static final int CAMCORDER = 5; /** Microphone audio source tuned for voice recognition if available, behaves like * {@link #DEFAULT} otherwise. */ public static final int VOICE_RECOGNITION = 6; /** * @hide * Microphone audio source tuned for voice communications such as VoIP. It * will for instance take advantage of echo cancellation or automatic gain control * if available. It otherwise behaves like {@link #DEFAULT} if no voice processing * is available. */ public static final int VOICE_COMMUNICATION = 7; } /** * Defines the video source. These constants are used with * {@link MediaRecorder#setVideoSource(int)}. */ public final class VideoSource { /* Do not change these values without updating their counterparts * in include/media/mediarecorder.h! */ private VideoSource() {} public static final int DEFAULT = 0; /** Camera video source */ public static final int CAMERA = 1; } /** * Defines the output format. These constants are used with * {@link MediaRecorder#setOutputFormat(int)}. */ public final class OutputFormat { /* Do not change these values without updating their counterparts * in include/media/mediarecorder.h! 
*/ private OutputFormat() {} public static final int DEFAULT = 0; /** 3GPP media file format*/ public static final int THREE_GPP = 1; /** MPEG4 media file format*/ public static final int MPEG_4 = 2; /** The following formats are audio only .aac or .amr formats **/ /** @deprecated Deprecated in favor of AMR_NB */ /** Deprecated in favor of MediaRecorder.OutputFormat.AMR_NB */ /** AMR NB file format */ public static final int RAW_AMR = 3; /** AMR NB file format */ public static final int AMR_NB = 3; /** AMR WB file format */ public static final int AMR_WB = 4; /** @hide AAC ADIF file format */ public static final int AAC_ADIF = 5; /** @hide AAC ADTS file format */ public static final int AAC_ADTS = 6; /** @hide Stream over a socket, limited to a single stream */ public static final int OUTPUT_FORMAT_RTP_AVP = 7; /** @hide H.264/AAC data encapsulated in MPEG2/TS */ public static final int OUTPUT_FORMAT_MPEG2TS = 8; }; /** * Defines the audio encoding. These constants are used with * {@link MediaRecorder#setAudioEncoder(int)}. */ public final class AudioEncoder { /* Do not change these values without updating their counterparts * in include/media/mediarecorder.h! */ private AudioEncoder() {} public static final int DEFAULT = 0; /** AMR (Narrowband) audio codec */ public static final int AMR_NB = 1; /** AMR (Wideband) audio codec */ public static final int AMR_WB = 2; /** AAC audio codec */ public static final int AAC = 3; /** @hide enhanced AAC audio codec */ public static final int AAC_PLUS = 4; /** @hide enhanced AAC plus audio codec */ public static final int EAAC_PLUS = 5; } /** * Defines the video encoding. These constants are used with * {@link MediaRecorder#setVideoEncoder(int)}. */ public final class VideoEncoder { /* Do not change these values without updating their counterparts * in include/media/mediarecorder.h! 
*/ private VideoEncoder() {} public static final int DEFAULT = 0; public static final int H263 = 1; public static final int H264 = 2; public static final int MPEG_4_SP = 3; } /** * Sets the audio source to be used for recording. If this method is not * called, the output file will not contain an audio track. The source needs * to be specified before setting recording-parameters or encoders. Call * this only before setOutputFormat(). * * @param audio_source the audio source to use * @throws IllegalStateException if it is called after setOutputFormat() * @see android.media.MediaRecorder.AudioSource */ public native void setAudioSource(int audio_source) throws IllegalStateException; /** * Gets the maximum value for audio sources. * @see android.media.MediaRecorder.AudioSource */ public static final int getAudioSourceMax() { return AudioSource.VOICE_RECOGNITION; } /** * Sets the video source to be used for recording. If this method is not * called, the output file will not contain an video track. The source needs * to be specified before setting recording-parameters or encoders. Call * this only before setOutputFormat(). * * @param video_source the video source to use * @throws IllegalStateException if it is called after setOutputFormat() * @see android.media.MediaRecorder.VideoSource */ public native void setVideoSource(int video_source) throws IllegalStateException; /** * Uses the settings from a CamcorderProfile object for recording. This method should * be called after the video AND audio sources are set, and before setOutputFile(). 
* * @param profile the CamcorderProfile to use * @see android.media.CamcorderProfile */ public void setProfile(CamcorderProfile profile) { setOutputFormat(profile.fileFormat); setVideoFrameRate(profile.videoFrameRate); setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight); setVideoEncodingBitRate(profile.videoBitRate); setAudioEncodingBitRate(profile.audioBitRate); setAudioChannels(profile.audioChannels); setAudioSamplingRate(profile.audioSampleRate); setVideoEncoder(profile.videoCodec); setAudioEncoder(profile.audioCodec); } /** * Sets the orientation hint for output video playback. * This method should be called before prepare(). This method will not * trigger the source video frame to rotate during video recording, but to * add a composition matrix containing the rotation angle in the output * video if the output format is OutputFormat.THREE_GPP or * OutputFormat.MPEG_4 so that a video player can choose the proper * orientation for playback. Note that some video players may choose * to ignore the compostion matrix in a video during playback. * * @param degrees the angle to be rotated clockwise in degrees. * The supported angles are 0, 90, 180, and 270 degrees. * @throws IllegalArgumentException if the angle is not supported. * */ public void setOrientationHint(int degrees) { if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) { throw new IllegalArgumentException("Unsupported angle: " + degrees); } setParameter(String.format("video-param-rotation-angle-degrees=%d", degrees)); } /** * Sets the format of the output file produced during recording. Call this * after setAudioSource()/setVideoSource() but before prepare(). * * <p>It is recommended to always use 3GP format when using the H.263 * video encoder and AMR audio encoder. Using an MPEG-4 container format * may confuse some desktop players.</p> * * @param output_format the output format to use. 
The output format * needs to be specified before setting recording-parameters or encoders. * @throws IllegalStateException if it is called after prepare() or before * setAudioSource()/setVideoSource(). * @see android.media.MediaRecorder.OutputFormat */ public native void setOutputFormat(int output_format) throws IllegalStateException; /** * Sets the width and height of the video to be captured. Must be called * after setVideoSource(). Call this after setOutFormat() but before * prepare(). * * @param width the width of the video to be captured * @param height the height of the video to be captured * @throws IllegalStateException if it is called after * prepare() or before setOutputFormat() */ public native void setVideoSize(int width, int height) throws IllegalStateException; /** * Sets the frame rate of the video to be captured. Must be called * after setVideoSource(). Call this after setOutFormat() but before * prepare(). * * @param rate the number of frames per second of video to capture * @throws IllegalStateException if it is called after * prepare() or before setOutputFormat(). * * NOTE: On some devices that have auto-frame rate, this sets the * maximum frame rate, not a constant frame rate. Actual frame rate * will vary according to lighting conditions. */ public native void setVideoFrameRate(int rate) throws IllegalStateException; /** * Sets the maximum duration (in ms) of the recording session. * Call this after setOutFormat() but before prepare(). * After recording reaches the specified duration, a notification * will be sent to the {@link android.media.MediaRecorder.OnInfoListener} * with a "what" code of {@link #MEDIA_RECORDER_INFO_MAX_DURATION_REACHED} * and recording will be stopped. Stopping happens asynchronously, there * is no guarantee that the recorder will have stopped by the time the * listener is notified. 
* * @param max_duration_ms the maximum duration in ms (if zero or negative, disables the duration limit) * */ public native void setMaxDuration(int max_duration_ms) throws IllegalArgumentException; /** * Sets the maximum filesize (in bytes) of the recording session. * Call this after setOutFormat() but before prepare(). * After recording reaches the specified filesize, a notification * will be sent to the {@link android.media.MediaRecorder.OnInfoListener} * with a "what" code of {@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED} * and recording will be stopped. Stopping happens asynchronously, there * is no guarantee that the recorder will have stopped by the time the * listener is notified. * * @param max_filesize_bytes the maximum filesize in bytes (if zero or negative, disables the limit) * */ public native void setMaxFileSize(long max_filesize_bytes) throws IllegalArgumentException; /** * Sets the audio encoder to be used for recording. If this method is not * called, the output file will not contain an audio track. Call this after * setOutputFormat() but before prepare(). * * @param audio_encoder the audio encoder to use. * @throws IllegalStateException if it is called before * setOutputFormat() or after prepare(). * @see android.media.MediaRecorder.AudioEncoder */ public native void setAudioEncoder(int audio_encoder) throws IllegalStateException; /** * Sets the video encoder to be used for recording. If this method is not * called, the output file will not contain an video track. Call this after * setOutputFormat() and before prepare(). * * @param video_encoder the video encoder to use. * @throws IllegalStateException if it is called before * setOutputFormat() or after prepare() * @see android.media.MediaRecorder.VideoEncoder */ public native void setVideoEncoder(int video_encoder) throws IllegalStateException; /** * Sets the audio sampling rate for recording. Call this method before prepare(). 
* Prepare() may perform additional checks on the parameter to make sure whether * the specified audio sampling rate is applicable. The sampling rate really depends * on the format for the audio recording, as well as the capabilities of the platform. * For instance, the sampling rate supported by AAC audio coding standard ranges * from 8 to 96 kHz. Please consult with the related audio coding standard for the * supported audio sampling rate. * * @param samplingRate the sampling rate for audio in samples per second. */ public void setAudioSamplingRate(int samplingRate) { if (samplingRate <= 0) { throw new IllegalArgumentException("Audio sampling rate is not positive"); } setParameter(String.format("audio-param-sampling-rate=%d", samplingRate)); } /** * Sets the number of audio channels for recording. Call this method before prepare(). * Prepare() may perform additional checks on the parameter to make sure whether the * specified number of audio channels are applicable. * * @param numChannels the number of audio channels. Usually it is either 1 (mono) or 2 * (stereo). */ public void setAudioChannels(int numChannels) { if (numChannels <= 0) { throw new IllegalArgumentException("Number of channels is not positive"); } setParameter(String.format("audio-param-number-of-channels=%d", numChannels)); } /** * Sets the audio encoding bit rate for recording. Call this method before prepare(). * Prepare() may perform additional checks on the parameter to make sure whether the * specified bit rate is applicable, and sometimes the passed bitRate will be clipped * internally to ensure the audio recording can proceed smoothly based on the * capabilities of the platform. * * @param bitRate the audio encoding bit rate in bits per second. 
*/ public void setAudioEncodingBitRate(int bitRate) { if (bitRate <= 0) { throw new IllegalArgumentException("Audio encoding bit rate is not positive"); } setParameter(String.format("audio-param-encoding-bitrate=%d", bitRate)); } /** * Sets the video encoding bit rate for recording. Call this method before prepare(). * Prepare() may perform additional checks on the parameter to make sure whether the * specified bit rate is applicable, and sometimes the passed bitRate will be * clipped internally to ensure the video recording can proceed smoothly based on * the capabilities of the platform. * * @param bitRate the video encoding bit rate in bits per second. */ public void setVideoEncodingBitRate(int bitRate) { if (bitRate <= 0) { throw new IllegalArgumentException("Video encoding bit rate is not positive"); } setParameter(String.format("video-param-encoding-bitrate=%d", bitRate)); } /** * Pass in the file descriptor of the file to be written. Call this after * setOutputFormat() but before prepare(). * * @param fd an open file descriptor to be written into. * @throws IllegalStateException if it is called before * setOutputFormat() or after prepare() */ public void setOutputFile(FileDescriptor fd) throws IllegalStateException { mPath = null; mFd = fd; } /** * Sets the path of the output file to be produced. Call this after * setOutputFormat() but before prepare(). * * @param path The pathname to use. * @throws IllegalStateException if it is called before * setOutputFormat() or after prepare() */ public void setOutputFile(String path) throws IllegalStateException { mFd = null; mPath = path; } // native implementation private native void _setOutputFile(FileDescriptor fd, long offset, long length) throws IllegalStateException, IOException; private native void _prepare() throws IllegalStateException, IOException; /** * Prepares the recorder to begin capturing and encoding data. 
This method * must be called after setting up the desired audio and video sources, * encoders, file format, etc., but before start(). * * @throws IllegalStateException if it is called after * start() or before setOutputFormat(). * @throws IOException if prepare fails otherwise. */ public void prepare() throws IllegalStateException, IOException { if (mPath != null) { FileOutputStream fos = new FileOutputStream(mPath); try { _setOutputFile(fos.getFD(), 0, 0); } finally { fos.close(); } } else if (mFd != null) { _setOutputFile(mFd, 0, 0); } else { throw new IOException("No valid output file"); } _prepare(); } /** * Begins capturing and encoding data to the file specified with * setOutputFile(). Call this after prepare(). * * @throws IllegalStateException if it is called before * prepare(). */ public native void start() throws IllegalStateException; /** * Stops recording. Call this after start(). Once recording is stopped, * you will have to configure it again as if it has just been constructed. * * @throws IllegalStateException if it is called before start() */ public native void stop() throws IllegalStateException; /** * Restarts the MediaRecorder to its idle state. After calling * this method, you will have to configure it again as if it had just been * constructed. */ public void reset() { native_reset(); // make sure none of the listeners get called anymore mEventHandler.removeCallbacksAndMessages(null); } private native void native_reset(); /** * Returns the maximum absolute amplitude that was sampled since the last * call to this method. Call this only after the setAudioSource(). * * @return the maximum absolute amplitude measured since the last call, or * 0 when called for the first time * @throws IllegalStateException if it is called before * the audio source has been set. */ public native int getMaxAmplitude() throws IllegalStateException; /* Do not change this value without updating its counterpart * in include/media/mediarecorder.h! 
*/ /** Unspecified media recorder error. * @see android.media.MediaRecorder.OnErrorListener */ public static final int MEDIA_RECORDER_ERROR_UNKNOWN = 1; /** * Interface definition for a callback to be invoked when an error * occurs while recording. */ public interface OnErrorListener { /** * Called when an error occurs while recording. * * @param mr the MediaRecorder that encountered the error * @param what the type of error that has occurred: * <ul> * <li>{@link #MEDIA_RECORDER_ERROR_UNKNOWN} * </ul> * @param extra an extra code, specific to the error type */ void onError(MediaRecorder mr, int what, int extra); } /** * Register a callback to be invoked when an error occurs while * recording. * * @param l the callback that will be run */ public void setOnErrorListener(OnErrorListener l) { mOnErrorListener = l; } /* Do not change these values without updating their counterparts * in include/media/mediarecorder.h! */ /** Unspecified media recorder error. * @see android.media.MediaRecorder.OnInfoListener */ public static final int MEDIA_RECORDER_INFO_UNKNOWN = 1; /** A maximum duration had been setup and has now been reached. * @see android.media.MediaRecorder.OnInfoListener */ public static final int MEDIA_RECORDER_INFO_MAX_DURATION_REACHED = 800; /** A maximum filesize had been setup and has now been reached. * @see android.media.MediaRecorder.OnInfoListener */ public static final int MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED = 801; /** * Interface definition for a callback to be invoked when an error * occurs while recording. */ public interface OnInfoListener { /** * Called when an error occurs while recording. 
* * @param mr the MediaRecorder that encountered the error * @param what the type of error that has occurred: * <ul> * <li>{@link #MEDIA_RECORDER_INFO_UNKNOWN} * <li>{@link #MEDIA_RECORDER_INFO_MAX_DURATION_REACHED} * <li>{@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED} * </ul> * @param extra an extra code, specific to the error type */ void onInfo(MediaRecorder mr, int what, int extra); } /** * Register a callback to be invoked when an informational event occurs while * recording. * * @param listener the callback that will be run */ public void setOnInfoListener(OnInfoListener listener) { mOnInfoListener = listener; } private class EventHandler extends Handler { private MediaRecorder mMediaRecorder; public EventHandler(MediaRecorder mr, Looper looper) { super(looper); mMediaRecorder = mr; } /* Do not change these values without updating their counterparts * in include/media/mediarecorder.h! */ private static final int MEDIA_RECORDER_EVENT_ERROR = 1; private static final int MEDIA_RECORDER_EVENT_INFO = 2; @Override public void handleMessage(Message msg) { if (mMediaRecorder.mNativeContext == 0) { Log.w(TAG, "mediarecorder went away with unhandled events"); return; } switch(msg.what) { case MEDIA_RECORDER_EVENT_ERROR: if (mOnErrorListener != null) mOnErrorListener.onError(mMediaRecorder, msg.arg1, msg.arg2); return; case MEDIA_RECORDER_EVENT_INFO: if (mOnInfoListener != null) mOnInfoListener.onInfo(mMediaRecorder, msg.arg1, msg.arg2); return; default: Log.e(TAG, "Unknown message type " + msg.what); return; } } } /** * Called from native code when an interesting event happens. This method * just uses the EventHandler system to post the event back to the main app thread. * We use a weak reference to the original MediaRecorder object so that the native * code is safe from the object disappearing from underneath it. (This is * the cookie passed to native_setup().) 
*/ private static void postEventFromNative(Object mediarecorder_ref, int what, int arg1, int arg2, Object obj) { MediaRecorder mr = (MediaRecorder)((WeakReference)mediarecorder_ref).get(); if (mr == null) { return; } if (mr.mEventHandler != null) { Message m = mr.mEventHandler.obtainMessage(what, arg1, arg2, obj); mr.mEventHandler.sendMessage(m); } } /** * Releases resources associated with this MediaRecorder object. * It is good practice to call this method when you're done * using the MediaRecorder. */ public native void release(); private static native final void native_init(); private native final void native_setup(Object mediarecorder_this) throws IllegalStateException; private native final void native_finalize(); private native void setParameter(String nameValuePair); @Override protected void finalize() { native_finalize(); } }
gpl-3.0
acenode/jpexs-decompiler
src/com/jpexs/decompiler/flash/gui/MyProgressBarUI.java
3726
/*
 *  Copyright (C) 2010-2015 JPEXS
 *
 *  This program is free software: you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation, either version 3 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package com.jpexs.decompiler.flash.gui;

import javax.swing.CellRendererPane;
import javax.swing.JComponent;
import javax.swing.JProgressBar;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.ComponentUI;
import org.pushingpixels.substance.internal.ui.SubstanceProgressBarUI;
import org.pushingpixels.substance.internal.utils.SubstanceCoreUtilities;

/**
 * Progress bar UI delegate that disables Substance's animated value
 * transitions: on every value change the displayed value jumps straight to
 * the model value and the bar is repainted immediately. The original
 * Timeline-based animation code is kept below, commented out, to show what
 * was removed.
 * <p>
 * NOTE(review): {@code progressBar}, {@code margin}, {@code displayedValue}
 * and {@code substanceValueChangeListener} are inherited protected state of
 * {@link SubstanceProgressBarUI} — confirm against the Substance version in
 * use if upgrading the library.
 *
 * @author JPEXS
 */
public class MyProgressBarUI extends SubstanceProgressBarUI {

    /**
     * Replacement for Substance's own change listener. Instead of starting an
     * animation timeline, it copies the model value into
     * {@code displayedValue} and repaints synchronously.
     */
    private final class MySubstanceChangeListener implements ChangeListener {

        @Override
        public void stateChanged(ChangeEvent e) {
            SubstanceCoreUtilities.testComponentStateChangeThreadingViolation(progressBar);
            //if (displayTimeline != null) { //Main Change - this should be first
            //    displayTimeline.abort();
            //}
            int currValue = progressBar.getValue();
            int span = progressBar.getMaximum() - progressBar.getMinimum();
            int barRectWidth = progressBar.getWidth() - 2 * margin;
            int barRectHeight = progressBar.getHeight() - 2 * margin;
            int totalPixels = (progressBar.getOrientation() == JProgressBar.HORIZONTAL)
                    ? barRectWidth : barRectHeight;
            // pixelDelta fed the old "animate only for big jumps" heuristic;
            // it is now only used by the commented-out animation path below.
            int pixelDelta = (span <= 0) ? 0
                    : (currValue - displayedValue) * totalPixels / span;
            /*displayTimeline = new Timeline(progressBar);
            displayTimeline.addPropertyToInterpolate(Timeline
                    .<Integer>property("displayedValue").from(displayedValue)
                    .to(currValue).setWith(new TimelinePropertyBuilder.PropertySetter<Integer>() {
                        @Override
                        public void set(Object obj, String fieldName, Integer value) {
                            displayedValue = value;
                            progressBar.repaint();
                        }
                    }));
            displayTimeline.setEase(new Spline(0.4f));
            AnimationConfigurationManager.getInstance().configureTimeline(
                    displayTimeline);*/
            // Likewise only referenced by the disabled animation condition.
            boolean isInCellRenderer = (SwingUtilities.getAncestorOfClass(
                    CellRendererPane.class, progressBar) != null);
            //if (false) {//currValue > 0 && !isInCellRenderer && Math.abs(pixelDelta) > 5) {
            //    displayTimeline.play();
            //} else {
            // Jump directly to the new value — no animation.
            displayedValue = currValue;
            progressBar.repaint();
            //}
        }
    }

    /**
     * Factory entry point used by Swing's UI-delegate machinery
     * ({@code ComponentUI.createUI} convention).
     *
     * @param comp the progress bar the delegate is being created for
     * @return a fresh {@code MyProgressBarUI}
     */
    public static ComponentUI createUI(JComponent comp) {
        SubstanceCoreUtilities.testComponentCreationThreadingViolation(comp);
        return new MyProgressBarUI();
    }

    /**
     * Installs listeners, then swaps the superclass's value-change listener
     * for {@link MySubstanceChangeListener}. Relies on
     * {@code super.installListeners()} having already registered
     * {@code substanceValueChangeListener} so it can be removed here.
     */
    @Override
    protected void installListeners() {
        super.installListeners();
        this.progressBar.removeChangeListener(substanceValueChangeListener);
        this.substanceValueChangeListener = new MySubstanceChangeListener();
        this.progressBar.addChangeListener(this.substanceValueChangeListener);
    }
}
gpl-3.0
layely/focus-android
app/src/main/java/org/mozilla/focus/firstrun/FirstrunCardView.java
2215
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package org.mozilla.focus.firstrun; import android.content.Context; import android.content.res.Resources; import android.support.v7.widget.CardView; import android.util.AttributeSet; import org.mozilla.focus.R; public class FirstrunCardView extends CardView { private int maxWidth; private int maxHeight; public FirstrunCardView(Context context) { super(context); init(); } public FirstrunCardView(Context context, AttributeSet attrs) { super(context, attrs); init(); } public FirstrunCardView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(); } private void init() { final Resources resources = getResources(); maxWidth = resources.getDimensionPixelSize(R.dimen.firstrun_card_width); maxHeight = resources.getDimensionPixelSize(R.dimen.firstrun_card_height); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { // The view is set to match_parent in the layout file. So width and height should be the // value needed to fill the whole parent view. final int availableWidth = MeasureSpec.getSize(widthMeasureSpec); final int availableHeight = MeasureSpec.getSize(heightMeasureSpec); // Now let's use those sizes to measure - but let's not exceed our defined max sizes (We do // not want to have gigantic cards on large devices like tablets.) final int measuredWidth = Math.min(availableWidth, maxWidth); final int measuredHeight = Math.min(availableHeight, maxHeight); // Let's use the measured values to hand them to the super class to measure the child views etc. widthMeasureSpec = MeasureSpec.makeMeasureSpec(measuredWidth, MeasureSpec.EXACTLY); heightMeasureSpec = MeasureSpec.makeMeasureSpec(measuredHeight, MeasureSpec.EXACTLY); super.onMeasure(widthMeasureSpec, heightMeasureSpec); } }
mpl-2.0
anhnv-3991/VoltDB
examples/uniquedevices/hyperloglogsrc/org/voltdb/hll/RegisterSet.java
4561
/* This file is part of VoltDB.
 * Copyright (C) 2008-2015 VoltDB Inc.
 *
 * This file contains original code and/or modifications of original code.
 * Any modifications made by VoltDB Inc. are licensed under the following
 * terms and conditions:
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

/*
 * Copyright (C) 2012 Clearspring Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* This code was originally sourced from https://github.com/addthis/stream-lib in December 2014.
 */

package org.voltdb.hll;

/**
 * A fixed-width register file backed by a byte array. Each register is
 * {@link #REGISTER_SIZE} (5) bits wide; registers are packed contiguously,
 * least-significant bit first, straddling byte boundaries where necessary.
 */
public class RegisterSet {

    /** Width of a single register, in bits. */
    public final static int REGISTER_SIZE = 5;

    // Packed register storage: bit i of the overall bit stream lives in
    // bytes[i / 8] at bit position (i % 8).
    private final byte[] bytes;

    // Flipped to true the first time any register is written.
    private boolean dirty = false;

    /**
     * Creates a register set with room for {@code count} registers, rounding
     * the backing array up to a whole number of bytes.
     *
     * @param count the number of registers to allocate
     */
    public RegisterSet(int count) {
        final int bitsNeeded = count * REGISTER_SIZE;
        // Grab one extra byte when the registers don't end on a byte boundary.
        final int wholeBytes = bitsNeeded / 8;
        this.bytes = new byte[(bitsNeeded % 8) == 0 ? wholeBytes : wholeBytes + 1];
    }

    /**
     * Wraps an existing serialized byte array. The array is used directly,
     * not copied.
     *
     * @param bytes packed register storage to adopt
     */
    public RegisterSet(byte[] bytes) {
        this.bytes = bytes;
    }

    /**
     * @return the number of whole registers that fit in the backing array
     *         (may exceed the count requested at construction due to
     *         rounding up to whole bytes)
     */
    public int count() {
        final int totalBits = bytes.length * 8;
        return totalBits / REGISTER_SIZE;
    }

    /** @return the size of the backing array, in bytes */
    public int size() {
        return bytes.length;
    }

    /** @return true iff any register has been written since construction */
    public boolean getDirty() {
        return dirty;
    }

    /**
     * Reads a single bit of the packed stream.
     *
     * @param bitPos absolute bit position
     * @return 0 or 1
     */
    int getBit(final int bitPos) {
        final int byteIndex = bitPos >>> 3; // bitPos / 8
        final int bitOffset = bitPos & 7;   // bitPos % 8
        return (bytes[byteIndex] >>> bitOffset) & 1;
    }

    /**
     * Writes a single bit of the packed stream.
     *
     * @param bitPos absolute bit position
     * @param value  any positive value sets the bit; zero or negative clears it
     */
    void setBit(int bitPos, int value) {
        final int byteIndex = bitPos >>> 3; // bitPos / 8
        final int mask = 1 << (bitPos & 7); // selects bit (bitPos % 8)
        if (value > 0) {
            bytes[byteIndex] |= mask;
        } else {
            bytes[byteIndex] &= ~mask;
        }
    }

    /**
     * Stores {@code value} into the register at {@code position},
     * overwriting its previous contents and marking the set dirty.
     *
     * @param position register index, {@code 0 <= position < count()}
     * @param value    new value, {@code 0 <= value < 2^REGISTER_SIZE}
     */
    public void set(int position, int value) {
        assert (position < count());
        assert (value >= 0);
        assert (value < (1 << REGISTER_SIZE));

        dirty = true;
        final int base = position * REGISTER_SIZE;
        // Copy the value bit by bit, LSB first.
        for (int bit = 0; bit < REGISTER_SIZE; bit++) {
            setBit(base + bit, value & (1 << bit));
        }
    }

    /**
     * Reads the register at {@code position}.
     *
     * @param position register index, {@code 0 <= position < count()}
     * @return the stored value in [0, 2^REGISTER_SIZE)
     */
    public int get(final int position) {
        assert (position < count());
        final int base = position * REGISTER_SIZE;
        int result = 0;
        // Reassemble the value bit by bit, LSB first.
        for (int bit = 0; bit < REGISTER_SIZE; bit++) {
            result |= getBit(base + bit) << bit;
        }
        return result;
    }

    /**
     * Stores {@code value} only if it exceeds the register's current value
     * (the HyperLogLog "keep the maximum" update).
     *
     * @param position register index
     * @param value    candidate value
     * @return true if the register was updated
     */
    public boolean updateIfGreater(int position, int value) {
        assert (position < count());
        assert (value >= 0);
        assert (value < (1 << REGISTER_SIZE));

        final int current = get(position);
        if (value <= current) {
            return false;
        }
        set(position, value);
        return true;
    }

    /**
     * Merges another register set of identical size into this one by taking
     * the register-wise maximum.
     *
     * @param that the register set to merge in; must be the same size
     */
    public void merge(RegisterSet that) {
        assert (that.size() == this.size());
        final int registers = count();
        for (int i = 0; i < registers; i++) {
            final int mine = this.get(i);
            final int theirs = that.get(i);
            // Only write (and thus only mark dirty) on a strict increase,
            // matching the original behavior.
            if (mine < theirs) {
                set(i, theirs);
            }
        }
    }

    /** @return the backing array, unserialized and uncopied */
    byte[] toBytes() {
        return bytes;
    }
}
agpl-3.0
nguyentienlong/community-edition
projects/repository/source/java/org/alfresco/service/cmr/transfer/TransferEventSentContent.java
1010
/*
 * Copyright (C) 2005-2010 Alfresco Software Limited.
 *
 * This file is part of Alfresco
 *
 * Alfresco is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Alfresco is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
 */
package org.alfresco.service.cmr.transfer;

import org.alfresco.repo.transfer.TransferEventImpl;

/**
 * A {@link TransferEvent} signalling that content has been sent.
 * <p>
 * This is a pure marker type: it declares no members of its own, so all
 * event state and behavior come from {@link TransferEventImpl}. The concrete
 * class itself identifies the kind of event to listeners.
 */
public class TransferEventSentContent extends TransferEventImpl implements TransferEvent
{

}
lgpl-3.0
deepakddixit/incubator-geode
geode-core/src/main/java/org/apache/geode/distributed/internal/membership/gms/interfaces/Locator.java
1400
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.distributed.internal.membership.gms.interfaces;

import org.apache.geode.distributed.internal.membership.NetView;

/**
 * The Locator interface allows member services to interact with the Locator TcpHandler component of
 * Geode's locator. The Locator handler's lifecycle is not controlled by member services.
 */
public interface Locator {

  /**
   * Called when a new membership view is installed by Membership.
   *
   * @param v the newly installed membership view
   */
  void installView(NetView v);

  /**
   * Informs the locator whether this process is the membership coordinator.
   *
   * @param isCoordinator true if this process is becoming the membership coordinator
   */
  void setIsCoordinator(boolean isCoordinator);
}
apache-2.0
IBM-Bluemix/controller-kinect-bluemix
src/main/java/org/openkinect/freenect/LogHandler.java
1215
/**
 * This file is part of the OpenKinect Project. http://www.openkinect.org
 *
 * Copyright (c) 2010 individual OpenKinect contributors. See the CONTRIB file
 * for details.
 *
 * This code is licensed to you under the terms of the Apache License, version
 * 2.0, or, at your option, the terms of the GNU General Public License,
 * version 2.0. See the APACHE20 and GPL20 files for the text of the licenses,
 * or the following URLs:
 * http://www.apache.org/licenses/LICENSE-2.0
 * http://www.gnu.org/licenses/gpl-2.0.txt
 *
 * If you redistribute this file in source form, modified or unmodified,
 * you may:
 *   1) Leave this header intact and distribute it under the same terms,
 *      accompanying it with the APACHE20 and GPL20 files, or
 *   2) Delete the Apache 2.0 clause and accompany it with the GPL20 file, or
 *   3) Delete the GPL v2.0 clause and accompany it with the APACHE20 file
 * In all cases you must keep the copyright notice intact and include a copy
 * of the CONTRIB file.
 * Binary distributions must follow the binary distribution requirements of
 * either License.
 */
package org.openkinect.freenect;

/**
 * Callback interface for receiving log messages associated with a
 * {@link Device}.
 * <p>
 * NOTE(review): presumably invoked by the native freenect logging bridge —
 * confirm threading/ordering guarantees against the caller before relying
 * on them.
 */
public interface LogHandler {

    /**
     * Invoked for each log message.
     *
     * @param dev   the device the message relates to
     * @param level severity of the message
     * @param msg   the log message text
     */
    void onMessage(Device dev, LogLevel level, String msg);
}
apache-2.0
WANdisco/amplab-hive
ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
16218
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.parse; import java.io.IOException; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.InvalidTableException; import org.apache.hadoop.hive.ql.metadata.Table; /** * ColumnStatsSemanticAnalyzer. * Handles semantic analysis and rewrite for gathering column statistics both at the level of a * partition and a table. Note that table statistics are implemented in SemanticAnalyzer. 
* */ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer { private static final Log LOG = LogFactory .getLog(ColumnStatsSemanticAnalyzer.class); private ASTNode originalTree; private ASTNode rewrittenTree; private String rewrittenQuery; private Context ctx; private boolean isRewritten; private boolean isTableLevel; private List<String> colNames; private List<String> colType; private Table tbl; public ColumnStatsSemanticAnalyzer(HiveConf conf) throws SemanticException { super(conf); } private boolean shouldRewrite(ASTNode tree) { boolean rwt = false; if (tree.getChildCount() > 1) { ASTNode child0 = (ASTNode) tree.getChild(0); ASTNode child1; if (child0.getToken().getType() == HiveParser.TOK_TAB) { child0 = (ASTNode) child0.getChild(0); if (child0.getToken().getType() == HiveParser.TOK_TABNAME) { child1 = (ASTNode) tree.getChild(1); if (child1.getToken().getType() == HiveParser.KW_COLUMNS) { rwt = true; } } } } return rwt; } private boolean isPartitionLevelStats(ASTNode tree) { boolean isPartitioned = false; ASTNode child = (ASTNode) tree.getChild(0); if (child.getChildCount() > 1) { child = (ASTNode) child.getChild(1); if (child.getToken().getType() == HiveParser.TOK_PARTSPEC) { isPartitioned = true; } } return isPartitioned; } private Table getTable(ASTNode tree) throws SemanticException { String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0)); try { return db.getTable(tableName); } catch (InvalidTableException e) { throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e); } catch (HiveException e) { throw new SemanticException(e.getMessage(), e); } } private Map<String,String> getPartKeyValuePairsFromAST(ASTNode tree) { ASTNode child = ((ASTNode) tree.getChild(0).getChild(1)); Map<String,String> partSpec = new HashMap<String, String>(); if (null == child) { // case of analyze table T compute statistics for columns; return partSpec; } String partKey; String partValue; for (int i = 0; i < child.getChildCount(); i++) 
{ partKey = new String(getUnescapedName((ASTNode) child.getChild(i).getChild(0))).toLowerCase(); if (child.getChild(i).getChildCount() > 1) { partValue = new String(getUnescapedName((ASTNode) child.getChild(i).getChild(1))); partValue = partValue.replaceAll("'", ""); } else { partValue = null; } partSpec.put(partKey, partValue); } return partSpec; } private List<String> getColumnName(ASTNode tree) throws SemanticException{ switch (tree.getChildCount()) { case 2: return Utilities.getColumnNamesFromFieldSchema(tbl.getCols()); case 3: int numCols = tree.getChild(2).getChildCount(); List<String> colName = new LinkedList<String>(); for (int i = 0; i < numCols; i++) { colName.add(i, new String(getUnescapedName((ASTNode) tree.getChild(2).getChild(i)))); } return colName; default: throw new SemanticException("Internal error. Expected number of children of ASTNode to be" + " either 2 or 3. Found : " + tree.getChildCount()); } } private void handlePartialPartitionSpec(Map<String,String> partSpec) throws SemanticException { // If user has fully specified partition, validate that partition exists int partValsSpecified = 0; for (String partKey : partSpec.keySet()) { partValsSpecified += partSpec.get(partKey) == null ? 
0 : 1; } try { if ((partValsSpecified == tbl.getPartitionKeys().size()) && (db.getPartition(tbl, partSpec, false, null, false) == null)) { throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_INVALID_PARTITION.getMsg() + " : " + partSpec); } } catch (HiveException he) { throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_INVALID_PARTITION.getMsg() + " : " + partSpec); } // User might have only specified partial list of partition keys, in which case add other partition keys in partSpec List<String> partKeys = Utilities.getColumnNamesFromFieldSchema(tbl.getPartitionKeys()); for (String partKey : partKeys){ if(!partSpec.containsKey(partKey)) { partSpec.put(partKey, null); } } // Check if user have erroneously specified non-existent partitioning columns for (String partKey : partSpec.keySet()) { if(!partKeys.contains(partKey)){ throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_INVALID_PART_KEY.getMsg() + " : " + partKey); } } } private StringBuilder genPartitionClause(Map<String,String> partSpec) throws SemanticException { StringBuilder whereClause = new StringBuilder(" where "); boolean predPresent = false; StringBuilder groupByClause = new StringBuilder(" group by "); boolean aggPresent = false; for (String partKey : partSpec.keySet()) { String value; if ((value = partSpec.get(partKey)) != null) { if (!predPresent) { predPresent = true; } else { whereClause.append(" and "); } whereClause.append(partKey); whereClause.append(" = "); if (getColTypeOf(partKey).equalsIgnoreCase("string")) { whereClause.append("'"); } whereClause.append(value); if (getColTypeOf(partKey).equalsIgnoreCase("string")) { whereClause.append("'"); } } } for (FieldSchema fs : tbl.getPartitionKeys()) { if (!aggPresent) { aggPresent = true; } else { groupByClause.append(","); } groupByClause.append(fs.getName()); } // attach the predicate and group by to the return clause return predPresent ? 
whereClause.append(groupByClause) : groupByClause; } private String getColTypeOf (String partKey) throws SemanticException{ for (FieldSchema fs : tbl.getPartitionKeys()) { if (partKey.equalsIgnoreCase(fs.getName())) { return fs.getType(); } } throw new SemanticException ("Unknown partition key : " + partKey); } private int getNumBitVectorsForNDVEstimation(HiveConf conf) throws SemanticException { int numBitVectors; float percentageError = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVE_STATS_NDV_ERROR); if (percentageError < 0.0) { throw new SemanticException("hive.stats.ndv.error can't be negative"); } else if (percentageError <= 2.4) { numBitVectors = 1024; LOG.info("Lowest error achievable is 2.4% but error requested is " + percentageError + "%"); LOG.info("Choosing 1024 bit vectors.."); } else if (percentageError <= 3.4 ) { numBitVectors = 1024; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 1024 bit vectors.."); } else if (percentageError <= 4.8) { numBitVectors = 512; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 512 bit vectors.."); } else if (percentageError <= 6.8) { numBitVectors = 256; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 256 bit vectors.."); } else if (percentageError <= 9.7) { numBitVectors = 128; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 128 bit vectors.."); } else if (percentageError <= 13.8) { numBitVectors = 64; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 64 bit vectors.."); } else if (percentageError <= 19.6) { numBitVectors = 32; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 32 bit vectors.."); } else if (percentageError <= 28.2) { numBitVectors = 16; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 16 bit vectors.."); } else if (percentageError <= 40.9) { numBitVectors = 8; LOG.info("Error requested is " + percentageError + 
"%"); LOG.info("Choosing 8 bit vectors.."); } else if (percentageError <= 61.0) { numBitVectors = 4; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 4 bit vectors.."); } else { numBitVectors = 2; LOG.info("Error requested is " + percentageError + "%"); LOG.info("Choosing 2 bit vectors.."); } return numBitVectors; } private List<String> getColumnTypes(List<String> colNames) throws SemanticException{ List<String> colTypes = new LinkedList<String>(); List<FieldSchema> cols = tbl.getCols(); for (String colName : colNames) { for (FieldSchema col: cols) { if (colName.equalsIgnoreCase(col.getName())) { colTypes.add(new String(col.getType())); } } } return colTypes; } private String genRewrittenQuery(List<String> colNames, int numBitVectors, Map<String,String> partSpec, boolean isPartitionStats) throws SemanticException{ StringBuilder rewrittenQueryBuilder = new StringBuilder("select "); String rewrittenQuery; for (int i = 0; i < colNames.size(); i++) { if (i > 0) { rewrittenQueryBuilder.append(" , "); } rewrittenQueryBuilder.append("compute_stats("); rewrittenQueryBuilder.append(colNames.get(i)); rewrittenQueryBuilder.append(" , "); rewrittenQueryBuilder.append(numBitVectors); rewrittenQueryBuilder.append(" )"); } if (isPartitionStats) { for (FieldSchema fs : tbl.getPartCols()) { rewrittenQueryBuilder.append(" , " + fs.getName()); } } rewrittenQueryBuilder.append(" from "); rewrittenQueryBuilder.append(tbl.getTableName()); isRewritten = true; // If partition level statistics is requested, add predicate and group by as needed to rewritten // query if (isPartitionStats) { rewrittenQueryBuilder.append(genPartitionClause(partSpec)); } rewrittenQuery = rewrittenQueryBuilder.toString(); rewrittenQuery = new VariableSubstitution().substitute(conf, rewrittenQuery); return rewrittenQuery; } private ASTNode genRewrittenTree(String rewrittenQuery) throws SemanticException { ASTNode rewrittenTree; // Parse the rewritten query string try { ctx = new 
Context(conf); } catch (IOException e) { throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_IO_ERROR.getMsg()); } ctx.setCmd(rewrittenQuery); ParseDriver pd = new ParseDriver(); try { rewrittenTree = pd.parse(rewrittenQuery, ctx); } catch (ParseException e) { throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_PARSE_ERROR.getMsg()); } rewrittenTree = ParseUtils.findRootNonNullToken(rewrittenTree); return rewrittenTree; } // fail early if the columns specified for column statistics are not valid private void validateSpecifiedColumnNames(List<String> specifiedCols) throws SemanticException { List<String> tableCols = Utilities.getColumnNamesFromFieldSchema(tbl.getCols()); for(String sc : specifiedCols) { if (!tableCols.contains(sc.toLowerCase())) { String msg = "'" + sc + "' (possible columns are " + tableCols.toString() + ")"; throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(msg)); } } } private void checkForPartitionColumns(List<String> specifiedCols, List<String> partCols) throws SemanticException { // Raise error if user has specified partition column for stats for (String pc : partCols) { for (String sc : specifiedCols) { if (pc.equalsIgnoreCase(sc)) { throw new SemanticException(ErrorMsg.COLUMNSTATSCOLLECTOR_INVALID_COLUMN.getMsg() + " [Try removing column '" + sc + "' from column list]"); } } } } @Override public void analyze(ASTNode ast, Context origCtx) throws SemanticException { QB qb; QBParseInfo qbp; // initialize QB init(true); // check if it is no scan. grammar prevents coexit noscan/columns super.processNoScanCommand(ast); // check if it is partial scan. grammar prevents coexit partialscan/columns super.processPartialScanCommand(ast); /* Rewrite only analyze table <> column <> compute statistics; Don't rewrite analyze table * command - table stats are collected by the table scan operator and is not rewritten to * an aggregation. 
*/ if (shouldRewrite(ast)) { tbl = getTable(ast); colNames = getColumnName(ast); // Save away the original AST originalTree = ast; boolean isPartitionStats = isPartitionLevelStats(ast); Map<String,String> partSpec = null; checkForPartitionColumns( colNames, Utilities.getColumnNamesFromFieldSchema(tbl.getPartitionKeys())); validateSpecifiedColumnNames(colNames); if (conf.getBoolVar(ConfVars.HIVE_STATS_COLLECT_PART_LEVEL_STATS) && tbl.isPartitioned()) { isPartitionStats = true; } if (isPartitionStats) { isTableLevel = false; partSpec = getPartKeyValuePairsFromAST(ast); handlePartialPartitionSpec(partSpec); } else { isTableLevel = true; } colType = getColumnTypes(colNames); int numBitVectors = getNumBitVectorsForNDVEstimation(conf); rewrittenQuery = genRewrittenQuery(colNames, numBitVectors, partSpec, isPartitionStats); rewrittenTree = genRewrittenTree(rewrittenQuery); } else { // Not an analyze table column compute statistics statement - don't do any rewrites originalTree = rewrittenTree = ast; rewrittenQuery = null; isRewritten = false; } // Setup the necessary metadata if originating from analyze rewrite if (isRewritten) { qb = getQB(); qb.setAnalyzeRewrite(true); qbp = qb.getParseInfo(); qbp.setTableName(tbl.getTableName()); qbp.setTblLvl(isTableLevel); qbp.setColName(colNames); qbp.setColType(colType); initCtx(ctx); LOG.info("Invoking analyze on rewritten query"); analyzeInternal(rewrittenTree); } else { initCtx(origCtx); LOG.info("Invoking analyze on original query"); analyzeInternal(originalTree); } } }
apache-2.0
kishorvpatil/incubator-storm
external/storm-hdfs/src/main/java/org/apache/storm/hdfs/common/NullPartitioner.java
1163
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package org.apache.storm.hdfs.common; import org.apache.storm.tuple.Tuple; /** * The NullPartitioner partitions every tuple to the empty string. In otherwords, no partition sub directories will * be added to the path. */ public class NullPartitioner implements Partitioner { @Override public String getPartitionPath(final Tuple tuple) { return ""; } }
apache-2.0
Distrotech/fop
src/java/org/apache/fop/fo/properties/FontShorthandParser.java
2151
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.fo.properties; import org.apache.fop.fo.Constants; import org.apache.fop.fo.PropertyList; import org.apache.fop.fo.expr.PropertyException; /** * A shorthand parser for the font shorthand property */ public class FontShorthandParser extends GenericShorthandParser { /** * {@inheritDoc} */ public Property getValueForProperty(int propId, Property property, PropertyMaker maker, PropertyList propertyList) throws PropertyException { int index = -1; Property newProp; switch (propId) { case Constants.PR_FONT_SIZE: index = 0; break; case Constants.PR_FONT_FAMILY: index = 1; break; case Constants.PR_LINE_HEIGHT: index = 2; break; case Constants.PR_FONT_STYLE: index = 3; break; case Constants.PR_FONT_VARIANT: index = 4; break; case Constants.PR_FONT_WEIGHT: index = 5; break; default: //nop } newProp = (Property) property.getList().get(index); return newProp; } }
apache-2.0
siosio/intellij-community
java/jsp-spi/src/com/intellij/jsp/JspSpiUtil.java
8002
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.jsp; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.module.Module; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.OrderEnumerator; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.vfs.JarFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiReference; import com.intellij.psi.impl.source.jsp.jspJava.JspClass; import com.intellij.psi.jsp.BaseJspFile; import com.intellij.psi.jsp.JspFile; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.util.ArrayUtilRt; import com.intellij.util.Consumer; import com.intellij.util.IncorrectOperationException; import com.intellij.util.Processor; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Path; import java.util.ArrayList; import java.util.List; /** * @author peter */ public abstract class JspSpiUtil { private static final Logger LOG = Logger.getInstance(JspSpiUtil.class); @NonNls private static final String JAR_EXTENSION = "jar"; @Nullable private static JspSpiUtil getJspSpiUtil() { return ApplicationManager.getApplication().getService(JspSpiUtil.class); } public static int escapeCharsInJspContext(JspFile file, int offset, String toEscape) throws IncorrectOperationException { final JspSpiUtil util = getJspSpiUtil(); return util != null ? 
util._escapeCharsInJspContext(file, offset, toEscape) : 0; } protected abstract int _escapeCharsInJspContext(JspFile file, int offset, String toEscape) throws IncorrectOperationException; public static void visitAllIncludedFilesRecursively(BaseJspFile jspFile, Processor<? super BaseJspFile> visitor) { final JspSpiUtil util = getJspSpiUtil(); if (util != null) { util._visitAllIncludedFilesRecursively(jspFile, visitor); } } protected abstract void _visitAllIncludedFilesRecursively(BaseJspFile jspFile, Processor<? super BaseJspFile> visitor); @Nullable public static PsiElement resolveMethodPropertyReference(@NotNull PsiReference reference, @Nullable PsiClass resolvedClass, boolean readable) { final JspSpiUtil util = getJspSpiUtil(); return util == null ? null : util._resolveMethodPropertyReference(reference, resolvedClass, readable); } @Nullable protected abstract PsiElement _resolveMethodPropertyReference(@NotNull PsiReference reference, @Nullable PsiClass resolvedClass, boolean readable); public static Object @NotNull [] getMethodPropertyReferenceVariants(@NotNull PsiReference reference, @Nullable PsiClass resolvedClass, boolean readable) { final JspSpiUtil util = getJspSpiUtil(); return util == null ? 
ArrayUtilRt.EMPTY_OBJECT_ARRAY : util._getMethodPropertyReferenceVariants(reference, resolvedClass, readable); } protected abstract Object[] _getMethodPropertyReferenceVariants(@NotNull PsiReference reference, @Nullable PsiClass resolvedClass, boolean readable); public static boolean isIncludedOrIncludesSomething(@NotNull JspFile file) { return isIncludingAnything(file) || isIncluded(file); } public static boolean isIncluded(@NotNull JspFile jspFile) { final JspSpiUtil util = getJspSpiUtil(); return util != null && util._isIncluded(jspFile); } public abstract boolean _isIncluded(@NotNull final JspFile jspFile); public static boolean isIncludingAnything(@NotNull JspFile jspFile) { final JspSpiUtil util = getJspSpiUtil(); return util != null && util._isIncludingAnything(jspFile); } protected abstract boolean _isIncludingAnything(@NotNull final JspFile jspFile); public static PsiFile[] getIncludedFiles(@NotNull JspFile jspFile) { final JspSpiUtil util = getJspSpiUtil(); return util == null ? PsiFile.EMPTY_ARRAY : util._getIncludedFiles(jspFile); } public static PsiFile[] getIncludingFiles(@NotNull JspFile jspFile) { final JspSpiUtil util = getJspSpiUtil(); return util == null ? 
PsiFile.EMPTY_ARRAY : util._getIncludingFiles(jspFile); } protected abstract PsiFile[] _getIncludingFiles(@NotNull PsiFile file); protected abstract PsiFile @NotNull [] _getIncludedFiles(@NotNull final JspFile jspFile); public static boolean isJavaContext(PsiElement position) { if(PsiTreeUtil.getContextOfType(position, JspClass.class, false) != null) return true; return false; } public static boolean isJarFile(@Nullable VirtualFile file) { if (file != null){ final String ext = file.getExtension(); if(ext != null && ext.equalsIgnoreCase(JAR_EXTENSION)) { return true; } } return false; } public static List<URL> buildUrls(@Nullable final VirtualFile virtualFile, @Nullable final Module module) { return buildUrls(virtualFile, module, true); } public static List<URL> buildUrls(@Nullable final VirtualFile virtualFile, @Nullable final Module module, boolean includeModuleOutput) { final List<URL> urls = new ArrayList<>(); processClassPathItems(virtualFile, module, file -> addUrl(urls, file), includeModuleOutput); return urls; } public static List<Path> buildFiles(@Nullable VirtualFile virtualFile, @Nullable Module module, boolean includeModuleOutput) { List<Path> result = new ArrayList<>(); processClassPathItems(virtualFile, module, file -> { if (file != null && file.isValid()) { Path path = file.getFileSystem().getNioPath(file); if (path != null) { result.add(path); } } }, includeModuleOutput); return result; } public static void processClassPathItems(final VirtualFile virtualFile, final Module module, final Consumer<? super VirtualFile> consumer) { processClassPathItems(virtualFile, module, consumer, true); } public static void processClassPathItems(final VirtualFile virtualFile, final Module module, final Consumer<? 
super VirtualFile> consumer, boolean includeModuleOutput) { if (isJarFile(virtualFile)){ consumer.consume(virtualFile); } if (module != null) { OrderEnumerator enumerator = ModuleRootManager.getInstance(module).orderEntries().recursively(); if (!includeModuleOutput) { enumerator = enumerator.withoutModuleSourceEntries(); } for (VirtualFile root : enumerator.getClassesRoots()) { final VirtualFile file; if (root.getFileSystem().getProtocol().equals(JarFileSystem.PROTOCOL)) { file = JarFileSystem.getInstance().getVirtualFileForJar(root); } else { file = root; } consumer.consume(file); } } } private static void addUrl(List<? super URL> urls, VirtualFile file) { if (file == null || !file.isValid()) return; final URL url = getUrl(file); if (url != null) { urls.add(url); } } @SuppressWarnings({"HardCodedStringLiteral"}) @Nullable private static URL getUrl(VirtualFile file) { if (file.getFileSystem() instanceof JarFileSystem && file.getParent() != null) return null; String path = file.getPath(); if (path.endsWith(JarFileSystem.JAR_SEPARATOR)) { path = path.substring(0, path.length() - 2); } String url; if (SystemInfo.isWindows) { url = "file:/" + path; } else { url = "file://" + path; } if (file.isDirectory() && !(file.getFileSystem() instanceof JarFileSystem)) url += "/"; try { return new URL(url); } catch (MalformedURLException e) { LOG.error(e); return null; } } }
apache-2.0
GunoH/intellij-community
java/java-tests/testSrc/com/intellij/java/propertyBased/MakeClassSealedPropertyTest.java
5528
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.java.propertyBased; import com.intellij.codeInsight.intention.impl.SealClassAction; import com.intellij.codeInsight.intention.impl.ShowIntentionActionsHandler; import com.intellij.openapi.application.WriteAction; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.openapi.roots.LanguageLevelProjectExtension; import com.intellij.openapi.util.RecursionManager; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.impl.PsiDocumentManagerImpl; import com.intellij.psi.search.searches.DirectClassInheritorsSearch; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.refactoring.util.CommonRefactoringUtil; import com.intellij.testFramework.propertyBased.InvokeIntention; import com.intellij.testFramework.propertyBased.MadTestingUtil; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.jetCheck.Generator; import org.jetbrains.jetCheck.ImperativeCommand; import org.jetbrains.jetCheck.IntDistribution; import org.jetbrains.jetCheck.PropertyChecker; import java.util.Collection; import java.util.List; import java.util.Objects; import java.util.Set; public class MakeClassSealedPropertyTest extends BaseUnivocityTest { @Override public void setUp() throws Exception { super.setUp(); WriteAction.run(() -> LanguageLevelProjectExtension.getInstance(myProject).setLanguageLevel(LanguageLevel.JDK_15_PREVIEW)); ((PsiDocumentManagerImpl)PsiDocumentManager.getInstance(myProject)).disableBackgroundCommit(getTestRootDisposable()); MadTestingUtil.enableAllInspections(myProject); } public void testMakeClassSealed() { RecursionManager.disableMissedCacheAssertions(getTestRootDisposable()); 
PropertyChecker.customized() .withIterationCount(30) .checkScenarios(() -> this::doTestMakeClassSealed); } private void doTestMakeClassSealed(@NotNull ImperativeCommand.Environment env) { Generator<PsiJavaFile> javaFiles = psiJavaFiles(); PsiJavaFile psiFile = env.generateValue(javaFiles, "Open %s in editor"); SealClassAction makeSealedAction = new SealClassAction(); FileEditorManager editorManager = FileEditorManager.getInstance(myProject); Editor editor = editorManager.openTextEditor(new OpenFileDescriptor(myProject, psiFile.getVirtualFile()), true); Collection<PsiClass> classes = PsiTreeUtil.findChildrenOfType(psiFile, PsiClass.class); List<PsiClass> psiClasses = ContainerUtil.filter(classes, psiClass -> canConvertToSealedClass(editor, makeSealedAction, psiClass)); if (psiClasses.isEmpty()) { env.logMessage("Haven't found any suitable classes, skipping"); return; } MadTestingUtil.changeAndRevert(myProject, () -> { PsiClass psiClass = env.generateValue(Generator.sampledFrom(psiClasses), "Converting class: %s"); PsiIdentifier classIdentifier = Objects.requireNonNull(psiClass.getNameIdentifier()); editor.getCaretModel().moveToOffset(classIdentifier.getTextOffset()); boolean convertedToSealed = convertToSealedClass(editor, makeSealedAction, classIdentifier); if (!convertedToSealed) { env.logMessage("Failed to convert to sealed class, skipping"); return; } PsiDocumentManager.getInstance(myProject).commitAllDocuments(); Set<PsiFile> relatedFiles = ContainerUtil.set(psiFile); DirectClassInheritorsSearch.search(psiClass).mapping(PsiElement::getContainingFile).forEach(relatedFiles::add); relatedFiles.forEach(f -> assertFalse(MadTestingUtil.containsErrorElements(f.getViewProvider()))); PsiFile fileToChange = env.generateValue(Generator.sampledFrom(relatedFiles.toArray(PsiFile.EMPTY_ARRAY)), "Invoking intention in %s"); env.executeCommands(IntDistribution.uniform(1, 5), Generator.constant(new InvokeIntention(fileToChange, new JavaGreenIntentionPolicy()))); 
PsiDocumentManager.getInstance(myProject).commitAllDocuments(); relatedFiles.forEach(f -> assertFalse(MadTestingUtil.containsErrorElements(f.getViewProvider()))); }); } private static boolean convertToSealedClass(@NotNull Editor editor, @NotNull SealClassAction makeSealedAction, @NotNull PsiIdentifier classIdentifier) { try { PsiFile containingFile = classIdentifier.getContainingFile(); ShowIntentionActionsHandler.chooseActionAndInvoke(containingFile, editor, makeSealedAction, makeSealedAction.getText()); return true; } catch (CommonRefactoringUtil.RefactoringErrorHintException e) { return false; } } private static boolean canConvertToSealedClass(@NotNull Editor editor, @NotNull SealClassAction makeSealedAction, @NotNull PsiClass psiClass) { PsiIdentifier nameIdentifier = psiClass.getNameIdentifier(); if (nameIdentifier == null) return false; editor.getCaretModel().moveToOffset(nameIdentifier.getTextOffset()); return makeSealedAction.isAvailable(psiClass.getProject(), editor, nameIdentifier); } }
apache-2.0
GunoH/intellij-community
java/java-tests/testData/inspection/dataFlow/jspecify/NotNullAwareUseOfTypeVariable.java
2292
/* * Copyright 2020 The JSpecify Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.jspecify.annotations.DefaultNonNull; import org.jspecify.annotations.Nullable; import org.jspecify.annotations.NullnessUnspecified; class NotNullAwareUseOfTypeVariable { interface Super<T extends @Nullable Object> { T get(); } @DefaultNonNull interface SubObject extends Super<Object> {} @DefaultNonNull interface SubObjectUnspec extends Super<@NullnessUnspecified Object> {} @DefaultNonNull interface SubObjectUnionNull extends Super<@Nullable Object> {} @DefaultNonNull class Caller { Object x0(SubObject s) { // jspecify_nullness_not_enough_information return s.get(); } Object x1(SubObjectUnspec s) { // jspecify_nullness_not_enough_information return s.get(); } Object x2(SubObjectUnionNull s) { // jspecify_nullness_mismatch return s.get(); } Object x3(Super<Object> s) { // jspecify_nullness_not_enough_information return s.get(); } Object x4(Super<@NullnessUnspecified Object> s) { // jspecify_nullness_not_enough_information return s.get(); } Object x5(Super<@Nullable Object> s) { // jspecify_nullness_mismatch return s.get(); } Object x6(Super<? extends Object> s) { // jspecify_nullness_not_enough_information return s.get(); } Object x7(Super<? extends @NullnessUnspecified Object> s) { // jspecify_nullness_not_enough_information return s.get(); } Object x8(Super<? 
extends @Nullable Object> s) { // jspecify_nullness_mismatch return s.get(); } Object x9(Super<?> s) { // jspecify_nullness_mismatch return s.get(); } } }
apache-2.0
sdmcraft/jackrabbit
jackrabbit-jcr-tests/src/main/java/org/apache/jackrabbit/test/api/version/MergeCheckedoutSubNodeTest.java
7310
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.test.api.version; import javax.jcr.MergeException; import javax.jcr.Node; import javax.jcr.RepositoryException; import javax.jcr.version.VersionManager; /** * <code>MergeCheckedoutSubNodeTest</code> contains tests dealing with * checked-out nodes in the subtree of the node on which merge is called. * * @test * @sources MergeCheckedoutSubNodeTest.java * @executeClass org.apache.jackrabbit.test.api.version.MergeCheckedoutSubNodeTest * @keywords versioning */ public class MergeCheckedoutSubNodeTest extends AbstractMergeTest { /** * node to merge */ Node nodeToMerge; protected void setUp() throws Exception { super.setUp(); nodeToMerge = testRootNodeW2.getNode(nodeName1); // node has to be checked out while merging VersionManager versionManager = nodeToMerge.getSession().getWorkspace().getVersionManager(); versionManager.checkout(nodeToMerge.getPath()); } protected void tearDown() throws Exception { nodeToMerge = null; super.tearDown(); } /** * Node.merge(): If V' of a versionable subnode N' in the source workspace * is a successor of V (the base version of a subnode N in this workspace), * calling merge must fail. 
*/ public void testFailIfCorrespondingNodeIsSuccessor() throws RepositoryException { // make V' of a subnode N' in source workspace be a successor version of // the base version of the corresponding subnode. Node n = testRootNode.getNode(nodeName1 + "/" + nodeName2); n.checkout(); n.checkin(); n.checkout(); try { // merge, besteffort set to false to stop at the first failure nodeToMerge.merge(workspace.getName(), false); fail("Merging a checkedout node if the version V' of the corresponding node is a successor of this node's base version must fail."); } catch (MergeException e) { // success } } /** * VersionManager.merge(): If V' of a versionable subnode N' in the source workspace * is a successor of V (the base version of a subnode N in this workspace), * calling merge must fail. */ public void testFailIfCorrespondingNodeIsSuccessorJcr2() throws RepositoryException { // make V' of a subnode N' in source workspace be a successor version of // the base version of the corresponding subnode. Node n = testRootNode.getNode(nodeName1 + "/" + nodeName2); VersionManager versionManager = n.getSession().getWorkspace().getVersionManager(); String path = n.getPath(); versionManager.checkout(path); versionManager.checkin(path); versionManager.checkout(path); try { // merge, besteffort set to false to stop at the first failure nodeToMerge.getSession().getWorkspace().getVersionManager().merge(nodeToMerge.getPath(), workspace.getName(), false); fail("Merging a checkedout node if the version V' of the corresponding node is a successor of this node's base version must fail."); } catch (MergeException e) { // success } } /** * Node.merge(): If V' of a versionable subnode N' in the source workspace * is a predeccessor of V or V' identical to V (the base version of a * subnode N in this workspace), calling merge must be leave. 
*/ public void testLeaveIfCorrespondingNodeIsPredeccessor() throws RepositoryException { // make V' of a subnode N' in source workspace be a predeccessor version of // the base version of the corresponding subnode. Node n = testRootNodeW2.getNode(nodeName1 + "/" + nodeName2); n.checkout(); n.setProperty(propertyName1, CHANGED_STRING); testRootNodeW2.save(); n.checkin(); n.checkout(); // merge, besteffort set to false to stop at the first failure nodeToMerge.merge(workspace.getName(), false); // check if subnode has status "leave" assertTrue(n.getProperty(propertyName1).getString().equals(CHANGED_STRING)); } /** * VersionManager.merge(): If V' of a versionable subnode N' in the source workspace * is a predeccessor of V or V' identical to V (the base version of a * subnode N in this workspace), calling merge must be leave. */ public void testLeaveIfCorrespondingNodeIsPredeccessorJcr2() throws RepositoryException { // make V' of a subnode N' in source workspace be a predeccessor version of // the base version of the corresponding subnode. 
Node n = testRootNodeW2.getNode(nodeName1 + "/" + nodeName2); VersionManager versionManager = n.getSession().getWorkspace().getVersionManager(); String path = n.getPath(); versionManager.checkout(path); n.setProperty(propertyName1, CHANGED_STRING); testRootNodeW2.getSession().save(); versionManager.checkin(path); versionManager.checkout(path); // merge, besteffort set to false to stop at the first failure nodeToMerge.getSession().getWorkspace().getVersionManager().merge(nodeToMerge.getPath(), workspace.getName(), false); // check if subnode has status "leave" assertTrue(n.getProperty(propertyName1).getString().equals(CHANGED_STRING)); } /** * initialize a two-step-hierarchy on default and second workspace */ protected void initNodes() throws RepositoryException { // create a versionable parent node // nodeName1 Node topVNode = testRootNode.addNode(nodeName1, versionableNodeType); topVNode.setProperty(propertyName1, topVNode.getName()); // create a versionable sub node // nodeName1/nodeName2 Node subNvNode = topVNode.addNode(nodeName2, versionableNodeType); subNvNode.setProperty(propertyName1, subNvNode.getName()); // save default workspace testRootNode.getSession().save(); log.println("test nodes created successfully on " + workspace.getName()); // clone the newly created node from src workspace into second workspace workspaceW2.clone(workspace.getName(), topVNode.getPath(), topVNode.getPath(), true); log.println(topVNode.getPath() + " cloned on " + superuserW2.getWorkspace().getName() + " at " + topVNode.getPath()); testRootNodeW2 = (Node) superuserW2.getItem(testRoot); } }
apache-2.0
danekja/spring-lab
labs/cv09_docker/app_nodb/src/main/java/org/danekja/edu/pia/manager/UserManager.java
880
package org.danekja.edu.pia.manager; import org.danekja.edu.pia.domain.User; import org.danekja.edu.pia.domain.UserValidationException; /** * Date: 26.11.15 * * @author Jakub Danek */ public interface UserManager { /** * Method for authentication of user's credentials. * * @param username provided login * @param password provided password * @return true if username and password are a match, false otherwise */ boolean authenticate(String username, String password); /** * Method for registering a new user. * @param newUser instance with new user data, expected not-null value * @throws UserValidationException if the new user data instance is not in valid state, * e.g. required fields are missing */ void register(User newUser) throws UserValidationException; }
apache-2.0
DavidHerzogTU-Berlin/astyanax
astyanax-thrift/src/main/java/com/netflix/astyanax/thrift/ThriftCqlQuery.java
1685
package com.netflix.astyanax.thrift; import java.nio.ByteBuffer; import java.util.List; import org.apache.cassandra.thrift.Compression; import org.apache.cassandra.thrift.InvalidRequestException; import org.apache.cassandra.thrift.SchemaDisagreementException; import org.apache.cassandra.thrift.TimedOutException; import org.apache.cassandra.thrift.UnavailableException; import org.apache.cassandra.thrift.Cassandra.Client; import org.apache.thrift.TException; import com.netflix.astyanax.serializers.StringSerializer; public class ThriftCqlQuery<K, C> extends AbstractThriftCqlQuery<K, C> { ThriftCqlQuery(ThriftColumnFamilyQueryImpl<K, C> cfQuery, String cql) { super(cfQuery, cql); } @Override protected org.apache.cassandra.thrift.CqlPreparedResult prepare_cql_query(Client client) throws InvalidRequestException, TException { return client.prepare_cql_query(StringSerializer.get().toByteBuffer(cql), Compression.NONE); } @Override protected org.apache.cassandra.thrift.CqlResult execute_prepared_cql_query(Client client, int id, List<ByteBuffer> values) throws InvalidRequestException, UnavailableException, TimedOutException, SchemaDisagreementException, TException { return client.execute_prepared_cql_query(id, values); } @Override protected org.apache.cassandra.thrift.CqlResult execute_cql_query(Client client) throws InvalidRequestException, UnavailableException, TimedOutException, SchemaDisagreementException, TException { return client.execute_cql_query( StringSerializer.get().toByteBuffer(cql), useCompression ? Compression.GZIP : Compression.NONE); } }
apache-2.0
codeaudit/optaplanner
optaplanner-benchmark/src/test/java/org/optaplanner/benchmark/impl/measurement/ScoreDifferencePercentageTest.java
10007
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.benchmark.impl.measurement; import org.junit.Test; import org.optaplanner.core.api.score.buildin.bendable.BendableScore; import org.optaplanner.core.api.score.buildin.hardsoft.HardSoftScore; import org.optaplanner.core.api.score.buildin.simple.SimpleScore; import static org.junit.Assert.assertEquals; public class ScoreDifferencePercentageTest { @Test(expected = IllegalStateException.class) public void calculateScoreDifferencePercentageException() { BendableScore score1 = BendableScore.valueOf(new int[]{1, 2, 3}, new int[]{4, 5}); BendableScore score2 = BendableScore.valueOf(new int[]{1, 2}, new int[]{4, 5}); ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); } @Test public void calculateScoreDifferencePercentage() { double tolerance = 0.00001; SimpleScore score1 = SimpleScore.valueOf(-100); SimpleScore score2 = SimpleScore.valueOf(-100); ScoreDifferencePercentage scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); assertEquals(0.0, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); score1 = SimpleScore.valueOf(100); score2 = SimpleScore.valueOf(100); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); assertEquals(0.0, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); score1 = SimpleScore.valueOf(-100); score2 = 
SimpleScore.valueOf(-10); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); assertEquals(0.9, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); score1 = SimpleScore.valueOf(100); score2 = SimpleScore.valueOf(10); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); assertEquals(-0.9, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); score1 = SimpleScore.valueOf(-100); score2 = SimpleScore.valueOf(-1); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); assertEquals(0.99, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); score1 = SimpleScore.valueOf(100); score2 = SimpleScore.valueOf(1); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); assertEquals(-0.99, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); HardSoftScore hardSoftScore1 = HardSoftScore.valueOf(-100, -1); HardSoftScore hardSoftScore2 = HardSoftScore.valueOf(-100, -1); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); assertEquals(0.0, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); assertEquals(0.0, scoreDifferencePercentage.getPercentageLevels()[1], tolerance); hardSoftScore1 = HardSoftScore.valueOf(-100, -100); hardSoftScore2 = HardSoftScore.valueOf(-1, -10); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); assertEquals(0.99, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); assertEquals(0.9, scoreDifferencePercentage.getPercentageLevels()[1], tolerance); hardSoftScore1 = HardSoftScore.valueOf(100, 100); hardSoftScore2 = HardSoftScore.valueOf(1, 10); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, 
hardSoftScore2); assertEquals(-0.99, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); assertEquals(-0.9, scoreDifferencePercentage.getPercentageLevels()[1], tolerance); hardSoftScore1 = HardSoftScore.valueOf(100, -100); hardSoftScore2 = HardSoftScore.valueOf(-100, 200); scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); assertEquals(-2, scoreDifferencePercentage.getPercentageLevels()[0], tolerance); assertEquals(3, scoreDifferencePercentage.getPercentageLevels()[1], tolerance); } @Test public void add() { double tolerance = 0.00001; HardSoftScore hardSoftScore1 = HardSoftScore.valueOf(-100, -1); HardSoftScore hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); hardSoftScore1 = HardSoftScore.valueOf(-100, -1); hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage2 = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); double[] levels = scoreDifferencePercentage.add(scoreDifferencePercentage2).getPercentageLevels(); assertEquals(-2.0, levels[0], tolerance); assertEquals(-18.0, levels[1], tolerance); } @Test public void subtract() { double tolerance = 0.00001; HardSoftScore hardSoftScore1 = HardSoftScore.valueOf(-100, -1); HardSoftScore hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); hardSoftScore1 = HardSoftScore.valueOf(-100, -1); hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage2 = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); double[] levels = 
scoreDifferencePercentage.subtract(scoreDifferencePercentage2).getPercentageLevels(); assertEquals(0.0, levels[0], tolerance); assertEquals(0.0, levels[1], tolerance); } @Test public void multiply() { double tolerance = 0.00001; HardSoftScore hardSoftScore1 = HardSoftScore.valueOf(-100, -1); HardSoftScore hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); double[] levels = scoreDifferencePercentage.multiply(3.14).getPercentageLevels(); assertEquals(-3.14, levels[0], tolerance); assertEquals(-28.26, levels[1], tolerance); levels = scoreDifferencePercentage.multiply(-1).getPercentageLevels(); assertEquals(1, levels[0], tolerance); assertEquals(9.0, levels[1], tolerance); } @Test public void divide() { double tolerance = 0.00001; HardSoftScore hardSoftScore1 = HardSoftScore.valueOf(-100, -1); HardSoftScore hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); double[] levels = scoreDifferencePercentage.multiply(0.5).getPercentageLevels(); assertEquals(-0.5, levels[0], tolerance); assertEquals(-4.5, levels[1], tolerance); levels = scoreDifferencePercentage.multiply(-1).getPercentageLevels(); assertEquals(1, levels[0], tolerance); assertEquals(9.0, levels[1], tolerance); } @Test(expected = IllegalStateException.class) public void addWithWrongDimension() { HardSoftScore hardSoftScore1 = HardSoftScore.valueOf(-100, -1); HardSoftScore hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); SimpleScore score1 = SimpleScore.valueOf(-100); SimpleScore score2 = SimpleScore.valueOf(-200); ScoreDifferencePercentage scoreDifferencePercentage2 = 
ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); scoreDifferencePercentage.add(scoreDifferencePercentage2); } @Test(expected = IllegalStateException.class) public void subtractWithWrongDimension() { HardSoftScore hardSoftScore1 = HardSoftScore.valueOf(-100, -1); HardSoftScore hardSoftScore2 = HardSoftScore.valueOf(-200, -10); ScoreDifferencePercentage scoreDifferencePercentage = ScoreDifferencePercentage.calculateScoreDifferencePercentage(hardSoftScore1, hardSoftScore2); SimpleScore score1 = SimpleScore.valueOf(-100); SimpleScore score2 = SimpleScore.valueOf(-200); ScoreDifferencePercentage scoreDifferencePercentage2 = ScoreDifferencePercentage.calculateScoreDifferencePercentage(score1, score2); scoreDifferencePercentage.subtract(scoreDifferencePercentage2); } }
apache-2.0
twalpole/selenium
java/server/test/org/openqa/grid/web/servlet/RegistrationAwareServletTest.java
1323
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.grid.web.servlet; public class RegistrationAwareServletTest extends BaseServletTest { /** * Gives the servlet some time to add the proxy -- which happens on a separate thread. */ protected void waitForServletToAddProxy() throws Exception { int tries = 0; int size = 0; while (tries < 10) { size = ((RegistryBasedServlet) servlet).getRegistry().getAllProxies().size(); if (size > 0) { break; } Thread.sleep(1000); tries += 1; } } }
apache-2.0
cstamas/orientdb
core/src/main/java/com/orientechnologies/orient/core/type/tree/OMVRBTreePersistent.java
32392
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.core.type.tree; import com.orientechnologies.common.collection.OLimitedMap; import com.orientechnologies.common.log.OLogManager; import com.orientechnologies.common.profiler.OProfilerMBean; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.config.OGlobalConfiguration; import com.orientechnologies.orient.core.db.ODatabaseDocumentInternal; import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal; import com.orientechnologies.orient.core.exception.OStorageException; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.index.mvrbtree.OMVRBTree; import com.orientechnologies.orient.core.index.mvrbtree.OMVRBTreeEntry; import com.orientechnologies.orient.core.memory.OLowMemoryException; import com.orientechnologies.orient.core.record.ORecord; import com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage; import com.orientechnologies.orient.core.type.tree.provider.OMVRBTreeProvider; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; /** * Persistent based MVRB-Tree 
implementation. The difference with the class OMVRBTreePersistent is the level. In facts this class * works directly at the storage level, while the other at database level. This class is used for Logical Clusters. It can'be * transactional. It uses the entryPoints tree-map to get the closest entry point where start searching a node. * */ @SuppressWarnings("serial") public abstract class OMVRBTreePersistent<K, V> extends OMVRBTree<K, V> { protected OMVRBTreeProvider<K, V> dataProvider; protected ORecord owner; protected final Set<OMVRBTreeEntryPersistent<K, V>> recordsToCommit = new HashSet<OMVRBTreeEntryPersistent<K, V>>(); // STORES IN MEMORY DIRECT REFERENCES TO PORTION OF THE TREE protected volatile int optimization = 0; protected int entryPointsSize; protected float optimizeEntryPointsFactor; private final TreeMap<K, OMVRBTreeEntryPersistent<K, V>> entryPoints; private final Map<ORID, OMVRBTreeEntryPersistent<K, V>> cache; protected static final OProfilerMBean PROFILER = Orient.instance().getProfiler(); private static final int OPTIMIZE_MAX_RETRY = 10; public OMVRBTreePersistent(OMVRBTreeProvider<K, V> iProvider) { super(); cache = new OLimitedMap<ORID, OMVRBTreeEntryPersistent<K, V>>(256, 0.90f, OGlobalConfiguration.MVRBTREE_OPTIMIZE_THRESHOLD.getValueAsInteger()) { /** * Set the optimization rather than remove eldest element. 
*/ @Override protected boolean removeEldestEntry(final Map.Entry<ORID, OMVRBTreeEntryPersistent<K, V>> eldest) { if (super.removeEldestEntry(eldest)) // TOO MANY ITEMS: SET THE OPTIMIZATION setOptimization(2); return false; } }; if (comparator != null) entryPoints = new TreeMap<K, OMVRBTreeEntryPersistent<K, V>>(comparator); else entryPoints = new TreeMap<K, OMVRBTreeEntryPersistent<K, V>>(); pageLoadFactor = (Float) OGlobalConfiguration.MVRBTREE_LOAD_FACTOR.getValue(); dataProvider = iProvider; config(); } public OMVRBTreePersistent(OMVRBTreeProvider<K, V> iProvider, int keySize) { this(iProvider); this.keySize = keySize; dataProvider.setKeySize(keySize); } @Override protected OMVRBTreeEntryPersistent<K, V> createEntry(OMVRBTreeEntry<K, V> iParent) { adjustPageSize(); return new OMVRBTreeEntryPersistent<K, V>(iParent, iParent.getPageSplitItems()); } @Override protected OMVRBTreeEntryPersistent<K, V> createEntry(final K key, final V value) { adjustPageSize(); return new OMVRBTreeEntryPersistent<K, V>(this, key, value, null); } /** * Create a new entry for {@link #loadEntry(OMVRBTreeEntryPersistent, ORID)}. 
*/ protected OMVRBTreeEntryPersistent<K, V> createEntry(OMVRBTreeEntryPersistent<K, V> iParent, ORID iRecordId) { return new OMVRBTreeEntryPersistent<K, V>(this, iParent, iRecordId); } public OMVRBTreePersistent<K, V> load() { dataProvider.load(); // RESET LAST SEARCH STATE setLastSearchNode(null, null); keySize = dataProvider.getKeySize(); // LOAD THE ROOT OBJECT AFTER ALL final ORID rootRid = dataProvider.getRoot(); if (rootRid != null && rootRid.isValid()) root = loadEntry(null, rootRid); return this; } protected void initAfterLoad() throws IOException { } public OMVRBTreePersistent<K, V> save() { commitChanges(); return this; } protected void saveTreeNode() throws IOException { if (root != null) { OMVRBTreeEntryPersistent<K, V> pRoot = (OMVRBTreeEntryPersistent<K, V>) root; if (pRoot.getProvider().getIdentity().isNew()) { // FIRST TIME: SAVE IT pRoot.save(); } } dataProvider.save(); } /** * Lazy loads a node. */ protected OMVRBTreeEntryPersistent<K, V> loadEntry(final OMVRBTreeEntryPersistent<K, V> iParent, final ORID iRecordId) { // SEARCH INTO THE CACHE OMVRBTreeEntryPersistent<K, V> entry = searchNodeInCache(iRecordId); if (entry == null) { // NOT FOUND: CREATE IT AND PUT IT INTO THE CACHE entry = createEntry(iParent, iRecordId); addNodeInMemory(entry); // RECONNECT THE LOADED NODE WITH IN-MEMORY PARENT, LEFT AND RIGHT if (entry.parent == null && entry.dataProvider.getParent().isValid()) { // TRY TO ASSIGN THE PARENT IN CACHE IF ANY final OMVRBTreeEntryPersistent<K, V> parentNode = searchNodeInCache(entry.dataProvider.getParent()); if (parentNode != null) entry.setParent(parentNode); } if (entry.left == null && entry.dataProvider.getLeft().isValid()) { // TRY TO ASSIGN THE PARENT IN CACHE IF ANY final OMVRBTreeEntryPersistent<K, V> leftNode = searchNodeInCache(entry.dataProvider.getLeft()); if (leftNode != null) entry.setLeft(leftNode); } if (entry.right == null && entry.dataProvider.getRight().isValid()) { // TRY TO ASSIGN THE PARENT IN CACHE IF ANY final 
OMVRBTreeEntryPersistent<K, V> rightNode = searchNodeInCache(entry.dataProvider.getRight()); if (rightNode != null) entry.setRight(rightNode); } } else { // COULD BE A PROBLEM BECAUSE IF A NODE IS DISCONNECTED CAN IT STAY IN CACHE? // entry.load(); if (iParent != null) // FOUND: ASSIGN IT ONLY IF NOT NULL entry.setParent(iParent); } entry.checkEntryStructure(); return entry; } @Override protected int getTreeSize() { return dataProvider.getSize(); } protected void setSize(final int iSize) { if (dataProvider.setSize(iSize)) markDirty(); } public int getDefaultPageSize() { return dataProvider.getDefaultPageSize(); } @Override public void clear() { final long timer = PROFILER.startChrono(); try { recordsToCommit.clear(); entryPoints.clear(); cache.clear(); if (root != null) try { ((OMVRBTreeEntryPersistent<K, V>) root).delete(); } catch (Exception e) { // IGNORE ANY EXCEPTION dataProvider = dataProvider.copy(); } super.clear(); markDirty(); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.clear"), "Clear a MVRBTree", timer); } } public void delete() { clear(); dataProvider.delete(); } /** * Unload all the in-memory nodes. This is called on transaction rollback. 
*/ public void unload() { final long timer = PROFILER.startChrono(); try { // DISCONNECT ALL THE NODES for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints.values()) entryPoint.disconnectLinked(true); entryPoints.clear(); cache.clear(); recordsToCommit.clear(); root = null; final ODatabaseDocumentInternal db = ODatabaseRecordThreadLocal.INSTANCE.getIfDefined(); if (db != null && !db.isClosed() && db.getStorage().getUnderlying() instanceof OAbstractPaginatedStorage) { // RELOAD IT try { load(); } catch (Exception e) { // IGNORE IT } } } catch (Exception e) { OLogManager.instance().error(this, "Error on unload the tree: " + dataProvider, e, OStorageException.class); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.unload"), "Unload a MVRBTree", timer); } } /** * Calls the optimization in soft mode: free resources only if needed. */ protected void optimize() { optimize(false); } /** * Optimizes the memory needed by the tree in memory by reducing the number of entries to the configured size. 
* * @return The total freed nodes */ public int optimize(final boolean iForce) { if (optimization == -1) // IS ALREADY RUNNING return 0; if (!iForce && optimization == 0) // NO OPTIMIZATION IS NEEDED return 0; // SET OPTIMIZATION STATUS AS RUNNING optimization = -1; final long timer = PROFILER.startChrono(); try { if (root == null) return 0; if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "Starting optimization of MVRB+Tree with %d items in memory...", cache.size()); // printInMemoryStructure(); if (entryPoints.size() == 0) // FIRST TIME THE LIST IS NULL: START FROM ROOT addNodeAsEntrypoint((OMVRBTreeEntryPersistent<K, V>) root); // RECONFIG IT TO CATCH CHANGED VALUES config(); if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "Found %d items on disk, threshold=%f, entryPoints=%d, nodesInCache=%d", size(), (entryPointsSize * optimizeEntryPointsFactor), entryPoints.size(), cache.size()); final int nodesInMemory = cache.size(); if (!iForce && nodesInMemory < entryPointsSize * optimizeEntryPointsFactor) // UNDER THRESHOLD AVOID TO OPTIMIZE return 0; lastSearchFound = false; lastSearchKey = null; lastSearchNode = null; int totalDisconnected = 0; if (nodesInMemory > entryPointsSize) { // REDUCE THE ENTRYPOINTS final int distance = nodesInMemory / entryPointsSize + 1; final Set<OMVRBTreeEntryPersistent<K, V>> entryPointsToRemove = new HashSet<OMVRBTreeEntryPersistent<K, V>>(nodesInMemory - entryPointsSize + 2); // REMOVE ENTRYPOINTS AT THE SAME DISTANCE int currNode = 0; for (final Iterator<OMVRBTreeEntryPersistent<K, V>> it = entryPoints.values().iterator(); it.hasNext();) { final OMVRBTreeEntryPersistent<K, V> currentNode = it.next(); // JUMP THE FIRST (1 cannot never be the % of distance) THE LAST, ROOT AND LAST USED // RECORDS THAT WERE CREATED INSIDE OF TRANSACTION CAN'T BE REMOVED TILL COMMIT if (currentNode != root && currentNode != lastSearchNode && !currentNode.dataProvider.getIdentity().isTemporary() 
&& it.hasNext()) if (++currNode % distance != 0) { // REMOVE THE NODE entryPointsToRemove.add(currentNode); it.remove(); } } addNodeAsEntrypoint((OMVRBTreeEntryPersistent<K, V>) lastSearchNode); addNodeAsEntrypoint((OMVRBTreeEntryPersistent<K, V>) root); // DISCONNECT THE REMOVED NODES for (OMVRBTreeEntryPersistent<K, V> currentNode : entryPointsToRemove) totalDisconnected += currentNode.disconnectLinked(false); cache.clear(); for (OMVRBTreeEntryPersistent<K, V> entry : entryPoints.values()) addNodeInCache(entry); } if (isRuntimeCheckEnabled()) { for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints.values()) for (OMVRBTreeEntryPersistent<K, V> e = (OMVRBTreeEntryPersistent<K, V>) entryPoint.getFirstInMemory(); e != null; e = e .getNextInMemory()) e.checkEntryStructure(); } // COUNT ALL IN-MEMORY NODES BY BROWSING ALL THE ENTRYPOINT NODES if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "After optimization: %d items on disk, threshold=%f, entryPoints=%d, nodesInCache=%d", size(), (entryPointsSize * optimizeEntryPointsFactor), entryPoints.size(), cache.size()); return totalDisconnected; } finally { optimization = 0; if (isRuntimeCheckEnabled()) { if (!entryPoints.isEmpty()) for (OMVRBTreeEntryPersistent<K, V> entryPoint : entryPoints.values()) checkTreeStructure(entryPoint.getFirstInMemory()); else checkTreeStructure(root); } PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.optimize"), "Optimize a MVRBTree", timer); if (OLogManager.instance().isDebugEnabled()) OLogManager.instance().debug(this, "Optimization completed in %d ms\n", System.currentTimeMillis() - timer); } } @Override public OMVRBTreeEntry<K, V> getCeilingEntry(K key, PartialSearchMode partialSearchMode) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getCeilingEntry(key, partialSearchMode); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); 
freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getCeilingEntry()"); } @Override public OMVRBTreeEntry<K, V> getFloorEntry(K key, PartialSearchMode partialSearchMode) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getFloorEntry(key, partialSearchMode); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getFloorEntry()"); } @Override public OMVRBTreeEntry<K, V> getHigherEntry(K key) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getHigherEntry(key); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getHigherEntry)"); } @Override public OMVRBTreeEntry<K, V> getLowerEntry(K key) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.getLowerEntry(key); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.getLowerEntry()"); } @Override public V put(final K key, final V value) { optimize(); final long timer = PROFILER.startChrono(); try { final V v = internalPut(key, value); commitChanges(); return v; } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.put"), "Put a value into a MVRBTree", timer); } } @Override public void putAll(final Map<? extends K, ? extends V> map) { final long timer = PROFILER.startChrono(); try { for (Entry<? extends K, ? 
extends V> entry : map.entrySet()) internalPut(entry.getKey(), entry.getValue()); commitChanges(); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.putAll"), "Put multiple values into a MVRBTree", timer); } } @Override public V remove(final Object key) { optimize(); final long timer = PROFILER.startChrono(); try { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { V v = super.remove(key); commitChanges(); return v; } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during remove %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); // AVOID CONTINUE EXCEPTIONS optimization = -1; } } } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.remove"), "Remove a value from a MVRBTree", timer); } throw new OLowMemoryException("OMVRBTreePersistent.remove()"); } public int commitChanges() { final long timer = PROFILER.startChrono(); int totalCommitted = 0; try { if (!recordsToCommit.isEmpty()) { final List<OMVRBTreeEntryPersistent<K, V>> tmp = new ArrayList<OMVRBTreeEntryPersistent<K, V>>(); while (recordsToCommit.iterator().hasNext()) { // COMMIT BEFORE THE NEW RECORDS (TO ASSURE RID IN RELATIONSHIPS) tmp.addAll(recordsToCommit); recordsToCommit.clear(); for (OMVRBTreeEntryPersistent<K, V> node : tmp) if (node.dataProvider.isEntryDirty()) { boolean wasNew = node.dataProvider.getIdentity().isNew(); // CREATE THE RECORD node.save(); if (debug) System.out.printf("\nSaved %s tree node %s: parent %s, left %s, right %s", wasNew ? 
"new" : "", node.dataProvider.getIdentity(), node.dataProvider.getParent(), node.dataProvider.getLeft(), node.dataProvider.getRight()); } totalCommitted += tmp.size(); tmp.clear(); } } if (dataProvider.isDirty()) // TREE IS CHANGED AS WELL saveTreeNode(); } catch (IOException e) { throw new OStorageException("Error on saving the tree", e); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.commitChanges"), "Commit pending changes to a MVRBTree", timer); } return totalCommitted; } public void signalNodeChanged(final OMVRBTreeEntry<K, V> iNode) { recordsToCommit.add((OMVRBTreeEntryPersistent<K, V>) iNode); } @Override public int hashCode() { return dataProvider.hashCode(); } protected void adjustPageSize() { } @Override public V get(final Object iKey) { final long timer = PROFILER.startChrono(); try { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.get(iKey); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.get()"); } finally { PROFILER.stopChrono(PROFILER.getProcessMetric("mvrbtree.get"), "Get a value from a MVRBTree", timer); } } @Override public boolean containsKey(final Object iKey) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.containsKey(iKey); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.containsKey()"); } @Override public boolean containsValue(final Object iValue) { for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.containsValue(iValue); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during node search %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new 
OLowMemoryException("OMVRBTreePersistent.containsValue()"); } public OMVRBTreeProvider<K, V> getProvider() { return dataProvider; } public int getOptimization() { return optimization; } /** * Set the optimization to be executed at the next call. * * @param iMode * <ul> * <li>-1 = ALREADY RUNNING</li> * <li>0 = NO OPTIMIZATION (DEFAULT)</li> * <li>1 = SOFT MODE</li> * <li>2 = HARD MODE</li> * </ul> */ public void setOptimization(final int iMode) { if (iMode > 0 && optimization == -1) // IGNORE IT, ALREADY RUNNING return; optimization = iMode; } /** * Checks if optimization is needed by raising a {@link OLowMemoryException}. */ @Override protected void searchNodeCallback() { if (optimization > 0) throw new OLowMemoryException("Optimization level: " + optimization); } public int getEntryPointSize() { return entryPointsSize; } public void setEntryPointSize(final int entryPointSize) { this.entryPointsSize = entryPointSize; } @Override public String toString() { final StringBuilder buffer = new StringBuilder(128).append('['); if (size() < 10) { OMVRBTreeEntry<K, V> current = getFirstEntry(); for (int i = 0; i < 10 && current != null; ++i) { if (i > 0) buffer.append(','); buffer.append(current); current = next(current); } } else { buffer.append("size="); final int size = size(); buffer.append(size); final OMVRBTreeEntry<K, V> firstEntry = getFirstEntry(); if (firstEntry != null) { final int currPageIndex = pageIndex; buffer.append(" "); buffer.append(firstEntry.getFirstKey()); if (size > 1) { buffer.append("-"); buffer.append(getLastEntry().getLastKey()); } pageIndex = currPageIndex; } } return buffer.append(']').toString(); } protected V internalPut(final K key, final V value) throws OLowMemoryException { ORecord rec; if (key instanceof ORecord) { // RECORD KEY: ASSURE IT'S PERSISTENT TO AVOID STORING INVALID RIDs rec = (ORecord) key; if (!rec.getIdentity().isValid()) rec.save(); } if (value instanceof ORecord) { // RECORD VALUE: ASSURE IT'S PERSISTENT TO AVOID STORING 
INVALID RIDs rec = (ORecord) value; if (!rec.getIdentity().isValid()) rec.save(); } for (int i = 0; i < OPTIMIZE_MAX_RETRY; ++i) { try { return super.put(key, value); } catch (OLowMemoryException e) { OLogManager.instance().debug(this, "Optimization required during put %d/%d", i, OPTIMIZE_MAX_RETRY); freeMemory(i); } } throw new OLowMemoryException("OMVRBTreePersistent.put()"); } /** * Returns the best entry point to start the search. Searches first between entrypoints. If nothing is found "root" is always * returned. */ @Override protected OMVRBTreeEntry<K, V> getBestEntryPoint(final K iKey) { if (!entryPoints.isEmpty()) { // SEARCHES EXACT OR BIGGER ENTRY Entry<K, OMVRBTreeEntryPersistent<K, V>> closerNode = entryPoints.floorEntry(iKey); if (closerNode != null) return closerNode.getValue(); // NO WAY: TRY WITH ANY NODE BEFORE THE KEY closerNode = entryPoints.ceilingEntry(iKey); if (closerNode != null) return closerNode.getValue(); } // USE ROOT return super.getBestEntryPoint(iKey); } /** * Remove an entry point from the list */ void removeEntryPoint(final OMVRBTreeEntryPersistent<K, V> iEntry) { entryPoints.remove(iEntry); } synchronized void removeEntry(final ORID iEntryId) { // DELETE THE NODE FROM THE PENDING RECORDS TO COMMIT for (OMVRBTreeEntryPersistent<K, V> node : recordsToCommit) { if (node.dataProvider.getIdentity().equals(iEntryId)) { recordsToCommit.remove(node); break; } } } /** * Returns the first Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is * empty. 
*/ @Override public OMVRBTreeEntry<K, V> getFirstEntry() { if (!entryPoints.isEmpty()) { // FIND THE FIRST ELEMENT STARTING FROM THE FIRST ENTRY-POINT IN MEMORY final Map.Entry<K, OMVRBTreeEntryPersistent<K, V>> entry = entryPoints.firstEntry(); if (entry != null) { OMVRBTreeEntryPersistent<K, V> e = entry.getValue(); OMVRBTreeEntryPersistent<K, V> prev; do { prev = (OMVRBTreeEntryPersistent<K, V>) predecessor(e); if (prev != null) e = prev; } while (prev != null); if (e != null && e.getSize() > 0) pageIndex = 0; return e; } } // SEARCH FROM ROOT return super.getFirstEntry(); } /** * Returns the last Entry in the OMVRBTree (according to the OMVRBTree's key-sort function). Returns null if the OMVRBTree is * empty. */ @Override public OMVRBTreeEntry<K, V> getLastEntry() { if (!entryPoints.isEmpty()) { // FIND THE LAST ELEMENT STARTING FROM THE FIRST ENTRY-POINT IN MEMORY final Map.Entry<K, OMVRBTreeEntryPersistent<K, V>> entry = entryPoints.lastEntry(); if (entry != null) { OMVRBTreeEntryPersistent<K, V> e = entry.getValue(); OMVRBTreeEntryPersistent<K, V> next; do { next = (OMVRBTreeEntryPersistent<K, V>) successor(e); if (next != null) e = next; } while (next != null); if (e != null && e.getSize() > 0) pageIndex = e.getSize() - 1; return e; } } // SEARCH FROM ROOT return super.getLastEntry(); } @Override protected void setRoot(final OMVRBTreeEntry<K, V> iRoot) { if (iRoot == root) return; super.setRoot(iRoot); if (iRoot == null) dataProvider.setRoot(null); else dataProvider.setRoot(((OMVRBTreeEntryPersistent<K, V>) iRoot).getProvider().getIdentity()); } protected void config() { if (dataProvider.updateConfig()) markDirty(); pageLoadFactor = OGlobalConfiguration.MVRBTREE_LOAD_FACTOR.getValueAsFloat(); optimizeEntryPointsFactor = OGlobalConfiguration.MVRBTREE_OPTIMIZE_ENTRYPOINTS_FACTOR.getValueAsFloat(); entryPointsSize = OGlobalConfiguration.MVRBTREE_ENTRYPOINTS.getValueAsInteger(); } @Override protected void rotateLeft(final OMVRBTreeEntry<K, V> p) { if (debug && 
p != null) System.out.printf("\nRotating to the left the node %s", ((OMVRBTreeEntryPersistent<K, V>) p).dataProvider.getIdentity()); super.rotateLeft(p); } @Override protected void rotateRight(final OMVRBTreeEntry<K, V> p) { if (debug && p != null) System.out.printf("\nRotating to the right the node %s", ((OMVRBTreeEntryPersistent<K, V>) p).dataProvider.getIdentity()); super.rotateRight(p); } /** * Removes the node also from the memory. */ @Override protected OMVRBTreeEntry<K, V> removeNode(final OMVRBTreeEntry<K, V> p) { final OMVRBTreeEntryPersistent<K, V> removed = (OMVRBTreeEntryPersistent<K, V>) super.removeNode(p); removeNodeFromMemory(removed); // this prevents NPE in case if tree contains single node and it was deleted inside of super.removeNode method. if (removed.getProvider() != null) removed.getProvider().delete(); // prevent record saving if it has been deleted. recordsToCommit.remove(removed); return removed; } /** * Removes the node from the memory. * * @param iNode * Node to remove */ protected void removeNodeFromMemory(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode.dataProvider != null && iNode.dataProvider.getIdentity().isValid()) cache.remove(iNode.dataProvider.getIdentity()); if (iNode.getSize() > 0) entryPoints.remove(iNode.getKeyAt(0)); } protected void addNodeInMemory(final OMVRBTreeEntryPersistent<K, V> iNode) { addNodeAsEntrypoint(iNode); addNodeInCache(iNode); } protected boolean isNodeEntryPoint(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode != null && iNode.getSize() > 0) return entryPoints.containsKey(iNode.getKeyAt(0)); return false; } protected void addNodeAsEntrypoint(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode != null && iNode.getSize() > 0) entryPoints.put(iNode.getKeyAt(0), iNode); } /** * Updates the position of the node between the entry-points. If the node has 0 items, it's simply removed. 
* * @param iOldKey * Old key to remove * @param iNode * Node to update */ protected void updateEntryPoint(final K iOldKey, final OMVRBTreeEntryPersistent<K, V> iNode) { final OMVRBTreeEntryPersistent<K, V> node = entryPoints.remove(iOldKey); if (node != null) { if (node != iNode) OLogManager.instance().warn(this, "Entrypoints nodes are different during update: old %s <-> new %s", node, iNode); addNodeAsEntrypoint(iNode); } } /** * Keeps the node in memory. * * @param iNode * Node to store */ protected void addNodeInCache(final OMVRBTreeEntryPersistent<K, V> iNode) { if (iNode.dataProvider != null && iNode.dataProvider.getIdentity().isValid()) cache.put(iNode.dataProvider.getIdentity(), iNode); } /** * Searches the node in local cache by RID. * * @param iRid * RID to search * @return Node is found, otherwise NULL */ protected OMVRBTreeEntryPersistent<K, V> searchNodeInCache(final ORID iRid) { return cache.get(iRid); } public int getNumberOfNodesInCache() { return cache.size(); } /** * Returns all the RID of the nodes in memory. */ protected Set<ORID> getAllNodesInCache() { return cache.keySet(); } /** * Removes the node from the local cache. * * @param iRid * RID of node to remove */ protected void removeNodeFromCache(final ORID iRid) { cache.remove(iRid); } protected void markDirty() { } public ORecord getOwner() { return owner; } public OMVRBTreePersistent<K, V> setOwner(ORecord owner) { this.owner = owner; return this; } protected void freeMemory(final int i) { // LOW MEMORY DURING LOAD: THIS MEANS DEEP LOADING OF NODES. EXECUTE THE OPTIMIZATION AND RETRY IT optimize(true); } }
apache-2.0
robin13/elasticsearch
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/ProcessBuilderUtils.java
1413
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.ml.job.process;

import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.core.XPackPlugin;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

/**
 * Helpers shared by the ML native-process builders: appending optional
 * command-line arguments and locating the optional model configuration file.
 */
public final class ProcessBuilderUtils {

    /**
     * Name of the model config file
     */
    public static final String ML_MODEL_CONF = "mlmodel.conf";

    private ProcessBuilderUtils() {
        // static helpers only
    }

    /**
     * Appends {@code argKey} concatenated with {@code object} to {@code command};
     * does nothing when {@code object} is {@code null}.
     *
     * @param object  value to render after the key, or {@code null} to skip
     * @param argKey  argument prefix, e.g. {@code "--someflag="}
     * @param command the argument list being built up
     */
    public static <T> void addIfNotNull(T object, String argKey, List<String> command) {
        if (object == null) {
            return;
        }
        command.add(argKey + object);
    }

    /**
     * Overload for {@link TimeValue} arguments: the value is rendered as whole
     * seconds (via {@code getSeconds()}), or skipped entirely when {@code null}.
     */
    public static void addIfNotNull(TimeValue timeValue, String argKey, List<String> command) {
        Long seconds = timeValue == null ? null : timeValue.getSeconds();
        addIfNotNull(seconds, argKey, command);
    }

    /**
     * Return true if there is a file ES_HOME/config/mlmodel.conf
     */
    public static boolean modelConfigFilePresent(Environment env) {
        Path modelConfPath = XPackPlugin.resolveConfigFile(env, ML_MODEL_CONF);
        return Files.isRegularFile(modelConfPath);
    }
}
apache-2.0
dahlstrom-g/intellij-community
python/python-rest/src/com/jetbrains/rest/RestPythonUtil.java
1786
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.rest; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.LangDataKeys; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.jetbrains.python.PyPsiPackageUtil; import com.jetbrains.python.packaging.PyPackage; import com.jetbrains.python.packaging.PyPackageManager; import com.jetbrains.python.sdk.PythonSdkUtil; import java.util.List; /** * User : catherine */ public final class RestPythonUtil { private RestPythonUtil() {} public static Presentation updateSphinxQuickStartRequiredAction(final AnActionEvent e) { final Presentation presentation = e.getPresentation(); final Project project = e.getData(CommonDataKeys.PROJECT); if (project != null) { Module module = e.getData(LangDataKeys.MODULE); if (module == null) { Module[] modules = ModuleManager.getInstance(project).getModules(); module = modules.length == 0 ? null : modules [0]; } if (module != null) { final Sdk sdk = PythonSdkUtil.findPythonSdk(module); if (sdk != null) { final List<PyPackage> packages = PyPackageManager.getInstance(sdk).getPackages(); final PyPackage sphinx = packages != null ? PyPsiPackageUtil.findPackage(packages, "Sphinx") : null; presentation.setEnabled(sphinx != null); } } } return presentation; } }
apache-2.0
archanah24/lens
lens-client/src/main/java/org/apache/lens/client/LensClientConfig.java
5290
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.lens.client;

import org.apache.hadoop.conf.Configuration;

/**
 * Configuration used by the lens client. Values are loaded from
 * {@code lens-client-default.xml} and overridden by {@code lens-client-site.xml};
 * Hadoop's own default resources are deliberately not loaded.
 */
public class LensClientConfig extends Configuration {

  /**
   * Instantiates a new lens client config.  {@code super(false)} skips
   * Hadoop's default resources so only the lens client resources apply.
   */
  public LensClientConfig() {
    super(false);
    addResource("lens-client-default.xml");
    addResource("lens-client-site.xml");
  }

  // config prefixes
  // All the config variables will use one of these prefixes

  /** Common prefix for all client-side configuration keys. */
  public static final String CLIENT_PFX = "lens.client.";

  /** Config key for the database the client connects to. */
  public static final String DBNAME_KEY = CLIENT_PFX + "dbname";

  /** Database used when {@link #DBNAME_KEY} is not set. */
  public static final String DEFAULT_DBNAME_VALUE = "default";

  /** Config key for the query status poll interval (seconds). */
  private static final String QUERY_POLL_INTERVAL_KEY = CLIENT_PFX + "query.poll.interval";

  /** Default poll interval when {@link #QUERY_POLL_INTERVAL_KEY} is not set. */
  private static final long DEFAULT_QUERY_POLL_INTERVAL = 10L;

  /** Config key for the lens user name. */
  private static final String USER_NAME = CLIENT_PFX + "user.name";

  /** User name used when none is configured. */
  public static final String DEFAULT_USER_NAME = "anonymous";

  /** Web-app path fragment of the metastore service on the server. */
  private static final String DEFAULT_METASTORE_RESOURCE_PATH = "metastore";

  /** Web-app path fragment of the query service on the server. */
  private static final String DEFAULT_QUERY_RESOURCE_PATH = "queryapi";

  /** Web-app path fragment of the session service on the server. */
  public static final String DEFAULT_SESSION_RESOURCE_PATH = "session";

  /** Web-app path fragment of the log service on the server. */
  public static final String DEFAULT_LOG_RESOURCE_PATH = "logs";

  // server side conf properties copied here

  /** Config key for the lens server base URL. */
  public static final String SERVER_BASE_URL = "lens.server.base.url";

  /** Server base URL used when {@link #SERVER_BASE_URL} is not set. */
  public static final String DEFAULT_SERVER_BASE_URL = "http://0.0.0.0:9999/lensapi";

  /** Config key for the cluster user associated with the session. */
  public static final String SESSION_CLUSTER_USER = "lens.session.cluster.user";

  /** Config key listing the names of client-side WS request filters. */
  public static final String SESSION_FILTER_NAMES = CLIENT_PFX + "ws.request.filternames";

  /** Suffix appended to a filter name to form its implementation-class key. */
  public static final String WS_FILTER_IMPL_SFX = ".ws.filter.impl";

  /** Config key for the HTTP read timeout, in milliseconds. */
  public static final String READ_TIMEOUT_MILLIS = CLIENT_PFX + "read.timeout.millis";

  /** Default read timeout. */
  public static final int DEFAULT_READ_TIMEOUT_MILLIS = 300000; //5 mins

  /** Config key for the HTTP connection timeout, in milliseconds. */
  public static final String CONNECTION_TIMEOUT_MILLIS = CLIENT_PFX + "connection.timeout.millis";

  /** Default connection timeout. */
  public static final int DEFAULT_CONNECTION_TIMEOUT_MILLIS = 60000; //60 secs

  /**
   * Get the username from config
   *
   * @return Returns lens client user name, defaults to {@link #DEFAULT_USER_NAME}
   */
  public String getUser() {
    return this.get(USER_NAME, DEFAULT_USER_NAME);
  }

  /** Sets the lens client user name. */
  public void setUser(String user) {
    this.set(USER_NAME, user);
  }

  /**
   * Returns the configured lens server url
   *
   * @return server url, defaults to {@link #DEFAULT_SERVER_BASE_URL}
   */
  public String getBaseURL() {
    return this.get(SERVER_BASE_URL, DEFAULT_SERVER_BASE_URL);
  }

  /**
   * Returns the configured lens server database client wants to access
   *
   * @return database returns database to connect, defaults to 'default'
   */
  public String getLensDatabase() {
    return this.get(DBNAME_KEY, DEFAULT_DBNAME_VALUE);
  }

  /**
   * Returns the session service path on lens server
   *
   * @return web app fragment pointing to session service, defaults to session
   */
  public String getSessionResourcePath() {
    return DEFAULT_SESSION_RESOURCE_PATH;
  }

  /**
   * Returns the query service path on lens server
   *
   * @return web app fragment pointing to query service, defaults to queryapi
   */
  public String getQueryResourcePath() {
    return DEFAULT_QUERY_RESOURCE_PATH;
  }

  /**
   * Sets the database to connect on lens server
   *
   * @param dbName database to connect to
   */
  public void setLensDatabase(String dbName) {
    this.set(DBNAME_KEY, dbName);
  }

  /** @return query status poll interval in seconds, defaults to 10 */
  public long getQueryPollInterval() {
    return this.getLong(QUERY_POLL_INTERVAL_KEY, DEFAULT_QUERY_POLL_INTERVAL);
  }

  /** @return web app fragment pointing to the metastore service */
  public String getMetastoreResourcePath() {
    return DEFAULT_METASTORE_RESOURCE_PATH;
  }

  /** Sets the lens server base URL. */
  public void setBaseUrl(String baseUrl) {
    this.set(SERVER_BASE_URL, baseUrl);
  }

  /** @return web app fragment pointing to the log service */
  public String getLogResourcePath() {
    return DEFAULT_LOG_RESOURCE_PATH;
  }

  /**
   * Builds the config key holding the implementation class of the named
   * WS filter: {@code lens.client.<filterName>.ws.filter.impl}.
   */
  public static String getWSFilterImplConfKey(String filterName) {
    return CLIENT_PFX + filterName + WS_FILTER_IMPL_SFX;
  }
}
apache-2.0
gmile/elasticsearch
src/main/java/org/elasticsearch/index/cache/IndexCache.java
3366
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.cache;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.CloseableComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.docset.DocSetCache;
import org.elasticsearch.index.cache.filter.FilterCache;
import org.elasticsearch.index.cache.query.parser.QueryParserCache;
import org.elasticsearch.index.settings.IndexSettings;

/**
 * Aggregates the per-index caches (filter cache, query-parser cache and
 * doc-set cache) behind a single component.  When a cluster service is
 * available it also listens for cluster-state changes and clears the
 * query-parser cache whenever index metadata (mappings) changes.
 */
public class IndexCache extends AbstractIndexComponent implements CloseableComponent, ClusterStateListener {

    private final FilterCache filterCache;

    private final QueryParserCache queryParserCache;

    private final DocSetCache docSetCache;

    // Optionally injected; may remain null, in which case no cluster-state
    // listening happens (see setClusterService / clusterChanged).
    private ClusterService clusterService;

    @Inject
    public IndexCache(Index index, @IndexSettings Settings indexSettings, FilterCache filterCache, QueryParserCache queryParserCache, DocSetCache docSetCache) {
        super(index, indexSettings);
        this.filterCache = filterCache;
        this.queryParserCache = queryParserCache;
        this.docSetCache = docSetCache;
    }

    /**
     * Optional injection point: when a cluster service exists, register this
     * cache as a cluster-state listener (deregistered again in {@link #close()}).
     */
    @Inject(optional = true)
    public void setClusterService(@Nullable ClusterService clusterService) {
        this.clusterService = clusterService;
        if (clusterService != null) {
            clusterService.add(this);
        }
    }

    /** @return the per-index filter cache */
    public FilterCache filter() {
        return filterCache;
    }

    /** @return the per-index doc-set cache */
    public DocSetCache docSet() {
        return this.docSetCache;
    }

    /** @return the per-index query-parser cache */
    public QueryParserCache queryParserCache() {
        return this.queryParserCache;
    }

    /**
     * Closes all delegate caches and, if registered, removes this component
     * from the cluster service's listener list.
     */
    @Override
    public void close() throws ElasticsearchException {
        filterCache.close();
        queryParserCache.close();
        docSetCache.clear("close");
        if (clusterService != null) {
            clusterService.remove(this);
        }
    }

    /**
     * Clears all delegate caches.
     *
     * @param reason free-text reason passed through for logging/diagnostics
     */
    public void clear(String reason) {
        filterCache.clear(reason);
        queryParserCache.clear();
        docSetCache.clear(reason);
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        // clear the query parser cache if the metadata (mappings) changed...
        if (event.metaDataChanged()) {
            queryParserCache.clear();
        }
    }
}
apache-2.0
placrosse/ACaZoo
test/unit/org/apache/cassandra/io/sstable/SSTableSimpleWriterTest.java
3888
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cassandra.io.sstable;

import java.io.File;

import org.apache.cassandra.dht.IPartitioner;
import org.junit.Test;

import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.marshal.IntegerType;
import org.apache.cassandra.service.StorageService;

import static org.apache.cassandra.utils.ByteBufferUtil.bytes;
import static org.apache.cassandra.utils.ByteBufferUtil.toInt;

/**
 * Integration test for {@link SSTableSimpleUnsortedWriter}: writes rows
 * (including one row opened multiple times), loads the produced SSTable back
 * into a column family, and verifies column names/values/timestamps.
 * NOTE: uses bare {@code assert} statements, so it only checks anything when
 * run with assertions enabled ({@code -ea}).
 */
public class SSTableSimpleWriterTest extends SchemaLoader
{
    @Test
    public void testSSTableSimpleUnsortedWriter() throws Exception
    {
        // INC row-reopen iterations x NBCOL columns each = expected column
        // count of the multiply-opened row ("Key10").
        final int INC = 5;
        final int NBCOL = 10;

        String keyspaceName = "Keyspace1";
        String cfname = "StandardInteger1";

        Keyspace t = Keyspace.open(keyspaceName); // make sure we create the directory
        File dir = Directories.create(keyspaceName, cfname).getDirectoryForNewSSTables();
        assert dir.exists();

        IPartitioner partitioner = StorageService.getPartitioner();
        // Buffer size 16MB; null = no compression parameters override.
        SSTableSimpleUnsortedWriter writer = new SSTableSimpleUnsortedWriter(dir, partitioner, keyspaceName, cfname, IntegerType.instance, null, 16);

        int k = 0;

        // Adding a few rows first
        for (; k < 10; ++k)
        {
            writer.newRow(bytes("Key" + k));
            writer.addColumn(bytes(1), bytes("v"), 0);
            writer.addColumn(bytes(2), bytes("v"), 0);
            writer.addColumn(bytes(3), bytes("v"), 0);
        }

        // Testing multiple opening of the same row
        // We'll write column 0, 5, 10, .., on the first row, then 1, 6, 11, ... on the second one, etc.
        for (int i = 0; i < INC; ++i)
        {
            writer.newRow(bytes("Key" + k));
            for (int j = 0; j < NBCOL; ++j)
            {
                writer.addColumn(bytes(i + INC * j), bytes("v"), 1);
            }
        }
        k++;

        // Adding a few more rows
        for (; k < 20; ++k)
        {
            writer.newRow(bytes("Key" + k));
            writer.addColumn(bytes(1), bytes("v"), 0);
            writer.addColumn(bytes(2), bytes("v"), 0);
            writer.addColumn(bytes(3), bytes("v"), 0);
        }

        writer.close();

        // Now add that newly created files to the column family
        ColumnFamilyStore cfs = t.getColumnFamilyStore(cfname);
        cfs.loadNewSSTables();

        // Check we get expected results: the multiply-opened row must contain
        // the union of all openings, with contiguous column names 0..INC*NBCOL-1.
        ColumnFamily cf = Util.getColumnFamily(t, Util.dk("Key10"), cfname);
        assert cf.getColumnCount() == INC * NBCOL : "expecting " + (INC * NBCOL) + " columns, got " + cf.getColumnCount();
        int i = 0;
        for (Column c : cf)
        {
            assert toInt(c.name()) == i : "Column name should be " + i + ", got " + toInt(c.name());
            assert c.value().equals(bytes("v"));
            assert c.timestamp() == 1;
            ++i;
        }

        // A normally-written row keeps its original 3 columns.
        cf = Util.getColumnFamily(t, Util.dk("Key19"), cfname);
        assert cf.getColumnCount() == 3 : "expecting 3 columns, got " + cf.getColumnCount();
    }
}
apache-2.0
romankagan/DDBWorkbench
java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/staticImportMethod/before2.java
380
// "Static Import Method..." "true" package p; public class X { public void test() throws Exception { assertMe("", my<caret>EqualTo("")); } <V> void assertMe(V v, M<V> m) { } } class M<T> { } class FFF { public static <T> M<T> myEqualTo(T operand) { return null; } } class LLL { public static M<String> myEqualTo(String string) { return null; } }
apache-2.0
kishorvpatil/incubator-storm
storm-client/src/jvm/org/apache/storm/topology/base/BaseRichSpout.java
1320
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.  The ASF licenses this file to you under the Apache License, Version
 * 2.0 (the "License"); you may not use this file except in compliance with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the License for the specific language governing permissions
 * and limitations under the License.
 */

package org.apache.storm.topology.base;

import org.apache.storm.topology.IRichSpout;

/**
 * Adapter base class for spouts: supplies empty implementations of the
 * optional {@link IRichSpout} lifecycle and tuple-acknowledgement callbacks
 * so concrete spouts only need to override the methods they actually use.
 */
public abstract class BaseRichSpout extends BaseComponent implements IRichSpout {
    /** No-op; override to release resources when the spout is shut down. */
    @Override
    public void close() {
    }

    /** No-op; override to react when the spout is (re)activated. */
    @Override
    public void activate() {
    }

    /** No-op; override to react when the spout is deactivated. */
    @Override
    public void deactivate() {
    }

    /** No-op; override to handle acknowledgement of the tuple identified by {@code msgId}. */
    @Override
    public void ack(Object msgId) {
    }

    /** No-op; override to handle failure of the tuple identified by {@code msgId}. */
    @Override
    public void fail(Object msgId) {
    }
}
apache-2.0
Cloudyle/aries
spi-fly/spi-fly-weaver/src/main/java/org/apache/aries/spifly/weaver/TCCLSetterVisitor.java
10343
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.aries.spifly.weaver;

import java.util.Arrays;
import java.util.HashSet;
import java.util.ServiceLoader;
import java.util.Set;

import org.apache.aries.spifly.Util;
import org.apache.aries.spifly.WeavingData;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.GeneratorAdapter;
import org.objectweb.asm.commons.Method;

/**
 * This class implements an ASM ClassVisitor which puts the appropriate ThreadContextClassloader
 * calls around applicable method invocations. It does the actual bytecode weaving.
 * <p>
 * For every {@link WeavingData} entry that matches an INVOKESTATIC call in the
 * visited class, the call is wrapped in store/fix/restore context-classloader
 * logic and a private static {@code $$FCCL$$...} helper method is synthesized
 * (see {@link #visitEnd()}).
 */
public class TCCLSetterVisitor extends ClassVisitor implements Opcodes {
    private static final Type CLASSLOADER_TYPE = Type.getType(ClassLoader.class);
    // Prefix of the synthesized per-target static helper methods.
    private static final String GENERATED_METHOD_NAME = "$$FCCL$$";

    private static final Type UTIL_CLASS = Type.getType(Util.class);

    private static final Type CLASS_TYPE = Type.getType(Class.class);

    private static final Type String_TYPE = Type.getType(String.class);

    // ASM Type of the class currently being woven; receiver of the generated helpers.
    private final Type targetClass;
    // Descriptions of the SPI-consumer call sites that should be wrapped.
    private final Set<WeavingData> weavingData;

    // Set to true when the weaving code has changed the client such that an additional import
    // (to the Util.class.getPackage()) is needed.
    private boolean additionalImportRequired = false;

    // This field is true when the class was woven
    private boolean woven = false;

    public TCCLSetterVisitor(ClassVisitor cv, String className, Set<WeavingData> weavingData) {
        super(Opcodes.ASM4, cv);
        // Build the internal-form descriptor, e.g. "Lcom/foo/Bar;".
        this.targetClass = Type.getType("L" + className.replace('.', '/') + ";");
        this.weavingData = weavingData;
    }

    /** @return true when at least one call site in this class was woven */
    public boolean isWoven() {
        return woven;
    }

    @Override
    public MethodVisitor visitMethod(int access, String name, String desc,
            String signature, String[] exceptions) {
        // Wrap every method so its invocations can be inspected and rewritten.
        MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
        return new TCCLSetterMethodVisitor(mv, access, name, desc);
    }

    @Override
    public void visitEnd() {
        if (!woven) {
            // if this class wasn't woven, then don't add the synthesized method either.
            super.visitEnd();
            return;
        }

        // Add generated static method

        // De-duplicate: several weaving entries may map to the same helper name.
        Set<String> methodNames = new HashSet<String>();

        for (WeavingData wd : weavingData) {
            /* Equivalent to:
             * private static void $$FCCL$$<className>$<methodName>(Class<?> cls) {
             *   Util.fixContextClassloader("java.util.ServiceLoader", "load", cls, WovenClass.class.getClassLoader());
             * }
             */
            String methodName = getGeneratedMethodName(wd);
            if (methodNames.contains(methodName))
                continue;
            methodNames.add(methodName);

            Method method = new Method(methodName, Type.VOID_TYPE, new Type[] {CLASS_TYPE});

            GeneratorAdapter mv = new GeneratorAdapter(cv.visitMethod(ACC_PRIVATE + ACC_STATIC,
                    methodName, method.getDescriptor(), null, null),
                    ACC_PRIVATE + ACC_STATIC, methodName, method.getDescriptor());

            //Load the strings, method parameter and target
            mv.visitLdcInsn(wd.getClassName());
            mv.visitLdcInsn(wd.getMethodName());
            mv.loadArg(0);
            mv.visitLdcInsn(targetClass);

            //Change the class on the stack into a classloader
            mv.invokeVirtual(CLASS_TYPE, new Method("getClassLoader", CLASSLOADER_TYPE, new Type[0]));

            //Call our util method
            mv.invokeStatic(UTIL_CLASS, new Method("fixContextClassloader", Type.VOID_TYPE,
                    new Type[] {String_TYPE, String_TYPE, CLASS_TYPE, CLASSLOADER_TYPE}));

            mv.returnValue();
            mv.endMethod();
        }

        super.visitEnd();
    }

    /**
     * Builds the (unique, valid-identifier) helper-method name for a weaving
     * entry: {@code $$FCCL$$<class>$<method>[$<argClass>...]} with dots in
     * class names replaced by '#'.
     */
    private String getGeneratedMethodName(WeavingData wd) {
        StringBuilder name = new StringBuilder(GENERATED_METHOD_NAME);
        name.append(wd.getClassName().replace('.', '#'));
        name.append("$");
        name.append(wd.getMethodName());
        if (wd.getArgClasses() != null) {
            for (String cls : wd.getArgClasses()) {
                name.append("$");
                name.append(cls.replace('.', '#'));
            }
        }
        return name.toString();
    }

    private class TCCLSetterMethodVisitor extends GeneratorAdapter {
        Type lastLDCType;

        public TCCLSetterMethodVisitor(MethodVisitor mv, int access, String name, String descriptor) {
            super(Opcodes.ASM4, mv, access, name, descriptor);
        }

        /**
         * Store the last LDC call. When ServiceLoader.load(Class cls) is called
         * the last LDC call before the ServiceLoader.load() visitMethodInsn call
         * contains the class being passed in. We need to pass this class to $$FCCL$$ as well
         * so we can copy the value found in here.
         */
        @Override
        public void visitLdcInsn(Object cst) {
            if (cst instanceof Type) {
                lastLDCType = ((Type) cst);
            }
            super.visitLdcInsn(cst);
        }

        /**
         * Wrap selected method calls with
         *  Util.storeContextClassloader();
         *  $$FCCL$$(&lt;class&gt;)
         *  Util.restoreContextClassloader();
         * The restore runs on both the normal and the exceptional path
         * (a finally is emitted as a catch-all block plus a jump).
         */
        @Override
        public void visitMethodInsn(int opcode, String owner, String name, String desc) {
            WeavingData wd = findWeavingData(owner, name, desc);
            if (opcode == INVOKESTATIC && wd != null) {
                additionalImportRequired = true;
                woven = true;

                Label startTry = newLabel();
                Label endTry = newLabel();

                //start try block
                visitTryCatchBlock(startTry, endTry, endTry, null);
                mark(startTry);

                // Add: Util.storeContextClassloader();
                invokeStatic(UTIL_CLASS, new Method("storeContextClassloader", Type.VOID_TYPE, new Type[0]));

                // Add: MyClass.$$FCCL$$<classname>$<methodname>(<class>);
                if (ServiceLoader.class.getName().equals(wd.getClassName()) &&
                    "load".equals(wd.getMethodName()) &&
                    (wd.getArgClasses() == null || Arrays.equals(new String [] {Class.class.getName()}, wd.getArgClasses()))) {
                    // ServiceLoader.load() is a special case because it's a general-purpose service loader,
                    // therefore, the target class it the class being passed in to the ServiceLoader.load()
                    // call itself.
                    mv.visitLdcInsn(lastLDCType);
                } else {
                    // In any other case, we're not dealing with a general-purpose service loader, but rather
                    // with a specific one, such as DocumentBuilderFactory.newInstance(). In that case the
                    // target class is the class that is being invoked on (i.e. DocumentBuilderFactory).
                    Type type = Type.getObjectType(owner);
                    mv.visitLdcInsn(type);
                }
                invokeStatic(targetClass, new Method(getGeneratedMethodName(wd), Type.VOID_TYPE, new Type[] {CLASS_TYPE}));

                //Call the original instruction
                super.visitMethodInsn(opcode, owner, name, desc);

                //If no exception then go to the finally (finally blocks are a catch block with a jump)
                Label afterCatch = newLabel();
                goTo(afterCatch);

                //start the catch
                mark(endTry);
                //Run the restore method then throw on the exception
                invokeStatic(UTIL_CLASS, new Method("restoreContextClassloader", Type.VOID_TYPE, new Type[0]));
                throwException();

                //start the finally
                mark(afterCatch);
                //Run the restore and continue
                invokeStatic(UTIL_CLASS, new Method("restoreContextClassloader", Type.VOID_TYPE, new Type[0]));
            } else {
                super.visitMethodInsn(opcode, owner, name, desc);
            }
        }

        /**
         * Looks up the weaving entry matching the given call site (owner class,
         * method name and argument types), or null when the call is not woven.
         * An entry with null argClasses matches any argument list.
         */
        private WeavingData findWeavingData(String owner, String methodName, String methodDesc) {
            owner = owner.replace('/', '.');

            Type[] argTypes = Type.getArgumentTypes(methodDesc);
            String [] argClassNames = new String[argTypes.length];
            for (int i = 0; i < argTypes.length; i++) {
                argClassNames[i] = argTypes[i].getClassName();
            }

            for (WeavingData wd : weavingData) {
                if (wd.getClassName().equals(owner) &&
                    wd.getMethodName().equals(methodName) &&
                    (wd.getArgClasses() != null ? Arrays.equals(argClassNames, wd.getArgClasses()) : true)) {
                    return wd;
                }
            }
            return null;
        }
    }

    /** @return true when weaving added a reference to Util's package, requiring an extra import */
    public boolean additionalImportRequired() {
        return additionalImportRequired;
    }
}
apache-2.0
ingokegel/intellij-community
platform/testRunner/src/com/intellij/execution/testframework/export/ExportTestResultsForm.java
7736
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.testframework.export;

import com.intellij.execution.ExecutionBundle;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.ui.TextComponentAccessor;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.UserActivityListener;
import com.intellij.ui.UserActivityWatcher;
import com.intellij.util.EventDispatcher;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.DocumentEvent;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;

/**
 * Swing form backing the "Export Test Results" dialog: lets the user pick an
 * export format (raw XML, the bundled XSL template, or a custom XSL template),
 * an output folder/file name, and whether to open the result afterwards.
 * <p>
 * Fields without initializers (radio buttons, text fields, panel) are bound by
 * the UI designer form — they are injected before the constructor body runs.
 */
public class ExportTestResultsForm {
  private JRadioButton myXmlRb;
  private JRadioButton myBundledTemplateRb;
  private TextFieldWithBrowseButton myCustomTemplateField;
  private TextFieldWithBrowseButton myFolderField;
  private JPanel myContentPane;
  private JLabel myOutputFolderLabel;
  private JRadioButton myCustomTemplateRb;
  private JTextField myFileNameField;
  private JLabel myMessageLabel;
  private JCheckBox myOpenExportedFileCb;

  // Fires a ChangeEvent on any user activity in the form (see the watcher below).
  private final EventDispatcher<ChangeListener> myEventDispatcher = EventDispatcher.create(ChangeListener.class);

  /**
   * Wires listeners and seeds the form from the persisted configuration.
   *
   * @param config          persisted export settings; determines the initially selected format
   * @param defaultFileName initial value of the file-name field
   * @param defaultFolder   initial value of the output-folder field
   */
  public ExportTestResultsForm(ExportTestResultsConfiguration config, String defaultFileName, @NlsSafe String defaultFolder) {
    // All three format radio buttons share one listener: switching formats
    // enables/disables the custom-template field and rewrites the file extension.
    ActionListener listener = new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        updateOnFormatChange();
      }
    };
    myXmlRb.addActionListener(listener);
    myBundledTemplateRb.addActionListener(listener);
    myCustomTemplateRb.addActionListener(listener);

    myOutputFolderLabel.setLabelFor(myFolderField.getChildComponent());
    myFileNameField.setText(defaultFileName);
    // Custom-template chooser only accepts .xsl / .xslt files.
    myCustomTemplateField.addBrowseFolderListener(ExecutionBundle.message("export.test.results.custom.template.chooser.title"), null, null,
                                                  new FileChooserDescriptor(true, false, false, false, false, false) {
                                                    @Override
                                                    public boolean isFileSelectable(VirtualFile file) {
                                                      return "xsl".equalsIgnoreCase(file.getExtension()) || "xslt".equalsIgnoreCase(file.getExtension());
                                                    }
                                                  }, TextComponentAccessor.TEXT_FIELD_WHOLE_TEXT);
    myFolderField.addBrowseFolderListener(ExecutionBundle.message("export.test.results.output.folder.chooser.title"), null, null,
                                          FileChooserDescriptorFactory.createSingleFolderDescriptor(),
                                          TextComponentAccessor.TEXT_FIELD_WHOLE_TEXT);
    // Keep the "open in browser/editor" checkbox label in sync with the file name's extension.
    myFileNameField.getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(@NotNull DocumentEvent e) {
        updateOpenInLabel();
      }
    });
    // Any user activity anywhere on the form notifies external change listeners
    // (used by the dialog to re-run validation).
    UserActivityWatcher watcher = new UserActivityWatcher();
    watcher.register(myContentPane);
    watcher.addUserActivityListener(new UserActivityListener() {
      @Override
      public void stateChanged() {
        myEventDispatcher.getMulticaster().stateChanged(new ChangeEvent(this));
      }
    });
    myMessageLabel.setIcon(UIUtil.getBalloonWarningIcon());

    // Select the radio button matching the persisted format and focus it.
    JRadioButton b;
    if (config.getExportFormat() == ExportTestResultsConfiguration.ExportFormat.Xml) {
      b = myXmlRb;
    }
    else if (config.getExportFormat() == ExportTestResultsConfiguration.ExportFormat.BundledTemplate) {
      b = myBundledTemplateRb;
    }
    else {
      b = myCustomTemplateRb;
    }
    b.setSelected(true);
    IdeFocusManager.findInstanceByComponent(myContentPane).requestFocus(b, true);
    myFolderField.setText(defaultFolder);
    myCustomTemplateField.setText(FileUtil.toSystemDependentName(StringUtil.notNullize(config.getUserTemplatePath())));
    myOpenExportedFileCb.setSelected(config.isOpenResults());

    updateOnFormatChange();
    updateOpenInLabel();
  }

  // Switches the checkbox label between "open in browser" and "open in editor"
  // depending on whether the chosen file name looks like an HTML file.
  private void updateOpenInLabel() {
    myOpenExportedFileCb.setText(ExecutionBundle.message(
      shouldOpenInBrowser(myFileNameField.getText()) ? "export.test.results.open.browser" : "export.test.results.open.editor"));
  }

  /** Returns true if the given file name should be opened in a browser (ends with .html/.htm). */
  public static boolean shouldOpenInBrowser(String filename) {
    return StringUtil.isNotEmpty(filename) && (filename.endsWith(".html") || filename.endsWith(".htm"));
  }

  // Reacts to a format switch: enables the custom-template field only for the
  // user-template format, and replaces the file name's extension with the
  // format's default one (everything after the last '.' is swapped).
  private void updateOnFormatChange() {
    if (getExportFormat() == ExportTestResultsConfiguration.ExportFormat.UserTemplate) {
      myCustomTemplateField.setEnabled(true);
      IdeFocusManager.findInstanceByComponent(myContentPane).requestFocus(myCustomTemplateField.getChildComponent(), true);
    }
    else {
      myCustomTemplateField.setEnabled(false);
    }
    String filename = myFileNameField.getText();
    if (filename != null && filename.indexOf('.') != -1) {
      myFileNameField.setText(filename.substring(0, filename.lastIndexOf('.') + 1) + getExportFormat().getDefaultExtension());
    }
  }

  /** Writes the form's current state back into the configuration (paths stored system-independent). */
  public void apply(ExportTestResultsConfiguration config) {
    config.setExportFormat(getExportFormat());
    config.setUserTemplatePath(FileUtil.toSystemIndependentName(myCustomTemplateField.getText()));
    config.setOutputFolder(FileUtil.toSystemIndependentName(myFolderField.getText()));
    config.setOpenResults(myOpenExportedFileCb.isSelected());
  }

  // Maps the selected radio button to the export format enum.
  private ExportTestResultsConfiguration.ExportFormat getExportFormat() {
    if (myXmlRb.isSelected()) return ExportTestResultsConfiguration.ExportFormat.Xml;
    if (myBundledTemplateRb.isSelected()) return ExportTestResultsConfiguration.ExportFormat.BundledTemplate;
    return ExportTestResultsConfiguration.ExportFormat.UserTemplate;
  }

  public JComponent getContentPane() {
    return myContentPane;
  }

  /** Registers a listener notified on any user activity in the form. */
  public void addChangeListener(ChangeListener changeListener) {
    myEventDispatcher.addListener(changeListener);
  }

  /**
   * Validates the form's state.
   *
   * @return a user-visible error message, or {@code null} if the form is valid
   */
  @Nullable
  public @NlsContexts.Label String validate() {
    if (getExportFormat() == ExportTestResultsConfiguration.ExportFormat.UserTemplate) {
      if (StringUtil.isEmpty(myCustomTemplateField.getText())) {
        return ExecutionBundle.message("export.test.results.custom.template.path.empty");
      }
      File file = new File(myCustomTemplateField.getText());
      if (!file.isFile()) {
        return ExecutionBundle.message("export.test.results.custom.template.not.found", file.getAbsolutePath());
      }
    }
    if (StringUtil.isEmpty(myFileNameField.getText())) {
      return ExecutionBundle.message("export.test.results.output.filename.empty");
    }
    if (StringUtil.isEmpty(myFolderField.getText())) {
      return ExecutionBundle.message("export.test.results.output.path.empty");
    }
    return null;
  }

  /** Shows (non-null message) or hides (null) the warning label. */
  public void showMessage(@Nullable @NlsContexts.Label String message) {
    myMessageLabel.setText(message);
    myMessageLabel.setVisible(message != null);
  }

  public JComponent getPreferredFocusedComponent() {
    return myFileNameField;
  }

  public String getFileName() {
    return myFileNameField.getText();
  }
}
apache-2.0
xli/gocd
server/test/unit/com/thoughtworks/go/server/websocket/AgentStub.java
932
/*
 * Copyright 2015 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.websocket;

import com.thoughtworks.go.websocket.Message;

import java.util.ArrayList;
import java.util.List;

/**
 * Test double for {@link Agent} that records every message handed to
 * {@link #send(Message)} instead of transmitting it, so tests can inspect
 * the outbound traffic directly.
 */
public class AgentStub implements Agent {
    /** Messages received via {@link #send(Message)}, in arrival order. Public so tests can assert on it. */
    public List<Message> messages = new ArrayList<>();

    @Override
    public void send(Message message) {
        this.messages.add(message);
    }
}
apache-2.0
apache/drill
exec/jdbc/src/test/java/org/apache/drill/jdbc/test/Drill2463GetNullsFailedWithAssertionsBugTest.java
4692
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.jdbc.test;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertTrue;
import static org.hamcrest.MatcherAssert.assertThat;

import org.apache.drill.categories.JdbcTest;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.drill.jdbc.JdbcTestBase;
import org.junit.experimental.categories.Category;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

/**
 * Regression test for DRILL-2463: getting NULL values through the various
 * typed ResultSet getters must not fail (with assertions enabled), and
 * {@code wasNull()} must report {@code true} afterwards.
 */
@Category(JdbcTest.class)
public class Drill2463GetNullsFailedWithAssertionsBugTest extends JdbcTestBase {
  private static Connection connection;
  private static Statement statement;

  @BeforeClass
  public static void setUpConnection() throws SQLException {
    // (Note: Can't use JdbcTest's connect(...) because JdbcTest closes
    // Connection--and other JDBC objects--on test method failure, but this test
    // class uses some objects across methods.)
    connection = connect();
    statement = connection.createStatement();
  }

  @AfterClass
  public static void tearDownConnection() throws SQLException {
    // Close the shared Statement as well as the Connection, and guard against
    // a failed setUpConnection() leaving either field null (previously a setUp
    // failure caused an NPE here that masked the original error).
    if (statement != null) {
      statement.close();
    }
    if (connection != null) {
      connection.close();
    }
  }

  /**
   * Runs a one-row query whose single column is a NULL cast to the given SQL
   * type, and positions the cursor on that row.
   */
  private static ResultSet queryNullOfType(String sqlType) throws SQLException {
    final ResultSet rs = statement.executeQuery(
        "SELECT CAST( NULL AS " + sqlType + " ) FROM INFORMATION_SCHEMA.CATALOGS" );
    assertTrue( rs.next() );
    return rs;
  }

  // Test primitive types vs. non-primitive types:

  @Test
  public void testGetPrimitiveTypeNullAsOwnType() throws Exception {
    final ResultSet rs = queryNullOfType( "INTEGER" );
    // Primitive getters return the type's zero value for SQL NULL.
    assertThat( "getInt(...) for NULL", rs.getInt( 1 ), equalTo( 0 ) );
    assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
  }

  @Test
  public void testGetPrimitiveTypeNullAsObject() throws Exception {
    final ResultSet rs = queryNullOfType( "INTEGER" );
    assertThat( "getObject(...) for NULL", rs.getObject( 1 ), nullValue() );
    assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
  }

  @Test
  public void testGetNonprimitiveTypeNullAsOwnType() throws Exception {
    final ResultSet rs = queryNullOfType( "VARCHAR" );
    assertThat( "getString(...) for NULL", rs.getString( 1 ), nullValue() );
    assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
  }

  // Test a few specifics

  @Test
  public void testGetBooleanNullAsOwnType() throws Exception {
    final ResultSet rs = queryNullOfType( "BOOLEAN" );
    assertThat( "getBoolean(...) for NULL", rs.getBoolean( 1 ), equalTo( false ) );
    assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
  }

  @Test
  public void testGetBooleanNullAsObject() throws Exception {
    final ResultSet rs = queryNullOfType( "BOOLEAN" );
    assertThat( "getObject(...) for NULL", rs.getObject( 1 ), nullValue() );
    assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
  }

  @Test
  public void testGetIntegerNullAsOwnType() throws Exception {
    final ResultSet rs = queryNullOfType( "INTEGER" );
    assertThat( "getInt(...) for NULL", rs.getInt( 1 ), equalTo( 0 ) );
    assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
  }

  @Test
  public void testGetIntegerNullAsObject() throws Exception {
    final ResultSet rs = queryNullOfType( "INTEGER" );
    assertThat( "getObject(...) for NULL", rs.getObject( 1 ), nullValue() );
    assertThat( "wasNull", rs.wasNull(), equalTo( true ) );
  }
}
apache-2.0
cementsuf/hapi-fhir
hapi-fhir-base/src/main/java/ca/uhn/fhir/model/api/ExtensionDt.java
4707
package ca.uhn.fhir.model.api;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2015 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseExtension;

import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.DatatypeDef;
import ca.uhn.fhir.model.primitive.StringDt;

/**
 * Represents a single FHIR extension: a URL identifying its meaning plus
 * either a datatype value or (for complex extensions) child extensions.
 */
@DatatypeDef(name = "Extension")
public class ExtensionDt extends BaseIdentifiableElement implements ICompositeDatatype, IBaseExtension<ExtensionDt, IDatatype> {

	// True if this is a "modifier" extension (one that changes the meaning of the containing element)
	private boolean myModifier;

	@Child(name="url", type=StringDt.class, order=0, min=1, max=1)
	private StringDt myUrl;

	@Child(name = "value", type = IDatatype.class, order = 1, min = 0, max = 1)
	private IBaseDatatype myValue;

	public ExtensionDt() {
	}

	public ExtensionDt(boolean theIsModifier) {
		myModifier = theIsModifier;
	}

	/**
	 * @param theIsModifier whether this is a modifier extension
	 * @param theUrl        the extension URL; must not be empty
	 * @throws IllegalArgumentException if {@code theUrl} is empty
	 */
	public ExtensionDt(boolean theIsModifier, String theUrl) {
		Validate.notEmpty(theUrl, "URL must be populated");

		myModifier = theIsModifier;
		myUrl = new StringDt(theUrl);
	}

	/**
	 * @param theIsModifier whether this is a modifier extension
	 * @param theUrl        the extension URL; must not be empty
	 * @param theValue      the extension value; must not be null
	 * @throws IllegalArgumentException if {@code theUrl} is empty or {@code theValue} is null
	 */
	public ExtensionDt(boolean theIsModifier, String theUrl, IBaseDatatype theValue) {
		Validate.notEmpty(theUrl, "URL must be populated");
		Validate.notNull(theValue, "Value must not be null");

		myModifier = theIsModifier;
		myUrl = new StringDt(theUrl);
		myValue=theValue;
	}

	/**
	 * Returns the URL for this extension.
	 * <p>
	 * Note that before HAPI 0.9 this method returned a {@link StringDt} but as of
	 * HAPI 0.9 this method returns a plain string. This was changed because it does not make sense to use a StringDt here
	 * since the URL itself can not contain extensions and it was therefore misleading.
	 * </p>
	 */
	public String getUrl() {
		return myUrl != null ? myUrl.getValue() : null;
	}

	/**
	 * Retained for backward compatibility
	 *
	 * @see ExtensionDt#getUrl()
	 */
	public String getUrlAsString() {
		return getUrl();
	}

	/**
	 * Returns the value of this extension, if one exists.
	 * <p>
	 * Note that if this extension contains extensions (instead of a datatype) then <b>this method will return null</b>. In that case, you must use {@link #getUndeclaredExtensions()} and
	 * {@link #getUndeclaredModifierExtensions()} to retrieve the child extensions.
	 * </p>
	 */
	public IBaseDatatype getValue() {
		return myValue;
	}

	/**
	 * Returns the value of this extension, casted to a primitive datatype. This is a convenience method which should only be called if you are sure that the value for this particular extension will
	 * be a primitive.
	 * <p>
	 * Note that if this extension contains extensions (instead of a datatype) then <b>this method will return null</b>. In that case, you must use {@link #getUndeclaredExtensions()} and
	 * {@link #getUndeclaredModifierExtensions()} to retrieve the child extensions.
	 * </p>
	 *
	 * @throws ClassCastException
	 *            If the value of this extension is not a primitive datatype
	 */
	public IPrimitiveDatatype<?> getValueAsPrimitive() {
		if (!(getValue() instanceof IPrimitiveDatatype)) {
			// Bug fix: the message previously reported getClass().getCanonicalName(),
			// which is always ExtensionDt itself — report the actual value's type
			// (or "null" when no value is set, which also avoids an NPE here).
			throw new ClassCastException("Extension with URL[" + myUrl + "] can not be cast to primitive type, type is: "
					+ (getValue() != null ? getValue().getClass().getCanonicalName() : "null"));
		}
		return (IPrimitiveDatatype<?>) getValue();
	}

	@Override
	public boolean isEmpty() {
		return super.isBaseEmpty() && (myValue == null || myValue.isEmpty());
	}

	public boolean isModifier() {
		return myModifier;
	}

	public void setModifier(boolean theModifier) {
		myModifier = theModifier;
	}

	// NOTE(review): passing null leaves the existing URL unchanged rather than
	// clearing it — preserved for backward compatibility, but verify this is intended.
	public ExtensionDt setUrl(String theUrl) {
		myUrl = theUrl != null ? new StringDt(theUrl) : myUrl;
		return this;
	}

	public ExtensionDt setUrl(StringDt theUrl) {
		myUrl = theUrl;
		return this;
	}

	public ExtensionDt setValue(IBaseDatatype theValue) {
		myValue = theValue;
		return this;
	}

	@Override
	public <T extends IElement> List<T> getAllPopulatedChildElementsOfType(Class<T> theType) {
		// Extensions expose their children via the undeclared-extension APIs instead.
		return new ArrayList<T>();
	}

	@Override
	public List<ExtensionDt> getExtension() {
		return getAllUndeclaredExtensions();
	}

}
apache-2.0
apache/drill
exec/java-exec/src/test/java/org/apache/drill/exec/util/TestQueryMemoryAlloc.java
6419
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.util;

import static org.junit.Assert.assertEquals;

import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.test.BaseDirTestWatcher;
import org.apache.drill.test.DrillTest;
import org.apache.drill.test.OperatorFixture;
import org.junit.Rule;
import org.junit.Test;

/**
 * Tests for {@link MemoryAllocationUtilities}: per-query memory is computed as
 * max(configured per-query floor, configured percent of total direct memory),
 * so the floor dominates for small nodes and the percent for large ones.
 */
public class TestQueryMemoryAlloc extends DrillTest {

  public static final long ONE_MB = 1024 * 1024;
  public static final long ONE_GB = 1024L * ONE_MB;

  @Rule
  public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();

  // Floor = 2 GB, percent = 5%: crossover is at 40 GB of direct memory.
  @Test
  public void testDefaultOptions() throws Exception {
    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
    builder.systemOption(ExecConstants.PERCENT_MEMORY_PER_QUERY_KEY, 0.05);
    builder.systemOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY, 2 * ONE_GB);

    try (OperatorFixture fixture = builder.build()) {
      final OptionManager optionManager = fixture.getOptionManager();
      // Set as session options too, since computeQueryMemory reads the option manager.
      optionManager.setLocalOption(ExecConstants.PERCENT_MEMORY_PER_QUERY_KEY, 0.05);
      optionManager.setLocalOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY, 2 * ONE_GB);

      // Out-of-box memory, use query memory per node as floor.

      long mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 8 * ONE_GB);
      assertEquals(2 * ONE_GB, mem);

      // Up to 40 GB, query memory dominates.

      mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 40 * ONE_GB);
      assertEquals(2 * ONE_GB, mem);

      // After 40 GB, the percent dominates

      mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 100 * ONE_GB);
      assertEquals(5 * ONE_GB, mem);
    }
  }

  // NOTE(review): despite the name, this test configures the SAME floor (2 GB)
  // and percent (5%) as testDefaultOptions — likely a copy-paste from it.
  // The assertions are self-consistent (5% of 60 GB = 3 GB > 2 GB floor), but
  // verify whether a genuinely custom floor was intended here.
  @Test
  public void testCustomFloor() throws Exception {
    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
    builder.systemOption(ExecConstants.PERCENT_MEMORY_PER_QUERY_KEY, 0.05);
    builder.systemOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY, 2 * ONE_GB);

    try (OperatorFixture fixture = builder.build()) {
      final OptionManager optionManager = fixture.getOptionManager();
      optionManager.setLocalOption(ExecConstants.PERCENT_MEMORY_PER_QUERY_KEY, 0.05);
      optionManager.setLocalOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY, 2 * ONE_GB);

      // Out-of-box memory, use query memory per node as floor.

      long mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 8 * ONE_GB);
      assertEquals(2 * ONE_GB, mem);

      // At 60 GB the percent already dominates: 5% of 60 GB = 3 GB > 2 GB floor.

      mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 60 * ONE_GB);
      assertEquals(3 * ONE_GB, mem);

      // Likewise at 100 GB: 5% of 100 GB = 5 GB.

      mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 100 * ONE_GB);
      assertEquals(5 * ONE_GB, mem);
    }
  }

  // Floor = 2 GB, percent = 10%: crossover moves down to 20 GB.
  @Test
  public void testCustomPercent() throws Exception {
    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
    builder.systemOption(ExecConstants.PERCENT_MEMORY_PER_QUERY_KEY, 0.10);
    builder.systemOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY, 2 * ONE_GB);

    try (OperatorFixture fixture = builder.build()) {
      final OptionManager optionManager = fixture.getOptionManager();
      optionManager.setLocalOption(ExecConstants.PERCENT_MEMORY_PER_QUERY_KEY, 0.10);
      optionManager.setLocalOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY, 2 * ONE_GB);

      // Out-of-box memory, use query memory per node as floor.

      long mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 8 * ONE_GB);
      assertEquals(2 * ONE_GB, mem);

      // Up to 20 GB, query memory dominates.

      mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 20 * ONE_GB);
      assertEquals(2 * ONE_GB, mem);

      // After 20 GB, the percent dominates

      mem = MemoryAllocationUtilities.computeQueryMemory(fixture.config(), optionManager, 30 * ONE_GB);
      assertEquals(3 * ONE_GB, mem);
    }
  }

  /**
   * Test with default options, various memory configs.
   * Since we can't change the actual CPUs on this node, use an
   * option to specify the number (rather than the usual 70% of
   * actual cores.)
   *
   * @throws Exception
   */
  @Test
  public void testOpMemory() throws Exception {
    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
    builder.systemOption(ExecConstants.CPU_LOAD_AVERAGE_KEY, 0.7);
    builder.systemOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, 10);
    builder.systemOption(ExecConstants.MIN_MEMORY_PER_BUFFERED_OP_KEY, 40 * ONE_MB);

    try (OperatorFixture fixture = builder.build()) {
      final OptionManager optionManager = fixture.getOptionManager();
      optionManager.setLocalOption(ExecConstants.CPU_LOAD_AVERAGE_KEY, 0.7);
      optionManager.setLocalOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, 10);
      optionManager.setLocalOption(ExecConstants.MIN_MEMORY_PER_BUFFERED_OP_KEY, 40 * ONE_MB);

      // Enough memory to go above configured minimum.
      // Query memory is split evenly across (width per node) * (buffered ops).

      long opMinMem = MemoryAllocationUtilities.computeOperatorMemory(optionManager, 4 * ONE_GB, 2);
      assertEquals(4 * ONE_GB / 10 / 2, opMinMem);

      // Too little memory per operator. Use configured minimum.

      opMinMem = MemoryAllocationUtilities.computeOperatorMemory(optionManager, ONE_GB, 100);
      assertEquals(40 * ONE_MB, opMinMem);
    }
  }
}
apache-2.0
cchang738/parquet-mr
parquet-column/src/main/java/org/apache/parquet/column/values/deltalengthbytearray/DeltaLengthByteArrayValuesReader.java
2330
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.parquet.column.values.deltalengthbytearray;

import static org.apache.parquet.Log.DEBUG;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.parquet.Log;
import org.apache.parquet.column.values.ValuesReader;
import org.apache.parquet.column.values.delta.DeltaBinaryPackingValuesReader;
import org.apache.parquet.io.api.Binary;

/**
 * Reads binary data written by {@link DeltaLengthByteArrayValuesWriter}:
 * a delta-binary-packed block of lengths followed by the concatenated
 * byte-array contents.
 *
 * @author Aniket Mokashi
 *
 */
public class DeltaLengthByteArrayValuesReader extends ValuesReader {

  private static final Log LOG = Log.getLog(DeltaLengthByteArrayValuesReader.class);

  // Decodes the per-value lengths that precede the raw bytes in the page.
  private ValuesReader lengthReader;
  // The page buffer; `offset` is the position of the next value's bytes.
  private ByteBuffer in;
  private int offset;

  public DeltaLengthByteArrayValuesReader() {
    this.lengthReader = new DeltaBinaryPackingValuesReader();
  }

  /**
   * Initializes the reader over a page: first lets the length reader consume
   * its block, then positions this reader at the start of the raw bytes.
   */
  @Override
  public void initFromPage(int valueCount, ByteBuffer in, int offset)
      throws IOException {
    if (DEBUG) LOG.debug("init from page at offset "+ offset + " for length " + (in.limit() - offset));
    lengthReader.initFromPage(valueCount, in, offset);
    this.in = in;
    // The raw bytes begin where the length block ends.
    this.offset = lengthReader.getNextOffset();
  }

  /** Returns the next value as a slice of the page buffer and advances past it. */
  @Override
  public Binary readBytes() {
    final int length = lengthReader.readInteger();
    final Binary value = Binary.fromConstantByteBuffer(in, offset, length);
    offset += length;
    return value;
  }

  /** Skips the next value by consuming its length and advancing the data offset. */
  @Override
  public void skip() {
    offset += lengthReader.readInteger();
  }
}
apache-2.0
diy1/error-prone-aspirator
core/src/test/resources/com/google/errorprone/bugpatterns/CollectionIncompatibleTypeClassCast.java
863
/* * Copyright 2014 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import java.util.HashMap; /** * This is a regression test for Issue 222. */ public class CollectionIncompatibleTypeClassCast<K, V> extends HashMap<K, V> { public void test(K k) { get(k); } }
apache-2.0
pedrofvteixeira/big-data-plugin
impl/shim/hbase/src/test/java/com/pentaho/big/data/bundles/impl/shim/hbase/connectionPool/HBaseConnectionPoolConnectionTest.java
4480
/*******************************************************************************
 *
 * Pentaho Big Data
 *
 * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package com.pentaho.big.data.bundles.impl.shim.hbase.connectionPool;

import com.pentaho.big.data.bundles.impl.shim.hbase.HBaseConnectionTestImpls;
import org.junit.Before;
import org.junit.Test;
import org.pentaho.hbase.shim.spi.HBaseConnection;

import java.util.Properties;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Tests for HBaseConnectionPoolConnection: the public table/close methods must
 * be disabled (pooled connections are managed via the *Internal variants),
 * while the *Internal methods delegate to the wrapped HBaseConnection and
 * track the current source/target table state.
 *
 * Created by bryan on 2/4/16.
 */
public class HBaseConnectionPoolConnectionTest {
  // Mocked underlying connection whose interactions are verified.
  private HBaseConnection delegate;
  private HBaseConnectionPoolConnection hBaseConnectionPoolConnection;

  @Before
  public void setup() {
    delegate = mock( HBaseConnectionTestImpls.HBaseConnectionWithResultField.class );
    hBaseConnectionPoolConnection = new HBaseConnectionPoolConnection( delegate );
  }

  // The public (non-Internal) lifecycle methods must all reject direct use:

  @Test( expected = UnsupportedOperationException.class )
  public void testNewSourceTable() throws Exception {
    hBaseConnectionPoolConnection.newSourceTable( "test" );
  }

  @Test( expected = UnsupportedOperationException.class )
  public void testCloseSourceTable() throws Exception {
    hBaseConnectionPoolConnection.closeSourceTable();
  }

  @Test( expected = UnsupportedOperationException.class )
  public void testNewTargetTable() throws Exception {
    hBaseConnectionPoolConnection.newTargetTable( "test", new Properties() );
  }

  @Test( expected = UnsupportedOperationException.class )
  public void testCloseTargetTable() throws Exception {
    hBaseConnectionPoolConnection.closeTargetTable();
  }

  @Test( expected = UnsupportedOperationException.class )
  public void testClose() throws Exception {
    hBaseConnectionPoolConnection.close();
  }

  // closeInternal() must close any open tables and the delegate, and clear all tracked state.
  @Test
  public void testCloseInternal() throws Exception {
    String name = "name";
    Properties properties = mock( Properties.class );
    hBaseConnectionPoolConnection.newTargetTableInternal( name, properties );
    hBaseConnectionPoolConnection.newSourceTableInternal( name );
    hBaseConnectionPoolConnection.closeInternal();
    verify( delegate ).closeSourceTable();
    verify( delegate ).closeTargetTable();
    verify( delegate ).close();
    assertNull( hBaseConnectionPoolConnection.getSourceTable() );
    assertNull( hBaseConnectionPoolConnection.getTargetTable() );
    assertNull( hBaseConnectionPoolConnection.getTargetTableProperties() );
  }

  // Opening a target table tracks its name/properties; closing clears them.
  @Test
  public void testNewTargetTableInternalAndCloseTargetTable() throws Exception {
    String name = "name";
    Properties properties = mock( Properties.class );
    hBaseConnectionPoolConnection.newTargetTableInternal( name, properties );
    verify( delegate ).newTargetTable( name, properties );
    assertEquals( name, hBaseConnectionPoolConnection.getTargetTable() );
    assertEquals( properties, hBaseConnectionPoolConnection.getTargetTableProperties() );
    hBaseConnectionPoolConnection.closeTargetTableInternal();
    verify( delegate ).closeTargetTable();
    assertNull( hBaseConnectionPoolConnection.getTargetTable() );
    assertNull( hBaseConnectionPoolConnection.getTargetTableProperties() );
  }

  // Same tracking contract for the source table.
  @Test
  public void testNewSourceTableInternal() throws Exception {
    String name = "name";
    hBaseConnectionPoolConnection.newSourceTableInternal( name );
    verify( delegate ).newSourceTable( name );
    assertEquals( name, hBaseConnectionPoolConnection.getSourceTable() );
    hBaseConnectionPoolConnection.closeSourceTableInternal();
    verify( delegate ).closeSourceTable();
    assertNull( hBaseConnectionPoolConnection.getSourceTable() );
  }
}
apache-2.0
android-ia/platform_tools_idea
platform/platform-impl/src/com/intellij/internal/statistic/ideSettings/LaFUsagesCollector.java
1346
package com.intellij.internal.statistic.ideSettings; import com.intellij.ide.ui.LafManager; import com.intellij.internal.statistic.CollectUsagesException; import com.intellij.internal.statistic.UsagesCollector; import com.intellij.internal.statistic.beans.GroupDescriptor; import com.intellij.internal.statistic.beans.UsageDescriptor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.text.StringUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.util.Collections; import java.util.Set; public class LaFUsagesCollector extends UsagesCollector { @NotNull @Override public Set<UsageDescriptor> getUsages(@Nullable Project project) throws CollectUsagesException { UIManager.LookAndFeelInfo laf = LafManager.getInstance().getCurrentLookAndFeel(); String key = SystemInfo.OS_NAME + " - "; if (!StringUtil.isEmptyOrSpaces(SystemInfo.SUN_DESKTOP)) { key += SystemInfo.SUN_DESKTOP + " - "; } return laf != null ? Collections.singleton(new UsageDescriptor(key + laf.getName(), 1)) : Collections.<UsageDescriptor>emptySet(); } @NotNull @Override public GroupDescriptor getGroupId() { return GroupDescriptor.create("Look and Feel"); } }
apache-2.0
slav9nin/srcdemo2
lib/any/jlibs/src/jlibs/core/graph/visitors/ClassSorter.java
1747
/** * JLibs: Common Utilities for Java * Copyright (C) 2009 Santhosh Kumar T <santhosh.tekuri@gmail.com> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. */ package jlibs.core.graph.visitors; import jlibs.core.graph.Filter; import jlibs.core.graph.Navigator; import jlibs.core.graph.Sequence; import jlibs.core.graph.WalkerUtil; import jlibs.core.graph.sequences.FilteredSequence; import jlibs.core.graph.sequences.IterableSequence; import java.util.Collection; import java.util.List; /** * @author Santhosh Kumar T */ public class ClassSorter{ public static List<Class<?>> sort(final Sequence<Class<?>> classes){ return WalkerUtil.topologicalSort(classes, new Navigator<Class<?>>(){ @Override public Sequence<Class<?>> children(final Class<?> parent){ return new FilteredSequence<Class<?>>(classes.copy(), new Filter<Class<?>>(){ @Override public boolean select(Class<?> child){ return child!=parent && child.isAssignableFrom(parent); } }); } }); } public static List<Class<?>> sort(Collection<Class<?>> classes){ return sort(new IterableSequence<Class<?>>(classes)); } }
bsd-2-clause
umbrant/rain-workload-toolkit
src/radlab/rain/workload/olio/LoginOperation.java
3167
/* * Copyright (c) 2010, Regents of the University of California * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of California, Berkeley * nor the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package radlab.rain.workload.olio; import radlab.rain.IScoreboard; /** * The LoginOperation is an operation that logs in as a random user. If the * user executing this operation is already logged in, the user is first * logged out. 
After logging in, the user is redirected to the home page.<br /> * <br /> * The process of logging in as a random user entails picking a random user * ID, mapping the ID to a username deterministically, and finally logging in * by constructing a post message where the user password is the ID.<br /> * <br /> * The application is checked (via parsing of the document) for indication * that the login succeeded; the logged in state is saved. */ public class LoginOperation extends OlioOperation { public LoginOperation( boolean interactive, IScoreboard scoreboard ) { super( interactive, scoreboard ); this._operationName = "Login"; this._operationIndex = OlioGenerator.LOGIN; /* Logging in cannot occur asynchronously because the state of the * HTTP client changes, affecting the execution of the following * operation. */ this._mustBeSync = true; } public void execute() throws Throwable { if ( this.isLoggedOn() ) { this.logOff(); } // Logging in redirects the user to the home page. StringBuilder homeResponse = this.logOn(); // Check that the user was successfully logged in. if ( homeResponse.indexOf("Successfully logged in!") < 0 ) { throw new Exception( "Login did not persist for an unknown reason" ); } this.setFailed( false ); } }
bsd-3-clause
angelitorb99/CardinalPGM
src/main/java/in/twizmwaz/cardinal/module/modules/deathMessages/DeathMessagesBuilder.java
412
package in.twizmwaz.cardinal.module.modules.deathMessages; import in.twizmwaz.cardinal.match.Match; import in.twizmwaz.cardinal.module.ModuleBuilder; import in.twizmwaz.cardinal.module.ModuleCollection; public class DeathMessagesBuilder implements ModuleBuilder { @Override public ModuleCollection<DeathMessages> load(Match match) { return new ModuleCollection<>(new DeathMessages()); } }
mit
rokn/Count_Words_2015
testing/openjdk2/jdk/src/share/classes/javax/swing/FocusManager.java
6226
/* * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.swing; import java.awt.*; /** * This class has been obsoleted by the 1.4 focus APIs. While client code may * still use this class, developers are strongly encouraged to use * <code>java.awt.KeyboardFocusManager</code> and * <code>java.awt.DefaultKeyboardFocusManager</code> instead. * <p> * Please see * <a href="http://docs.oracle.com/javase/tutorial/uiswing/misc/focus.html"> * How to Use the Focus Subsystem</a>, * a section in <em>The Java Tutorial</em>, and the * <a href="../../java/awt/doc-files/FocusSpec.html">Focus Specification</a> * for more information. 
* * @see <a href="../../java/awt/doc-files/FocusSpec.html">Focus Specification</a> * * @author Arnaud Weber * @author David Mendenhall */ public abstract class FocusManager extends DefaultKeyboardFocusManager { /** * This field is obsolete, and its use is discouraged since its * specification is incompatible with the 1.4 focus APIs. * The current FocusManager is no longer a property of the UI. * Client code must query for the current FocusManager using * <code>KeyboardFocusManager.getCurrentKeyboardFocusManager()</code>. * See the Focus Specification for more information. * * @see java.awt.KeyboardFocusManager#getCurrentKeyboardFocusManager * @see <a href="../../java/awt/doc-files/FocusSpec.html">Focus Specification</a> */ public static final String FOCUS_MANAGER_CLASS_PROPERTY = "FocusManagerClassName"; private static boolean enabled = true; /** * Returns the current <code>KeyboardFocusManager</code> instance * for the calling thread's context. * * @return this thread's context's <code>KeyboardFocusManager</code> * @see #setCurrentManager */ public static FocusManager getCurrentManager() { KeyboardFocusManager manager = KeyboardFocusManager.getCurrentKeyboardFocusManager(); if (manager instanceof FocusManager) { return (FocusManager)manager; } else { return new DelegatingDefaultFocusManager(manager); } } /** * Sets the current <code>KeyboardFocusManager</code> instance * for the calling thread's context. If <code>null</code> is * specified, then the current <code>KeyboardFocusManager</code> * is replaced with a new instance of * <code>DefaultKeyboardFocusManager</code>. * <p> * If a <code>SecurityManager</code> is installed, * the calling thread must be granted the <code>AWTPermission</code> * "replaceKeyboardFocusManager" in order to replace the * the current <code>KeyboardFocusManager</code>. * If this permission is not granted, * this method will throw a <code>SecurityException</code>, * and the current <code>KeyboardFocusManager</code> will be unchanged. 
* * @param aFocusManager the new <code>KeyboardFocusManager</code> * for this thread's context * @see #getCurrentManager * @see java.awt.DefaultKeyboardFocusManager * @throws SecurityException if the calling thread does not have permission * to replace the current <code>KeyboardFocusManager</code> */ public static void setCurrentManager(FocusManager aFocusManager) throws SecurityException { // Note: This method is not backward-compatible with 1.3 and earlier // releases. It now throws a SecurityException in an applet, whereas // in previous releases, it did not. This issue was discussed at // length, and ultimately approved by Hans. KeyboardFocusManager toSet = (aFocusManager instanceof DelegatingDefaultFocusManager) ? ((DelegatingDefaultFocusManager)aFocusManager).getDelegate() : aFocusManager; KeyboardFocusManager.setCurrentKeyboardFocusManager(toSet); } /** * Changes the current <code>KeyboardFocusManager</code>'s default * <code>FocusTraversalPolicy</code> to * <code>DefaultFocusTraversalPolicy</code>. * * @see java.awt.DefaultFocusTraversalPolicy * @see java.awt.KeyboardFocusManager#setDefaultFocusTraversalPolicy * @deprecated as of 1.4, replaced by * <code>KeyboardFocusManager.setDefaultFocusTraversalPolicy(FocusTraversalPolicy)</code> */ @Deprecated public static void disableSwingFocusManager() { if (enabled) { enabled = false; KeyboardFocusManager.getCurrentKeyboardFocusManager(). setDefaultFocusTraversalPolicy( new DefaultFocusTraversalPolicy()); } } /** * Returns whether the application has invoked * <code>disableSwingFocusManager()</code>. * * @see #disableSwingFocusManager * @deprecated As of 1.4, replaced by * <code>KeyboardFocusManager.getDefaultFocusTraversalPolicy()</code> */ @Deprecated public static boolean isFocusManagerEnabled() { return enabled; } }
mit
lbchen/odl-mod
opendaylight/sal/yang-prototype/sal/sal-core-demo/src/main/java/org/opendaylight/controller/sal/demo/DemoProviderImpl.java
2365
/* * Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.controller.sal.demo; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import org.opendaylight.controller.sal.core.api.Broker.ProviderSession; import org.opendaylight.controller.sal.core.api.notify.NotificationProviderService; import org.opendaylight.controller.yang.data.api.Node; import org.opendaylight.controller.yang.data.util.Nodes; public class DemoProviderImpl implements org.opendaylight.controller.sal.core.api.Provider { private ProviderSession session; private NotificationProviderService notifier; @Override public void onSessionInitiated(ProviderSession session) { this.session = session; notifier = session.getService(NotificationProviderService.class); } @Override public Collection<ProviderFunctionality> getProviderFunctionality() { return Collections.emptySet(); } public void sendAlertNotification(String content) { List<Node<?>> nodes = new ArrayList<Node<?>>(); nodes.add(DemoUtils.contentNode(content)); if (notifier == null) { System.out.println("Provider: Error: Session not available"); System.out .println(" Notification Service not available"); return; } notifier.sendNotification(Nodes.containerNode( DemoUtils.alertNotification, nodes)); } public void sendChangeNotification(String content) { List<Node<?>> nodes = new ArrayList<Node<?>>(); nodes.add(DemoUtils.contentNode(content)); if (notifier == null) { System.out.println("Provider: Error: Session not available"); System.out .println(" Notification Service not available"); return; } notifier.sendNotification(Nodes.containerNode( DemoUtils.changeNotification, nodes)); } public void closeSession() { session.close(); } }
epl-1.0
zbanga/open-cyclos
src/nl/strohalm/cyclos/utils/BigDecimalHelper.java
15098
/* This file is part of Cyclos (www.cyclos.org). A project of the Social Trade Organisation (www.socialtrade.org). Cyclos is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. Cyclos is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Cyclos; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package nl.strohalm.cyclos.utils; import java.math.BigDecimal; import java.math.BigInteger; import java.math.MathContext; import nl.strohalm.cyclos.entities.settings.LocalSettings; /** * Helper class for handling <code>BigDecimal</code>s Contains several useful BigDecimal mathematical functions taken from: * "Java Number Cruncher, the java programmer's guide to numerical computing" by Ronald Mak, Prentice Hall PTR, 2003. pages 330 & 331 * * @author luis * @author Rinke * @author Ronald Mak */ public class BigDecimalHelper { public static final BigDecimal ONE_HUNDRED = new BigDecimal(100.0); /** * Compute the arctangent of x to a given scale, |x| < 1 * @param x the value of x * @param scale the desired scale of the result * @return the result value */ public static BigDecimal arctan(final BigDecimal x, final int scale) { // Check that |x| < 1. if (x.abs().compareTo(BigDecimal.valueOf(1)) >= 0) { throw new IllegalArgumentException("|x| >= 1"); } // If x is negative, return -arctan(-x). if (x.signum() == -1) { return arctan(x.negate(), scale).negate(); } else { return arctanTaylor(x, scale); } } /** * returns amount as a percentage of total. 
Example: if amount is 5 and total is 50, then BigDecimalHelper.asPercentageOf(amount, total) equals 10 * (%). * @param amount the BigDecimal to be written as a percentage. * @param total the BigDecimal representing the total amount of which amount is a percentage. * @return amount as a BigDecimal percentage of total. */ public static BigDecimal asPercentageOf(final BigDecimal amount, final BigDecimal total) { final MathContext mathContext = new MathContext(LocalSettings.MAX_PRECISION); final BigDecimal asFractionOf = amount.divide(total, mathContext); return asFractionOf.multiply(ONE_HUNDRED, mathContext); } /** * returns a nominal percentage (like 5.2%) as a fraction (0.052). * @return the BigDecimal divided by 100. In the above example this would be 0.052. */ public static BigDecimal asPercentFraction(final BigDecimal bigDecimal) { final MathContext mathContext = new MathContext(LocalSettings.MAX_PRECISION); return bigDecimal.divide(ONE_HUNDRED, mathContext); } /** * Compute e^x to a given scale. Break x into its whole and fraction parts and compute (e^(1 + fraction/whole))^whole using Taylor's formula. * @param x the value of x * @param scale the desired scale of the result * @return the result value */ public static BigDecimal exp(final BigDecimal x, final int scale) { // e^0 = 1 if (x.signum() == 0) { return BigDecimal.valueOf(1); } // If x is negative, return 1/(e^-x). else if (x.signum() == -1) { return BigDecimal.valueOf(1).divide(exp(x.negate(), scale), scale, BigDecimal.ROUND_HALF_EVEN); } // Compute the whole part of x. BigDecimal xWhole = x.setScale(0, BigDecimal.ROUND_DOWN); // If there isn't a whole part, compute and return e^x. if (xWhole.signum() == 0) { return expTaylor(x, scale); } // Compute the fraction part of x. 
final BigDecimal xFraction = x.subtract(xWhole); // z = 1 + fraction/whole final BigDecimal z = BigDecimal.valueOf(1).add(xFraction.divide(xWhole, scale, BigDecimal.ROUND_HALF_EVEN)); // t = e^z final BigDecimal t = expTaylor(z, scale); final BigDecimal maxLong = BigDecimal.valueOf(Long.MAX_VALUE); BigDecimal result = BigDecimal.valueOf(1); // Compute and return t^whole using intPower(). // If whole > Long.MAX_VALUE, then first compute products // of e^Long.MAX_VALUE. while (xWhole.compareTo(maxLong) >= 0) { result = result.multiply(intPower(t, Long.MAX_VALUE, scale)).setScale(scale, BigDecimal.ROUND_HALF_EVEN); xWhole = xWhole.subtract(maxLong); Thread.yield(); } return result.multiply(intPower(t, xWhole.longValue(), scale)).setScale(scale, BigDecimal.ROUND_HALF_EVEN); } /** * Compute x^exponent to a given scale. * @param x the value x * @param exponent the exponent value * @param scale the desired scale of the result * @return the result value */ public static BigDecimal intPower(BigDecimal x, long exponent, final int scale) { // If the exponent is negative, compute 1/(x^-exponent). if (exponent < 0) { return BigDecimal.valueOf(1).divide(intPower(x, -exponent, scale), scale, BigDecimal.ROUND_HALF_EVEN); } BigDecimal power = BigDecimal.valueOf(1); // Loop to compute value^exponent. while (exponent > 0) { // Is the rightmost bit a 1? if ((exponent & 1) == 1) { power = power.multiply(x).setScale(scale, BigDecimal.ROUND_HALF_EVEN); } // Square x and shift exponent 1 bit to the right. x = x.multiply(x).setScale(scale, BigDecimal.ROUND_HALF_EVEN); exponent >>= 1; Thread.yield(); } return power; } /** * Compute the integral root of x to a given scale, x >= 0. Use Newton's algorithm. * @param x the value of x * @param index the integral root value * @param scale the desired scale of the result * @return the result value */ public static BigDecimal intRoot(BigDecimal x, final long index, final int scale) { // Check that x >= 0. 
if (x.signum() < 0) { throw new IllegalArgumentException("x < 0"); } final int sp1 = scale + 1; final BigDecimal n = x; final BigDecimal i = BigDecimal.valueOf(index); final BigDecimal im1 = BigDecimal.valueOf(index - 1); final BigDecimal tolerance = BigDecimal.valueOf(5).movePointLeft(sp1); BigDecimal xPrev; // The initial approximation is x/index. x = x.divide(i, scale, BigDecimal.ROUND_HALF_EVEN); // Loop until the approximations converge // (two successive approximations are equal after rounding). do { // x^(index-1) final BigDecimal xToIm1 = intPower(x, index - 1, sp1); // x^index final BigDecimal xToI = x.multiply(xToIm1).setScale(sp1, BigDecimal.ROUND_HALF_EVEN); // n + (index-1)*(x^index) final BigDecimal numerator = n.add(im1.multiply(xToI)).setScale(sp1, BigDecimal.ROUND_HALF_EVEN); // (index*(x^(index-1)) final BigDecimal denominator = i.multiply(xToIm1).setScale(sp1, BigDecimal.ROUND_HALF_EVEN); // x = (n + (index-1)*(x^index)) / (index*(x^(index-1))) xPrev = x; x = numerator.divide(denominator, sp1, BigDecimal.ROUND_DOWN); Thread.yield(); } while (x.subtract(xPrev).abs().compareTo(tolerance) > 0); return x; } /** * Compute the natural logarithm of x to a given scale, x > 0. */ public static BigDecimal ln(final BigDecimal x, final int scale) { // Check that x > 0. if (x.signum() <= 0) { throw new IllegalArgumentException("x <= 0"); } // The number of digits to the left of the decimal point. final int magnitude = x.toString().length() - x.scale() - 1; if (magnitude < 3) { return lnNewton(x, scale); } // Compute magnitude*ln(x^(1/magnitude)). 
else { // x^(1/magnitude) final BigDecimal root = intRoot(x, magnitude, scale); // ln(x^(1/magnitude)) final BigDecimal lnRoot = lnNewton(root, scale); // magnitude*ln(x^(1/magnitude)) return BigDecimal.valueOf(magnitude).multiply(lnRoot).setScale(scale, BigDecimal.ROUND_HALF_EVEN); } } /** * Returns zero when the given BigDecimal is null */ public static BigDecimal nvl(final BigDecimal bigDecimal) { return bigDecimal == null ? BigDecimal.ZERO : bigDecimal; } /** * calculates x^exponent, that is, raise x to the poser of exponent. (by Rinke) * @param x the BigDecimal to be "powered". * @param scale the desired scale of the result * @param exponent the double which will be used to raise x to its power. * @return the result value */ public static BigDecimal pow(final BigDecimal x, final int scale, final BigDecimal exponent) { final BigDecimal lnX = BigDecimalHelper.ln(x, scale); final BigDecimal newExponent = lnX.multiply(exponent); return BigDecimalHelper.exp(newExponent, scale); } /** * extremely simple version natural logarithm, using no algorith tricks, but just via doubles and recreating BigDecimal Might give less accurate * results, but this is to be tested. As long as this hasn't happened it is deprecated. */ @Deprecated public static BigDecimal simpleLn(final BigDecimal x, final int scale) { final long unscaledValue = x.unscaledValue().longValue(); final int scalevalue = x.scale(); final double result = Math.log(unscaledValue) - (scalevalue * Math.log(10.0)); return new BigDecimal(result, new MathContext(scale)); } /** * Compute the square root of x to a given scale, x >= 0. Use Newton's algorithm. * @param x the value of x * @param scale the desired scale of the result * @return the result value */ public static BigDecimal sqrt(final BigDecimal x, final int scale) { // Check that x >= 0. 
if (x.signum() < 0) { throw new IllegalArgumentException("x < 0"); } // n = x*(10^(2*scale)) final BigInteger n = x.movePointRight(scale << 1).toBigInteger(); // The first approximation is the upper half of n. final int bits = (n.bitLength() + 1) >> 1; BigInteger ix = n.shiftRight(bits); BigInteger ixPrev; // Loop until the approximations converge // (two successive approximations are equal after rounding). do { ixPrev = ix; // x = (x + n/x)/2 ix = ix.add(n.divide(ix)).shiftRight(1); Thread.yield(); } while (ix.compareTo(ixPrev) != 0); return new BigDecimal(ix, scale); } /** * Compute the arctangent of x to a given scale by the Taylor series, |x| < 1 * @param x the value of x * @param scale the desired scale of the result * @return the result value * @author Ronald Mak: "Java Number Cruncher, the java programmer's guide to numerical computing" Prentice Hall PTR, 2003. pages 330 & 331 */ private static BigDecimal arctanTaylor(final BigDecimal x, final int scale) { final int sp1 = scale + 1; int i = 3; boolean addFlag = false; BigDecimal power = x; BigDecimal sum = x; BigDecimal term; // Convergence tolerance = 5*(10^-(scale+1)) final BigDecimal tolerance = BigDecimal.valueOf(5).movePointLeft(sp1); // Loop until the approximations converge // (two successive approximations are within the tolerance). do { // x^i power = power.multiply(x).multiply(x).setScale(sp1, BigDecimal.ROUND_HALF_EVEN); // (x^i)/i term = power.divide(BigDecimal.valueOf(i), sp1, BigDecimal.ROUND_HALF_EVEN); // sum = sum +- (x^i)/i sum = addFlag ? sum.add(term) : sum.subtract(term); i += 2; addFlag = !addFlag; Thread.yield(); } while (term.compareTo(tolerance) > 0); return sum; } /** * Compute e^x to a given scale by the Taylor series. * @param x the value of x * @param scale the desired scale of the result * @return the result value * @author Ronald Mak: "Java Number Cruncher, the java programmer's guide to numerical computing" Prentice Hall PTR, 2003. 
pages 330 & 331 */ private static BigDecimal expTaylor(final BigDecimal x, final int scale) { BigDecimal factorial = BigDecimal.valueOf(1); BigDecimal xPower = x; BigDecimal sumPrev; // 1 + x BigDecimal sum = x.add(BigDecimal.valueOf(1)); // Loop until the sums converge // (two successive sums are equal after rounding). int i = 2; do { // x^i xPower = xPower.multiply(x).setScale(scale, BigDecimal.ROUND_HALF_EVEN); // i! factorial = factorial.multiply(BigDecimal.valueOf(i)); // x^i/i! final BigDecimal term = xPower.divide(factorial, scale, BigDecimal.ROUND_HALF_EVEN); // sum = sum + x^i/i! sumPrev = sum; sum = sum.add(term); ++i; Thread.yield(); } while (sum.compareTo(sumPrev) != 0); return sum; } /** * Compute the natural logarithm of x to a given scale, x > 0. Use Newton's algorithm. * @author Ronald Mak: "Java Number Cruncher, the java programmer's guide to numerical computing" Prentice Hall PTR, 2003. pages 330 & 331 */ private static BigDecimal lnNewton(BigDecimal x, final int scale) { final int sp1 = scale + 1; final BigDecimal n = x; BigDecimal term; // Convergence tolerance = 5*(10^-(scale+1)) final BigDecimal tolerance = BigDecimal.valueOf(5).movePointLeft(sp1); // Loop until the approximations converge // (two successive approximations are within the tolerance). do { // e^x final BigDecimal eToX = exp(x, sp1); // (e^x - n)/e^x term = eToX.subtract(n).divide(eToX, sp1, BigDecimal.ROUND_DOWN); // x - (e^x - n)/e^x x = x.subtract(term); Thread.yield(); } while (term.compareTo(tolerance) > 0); return x.setScale(scale, BigDecimal.ROUND_HALF_EVEN); } }
gpl-2.0
robertoandrade/cyclos
src/nl/strohalm/cyclos/utils/SortedProperties.java
1697
/* This file is part of Cyclos (www.cyclos.org). A project of the Social Trade Organisation (www.socialtrade.org). Cyclos is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. Cyclos is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Cyclos; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package nl.strohalm.cyclos.utils; import java.util.Arrays; import java.util.Enumeration; import java.util.Properties; import java.util.Set; import java.util.TreeSet; import org.apache.commons.collections.IteratorUtils; import org.apache.commons.collections.iterators.ArrayIterator; /** * Extended properties used to sort the output of the store method * @author luis */ public class SortedProperties extends Properties { private static final long serialVersionUID = 2361510798290892779L; @Override @SuppressWarnings("unchecked") public synchronized Enumeration<Object> keys() { final Object[] keys = super.keySet().toArray(); Arrays.sort(keys); return IteratorUtils.asEnumeration(new ArrayIterator(keys)); } @Override public Set<Object> keySet() { return new TreeSet<Object>(super.keySet()); } }
gpl-2.0
zhiqinghuang/core
src/com/dotmarketing/portlets/usermanager/factories/UserManagerListBuilderFactory.java
23873
package com.dotmarketing.portlets.usermanager.factories;

import java.util.ArrayList;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;

import com.dotcms.repackage.javax.portlet.ActionRequest;

import javax.servlet.http.HttpSession;

import com.dotmarketing.beans.UserProxy;
import com.dotmarketing.business.APILocator;
import com.dotmarketing.business.Role;
import com.dotmarketing.cms.factories.PublicCompanyFactory;
import com.dotmarketing.common.db.DotConnect;
import com.dotmarketing.db.DbConnectionFactory;
import com.dotmarketing.exception.DotDataException;
import com.dotmarketing.exception.DotRuntimeException;
import com.dotmarketing.portlets.usermanager.struts.UserManagerListSearchForm;
import com.dotmarketing.util.Config;
import com.dotmarketing.util.InodeUtils;
import com.dotmarketing.util.Logger;
import com.dotmarketing.util.UtilMethods;
import com.dotmarketing.util.WebKeys;
import com.liferay.portal.PortalException;
import com.liferay.portal.SystemException;
import com.liferay.portal.model.User;
import com.liferay.portlet.ActionRequestImpl;

/**
 * Builds and runs the user-manager search query from a
 * {@link UserManagerListSearchForm}. The SQL is assembled as two buffers
 * (select portion / where portion); the order in which "?" placeholders are
 * appended to the where portion MUST match the order of the dc.addParam calls
 * further down — keep both sections in sync when editing.
 *
 * @author David Torres
 * @author Oswaldo Gallango
 */
public class UserManagerListBuilderFactory {

	/**
	 * Runs the search returning the matching user ids (not a count).
	 * @param form the search criteria
	 * @return one row per user with "userid" (and "createdate") keys
	 */
	public static List<Map<String, Object>> doSearch(UserManagerListSearchForm form) {
		boolean isCount = false;
		return doSearch(form, isCount);
	}

	/**
	 * Return the list of user ids or the count of users from the given search form.
	 * <p>
	 * When the form carries an explicit array of user ids, the ids are queried
	 * in batches (IN-clauses of bounded size) and the per-criteria filtering is
	 * skipped; otherwise a single query is built from the form's filters.
	 * @param form    the search criteria
	 * @param isCount when true, select count(distinct userid) instead of the ids
	 * @return result rows from {@link DotConnect#getResults()}
	 */
	@SuppressWarnings("unchecked")
	public static List<Map<String, Object>> doSearch(UserManagerListSearchForm form, boolean isCount) {

		StringBuffer querySelectPortion = new StringBuffer();
		StringBuffer queryWherePortion = new StringBuffer();
		DotConnect dc = new DotConnect();

		String select = "select distinct user_.userid as userid, user_.createdate from user_";
		if (isCount) {
			select = "select count(distinct user_.userid) as total from user_";
		}
		querySelectPortion.append(select);
		if (UtilMethods.isSet(form.getTagName())) {
			querySelectPortion.append(", tag_inode, tag");
		}

		queryWherePortion.append(" where user_.companyid ='");
		queryWherePortion.append(PublicCompanyFactory.getDefaultCompany().getCompanyId());
		queryWherePortion.append("'");

		String[] arrayUserIds = form.getArrayUserIds();
		List<Map<String, Object>> results = new ArrayList<Map<String, Object>>();
		boolean runQuery = true;

		if (arrayUserIds != null && arrayUserIds.length > 0) {
			// Explicit id list: query in batches so the IN clause stays bounded.
			runQuery = false;
			boolean first = true;
			StringBuilder queryUserArrayPortion = new StringBuilder();
			for (int i = 0; i < arrayUserIds.length; i++) {
				if (first) {
					queryUserArrayPortion.append(" and user_.userid in ('" + arrayUserIds[i] + "'");
				} else {
					queryUserArrayPortion.append(",'" + arrayUserIds[i] + "'");
				}
				first = false;
				if ((i % 500) == 0 && i != 0) {
					queryUserArrayPortion.append(")");
					String query = querySelectPortion.toString() + queryWherePortion.toString() + queryUserArrayPortion.toString();
					Logger.debug(UserManagerListBuilderFactory.class, "query:" + query);
					dc.setSQL(query);
					try {
						results.addAll(dc.getResults());
					} catch (DotDataException e) {
						Logger.error(UserManagerListBuilderFactory.class, e.getMessage(), e);
					}
					queryUserArrayPortion.delete(0, queryUserArrayPortion.length());
					first = true;
				}
			}
			// FIX: flush the trailing batch. Previously any ids after the last
			// multiple-of-500 boundary (i.e. ALL ids when fewer than 501 were
			// passed) were silently dropped and never queried.
			if (queryUserArrayPortion.length() > 0) {
				queryUserArrayPortion.append(")");
				String query = querySelectPortion.toString() + queryWherePortion.toString() + queryUserArrayPortion.toString();
				Logger.debug(UserManagerListBuilderFactory.class, "query:" + query);
				dc.setSQL(query);
				try {
					results.addAll(dc.getResults());
				} catch (DotDataException e) {
					Logger.error(UserManagerListBuilderFactory.class, e.getMessage(), e);
				}
			}
		} else {
			// Normalized (trimmed/lowercased) filter values; null when unset.
			String userIdSearch = (UtilMethods.isSet(form.getUserIdSearch()) ? form.getUserIdSearch().trim().toLowerCase() : null);
			String firstName = (UtilMethods.isSet(form.getFirstName()) ? form.getFirstName().trim().toLowerCase() : null);
			String middleName = (UtilMethods.isSet(form.getMiddleName()) ? form.getMiddleName().trim().toLowerCase() : null);
			String lastName = (UtilMethods.isSet(form.getLastName()) ? form.getLastName().trim().toLowerCase() : null);
			String emailAddress = (UtilMethods.isSet(form.getEmailAddress()) ? form.getEmailAddress().trim().toLowerCase() : null);
			String dateOfBirthTypeSearch = (UtilMethods.isSet(form.getDateOfBirthTypeSearch()) ? form.getDateOfBirthTypeSearch() : null);
			Date dateOfBirthFromDate = (UtilMethods.isSet(form.getDateOfBirthFromDate()) ? form.getDateOfBirthFromDate() : null);
			Date dateOfBirthToDate = (UtilMethods.isSet(form.getDateOfBirthToDate()) ? form.getDateOfBirthToDate() : null);
			Date dateOfBirthSinceDate = (UtilMethods.isSet(form.getDateOfBirthSinceDate()) ? form.getDateOfBirthSinceDate() : null);
			String lastLoginTypeSearch = (UtilMethods.isSet(form.getLastLoginTypeSearch()) ? form.getLastLoginTypeSearch() : null);
			Date lastLoginFromDate = (UtilMethods.isSet(form.getLastLoginDateFromDate()) ? form.getLastLoginDateFromDate() : null);
			Date lastLoginToDate = (UtilMethods.isSet(form.getLastLoginDateToDate()) ? form.getLastLoginDateToDate() : null);
			String lastLoginSince = (UtilMethods.isSet(form.getLastLoginSince()) ? form.getLastLoginSince() : null);
			String createdTypeSearch = (UtilMethods.isSet(form.getCreatedTypeSearch()) ? form.getCreatedTypeSearch() : null);
			Date createdDateFromDate = (UtilMethods.isSet(form.getCreatedDateFromDate()) ? form.getCreatedDateFromDate() : null);
			Date createdDateToDate = (UtilMethods.isSet(form.getCreatedDateToDate()) ? form.getCreatedDateToDate() : null);
			String createdSince = (UtilMethods.isSet(form.getCreatedSince()) ? form.getCreatedSince() : null);
			String lastVisitTypeSearch = (UtilMethods.isSet(form.getLastVisitTypeSearch()) ? form.getLastVisitTypeSearch() : null);
			Date lastVisitDateFromDate = (UtilMethods.isSet(form.getLastVisitDateFromDate()) ? form.getLastVisitDateFromDate() : null);
			Date lastVisitDateToDate = (UtilMethods.isSet(form.getLastVisitDateToDate()) ? form.getLastVisitDateToDate() : null);
			String lastVisitSince = (UtilMethods.isSet(form.getLastVisitSince()) ? form.getLastVisitSince() : null);
			String active = (UtilMethods.isSet(form.getActive()) ? form.getActive() : null);
			String tagName = (UtilMethods.isSet(form.getTagName()) ? form.getTagName() : null);

			// ---- User fields filters (placeholder order mirrored in addParam order below)
			if (UtilMethods.isSet(firstName)) {
				firstName = "%" + firstName + "%";
				queryWherePortion.append(" and lower(user_.firstName) like ? ");
			}
			if (UtilMethods.isSet(middleName)) {
				middleName = "%" + middleName + "%";
				queryWherePortion.append(" and lower(user_.middleName) like ? ");
			}
			if (UtilMethods.isSet(lastName)) {
				lastName = "%" + lastName + "%";
				queryWherePortion.append(" and lower(user_.lastName) like ? ");
			}
			if (UtilMethods.isSet(emailAddress)) {
				emailAddress = "%" + emailAddress + "%";
				queryWherePortion.append(" and lower(user_.emailAddress) like ? ");
			}
			if (UtilMethods.isSet(dateOfBirthTypeSearch)) {
				if (dateOfBirthTypeSearch.equalsIgnoreCase("DateRange")) {
					if (UtilMethods.isSet(dateOfBirthFromDate)) {
						queryWherePortion.append(" and user_.birthday >= ? ");
					}
					if (UtilMethods.isSet(dateOfBirthToDate)) {
						queryWherePortion.append(" and user_.birthday <= ? ");
					}
				} else if (dateOfBirthTypeSearch.equalsIgnoreCase("Since")) {
					// "Since" here matches the exact day via a LIKE on the date prefix
					if (UtilMethods.isSet(dateOfBirthSinceDate)) {
						queryWherePortion.append(" and user_.birthday like '");
						queryWherePortion.append(UtilMethods.dateToShortJDBC(dateOfBirthSinceDate));
						queryWherePortion.append("%'");
					}
				}
			}
			if (UtilMethods.isSet(lastLoginTypeSearch)) {
				if (lastLoginTypeSearch.equalsIgnoreCase("DateRange")) {
					if (UtilMethods.isSet(lastLoginFromDate)) {
						queryWherePortion.append(" and user_.logindate >= ? ");
					}
					if (UtilMethods.isSet(lastLoginToDate)) {
						queryWherePortion.append(" and user_.logindate <= ? ");
					}
				} else if (lastLoginTypeSearch.equalsIgnoreCase("Since")) {
					if (UtilMethods.isSet(lastLoginSince)) {
						queryWherePortion.append(" and user_.logindate >= ? ");
					}
				}
			}
			if (UtilMethods.isSet(createdTypeSearch)) {
				if (createdTypeSearch.equalsIgnoreCase("DateRange")) {
					if (UtilMethods.isSet(createdDateFromDate)) {
						queryWherePortion.append(" and user_.createdate >= ? ");
					}
					if (UtilMethods.isSet(createdDateToDate)) {
						queryWherePortion.append(" and user_.createdate <= ? ");
					}
				} else if (createdTypeSearch.equalsIgnoreCase("Since")) {
					if (UtilMethods.isSet(createdSince)) {
						queryWherePortion.append(" and user_.createdate >= ? ");
					}
				}
			}

			// Last-visit filters join against the clickstream table; the join is
			// added lazily and at most once (also shared with the referer filter).
			boolean usingClickStreamTable = false;
			if (UtilMethods.isSet(lastVisitTypeSearch)) {
				if (lastVisitTypeSearch.equalsIgnoreCase("DateRange")) {
					boolean userVerification = false;
					if (UtilMethods.isSet(lastVisitDateFromDate)) {
						if (!usingClickStreamTable) {
							querySelectPortion.append(", clickstream");
							usingClickStreamTable = true;
						}
						if (!userVerification) {
							queryWherePortion.append(" and clickstream.user_id = user_.userid ");
							userVerification = true;
						}
						queryWherePortion.append(" and clickstream.start_date >= ? ");
					}
					if (UtilMethods.isSet(lastVisitDateToDate)) {
						if (!usingClickStreamTable) {
							querySelectPortion.append(", clickstream");
							usingClickStreamTable = true;
						}
						if (!userVerification) {
							queryWherePortion.append(" and clickstream.user_id = user_.userid ");
							userVerification = true;
						}
						queryWherePortion.append(" and clickstream.start_date <= ? ");
					}
				} else if (lastVisitTypeSearch.equalsIgnoreCase("Since")) {
					if (UtilMethods.isSet(lastVisitSince)) {
						if (!usingClickStreamTable) {
							querySelectPortion.append(", clickstream");
							usingClickStreamTable = true;
						}
						queryWherePortion.append(" and clickstream.user_id = user_.userid ");
						queryWherePortion.append(" and clickstream.start_date >= ? ");
					}
				}
			}
			if (UtilMethods.isSet(active)) {
				if (active.equalsIgnoreCase("true")) {
					queryWherePortion.append(" and user_.active_ = " + DbConnectionFactory.getDBTrue() + " ");
				} else if (active.equalsIgnoreCase("false")) {
					queryWherePortion.append(" and user_.active_ = " + DbConnectionFactory.getDBFalse() + " ");
				}
			}

			// ---- User Proxy fields filters
			boolean addingUserProxyWhere = false;
			if (UtilMethods.isSet(tagName)) {
				// Comma-separated tag names; single quotes escaped by doubling.
				StringTokenizer tagNameToken = new StringTokenizer(tagName, ",");
				StringBuffer tagNameParam = new StringBuffer("");
				if (tagNameToken.hasMoreTokens()) {
					for (; tagNameToken.hasMoreTokens();) {
						String token = tagNameToken.nextToken();
						tagNameParam.append("'" + token.trim().replace("'", "''") + "'");
						if (tagNameToken.hasMoreTokens()) {
							tagNameParam.append(",");
						}
					}
				}
				querySelectPortion.append(", user_proxy");
				queryWherePortion.append(" and tag.tag_id = tag_inode.tag_id and tag.tagname in (" + tagNameParam + ") ");
				queryWherePortion.append(" and tag_inode.inode = user_proxy.inode ");
				queryWherePortion.append(" and user_proxy.user_Id = user_.userId ");
				addingUserProxyWhere = true;
			}

			// ---- Address fields filters
			if (UtilMethods.isSet(form.getCity()) || UtilMethods.isSet(form.getCountry()) || UtilMethods.isSet(form.getState()) || UtilMethods.isSet(form.getZipStr())
					|| UtilMethods.isSet(form.getPhone()) || UtilMethods.isSet(form.getFax()) || UtilMethods.isSet(form.getCellPhone())) {
				querySelectPortion.append(", address");
				queryWherePortion.append(" and address.userId = user_.userId");
			}
			String city = (UtilMethods.isSet(form.getCity()) ? form.getCity().trim().toLowerCase() : null);
			String state = (UtilMethods.isSet(form.getState()) ? form.getState().trim().toLowerCase() : null);
			String country = (UtilMethods.isSet(form.getCountry()) ? form.getCountry().trim().toLowerCase() : null);
			String zip = (UtilMethods.isSet(form.getZipStr()) ? form.getZipStr().trim().toLowerCase() : null);
			String phone = (UtilMethods.isSet(form.getPhone()) ? form.getPhone().trim().toLowerCase() : null);
			String fax = (UtilMethods.isSet(form.getFax()) ? form.getFax().trim().toLowerCase() : null);
			String cellPhone = (UtilMethods.isSet(form.getCellPhone()) ? form.getCellPhone().trim().toLowerCase() : null);
			String referer = (UtilMethods.isSet(form.getUserReferer()) ? form.getUserReferer() : null);
			if (UtilMethods.isSet(city)) {
				city = "%" + city + "%";
				queryWherePortion.append(" and lower(address.city) like ? ");
			}
			if (UtilMethods.isSet(state)) {
				state = "%" + state + "%";
				queryWherePortion.append(" and lower(address.state) like ? ");
			}
			if (UtilMethods.isSet(country)) {
				country = "%" + country + "%";
				queryWherePortion.append(" and lower(address.country) like ? ");
			}
			if (UtilMethods.isSet(zip)) {
				zip = "%" + zip + "%";
				queryWherePortion.append(" and lower(address.zip) like ? ");
			}
			if (UtilMethods.isSet(phone)) {
				phone = "%" + phone + "%";
				queryWherePortion.append(" and lower(address.phone) like ? ");
			}
			if (UtilMethods.isSet(fax)) {
				fax = "%" + fax + "%";
				queryWherePortion.append(" and lower(address.fax) like ? ");
			}
			if (UtilMethods.isSet(cellPhone)) {
				cellPhone = "%" + cellPhone + "%";
				queryWherePortion.append(" and lower(address.cell) like ? ");
			}
			if (UtilMethods.isSet(form.getUserReferer())) {
				if (!usingClickStreamTable) {
					querySelectPortion.append(", clickstream, clickstream_request");
					usingClickStreamTable = true;
				} else {
					querySelectPortion.append(", clickstream_request");
				}
				referer = "%" + referer + "%";
				queryWherePortion.append(" and user_.userid=clickstream.user_id and clickstream_request.clickstream_id = clickstream.clickstream_id ");
				queryWherePortion.append(" and (clickstream_request.request_uri like ? or clickstream.referer like ?)");
			}

			// User Id Search
			if (UtilMethods.isSet(userIdSearch)) {
				userIdSearch = "%" + userIdSearch + "%";
				queryWherePortion.append(" and lower(user_.userid) like ? ");
			}

			if (form.isSetVar()) {
				if (!addingUserProxyWhere) {
					querySelectPortion.append(", user_proxy");
					queryWherePortion.append(" and user_proxy.user_Id = user_.userId");
					addingUserProxyWhere = true;
				}
				int numberGenericVariables = Config.getIntProperty("MAX_NUMBER_VARIABLES_TO_SHOW");
				for (int i = 1; i <= numberGenericVariables; i++) {
					// NOTE(review): var values are concatenated into the SQL
					// rather than bound as parameters — SQL-injection risk if
					// they can contain user-supplied quotes. Consider addParam.
					if (UtilMethods.isSet(form.getVar(i)))
						queryWherePortion.append(" and lower(user_proxy.var" + i + ") = '" + form.getVar(i).trim().toLowerCase() + "'");
				}
			}

			String[] categoriesList = form.getCategories();
			if ((categoriesList != null) && (categoriesList.length > 0)) {
				// Require the user_proxy to be tagged with ALL selected categories
				// (count(parent) must exceed length-1).
				String categories = "";
				int counter = 0;
				for (String cat : categoriesList) {
					if (counter == 0) {
						categories = categories + "tree.child = " + cat;
					} else {
						categories = categories + " or tree.child = " + cat;
					}
					counter += 1;
				}
				if (!addingUserProxyWhere) {
					querySelectPortion.append(", user_proxy, tree");
					queryWherePortion.append(" and user_.userid = user_proxy.user_id ");
					addingUserProxyWhere = true;
				} else {
					querySelectPortion.append(", tree");
				}
				queryWherePortion.append(" and tree.parent = user_proxy.inode and tree.parent in (select parent from tree where (" + categories + ")"
						+ " group by parent having count(parent) > " + (categoriesList.length - 1) + " ) ");
				if (!isCount) {
					queryWherePortion.append(" group by user_proxy.user_id, user_.userid, user_.createdate ");
				}
			}

			String query = querySelectPortion.toString() + queryWherePortion.toString();
			if (!isCount) {
				query = query + " order by user_.createdate desc ";
			}
			Logger.debug(UserManagerListBuilderFactory.class, "query:" + query);
			dc.setSQL(query);

			// ---- Parameter binding: MUST mirror the "?" order appended above.
			if (UtilMethods.isSet(firstName)) {
				dc.addParam(firstName);
			}
			if (UtilMethods.isSet(middleName)) {
				dc.addParam(middleName);
			}
			if (UtilMethods.isSet(lastName)) {
				dc.addParam(lastName);
			}
			if (UtilMethods.isSet(emailAddress)) {
				dc.addParam(emailAddress);
			}
			if (UtilMethods.isSet(dateOfBirthTypeSearch)) {
				if (dateOfBirthTypeSearch.equalsIgnoreCase("DateRange")) {
					if (UtilMethods.isSet(dateOfBirthFromDate)) {
						dc.addParam(dateOfBirthFromDate);
					}
					if (UtilMethods.isSet(dateOfBirthToDate)) {
						dc.addParam(dateOfBirthToDate);
					}
				}
			}
			if (UtilMethods.isSet(lastLoginTypeSearch)) {
				if (lastLoginTypeSearch.equalsIgnoreCase("DateRange")) {
					if (UtilMethods.isSet(lastLoginFromDate)) {
						dc.addParam(lastLoginFromDate);
					}
					if (UtilMethods.isSet(lastLoginToDate)) {
						dc.addParam(lastLoginToDate);
					}
				}
				if (lastLoginTypeSearch.equalsIgnoreCase("Since")) {
					if (UtilMethods.isSet(lastLoginSince)) {
						GregorianCalendar cal = new GregorianCalendar();
						cal.setTime(new java.util.Date());
						cal.add(GregorianCalendar.DATE, -Integer.parseInt(lastLoginSince));
						dc.addParam(cal.getTime());
					}
				}
			}
			if (UtilMethods.isSet(createdTypeSearch)) {
				if (createdTypeSearch.equalsIgnoreCase("DateRange")) {
					if (UtilMethods.isSet(createdDateFromDate)) {
						dc.addParam(createdDateFromDate);
					}
					if (UtilMethods.isSet(createdDateToDate)) {
						dc.addParam(createdDateToDate);
					}
				}
				if (createdTypeSearch.equalsIgnoreCase("Since")) {
					if (UtilMethods.isSet(createdSince)) {
						GregorianCalendar cal = new GregorianCalendar();
						cal.setTime(new java.util.Date());
						cal.add(GregorianCalendar.DATE, -Integer.parseInt(createdSince));
						dc.addParam(cal.getTime());
					}
				}
			}
			if (UtilMethods.isSet(lastVisitTypeSearch)) {
				if (lastVisitTypeSearch.equalsIgnoreCase("DateRange")) {
					if (UtilMethods.isSet(lastVisitDateFromDate)) {
						dc.addParam(lastVisitDateFromDate);
					}
					if (UtilMethods.isSet(lastVisitDateToDate)) {
						// FIX: previously bound lastVisitDateFromDate here, so the
						// "visited before" bound was silently wrong.
						dc.addParam(lastVisitDateToDate);
					}
				}
				if (lastVisitTypeSearch.equalsIgnoreCase("Since")) {
					if (UtilMethods.isSet(lastVisitSince)) {
						GregorianCalendar cal = new GregorianCalendar();
						cal.setTime(new java.util.Date());
						cal.add(GregorianCalendar.DATE, -Integer.parseInt(lastVisitSince));
						dc.addParam(cal.getTime());
					}
				}
			}
			if (UtilMethods.isSet(city)) {
				dc.addParam(city);
			}
			if (UtilMethods.isSet(state)) {
				dc.addParam(state);
			}
			if (UtilMethods.isSet(country)) {
				dc.addParam(country);
			}
			if (UtilMethods.isSet(zip)) {
				dc.addParam(zip);
			}
			if (UtilMethods.isSet(phone)) {
				dc.addParam(phone);
			}
			if (UtilMethods.isSet(fax)) {
				dc.addParam(fax);
			}
			if (UtilMethods.isSet(cellPhone)) {
				dc.addParam(cellPhone);
			}
			if (UtilMethods.isSet(referer)) {
				// two placeholders: request_uri like ? or referer like ?
				dc.addParam(referer);
				dc.addParam(referer);
			}
			// User Id Search
			if (UtilMethods.isSet(userIdSearch)) {
				dc.addParam(userIdSearch);
			}

			int startRow = form.getStartRow();
			int maxRow = form.getMaxRow();
			if (form.getMaxRow() > 0) {
				dc.setStartRow(startRow);
				dc.setMaxRows(maxRow);
			}
		}

		if (runQuery)
			try {
				results = dc.getResults();
			} catch (DotDataException e) {
				Logger.error(UserManagerListBuilderFactory.class, e.getMessage(), e);
			}

		return results;
	}

	/**
	 * Whether the user holds the role named by the USER_MANAGER_ADMIN_ROLE
	 * config property.
	 */
	public static boolean isUserManagerAdmin(User user) throws PortalException, SystemException {
		List<Role> roles;
		try {
			roles = com.dotmarketing.business.APILocator.getRoleAPI().loadRolesForUser(user.getUserId());
		} catch (DotDataException e) {
			Logger.error(UserManagerListBuilderFactory.class, e.getMessage(), e);
			throw new SystemException(e);
		}
		Iterator<Role> rolesIt = roles.iterator();
		boolean isUserManagerAdmin = false;
		while (rolesIt.hasNext()) {
			Role role = (Role) rolesIt.next();
			if (role.getName().equals(Config.getStringProperty("USER_MANAGER_ADMIN_ROLE"))) {
				isUserManagerAdmin = true;
				break;
			}
		}
		return isUserManagerAdmin;
	}

	/**
	 * Check the request to know if the form have the fullCommmand value set is set or not
	 * @param req the request to be checked
	 * @return the value of the fullCommand parameter
	 */
	public static boolean isFullCommand(ActionRequest req) {
		boolean fullCommand = false;
		try {
			String fullCommandString = req.getParameter("fullCommand");
			fullCommand = Boolean.parseBoolean(fullCommandString);
		} catch (Exception ignored) {
			// best-effort: a missing/unreadable parameter simply means "false"
		}
		return fullCommand;
	}

	/**
	 * return a String with the userIds that are retieved from a UserManager's SearchForm, also save that String to the session in this
	 * variable "usersFullCommand"
	 * @param req the request where the UserManagerListSearchForm object is store
	 * @return String with the userIds, separated by ","
	 */
	public static String loadFullCommand(ActionRequest req) {
		String userIdFullCommand = "";
		if (isFullCommand(req)) {
			HttpSession session = ((ActionRequestImpl) req).getHttpServletRequest().getSession();
			// Get all the user of the filter (no paging: startRow/maxRow = 0)
			UserManagerListSearchForm searchFormFullCommand = (UserManagerListSearchForm) session.getAttribute(WebKeys.USERMANAGERLISTPARAMETERS);
			searchFormFullCommand.setStartRow(0);
			searchFormFullCommand.setMaxRow(0);
			List matches = UserManagerListBuilderFactory.doSearch(searchFormFullCommand);
			// Create the String buffer
			StringBuffer userFullCommandSB = new StringBuffer();
			// Get the Iterator and the userIds
			Iterator it = matches.iterator();
			for (int i = 0; it.hasNext(); i++) {
				String userId = (String) ((Map) it.next()).get("userid");
				userFullCommandSB.append(userId + ",");
			}
			userIdFullCommand = userFullCommandSB.toString();
			// strip the trailing comma
			if (userIdFullCommand.indexOf(",") != -1) {
				userIdFullCommand = userIdFullCommand.substring(0, userIdFullCommand.lastIndexOf(","));
			}
			session.setAttribute("usersFullCommand", userIdFullCommand);
		}
		return userIdFullCommand;
	}

	/**
	 * Get a String with the userIds of the users to retrieve, separated by a "," and return an arraylist of the userproxies
	 * that represent those userIds
	 * @param userIdList userIds to be retrieved
	 * @return a list of userProxy that represent those user
	 */
	public static List<UserProxy> getUserProxiesFromList(String userIdList) {
		ArrayList<UserProxy> userProxyList = new ArrayList<UserProxy>();
		String[] userIdArray = userIdList.split(",");
		for (String userId : userIdArray) {
			UserProxy userProxy;
			try {
				userProxy = com.dotmarketing.business.APILocator.getUserProxyAPI().getUserProxy(userId, APILocator.getUserAPI().getSystemUser(), false);
			} catch (Exception e) {
				Logger.error(UserManagerListBuilderFactory.class, e.getMessage(), e);
				throw new DotRuntimeException(e.getMessage(), e);
			}
			if (InodeUtils.isSet(userProxy.getInode())) {
				userProxyList.add(userProxy);
			}
		}
		return userProxyList;
	}
}
gpl-3.0
mutantzombie/pfff
data/java_stdlib/classpath/gnu.java.nio.charset.java
2744
// NOTE(review): this file appears to be an auto-generated structural skeleton
// (pfff "java_stdlib" data) of GNU Classpath's gnu.java.nio.charset package —
// TODO confirm. Each class lists only field/nested-class NAMES as `int` stubs;
// there is no real logic here, and the declarations should not be edited by
// hand: regenerate from the Classpath sources instead.
package gnu.java.nio.charset;
// Single-byte Windows code-page charsets: each stub records a lookup table field.
class Windows1258 { int lookup; }
class Windows1257 { int lookup; }
class Windows1256 { int lookup; }
class Windows1255 { int lookup; }
class Windows1254 { int lookup; }
class Windows1253 { int lookup; }
class Windows1252 { int lookup; }
class Windows1251 { int lookup; }
class Windows1250 { int lookup; }
class UnicodeLittle { }
// UTF-8 charset stub with nested coder skeletons.
class UTF_8 { class Encoder { } class Decoder { } }
class UTF_16LE { }
// UTF-16 coder stubs: byte-order state plus BOM/endianness constants.
class UTF_16Encoder { int needsByteOrderMark; int useByteOrderMark; int byteOrder; int BYTE_ORDER_MARK; int LITTLE_ENDIAN; int BIG_ENDIAN; }
class UTF_16Decoder { int byteOrder; int originalByteOrder; int REVERSED_BYTE_ORDER_MARK; int BYTE_ORDER_MARK; int MAYBE_LITTLE_ENDIAN; int MAYBE_BIG_ENDIAN; int UNKNOWN_ENDIAN; int LITTLE_ENDIAN; int BIG_ENDIAN; }
class UTF_16BE { }
class UTF_16 { }
class US_ASCII { class Encoder { int helper; } class Decoder { int helper; } }
// Charset provider stub: registry of charsets and canonical names.
class Provider { int extendedLoaded; int charsets; int canonicalNames; int singleton; }
// Macintosh code-page charsets.
class MacTurkish { int lookup; }
class MacThai { int lookup; }
class MacSymbol { int lookup; }
class MacRomania { int lookup; }
class MacRoman { int lookup; }
class MacIceland { int lookup; }
class MacGreek { int lookup; }
class MacDingbat { int lookup; }
class MacCyrillic { int lookup; }
class MacCroatian { int lookup; }
class MacCentralEurope { int lookup; }
class MS874 { int lookup; }
class KOI_8 { int lookup; }
// ISO-8859 family charsets.
class ISO_8859_9 { int lookup; }
class ISO_8859_8 { int lookup; }
class ISO_8859_7 { int lookup; }
class ISO_8859_6 { int lookup; }
class ISO_8859_5 { int lookup; }
class ISO_8859_4 { int lookup; }
class ISO_8859_3 { int lookup; }
class ISO_8859_2 { int lookup; }
class ISO_8859_15 { int lookup; }
class ISO_8859_13 { int lookup; }
class ISO_8859_1 { class Encoder { int helper; } class Decoder { int helper; } }
class EncodingHelper { int canonicalNames; }
// IBM/DOS code-page charsets.
class Cp874 { int lookup; }
class Cp869 { int lookup; }
class Cp866 { int lookup; }
class Cp865 { int lookup; }
class Cp864 { int lookup; }
class Cp863 { int lookup; }
class Cp862 { int lookup; }
class Cp861 { int lookup; }
class Cp860 { int lookup; }
class Cp857 { int lookup; }
class Cp855 { int lookup; }
class Cp852 { int lookup; }
class Cp850 { int lookup; }
class Cp775 { int lookup; }
class Cp737 { int lookup; }
class Cp437 { int lookup; }
class Cp424 { int lookup; }
class ByteEncodeLoopHelper { }
class ByteDecodeLoopHelper { }
// Base single-byte charset stub with shared coder skeletons.
class ByteCharset { class Encoder { int helper; int lookup; } class Decoder { int helper; int lookup; } int NONE; int lookupTable; }
lgpl-2.1
klabarge/qz-print
src/qz/ui/CertificateTable.java
8417
package qz.ui;

import org.joor.Reflect;
import qz.auth.Certificate;
import qz.common.Constants;

import javax.swing.*;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import java.awt.*;
import java.util.Calendar;

/**
 * Created by Tres on 2/22/2015.
 * Displays Certificate information in a JTable
 */
public class CertificateTable extends JTable {

    /**
     * Certificate fields to be displayed (and the corresponding function to Reflect upon)
     */
    enum CertificateField {
        ORGANIZATION("Organization", "getOrganization"),
        COMMON_NAME("Common Name", "getCommonName"),
        TRUSTED("Trusted", "isTrusted"),
        VALID_FROM("Valid From", "getValidFrom"),
        VALID_TO("Valid To", "getValidTo"),
        FINGERPRINT("Fingerprint", "getFingerprint");

        // Human-readable label shown in the first table column
        String description;
        // Name of the Certificate method invoked reflectively to fetch the value
        String callBack;

        CertificateField(String description, String callBack) {
            this.description = description;
            this.callBack = callBack;
        }

        /**
         * Returns the <code>String</code> value associated with this certificate field.
         * Invokes {@code callBack} on the certificate via joor's Reflect; any
         * null certificate or null result maps to the empty string.
         * @return Certificate field such as "commonName"
         */
        public String getValue(Certificate cert) {
            if (cert == null) {
                return "";
            }
            Reflect reflect = Reflect.on(cert).call(callBack);
            Object value = reflect == null ? null : reflect.get();
            if (value == null) {
                return "";
            }
            return value.toString();
        }

        @Override
        public String toString() {
            return description;
        }

        public String getDescription() {
            return description;
        }

        /** Number of displayed fields (one table row per field). */
        public static int size() {
            return values().length;
        }
    }

    // Certificate currently rendered by the table (may be null)
    private Certificate cert;
    private DefaultTableModel model;
    // warn = now minus EXPIRY_WARN days... see refreshComponents; used by the
    // renderer to flag certificates that expire soon
    private Calendar warn;
    private Calendar now;
    // Colors captured from the L&F so the renderer can restore defaults
    private Color defaultForeground;
    private Color defaultSelectedForeground;
    private IconCache iconCache;

    public CertificateTable(Certificate cert, IconCache iconCache) {
        super();
        initComponents();
        setIconCache(iconCache);
        setCertificate(cert);
    }

    /**
     * Builds the two-column (Field / Value), non-editable model and installs
     * the custom renderer.
     */
    private void initComponents() {
        model = new DefaultTableModel() {
            @Override
            public boolean isCellEditable(int x, int y) {
                // table is read-only
                return false;
            }
        };
        model.addColumn("Field");
        model.addColumn("Value");
        getTableHeader().setReorderingAllowed(false);
        setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        setRowSelectionAllowed(true);
        setDefaultRenderer(Object.class, new CertificateTableCellRenderer());
        setModel(model);

        defaultForeground = UIManager.getDefaults().getColor("Table.foreground");
        defaultSelectedForeground = UIManager.getDefaults().getColor("Table.selectionForeground");
    }

    /**
     * Rebuilds all rows from the current certificate: first pass adds one row
     * per CertificateField, second pass fills in the value column. Also
     * refreshes the "now" and "warn" timestamps used for expiry highlighting.
     */
    public void refreshComponents() {
        removeRows();

        if (cert == null) {
            return;
        }

        now = Calendar.getInstance();
        warn = Calendar.getInstance();
        // NOTE(review): subtracts EXPIRY_WARN days from "now"; the renderer
        // compares getValidToDate() against this — confirm intended direction.
        warn.add(Calendar.DAY_OF_MONTH, -1 * Constants.EXPIRY_WARN);

        // First Column
        for (CertificateField field : CertificateField.values()) {
            model.addRow(new Object[]{field, ""});
        }

        // Second Column
        for (int col = 0; col < model.getColumnCount(); col++) {
            for (int row = 0; row < model.getRowCount(); row++) {
                Object cell = (model.getValueAt(row, col));
                if (cell instanceof CertificateField) {
                    model.setValueAt(((CertificateField) cell).getValue(cert), row, col + 1);
                }
            }
        }

        repaint();
    }

    public void setIconCache(IconCache iconCache) {
        this.iconCache = iconCache;
    }

    /** Swaps in a new certificate and repopulates the table. */
    public void setCertificate(Certificate cert) {
        this.cert = cert;
        refreshComponents();
    }

    /** Clears every row (iterates backwards so indices stay valid). */
    public void removeRows() {
        for (int row = model.getRowCount() - 1; row >= 0; row--) {
            model.removeRow(row);
        }
    }

    /**
     * Sets preferred <code>ScrollPane</code> preferred viewable height to match the natural table height
     * Leaves the <code>ScrollPane</code> preferred viewable width as default
     */
    public void autoSize() {
        // temporarily populate with empty rows so getPreferredSize reflects
        // the final row count, then restore the real content
        removeRows();
        for (int row = 0; row < CertificateField.size(); row++) {
            model.addRow(new Object[2]);
        }
        int normalWidth = (int)getPreferredScrollableViewportSize().getWidth();
        int autoHeight = (int)getPreferredSize().getHeight();
        setPreferredScrollableViewportSize(new Dimension(normalWidth, autoHeight));
        setFillsViewportHeight(true);
        refreshComponents();
    }

    /**
     * Custom cell renderer for JTable to allow colors and styles not directly available in a JTable
     */
    private class CertificateTableCellRenderer extends DefaultTableCellRenderer {

        // Render states: plain, highlighted warning, or trusted (green) styling
        final int STATUS_NORMAL = 0;
        final int STATUS_WARNING = 1;
        final int STATUS_TRUSTED = 2;

        @Override
        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int col) {
            JLabel label = (JLabel) super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, col);

            // First Column: the field name cell gets the field icon
            if (value instanceof CertificateField) {
                label = stylizeLabel(STATUS_NORMAL, label, isSelected);
                if (iconCache != null) {
                    label.setIcon(iconCache.getIcon(IconCache.Icon.FIELD_ICON));
                }
                return label;
            }

            // Second Column: style depends on which field this value belongs to
            if (cert == null || col < 1) {
                return stylizeLabel(STATUS_NORMAL, label, isSelected);
            }

            // the field enum lives in the neighboring (first) column
            CertificateField field = (CertificateField)table.getValueAt(row, col - 1);
            if (field == null) {
                return stylizeLabel(STATUS_NORMAL, label, isSelected);
            }
            switch (field) {
                case TRUSTED:
                    label.setText(cert.isTrusted() ? Constants.TRUSTED_PUBLISHER : Constants.UNTRUSTED_PUBLISHER);
                    return stylizeLabel(!cert.isTrusted() ? STATUS_WARNING : STATUS_TRUSTED, label, isSelected);
                case VALID_FROM:
                    // warn when the certificate is not yet valid
                    boolean futureExpiration = cert.getValidFromDate().compareTo(now.getTime()) > 0;
                    return stylizeLabel(futureExpiration ? STATUS_WARNING : STATUS_NORMAL, label, isSelected, "future expiration");
                case VALID_TO:
                    boolean expiresSoon = cert.getValidToDate().compareTo(warn.getTime()) < 0;
                    boolean expired = cert.getValidToDate().compareTo(now.getTime()) < 0;
                    String reason = expiresSoon ? "expired" : "expires soon";
                    return stylizeLabel(expiresSoon || expired ? STATUS_WARNING : STATUS_NORMAL, label, isSelected, reason);
                default:
                    return stylizeLabel(STATUS_NORMAL, label, isSelected);
            }
        }

        private JLabel stylizeLabel(int statusCode, JLabel label, boolean isSelected) {
            return stylizeLabel(statusCode, label, isSelected, null);
        }

        /**
         * Applies the font weight, foreground color and optional "(reason)"
         * suffix for the given render status. Selection keeps the L&F's
         * selected-foreground color so warnings stay readable when highlighted.
         */
        private JLabel stylizeLabel(int statusCode, JLabel label, boolean isSelected, String reason) {
            label.setIcon(null);

            int fontWeight;
            Color foreground;

            switch (statusCode) {
                case STATUS_WARNING:
                    foreground = Constants.WARNING_COLOR;
                    fontWeight = Font.BOLD;
                    break;
                case STATUS_TRUSTED:
                    foreground = Constants.TRUSTED_COLOR;
                    fontWeight = Font.PLAIN;
                    break;
                case STATUS_NORMAL:
                default:
                    foreground = defaultForeground;
                    fontWeight = Font.PLAIN;
            }

            label.setFont(label.getFont().deriveFont(fontWeight));
            label.setForeground(isSelected ? defaultSelectedForeground : foreground);

            if (statusCode == STATUS_WARNING && reason != null) {
                label.setText(label.getText() + " (" + reason + ")");
            }

            return label;
        }
    }
}
lgpl-2.1
mapr/hadoop-common
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/utils/TestServiceApiUtil.java
27667
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.service.utils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.registry.client.api.RegistryConstants; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.ServiceTestUtils; import org.apache.hadoop.yarn.service.api.records.Artifact; import org.apache.hadoop.yarn.service.api.records.Component; import org.apache.hadoop.yarn.service.api.records.KerberosPrincipal; import org.apache.hadoop.yarn.service.api.records.PlacementConstraint; import org.apache.hadoop.yarn.service.api.records.PlacementPolicy; import org.apache.hadoop.yarn.service.api.records.PlacementScope; import org.apache.hadoop.yarn.service.api.records.PlacementType; import org.apache.hadoop.yarn.service.api.records.Resource; import org.apache.hadoop.yarn.service.api.records.Service; import org.apache.hadoop.yarn.service.api.records.ServiceState; import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import 
java.util.Collection; import java.util.Collections; import java.util.List; import static org.assertj.core.api.Assertions.assertThat; import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.apache.hadoop.yarn.service.conf.RestApiConstants.DEFAULT_UNLIMITED_LIFETIME; import static org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; /** * Test for ServiceApiUtil helper methods. */ public class TestServiceApiUtil extends ServiceTestUtils { private static final Logger LOG = LoggerFactory .getLogger(TestServiceApiUtil.class); private static final String EXCEPTION_PREFIX = "Should have thrown " + "exception: "; private static final String NO_EXCEPTION_PREFIX = "Should not have thrown " + "exception: "; private static final String LEN_64_STR = "abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz01"; private static final YarnConfiguration CONF_DEFAULT_DNS = new YarnConfiguration(); private static final YarnConfiguration CONF_DNS_ENABLED = new YarnConfiguration(); @BeforeClass public static void init() { CONF_DNS_ENABLED.setBoolean(RegistryConstants.KEY_DNS_ENABLED, true); } @Test(timeout = 90000) public void testResourceValidation() throws Exception { assertEquals(RegistryConstants.MAX_FQDN_LABEL_LENGTH + 1, LEN_64_STR .length()); SliderFileSystem sfs = ServiceTestUtils.initMockFs(); Service app = new Service(); // no name try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no name"); } catch (IllegalArgumentException e) { assertEquals(ERROR_APPLICATION_NAME_INVALID, e.getMessage()); } app.setName("test"); // no version try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + " service with no version"); } catch (IllegalArgumentException e) { 
assertEquals(String.format(ERROR_APPLICATION_VERSION_INVALID, app.getName()), e.getMessage()); } app.setVersion("v1"); // bad format name String[] badNames = {"4finance", "Finance", "finance@home", LEN_64_STR}; for (String badName : badNames) { app.setName(badName); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with bad name " + badName); } catch (IllegalArgumentException e) { } } // launch command not specified app.setName(LEN_64_STR); Component comp = new Component().name("comp1"); app.addComponent(comp); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DEFAULT_DNS); Assert.fail(EXCEPTION_PREFIX + "service with no launch command"); } catch (IllegalArgumentException e) { assertEquals(RestApiErrorMessages.ERROR_ABSENT_LAUNCH_COMMAND, e.getMessage()); } // launch command not specified app.setName(LEN_64_STR.substring(0, RegistryConstants .MAX_FQDN_LABEL_LENGTH)); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no launch command"); } catch (IllegalArgumentException e) { assertEquals(RestApiErrorMessages.ERROR_ABSENT_LAUNCH_COMMAND, e.getMessage()); } // memory not specified comp.setLaunchCommand("sleep 1"); Resource res = new Resource(); app.setResource(res); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no memory"); } catch (IllegalArgumentException e) { assertEquals(String.format( RestApiErrorMessages.ERROR_RESOURCE_MEMORY_FOR_COMP_INVALID, comp.getName()), e.getMessage()); } // invalid no of cpus res.setMemory("100mb"); res.setCpus(-2); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail( EXCEPTION_PREFIX + "service with invalid no of cpus"); } catch (IllegalArgumentException e) { assertEquals(String.format( RestApiErrorMessages.ERROR_RESOURCE_CPUS_FOR_COMP_INVALID_RANGE, comp.getName()), 
e.getMessage()); } // number of containers not specified res.setCpus(2); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no container count"); } catch (IllegalArgumentException e) { Assert.assertTrue(e.getMessage() .contains(ERROR_CONTAINERS_COUNT_INVALID)); } // specifying profile along with cpus/memory raises exception res.setProfile("hbase_finance_large"); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with resource profile along with cpus/memory"); } catch (IllegalArgumentException e) { assertEquals(String.format(RestApiErrorMessages .ERROR_RESOURCE_PROFILE_MULTIPLE_VALUES_FOR_COMP_NOT_SUPPORTED, comp.getName()), e.getMessage()); } // currently resource profile alone is not supported. // TODO: remove the next test once resource profile alone is supported. res.setCpus(null); res.setMemory(null); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with resource profile only"); } catch (IllegalArgumentException e) { assertEquals(ERROR_RESOURCE_PROFILE_NOT_SUPPORTED_YET, e.getMessage()); } // unset profile here and add cpus/memory back res.setProfile(null); res.setCpus(2); res.setMemory("2gb"); // null number of containers try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "null number of containers"); } catch (IllegalArgumentException e) { Assert.assertTrue(e.getMessage() .startsWith(ERROR_CONTAINERS_COUNT_INVALID)); } } @Test public void testArtifacts() throws IOException { SliderFileSystem sfs = ServiceTestUtils.initMockFs(); Service app = new Service(); app.setName("service1"); app.setVersion("v1"); Resource res = new Resource(); app.setResource(res); res.setMemory("512M"); // no artifact id fails with default type Artifact artifact = new Artifact(); app.setArtifact(artifact); String compName = "comp1"; 
Component comp = ServiceTestUtils.createComponent(compName); app.setComponents(Collections.singletonList(comp)); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no artifact id"); } catch (IllegalArgumentException e) { assertEquals(String.format(ERROR_ARTIFACT_ID_FOR_COMP_INVALID, compName), e.getMessage()); } // no artifact id fails with SERVICE type artifact.setType(Artifact.TypeEnum.SERVICE); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no artifact id"); } catch (IllegalArgumentException e) { assertEquals(ERROR_ARTIFACT_ID_INVALID, e.getMessage()); } // no artifact id fails with TARBALL type artifact.setType(Artifact.TypeEnum.TARBALL); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with no artifact id"); } catch (IllegalArgumentException e) { assertEquals(String.format(ERROR_ARTIFACT_ID_FOR_COMP_INVALID, compName), e.getMessage()); } // everything valid here artifact.setType(Artifact.TypeEnum.DOCKER); artifact.setId("docker.io/centos:centos7"); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { LOG.error("service attributes specified should be valid here", e); Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } assertThat(app.getLifetime()).isEqualTo(DEFAULT_UNLIMITED_LIFETIME); } private static Resource createValidResource() { Resource res = new Resource(); res.setMemory("512M"); return res; } private static Component createValidComponent(String compName) { Component comp = new Component(); comp.setName(compName); comp.setResource(createValidResource()); comp.setNumberOfContainers(1L); comp.setLaunchCommand("sleep 1"); return comp; } private static Service createValidApplication(String compName) { Service app = new Service(); app.setName("name"); app.setVersion("v1"); 
app.setResource(createValidResource()); if (compName != null) { app.addComponent(createValidComponent(compName)); } return app; } @Test public void testExternalApplication() throws IOException { Service ext = createValidApplication("comp1"); SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext); Service app = createValidApplication(null); Artifact artifact = new Artifact(); artifact.setType(Artifact.TypeEnum.SERVICE); artifact.setId("id"); app.setArtifact(artifact); app.addComponent(ServiceTestUtils.createComponent("comp2")); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } assertEquals(1, app.getComponents().size()); assertNotNull(app.getComponent("comp2")); } @Test public void testDuplicateComponents() throws IOException { SliderFileSystem sfs = ServiceTestUtils.initMockFs(); String compName = "comp1"; Service app = createValidApplication(compName); app.addComponent(createValidComponent(compName)); // duplicate component name fails try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with component collision"); } catch (IllegalArgumentException e) { assertEquals("Component name collision: " + compName, e.getMessage()); } } @Test public void testComponentNameSameAsServiceName() throws IOException { SliderFileSystem sfs = ServiceTestUtils.initMockFs(); Service app = new Service(); app.setName("test"); app.setVersion("v1"); app.addComponent(createValidComponent("test")); //component name same as service name try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "component name matches service name"); } catch (IllegalArgumentException e) { assertEquals("Component name test must not be same as service name test", e.getMessage()); } } @Test public void testExternalDuplicateComponent() throws IOException { Service ext = 
createValidApplication("comp1"); SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext); Service app = createValidApplication("comp1"); Artifact artifact = new Artifact(); artifact.setType(Artifact.TypeEnum.SERVICE); artifact.setId("id"); app.getComponent("comp1").setArtifact(artifact); // duplicate component name okay in the case of SERVICE component try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } } @Test public void testExternalComponent() throws IOException { Service ext = createValidApplication("comp1"); SliderFileSystem sfs = ServiceTestUtils.initMockFs(ext); Service app = createValidApplication("comp2"); Artifact artifact = new Artifact(); artifact.setType(Artifact.TypeEnum.SERVICE); artifact.setId("id"); app.setArtifact(artifact); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } assertEquals(1, app.getComponents().size()); // artifact ID not inherited from global assertNotNull(app.getComponent("comp2")); // set SERVICE artifact id on component app.getComponent("comp2").setArtifact(artifact); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } assertEquals(1, app.getComponents().size()); // original component replaced by external component assertNotNull(app.getComponent("comp1")); } public static void verifyDependencySorting(List<Component> components, Component... 
expectedSorting) { Collection<Component> actualSorting = ServiceApiUtil.sortByDependencies( components); assertEquals(expectedSorting.length, actualSorting.size()); int i = 0; for (Component component : actualSorting) { assertEquals(expectedSorting[i++], component); } } @Test public void testDependencySorting() throws IOException { Component a = ServiceTestUtils.createComponent("a"); Component b = ServiceTestUtils.createComponent("b"); Component c = ServiceTestUtils.createComponent("c"); Component d = ServiceTestUtils.createComponent("d").dependencies(Arrays.asList("c")); Component e = ServiceTestUtils.createComponent("e") .dependencies(Arrays.asList("b", "d")); verifyDependencySorting(Arrays.asList(a, b, c), a, b, c); verifyDependencySorting(Arrays.asList(c, a, b), c, a, b); verifyDependencySorting(Arrays.asList(a, b, c, d, e), a, b, c, d, e); verifyDependencySorting(Arrays.asList(e, d, c, b, a), c, b, a, d, e); c.setDependencies(Arrays.asList("e")); try { verifyDependencySorting(Arrays.asList(a, b, c, d, e)); Assert.fail(EXCEPTION_PREFIX + "components with dependency cycle"); } catch (IllegalArgumentException ex) { assertEquals(String.format( RestApiErrorMessages.ERROR_DEPENDENCY_CYCLE, Arrays.asList(c, d, e)), ex.getMessage()); } SliderFileSystem sfs = ServiceTestUtils.initMockFs(); Service service = createValidApplication(null); service.setComponents(Arrays.asList(c, d, e)); try { ServiceApiUtil.validateAndResolveService(service, sfs, CONF_DEFAULT_DNS); Assert.fail(EXCEPTION_PREFIX + "components with bad dependencies"); } catch (IllegalArgumentException ex) { assertEquals(String.format( RestApiErrorMessages.ERROR_DEPENDENCY_INVALID, "b", "e"), ex .getMessage()); } } @Test public void testInvalidComponent() throws IOException { SliderFileSystem sfs = ServiceTestUtils.initMockFs(); testComponent(sfs); } @Test public void testValidateCompName() { String[] invalidNames = { "EXAMPLE", // UPPER case not allowed "example_app" // underscore not allowed. 
}; for (String name : invalidNames) { try { ServiceApiUtil.validateNameFormat(name, new Configuration()); Assert.fail(); } catch (IllegalArgumentException ex) { ex.printStackTrace(); } } } private static void testComponent(SliderFileSystem sfs) throws IOException { int maxLen = RegistryConstants.MAX_FQDN_LABEL_LENGTH; assertEquals(19, Long.toString(Long.MAX_VALUE).length()); maxLen = maxLen - Long.toString(Long.MAX_VALUE).length(); String compName = LEN_64_STR.substring(0, maxLen + 1); Service app = createValidApplication(null); app.addComponent(createValidComponent(compName)); // invalid component name fails if dns is enabled try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "service with invalid component name"); } catch (IllegalArgumentException e) { assertEquals(String.format(RestApiErrorMessages .ERROR_COMPONENT_NAME_INVALID, maxLen, compName), e.getMessage()); } // does not fail if dns is disabled try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DEFAULT_DNS); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } compName = LEN_64_STR.substring(0, maxLen); app = createValidApplication(null); app.addComponent(createValidComponent(compName)); // does not fail try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } } @Test public void testPlacementPolicy() throws IOException { SliderFileSystem sfs = ServiceTestUtils.initMockFs(); Service app = createValidApplication("comp-a"); Component comp = app.getComponents().get(0); PlacementPolicy pp = new PlacementPolicy(); PlacementConstraint pc = new PlacementConstraint(); pc.setName("CA1"); pp.setConstraints(Collections.singletonList(pc)); comp.setPlacementPolicy(pp); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "constraint with no type"); } 
catch (IllegalArgumentException e) { assertEquals(String.format( RestApiErrorMessages.ERROR_PLACEMENT_POLICY_CONSTRAINT_TYPE_NULL, "CA1 ", "comp-a"), e.getMessage()); } // Set the type pc.setType(PlacementType.ANTI_AFFINITY); try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); Assert.fail(EXCEPTION_PREFIX + "constraint with no scope"); } catch (IllegalArgumentException e) { assertEquals(String.format( RestApiErrorMessages.ERROR_PLACEMENT_POLICY_CONSTRAINT_SCOPE_NULL, "CA1 ", "comp-a"), e.getMessage()); } // Set the scope pc.setScope(PlacementScope.NODE); // Target tag is optional. pc.setTargetTags(Collections.singletonList("comp-a")); // Validation can succeed for any arbitrary target, only scheduler knows // if the target tag is valid. try { ServiceApiUtil.validateAndResolveService(app, sfs, CONF_DNS_ENABLED); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } } @Test public void testKerberosPrincipal() throws IOException { SliderFileSystem sfs = ServiceTestUtils.initMockFs(); Service app = createValidApplication("comp-a"); KerberosPrincipal kp = new KerberosPrincipal(); kp.setKeytab("file:///tmp/a.keytab"); kp.setPrincipalName("user/_HOST@domain.com"); app.setKerberosPrincipal(kp); // This should succeed try { ServiceApiUtil.validateKerberosPrincipal(app.getKerberosPrincipal()); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } // Keytab with no URI scheme should succeed too kp.setKeytab("/some/path"); try { ServiceApiUtil.validateKerberosPrincipal(app.getKerberosPrincipal()); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } } @Test public void testKerberosPrincipalNameFormat() throws IOException { Service app = createValidApplication("comp-a"); KerberosPrincipal kp = new KerberosPrincipal(); kp.setPrincipalName("user@domain.com"); app.setKerberosPrincipal(kp); try { 
ServiceApiUtil.validateKerberosPrincipal(app.getKerberosPrincipal()); Assert.fail(EXCEPTION_PREFIX + "service with invalid principal name " + "format."); } catch (IllegalArgumentException e) { assertEquals( String.format( RestApiErrorMessages.ERROR_KERBEROS_PRINCIPAL_NAME_FORMAT, kp.getPrincipalName()), e.getMessage()); } kp.setPrincipalName("user/_HOST@domain.com"); try { ServiceApiUtil.validateKerberosPrincipal(app.getKerberosPrincipal()); } catch (IllegalArgumentException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } kp.setPrincipalName(null); kp.setKeytab(null); try { ServiceApiUtil.validateKerberosPrincipal(app.getKerberosPrincipal()); } catch (NullPointerException e) { Assert.fail(NO_EXCEPTION_PREFIX + e.getMessage()); } } @Test public void testResolveCompsDependency() { Service service = createExampleApplication(); List<String> dependencies = new ArrayList<String>(); dependencies.add("compb"); Component compa = createComponent("compa"); compa.setDependencies(dependencies); Component compb = createComponent("compb"); service.addComponent(compa); service.addComponent(compb); List<String> order = ServiceApiUtil.resolveCompsDependency(service); List<String> expected = new ArrayList<String>(); expected.add("compb"); expected.add("compa"); for (int i = 0; i < expected.size(); i++) { Assert.assertEquals("Components are not equal.", expected.get(i), order.get(i)); } } @Test public void testResolveCompsDependencyReversed() { Service service = createExampleApplication(); List<String> dependencies = new ArrayList<String>(); dependencies.add("compa"); Component compa = createComponent("compa"); Component compb = createComponent("compb"); compb.setDependencies(dependencies); service.addComponent(compa); service.addComponent(compb); List<String> order = ServiceApiUtil.resolveCompsDependency(service); List<String> expected = new ArrayList<String>(); expected.add("compa"); expected.add("compb"); for (int i = 0; i < expected.size(); i++) { 
Assert.assertEquals("Components are not equal.", expected.get(i), order.get(i)); } } @Test public void testResolveCompsCircularDependency() { Service service = createExampleApplication(); List<String> dependencies = new ArrayList<String>(); List<String> dependencies2 = new ArrayList<String>(); dependencies.add("compb"); dependencies2.add("compa"); Component compa = createComponent("compa"); compa.setDependencies(dependencies); Component compb = createComponent("compb"); compa.setDependencies(dependencies2); service.addComponent(compa); service.addComponent(compb); List<String> order = ServiceApiUtil.resolveCompsDependency(service); List<String> expected = new ArrayList<String>(); expected.add("compa"); expected.add("compb"); for (int i = 0; i < expected.size(); i++) { Assert.assertEquals("Components are not equal.", expected.get(i), order.get(i)); } } @Test public void testResolveNoCompsDependency() { Service service = createExampleApplication(); Component compa = createComponent("compa"); Component compb = createComponent("compb"); service.addComponent(compa); service.addComponent(compb); List<String> order = ServiceApiUtil.resolveCompsDependency(service); List<String> expected = new ArrayList<String>(); expected.add("compa"); expected.add("compb"); for (int i = 0; i < expected.size(); i++) { Assert.assertEquals("Components are not equal.", expected.get(i), order.get(i)); } } @Test(timeout = 1500) public void testNoServiceDependencies() { Service service = createExampleApplication(); Component compa = createComponent("compa"); Component compb = createComponent("compb"); service.addComponent(compa); service.addComponent(compb); List<String> dependencies = new ArrayList<String>(); service.setDependencies(dependencies); ServiceApiUtil.checkServiceDependencySatisified(service); } @Test public void testServiceDependencies() { Thread thread = new Thread() { @Override public void run() { Service service = createExampleApplication(); Component compa = 
createComponent("compa"); Component compb = createComponent("compb"); service.addComponent(compa); service.addComponent(compb); List<String> dependencies = new ArrayList<String>(); dependencies.add("abc"); service.setDependencies(dependencies); Service dependent = createExampleApplication(); dependent.setState(ServiceState.STOPPED); ServiceApiUtil.checkServiceDependencySatisified(service); } }; thread.start(); try { Thread.sleep(1000); } catch (InterruptedException e) { } Assert.assertTrue(thread.isAlive()); } @Test public void testJvmOpts() throws Exception { String invalidJvmOpts = "`ping -c 3 example.com`"; intercept(IllegalArgumentException.class, "Invalid character in yarn.service.am.java.opts.", () -> ServiceApiUtil.validateJvmOpts(invalidJvmOpts)); String validJvmOpts = "-Dyarn.service.am.java.opts=-Xmx768m " + "-Djava.security.auth.login.config=/opt/hadoop/etc/jaas-zk.conf"; try { ServiceApiUtil.validateJvmOpts(validJvmOpts); } catch (Exception ex) { fail("Invalid character in yarn.service.am.java.opts."); } } public static Service createExampleApplication() { Service exampleApp = new Service(); exampleApp.setName("example-app"); exampleApp.setVersion("v1"); return exampleApp; } }
apache-2.0
krzysztof-magosa/encog-java-core
src/main/java/org/encog/mathutil/matrices/decomposition/EigenvalueDecomposition.java
24172
/* * Encog(tm) Core v3.3 - Java Version * http://www.heatonresearch.com/encog/ * https://github.com/encog/encog-java-core * Copyright 2008-2014 Heaton Research, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For more information on Heaton Research copyrights, licenses * and trademarks visit: * http://www.heatonresearch.com/copyright */ package org.encog.mathutil.matrices.decomposition; import java.util.Arrays; import org.encog.mathutil.EncogMath; import org.encog.mathutil.matrices.Matrix; /** * Eigenvalues and eigenvectors of a real matrix. * <P> * If A is symmetric, then A = V*D*V' where the eigenvalue matrix D is diagonal * and the eigenvector matrix V is orthogonal. I.e. A = * V.times(D.times(V.transpose())) and V.times(V.transpose()) equals the * identity matrix. * <P> * If A is not symmetric, then the eigenvalue matrix D is block diagonal with * the real eigenvalues in 1-by-1 blocks and any complex eigenvalues, lambda + * i*mu, in 2-by-2 blocks, [lambda, mu; -mu, lambda]. The columns of V represent * the eigenvectors in the sense that A*V = V*D, i.e. A.times(V) equals * V.times(D). The matrix V may be badly conditioned, or even singular, so the * validity of the equation A = V*D*inverse(V) depends upon V.cond(). * * This file based on a class from the public domain JAMA package. * http://math.nist.gov/javanumerics/jama/ */ public class EigenvalueDecomposition { /** * Row and column dimension (square matrix). */ private final int n; /** * Symmetry flag. 
*/ private final boolean issymmetric; /** * Arrays for internal storage of eigenvalues. */ private final double[] d, e; /** * Array for internal storage of eigenvectors. */ private final double[][] v; /** * Complex scalar division. */ private double cdivr; /** * Complex scalar division. */ private double cdivi; /** * Array for internal storage of nonsymmetric Hessenberg form. * * @serial internal storage of nonsymmetric Hessenberg form. */ private double[][] h; /** * Working storage for nonsymmetric algorithm. * * @serial working storage for nonsymmetric algorithm. */ private double[] ort; /** * Check for symmetry, then construct the eigenvalue decomposition Structure * to access D and V. * * @param matrix * Square matrix */ public EigenvalueDecomposition(final Matrix matrix) { final double[][] a = matrix.getData(); this.n = matrix.getCols(); this.v = new double[this.n][this.n]; this.d = new double[this.n]; this.e = new double[this.n]; this.issymmetric = isSymmetric(a); if (this.issymmetric) { // Copy matrix a to v. for (int i = 0; i < this.n; i++) { System.arraycopy(a[i], 0, v[i], 0, this.n); } // Tridiagonalize. tred2(); // Diagonalize. tql2(); } else { this.h = new double[this.n][this.n]; this.ort = new double[this.n]; // Copy matrix a to h. for (int j = 0; j < this.n; j++) { System.arraycopy(a, 0, h, 0, n); } // Reduce to Hessenberg form. orthes(); // Reduce Hessenberg to real Schur form. hqr2(); } } /** * Returns whether the given arrays make a symmetric matrix. A symmetric * matrix is defined as a square matrix that is identical when flipped * around its diagonal. A matrix with no rows and no columns is defined to * be symmetric. * * @param a the matrix to analyze. * @return {@code true} iff the matrix is symmetric. Malformed matrices are * considered asymetric. */ static boolean isSymmetric(final double[][] a) { // TODO: Perhaps move this to the Matrix class. 
// Note that we only need to analyze the positions on one side of the // diagonal as the diagonal always stays the same. final int len = a.length; if (len == 0) { return true; } // Because we skip the first row, verify its length explicitly if (a[0].length != len) { return false; } // Loop through all of the rows, skipping the first for (int j = 1; j < len; j++) { if (a[j].length != len) { return false; } // Loop through all of the columns up to the diagonal for (int i = 0; i < j; i++) { if (a[i][j] != a[j][i]) { return false; } } } return true; } // Symmetric tridiagonal QL algorithm. private void cdiv(final double xr, final double xi, final double yr, final double yi) { double r, d; if (Math.abs(yr) > Math.abs(yi)) { r = yi / yr; d = yr + r * yi; this.cdivr = (xr + r * xi) / d; this.cdivi = (xi - r * xr) / d; } else { r = yr / yi; d = yi + r * yr; this.cdivr = (r * xr + xi) / d; this.cdivi = (r * xi - xr) / d; } } /** * Return the block diagonal eigenvalue matrix * * @return D */ public Matrix getD() { final Matrix X = new Matrix(this.n, this.n); final double[][] D = X.getData(); for (int i = 0; i < this.n; i++) { Arrays.fill(D[i], 0.0); D[i][i] = this.d[i]; if (this.e[i] > 0) { D[i][i + 1] = this.e[i]; } else if (this.e[i] < 0) { D[i][i - 1] = this.e[i]; } } return X; } /** * Return the imaginary parts of the eigenvalues. * * @return imag(diag(D)). */ public double[] getImagEigenvalues() { return this.e; } /** * Return the real parts of the eigenvalues. * * @return real(diag(D)). */ public double[] getRealEigenvalues() { return this.d; } /** * Return the eigenvector matrix. * * @return V */ public Matrix getV() { return new Matrix(this.v); } /** * This is derived from the Algol procedure hqr2, by Martin and Wilkinson, * Handbook for Auto. Comp., Vol.ii-Linear Algebra, and the corresponding * Fortran subroutine in EISPACK. 
*/ private void hqr2() { // Initialize final int nn = this.n; int n = nn - 1; final int low = 0; final int high = nn - 1; final double eps = Math.pow(2.0, -52.0); double exshift = 0.0; double p = 0, q = 0, r = 0, s = 0, z = 0, t, w, x, y; // Store roots isolated by balanc and compute matrix norm double norm = 0.0; for (int i = 0; i < nn; i++) { if ((i < low) | (i > high)) { this.d[i] = this.h[i][i]; this.e[i] = 0.0; } for (int j = Math.max(i - 1, 0); j < nn; j++) { norm = norm + Math.abs(this.h[i][j]); } } // Outer loop over eigenvalue index int iter = 0; while (n >= low) { // Look for single small sub-diagonal element int l = n; while (l > low) { s = Math.abs(this.h[l - 1][l - 1]) + Math.abs(this.h[l][l]); if (s == 0.0) { s = norm; } if (Math.abs(this.h[l][l - 1]) < eps * s) { break; } l--; } // Check for convergence // One root found if (l == n) { this.h[n][n] = this.h[n][n] + exshift; this.d[n] = this.h[n][n]; this.e[n] = 0.0; n--; iter = 0; // Two roots found } else if (l == n - 1) { w = this.h[n][n - 1] * this.h[n - 1][n]; p = (this.h[n - 1][n - 1] - this.h[n][n]) / 2.0; q = p * p + w; z = Math.sqrt(Math.abs(q)); this.h[n][n] = this.h[n][n] + exshift; this.h[n - 1][n - 1] = this.h[n - 1][n - 1] + exshift; x = this.h[n][n]; // Real pair if (q >= 0) { if (p >= 0) { z = p + z; } else { z = p - z; } this.d[n - 1] = x + z; this.d[n] = this.d[n - 1]; if (z != 0.0) { this.d[n] = x - w / z; } this.e[n - 1] = 0.0; this.e[n] = 0.0; x = this.h[n][n - 1]; s = Math.abs(x) + Math.abs(z); p = x / s; q = z / s; r = Math.sqrt(p * p + q * q); p = p / r; q = q / r; // Row modification for (int j = n - 1; j < nn; j++) { z = this.h[n - 1][j]; this.h[n - 1][j] = q * z + p * this.h[n][j]; this.h[n][j] = q * this.h[n][j] - p * z; } // Column modification for (int i = 0; i <= n; i++) { z = this.h[i][n - 1]; this.h[i][n - 1] = q * z + p * this.h[i][n]; this.h[i][n] = q * this.h[i][n] - p * z; } // Accumulate transformations for (int i = low; i <= high; i++) { z = this.v[i][n - 1]; 
this.v[i][n - 1] = q * z + p * this.v[i][n]; this.v[i][n] = q * this.v[i][n] - p * z; } // Complex pair } else { this.d[n - 1] = x + p; this.d[n] = x + p; this.e[n - 1] = z; this.e[n] = -z; } n = n - 2; iter = 0; // No convergence yet } else { // Form shift x = this.h[n][n]; y = 0.0; w = 0.0; if (l < n) { y = this.h[n - 1][n - 1]; w = this.h[n][n - 1] * this.h[n - 1][n]; } // Wilkinson's original ad hoc shift if (iter == 10) { exshift += x; for (int i = low; i <= n; i++) { this.h[i][i] -= x; } s = Math.abs(this.h[n][n - 1]) + Math.abs(this.h[n - 1][n - 2]); x = y = 0.75 * s; w = -0.4375 * s * s; } // MATLAB's new ad hoc shift if (iter == 30) { s = (y - x) / 2.0; s = s * s + w; if (s > 0) { s = Math.sqrt(s); if (y < x) { s = -s; } s = x - w / ((y - x) / 2.0 + s); for (int i = low; i <= n; i++) { this.h[i][i] -= s; } exshift += s; x = y = w = 0.964; } } iter = iter + 1; // (Could check iteration count here.) // Look for two consecutive small sub-diagonal elements int m = n - 2; while (m >= l) { z = this.h[m][m]; r = x - z; s = y - z; p = (r * s - w) / this.h[m + 1][m] + this.h[m][m + 1]; q = this.h[m + 1][m + 1] - z - r - s; r = this.h[m + 2][m + 1]; s = Math.abs(p) + Math.abs(q) + Math.abs(r); p = p / s; q = q / s; r = r / s; if (m == l) { break; } if (Math.abs(this.h[m][m - 1]) * (Math.abs(q) + Math.abs(r)) < eps * (Math.abs(p) * (Math.abs(this.h[m - 1][m - 1]) + Math.abs(z) + Math .abs(this.h[m + 1][m + 1])))) { break; } m--; } for (int i = m + 2; i <= n; i++) { this.h[i][i - 2] = 0.0; if (i > m + 2) { this.h[i][i - 3] = 0.0; } } // Double QR step involving rows l:n and columns m:n for (int k = m; k <= n - 1; k++) { final boolean notlast = (k != n - 1); if (k != m) { p = this.h[k][k - 1]; q = this.h[k + 1][k - 1]; r = (notlast ? 
this.h[k + 2][k - 1] : 0.0); x = Math.abs(p) + Math.abs(q) + Math.abs(r); if (x != 0.0) { p = p / x; q = q / x; r = r / x; } } if (x == 0.0) { break; } s = Math.sqrt(p * p + q * q + r * r); if (p < 0) { s = -s; } if (s != 0) { if (k != m) { this.h[k][k - 1] = -s * x; } else if (l != m) { this.h[k][k - 1] = -this.h[k][k - 1]; } p = p + s; x = p / s; y = q / s; z = r / s; q = q / p; r = r / p; // Row modification for (int j = k; j < nn; j++) { p = this.h[k][j] + q * this.h[k + 1][j]; if (notlast) { p = p + r * this.h[k + 2][j]; this.h[k + 2][j] = this.h[k + 2][j] - p * z; } this.h[k][j] = this.h[k][j] - p * x; this.h[k + 1][j] = this.h[k + 1][j] - p * y; } // Column modification for (int i = 0; i <= Math.min(n, k + 3); i++) { p = x * this.h[i][k] + y * this.h[i][k + 1]; if (notlast) { p = p + z * this.h[i][k + 2]; this.h[i][k + 2] = this.h[i][k + 2] - p * r; } this.h[i][k] = this.h[i][k] - p; this.h[i][k + 1] = this.h[i][k + 1] - p * q; } // Accumulate transformations for (int i = low; i <= high; i++) { p = x * this.v[i][k] + y * this.v[i][k + 1]; if (notlast) { p = p + z * this.v[i][k + 2]; this.v[i][k + 2] = this.v[i][k + 2] - p * r; } this.v[i][k] = this.v[i][k] - p; this.v[i][k + 1] = this.v[i][k + 1] - p * q; } } // (s != 0) } // k loop } // check convergence } // while (n >= low) // Backsubstitute to find vectors of upper triangular form if (norm == 0.0) { return; } for (n = nn - 1; n >= 0; n--) { p = this.d[n]; q = this.e[n]; // Real vector if (q == 0) { int l = n; this.h[n][n] = 1.0; for (int i = n - 1; i >= 0; i--) { w = this.h[i][i] - p; r = 0.0; for (int j = l; j <= n; j++) { r = r + this.h[i][j] * this.h[j][n]; } if (this.e[i] < 0.0) { z = w; s = r; } else { l = i; if (this.e[i] == 0.0) { if (w != 0.0) { this.h[i][n] = -r / w; } else { this.h[i][n] = -r / (eps * norm); } // Solve real equations } else { x = this.h[i][i + 1]; y = this.h[i + 1][i]; q = (this.d[i] - p) * (this.d[i] - p) + this.e[i] * this.e[i]; t = (x * s - z * r) / q; this.h[i][n] = t; if 
(Math.abs(x) > Math.abs(z)) { this.h[i + 1][n] = (-r - w * t) / x; } else { this.h[i + 1][n] = (-s - y * t) / z; } } // Overflow control t = Math.abs(this.h[i][n]); if ((eps * t) * t > 1) { for (int j = i; j <= n; j++) { this.h[j][n] = this.h[j][n] / t; } } } } // Complex vector } else if (q < 0) { int l = n - 1; // Last vector component imaginary so matrix is triangular if (Math.abs(this.h[n][n - 1]) > Math.abs(this.h[n - 1][n])) { this.h[n - 1][n - 1] = q / this.h[n][n - 1]; this.h[n - 1][n] = -(this.h[n][n] - p) / this.h[n][n - 1]; } else { cdiv(0.0, -this.h[n - 1][n], this.h[n - 1][n - 1] - p, q); this.h[n - 1][n - 1] = this.cdivr; this.h[n - 1][n] = this.cdivi; } this.h[n][n - 1] = 0.0; this.h[n][n] = 1.0; for (int i = n - 2; i >= 0; i--) { double ra, sa, vr, vi; ra = 0.0; sa = 0.0; for (int j = l; j <= n; j++) { ra = ra + this.h[i][j] * this.h[j][n - 1]; sa = sa + this.h[i][j] * this.h[j][n]; } w = this.h[i][i] - p; if (this.e[i] < 0.0) { z = w; r = ra; s = sa; } else { l = i; if (this.e[i] == 0) { cdiv(-ra, -sa, w, q); this.h[i][n - 1] = this.cdivr; this.h[i][n] = this.cdivi; } else { // Solve complex equations x = this.h[i][i + 1]; y = this.h[i + 1][i]; vr = (this.d[i] - p) * (this.d[i] - p) + this.e[i] * this.e[i] - q * q; vi = (this.d[i] - p) * 2.0 * q; if ((vr == 0.0) & (vi == 0.0)) { vr = eps * norm * (Math.abs(w) + Math.abs(q) + Math.abs(x) + Math.abs(y) + Math .abs(z)); } cdiv(x * r - z * ra + q * sa, x * s - z * sa - q * ra, vr, vi); this.h[i][n - 1] = this.cdivr; this.h[i][n] = this.cdivi; if (Math.abs(x) > (Math.abs(z) + Math.abs(q))) { this.h[i + 1][n - 1] = (-ra - w * this.h[i][n - 1] + q * this.h[i][n]) / x; this.h[i + 1][n] = (-sa - w * this.h[i][n] - q * this.h[i][n - 1]) / x; } else { cdiv(-r - y * this.h[i][n - 1], -s - y * this.h[i][n], z, q); this.h[i + 1][n - 1] = this.cdivr; this.h[i + 1][n] = this.cdivi; } } // Overflow control t = Math.max(Math.abs(this.h[i][n - 1]), Math.abs(this.h[i][n])); if ((eps * t) * t > 1) { for (int j = i; j 
<= n; j++) { this.h[j][n - 1] = this.h[j][n - 1] / t; this.h[j][n] = this.h[j][n] / t; } } } } } } // Vectors of isolated roots for (int i = 0; i < nn; i++) { if ((i < low) | (i > high)) { System.arraycopy(this.h, i, this.v, i, nn - i); } } // Back transformation to get eigenvectors of original matrix for (int j = nn - 1; j >= low; j--) { for (int i = low; i <= high; i++) { z = 0.0; for (int k = low; k <= Math.min(j, high); k++) { z = z + this.v[i][k] * this.h[k][j]; } this.v[i][j] = z; } } } /** * This is derived from the Algol procedures orthes and ortran, by Martin * and Wilkinson, Handbook for Auto. Comp., Vol.ii-Linear Algebra, and the * corresponding Fortran subroutines in EISPACK. */ private void orthes() { final int low = 0; final int high = this.n - 1; for (int m = low + 1; m <= high - 1; m++) { // Scale column. double scale = 0.0; for (int i = m; i <= high; i++) { scale = scale + Math.abs(this.h[i][m - 1]); } if (scale != 0.0) { // Compute Householder transformation. double lh = 0.0; for (int i = high; i >= m; i--) { this.ort[i] = this.h[i][m - 1] / scale; lh += this.ort[i] * this.ort[i]; } double g = Math.sqrt(lh); if (this.ort[m] > 0) { g = -g; } lh = lh - this.ort[m] * g; this.ort[m] = this.ort[m] - g; // Apply Householder similarity transformation // H = (I-u*u'/h)*H*(I-u*u')/h) for (int j = m; j < this.n; j++) { double f = 0.0; for (int i = high; i >= m; i--) { f += this.ort[i] * this.h[i][j]; } f = f / lh; for (int i = m; i <= high; i++) { this.h[i][j] -= f * this.ort[i]; } } for (int i = 0; i <= high; i++) { double f = 0.0; for (int j = high; j >= m; j--) { f += this.ort[j] * this.h[i][j]; } f = f / lh; for (int j = m; j <= high; j++) { this.h[i][j] -= f * this.ort[j]; } } this.ort[m] = scale * this.ort[m]; this.h[m][m - 1] = scale * g; } } // Accumulate transformations (Algol's ortran). // Fill v's diagonal with 1s. 
for (int i = 0; i < this.n; i++) { Arrays.fill(this.v[i], 0.0); this.v[i][i] = 1.0; } for (int m = high - 1; m >= low + 1; m--) { if (this.h[m][m - 1] != 0.0) { for (int i = m + 1; i <= high; i++) { this.ort[i] = this.h[i][m - 1]; } for (int j = m; j <= high; j++) { double g = 0.0; for (int i = m; i <= high; i++) { g += this.ort[i] * this.v[i][j]; } // Double division avoids possible underflow g = (g / this.ort[m]) / this.h[m][m - 1]; for (int i = m; i <= high; i++) { this.v[i][j] += g * this.ort[i]; } } } } } private void tql2() { // This is derived from the Algol procedures tql2, by // Bowdler, Martin, Reinsch, and Wilkinson, Handbook for // Auto. Comp., Vol.ii-Linear Algebra, and the corresponding // Fortran subroutine in EISPACK. for (int i = 1; i < this.n; i++) { this.e[i - 1] = this.e[i]; } this.e[this.n - 1] = 0.0; double f = 0.0; double tst1 = 0.0; final double eps = Math.pow(2.0, -52.0); for (int l = 0; l < this.n; l++) { // Find small subdiagonal element tst1 = Math.max(tst1, Math.abs(this.d[l]) + Math.abs(this.e[l])); int m = l; while (m < this.n) { if (Math.abs(this.e[m]) <= eps * tst1) { break; } m++; } // If m == l, d[l] is an eigenvalue, // otherwise, iterate. if (m > l) { int iter = 0; do { iter = iter + 1; // (Could check iteration count here.) // Compute implicit shift double g = this.d[l]; double p = (this.d[l + 1] - g) / (2.0 * this.e[l]); double r = EncogMath.hypot(p, 1.0); if (p < 0) { r = -r; } this.d[l] = this.e[l] / (p + r); this.d[l + 1] = this.e[l] * (p + r); final double dl1 = this.d[l + 1]; double h = g - this.d[l]; for (int i = l + 2; i < this.n; i++) { this.d[i] -= h; } f = f + h; // Implicit QL transformation. 
p = this.d[m]; double c = 1.0; double c2 = c; double c3 = c; final double el1 = this.e[l + 1]; double s = 0.0; double s2 = 0.0; for (int i = m - 1; i >= l; i--) { c3 = c2; c2 = c; s2 = s; g = c * this.e[i]; h = c * p; r = EncogMath.hypot(p, this.e[i]); this.e[i + 1] = s * r; s = this.e[i] / r; c = p / r; p = c * this.d[i] - s * g; this.d[i + 1] = h + s * (c * g + s * this.d[i]); // Accumulate transformation. for (int k = 0; k < this.n; k++) { h = this.v[k][i + 1]; this.v[k][i + 1] = s * this.v[k][i] + c * h; this.v[k][i] = c * this.v[k][i] - s * h; } } p = -s * s2 * c3 * el1 * this.e[l] / dl1; this.e[l] = s * p; this.d[l] = c * p; // Check for convergence. } while (Math.abs(this.e[l]) > eps * tst1); } this.d[l] = this.d[l] + f; this.e[l] = 0.0; } // Sort eigenvalues and corresponding vectors. for (int i = 0; i < this.n - 1; i++) { int k = i; double p = this.d[i]; for (int j = i + 1; j < this.n; j++) { if (this.d[j] < p) { k = j; p = this.d[j]; } } if (k != i) { this.d[k] = this.d[i]; this.d[i] = p; for (int j = 0; j < this.n; j++) { p = this.v[j][i]; this.v[j][i] = this.v[j][k]; this.v[j][k] = p; } } } } /** * Symmetric Householder reduction to tridiagonal form. */ private void tred2() { // This is derived from the Algol procedures tred2 by // Bowdler, Martin, Reinsch, and Wilkinson, Handbook for // Auto. Comp., Vol.ii-Linear Algebra, and the corresponding // Fortran subroutine in EISPACK. System.arraycopy(this.v[this.n - 1], 0, this.d, 0, this.n); // Householder reduction to tridiagonal form. for (int i = this.n - 1; i > 0; i--) { // Scale to avoid under/overflow. double scale = 0.0; double h = 0.0; for (int k = 0; k < i; k++) { scale = scale + Math.abs(this.d[k]); } if (scale == 0.0) { this.e[i] = this.d[i - 1]; for (int j = 0; j < i; j++) { this.d[j] = this.v[i - 1][j]; this.v[i][j] = 0.0; this.v[j][i] = 0.0; } } else { // Generate Householder vector. 
for (int k = 0; k < i; k++) { this.d[k] /= scale; h += this.d[k] * this.d[k]; } double f = this.d[i - 1]; double g = Math.sqrt(h); if (f > 0) { g = -g; } this.e[i] = scale * g; h = h - f * g; this.d[i - 1] = f - g; Arrays.fill(this.e, 0, i, 0.0); // Apply similarity transformation to remaining columns. for (int j = 0; j < i; j++) { f = this.d[j]; this.v[j][i] = f; g = this.e[j] + this.v[j][j] * f; for (int k = j + 1; k <= i - 1; k++) { g += this.v[k][j] * this.d[k]; this.e[k] += this.v[k][j] * f; } this.e[j] = g; } f = 0.0; for (int j = 0; j < i; j++) { this.e[j] /= h; f += this.e[j] * this.d[j]; } final double hh = f / (h + h); for (int j = 0; j < i; j++) { this.e[j] -= hh * this.d[j]; } for (int j = 0; j < i; j++) { f = this.d[j]; g = this.e[j]; for (int k = j; k <= i - 1; k++) { this.v[k][j] -= (f * this.e[k] + g * this.d[k]); } this.d[j] = this.v[i - 1][j]; this.v[i][j] = 0.0; } } this.d[i] = h; } // Accumulate transformations. for (int i = 0; i < this.n - 1; i++) { this.v[this.n - 1][i] = this.v[i][i]; this.v[i][i] = 1.0; final double h = this.d[i + 1]; if (h != 0.0) { for (int k = 0; k <= i; k++) { this.d[k] = this.v[k][i + 1] / h; } for (int j = 0; j <= i; j++) { double g = 0.0; for (int k = 0; k <= i; k++) { g += this.v[k][i + 1] * this.v[k][j]; } for (int k = 0; k <= i; k++) { this.v[k][j] -= g * this.d[k]; } } } for (int k = 0; k <= i; k++) { this.v[k][i + 1] = 0.0; } } System.arraycopy(this.v[this.n - 1], 0, this.d, 0, this.n); Arrays.fill(this.v[this.n - 1], 0.0); this.v[this.n - 1][this.n - 1] = 1.0; this.e[0] = 0.0; } }
apache-2.0
nikhilvibhav/camel
components/camel-djl/src/main/java/org/apache/camel/component/djl/model/CustomObjectDetectionPredictor.java
4277
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.djl.model;

import java.io.*;

import ai.djl.Model;
import ai.djl.inference.Predictor;
import ai.djl.modality.cv.Image;
import ai.djl.modality.cv.ImageFactory;
import ai.djl.modality.cv.output.DetectedObjects;
import ai.djl.translate.TranslateException;
import ai.djl.translate.Translator;
import org.apache.camel.Exchange;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Runs object detection on the incoming message body using a custom
 * {@link Model} and {@link Translator} looked up from the Camel registry by
 * the names given at construction time. Accepts a body of type byte[],
 * {@link File} or {@link InputStream} and replaces it with the resulting
 * {@link DetectedObjects}.
 */
public class CustomObjectDetectionPredictor extends AbstractPredictor {
    private static final Logger LOG = LoggerFactory.getLogger(CustomObjectDetectionPredictor.class);

    private final String modelName;
    private final String translatorName;

    public CustomObjectDetectionPredictor(String modelName, String translatorName) {
        this.modelName = modelName;
        this.translatorName = translatorName;
    }

    /**
     * Dispatches on the body type and replaces the body with the detection
     * result.
     *
     * @throws RuntimeException if the body is not byte[], InputStream or File,
     *                          or if prediction fails.
     */
    @Override
    public void process(Exchange exchange) throws Exception {
        // Resolved per exchange so a rebound registry entry is picked up.
        Model model = exchange.getContext().getRegistry().lookupByNameAndType(modelName, Model.class);
        Translator translator = exchange.getContext().getRegistry().lookupByNameAndType(translatorName, Translator.class);

        Object body = exchange.getIn().getBody();
        if (body instanceof byte[]) {
            byte[] bytes = exchange.getIn().getBody(byte[].class);
            DetectedObjects result = classify(model, translator, new ByteArrayInputStream(bytes));
            exchange.getIn().setBody(result);
        } else if (body instanceof File) {
            DetectedObjects result = classify(model, translator, exchange.getIn().getBody(File.class));
            exchange.getIn().setBody(result);
        } else if (body instanceof InputStream) {
            DetectedObjects result = classify(model, translator, exchange.getIn().getBody(InputStream.class));
            exchange.getIn().setBody(result);
        } else {
            throw new RuntimeException("Data type is not supported. Body should be byte[], InputStream or File");
        }
    }

    /**
     * Runs the model on a decoded image. The predictor is closed via
     * try-with-resources.
     */
    public DetectedObjects classify(Model model, Translator translator, Image image) throws Exception {
        try (Predictor<Image, DetectedObjects> predictor = model.newPredictor(translator)) {
            return predictor.predict(image);
        } catch (TranslateException e) {
            LOG.error("Could not process input or output", e);
            throw new RuntimeException("Could not process input or output", e);
        }
    }

    /**
     * Decodes an image file and classifies it.
     */
    public DetectedObjects classify(Model model, Translator translator, File input) throws Exception {
        // try-with-resources: the original version leaked this FileInputStream.
        try (InputStream is = new FileInputStream(input)) {
            Image image = ImageFactory.getInstance().fromInputStream(is);
            return classify(model, translator, image);
        } catch (IOException e) {
            LOG.error("Couldn't transform input into a BufferedImage");
            throw new RuntimeException("Couldn't transform input into a BufferedImage", e);
        }
    }

    /**
     * Decodes an image from a stream and classifies it. The caller owns the
     * stream; it is not closed here.
     */
    public DetectedObjects classify(Model model, Translator translator, InputStream input) throws Exception {
        try {
            Image image = ImageFactory.getInstance().fromInputStream(input);
            return classify(model, translator, image);
        } catch (IOException e) {
            LOG.error("Couldn't transform input into a BufferedImage");
            throw new RuntimeException("Couldn't transform input into a BufferedImage", e);
        }
    }
}
apache-2.0
etirelli/drools-wb
drools-wb-screens/drools-wb-guided-dtable-editor/drools-wb-guided-dtable-editor-client/src/main/java/org/drools/workbench/screens/guided/dtable/client/editor/menu/BaseMenuViewImpl.java
1646
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.workbench.screens.guided.dtable.client.editor.menu;

import java.util.Arrays;
import java.util.List;

import com.google.gwt.dom.client.Element;
import com.google.gwt.user.client.ui.Composite;
import org.gwtbootstrap3.client.ui.constants.Styles;

/**
 * Common base for menu views: holds the presenter reference and provides
 * helpers to toggle and query the Bootstrap "disabled" CSS class on elements.
 */
public abstract class BaseMenuViewImpl<M extends BaseMenu> extends Composite implements BaseMenuView<M> {

    protected M presenter;

    @Override
    public void init( final M presenter ) {
        this.presenter = presenter;
    }

    @Override
    public void enableElement( final Element element,
                               final boolean enabled ) {
        // Guard-clause form: disabling adds the marker class, enabling removes it.
        if ( !enabled ) {
            element.addClassName( Styles.DISABLED );
            return;
        }
        element.removeClassName( Styles.DISABLED );
    }

    @Override
    public boolean isDisabled( final Element element ) {
        // Scan the whitespace-separated class list for the "disabled" marker.
        for ( String className : element.getClassName().split( "\\s" ) ) {
            if ( className.equals( Styles.DISABLED ) ) {
                return true;
            }
        }
        return false;
    }
}
apache-2.0
nmcl/scratch
graalvm/transactions/fork/narayana/STM/src/test/java/org/jboss/stm/types/AtomicDoubleUnitTest.java
1724
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2006, JBoss Inc., and individual contributors as indicated
 * by the @authors tag. See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */

package org.jboss.stm.types;

import java.lang.reflect.Field;
import java.lang.reflect.Method;

import junit.framework.TestCase;

/**
 * Unit tests for the AtomicDouble class: checks the default value, set/get,
 * the value-taking factory method, and subtraction.
 *
 * @author Mark Little
 */
public class AtomicDoubleUnitTest extends TestCase
{
    public void test ()
    {
        final AtomicDouble value = AtomicFactory.instance().createDouble();

        // A freshly created atomic double starts at zero.
        assertEquals(value.get(), (double) 0);

        value.set(1);
        assertEquals(value.get(), (double) 1);

        // The factory can also seed an initial value.
        final AtomicDouble other = AtomicFactory.instance().createDouble(667);
        assertEquals(other.get(), (double) 667);

        // 667 - 1 = 666.
        assertEquals(other.subtract(value).get(), (double) 666);
    }
}
apache-2.0
robertoschwald/cas
support/cas-server-support-authy-core/src/main/java/org/apereo/cas/adaptors/authy/AuthyMultifactorAuthenticationProvider.java
826
package org.apereo.cas.adaptors.authy;

import org.apereo.cas.authentication.AbstractMultifactorAuthenticationProvider;
import org.apereo.cas.configuration.model.support.mfa.AuthyMultifactorProperties;

import lombok.NoArgsConstructor;
import org.apache.commons.lang3.StringUtils;

/**
 * The multifactor authentication provider for Authy.
 * (The original javadoc said "google authenticator"; the class clearly
 * targets Authy — see the properties type and friendly name below.)
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@NoArgsConstructor
public class AuthyMultifactorAuthenticationProvider extends AbstractMultifactorAuthenticationProvider {

    private static final long serialVersionUID = 4789727148634156909L;

    @Override
    public String getId() {
        // Fall back to the default Authy provider identifier when no
        // explicit id has been configured on the base provider.
        return StringUtils.defaultIfBlank(super.getId(), AuthyMultifactorProperties.DEFAULT_IDENTIFIER);
    }

    @Override
    public String getFriendlyName() {
        // Human-readable name shown in provider-selection UIs.
        return "Authy";
    }
}
apache-2.0
haocafes/DataflowJavaSDK
sdk/src/test/java/com/google/cloud/dataflow/sdk/transforms/ViewTest.java
13131
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.transforms; import static org.hamcrest.CoreMatchers.isA; import com.google.cloud.dataflow.sdk.Pipeline; import com.google.cloud.dataflow.sdk.coders.VarIntCoder; import com.google.cloud.dataflow.sdk.coders.VoidCoder; import com.google.cloud.dataflow.sdk.testing.DataflowAssert; import com.google.cloud.dataflow.sdk.testing.TestPipeline; import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows; import com.google.cloud.dataflow.sdk.transforms.windowing.GlobalWindows; import com.google.cloud.dataflow.sdk.transforms.windowing.Window; import com.google.cloud.dataflow.sdk.values.KV; import com.google.cloud.dataflow.sdk.values.PCollection; import com.google.cloud.dataflow.sdk.values.PCollectionView; import com.google.cloud.dataflow.sdk.values.TimestampedValue; import org.joda.time.Duration; import org.joda.time.Instant; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import java.io.Serializable; import java.util.Map; import java.util.NoSuchElementException; /** * Tests for {@link View}. See also {@link ParDoTest}, which * provides additional coverage since views can only be * observed via {@link ParDo}. 
*/ @RunWith(JUnit4.class) @SuppressWarnings("serial") public class ViewTest implements Serializable { // This test is Serializable, just so that it's easy to have // anonymous inner classes inside the non-static test methods. @Rule public transient ExpectedException thrown = ExpectedException.none(); @Test @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class) public void testSingletonSideInput() { Pipeline pipeline = TestPipeline.create(); final PCollectionView<Integer> view = pipeline .apply(Create.of(47)) .apply(View.<Integer>asSingleton()); PCollection<Integer> output = pipeline .apply(Create.of(1, 2, 3)) .apply(ParDo.withSideInputs(view).of( new DoFn<Integer, Integer>() { @Override public void processElement(ProcessContext c) { c.output(c.sideInput(view)); } })); DataflowAssert.that(output) .containsInAnyOrder(47, 47, 47); pipeline.run(); } @Test public void testEmptySingletonSideInput() throws Exception { Pipeline pipeline = TestPipeline.create(); final PCollectionView<Integer> view = pipeline .apply(Create.<Integer>of()) .setCoder(VarIntCoder.of()) .apply(View.<Integer>asSingleton()); PCollection<Integer> output = pipeline .apply(Create.of(1, 2, 3)) .apply(ParDo.withSideInputs(view).of( new DoFn<Integer, Integer>() { @Override public void processElement(ProcessContext c) { c.output(c.sideInput(view)); } })); thrown.expect(RuntimeException.class); thrown.expectCause(isA(NoSuchElementException.class)); thrown.expectMessage("Empty"); thrown.expectMessage("PCollection"); thrown.expectMessage("singleton"); pipeline.run(); } @Test public void testNonSingletonSideInput() throws Exception { Pipeline pipeline = TestPipeline.create(); final PCollectionView<Integer> view = pipeline .apply(Create.<Integer>of(1, 2, 3)) .apply(View.<Integer>asSingleton()); PCollection<Integer> output = pipeline .apply(Create.of(1, 2, 3)) .apply(ParDo.withSideInputs(view).of( new DoFn<Integer, Integer>() { @Override public void processElement(ProcessContext c) { 
  // NOTE(review): this chunk begins inside a test whose header lies before this
  // view of the file; the statements below are the tail of a test asserting that
  // viewing a multi-element PCollection as a singleton side input fails when the
  // pipeline runs (grounded by the expected messages below).
            c.output(c.sideInput(view));
          }
        }));

    // The singleton violation only surfaces at execution time, so the expected
    // failure is declared on `thrown` before pipeline.run().
    thrown.expect(RuntimeException.class);
    thrown.expectCause(isA(IllegalArgumentException.class));
    thrown.expectMessage("PCollection");
    thrown.expectMessage("more than one");
    thrown.expectMessage("singleton");
    pipeline.run();
  }

  /**
   * An Iterable side input makes every element of the viewed PCollection
   * available to each main-input element.
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testIterableSideInput() {
    Pipeline pipeline = TestPipeline.create();

    final PCollectionView<Iterable<Integer>> view = pipeline
        .apply(Create.of(11, 13, 17, 23))
        .apply(View.<Integer>asIterable());

    PCollection<Integer> output = pipeline
        .apply(Create.of(29, 31))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<Integer, Integer>() {
              @Override
              public void processElement(ProcessContext c) {
                // Re-emit the full side-input iterable for each main element.
                for (Integer i : c.sideInput(view)) {
                  c.output(i);
                }
              }
            }));

    // Two main-input elements (29, 31), so the four side values appear twice.
    DataflowAssert.that(output).containsInAnyOrder(
        11, 13, 17, 23,
        11, 13, 17, 23);

    pipeline.run();
  }

  /**
   * View.asMap() in multimap form: each key maps to an Iterable of all values
   * observed for that key ("a" -> {1, 2}, "b" -> {3}).
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testMapSideInput() {
    Pipeline pipeline = TestPipeline.create();

    final PCollectionView<Map<String, Iterable<Integer>>> view = pipeline
        .apply(Create.of(KV.of("a", 1), KV.of("a", 2), KV.of("b", 3)))
        .apply(View.<String, Integer>asMap());

    PCollection<KV<String, Integer>> output = pipeline
        .apply(Create.of("apple", "banana", "blackberry"))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<String, KV<String, Integer>>() {
              @Override
              public void processElement(ProcessContext c) {
                // Look up by the word's first letter; emit one KV per value.
                for (Integer v : c.sideInput(view).get(c.element().substring(0, 1))) {
                  c.output(KV.of(c.element(), v));
                }
              }
            }));

    DataflowAssert.that(output)
        .containsInAnyOrder(KV.of("apple", 1), KV.of("apple", 2),
            KV.of("banana", 3), KV.of("blackberry", 3));

    pipeline.run();
  }

  /**
   * withSingletonValues(): keys are unique, so the view is a
   * Map&lt;String, Integer&gt; rather than Map&lt;String, Iterable&lt;Integer&gt;&gt;.
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testSingletonMapSideInput() {
    Pipeline pipeline = TestPipeline.create();

    final PCollectionView<Map<String, Integer>> view = pipeline
        .apply(Create.of(KV.of("a", 1), KV.of("b", 3)))
        .apply(View.<String, Integer>asMap().withSingletonValues());

    PCollection<KV<String, Integer>> output = pipeline
        .apply(Create.of("apple", "banana", "blackberry"))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<String, KV<String, Integer>>() {
              @Override
              public void processElement(ProcessContext c) {
                c.output(KV.of(c.element(),
                    c.sideInput(view).get(c.element().substring(0, 1))));
              }
            }));

    DataflowAssert.that(output)
        .containsInAnyOrder(KV.of("apple", 1),
            KV.of("banana", 3), KV.of("blackberry", 3));

    pipeline.run();
  }

  /**
   * withCombiner(): duplicate keys are reduced with the supplied combiner
   * before the map view is built ("a" -> 1 + 20 = 21).
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testCombinedMapSideInput() {
    Pipeline pipeline = TestPipeline.create();

    final PCollectionView<Map<String, Integer>> view = pipeline
        .apply(Create.of(KV.of("a", 1), KV.of("a", 20), KV.of("b", 3)))
        .apply(View.<String, Integer>asMap().withCombiner(new Sum.SumIntegerFn()));

    PCollection<KV<String, Integer>> output = pipeline
        .apply(Create.of("apple", "banana", "blackberry"))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<String, KV<String, Integer>>() {
              @Override
              public void processElement(ProcessContext c) {
                c.output(KV.of(c.element(),
                    c.sideInput(view).get(c.element().substring(0, 1))));
              }
            }));

    DataflowAssert.that(output)
        .containsInAnyOrder(KV.of("apple", 21),
            KV.of("banana", 3), KV.of("blackberry", 3));

    pipeline.run();
  }

  /**
   * Fixed-windowed main input reading a fixed-windowed singleton side input:
   * each main window sees the sum computed in the matching side-input window
   * (1 in [0,10), 2+3=5 in [10,20) — grounded by the "A1"/"B5"/"C1" asserts).
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testWindowedSideInputFixedToFixed() {
    Pipeline p = TestPipeline.create();

    final PCollectionView<Integer> view = p
        .apply(Create.timestamped(
            TimestampedValue.of(1, new Instant(1)),
            TimestampedValue.of(2, new Instant(11)),
            TimestampedValue.of(3, new Instant(13))))
        .apply(Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
        .apply(Sum.integersGlobally().withoutDefaults())
        .apply(View.<Integer>asSingleton());

    PCollection<String> output = p
        .apply(Create.timestamped(
            TimestampedValue.of("A", new Instant(4)),
            TimestampedValue.of("B", new Instant(15)),
            TimestampedValue.of("C", new Instant(7))))
        .apply(Window.<String>into(FixedWindows.of(Duration.millis(10))))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<String, String>() {
              @Override
              public void processElement(ProcessContext c) {
                c.output(c.element() + c.sideInput(view));
              }
            }));

    // A and C fall in [0,10) where the side sum is 1; B falls in [10,20) -> 5.
    DataflowAssert.that(output).containsInAnyOrder("A1", "B5", "C1");

    p.run();
  }

  /**
   * Globally-windowed side input read from fixed-windowed main input: every
   * main window observes the single global sum (1+2+3 = 6).
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testWindowedSideInputFixedToGlobal() {
    Pipeline p = TestPipeline.create();

    final PCollectionView<Integer> view = p
        .apply(Create.timestamped(
            TimestampedValue.of(1, new Instant(1)),
            TimestampedValue.of(2, new Instant(11)),
            TimestampedValue.of(3, new Instant(13))))
        .apply(Window.<Integer>into(new GlobalWindows()))
        .apply(Sum.integersGlobally())
        .apply(View.<Integer>asSingleton());

    PCollection<String> output = p
        .apply(Create.timestamped(
            TimestampedValue.of("A", new Instant(4)),
            TimestampedValue.of("B", new Instant(15)),
            TimestampedValue.of("C", new Instant(7))))
        .apply(Window.<String>into(FixedWindows.of(Duration.millis(10))))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<String, String>() {
              @Override
              public void processElement(ProcessContext c) {
                c.output(c.element() + c.sideInput(view));
              }
            }));

    DataflowAssert.that(output).containsInAnyOrder("A6", "B6", "C6");

    p.run();
  }

  /**
   * asSingletonView() supplies the combiner's default value in side-input
   * windows that received no data: [0,10) has no side elements, so A and C
   * see 0 ("A0"/"C0"), while B sees 2+3=5.
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testWindowedSideInputFixedToFixedWithDefault() {
    Pipeline p = TestPipeline.create();

    final PCollectionView<Integer> view = p
        .apply(Create.timestamped(
            TimestampedValue.of(2, new Instant(11)),
            TimestampedValue.of(3, new Instant(13))))
        .apply(Window.<Integer>into(FixedWindows.of(Duration.millis(10))))
        .apply(Sum.integersGlobally().asSingletonView());

    PCollection<String> output = p
        .apply(Create.timestamped(
            TimestampedValue.of("A", new Instant(4)),
            TimestampedValue.of("B", new Instant(15)),
            TimestampedValue.of("C", new Instant(7))))
        .apply(Window.<String>into(FixedWindows.of(Duration.millis(10))))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<String, String>() {
              @Override
              public void processElement(ProcessContext c) {
                c.output(c.element() + c.sideInput(view));
              }
            }));

    DataflowAssert.that(output).containsInAnyOrder("A0", "B5", "C0");

    p.run();
  }

  /**
   * A side input whose combined value is null: string concatenation in the
   * DoFn renders it as the literal text "null".
   */
  @Test
  @Category(com.google.cloud.dataflow.sdk.testing.RunnableOnService.class)
  public void testSideInputWithNullDefault() {
    Pipeline p = TestPipeline.create();

    final PCollectionView<Void> view = p
        .apply(Create.of((Void) null)).setCoder(VoidCoder.of())
        .apply(Combine.globally(new SerializableFunction<Iterable<Void>, Void>() {
              @Override
              public Void apply(Iterable<Void> input) {
                return (Void) null;
              }
            }).asSingletonView());

    PCollection<String> output = p
        .apply(Create.of(""))
        .apply(ParDo.withSideInputs(view).of(
            new DoFn<String, String>() {
              @Override
              public void processElement(ProcessContext c) {
                c.output(c.element() + c.sideInput(view));
              }
            }));

    DataflowAssert.that(output).containsInAnyOrder("null");

    p.run();
  }
}
apache-2.0
WilliamRen/bbossgroups-3.5
bboss-util/src-asm/bboss/org/objectweb/asm/tree/FieldNode.java
11534
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package bboss.org.objectweb.asm.tree;

import java.util.ArrayList;
import java.util.List;

import bboss.org.objectweb.asm.AnnotationVisitor;
import bboss.org.objectweb.asm.Attribute;
import bboss.org.objectweb.asm.ClassVisitor;
import bboss.org.objectweb.asm.FieldVisitor;
import bboss.org.objectweb.asm.Opcodes;
import bboss.org.objectweb.asm.TypePath;

/**
 * A node that represents a field. Annotation and attribute lists are created
 * lazily (they stay <tt>null</tt> until the first matching visit call).
 *
 * @author Eric Bruneton
 */
public class FieldNode extends FieldVisitor {

    /**
     * The field's access flags (see {@link bboss.org.objectweb.asm.Opcodes}). This
     * field also indicates if the field is synthetic and/or deprecated.
     */
    public int access;

    /**
     * The field's name.
     */
    public String name;

    /**
     * The field's descriptor (see {@link bboss.org.objectweb.asm.Type}).
     */
    public String desc;

    /**
     * The field's signature. May be <tt>null</tt>.
     */
    public String signature;

    /**
     * The field's initial value. This field, which may be <tt>null</tt> if the
     * field does not have an initial value, must be an {@link Integer}, a
     * {@link Float}, a {@link Long}, a {@link Double} or a {@link String}.
     */
    public Object value;

    /**
     * The runtime visible annotations of this field. This list is a list of
     * {@link AnnotationNode} objects. May be <tt>null</tt>.
     *
     * @associates bboss.org.objectweb.asm.tree.AnnotationNode
     * @label visible
     */
    public List<AnnotationNode> visibleAnnotations;

    /**
     * The runtime invisible annotations of this field. This list is a list of
     * {@link AnnotationNode} objects. May be <tt>null</tt>.
     *
     * @associates bboss.org.objectweb.asm.tree.AnnotationNode
     * @label invisible
     */
    public List<AnnotationNode> invisibleAnnotations;

    /**
     * The runtime visible type annotations of this field. This list is a list
     * of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
     *
     * @associates bboss.org.objectweb.asm.tree.TypeAnnotationNode
     * @label visible
     */
    public List<TypeAnnotationNode> visibleTypeAnnotations;

    /**
     * The runtime invisible type annotations of this field. This list is a
     * list of {@link TypeAnnotationNode} objects. May be <tt>null</tt>.
     *
     * @associates bboss.org.objectweb.asm.tree.TypeAnnotationNode
     * @label invisible
     */
    public List<TypeAnnotationNode> invisibleTypeAnnotations;

    /**
     * The non standard attributes of this field. This list is a list of
     * {@link Attribute} objects. May be <tt>null</tt>.
     *
     * @associates bboss.org.objectweb.asm.Attribute
     */
    public List<Attribute> attrs;

    /**
     * Constructs a new {@link FieldNode}. <i>Subclasses must not use this
     * constructor</i>. Instead, they must use the
     * {@link #FieldNode(int, int, String, String, String, Object)} version.
     *
     * @param access
     *            the field's access flags (see
     *            {@link bboss.org.objectweb.asm.Opcodes}). This parameter also
     *            indicates if the field is synthetic and/or deprecated.
     * @param name
     *            the field's name.
     * @param desc
     *            the field's descriptor (see {@link bboss.org.objectweb.asm.Type
     *            Type}).
     * @param signature
     *            the field's signature.
     * @param value
     *            the field's initial value. This parameter, which may be
     *            <tt>null</tt> if the field does not have an initial value,
     *            must be an {@link Integer}, a {@link Float}, a {@link Long}, a
     *            {@link Double} or a {@link String}.
     * @throws IllegalStateException
     *             If a subclass calls this constructor.
     */
    public FieldNode(final int access, final String name, final String desc,
            final String signature, final Object value) {
        this(Opcodes.ASM5, access, name, desc, signature, value);
        if (getClass() != FieldNode.class) {
            // Subclasses must state the API version they implement explicitly.
            throw new IllegalStateException(
                    "Subclasses of FieldNode must use the constructor that takes an API version");
        }
    }

    /**
     * Constructs a new {@link FieldNode}. <i>Subclasses must not use this
     * constructor</i>.
     *
     * @param api
     *            the ASM API version implemented by this visitor. Must be one
     *            of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
     * @param access
     *            the field's access flags (see
     *            {@link bboss.org.objectweb.asm.Opcodes}). This parameter also
     *            indicates if the field is synthetic and/or deprecated.
     * @param name
     *            the field's name.
     * @param desc
     *            the field's descriptor (see {@link bboss.org.objectweb.asm.Type
     *            Type}).
     * @param signature
     *            the field's signature.
     * @param value
     *            the field's initial value. This parameter, which may be
     *            <tt>null</tt> if the field does not have an initial value,
     *            must be an {@link Integer}, a {@link Float}, a {@link Long}, a
     *            {@link Double} or a {@link String}.
     */
    public FieldNode(final int api, final int access, final String name,
            final String desc, final String signature, final Object value) {
        super(api);
        this.access = access;
        this.name = name;
        this.desc = desc;
        this.signature = signature;
        this.value = value;
    }

    // ------------------------------------------------------------------------
    // Implementation of the FieldVisitor abstract class
    // ------------------------------------------------------------------------

    @Override
    public AnnotationVisitor visitAnnotation(final String desc,
            final boolean visible) {
        AnnotationNode an = new AnnotationNode(desc);
        if (visible) {
            if (visibleAnnotations == null) {
                visibleAnnotations = new ArrayList<AnnotationNode>(1);
            }
            visibleAnnotations.add(an);
        } else {
            if (invisibleAnnotations == null) {
                invisibleAnnotations = new ArrayList<AnnotationNode>(1);
            }
            invisibleAnnotations.add(an);
        }
        return an;
    }

    @Override
    public AnnotationVisitor visitTypeAnnotation(int typeRef,
            TypePath typePath, String desc, boolean visible) {
        TypeAnnotationNode an = new TypeAnnotationNode(typeRef, typePath, desc);
        if (visible) {
            if (visibleTypeAnnotations == null) {
                visibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
            }
            visibleTypeAnnotations.add(an);
        } else {
            if (invisibleTypeAnnotations == null) {
                invisibleTypeAnnotations = new ArrayList<TypeAnnotationNode>(1);
            }
            invisibleTypeAnnotations.add(an);
        }
        return an;
    }

    @Override
    public void visitAttribute(final Attribute attr) {
        if (attrs == null) {
            attrs = new ArrayList<Attribute>(1);
        }
        attrs.add(attr);
    }

    @Override
    public void visitEnd() {
    }

    // ------------------------------------------------------------------------
    // Accept methods
    // ------------------------------------------------------------------------

    /**
     * Checks that this field node is compatible with the given ASM API version.
     * This methods checks that this node, and all its nodes recursively, do not
     * contain elements that were introduced in more recent versions of the ASM
     * API than the given version.
     *
     * @param api
     *            an ASM API version. Must be one of {@link Opcodes#ASM4} or
     *            {@link Opcodes#ASM5}.
     */
    public void check(final int api) {
        if (api == Opcodes.ASM4) {
            // Type annotations are only representable through the ASM5 API,
            // so their presence makes this node incompatible with ASM4.
            if (visibleTypeAnnotations != null
                    && !visibleTypeAnnotations.isEmpty()) {
                throw new RuntimeException(
                        "visible type annotations require the ASM5 API");
            }
            if (invisibleTypeAnnotations != null
                    && !invisibleTypeAnnotations.isEmpty()) {
                throw new RuntimeException(
                        "invisible type annotations require the ASM5 API");
            }
        }
    }

    /**
     * Makes the given class visitor visit this field.
     *
     * @param cv
     *            a class visitor.
     */
    public void accept(final ClassVisitor cv) {
        FieldVisitor fv = cv.visitField(access, name, desc, signature, value);
        if (fv == null) {
            // The class visitor is not interested in this field.
            return;
        }
        // Replay the stored annotations and attributes in the same order as
        // the original index-based loops: visible, invisible, visible type,
        // invisible type, then non-standard attributes.
        if (visibleAnnotations != null) {
            for (AnnotationNode an : visibleAnnotations) {
                an.accept(fv.visitAnnotation(an.desc, true));
            }
        }
        if (invisibleAnnotations != null) {
            for (AnnotationNode an : invisibleAnnotations) {
                an.accept(fv.visitAnnotation(an.desc, false));
            }
        }
        if (visibleTypeAnnotations != null) {
            for (TypeAnnotationNode an : visibleTypeAnnotations) {
                an.accept(fv.visitTypeAnnotation(an.typeRef, an.typePath,
                        an.desc, true));
            }
        }
        if (invisibleTypeAnnotations != null) {
            for (TypeAnnotationNode an : invisibleTypeAnnotations) {
                an.accept(fv.visitTypeAnnotation(an.typeRef, an.typePath,
                        an.desc, false));
            }
        }
        if (attrs != null) {
            for (Attribute attr : attrs) {
                fv.visitAttribute(attr);
            }
        }
        fv.visitEnd();
    }
}
apache-2.0
julianhyde/phoenix
phoenix-core/src/main/java/org/apache/phoenix/execute/DelegateQueryPlan.java
2847
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.execute;

import java.sql.ParameterMetaData;
import java.util.List;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.compile.RowProjector;
import org.apache.phoenix.compile.StatementContext;
import org.apache.phoenix.parse.FilterableStatement;
import org.apache.phoenix.query.KeyRange;
import org.apache.phoenix.schema.TableRef;

/**
 * Convenience base class for {@link QueryPlan} wrappers: every method
 * implemented here simply forwards to the wrapped {@code delegate} plan,
 * so a concrete subclass only needs to supply or override the pieces of
 * the QueryPlan contract whose behavior it actually changes.
 */
public abstract class DelegateQueryPlan implements QueryPlan {
    // The wrapped plan that every call below is forwarded to.
    protected final QueryPlan delegate;

    /**
     * @param delegate the plan to forward to; note it is not null-checked
     *                 here, so a null delegate only fails on first use
     */
    public DelegateQueryPlan(QueryPlan delegate) {
        this.delegate = delegate;
    }

    // All methods below are pure pass-throughs to the delegate.

    @Override
    public StatementContext getContext() {
        return delegate.getContext();
    }

    @Override
    public ParameterMetaData getParameterMetaData() {
        return delegate.getParameterMetaData();
    }

    @Override
    public long getEstimatedSize() {
        return delegate.getEstimatedSize();
    }

    @Override
    public TableRef getTableRef() {
        return delegate.getTableRef();
    }

    @Override
    public RowProjector getProjector() {
        return delegate.getProjector();
    }

    @Override
    public Integer getLimit() {
        return delegate.getLimit();
    }

    @Override
    public OrderBy getOrderBy() {
        return delegate.getOrderBy();
    }

    @Override
    public GroupBy getGroupBy() {
        return delegate.getGroupBy();
    }

    @Override
    public List<KeyRange> getSplits() {
        return delegate.getSplits();
    }

    @Override
    public List<List<Scan>> getScans() {
        return delegate.getScans();
    }

    @Override
    public FilterableStatement getStatement() {
        return delegate.getStatement();
    }

    @Override
    public boolean isDegenerate() {
        return delegate.isDegenerate();
    }

    @Override
    public boolean isRowKeyOrdered() {
        return delegate.isRowKeyOrdered();
    }
}
apache-2.0
miniway/presto
presto-client/src/main/java/io/prestosql/client/QueryResults.java
5636
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.client;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;

import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

import java.net.URI;
import java.util.List;

import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.unmodifiableIterable;
import static io.prestosql.client.FixJsonDataUtils.fixData;
import static java.util.Objects.requireNonNull;

/**
 * Immutable value object combining query status ({@code stats}, {@code error},
 * control URIs) with an optional page of result data. The presence of
 * {@code nextUri} indicates further responses can be fetched (presumably the
 * next page of the same query — confirm against the client protocol docs).
 * Invariant: {@code data} may only be present when {@code columns} is present.
 */
@Immutable
public class QueryResults
        implements QueryStatusInfo, QueryData
{
    private final String id;
    private final URI infoUri;
    private final URI partialCancelUri; // nullable
    private final URI nextUri;          // nullable; absent when the query is finished
    private final List<Column> columns; // nullable until column metadata is known
    private final Iterable<List<Object>> data; // nullable; requires columns != null
    private final StatementStats stats;
    private final QueryError error;     // nullable
    private final List<Warning> warnings;
    private final String updateType;    // nullable
    private final Long updateCount;     // nullable

    /**
     * JSON deserialization entry point. Normalizes the raw payload before
     * delegating: {@code fixData} converts the deserialized JSON values using
     * the column metadata (see {@link FixJsonDataUtils}), and a missing
     * {@code warnings} array is replaced by an empty list.
     */
    @JsonCreator
    public QueryResults(
            @JsonProperty("id") String id,
            @JsonProperty("infoUri") URI infoUri,
            @JsonProperty("partialCancelUri") URI partialCancelUri,
            @JsonProperty("nextUri") URI nextUri,
            @JsonProperty("columns") List<Column> columns,
            @JsonProperty("data") List<List<Object>> data,
            @JsonProperty("stats") StatementStats stats,
            @JsonProperty("error") QueryError error,
            @JsonProperty("warnings") List<Warning> warnings,
            @JsonProperty("updateType") String updateType,
            @JsonProperty("updateCount") Long updateCount)
    {
        this(
                id,
                infoUri,
                partialCancelUri,
                nextUri,
                columns,
                fixData(columns, data),
                stats,
                error,
                firstNonNull(warnings, ImmutableList.of()),
                updateType,
                updateCount);
    }

    /**
     * Canonical constructor: required fields are null-checked, optional ones
     * pass through as-is, and collections are defensively wrapped.
     */
    public QueryResults(
            String id,
            URI infoUri,
            URI partialCancelUri,
            URI nextUri,
            List<Column> columns,
            Iterable<List<Object>> data,
            StatementStats stats,
            QueryError error,
            List<Warning> warnings,
            String updateType,
            Long updateCount)
    {
        this.id = requireNonNull(id, "id is null");
        this.infoUri = requireNonNull(infoUri, "infoUri is null");
        this.partialCancelUri = partialCancelUri;
        this.nextUri = nextUri;
        this.columns = (columns != null) ? ImmutableList.copyOf(columns) : null;
        this.data = (data != null) ? unmodifiableIterable(data) : null;
        // Rows are meaningless without column metadata to interpret them.
        checkArgument(data == null || columns != null, "data present without columns");
        this.stats = requireNonNull(stats, "stats is null");
        this.error = error;
        this.warnings = ImmutableList.copyOf(requireNonNull(warnings, "warnings is null"));
        this.updateType = updateType;
        this.updateCount = updateCount;
    }

    @JsonProperty
    @Override
    public String getId()
    {
        return id;
    }

    @JsonProperty
    @Override
    public URI getInfoUri()
    {
        return infoUri;
    }

    @Nullable
    @JsonProperty
    @Override
    public URI getPartialCancelUri()
    {
        return partialCancelUri;
    }

    @Nullable
    @JsonProperty
    @Override
    public URI getNextUri()
    {
        return nextUri;
    }

    @Nullable
    @JsonProperty
    @Override
    public List<Column> getColumns()
    {
        return columns;
    }

    @Nullable
    @JsonProperty
    @Override
    public Iterable<List<Object>> getData()
    {
        return data;
    }

    @JsonProperty
    @Override
    public StatementStats getStats()
    {
        return stats;
    }

    @Nullable
    @JsonProperty
    @Override
    public QueryError getError()
    {
        return error;
    }

    @JsonProperty
    @Override
    public List<Warning> getWarnings()
    {
        return warnings;
    }

    @Nullable
    @JsonProperty
    @Override
    public String getUpdateType()
    {
        return updateType;
    }

    @Nullable
    @JsonProperty
    @Override
    public Long getUpdateCount()
    {
        return updateCount;
    }

    // Note: data contents are deliberately summarized as a boolean ("hasData")
    // to keep the string small; warnings are omitted entirely.
    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("id", id)
                .add("infoUri", infoUri)
                .add("partialCancelUri", partialCancelUri)
                .add("nextUri", nextUri)
                .add("columns", columns)
                .add("hasData", data != null)
                .add("stats", stats)
                .add("error", error)
                .add("updateType", updateType)
                .add("updateCount", updateCount)
                .toString();
    }
}
apache-2.0