repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
fabiofumarola/elasticsearch | src/test/java/org/elasticsearch/test/rest/ElasticsearchRestTests.java | 10501 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import com.google.common.collect.Lists;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.rest.client.RestException;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.elasticsearch.test.rest.parser.RestTestSuiteParser;
import org.elasticsearch.test.rest.section.*;
import org.elasticsearch.test.rest.spec.RestSpec;
import org.elasticsearch.test.rest.support.FileUtils;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Runs the clients test suite against an elasticsearch cluster.
 */
//tests distribution disabled for now since it causes reporting problems,
// due to the non unique suite name
//@ReplicateOnEachVm
@ClusterScope(randomDynamicTemplates = false)
public class ElasticsearchRestTests extends ElasticsearchIntegrationTest {

    /**
     * Property that allows to control whether the REST tests need to be run (default) or not (false)
     */
    public static final String REST_TESTS = "tests.rest";

    /**
     * Property that allows to control which REST tests get run. Supports comma separated list of tests
     * or directories that contain tests e.g. -Dtests.rest.suite=index,get,create/10_with_id
     */
    public static final String REST_TESTS_SUITE = "tests.rest.suite";

    /**
     * Property that allows to blacklist some of the REST tests based on a comma separated list of globs
     * e.g. -Dtests.rest.blacklist=get/10_basic/*
     */
    public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist";

    /**
     * Property that allows to control where the REST spec files need to be loaded from
     */
    public static final String REST_TESTS_SPEC = "tests.rest.spec";

    // Default classpath locations used when the corresponding system properties are not set.
    private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test";
    private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api";

    // Separator for the comma separated list properties above.
    private static final String PATHS_SEPARATOR = ",";

    // Glob matchers compiled from the blacklist property; empty array when no blacklist is set.
    private final PathMatcher[] blacklistPathMatchers;

    // Shared execution context: created once in initExecutionContext() and torn down in close().
    private static RestTestExecutionContext restTestExecutionContext;

    //private static final int JVM_COUNT = systemPropertyAsInt(SysGlobals.CHILDVM_SYSPROP_JVM_COUNT, 1);
    //private static final int CURRENT_JVM_ID = systemPropertyAsInt(SysGlobals.CHILDVM_SYSPROP_JVM_ID, 0);

    // The single yaml test section this instance runs (the runner creates one instance per candidate).
    private final RestTestCandidate testCandidate;

    public ElasticsearchRestTests(@Name("yaml") RestTestCandidate testCandidate) {
        this.testCandidate = testCandidate;
        // Compile the blacklist globs once; the actual matching happens per test in reset().
        String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null);
        if (blacklist != null) {
            blacklistPathMatchers = new PathMatcher[blacklist.length];
            int i = 0;
            for (String glob : blacklist) {
                blacklistPathMatchers[i++] = FileSystems.getDefault().getPathMatcher("glob:" + glob);
            }
        } else {
            blacklistPathMatchers = new PathMatcher[0];
        }
    }

    /**
     * Builds the parameters for the randomized runner: one {@code Object[]} per test
     * candidate, i.e. per yaml test section.
     */
    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        List<RestTestCandidate> restTestCandidates = collectTestCandidates();
        List<Object[]> objects = Lists.newArrayList();
        for (RestTestCandidate restTestCandidate : restTestCandidates) {
            objects.add(new Object[]{restTestCandidate});
        }
        return objects;
    }

    /**
     * Finds all yaml suites (optionally narrowed down through the tests.rest.suite property),
     * parses them and returns one {@link RestTestCandidate} per contained test section.
     */
    private static List<RestTestCandidate> collectTestCandidates() throws RestTestParseException, IOException {
        String[] paths = resolvePathsProperty(REST_TESTS_SUITE, DEFAULT_TESTS_PATH);
        Map<String, Set<File>> yamlSuites = FileUtils.findYamlSuites(DEFAULT_TESTS_PATH, paths);
        //yaml suites are grouped by directory (effectively by api)
        List<String> apis = Lists.newArrayList(yamlSuites.keySet());
        List<RestTestCandidate> testCandidates = Lists.newArrayList();
        RestTestSuiteParser restTestSuiteParser = new RestTestSuiteParser();
        for (String api : apis) {
            List<File> yamlFiles = Lists.newArrayList(yamlSuites.get(api));
            for (File yamlFile : yamlFiles) {
                //tests distribution disabled for now since it causes reporting problems,
                // due to the non unique suite name
                //if (mustExecute(yamlFile.getAbsolutePath())) {
                RestTestSuite restTestSuite = restTestSuiteParser.parse(api, yamlFile);
                for (TestSection testSection : restTestSuite.getTestSections()) {
                    testCandidates.add(new RestTestCandidate(restTestSuite, testSection));
                }
                //}
            }
        }
        return testCandidates;
    }

    /*private static boolean mustExecute(String test) {
        //we distribute the tests across the forked jvms if > 1
        if (JVM_COUNT > 1) {
            int jvmId = MathUtils.mod(DjbHashFunction.DJB_HASH(test), JVM_COUNT);
            if (jvmId != CURRENT_JVM_ID) {
                return false;
            }
        }
        return true;
    }*/

    /**
     * Reads a comma separated list from the given system property.
     *
     * @return the split values, a one-element array holding {@code defaultValue} when the
     *         property is unset, or {@code null} when the property is unset and no default is given
     */
    private static String[] resolvePathsProperty(String propertyName, String defaultValue) {
        String property = System.getProperty(propertyName);
        if (!Strings.hasLength(property)) {
            return defaultValue == null ? null : new String[]{defaultValue};
        } else {
            return property.split(PATHS_SEPARATOR);
        }
    }

    /**
     * Parses the REST spec and creates the shared execution context before any test runs.
     */
    @BeforeClass
    public static void initExecutionContext() throws IOException, RestException {
        //skip REST tests if disabled through -Dtests.rest=false
        assumeTrue(systemPropertyAsBoolean(REST_TESTS, true));
        String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH);
        RestSpec restSpec = RestSpec.parseFrom(DEFAULT_SPEC_PATH, specPaths);
        assert restTestExecutionContext == null;
        restTestExecutionContext = new RestTestExecutionContext(restSpec);
    }

    /**
     * Closes the shared execution context once the whole suite has run.
     */
    @AfterClass
    public static void close() {
        restTestExecutionContext.close();
        restTestExecutionContext = null;
    }

    /**
     * Per-test setup: skips blacklisted tests, points the REST client at the current
     * cluster's http addresses, and honours suite- and section-level skip sections.
     */
    @Before
    public void reset() throws IOException, RestException {
        //skip test if it matches one of the blacklist globs
        for (PathMatcher blacklistedPathMatcher : blacklistPathMatchers) {
            //we need to replace a few characters otherwise the test section name can't be parsed as a path on windows
            String testSection = testCandidate.getTestSection().getName().replace("*", "").replace("\\", "/").replaceAll("\\s+/", "/").trim();
            String testPath = testCandidate.getSuitePath() + "/" + testSection;
            assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher.matches(Paths.get(testPath)));
        }
        restTestExecutionContext.resetClient(immutableCluster().httpAddresses());
        restTestExecutionContext.clear();
        //skip test if the whole suite (yaml file) is disabled
        assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getSetupSection().getSkipSection()),
                testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
        //skip test if test section is disabled
        assumeFalse(buildSkipMessage(testCandidate.getTestPath(), testCandidate.getTestSection().getSkipSection()),
                testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
    }

    // Builds the human readable message shown when a suite/section is skipped, either due
    // to a version check or due to unsupported features.
    private static String buildSkipMessage(String description, SkipSection skipSection) {
        StringBuilder messageBuilder = new StringBuilder();
        if (skipSection.isVersionCheck()) {
            messageBuilder.append("[").append(description).append("] skipped, reason: [").append(skipSection.getReason()).append("] ");
        } else {
            messageBuilder.append("[").append(description).append("] skipped, reason: features ").append(skipSection.getFeatures()).append(" not supported");
        }
        return messageBuilder.toString();
    }

    @Override
    protected boolean randomizeNumberOfShardsAndReplicas() {
        // NOTE(review): randomization is only enabled when running against 1.2.0 or later —
        // presumably earlier versions don't tolerate it; confirm against version history.
        return COMPATIBILITY_VERSION.onOrAfter(Version.V_1_2_0);
    }

    /**
     * Runs the single test section: executes the setup do-sections (if any), clears the
     * execution context, then runs every executable section of the test section itself.
     */
    @Test
    public void test() throws IOException {
        //let's check that there is something to run, otherwise there might be a problem with the test section
        if (testCandidate.getTestSection().getExecutableSections().size() == 0) {
            throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]");
        }
        if (!testCandidate.getSetupSection().isEmpty()) {
            logger.info("start setup test [{}]", testCandidate.getTestPath());
            for (DoSection doSection : testCandidate.getSetupSection().getDoSections()) {
                doSection.execute(restTestExecutionContext);
            }
            logger.info("end setup test [{}]", testCandidate.getTestPath());
        }
        restTestExecutionContext.clear();
        for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) {
            executableSection.execute(restTestExecutionContext);
        }
    }
}
| apache-2.0 |
ua-eas/ksd-kc5.2.1-rice2.3.6-ua | rice-middleware/impl/src/main/java/org/kuali/rice/kew/routeheader/service/RouteHeaderService.java | 4613 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.routeheader.service;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.kuali.rice.kew.api.action.ActionItem;
import org.kuali.rice.kew.docsearch.SearchableAttributeValue;
import org.kuali.rice.kew.doctype.bo.DocumentType;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValueContent;
/**
 * A service providing data access for documents (a.k.a route headers).
 *
 * @see DocumentRouteHeaderValue
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public interface RouteHeaderService {

    /** Returns the route header for the given document id. */
    public DocumentRouteHeaderValue getRouteHeader(String documentId);

    /**
     * Returns the route header for the given document id.
     *
     * @param clearCache passed through to the underlying lookup; exact caching semantics
     *        are implementation defined — presumably forces a fresh read when {@code true}
     */
    public DocumentRouteHeaderValue getRouteHeader(String documentId, boolean clearCache);

    /** Returns the route headers for the given document ids. */
    public Collection<DocumentRouteHeaderValue> getRouteHeaders (Collection<String> documentIds);

    /** Returns the route headers for the given document ids, optionally bypassing the cache. */
    public Collection<DocumentRouteHeaderValue> getRouteHeaders (Collection<String> documentIds, boolean clearCache);

    /** Returns the route headers associated with the given action items, keyed by id. */
    public Map<String,DocumentRouteHeaderValue> getRouteHeadersForActionItems(Collection<ActionItem> actionItems);

    /**
     * Acquires a lock on the route header with the given document id.
     *
     * @param wait whether to block until the lock can be obtained
     */
    public void lockRouteHeader(String documentId, boolean wait);

    /** Persists the given route header. */
    public void saveRouteHeader(DocumentRouteHeaderValue routeHeader);

    /** Deletes the given route header. */
    public void deleteRouteHeader(DocumentRouteHeaderValue routeHeader);

    /** Returns the next available document id. */
    public String getNextDocumentId();

    /** Validates the given route header; failure behavior is implementation defined. */
    public void validateRouteHeader(DocumentRouteHeaderValue routeHeader);

    // NOTE(review): raw Collection/Set types below predate generics cleanup; consider
    // parameterizing in a future API revision (behavior-compatible via erasure).
    public Collection findPendingByResponsibilityIds(Set responsibilityIds);

    public Collection findByDocTypeAndAppId(String documentTypeName, String appId);

    /**
     * Removes all SearchableAttributeValues associated with the RouteHeader.
     * @param documentId id of the document whose searchable attribute values are cleared
     */
    public void clearRouteHeaderSearchValues(String documentId);

    /**
     * Updates the searchable attribute values for the document with the given id to the given values.
     * This method will clear existing search attribute values and replace with the ones given.
     */
    public void updateRouteHeaderSearchValues(String documentId, List<SearchableAttributeValue> searchAttributes);

    /**
     * Returns the application id of the {@link DocumentType} for the Document with the given ID.
     */
    public String getApplicationIdByDocumentId(String documentId);

    /** Returns the content object for the document with the given id. */
    public DocumentRouteHeaderValueContent getContent(String documentId);

    /** Returns whether the document has a searchable attribute with the given key and value. */
    public boolean hasSearchableAttributeValue(String documentId, String searchableAttributeKey, String searchableAttributeValue);

    /** Returns the status of the document with the given id. */
    public String getDocumentStatus(String documentId);

    /** Returns the application document id for the document with the given id. */
    public String getAppDocId(String documentId);

    /**
     *
     * This method Returns the application document status for the given document id
     *
     * @param documentId
     * @return String
     */
    public String getAppDocStatus(String documentId);

    /**
     *
     * This method is a more direct way to get the searchable attribute values
     *
     * @param documentId
     * @param key
     * @return the string values stored for the given searchable attribute key
     */
    public List<String> getSearchableAttributeStringValuesByKey(String documentId, String key);

    /**
     *
     * This method is a more direct way to get the searchable attribute values
     *
     * @param documentId
     * @param key
     * @return the date/time values stored for the given searchable attribute key
     */
    public List<Timestamp> getSearchableAttributeDateTimeValuesByKey(String documentId, String key);

    /**
     *
     * This method is a more direct way to get the searchable attribute values
     *
     * @param documentId
     * @param key
     * @return the decimal values stored for the given searchable attribute key
     */
    public List<BigDecimal> getSearchableAttributeFloatValuesByKey(String documentId, String key);

    /**
     *
     * This method is a more direct way to get the searchable attribute values
     *
     * @param documentId
     * @param key
     * @return the long values stored for the given searchable attribute key
     */
    public List<Long> getSearchableAttributeLongValuesByKey(String documentId, String key);
}
| apache-2.0 |
nvoron23/socialite | src/socialite/parser/BinOp.java | 2838 | package socialite.parser;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.stringtemplate.v4.ST;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import socialite.codegen.CodeGen;
import socialite.util.InternalException;
/**
 * A binary operator expression with two operands. Operands may themselves be
 * nested {@link Op} expressions, variables or constants. The operator name
 * "mod" is normalized to "%" at construction time.
 */
public class BinOp extends Op {
    private static final long serialVersionUID = 1L;

    public String op;
    public Object arg1, arg2;

    /** No-arg constructor required for externalization. */
    public BinOp() { }

    public BinOp(String operator, Object left, Object right) throws InternalException {
        if (operator.equals("mod")) {
            operator = "%";
        }
        op = operator;
        arg1 = left;
        arg2 = right;
        // Function operands are not supported directly under a BinOp.
        assert !(arg1 instanceof Function);
        assert !(arg2 instanceof Function);
    }

    /** The result type of this expression, derived from the first operand. */
    public Class opType() {
        return type(arg1);
    }

    /** Emits a code template whose expression reads "(arg1 op arg2)". */
    public ST codegen() {
        ST expr = CodeGen.expr();
        StringBuilder code = new StringBuilder("(");
        code.append(codeStr(arg1)).append(op).append(codeStr(arg2)).append(")");
        expr.add("expr", code.toString());
        return expr;
    }

    public String sig() {
        return descr(true);
    }

    public String toString() {
        return descr(false);
    }

    // Renders "(arg1 op arg2)"; the boolean selects signature-style rendering of
    // the operands (see the inherited two-argument descr helper).
    String descr(boolean sig) {
        StringBuilder sb = new StringBuilder("(");
        sb.append(descr(sig, arg1)).append(op).append(descr(sig, arg2)).append(")");
        return sb.toString();
    }

    /** Collects the Java types of both operands, recursing into nested ops. */
    public void getTypes(Collection<Class> types) {
        collectTypes(types, arg1);
        collectTypes(types, arg2);
    }

    // Adds the type(s) of a single operand to the given collection.
    private void collectTypes(Collection<Class> types, Object arg) {
        if (arg instanceof Op) {
            ((Op) arg).getTypes(types);
        } else {
            types.add(MyType.javaType(arg));
        }
    }

    /** Returns all constants appearing in either operand, sorted. */
    public SortedSet<Const> getConsts() {
        TreeSet<Const> consts = new TreeSet<Const>();
        getConsts(consts, arg1);
        getConsts(consts, arg2);
        return consts;
    }

    /** Returns all variables appearing in either operand. */
    public Set<Variable> getVars() {
        HashSet<Variable> vars = new HashSet<Variable>();
        getVariables(vars, arg1);
        getVariables(vars, arg2);
        return vars;
    }

    /** Visits this node first, then each operand that is itself an {@link Op}. */
    public void visit(OpVisitor v) {
        v.visit(this);
        if (arg1 instanceof Op) {
            v.visit((Op) arg1);
        }
        if (arg2 instanceof Op) {
            v.visit((Op) arg2);
        }
    }

    @Override
    public void readExternal(ObjectInput in) throws IOException,
            ClassNotFoundException {
        // Mirror of writeExternal: an int char-count followed by the chars of op.
        int length = in.readInt();
        StringBuilder operator = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            operator.append(in.readChar());
        }
        op = operator.toString();
        arg1 = in.readObject();
        arg2 = in.readObject();
    }

    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeInt(op.length());
        out.writeChars(op);
        out.writeObject(arg1);
        out.writeObject(arg2);
    }
}
| apache-2.0 |
robsoncardosoti/flowable-engine | modules/flowable-engine/src/main/java/org/flowable/engine/impl/persistence/entity/SuspendedJobEntity.java | 835 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.persistence.entity;
/**
 * Suspended jobs are stored separately to increase the normal job query performance
 *
 * <p>Marker interface: adds no members beyond {@link AbstractRuntimeJobEntity}; its
 * sole purpose is to distinguish the suspended-job table/entity type.</p>
 *
 * @author Tijs Rademakers
 * @author Joram Barrez
 */
public interface SuspendedJobEntity extends AbstractRuntimeJobEntity {

}
| apache-2.0 |
Hanmourang/Gobblin | gobblin-utility/src/main/java/gobblin/util/ParallelRunner.java | 8328 | /*
* Copyright (C) 2014-2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.util;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.locks.Lock;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;
import com.google.common.util.concurrent.Striped;
import gobblin.configuration.State;
/**
 * A class that is responsible for running certain methods in parallel. Methods in this class returns immediately and
 * are run in a fixed-size thread pool.
 *
 * <p>
 * This class is intended to be used in the following pattern. This example uses the serialize() method.
 *
 * <pre> {@code
 * Closer closer = Closer.create();
 * try {
 *   // Do stuff
 *   ParallelRunner runner = closer.register(new ParallelRunner(threads, fs));
 *   runner.serialize(state1, outputFilePath1);
 *   // Submit more serialization tasks
 *   runner.serialize(stateN, outputFilePathN);
 *   // Do stuff
 * } catch (Throwable e) {
 *   throw closer.rethrow(e);
 * } finally {
 *   closer.close();
 * }}
 * </pre>
 *
 * Note that calling {@link #close()} will wait for all submitted tasks to complete and then stop the
 * {@link ParallelRunner} by shutting down the {@link ExecutorService}.
 * </p>
 *
 * @author ynli
 */
public class ParallelRunner implements Closeable {

    private static final Logger LOGGER = LoggerFactory.getLogger(ParallelRunner.class);

    /** System property controlling the thread pool size. */
    public static final String PARALLEL_RUNNER_THREADS_KEY = "parallel.runner.threads";
    public static final int DEFAULT_PARALLEL_RUNNER_THREADS = 10;

    private final ExecutorService executor;
    private final FileSystem fs;

    // Futures of all submitted tasks; close() joins on them in submission order.
    private final List<Future<?>> futures = Lists.newArrayList();

    // Per-path locks so concurrent delete/rename tasks on the same path serialize.
    private final Striped<Lock> locks = Striped.lazyWeakLock(Integer.MAX_VALUE);

    public ParallelRunner(int threads, FileSystem fs) {
        this.executor = Executors.newFixedThreadPool(threads,
                ExecutorsUtils.newThreadFactory(Optional.of(LOGGER), Optional.of("ParallelRunner")));
        this.fs = fs;
    }

    /**
     * Serialize a {@link State} object into a file.
     *
     * <p>
     * This method submits a task to serialize the {@link State} object and returns immediately
     * after the task is submitted.
     * </p>
     *
     * @param state the {@link State} object to be serialized
     * @param outputFilePath the file to write the serialized {@link State} object to
     * @param <T> the {@link State} object type
     */
    public <T extends State> void serializeToFile(final T state, final Path outputFilePath) {
        // Use a Callable with a Void return type to allow exceptions to be thrown
        this.futures.add(this.executor.submit(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                SerializationUtils.serializeState(fs, outputFilePath, state);
                return null;
            }
        }));
    }

    /**
     * Deserialize a {@link State} object from a file.
     *
     * <p>
     * This method submits a task to deserialize the {@link State} object and returns immediately
     * after the task is submitted.
     * </p>
     *
     * @param state an empty {@link State} object to which the deserialized content will be populated
     * @param inputFilePath the input file to read from
     * @param <T> the {@link State} object type
     */
    public <T extends State> void deserializeFromFile(final T state, final Path inputFilePath) {
        this.futures.add(this.executor.submit(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                SerializationUtils.deserializeState(fs, inputFilePath, state);
                return null;
            }
        }));
    }

    /**
     * Deserialize a list of {@link State} objects from a Hadoop {@link SequenceFile}.
     *
     * <p>
     * This method submits a task to deserialize the {@link State} objects and returns immediately
     * after the task is submitted.
     * </p>
     *
     * @param stateClass the {@link Class} object of the {@link State} class
     * @param inputFilePath the input {@link SequenceFile} to read from
     * @param states a {@link Collection} object to store the deserialized {@link State} objects
     *        (NOTE(review): must be thread-safe if multiple tasks write to it concurrently — confirm at call sites)
     * @param <T> the {@link State} object type
     */
    public <T extends State> void deserializeFromSequenceFile(final Class<? extends Writable> keyClass,
            final Class<T> stateClass, final Path inputFilePath, final Collection<T> states) {
        this.futures.add(this.executor.submit(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                Closer closer = Closer.create();
                try {
                    @SuppressWarnings("deprecation")
                    SequenceFile.Reader reader = closer.register(new SequenceFile.Reader(fs, inputFilePath, fs.getConf()));
                    Writable key = keyClass.newInstance();
                    T state = stateClass.newInstance();
                    // A fresh state instance is created per record so the collection does not
                    // end up holding repeated references to one mutated object.
                    while (reader.next(key, state)) {
                        states.add(state);
                        state = stateClass.newInstance();
                    }
                } catch (Throwable t) {
                    throw closer.rethrow(t);
                } finally {
                    closer.close();
                }
                return null;
            }
        }));
    }

    /**
     * Delete a {@link Path}.
     *
     * <p>
     * This method submits a task to delete a {@link Path} and returns immediately
     * after the task is submitted.
     * </p>
     *
     * @param path path to be deleted.
     * @param recursive whether to delete directories recursively
     */
    public void deletePath(final Path path, final boolean recursive) {
        this.futures.add(this.executor.submit(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                // Serialize with other tasks operating on the same path string.
                Lock lock = locks.get(path.toString());
                lock.lock();
                try {
                    HadoopUtils.deletePath(fs, path, recursive);
                    return null;
                } finally {
                    lock.unlock();
                }
            }
        }));
    }

    /**
     * Rename a {@link Path}.
     *
     * <p>
     * This method submits a task to rename a {@link Path} and returns immediately
     * after the task is submitted.
     * </p>
     *
     * @param src path to be renamed
     * @param dst new path after rename
     * @param group an optional group name for the destination path
     */
    public void renamePath(final Path src, final Path dst, final Optional<String> group) {
        this.futures.add(this.executor.submit(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                // Lock on the source path so concurrent renames of the same source serialize.
                Lock lock = locks.get(src.toString());
                lock.lock();
                try {
                    if (fs.exists(src)) {
                        HadoopUtils.renamePath(fs, src, dst);
                        if (group.isPresent()) {
                            HadoopUtils.setGroup(fs, dst, group.get());
                        }
                    }
                    return null;
                } catch (FileAlreadyExistsException e) {
                    // Best effort: a pre-existing destination is logged, not propagated.
                    LOGGER.warn(String.format("Failed to rename %s to %s: dst already exists", src, dst), e);
                    return null;
                } finally {
                    lock.unlock();
                }
            }
        }));
    }

    /**
     * Waits for all submitted tasks to complete, then shuts down the executor.
     * The first task failure is rethrown as an {@link IOException}; remaining futures
     * are not joined in that case, but the executor is still shut down via finally.
     */
    @Override
    public void close() throws IOException {
        try {
            // Wait for all submitted tasks to complete
            for (Future<?> future : this.futures) {
                future.get();
            }
        } catch (InterruptedException ie) {
            throw new IOException(ie);
        } catch (ExecutionException ee) {
            throw new IOException(ee);
        } finally {
            ExecutorsUtils.shutdownExecutorService(this.executor, Optional.of(LOGGER));
        }
    }
}
| apache-2.0 |
objectiser/camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerBridgeRouteExceptionHandlerTest.java | 5383 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file;
import java.io.File;
import java.io.IOException;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.JndiRegistry;
import org.junit.Test;
/**
 * Tests that a file consumer configured with {@code bridgeErrorHandler=true} routes an
 * exception thrown while acquiring the read lock into the Camel routing error handler
 * (here: an {@code onException(IOException)} handler), instead of swallowing it.
 */
public class FileConsumerBridgeRouteExceptionHandlerTest extends ContextTestSupport {

    // Stubbed read-lock strategy that fails once for bye.txt to simulate an I/O error.
    private MyReadLockStrategy myReadLockStrategy = new MyReadLockStrategy();

    @Test
    public void testCustomExceptionHandler() throws Exception {
        // Both files eventually reach the result endpoint; the simulated failure is
        // routed to mock:error by the onException handler.
        getMockEndpoint("mock:result").expectedMessageCount(2);
        getMockEndpoint("mock:error").expectedBodiesReceived("Error Forced to simulate no space on device");
        template.sendBodyAndHeader("file:target/data/nospace", "Hello World", Exchange.FILE_NAME, "hello.txt");
        template.sendBodyAndHeader("file:target/data/nospace", "Bye World", Exchange.FILE_NAME, "bye.txt");
        assertMockEndpointsSatisfied();
        assertEquals("Should pickup bye.txt file 2 times", 2, myReadLockStrategy.getCounter());
    }

    @Override
    protected JndiRegistry createRegistry() throws Exception {
        // Expose the stub strategy so the endpoint URI can reference it by name.
        JndiRegistry jndi = super.createRegistry();
        jndi.bind("myReadLockStrategy", myReadLockStrategy);
        return jndi;
    }

    // START SNIPPET: e2
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // to handle any IOException being thrown
                onException(IOException.class).handled(true).log("IOException occurred due: ${exception.message}").transform().simple("Error ${exception.message}")
                        .to("mock:error");
                // this is the file route that pickup files, notice how we
                // bridge the consumer to use the Camel routing error handler
                // the exclusiveReadLockStrategy is only configured because this
                // is from an unit test, so we use that to simulate exceptions
                from("file:target/data/nospace?exclusiveReadLockStrategy=#myReadLockStrategy&bridgeErrorHandler=true&initialDelay=0&delay=10").convertBodyTo(String.class)
                        .to("mock:result");
            }
        };
    }
    // END SNIPPET: e2

    // used for simulating exception during acquiring a lock on the file
    private static class MyReadLockStrategy implements GenericFileExclusiveReadLockStrategy<File> {

        // Counts acquire attempts for bye.txt; the first attempt throws.
        private int counter;

        @Override
        public void prepareOnStartup(GenericFileOperations<File> operations, GenericFileEndpoint<File> endpoint) throws Exception {
            // noop
        }

        @Override
        public boolean acquireExclusiveReadLock(GenericFileOperations<File> operations, GenericFile<File> file, Exchange exchange) throws Exception {
            if (file.getFileNameOnly().equals("bye.txt")) {
                if (counter++ == 0) {
                    // force an exception on acquire attempt for the bye.txt
                    // file, on the first attempt
                    throw new IOException("Forced to simulate no space on device");
                }
            }
            return true;
        }

        @Override
        public void releaseExclusiveReadLockOnAbort(GenericFileOperations<File> operations, GenericFile<File> file, Exchange exchange) throws Exception {
            // noop
        }

        @Override
        public void releaseExclusiveReadLockOnRollback(GenericFileOperations<File> operations, GenericFile<File> file, Exchange exchange) throws Exception {
            // noop
        }

        @Override
        public void releaseExclusiveReadLockOnCommit(GenericFileOperations<File> operations, GenericFile<File> file, Exchange exchange) throws Exception {
            // noop
        }

        @Override
        public void setTimeout(long timeout) {
            // noop
        }

        @Override
        public void setCheckInterval(long checkInterval) {
            // noop
        }

        @Override
        public void setReadLockLoggingLevel(LoggingLevel readLockLoggingLevel) {
            // noop
        }

        @Override
        public void setMarkerFiler(boolean markerFile) {
            // noop
        }

        @Override
        public void setDeleteOrphanLockFiles(boolean deleteOrphanLockFiles) {
            // noop
        }

        public int getCounter() {
            return counter;
        }
    }
}
| apache-2.0 |
alexcreasy/pnc | mapper/src/main/java/org/jboss/pnc/mapper/BuildBCRevisionFetcher.java | 3214 | /**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2020 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.mapper;
import org.jboss.pnc.dto.Build;
import org.jboss.pnc.dto.BuildConfigurationRevisionRef;
import org.jboss.pnc.dto.Environment;
import org.jboss.pnc.dto.ProjectRef;
import org.jboss.pnc.dto.SCMRepository;
import org.jboss.pnc.mapper.api.BuildConfigurationRevisionMapper;
import org.jboss.pnc.mapper.api.EnvironmentMapper;
import org.jboss.pnc.mapper.api.ProjectMapper;
import org.jboss.pnc.mapper.api.SCMRepositoryMapper;
import org.jboss.pnc.model.BuildConfigurationAudited;
import org.jboss.pnc.model.BuildRecord;
import org.jboss.pnc.model.IdRev;
import org.jboss.pnc.spi.datastore.repositories.BuildConfigurationAuditedRepository;
import org.mapstruct.BeforeMapping;
import org.mapstruct.MappingTarget;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
/**
 * Workaround for NCL-4889 and NCL-5257. Before MapStruct maps a {@link BuildRecord}
 * to a {@link Build}, this callback makes sure the audited build configuration is
 * available — loading it from the audit table when the transient field on the entity
 * is not populated — and maps it onto the revision, project, environment and SCM
 * repository fields of the DTO builder.
 *
 * @author jbrazdil
 */
@ApplicationScoped
public class BuildBCRevisionFetcher {

    @Inject
    private BuildConfigurationRevisionMapper bcRevisionMapper;

    @Inject
    private ProjectMapper projectMapper;

    @Inject
    private EnvironmentMapper environmentMapper;

    @Inject
    private SCMRepositoryMapper scmRepositoryMapper;

    @Inject
    private BuildConfigurationAuditedRepository bcAuditedRepository;

    @BeforeMapping
    public void mockBrewAttributes(BuildRecord build, @MappingTarget Build.Builder dtoBuilder) {
        // Reuse the audited config when a previous step already attached it to the
        // entity; otherwise load the exact (id, revision) pair from the audit table.
        BuildConfigurationAudited audited = build.getBuildConfigurationAudited();
        if (audited == null) {
            IdRev idRev = new IdRev(build.getBuildConfigurationId(), build.getBuildConfigurationRev());
            audited = bcAuditedRepository.queryById(idRev);
        }
        dtoBuilder.buildConfigRevision(bcRevisionMapper.toRef(audited));
        dtoBuilder.project(projectMapper.toRef(audited.getProject()));
        dtoBuilder.environment(environmentMapper.toRef(audited.getBuildEnvironment()));
        dtoBuilder.scmRepository(scmRepositoryMapper.toRef(audited.getRepositoryConfiguration()));
    }
}
| apache-2.0 |
MissionCriticalCloud/cosmic | cosmic-core/engine/schema/src/main/java/com/cloud/projects/dao/ProjectDaoImpl.java | 3954 | package com.cloud.projects.dao;
import com.cloud.projects.Project;
import com.cloud.projects.ProjectVO;
import com.cloud.server.ResourceTag.ResourceObjectType;
import com.cloud.tags.dao.ResourceTagDao;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.GenericSearchBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.SearchCriteria.Func;
import com.cloud.utils.db.TransactionLegacy;
import javax.inject.Inject;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class ProjectDaoImpl extends GenericDaoBase<ProjectVO, Long> implements ProjectDao {
    private static final Logger s_logger = LoggerFactory.getLogger(ProjectDaoImpl.class);
    // Generic search over the commonly filtered columns (name, domain, project account, state).
    protected final SearchBuilder<ProjectVO> AllFieldsSearch;
    // COUNT(*) of projects per domain.
    protected GenericSearchBuilder<ProjectVO, Long> CountByDomain;
    protected GenericSearchBuilder<ProjectVO, Long> ProjectAccountSearch;
    // ResourceTagsDaoImpl _tagsDao = ComponentLocator.inject(ResourceTagsDaoImpl.class);
    @Inject
    ResourceTagDao _tagsDao;

    protected ProjectDaoImpl() {
        AllFieldsSearch = createSearchBuilder();
        AllFieldsSearch.and("name", AllFieldsSearch.entity().getName(), SearchCriteria.Op.EQ);
        AllFieldsSearch.and("domainId", AllFieldsSearch.entity().getDomainId(), SearchCriteria.Op.EQ);
        AllFieldsSearch.and("projectAccountId", AllFieldsSearch.entity().getProjectAccountId(), SearchCriteria.Op.EQ);
        AllFieldsSearch.and("state", AllFieldsSearch.entity().getState(), SearchCriteria.Op.EQ);
        AllFieldsSearch.done();
        CountByDomain = createSearchBuilder(Long.class);
        CountByDomain.select(null, Func.COUNT, null);
        CountByDomain.and("domainId", CountByDomain.entity().getDomainId(), SearchCriteria.Op.EQ);
        CountByDomain.done();
    }

    /** Finds a project by its (name, domain) pair, which is unique per domain. */
    @Override
    public ProjectVO findByNameAndDomain(final String name, final long domainId) {
        final SearchCriteria<ProjectVO> sc = AllFieldsSearch.create();
        sc.setParameters("name", name);
        sc.setParameters("domainId", domainId);
        return findOneBy(sc);
    }

    /** Returns the number of projects registered in the given domain. */
    @Override
    public Long countProjectsForDomain(final long domainId) {
        final SearchCriteria<Long> sc = CountByDomain.create();
        sc.setParameters("domainId", domainId);
        return customSearch(sc, null).get(0);
    }

    @Override
    public ProjectVO findByProjectAccountId(final long projectAccountId) {
        final SearchCriteria<ProjectVO> sc = AllFieldsSearch.create();
        sc.setParameters("projectAccountId", projectAccountId);
        return findOneBy(sc);
    }

    /** Lists all projects currently in the given state. */
    @Override
    public List<ProjectVO> listByState(final Project.State state) {
        final SearchCriteria<ProjectVO> sc = AllFieldsSearch.create();
        sc.setParameters("state", state);
        return listBy(sc);
    }

    @Override
    public ProjectVO findByProjectAccountIdIncludingRemoved(final long projectAccountId) {
        final SearchCriteria<ProjectVO> sc = AllFieldsSearch.create();
        sc.setParameters("projectAccountId", projectAccountId);
        return findOneIncludingRemovedBy(sc);
    }

    /**
     * Removes a project: clears its name (frees the unique (name, domain) pair
     * for reuse), drops its resource tags and soft-removes the row — all in a
     * single transaction. Returns false when the project does not exist or the
     * name reset fails; the transaction is rolled back in those cases instead
     * of being left open (the original code returned with the transaction
     * still started).
     */
    @Override
    @DB
    public boolean remove(final Long projectId) {
        final TransactionLegacy txn = TransactionLegacy.currentTxn();
        txn.start();
        final ProjectVO projectToRemove = findById(projectId);
        if (projectToRemove == null) {
            s_logger.warn("Unable to find project id=" + projectId + "; nothing to remove");
            txn.rollback();
            return false;
        }
        projectToRemove.setName(null);
        if (!update(projectId, projectToRemove)) {
            s_logger.warn("Failed to reset name for the project id=" + projectId + " as a part of project remove");
            txn.rollback();
            return false;
        }
        _tagsDao.removeByIdAndType(projectId, ResourceObjectType.Project);
        final boolean result = super.remove(projectId);
        txn.commit();
        return result;
    }
}
| apache-2.0 |
massakam/pulsar | pulsar-broker/src/test/java/org/apache/pulsar/broker/zookeeper/MultiBrokerMetadataConsistencyTest.java | 3131 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.zookeeper;
import static org.testng.Assert.assertTrue;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.apache.pulsar.broker.MultiBrokerBaseTest;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.metadata.TestZKServer;
import org.apache.pulsar.metadata.api.MetadataStoreConfig;
import org.apache.pulsar.metadata.api.MetadataStoreException;
import org.apache.pulsar.metadata.api.extended.MetadataStoreExtended;
import org.testng.annotations.Test;
@Slf4j
@Test(groups = "broker")
public class MultiBrokerMetadataConsistencyTest extends MultiBrokerBaseTest {
    @Override
    protected int numberOfAdditionalBrokers() {
        // Run 2 extra brokers (3 in total) so metadata written through one
        // broker can be observed through another.
        return 2;
    }
    // Standalone ZooKeeper used as the shared metadata backend for all brokers;
    // created in doInitConf() and closed in onCleanup().
    TestZKServer testZKServer;
    @Override
    protected void doInitConf() throws Exception {
        super.doInitConf();
        testZKServer = new TestZKServer();
    }
    @Override
    protected void onCleanup() {
        super.onCleanup();
        if (testZKServer != null) {
            try {
                testZKServer.close();
            } catch (Exception e) {
                // Best-effort shutdown: log rather than fail test cleanup.
                log.error("Error in stopping ZK server", e);
            }
        }
    }
    @Override
    protected MetadataStoreExtended createLocalMetadataStore() throws MetadataStoreException {
        // Both local and configuration stores point at the same test ZK server,
        // so every broker shares one metadata source.
        return MetadataStoreExtended.create(testZKServer.getConnectionString(), MetadataStoreConfig.builder().build());
    }
    @Override
    protected MetadataStoreExtended createConfigurationMetadataStore() throws MetadataStoreException {
        return MetadataStoreExtended.create(testZKServer.getConnectionString(), MetadataStoreConfig.builder().build());
    }
    @Test
    public void newTopicShouldBeInTopicsList() throws PulsarAdminException {
        List<PulsarAdmin> admins = getAllAdmins();
        PulsarAdmin first = admins.get(0);
        PulsarAdmin second = admins.get(1);
        // Query the second broker first (variable name suggests this primes a
        // cache miss) and confirm the namespace starts empty.
        List<String> cacheMiss = second.topics().getList("public/default");
        assertTrue(cacheMiss.isEmpty());
        // Create the topic via a DIFFERENT broker than the one queried, then
        // verify the second broker sees it — i.e. listings stay consistent.
        first.topics().createNonPartitionedTopic("persistent://public/default/my-topic");
        List<String> topics = second.topics().getList("public/default");
        assertTrue(topics.contains("persistent://public/default/my-topic"));
    }
}
| apache-2.0 |
massakam/pulsar | pulsar-io/hbase/src/main/java/org/apache/pulsar/io/hbase/sink/HbaseAbstractSink.java | 7387 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.io.hbase.sink;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.pulsar.functions.api.Record;
import org.apache.pulsar.io.core.Sink;
import org.apache.pulsar.io.core.SinkContext;
/**
* A Simple abstract class for Hbase sink.
* Users need to implement extractKeyValue function to use this sink
*/
@Slf4j
public abstract class HbaseAbstractSink<T> implements Sink<T> {
    @Data(staticConstructor = "of")
    public static class TableDefinition {
        private final String rowKeyName;
        private final String familyName;
        private final List<String> qualifierNames;
    }
    private HbaseSinkConfig hbaseSinkConfig;
    private Configuration configuration;
    private Connection connection;
    private Admin admin;
    private TableName tableName;
    private Table table;
    protected TableDefinition tableDefinition;
    // for flush
    private long batchTimeMs;
    private int batchSize;
    // Guarded by "this": swapped out atomically by flush().
    private List<Record<T>> incomingList;
    private ScheduledExecutorService flushExecutor;

    /**
     * Validates the configuration, connects to HBase and starts the periodic
     * flush task.
     */
    @Override
    public void open(Map<String, Object> config, SinkContext sinkContext) throws Exception {
        hbaseSinkConfig = HbaseSinkConfig.load(config);
        Preconditions.checkNotNull(hbaseSinkConfig.getZookeeperQuorum(), "zookeeperQuorum property not set.");
        Preconditions.checkNotNull(hbaseSinkConfig.getZookeeperClientPort(), "zookeeperClientPort property not set.");
        Preconditions.checkNotNull(hbaseSinkConfig.getZookeeperZnodeParent(), "zookeeperZnodeParent property not set.");
        Preconditions.checkNotNull(hbaseSinkConfig.getTableName(), "hbase tableName property not set.");
        getTable(hbaseSinkConfig);
        tableDefinition = getTableDefinition(hbaseSinkConfig);
        batchTimeMs = hbaseSinkConfig.getBatchTimeMs();
        batchSize = hbaseSinkConfig.getBatchSize();
        incomingList = Lists.newArrayList();
        flushExecutor = Executors.newScheduledThreadPool(1);
        flushExecutor.scheduleAtFixedRate(() -> flush(), batchTimeMs, batchTimeMs, TimeUnit.MILLISECONDS);
    }

    /** Releases the table, admin, connection and flush executor, in that order. */
    @Override
    public void close() throws Exception {
        if (null != table) {
            table.close();
        }
        if (null != admin) {
            admin.close();
        }
        if (null != connection) {
            connection.close();
        }
        if (null != flushExecutor) {
            flushExecutor.shutdown();
        }
    }

    /**
     * Buffers the record; triggers an asynchronous flush once the buffer
     * reaches the configured batch size.
     */
    @Override
    public void write(Record<T> record) throws Exception {
        int number;
        synchronized (this) {
            if (null != record) {
                incomingList.add(record);
            }
            number = incomingList.size();
        }
        // ">=" rather than "==": if a size check is ever skipped (e.g. a timed
        // flush raced in between), the flush still fires on the next write.
        if (number >= batchSize) {
            flushExecutor.submit(() -> flush());
        }
    }

    /**
     * Drains the buffered records, converts them to Puts via
     * {@link #bindValue(Record, List)} and writes them in one HBase batch.
     * Records whose conversion fails are failed individually; records in a
     * failed batch write are all failed.
     */
    private void flush() {
        List<Record<T>> toFlushList;
        synchronized (this) {
            if (incomingList.isEmpty()) {
                return;
            }
            toFlushList = incomingList;
            incomingList = Lists.newArrayList();
        }
        List<Put> puts = new ArrayList<>(toFlushList.size());
        // Collect conversion failures on the side and remove them afterwards:
        // the original code removed from toFlushList inside the for-each loop,
        // which throws ConcurrentModificationException on the first failure.
        List<Record<T>> failedRecords = new ArrayList<>();
        for (Record<T> record : toFlushList) {
            try {
                bindValue(record, puts);
            } catch (Exception e) {
                record.fail();
                failedRecords.add(record);
                log.warn("Record flush thread was exception ", e);
            }
        }
        toFlushList.removeAll(failedRecords);
        try {
            if (CollectionUtils.isNotEmpty(puts)) {
                table.batch(puts, new Object[puts.size()]);
            }
            toFlushList.forEach(tRecord -> tRecord.ack());
            puts.clear();
            toFlushList.clear();
        } catch (Exception e) {
            toFlushList.forEach(tRecord -> tRecord.fail());
            log.error("Hbase table put data exception ", e);
        }
    }

    // bind value with a Hbase put
    public abstract void bindValue(Record<T> message, List<Put> puts) throws Exception;

    /** Builds the HBase configuration/connection and resolves the target table. */
    private void getTable(HbaseSinkConfig hbaseSinkConfig) throws IOException {
        configuration = HBaseConfiguration.create();
        String hbaseConfigResources = hbaseSinkConfig.getHbaseConfigResources();
        if (StringUtils.isNotBlank(hbaseConfigResources)) {
            configuration.addResource(hbaseConfigResources);
        }
        configuration.set("hbase.zookeeper.quorum", hbaseSinkConfig.getZookeeperQuorum());
        configuration.set("hbase.zookeeper.property.clientPort", hbaseSinkConfig.getZookeeperClientPort());
        configuration.set("zookeeper.znode.parent", hbaseSinkConfig.getZookeeperZnodeParent());
        connection = ConnectionFactory.createConnection(configuration);
        admin = connection.getAdmin();
        tableName = TableName.valueOf(hbaseSinkConfig.getTableName());
        if (!admin.tableExists(this.tableName)) {
            throw new IllegalArgumentException(this.tableName + " table does not exist.");
        }
        table = connection.getTable(this.tableName);
    }

    /**
     * Get the {@link TableDefinition} for the given table.
     */
    private TableDefinition getTableDefinition(HbaseSinkConfig hbaseSinkConfig) {
        Preconditions.checkNotNull(hbaseSinkConfig.getRowKeyName(), "rowKeyName property not set.");
        Preconditions.checkNotNull(hbaseSinkConfig.getFamilyName(), "familyName property not set.");
        Preconditions.checkNotNull(hbaseSinkConfig.getQualifierNames(), "qualifierNames property not set.");
        return TableDefinition.of(hbaseSinkConfig.getRowKeyName(), hbaseSinkConfig.getFamilyName(),
                hbaseSinkConfig.getQualifierNames());
    }
}
| apache-2.0 |
prithvi66/carbon-device-mgt | components/policy-mgt/org.wso2.carbon.policy.mgt.core/src/test/java/org/wso2/carbon/policy/mgt/core/PolicyEvaluationTestCase.java | 5859 | /*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.policy.mgt.core;
import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.carbon.device.mgt.common.Device;
import org.wso2.carbon.device.mgt.common.DeviceIdentifier;
import org.wso2.carbon.device.mgt.common.DeviceManagementException;
import org.wso2.carbon.device.mgt.common.policy.mgt.Policy;
import org.wso2.carbon.device.mgt.core.service.DeviceManagementProviderService;
import org.wso2.carbon.device.mgt.core.service.DeviceManagementProviderServiceImpl;
import org.wso2.carbon.ntask.common.TaskException;
import org.wso2.carbon.policy.mgt.common.*;
import org.wso2.carbon.policy.mgt.core.internal.PolicyManagementDataHolder;
import org.wso2.carbon.policy.mgt.core.services.SimplePolicyEvaluationTest;
import java.util.Collections;
import java.util.List;
public class PolicyEvaluationTestCase extends BasePolicyManagementDAOTest {
    private static final String ANDROID = "android";
    private static final Log log = LogFactory.getLog(PolicyEvaluationTestCase.class);

    @BeforeClass
    @Override
    public void init() throws Exception {
        // Register a simple evaluation point so policies can be resolved
        // without the full OSGi runtime.
        PolicyEvaluationPoint evaluationPoint = new SimplePolicyEvaluationTest();
        PolicyManagementDataHolder.getInstance().setPolicyEvaluationPoint(evaluationPoint.getName(), evaluationPoint);
    }

    /** Activates every inactive Android policy through the administration point. */
    @Test
    public void activatePolicies() {
        PolicyManagerService policyManagerService = new PolicyManagerServiceImpl();
        PolicyAdministratorPoint administratorPoint = null;
        try {
            administratorPoint = policyManagerService.getPAP();
        } catch (PolicyManagementException e) {
            log.error("Error occurred while loading the policy administration point", e);
            Assert.fail();
        }
        List<Policy> policies = null;
        try {
            policies = policyManagerService.getPolicies(ANDROID);
        } catch (PolicyManagementException e) {
            log.error("Error occurred while retrieving the list of policies defined against the device type '" +
                    ANDROID + "'", e);
            Assert.fail();
        }
        for (Policy policy : policies) {
            log.debug("Policy status : " + policy.getPolicyName() + " - " + policy.isActive() + " - " + policy
                    .isUpdated() + " Policy id : " + policy.getId());
            if (!policy.isActive()) {
                try {
                    administratorPoint.activatePolicy(policy.getId());
                } catch (PolicyManagementException e) {
                    log.error("Error occurred while activating the policy, which carries the id '" +
                            policy.getId() + "'", e);
                    Assert.fail();
                }
            }
        }
        // This cannot be called due to task service cannot be started from the
        //administratorPoint.publishChanges();
    }

    /**
     * Resolves the effective policy for every known Android device.
     * Note: the original declared a DeviceIdentifier parameter on this @Test
     * method without a @DataProvider, so TestNG could never invoke it; the
     * identifier is now constructed locally for each device.
     */
    @Test(dependsOnMethods = ("activatePolicies"))
    public void getEffectivePolicy() throws DeviceManagementException, PolicyEvaluationException {
        log.debug("Getting effective policy for device started ..........");
        DeviceManagementProviderService service = new DeviceManagementProviderServiceImpl();
        List<Device> devices = service.getAllDevices(ANDROID);
        PolicyEvaluationPoint evaluationPoint = PolicyManagementDataHolder.getInstance().getPolicyEvaluationPoint();
        for (Device device : devices) {
            DeviceIdentifier identifier = new DeviceIdentifier();
            identifier.setType(device.getType());
            identifier.setId(device.getDeviceIdentifier());
            Policy policy = evaluationPoint.getEffectivePolicy(identifier);
            if (policy != null) {
                log.debug("Name of the policy applied to device is " + policy.getPolicyName());
            } else {
                log.debug("No policy is applied to device.");
            }
        }
    }

    /** Reverses the priority order of all policies and persists the new priorities. */
    @Test(dependsOnMethods = ("getEffectivePolicy"))
    public void updatePriorities() throws PolicyManagementException, TaskException {
        PolicyManagerService policyManagerService = new PolicyManagerServiceImpl();
        PolicyAdministratorPoint administratorPoint = policyManagerService.getPAP();
        List<Policy> policies = administratorPoint.getPolicies();
        log.debug("Re-enforcing policy started...!");
        int size = policies.size();
        sortPolicies(policies);
        // Assign priorities size..1 in sorted order, i.e. invert the ranking.
        int x = 0;
        for (Policy policy : policies) {
            policy.setPriorityId(size - x);
            x++;
        }
        administratorPoint.updatePolicyPriorities(policies);
        // administratorPoint.publishChanges();
    }

    @Test(dependsOnMethods = ("updatePriorities"))
    public void checkDelegations() {
        log.debug("Delegation methods calls started because tasks cannot be started due to osgi constraints.....!");
        //DelegationTask delegationTask = new DelegationTask();
        //delegationTask.execute();
    }

    /** Sorts policies by their natural (Comparable) ordering. */
    public void sortPolicies(List<Policy> policyList) {
        Collections.sort(policyList);
    }
}
| apache-2.0 |
yanzhijun/jclouds-aliyun | apis/cloudstack/src/test/java/org/jclouds/cloudstack/compute/extensions/CloudStackSecurityGroupExtensionLiveTest.java | 2304 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.cloudstack.compute.extensions;
import org.jclouds.cloudstack.CloudStackApi;
import org.jclouds.cloudstack.domain.Zone;
import org.jclouds.compute.domain.Template;
import org.jclouds.compute.extensions.internal.BaseSecurityGroupExtensionLiveTest;
import org.jclouds.sshj.config.SshjSshClientModule;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.inject.Module;
/**
* Live test for CloudStack {@link org.jclouds.compute.extensions.SecurityGroupExtension} implementation.
*/
@Test(groups = "live", singleThreaded = true, testName = "CloudStackSecurityGroupExtensionLiveTest")
public class CloudStackSecurityGroupExtensionLiveTest extends BaseSecurityGroupExtensionLiveTest {
    // First zone found with security groups enabled; stays null if none exists.
    protected Zone zone;
    public CloudStackSecurityGroupExtensionLiveTest() {
        provider = "cloudstack";
    }
    @BeforeClass(groups = { "integration", "live" })
    public void setupContext() {
        super.setupContext();
        CloudStackApi api = view.unwrapApi(CloudStackApi.class);
        // Security groups are a per-zone feature in CloudStack; pick the first
        // zone that supports them.
        for (Zone z : api.getZoneApi().listZones()) {
            if (z.isSecurityGroupsEnabled()) {
                zone = z;
                break;
            }
        }
        // No suitable zone: flag the feature as unsupported so inherited tests skip.
        if (zone == null)
            securityGroupsSupported = false;
    }
    // NOTE(review): presumably overrides a base-class hook — consider adding
    // @Override; confirm against BaseSecurityGroupExtensionLiveTest.
    protected Module getSshModule() {
        return new SshjSshClientModule();
    }
    @Override
    public Template getNodeTemplate() {
        // Pin node creation to the security-group-enabled zone chosen above.
        return view.getComputeService().templateBuilder().locationId(zone.getId()).build();
    }
}
| apache-2.0 |
fhueske/flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobmaster/slotpool/PreviousAllocationSlotSelectionStrategy.java | 3266 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmaster.slotpool;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.clusterframework.types.SlotProfile;
import org.apache.flink.runtime.jobmanager.scheduler.Locality;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Optional;
import java.util.Set;
/**
 * A {@link SlotSelectionStrategy} that first tries to reuse a slot from a
 * previous allocation and, when none matches, falls back to the
 * location-preference strategy (while excluding blacklisted allocations).
 */
public enum PreviousAllocationSlotSelectionStrategy implements SlotSelectionStrategy {
	INSTANCE;

	@Override
	public Optional<SlotInfoAndLocality> selectBestSlotForProfile(
		@Nonnull Collection<SlotInfoAndResources> availableSlots,
		@Nonnull SlotProfile slotProfile) {

		final Collection<AllocationID> preferredAllocations = slotProfile.getPreferredAllocations();

		// Reusing a slot from a prior allocation counts as a LOCAL match.
		if (!preferredAllocations.isEmpty()) {
			for (SlotInfoAndResources candidate : availableSlots) {
				if (preferredAllocations.contains(candidate.getSlotInfo().getAllocationId())) {
					return Optional.of(
						SlotInfoAndLocality.of(candidate.getSlotInfo(), Locality.LOCAL));
				}
			}
		}

		// No previous slot is available: delegate to the location-preference
		// strategy, but never hand out a blacklisted allocation.
		final Set<AllocationID> forbiddenAllocations = slotProfile.getPreviousExecutionGraphAllocations();
		final Collection<SlotInfoAndResources> permittedSlots =
			filterOutForbiddenSlots(availableSlots, forbiddenAllocations);
		return LocationPreferenceSlotSelectionStrategy.INSTANCE.selectBestSlotForProfile(permittedSlots, slotProfile);
	}

	/** Returns the subset of {@code availableSlots} whose allocation id is not forbidden. */
	@Nonnull
	private Collection<SlotInfoAndResources> filterOutForbiddenSlots(
		@Nonnull Collection<SlotInfoAndResources> availableSlots,
		@Nonnull Set<AllocationID> forbiddenAllocations) {

		if (forbiddenAllocations.isEmpty()) {
			return Collections.unmodifiableCollection(availableSlots);
		}
		final ArrayList<SlotInfoAndResources> permitted = new ArrayList<>(availableSlots.size());
		for (SlotInfoAndResources candidate : availableSlots) {
			if (!forbiddenAllocations.contains(candidate.getSlotInfo().getAllocationId())) {
				permitted.add(candidate);
			}
		}
		return permitted;
	}
}
| apache-2.0 |
HebaKhaled/bposs | src/com.mentor.nucleus.bp.core/src/com/mentor/nucleus/bp/core/common/RelationshipChangeModelDelta.java | 2830 | //========================================================================
//
//File: $RCSfile: RelationshipChangeModelDelta.java,v $
//Version: $Revision: 1.12 $
//Modified: $Date: 2013/01/10 22:54:11 $
//
//(c) Copyright 2005-2014 by Mentor Graphics Corp. All rights reserved.
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
package com.mentor.nucleus.bp.core.common;
public class RelationshipChangeModelDelta extends BaseModelDelta {
    /** The element on the far end of the relationship being (un)related. */
    private ModelElement destinationModelElement = null;
    /**
     * Name (association Numb) of the relationship through which the
     * relate/unrelate was performed; retained so the change can be undone.
     */
    private String relationName;
    /**
     * Suffix appended to the relate/unrelate method for reflexive
     * associations, identifying the traversal direction.
     */
    private String relationDirectionPhrase;
    /**
     * Creates a delta describing a relate/unrelate between two model elements.
     *
     * @param eventType the kind of change that occurred
     * @param srcModelElement the element being related to / unrelated from
     *        {@code destModelElement}; stored as the base delta's element
     * @param destModelElement the element on the other end of the relationship
     * @param relName the association Numb used for relation/unrelation
     * @param aRelationDirectionPhrase the direction phrase for reflexive associations
     */
    public RelationshipChangeModelDelta(int eventType,
            ModelElement srcModelElement, ModelElement destModelElement, String relName, String aRelationDirectionPhrase) {
        super(eventType, srcModelElement);
        this.destinationModelElement = destModelElement;
        this.relationName = relName;
        this.relationDirectionPhrase = aRelationDirectionPhrase;
    }
    /** The source element is the model element carried by the base delta. */
    public ModelElement getSourceModelElement() {
        return getModelElement();
    }
    public void setSourceModelElement(ModelElement source) {
        super.setModelElement(source);
    }
    public void setDestinationModelElement(ModelElement destination) {
        this.destinationModelElement = destination;
    }
    public ModelElement getDestinationModelElement() {
        return destinationModelElement;
    }
    public String getRelationName() {
        return relationName;
    }
    public String getRelationDirectionPhrase() {
        return relationDirectionPhrase;
    }
}
| apache-2.0 |
nmirasch/guvnor | guvnor-rest/guvnor-rest-backend/src/main/java/org/guvnor/rest/backend/JobResultManager.java | 5909 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.guvnor.rest.backend;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import org.guvnor.rest.client.JobRequest;
import org.guvnor.rest.client.JobResult;
import org.guvnor.rest.client.JobStatus;
import org.kie.api.executor.CommandContext;
import org.kie.api.executor.ExecutionResults;
import org.kie.api.executor.ExecutorService;
import org.kie.api.executor.RequestInfo;
import org.kie.api.runtime.query.QueryContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ApplicationScoped
public class JobResultManager {
private static final Logger logger = LoggerFactory.getLogger(JobResultManager.class);
private static AtomicInteger created = new AtomicInteger(0);
private static class Cache extends LinkedHashMap<String, JobResult> {
private int maxSize = 1000;
public Cache(int maxSize) {
this.maxSize = maxSize;
}
@Override
protected boolean removeEldestEntry(Map.Entry<String, JobResult> stringFutureEntry) {
return size() > maxSize;
}
public void setMaxSize(int maxSize) {
this.maxSize = maxSize;
}
}
private Map<String, JobResult> jobs = null;
private int maxCacheSize = 10000;
@Inject
private Instance<ExecutorService> jobExecutor;
@PostConstruct
public void start() {
if (!created.compareAndSet(0, 1)) {
throw new IllegalStateException("Only 1 JobResultManager instance is allowed per container!");
}
Cache cache = new Cache(maxCacheSize);
jobs = Collections.synchronizedMap(cache);
}
public JobResult getJob(String jobId) {
JobResult job = jobs.get(jobId);
if (job != null && !JobStatus.ACCEPTED.equals(job.getStatus())) {
return job;
}
if (!jobExecutor.isUnsatisfied()) {
List<RequestInfo> jobsFound = jobExecutor.get().getRequestsByBusinessKey(jobId, new QueryContext());
if (jobsFound != null && !jobsFound.isEmpty()) {
RequestInfo executorJob = jobsFound.get(0);
JobResult requestedJob = (JobResult) getItemFromRequestOutput("JobResult", executorJob);
if (requestedJob == null) {
JobRequest jobRequest = (JobRequest) getItemFromRequestInput("JobRequest", executorJob);
if (jobRequest != null) {
requestedJob = new JobResult();
requestedJob.setJobId(jobRequest.getJobId());
requestedJob.setStatus(jobRequest.getStatus());
}
}
// if it was found set it in cache
if (requestedJob != null) {
job = requestedJob;
jobs.put(jobId, job);
}
}
}
return job;
}
public void putJob(JobResult job) {
jobs.put(job.getJobId(), job);
}
public JobResult removeJob(String jobId) {
return jobs.remove(jobId);
}
protected Object getItemFromRequestInput(String itemName, RequestInfo requestInfo) {
CommandContext ctx = null;
byte[] requestData = requestInfo.getRequestData();
if (requestData != null) {
ObjectInputStream in = null;
try {
in = new ObjectInputStream(new ByteArrayInputStream(requestData));
ctx = (CommandContext) in.readObject();
} catch (Exception e) {
logger.debug("Exception while deserializing context data of job with id {}", requestInfo.getId(), e);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
}
}
}
}
if (ctx != null && ctx.getData(itemName) != null) {
return ctx.getData(itemName);
}
return null;
}
/**
 * Deserializes the executor request's serialized {@code ExecutionResults} and
 * returns the named item from it.
 *
 * Best-effort: a missing, unreadable, or item-less result set yields
 * {@code null} rather than an exception (failures are logged at debug level).
 *
 * @param itemName    key of the item to fetch from the deserialized results
 * @param requestInfo executor request carrying the serialized response bytes
 * @return the item value, or {@code null} when unavailable
 */
protected Object getItemFromRequestOutput(String itemName, RequestInfo requestInfo) {
    ExecutionResults execResults = null;
    byte[] responseData = requestInfo.getResponseData();
    if (responseData != null) {
        // try-with-resources closes the stream on every path; the previous
        // hand-rolled finally block silently swallowed close() failures.
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(responseData))) {
            execResults = (ExecutionResults) in.readObject();
        } catch (Exception e) {
            logger.debug("Exception while deserializing context data of job with id {}", requestInfo.getId(), e);
        }
    }
    if (execResults != null && execResults.getData(itemName) != null) {
        return execResults.getData(itemName);
    }
    return null;
}
}
| apache-2.0 |
javyzheng/spring-boot | spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/mvc/EnvironmentMvcEndpointTests.java | 6828 | /*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.endpoint.mvc;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.autoconfigure.AuditAutoConfiguration;
import org.springframework.boot.actuate.autoconfigure.EndpointWebMvcAutoConfiguration;
import org.springframework.boot.actuate.endpoint.EnvironmentEndpoint;
import org.springframework.boot.autoconfigure.http.HttpMessageConvertersAutoConfiguration;
import org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration;
import org.springframework.boot.autoconfigure.web.servlet.WebMvcAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.util.EnvironmentTestUtils;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import static org.hamcrest.Matchers.containsString;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* Tests for {@link EnvironmentMvcEndpoint}
*
* @author Dave Syer
* @author Andy Wilkinson
*/
@RunWith(SpringRunner.class)
@SpringBootTest
@TestPropertySource(properties = "management.security.enabled=false")
@DirtiesContext
public class EnvironmentMvcEndpointTests {
@Autowired
private WebApplicationContext context;
private MockMvc mvc;
@Before
public void setUp() {
this.context.getBean(EnvironmentEndpoint.class).setEnabled(true);
this.mvc = MockMvcBuilders.webAppContextSetup(this.context).build();
EnvironmentTestUtils.addEnvironment((ConfigurableApplicationContext) this.context,
"foo:bar", "fool:baz");
}
@Test
public void homeContentTypeDefaultsToActuatorV1Json() throws Exception {
this.mvc.perform(get("/env")).andExpect(status().isOk())
.andExpect(header().string("Content-Type",
"application/vnd.spring-boot.actuator.v1+json;charset=UTF-8"));
}
@Test
public void homeContentTypeCanBeApplicationJson() throws Exception {
this.mvc.perform(
get("/env").header(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON_VALUE))
.andExpect(status().isOk()).andExpect(header().string("Content-Type",
MediaType.APPLICATION_JSON_UTF8_VALUE));
}
@Test
public void subContentTypeDefaultsToActuatorV1Json() throws Exception {
this.mvc.perform(get("/env/foo")).andExpect(status().isOk())
.andExpect(header().string("Content-Type",
"application/vnd.spring-boot.actuator.v1+json;charset=UTF-8"));
}
@Test
public void subContentTypeCanBeApplicationJson() throws Exception {
this.mvc.perform(get("/env/foo").header(HttpHeaders.ACCEPT,
MediaType.APPLICATION_JSON_VALUE)).andExpect(status().isOk())
.andExpect(header().string("Content-Type",
MediaType.APPLICATION_JSON_UTF8_VALUE));
}
@Test
public void home() throws Exception {
this.mvc.perform(get("/env")).andExpect(status().isOk())
.andExpect(content().string(containsString("systemProperties")));
}
@Test
public void sub() throws Exception {
this.mvc.perform(get("/env/foo")).andExpect(status().isOk())
.andExpect(content().string("{\"foo\":\"bar\"}"));
}
@Test
public void subWhenDisabled() throws Exception {
this.context.getBean(EnvironmentEndpoint.class).setEnabled(false);
this.mvc.perform(get("/env/foo")).andExpect(status().isNotFound());
}
@Test
public void regex() throws Exception {
Map<String, Object> map = new HashMap<>();
map.put("food", null);
((ConfigurableEnvironment) this.context.getEnvironment()).getPropertySources()
.addFirst(new MapPropertySource("null-value", map));
this.mvc.perform(get("/env/foo.*")).andExpect(status().isOk())
.andExpect(content().string(containsString("\"foo\":\"bar\"")))
.andExpect(content().string(containsString("\"fool\":\"baz\"")));
}
@Test
public void nestedPathWhenPlaceholderCannotBeResolvedShouldReturnUnresolvedProperty() throws Exception {
Map<String, Object> map = new HashMap<String, Object>();
map.put("my.foo", "${my.bar}");
((ConfigurableEnvironment) this.context.getEnvironment()).getPropertySources()
.addFirst(new MapPropertySource("unresolved-placeholder", map));
this.mvc.perform(get("/env/my.*")).andExpect(status().isOk())
.andExpect(content().string(containsString("\"my.foo\":\"${my.bar}\"")));
}
@Test
public void nestedPathWithSensitivePlaceholderShouldSanitize() throws Exception {
Map<String, Object> map = new HashMap<String, Object>();
map.put("my.foo", "${my.password}");
map.put("my.password", "hello");
((ConfigurableEnvironment) this.context.getEnvironment()).getPropertySources()
.addFirst(new MapPropertySource("placeholder", map));
this.mvc.perform(get("/env/my.*")).andExpect(status().isOk())
.andExpect(content().string(containsString("\"my.foo\":\"******\"")));
}
@Configuration
@Import({ JacksonAutoConfiguration.class,
HttpMessageConvertersAutoConfiguration.class, WebMvcAutoConfiguration.class,
EndpointWebMvcAutoConfiguration.class, AuditAutoConfiguration.class })
public static class TestConfiguration {
@Bean
public EnvironmentEndpoint endpoint() {
return new EnvironmentEndpoint();
}
}
}
| apache-2.0 |
wso2/wso2-commons-vfs | commons-vfs2/src/main/java/org/apache/commons/vfs2/tasks/AbstractSyncTask.java | 14237 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.vfs2.tasks;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import java.util.StringTokenizer;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.NameScope;
import org.apache.commons.vfs2.Selectors;
import org.apache.commons.vfs2.util.Messages;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
/**
* An abstract file synchronization task. Scans a set of source files and folders, and a destination folder, and
* performs actions on missing and out-of-date files. Specifically, performs actions on the following:
* <ul>
* <li>Missing destination file.
* <li>Missing source file.
* <li>Out-of-date destination file.
* <li>Up-to-date destination file.
* </ul>
*
* TODO - Deal with case where dest file maps to a child of one of the source files.<br>
* TODO - Deal with case where dest file already exists and is incorrect type (not file, not a folder).<br>
* TODO - Use visitors.<br>
* TODO - Add default excludes.<br>
* TOOD - Allow selector, mapper, filters, etc to be specified.<br>
* TODO - Handle source/dest directories as well.<br>
* TODO - Allow selector to be specified for choosing which dest files to sync.
*/
public abstract class AbstractSyncTask extends VfsTask {
    private final ArrayList<SourceInfo> srcFiles = new ArrayList<>();
    private String destFileUrl;
    private String destDirUrl;
    private String srcDirUrl;
    private boolean srcDirIsBase;
    private boolean failonerror = true;
    private String filesList;

    /**
     * Sets the destination file.
     *
     * @param destFile The destination file name.
     */
    public void setDestFile(final String destFile) {
        this.destFileUrl = destFile;
    }

    /**
     * Sets the destination directory.
     *
     * @param destDir The destination directory.
     */
    public void setDestDir(final String destDir) {
        this.destDirUrl = destDir;
    }

    /**
     * Sets the source file.
     *
     * @param srcFile The source file name.
     */
    public void setSrc(final String srcFile) {
        final SourceInfo src = new SourceInfo();
        src.setFile(srcFile);
        addConfiguredSrc(src);
    }

    /**
     * Sets the source directory.
     *
     * @param srcDir The source directory.
     */
    public void setSrcDir(final String srcDir) {
        this.srcDirUrl = srcDir;
    }

    /**
     * Sets whether the source directory should be consider as the base directory.
     *
     * @param srcDirIsBase true if the source directory is the base directory.
     */
    public void setSrcDirIsBase(final boolean srcDirIsBase) {
        this.srcDirIsBase = srcDirIsBase;
    }

    /**
     * Sets whether we should fail if there was an error or not.
     *
     * @param failonerror true if the operation should fail if there is an error.
     */
    public void setFailonerror(final boolean failonerror) {
        this.failonerror = failonerror;
    }

    /**
     * Sets whether we should fail if there was an error or not.
     *
     * @return true if the operation should fail if there was an error.
     */
    public boolean isFailonerror() {
        return failonerror;
    }

    /**
     * Sets the files to includes.
     *
     * @param filesList The list of files to include.
     */
    public void setIncludes(final String filesList) {
        this.filesList = filesList;
    }

    /**
     * Adds a nested &lt;src&gt; element.
     *
     * @param srcInfo A nested source element.
     * @throws BuildException if the SourceInfo doesn't reference a file.
     */
    public void addConfiguredSrc(final SourceInfo srcInfo) throws BuildException {
        if (srcInfo.file == null) {
            final String message = Messages.getString("vfs.tasks/sync.no-source-file.error");
            throw new BuildException(message);
        }
        srcFiles.add(srcInfo);
    }

    /**
     * Executes this task.
     *
     * @throws BuildException if an error occurs.
     */
    @Override
    public void execute() throws BuildException {
        // Validate: exactly one of destFile / destDir must be set.
        if (destFileUrl == null && destDirUrl == null) {
            final String message = Messages.getString("vfs.tasks/sync.no-destination.error");
            logOrDie(message, Project.MSG_WARN);
            return;
        }

        if (destFileUrl != null && destDirUrl != null) {
            final String message = Messages.getString("vfs.tasks/sync.too-many-destinations.error");
            logOrDie(message, Project.MSG_WARN);
            return;
        }

        // Add the files of the includes attribute to the list
        if (srcDirUrl != null && !srcDirUrl.equals(destDirUrl) && filesList != null && !filesList.isEmpty()) {
            if (!srcDirUrl.endsWith("/")) {
                srcDirUrl += "/";
            }
            final StringTokenizer tok = new StringTokenizer(filesList, ", \t\n\r\f", false);
            while (tok.hasMoreTokens()) {
                String nextFile = tok.nextToken();

                // Basic compatibility with Ant fileset for directories
                if (nextFile.endsWith("/**")) {
                    nextFile = nextFile.substring(0, nextFile.length() - 2);
                }

                final SourceInfo src = new SourceInfo();
                src.setFile(srcDirUrl + nextFile);
                addConfiguredSrc(src);
            }
        }

        if (srcFiles.isEmpty()) {
            final String message = Messages.getString("vfs.tasks/sync.no-source-files.warn");
            logOrDie(message, Project.MSG_WARN);
            return;
        }

        // Perform the sync
        try {
            if (destFileUrl != null) {
                handleSingleFile();
            } else {
                handleFiles();
            }
        } catch (final BuildException e) {
            throw e;
        } catch (final Exception e) {
            throw new BuildException(e.getMessage(), e);
        }
    }

    protected void logOrDie(final String message, final int level) {
        if (!isFailonerror()) {
            log(message, level);
            return;
        }
        throw new BuildException(message);
    }

    /**
     * Copies the source files to the destination.
     */
    private void handleFiles() throws Exception {
        // Locate the destination folder, and make sure it exists
        final FileObject destFolder = resolveFile(destDirUrl);
        destFolder.createFolder();

        // Locate the source files, and make sure they exist
        FileName srcDirName = null;
        if (srcDirUrl != null) {
            srcDirName = resolveFile(srcDirUrl).getName();
        }
        final ArrayList<FileObject> srcs = new ArrayList<>();
        for (final SourceInfo src : srcFiles) {
            // Locate the source file, and make sure it exists
            final FileObject srcFile = resolveFile(src.file);
            if (!srcFile.exists()) {
                final String message = Messages.getString("vfs.tasks/sync.src-file-no-exist.warn", srcFile);
                logOrDie(message, Project.MSG_WARN);
            } else {
                srcs.add(srcFile);
            }
        }

        // Scan the source files
        final Set<FileObject> destFiles = new HashSet<>();
        for (final FileObject rootFile : srcs) {
            final FileName rootName = rootFile.getName();

            if (rootFile.isFile()) {
                // Build the destination file name
                String relName = null;
                if (srcDirName == null || !srcDirIsBase) {
                    relName = rootName.getBaseName();
                } else {
                    relName = srcDirName.getRelativeName(rootName);
                }
                final FileObject destFile = destFolder.resolveFile(relName, NameScope.DESCENDENT);

                // Do the copy
                handleFile(destFiles, rootFile, destFile);
            } else {
                // Find matching files
                // If srcDirIsBase is true, select also the sub-directories
                final FileObject[] files = rootFile
                        .findFiles(srcDirIsBase ? Selectors.SELECT_ALL : Selectors.SELECT_FILES);
                for (final FileObject srcFile : files) {
                    // Build the destination file name
                    String relName = null;
                    if (srcDirName == null || !srcDirIsBase) {
                        relName = rootName.getRelativeName(srcFile.getName());
                    } else {
                        relName = srcDirName.getRelativeName(srcFile.getName());
                    }

                    final FileObject destFile = destFolder.resolveFile(relName, NameScope.DESCENDENT);

                    // Do the copy
                    handleFile(destFiles, srcFile, destFile);
                }
            }
        }

        // Scan the destination files for files with no source file
        if (detectMissingSourceFiles()) {
            final FileObject[] allDestFiles = destFolder.findFiles(Selectors.SELECT_FILES);
            for (final FileObject destFile : allDestFiles) {
                if (!destFiles.contains(destFile)) {
                    handleMissingSourceFile(destFile);
                }
            }
        }
    }

    /**
     * Handles a single file, checking for collisions where more than one source file maps to the same destination file.
     */
    private void handleFile(final Set<FileObject> destFiles, final FileObject srcFile, final FileObject destFile)
            throws Exception
    {
        // Check for duplicate source files
        if (destFiles.contains(destFile)) {
            final String message = Messages.getString("vfs.tasks/sync.duplicate-source-files.warn", destFile);
            logOrDie(message, Project.MSG_WARN);
        } else {
            destFiles.add(destFile);
        }

        // Handle the file
        handleFile(srcFile, destFile);
    }

    /**
     * Copies a single file.
     */
    private void handleSingleFile() throws Exception {
        // Make sure there is exactly one source file, and that it exists
        // and is a file.
        if (srcFiles.size() > 1) {
            final String message = Messages.getString("vfs.tasks/sync.too-many-source-files.error");
            logOrDie(message, Project.MSG_WARN);
            return;
        }
        final SourceInfo src = srcFiles.get(0);
        final FileObject srcFile = resolveFile(src.file);
        if (!srcFile.isFile()) {
            final String message = Messages.getString("vfs.tasks/sync.source-not-file.error", srcFile);
            logOrDie(message, Project.MSG_WARN);
            return;
        }

        // Locate the destination file
        final FileObject destFile = resolveFile(destFileUrl);

        // Do the copy
        handleFile(srcFile, destFile);
    }

    /**
     * Handles a single source file.
     */
    private void handleFile(final FileObject srcFile, final FileObject destFile) throws Exception {
        if (!destFile.exists()
                || srcFile.getContent().getLastModifiedTime() > destFile.getContent().getLastModifiedTime()) {
            // Destination file is out-of-date
            handleOutOfDateFile(srcFile, destFile);
        } else {
            // Destination file is up-to-date
            handleUpToDateFile(srcFile, destFile);
        }
    }

    /**
     * Handles an out-of-date file.
     * <p>
     * This is a file where the destination file either doesn't exist, or is older than the source file.
     * <p>
     * This implementation does nothing.
     *
     * @param srcFile The source file.
     * @param destFile The destination file.
     * @throws Exception Implementation can throw any Exception.
     */
    protected void handleOutOfDateFile(final FileObject srcFile, final FileObject destFile) throws Exception {
    }

    /**
     * Handles an up-to-date file.
     * <p>
     * This is where the destination file exists and is newer than the source file.
     * <p>
     * This implementation does nothing.
     *
     * @param srcFile The source file.
     * @param destFile The destination file.
     * @throws Exception Implementation can throw any Exception.
     */
    protected void handleUpToDateFile(final FileObject srcFile, final FileObject destFile) throws Exception {
    }

    /**
     * Handles a destination for which there is no corresponding source file.
     * <p>
     * This implementation does nothing.
     *
     * @param destFile The existing destination file.
     * @throws Exception Implementation can throw any Exception.
     */
    protected void handleMissingSourceFile(final FileObject destFile) throws Exception {
    }

    /**
     * Check if this task cares about destination files with a missing source file.
     * <p>
     * This implementation returns false.
     *
     * @return True if missing file is detected.
     */
    protected boolean detectMissingSourceFiles() {
        return false;
    }

    /**
     * Information about a source file.
     */
    public static class SourceInfo {
        private String file;

        public void setFile(final String file) {
            this.file = file;
        }
    }
}
| apache-2.0 |
xloye/tddl5 | tddl-executor/src/main/java/com/taobao/tddl/executor/function/scalar/math/Round.java | 2222 | package com.taobao.tddl.executor.function.scalar.math;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
import com.taobao.tddl.executor.common.ExecutionContext;
import com.taobao.tddl.executor.function.ScalarFunction;
import com.taobao.tddl.executor.utils.ExecUtils;
import com.taobao.tddl.optimizer.core.datatype.DataType;
/**
* Rounds the argument X to D decimal places. The rounding algorithm depends on
* the data type of X. D defaults to 0 if not specified. D can be negative to
* cause D digits left of the decimal point of the value X to become zero.
*
* <pre>
* mysql> SELECT ROUND(-1.23);
* -> -1
* mysql> SELECT ROUND(-1.58);
* -> -2
* mysql> SELECT ROUND(1.58);
* -> 2
* mysql> SELECT ROUND(1.298, 1);
* -> 1.3
* mysql> SELECT ROUND(1.298, 0);
* -> 1
* mysql> SELECT ROUND(23.298, -1);
* -> 20
* </pre>
*
* see. http://dev.mysql.com/doc/refman/5.6/en/mathematical-functions.html#
* function_round
*
* @author jianghang 2014-4-14 下午10:47:45
* @since 5.0.7
*/
public class Round extends ScalarFunction {

    /**
     * Evaluates ROUND(X[, D]): rounds X to D decimal places (D defaults to 0)
     * using HALF_UP, matching MySQL semantics for exact-value numbers.
     *
     * @param args args[0] is X; optional args[1] is D (may be negative)
     * @param ec   execution context (unused here)
     * @return the rounded value converted to the return type, or null if X is null
     */
    @Override
    public Object compute(Object[] args, ExecutionContext ec) {
        DataType type = getReturnType();
        if (ExecUtils.isNull(args[0])) {
            return null;
        }

        BigDecimal d = DataType.BigDecimalType.convertFrom(args[0]);
        int x = 0;
        if (args.length >= 2 && !ExecUtils.isNull(args[1])) {
            x = DataType.IntegerType.convertFrom(args[1]);
        }

        if (x >= 0) {
            // Round to x decimal places via setScale. The previous implementation
            // derived a significant-digit count (d.precision() - d.scale() + x) and
            // used MathContext; when that count came out 0, MathContext(0) means
            // UNLIMITED precision, so values such as ROUND(0.5) were returned
            // unrounded (0.5 instead of 1). Only shrink the scale: widening it
            // (e.g. ROUND(1.2, 3)) leaves the value untouched, as before.
            if (d.scale() > x) {
                d = d.setScale(x, RoundingMode.HALF_UP);
            }
        } else {
            // Negative D zeroes out |D| digits left of the decimal point:
            // shift right, round to an integer, then shift back by multiplying.
            x = Math.abs(x);
            d = d.movePointLeft(x).setScale(0, RoundingMode.HALF_UP).multiply(new BigDecimal(10).pow(x));
        }

        return type.convertFrom(d);
    }

    @Override
    public DataType getReturnType() {
        // ROUND's return type follows its first argument's type.
        return getFirstArgType();
    }

    @Override
    public String[] getFunctionNames() {
        return new String[] { "ROUND" };
    }
}
| apache-2.0 |
apache/incubator-asterixdb | asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableIntermediateAvgAggregateFunction.java | 2638 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.runtime.aggregates.serializable.std;
import java.io.DataOutput;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.EnumDeserializer;
import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.SourceLocation;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
/**
 * Serializable AVG aggregate in its INTERMEDIATE role: every input tuple is a
 * partial result produced upstream, and the output is itself a (combined)
 * partial result — never the final average.
 */
public class SerializableIntermediateAvgAggregateFunction extends AbstractSerializableAvgAggregateFunction {

    public SerializableIntermediateAvgAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
            SourceLocation sourceLoc) throws HyracksDataException {
        super(args, context, sourceLoc);
    }

    /** Merges an incoming partial result into the running state. */
    @Override
    public void step(IFrameTupleReference tuple, byte[] state, int start, int len) throws HyracksDataException {
        processPartialResults(tuple, state, start, len);
    }

    /** Intermediate output stays partial, so finish() emits the partial form. */
    @Override
    public void finish(byte[] state, int start, int len, DataOutput result) throws HyracksDataException {
        finishPartialResults(state, start, len, result);
    }

    @Override
    public void finishPartial(byte[] state, int start, int len, DataOutput result) throws HyracksDataException {
        finishPartialResults(state, start, len, result);
    }

    /** A null input collapses the aggregate's type tag to NULL. */
    @Override
    protected void processNull(byte[] state, int start) {
        state[start + AGG_TYPE_OFFSET] = ATypeTag.SERIALIZED_NULL_TYPE_TAG;
    }

    /** Once the state is NULL, further steps are skipped. */
    @Override
    protected boolean skipStep(byte[] state, int start) {
        ATypeTag aggType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(state[start + AGG_TYPE_OFFSET]);
        return aggType == ATypeTag.NULL;
    }
}
| apache-2.0 |
GabrielBrascher/cloudstack | plugins/ca/root-ca/src/main/java/org/apache/cloudstack/ca/provider/RootCAProvider.java | 20522 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.ca.provider;
import java.io.IOException;
import java.io.StringReader;
import java.math.BigInteger;
import java.net.InetAddress;
import java.security.InvalidKeyException;
import java.security.KeyManagementException;
import java.security.KeyPair;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SecureRandom;
import java.security.Security;
import java.security.SignatureException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.security.spec.InvalidKeySpecException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.xml.bind.DatatypeConverter;
import org.apache.cloudstack.ca.CAManager;
import org.apache.cloudstack.framework.ca.CAProvider;
import org.apache.cloudstack.framework.ca.Certificate;
import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.Configurable;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.cloudstack.utils.security.CertUtils;
import org.apache.cloudstack.utils.security.KeyStoreUtils;
import org.apache.log4j.Logger;
import org.bouncycastle.asn1.pkcs.Attribute;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.Extensions;
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequest;
import org.bouncycastle.util.io.pem.PemObject;
import org.bouncycastle.util.io.pem.PemReader;
import com.cloud.certificate.dao.CrlDao;
import com.cloud.utils.component.AdapterBase;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils;
import com.google.common.base.Strings;
public final class RootCAProvider extends AdapterBase implements CAProvider, Configurable {
private static final Logger LOG = Logger.getLogger(RootCAProvider.class);

// Validity period (in years) of the root CA certificate itself.
public static final Integer caValidityYears = 30;
// Keystore alias under which the root CA key/certificate is stored.
public static final String caAlias = "root";
// Keystore alias for the management server's key/certificate.
public static final String managementAlias = "management";

// Shared CA state; null until initialized (setup code is outside this view).
private static KeyPair caKeyPair = null;
private static X509Certificate caCertificate = null;
private static KeyStore managementKeyStore = null;

@Inject
private ConfigurationDao configDao;
@Inject
private CrlDao crlDao;

////////////////////////////////////////////////////
/////////////// Root CA Settings ///////////////////
////////////////////////////////////////////////////

// "Hidden" config keys hold the CA key material/certificate (not user-facing).
private static ConfigKey<String> rootCAPrivateKey = new ConfigKey<>("Hidden", String.class,
        "ca.plugin.root.private.key",
        null,
        "The ROOT CA private key.", true);

private static ConfigKey<String> rootCAPublicKey = new ConfigKey<>("Hidden", String.class,
        "ca.plugin.root.public.key",
        null,
        "The ROOT CA public key.", true);

private static ConfigKey<String> rootCACertificate = new ConfigKey<>("Hidden", String.class,
        "ca.plugin.root.ca.certificate",
        null,
        "The ROOT CA certificate.", true);

// Issuer DN used when generating the root certificate.
private static ConfigKey<String> rootCAIssuerDN = new ConfigKey<>("Advanced", String.class,
        "ca.plugin.root.issuer.dn",
        "CN=ca.cloudstack.apache.org",
        "The ROOT CA issuer distinguished name.", true);

// protected: referenced by other classes/tests in the package (keep visibility).
protected static ConfigKey<Boolean> rootCAAuthStrictness = new ConfigKey<>("Advanced", Boolean.class,
        "ca.plugin.root.auth.strictness",
        "false",
        "Set client authentication strictness, setting to true will enforce and require client certificate for authentication in applicable CA providers.", true);

private static ConfigKey<Boolean> rootCAAllowExpiredCert = new ConfigKey<>("Advanced", Boolean.class,
        "ca.plugin.root.allow.expired.cert",
        "true",
        "When set to true, it will allow expired client certificate during SSL handshake.", true);
///////////////////////////////////////////////////////////
/////////////// Root CA Private Methods ///////////////////
///////////////////////////////////////////////////////////
/**
 * Generates a fresh key pair and a CA-signed client certificate whose subject CN
 * is the first domain name; all given domain names and IPs become SANs.
 *
 * @param domainNames  DNS names; the first entry is required and becomes the CN
 * @param ipAddresses  IP addresses to add as SANs (may be null/empty)
 * @param validityDays validity period of the issued certificate, in days
 * @return the signed certificate, its private key, and the CA chain
 */
private Certificate generateCertificate(final List<String> domainNames, final List<String> ipAddresses, final int validityDays) throws NoSuchAlgorithmException, InvalidKeyException, NoSuchProviderException, CertificateException, SignatureException, IOException, OperatorCreationException {
    final boolean hasPrimaryDomain = domainNames != null && domainNames.size() >= 1
            && !Strings.isNullOrEmpty(domainNames.get(0));
    if (!hasPrimaryDomain) {
        throw new CloudRuntimeException("No domain name is specified, cannot generate certificate");
    }
    // The first domain name becomes the certificate's subject CN.
    final String subjectDn = "CN=" + domainNames.get(0);

    final KeyPair clientKeyPair = CertUtils.generateRandomKeyPair(CAManager.CertKeySize.value());
    final X509Certificate signedCert = CertUtils.generateV3Certificate(caCertificate, caKeyPair,
            clientKeyPair.getPublic(), subjectDn, CAManager.CertSignatureAlgorithm.value(), validityDays,
            domainNames, ipAddresses);

    final List<X509Certificate> caChain = Collections.singletonList(caCertificate);
    return new Certificate(signedCert, clientKeyPair.getPrivate(), caChain);
}
/**
 * Issues a CA-signed certificate from a PEM-encoded PKCS#10 CSR.
 *
 * Subject and public key come from the CSR. Subject alternative names are the
 * union of the provided names/ips and any SAN extension found in the CSR's
 * extensionRequest attribute. The returned Certificate carries no private key —
 * the requester keeps it.
 *
 * @param csr          PEM-encoded PKCS#10 certificate signing request
 * @param names        extra DNS names to include as SANs (may be null)
 * @param ips          extra IP addresses to include as SANs (may be null)
 * @param validityDays validity period of the issued certificate, in days
 * @return the signed certificate with the CA chain and a null private key
 * @throws CloudRuntimeException if the CSR cannot be read as a PEM object
 */
private Certificate generateCertificateUsingCsr(final String csr, final List<String> names, final List<String> ips, final int validityDays) throws NoSuchAlgorithmException, InvalidKeyException, NoSuchProviderException, CertificateException, SignatureException, IOException, OperatorCreationException {
    final List<String> dnsNames = new ArrayList<>();
    final List<String> ipAddresses = new ArrayList<>();

    if (names != null) {
        dnsNames.addAll(names);
    }
    if (ips != null) {
        ipAddresses.addAll(ips);
    }

    PemObject pemObject = null;
    // Fix: try-with-resources — the PemReader was previously never closed (leak).
    try (PemReader pemReader = new PemReader(new StringReader(csr))) {
        pemObject = pemReader.readPemObject();
    } catch (IOException e) {
        // Fall through; the null check below reports the unusable CSR.
        LOG.error("Failed to read provided CSR string as a PEM object", e);
    }
    if (pemObject == null) {
        throw new CloudRuntimeException("Unable to read/process CSR: " + csr);
    }

    final JcaPKCS10CertificationRequest request = new JcaPKCS10CertificationRequest(pemObject.getContent());
    final String subject = request.getSubject().toString();
    // Collect SANs requested via the CSR's pkcs_9_at_extensionRequest attribute.
    for (final Attribute attribute : request.getAttributes()) {
        if (attribute == null) {
            continue;
        }
        if (attribute.getAttrType().equals(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest)) {
            final Extensions extensions = Extensions.getInstance(attribute.getAttrValues().getObjectAt(0));
            final GeneralNames gns = GeneralNames.fromExtensions(extensions, Extension.subjectAlternativeName);
            if (gns != null && gns.getNames() != null && gns.getNames().length > 0) {
                for (final GeneralName name : gns.getNames()) {
                    if (name.getTagNo() == GeneralName.dNSName) {
                        dnsNames.add(name.getName().toString());
                    }
                    if (name.getTagNo() == GeneralName.iPAddress) {
                        // The encoded IP apparently renders as "#<hex>": the code strips
                        // the first character and hex-decodes the rest — confirm against
                        // the Bouncy Castle GeneralName/DEROctetString docs.
                        final InetAddress address = InetAddress.getByAddress(DatatypeConverter.parseHexBinary(name.getName().toString().substring(1)));
                        ipAddresses.add(address.toString().replace("/", ""));
                    }
                }
            }
        }
    }

    final X509Certificate clientCertificate = CertUtils.generateV3Certificate(
            caCertificate, caKeyPair, request.getPublicKey(),
            subject, CAManager.CertSignatureAlgorithm.value(),
            validityDays, dnsNames, ipAddresses);
    // No private key is returned: it never left the CSR requester.
    return new Certificate(clientCertificate, null, Collections.singletonList(caCertificate));
}
////////////////////////////////////////////////////////
/////////////// Root CA API Handlers ///////////////////
////////////////////////////////////////////////////////
/** The root CA provider can always provision (issue) certificates itself. */
@Override
public boolean canProvisionCertificates() {
    return true;
}
/** @return the single root CA certificate as a one-element chain */
@Override
public List<X509Certificate> getCaCertificate() {
    return Collections.singletonList(caCertificate);
}
/**
 * Issues a client/server certificate (with a freshly generated key pair) for the
 * given names/IPs, wrapping any crypto failure in a CloudRuntimeException.
 */
@Override
public Certificate issueCertificate(final List<String> domainNames, final List<String> ipAddresses, final int validityDays) {
    try {
        return generateCertificate(domainNames, ipAddresses, validityDays);
    } catch (final CertificateException | IOException | SignatureException | NoSuchAlgorithmException | NoSuchProviderException | InvalidKeyException | OperatorCreationException e) {
        LOG.error("Failed to create client certificate, due to: ", e);
        throw new CloudRuntimeException("Failed to generate certificate due to:" + e.getMessage());
    }
}
/**
 * Issues a certificate for an externally supplied CSR (the requester keeps the
 * private key), wrapping any crypto failure in a CloudRuntimeException.
 */
@Override
public Certificate issueCertificate(final String csr, final List<String> domainNames, final List<String> ipAddresses, final int validityDays) {
    try {
        return generateCertificateUsingCsr(csr, domainNames, ipAddresses, validityDays);
    } catch (final CertificateException | IOException | SignatureException | NoSuchAlgorithmException | NoSuchProviderException | InvalidKeyException | OperatorCreationException e) {
        LOG.error("Failed to generate certificate from CSR: ", e);
        throw new CloudRuntimeException("Failed to generate certificate using CSR due to:" + e.getMessage());
    }
}
/**
 * No-op for the root CA provider: revocation bookkeeping is handled elsewhere
 * (CRL persistence), so this always reports success.
 */
@Override
public boolean revokeCertificate(final BigInteger certSerial, final String certCn) {
    return true;
}
////////////////////////////////////////////////////////////
/////////////// Root CA Trust Management ///////////////////
////////////////////////////////////////////////////////////
/**
 * Builds an in-memory JKS keystore holding the CA private key and certificate
 * under the CA alias, or returns null when the CA material is not yet loaded.
 */
private KeyStore getCaKeyStore() throws CertificateException, NoSuchAlgorithmException, IOException, KeyStoreException {
    // Nothing to build until both halves of the CA identity are available.
    if (caKeyPair == null || caCertificate == null) {
        return null;
    }
    final KeyStore keyStore = KeyStore.getInstance("JKS");
    keyStore.load(null, null);
    keyStore.setKeyEntry(caAlias, caKeyPair.getPrivate(), getKeyStorePassphrase(), new X509Certificate[]{caCertificate});
    return keyStore;
}
/**
 * Creates a server-side SSLEngine backed by the CA keystore. Trust decisions are
 * delegated to RootCACustomTrustManager, which validates peer certificates against
 * the CA certificate and the CRL DAO; client authentication is only required when
 * auth strictness is enabled.
 *
 * @param remoteAddress peer address passed to the trust manager
 * @param certMap map the trust manager uses for the peer's certificate(s)
 */
@Override
public SSLEngine createSSLEngine(final SSLContext sslContext, final String remoteAddress, final Map<String, X509Certificate> certMap) throws KeyManagementException, UnrecoverableKeyException, NoSuchAlgorithmException, KeyStoreException, IOException, CertificateException {
    final KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
    final TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
    final KeyStore ks = getCaKeyStore();
    kmf.init(ks, getKeyStorePassphrase());
    // NOTE(review): tmf is initialized but its trust managers are never used below
    // (the custom trust manager replaces them) — confirm this is intentional.
    tmf.init(ks);
    final boolean authStrictness = rootCAAuthStrictness.value();
    final boolean allowExpiredCertificate = rootCAAllowExpiredCert.value();
    TrustManager[] tms = new TrustManager[]{new RootCACustomTrustManager(remoteAddress, authStrictness, allowExpiredCertificate, certMap, caCertificate, crlDao)};
    sslContext.init(kmf.getKeyManagers(), tms, new SecureRandom());
    final SSLEngine sslEngine = sslContext.createSSLEngine();
    sslEngine.setNeedClientAuth(authStrictness);
    return sslEngine;
}
/** @return the management-server keystore populated by loadManagementKeyStore() */
@Override
public KeyStore getManagementKeyStore() throws KeyStoreException {
    return managementKeyStore;
}
/** @return the fixed, well-known keystore passphrase shared across the deployment */
@Override
public char[] getKeyStorePassphrase() {
    return KeyStoreUtils.DEFAULT_KS_PASSPHRASE;
}
/////////////////////////////////////////////////
/////////////// Root CA Setup ///////////////////
/////////////////////////////////////////////////
/** Converts the configured CA validity in years to days (365 days per year, no leap handling). */
private int getCaValidityDays() {
    return 365 * caValidityYears;
}
/**
 * Generates a fresh root CA key pair (twice the configured certificate key size)
 * and persists both halves, PEM-encoded, in the config store. Persistence failures
 * are only logged; success is ultimately judged by whether the pair can be loaded back.
 *
 * @return true if the new key pair could subsequently be loaded via loadRootCAKeyPair()
 */
private boolean saveNewRootCAKeypair() {
    try {
        LOG.debug("Generating root CA public/private keys");
        final KeyPair keyPair = CertUtils.generateRandomKeyPair(2 * CAManager.CertKeySize.value());
        if (!configDao.update(rootCAPublicKey.key(), rootCAPublicKey.category(), CertUtils.publicKeyToPem(keyPair.getPublic()))) {
            LOG.error("Failed to save RootCA public key");
        }
        if (!configDao.update(rootCAPrivateKey.key(), rootCAPrivateKey.category(), CertUtils.privateKeyToPem(keyPair.getPrivate()))) {
            LOG.error("Failed to save RootCA private key");
        }
    } catch (final NoSuchProviderException | NoSuchAlgorithmException | IOException e) {
        LOG.error("Failed to generate/save RootCA private/public keys due to exception:", e);
    }
    // Round-trip through the config store to confirm the keys were really persisted.
    return loadRootCAKeyPair();
}
/**
 * Issues a self-signed root CA certificate from the already-loaded CA key pair
 * and persists it, PEM-encoded, in the config store.
 *
 * @return true if the new certificate could subsequently be loaded via loadRootCACertificate()
 * @throws CloudRuntimeException if the CA key pair has not been initialized yet
 */
private boolean saveNewRootCACertificate() {
    if (caKeyPair == null) {
        throw new CloudRuntimeException("Cannot issue self-signed root CA certificate as CA keypair is not initialized");
    }
    try {
        LOG.debug("Generating root CA certificate");
        // Self-signed: issuer is null and the subject public key is the CA's own.
        final X509Certificate rootCaCertificate = CertUtils.generateV3Certificate(
                null, caKeyPair, caKeyPair.getPublic(),
                rootCAIssuerDN.value(), CAManager.CertSignatureAlgorithm.value(),
                getCaValidityDays(), null, null);
        if (!configDao.update(rootCACertificate.key(), rootCACertificate.category(), CertUtils.x509CertificateToPem(rootCaCertificate))) {
            LOG.error("Failed to update RootCA public/x509 certificate");
        }
    } catch (final CertificateException | NoSuchAlgorithmException | NoSuchProviderException | SignatureException | InvalidKeyException | OperatorCreationException | IOException e) {
        LOG.error("Failed to generate RootCA certificate from private/public keys due to exception:", e);
        return false;
    }
    // Round-trip through the config store to confirm persistence worked.
    return loadRootCACertificate();
}
/**
 * Loads the CA key pair from its PEM-encoded config-store entries into
 * {@code caKeyPair}.
 *
 * @return true if both halves were present and parsed successfully
 */
private boolean loadRootCAKeyPair() {
    if (Strings.isNullOrEmpty(rootCAPublicKey.value()) || Strings.isNullOrEmpty(rootCAPrivateKey.value())) {
        return false;
    }
    try {
        caKeyPair = new KeyPair(CertUtils.pemToPublicKey(rootCAPublicKey.value()), CertUtils.pemToPrivateKey(rootCAPrivateKey.value()));
    } catch (InvalidKeySpecException | IOException e) {
        LOG.error("Failed to load saved RootCA private/public keys due to exception:", e);
        return false;
    }
    return caKeyPair.getPrivate() != null && caKeyPair.getPublic() != null;
}
/**
 * Loads the CA certificate from its PEM-encoded config-store entry into
 * {@code caCertificate} and verifies its signature against the CA public key.
 * NOTE(review): relies on {@code caKeyPair} having been loaded first (as setupCA()
 * does) — calling this before loadRootCAKeyPair() would NPE on verify().
 *
 * @return true if the certificate was present, parsed and verified
 */
private boolean loadRootCACertificate() {
    if (Strings.isNullOrEmpty(rootCACertificate.value())) {
        return false;
    }
    try {
        caCertificate = CertUtils.pemToX509Certificate(rootCACertificate.value());
        caCertificate.verify(caKeyPair.getPublic());
    } catch (final IOException | CertificateException | NoSuchAlgorithmException | InvalidKeyException | SignatureException | NoSuchProviderException e) {
        LOG.error("Failed to load saved RootCA certificate due to exception:", e);
        return false;
    }
    return caCertificate != null;
}
/**
 * Lazily builds the in-memory management-server keystore: issues a server
 * certificate for this host's name and NIC IPs, then stores the CA certificate
 * and the server key entry (with its chain) under their respective aliases.
 *
 * @return true once the keystore is populated (idempotent on subsequent calls)
 * @throws CloudRuntimeException if a server certificate could not be issued
 */
private boolean loadManagementKeyStore() {
    if (managementKeyStore != null) {
        return true;
    }
    final Certificate serverCertificate = issueCertificate(Collections.singletonList(NetUtils.getHostName()),
            NetUtils.getAllDefaultNicIps(), getCaValidityDays());
    if (serverCertificate == null || serverCertificate.getPrivateKey() == null) {
        throw new CloudRuntimeException("Failed to generate management server certificate and load management server keystore");
    }
    LOG.info("Creating new management server certificate and keystore");
    try {
        managementKeyStore = KeyStore.getInstance("JKS");
        managementKeyStore.load(null, null);
        managementKeyStore.setCertificateEntry(caAlias, caCertificate);
        managementKeyStore.setKeyEntry(managementAlias, serverCertificate.getPrivateKey(), getKeyStorePassphrase(),
                new X509Certificate[]{serverCertificate.getClientCertificate(), caCertificate});
    } catch (final CertificateException | NoSuchAlgorithmException | KeyStoreException | IOException e) {
        LOG.error("Failed to load root CA management-server keystore due to exception: ", e);
        return false;
    }
    // NOTE(review): managementKeyStore is always non-null here, so this is effectively 'return true'.
    return managementKeyStore != null;
}
/**
 * One-time CA bootstrap sequence: load (or generate-and-persist) the CA key pair,
 * then the CA certificate, then the management-server keystore — in that order,
 * since each step depends on the previous one.
 *
 * @return true only if all three steps succeeded
 */
private boolean setupCA() {
    if (!loadRootCAKeyPair() && !saveNewRootCAKeypair()) {
        LOG.error("Failed to save and load root CA keypair");
        return false;
    }
    if (!loadRootCACertificate() && !saveNewRootCACertificate()) {
        LOG.error("Failed to save and load root CA certificate");
        return false;
    }
    if (!loadManagementKeyStore()) {
        LOG.error("Failed to check and configure management server keystore");
        return false;
    }
    return true;
}
/**
 * Loads CA material previously persisted by whichever management server won the
 * setup lock in configure(): the CA key pair, the CA certificate, and the
 * management keystore.
 */
@Override
public boolean start() {
    // Bug fix: the second call used to be loadRootCAKeyPair() again, so the CA
    // certificate was never loaded here — despite configure()'s warning that the
    // startup method "will try to load the CA certificate and keypair".
    return loadRootCAKeyPair() && loadRootCACertificate() && loadManagementKeyStore();
}
/**
 * One-time CA setup: registers the BouncyCastle provider and, under a global
 * inter-server lock (so only one management server performs generation), runs
 * setupCA(). If the lock cannot be acquired, setup is skipped and start() is
 * expected to load the already-persisted CA material instead.
 * NOTE(review): the boolean result of super.configure() is ignored — confirm intended.
 */
@Override
public boolean configure(String name, Map<String, Object> params) throws ConfigurationException {
    super.configure(name, params);
    Security.addProvider(new BouncyCastleProvider());
    final GlobalLock caLock = GlobalLock.getInternLock("RootCAProviderSetup");
    try {
        // Wait up to 5 minutes for the setup lock.
        if (caLock.lock(5 * 60)) {
            try {
                return setupCA();
            } finally {
                caLock.unlock();
            }
        } else {
            LOG.error("Failed to grab lock and setup CA, startup method will try to load the CA certificate and keypair.");
        }
    } finally {
        caLock.releaseRef();
    }
    return true;
}
///////////////////////////////////////////////////////
/////////////// Root CA Descriptors ///////////////////
///////////////////////////////////////////////////////
/** Component name under which this provider's config keys are registered. */
@Override
public String getConfigComponentName() {
    return RootCAProvider.class.getSimpleName();
}
/** All global configuration keys owned by the root CA provider. */
@Override
public ConfigKey<?>[] getConfigKeys() {
    return new ConfigKey<?>[]{
            rootCAPrivateKey,
            rootCAPublicKey,
            rootCACertificate,
            rootCAIssuerDN,
            rootCAAuthStrictness,
            rootCAAllowExpiredCert
    };
}
/** Short identifier used to select this CA provider in configuration. */
@Override
public String getProviderName() {
    return "root";
}
/** Human-readable description shown in provider listings. */
@Override
public String getDescription() {
    return "CloudStack's Root CA provider plugin";
}
}
| apache-2.0 |
jmandawg/camel | tests/camel-itest-spring-boot/src/test/java/org/apache/camel/itest/springboot/CamelSwaggerJavaTest.java | 1958 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.itest.springboot;
import org.apache.camel.itest.springboot.util.ArquillianPackager;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.Archive;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Spring Boot integration test for the camel-swagger-java module, packaged and
 * deployed via Arquillian.
 */
@RunWith(Arquillian.class)
public class CamelSwaggerJavaTest extends AbstractSpringBootTestSupport {

    /** Overridable with -DhibernateValidatorVersion; defaults to 5.2.4.Final. */
    private static final String HIBERNATE_VALIDATOR_VERSION = System.getProperty("hibernateValidatorVersion", "5.2.4.Final");

    /** Builds the test configuration: the module under test plus the validator dependency. */
    public static ITestConfig createTestConfig() {
        final String moduleName = inferModuleName(CamelSwaggerJavaTest.class);
        final String validatorGav = "org.hibernate:hibernate-validator:" + HIBERNATE_VALIDATOR_VERSION;
        return new ITestConfigBuilder()
                .module(moduleName)
                .dependency(validatorGav)
                .build();
    }

    @Deployment
    public static Archive<?> createSpringBootPackage() throws Exception {
        return ArquillianPackager.springBootPackage(createTestConfig());
    }

    @Test
    public void componentTests() throws Exception {
        // no component tests
        this.runModuleUnitTestsIfEnabled(config);
    }
}
| apache-2.0 |
Gadreel/divconq | divconq.core/src/main/java/divconq/io/OutputWrapper.java | 839 | /* ************************************************************************
#
# DivConq
#
# http://divconq.com/
#
# Copyright:
# Copyright 2014 eTimeline, LLC. All rights reserved.
#
# License:
# See the license.txt file in the project's top-level directory for details.
#
# Authors:
# * Andy White
#
************************************************************************ */
package divconq.io;
import java.io.IOException;
import java.io.OutputStream;
import divconq.lang.Memory;
/**
 * An OutputStream adapter that appends every written byte to a backing
 * {@link Memory} buffer.
 */
public class OutputWrapper extends OutputStream {
	protected Memory mem = null;

	/** Creates a wrapper over a brand-new, empty Memory buffer. */
	public OutputWrapper() {
		this(new Memory());
	}

	/** Creates a wrapper that appends to the caller-supplied buffer. */
	public OutputWrapper(Memory mem) {
		this.mem = mem;
	}

	@Override
	public void write(int value) throws IOException {
		// Per the OutputStream contract, only the low 8 bits of 'value' are written.
		this.mem.writeByte((byte) value);
	}
}
| apache-2.0 |
jakemannix/decomposer | src/org/decomposer/math/vector/HashMapDoubleMatrix.java | 3322 | package org.decomposer.math.vector;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import org.decomposer.math.vector.array.DenseMapVectorFactory;
/**
 * A sparse row-oriented matrix backed by a HashMap from row index to row vector.
 * Rows that were never set are simply absent ({@link #get} returns null for them).
 */
public class HashMapDoubleMatrix implements DoubleMatrix, Serializable
{
  private static final long serialVersionUID = 1L;

  /** Row index -> row vector. */
  protected Map<Integer, MapVector> _map;

  /**
   * Factory for result vectors (times/timesSquared/transpose).
   * NOTE(review): transient, so it is null after deserialization — a deserialized
   * instance would NPE in times()/timesSquared(); confirm intended usage.
   */
  protected transient VectorFactory _vectorFactory;

  /** Lazily computed, cached column count (see {@link #numCols()}). */
  protected int _numCols = 0;

  /**
   *
   * @param vectorFactory is used for creating new vectors when calling <code>times(MapVector vector)</code>, and <code>transpose()</code>
   */
  public HashMapDoubleMatrix(VectorFactory vectorFactory)
  {
    _map = new HashMap<Integer, MapVector>();
    _vectorFactory = vectorFactory;
  }

  public HashMapDoubleMatrix()
  {
    this(new DenseMapVectorFactory());
  }

  /** Copy constructor: reuses the other matrix's factory when it has one. */
  public HashMapDoubleMatrix(DoubleMatrix other)
  {
    this(other instanceof HashMapDoubleMatrix ? ((HashMapDoubleMatrix)other)._vectorFactory : new DenseMapVectorFactory(), other);
  }

  /** Copy constructor: shallow-copies row references from <code>other</code>. */
  public HashMapDoubleMatrix(VectorFactory vectorFactory, DoubleMatrix other)
  {
    this(vectorFactory);
    for(Entry<Integer, MapVector> entry : other)
      set(entry.getKey(), entry.getValue());
  }

  /** @return the row at rowNumber, or null if that row was never set */
  public MapVector get(int rowNumber)
  {
    return _map.get(rowNumber);
  }

  /** Scales every row in place; returns this matrix for chaining. */
  public DoubleMatrix scale(double scale)
  {
    for(Map.Entry<Integer, MapVector> vectorEntry : this)
    {
      vectorEntry.getValue().scale(scale);
    }
    return this;
  }

  public void set(int rowNumber, MapVector row)
  {
    _map.put(rowNumber, row);
  }

  /** Matrix-vector product: output[row] = row . vector. A null vector yields the zero vector. */
  public MapVector times(MapVector vector)
  {
    MapVector output = _vectorFactory.zeroVector();
    if(vector != null)
    {
      for(Map.Entry<Integer, MapVector> entry : this)
      {
        output.set(entry.getKey(), vector.dot(entry.getValue()));
      }
    }
    return output;
  }

  public Iterator<Entry<Integer, MapVector>> iterator()
  {
    return _map.entrySet().iterator();
  }

  /**
   * NOTE(review): one-directional comparison — it only checks that every row of
   * THIS matrix equals the corresponding row of <code>other</code>, so it is not
   * symmetric, and hashCode() is not overridden to match (a consistent hashCode
   * cannot exist for these semantics). Kept as-is because callers may rely on the
   * loose behavior; do not use instances as hash-map keys.
   */
  @Override
  public boolean equals(Object object)
  {
    if(!(object instanceof DoubleMatrix))
      return false;
    DoubleMatrix other = (DoubleMatrix)object;
    for(Map.Entry<Integer, MapVector> entry : this)
    {
      MapVector otherVector = other.get(entry.getKey());
      if(!entry.getValue().equals(otherVector)) return false;
    }
    return true;
  }

  public int numRows()
  {
    return _map.size();
  }

  /** Maximum dimension over all rows; computed on first call and cached. */
  public int numCols()
  {
    if(_numCols <= 0)
      for(Map.Entry<Integer, MapVector> entry : this)
        _numCols = Math.max(_numCols, entry.getValue().maxDimension());
    return _numCols;
  }

  public String toString()
  {
    // StringBuilder instead of StringBuffer: no synchronization needed for a local.
    StringBuilder buf = new StringBuilder();
    buf.append("{ ");
    Iterator<Entry<Integer, MapVector>> it = iterator();
    while(it.hasNext())
    {
      Entry<Integer, MapVector> entry = it.next();
      buf.append(entry.getKey()).append(":").append(entry.getValue());
      if(it.hasNext()) buf.append(", ");
    }
    buf.append("}");
    return buf.toString();
  }

  /** Computes (A^T A) v as A^T (A v), without materializing the transpose. */
  public MapVector timesSquared(MapVector vector)
  {
    MapVector w = times(vector);
    MapVector w2 = _vectorFactory.zeroVector(numCols());
    for(Entry<Integer, MapVector> entry : this)
    {
      w2.plus(entry.getValue(), w.get(entry.getKey()));
    }
    return w2;
  }
}
| apache-2.0 |
DuncanDoyle/jbpm | jbpm-flow/src/main/java/org/jbpm/workflow/core/node/StartNode.java | 6320 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.workflow.core.node;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.jbpm.process.core.context.variable.Mappable;
import org.jbpm.process.core.event.EventTransformer;
import org.jbpm.process.core.timer.Timer;
import org.jbpm.workflow.core.impl.ExtendedNodeImpl;
import org.kie.api.definition.process.Connection;
/**
 * Default implementation of a start node.
 * <p>
 * A start node is the entry point of a (sub)process: it accepts no incoming
 * connections and at most one outgoing connection of the default type.
 * Because data can only flow OUT of a start node, all in-mapping operations
 * from {@link Mappable} throw IllegalArgumentException.
 */
public class StartNode extends ExtendedNodeImpl implements Mappable {

    // The only event a start node emits: fired when the node is exited.
    private static final String[] EVENT_TYPES =
        new String[] { EVENT_NODE_EXIT };

    private static final long serialVersionUID = 510l;

    // Triggers that can start the process; lazily created on first addTrigger().
    private List<Trigger> triggers;

    // Interrupting start-event flag (false means non-interrupting).
    private boolean isInterrupting;

    // Output data associations: start-event payload -> process variables.
    private List<DataAssociation> outMapping = new LinkedList<DataAssociation>();

    // Timer definition for timer start events, if any.
    private Timer timer;

    // Optional transformer applied to the incoming event payload.
    private EventTransformer transformer;

    /** Registers a trigger, creating the trigger list on first use. */
    public void addTrigger(Trigger trigger) {
        if (triggers == null) {
            triggers = new ArrayList<Trigger>();
        }
        triggers.add(trigger);
    }

    public void removeTrigger(Trigger trigger) {
        if (triggers != null) {
            triggers.remove(trigger);
        }
    }

    /** @return the registered triggers, or null if none were ever added */
    public List<Trigger> getTriggers() {
        return triggers;
    }

    public void setTriggers(List<Trigger> triggers) {
        this.triggers = triggers;
    }

    public String[] getActionTypes() {
        return EVENT_TYPES;
    }

    /** Start nodes never accept incoming connections. */
    public void validateAddIncomingConnection(final String type, final Connection connection) {
        throw new UnsupportedOperationException(
            "A start node [" + this.getMetaData("UniqueId") + ", " + this.getName() + "] may not have an incoming connection!");
    }

    public void validateRemoveIncomingConnection(final String type, final Connection connection) {
        throw new UnsupportedOperationException(
            "A start node [" + this.getMetaData("UniqueId") + ", " + this.getName() + "] may not have an incoming connection!");
    }

    /** Allows exactly one outgoing connection, and only of the default type. */
    public void validateAddOutgoingConnection(final String type, final Connection connection) {
        super.validateAddOutgoingConnection(type, connection);
        if (!org.jbpm.workflow.core.Node.CONNECTION_DEFAULT_TYPE.equals(type)) {
            throw new IllegalArgumentException(
                "A start node [" + this.getMetaData("UniqueId") + ", " + this.getName() + "] only accepts default outgoing connection type!");
        }
        if (getTo() != null) {
            throw new IllegalArgumentException(
                "A start node [" + this.getMetaData("UniqueId") + ", " + this.getName() + "] cannot have more than one outgoing connection!");
        }
    }

    public boolean isInterrupting() {
        return isInterrupting;
    }

    public void setInterrupting(boolean isInterrupting) {
        this.isInterrupting = isInterrupting;
    }

    // ----- Mappable: input mappings are unsupported for start nodes -----

    @Override
    public void addInMapping(String parameterName, String variableName) {
        throw new IllegalArgumentException("A start event [" + this.getMetaData("UniqueId") + ", " + this.getName() + "] does not support input mappings");
    }

    @Override
    public void setInMappings(Map<String, String> inMapping) {
        throw new IllegalArgumentException("A start event [" + this.getMetaData("UniqueId") + ", " + this.getName() + "] does not support input mappings");
    }

    @Override
    public String getInMapping(String parameterName) {
        throw new IllegalArgumentException("A start event [" + this.getMetaData("UniqueId") + ", " + this.getName() + "] does not support input mappings");
    }

    @Override
    public Map<String, String> getInMappings() {
        throw new IllegalArgumentException("A start event does not support input mappings");
    }

    @Override
    public void addInAssociation(DataAssociation dataAssociation) {
        throw new IllegalArgumentException("A start event does not support input mappings");
    }

    @Override
    public List<DataAssociation> getInAssociations() {
        throw new IllegalArgumentException("A start event does not support input mappings");
    }

    // ----- Output mappings -----

    /** Adds a simple 1:1 source->target output association. */
    public void addOutMapping(String parameterName, String variableName) {
        outMapping.add(new DataAssociation(parameterName, variableName, null, null));
    }

    /** Replaces all output associations with simple 1:1 mappings from the given map. */
    public void setOutMappings(Map<String, String> outMapping) {
        this.outMapping = new LinkedList<DataAssociation>();
        for(Map.Entry<String, String> entry : outMapping.entrySet()) {
            addOutMapping(entry.getKey(), entry.getValue());
        }
    }

    public String getOutMapping(String parameterName) {
        return getOutMappings().get(parameterName);
    }

    /**
     * Returns only the simple associations (single source, no assignments, no
     * transformation) as a source->target map; complex associations are omitted.
     */
    public Map<String, String> getOutMappings() {
        Map<String,String> out = new HashMap<String, String>();
        for(DataAssociation assoc : outMapping) {
            if( assoc.getSources().size() == 1
                && (assoc.getAssignments() == null || assoc.getAssignments().size() == 0)
                && assoc.getTransformation() == null ) {
                out.put(assoc.getSources().get(0), assoc.getTarget());
            }
        }
        return out;
    }

    public void addOutAssociation(DataAssociation dataAssociation) {
        outMapping.add(dataAssociation);
    }

    /** @return an unmodifiable view of all output associations (simple and complex) */
    public List<DataAssociation> getOutAssociations() {
        return Collections.unmodifiableList(outMapping);
    }

    public Timer getTimer() {
        return timer;
    }

    public void setTimer(Timer timer) {
        this.timer = timer;
    }

    public void setEventTransformer(EventTransformer transformer) {
        this.transformer = transformer;
    }

    public EventTransformer getEventTransformer() {
        return transformer;
    }
}
| apache-2.0 |
Sixt/ja-micro | src/main/java/com/sixt/service/framework/json/JsonRpcResponse.java | 4179 | /**
* Copyright 2016-2017 Sixt GmbH & Co. Autovermietung KG
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain a
* copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.sixt.service.framework.json;
import com.google.gson.JsonElement;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.sixt.service.framework.rpc.RpcCallException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import javax.servlet.http.HttpServletResponse;
/**
 * In-memory representation of a JSON-RPC response (id / result / error), plus an
 * HTTP status code used internally (the status is not part of the JSON-RPC spec).
 */
public class JsonRpcResponse {

    public final static String ID_FIELD = "id";
    public final static String ERROR_FIELD = "error";
    public final static String RESULT_FIELD = "result";

    private JsonElement id;
    private JsonElement result;
    private JsonElement error;

    // HTTP status code, not part of the JSON-RPC spec, used for internal purposes only
    private int statusCode;

    public JsonRpcResponse(JsonElement id, JsonElement result, JsonElement error, int statusCode) {
        setId(id);
        setResult(result);
        setError(error);
        setStatusCode(statusCode);
    }

    /** Serializes id/error/result back to a JSON-RPC response object (no status code). */
    public JsonObject toJson() {
        JsonObject retval = new JsonObject();
        retval.add(ID_FIELD, id);
        retval.add(ERROR_FIELD, error);
        retval.add(RESULT_FIELD, result);
        return retval;
    }

    @Override
    public String toString() {
        ToStringBuilder builder = new ToStringBuilder(this);
        builder.append(ID_FIELD, getId());
        builder.append(RESULT_FIELD, getResult());
        builder.append(ERROR_FIELD, getError());
        return builder.toString();
    }

    /**
     * Parses a raw JSON-RPC response string, deriving the HTTP status code:
     * 200 when there is no error; the RpcCallException's own status when the error
     * is a structured object parseable as one; 500 for any other non-blank error.
     */
    public static JsonRpcResponse fromString(String rawResponse) {
        JsonParser parser = new JsonParser();
        JsonObject response = parser.parse(rawResponse).getAsJsonObject();
        // Consistency fix: use the declared field-name constants instead of literals.
        JsonElement id = response.get(ID_FIELD);
        JsonElement errorElement = response.get(ERROR_FIELD);
        int responseStatus = HttpServletResponse.SC_OK;
        // Robustness fix: a response with no "error" member at all previously hit an NPE;
        // treat an absent member the same as an explicit JSON null.
        if (errorElement != null && !(errorElement instanceof JsonNull)) {
            if (errorElement instanceof JsonObject) {
                // Structured errors may carry an RpcCallException with its own HTTP status.
                RpcCallException rpcEx = RpcCallException.fromJson(errorElement.toString());
                if (rpcEx != null) {
                    responseStatus = rpcEx.getCategory().getHttpStatus();
                    return buildResponse(response, id, errorElement, responseStatus);
                }
                // NOTE(review): a JsonObject error that is not an RpcCallException falls
                // through to getAsString(), which throws for objects — same as before.
            }
            String error = errorElement.getAsString();
            if (StringUtils.isNotBlank(error)) {
                responseStatus = HttpServletResponse.SC_INTERNAL_SERVER_ERROR;
            }
        }
        return buildResponse(response, id, errorElement, responseStatus);
    }

    /** Builds a response, substituting JsonNull for a missing "result" member. */
    private static JsonRpcResponse buildResponse(JsonObject response, JsonElement id,
                                                 JsonElement errorElement, int statusCode) {
        JsonElement resultElement = response.get(RESULT_FIELD);
        return new JsonRpcResponse(id, resultElement == null ? JsonNull.INSTANCE : resultElement,
                errorElement, statusCode);
    }

    public JsonElement getId() {
        return id;
    }

    public void setId(JsonElement id) {
        this.id = id;
    }

    public JsonElement getResult() {
        return result;
    }

    public void setResult(JsonElement result) {
        this.result = result;
    }

    public JsonElement getError() {
        return error;
    }

    public void setError(JsonElement error) {
        this.error = error;
    }

    public int getStatusCode() { return statusCode; }

    public void setStatusCode(int statusCode) { this.statusCode = statusCode; }
}
| apache-2.0 |
flordan/final | code/programmingModel/annotations/src/main/java/es/bsc/mobile/annotations/MultiConstraints.java | 1313 | /*
* Copyright 2002-2015 Barcelona Supercomputing Center (www.bsc.es)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.bsc.mobile.annotations;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.annotation.ElementType;
/**
* The MultiConstraints annotation allows the developer to describe different
* requirements to run each implementation of a CE.
*/
// Retained at runtime so the scheduler can inspect constraints reflectively;
// applicable only to methods (one Constraints entry per implementation).
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface MultiConstraints {
    /**
     * Describes the specific requirements to run a CE implementation.
     *
     * @return returns a Constraints array with the specific constrains of each
     * method implementation
     */
    Constraints[] value();
}
| apache-2.0 |
vthangathurai/SOA-Runtime | codegen/turmeric-maven-plugin/src/main/java/org/ebayopensource/turmeric/plugins/maven/utils/TurmericMavenConstants.java | 4496 | /*******************************************************************************
* Copyright (c) 2006-2010 eBay Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*******************************************************************************/
package org.ebayopensource.turmeric.plugins.maven.utils;
import org.ebayopensource.turmeric.tools.codegen.InputOptions;
/**
* @author yayu
*
*/
public final class TurmericMavenConstants {
    // Keys looked up in the project's POM properties.
    public static final String POM_PROP_KEY_SERVICE_GROUP_ID = "serviceGroupID";
    public static final String POM_PROP_KEY_SERVICE_NAME = "serviceName";
    public static final String POM_PROP_KEY_PROJECT_TYPE = "projectType";

    /** The kinds of Turmeric projects this plugin can operate on. */
    public static enum ProjectType {
        INTERFACE, IMPLEMENTATION, CONSUMER, TYPELIBRARY, ERRORLIBRARY
    }

    // Codegen genType values invoked by the plugin.
    public static final String GENTYPE_DISPATCHER_FOR_MAVEN = "DispatcherForMaven";
    public static final String GENTYPE_CLIENT_NO_CONFIG = "ClientNoConfig";
    public static final String GENTYPE_CLEAN_BUILD_TYPE_LIBRARY = "genTypeCleanBuildTypeLibrary";
    public static final String GENTYPE_COMMAND_LINE_ALL = "genTypeCommandLineAll";
    /* Hardcoded paths and directories are not how things are done in
     * a maven plugin, use Mojo parameters. always.
     *
    public static final String FOLDER_GEN_META_SRC = "gen-meta-src";
    public static final String FOLDER_META_SRC = "meta-src";
    public static final String FOLDER_GEN_SRC = "gen-src";
    public static final String FOLDER_SRC = "gen-src";
    public static final String FOLDER_GEN_SRC_SERVICE = FOLDER_GEN_SRC + "/service";
    public static final String FOLDER_GEN_SRC_CLIENT = FOLDER_GEN_SRC + "/cilent";
    public static final String[] SRC_FOLDERS_INTERFACE = {FOLDER_GEN_META_SRC, FOLDER_GEN_SRC_CLIENT};
    public static final String[] SRC_FOLDERS_IMPL = {FOLDER_GEN_META_SRC, FOLDER_GEN_SRC_SERVICE};
    public static final String[] SRC_FOLDERS_TYPELIB = {FOLDER_GEN_META_SRC, FOLDER_META_SRC, FOLDER_SRC};
    public static final String[] SRC_FOLDERS_ERRORLIB = {FOLDER_META_SRC, FOLDER_SRC};
    */

    // Command-line switches passed through to the codegen tool.
    //FIXME use the codegen-tools.InputOptions as soon as this new option available in the repo.
    public static final String PARAM_ERROR_LIBRARY_NAME = "-errorlibname"; //the name of the error library
    public static final String PARAM_GENTYPE = "-genType";
    public static final String PARAM_NAMESPACE = "-namespace";
    public static final String PARAM_INTERFACE = "-interface";
    public static final String PARAM_ADMIN_NAME = "-adminname";
    public static final String PARAM_SERVICE_NAME = "-serviceName";
    public static final String PARAM_LIB_NAME = "-libname";
    //public static final String PARAM_SCV = "-scv";
    public static final String PARAM_STAGING = "-staging";
    public static final String PARAM_DEPENDENT_TYPE_LIBS = "-dependentTypeLibs";
    public static final String PARAM_SICN = "-sicn";
    /**
     * The need to specify the project root is bad code smell.
     * The Generators that require this concept, should be changed
     * to to accept fully qualified paths from the project's mojo parameters,
     * not using hardcoded paths based on project.basedir.
     * @deprecated Fix generator to use Mojo.parameters, not hardcoded paths.
     */
    @Deprecated
    public static final String PARAM_PR = "-pr";
    public static final String PARAM_SRC = "-src";
    public static final String PARAM_DEST = "-dest";
    public static final String PARAM_BIN = "-bin";
    public static final String PARAM_CN = "-cn";
    public static final String PARAM_SL = "-sl";
    /**
     * @deprecated Use {@link InputOptions#OPT_META_SRC_GEN_DIR} instead
     */
    @Deprecated
    public static final String PARAM_MDEST = InputOptions.OPT_META_SRC_GEN_DIR;
    public static final String PARAM_WSDL = "-wsdl";
    public static final String PARAM_GIP = "-gip";
    public static final String PARAM_GIN = "-gin";
    public static final String PARAM_AVI = "-avi";
    public static final String PARAM_JDEST = "-jdest";
    public static final String PARAM_UIJ = "-uij";
    public static final String PARAM_GT = "-gt"; //for generating the unit test of gentype serviceFromWSDLImpl
    public static final String PARAM_DOMAIN = "-domain"; //command separated list of error domains

    /**
     * Constants holder — not instantiable.
     */
    private TurmericMavenConstants() {
        super();
    }

    // Belt-and-braces: also forbid cloning of the (never-instantiated) holder.
    @Override
    protected Object clone() throws CloneNotSupportedException {
        throw new CloneNotSupportedException();
    }
}
| apache-2.0 |
kebe0325/MyHelloWorld | app/src/androidTest/java/com/helloworld/myhelloworld/ApplicationTest.java | 358 | package com.helloworld.myhelloworld;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
/** Minimal instrumentation test case scaffold for the default Application class. */
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        super(Application.class);
    }
} | apache-2.0 |
arivanajoki/keycloak | model/api/src/main/java/org/keycloak/migration/MigrationModelManager.java | 1750 | package org.keycloak.migration;
import org.jboss.logging.Logger;
import org.keycloak.migration.migrators.MigrateTo1_3_0;
import org.keycloak.migration.migrators.MigrateTo1_4_0;
import org.keycloak.migration.migrators.MigrationTo1_2_0_CR1;
import org.keycloak.models.KeycloakSession;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
public class MigrationModelManager {
    private static Logger logger = Logger.getLogger(MigrationModelManager.class);

    /**
     * Brings the stored model version up to MigrationModel.LATEST_VERSION by running,
     * in order, each migrator whose target version is newer than the stored version.
     * A null stored version is treated as older than everything, so all migrators run
     * (but the per-step "migrating" debug messages are suppressed in that case).
     */
    public static void migrate(KeycloakSession session) {
        MigrationModel model = session.realms().getMigrationModel();
        String storedVersion = model.getStoredVersion();
        // Already current — nothing to do.
        if (MigrationModel.LATEST_VERSION.equals(storedVersion)) return;
        ModelVersion stored = null;
        if (storedVersion != null) {
            stored = new ModelVersion(storedVersion);
        }
        if (stored == null || stored.lessThan(MigrationTo1_2_0_CR1.VERSION)) {
            if (stored != null) {
                logger.debug("Migrating older model to 1.2.0.CR1 updates");
            }
            new MigrationTo1_2_0_CR1().migrate(session);
        }
        if (stored == null || stored.lessThan(MigrateTo1_3_0.VERSION)) {
            if (stored != null) {
                logger.debug("Migrating older model to 1.3.0 updates");
            }
            new MigrateTo1_3_0().migrate(session);
        }
        if (stored == null || stored.lessThan(MigrateTo1_4_0.VERSION)) {
            if (stored != null) {
                logger.debug("Migrating older model to 1.4.0 updates");
            }
            new MigrateTo1_4_0().migrate(session);
        }
        // Record that the model is now fully migrated.
        model.setStoredVersion(MigrationModel.LATEST_VERSION);
    }
}
| apache-2.0 |
apache/jena | jena-cmds/src/main/java/riotcmd/nquads.java | 1415 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package riotcmd;
import org.apache.jena.atlas.lib.Lib ;
import org.apache.jena.riot.Lang ;
import org.apache.jena.riot.RDFLanguages ;
/** Run the N-Quads parser - and produce N-Quads */
public class nquads extends CmdLangParse {

    /** Command-line entry point: delegates straight to the shared parser driver. */
    public static void main(String... argv) {
        new nquads(argv).mainRun();
    }

    protected nquads(String[] argv) {
        super(argv);
    }

    /** The short class name doubles as the command name in messages. */
    @Override
    protected String getCommandName() {
        return Lib.classShortName(nquads.class);
    }

    /** N-Quads is the default input language for this command. */
    @Override
    protected Lang dftLang() {
        return RDFLanguages.NQUADS;
    }
}
| apache-2.0 |
marubinotto/Piggydb | src/test/java/marubinotto/piggydb/model/authorization/CreateFragmentTest.java | 753 | package marubinotto.piggydb.model.authorization;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.fail;
import marubinotto.piggydb.model.entity.RawFragment;
import marubinotto.piggydb.model.exception.AuthorizationException;
import org.junit.Before;
import org.junit.Test;
/** Authorization checks around fragment creation. */
public class CreateFragmentTest extends AuthorizationTestBase {

    @Before
    public void given() throws Exception {
        super.given();
    }

    /** A plain user may create a fragment (no exception expected). */
    @Test
    public void plainUserCan() throws Exception {
        new RawFragment(getPlainUser());
    }

    /** A viewer must be rejected with the expected authorization error. */
    @Test
    public void viewerCannot() throws Exception {
        try {
            new RawFragment(getViewer());
            fail();
        } catch (AuthorizationException expected) {
            assertEquals(AuthErrors.toCreateFragment(), expected);
        }
    }
}
| apache-2.0 |
apache/xmlbeans | src/test/java/org/w3c/domts/level2/core/elementgetattributenodens03.java | 2351 | /*
This Java source file was generated by test-to-java.xsl
and is a derived work from the source document.
The source document contained the following notice:
Copyright (c) 2001-2003 World Wide Web Consortium,
(Massachusetts Institute of Technology, Institut National de
Recherche en Informatique et en Automatique, Keio University). All
Rights Reserved. This program is distributed under the W3C's Software
Intellectual Property License. This program is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.
See W3C License http://www.w3.org/Consortium/Legal/ for more details.
*/
package org.w3c.domts.level2.core;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.w3c.domts.DOMTest.load;
/**
 * The method getAttributeNodeNS retrieves an Attr node by local name and namespace URI.
 * Using the getAttributeNodeNS, retrieve and verify the value of the default
 * attribute node.
 *
 * @see <a href="http://www.w3.org/TR/DOM-Level-2-Core/core#ID-ElGetAtNodeNS">http://www.w3.org/TR/DOM-Level-2-Core/core#ID-ElGetAtNodeNS</a>
 * @see <a href="http://www.w3.org/Bugs/Public/show_bug.cgi?id=259">http://www.w3.org/Bugs/Public/show_bug.cgi?id=259</a>
 */
public class elementgetattributenodens03 {

    @Test
    @Disabled
    public void testRun() throws Throwable {
        // A null namespace selects attributes that carry no namespace.
        String noNamespace = null;
        Document doc = load("staffNS", false);
        NodeList employees = doc.getElementsByTagNameNS("http://www.nist.gov", "employee");
        Element secondEmployee = (Element) employees.item(1);
        Attr defaultAttr = secondEmployee.getAttributeNodeNS(noNamespace, "defaultAttr");
        String value = defaultAttr.getNodeValue();
        assertEquals("defaultVal", value, "elementgetattributenodens03");
    }

    /**
     * Gets URI that identifies the test
     *
     * @return uri identifier of test
     */
    public String getTargetURI() {
        return "http://www.w3.org/2001/DOM-Test-Suite/level2/core/elementgetattributenodens03";
    }
}
| apache-2.0 |
incodehq/isis | core/runtime/src/main/java/org/apache/isis/core/runtime/runner/opts/OptionHandlerAppManifest.java | 3075 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.runtime.runner.opts;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.isis.core.commons.configbuilder.IsisConfigurationBuilder;
import org.apache.isis.core.runtime.optionhandler.BootPrinter;
import org.apache.isis.core.runtime.optionhandler.OptionHandlerAbstract;
import org.apache.isis.core.runtime.runner.Constants;
import org.apache.isis.core.runtime.system.SystemConstants;
import static org.apache.isis.core.runtime.runner.Constants.APP_MANIFEST_LONG_OPT;
import static org.apache.isis.core.runtime.runner.Constants.APP_MANIFEST_OPT;
/** Handles the command-line option naming the application's AppManifest class. */
public class OptionHandlerAppManifest extends OptionHandlerAbstract {

    private static final Logger LOG = LoggerFactory.getLogger(OptionHandlerAppManifest.class);

    /** Value of the app-manifest option, when it was supplied on the command line. */
    private String appManifestClassName;

    public OptionHandlerAppManifest() {
        super();
    }

    /** Registers the app-manifest option with the commons-cli parser. */
    @Override
    @SuppressWarnings("static-access")
    public void addOption(final Options options) {
        options.addOption(OptionBuilder
                .withArgName("app manifest").hasArg()
                .withLongOpt(APP_MANIFEST_LONG_OPT)
                .withDescription("fully qualified AppManifest class")
                .create(APP_MANIFEST_OPT));
    }

    /** Captures the option's value; never vetoes startup. */
    @Override
    public boolean handle(final CommandLine commandLine, final BootPrinter bootPrinter, final Options options) {
        appManifestClassName = commandLine.getOptionValue(Constants.APP_MANIFEST_OPT);
        return true;
    }

    /** Pushes the captured class name into configuration, when one was given. */
    @Override
    public void prime(final IsisConfigurationBuilder isisConfigurationBuilder) {
        if (appManifestClassName != null) {
            prime(isisConfigurationBuilder, SystemConstants.APP_MANIFEST_KEY, appManifestClassName);
        }
    }

    static void prime(IsisConfigurationBuilder isisConfigurationBuilder, String key, String value) {
        LOG.info("priming: {}={}", key, value);
        isisConfigurationBuilder.add(key, value);
    }
}
| apache-2.0 |
mehdi149/OF_COMPILER_0.1 | src/org/openflow/protocol/OFEchoRequest.java | 1105 | package org.openflow.protocol;
import java.nio.ByteBuffer;
import org.openflow.util.U16;
/**
 * Represents an ofp_echo_request message
 *
 * @author Rob Sherwood (rob.sherwood@stanford.edu)
 */
public class OFEchoRequest extends OFMessage {
    public static int MINIMUM_LENGTH = 8;

    // Opaque data echoed back by the peer; null when the message carries none.
    byte[] payload;

    /** Builds an empty echo request of the minimum wire length. */
    public OFEchoRequest() {
        super();
        this.type = OFType.ECHO_REQUEST;
        this.length = U16.t(MINIMUM_LENGTH);
    }

    /** Reads the header via the superclass, then any trailing payload bytes. */
    public void readFrom(ByteBuffer bb) {
        super.readFrom(bb);
        int payloadLength = this.getLengthU() - MINIMUM_LENGTH;
        if (payloadLength > 0) {
            this.payload = new byte[payloadLength];
            bb.get(this.payload);
        }
    }

    /**
     * @return the payload
     */
    public byte[] getPayload() {
        return payload;
    }

    /**
     * @param payload
     *            the payload to set
     */
    public void setPayload(byte[] payload) {
        this.payload = payload;
    }

    /** Writes the header via the superclass, then the payload when present. */
    public void writeTo(ByteBuffer bb) {
        super.writeTo(bb);
        if (payload != null) {
            bb.put(payload);
        }
    }
}
| apache-2.0 |
GovernmentCommunicationsHeadquarters/Gaffer | store-implementation/proxy-store/src/test/java/uk/gov/gchq/gaffer/proxystore/SingleUseProxyStore.java | 2773 | /*
* Copyright 2016-2020 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.gchq.gaffer.proxystore;
import uk.gov.gchq.gaffer.commonutil.CommonTestConstants;
import uk.gov.gchq.gaffer.commonutil.StreamUtil;
import uk.gov.gchq.gaffer.rest.RestApiTestClient;
import uk.gov.gchq.gaffer.rest.service.v2.RestApiV2TestClient;
import uk.gov.gchq.gaffer.store.StoreException;
import uk.gov.gchq.gaffer.store.StoreProperties;
import uk.gov.gchq.gaffer.store.schema.Schema;
import java.io.File;
import java.io.IOException;
/**
 * An extension of {@link ProxyStore} that starts a REST API backed by a
 * {@link SingleUseProxyStore} with the provided schema. This store
 * is useful for testing when there is no actual REST API to connect a ProxyStore to.
 * Each time this store is initialised it will reset the underlying graph, delete
 * any elements that had been added and initialise it with the new schema. The
 * server will not be restarted every time.
 * <p>
 * After using this store you must remember to call
 * SingleUseProxyStore.cleanUp to stop the server and delete the temporary folder.
 */
public abstract class SingleUseProxyStore extends ProxyStore {
    public static final File TEST_FOLDER = CommonTestConstants.TMP_DIRECTORY;

    private static final RestApiTestClient CLIENT = new RestApiV2TestClient();

    /** Spins up (or resets) the backing REST API, then initialises the proxy with an empty schema. */
    @Override
    public void initialise(final String graphId, final Schema schema, final StoreProperties proxyProps) throws StoreException {
        startMapStoreRestApi(schema);
        super.initialise(graphId, new Schema(), proxyProps);
    }

    /** Reinitialises the delegate graph behind the REST API with the given schema. */
    protected void startMapStoreRestApi(final Schema schema) throws StoreException {
        final StoreProperties delegateProps = StoreProperties.loadStoreProperties(
                StreamUtil.openStream(getClass(), getPathToDelegateProperties()));

        try {
            CLIENT.reinitialiseGraph(TEST_FOLDER, schema, delegateProps);
        } catch (final IOException e) {
            throw new StoreException("Unable to reinitialise delegate graph", e);
        }
    }

    /** Stops the shared server and removes the temporary working folder. */
    public static void cleanUp() {
        CLIENT.stopServer();
        CLIENT.cleanUpTempFiles(TEST_FOLDER);
    }

    protected abstract String getPathToDelegateProperties();
}
| apache-2.0 |
haikuowuya/android_system_code | src/com/android/internal/content/PackageMonitor.java | 14593 | /*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.internal.content;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.Uri;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.UserHandle;
import java.util.HashSet;
/**
 * Helper class for monitoring the state of packages: adding, removing,
 * updating, and disappearing and reappearing on the SD card.
 *
 * <p>Subclasses override the {@code on*} callbacks they care about; this base
 * class decodes the package-related broadcast intents and dispatches them.
 */
public abstract class PackageMonitor extends android.content.BroadcastReceiver {
    // Shared intent filters, populated once in the static initializer below.
    static final IntentFilter sPackageFilt = new IntentFilter();
    static final IntentFilter sNonDataFilt = new IntentFilter();
    static final IntentFilter sExternalFilt = new IntentFilter();

    // Guards lazy creation of the shared background thread/handler.
    static final Object sLock = new Object();
    static HandlerThread sBackgroundThread;
    static Handler sBackgroundHandler;

    static {
        sPackageFilt.addAction(Intent.ACTION_PACKAGE_ADDED);
        sPackageFilt.addAction(Intent.ACTION_PACKAGE_REMOVED);
        sPackageFilt.addAction(Intent.ACTION_PACKAGE_CHANGED);
        sPackageFilt.addAction(Intent.ACTION_QUERY_PACKAGE_RESTART);
        sPackageFilt.addAction(Intent.ACTION_PACKAGE_RESTARTED);
        sPackageFilt.addAction(Intent.ACTION_UID_REMOVED);
        sPackageFilt.addDataScheme("package");
        sNonDataFilt.addAction(Intent.ACTION_UID_REMOVED);
        sNonDataFilt.addAction(Intent.ACTION_USER_STOPPED);
        sExternalFilt.addAction(Intent.ACTION_EXTERNAL_APPLICATIONS_AVAILABLE);
        sExternalFilt.addAction(Intent.ACTION_EXTERNAL_APPLICATIONS_UNAVAILABLE);
    }

    // Packages currently mid-update. Note: nothing in this class ever adds to
    // this set (see the commented-out add in onReceive), so isPackageUpdating
    // currently always returns false.
    final HashSet<String> mUpdatingPackages = new HashSet<String>();

    Context mRegisteredContext;
    Handler mRegisteredHandler;

    // Scratch state describing the change currently being dispatched; only
    // valid for the duration of a single onReceive() call.
    String[] mDisappearingPackages;
    String[] mAppearingPackages;
    String[] mModifiedPackages;
    int mChangeType;
    int mChangeUserId = UserHandle.USER_NULL;
    boolean mSomePackagesChanged;

    // Reusable single-element array for the common one-package case.
    String[] mTempArray = new String[1];

    /** Registers for package broadcasts for all users; see the 4-arg overload. */
    public void register(Context context, Looper thread, boolean externalStorage) {
        register(context, thread, null, externalStorage);
    }

    /**
     * Registers this receiver for package-change broadcasts.
     *
     * @param context         context used to register the receiver.
     * @param thread          looper to dispatch callbacks on; when null a shared
     *                        background thread is lazily created and used.
     * @param user            user to register for, or null to use the plain
     *                        registerReceiver() calls.
     * @param externalStorage whether to also monitor availability of apps on
     *                        external storage.
     * @throws IllegalStateException if already registered.
     */
    public void register(Context context, Looper thread, UserHandle user,
            boolean externalStorage) {
        if (mRegisteredContext != null) {
            throw new IllegalStateException("Already registered");
        }
        mRegisteredContext = context;
        if (thread == null) {
            synchronized (sLock) {
                if (sBackgroundThread == null) {
                    sBackgroundThread = new HandlerThread("PackageMonitor",
                            android.os.Process.THREAD_PRIORITY_BACKGROUND);
                    sBackgroundThread.start();
                    sBackgroundHandler = new Handler(sBackgroundThread.getLooper());
                }
                mRegisteredHandler = sBackgroundHandler;
            }
        } else {
            mRegisteredHandler = new Handler(thread);
        }
        if (user != null) {
            context.registerReceiverAsUser(this, user, sPackageFilt, null, mRegisteredHandler);
            context.registerReceiverAsUser(this, user, sNonDataFilt, null, mRegisteredHandler);
            if (externalStorage) {
                context.registerReceiverAsUser(this, user, sExternalFilt, null,
                        mRegisteredHandler);
            }
        } else {
            context.registerReceiver(this, sPackageFilt, null, mRegisteredHandler);
            context.registerReceiver(this, sNonDataFilt, null, mRegisteredHandler);
            if (externalStorage) {
                context.registerReceiver(this, sExternalFilt, null, mRegisteredHandler);
            }
        }
    }

    /** @return the handler callbacks are dispatched on; null before register(). */
    public Handler getRegisteredHandler() {
        return mRegisteredHandler;
    }

    /**
     * Unregisters the receiver.
     *
     * @throws IllegalStateException if not currently registered.
     */
    public void unregister() {
        if (mRegisteredContext == null) {
            throw new IllegalStateException("Not registered");
        }
        mRegisteredContext.unregisterReceiver(this);
        mRegisteredContext = null;
    }

    //not yet implemented
    boolean isPackageUpdating(String packageName) {
        synchronized (mUpdatingPackages) {
            return mUpdatingPackages.contains(packageName);
        }
    }

    /** Hook invoked at the start of every broadcast dispatch. */
    public void onBeginPackageChanges() {
    }

    /**
     * Called when a package is really added (and not replaced).
     */
    public void onPackageAdded(String packageName, int uid) {
    }

    /**
     * Called when a package is really removed (and not replaced).
     */
    public void onPackageRemoved(String packageName, int uid) {
    }

    /**
     * Called when a package is really removed (and not replaced) for
     * all users on the device.
     */
    public void onPackageRemovedAllUsers(String packageName, int uid) {
    }

    /** Called when a package removal turns out to be the first half of an update. */
    public void onPackageUpdateStarted(String packageName, int uid) {
    }

    /** Called when the re-add that completes a package update arrives. */
    public void onPackageUpdateFinished(String packageName, int uid) {
    }

    /** Called when components of a package are enabled/disabled or otherwise changed. */
    public void onPackageChanged(String packageName, int uid, String[] components) {
    }

    /**
     * Hook for ACTION_QUERY_PACKAGE_RESTART / ACTION_PACKAGE_RESTARTED.
     * Return true (when doit is false) to indicate a restart is acceptable.
     */
    public boolean onHandleForceStop(Intent intent, String[] packages, int uid, boolean doit) {
        return false;
    }

    /** Hook for ACTION_USER_STOPPED broadcasts carrying a user handle. */
    public void onHandleUserStop(Intent intent, int userHandle) {
    }

    /** Hook for ACTION_UID_REMOVED broadcasts. */
    public void onUidRemoved(int uid) {
    }

    /** Hook for packages becoming available (e.g. SD card mounted). */
    public void onPackagesAvailable(String[] packages) {
    }

    /** Hook for packages becoming unavailable (e.g. SD card removed). */
    public void onPackagesUnavailable(String[] packages) {
    }

    // Change-type codes reported by isPackageAppearing()/isPackageDisappearing().
    public static final int PACKAGE_UNCHANGED = 0;
    public static final int PACKAGE_UPDATING = 1;
    public static final int PACKAGE_TEMPORARY_CHANGE = 2;
    public static final int PACKAGE_PERMANENT_CHANGE = 3;

    /**
     * Called when a package disappears for any reason.
     */
    public void onPackageDisappeared(String packageName, int reason) {
    }

    /**
     * Called when a package appears for any reason.
     */
    public void onPackageAppeared(String packageName, int reason) {
    }

    /** Called when a package was modified without appearing or disappearing. */
    public void onPackageModified(String packageName) {
    }

    /** @return whether the current broadcast represents a substantive change. */
    public boolean didSomePackagesChange() {
        return mSomePackagesChanged;
    }

    /**
     * @return the change-type code if the named package is appearing in the
     *         current broadcast, otherwise PACKAGE_UNCHANGED.
     */
    public int isPackageAppearing(String packageName) {
        if (mAppearingPackages != null) {
            for (int i=mAppearingPackages.length-1; i>=0; i--) {
                if (packageName.equals(mAppearingPackages[i])) {
                    return mChangeType;
                }
            }
        }
        return PACKAGE_UNCHANGED;
    }

    public boolean anyPackagesAppearing() {
        return mAppearingPackages != null;
    }

    /**
     * @return the change-type code if the named package is disappearing in the
     *         current broadcast, otherwise PACKAGE_UNCHANGED.
     */
    public int isPackageDisappearing(String packageName) {
        if (mDisappearingPackages != null) {
            for (int i=mDisappearingPackages.length-1; i>=0; i--) {
                if (packageName.equals(mDisappearingPackages[i])) {
                    return mChangeType;
                }
            }
        }
        return PACKAGE_UNCHANGED;
    }

    public boolean anyPackagesDisappearing() {
        return mDisappearingPackages != null;
    }

    /** @return whether the named package is modified in the current broadcast. */
    public boolean isPackageModified(String packageName) {
        if (mModifiedPackages != null) {
            for (int i=mModifiedPackages.length-1; i>=0; i--) {
                if (packageName.equals(mModifiedPackages[i])) {
                    return true;
                }
            }
        }
        return false;
    }

    /** Invoked once per broadcast when some substantive package change occurred. */
    public void onSomePackagesChanged() {
    }

    /** Hook invoked at the end of every broadcast dispatch. */
    public void onFinishPackageChanges() {
    }

    /** @return the user the current broadcast applies to; only valid inside dispatch. */
    public int getChangingUserId() {
        return mChangeUserId;
    }

    // Extracts the package name from the intent's "package:" data URI.
    String getPackageName(Intent intent) {
        Uri uri = intent.getData();
        String pkg = uri != null ? uri.getSchemeSpecificPart() : null;
        return pkg;
    }

    /**
     * Decodes the broadcast, records per-dispatch state (appearing/disappearing/
     * modified packages, change type, user id) and invokes the relevant hooks.
     */
    @Override
    public void onReceive(Context context, Intent intent) {
        mChangeUserId = intent.getIntExtra(Intent.EXTRA_USER_HANDLE,
                UserHandle.USER_NULL);
        if (mChangeUserId == UserHandle.USER_NULL) {
            throw new IllegalArgumentException(
                    "Intent broadcast does not contain user handle: " + intent);
        }
        onBeginPackageChanges();

        mDisappearingPackages = mAppearingPackages = null;
        mSomePackagesChanged = false;

        String action = intent.getAction();
        if (Intent.ACTION_PACKAGE_ADDED.equals(action)) {
            String pkg = getPackageName(intent);
            int uid = intent.getIntExtra(Intent.EXTRA_UID, 0);
            // We consider something to have changed regardless of whether
            // this is just an update, because the update is now finished
            // and the contents of the package may have changed.
            mSomePackagesChanged = true;
            if (pkg != null) {
                mAppearingPackages = mTempArray;
                mTempArray[0] = pkg;
                if (intent.getBooleanExtra(Intent.EXTRA_REPLACING, false)) {
                    // Second half of an update: finish rather than report an add.
                    mModifiedPackages = mTempArray;
                    mChangeType = PACKAGE_UPDATING;
                    onPackageUpdateFinished(pkg, uid);
                    onPackageModified(pkg);
                } else {
                    mChangeType = PACKAGE_PERMANENT_CHANGE;
                    onPackageAdded(pkg, uid);
                }
                onPackageAppeared(pkg, mChangeType);
                if (mChangeType == PACKAGE_UPDATING) {
                    synchronized (mUpdatingPackages) {
                        mUpdatingPackages.remove(pkg);
                    }
                }
            }
        } else if (Intent.ACTION_PACKAGE_REMOVED.equals(action)) {
            String pkg = getPackageName(intent);
            int uid = intent.getIntExtra(Intent.EXTRA_UID, 0);
            if (pkg != null) {
                mDisappearingPackages = mTempArray;
                mTempArray[0] = pkg;
                if (intent.getBooleanExtra(Intent.EXTRA_REPLACING, false)) {
                    // First half of an update: report start, not removal.
                    mChangeType = PACKAGE_UPDATING;
                    synchronized (mUpdatingPackages) {
                        //not used for now
                        //mUpdatingPackages.add(pkg);
                    }
                    onPackageUpdateStarted(pkg, uid);
                } else {
                    mChangeType = PACKAGE_PERMANENT_CHANGE;
                    // We only consider something to have changed if this is
                    // not a replace; for a replace, we just need to consider
                    // it when it is re-added.
                    mSomePackagesChanged = true;
                    onPackageRemoved(pkg, uid);
                    if (intent.getBooleanExtra(Intent.EXTRA_REMOVED_FOR_ALL_USERS, false)) {
                        onPackageRemovedAllUsers(pkg, uid);
                    }
                }
                onPackageDisappeared(pkg, mChangeType);
            }
        } else if (Intent.ACTION_PACKAGE_CHANGED.equals(action)) {
            String pkg = getPackageName(intent);
            int uid = intent.getIntExtra(Intent.EXTRA_UID, 0);
            String[] components = intent.getStringArrayExtra(
                    Intent.EXTRA_CHANGED_COMPONENT_NAME_LIST);
            if (pkg != null) {
                mModifiedPackages = mTempArray;
                mTempArray[0] = pkg;
                onPackageChanged(pkg, uid, components);
                // XXX Don't want this to always cause mSomePackagesChanged,
                // since it can happen a fair amount.
                onPackageModified(pkg);
            }
        } else if (Intent.ACTION_QUERY_PACKAGE_RESTART.equals(action)) {
            mDisappearingPackages = intent.getStringArrayExtra(Intent.EXTRA_PACKAGES);
            mChangeType = PACKAGE_TEMPORARY_CHANGE;
            boolean canRestart = onHandleForceStop(intent,
                    mDisappearingPackages,
                    intent.getIntExtra(Intent.EXTRA_UID, 0), false);
            if (canRestart) setResultCode(Activity.RESULT_OK);
        } else if (Intent.ACTION_PACKAGE_RESTARTED.equals(action)) {
            mDisappearingPackages = new String[] {getPackageName(intent)};
            mChangeType = PACKAGE_TEMPORARY_CHANGE;
            onHandleForceStop(intent, mDisappearingPackages,
                    intent.getIntExtra(Intent.EXTRA_UID, 0), true);
        } else if (Intent.ACTION_UID_REMOVED.equals(action)) {
            onUidRemoved(intent.getIntExtra(Intent.EXTRA_UID, 0));
        } else if (Intent.ACTION_USER_STOPPED.equals(action)) {
            if (intent.hasExtra(Intent.EXTRA_USER_HANDLE)) {
                onHandleUserStop(intent, intent.getIntExtra(Intent.EXTRA_USER_HANDLE, 0));
            }
        } else if (Intent.ACTION_EXTERNAL_APPLICATIONS_AVAILABLE.equals(action)) {
            String[] pkgList = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
            mAppearingPackages = pkgList;
            mChangeType = PACKAGE_TEMPORARY_CHANGE;
            mSomePackagesChanged = true;
            if (pkgList != null) {
                onPackagesAvailable(pkgList);
                for (int i=0; i<pkgList.length; i++) {
                    onPackageAppeared(pkgList[i], PACKAGE_TEMPORARY_CHANGE);
                }
            }
        } else if (Intent.ACTION_EXTERNAL_APPLICATIONS_UNAVAILABLE.equals(action)) {
            String[] pkgList = intent.getStringArrayExtra(Intent.EXTRA_CHANGED_PACKAGE_LIST);
            mDisappearingPackages = pkgList;
            mChangeType = PACKAGE_TEMPORARY_CHANGE;
            mSomePackagesChanged = true;
            if (pkgList != null) {
                onPackagesUnavailable(pkgList);
                for (int i=0; i<pkgList.length; i++) {
                    onPackageDisappeared(pkgList[i], PACKAGE_TEMPORARY_CHANGE);
                }
            }
        }

        if (mSomePackagesChanged) {
            onSomePackagesChanged();
        }
        onFinishPackageChanges();
        mChangeUserId = UserHandle.USER_NULL;
    }
}
| apache-2.0 |
ksoichiro/AndroidFormEnhancer | androidformenhancer-tests/src/com/androidformenhancer/test/VarietyForm.java | 2042 | /*
* Copyright 2012 Soichiro Kashima
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.androidformenhancer.test;
import com.androidformenhancer.annotation.AlphaNum;
import com.androidformenhancer.annotation.Digits;
import com.androidformenhancer.annotation.Email;
import com.androidformenhancer.annotation.IntRange;
import com.androidformenhancer.annotation.IntType;
import com.androidformenhancer.annotation.Katakana;
import com.androidformenhancer.annotation.MaxLength;
import com.androidformenhancer.annotation.MaxNumOfDigits;
import com.androidformenhancer.annotation.MaxValue;
import com.androidformenhancer.annotation.MinValue;
import com.androidformenhancer.annotation.Multibyte;
import com.androidformenhancer.annotation.NumOfDigits;
import com.androidformenhancer.annotation.Required;
import com.androidformenhancer.annotation.Widget;
import com.androidformenhancer.annotation.WidgetValue;
/**
 * Form holder covering a variety of value kinds. Every field is a raw
 * {@code String} bound to a text-field widget via {@link Widget}; the names
 * suggest the target primitive type (conversion/validation presumably happens
 * in the framework, not in this class -- it holds no logic of its own).
 *
 * @author Soichiro Kashima
 */
public class VarietyForm {
    @Widget(id = R.id.textfield_name)
    public String stringType;
    @Widget(id = R.id.textfield_int)
    public String intType;
    @Widget(id = R.id.textfield_long)
    public String longType;
    @Widget(id = R.id.textfield_float)
    public String floatType;
    @Widget(id = R.id.textfield_double)
    public String doubleType;
    @Widget(id = R.id.textfield_boolean)
    public String booleanType;
    @Widget(id = R.id.textfield_short)
    public String shortType;
    @Widget(id = R.id.textfield_char)
    public String charType;
}
| apache-2.0 |
marques-work/gocd | domain/src/main/java/com/thoughtworks/go/validation/PresenceValidator.java | 1132 | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.validation;
import com.thoughtworks.go.domain.materials.ValidationBean;
import org.apache.commons.lang3.StringUtils;
class PresenceValidator extends Validator<String> {
public PresenceValidator(String errorMessage) {
super(errorMessage);
}
@Override
public ValidationBean validate(String value) {
if (StringUtils.isBlank(value)){
return ValidationBean.notValid(errorMessage);
} else {
return ValidationBean.valid();
}
}
}
| apache-2.0 |
ascherbakoff/ignite | modules/core/src/main/java/org/apache/ignite/configuration/ClientConfiguration.java | 17574 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.configuration;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.ForkJoinPool;
import javax.cache.configuration.Factory;
import javax.net.ssl.SSLContext;
import org.apache.ignite.client.ClientAddressFinder;
import org.apache.ignite.client.SslMode;
import org.apache.ignite.client.SslProtocol;
import org.apache.ignite.internal.client.thin.TcpIgniteClient;
import org.apache.ignite.internal.util.typedef.internal.S;
/**
* {@link TcpIgniteClient} configuration.
*/
@SuppressWarnings("AssignmentOrReturnOfFieldWithMutableType")
public final class ClientConfiguration implements Serializable {
/** Serial version uid. */
private static final long serialVersionUID = 0L;

/** @serial Server addresses; {@code null} until set. */
private String[] addrs = null;

/** Server addresses finder. Transient: not part of the serialized form. */
private transient ClientAddressFinder addrFinder;

/** @serial Tcp no delay (Nagle's algorithm disabled when {@code true}). */
private boolean tcpNoDelay = true;

/** @serial Timeout in milliseconds. 0 means infinite. */
private int timeout;

/** @serial Send buffer size in bytes. 0 means system default. */
private int sndBufSize = 32 * 1024;

/** @serial Receive buffer size in bytes. 0 means system default. */
private int rcvBufSize = 32 * 1024;

/** @serial Configuration for Ignite binary objects. */
private BinaryConfiguration binaryCfg;

/** @serial Ssl mode. Disabled by default. */
private SslMode sslMode = SslMode.DISABLED;

/** @serial Ssl client certificate key store path. */
private String sslClientCertKeyStorePath;

/** @serial Ssl client certificate key store password. */
private String sslClientCertKeyStorePwd;

/** @serial Ssl trust certificate key store path. */
private String sslTrustCertKeyStorePath;

/** @serial Ssl trust certificate key store password. */
private String sslTrustCertKeyStorePwd;

/** @serial Ssl client certificate key store type. */
private String sslClientCertKeyStoreType;

/** @serial Ssl trust certificate key store type. */
private String sslTrustCertKeyStoreType;

/** @serial Ssl key algorithm. */
private String sslKeyAlgorithm;

/** @serial Flag indicating if certificate validation errors should be ignored. */
private boolean sslTrustAll;

/** @serial Ssl protocol. */
private SslProtocol sslProto = SslProtocol.TLS;

/** @serial Ssl context factory. */
private Factory<SSLContext> sslCtxFactory;

/** @serial User name. */
private String userName;

/** @serial User password. */
private String userPwd;

/** User attributes. */
private Map<String, String> userAttrs;

/** Tx config. */
private ClientTransactionConfiguration txCfg = new ClientTransactionConfiguration();

/**
 * Whether partition awareness should be enabled. Enabled by default.
 */
private boolean partitionAwarenessEnabled = true;

/**
 * Reconnect throttling period (in milliseconds). There are no more than {@code reconnectThrottlingRetries}
 * attempts to reconnect will be made within {@code reconnectThrottlingPeriod} in case of connection loss.
 * Throttling is disabled if either {@code reconnectThrottlingRetries} or {@code reconnectThrottlingPeriod} is 0.
 */
private long reconnectThrottlingPeriod = 30_000L;

/** Reconnect throttling retries. See {@code reconnectThrottlingPeriod}. */
private int reconnectThrottlingRetries = 3;

/** Retry limit. 0 by default. */
private int retryLimit = 0;

/** Executor for async operations continuations. */
private Executor asyncContinuationExecutor;
/**
 * @return Host addresses (a defensive copy), or {@code null} if none have been set.
 */
public String[] getAddresses() {
    if (addrs != null)
        return Arrays.copyOf(addrs, addrs.length);

    return null;
}

/**
 * Set addresses of Ignite server nodes within a cluster. An address can be IPv4 address or hostname, with or
 * without port. If port is not set then Ignite will generate multiple addresses for default port range. See
 * {@link ClientConnectorConfiguration#DFLT_PORT}, {@link ClientConnectorConfiguration#DFLT_PORT_RANGE}.
 *
 * <p>Note: a {@code null} argument is silently ignored and leaves any previously
 * configured addresses unchanged; the given array is defensively copied.
 *
 * @param addrs Host addresses.
 * @return {@code this} for chaining.
 */
public ClientConfiguration setAddresses(String... addrs) {
    if (addrs != null)
        this.addrs = Arrays.copyOf(addrs, addrs.length);

    return this;
}

/**
 * @return Finder that finds server node addresses.
 */
public ClientAddressFinder getAddressesFinder() {
    return addrFinder;
}

/**
 * @param finder Finds server node addresses.
 * @return {@code this} for chaining.
 */
public ClientConfiguration setAddressesFinder(ClientAddressFinder finder) {
    addrFinder = finder;

    return this;
}
/**
* @return Whether Nagle's algorithm is enabled.
*/
public boolean isTcpNoDelay() {
return tcpNoDelay;
}
/**
 * @param tcpNoDelay Whether to enable the TCP_NODELAY socket option (enabling it disables Nagle's algorithm).
 * @return {@code this} for chaining.
 */
public ClientConfiguration setTcpNoDelay(boolean tcpNoDelay) {
        this.tcpNoDelay = tcpNoDelay;
        return this;
    }
/**
* @return Send/receive timeout in milliseconds.
*/
public int getTimeout() {
return timeout;
}
/**
* @param timeout Send/receive timeout in milliseconds.
*/
public ClientConfiguration setTimeout(int timeout) {
this.timeout = timeout;
return this;
}
/**
* @return Send buffer size.
*/
public int getSendBufferSize() {
return sndBufSize;
}
/**
* @param sndBufSize Send buffer size.
*/
public ClientConfiguration setSendBufferSize(int sndBufSize) {
this.sndBufSize = sndBufSize;
return this;
}
/**
 * @return Receive buffer size.
 */
public int getReceiveBufferSize() {
        return rcvBufSize;
    }
/**
 * @param rcvBufSize Receive buffer size.
 * @return {@code this} for chaining.
 */
public ClientConfiguration setReceiveBufferSize(int rcvBufSize) {
        this.rcvBufSize = rcvBufSize;
        return this;
    }
/**
* @return Configuration for Ignite Binary objects.
*/
public BinaryConfiguration getBinaryConfiguration() {
return binaryCfg;
}
/**
* @param binaryCfg Configuration for Ignite Binary objects.
*/
public ClientConfiguration setBinaryConfiguration(BinaryConfiguration binaryCfg) {
this.binaryCfg = binaryCfg;
return this;
}
/**
* @return SSL mode.
*/
public SslMode getSslMode() {
return sslMode;
}
/**
* @param sslMode SSL mode.
*/
public ClientConfiguration setSslMode(SslMode sslMode) {
this.sslMode = sslMode;
return this;
}
/**
* @return Ssl client certificate key store path.
*/
public String getSslClientCertificateKeyStorePath() {
return sslClientCertKeyStorePath;
}
/**
* @param newVal Ssl client certificate key store path.
*/
public ClientConfiguration setSslClientCertificateKeyStorePath(String newVal) {
sslClientCertKeyStorePath = newVal;
return this;
}
/**
* @return Ssl client certificate key store password.
*/
public String getSslClientCertificateKeyStorePassword() {
return sslClientCertKeyStorePwd;
}
/**
* @param newVal Ssl client certificate key store password.
*/
public ClientConfiguration setSslClientCertificateKeyStorePassword(String newVal) {
sslClientCertKeyStorePwd = newVal;
return this;
}
/**
* @return Ssl client certificate key store type.
*/
public String getSslClientCertificateKeyStoreType() {
return sslClientCertKeyStoreType;
}
/**
* @param newVal Ssl client certificate key store type.
*/
public ClientConfiguration setSslClientCertificateKeyStoreType(String newVal) {
sslClientCertKeyStoreType = newVal;
return this;
}
/**
* @return Ssl trust certificate key store path.
*/
public String getSslTrustCertificateKeyStorePath() {
return sslTrustCertKeyStorePath;
}
/**
* @param newVal Ssl trust certificate key store path.
*/
public ClientConfiguration setSslTrustCertificateKeyStorePath(String newVal) {
sslTrustCertKeyStorePath = newVal;
return this;
}
/**
* @return Ssl trust certificate key store password.
*/
public String getSslTrustCertificateKeyStorePassword() {
return sslTrustCertKeyStorePwd;
}
/**
* @param newVal Ssl trust certificate key store password.
*/
public ClientConfiguration setSslTrustCertificateKeyStorePassword(String newVal) {
sslTrustCertKeyStorePwd = newVal;
return this;
}
/**
* @return Ssl trust certificate key store type.
*/
public String getSslTrustCertificateKeyStoreType() {
return sslTrustCertKeyStoreType;
}
/**
* @param newVal Ssl trust certificate key store type.
*/
public ClientConfiguration setSslTrustCertificateKeyStoreType(String newVal) {
sslTrustCertKeyStoreType = newVal;
return this;
}
/**
* @return Ssl key algorithm.
*/
public String getSslKeyAlgorithm() {
return sslKeyAlgorithm;
}
/**
* @param newVal Ssl key algorithm.
*/
public ClientConfiguration setSslKeyAlgorithm(String newVal) {
sslKeyAlgorithm = newVal;
return this;
}
/**
* @return Flag indicating if certificate validation errors should be ignored.
*/
public boolean isSslTrustAll() {
return sslTrustAll;
}
/**
* @param newVal Flag indicating if certificate validation errors should be ignored.
*/
public ClientConfiguration setSslTrustAll(boolean newVal) {
sslTrustAll = newVal;
return this;
}
/**
* @return Ssl protocol.
*/
public SslProtocol getSslProtocol() {
return sslProto;
}
/**
* @param newVal Ssl protocol.
*/
public ClientConfiguration setSslProtocol(SslProtocol newVal) {
sslProto = newVal;
return this;
}
/**
* @return User name.
*/
public String getUserName() {
return userName;
}
/**
* @param newVal User name.
*/
public ClientConfiguration setUserName(String newVal) {
userName = newVal;
return this;
}
/**
* @return User password.
*/
public String getUserPassword() {
return userPwd;
}
/**
* @param newVal User password.
*/
public ClientConfiguration setUserPassword(String newVal) {
userPwd = newVal;
return this;
}
/**
* @return SSL Context Factory.
*/
public Factory<SSLContext> getSslContextFactory() {
return sslCtxFactory;
}
/**
* @param newVal SSL Context Factory.
*/
public ClientConfiguration setSslContextFactory(Factory<SSLContext> newVal) {
sslCtxFactory = newVal;
return this;
}
/**
* Gets transactions configuration.
*
* @return Transactions configuration.
*/
public ClientTransactionConfiguration getTransactionConfiguration() {
return txCfg;
}
/**
* Sets transactions configuration.
*
* @return {@code this} for chaining.
*/
public ClientConfiguration setTransactionConfiguration(ClientTransactionConfiguration txCfg) {
this.txCfg = txCfg;
return this;
}
/**
* Gets a value indicating whether partition awareness should be enabled.
* <p>
* Default is {@code true}: client sends requests directly to the primary node for the given cache key.
* To do so, connection is established to every known server node.
* <p>
* When {@code false}, only one connection is established at a given moment to a random server node.
*/
public boolean isPartitionAwarenessEnabled() {
return partitionAwarenessEnabled;
}
/**
* Sets a value indicating whether partition awareness should be enabled.
* <p>
* Default is {@code true}: client sends requests directly to the primary node for the given cache key.
* To do so, connection is established to every known server node.
* <p>
* When {@code false}, only one connection is established at a given moment to a random server node.
*
* @return {@code this} for chaining.
*/
public ClientConfiguration setPartitionAwarenessEnabled(boolean partitionAwarenessEnabled) {
this.partitionAwarenessEnabled = partitionAwarenessEnabled;
return this;
}
/**
* Gets reconnect throttling period.
*/
public long getReconnectThrottlingPeriod() {
return reconnectThrottlingPeriod;
}
/**
* Sets reconnect throttling period.
*
* @return {@code this} for chaining.
*/
public ClientConfiguration setReconnectThrottlingPeriod(long reconnectThrottlingPeriod) {
this.reconnectThrottlingPeriod = reconnectThrottlingPeriod;
return this;
}
/**
* Gets reconnect throttling retries.
*/
public int getReconnectThrottlingRetries() {
return reconnectThrottlingRetries;
}
/**
* Sets reconnect throttling retries.
*
* @return {@code this} for chaining.
*/
public ClientConfiguration setReconnectThrottlingRetries(int reconnectThrottlingRetries) {
this.reconnectThrottlingRetries = reconnectThrottlingRetries;
return this;
}
/**
* Get retry limit.
*/
public int getRetryLimit() {
return retryLimit;
}
/**
* Sets the retry limit. When a request fails due to a connection error, and multiple server connections
* are available, Ignite will retry the request on every connection. When this property is greater than zero,
* Ignite will limit the number of retries.
*
* @return {@code this} for chaining.
*/
public ClientConfiguration setRetryLimit(int retryLimit) {
this.retryLimit = retryLimit;
return this;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(ClientConfiguration.class, this);
}
/**
* Returns user attributes which can be used on server node.
*
* @return User attributes.
*/
public Map<String, String> getUserAttributes() {
return userAttrs;
}
/**
* Sets user attributes which can be used to send additional info to the server nodes.
*
* Sent attributes can be accessed on server nodes from
* {@link org.apache.ignite.internal.processors.rest.request.GridRestRequest GridRestRequest} or
* {@link org.apache.ignite.internal.processors.odbc.ClientListenerAbstractConnectionContext
* ClientListenerAbstractConnectionContext} (depends on client type).
*
* @param userAttrs User attributes.
* @return {@code this} for chaining.
*/
public ClientConfiguration setUserAttributes(Map<String, String> userAttrs) {
this.userAttrs = userAttrs;
return this;
}
/**
* Gets the async continuation executor.
* <p />
* When <code>null</code> (default), {@link ForkJoinPool#commonPool()} is used.
* <p />
* When async client operation completes, corresponding {@link org.apache.ignite.lang.IgniteFuture} listeners
* will be invoked using this executor. Thin client operation results are handled by a dedicated thread.
* This thread should be free from any extra work, and should not be not be used to execute future listeners
* directly.
*
* @return Executor for async continuations.
*/
public Executor getAsyncContinuationExecutor() {
return asyncContinuationExecutor;
}
/**
* Sets the async continuation executor.
* <p />
* When <code>null</code> (default), {@link ForkJoinPool#commonPool()} is used.
* <p />
* When async client operation completes, corresponding {@link org.apache.ignite.lang.IgniteFuture} listeners
* will be invoked using this executor. Thin client operation results are handled by a dedicated thread.
* This thread should be free from any extra work, and should not be not be used to execute future listeners
* directly.
*
* @param asyncContinuationExecutor Executor for async continuations.
* @return {@code this} for chaining.
*/
public ClientConfiguration setAsyncContinuationExecutor(Executor asyncContinuationExecutor) {
this.asyncContinuationExecutor = asyncContinuationExecutor;
return this;
}
}
| apache-2.0 |
kisskys/incubator-asterixdb | asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/AInt64TypeComputer.java | 1756 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.om.typecomputer.impl;
import org.apache.asterix.om.typecomputer.base.IResultTypeComputer;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.om.types.IAType;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
import org.apache.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
/**
 * Result type computer that unconditionally reports the builtin 64-bit integer
 * type ({@link BuiltinType#AINT64}) for any expression.
 */
public class AInt64TypeComputer implements IResultTypeComputer {

    /** Stateless shared singleton instance. */
    public static final AInt64TypeComputer INSTANCE = new AInt64TypeComputer();

    // Private: instances are obtained only through INSTANCE.
    private AInt64TypeComputer() {
    }

    @Override
    public IAType computeType(ILogicalExpression expression, IVariableTypeEnvironment env,
            IMetadataProvider<?, ?> metadataProvider) throws AlgebricksException {
        // The input expression and environment are ignored; the result is always AINT64.
        return BuiltinType.AINT64;
    }
}
| apache-2.0 |
luchuangbin/test1 | src/com/mit/dstore/wxapi/AppRegister.java | 504 | package com.mit.dstore.wxapi;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import com.tencent.mm.sdk.openapi.IWXAPI;
import com.tencent.mm.sdk.openapi.WXAPIFactory;
/**
 * Broadcast receiver that (re-)registers this application with the WeChat
 * (Weixin) SDK whenever the registration broadcast is received.
 */
public class AppRegister extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        final IWXAPI api = WXAPIFactory.createWXAPI(context, null);
        // Register this app with WeChat under the app ID defined in Constants.
        api.registerApp(Constants.APP_ID);
    }
}
| apache-2.0 |
heiko-braun/cassandra | src/java/org/apache/cassandra/repair/RepairJob.java | 7499 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.repair;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import com.google.common.util.concurrent.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.repair.messages.ValidationRequest;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.MerkleTree;
import org.apache.cassandra.utils.SimpleCondition;
/**
 * RepairJob runs repair on given ColumnFamily.
 * <p>
 * Lifecycle (driven by the owning repair session): merkle tree requests are sent to
 * every involved endpoint via {@link #sendTreeRequests}, responses are collected with
 * {@link #addTree}, and once all trees have arrived {@link #submitDifferencers}
 * compares every pair of trees and submits sync tasks for the differences.
 */
public class RepairJob
{
    private static Logger logger = LoggerFactory.getLogger(RepairJob.class);

    /** Descriptor (session id, keyspace, column family, range) identifying this job. */
    public final RepairJobDesc desc;
    /** When true, endpoints are snapshotted first and validated one at a time. */
    private final boolean isSequential;
    // first we send tree requests. this tracks the endpoints remaining to hear from
    private final RequestCoordinator<InetAddress> treeRequests;
    // tree responses are then tracked here
    private final List<TreeResponse> trees = new ArrayList<>();
    // once all responses are received, each tree is compared with each other, and differencer tasks
    // are submitted. the job is done when all differencers are complete.
    private final ListeningExecutorService taskExecutor;
    /** Signalled once all validation requests have been sent; addTree blocks on it (see #3400). */
    private final Condition requestsSent = new SimpleCondition();
    /** Tombstone GC horizon used for validation; -1 until computed in sendTreeRequestsInternal. */
    private int gcBefore = -1;
    /** Set when any endpoint fails validation or snapshotting; read by isFailed(). */
    private volatile boolean failed = false;
    /* Count down as sync completes */
    private AtomicInteger waitForSync;

    /**
     * Create repair job to run on specific columnfamily
     *
     * @param sessionId parent repair session id
     * @param keyspace keyspace to repair
     * @param columnFamily column family to repair
     * @param range token range to repair
     * @param isSequential whether to snapshot and repair endpoints one at a time
     * @param taskExecutor executor running snapshot and differencing tasks
     */
    public RepairJob(UUID sessionId, String keyspace, String columnFamily, Range<Token> range, boolean isSequential, ListeningExecutorService taskExecutor)
    {
        this.desc = new RepairJobDesc(sessionId, keyspace, columnFamily, range);
        this.isSequential = isSequential;
        this.taskExecutor = taskExecutor;
        this.treeRequests = new RequestCoordinator<InetAddress>(isSequential)
        {
            public void send(InetAddress endpoint)
            {
                // Every endpoint validates with the same gcBefore so trees are comparable.
                ValidationRequest request = new ValidationRequest(desc, gcBefore);
                MessagingService.instance().sendOneWay(request.createMessage(), endpoint);
            }
        };
    }

    /**
     * @return true if this job failed
     */
    public boolean isFailed()
    {
        return failed;
    }

    /**
     * Send merkle tree request to every involved neighbor.
     *
     * @param endpoints remote endpoints participating in the repair; the local
     *                  broadcast address is added automatically
     */
    public void sendTreeRequests(Collection<InetAddress> endpoints)
    {
        // send requests to all nodes
        List<InetAddress> allEndpoints = new ArrayList<>(endpoints);
        allEndpoints.add(FBUtilities.getBroadcastAddress());

        if (isSequential)
        {
            // Sequential repair: snapshot every endpoint first, then request trees.
            List<ListenableFuture<InetAddress>> snapshotTasks = new ArrayList<>(allEndpoints.size());
            for (InetAddress endpoint : allEndpoints)
            {
                SnapshotTask snapshotTask = new SnapshotTask(desc, endpoint);
                snapshotTasks.add(snapshotTask);
                taskExecutor.execute(snapshotTask);
            }
            ListenableFuture<List<InetAddress>> allSnapshotTasks = Futures.allAsList(snapshotTasks);
            // Execute send tree request after all snapshot complete
            Futures.addCallback(allSnapshotTasks, new FutureCallback<List<InetAddress>>()
            {
                public void onSuccess(List<InetAddress> endpoints)
                {
                    sendTreeRequestsInternal(endpoints);
                }

                public void onFailure(Throwable throwable)
                {
                    // TODO need to propagate error to RepairSession
                    logger.error("Error while snapshot", throwable);
                    failed = true;
                }
            }, taskExecutor);
        }
        else
        {
            sendTreeRequestsInternal(allEndpoints);
        }
    }

    private void sendTreeRequestsInternal(Collection<InetAddress> endpoints)
    {
        // Compute gcBefore once so that all endpoints validate against the same horizon.
        this.gcBefore = Keyspace.open(desc.keyspace).getColumnFamilyStore(desc.columnFamily).gcBefore(System.currentTimeMillis());
        for (InetAddress endpoint : endpoints)
            treeRequests.add(endpoint);

        logger.info(String.format("[repair #%s] requesting merkle trees for %s (to %s)", desc.sessionId, desc.columnFamily, endpoints));
        treeRequests.start();
        requestsSent.signalAll();
    }

    /**
     * Add a new received tree and return the number of remaining tree to
     * be received for the job to be complete.
     *
     * Callers may assume exactly one addTree call will result in zero remaining endpoints.
     *
     * @param endpoint address of the endpoint that sent response
     * @param tree sent Merkle tree or null if validation failed on endpoint
     * @return the number of responses waiting to receive
     */
    public synchronized int addTree(InetAddress endpoint, MerkleTree tree)
    {
        // Wait for all request to have been performed (see #3400)
        try
        {
            requestsSent.await();
        }
        catch (InterruptedException e)
        {
            // NOTE(review): the interrupt status is not restored here; tolerable only
            // because the AssertionError terminates the calling thread's work.
            throw new AssertionError("Interrupted while waiting for requests to be sent");
        }

        if (tree == null)
            failed = true;
        else
            trees.add(new TreeResponse(endpoint, tree));
        return treeRequests.completed(endpoint);
    }

    /**
     * Submit differencers for running.
     * All tree *must* have been received before this is called.
     */
    public void submitDifferencers()
    {
        assert !failed;
        List<Differencer> differencers = new ArrayList<>();
        // We need to difference all trees one against another
        for (int i = 0; i < trees.size() - 1; ++i)
        {
            TreeResponse r1 = trees.get(i);
            for (int j = i + 1; j < trees.size(); ++j)
            {
                TreeResponse r2 = trees.get(j);
                Differencer differencer = new Differencer(desc, r1, r2);
                differencers.add(differencer);
                logger.debug("Queueing comparison {}", differencer);
            }
        }
        // One countdown per pair; completedSynchronization() decrements it.
        waitForSync = new AtomicInteger(differencers.size());
        for (Differencer differencer : differencers)
            taskExecutor.submit(differencer);

        trees.clear(); // allows gc to do its thing
    }

    /**
     * @return true if the given node pair was the last remaining
     */
    boolean completedSynchronization()
    {
        return waitForSync.decrementAndGet() == 0;
    }
}
| apache-2.0 |
ylimit/PrivacyStreams | privacystreams-android-sdk/src/main/java/io/github/privacystreams/utils/UIUtils.java | 1912 | package io.github.privacystreams.utils;
import android.content.Context;
import android.graphics.Point;
import android.os.Build;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;
/**
 * A set of ui-related utility functions.
 */
public class UIUtils {
    /** Lazily computed, cached screen size; see {@link #fetchScreenSize}. */
    private static Point sScreenSize;

    public static int getScreenHeight(Context context) {
        fetchScreenSize(context);
        return sScreenSize.y;
    }

    public static int getScreenWidth(Context context) {
        fetchScreenSize(context);
        // NOTE(review): stray debug logging at error level; consider removing.
        Log.e("screen size", "" + sScreenSize.x);
        return sScreenSize.x;
    }

    private static void fetchScreenSize(Context context) {
        if (sScreenSize == null) {
            int i;
            int i2;
            Display defaultDisplay = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
            // NOTE(review): the two calls below discard their results; presumably
            // leftovers from decompilation -- confirm before cleaning up.
            context.getResources().getConfiguration();
            defaultDisplay.getRotation();
            sScreenSize = new Point();
            if (Build.VERSION.SDK_INT >= 17) {
                DisplayMetrics displayMetrics = new DisplayMetrics();
                defaultDisplay.getMetrics(displayMetrics);
                DisplayMetrics displayMetrics2 = new DisplayMetrics();
                defaultDisplay.getRealMetrics(displayMetrics2);
                // i = height of system decorations (real height minus app-visible height).
                i = displayMetrics2.heightPixels - displayMetrics.heightPixels;
                sScreenSize.x = displayMetrics.widthPixels;
                sScreenSize.y = displayMetrics.heightPixels;
            } else {
                defaultDisplay.getSize(sScreenSize);
                i = 0;
            }
            if (sScreenSize.x <= sScreenSize.y) {
                i2 = 1;
            }
            // NOTE(review): the branch above is dead -- i2 is unconditionally
            // overwritten on the next line (likely decompiler residue or a missing
            // 'else'). The code below always swaps x and y adjusted by the decoration
            // height i; confirm this matches the intended orientation handling.
            i2 = sScreenSize.x;
            sScreenSize.x = sScreenSize.y + i;
            sScreenSize.y = i2 - i;
        }
    }
}
| apache-2.0 |
pkdevbox/spring-social-linkedin | spring-social-linkedin/src/main/java/org/springframework/social/linkedin/api/NewShare.java | 3562 | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.linkedin.api;
import java.io.Serializable;
import java.util.Locale;
/**
 * Model object to represent a Share
 *
 * @author Robert Drysdale
 */
public class NewShare implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Free-form commentary that accompanies the share. */
    private String comment;

    /** Optional shared content (title, URL, image, description). */
    private NewShareContent content;

    /** Who can see the share. */
    private NewShareVisibility visibility;

    public NewShare() {}

    public NewShare(String comment, NewShareContent content, NewShareVisibility visibility) {
        this.comment = comment;
        this.content = content;
        this.visibility = visibility;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public NewShareContent getContent() {
        return content;
    }

    public void setContent(NewShareContent content) {
        this.content = content;
    }

    public NewShareVisibility getVisibility() {
        return visibility;
    }

    public void setVisibility(NewShareVisibility visibility) {
        this.visibility = visibility;
    }

    /**
     * Content attached to a share: a title and submitted URL, plus an optional
     * image URL and description.
     */
    public static class NewShareContent implements Serializable {

        private static final long serialVersionUID = 1L;

        private String title;
        private String submittedUrl;
        private String submittedImageUrl;
        private String description;

        public NewShareContent() {}

        public NewShareContent(String title, String submittedUrl) {
            this.title = title;
            this.submittedUrl = submittedUrl;
        }

        public NewShareContent(String title, String submittedUrl, String description) {
            this(title, submittedUrl);
            this.description = description;
        }

        public NewShareContent(String title, String submittedUrl, String submittedImageUrl, String description) {
            this(title, submittedUrl, description);
            this.submittedImageUrl = submittedImageUrl;
        }

        public String getTitle() {
            return title;
        }

        public void setTitle(String title) {
            this.title = title;
        }

        public String getSubmittedUrl() {
            return submittedUrl;
        }

        public void setSubmittedUrl(String submittedUrl) {
            this.submittedUrl = submittedUrl;
        }

        /**
         * @return the submitted image URL.
         */
        public String getSubmittedImageUrl() {
            return submittedImageUrl;
        }

        /**
         * Accessor that mistakenly required an (ignored) argument; retained for
         * backward compatibility with existing callers.
         *
         * @param submittedImageUrl not used
         * @return the submitted image URL
         * @deprecated use {@link #getSubmittedImageUrl()} instead
         */
        @Deprecated
        public String getSubmittedImageUrl(String submittedImageUrl) {
            return this.submittedImageUrl;
        }

        public void setSubmittedImageUrl(String submittedImageUrl) {
            this.submittedImageUrl = submittedImageUrl;
        }

        public String getDescription() {
            return description;
        }

        /**
         * Sets the description (previously settable only via the constructors).
         *
         * @param description description of the shared content
         */
        public void setDescription(String description) {
            this.description = description;
        }
    }

    /**
     * Visibility wrapper holding a {@link NewShareVisibilityCode}.
     */
    public static class NewShareVisibility implements Serializable {

        private static final long serialVersionUID = 1L;

        private NewShareVisibilityCode code;

        public NewShareVisibility() {}

        public NewShareVisibility(NewShareVisibilityCode code) {
            this.code = code;
        }

        public NewShareVisibilityCode getCode() {
            return code;
        }

        public void setCode(NewShareVisibilityCode code) {
            this.code = code;
        }
    }

    /**
     * Visibility codes as expected by the LinkedIn API. The wire format produced by
     * {@link #toString()} is the lower-case constant name with underscores replaced
     * by dashes, e.g. {@code "connections-only"}.
     */
    public static enum NewShareVisibilityCode {
        ANYONE,
        CONNECTIONS_ONLY;

        @Override
        public String toString() {
            // Locale.ROOT guards against locale-sensitive lower-casing
            // (e.g. the Turkish dotless i would corrupt the wire value).
            return this.name().toLowerCase(Locale.ROOT).replace('_', '-');
        }
    }
}
| apache-2.0 |
imay/palo | fe/src/test/java/org/apache/doris/analysis/HelpStmtTest.java | 1683 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.analysis;
import org.apache.doris.common.AnalysisException;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link HelpStmt}: analysis of a normal help mask and rejection
 * of an empty one.
 */
public class HelpStmtTest {

    @Test
    public void testNormal() throws AnalysisException {
        HelpStmt stmt = new HelpStmt("contents");
        // A null analyzer suffices for HELP statements in these tests.
        stmt.analyze(null);
        Assert.assertEquals("contents", stmt.getMask());
        Assert.assertEquals("HELP contents", stmt.toString());
        // Result-set metadata: topic view has 3 columns, category view 3, keyword view 2.
        Assert.assertEquals(3, stmt.getMetaData().getColumnCount());
        Assert.assertEquals(3, stmt.getCategoryMetaData().getColumnCount());
        Assert.assertEquals(2, stmt.getKeywordMetaData().getColumnCount());
    }

    @Test(expected = AnalysisException.class)
    public void testEmpty() throws AnalysisException {
        HelpStmt stmt = new HelpStmt("");
        // An empty mask is invalid and must be rejected during analysis.
        stmt.analyze(null);
        Assert.fail("No exception throws.");
    }
}
groboclown/p4ic4idea | p4java/src/main/java/com/perforce/p4java/impl/mapbased/rpc/sys/RpcPerforceDigestType.java | 555 | package com.perforce.p4java.impl.mapbased.rpc.sys;
/**
 * Digest algorithms understood by the Perforce RPC layer, keyed by the name the
 * server uses on the wire.
 */
public enum RpcPerforceDigestType {
    MD5("md5"),
    GIT_TEXT("GitText"),
    // NOTE: constant name is misspelled ("BINARAY") but kept for source compatibility.
    GIT_BINARAY("GitBinary"),
    SHA256("sha256");

    /** Wire-level name of this digest algorithm (immutable). */
    public final String rpcName;

    RpcPerforceDigestType(String rpcName) {
        this.rpcName = rpcName;
    }

    /**
     * Maps a wire-level digest name to its enum value.
     *
     * @param rpcName name received from the server; may be {@code null}
     * @return the matching type, or {@code MD5} if the name is unknown or {@code null}
     */
    public static RpcPerforceDigestType GetType(String rpcName) {
        for (RpcPerforceDigestType t : values()) {
            if (t.rpcName.equals(rpcName)) {
                return t;
            }
        }
        // Historical behavior: fall back to MD5 for any unrecognized name.
        return MD5;
    }
}
| apache-2.0 |
apache/commons-imaging | src/test/java/org/apache/commons/imaging/palette/PaletteQuantizationTest.java | 5296 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.imaging.palette;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.awt.image.BufferedImage;
import org.apache.commons.imaging.ImageWriteException;
import org.apache.commons.imaging.ImagingTest;
import org.junit.jupiter.api.Test;
/**
 * Checks that each palette-construction strategy (exact, quantized RGBA/RGB, and
 * median-cut) yields a palette of the expected size, and that dithering with an
 * exact palette leaves every pixel unchanged.
 */
public class PaletteQuantizationTest extends ImagingTest {

    @Test
    public void testPaletteQuantization() throws ImageWriteException {
        // Solid white 10x10 image: exactly one distinct color.
        final BufferedImage whiteImage = new BufferedImage(10, 10, BufferedImage.TYPE_INT_RGB);
        for (int y = 0; y < whiteImage.getHeight(); y++) {
            for (int x = 0; x < whiteImage.getWidth(); x++) {
                whiteImage.setRGB(x, y, 0xFFFFFF);
            }
        }
        checkPaletteDetails(whiteImage, 10, 1);

        // Left half white, right half black: two distinct colors.
        // NOTE(review): the outer loop bound reuses whiteImage.getHeight(); it works
        // only because both images happen to be the same height.
        final BufferedImage whiteAndBlackImage = new BufferedImage(10, 10, BufferedImage.TYPE_INT_RGB);
        for (int y = 0; y < whiteImage.getHeight(); y++) {
            for (int x = 0; x < 5; x++) {
                whiteAndBlackImage.setRGB(x, y, 0xFFFFFF);
            }
            for (int x = 5; x < 10; x++) {
                whiteAndBlackImage.setRGB(x, y, 0x000000);
            }
        }
        checkPaletteDetails(whiteAndBlackImage, 10, 2);

        // Three vertical bands (red, green, blue): three distinct colors.
        final BufferedImage rainbowImage = new BufferedImage(9, 10, BufferedImage.TYPE_INT_RGB);
        for (int y = 0; y < whiteImage.getHeight(); y++) {
            for (int x = 0; x < 3; x++) {
                rainbowImage.setRGB(x, y, 0xFF0000);
            }
            for (int x = 3; x < 6; x++) {
                rainbowImage.setRGB(x, y, 0x00FF00);
            }
            for (int x = 6; x < 9; x++) {
                rainbowImage.setRGB(x, y, 0x0000FF);
            }
        }
        checkPaletteDetails(rainbowImage, 10, 3);
        // Also exercise palette reduction: a limit of 2 forces merging down to 2 entries.
        checkPaletteDetails(rainbowImage, 2, 2);
    }

    /**
     * Builds palettes for {@code image} with every available strategy, limited to
     * {@code limit} entries, and asserts each has {@code expectedSize} colors. When
     * an exact palette is obtainable, additionally asserts dithering is a no-op.
     */
    private void checkPaletteDetails(final BufferedImage image, final int limit, final int expectedSize) throws ImageWriteException {
        final PaletteFactory paletteFactory = new PaletteFactory();
        Palette palette;

        palette = paletteFactory.makeExactRgbPaletteSimple(image, limit);
        // too small palettes are null
        boolean exact = false;
        if (palette != null) {
            assertEquals(expectedSize, palette.length());
            //checkUniqueColors(image, palette);
            exact = true;
        }
        if (exact) {
            // NOTE(review): deliberately disabled check kept as a placeholder.
            //checkDithering(image, palette);
        }

        palette = paletteFactory.makeQuantizedRgbaPalette(image, false, limit);
        assertEquals(expectedSize, palette.length());
        checkUniqueColors(image, palette);
        if (exact) {
            checkPixelsAreIdentical(image, palette);
        }

        palette = paletteFactory.makeQuantizedRgbPalette(image, limit);
        assertEquals(expectedSize, palette.length());
        //checkUniqueColors(image, palette);
        if (exact) {
            //checkDithering(image, palette);
        }

        final MedianCutQuantizer medianCutQuantizer = new MedianCutQuantizer(true);
        palette = medianCutQuantizer.process(
                image, limit, new MostPopulatedBoxesMedianCut());
        assertEquals(expectedSize, palette.length());
        checkUniqueColors(image, palette);
        if (exact) {
            checkPixelsAreIdentical(image, palette);
        }
    }

    /**
     * Dithers a copy of {@code src} with {@code palette} and asserts the dithered
     * image contains exactly as many distinct colors as the palette.
     */
    private void checkUniqueColors(final BufferedImage src, final Palette palette) throws ImageWriteException {
        final BufferedImage dst = new BufferedImage(src.getWidth(), src.getHeight(), BufferedImage.TYPE_INT_RGB);
        dst.getGraphics().drawImage(src, 0, 0, src.getWidth(), src.getHeight(), null);
        Dithering.applyFloydSteinbergDithering(dst, palette);
        // Allow twice the palette size so an over-large result is detected rather than nulled.
        final Palette ditheredPalette = new PaletteFactory().makeExactRgbPaletteSimple(dst, palette.length() * 2);
        assertEquals(palette.length(), ditheredPalette.length());
    }

    /**
     * Dithers a copy of {@code src} with {@code palette} and asserts every pixel is
     * unchanged (valid only when the palette exactly covers the image's colors).
     */
    private void checkPixelsAreIdentical(final BufferedImage src, final Palette palette) throws ImageWriteException {
        final BufferedImage dst = new BufferedImage(src.getWidth(), src.getHeight(), BufferedImage.TYPE_INT_RGB);
        dst.getGraphics().drawImage(src, 0, 0, src.getWidth(), src.getHeight(), null);
        Dithering.applyFloydSteinbergDithering(dst, palette);
        for (int y = 0; y < src.getHeight(); y++) {
            for (int x = 0; x < src.getWidth(); x++) {
                assertEquals(src.getRGB(x, y), dst.getRGB(x, y));
            }
        }
    }
}
| apache-2.0 |
lburgazzoli/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/condition/ConditionalOnWebApplication.java | 1804 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.condition;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.context.annotation.Conditional;
/**
 * {@link Conditional @Conditional} that matches when the application is a web
 * application. By default, any web application will match but it can be narrowed using
 * the {@link #type()} attribute.
 *
 * @author Dave Syer
 * @author Stephane Nicoll
 * @see OnWebApplicationCondition
 */
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@Documented
// The actual matching logic is delegated to OnWebApplicationCondition.
@Conditional(OnWebApplicationCondition.class)
public @interface ConditionalOnWebApplication {

    /**
     * The required type of the web application.
     * @return the required web application type
     */
    Type type() default Type.ANY;

    /**
     * Available application types.
     */
    enum Type {

        /**
         * Any web application will match.
         */
        ANY,

        /**
         * Only servlet-based web application will match.
         */
        SERVLET,

        /**
         * Only reactive-based web application will match.
         */
        REACTIVE

    }

}
| apache-2.0 |
user234/setyon-guava-libraries-clone | guava-tests/test/com/google/common/collect/MultisetsTest.java | 13322 | /*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.testing.SerializableTester.reserializeAndAssert;
import static org.junit.contrib.truth.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.DerivedComparable;
import com.google.common.testing.NullPointerTester;
import junit.framework.TestCase;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
 * Tests for {@link Multisets}.
 *
 * @author Mike Bostock
 * @author Jared Levy
 * @author Louis Wasserman
 */
@GwtCompatible(emulated = true)
public class MultisetsTest extends TestCase {

  /* See MultisetsImmutableEntryTest for immutableEntry() tests. */

  // Verifies that Multisets.forSet produces a live, mostly-unmodifiable
  // multiset view of a Set (adds rejected, removals write through).
  public void testForSet() {
    Set<String> set = new HashSet<String>();
    set.add("foo");
    set.add("bar");
    set.add(null);
    Multiset<String> multiset = HashMultiset.create();
    multiset.addAll(set);
    Multiset<String> multisetView = Multisets.forSet(set);
    assertTrue(multiset.equals(multisetView));
    assertTrue(multisetView.equals(multiset));
    assertEquals(multiset.toString(), multisetView.toString());
    assertEquals(multiset.hashCode(), multisetView.hashCode());
    assertEquals(multiset.size(), multisetView.size());
    assertTrue(multisetView.contains("foo"));
    assertEquals(set, multisetView.elementSet());
    assertEquals(multisetView.elementSet(), set);
    assertEquals(multiset.elementSet(), multisetView.elementSet());
    assertEquals(multisetView.elementSet(), multiset.elementSet());
    try {
      multisetView.add("baz");
      fail("UnsupportedOperationException expected");
    } catch (UnsupportedOperationException expected) {}
    try {
      multisetView.addAll(Collections.singleton("baz"));
      fail("UnsupportedOperationException expected");
    } catch (UnsupportedOperationException expected) {}
    try {
      multisetView.elementSet().add("baz");
      fail("UnsupportedOperationException expected");
    } catch (UnsupportedOperationException expected) {}
    try {
      multisetView.elementSet().addAll(Collections.singleton("baz"));
      fail("UnsupportedOperationException expected");
    } catch (UnsupportedOperationException expected) {}
    multisetView.remove("bar");
    assertFalse(multisetView.contains("bar"));
    assertFalse(set.contains("bar"));
    assertEquals(set, multisetView.elementSet());
    ASSERT.that(multisetView.elementSet()).hasContentsAnyOrder("foo", null);
    ASSERT.that(multisetView.entrySet()).hasContentsAnyOrder(
        Multisets.immutableEntry("foo", 1), Multisets.immutableEntry((String) null, 1));
    multisetView.clear();
    assertFalse(multisetView.contains("foo"));
    assertFalse(set.contains("foo"));
    assertTrue(set.isEmpty());
    assertTrue(multisetView.isEmpty());
    multiset.clear();
    assertEquals(multiset.toString(), multisetView.toString());
    assertEquals(multiset.hashCode(), multisetView.hashCode());
    assertEquals(multiset.size(), multisetView.size());
  }

  @GwtIncompatible("SerializableTester")
  public void testForSetSerialization() {
    Set<String> set = new HashSet<String>();
    set.add("foo");
    set.add("bar");
    set.add(null);
    Multiset<String> multiset = HashMultiset.create();
    multiset.addAll(set);
    Multiset<String> multisetView = Multisets.forSet(set);
    assertTrue(multiset.equals(multisetView));
    reserializeAndAssert(multisetView);
  }

  public void testNewTreeMultisetDerived() {
    TreeMultiset<DerivedComparable> set = TreeMultiset.create();
    assertTrue(set.isEmpty());
    set.add(new DerivedComparable("foo"), 2);
    set.add(new DerivedComparable("bar"), 3);
    ASSERT.that(set).hasContentsInOrder(
        new DerivedComparable("bar"), new DerivedComparable("bar"), new DerivedComparable("bar"),
        new DerivedComparable("foo"), new DerivedComparable("foo"));
  }

  public void testNewTreeMultisetNonGeneric() {
    TreeMultiset<LegacyComparable> set = TreeMultiset.create();
    assertTrue(set.isEmpty());
    set.add(new LegacyComparable("foo"), 2);
    set.add(new LegacyComparable("bar"), 3);
    ASSERT.that(set).hasContentsInOrder(new LegacyComparable("bar"),
        new LegacyComparable("bar"), new LegacyComparable("bar"),
        new LegacyComparable("foo"), new LegacyComparable("foo"));
  }

  public void testNewTreeMultisetComparator() {
    TreeMultiset<String> multiset
        = TreeMultiset.create(Collections.reverseOrder());
    multiset.add("bar", 3);
    multiset.add("foo", 2);
    ASSERT.that(multiset).hasContentsInOrder("foo", "foo", "bar", "bar", "bar");
  }

  public void testRetainOccurrencesEmpty() {
    Multiset<String> multiset = HashMultiset.create();
    Multiset<String> toRetain =
        HashMultiset.create(Arrays.asList("a", "b", "a"));
    assertFalse(Multisets.retainOccurrences(multiset, toRetain));
    ASSERT.that(multiset).hasContentsInOrder();
  }

  public void testRemoveOccurrencesEmpty() {
    Multiset<String> multiset = HashMultiset.create();
    Multiset<String> toRemove =
        HashMultiset.create(Arrays.asList("a", "b", "a"));
    // Fixed copy-paste bug: this test previously invoked retainOccurrences,
    // so removeOccurrences on an empty multiset was never exercised.
    assertFalse(Multisets.removeOccurrences(multiset, toRemove));
    assertTrue(multiset.isEmpty());
  }

  public void testUnion() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create(
        Arrays.asList("a", "b", "b", "c"));
    ASSERT.that(Multisets.union(ms1, ms2))
        .hasContentsAnyOrder("a", "a", "b", "b", "c");
  }

  public void testUnionEqualMultisets() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    assertEquals(ms1, Multisets.union(ms1, ms2));
  }

  public void testUnionEmptyNonempty() {
    Multiset<String> ms1 = HashMultiset.create();
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    assertEquals(ms2, Multisets.union(ms1, ms2));
  }

  public void testUnionNonemptyEmpty() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create();
    assertEquals(ms1, Multisets.union(ms1, ms2));
  }

  public void testIntersectEmptyNonempty() {
    Multiset<String> ms1 = HashMultiset.create();
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    ASSERT.that(Multisets.intersection(ms1, ms2)).hasContentsInOrder();
  }

  public void testIntersectNonemptyEmpty() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create();
    ASSERT.that(Multisets.intersection(ms1, ms2)).hasContentsInOrder();
  }

  public void testSum() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("b", "c"));
    ASSERT.that(Multisets.sum(ms1, ms2))
        .hasContentsAnyOrder("a", "a", "b", "b", "c");
  }

  public void testSumEmptyNonempty() {
    Multiset<String> ms1 = HashMultiset.create();
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    ASSERT.that(Multisets.sum(ms1, ms2)).hasContentsAnyOrder("a", "b", "a");
  }

  public void testSumNonemptyEmpty() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create();
    ASSERT.that(Multisets.sum(ms1, ms2)).hasContentsAnyOrder("a", "b", "a");
  }

  public void testDifferenceWithNoRemovedElements() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("a"));
    ASSERT.that(Multisets.difference(ms1, ms2)).hasContentsAnyOrder("a", "b");
  }

  public void testDifferenceWithRemovedElement() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("b"));
    ASSERT.that(Multisets.difference(ms1, ms2)).hasContentsAnyOrder("a", "a");
  }

  public void testDifferenceWithMoreElementsInSecondMultiset() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("a", "b", "b", "b"));
    Multiset<String> diff = Multisets.difference(ms1, ms2);
    ASSERT.that(diff).hasContentsAnyOrder("a");
    assertEquals(0, diff.count("b"));
    assertEquals(1, diff.count("a"));
    assertFalse(diff.contains("b"));
    assertTrue(diff.contains("a"));
  }

  public void testDifferenceEmptyNonempty() {
    Multiset<String> ms1 = HashMultiset.create();
    Multiset<String> ms2 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    assertEquals(ms1, Multisets.difference(ms1, ms2));
  }

  public void testDifferenceNonemptyEmpty() {
    Multiset<String> ms1 = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> ms2 = HashMultiset.create();
    assertEquals(ms1, Multisets.difference(ms1, ms2));
  }

  public void testContainsOccurrencesEmpty() {
    Multiset<String> superMultiset = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> subMultiset = HashMultiset.create();
    assertTrue(Multisets.containsOccurrences(superMultiset, subMultiset));
    assertFalse(Multisets.containsOccurrences(subMultiset, superMultiset));
  }

  public void testContainsOccurrences() {
    Multiset<String> superMultiset = HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> subMultiset = HashMultiset.create(Arrays.asList("a", "b"));
    assertTrue(Multisets.containsOccurrences(superMultiset, subMultiset));
    assertFalse(Multisets.containsOccurrences(subMultiset, superMultiset));
    Multiset<String> diffMultiset = HashMultiset.create(Arrays.asList("a", "b", "c"));
    assertFalse(Multisets.containsOccurrences(superMultiset, diffMultiset));
    assertTrue(Multisets.containsOccurrences(diffMultiset, subMultiset));
  }

  // Retaining against an empty multiset must remove everything.
  public void testRetainEmptyOccurrences() {
    Multiset<String> multiset =
        HashMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> toRetain = HashMultiset.create();
    assertTrue(Multisets.retainOccurrences(multiset, toRetain));
    assertTrue(multiset.isEmpty());
  }

  public void testRetainOccurrences() {
    Multiset<String> multiset =
        TreeMultiset.create(Arrays.asList("a", "b", "a", "c"));
    Multiset<String> toRetain =
        HashMultiset.create(Arrays.asList("a", "b", "b"));
    assertTrue(Multisets.retainOccurrences(multiset, toRetain));
    ASSERT.that(multiset).hasContentsInOrder("a", "b");
  }

  // Removing an empty multiset must be a no-op and report no change.
  public void testRemoveEmptyOccurrences() {
    Multiset<String> multiset =
        TreeMultiset.create(Arrays.asList("a", "b", "a"));
    Multiset<String> toRemove = HashMultiset.create();
    assertFalse(Multisets.removeOccurrences(multiset, toRemove));
    ASSERT.that(multiset).hasContentsInOrder("a", "a", "b");
  }

  public void testRemoveOccurrences() {
    Multiset<String> multiset =
        TreeMultiset.create(Arrays.asList("a", "b", "a", "c"));
    Multiset<String> toRemove =
        HashMultiset.create(Arrays.asList("a", "b", "b"));
    assertTrue(Multisets.removeOccurrences(multiset, toRemove));
    ASSERT.that(multiset).hasContentsInOrder("a", "c");
  }

  // unmodifiableMultiset must not re-wrap already-unmodifiable instances.
  @SuppressWarnings("deprecation")
  public void testUnmodifiableMultisetShortCircuit() {
    Multiset<String> mod = HashMultiset.create();
    Multiset<String> unmod = Multisets.unmodifiableMultiset(mod);
    assertNotSame(mod, unmod);
    assertSame(unmod, Multisets.unmodifiableMultiset(unmod));
    ImmutableMultiset<String> immutable = ImmutableMultiset.of("a", "a", "b", "a");
    assertSame(immutable, Multisets.unmodifiableMultiset(immutable));
    assertSame(immutable, Multisets.unmodifiableMultiset((Multiset<String>) immutable));
  }

  public void testHighestCountFirst() {
    Multiset<String> multiset = HashMultiset.create(
        Arrays.asList("a", "a", "a", "b", "c", "c"));
    ImmutableMultiset<String> sortedMultiset =
        Multisets.copyHighestCountFirst(multiset);
    ASSERT.that(sortedMultiset.entrySet()).hasContentsInOrder(
        Multisets.immutableEntry("a", 3), Multisets.immutableEntry("c", 2),
        Multisets.immutableEntry("b", 1));
    ASSERT.that(sortedMultiset).hasContentsInOrder(
        "a",
        "a",
        "a",
        "c",
        "c",
        "b");
    ASSERT.that(Multisets.copyHighestCountFirst(ImmutableMultiset.of())).isEmpty();
  }

  @GwtIncompatible("NullPointerTester")
  public void testNullPointers() {
    new NullPointerTester().testAllPublicStaticMethods(Multisets.class);
  }
}
| apache-2.0 |
vadimv/PlatypusJS | web-client/src/platypus/src/com/eas/client/form/published/HasJsFacade.java | 111 | package com.eas.client.form.published;
/**
 * Marker interface combining {@link HasPublished} and {@link HasJsName};
 * implementors expose both a published (JavaScript-facing) object and a name.
 */
public interface HasJsFacade extends HasPublished, HasJsName {
}
| apache-2.0 |
jchampion/elasticsearch | core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java | 17003 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.sort.SortBuilders.fieldSort;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSortValues;
import static org.hamcrest.Matchers.closeTo;
/**
 * Integration tests for {@code GeoDistanceSortBuilder}: sorting search hits by
 * geo distance with multiple reference points, different point input formats
 * (GeoPoint objects, geohash strings, raw lat/lon) and min/max sort modes,
 * plus cross-index sorting with unmapped fields.
 */
public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // InternalSettingsPlugin allows the tests below to set internal index
        // settings such as the index-created version.
        return pluginList(InternalSettingsPlugin.class);
    }
    // Two docs with two points each, queried from two points; min/max sort
    // modes must pick the nearest/farthest doc point per query point.
    public void testManyToManyGeoPoints() throws ExecutionException, InterruptedException, IOException {
        /**
         * | q  |  d1    |   d2
         * |    |        |
         * |    |        |
         * |    |        |
         * |2 o|  x     |   x
         * |    |        |
         * |1 o|  x     |   x
         * |___________________________
         * 1   2   3   4   5   6   7
         */
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
        Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point"));
        XContentBuilder d1Builder = jsonBuilder();
        GeoPoint[] d1Points = {new GeoPoint(3, 2), new GeoPoint(4, 1)};
        createShuffeldJSONArray(d1Builder, d1Points);
        XContentBuilder d2Builder = jsonBuilder();
        GeoPoint[] d2Points = {new GeoPoint(5, 1), new GeoPoint(6, 2)};
        createShuffeldJSONArray(d2Builder, d2Points);
        logger.info(d1Builder.string());
        logger.info(d2Builder.string());
        indexRandom(true,
                client().prepareIndex("index", "type", "d1").setSource(d1Builder),
                client().prepareIndex("index", "type", "d2").setSource(d2Builder));
        ensureYellow();
        GeoPoint[] q = new GeoPoint[2];
        // Order of the query points must not matter; randomize it.
        if (randomBoolean()) {
            q[0] = new GeoPoint(2, 1);
            q[1] = new GeoPoint(2, 2);
        } else {
            q[1] = new GeoPoint(2, 2);
            q[0] = new GeoPoint(2, 1);
        }
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS), 0.01d));
        assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS), 0.01d));
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("min").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d2", "d1");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS), 0.01d));
        assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS), 0.01d));
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("max").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d));
        assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS), 0.01d));
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(new GeoDistanceSortBuilder("location", q).sortMode("max").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d2", "d1");
        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS), 0.01d));
        assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d));
    }
    /**
     * Writes {@code pointsArray} into {@code builder} as a JSON array named
     * "location", in random order, so tests do not depend on point order.
     */
    protected void createShuffeldJSONArray(XContentBuilder builder, GeoPoint[] pointsArray) throws IOException {
        List<GeoPoint> points = new ArrayList<>();
        points.addAll(Arrays.asList(pointsArray));
        builder.startObject();
        builder.startArray("location");
        int numPoints = points.size();
        for (int i = 0; i < numPoints; i++) {
            // Remove a random remaining point each iteration -> shuffled output.
            builder.value(points.remove(randomInt(points.size() - 1)));
        }
        builder.endArray();
        builder.endObject();
    }
    // Mixes geohash strings and GeoPoint objects when building the same sort
    // builder; both formats must produce identical distances.
    @SuppressWarnings("deprecation")
    public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionException, InterruptedException, IOException {
        /**   q     d1       d2
         * |4  o|   x    |   x
         * |    |        |
         * |3  o|  x     |  x
         * |    |        |
         * |2  o| x      | x
         * |    |        |
         * |1  o|x       |x
         * |______________________
         * 1   2   3   4   5   6
         */
        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
        Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point"));
        XContentBuilder d1Builder = jsonBuilder();
        GeoPoint[] d1Points = {new GeoPoint(2.5, 1), new GeoPoint(2.75, 2), new GeoPoint(3, 3), new GeoPoint(3.25, 4)};
        createShuffeldJSONArray(d1Builder, d1Points);
        XContentBuilder d2Builder = jsonBuilder();
        GeoPoint[] d2Points = {new GeoPoint(4.5, 1), new GeoPoint(4.75, 2), new GeoPoint(5, 3), new GeoPoint(5.25, 4)};
        createShuffeldJSONArray(d2Builder, d2Points);
        indexRandom(true,
                client().prepareIndex("index", "type", "d1").setSource(d1Builder),
                client().prepareIndex("index", "type", "d2").setSource(d2Builder));
        ensureYellow();
        List<String> qHashes = new ArrayList<>();
        List<GeoPoint> qPoints = new ArrayList<>();
        createQPoints(qHashes, qPoints);
        GeoDistanceSortBuilder geoDistanceSortBuilder = null;
        // Randomly add each query point either as a geohash or as a GeoPoint.
        for (int i = 0; i < 4; i++) {
            int at = randomInt(3 - i);
            if (randomBoolean()) {
                if (geoDistanceSortBuilder == null) {
                    geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", qHashes.get(at));
                } else {
                    geoDistanceSortBuilder.geohashes(qHashes.get(at));
                }
            } else {
                if (geoDistanceSortBuilder == null) {
                    geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", qPoints.get(at));
                } else {
                    geoDistanceSortBuilder.points(qPoints.get(at));
                }
            }
            qHashes.remove(at);
            qPoints.remove(at);
        }
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2.5, 1, 2, 1, DistanceUnit.KILOMETERS), 1.e-4));
        assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(4.5, 1, 2, 1, DistanceUnit.KILOMETERS), 1.e-4));
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("max").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        assertOrderedSearchHits(searchResponse, "d1", "d2");
        assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(3.25, 4, 2, 1, DistanceUnit.KILOMETERS), 1.e-4));
        assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(5.25, 4, 2, 1, DistanceUnit.KILOMETERS), 1.e-4));
    }
    // The same single query point given as geohash, GeoPoint, raw lat/lon and
    // via SearchSourceBuilder must all yield the same sort order.
    public void testSinglePointGeoDistanceSort() throws ExecutionException, InterruptedException, IOException {
        assertAcked(prepareCreate("index").addMapping("type", "location", "type=geo_point"));
        indexRandom(true,
                client().prepareIndex("index", "type", "d1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 1).field("lon", 1).endObject().endObject()),
                client().prepareIndex("index", "type", "d2").setSource(jsonBuilder().startObject().startObject("location").field("lat", 1).field("lon", 2).endObject().endObject()));
        ensureYellow();
        String hashPoint = "s037ms06g7h0";
        GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", hashPoint);
        SearchResponse searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);
        geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", new GeoPoint(2, 2));
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);
        geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", 2, 2);
        searchResponse = client().prepareSearch()
                .setQuery(matchAllQuery())
                .addSort(geoDistanceSortBuilder.sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                .execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);
        searchResponse = client()
                .prepareSearch()
                .setSource(
                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0)
                                .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);
        searchResponse = client()
                .prepareSearch()
                .setSource(
                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", "s037ms06g7h0")
                                .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);
        searchResponse = client()
                .prepareSearch()
                .setSource(
                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0)
                                .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet();
        checkCorrectSortOrderForGeoSort(searchResponse);
    }
    /**
     * Asserts the expected hit order (d2 before d1) and the expected plane
     * distances from the query point (2, 2) used in
     * {@link #testSinglePointGeoDistanceSort()}.
     */
    private void checkCorrectSortOrderForGeoSort(SearchResponse searchResponse) {
        assertOrderedSearchHits(searchResponse, "d2", "d1");
        assertThat((Double) searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 1, 2, DistanceUnit.KILOMETERS), 1.e-4));
        assertThat((Double) searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 1, 1, DistanceUnit.KILOMETERS), 1.e-4));
    }
    /**
     * Fills the given lists with four query points and their geohash
     * equivalents; entry i of {@code qHashes} corresponds to entry i of
     * {@code qPoints}.
     */
    protected void createQPoints(List<String> qHashes, List<GeoPoint> qPoints) {
        GeoPoint[] qp = {new GeoPoint(2, 1), new GeoPoint(2, 2), new GeoPoint(2, 3), new GeoPoint(2, 4)};
        qPoints.addAll(Arrays.asList(qp));
        String[] qh = {"s02equ04ven0", "s037ms06g7h0", "s065kk0dc540", "s06g7h0dyg00"};
        qHashes.addAll(Arrays.asList(qh));
    }
    // Sorting across one mapped and one unmapped index: unmapped fields must
    // sort with type-appropriate missing values instead of failing.
    public void testCrossIndexIgnoreUnmapped() throws Exception {
        assertAcked(prepareCreate("test1").addMapping(
                "type", "str_field1", "type=text",
                "long_field", "type=long",
                "double_field", "type=double").get());
        assertAcked(prepareCreate("test2").get());
        indexRandom(true,
                client().prepareIndex("test1", "type").setSource("str_field", "bcd", "long_field", 3, "double_field", 0.65),
                client().prepareIndex("test2", "type").setSource());
        ensureYellow("test1", "test2");
        SearchResponse resp = client().prepareSearch("test1", "test2")
                .addSort(fieldSort("str_field").order(SortOrder.ASC).unmappedType("string"))
                .addSort(fieldSort("str_field2").order(SortOrder.DESC).unmappedType("string")).get();
        assertSortValues(resp,
                new Object[] {new Text("bcd"), null},
                new Object[] {null, null});
        resp = client().prepareSearch("test1", "test2")
                .addSort(fieldSort("long_field").order(SortOrder.ASC).unmappedType("long"))
                .addSort(fieldSort("long_field2").order(SortOrder.DESC).unmappedType("long")).get();
        assertSortValues(resp,
                new Object[] {3L, Long.MIN_VALUE},
                new Object[] {Long.MAX_VALUE, Long.MIN_VALUE});
        resp = client().prepareSearch("test1", "test2")
                .addSort(fieldSort("double_field").order(SortOrder.ASC).unmappedType("double"))
                .addSort(fieldSort("double_field2").order(SortOrder.DESC).unmappedType("double")).get();
        assertSortValues(resp,
                new Object[] {0.65, Double.NEGATIVE_INFINITY},
                new Object[] {Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY});
    }
}
| apache-2.0 |
MMatGitHub/projekte | src/de/mm/test/QuartettTest.java | 319 | package de.mm.test;
import org.junit.Assert;
import org.junit.Test;
import de.mm.spiele.model.Quartett;
/**
 * Verifies the card count of a freshly created {@link Quartett} game.
 */
public class QuartettTest {

    @Test
    public void EsSind32KartenImSpiel() {
        // A complete Quartett deck is expected to contain exactly 32 cards.
        int anzKarten = new Quartett().getAnzahlKarten();
        // assertEquals gives better failure diagnostics (expected vs. actual)
        // than the original assertTrue(msg, 32 == anzKarten).
        Assert.assertEquals("Es sind "+ anzKarten + " im Spiel", 32, anzKarten);
    }
}
| apache-2.0 |
hermeswaldemarin/docker-java | src/test/java/com/github/dockerjava/core/command/EventsCmdImplTest.java | 4266 | package com.github.dockerjava.core.command;
import com.github.dockerjava.api.DockerException;
import com.github.dockerjava.api.command.CreateContainerResponse;
import com.github.dockerjava.api.command.EventCallback;
import com.github.dockerjava.api.command.EventsCmd;
import com.github.dockerjava.api.model.Event;
import com.github.dockerjava.client.AbstractDockerClientTest;
import org.testng.ITestResult;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@Test(groups = "integration")
public class EventsCmdImplTest extends AbstractDockerClientTest {

    /**
     * Number of events produced by one run of {@link #generateEvents()}:
     * create, start, die, stop. Declared final — it is a constant and was
     * previously a mutable static field.
     */
    private static final int KNOWN_NUM_EVENTS = 4;

    /** @return current wall-clock time in seconds since the epoch, as a string. */
    private static String getEpochTime() {
        return String.valueOf(System.currentTimeMillis() / 1000);
    }

    @BeforeTest
    public void beforeTest() throws DockerException {
        super.beforeTest();
    }

    @AfterTest
    public void afterTest() {
        super.afterTest();
    }

    @BeforeMethod
    public void beforeMethod(Method method) {
        super.beforeMethod(method);
    }

    @AfterMethod
    public void afterMethod(ITestResult result) {
        super.afterMethod(result);
    }

    /**
     * Streams events bounded by an explicit since/until window and expects
     * exactly the events generated inside that window.
     */
    @Test
    public void testEventStreamTimeBound() throws InterruptedException, IOException {
        // Don't include other tests events
        TimeUnit.SECONDS.sleep(1);
        String startTime = getEpochTime();
        int expectedEvents = generateEvents();
        String endTime = getEpochTime();
        CountDownLatch countDownLatch = new CountDownLatch(expectedEvents);
        EventCallback eventCallback = new EventCallbackTest(countDownLatch);
        EventsCmd eventsCmd = dockerClient.eventsCmd(eventCallback).withSince(startTime).withUntil(endTime);
        ExecutorService executorService = eventsCmd.exec();
        boolean zeroCount = countDownLatch.await(5, TimeUnit.SECONDS);
        executorService.shutdown();
        assertTrue(zeroCount, "Expected 4 events, [create, start, die, stop]");
    }

    /**
     * Streams live events (since "now", no until) and expects the events
     * generated after streaming started.
     */
    @Test
    public void testEventStreaming() throws InterruptedException, IOException {
        // Don't include other tests events
        TimeUnit.SECONDS.sleep(1);
        CountDownLatch countDownLatch = new CountDownLatch(KNOWN_NUM_EVENTS);
        EventCallback eventCallback = new EventCallbackTest(countDownLatch);
        EventsCmd eventsCmd = dockerClient.eventsCmd(eventCallback).withSince(getEpochTime());
        ExecutorService executorService = eventsCmd.exec();
        generateEvents();
        boolean zeroCount = countDownLatch.await(5, TimeUnit.SECONDS);
        executorService.shutdown();
        assertTrue(zeroCount, "Expected 4 events, [create, start, die, stop]");
    }

    /**
     * This method generates {#link KNOWN_NUM_EVENTS} events by pulling an
     * image, then creating, starting and stopping a container.
     *
     * @return the number of events generated ({@code KNOWN_NUM_EVENTS})
     */
    private int generateEvents() {
        String testImage = "busybox";
        asString(dockerClient.pullImageCmd(testImage).exec());
        CreateContainerResponse container = dockerClient
                .createContainerCmd(testImage).withCmd("echo").exec();
        dockerClient.startContainerCmd(container.getId()).exec();
        dockerClient.stopContainerCmd(container.getId()).exec();
        return KNOWN_NUM_EVENTS;
    }

    /**
     * Test callback that counts down a latch once per received event so the
     * test thread can await the expected number of events.
     */
    private class EventCallbackTest implements EventCallback {

        private final CountDownLatch countDownLatch;

        public EventCallbackTest(CountDownLatch countDownLatch) {
            this.countDownLatch = countDownLatch;
        }

        @Override
        public void onEvent(Event event) {
            LOG.info("Received event #{}: {}", countDownLatch.getCount(), event);
            countDownLatch.countDown();
        }

        @Override
        public void onException(Throwable throwable) {
            LOG.error("Error occurred: {}", throwable.getMessage());
        }

        @Override
        public void onCompletion(int numEvents) {
            LOG.info("Number of events received: {}", numEvents);
        }
    }
}
| apache-2.0 |
rpudil/midpoint | model/model-common/src/main/java/com/evolveum/midpoint/model/common/expression/evaluator/AssignmentTargetSearchExpressionEvaluatorFactory.java | 4508 | /*
* Copyright (c) 2014-2015 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.common.expression.evaluator;
import java.util.Collection;
import javax.xml.bind.JAXBElement;
import javax.xml.namespace.QName;
import com.evolveum.midpoint.model.api.ModelService;
import com.evolveum.midpoint.model.common.expression.ExpressionEvaluator;
import com.evolveum.midpoint.model.common.expression.ExpressionEvaluatorFactory;
import com.evolveum.midpoint.prism.ItemDefinition;
import com.evolveum.midpoint.prism.PrismContainerDefinition;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismValue;
import com.evolveum.midpoint.prism.crypto.Protector;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ObjectResolver;
import com.evolveum.midpoint.security.api.SecurityEnforcer;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectFactory;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SearchObjectRefExpressionEvaluatorType;
import org.apache.commons.lang.Validate;
/**
* @author semancik
*
*/
/**
 * Factory for {@link AssignmentTargetSearchExpressionEvaluator} instances,
 * handling the {@code assignmentTargetSearch} expression evaluator element
 * ({@link SearchObjectRefExpressionEvaluatorType}).
 *
 * @author semancik
 */
public class AssignmentTargetSearchExpressionEvaluatorFactory implements ExpressionEvaluatorFactory {

	// all collaborators are injected once and never change, hence final
	private final PrismContext prismContext;
	private final Protector protector;
	private final ObjectResolver objectResolver;
	private final ModelService modelService;
	private final SecurityEnforcer securityEnforcer;

	public AssignmentTargetSearchExpressionEvaluatorFactory(PrismContext prismContext, Protector protector, ObjectResolver objectResolver, ModelService modelService, SecurityEnforcer securityEnforcer) {
		super();
		this.prismContext = prismContext;
		this.protector = protector;
		this.objectResolver = objectResolver;
		this.modelService = modelService;
		this.securityEnforcer = securityEnforcer;
	}

	/* (non-Javadoc)
	 * @see com.evolveum.midpoint.common.expression.ExpressionEvaluatorFactory#getElementName()
	 */
	@Override
	public QName getElementName() {
		return new ObjectFactory().createAssignmentTargetSearch(new SearchObjectRefExpressionEvaluatorType()).getName();
	}

	/**
	 * Creates the evaluator from the (optional) evaluator element.
	 *
	 * @param evaluatorElements at most one element, which must carry a
	 *        {@link SearchObjectRefExpressionEvaluatorType} value; may be
	 *        null or empty, in which case a null evaluator type is used
	 * @param outputDefinition definition of the evaluator output; must not be null
	 * @throws SchemaException if more than one evaluator element is given or
	 *         if the element value has an unexpected type
	 */
	@Override
	public <V extends PrismValue,D extends ItemDefinition> ExpressionEvaluator<V,D> createEvaluator(Collection<JAXBElement<?>> evaluatorElements,
			D outputDefinition, String contextDescription, Task task, OperationResult result) throws SchemaException {
		Validate.notNull(outputDefinition, "output definition must be specified for assignmentTargetSearch expression evaluator");

		JAXBElement<?> evaluatorElement = null;
		// BUG FIX: guard against an empty collection; the previous code called
		// iterator().next() unconditionally and threw NoSuchElementException
		// for a non-null but empty collection, although an absent evaluator
		// element is explicitly tolerated below.
		if (evaluatorElements != null && !evaluatorElements.isEmpty()) {
			if (evaluatorElements.size() > 1) {
				throw new SchemaException("More than one evaluator specified in "+contextDescription);
			}
			evaluatorElement = evaluatorElements.iterator().next();
		}

		Object evaluatorTypeObject = null;
		if (evaluatorElement != null) {
			evaluatorTypeObject = evaluatorElement.getValue();
		}
		if (evaluatorTypeObject != null && !(evaluatorTypeObject instanceof SearchObjectRefExpressionEvaluatorType)) {
			throw new SchemaException("assignment expression evaluator cannot handle elements of type " + evaluatorTypeObject.getClass().getName()+" in "+contextDescription);
		}

		AssignmentTargetSearchExpressionEvaluator expressionEvaluator = new AssignmentTargetSearchExpressionEvaluator((SearchObjectRefExpressionEvaluatorType)evaluatorTypeObject,
				(PrismContainerDefinition<AssignmentType>) outputDefinition, protector, objectResolver, modelService, prismContext, securityEnforcer);
		return (ExpressionEvaluator<V,D>) expressionEvaluator;
	}

}
| apache-2.0 |
thomaskrause/graphANNIS | graphannis-utils/src/main/java/org/corpus_tools/graphannis/conversion/ReparseFolder.java | 1852 | /*
* Copyright 2017 Thomas Krause.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.corpus_tools.graphannis.conversion;
import java.io.File;
import java.util.LinkedList;
import java.util.List;
import org.corpus_tools.annis.benchmark.generator.Query;
import org.corpus_tools.annis.benchmark.generator.QuerySetPersistance;
import org.corpus_tools.annis.ql.parser.AnnisParserAntlr;
import org.corpus_tools.annis.ql.parser.QueryData;
import org.corpus_tools.graphannis.QueryToJSON;
/**
*
* @author thomas
*/
/**
 * Command line utility that re-parses every AQL query of a query set folder
 * and replaces its stored JSON representation with a freshly generated one.
 *
 * @author thomas
 */
public class ReparseFolder
{

  /**
   * Entry point.
   *
   * @param args the first argument must be the path of the folder containing
   *             the query set; any further arguments are ignored
   */
  public static void main(String[] args)
  {
    if (args.length >= 1)
    {
      File dir = new File(args[0]);
      System.out.println("Re-parsing folder " + dir.getAbsolutePath());

      AnnisParserAntlr parser = new AnnisParserAntlr();
      parser.setPrecedenceBound(50);

      List<Query> allQueries = QuerySetPersistance.loadQuerySet(dir);
      for (Query q : allQueries)
      {
        // drop the stale JSON before re-parsing the AQL
        q.setJson(null);
        QueryData queryData = parser.parse(q.getAql(), null);
        queryData.setMaxWidth(queryData.getAlternatives().get(0).size());
        String asJSON = QueryToJSON.serializeQuery(queryData.getAlternatives(), queryData.getMetaData());
        q.setJson(asJSON);
      }
      QuerySetPersistance.writeQuerySet(dir, allQueries);
    }
    else
    {
      // BUG FIX: previously the tool silently did nothing when invoked
      // without arguments; now it reports how it must be called.
      System.err.println("Usage: ReparseFolder <query-set-folder>");
    }
  }
}
| apache-2.0 |
xuhuisheng/lemon | src/main/java/com/mossle/internal/open/persistence/domain/SysCategory.java | 6609 | package com.mossle.internal.open.persistence.domain;
// Generated by Hibernate Tools
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
/**
 * SysCategory — system category entity (hierarchical classification).
 *
 * @author Lingo
 */
@Entity
@Table(name = "SYS_CATEGORY")
public class SysCategory implements java.io.Serializable {
    private static final long serialVersionUID = 0L;

    /** Primary key. */
    private Long id;

    /** Foreign key: parent category. */
    private SysCategory sysCategory;

    /** Code. */
    private String code;

    /** Name. */
    private String name;

    /** Logo (icon). */
    private String logo;

    /** Sort priority. */
    private Integer priority;

    /** Status. */
    private String status;

    /** Remarks / description. */
    private String descn;

    /** Creation time. */
    private Date createTime;

    /** Creator user id. */
    private String userId;

    /** Tenant id. */
    private String tenantId;

    /** Child categories (mapped by {@code sysCategory}). */
    private Set<SysCategory> sysCategories = new HashSet<SysCategory>(0);

    /** SysInfo records belonging to this category. */
    private Set<SysInfo> sysInfos = new HashSet<SysInfo>(0);

    /** SysEntry records belonging to this category. */
    private Set<SysEntry> sysEntries = new HashSet<SysEntry>(0);

    public SysCategory() {
    }

    public SysCategory(Long id) {
        this.id = id;
    }

    public SysCategory(Long id, SysCategory sysCategory, String code,
            String name, String logo, Integer priority, String status,
            String descn, Date createTime, String userId, String tenantId,
            Set<SysCategory> sysCategories, Set<SysInfo> sysInfos,
            Set<SysEntry> sysEntries) {
        this.id = id;
        this.sysCategory = sysCategory;
        this.code = code;
        this.name = name;
        this.logo = logo;
        this.priority = priority;
        this.status = status;
        this.descn = descn;
        this.createTime = createTime;
        this.userId = userId;
        this.tenantId = tenantId;
        this.sysCategories = sysCategories;
        this.sysInfos = sysInfos;
        this.sysEntries = sysEntries;
    }

    /** @return the primary key. */
    @Id
    @Column(name = "ID", unique = true, nullable = false)
    public Long getId() {
        return this.id;
    }

    /**
     * @param id
     *            the primary key.
     */
    public void setId(Long id) {
        this.id = id;
    }

    /** @return the parent category (foreign key). */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "PARENT_ID")
    public SysCategory getSysCategory() {
        return this.sysCategory;
    }

    /**
     * @param sysCategory
     *            the parent category (foreign key).
     */
    public void setSysCategory(SysCategory sysCategory) {
        this.sysCategory = sysCategory;
    }

    /** @return the code. */
    @Column(name = "CODE", length = 50)
    public String getCode() {
        return this.code;
    }

    /**
     * @param code
     *            the code.
     */
    public void setCode(String code) {
        this.code = code;
    }

    /** @return the name. */
    @Column(name = "NAME", length = 50)
    public String getName() {
        return this.name;
    }

    /**
     * @param name
     *            the name.
     */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the logo (icon). */
    @Column(name = "LOGO", length = 200)
    public String getLogo() {
        return this.logo;
    }

    /**
     * @param logo
     *            the logo (icon).
     */
    public void setLogo(String logo) {
        this.logo = logo;
    }

    /** @return the sort priority. */
    @Column(name = "PRIORITY")
    public Integer getPriority() {
        return this.priority;
    }

    /**
     * @param priority
     *            the sort priority.
     */
    public void setPriority(Integer priority) {
        this.priority = priority;
    }

    /** @return the status. */
    @Column(name = "STATUS", length = 50)
    public String getStatus() {
        return this.status;
    }

    /**
     * @param status
     *            the status.
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /** @return the remarks. */
    @Column(name = "DESCN", length = 200)
    public String getDescn() {
        return this.descn;
    }

    /**
     * @param descn
     *            the remarks.
     */
    public void setDescn(String descn) {
        this.descn = descn;
    }

    /** @return the creation time. */
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "CREATE_TIME", length = 26)
    public Date getCreateTime() {
        return this.createTime;
    }

    /**
     * @param createTime
     *            the creation time.
     */
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    /** @return the creator user id. */
    @Column(name = "USER_ID", length = 64)
    public String getUserId() {
        return this.userId;
    }

    /**
     * @param userId
     *            the creator user id.
     */
    public void setUserId(String userId) {
        this.userId = userId;
    }

    /** @return the tenant id. */
    @Column(name = "TENANT_ID", length = 64)
    public String getTenantId() {
        return this.tenantId;
    }

    /**
     * @param tenantId
     *            the tenant id.
     */
    public void setTenantId(String tenantId) {
        this.tenantId = tenantId;
    }

    /** @return the child categories. */
    @OneToMany(fetch = FetchType.LAZY, mappedBy = "sysCategory")
    public Set<SysCategory> getSysCategories() {
        return this.sysCategories;
    }

    /**
     * @param sysCategories
     *            the child categories.
     */
    public void setSysCategories(Set<SysCategory> sysCategories) {
        this.sysCategories = sysCategories;
    }

    /** @return the SysInfo records belonging to this category. */
    @OneToMany(fetch = FetchType.LAZY, mappedBy = "sysCategory")
    public Set<SysInfo> getSysInfos() {
        return this.sysInfos;
    }

    /**
     * @param sysInfos
     *            the SysInfo records belonging to this category.
     */
    public void setSysInfos(Set<SysInfo> sysInfos) {
        this.sysInfos = sysInfos;
    }

    /** @return the SysEntry records belonging to this category. */
    @OneToMany(fetch = FetchType.LAZY, mappedBy = "sysCategory")
    public Set<SysEntry> getSysEntries() {
        return this.sysEntries;
    }

    /**
     * @param sysEntries
     *            the SysEntry records belonging to this category.
     */
    public void setSysEntries(Set<SysEntry> sysEntries) {
        this.sysEntries = sysEntries;
    }
}
| apache-2.0 |
tomaszrykala/yahnac | app/src/main/java/com/malmstein/yahnac/HNewsActivity.java | 2657 | package com.malmstein.yahnac;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import com.malmstein.yahnac.data.connectivity.NetworkChecker;
import com.malmstein.yahnac.views.ColorTweaker;
import com.malmstein.yahnac.views.LollipopUiConfiguration;
import com.malmstein.yahnac.views.LollipopUiHelper;
/**
 * Base activity for the app: wires up the Lollipop UI tweaks, navigation and
 * network connectivity checking, and provides toolbar setup helpers for
 * top-level and sub activities.
 */
public class HNewsActivity extends AppCompatActivity {

    /** Default (absent) title for share dialogs. */
    public static final CharSequence SHARE_DIALOG_DEFAULT_TITLE = null;

    private ColorTweaker colorTweaker;
    private LollipopUiHelper lollipopUiHelper;
    private Navigator navigator;
    private NetworkChecker networkChecker;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        initNetworkChecker();
        colorTweaker = new ColorTweaker();
        lollipopUiHelper = new LollipopUiHelper(this, colorTweaker, getLollipopUiConfiguration());
        lollipopUiHelper.setTaskDescriptionOnLollipopAndLater();
        lollipopUiHelper.setSystemBarsColorOnLollipopAndLater();
        navigator = new Navigator(this);
    }

    private void initNetworkChecker() {
        networkChecker = new NetworkChecker(this);
    }

    /** Subclasses may override to customise the Lollipop UI configuration. */
    protected LollipopUiConfiguration getLollipopUiConfiguration() {
        return LollipopUiConfiguration.NEWS;
    }

    private void setupToolbar() {
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
    }

    /** Configures the toolbar for a top-level screen (menu icon as home indicator). */
    public void setHighLevelActivity() {
        setupToolbar();
        getSupportActionBar().setHomeAsUpIndicator(R.drawable.ic_menu);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }

    /** Configures the toolbar for a sub screen (up affordance, title hidden). */
    public void setupSubActivity() {
        setupToolbar();
        getSupportActionBar().setDisplayUseLogoEnabled(false);
        getSupportActionBar().setShowHideAnimationEnabled(true);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setDisplayShowTitleEnabled(false);
    }

    /** Same as {@link #setupSubActivity()} but keeps the title visible. */
    protected void setupSubActivityWithTitle() {
        setupSubActivity();
        getSupportActionBar().setDisplayShowTitleEnabled(true);
    }

    /**
     * @return the navigator, creating it lazily in case this is called
     *         before {@code onCreate} has initialised the field.
     */
    public Navigator navigate() {
        if (navigator == null) {
            navigator = new Navigator(this);
        }
        return navigator;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == android.R.id.home) {
            // BUG FIX: consume the home/up event after finishing instead of
            // also delegating it to the superclass, which is the documented
            // pattern for a handled menu item and avoids any default
            // up-navigation on an already finishing activity.
            finish();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** @return true when the device currently has network connectivity. */
    public boolean isOnline() {
        return networkChecker.isConnected();
    }
}
| apache-2.0 |
nterry/aws-sdk-java | aws-java-sdk-cloudsearch/src/main/java/com/amazonaws/services/cloudsearchv2/model/IndexFieldStatus.java | 3969 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudsearchv2.model;
import java.io.Serializable;
/**
* <p>
* The value of an <code>IndexField</code> and its current status.
* </p>
*/
public class IndexFieldStatus implements Serializable, Cloneable {
private IndexField options;
private OptionStatus status;
/**
* @param options
*/
public void setOptions(IndexField options) {
this.options = options;
}
/**
* @return
*/
public IndexField getOptions() {
return this.options;
}
/**
* @param options
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public IndexFieldStatus withOptions(IndexField options) {
setOptions(options);
return this;
}
/**
* @param status
*/
public void setStatus(OptionStatus status) {
this.status = status;
}
/**
* @return
*/
public OptionStatus getStatus() {
return this.status;
}
/**
* @param status
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public IndexFieldStatus withStatus(OptionStatus status) {
setStatus(status);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getOptions() != null)
sb.append("Options: " + getOptions() + ",");
if (getStatus() != null)
sb.append("Status: " + getStatus());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof IndexFieldStatus == false)
return false;
IndexFieldStatus other = (IndexFieldStatus) obj;
if (other.getOptions() == null ^ this.getOptions() == null)
return false;
if (other.getOptions() != null
&& other.getOptions().equals(this.getOptions()) == false)
return false;
if (other.getStatus() == null ^ this.getStatus() == null)
return false;
if (other.getStatus() != null
&& other.getStatus().equals(this.getStatus()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getOptions() == null) ? 0 : getOptions().hashCode());
hashCode = prime * hashCode
+ ((getStatus() == null) ? 0 : getStatus().hashCode());
return hashCode;
}
@Override
public IndexFieldStatus clone() {
try {
return (IndexFieldStatus) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!", e);
}
}
}
| apache-2.0 |
niklasteichmann/gradoop | gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/common/functions/ElementHasCandidate.java | 1577 | /*
* Copyright © 2014 - 2018 Leipzig University (Database Research Group)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradoop.flink.model.impl.operators.matching.common.functions;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.java.functions.FunctionAnnotation;
import org.gradoop.flink.model.impl.operators.matching.common.tuples
.IdWithCandidates;
/**
* Filters elements if their candidates contain a given candidate.
*
* Read fields:
*
* f1: candidates
*
* @param <K> key type
*/
@FunctionAnnotation.ReadFields("f1")
public class ElementHasCandidate<K> implements FilterFunction<IdWithCandidates<K>> {
/**
* Candidate to test on
*/
private final int candidate;
/**
* Constructor
*
* @param candidate candidate to test on
*/
public ElementHasCandidate(int candidate) {
this.candidate = candidate;
}
@Override
public boolean filter(IdWithCandidates<K> idWithCandidates) throws Exception {
return idWithCandidates.getCandidates()[candidate];
}
}
| apache-2.0 |
binarywang/weixin-java-tools | weixin-java-pay/src/main/java/com/github/binarywang/wxpay/bean/request/WxPayRefundRequest.java | 5792 | package com.github.binarywang.wxpay.bean.request;
import com.github.binarywang.wxpay.config.WxPayConfig;
import com.github.binarywang.wxpay.constant.WxPayConstants.RefundAccountSource;
import com.github.binarywang.wxpay.exception.WxPayException;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import lombok.*;
import me.chanjar.weixin.common.annotation.Required;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.Arrays;
/**
* <pre>
* 微信支付-申请退款请求参数
* Created by Binary Wang on 2016-10-08.
* </pre>
*
* @author <a href="https://github.com/binarywang">Binary Wang</a>
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Builder(builderMethodName = "newBuilder")
@NoArgsConstructor
@AllArgsConstructor
@XStreamAlias("xml")
public class WxPayRefundRequest extends BaseWxPayRequest {
private static final String[] REFUND_ACCOUNT = new String[]{
RefundAccountSource.RECHARGE_FUNDS, RefundAccountSource.UNSETTLED_FUNDS};
/**
* <pre>
* 字段名:设备号.
* 变量名:device_info
* 是否必填:否
* 类型:String(32)
* 示例值:13467007045764
* 描述:终端设备号
* </pre>
*/
@XStreamAlias("device_info")
private String deviceInfo;
/**
* <pre>
* 字段名:微信订单号.
* 变量名:transaction_id
* 是否必填:跟out_trade_no二选一
* 类型:String(28)
* 示例值:1217752501201400000000000000
* 描述:微信生成的订单号,在支付通知中有返回
* </pre>
*/
@XStreamAlias("transaction_id")
private String transactionId;
/**
* <pre>
* 字段名:商户订单号.
* 变量名:out_trade_no
* 是否必填:跟transaction_id二选一
* 类型:String(32)
* 示例值:1217752501201400000000000000
* 描述:商户侧传给微信的订单号
* </pre>
*/
@XStreamAlias("out_trade_no")
private String outTradeNo;
/**
* <pre>
* 字段名:商户退款单号.
* 变量名:out_refund_no
* 是否必填:是
* 类型:String(32)
* 示例值:1217752501201400000000000000
* 描述:商户系统内部的退款单号,商户系统内部唯一,同一退款单号多次请求只退一笔
* </pre>
*/
@Required
@XStreamAlias("out_refund_no")
private String outRefundNo;
/**
* <pre>
* 字段名:订单金额.
* 变量名:total_fee
* 是否必填:是
* 类型:Int
* 示例值:100
* 描述:订单总金额,单位为分,只能为整数,详见支付金额
* </pre>
*/
@Required
@XStreamAlias("total_fee")
private Integer totalFee;
/**
* <pre>
* 字段名:退款金额.
* 变量名:refund_fee
* 是否必填:是
* 类型:Int
* 示例值:100
* 描述:退款总金额,订单总金额,单位为分,只能为整数,详见支付金额
* </pre>
*/
@Required
@XStreamAlias("refund_fee")
private Integer refundFee;
/**
* <pre>
* 字段名:货币种类.
* 变量名:refund_fee_type
* 是否必填:否
* 类型:String(8)
* 示例值:CNY
* 描述:货币类型,符合ISO 4217标准的三位字母代码,默认人民币:CNY,其他值列表详见货币类型
* </pre>
*/
@XStreamAlias("refund_fee_type")
private String refundFeeType;
/**
* <pre>
* 字段名:操作员.
* 变量名:op_user_id
* 是否必填:是
* 类型:String(32)
* 示例值:1900000109
* 描述:操作员帐号, 默认为商户号
* </pre>
*/
//@Required
@XStreamAlias("op_user_id")
private String opUserId;
/**
* <pre>
* 字段名:退款资金来源.
* 变量名:refund_account
* 是否必填:否
* 类型:String(30)
* 示例值:REFUND_SOURCE_RECHARGE_FUNDS
* 描述:仅针对老资金流商户使用,
* <li>REFUND_SOURCE_UNSETTLED_FUNDS---未结算资金退款(默认使用未结算资金退款),
* <li>REFUND_SOURCE_RECHARGE_FUNDS---可用余额退款
* </pre>
*/
@XStreamAlias("refund_account")
private String refundAccount;
/**
* <pre>
* 字段名:退款原因.
* 变量名:refund_account
* 是否必填:否
* 类型:String(80)
* 示例值:商品已售完
* 描述:若商户传入,会在下发给用户的退款消息中体现退款原因
* </pre>
*/
@XStreamAlias("refund_desc")
private String refundDesc;
/**
* <pre>
* 字段名:退款结果通知url.
* 变量名:notify_url
* 是否必填:否
* 类型:String(256)
* 示例值:https://weixin.qq.com/notify/
* 描述: 异步接收微信支付退款结果通知的回调地址,通知URL必须为外网可访问的url,不允许带参数
如果参数中传了notify_url,则商户平台上配置的回调地址将不会生效。
* </pre>
*/
@XStreamAlias("notify_url")
private String notifyUrl;
@Override
public void checkAndSign(WxPayConfig config) throws WxPayException {
if (StringUtils.isBlank(this.getOpUserId())) {
this.setOpUserId(config.getMchId());
}
super.checkAndSign(config);
}
@Override
protected void checkConstraints() throws WxPayException {
if (StringUtils.isNotBlank(this.getRefundAccount())) {
if (!ArrayUtils.contains(REFUND_ACCOUNT, this.getRefundAccount())) {
throw new WxPayException(
String.format("refund_account目前必须为%s其中之一,实际值:%s", Arrays.toString(REFUND_ACCOUNT), this.getRefundAccount()));
}
}
if (StringUtils.isBlank(this.getOutTradeNo()) && StringUtils.isBlank(this.getTransactionId())) {
throw new WxPayException("transaction_id 和 out_trade_no 不能同时为空,必须提供一个");
}
}
}
| apache-2.0 |
ChinaQuants/Strata | modules/pricer/src/main/java/com/opengamma/strata/pricer/impl/swap/DispatchingSwapPaymentEventPricer.java | 6784 | /**
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.pricer.impl.swap;
import com.opengamma.strata.basics.currency.MultiCurrencyAmount;
import com.opengamma.strata.collect.ArgChecker;
import com.opengamma.strata.market.explain.ExplainMapBuilder;
import com.opengamma.strata.market.sensitivity.PointSensitivityBuilder;
import com.opengamma.strata.pricer.rate.RatesProvider;
import com.opengamma.strata.pricer.swap.SwapPaymentEventPricer;
import com.opengamma.strata.product.swap.FxResetNotionalExchange;
import com.opengamma.strata.product.swap.NotionalExchange;
import com.opengamma.strata.product.swap.SwapPaymentEvent;
/**
* Pricer implementation for payment events using multiple dispatch.
* <p>
* Dispatches the request to the correct implementation.
*/
public class DispatchingSwapPaymentEventPricer
implements SwapPaymentEventPricer<SwapPaymentEvent> {
/**
* Default implementation.
*/
public static final DispatchingSwapPaymentEventPricer DEFAULT = new DispatchingSwapPaymentEventPricer(
DiscountingNotionalExchangePricer.DEFAULT,
DiscountingFxResetNotionalExchangePricer.DEFAULT);
/**
* Pricer for {@link NotionalExchange}.
*/
private final SwapPaymentEventPricer<NotionalExchange> notionalExchangePricer;
/**
* Pricer for {@link FxResetNotionalExchange}.
*/
private final SwapPaymentEventPricer<FxResetNotionalExchange> fxResetNotionalExchangePricer;
/**
* Creates an instance.
*
* @param notionalExchangePricer the pricer for {@link NotionalExchange}
* @param fxResetNotionalExchangePricer the pricer for {@link FxResetNotionalExchange}
*/
public DispatchingSwapPaymentEventPricer(
SwapPaymentEventPricer<NotionalExchange> notionalExchangePricer,
SwapPaymentEventPricer<FxResetNotionalExchange> fxResetNotionalExchangePricer) {
this.notionalExchangePricer = ArgChecker.notNull(notionalExchangePricer, "notionalExchangePricer");
this.fxResetNotionalExchangePricer =
ArgChecker.notNull(fxResetNotionalExchangePricer, "fxResetNotionalExchangePricer");
}
//-------------------------------------------------------------------------
@Override
public double presentValue(SwapPaymentEvent paymentEvent, RatesProvider provider) {
// dispatch by runtime type
if (paymentEvent instanceof NotionalExchange) {
return notionalExchangePricer.presentValue((NotionalExchange) paymentEvent, provider);
} else if (paymentEvent instanceof FxResetNotionalExchange) {
return fxResetNotionalExchangePricer.presentValue((FxResetNotionalExchange) paymentEvent, provider);
} else {
throw new IllegalArgumentException("Unknown PaymentEvent type: " + paymentEvent.getClass().getSimpleName());
}
}
@Override
public PointSensitivityBuilder presentValueSensitivity(SwapPaymentEvent paymentEvent, RatesProvider provider) {
// dispatch by runtime type
if (paymentEvent instanceof NotionalExchange) {
return notionalExchangePricer.presentValueSensitivity((NotionalExchange) paymentEvent, provider);
} else if (paymentEvent instanceof FxResetNotionalExchange) {
return fxResetNotionalExchangePricer.presentValueSensitivity((FxResetNotionalExchange) paymentEvent, provider);
} else {
throw new IllegalArgumentException("Unknown PaymentEvent type: " + paymentEvent.getClass().getSimpleName());
}
}
//-------------------------------------------------------------------------
@Override
public double forecastValue(SwapPaymentEvent paymentEvent, RatesProvider provider) {
// dispatch by runtime type
if (paymentEvent instanceof NotionalExchange) {
return notionalExchangePricer.forecastValue((NotionalExchange) paymentEvent, provider);
} else if (paymentEvent instanceof FxResetNotionalExchange) {
return fxResetNotionalExchangePricer.forecastValue((FxResetNotionalExchange) paymentEvent, provider);
} else {
throw new IllegalArgumentException("Unknown PaymentEvent type: " + paymentEvent.getClass().getSimpleName());
}
}
@Override
public PointSensitivityBuilder forecastValueSensitivity(SwapPaymentEvent paymentEvent, RatesProvider provider) {
// dispatch by runtime type
if (paymentEvent instanceof NotionalExchange) {
return notionalExchangePricer.forecastValueSensitivity((NotionalExchange) paymentEvent, provider);
} else if (paymentEvent instanceof FxResetNotionalExchange) {
return fxResetNotionalExchangePricer.forecastValueSensitivity((FxResetNotionalExchange) paymentEvent, provider);
} else {
throw new IllegalArgumentException("Unknown PaymentEvent type: " + paymentEvent.getClass().getSimpleName());
}
}
//-------------------------------------------------------------------------
@Override
public void explainPresentValue(SwapPaymentEvent paymentEvent, RatesProvider provider, ExplainMapBuilder builder) {
// dispatch by runtime type
if (paymentEvent instanceof NotionalExchange) {
notionalExchangePricer.explainPresentValue((NotionalExchange) paymentEvent, provider, builder);
} else if (paymentEvent instanceof FxResetNotionalExchange) {
fxResetNotionalExchangePricer.explainPresentValue((FxResetNotionalExchange) paymentEvent, provider, builder);
} else {
throw new IllegalArgumentException("Unknown PaymentEvent type: " + paymentEvent.getClass().getSimpleName());
}
}
//-------------------------------------------------------------------------
@Override
public MultiCurrencyAmount currencyExposure(SwapPaymentEvent paymentEvent, RatesProvider provider) {
// dispatch by runtime type
if (paymentEvent instanceof NotionalExchange) {
return notionalExchangePricer.currencyExposure((NotionalExchange) paymentEvent, provider);
} else if (paymentEvent instanceof FxResetNotionalExchange) {
return fxResetNotionalExchangePricer.currencyExposure((FxResetNotionalExchange) paymentEvent, provider);
} else {
throw new IllegalArgumentException("Unknown PaymentEvent type: " + paymentEvent.getClass().getSimpleName());
}
}
@Override
public double currentCash(SwapPaymentEvent paymentEvent, RatesProvider provider) {
// dispatch by runtime type
if (paymentEvent instanceof NotionalExchange) {
return notionalExchangePricer.currentCash((NotionalExchange) paymentEvent, provider);
} else if (paymentEvent instanceof FxResetNotionalExchange) {
return fxResetNotionalExchangePricer.currentCash((FxResetNotionalExchange) paymentEvent, provider);
} else {
throw new IllegalArgumentException("Unknown PaymentEvent type: " + paymentEvent.getClass().getSimpleName());
}
}
}
| apache-2.0 |
gianm/druid | indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskParallelRunTest.java | 18300 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common.task;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
import org.apache.druid.data.input.InputSplit;
import org.apache.druid.data.input.SegmentsSplitHintSpec;
import org.apache.druid.data.input.impl.CsvInputFormat;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.LocalInputSource;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.indexer.TaskState;
import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.indexer.partitions.PartitionsSpec;
import org.apache.druid.indexer.partitions.SingleDimensionPartitionsSpec;
import org.apache.druid.indexing.common.LockGranularity;
import org.apache.druid.indexing.common.RetryPolicyConfig;
import org.apache.druid.indexing.common.RetryPolicyFactory;
import org.apache.druid.indexing.common.task.CompactionTask.Builder;
import org.apache.druid.indexing.common.task.batch.parallel.AbstractParallelIndexSupervisorTaskTest;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexIOConfig;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexIngestionSpec;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexSupervisorTask;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTuningConfig;
import org.apache.druid.indexing.firehose.WindowedSegmentId;
import org.apache.druid.indexing.input.DruidInputSource;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.segment.SegmentUtils;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.timeline.CompactionState;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec;
import org.apache.druid.timeline.partition.NumberedOverwriteShardSpec;
import org.apache.druid.timeline.partition.NumberedShardSpec;
import org.apache.druid.timeline.partition.PartitionIds;
import org.apache.druid.timeline.partition.ShardSpec;
import org.apache.druid.timeline.partition.SingleDimensionShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import javax.annotation.Nullable;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
 * Tests that run {@link CompactionTask} in parallel mode and verify the shard specs and
 * the {@link CompactionState} recorded on the resulting segments. Parameterized on
 * {@link LockGranularity}, so every test runs once with time-chunk locking and once with
 * segment locking.
 */
@RunWith(Parameterized.class)
public class CompactionTaskParallelRunTest extends AbstractParallelIndexSupervisorTaskTest
{
  @Parameterized.Parameters(name = "{0}")
  public static Iterable<Object[]> constructorFeeder()
  {
    return ImmutableList.of(
        new Object[]{LockGranularity.TIME_CHUNK},
        new Object[]{LockGranularity.SEGMENT}
    );
  }

  private static final String DATA_SOURCE = "test";
  private static final RetryPolicyFactory RETRY_POLICY_FACTORY = new RetryPolicyFactory(new RetryPolicyConfig());
  private static final Interval INTERVAL_TO_INDEX = Intervals.of("2014-01-01/2014-01-02");

  private final LockGranularity lockGranularity;
  // Directory holding the CSV input written by setup(); consumed by runIndexTask().
  private File inputDir;

  public CompactionTaskParallelRunTest(LockGranularity lockGranularity)
  {
    this.lockGranularity = lockGranularity;
  }

  /**
   * Writes nine CSV rows (ts,dim,val) spread across three distinct hours of 2014-01-01,
   * so that hour-granular ingestion in {@link #runIndexTask} creates three time chunks.
   */
  @Before
  public void setup() throws IOException
  {
    getObjectMapper().registerSubtypes(ParallelIndexTuningConfig.class, DruidInputSource.class);
    inputDir = temporaryFolder.newFolder();
    final File tmpFile = File.createTempFile("druid", "index", inputDir);
    try (BufferedWriter writer = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
      writer.write("2014-01-01T00:00:10Z,a,1\n");
      writer.write("2014-01-01T00:00:10Z,b,2\n");
      writer.write("2014-01-01T00:00:10Z,c,3\n");
      writer.write("2014-01-01T01:00:20Z,a,1\n");
      writer.write("2014-01-01T01:00:20Z,b,2\n");
      writer.write("2014-01-01T01:00:20Z,c,3\n");
      writer.write("2014-01-01T02:00:30Z,a,1\n");
      writer.write("2014-01-01T02:00:30Z,b,2\n");
      writer.write("2014-01-01T02:00:30Z,c,3\n");
    }
  }

  /** Compaction with the default (dynamic) partitioning stores the matching compaction state. */
  @Test
  public void testRunParallelWithDynamicPartitioningMatchCompactionState()
  {
    runIndexTask(null, true);
    final Builder builder = new Builder(
        DATA_SOURCE,
        getSegmentLoaderFactory(),
        RETRY_POLICY_FACTORY
    );
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(AbstractParallelIndexSupervisorTaskTest.DEFAULT_TUNING_CONFIG_FOR_PARALLEL_INDEXING)
        .build();
    final Set<DataSegment> compactedSegments = runTask(compactionTask);
    final CompactionState expectedState = new CompactionState(
        new DynamicPartitionsSpec(null, Long.MAX_VALUE),
        compactionTask.getTuningConfig().getIndexSpec().asMap(getObjectMapper())
    );
    for (DataSegment segment : compactedSegments) {
      // Segment lock overwrites in place, so it produces NumberedOverwriteShardSpec instead.
      Assert.assertSame(
          lockGranularity == LockGranularity.TIME_CHUNK ? NumberedShardSpec.class : NumberedOverwriteShardSpec.class,
          segment.getShardSpec().getClass()
      );
      // Expect compaction state to exist as store compaction state by default
      Assert.assertEquals(expectedState, segment.getLastCompactionState());
    }
  }

  /** Compaction with hash partitioning stores the matching compaction state. */
  @Test
  public void testRunParallelWithHashPartitioningMatchCompactionState()
  {
    // Hash partitioning is not supported with segment lock yet
    Assume.assumeFalse(lockGranularity == LockGranularity.SEGMENT);
    runIndexTask(null, true);
    final Builder builder = new Builder(
        DATA_SOURCE,
        getSegmentLoaderFactory(),
        RETRY_POLICY_FACTORY
    );
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(newTuningConfig(new HashedPartitionsSpec(null, 3, null), 2, true))
        .build();
    final Set<DataSegment> compactedSegments = runTask(compactionTask);
    final CompactionState expectedState = new CompactionState(
        new HashedPartitionsSpec(null, 3, null),
        compactionTask.getTuningConfig().getIndexSpec().asMap(getObjectMapper())
    );
    for (DataSegment segment : compactedSegments) {
      // Expect compaction state to exist as store compaction state by default
      Assert.assertSame(HashBasedNumberedShardSpec.class, segment.getShardSpec().getClass());
      Assert.assertEquals(expectedState, segment.getLastCompactionState());
    }
  }

  /** Compaction with range partitioning (2 max subtasks) stores the matching compaction state. */
  @Test
  public void testRunParallelWithRangePartitioning()
  {
    // Range partitioning is not supported with segment lock yet
    Assume.assumeFalse(lockGranularity == LockGranularity.SEGMENT);
    runIndexTask(null, true);
    final Builder builder = new Builder(
        DATA_SOURCE,
        getSegmentLoaderFactory(),
        RETRY_POLICY_FACTORY
    );
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(newTuningConfig(new SingleDimensionPartitionsSpec(7, null, "dim", false), 2, true))
        .build();
    final Set<DataSegment> compactedSegments = runTask(compactionTask);
    final CompactionState expectedState = new CompactionState(
        new SingleDimensionPartitionsSpec(7, null, "dim", false),
        compactionTask.getTuningConfig().getIndexSpec().asMap(getObjectMapper())
    );
    for (DataSegment segment : compactedSegments) {
      // Expect compaction state to exist as store compaction state by default
      Assert.assertSame(SingleDimensionShardSpec.class, segment.getShardSpec().getClass());
      Assert.assertEquals(expectedState, segment.getLastCompactionState());
    }
  }

  /** Same as above but with a single subtask, exercising the non-parallel code path. */
  @Test
  public void testRunParallelWithRangePartitioningWithSingleTask()
  {
    // Range partitioning is not supported with segment lock yet
    Assume.assumeFalse(lockGranularity == LockGranularity.SEGMENT);
    runIndexTask(null, true);
    final Builder builder = new Builder(
        DATA_SOURCE,
        getSegmentLoaderFactory(),
        RETRY_POLICY_FACTORY
    );
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(newTuningConfig(new SingleDimensionPartitionsSpec(7, null, "dim", false), 1, true))
        .build();
    final Set<DataSegment> compactedSegments = runTask(compactionTask);
    final CompactionState expectedState = new CompactionState(
        new SingleDimensionPartitionsSpec(7, null, "dim", false),
        compactionTask.getTuningConfig().getIndexSpec().asMap(getObjectMapper())
    );
    for (DataSegment segment : compactedSegments) {
      // Expect compaction state to exist as store compaction state by default
      Assert.assertSame(SingleDimensionShardSpec.class, segment.getShardSpec().getClass());
      Assert.assertEquals(expectedState, segment.getLastCompactionState());
    }
  }

  /** No compaction state is stored when the task context explicitly disables it. */
  @Test
  public void testRunCompactionStateNotStoreIfContextSetToFalse()
  {
    runIndexTask(null, true);
    final Builder builder = new Builder(
        DATA_SOURCE,
        getSegmentLoaderFactory(),
        RETRY_POLICY_FACTORY
    );
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(AbstractParallelIndexSupervisorTaskTest.DEFAULT_TUNING_CONFIG_FOR_PARALLEL_INDEXING)
        .context(ImmutableMap.of(Tasks.STORE_COMPACTION_STATE_KEY, false))
        .build();
    final Set<DataSegment> compactedSegments = runTask(compactionTask);
    for (DataSegment segment : compactedSegments) {
      Assert.assertSame(
          lockGranularity == LockGranularity.TIME_CHUNK ? NumberedShardSpec.class : NumberedOverwriteShardSpec.class,
          segment.getShardSpec().getClass()
      );
      // Expect no compaction state because storing it was disabled via the task context above
      Assert.assertEquals(null, segment.getLastCompactionState());
    }
  }

  /** Compacting a mix of hash-partitioned and dynamically partitioned segments into one segment per hour. */
  @Test
  public void testCompactHashAndDynamicPartitionedSegments()
  {
    runIndexTask(new HashedPartitionsSpec(null, 2, null), false);
    runIndexTask(null, true);
    final Builder builder = new Builder(
        DATA_SOURCE,
        getSegmentLoaderFactory(),
        RETRY_POLICY_FACTORY
    );
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(AbstractParallelIndexSupervisorTaskTest.DEFAULT_TUNING_CONFIG_FOR_PARALLEL_INDEXING)
        .build();
    final Map<Interval, List<DataSegment>> intervalToSegments = SegmentUtils.groupSegmentsByInterval(
        runTask(compactionTask)
    );
    // The input spans three hours, so compaction should produce exactly three hourly intervals.
    Assert.assertEquals(3, intervalToSegments.size());
    Assert.assertEquals(
        ImmutableSet.of(
            Intervals.of("2014-01-01T00/PT1H"),
            Intervals.of("2014-01-01T01/PT1H"),
            Intervals.of("2014-01-01T02/PT1H")
        ),
        intervalToSegments.keySet()
    );
    for (Entry<Interval, List<DataSegment>> entry : intervalToSegments.entrySet()) {
      final List<DataSegment> segmentsInInterval = entry.getValue();
      Assert.assertEquals(1, segmentsInInterval.size());
      final ShardSpec shardSpec = segmentsInInterval.get(0).getShardSpec();
      if (lockGranularity == LockGranularity.TIME_CHUNK) {
        Assert.assertSame(NumberedShardSpec.class, shardSpec.getClass());
        final NumberedShardSpec numberedShardSpec = (NumberedShardSpec) shardSpec;
        Assert.assertEquals(0, numberedShardSpec.getPartitionNum());
        Assert.assertEquals(1, numberedShardSpec.getNumCorePartitions());
      } else {
        // Segment lock overwrites in place, so the partition id starts at the non-root-generation range.
        Assert.assertSame(NumberedOverwriteShardSpec.class, shardSpec.getClass());
        final NumberedOverwriteShardSpec numberedShardSpec = (NumberedOverwriteShardSpec) shardSpec;
        Assert.assertEquals(PartitionIds.NON_ROOT_GEN_START_PARTITION_ID, numberedShardSpec.getPartitionNum());
        Assert.assertEquals(1, numberedShardSpec.getAtomicUpdateGroupSize());
      }
    }
  }

  /** Compacting a mix of range-partitioned and dynamically partitioned segments into one segment per hour. */
  @Test
  public void testCompactRangeAndDynamicPartitionedSegments()
  {
    runIndexTask(new SingleDimensionPartitionsSpec(2, null, "dim", false), false);
    runIndexTask(null, true);
    final Builder builder = new Builder(
        DATA_SOURCE,
        getSegmentLoaderFactory(),
        RETRY_POLICY_FACTORY
    );
    final CompactionTask compactionTask = builder
        .inputSpec(new CompactionIntervalSpec(INTERVAL_TO_INDEX, null))
        .tuningConfig(AbstractParallelIndexSupervisorTaskTest.DEFAULT_TUNING_CONFIG_FOR_PARALLEL_INDEXING)
        .build();
    final Map<Interval, List<DataSegment>> intervalToSegments = SegmentUtils.groupSegmentsByInterval(
        runTask(compactionTask)
    );
    Assert.assertEquals(3, intervalToSegments.size());
    Assert.assertEquals(
        ImmutableSet.of(
            Intervals.of("2014-01-01T00/PT1H"),
            Intervals.of("2014-01-01T01/PT1H"),
            Intervals.of("2014-01-01T02/PT1H")
        ),
        intervalToSegments.keySet()
    );
    for (Entry<Interval, List<DataSegment>> entry : intervalToSegments.entrySet()) {
      final List<DataSegment> segmentsInInterval = entry.getValue();
      Assert.assertEquals(1, segmentsInInterval.size());
      final ShardSpec shardSpec = segmentsInInterval.get(0).getShardSpec();
      if (lockGranularity == LockGranularity.TIME_CHUNK) {
        Assert.assertSame(NumberedShardSpec.class, shardSpec.getClass());
        final NumberedShardSpec numberedShardSpec = (NumberedShardSpec) shardSpec;
        Assert.assertEquals(0, numberedShardSpec.getPartitionNum());
        Assert.assertEquals(1, numberedShardSpec.getNumCorePartitions());
      } else {
        // Segment lock overwrites in place, so the partition id starts at the non-root-generation range.
        Assert.assertSame(NumberedOverwriteShardSpec.class, shardSpec.getClass());
        final NumberedOverwriteShardSpec numberedShardSpec = (NumberedOverwriteShardSpec) shardSpec;
        Assert.assertEquals(PartitionIds.NON_ROOT_GEN_START_PARTITION_ID, numberedShardSpec.getPartitionNum());
        Assert.assertEquals(1, numberedShardSpec.getAtomicUpdateGroupSize());
      }
    }
  }

  /**
   * With a split hint of one segment per split, DruidInputSource should create exactly one
   * split per used segment, covering the same segment ids the coordinator reports.
   */
  @Test
  public void testDruidInputSourceCreateSplitsWithIndividualSplits()
  {
    runIndexTask(null, true);
    List<InputSplit<List<WindowedSegmentId>>> splits = Lists.newArrayList(
        DruidInputSource.createSplits(
            getCoordinatorClient(),
            RETRY_POLICY_FACTORY,
            DATA_SOURCE,
            INTERVAL_TO_INDEX,
            new SegmentsSplitHintSpec(null, 1)
        )
    );
    List<DataSegment> segments = new ArrayList<>(
        getCoordinatorClient().fetchUsedSegmentsInDataSourceForIntervals(
            DATA_SOURCE,
            ImmutableList.of(INTERVAL_TO_INDEX)
        )
    );
    Set<String> segmentIdsFromSplits = new HashSet<>();
    Set<String> segmentIdsFromCoordinator = new HashSet<>();
    Assert.assertEquals(segments.size(), splits.size());
    for (int i = 0; i < segments.size(); i++) {
      segmentIdsFromCoordinator.add(segments.get(i).getId().toString());
      segmentIdsFromSplits.add(splits.get(i).get().get(0).getSegmentId());
    }
    Assert.assertEquals(segmentIdsFromCoordinator, segmentIdsFromSplits);
  }

  /**
   * Ingests the CSV input written by {@link #setup()} into {@link #DATA_SOURCE} with hour
   * segment granularity using a parallel index supervisor task.
   *
   * @param partitionsSpec   partitioning to use, or null for the tuning config default
   * @param appendToExisting whether to append to existing segments instead of overwriting
   */
  private void runIndexTask(@Nullable PartitionsSpec partitionsSpec, boolean appendToExisting)
  {
    ParallelIndexIOConfig ioConfig = new ParallelIndexIOConfig(
        null,
        new LocalInputSource(inputDir, "druid*"),
        new CsvInputFormat(
            Arrays.asList("ts", "dim", "val"),
            "|",
            null,
            false,
            0
        ),
        appendToExisting
    );
    ParallelIndexTuningConfig tuningConfig = newTuningConfig(partitionsSpec, 2, !appendToExisting);
    ParallelIndexSupervisorTask indexTask = new ParallelIndexSupervisorTask(
        null,
        null,
        null,
        new ParallelIndexIngestionSpec(
            new DataSchema(
                DATA_SOURCE,
                new TimestampSpec("ts", "auto", null),
                new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("ts", "dim"))),
                new AggregatorFactory[]{new LongSumAggregatorFactory("val", "val")},
                new UniformGranularitySpec(
                    Granularities.HOUR,
                    Granularities.MINUTE,
                    ImmutableList.of(INTERVAL_TO_INDEX)
                ),
                null
            ),
            ioConfig,
            tuningConfig
        ),
        null
    );
    runTask(indexTask);
  }

  /**
   * Runs the given task with this test's lock granularity, asserts it succeeds, and
   * returns the segments it published.
   */
  private Set<DataSegment> runTask(Task task)
  {
    task.addToContext(Tasks.FORCE_TIME_CHUNK_LOCK_KEY, lockGranularity == LockGranularity.TIME_CHUNK);
    Assert.assertEquals(TaskState.SUCCESS, getIndexingServiceClient().runAndWait(task).getStatusCode());
    return getIndexingServiceClient().getPublishedSegments(task);
  }
}
| apache-2.0 |
mariusznet/dick | dick-web/src/main/java/com/dickthedeployer/dick/web/controller/JobBuildController.java | 2149 | /*
* Copyright dick the deployer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dickthedeployer.dick.web.controller;
import com.dickthedeployer.dick.web.exception.NotFoundException;
import com.dickthedeployer.dick.web.model.LogChunkModel;
import com.dickthedeployer.dick.web.model.OutputModel;
import com.dickthedeployer.dick.web.service.JobBuildService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.Date;
import java.util.List;
/**
* @author mariusz
*/
/**
 * REST endpoints exposing log chunks and accumulated output of a single job build.
 */
@RestController
@RequestMapping("/api/job-builds")
public class JobBuildController {

    @Autowired
    JobBuildService jobBuildService;

    /**
     * Returns the log chunks of the given job build. When the optional
     * {@code creationDate} request parameter (epoch milliseconds) is present,
     * it is forwarded as a {@link Date} lower bound; otherwise {@code null}
     * is passed through to the service.
     *
     * @throws NotFoundException if no job build exists for {@code id}
     */
    @RequestMapping(value = "/{id}/chunks", method = RequestMethod.GET)
    List<LogChunkModel> getLogChunks(@PathVariable Long id,
            @RequestParam(required = false, name = "creationDate")
            Long creationDate) throws NotFoundException {
        Date since = null;
        if (creationDate != null) {
            since = new Date(creationDate);
        }
        return jobBuildService.getLogChunks(id, since);
    }

    /**
     * Returns the output of the given job build.
     *
     * @throws NotFoundException if no job build exists for {@code id}
     */
    @RequestMapping(value = "/{id}/output", method = RequestMethod.GET)
    OutputModel getOutput(@PathVariable Long id) throws NotFoundException {
        return jobBuildService.getOutput(id);
    }
}
| apache-2.0 |
apache/incubator-taverna-workbench | taverna-edits-impl/src/main/java/org/apache/taverna/workbench/edits/impl/menu/UndoMenuSection.java | 1456 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.taverna.workbench.edits.impl.menu;
import java.net.URI;
import org.apache.taverna.ui.menu.AbstractMenuSection;
/**
 * A section of the Edit menu that contains {@link UndoMenuAction undo} and
 * {@link RedoMenuAction redo}.
 *
 * @author Stian Soiland-Reyes
 */
public class UndoMenuSection extends AbstractMenuSection {
	/** URI identifying this undo/redo section within the menu tree. */
	public static final URI UNDO_SECTION_URI = URI
			.create("http://taverna.sf.net/2008/t2workbench/edits#undoSection");
	/** URI of the parent Edit menu this section is registered under. */
	public static final URI EDIT_MENU_URI = URI
			.create("http://taverna.sf.net/2008/t2workbench/menu#edit");
	public UndoMenuSection() {
		// 10 is the positioning hint passed to AbstractMenuSection
		// (assumed to control ordering within the Edit menu — confirm there).
		super(EDIT_MENU_URI, 10, UNDO_SECTION_URI);
	}
}
| apache-2.0 |
caskdata/tigon | tigon-sql/src/main/java/co/cask/tigon/sql/internal/DefaultInputFlowletSpecification.java | 1951 | /*
* Copyright © 2014 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.tigon.sql.internal;
import co.cask.tigon.sql.flowlet.InputFlowletSpecification;
import co.cask.tigon.sql.flowlet.InputStreamFormat;
import co.cask.tigon.sql.flowlet.StreamSchema;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
/**
 * Immutable default implementation of {@link InputFlowletSpecification}.
 * Holds the flowlet name and description together with snapshots of the
 * input schemas and SQL queries supplied at construction time.
 */
public class DefaultInputFlowletSpecification implements InputFlowletSpecification {

  private final String flowletName;
  private final String flowletDescription;
  private final Map<String, Map.Entry<InputStreamFormat, StreamSchema>> schemaByInput;
  private final Map<String, String> queryByName;

  public DefaultInputFlowletSpecification(String name, String description,
                                          Map<String, Map.Entry<InputStreamFormat, StreamSchema>> inputSchemas,
                                          Map<String, String> sql) {
    this.flowletName = name;
    this.flowletDescription = description;
    // Copy into immutable maps so later mutation of the caller's maps
    // cannot change this specification.
    this.schemaByInput = ImmutableMap.copyOf(inputSchemas);
    this.queryByName = ImmutableMap.copyOf(sql);
  }

  @Override
  public String getName() {
    return flowletName;
  }

  @Override
  public String getDescription() {
    return flowletDescription;
  }

  @Override
  public Map<String, Map.Entry<InputStreamFormat, StreamSchema>> getInputSchemas() {
    return schemaByInput;
  }

  @Override
  public Map<String, String> getQuery() {
    return queryByName;
  }
}
| apache-2.0 |
trwc/trwc-android | app/src/main/java/com/github/mobile/ui/gist/GistFileFragment.java | 6393 | /*
* Copyright 2012 GitHub Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.mobile.ui.gist;
import static com.github.mobile.Intents.EXTRA_GIST_FILE;
import static com.github.mobile.Intents.EXTRA_GIST_ID;
import static com.github.mobile.util.PreferenceUtils.WRAP;
import android.accounts.Account;
import android.app.Activity;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.webkit.WebView;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import com.actionbarsherlock.view.MenuItem;
import com.github.mobile.R;
import com.github.mobile.accounts.AuthenticatedUserTask;
import com.github.mobile.core.gist.GistStore;
import com.github.mobile.ui.DialogFragment;
import com.github.mobile.util.PreferenceUtils;
import com.github.mobile.util.SourceEditor;
import com.github.mobile.util.ToastUtils;
import com.google.inject.Inject;
import java.io.IOException;
import java.util.Map;
import org.eclipse.egit.github.core.Gist;
import org.eclipse.egit.github.core.GistFile;
/**
 * Fragment to display the content of a file in a Gist.
 * <p>
 * Renders the file source in a {@link WebView} via {@link SourceEditor} and keeps the
 * line-wrapping preference in sync with the shared code preferences.
 */
public class GistFileFragment extends DialogFragment implements
        OnSharedPreferenceChangeListener {

    /** Web view the source is rendered into; bound in onViewCreated. */
    private WebView webView;

    /** Id of the gist, taken from the launching intent in onAttach. */
    private String gistId;

    /** File to display, taken from the fragment arguments in onCreate. */
    private GistFile file;

    private Gist gist;

    @Inject
    private GistStore store;

    private SourceEditor editor;

    /** Shared preferences holding the code-view settings (e.g. wrapping). */
    private SharedPreferences codePrefs;

    private MenuItem wrapItem;

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        gistId = getStringExtra(EXTRA_GIST_ID);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        file = (GistFile) getArguments().get(EXTRA_GIST_FILE);
        gist = store.getGist(gistId);
        // Fall back to a stub gist so later code can rely on a non-null instance.
        if (gist == null)
            gist = new Gist().setId(gistId);
        codePrefs = PreferenceUtils.getCodePreferences(getActivity());
        codePrefs.registerOnSharedPreferenceChangeListener(this);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Unregister to avoid callbacks (and a leaked listener) after destruction.
        codePrefs.unregisterOnSharedPreferenceChangeListener(this);
    }

    // NOTE(review): no-op override (and missing @Override); adds nothing beyond
    // the superclass behavior and could likely be removed.
    public void onDestroyView() {
        super.onDestroyView();
    }

    @Override
    public void onCreateOptionsMenu(Menu optionsMenu, MenuInflater inflater) {
        inflater.inflate(R.menu.code_view, optionsMenu);
        wrapItem = optionsMenu.findItem(R.id.m_wrap);
        updateWrapItem();
    }

    /** Sync the wrap menu item's title with the persisted wrapping preference. */
    private void updateWrapItem() {
        if (wrapItem != null)
            if (codePrefs.getBoolean(WRAP, false))
                wrapItem.setTitle(R.string.disable_wrapping);
            else
                wrapItem.setTitle(R.string.enable_wrapping);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.m_wrap:
            // Toggle wrapping in the editor, update the menu title, and persist the choice.
            if (editor.getWrap()) {
                item.setTitle(R.string.enable_wrapping);
                editor.setWrap(false);
            } else {
                item.setTitle(R.string.disable_wrapping);
                editor.setWrap(true);
            }
            PreferenceUtils.save(codePrefs.edit().putBoolean(WRAP,
                    editor.getWrap()));
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Refresh the gist in the background and, on success, cache the loaded file in the
     * fragment arguments and render its content.
     */
    private void loadSource() {
        new AuthenticatedUserTask<GistFile>(getActivity()) {

            @Override
            public GistFile run(Account account) throws Exception {
                gist = store.refreshGist(gistId);
                Map<String, GistFile> files = gist.getFiles();
                if (files == null)
                    throw new IOException();
                GistFile loadedFile = files.get(file.getFilename());
                if (loadedFile == null)
                    throw new IOException();
                return loadedFile;
            }

            @Override
            protected void onException(Exception e) throws RuntimeException {
                super.onException(e);
                ToastUtils.show(getActivity(), e, R.string.error_gist_file_load);
            }

            @Override
            protected void onSuccess(GistFile loadedFile) throws Exception {
                super.onSuccess(loadedFile);

                if (loadedFile == null)
                    return;

                file = loadedFile;
                getArguments().putSerializable(EXTRA_GIST_FILE, file);
                if (file.getContent() != null)
                    showSource();
            }
        }.execute();
    }

    /** Render the current file's content in the source editor. */
    private void showSource() {
        editor.setSource(file.getFilename(), file.getContent(), false);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        return inflater.inflate(R.layout.gist_file_view, null);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        webView = finder.find(R.id.wv_code);
        editor = new SourceEditor(webView);
        editor.setWrap(PreferenceUtils.getCodePreferences(getActivity())
                .getBoolean(WRAP, false));
        // Show cached content immediately if available; otherwise fetch it.
        if (file.getContent() != null)
            showSource();
        else
            loadSource();
    }

    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
            String key) {
        if (WRAP.equals(key)) {
            updateWrapItem();
            editor.setWrap(sharedPreferences.getBoolean(WRAP, false));
        }
    }
}
| apache-2.0 |
LiuJianan/Graduate-Graph | src/test/com/chinamobile/bcbsp/examples/StringEdgeTest.java | 989 | package com.chinamobile.bcbsp.examples;
import static org.junit.Assert.*;
import org.junit.Test;
import com.chinamobile.bcbsp.examples.StringEdge;
/**
 * Unit tests for {@link StringEdge}.
 */
public class StringEdgeTest {

    /** hashCode() is expected to equal the integer value of the vertex id. */
    @Test
    public void testHashCode() {
        StringEdge se = new StringEdge();
        se.setVertexID("123");
        assertEquals(123, se.hashCode());
    }

    /** fromString("id:value") should populate vertex id and edge value. */
    @Test
    public void testFromString() throws Exception {
        StringEdge se = new StringEdge();
        se.fromString("2:1");
        assertEquals("2", se.getVertexID());
        assertEquals("1", se.getEdgeValue());
    }

    /** intoString() should serialize the edge back to "id:value" form. */
    @Test
    public void testIntoString() {
        StringEdge se = new StringEdge();
        se.setVertexID("1");
        se.setEdgeValue("1");
        assertEquals("1:1", se.intoString());
    }

    /** Edges with the same vertex id should be equal. */
    @Test
    public void testEqualsObject() {
        StringEdge se = new StringEdge();
        StringEdge se2 = new StringEdge();
        se.setVertexID("1");
        se2.setVertexID("1");
        // assertTrue is the idiomatic form; assertEquals(true, b) obscures intent.
        assertTrue(se.equals(se2));
    }
}
| apache-2.0 |
trejkaz/derby | java/drda/org/apache/derby/impl/drda/EXTDTAInputStream.java | 10032 | /*
Derby - Class org.apache.derby.impl.drda.DRDAStatement
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.drda;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.derby.iapi.jdbc.EngineResultSet;
import org.apache.derby.iapi.reference.DRDAConstants;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.impl.jdbc.Util;
/**
*
* EXTDTAObjectHolder provides Externalized Large Object representation that
* does not hold locks until the end of the transaction (DERBY-255)
*
* It serves as a holder for lob data and is only valid as long as the original
* result set from which it came is on the same row.
*
*
*/
class EXTDTAInputStream extends InputStream {
private InputStream binaryInputStream = null;
/** DRDA Type of column/parameter */
int ndrdaType;
//
// Used when this class wraps a ResultSet
//
/** ResultSet that contains the stream*/
EngineResultSet rs;
/** Column index starting with 1 */
int columnNumber;
//
// Used when this class wraps a CallableStatement
//
private Clob _clob;
private Blob _blob;
private EXTDTAInputStream(ResultSet rs,
int columnNumber,
int ndrdaType)
{
this.rs = (EngineResultSet) rs;
this.columnNumber = columnNumber;
this.ndrdaType = ndrdaType;
}
private EXTDTAInputStream(Clob clob, int ndrdaType )
{
_clob = clob;
this.ndrdaType = ndrdaType;
}
private EXTDTAInputStream(Blob blob, int ndrdaType )
{
_blob = blob;
this.ndrdaType = ndrdaType;
}
/**
* Create a new EXTDTAInputStream. Before read the stream must be
* initialized by the user with {@link #initInputStream()}
*
* @see DDMWriter#writeScalarStream
* @see #initInputStream()
*
* @param rs
* result set from which to retrieve the lob
* @param column
* column number
* @param drdaType
* FD:OCA type of object one of
* DRDAConstants.DRDA_TYPE_NLOBBYTES
* DRDAConstants.DRDA_TYPE_LOBBYTES
* DRDAConstants.DRDA_TYPE_NLOBCMIXED
* DRDAConstants.DRDA_TYPE_LOBCMIXED
*
* @return null if the value is null or a new EXTDTAInputStream corresponding to
* rs.getBinaryStream(column) value and associated length
*
* @throws SQLException
*/
public static EXTDTAInputStream getEXTDTAStream(ResultSet rs, int column, int drdaType)
{
int ndrdaType = drdaType | 1; //nullable drdaType
return new EXTDTAInputStream(rs,
column,
ndrdaType);
}
/**
* Create a new EXTDTAInputStream from a CallableStatement.
*
*
* @param cs
* CallableStatement from which to retrieve the lob
* @param column
* column number
* @param drdaType
* FD:OCA type of object one of
* DRDAConstants.DRDA_TYPE_NLOBBYTES
* DRDAConstants.DRDA_TYPE_LOBBYTES
* DRDAConstants.DRDA_TYPE_NLOBCMIXED
* DRDAConstants.DRDA_TYPE_LOBCMIXED
*/
public static EXTDTAInputStream getEXTDTAStream(CallableStatement cs, int column, int drdaType)
throws SQLException
{
int ndrdaType = drdaType | 1; //nullable drdaType
switch ( ndrdaType )
{
case DRDAConstants.DRDA_TYPE_NLOBBYTES:
Blob blob = cs.getBlob( column );
if ( blob == null ) { return null; }
return new EXTDTAInputStream( blob, ndrdaType );
case DRDAConstants.DRDA_TYPE_NLOBCMIXED:
Clob clob = cs.getClob( column );
if ( clob == null ) { return null; }
return new EXTDTAInputStream( clob, ndrdaType );
default:
badDRDAType( ndrdaType );
return null;
}
}
/**
* Requires {@link #initInputStream()} be called before we can read from the stream
*
* @see java.io.InputStream#read()
*/
public int read() throws IOException {
return binaryInputStream.read();
}
/**
*
*
* @see java.io.InputStream#available()
*/
public int available() throws IOException {
return binaryInputStream.available();
}
/**
*
*
* @see java.io.InputStream#close()
*/
public void close() throws IOException {
if (binaryInputStream != null)
binaryInputStream.close();
binaryInputStream = null;
}
/**
*
*
* @see java.lang.Object#equals(java.lang.Object)
*/
public boolean equals(Object arg0) {
return binaryInputStream.equals(arg0);
}
/**
*
*
* @see java.lang.Object#hashCode()
*/
public int hashCode() {
return binaryInputStream.hashCode();
}
/**
*
*
* @see java.io.InputStream#mark(int)
*/
public void mark(int arg0) {
binaryInputStream.mark(arg0);
}
/**
*
*
* @see java.io.InputStream#markSupported()
*/
public boolean markSupported() {
return binaryInputStream.markSupported();
}
/**
*
*
* @see java.io.InputStream#read(byte[])
*/
public int read(byte[] arg0) throws IOException {
return binaryInputStream.read(arg0);
}
/**
*
*
* @see java.io.InputStream#read(byte[], int, int)
*/
public int read(byte[] arg0, int arg1, int arg2) throws IOException {
return binaryInputStream.read(arg0, arg1, arg2);
}
/**
*
*
* @see java.io.InputStream#reset()
*/
public void reset() throws IOException {
binaryInputStream.reset();
}
/**
*
*
* @see java.io.InputStream#skip(long)
*/
public long skip(long arg0) throws IOException {
if (arg0 < 0L) {
return 0L;
}
return binaryInputStream.skip(arg0);
}
protected boolean isEmptyStream() throws SQLException
{
return (length() == 0);
}
private long length() throws SQLException
{
if ( rs != null ) { return rs.getLength(columnNumber); }
else if ( _clob != null ) { return _clob.length(); }
else { return _blob.length(); }
}
/**
* This method takes information of ResultSet and
* initializes the binaryInputStream variable of this object with not empty stream
* by calling getBinaryStream or getCharacterStream() as appropriate.
* The Reader returned from getCharacterStream() will be encoded in binarystream.
*
*
*/
public void initInputStream()
throws SQLException
{
InputStream is = null;
Reader r = null;
// BLOBS
if (ndrdaType == DRDAConstants.DRDA_TYPE_NLOBBYTES)
{
is = getBinaryStream();
if (is == null) { return; }
}
// CLOBS
else if (ndrdaType == DRDAConstants.DRDA_TYPE_NLOBCMIXED)
{
try {
r = getCharacterStream();
if(r == null){
return;
}
is = new ReEncodedInputStream(r);
}catch (java.io.UnsupportedEncodingException e) {
throw Util.javaException(e);
}catch (IOException e){
throw Util.javaException(e);
}
}
else { badDRDAType( ndrdaType ); }
if (! is.markSupported()) {
is = new BufferedInputStream(is);
}
this.binaryInputStream=is;
}
    /** Returns the binary stream from the ResultSet if present, else from the Blob. */
    private InputStream getBinaryStream() throws SQLException
    {
        if ( rs != null ) { return rs.getBinaryStream(this.columnNumber); }
        else { return _blob.getBinaryStream(); }
    }
    /** Returns the character stream from the ResultSet if present, else from the Clob. */
    private Reader getCharacterStream() throws SQLException
    {
        if ( rs != null ) { return rs.getCharacterStream(this.columnNumber); }
        else { return _clob.getCharacterStream(); }
    }
    /**
     * Raises a sanity-check assertion for an unexpected DRDA type.
     * This is a no-op in production (non-debug) builds.
     *
     * @param drdaType the offending DRDA type code
     */
    private static void badDRDAType( int drdaType )
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.THROWASSERT("NDRDAType: " + drdaType +
                                      " not valid EXTDTA object type");
        }
    }
protected void finalize() throws Throwable{
close();
}
/**
* Is the value null? Null status is obtained from the underlying
* EngineResultSet or LOB, so that it can be determined before the stream
* is retrieved.
*
* @return true if this value is null
*
*/
public boolean isNull() throws SQLException
{
if ( rs != null ) { return rs.isNull(columnNumber); }
else { return (_clob == null) && (_blob == null); }
}
}
| apache-2.0 |
galpha/gradoop | gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/common/statistics/GraphStatistics.java | 13047 | /*
* Copyright © 2014 - 2021 Leipzig University (Database Research Group)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradoop.flink.model.impl.operators.matching.common.statistics;
import org.gradoop.flink.model.impl.epgm.LogicalGraph;
import java.util.Collections;
import java.util.Map;
/**
* Represents several statistics related to a {@link LogicalGraph}.
*/
public class GraphStatistics {
/**
* Number of vertices
*/
private long vertexCount;
/**
* Number of edges
*/
private long edgeCount;
/**
* Frequency distribution for vertex labels
*/
private Map<String, Long> vertexCountByLabel;
/**
* Frequency distribution for edge labels
*/
private Map<String, Long> edgeCountByLabel;
/**
* Number of edges with a specific source vertex label and edge label.
*/
private Map<String, Map<String, Long>> edgeCountBySourceVertexAndEdgeLabel;
/**
* Number of edges with a specific target vertex label and edge label.
*/
private Map<String, Map<String, Long>> edgeCountByTargetVertexAndEdgeLabel;
/**
* Number of distinct source vertices, i.e. the number of distinct vertices that have at least
* one outgoing edge.
*/
private long distinctSourceVertexCount;
/**
* Number of distinct target vertices, i.e. the number of distinct vertices that have at least
* one incoming edge.
*/
private long distinctTargetVertexCount;
/**
* Number of distinct source vertices by edge label, i.e. the number of distinct vertices that
* have at least one outgoing edge with the specified label.
*/
private Map<String, Long> distinctSourceVertexCountByEdgeLabel;
/**
* Number of distinct target vertices by edge label, i.e. the number of distinct vertices that
* have at least one incoming edge with the specified label.
*/
private Map<String, Long> distinctTargetVertexCountByEdgeLabel;
/**
* Number of distinct edge property values of a given label - property name pair
*/
private Map<String, Map<String, Long>> distinctEdgePropertiesByLabel;
/**
* Number of distinct vertex property values of a given label - property name pair
*/
private Map<String, Map<String, Long>> distinctVertexPropertiesByLabel;
/**
* Number of distinct edge property values for property names
*/
private Map<String, Long> distinctEdgeProperties;
/**
* Number of distinct vertex property values for property names
*/
private Map<String, Long> distinctVertexProperties;
/**
* Constructor using basic statistics.
*
* @param vertexCount total number of vertices
* @param edgeCount total number of edges
* @param distinctSourceVertexCount number of distinct source vertices
* @param distinctTargetVertexCount number of distinct target vertices
*/
public GraphStatistics(long vertexCount, long edgeCount,
long distinctSourceVertexCount, long distinctTargetVertexCount) {
this(vertexCount,
edgeCount,
Collections.emptyMap(),
Collections.emptyMap(),
Collections.emptyMap(),
Collections.emptyMap(),
distinctSourceVertexCount,
distinctTargetVertexCount,
Collections.emptyMap(),
Collections.emptyMap(),
Collections.emptyMap(),
Collections.emptyMap(),
Collections.emptyMap(),
Collections.emptyMap());
}
/**
* Constructor.
*
* @param vertexCount number of vertices
* @param edgeCount number of edges
* @param vertexCountByLabel number of vertices by label
* @param edgeCountByLabel number of edges by label
* @param edgeCountBySourceVertexAndEdgeLabel number of edges by source vertex and edge label
* @param edgeCountByTargetVertexAndEdgeLabel number of edges by target vertex and edge label
* @param distinctSourceVertexCount number of distinct source vertices
* @param distinctTargetVertexCount number of distinct target vertices
* @param distinctSourceVertexCountByEdgeLabel number of distinct source vertices by edge label
* @param distinctTargetVertexCountByEdgeLabel number of distinct target vertices by edge label
* @param distinctEdgePropertiesByLabel {@code (label,property) -> distinct values}
* @param distinctVertexPropertiesByLabel {@code (label,property) -> distinct values}
* @param distinctEdgeProperties {@code (edge property) -> distinct values}
* @param distinctVertexProperties {@code (vertex property) -> distinct values}
*/
GraphStatistics(long vertexCount, long edgeCount, Map<String, Long> vertexCountByLabel,
Map<String, Long> edgeCountByLabel,
Map<String, Map<String, Long>> edgeCountBySourceVertexAndEdgeLabel,
Map<String, Map<String, Long>> edgeCountByTargetVertexAndEdgeLabel,
long distinctSourceVertexCount, long distinctTargetVertexCount,
Map<String, Long> distinctSourceVertexCountByEdgeLabel,
Map<String, Long> distinctTargetVertexCountByEdgeLabel,
Map<String, Map<String, Long>> distinctEdgePropertiesByLabel,
Map<String, Map<String, Long>> distinctVertexPropertiesByLabel,
Map<String, Long> distinctEdgeProperties,
Map<String, Long> distinctVertexProperties) {
this.vertexCount = vertexCount;
this.edgeCount = edgeCount;
this.vertexCountByLabel = vertexCountByLabel;
this.edgeCountByLabel = edgeCountByLabel;
this.edgeCountBySourceVertexAndEdgeLabel = edgeCountBySourceVertexAndEdgeLabel;
this.edgeCountByTargetVertexAndEdgeLabel = edgeCountByTargetVertexAndEdgeLabel;
this.distinctSourceVertexCount = distinctSourceVertexCount;
this.distinctTargetVertexCount = distinctTargetVertexCount;
this.distinctSourceVertexCountByEdgeLabel = distinctSourceVertexCountByEdgeLabel;
this.distinctTargetVertexCountByEdgeLabel = distinctTargetVertexCountByEdgeLabel;
this.distinctEdgePropertiesByLabel = distinctEdgePropertiesByLabel;
this.distinctVertexPropertiesByLabel = distinctVertexPropertiesByLabel;
this.distinctEdgeProperties = distinctEdgeProperties;
this.distinctVertexProperties = distinctVertexProperties;
}
/**
* Returns the number of vertices in the graph.
*
* @return vertex count
*/
public long getVertexCount() {
return vertexCount;
}
/**
* Returns the number of edges in the graph
*
* @return edge count
*/
public long getEdgeCount() {
return edgeCount;
}
/**
* Returns the number of vertices with the specified label.
*
* @param vertexLabel vertex label
* @return number of vertices with the given label
*/
public long getVertexCount(String vertexLabel) {
return vertexCountByLabel.getOrDefault(vertexLabel, 0L);
}
/**
* Returns the number of edges with the specified label.
*
* @param edgeLabel edge label
* @return number of edges with the given label
*/
public long getEdgeCount(String edgeLabel) {
return edgeCountByLabel.getOrDefault(edgeLabel, 0L);
}
/**
* Returns the number of edges with a specified source vertex label and edge label.
*
* @param vertexLabel source vertex label
* @param edgeLabel edge label
* @return number of edges with the specified labels
*/
public long getEdgeCountBySource(String vertexLabel, String edgeLabel) {
return (edgeCountBySourceVertexAndEdgeLabel.containsKey(vertexLabel)) ?
edgeCountBySourceVertexAndEdgeLabel.get(vertexLabel).getOrDefault(edgeLabel, 0L) :
0L;
}
/**
* Returns the number of edges with a specified target vertex label and edge label.
*
* @param vertexLabel target vertex label
* @param edgeLabel edge label
* @return number of edges with the specified labels
*/
public long getEdgeCountByTarget(String vertexLabel, String edgeLabel) {
return (edgeCountByTargetVertexAndEdgeLabel.containsKey(vertexLabel)) ?
edgeCountByTargetVertexAndEdgeLabel.get(vertexLabel).getOrDefault(edgeLabel, 0L) :
0L;
}
/**
* Returns the number of distinct source vertices.
*
* @return number of distinct source vertices
*/
public long getDistinctSourceVertexCount() {
return distinctSourceVertexCount;
}
/**
* Returns the number of distinct target vertices.
*
* @return number of distinct target vertices
*/
public long getDistinctTargetVertexCount() {
return distinctTargetVertexCount;
}
/**
* Returns the number of distinct source vertices incident to an edge with the specified label.
*
* @param edgeLabel edge label
* @return number of distinct source vertices incident to the labeled edge
*/
public long getDistinctSourceVertexCount(String edgeLabel) {
return distinctSourceVertexCountByEdgeLabel.getOrDefault(edgeLabel, 0L);
}
/**
* Returns the number of distinct target vertices incident to an edge with the specified label.
*
* @param edgeLabel edge label
* @return number of distinct target vertices incident to the labeled edge
*/
public long getDistinctTargetVertexCount(String edgeLabel) {
return distinctTargetVertexCountByEdgeLabel.getOrDefault(edgeLabel, 0L);
}
/**
* Returns the number of distinct vertex property values for given property name<br>
* Eg {@code (name) -> 20}
*
* @param propertyName property name
* @return number of distinct property values for label property name pair
*/
public long getDistinctVertexProperties(String propertyName) {
return distinctVertexProperties.getOrDefault(propertyName, 0L);
}
/**
* Returns the number of distinct edge property values for given property name<br>
* Eg {@code (name) -> 20}
*
* @param propertyName property name
* @return number of distinct property values for label property name pair
*/
public long getDistinctEdgeProperties(String propertyName) {
return distinctEdgeProperties.getOrDefault(propertyName, 0L);
}
/**
* Returns the number of distinct property values for given vertex label property name pair<br>
* Eg {@code (Person, name) -> 20}
*
* @param vertexLabel vertex label
* @param propertyName property name
* @return number of distinct property values for label property name pair
*/
public long getDistinctVertexProperties(String vertexLabel, String propertyName) {
return distinctVertexPropertiesByLabel.containsKey(vertexLabel) ?
distinctVertexPropertiesByLabel.get(vertexLabel).getOrDefault(propertyName, 0L) : 0;
}
/**
* Returns the number of distinct property values for given edge label property name pair<br>
* Eg {@code (Person, name) -> 20}
*
* @param edgeLabel edge label
* @param propertyName property name
* @return number of distinct property values for label property name pair
*/
public long getDistinctEdgeProperties(String edgeLabel, String propertyName) {
return distinctEdgePropertiesByLabel.containsKey(edgeLabel) ?
distinctEdgePropertiesByLabel.get(edgeLabel).getOrDefault(propertyName, 0L) : 0;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("GraphStatistics{");
sb.append(String.format("%n vertexCount="))
.append(vertexCount);
sb.append(String.format(",%n edgeCount="))
.append(edgeCount);
sb.append(String.format(",%n vertexCountByLabel="))
.append(vertexCountByLabel);
sb.append(String.format(",%n edgeCountByLabel="))
.append(edgeCountByLabel);
sb.append(String.format(",%n edgeCountByTargetVertexAndEdgeLabel="))
.append(edgeCountByTargetVertexAndEdgeLabel);
sb.append(String.format(",%n edgeCountBySourceVertexAndEdgeLabel="))
.append(edgeCountBySourceVertexAndEdgeLabel);
sb.append(String.format(",%n distinctSourceVertexCount="))
.append(distinctSourceVertexCount);
sb.append(String.format(",%n distinctTargetVertexCount="))
.append(distinctTargetVertexCount);
sb.append(String.format(",%n distinctSourceVertexCountByEdgeLabel="))
.append(distinctSourceVertexCountByEdgeLabel);
sb.append(String.format(",%n distinctTargetVertexCountByEdgeLabel="))
.append(distinctTargetVertexCountByEdgeLabel);
sb.append(String.format(",%n distinctVertexProperties="))
.append(distinctVertexProperties);
sb.append(String.format(",%n distinctEdgeProperties="))
.append(distinctEdgeProperties);
sb.append(String.format(",%n distinctVertexPropertiesByLabel="))
.append(distinctVertexPropertiesByLabel);
sb.append(String.format(",%n distinctEdgePropertiesByLabel="))
.append(distinctEdgePropertiesByLabel);
sb.append(String.format("%n}"));
return sb.toString();
}
}
| apache-2.0 |
vv7/Appium | appium_ant_testng_vv7/src/com/vv7/controllers/Runner.java | 1456 | package com.vv7.controllers;
import io.appium.java_client.android.AndroidDriver;
import java.net.URL;
import java.util.concurrent.TimeUnit;
import org.openqa.selenium.remote.CapabilityType;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.vv7.models.AppiumDriver;
import com.vv7.scripts.Settings;
/*
 * Test controller: creates an Android Appium session before each test
 * method and disposes of it afterwards.
 */
public class Runner extends AppiumDriver {
	@BeforeMethod
	public void setUp() throws Exception {
		DesiredCapabilities capabilities = new DesiredCapabilities();
		// Empty browser name: we are automating a native app, not a web page.
		capabilities.setCapability(CapabilityType.BROWSER_NAME, "");
		capabilities.setCapability("platformName", "Android");
		capabilities.setCapability("deviceName", "Android Emulator");
		capabilities.setCapability("platformVersion", "4.4");
		capabilities.setCapability("appPackage", "io.appium.unlock");
		capabilities.setCapability("appActivity", ".Unlock");
		// Enable the Unicode keyboard so non-ASCII (e.g. Chinese) text can be
		// entered, and restore the device's original keyboard afterwards.
		capabilities.setCapability("unicodeKeyboard", "True");
		capabilities.setCapability("resetKeyboard", "True");
		driver = new AndroidDriver(new URL("http://127.0.0.1:4723/wd/hub"),
				capabilities);
		driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);
	}
	@AfterMethod
	public void tearDown() throws Exception {
		// Guard against setUp() having failed before the driver was created;
		// calling quit() on null would throw an NPE that masks the real error.
		if (driver != null) {
			driver.quit();
		}
	}
	@Test
	public void test_Settings() throws Exception {
		Settings ss = new Settings();
		ss.TT();
	}
}
| apache-2.0 |
stumoodie/VisualLanguageToolkit | lib/slf4j-1.5.2/slf4j-api/src/main/java/org/slf4j/spi/LocationAwareLogger.java | 2284 | /*
* Copyright (c) 2004-2007 QOS.ch
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.slf4j.spi;
import org.slf4j.Logger;
import org.slf4j.Marker;
/**
 * An <b>optional</b> interface helping integration with logging systems capable of
 * extracting location information. This interface is mainly used by SLF4J bridges
 * such as jcl104-over-slf4j which need to provide hints so that the underlying logging
 * system can extract the correct location information (method name, line number, etc.).
 *
 *
 * @author Ceki Gulcu
 * @since 1.3
 */
public interface LocationAwareLogger extends Logger {
  // Numeric level constants, ordered from least to most severe.
  // (TRACE_INT is spelled as the octal literal 00 in the original source;
  // its value is plain zero.)
  final public int TRACE_INT = 00;
  final public int DEBUG_INT = 10;
  final public int INFO_INT = 20;
  final public int WARN_INT = 30;
  final public int ERROR_INT = 40;
  /**
   * Printing method with support for location information.
   *
   * @param marker the marker to associate with the event, may be null
   * @param fqcn The fully qualified class name of the <b>caller</b>,
   *             used by the backend to compute location information
   * @param level one of the *_INT level constants declared above
   * @param message the message to log
   * @param t an accompanying throwable, may be null
   */
  public void log(Marker marker, String fqcn, int level, String message, Throwable t);
}
| apache-2.0 |
jwren/intellij-community | java/testFramework/src/com/intellij/ide/projectWizard/ProjectWizardTestCase.java | 12509 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.projectWizard;
import com.intellij.ide.actions.ImportModuleAction;
import com.intellij.ide.impl.NewProjectUtil;
import com.intellij.ide.util.newProjectWizard.AbstractProjectWizard;
import com.intellij.ide.util.newProjectWizard.SelectTemplateSettings;
import com.intellij.ide.util.projectWizard.ModuleWizardStep;
import com.intellij.ide.wizard.NewProjectWizardStep;
import com.intellij.ide.wizard.Step;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkTypeId;
import com.intellij.openapi.projectRoots.SimpleJavaSdkType;
import com.intellij.openapi.projectRoots.impl.JavaAwareProjectJdkTableImpl;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.impl.LanguageLevelProjectExtensionImpl;
import com.intellij.openapi.roots.ui.configuration.actions.NewModuleAction;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.projectImport.ProjectImportProvider;
import com.intellij.testFramework.HeavyPlatformTestCase;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.ui.UIBundle;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
 * Base class for tests exercising the new-project / new-module wizard and the
 * project-import wizard. Subclasses drive a wizard of type {@code T} through
 * its steps (optionally adjusting each step via a {@link Consumer}) and then
 * create a project or module from the finished wizard.
 *
 * @author Dmitry Avdeev
 */
public abstract class ProjectWizardTestCase<T extends AbstractProjectWizard> extends HeavyPlatformTestCase {
  // Name used for the SDK registered by configureJdk().
  protected static final String DEFAULT_SDK = "default";
  // The wizard under test; created lazily and disposed in tearDown().
  protected T myWizard;
  // Project created by the wizard run, if any; closed in tearDown().
  @Nullable
  private Project myCreatedProject;
  // Default-project SDK captured in setUp() and restored in tearDown().
  private Sdk myOldDefaultProjectSdk;
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    Project defaultProject = ProjectManager.getInstance().getDefaultProject();
    myOldDefaultProjectSdk = ProjectRootManager.getInstance(defaultProject).getProjectSdk();
    Sdk projectSdk = ProjectRootManager.getInstance(getProject()).getProjectSdk();
    // Remove every registered SDK except the test project's own, so wizard
    // SDK combo boxes start from a known state.
    for (final Sdk jdk : ProjectJdkTable.getInstance().getAllJdks()) {
      if (projectSdk != jdk) {
        ApplicationManager.getApplication().runWriteAction(() -> ProjectJdkTable.getInstance().removeJdk(jdk));
      }
    }
    ProjectTypeStep.resetGroupForTests();
  }
  @Override
  public void tearDown() throws Exception {
    // Dispose the wizard and close the created project first, then restore
    // the default-project state; failures are collected as suppressed
    // exceptions so super.tearDown() still runs.
    try {
      if (myWizard != null) {
        Disposer.dispose(myWizard.getDisposable());
        myWizard = null;
      }
      if (myCreatedProject != null) {
        PlatformTestUtil.forceCloseProjectWithoutSaving(myCreatedProject);
        myCreatedProject = null;
      }
      ApplicationManager.getApplication().runWriteAction(() -> {
        LanguageLevelProjectExtensionImpl extension =
          LanguageLevelProjectExtensionImpl.getInstanceImpl(ProjectManager.getInstance().getDefaultProject());
        extension.resetDefaults();
        ProjectRootManager.getInstance(ProjectManager.getInstance().getDefaultProject()).setProjectSdk(myOldDefaultProjectSdk);
        JavaAwareProjectJdkTableImpl.removeInternalJdkInTests();
      });
      SelectTemplateSettings.getInstance().setLastTemplate(null, null);
      // let vfs update pass
      PlatformTestUtil.dispatchAllEventsInIdeEventQueue();
    }
    catch (Throwable e) {
      addSuppressedException(e);
    }
    finally {
      super.tearDown();
    }
  }
  // Runs the wizard for the given template group/name and creates a new
  // project from it. Asserts that exactly two projects end up open (the test
  // project plus the created one).
  protected Project createProjectFromTemplate(@NotNull String group, @Nullable String name, @Nullable Consumer<? super Step> adjuster) throws IOException {
    runWizard(group, name, null, adjuster);
    try {
      myCreatedProject = NewProjectUtil.createFromWizard(myWizard);
    }
    catch (Throwable e) {
      // Record the partially created project (if any) so tearDown can close it.
      myCreatedProject = ContainerUtil.find(ProjectManager.getInstance().getOpenProjects(), project -> {
        return myWizard.getProjectName().equals(project.getName());
      });
      throw new RuntimeException(e);
    }
    assertNotNull(myCreatedProject);
    UIUtil.dispatchAllInvocationEvents();
    Project[] projects = ProjectManager.getInstance().getOpenProjects();
    assertEquals(Arrays.asList(projects).toString(), 2, projects.length);
    return myCreatedProject;
  }
  // Convenience overload targeting the current test project.
  protected @Nullable Module createModuleFromTemplate(String group, String name, @Nullable Consumer<? super Step> adjuster) throws IOException {
    return createModuleFromTemplate(group, name, getProject(), adjuster);
  }
  // Runs the wizard for the given template and adds the resulting module to
  // the supplied project.
  protected @Nullable Module createModuleFromTemplate(String group, String name, @NotNull Project project, @Nullable Consumer<? super Step> adjuster)
    throws IOException {
    runWizard(group, name, project, adjuster);
    return createModuleFromWizard(project);
  }
  private Module createModuleFromWizard(@NotNull Project project) {
    return new NewModuleAction().createModuleFromWizard(project, null, myWizard);
  }
  // Creates the wizard, selects the requested template group/name on the
  // first (project type) step, then walks the remaining steps.
  private void runWizard(@NotNull String group,
                         @Nullable String name,
                         @Nullable Project project,
                         @Nullable Consumer<? super Step> adjuster) throws IOException {
    createWizard(project);
    ProjectTypeStep step = (ProjectTypeStep)myWizard.getCurrentStepObject();
    if (!step.setSelectedTemplate(group, name)) {
      throw new IllegalArgumentException(group + '/' + name + " template not found. Available groups: " + step.availableTemplateGroupsToString());
    }
    runWizard(step1 -> {
      if (name != null && step1 instanceof ChooseTemplateStep) {
        ((ChooseTemplateStep)step1).setSelectedTemplate(name);
      }
      if (adjuster != null) {
        adjuster.accept(step1);
      }
    });
  }
  // May be called from an adjuster to abort the wizard run (handled in the
  // step loop below via CancelWizardException).
  protected void cancelWizardRun() {
    throw new CancelWizardException();
  }
  private static class CancelWizardException extends RuntimeException {
  }
  // Walks the wizard step by step, applying the adjuster to each step, until
  // the last step is reached and finished. Fails if a step refuses to
  // validate (i.e. "Next"/"Finish" does not advance).
  private void runWizard(@Nullable Consumer<? super Step> adjuster) {
    while (true) {
      ModuleWizardStep currentStep = myWizard.getCurrentStepObject();
      if (adjuster != null) {
        try {
          adjuster.accept(currentStep);
        }
        catch (CancelWizardException e) {
          myWizard.doCancelAction();
          return;
        }
      }
      if (myWizard.isLast()) {
        break;
      }
      myWizard.doNextAction();
      if (currentStep == myWizard.getCurrentStepObject()) {
        throw new RuntimeException(currentStep + " is not validated");
      }
    }
    if (!myWizard.doFinishAction()) {
      throw new RuntimeException(myWizard.getCurrentStepObject() + " is not validated");
    }
  }
  // Disposes any previous wizard and creates a fresh one rooted in a new
  // temporary directory.
  protected void createWizard(@Nullable Project project) throws IOException {
    if (myWizard != null) {
      Disposer.dispose(myWizard.getDisposable());
      myWizard = null;
    }
    myWizard = createWizard(project, createTempDirectoryWithSuffix("new").toFile());
    UIUtil.dispatchAllInvocationEvents(); // to make default selection applied
  }
  protected Project createProject(Consumer<? super Step> adjuster) throws IOException {
    createWizard(null);
    runWizard(adjuster);
    myWizard.disposeIfNeeded();
    myCreatedProject = NewProjectUtil.createFromWizard(myWizard);
    return myCreatedProject;
  }
  protected Project createProjectFromTemplate(@NotNull Consumer<NewProjectWizardStep> adjuster) throws IOException {
    return createProjectFromTemplate(UIBundle.message("label.project.wizard.project.generator.name"), adjuster);
  }
  protected Project createProjectFromTemplate(@NotNull String group, @NotNull Consumer<NewProjectWizardStep> adjuster) throws IOException {
    return createProject(step -> {
      var npwStep = getNewProjectWizardStep(step, group);
      if (npwStep != null) {
        adjuster.accept(npwStep);
      }
    });
  }
  protected Module createModule(@NotNull Project project, @NotNull Consumer<? super Step> adjuster) throws IOException {
    createWizard(null);
    runWizard(adjuster);
    myWizard.disposeIfNeeded();
    return createModuleFromWizard(project);
  }
  protected Module createModuleFromTemplate(@NotNull Project project, @NotNull Consumer<NewProjectWizardStep> adjuster) throws IOException {
    return createModuleFromTemplate(project, UIBundle.message("label.project.wizard.module.generator.name"), adjuster);
  }
  protected Module createModuleFromTemplate(
    @NotNull Project project,
    @NotNull String group,
    @NotNull Consumer<NewProjectWizardStep> adjuster
  ) throws IOException {
    return createModuleFromTemplate(group, null, project, step -> {
      var npwStep = getNewProjectWizardStep(step, group);
      if (npwStep != null) {
        adjuster.accept(npwStep);
      }
    });
  }
  // Selects the given generator group on the project-type step and returns
  // its custom NewProjectWizardStep, or null when the step is of another kind.
  protected @Nullable NewProjectWizardStep getNewProjectWizardStep(@NotNull Step step, @NotNull String group) {
    if (step instanceof ProjectTypeStep) {
      var projectTypeStep = (ProjectTypeStep)step;
      assertTrue(projectTypeStep.setSelectedTemplate(group, null));
      var steps = myWizard.getSequence().getSelectedSteps();
      assertEquals(steps.toString(), 1, steps.size());
      var moduleWizardStep = projectTypeStep.getCustomStep();
      assertInstanceOf(moduleWizardStep, NewProjectWizardStep.class);
      return (NewProjectWizardStep)moduleWizardStep;
    }
    return null;
  }
  // Factory hook: subclasses must override to supply the concrete wizard.
  protected T createWizard(Project project, File directory) {
    throw new RuntimeException();
  }
  // Registers two simple JDKs ("default" and "_other") for tests that need
  // an SDK to choose from.
  protected void configureJdk() {
    ApplicationManager.getApplication().runWriteAction(() -> {
      addSdk(new SimpleJavaSdkType().createJdk(DEFAULT_SDK, SystemProperties.getJavaHome()));
      addSdk(new SimpleJavaSdkType().createJdk("_other", SystemProperties.getJavaHome()));
      LOG.debug("ProjectWizardTestCase.configureJdk:");
      LOG.debug(String.valueOf(Arrays.asList(ProjectJdkTable.getInstance().getAllJdks())));
    });
  }
  protected void addSdk(final Sdk sdk) {
    ApplicationManager.getApplication().runWriteAction(() -> ProjectJdkTable.getInstance().addJdk(sdk, getTestRootDisposable()));
  }
  protected Module importModuleFrom(ProjectImportProvider provider, String path) {
    return importFrom(path, getProject(), null, provider);
  }
  protected Module importProjectFrom(String path, Consumer<? super Step> adjuster, ProjectImportProvider... providers) {
    Module module = importFrom(path, null, adjuster, providers);
    if (module != null) {
      myCreatedProject = module.getProject();
    }
    return module;
  }
  private Module importFrom(String path,
                            @Nullable Project project,
                            Consumer<? super Step> adjuster,
                            ProjectImportProvider... providers) {
    return computeInWriteSafeContext(() -> doImportModule(path, project, adjuster, providers));
  }
  // Builds an import wizard for the file at the given path, runs it (when it
  // has steps) and returns the first imported module.
  private Module doImportModule(String path, @Nullable Project project, Consumer<? super Step> adjuster, ProjectImportProvider[] providers) {
    VirtualFile file = LocalFileSystem.getInstance().refreshAndFindFileByPath(path);
    assertNotNull("Can't find " + path, file);
    assertTrue(providers[0].canImport(file, project));
    //noinspection unchecked
    myWizard = (T)ImportModuleAction.createImportWizard(project, null, file, providers);
    assertNotNull(myWizard);
    if (myWizard.getStepCount() > 0) {
      runWizard(adjuster);
    }
    return ContainerUtil.getFirstItem(ImportModuleAction.createFromWizard(project, myWizard));
  }
  // Schedules the supplier via invokeLater (a write-safe context) and pumps
  // the event queue until it has run, then returns its result.
  private static <T> T computeInWriteSafeContext(Supplier<? extends T> supplier) {
    Ref<T> module = Ref.create();
    ApplicationManager.getApplication().invokeLater(() -> module.set(supplier.get()));
    UIUtil.dispatchAllInvocationEvents();
    return module.get();
  }
  protected Sdk createSdk(String name, SdkTypeId sdkType) {
    final Sdk sdk = ProjectJdkTable.getInstance().createSdk(name, sdkType);
    ApplicationManager.getApplication().runWriteAction(() -> ProjectJdkTable.getInstance().addJdk(sdk, getTestRootDisposable()));
    return sdk;
  }
}
| apache-2.0 |
DALDEI/byte-buddy | byte-buddy-dep/src/test/java/net/bytebuddy/matcher/IsNamedMatcherTest.java | 990 | package net.bytebuddy.matcher;
import net.bytebuddy.description.NamedElement;
import org.junit.Test;
import org.mockito.Mock;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.when;
public class IsNamedMatcherTest extends AbstractElementMatcherTest<IsNamedMatcher<?>> {
    @Mock
    private NamedElement.WithOptionalName namedElement;
    @SuppressWarnings("unchecked")
    public IsNamedMatcherTest() {
        super((Class<IsNamedMatcher<?>>) (Object) IsNamedMatcher.class, "isNamed");
    }
    @Test
    public void testMatch() throws Exception {
        // An element reporting isNamed() == true must be matched.
        when(namedElement.isNamed()).thenReturn(true);
        IsNamedMatcher<NamedElement.WithOptionalName> matcher = new IsNamedMatcher<NamedElement.WithOptionalName>();
        assertThat(matcher.matches(namedElement), is(true));
    }
    @Test
    public void testPositiveToNegative() throws Exception {
        // Without stubbing, the mock's isNamed() defaults to false,
        // so the matcher must reject the element.
        IsNamedMatcher<NamedElement.WithOptionalName> matcher = new IsNamedMatcher<NamedElement.WithOptionalName>();
        assertThat(matcher.matches(namedElement), is(false));
    }
}
| apache-2.0 |
MichaelVose2/uPortal | uportal-war/src/main/java/org/apereo/portal/i18n/LocaleManager.java | 13691 | /**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apereo.portal.i18n;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apereo.portal.properties.PropertiesManager;
import org.apereo.portal.security.IPerson;
import org.apereo.portal.utils.DocumentFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* Manages locales on behalf of a user.
* This class currently keeps track of locales at the following levels:<br>
* <ol>
* <li>User's locale preferences (associated with a user ID)</li>
* <li>Browser's locale preferences (from the Accept-Language request header)</li>
* <li>Session's locale preferences (set via the portal request parameter uP_locales)</li>
* <li>Portal's locale preferences (set in portal.properties)</li>
* </ol>
* Eventually, this class will also keep track of locale preferences at
* the following levels:<br>
* <ol>
* <li>Layout node's locale preferences</li>
* <li>User profile's locale preferences</li>
* </ol>
* @author Shoji Kajita <a href="mailto:">kajita@itc.nagoya-u.ac.jp</a>
* @author Ken Weiner, kweiner@unicon.net
* @version $Revision$
*/
public class LocaleManager implements Serializable {
    private static final Log log = LogFactory.getLog(LocaleManager.class);
    /**
     * Default value for localeAware.
     * This value will be used when the corresponding property cannot be loaded.
     */
    public static final boolean DEFAULT_LOCALE_AWARE = false;
    // Master switch read from portal.properties; when false, per-user and
    // portal locale lists are never populated.
    private static boolean localeAware = PropertiesManager.getPropertyAsBoolean("org.apereo.portal.i18n.LocaleManager.locale_aware", DEFAULT_LOCALE_AWARE);
    // JVM default locale, captured in the constructor.
    private static Locale jvmLocale;
    // Portal-wide locale preferences loaded from portal.properties.
    private static Locale[] portalLocales;
    // The user this manager belongs to.
    private final IPerson person;
    // Locale preferences at session, browser (Accept-Language) and user level.
    private Locale[] sessionLocales;
    private Locale[] browserLocales;
    private Locale[] userLocales;
/**
* Constructor that associates a locale manager with a user.
* @param person the user
*/
public LocaleManager(IPerson person, Locale[] userLocales) {
this.person = person;
jvmLocale = Locale.getDefault();
if (localeAware) {
portalLocales = loadPortalLocales();
try {
this.userLocales = userLocales;
} catch (Exception e) {
log.error("Error populating userLocals", e);
}
}
}
/**
* Constructor that sets up locales according to
* the <code>Accept-Language</code> request header
* from a user's browser.
* @param person the user
* @param acceptLanguage the Accept-Language request header from a user's browser
*/
public LocaleManager(IPerson person, Locale[] userLocales, String acceptLanguage) {
this(person, userLocales);
this.browserLocales = parseLocales(acceptLanguage);
}
// Getters
public static boolean isLocaleAware() { return localeAware; }
public static Locale getJvmLocale() { return jvmLocale; }
public static Locale[] getPortalLocales() { return portalLocales; }
public Locale[] getBrowserLocales() { return browserLocales; }
public Locale[] getUserLocales() { return userLocales; }
public Locale[] getSessionLocales() { return sessionLocales; }
// Setters
public static void setJvmLocale(Locale jvmLocale) { LocaleManager.jvmLocale = jvmLocale; }
public static void setPortalLocales(Locale[] portalLocales) { LocaleManager.portalLocales = portalLocales; }
public void setBrowserLocales(Locale[] browserLocales) { this.browserLocales = browserLocales; }
public void setUserLocales(Locale[] userLocales) { this.userLocales = userLocales; this.sessionLocales = userLocales; }
public void setSessionLocales(Locale[] sessionLocales) { this.sessionLocales = sessionLocales; }
/**
* Read and parse portal_locales from portal.properties.
* portal_locales will be in the form of a comma-separated
* list, e.g. en_US,ja_JP,sv_SE
*/
private Locale[] loadPortalLocales() {
String portalLocalesString = PropertiesManager.getProperty("org.apereo.portal.i18n.LocaleManager.portal_locales");
return parseLocales(portalLocalesString);
}
/**
* Produces a sorted list of locales according to locale preferences
* obtained from several places. The following priority is given:
* session, user, browser, portal, and jvm.
* @return the sorted list of locales
*/
public Locale[] getLocales() {
// Need logic to construct ordered locale list.
// Consider creating a separate ILocaleResolver
// interface to do this work.
List locales = new ArrayList();
// Add highest priority locales first
addToLocaleList(locales, sessionLocales);
addToLocaleList(locales, userLocales);
// We will ignore browser locales until we know how to
// translate them into proper java.util.Locales
//addToLocaleList(locales, browserLocales);
addToLocaleList(locales, portalLocales);
addToLocaleList(locales, new Locale[] { jvmLocale });
return (Locale[])locales.toArray(new Locale[0]);
}
/**
* Add locales to the locale list if they aren't in there already
*/
private void addToLocaleList(List localeList, Locale[] locales) {
if (locales != null) {
for (int i = 0; i < locales.length; i++) {
if (locales[i] != null && !localeList.contains(locales[i]))
localeList.add(locales[i]);
}
}
}
/**
* Helper method to produce a <code>java.util.Locale</code> array from
* a comma-delimited locale string list, e.g. "en_US,ja_JP"
* @param localeStringList the locales to parse
* @return an array of locales representing the locale string list
*/
public static Locale[] parseLocales(String localeStringList) {
Locale[] locales = null;
if (localeStringList != null) {
StringTokenizer st = new StringTokenizer(localeStringList, ",");
locales = new Locale[st.countTokens()];
for (int i = 0; st.hasMoreTokens(); i++) {
String localeString = st.nextToken().trim();
locales[i] = parseLocale(localeString);
}
}
return locales;
}
/**
* Helper method to produce a <code>java.util.Locale</code> object from
* a locale string such as en_US or ja_JP.
* @param localeString a locale string such as en_US
* @return a java.util.Locale object representing the locale string
*/
public static Locale parseLocale(String localeString) {
String language = null;
String country = null;
String variant = null;
// Sometimes people specify "en-US" instead of "en_US", so
// we'll try to clean that up.
localeString = localeString.replaceAll("-", "_");
StringTokenizer st = new StringTokenizer(localeString, "_");
if (st.hasMoreTokens()) {
language = st.nextToken();
}
if (st.hasMoreTokens()) {
country = st.nextToken();
}
if (st.hasMoreTokens()) {
variant = st.nextToken();
}
Locale locale = null;
if (variant != null) {
locale = new Locale(language, country, variant);
} else if (country != null) {
locale = new Locale(language, country);
} else if (language != null) {
// Uncomment the following line
// when we can count on JDK 1.4!
//locale = new Locale(language);
locale = new Locale(language, "");
}
return locale;
}
/**
* Constructs a comma-delimited list of locales
* that could be parsed back into a Locale
* array with parseLocales(String localeStringList).
* @param locales the list of locales
* @return a string representing the list of locales
*/
public static String stringValueOf(Locale[] locales) {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < locales.length; i++) {
Locale locale = locales[i];
sb.append(locale.toString());
if (i < locales.length - 1) {
sb.append(",");
}
}
return sb.toString();
}
/**
* Stores the user locales persistantly.
* @param userLocales the user locales preference
* @throws Exception
*/
public void persistUserLocales(Locale[] userLocales) throws Exception {
setUserLocales(userLocales);
}
/**
* Creates an XML representation of a list of locales.
* @param locales the locale list
* @return the locale list as XML
*/
public static Document xmlValueOf(Locale[] locales) {
return xmlValueOf(locales, null);
}
/**
* Creates an XML representation of a list of locales.
* If a selected locale is supplied, the XML element representing
* the selected locale will have an attribute of selected with value
* of true. This is helpful when constructing user interfaces that
* indicate which locale is selected.
* @param locales the locale list
* @param selectedLocale a locale that should be selected if it is in the list
* @return the locale list as XML
*/
public static Document xmlValueOf(Locale[] locales, Locale selectedLocale) {
Document doc = DocumentFactory.getThreadDocument();
// <locales>
Element localesE = doc.createElement("locales");
for (int i = 0; i < locales.length; i++) {
Element locE = doc.createElement("locale");
locE.setAttribute("displayName", locales[i].getDisplayName(locales[0]));
locE.setAttribute("code", locales[i].toString());
// Mark which locale is the user's preference
if (selectedLocale != null && selectedLocale.equals(locales[i])) {
locE.setAttribute("selected", "true");
}
// <language iso2="..." iso3="..." displayName="..."/>
Element languageE = doc.createElement("language");
languageE.setAttribute("iso2", locales[i].getLanguage());
try {
languageE.setAttribute("iso3", locales[i].getISO3Language());
} catch (Exception e) {
// Do nothing
}
languageE.setAttribute("displayName", locales[i].getDisplayLanguage(locales[0]));
locE.appendChild(languageE);
// <country iso2="..." iso3="..." displayName="..."/>
Element countryE = doc.createElement("country");
countryE.setAttribute("iso2", locales[i].getCountry());
try {
countryE.setAttribute("iso3", locales[i].getISO3Country());
} catch (Exception e) {
// Do nothing
}
countryE.setAttribute("displayName", locales[i].getDisplayCountry(locales[0]));
locE.appendChild(countryE);
// <variant code="..." displayName="..."/>
Element variantE = doc.createElement("variant");
variantE.setAttribute("code", locales[i].getVariant());
variantE.setAttribute("displayName", locales[i].getDisplayVariant(locales[0]));
locE.appendChild(variantE);
localesE.appendChild(locE);
}
doc.appendChild(localesE);
return doc;
}
public String toString() {
StringBuffer sb = new StringBuffer(1024);
sb.append("LocaleManager's locales").append("\n");
sb.append("-----------------------").append("\n");
sb.append("Session locales: ");
if (sessionLocales != null) {
sb.append(stringValueOf(sessionLocales));
}
sb.append("\n");
sb.append("User locales: ");
if (userLocales != null) {
sb.append(stringValueOf(userLocales));
}
sb.append("\n");
sb.append("Browser locales: ");
if (browserLocales != null) {
sb.append(stringValueOf(browserLocales));
}
sb.append("\n");
sb.append("Portal locales: ");
if (portalLocales != null) {
sb.append(stringValueOf(portalLocales));
}
sb.append("\n");
sb.append("JVM locale: ");
if (jvmLocale != null) {
sb.append(jvmLocale.toString());
}
sb.append("\n");
sb.append("Sorted locales: ");
Locale[] sortedLocales = getLocales();
if (sortedLocales != null) {
sb.append(stringValueOf(sortedLocales));
}
sb.append("\n");
return sb.toString();
}
}
| apache-2.0 |
manovotn/core | tests-arquillian/src/test/java/org/jboss/weld/tests/extensions/custombeans/interceptor/Foo.java | 1030 | /*
* JBoss, Home of Professional Open Source
* Copyright 2015, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.extensions.custombeans.interceptor;
import javax.enterprise.context.RequestScoped;
@RequestScoped
@TypeBinding
public class Foo {
    // No-op business method; carries only the class-level @TypeBinding
    // (presumably an interceptor binding — see TypeBinding's declaration).
    public void ping() {
    }
    // No-op business method additionally annotated with @MethodBinding,
    // so method- and class-level bindings can be exercised separately by the test.
    @MethodBinding
    public void methodLevel(){
    }
}
| apache-2.0 |
apache/geronimo | framework/modules/geronimo-crypto/src/main/java/org/apache/geronimo/crypto/asn1/DERPrintableString.java | 3597 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.crypto.asn1;
import java.io.IOException;
/**
* DER PrintableString object.
*/
public class DERPrintableString
    extends DERObject
    implements DERString
{
    /** The character content of this PrintableString. */
    String string;

    /**
     * Return a printable string from the passed in object.
     *
     * @exception IllegalArgumentException if the object cannot be converted.
     */
    public static DERPrintableString getInstance(
        Object obj)
    {
        if (obj == null || obj instanceof DERPrintableString)
        {
            return (DERPrintableString)obj;
        }

        if (obj instanceof ASN1OctetString)
        {
            // Re-interpret the raw octets as printable-string characters.
            byte[] octets = ((ASN1OctetString)obj).getOctets();
            return new DERPrintableString(octets);
        }

        if (obj instanceof ASN1TaggedObject)
        {
            // Unwrap the tagged object and try again.
            return getInstance(((ASN1TaggedObject)obj).getObject());
        }

        throw new IllegalArgumentException("illegal object in getInstance: " + obj.getClass().getName());
    }

    /**
     * Return a Printable String from a tagged object.
     *
     * NOTE(review): the {@code explicit} flag is currently ignored; the wrapped
     * object is always unwrapped the same way. Preserved as-is for compatibility.
     *
     * @param obj the tagged object holding the object we want
     * @param explicit true if the object is meant to be explicitly
     *              tagged false otherwise.
     * @exception IllegalArgumentException if the tagged object cannot
     *              be converted.
     */
    public static DERPrintableString getInstance(
        ASN1TaggedObject obj,
        boolean          explicit)
    {
        return getInstance(obj.getObject());
    }

    /**
     * Basic constructor - byte encoded string. Each octet is widened
     * (unsigned) to a character.
     */
    public DERPrintableString(
        byte[]  string)
    {
        StringBuilder buf = new StringBuilder(string.length);
        for (byte b : string)
        {
            buf.append((char)(b & 0xff));
        }
        this.string = buf.toString();
    }

    /**
     * Basic constructor from an existing Java string.
     */
    public DERPrintableString(
        String  string)
    {
        this.string = string;
    }

    public String getString()
    {
        return string;
    }

    /**
     * Returns the string content as raw octets, narrowing each character
     * to a single byte (inverse of the byte[] constructor).
     */
    public byte[] getOctets()
    {
        byte[] bytes = new byte[string.length()];
        for (int i = 0; i < bytes.length; i++)
        {
            bytes[i] = (byte)string.charAt(i);
        }
        return bytes;
    }

    void encode(
        DEROutputStream  out)
        throws IOException
    {
        // Emit as a DER PRINTABLE STRING TLV.
        out.writeEncoded(PRINTABLE_STRING, this.getOctets());
    }

    public int hashCode()
    {
        // Must stay consistent with equals(), which compares string content.
        return this.getString().hashCode();
    }

    public boolean equals(
        Object  o)
    {
        if (o instanceof DERPrintableString)
        {
            return this.getString().equals(((DERPrintableString)o).getString());
        }
        return false;
    }
}
| apache-2.0 |
VioletLife/okhttp | okhttp-logging-interceptor/src/main/java/com/squareup/okhttp/logging/HttpLoggingInterceptor.java | 8052 | /*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okhttp.logging;
import com.squareup.okhttp.Connection;
import com.squareup.okhttp.Headers;
import com.squareup.okhttp.Interceptor;
import com.squareup.okhttp.MediaType;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Protocol;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import com.squareup.okhttp.Response;
import com.squareup.okhttp.ResponseBody;
import com.squareup.okhttp.internal.Platform;
import com.squareup.okhttp.internal.http.HttpEngine;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.concurrent.TimeUnit;
import okio.Buffer;
import okio.BufferedSource;
/**
* An OkHttp interceptor which logs request and response information. Can be applied as an
* {@linkplain OkHttpClient#interceptors() application interceptor} or as a
* {@linkplain OkHttpClient#networkInterceptors() network interceptor}.
* <p>
* The format of the logs created by this class should not be considered stable and may change
* slightly between releases. If you need a stable logging format, use your own interceptor.
*/
public final class HttpLoggingInterceptor implements Interceptor {
  private static final Charset UTF8 = Charset.forName("UTF-8");

  public enum Level {
    /** No logs. */
    NONE,
    /**
     * Logs request and response lines.
     * <p>
     * Example:
     * <pre>{@code
     * --> POST /greeting HTTP/1.1 (3-byte body)
     *
     * <-- HTTP/1.1 200 OK (22ms, 6-byte body)
     * }</pre>
     */
    BASIC,
    /**
     * Logs request and response lines and their respective headers.
     * <p>
     * Example:
     * <pre>{@code
     * --> POST /greeting HTTP/1.1
     * Host: example.com
     * Content-Type: plain/text
     * Content-Length: 3
     * --> END POST
     *
     * <-- HTTP/1.1 200 OK (22ms)
     * Content-Type: plain/text
     * Content-Length: 6
     * <-- END HTTP
     * }</pre>
     */
    HEADERS,
    /**
     * Logs request and response lines and their respective headers and bodies (if present).
     * <p>
     * Example:
     * <pre>{@code
     * --> POST /greeting HTTP/1.1
     * Host: example.com
     * Content-Type: plain/text
     * Content-Length: 3
     *
     * Hi?
     * --> END GET
     *
     * <-- HTTP/1.1 200 OK (22ms)
     * Content-Type: plain/text
     * Content-Length: 6
     *
     * Hello!
     * <-- END HTTP
     * }</pre>
     */
    BODY
  }

  public interface Logger {
    void log(String message);

    /** A {@link Logger} defaults output appropriate for the current platform. */
    Logger DEFAULT = new Logger() {
      @Override public void log(String message) {
        Platform.get().log(message);
      }
    };
  }

  public HttpLoggingInterceptor() {
    this(Logger.DEFAULT);
  }

  public HttpLoggingInterceptor(Logger logger) {
    this.logger = logger;
  }

  private final Logger logger;

  /** Volatile so the level can be changed concurrently while requests are in flight. */
  private volatile Level level = Level.NONE;

  /** Change the level at which this interceptor logs. */
  public HttpLoggingInterceptor setLevel(Level level) {
    if (level == null) throw new NullPointerException("level == null. Use Level.NONE instead.");
    this.level = level;
    return this;
  }

  public Level getLevel() {
    return level;
  }

  @Override public Response intercept(Chain chain) throws IOException {
    Level level = this.level; // Snapshot: keep one consistent level for this call.
    Request request = chain.request();
    if (level == Level.NONE) {
      return chain.proceed(request);
    }

    boolean logBody = level == Level.BODY;
    boolean logHeaders = logBody || level == Level.HEADERS;

    RequestBody requestBody = request.body();
    boolean hasRequestBody = requestBody != null;

    // chain.connection() is null when installed as an application interceptor.
    Connection connection = chain.connection();
    Protocol protocol = connection != null ? connection.getProtocol() : Protocol.HTTP_1_1;
    String requestStartMessage =
        "--> " + request.method() + ' ' + request.httpUrl() + ' ' + protocol(protocol);
    if (!logHeaders && hasRequestBody) {
      requestStartMessage += " (" + requestBody.contentLength() + "-byte body)";
    }
    logger.log(requestStartMessage);

    if (logHeaders) {
      if (hasRequestBody) {
        // Request body headers are only present when installed as a network interceptor. Force
        // them to be included (when available) so their values are known.
        if (requestBody.contentType() != null) {
          logger.log("Content-Type: " + requestBody.contentType());
        }
        if (requestBody.contentLength() != -1) {
          logger.log("Content-Length: " + requestBody.contentLength());
        }
      }

      Headers headers = request.headers();
      for (int i = 0, count = headers.size(); i < count; i++) {
        String name = headers.name(i);
        // Skip headers from the request body as they are explicitly logged above.
        if (!"Content-Type".equalsIgnoreCase(name) && !"Content-Length".equalsIgnoreCase(name)) {
          logger.log(name + ": " + headers.value(i));
        }
      }

      if (!logBody || !hasRequestBody) {
        logger.log("--> END " + request.method());
      } else if (bodyEncoded(request.headers())) {
        logger.log("--> END " + request.method() + " (encoded body omitted)");
      } else {
        Buffer buffer = new Buffer();
        requestBody.writeTo(buffer);

        Charset charset = UTF8;
        MediaType contentType = requestBody.contentType();
        if (contentType != null) {
          // BUG FIX: the previous code discarded this return value, so the
          // request body was always decoded as UTF-8 regardless of the
          // charset declared in the Content-Type header.
          charset = contentType.charset(UTF8);
        }

        logger.log("");
        logger.log(buffer.readString(charset));
        logger.log("--> END " + request.method()
            + " (" + requestBody.contentLength() + "-byte body)");
      }
    }

    long startNs = System.nanoTime();
    Response response = chain.proceed(request);
    long tookMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNs);

    ResponseBody responseBody = response.body();
    logger.log("<-- " + protocol(response.protocol()) + ' ' + response.code() + ' '
        + response.message() + " (" + tookMs + "ms"
        + (!logHeaders ? ", " + responseBody.contentLength() + "-byte body" : "") + ')');

    if (logHeaders) {
      Headers headers = response.headers();
      for (int i = 0, count = headers.size(); i < count; i++) {
        logger.log(headers.name(i) + ": " + headers.value(i));
      }

      if (!logBody || !HttpEngine.hasBody(response)) {
        logger.log("<-- END HTTP");
      } else if (bodyEncoded(response.headers())) {
        logger.log("<-- END HTTP (encoded body omitted)");
      } else {
        BufferedSource source = responseBody.source();
        source.request(Long.MAX_VALUE); // Buffer the entire body.
        Buffer buffer = source.buffer();

        Charset charset = UTF8;
        MediaType contentType = responseBody.contentType();
        if (contentType != null) {
          charset = contentType.charset(UTF8);
        }

        if (responseBody.contentLength() != 0) {
          logger.log("");
          // Clone so the downstream consumer can still read the buffered body.
          logger.log(buffer.clone().readString(charset));
        }

        logger.log("<-- END HTTP (" + buffer.size() + "-byte body)");
      }
    }

    return response;
  }

  /** True if the body is transfer-encoded (e.g. gzip) and thus not printable as text. */
  private boolean bodyEncoded(Headers headers) {
    String contentEncoding = headers.get("Content-Encoding");
    return contentEncoding != null && !contentEncoding.equalsIgnoreCase("identity");
  }

  // NOTE(review): intentionally maps every protocol other than HTTP/1.0 to
  // "HTTP/1.1" (original behavior), including SPDY/HTTP_2 values of Protocol.
  private static String protocol(Protocol protocol) {
    return protocol == Protocol.HTTP_1_0 ? "HTTP/1.0" : "HTTP/1.1";
  }
}
| apache-2.0 |
ukwa/interject | interject-access-vrml97tox3d/src/main/java/iicm/vrml/pw/TextureCoordinate.java | 878 | /*
* <copyright>
*
* Copyright (c) 1996,97
* Institute for Information Processing and Computer Supported New Media (IICM),
* Graz University of Technology, Austria.
*
* This file is part of the `pw' VRML 2.0 parser.
*
* </copyright>
*/
/*
* TextureCoordinate.java
* Copyright (c) 1997 IICM
*
* created: krosch, 19960827
*
* changed: krosch, 19960827
* changed: apesen, 19970411
*
* $Id: TextureCoordinate.java,v 1.3 1997/05/22 15:14:36 apesen Exp $
*/
package iicm.vrml.pw;
// TextureCoordinate
public class TextureCoordinate extends Node
{
public MFVec2f point;
public String nodeName ()
{
return NodeNames.NODE_TEXTURECOORDINATE;
}
public void traverse (Traverser t)
{
t.tTextureCoordinate (this);
}
TextureCoordinate ()
{
addField ("point", point = new MFVec2f (), Field.F_EXPOSEDFIELD);
}
} // TextureCoordinate
| apache-2.0 |
kohsah/akomantoso-lib | src/main/java/org/akomantoso/schema/v3/csd07/Meta.java | 11753 | //
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.01.06 at 03:51:15 PM EAT
//
package org.akomantoso.schema.v3.csd07;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}identification"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}publication" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}classification" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}lifecycle" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}workflow" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}analysis" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}temporalData" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}references" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}notes" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}proprietary" maxOccurs="unbounded" minOccurs="0"/>
* <element ref="{http://docs.oasis-open.org/legaldocml/ns/akn/3.0/CSD07}presentation" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "identification",
    "publication",
    "classification",
    "lifecycle",
    "workflow",
    "analysis",
    "temporalData",
    "references",
    "notes",
    "proprietary",
    "presentation"
})
@XmlRootElement(name = "meta")
public class Meta {

    // NOTE: JAXB-generated class (see the file header) -- hand edits will be
    // lost when the schema is recompiled. Collection fields below are lazily
    // initialized by their getters, per the standard JAXB code-generation pattern.

    @XmlElement(required = true)
    protected Identification identification;
    protected Publication publication;
    protected List<Classification> classification;
    protected List<Lifecycle> lifecycle;
    protected List<Workflow> workflow;
    protected List<Analysis> analysis;
    protected List<TemporalData> temporalData;
    protected List<RefItems> references;
    protected List<Notes> notes;
    protected List<Proprietary> proprietary;
    protected List<Presentation> presentation;

    /**
     * Gets the value of the identification property.
     *
     * @return
     *     possible object is
     *     {@link Identification }
     *
     */
    public Identification getIdentification() {
        return identification;
    }

    /**
     * Sets the value of the identification property.
     *
     * @param value
     *     allowed object is
     *     {@link Identification }
     *
     */
    public void setIdentification(Identification value) {
        this.identification = value;
    }

    /**
     * Gets the value of the publication property.
     *
     * @return
     *     possible object is
     *     {@link Publication }
     *
     */
    public Publication getPublication() {
        return publication;
    }

    /**
     * Sets the value of the publication property.
     *
     * @param value
     *     allowed object is
     *     {@link Publication }
     *
     */
    public void setPublication(Publication value) {
        this.publication = value;
    }

    /**
     * Gets the value of the classification property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the classification property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getClassification().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Classification }
     *
     *
     */
    public List<Classification> getClassification() {
        if (classification == null) {
            classification = new ArrayList<Classification>();
        }
        return this.classification;
    }

    /**
     * Gets the value of the lifecycle property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the lifecycle property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getLifecycle().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Lifecycle }
     *
     *
     */
    public List<Lifecycle> getLifecycle() {
        if (lifecycle == null) {
            lifecycle = new ArrayList<Lifecycle>();
        }
        return this.lifecycle;
    }

    /**
     * Gets the value of the workflow property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the workflow property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getWorkflow().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Workflow }
     *
     *
     */
    public List<Workflow> getWorkflow() {
        if (workflow == null) {
            workflow = new ArrayList<Workflow>();
        }
        return this.workflow;
    }

    /**
     * Gets the value of the analysis property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the analysis property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getAnalysis().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Analysis }
     *
     *
     */
    public List<Analysis> getAnalysis() {
        if (analysis == null) {
            analysis = new ArrayList<Analysis>();
        }
        return this.analysis;
    }

    /**
     * Gets the value of the temporalData property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the temporalData property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getTemporalData().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link TemporalData }
     *
     *
     */
    public List<TemporalData> getTemporalData() {
        if (temporalData == null) {
            temporalData = new ArrayList<TemporalData>();
        }
        return this.temporalData;
    }

    /**
     * Gets the value of the references property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the references property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getReferences().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link RefItems }
     *
     *
     */
    public List<RefItems> getReferences() {
        if (references == null) {
            references = new ArrayList<RefItems>();
        }
        return this.references;
    }

    /**
     * Gets the value of the notes property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the notes property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getNotes().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Notes }
     *
     *
     */
    public List<Notes> getNotes() {
        if (notes == null) {
            notes = new ArrayList<Notes>();
        }
        return this.notes;
    }

    /**
     * Gets the value of the proprietary property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the proprietary property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getProprietary().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Proprietary }
     *
     *
     */
    public List<Proprietary> getProprietary() {
        if (proprietary == null) {
            proprietary = new ArrayList<Proprietary>();
        }
        return this.proprietary;
    }

    /**
     * Gets the value of the presentation property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the presentation property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getPresentation().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Presentation }
     *
     *
     */
    public List<Presentation> getPresentation() {
        if (presentation == null) {
            presentation = new ArrayList<Presentation>();
        }
        return this.presentation;
    }

}
| apache-2.0 |
feesa/easyrec-parent | easyrec-plugins/easyrec-plugins-arm/src/main/java/org/easyrec/plugin/arm/model/ARMConfiguration.java | 11270 | /**Copyright 2015 Research Studios Austria Forschungsgesellschaft mBH
*
* This file is part of easyrec.
*
* easyrec is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* easyrec is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with easyrec. If not, see <http://www.gnu.org/licenses/>.
*/
package org.easyrec.plugin.arm.model;
import com.google.common.collect.Lists;
import org.easyrec.plugin.arm.model.enums.MetricTypes;
import org.easyrec.plugin.configuration.PluginParameter;
import org.easyrec.plugin.generator.GeneratorConfiguration;
import javax.xml.bind.annotation.*;
import java.util.List;
/**
 * Configuration bean for the association-rule-mining (ARM) generator plugin.
 * <p>
 * All fields are bound via JAXB field access ({@link XmlAccessType#FIELD}) and most
 * are exposed in the admin UI through {@code @PluginParameter} annotations.
 *
 * @author szavrel
 */
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class ARMConfiguration extends GeneratorConfiguration {
    /** Action type used when no explicit action type is configured. */
    public static final String DEFAULT_ACTIONTYPE = "VIEW";

    /** Creates a configuration using {@link #DEFAULT_ACTIONTYPE}. */
    public ARMConfiguration() {
        this(DEFAULT_ACTIONTYPE);
    }

    /**
     * Creates a configuration for the given action type.
     *
     * @param actionType the type of user action considered during rule generation
     */
    public ARMConfiguration(String actionType) {
        this.actionType = actionType;
    }

    // Absolute support threshold; computed/overridden at runtime (no UI parameter).
    private Integer support;
    @XmlElementWrapper(name = "itemTypes")
    @XmlElement(name = "itemType")
    private List<String> itemTypes = Lists.newArrayList();
    @PluginParameter(
            displayName = "action type",
            shortDescription = "The type of action to be considered in rule generation.",
            description = "Defines the type of actions to be considered when rules are generated.",
            optional = false)
    private String actionType = DEFAULT_ACTIONTYPE;
    @PluginParameter(
            displayName = "support percentage",
            shortDescription = "# of shopping baskets that must contain an item combination to be considered.",
            description = "Defines which percentage of all shopping baskets must contain a certain item combination so that this combination will be considered as significant.",
            optional = false)
    private Double supportPrcnt = 0.0;
    @PluginParameter(
            displayName = "minimum absolute support",
            shortDescription = "Minimum absolute # of shopping baskets that must contain an item combination to be considered.",
            description = "Defines the absolute minimum of shopping baskets that must contain a certain item combination for this combination to be considered as significant.",
            optional = false)
    private Integer supportMinAbs = 2;
    @PluginParameter(
            displayName = "confidence percentage",
            shortDescription = "The relation of total actions on an item to actions on this and another item. ",
            description = "Defines the confidence in an item combination (A,B) by putting it into perspective to how often A occurs without B.",
            optional = false)
    private Double confidencePrcnt = 0.0;
    @PluginParameter(
            displayName = "maximum rules per item",
            shortDescription = "Maximum number of rules generated for an item.",
            description = "When generating recommendation rules only the best N are considered. Relations exceeding this value are disregarded.",
            optional = false)
    private Integer maxRulesPerItem = 50;
    // NOTE: "Baskests" typo is kept deliberately — the name is part of the public
    // getter/setter pair and of the JAXB-serialized XML, so renaming would break callers.
    @PluginParameter(
            displayName = "exclude single-item baskets",
            shortDescription = "Baskets (users) with only a single item (action) are disregarded in rule generation. ",
            description = "Single action users (e.g. just one purchase) do not contribute to rule generation but can have a negative influence on the percentage values like confidence & support."
                    + "In scenarios with a lot of single action users enabling this value can lead to more recommendations.",
            optional = false)
    private Boolean excludeSingleItemBaskests = false;
    @PluginParameter(
            displayName = "neutral rating",
            shortDescription = "The value of a rating to be considered neutral.",
            description = "Defines the threshold below which ratings are considered 'bad' and above which they are considered as 'good'.",
            optional = true)
    private Double ratingNeutral = 5.5;
    @PluginParameter(
            displayName = "metric type",
            shortDescription = "The metric to be used to calculate item relations.",
            description = "Allows to set the metric that determines the strength of a relation between items. Valid values are CONFIDENCE, CONVICTION, LIFT, LONGTAIL.",
            optional = true)
    private MetricTypes metricType = MetricTypes.CONFIDENCE;
    @PluginParameter(
            displayName = "popular items threshold",
            shortDescription = "The number of most popular items considered for rule mining.",
            description = "Defines the number of items considered for rule minig. Usually only the x most popular items are considered. CAUTION!!! This setting heavily influences the amount of memory needed by easyrec. The default value of 5000 requires a java heap size of 640MB to be on the safe side!",
            optional = false)
    private Integer maxSizeL1 = 5000;
    @PluginParameter(
            displayName = "item batch size",
            shortDescription = "The number of items loaded to memory at a time for rule mining.",
            description = "Defines the number of items loaded to memory at a time for rule mining. Helps to keep memory usage under control for " +
                    "large sets of items",
            optional = false)
    private Integer itemBatchSize = 4000;
    @PluginParameter(
            displayName = "keep top items",
            shortDescription = "The number of most popular items always kept for analysis when batch processing.",
            description = "When ruleminer uses batch processing to save memory it only loads a subset of the most popular items into memory at a " +
                    "time. This amount specifies how many of the most popular items are kept in memory for analysis at all times. Keep this value " +
                    "significantly lower than popular items threshold. Furthermore it is always added to item batch size so be cautious with " +
                    "memory usage.",
            optional = false)
    private Integer L1KeepItemCount = 1000;
    @PluginParameter(
            displayName = "time range (d)",
            shortDescription = "The number of last days from which actions are considered.",
            description = "Sets the number of days for how long back actions are considered. Use -1 for no time limit.",
            optional = false)
    private Integer timeRange = 365;
    @PluginParameter(
            displayName = "max basket size",
            shortDescription = "The maximum number of items in a basket considered for rule mining.",
            description = "The maximum number of items in a basket considered for rule mining.",
            optional = false)
    private Integer maxBasketSize = 300;
    @PluginParameter(
            displayName = "store alternative metrics",
            shortDescription = "Calculates and stores the values for the not chosen metric types for comparison purposes.",
            description = "Calculates and stores the values for the not chosen metric types for comparison purposes in a database field. "
                    + "Enable for tuning and tweaking purposes to compare different metric types.",
            optional = false)
    private boolean storeAlternativeMetrics = false;
    // Internal flag, not exposed as a plugin parameter.
    private Boolean doDeltaUpdate = false;

    public String getActionType() {
        return actionType;
    }

    public void setActionType(String actionType) {
        this.actionType = actionType;
    }

    public Double getConfidencePrcnt() {
        return confidencePrcnt;
    }

    public void setConfidencePrcnt(Double confidencePrcnt) {
        this.confidencePrcnt = confidencePrcnt;
    }

    public Boolean getDoDeltaUpdate() {
        return doDeltaUpdate;
    }

    public void setDoDeltaUpdate(Boolean doDeltaUpdate) {
        this.doDeltaUpdate = doDeltaUpdate;
    }

    public Boolean getExcludeSingleItemBaskests() {
        return excludeSingleItemBaskests;
    }

    public void setExcludeSingleItemBaskests(Boolean excludeSingleItemBaskests) {
        this.excludeSingleItemBaskests = excludeSingleItemBaskests;
    }

    public List<String> getItemTypes() {
        return itemTypes;
    }

    public void setItemTypes(List<String> itemTypes) {
        this.itemTypes = itemTypes;
    }

    public Integer getMaxRulesPerItem() {
        return maxRulesPerItem;
    }

    public void setMaxRulesPerItem(Integer maxRulesPerItem) {
        this.maxRulesPerItem = maxRulesPerItem;
    }

    public Integer getMaxSizeL1() {
        return maxSizeL1;
    }

    public void setMaxSizeL1(Integer maxSizeL1) {
        this.maxSizeL1 = maxSizeL1;
    }

    public Integer getSupport() {
        return support;
    }

    public void setSupport(Integer support) {
        this.support = support;
    }

    public Integer getSupportMinAbs() {
        return supportMinAbs;
    }

    public void setSupportMinAbs(Integer supportMinAbs) {
        this.supportMinAbs = supportMinAbs;
    }

    public Double getSupportPrcnt() {
        return supportPrcnt;
    }

    public void setSupportPrcnt(Double supportPrcnt) {
        this.supportPrcnt = supportPrcnt;
    }

    public Double getRatingNeutral() {
        return ratingNeutral;
    }

    public void setRatingNeutral(Double ratingNeutral) {
        this.ratingNeutral = ratingNeutral;
    }

    public MetricTypes getMetricType() {
        return metricType;
    }

    public void setMetricType(MetricTypes metricType) {
        this.metricType = metricType;
    }

    public Integer getItemBatchSize() {
        return itemBatchSize;
    }

    public void setItemBatchSize(Integer itemBatchSize) {
        this.itemBatchSize = itemBatchSize;
    }

    public Integer getL1KeepItemCount() {
        return L1KeepItemCount;
    }

    public void setL1KeepItemCount(Integer L1KeepItemCount) {
        this.L1KeepItemCount = L1KeepItemCount;
    }

    public Integer getTimeRange() {
        return timeRange;
    }

    public void setTimeRange(Integer timeRange) {
        this.timeRange = timeRange;
    }

    public Integer getMaxBasketSize() {
        return maxBasketSize;
    }

    public void setMaxBasketSize(Integer maxBasketSize) {
        this.maxBasketSize = maxBasketSize;
    }

    public boolean getStoreAlternativeMetrics() {
        return storeAlternativeMetrics;
    }

    public void setStoreAlternativeMetrics(boolean storeAlternativeMetrics) {
        this.storeAlternativeMetrics = storeAlternativeMetrics;
    }
}
| apache-2.0 |
arina-ielchiieva/drill | exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetWriter.java | 4717 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet;
import java.io.IOException;
import java.util.List;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.logical.StoragePluginConfig;
import org.apache.drill.exec.physical.base.AbstractWriter;
import org.apache.drill.exec.physical.base.PhysicalOperator;
import org.apache.drill.exec.store.StorageStrategy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.drill.exec.proto.UserBitShared.CoreOperatorType;
import org.apache.drill.exec.store.StoragePluginRegistry;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
@JsonTypeName("parquet-writer")
public class ParquetWriter extends AbstractWriter {

  static final Logger logger = LoggerFactory.getLogger(ParquetWriter.class);

  /** Version of Drill's Parquet writer. Increment this version (by 1) any time we make any format change to the file.
   * Format changes include:
   * <ul>
   * <li>Supporting new data types,
   * <li>Changes to the format of data fields,
   * <li>Adding new metadata to the file footer, etc.
   * </ul>
   * Newer readers must be able to read old files. The Writer version tells the Parquet reader how to interpret fields
   * or metadata when that data changes format from one writer version to another.
   */
  public static final int WRITER_VERSION = 3;

  private final String location;
  private final List<String> partitionColumns;
  private final ParquetFormatPlugin formatPlugin;

  /**
   * Deserialization constructor used by Jackson; resolves the format plugin
   * from the injected storage plugin registry.
   */
  @JsonCreator
  public ParquetWriter(
          @JsonProperty("child") PhysicalOperator child,
          @JsonProperty("location") String location,
          @JsonProperty("partitionColumns") List<String> partitionColumns,
          @JsonProperty("storageStrategy") StorageStrategy storageStrategy,
          @JsonProperty("storage") StoragePluginConfig storageConfig,
          @JacksonInject StoragePluginRegistry engineRegistry) throws IOException, ExecutionSetupException {
    super(child);
    this.formatPlugin = engineRegistry.resolveFormat(storageConfig, new ParquetFormatConfig(), ParquetFormatPlugin.class);
    this.location = location;
    this.partitionColumns = partitionColumns;
    setStorageStrategy(storageStrategy);
  }

  /** Programmatic constructor used when the format plugin is already resolved. */
  public ParquetWriter(PhysicalOperator child,
                       String location,
                       List<String> partitionColumns,
                       ParquetFormatPlugin formatPlugin) {
    super(child);
    this.formatPlugin = formatPlugin;
    this.location = location;
    this.partitionColumns = partitionColumns;
  }

  @JsonProperty("location")
  public String getLocation() {
    return location;
  }

  @JsonProperty("partitionColumns")
  public List<String> getPartitionColumns() {
    return partitionColumns;
  }

  @JsonProperty("storage")
  public StoragePluginConfig getStorageConfig(){
    return formatPlugin.getStorageConfig();
  }

  @JsonIgnore
  public FormatPluginConfig getFormatConfig(){
    return formatPlugin.getConfig();
  }

  @JsonIgnore
  public ParquetFormatPlugin getFormatPlugin(){
    return formatPlugin;
  }

  @Override
  protected PhysicalOperator getNewWithChild(PhysicalOperator child) {
    // Preserve the storage strategy when re-parenting the operator.
    ParquetWriter writer = new ParquetWriter(child, location, partitionColumns, formatPlugin);
    writer.setStorageStrategy(getStorageStrategy());
    return writer;
  }

  @Override
  public int getOperatorType() {
    return CoreOperatorType.PARQUET_WRITER_VALUE;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("ParquetWriter[location=");
    sb.append(location)
        .append(", storageStrategy=").append(getStorageStrategy())
        .append(", partitionColumns=").append(partitionColumns)
        .append("]");
    return sb.toString();
  }
}
| apache-2.0 |
huihoo/olat | olat7.8/src/main/java/org/olat/connectors/rest/user/UserVOes.java | 1642 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) frentix GmbH<br>
* http://www.frentix.com<br>
* <p>
*/
package org.olat.connectors.rest.user;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
/**
* Description:<br>
* Helper class for the example in the WADL document. Don't use it for something else!!!
* <P>
* Initial Date: 7 apr. 2010 <br>
*
* @author srosse, stephane.rosse@frentix.com
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlRootElement(name = "userVOes")
public class UserVOes {

    // Backing list for the wrapped <userVO> elements; initialized so it is never null.
    @XmlElement(name = "userVO")
    private List<UserVO> users = new ArrayList<UserVO>();

    public UserVOes() {
        // Public no-arg constructor required by JAXB.
    }

    public void setUsers(final List<UserVO> users) {
        this.users = users;
    }

    public List<UserVO> getUsers() {
        return users;
    }
}
| apache-2.0 |
mvp4g/mvp4g-examples | examples/Mvp4gHyperlink/src/main/java/de/gishmo/mvp4g/client/ui/page01/Page01View.java | 1073 | package de.gishmo.mvp4g.client.ui.page01;
import com.google.gwt.core.client.GWT;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.SimpleLayoutPanel;
import de.gishmo.mvp4g.client.widgets.ReverseResizeComposite;
/**
 * View for page 01: a simple layout panel containing a styled "Page 01" headline.
 */
public class Page01View
  extends ReverseResizeComposite<IPage01View.IPage01Presenter>
  implements IPage01View {

  // NOTE(review): the constructor is private, so this view can only be created via
  // generated/deferred-binding code — confirm this is intended for the mvp4g setup.
  private Page01View() {
    // Inject the page CSS once, then build the header. Both objects are only
    // needed during construction, so they are locals rather than fields.
    Page01CSS style = GWT.<Page01Resources>create(Page01Resources.class)
                         .css();
    style.ensureInjected();
    SimpleLayoutPanel headerPanel = new SimpleLayoutPanel();
    Label label = new Label("Page 01");
    label.setStyleName(style.headline());
    headerPanel.add(label);
    initWidget(headerPanel);
  }

  /** CSS resource contract for this page. */
  public interface Page01CSS
    extends CssResource {
    String headline();
  }

  /** Client bundle providing the page's CSS. */
  interface Page01Resources
    extends ClientBundle {
    @Source("page01.css")
    Page01CSS css();
  }
}
| apache-2.0 |
pulkitsinghal/selenium | java/client/test/org/openqa/selenium/FrameSwitchingTest.java | 17622 | /*
Copyright 2012-2015 Software Freedom Conservancy
Copyright 2007-2012 Selenium committers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.openqa.selenium.support.ui.ExpectedConditions.frameToBeAvailableAndSwitchToIt;
import static org.openqa.selenium.support.ui.ExpectedConditions.not;
import static org.openqa.selenium.support.ui.ExpectedConditions.presenceOfElementLocated;
import static org.openqa.selenium.support.ui.ExpectedConditions.titleIs;
import static org.openqa.selenium.testing.Ignore.Driver.ALL;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.MARIONETTE;
import static org.openqa.selenium.testing.Ignore.Driver.PHANTOMJS;
import static org.openqa.selenium.testing.Ignore.Driver.SAFARI;
import org.junit.After;
import org.junit.Test;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.JavascriptEnabled;
import org.openqa.selenium.testing.NeedsLocalEnvironment;
import java.util.Random;
public class FrameSwitchingTest extends JUnit4TestBase {
@After
public void tearDown() throws Exception {
try {
driver.switchTo().defaultContent();
} catch (Exception e) {
// May happen if the driver went away.
}
}
// ----------------------------------------------------------------------------------------------
//
// Tests that WebDriver doesn't do anything fishy when it navigates to a page with frames.
//
// ----------------------------------------------------------------------------------------------
@Test
public void testShouldAlwaysFocusOnTheTopMostFrameAfterANavigationEvent() {
driver.get(pages.framesetPage);
driver.findElement(By.tagName("frameset")); // Test passes if this does not throw.
}
@Test
public void testShouldNotAutomaticallySwitchFocusToAnIFrameWhenAPageContainingThemIsLoaded() {
driver.get(pages.iframePage);
driver.findElement(By.id("iframe_page_heading"));
}
@Test(timeout = 10000)
@NeedsLocalEnvironment(reason = "it hangs at IE9 and event Test.timeout doesn't help")
@Ignore(MARIONETTE)
public void testShouldOpenPageWithBrokenFrameset() {
driver.get(appServer.whereIs("framesetPage3.html"));
WebElement frame1 = driver.findElement(By.id("first"));
driver.switchTo().frame(frame1);
driver.switchTo().defaultContent();
WebElement frame2 = driver.findElement(By.id("second"));
try {
driver.switchTo().frame(frame2);
} catch (WebDriverException e) {
// IE9 can not switch to this broken frame - it has no window.
}
}
// ----------------------------------------------------------------------------------------------
//
// Tests that WebDriver can switch to frames as expected.
//
// ----------------------------------------------------------------------------------------------
@Test
public void testShouldBeAbleToSwitchToAFrameByItsIndex() {
driver.get(pages.framesetPage);
driver.switchTo().frame(1);
assertThat(driver.findElement(By.id("pageNumber")).getText(), equalTo("2"));
}
@Test
public void testShouldBeAbleToSwitchToAnIframeByItsIndex() {
driver.get(pages.iframePage);
driver.switchTo().frame(0);
assertThat(driver.findElement(By.name("id-name1")).getAttribute("value"), equalTo("name"));
}
@Test
@Ignore(MARIONETTE)
public void testShouldBeAbleToSwitchToAFrameByItsName() {
driver.get(pages.framesetPage);
driver.switchTo().frame("fourth");
assertThat(driver.findElement(By.tagName("frame")).getAttribute("name"), equalTo("child1"));
}
@Test
@Ignore(MARIONETTE)
public void testShouldBeAbleToSwitchToAnIframeByItsName() {
driver.get(pages.iframePage);
driver.switchTo().frame("iframe1-name");
assertThat(driver.findElement(By.name("id-name1")).getAttribute("value"), equalTo("name"));
}
@Test
@Ignore(MARIONETTE)
public void testShouldBeAbleToSwitchToAFrameByItsID() {
driver.get(pages.framesetPage);
driver.switchTo().frame("fifth");
assertThat(driver.findElement(By.name("windowOne")).getText(), equalTo("Open new window"));
}
@Test
@Ignore(MARIONETTE)
public void testShouldBeAbleToSwitchToAnIframeByItsID() {
driver.get(pages.iframePage);
driver.switchTo().frame("iframe1");
assertThat(driver.findElement(By.name("id-name1")).getAttribute("value"), equalTo("name"));
}
@Ignore({MARIONETTE})
@Test
public void testShouldBeAbleToSwitchToFrameWithNameContainingDot() {
driver.get(pages.framesetPage);
driver.switchTo().frame("sixth.iframe1");
assertThat(driver.findElement(By.tagName("body")).getText(), containsString("Page number 3"));
}
@Ignore({MARIONETTE})
@Test
public void testShouldBeAbleToSwitchToAFrameUsingAPreviouslyLocatedWebElement() {
driver.get(pages.framesetPage);
WebElement frame = driver.findElement(By.tagName("frame"));
driver.switchTo().frame(frame);
assertThat(driver.findElement(By.id("pageNumber")).getText(), equalTo("1"));
}
@Ignore(MARIONETTE)
@Test
public void testShouldBeAbleToSwitchToAnIFrameUsingAPreviouslyLocatedWebElement() {
driver.get(pages.iframePage);
WebElement frame = driver.findElement(By.tagName("iframe"));
driver.switchTo().frame(frame);
WebElement element = driver.findElement(By.name("id-name1"));
assertThat(element.getAttribute("value"), equalTo("name"));
}
@Test
public void testShouldEnsureElementIsAFrameBeforeSwitching() {
driver.get(pages.framesetPage);
WebElement frame = driver.findElement(By.tagName("frameset"));
try {
driver.switchTo().frame(frame);
fail();
} catch (NoSuchFrameException expected) {
// Do nothing.
}
}
@Ignore({MARIONETTE})
@Test
public void testFrameSearchesShouldBeRelativeToTheCurrentlySelectedFrame() {
driver.get(pages.framesetPage);
driver.switchTo().frame("second");
assertThat(driver.findElement(By.id("pageNumber")).getText(), equalTo("2"));
try {
driver.switchTo().frame("third");
fail();
} catch (NoSuchFrameException expected) {
// Do nothing
}
driver.switchTo().defaultContent();
driver.switchTo().frame("third");
try {
driver.switchTo().frame("second");
fail();
} catch (NoSuchFrameException expected) {
// Do nothing
}
driver.switchTo().defaultContent();
driver.switchTo().frame("second");
assertThat(driver.findElement(By.id("pageNumber")).getText(), equalTo("2"));
}
@Ignore({MARIONETTE})
@Test
public void testShouldSelectChildFramesByChainedCalls() {
driver.get(pages.framesetPage);
driver.switchTo().frame("fourth").switchTo().frame("child2");
assertThat(driver.findElement(By.id("pageNumber")).getText(), equalTo("11"));
}
@Ignore({MARIONETTE})
@Test
public void testShouldThrowFrameNotFoundExceptionLookingUpSubFramesWithSuperFrameNames() {
driver.get(pages.framesetPage);
driver.switchTo().frame("fourth");
try {
driver.switchTo().frame("second");
fail("Expected NoSuchFrameException");
} catch (NoSuchFrameException e) {
// Expected
}
}
@Test
public void testShouldThrowAnExceptionWhenAFrameCannotBeFound() {
driver.get(pages.xhtmlTestPage);
try {
driver.switchTo().frame("Nothing here");
fail("Should not have been able to switch");
} catch (NoSuchFrameException e) {
// This is expected
}
}
@Test
public void testShouldThrowAnExceptionWhenAFrameCannotBeFoundByIndex() {
driver.get(pages.xhtmlTestPage);
try {
driver.switchTo().frame(27);
fail("Should not have been able to switch");
} catch (NoSuchFrameException e) {
// This is expected
}
}
@Ignore({CHROME, IE, PHANTOMJS, SAFARI, MARIONETTE})
@Test
public void testShouldBeAbleToSwitchToParentFrame() {
driver.get(pages.framesetPage);
driver.switchTo().frame("fourth").switchTo().parentFrame().switchTo().frame("first");
assertThat(driver.findElement(By.id("pageNumber")).getText(), equalTo("1"));
}
@Ignore({CHROME, IE, PHANTOMJS, SAFARI, MARIONETTE})
@Test
public void testShouldBeAbleToSwitchToParentFrameFromASecondLevelFrame() {
driver.get(pages.framesetPage);
driver.switchTo().frame("fourth").switchTo().frame("child1")
.switchTo().parentFrame().switchTo().frame("child2");
assertThat(driver.findElement(By.id("pageNumber")).getText(), equalTo("11"));
}
@Ignore({CHROME, IE, PHANTOMJS, SAFARI, MARIONETTE})
@Test
public void testSwitchingToParentFrameFromDefaultContextIsNoOp() {
driver.get(pages.xhtmlTestPage);
driver.switchTo().parentFrame();
assertEquals(driver.getTitle(), "XHTML Test Page");
}
@Ignore({CHROME, IE, PHANTOMJS, SAFARI, MARIONETTE})
@Test
public void testShouldBeAbleToSwitchToParentFromAnIframe() {
driver.get(pages.iframePage);
driver.switchTo().frame(0);
driver.switchTo().parentFrame();
driver.findElement(By.id("iframe_page_heading"));
}
// ----------------------------------------------------------------------------------------------
//
// General frame handling behavior tests
//
// ----------------------------------------------------------------------------------------------
@Ignore({MARIONETTE})
@Test
public void testShouldContinueToReferToTheSameFrameOnceItHasBeenSelected() {
driver.get(pages.framesetPage);
driver.switchTo().frame(2);
WebElement checkbox = driver.findElement(By.xpath("//input[@name='checky']"));
checkbox.click();
checkbox.submit();
// TODO(simon): this should not be needed, and is only here because IE's submit returns too
// soon.
wait.until(WaitingConditions.elementTextToEqual(By.xpath("//p"), "Success!"));
}
@Ignore(value = {MARIONETTE})
@Test
public void testShouldFocusOnTheReplacementWhenAFrameFollowsALinkToA_TopTargetedPage()
throws Exception {
driver.get(pages.framesetPage);
driver.switchTo().frame(0);
driver.findElement(By.linkText("top")).click();
String expectedTitle = "XHTML Test Page";
wait.until(titleIs(expectedTitle));
wait.until(presenceOfElementLocated(By.id("only-exists-on-xhtmltest")));
}
@Test
public void testShouldAllowAUserToSwitchFromAnIframeBackToTheMainContentOfThePage() {
driver.get(pages.iframePage);
driver.switchTo().frame(0);
try {
driver.switchTo().defaultContent();
driver.findElement(By.id("iframe_page_heading"));
} catch (Exception e) {
fail("Should have switched back to main content");
}
}
@Test
public void testShouldAllowTheUserToSwitchToAnIFrameAndRemainFocusedOnIt() {
driver.get(pages.iframePage);
driver.switchTo().frame(0);
driver.findElement(By.id("submitButton")).click();
assertThat(getTextOfGreetingElement(), equalTo("Success!"));
}
public String getTextOfGreetingElement() {
return wait.until(presenceOfElementLocated(By.id("greeting"))).getText();
}
@Ignore({MARIONETTE})
@Test
public void testShouldBeAbleToClickInAFrame() {
driver.get(pages.framesetPage);
driver.switchTo().frame("third");
// This should replace frame "third" ...
driver.findElement(By.id("submitButton")).click();
// driver should still be focused on frame "third" ...
assertThat(getTextOfGreetingElement(), equalTo("Success!"));
// Make sure it was really frame "third" which was replaced ...
driver.switchTo().defaultContent().switchTo().frame("third");
assertThat(getTextOfGreetingElement(), equalTo("Success!"));
}
@Ignore({MARIONETTE})
@JavascriptEnabled
@Test
public void testShouldBeAbleToClickInAFrameThatRewritesTopWindowLocation() {
driver.get(appServer.whereIs("click_tests/issue5237.html"));
driver.switchTo().frame("search");
driver.findElement(By.id("submit")).click();
driver.switchTo().defaultContent();
wait.until(titleIs("Target page for issue 5237"));
}
@Ignore({MARIONETTE})
@Test
public void testShouldBeAbleToClickInASubFrame() {
driver.get(pages.framesetPage);
driver.switchTo().frame("sixth")
.switchTo().frame("iframe1");
// This should replace frame "iframe1" inside frame "sixth" ...
driver.findElement(By.id("submitButton")).click();
// driver should still be focused on frame "iframe1" inside frame "sixth" ...
assertThat(getTextOfGreetingElement(), equalTo("Success!"));
// Make sure it was really frame "iframe1" inside frame "sixth" which was replaced ...
driver.switchTo().defaultContent()
.switchTo().frame("sixth")
.switchTo().frame("iframe1");
assertThat(driver.findElement(By.id("greeting")).getText(), equalTo("Success!"));
}
@Ignore(MARIONETTE)
@Test
public void testShouldBeAbleToFindElementsInIframesByXPath() {
driver.get(pages.iframePage);
driver.switchTo().frame("iframe1");
WebElement element = driver.findElement(By.xpath("//*[@id = 'changeme']"));
assertNotNull(element);
}
@Ignore({MARIONETTE})
@Test
public void testGetCurrentUrl() {
driver.get(pages.framesetPage);
driver.switchTo().frame("second");
String url = appServer.whereIs("page/2");
assertThat(driver.getCurrentUrl(), equalTo(url + "?title=Fish"));
url = appServer.whereIs("iframes.html");
driver.get(pages.iframePage);
assertThat(driver.getCurrentUrl(), equalTo(url));
url = appServer.whereIs("formPage.html");
driver.switchTo().frame("iframe1");
assertThat(driver.getCurrentUrl(), equalTo(url));
}
@Ignore(value = {PHANTOMJS, MARIONETTE})
@JavascriptEnabled
@Test
public void testShouldBeAbleToSwitchToTheTopIfTheFrameIsDeletedFromUnderUs() {
driver.get(appServer.whereIs("frame_switching_tests/deletingFrame.html"));
driver.switchTo().frame("iframe1");
WebElement killIframe = driver.findElement(By.id("killIframe"));
killIframe.click();
driver.switchTo().defaultContent();
assertFrameNotPresent("iframe1");
WebElement addIFrame = driver.findElement(By.id("addBackFrame"));
addIFrame.click();
wait.until(presenceOfElementLocated(By.id("iframe1")));
driver.switchTo().frame("iframe1");
try {
wait.until(presenceOfElementLocated(By.id("success")));
} catch (WebDriverException web) {
fail("Could not find element after switching frame");
}
}
@Ignore(ALL)
@JavascriptEnabled
@Test
public void testShouldNotBeAbleToDoAnythingTheFrameIsDeletedFromUnderUs() {
driver.get(appServer.whereIs("frame_switching_tests/deletingFrame.html"));
driver.switchTo().frame("iframe1");
WebElement killIframe = driver.findElement(By.id("killIframe"));
killIframe.click();
try {
driver.findElement(By.id("killIframe")).click();
fail("NoSuchFrameException should be thrown");
} catch (NoSuchFrameException expected) {
}
}
@Test
@Ignore(MARIONETTE)
public void testShouldReturnWindowTitleInAFrameset() {
driver.get(pages.framesetPage);
driver.switchTo().frame("third");
assertEquals("Unique title", driver.getTitle());
}
@Ignore(MARIONETTE)
@JavascriptEnabled
@Test
public void testJavaScriptShouldExecuteInTheContextOfTheCurrentFrame() {
JavascriptExecutor executor = (JavascriptExecutor) driver;
driver.get(pages.framesetPage);
assertTrue((Boolean) executor.executeScript("return window == window.top"));
driver.switchTo().frame("third");
assertTrue((Boolean) executor.executeScript("return window != window.top"));
}
@Ignore(MARIONETTE)
@Test
public void testShouldNotSwitchMagicallyToTheTopWindow() {
String baseUrl = appServer.whereIs("frame_switching_tests/");
driver.get(baseUrl + "bug4876.html");
driver.switchTo().frame(0);
wait.until(presenceOfElementLocated(By.id("inputText")));
for (int i = 0; i < 20; i++) {
try {
WebElement input = wait.until(presenceOfElementLocated(By.id("inputText")));
WebElement submit = wait.until(presenceOfElementLocated(By.id("submitButton")));
input.clear();
input.sendKeys("rand" + new Random().nextInt());
submit.click();
} finally {
String url = driver.getCurrentUrl();
// IE6 and Chrome add "?"-symbol to the end of the URL
if (url.endsWith("?")) {
url = url.substring(0, url.length()-1);
}
assertEquals(baseUrl + "bug4876_iframe.html", url);
}
}
}
/**
 * Asserts that no frame matching {@code locator} can be switched to, leaving
 * the driver focused on the default content afterwards.
 */
private void assertFrameNotPresent(String locator) {
    driver.switchTo().defaultContent();
    wait.until(not(frameToBeAvailableAndSwitchToIt(locator)));
    // Re-select default content in case the wait left focus elsewhere.
    driver.switchTo().defaultContent();
}
}
| apache-2.0 |
gaowangyizu/myHeritrix | myHeritrix/src/org/archive/io/arc/ARCWriterPoolTest.java | 6018 | /* ARCWriterPoolTest
*
* $Id: ARCWriterPoolTest.java 5029 2007-03-29 23:53:50Z gojomo $
*
* Created on Jan 22, 2004
*
* Copyright (C) 2004 Internet Archive.
*
* This file is part of the Heritrix web crawler (crawler.archive.org).
*
* Heritrix is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* any later version.
*
* Heritrix is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with Heritrix; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.archive.io.arc;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.NoSuchElementException;
import org.archive.io.WriterPoolMember;
import org.archive.io.WriterPool;
import org.archive.io.WriterPoolSettings;
import org.archive.util.TmpDirTestCase;
/**
* Test ARCWriterPool
*/
public class ARCWriterPoolTest extends TmpDirTestCase {
private static final String PREFIX = "TEST";
/**
 * Exercises basic pool behaviour: borrows the maximum number of writers,
 * verifies a further borrow times out with {@link NoSuchElementException},
 * then returns every writer and checks the active/idle counters.
 */
public void testARCWriterPool()
throws Exception {
    final int MAX_ACTIVE = 3;
    final int MAX_WAIT_MILLISECONDS = 100;
    cleanUpOldFiles(PREFIX);
    WriterPool pool = new ARCWriterPool(getSettings(true),
        MAX_ACTIVE, MAX_WAIT_MILLISECONDS);
    WriterPoolMember [] writers = new WriterPoolMember[MAX_ACTIVE];
    final String CONTENT = "Any old content";
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(CONTENT.getBytes());
    // Drain the pool: each borrow must bump the active count.
    for (int i = 0; i < MAX_ACTIVE; i++) {
        writers[i] = pool.borrowFile();
        assertEquals("Number active", i + 1, pool.getNumActive());
        ((ARCWriter)writers[i]).write("http://one.two.three", "no-type",
            "0.0.0.0", 1234567890, CONTENT.length(), baos);
    }
    // Pool is maxed out. Try and get a new ARCWriter. We'll block for
    // MAX_WAIT_MILLISECONDS. Should get exception.
    long start = System.currentTimeMillis();
    boolean isException = false;
    try {
        pool.borrowFile();
    } catch (NoSuchElementException e) {
        isException = true;
        long end = System.currentTimeMillis();
        // This test can fail on a loaded machine if the wait period is
        // only MAX_WAIT_MILLISECONDS. Up the time to wait.
        final int WAIT = MAX_WAIT_MILLISECONDS * 100;
        if ((end - start) > (WAIT)) {
            // Report the bound actually checked (WAIT), not the base timeout
            // — the original message printed the wrong constant.
            fail("More than " + WAIT + "ms elapsed waiting on borrowFile: "
                + (end - start));
        }
    }
    assertTrue("Did not get NoSuchElementException", isException);
    // Return writers in reverse order; counters must track every return.
    for (int i = (MAX_ACTIVE - 1); i >= 0; i--) {
        pool.returnFile(writers[i]);
        assertEquals("Number active", i, pool.getNumActive());
        assertEquals("Number idle", MAX_ACTIVE - pool.getNumActive(),
            pool.getNumIdle());
    }
    pool.close();
}
/**
 * Borrows every writer, invalidates one instead of returning it, and verifies
 * the pool replaces it: a full set of writers can be borrowed again and the
 * active/idle counters stay consistent on return.
 */
public void testInvalidate() throws Exception {
    final int MAX_ACTIVE = 3;
    final int MAX_WAIT_MILLISECONDS = 100;
    cleanUpOldFiles(PREFIX);
    WriterPool pool = new ARCWriterPool(getSettings(true),
        MAX_ACTIVE, MAX_WAIT_MILLISECONDS);
    WriterPoolMember [] writers = new WriterPoolMember[MAX_ACTIVE];
    final String CONTENT = "Any old content";
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    baos.write(CONTENT.getBytes());
    for (int i = 0; i < MAX_ACTIVE; i++) {
        writers[i] = pool.borrowFile();
        assertEquals("Number active", i + 1, pool.getNumActive());
        ((ARCWriter)writers[i]).write("http://one.two.three", "no-type",
            "0.0.0.0", 1234567890, CONTENT.length(), baos);
    }
    // Invalidate the last borrowed writer rather than returning it.
    WriterPoolMember writer2Invalidate = writers[pool.getNumActive() - 1];
    writers[pool.getNumActive() - 1] = null;
    pool.invalidateFile(writer2Invalidate);
    // Return the remaining (non-null) writers.
    for (int i = 0; i < (MAX_ACTIVE - 1); i++) {
        if (writers[i] == null) {
            continue;
        }
        pool.returnFile(writers[i]);
    }
    // The pool must again supply a full complement of writers.
    for (int i = 0; i < MAX_ACTIVE; i++) {
        writers[i] = pool.borrowFile();
        assertEquals("Number active", i + 1, pool.getNumActive());
        ((ARCWriter)writers[i]).write("http://one.two.three", "no-type",
            "0.0.0.0", 1234567890, CONTENT.length(), baos);
    }
    for (int i = (MAX_ACTIVE - 1); i >= 0; i--) {
        pool.returnFile(writers[i]);
        assertEquals("Number active", i, pool.getNumActive());
        assertEquals("Number idle", MAX_ACTIVE - pool.getNumActive(),
            pool.getNumIdle());
    }
    pool.close();
}
/**
 * Builds pool settings that write {@link #PREFIX}-named ARC files into the
 * test tmp directory.
 *
 * @param isCompressed whether produced ARC files are compressed
 */
private WriterPoolSettings getSettings(final boolean isCompressed) {
    return new WriterPoolSettings() {
        public long getMaxSize() {
            return ARCConstants.DEFAULT_MAX_ARC_FILE_SIZE;
        }
        public String getPrefix() {
            return PREFIX;
        }
        public String getSuffix() {
            // No suffix on generated file names.
            return "";
        }
        public List<File> getOutputDirs() {
            File [] files = {getTmpDir()};
            return Arrays.asList(files);
        }
        public boolean isCompressed() {
            return isCompressed;
        }
        public List getMetadata() {
            // No extra metadata records for tests.
            return null;
        }
    };
}
} | apache-2.0 |
apache/ignite | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/EncryptedSnapshotTest.java | 18211 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.persistence.snapshot;
import java.io.File;
import java.util.Collections;
import java.util.function.Function;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.encryption.AbstractEncryptionTest;
import org.apache.ignite.internal.processors.cache.verify.IdleVerifyResultV2;
import org.apache.ignite.internal.util.distributed.FullMessage;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.testframework.GridTestUtils;
import org.junit.Test;
import org.junit.runners.Parameterized;
import static org.apache.ignite.cluster.ClusterState.ACTIVE;
import static org.apache.ignite.configuration.IgniteConfiguration.DFLT_SNAPSHOT_DIRECTORY;
/**
* Snapshot test for encrypted-only snapshots.
*/
public class EncryptedSnapshotTest extends AbstractSnapshotSelfTest {
/** Name of the second cache used alongside {@code DEFAULT_CACHE_NAME} in multi-cache scenarios. */
private static final String CACHE2 = "cache2";
/** Parameters: this suite runs only with encryption enabled. */
@Parameterized.Parameters(name = "Encryption is enabled.")
public static Iterable<Boolean> enableEncryption() {
    return Collections.singletonList(true);
}
/** {@inheritDoc} Builds {@code Account} values from the integer key (both fields set to the key). */
@Override protected Function<Integer, Object> valueBuilder() {
    return (i -> new Account(i, i));
}
/** Checks creation of encrypted cache with same name after putting plain cache in snapshot. */
@Test
public void testEncryptedCacheCreatedAfterPlainCacheSnapshotting() throws Exception {
    // NOTE(review): per testCacheCreatedAfterSnapshotting's javadoc, 'true'
    // means the ENCRYPTED cache is created/snapshotted first — that looks
    // inverted relative to this test's name. Confirm intent.
    testCacheCreatedAfterSnapshotting(true);
}
/** Checks creation of plain cache with same name after putting encrypted cache in snapshot. */
@Test
public void testPlainCacheCreatedAfterEncryptedCacheSnapshotting() throws Exception {
    // NOTE(review): per testCacheCreatedAfterSnapshotting's javadoc, 'false'
    // means the PLAIN cache is created/snapshotted first — that looks inverted
    // relative to this test's name. Confirm intent.
    testCacheCreatedAfterSnapshotting(false);
}
/** Checks a cache group key change is rejected while a snapshot restoration is in progress. */
@Test
public void testReencryptDuringRestore() throws Exception {
    checkActionFailsDuringSnapshotOperation(true, this::chageCacheKey, "Cache group key change " +
        "was rejected.", IgniteException.class);
}
/** Checks a master key change is rejected while a snapshot restoration is in progress. */
@Test
public void testMasterKeyChangeDuringRestore() throws Exception {
    checkActionFailsDuringSnapshotOperation(true, this::chageMasterKey, "Master key change was " +
        "rejected.", IgniteException.class);
}
/** Checks a cache group key change is rejected while a snapshot is being created. */
@Test
public void testReencryptDuringSnapshot() throws Exception {
    checkActionFailsDuringSnapshotOperation(false, this::chageCacheKey, "Cache group key change " +
        "was rejected.", IgniteException.class);
}
/** Checks a master key change is rejected while a snapshot is being created. */
@Test
public void testMasterKeyChangeDuringSnapshot() throws Exception {
    checkActionFailsDuringSnapshotOperation(false, this::chageMasterKey, "Master key change was " +
        "rejected.", IgniteException.class);
}
/** Checks snapshot creation/restoration fails while a cache group key change (re-encryption) is running. */
@Test
public void testSnapshotFailsDuringCacheKeyChange() throws Exception {
    checkSnapshotActionFailsDuringReencryption(this::chageCacheKey, "Caches re-encryption process " +
        "is not finished yet");
}
/** Checks snapshot creation/restoration fails while a master key change is running. */
@Test
public void testSnapshotFailsDuringMasterKeyChange() throws Exception {
    checkSnapshotActionFailsDuringReencryption(this::chageMasterKey, "Master key changing process " +
        "is not finished yet.");
}
/** Checks snapshot restoration fails when the cluster is restarted with a different master key. */
@Test
public void testSnapshotRestoringFailsWithOtherMasterKey() throws Exception {
    IgniteEx ig = startGridsWithCache(2, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg);
    snp(ig).createSnapshot(SNAPSHOT_NAME).get();
    ig.destroyCache(dfltCacheCfg.getName());
    ensureCacheAbsent(dfltCacheCfg);
    stopAllGrids(false);
    // Restart the whole cluster under a different master key.
    masterKeyName = AbstractEncryptionTest.MASTER_KEY_NAME_2;
    final IgniteEx ig1 = startGrids(2);
    ig1.cluster().state(ACTIVE);
    // Restoration must be rejected because the snapshot was taken under another key.
    GridTestUtils.assertThrowsAnyCause(
        log,
        () -> snp(ig1).restoreSnapshot(SNAPSHOT_NAME, Collections.singletonList(dfltCacheCfg.getName())).get(TIMEOUT),
        IgniteCheckedException.class,
        "different master key digest"
    );
}
/** Checks both encrypted and plain caches can be restored from the same snapshot into a running cluster. */
@Test
public void testRestoringEncryptedAndPlainCaches() throws Exception {
    start2GridsWithEncryptesAndPlainCachesSnapshot();
    grid(1).snapshot().restoreSnapshot(SNAPSHOT_NAME, null).get(TIMEOUT);
    // Both caches must come back fully populated.
    assertCacheKeys(grid(1).cache(DEFAULT_CACHE_NAME), CACHE_KEYS_RANGE);
    assertCacheKeys(grid(1).cache(CACHE2), CACHE_KEYS_RANGE);
}
/** Checks a cluster holding both encrypted and plain caches can be started directly from the snapshot files. */
@Test
public void testStartingWithEncryptedAndPlainCaches() throws Exception {
    start2GridsWithEncryptesAndPlainCachesSnapshot();
    stopAllGrids();
    // Boot a fresh cluster straight from the snapshot.
    IgniteEx ig = startGridsFromSnapshot(2, SNAPSHOT_NAME);
    assertCacheKeys(ig.cache(DEFAULT_CACHE_NAME), CACHE_KEYS_RANGE);
    assertCacheKeys(ig.cache(CACHE2), CACHE_KEYS_RANGE);
}
/** Checks snapshot creation/restore/restart works after one cache-key re-encryption cycle. */
@Test
public void testSnapshotRestoringAfterSingleReencryption() throws Exception {
    checkSnapshotWithReencryptedCache(1);
}
/** Checks snapshot creation/restore/restart works after several cache-key re-encryption cycles. */
@Test
public void testSnapshotRestoringAfterMultipleReencryption() throws Exception {
    checkSnapshotWithReencryptedCache(3);
}
/** Checks snapshot validation fails if a different master key is used. */
@Test
public void testValidatingSnapshotFailsWithOtherMasterKey() throws Exception {
    IgniteEx ig = startGridsWithCache(2, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg);
    ig.snapshot().createSnapshot(SNAPSHOT_NAME).get();
    ig.destroyCache(dfltCacheCfg.getName());
    ensureCacheAbsent(dfltCacheCfg);
    stopAllGrids(false);
    // Restart under a different master key, then validate the snapshot.
    masterKeyName = AbstractEncryptionTest.MASTER_KEY_NAME_2;
    ig = startGrids(2);
    IdleVerifyResultV2 snpCheckRes = snp(ig).checkSnapshot(SNAPSHOT_NAME).get();
    // The check result must report the key mismatch among its exceptions.
    for (Exception e : snpCheckRes.exceptions().values()) {
        if (e.getMessage().contains("different master key digest"))
            return;
    }
    throw new IllegalStateException("Snapshot validation must contain error due to different master key.");
}
/**
 * Checks snapshot validation fails when the snapshot contains encrypted caches
 * but the cluster is restarted with encryption disabled.
 *
 * @throws Exception If fails.
 */
@Test
public void testValidatingSnapshotFailsWithNoEncryption() throws Exception {
    File tmpSnpDir = null;
    try {
        startGridsWithSnapshot(3, CACHE_KEYS_RANGE, false);
        stopAllGrids();
        // Restart without encryption and without the default cache config.
        encryption = false;
        dfltCacheCfg = null;
        File snpDir = U.resolveWorkDirectory(U.defaultWorkDirectory(), DFLT_SNAPSHOT_DIRECTORY, false);
        assertTrue(snpDir.isDirectory() && snpDir.listFiles().length > 0);
        tmpSnpDir = new File(snpDir.getAbsolutePath() + "_tmp");
        // NOTE(review): File.length() is 0 for a non-existent path — presumably
        // this asserts the tmp dir does not exist yet; assertFalse(exists())
        // would state that directly. Confirm intent.
        assertTrue(tmpSnpDir.length() == 0);
        // Move the snapshot aside so cleanPersistenceDir() doesn't remove it.
        assertTrue(snpDir.renameTo(tmpSnpDir));
        cleanPersistenceDir();
        assertTrue(tmpSnpDir.renameTo(snpDir));
        IgniteEx ig = startGrids(3);
        // NOTE(review): renameTo() result ignored here — confirm a failure is acceptable.
        snpDir.renameTo(U.resolveWorkDirectory(U.defaultWorkDirectory(), DFLT_SNAPSHOT_DIRECTORY, false));
        ig.cluster().state(ACTIVE);
        IdleVerifyResultV2 snpCheckRes = snp(ig).checkSnapshot(SNAPSHOT_NAME).get();
        for (Exception e : snpCheckRes.exceptions().values()) {
            if (e.getMessage().contains("has encrypted caches while encryption is disabled"))
                return;
        }
        throw new IllegalStateException("Snapshot validation must contain error due to encryption is currently " +
            "disabled.");
    }
    finally {
        if (tmpSnpDir != null)
            U.delete(tmpSnpDir);
    }
}
/** Checks starting a cluster from the snapshot fails when restarted under a different master key. */
@Test
public void testStartFromSnapshotFailedWithOtherMasterKey() throws Exception {
    IgniteEx ig = startGridsWithCache(2, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg);
    ig.snapshot().createSnapshot(SNAPSHOT_NAME).get();
    ig.destroyCache(dfltCacheCfg.getName());
    ensureCacheAbsent(dfltCacheCfg);
    stopAllGrids(false);
    // Boot from snapshot with another master key: decryption must fail.
    masterKeyName = AbstractEncryptionTest.MASTER_KEY_NAME_2;
    GridTestUtils.assertThrowsAnyCause(
        log,
        () -> startGridsFromSnapshot(2, SNAPSHOT_NAME),
        IgniteSpiException.class,
        "bad key is used during decryption"
    );
}
/** Checks a snapshot task for encrypted caches cannot be registered without a metastore (which holds the keys). */
@Test
public void testSnapshotTaskIsBlockedWithoutMetastore() throws Exception {
    // Start grid node with data before each test.
    IgniteEx ig = startGridsWithCache(1, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg);
    // Registering the task directly (bypassing the public API) must be rejected.
    GridTestUtils.assertThrowsAnyCause(log,
        () -> snp(ig).registerSnapshotTask(SNAPSHOT_NAME, ig.localNode().id(),
            F.asMap(CU.cacheId(dfltCacheCfg.getName()), null), false,
            snp(ig).localSnapshotSenderFactory().apply(SNAPSHOT_NAME)).get(TIMEOUT),
        IgniteCheckedException.class,
        "Metastore is required because it holds encryption keys");
}
/** {@inheritDoc} Waits for partition map exchange first, so destroyed caches are fully unregistered before the check. */
@Override protected void ensureCacheAbsent(
    CacheConfiguration<?, ?> ccfg) throws IgniteCheckedException, InterruptedException {
    awaitPartitionMapExchange();
    super.ensureCacheAbsent(ccfg);
}
/**
 * Ensures that a same-name cache can be created after putting a cache into a snapshot and deleting it.
 *
 * @param encryptedFirst If {@code true}, creates the encrypted cache before snapshotting and deleting,
 * then re-creates it as a plain cache. In reverse order if {@code false}.
 */
private void testCacheCreatedAfterSnapshotting(boolean encryptedFirst) throws Exception {
    startGrids(2);
    grid(0).cluster().state(ClusterState.ACTIVE);
    addCache(encryptedFirst);
    grid(1).snapshot().createSnapshot(SNAPSHOT_NAME).get(TIMEOUT);
    awaitPartitionMapExchange();
    grid(0).destroyCache(CACHE2);
    awaitPartitionMapExchange();
    // Re-create the same-name cache with the opposite encryption flag.
    addCache(!encryptedFirst);
}
/**
 * Checks snapshot create/restore/start-from-snapshot after re-encryption.
 *
 * @param reencryptionIterations Number of re-encryption turns.
 */
private void checkSnapshotWithReencryptedCache(int reencryptionIterations) throws Exception {
    IgniteEx ig = startGridsWithCache(2, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg.setName(CACHE2));
    // Rotate the cache group key and wait for background re-encryption on every node.
    for (int r = 0; r < reencryptionIterations; ++r) {
        chageCacheKey(0).get(TIMEOUT);
        for (int g = 0; g < 2; ++g)
            grid(g).context().encryption().reencryptionFuture(CU.cacheId(dfltCacheCfg.getName())).get();
    }
    ig.snapshot().createSnapshot(SNAPSHOT_NAME).get(TIMEOUT);
    ig.cache(dfltCacheCfg.getName()).destroy();
    ensureCacheAbsent(dfltCacheCfg);
    // Restore into the running cluster and verify the data.
    ig.snapshot().restoreSnapshot(SNAPSHOT_NAME, null).get(TIMEOUT);
    assertCacheKeys(grid(1).cache(dfltCacheCfg.getName()), CACHE_KEYS_RANGE);
    stopAllGrids();
    // Also boot a fresh cluster directly from the snapshot files.
    startGridsFromSnapshot(2, SNAPSHOT_NAME);
    assertCacheKeys(grid(1).cache(dfltCacheCfg.getName()), CACHE_KEYS_RANGE);
}
/**
 * Checks {@code action} is rejected with {@code errPrefix} and {@code errType} while a snapshot
 * operation is active.
 *
 * @param restore If {@code true}, snapshot restoration is activated during the test. Snapshot creation otherwise.
 * @param action Action to call during snapshot operation. Its param is the grid num.
 * @param errPrefix Prefix of error message text to search for.
 * @param errType Type of exception to search for.
 */
private void checkActionFailsDuringSnapshotOperation(boolean restore, Function<Integer, IgniteFuture<?>> action,
    String errPrefix, Class<? extends Exception> errType) throws Exception {
    startGridsWithCache(3, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg,
        new CacheConfiguration<>(dfltCacheCfg).setName(CACHE2));
    BlockingCustomMessageDiscoverySpi spi0 = discoSpi(grid(0));
    IgniteFuture<Void> fut;
    if (restore) {
        grid(1).snapshot().createSnapshot(SNAPSHOT_NAME).get(TIMEOUT);
        grid(1).cache(dfltCacheCfg.getName()).destroy();
        ensureCacheAbsent(dfltCacheCfg);
        // Block the finishing discovery message so the restore stays in progress.
        spi0.block((msg) -> msg instanceof FullMessage && ((FullMessage<?>)msg).error().isEmpty());
        fut = grid(1).snapshot().restoreSnapshot(SNAPSHOT_NAME, Collections.singletonList(dfltCacheCfg.getName()));
    }
    else {
        // Block the finishing discovery message so the snapshot stays in progress.
        spi0.block((msg) -> msg instanceof FullMessage && ((FullMessage<?>)msg).error().isEmpty());
        fut = grid(1).snapshot().createSnapshot(SNAPSHOT_NAME);
    }
    spi0.waitBlocked(TIMEOUT);
    // The competing action must be rejected while the snapshot operation is active.
    GridTestUtils.assertThrowsAnyCause(log, () -> action.apply(2).get(TIMEOUT), errType,
        errPrefix + " Snapshot operation is in progress.");
    spi0.unblock();
    fut.get(TIMEOUT);
}
/**
 * Checks snapshot creation and restoration are rejected while {@code reencryption} is running.
 *
 * @param reencryption Any kind of re-encryption action (cache group key or master key change).
 * @param expectedError Error text expected from the rejected snapshot operations.
 */
private void checkSnapshotActionFailsDuringReencryption(Function<Integer, IgniteFuture<?>> reencryption,
    String expectedError) throws Exception {
    startGridsWithCache(3, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg,
        new CacheConfiguration<>(dfltCacheCfg).setName(CACHE2));
    grid(1).snapshot().createSnapshot(SNAPSHOT_NAME).get(TIMEOUT);
    grid(2).destroyCache(dfltCacheCfg.getName());
    ensureCacheAbsent(dfltCacheCfg);
    // Block the finishing discovery message so the re-encryption stays in progress.
    BlockingCustomMessageDiscoverySpi discoSpi = discoSpi(grid(0));
    discoSpi.block(msg -> msg instanceof FullMessage && ((FullMessage<?>)msg).error().isEmpty());
    IgniteFuture<?> fut = reencryption.apply(1);
    discoSpi.waitBlocked(TIMEOUT);
    // Restoration must be rejected while re-encryption is running.
    GridTestUtils.assertThrowsAnyCause(log,
        () -> grid(1).snapshot().restoreSnapshot(SNAPSHOT_NAME, Collections.singletonList(CACHE2)).get(TIMEOUT),
        IgniteCheckedException.class,
        expectedError);
    // Creation of a new snapshot must be rejected as well.
    GridTestUtils.assertThrowsAnyCause(log,
        () -> grid(2).snapshot().createSnapshot(SNAPSHOT_NAME + "_v2").get(TIMEOUT), IgniteCheckedException.class,
        expectedError);
    discoSpi.unblock();
    fut.get(TIMEOUT);
}
/**
 * Adds the second cache ({@link #CACHE2}) to the grid, fills it with
 * {@code CACHE_KEYS_RANGE} entries and forces a checkpoint.
 *
 * @param encrypted If {@code true}, creates an encrypted cache.
 * @return CacheConfiguration of the created cache.
 */
private CacheConfiguration<?, ?> addCache(boolean encrypted) throws IgniteCheckedException {
    CacheConfiguration<?, ?> cacheCfg = new CacheConfiguration<>(dfltCacheCfg).setName(CACHE2).
        setEncryptionEnabled(encrypted);

    grid(0).createCache(cacheCfg);

    Function<Integer, Object> valBuilder = valueBuilder();

    // try-with-resources: IgniteDataStreamer is AutoCloseable and was
    // previously leaked (flushed but never closed).
    try (IgniteDataStreamer<Integer, Object> streamer = grid(0).dataStreamer(CACHE2)) {
        for (int i = 0; i < CACHE_KEYS_RANGE; i++)
            streamer.addData(i, valBuilder.apply(i));

        streamer.flush();
    }

    forceCheckpoint();

    return cacheCfg;
}
/**
 * Starts 2 nodes, creates encrypted and plain caches, takes a snapshot, then destroys
 * both caches and ensures they are absent.
 */
private void start2GridsWithEncryptesAndPlainCachesSnapshot() throws Exception {
    startGridsWithCache(2, CACHE_KEYS_RANGE, valueBuilder(), dfltCacheCfg);
    // Second, plain (non-encrypted) cache alongside the default encrypted one.
    CacheConfiguration<?, ?> ccfg = addCache(false);
    grid(1).snapshot().createSnapshot(SNAPSHOT_NAME).get(TIMEOUT);
    grid(1).cache(DEFAULT_CACHE_NAME).destroy();
    grid(1).cache(CACHE2).destroy();
    ensureCacheAbsent(dfltCacheCfg);
    ensureCacheAbsent(ccfg);
}
/**
 * @param gridNum Index of the grid node to trigger the change from.
 * @return Cache group key change action for {@link #CACHE2}.
 * (Method name carries a historical "chage" typo; left as-is to keep callers intact.)
 */
private IgniteFuture<?> chageCacheKey(int gridNum) {
    return grid(gridNum).encryption().changeCacheGroupKey(Collections.singletonList(CACHE2));
}
/**
 * @param gridNum Index of the grid node to trigger the change from.
 * @return Master key change action switching to {@code MASTER_KEY_NAME_2}.
 * (Method name carries a historical "chage" typo; left as-is to keep callers intact.)
 */
private IgniteFuture<?> chageMasterKey(int gridNum) {
    return grid(gridNum).encryption().changeMasterKey(AbstractEncryptionTest.MASTER_KEY_NAME_2);
}
}
| apache-2.0 |
baidao/bizsocket | sample/j2se/src/main/java/bizsocket/sample/j2se/common/SampleProtocolUtil.java | 1123 | package bizsocket.sample.j2se.common;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Helpers for reading the status code and message fields out of JSON protocol
 * payloads.
 *
 * Created by tong on 16/9/12.
 */
public class SampleProtocolUtil {
    /**
     * Extracts the integer {@code "code"} field from a JSON payload.
     *
     * @param s raw JSON string
     * @return the value of {@code "code"}, or -1 when the field is missing or
     *         the payload cannot be parsed
     */
    public static int getResCode(String s) {
        try {
            JSONObject root = new JSONObject(s);
            if (!root.has("code")) {
                return -1;
            }
            return root.getInt("code");
        } catch (JSONException e) {
            e.printStackTrace();
            return -1;
        }
    }

    /**
     * Extracts the {@code "msg"} field from a JSON payload.
     *
     * @param s raw JSON string
     * @return the value of {@code "msg"}, or the empty string when absent or unparsable
     */
    public static String getMsg(String s) {
        try {
            return new JSONObject(s).optString("msg");
        } catch (JSONException e) {
            e.printStackTrace();
            return "";
        }
    }

    /** Returns {@code true} when the packet carries a successful (code == 200) response. */
    public static boolean isSuccessResponsePacket(SamplePacket packet) {
        try {
            return SampleProtocolUtil.getResCode(packet.getContent()) == 200;
        } catch (Throwable e) {
            // Best-effort check: any failure means "not a success response".
            return false;
        }
    }
}
| apache-2.0 |
justvarshney/geometry-api-java | src/test/java/com/esri/core/geometry/TestPolygonUtils.java | 4512 | package com.esri.core.geometry;
import junit.framework.TestCase;
import org.junit.Test;
/** Unit tests for {@link PolygonUtils} point-in-polygon classification. */
public class TestPolygonUtils extends TestCase {
    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Classifies sample points against a polygon with two outer rings, a hole
     * and an island, comparing isPointInPolygon2D (hole-aware) against
     * isPointInAnyOuterRing (ignores holes/islands, outer rings only).
     */
    @Test
    public static void testPointInAnyOuterRing() {
        Polygon polygon = new Polygon();
        // outer ring1
        polygon.startPath(-200, -100);
        polygon.lineTo(200, -100);
        polygon.lineTo(200, 100);
        polygon.lineTo(-190, 100);
        polygon.lineTo(-190, 90);
        polygon.lineTo(-200, 90);
        // hole
        polygon.startPath(-100, 50);
        polygon.lineTo(100, 50);
        polygon.lineTo(100, -40);
        polygon.lineTo(90, -40);
        polygon.lineTo(90, -50);
        polygon.lineTo(-100, -50);
        // island
        polygon.startPath(-10, -10);
        polygon.lineTo(10, -10);
        polygon.lineTo(10, 10);
        polygon.lineTo(-10, 10);
        // outer ring2
        polygon.startPath(300, 300);
        polygon.lineTo(310, 300);
        polygon.lineTo(310, 310);
        polygon.lineTo(300, 310);
        // NOTE(review): all paths reversed — presumably to obtain the ring
        // orientation the classifier expects; confirm.
        polygon.reverseAllPaths();
        Point2D testPointIn1 = new Point2D(1, 2); // inside the island
        Point2D testPointIn2 = new Point2D(190, 90); // inside, betwen outer
        // ring1 and the hole
        Point2D testPointIn3 = new Point2D(305, 305); // inside the outer ring2
        Point2D testPointOut1 = new Point2D(300, 2); // outside any
        Point2D testPointOut2 = new Point2D(-195, 95); // outside any (in the
        // concave area of outer
        // ring 2)
        Point2D testPointOut3 = new Point2D(99, 49); // outside (in the hole)
        PolygonUtils.PiPResult res;
        // is_point_in_polygon_2D
        res = PolygonUtils.isPointInPolygon2D(polygon, testPointIn1, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        res = PolygonUtils.isPointInPolygon2D(polygon, testPointIn2, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        res = PolygonUtils.isPointInPolygon2D(polygon, testPointIn3, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        res = PolygonUtils.isPointInPolygon2D(polygon, testPointOut1, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
        res = PolygonUtils.isPointInPolygon2D(polygon, testPointOut2, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
        res = PolygonUtils.isPointInPolygon2D(polygon, testPointOut3, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
        // Ispoint_in_any_outer_ring
        res = PolygonUtils.isPointInAnyOuterRing(polygon, testPointIn1, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        res = PolygonUtils.isPointInAnyOuterRing(polygon, testPointIn2, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        res = PolygonUtils.isPointInAnyOuterRing(polygon, testPointIn3, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        res = PolygonUtils.isPointInAnyOuterRing(polygon, testPointOut1, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
        res = PolygonUtils.isPointInAnyOuterRing(polygon, testPointOut2, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
        res = PolygonUtils.isPointInAnyOuterRing(polygon, testPointOut3, 0);
        assertTrue(res == PolygonUtils.PiPResult.PiPInside);// inside of outer
        // ring
    }

    /**
     * Regression test for CR181840: the same triangle, entered with different
     * starting vertices, must classify identical points consistently.
     */
    @Test
    public static void testPointInPolygonBugCR181840() {
        PolygonUtils.PiPResult res;
        {// pointInPolygonBugCR181840 - point in polygon bug
            Polygon polygon = new Polygon();
            // outer ring1
            polygon.startPath(0, 0);
            polygon.lineTo(10, 10);
            polygon.lineTo(20, 0);
            res = PolygonUtils.isPointInPolygon2D(polygon,
                Point2D.construct(15, 10), 0);
            assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
            res = PolygonUtils.isPointInPolygon2D(polygon,
                Point2D.construct(2, 10), 0);
            assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
            res = PolygonUtils.isPointInPolygon2D(polygon,
                Point2D.construct(5, 5), 0);
            assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        }
        {// CR181840 - point in polygon bug
            Polygon polygon = new Polygon();
            // outer ring1 — same triangle, different starting vertex
            polygon.startPath(10, 10);
            polygon.lineTo(20, 0);
            polygon.lineTo(0, 0);
            res = PolygonUtils.isPointInPolygon2D(polygon,
                Point2D.construct(15, 10), 0);
            assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
            res = PolygonUtils.isPointInPolygon2D(polygon,
                Point2D.construct(2, 10), 0);
            assertTrue(res == PolygonUtils.PiPResult.PiPOutside);
            res = PolygonUtils.isPointInPolygon2D(polygon,
                Point2D.construct(5, 5), 0);
            assertTrue(res == PolygonUtils.PiPResult.PiPInside);
        }
    }
}
| apache-2.0 |
magicdoom/RocketMQ | rocketmq-store/src/main/java/com/alibaba/rocketmq/store/AllocateMapedFileService.java | 10750 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.store;
import com.alibaba.rocketmq.common.ServiceThread;
import com.alibaba.rocketmq.common.UtilAll;
import com.alibaba.rocketmq.common.constant.LoggerName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;
public class AllocateMapedFileService extends ServiceThread {
private static final Logger log = LoggerFactory.getLogger(LoggerName.StoreLoggerName);
// Max time (ms) a caller of putRequestAndReturnMapedFile() waits for the
// background thread to finish the mmap.
// NOTE(review): not final — presumably intentional (e.g. adjustable); confirm.
private static int WaitTimeOut = 1000 * 5;
// Pending/in-flight requests keyed by target file path; detects duplicate
// requests and hands results back to waiting callers.
private ConcurrentHashMap<String, AllocateRequest> requestTable =
    new ConcurrentHashMap<String, AllocateRequest>();
// Queue drained by the service thread, ordered per AllocateRequest.compareTo
// (larger fileSize first).
private PriorityBlockingQueue<AllocateRequest> requestQueue =
    new PriorityBlockingQueue<AllocateRequest>();
// Set when the mmap thread hits an error; makes subsequent requests return null fast.
private volatile boolean hasException = false;
private DefaultMessageStore messageStore;
public AllocateMapedFileService(DefaultMessageStore messageStore) {
    this.messageStore = messageStore;
}
/**
 * Enqueues pre-allocation of the next and next-next maped files, then waits up
 * to {@code WaitTimeOut} ms for the first one.
 *
 * @return the pre-created maped file for {@code nextFilePath}, or {@code null}
 *         on timeout, interruption, or when the allocation thread previously failed
 */
public MapedFile putRequestAndReturnMapedFile(String nextFilePath, String nextNextFilePath, int fileSize) {
    AllocateRequest nextReq = new AllocateRequest(nextFilePath, fileSize);
    AllocateRequest nextNextReq = new AllocateRequest(nextNextFilePath, fileSize);
    // putIfAbsent: only the first caller for a given path enqueues a request.
    boolean nextPutOK = (this.requestTable.putIfAbsent(nextFilePath, nextReq) == null);
    boolean nextNextPutOK = (this.requestTable.putIfAbsent(nextNextFilePath, nextNextReq) == null);
    if (nextPutOK) {
        boolean offerOK = this.requestQueue.offer(nextReq);
        if (!offerOK) {
            log.warn("never expetced here, add a request to preallocate queue failed");
        }
    }
    if (nextNextPutOK) {
        boolean offerOK = this.requestQueue.offer(nextNextReq);
        if (!offerOK) {
            log.warn("never expetced here, add a request to preallocate queue failed");
        }
    }
    if (hasException) {
        log.warn(this.getServiceName() + " service has exception. so return null");
        return null;
    }
    // Only wait for the 'next' file; the next-next request is speculative.
    AllocateRequest result = this.requestTable.get(nextFilePath);
    try {
        if (result != null) {
            boolean waitOK = result.getCountDownLatch().await(WaitTimeOut, TimeUnit.MILLISECONDS);
            if (!waitOK) {
                log.warn("create mmap timeout " + result.getFilePath() + " " + result.getFileSize());
                return null;
            }
            else {
                // Success: hand the file out and drop the bookkeeping entry.
                this.requestTable.remove(nextFilePath);
                return result.getMapedFile();
            }
        }
        else {
            log.error("find preallocate mmap failed, this never happen");
        }
    }
    catch (InterruptedException e) {
        log.warn(this.getServiceName() + " service has exception. ", e);
    }
    return null;
}
/** @return Simple class name, used as the service/thread name in log messages. */
@Override
public String getServiceName() {
    return AllocateMapedFileService.class.getSimpleName();
}
/**
 * Stops the service thread, waits up to {@code getJointime()} ms for it to
 * exit, then destroys any maped files that were pre-allocated but never
 * handed out to a caller.
 */
public void shutdown() {
    this.stoped = true;
    this.thread.interrupt();
    try {
        this.thread.join(this.getJointime());
    }
    catch (InterruptedException e) {
        // Use the logger instead of printStackTrace(), and restore the
        // interrupt flag so callers can observe the interruption.
        log.warn(this.getServiceName() + " interrupted while awaiting shutdown. ", e);
        Thread.currentThread().interrupt();
    }
    for (AllocateRequest req : this.requestTable.values()) {
        if (req.mapedFile != null) {
            log.info("delete pre allocated maped file, {}", req.mapedFile.getFileName());
            req.mapedFile.destroy(1000);
        }
    }
}
/**
 * Service loop: keeps performing mmap operations until the service is stopped
 * or an operation reports it was interrupted.
 */
public void run() {
    log.info(this.getServiceName() + " service started");

    while (!this.isStoped()) {
        // mmapOperation() returns false only when interrupted from outside.
        if (!this.mmapOperation()) {
            break;
        }
    }

    log.info(this.getServiceName() + " service end");
}
/**
 * Takes one request off the queue and creates the maped file for it.
 * Only interrupted by the external thread, will return false.
 *
 * @return {@code false} only when interrupted (shutdown); {@code true}
 *         otherwise, including after an I/O error (the request is re-queued)
 */
private boolean mmapOperation() {
    boolean isSuccess = false;
    AllocateRequest req = null;
    try {
        req = this.requestQueue.take();
        // Guard against requests that timed out and were removed by the caller.
        AllocateRequest expectedRequest = this.requestTable.get(req.getFilePath());
        if (null == expectedRequest) {
            log.warn("this mmap request expired, maybe cause timeout " + req.getFilePath() + " "
                + req.getFileSize());
            return true;
        }
        if (expectedRequest != req) {
            log.warn("never expected here, maybe cause timeout " + req.getFilePath() + " "
                + req.getFileSize() + ", req:" + req + ", expectedRequest:" + expectedRequest);
            return true;
        }
        if (req.getMapedFile() == null) {
            long beginTime = System.currentTimeMillis();
            MapedFile mapedFile = new MapedFile(req.getFilePath(), req.getFileSize());
            long eclipseTime = UtilAll.computeEclipseTimeMilliseconds(beginTime);
            // Log slow mmap creation (> 10 ms) together with the queue backlog.
            if (eclipseTime > 10) {
                int queueSize = this.requestQueue.size();
                log.warn("create mapedFile spent time(ms) " + eclipseTime + " queue size " + queueSize
                    + " " + req.getFilePath() + " " + req.getFileSize());
            }
            // pre write mappedFile
            if (mapedFile.getFileSize() >= this.messageStore.getMessageStoreConfig()
                .getMapedFileSizeCommitLog() //
                && //
                this.messageStore.getMessageStoreConfig().isWarmMapedFileEnable()) {
                mapedFile.warmMappedFile(this.messageStore.getMessageStoreConfig().getFlushDiskType(),
                    this.messageStore.getMessageStoreConfig().getFlushLeastPagesWhenWarmMapedFile());
            }
            req.setMapedFile(mapedFile);
            this.hasException = false;
            isSuccess = true;
        }
    }
    catch (InterruptedException e) {
        log.warn(this.getServiceName() + " service has exception, maybe by shutdown");
        this.hasException = true;
        // Signal the run() loop to exit.
        return false;
    }
    catch (IOException e) {
        log.warn(this.getServiceName() + " service has exception. ", e);
        this.hasException = true;
        if (null != req) {
            // Re-queue and back off briefly before retrying.
            requestQueue.offer(req);
            try {
                Thread.sleep(1);
            }
            catch (InterruptedException e1) {
            }
        }
    }
    finally {
        // Release the waiting caller only on success; on failure the request
        // stays pending (re-queued above) until it succeeds or times out.
        if (req != null && isSuccess)
            req.getCountDownLatch().countDown();
    }
    return true;
}
class AllocateRequest implements Comparable<AllocateRequest> {
    // Full file path of the file to be mapped
    private String filePath;
    private int fileSize;
    private CountDownLatch countDownLatch = new CountDownLatch(1);
    private volatile MapedFile mapedFile = null;

    public AllocateRequest(String filePath, int fileSize) {
        this.filePath = filePath;
        this.fileSize = fileSize;
    }

    public String getFilePath() {
        return filePath;
    }

    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }

    public int getFileSize() {
        return fileSize;
    }

    public void setFileSize(int fileSize) {
        this.fileSize = fileSize;
    }

    public CountDownLatch getCountDownLatch() {
        return countDownLatch;
    }

    public void setCountDownLatch(CountDownLatch countDownLatch) {
        this.countDownLatch = countDownLatch;
    }

    public MapedFile getMapedFile() {
        return mapedFile;
    }

    public void setMapedFile(MapedFile mapedFile) {
        this.mapedFile = mapedFile;
    }

    /**
     * Orders requests so that larger files sort first; requests of equal size
     * are ordered by the numeric file name (the last path segment), smallest
     * first.
     */
    public int compareTo(AllocateRequest other) {
        if (this.fileSize != other.fileSize) {
            // reverse order by size: smaller file sorts after the larger one
            return this.fileSize < other.fileSize ? 1 : -1;
        }
        // same size: compare the numeric names encoded in the last path segment
        String thisName = this.filePath.substring(this.filePath.lastIndexOf(File.separator) + 1);
        String otherName = other.filePath.substring(other.filePath.lastIndexOf(File.separator) + 1);
        long thisOffset = Long.parseLong(thisName);
        long otherOffset = Long.parseLong(otherName);
        if (thisOffset == otherOffset) {
            return 0;
        }
        return thisOffset < otherOffset ? -1 : 1;
    }

    @Override
    public int hashCode() {
        // value-identical to the classic 31-based two-field computation
        int result = 31 + ((filePath == null) ? 0 : filePath.hashCode());
        return 31 * result + fileSize;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        AllocateRequest that = (AllocateRequest) obj;
        if (fileSize != that.fileSize) {
            return false;
        }
        return filePath == null ? that.filePath == null : filePath.equals(that.filePath);
    }
}
}
| apache-2.0 |
xmlunit/xmlunit | xmlunit-legacy/src/main/java/org/custommonkey/xmlunit/exceptions/XMLUnitException.java | 2306 | /*
******************************************************************
Copyright (c) 2006-2007, Jeff Martin, Tim Bacon
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the XMLUnit nor the names
of its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
******************************************************************
*/
package org.custommonkey.xmlunit.exceptions;
/**
 * Base class of any checked exception that can be thrown within
 * XMLUnit.
 */
public abstract class XMLUnitException extends Exception {
    // The root cause is stored in a field of this class rather than passed to
    // the Exception constructor, and exposed via the getCause() override
    // below — the pre-JDK-1.4 style of exception chaining. TODO confirm the
    // minimum supported JDK before modernising to super(message, cause).
    private final Throwable cause;

    /**
     * Initializes the exception.
     *
     * @param message the detail message
     * @param cause the root cause of the exception (may be null when unknown)
     */
    protected XMLUnitException(String message, Throwable cause) {
        super(message);
        this.cause = cause;
    }

    /**
     * Root cause of the exception, if any.
     *
     * @return the cause passed at construction time, or null if none was given
     */
    public Throwable getCause() {
        return cause;
    }
}
| apache-2.0 |
Fabryprog/camel | components/camel-stream/src/main/java/org/apache/camel/component/stream/FileWatcherStrategy.java | 8116 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.stream;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;
import static java.nio.file.StandardWatchEventKinds.OVERFLOW;

import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * To watch for file changes/rollover via JDK file watcher API.
 * This is used to know for example of streaming from a file, that gets rolled-over, so we know about this,
 * and can begin reading the file again from the beginning.
 */
public class FileWatcherStrategy extends ServiceSupport implements CamelContextAware {

    /**
     * Callback invoked with the affected file for every create/modify event
     * observed in the watched directory.
     */
    @FunctionalInterface
    public interface OnChangeEvent {
        void onChange(File file);
    }

    private static final Logger LOG = LoggerFactory.getLogger(FileWatcherStrategy.class);

    private CamelContext camelContext;
    private final String directory;
    private final OnChangeEvent onChangeEvent;
    private WatchService watcher;
    private ExecutorService executorService;
    private WatchFileChangesTask task;
    private long pollTimeout = 1000;

    /**
     * @param directory     the directory to watch (may be null, in which case this strategy is a no-op)
     * @param onChangeEvent callback invoked for each observed file change
     */
    public FileWatcherStrategy(String directory, OnChangeEvent onChangeEvent) {
        this.directory = directory;
        this.onChangeEvent = onChangeEvent;
    }

    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    public long getPollTimeout() {
        return pollTimeout;
    }

    /**
     * Sets the poll timeout in millis. The default value is 1000.
     */
    public void setPollTimeout(long pollTimeout) {
        this.pollTimeout = pollTimeout;
    }

    @Override
    protected void doStart() throws Exception {
        if (directory == null) {
            // no folder configured
            return;
        }
        File dir = new File(directory);
        if (dir.exists() && dir.isDirectory()) {
            LOG.info("Starting ReloadStrategy to watch directory: {}", dir);

            WatchEvent.Modifier modifier = null;

            // if its mac OSX then attempt to apply workaround or warn its slower
            String os = ObjectHelper.getSystemProperty("os.name", "");
            if (os.toLowerCase(Locale.US).startsWith("mac")) {
                // this modifier can speedup the scanner on mac osx (as java on mac has no native file notification integration)
                Class<WatchEvent.Modifier> clazz = getCamelContext().getClassResolver().resolveClass("com.sun.nio.file.SensitivityWatchEventModifier", WatchEvent.Modifier.class);
                if (clazz != null) {
                    WatchEvent.Modifier[] modifiers = clazz.getEnumConstants();
                    for (WatchEvent.Modifier mod : modifiers) {
                        if ("HIGH".equals(mod.name())) {
                            modifier = mod;
                            break;
                        }
                    }
                }
                if (modifier != null) {
                    LOG.info("On Mac OS X the JDK WatchService is slow by default so enabling SensitivityWatchEventModifier.HIGH as workaround");
                } else {
                    LOG.warn("On Mac OS X the JDK WatchService is slow and it may take up till 10 seconds to notice file changes");
                }
            }

            try {
                Path path = dir.toPath();
                watcher = path.getFileSystem().newWatchService();
                registerPathToWatcher(modifier, path, watcher);

                task = new WatchFileChangesTask(watcher, path, onChangeEvent);

                executorService = getCamelContext().getExecutorServiceManager().newSingleThreadExecutor(this, "FileWatcherStrategy");
                executorService.submit(task);
            } catch (IOException e) {
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }
    }

    /**
     * Registers the directory for create/modify events, passing the optional
     * sensitivity modifier (used on Mac OS X) when one was resolved.
     */
    private WatchKey registerPathToWatcher(WatchEvent.Modifier modifier, Path path, WatchService watcher) throws IOException {
        WatchKey key;
        if (modifier != null) {
            key = path.register(watcher, new WatchEvent.Kind<?>[]{ENTRY_CREATE, ENTRY_MODIFY}, modifier);
        } else {
            key = path.register(watcher, ENTRY_CREATE, ENTRY_MODIFY);
        }
        return key;
    }

    @Override
    protected void doStop() throws Exception {
        if (executorService != null) {
            getCamelContext().getExecutorServiceManager().shutdownGraceful(executorService);
            executorService = null;
        }
        if (watcher != null) {
            // closing the watcher makes the background task's poll fail, which ends it
            IOHelper.close(watcher);
            watcher = null;
        }
    }

    /**
     * Background task which watches for file changes
     */
    protected class WatchFileChangesTask implements Runnable {

        private final WatchService watcher;
        private final Path folder;
        private volatile boolean running;
        private OnChangeEvent changeEvent;

        public WatchFileChangesTask(WatchService watcher, Path folder, OnChangeEvent changeEvent) {
            this.watcher = watcher;
            this.folder = folder;
            this.changeEvent = changeEvent;
        }

        public boolean isRunning() {
            return running;
        }

        public void run() {
            LOG.debug("FileWatcherStrategy is starting watching folder: {}", folder);

            // allow to run while starting Camel
            while (isStarting() || isRunAllowed()) {
                running = true;

                WatchKey key;
                try {
                    LOG.trace("FileWatcherStrategy is polling for file changes in directory: {}", folder);
                    // wait for a key to be available
                    key = watcher.poll(pollTimeout, TimeUnit.MILLISECONDS);
                } catch (InterruptedException ex) {
                    // restore the interrupt status so the shutdown request is not lost
                    Thread.currentThread().interrupt();
                    break;
                }

                if (key != null) {
                    Path pathToReload = folder;
                    for (WatchEvent<?> event : key.pollEvents()) {
                        if (event.kind() == OVERFLOW) {
                            // an overflow event carries no context (context() is null),
                            // so it cannot be resolved to a file; skip it to avoid an NPE
                            continue;
                        }
                        // safe: non-overflow events registered on a Path yield Path contexts
                        @SuppressWarnings("unchecked")
                        WatchEvent<Path> we = (WatchEvent<Path>) event;
                        Path path = we.context();
                        File file = pathToReload.resolve(path).toFile();
                        LOG.trace("Modified/Created/Deleted file: {}", file);
                        changeEvent.onChange(file);
                    }
                    // the key must be reset after processed
                    boolean valid = key.reset();
                    if (!valid) {
                        break;
                    }
                }
            }

            running = false;

            LOG.info("FileWatcherStrategy is stopping watching folder: {}", folder);
        }
    }
}
| apache-2.0 |
Itvisors/mendix-DataTables | test/javasource/communitycommons/actions/EndTransaction.java | 1238 | // This file was generated by Mendix Studio Pro.
//
// WARNING: Only the following code will be retained when actions are regenerated:
// - the import list
// - the code between BEGIN USER CODE and END USER CODE
// - the code between BEGIN EXTRA CODE and END EXTRA CODE
// Other code you write will be lost the next time you deploy the project.
// Special characters, e.g., é, ö, à, etc. are supported in comments.
package communitycommons.actions;
import com.mendix.systemwideinterfaces.core.IContext;
import com.mendix.webui.CustomJavaAction;
/**
 * Commit the transaction, this will end this transaction or remove a save point from the queue if the transaction is nested
 */
public class EndTransaction extends CustomJavaAction<java.lang.Boolean>
{
	public EndTransaction(IContext context)
	{
		super(context);
	}

	@java.lang.Override
	public java.lang.Boolean executeAction() throws Exception
	{
		// BEGIN USER CODE
		// Ends the current transaction on this context; per the action's
		// description above, a nested transaction only pops a save point.
		// Note: this file is Mendix-generated — only code between the
		// BEGIN/END USER CODE and EXTRA CODE markers survives regeneration.
		getContext().endTransaction();
		return true;
		// END USER CODE
	}

	/**
	 * Returns a string representation of this action
	 */
	@java.lang.Override
	public java.lang.String toString()
	{
		return "EndTransaction";
	}

	// BEGIN EXTRA CODE
	// END EXTRA CODE
}
| apache-2.0 |
apache/xml-graphics-commons | src/test/java/org/apache/xmlgraphics/image/loader/DemoPainter.java | 1882 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.xmlgraphics.image.loader;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.geom.Rectangle2D;
import org.apache.xmlgraphics.java2d.Graphics2DImagePainter;
public class DemoPainter implements Graphics2DImagePainter {

    /** {@inheritDoc} */
    public Dimension getImageSize() {
        return new Dimension(10000, 10000);
    }

    /**
     * Draws a black rectangle and oval outline, scaled so the nominal image
     * size fills the given paint area.
     */
    public void paint(Graphics2D g2d, Rectangle2D area) {
        // move the origin to the top-left corner of the target area
        g2d.translate(area.getX(), area.getY());

        // fit the nominal image size into the paint area
        Dimension nominalSize = getImageSize();
        double scaleX = area.getWidth() / nominalSize.getWidth();
        double scaleY = area.getHeight() / nominalSize.getHeight();
        if (scaleX != 1.0 || scaleY != 1.0) {
            g2d.scale(scaleX, scaleY);
        }

        g2d.setColor(Color.BLACK);
        g2d.setStroke(new BasicStroke());
        g2d.drawRect(0, 0, nominalSize.width, nominalSize.height);
        g2d.drawOval(0, 0, nominalSize.width, nominalSize.height);
    }
}
| apache-2.0 |
apc999/alluxio | core/common/src/test/java/alluxio/uri/AuthorityTest.java | 4360 | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.uri;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import java.util.Arrays;
/**
 * Unit tests for {@link Authority}.
 */
public class AuthorityTest {

  @Test
  public void authorityFromStringTest() {
    // single-master form: a plain host:port pair
    assertTrue(Authority.fromString("localhost:19998") instanceof SingleMasterAuthority);
    assertTrue(Authority.fromString("127.0.0.1:19998") instanceof SingleMasterAuthority);
    // zookeeper form: "zk@" prefix followed by one or more host:port pairs,
    // separated by ',' or ';'
    assertTrue(Authority.fromString("zk@host:2181") instanceof ZookeeperAuthority);
    assertTrue(Authority.fromString("zk@host1:2181,127.0.0.2:2181,12.43.214.53:2181")
        instanceof ZookeeperAuthority);
    assertTrue(Authority.fromString("zk@host1:2181;host2:2181;host3:2181")
        instanceof ZookeeperAuthority);
    // empty or null input maps to NoAuthority rather than failing
    assertTrue(Authority.fromString("") instanceof NoAuthority);
    assertTrue(Authority.fromString(null) instanceof NoAuthority);
    // everything below is malformed and must fall back to UnknownAuthority:
    // missing/invalid port, trailing separators, extra components, or a bare
    // or incomplete "zk@" prefix
    assertTrue(Authority.fromString("localhost") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("f3,321:sad") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("localhost:") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("127.0.0.1:19998,") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("localhost:19998:8080") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("localhost:asdsad") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("zk@") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("zk@;") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("zk@localhost") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("zk@127.0.0.1:port") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("zk@127.0.0.1:2181,") instanceof UnknownAuthority);
    assertTrue(Authority.fromString("zk@127.0.0.1:2181,localhost") instanceof UnknownAuthority);
    assertTrue(Authority.fromString(",,,") instanceof UnknownAuthority);
    assertTrue(Authority.fromString(";;;") instanceof UnknownAuthority);
  }

  @Test
  public void singleMasterAuthorityTest() {
    // host and port are parsed out of the single-master form
    SingleMasterAuthority authority =
        (SingleMasterAuthority) Authority.fromString("localhost:19998");
    assertEquals("localhost:19998", authority.toString());
    assertEquals("localhost", authority.getHost());
    assertEquals(19998, authority.getPort());
  }

  @Test
  public void zookeeperAuthorityTest() {
    ZookeeperAuthority authority = (ZookeeperAuthority) Authority.fromString("zk@host:2181");
    assertEquals("zk@host:2181", authority.toString());
    assertEquals("host:2181", authority.getZookeeperAddress());

    // comma-separated addresses pass through unchanged
    authority = (ZookeeperAuthority) Authority
        .fromString("zk@127.0.0.1:2181,127.0.0.2:2181,127.0.0.3:2181");
    assertEquals("zk@127.0.0.1:2181,127.0.0.2:2181,127.0.0.3:2181", authority.toString());
    assertEquals("127.0.0.1:2181,127.0.0.2:2181,127.0.0.3:2181", authority.getZookeeperAddress());

    // ';' and '+' separated addresses are normalized to commas
    authority = (ZookeeperAuthority) Authority.fromString("zk@host1:2181;host2:2181;host3:2181");
    assertEquals("zk@host1:2181,host2:2181,host3:2181", authority.toString());
    assertEquals("host1:2181,host2:2181,host3:2181", authority.getZookeeperAddress());

    authority = (ZookeeperAuthority) Authority.fromString("zk@host1:2181+host2:2181+host3:2181");
    assertEquals("zk@host1:2181,host2:2181,host3:2181", authority.toString());
    assertEquals("host1:2181,host2:2181,host3:2181", authority.getZookeeperAddress());
  }

  @Test
  public void mixedDelimiters() {
    // any mix of the three delimiters normalizes to the same comma-separated form
    String normalized = "a:0,b:0,c:0";
    for (String test : Arrays.asList(
        "zk@a:0;b:0+c:0",
        "zk@a:0,b:0;c:0",
        "zk@a:0+b:0,c:0"
    )) {
      assertEquals(normalized,
          ((ZookeeperAuthority) Authority.fromString(test)).getZookeeperAddress());
    }
  }
}
| apache-2.0 |
na4lapy/na4lapy-android | app/src/main/java/pl/kodujdlapolski/na4lapy/model/type/AnimalAttribute.java | 857 | /*
* Copyright 2017 Stowarzyszenie Na4Łapy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pl.kodujdlapolski.na4lapy.model.type;
import android.support.annotation.DrawableRes;
import android.support.annotation.StringRes;
/**
 * Contract for animal attribute values that expose both a string resource
 * (text label) and a drawable resource (icon).
 */
public interface AnimalAttribute {

    /**
     * @return Android string resource id of the label for this attribute value
     */
    @StringRes int getLabelResId();

    /**
     * @return Android drawable resource id of the icon for this attribute value
     */
    @DrawableRes int getDrawableResId();
}
| apache-2.0 |
newbiet/zstack | test/src/test/java/org/zstack/test/kvm/TestAttachIsoOnKvm.java | 5770 | package org.zstack.test.kvm;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import org.zstack.compute.vm.VmSystemTags;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.componentloader.ComponentLoader;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.header.identity.SessionInventory;
import org.zstack.header.image.ImageInventory;
import org.zstack.header.network.l3.L3NetworkInventory;
import org.zstack.header.storage.primary.ImageCacheVO;
import org.zstack.header.storage.primary.ImageCacheVO_;
import org.zstack.header.vm.VmInstanceInventory;
import org.zstack.header.vm.VmNicInventory;
import org.zstack.kvm.KVMAgentCommands;
import org.zstack.kvm.KVMAgentCommands.*;
import org.zstack.network.service.virtualrouter.VirtualRouterVmVO;
import org.zstack.simulator.kvm.KVMSimulatorConfig;
import org.zstack.test.Api;
import org.zstack.test.ApiSenderException;
import org.zstack.test.DBUtil;
import org.zstack.test.WebBeanConstructor;
import org.zstack.test.deployer.Deployer;
import org.zstack.test.storage.backup.sftp.TestSftpBackupStorageDeleteImage2;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
/**
* 1. create a vm
* 2. attach an iso to the vm
*
* confirm the iso attached successfully
*
* 3. detach the iso
*
* confirm the iso detached successfully
*
* 4. stop the vm
* 5. attach the iso
*
* confirm the iso attached successfully
*
* 6. start the vm
*
* confirm the iso is attached
*
* 7. stop the vm
* 8. detach the iso
*
* confirm the iso detached successfully
*
* 9. start the vm
* 10. attach the iso
* 11. delete the iso
* 12. detach the iso
*
* confirm the iso detached successfully
*/
public class TestAttachIsoOnKvm {
    // NOTE(review): logger is created with a different test's class — looks
    // like a copy-paste; harmless, but the log category is misleading.
    CLogger logger = Utils.getLogger(TestSftpBackupStorageDeleteImage2.class);
    Deployer deployer;
    Api api;
    ComponentLoader loader;
    CloudBus bus;
    DatabaseFacade dbf;
    SessionInventory session;
    KVMSimulatorConfig config;

    @Before
    public void setUp() throws Exception {
        // fresh database plus a simulated KVM/virtual-router deployment
        // described by the deployer xml
        DBUtil.reDeployDB();
        WebBeanConstructor con = new WebBeanConstructor();
        deployer = new Deployer("deployerXml/kvm/TestAttachIsoOnKvm.xml", con);
        deployer.addSpringConfig("KVMRelated.xml");
        deployer.addSpringConfig("VirtualRouter.xml");
        deployer.addSpringConfig("VirtualRouterSimulator.xml");
        deployer.build();
        api = deployer.getApi();
        loader = deployer.getComponentLoader();
        bus = loader.getComponent(CloudBus.class);
        dbf = loader.getComponent(DatabaseFacade.class);
        config = loader.getComponent(KVMSimulatorConfig.class);
        session = api.loginAsAdmin();
    }

    @Test
    public void test() throws ApiSenderException {
        ImageInventory iso = deployer.images.get("TestIso");
        VmInstanceInventory vm = deployer.vms.get("TestVm");
        // step 2 (see class javadoc): attach the ISO while the vm is running;
        // an attach command must reach the simulated host
        api.attachIso(vm.getUuid(), iso.getUuid(), null);
        Assert.assertFalse(config.attachIsoCmds.isEmpty());
        AttachIsoCmd cmd = config.attachIsoCmds.get(0);
        SimpleQuery<ImageCacheVO> q = dbf.createQuery(ImageCacheVO.class);
        q.add(ImageCacheVO_.imageUuid, Op.EQ, iso.getUuid());
        ImageCacheVO cvo = q.find();
        // the path sent to the agent must be the image's cache install url
        Assert.assertEquals(cvo.getInstallUrl(), cmd.iso.getPath());
        Assert.assertEquals(vm.getUuid(), cmd.vmUuid);
        // attaching records the ISO uuid in the vm's ISO system tag
        String isoUuid = VmSystemTags.ISO.getTokenByResourceUuid(vm.getUuid(), VmSystemTags.ISO_TOKEN);
        Assert.assertEquals(iso.getUuid(), isoUuid);
        // step 3: detach from the running vm; a detach command must be sent
        // and the system tag removed
        api.detachIso(vm.getUuid(), null);
        Assert.assertFalse(config.detachIsoCmds.isEmpty());
        DetachIsoCmd dcmd = config.detachIsoCmds.get(0);
        Assert.assertEquals(vm.getUuid(), dcmd.vmUuid);
        Assert.assertEquals(isoUuid, dcmd.isoUuid);
        isoUuid = VmSystemTags.ISO.getTokenByResourceUuid(vm.getUuid(), VmSystemTags.ISO_TOKEN);
        Assert.assertNull(isoUuid);
        // steps 4-5: attach while the vm is stopped; only the tag is written,
        // no command goes to the host
        api.stopVmInstance(vm.getUuid());
        config.attachIsoCmds.clear();
        api.attachIso(vm.getUuid(), iso.getUuid(), null);
        Assert.assertTrue(config.attachIsoCmds.isEmpty());
        isoUuid = VmSystemTags.ISO.getTokenByResourceUuid(vm.getUuid(), VmSystemTags.ISO_TOKEN);
        Assert.assertNotNull(isoUuid);
        // step 6: starting the vm must include the ISO as the boot iso in the
        // start command
        api.startVmInstance(vm.getUuid());
        StartVmCmd scmd = config.startVmCmd;
        Assert.assertNotNull(scmd.getBootIso());
        Assert.assertEquals(cvo.getInstallUrl(), scmd.getBootIso().getPath());
        Assert.assertEquals(iso.getUuid(), scmd.getBootIso().getImageUuid());
        // steps 7-8: detach while stopped; again no command is sent and the
        // tag is removed
        api.stopVmInstance(vm.getUuid());
        config.detachIsoCmds.clear();
        api.detachIso(vm.getUuid(), null);
        Assert.assertTrue(config.detachIsoCmds.isEmpty());
        isoUuid = VmSystemTags.ISO.getTokenByResourceUuid(vm.getUuid(), VmSystemTags.ISO_TOKEN);
        Assert.assertNull(isoUuid);
        // steps 9-12: attach, delete the image, then detach; the detach must
        // still reach the host and clear the tag even though the ISO is gone
        api.startVmInstance(vm.getUuid());
        api.attachIso(vm.getUuid(), iso.getUuid(), null);
        api.deleteImage(iso.getUuid());
        config.detachIsoCmds.clear();
        api.detachIso(vm.getUuid(), null);
        Assert.assertFalse(config.detachIsoCmds.isEmpty());
        dcmd = config.detachIsoCmds.get(0);
        Assert.assertEquals(vm.getUuid(), dcmd.vmUuid);
        Assert.assertEquals(iso.getUuid(), dcmd.isoUuid);
        isoUuid = VmSystemTags.ISO.getTokenByResourceUuid(vm.getUuid(), VmSystemTags.ISO_TOKEN);
        Assert.assertNull(isoUuid);
    }
}
| apache-2.0 |
flaxsearch/BioSolr | ontology/ontology-annotator/core/src/test/java/uk/co/flax/biosolr/ontology/core/owl/OWLOntologyHelperMethodsTest.java | 6839 | /**
* Copyright (c) 2015 Lemur Consulting Ltd.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.flax.biosolr.ontology.core.owl;
import static org.junit.Assert.*;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import uk.co.flax.biosolr.ontology.core.OntologyHelper;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
/**
* Unit tests for the OWL Ontology Helper methods.
*
* <p>Created by Matt Pearce on 20/10/15.</p>
*/
public class OWLOntologyHelperMethodsTest {
private static OntologyHelper helper;
public static final String ROOT_IRI = "http://www.w3.org/2002/07/owl#Thing";
public static final String TEST_IRI = "http://www.ebi.ac.uk/efo/EFO_0000001";
public static final String TEST_CHILD_IRI = "http://www.ifomis.org/bfo/1.1/snap#MaterialEntity";
@BeforeClass
public static void setup() throws URISyntaxException, OWLOntologyCreationException {
URL testResource = OWLOntologyHelperMethodsTest.class.getClassLoader()
.getResource(OWLOntologyHelperTest.TEST_ONTOLOGY);
if (testResource != null) {
OWLOntologyConfiguration config = new OWLOntologyConfiguration(testResource.toExternalForm(),
Collections.singletonList(OWLOntologyConfiguration.LABEL_PROPERTY_URI),
Collections.singletonList(OWLOntologyConfiguration.SYNONYM_PROPERTY_URI),
Collections.singletonList(OWLOntologyConfiguration.DEFINITION_PROPERTY_URI),
Collections.emptyList());
helper = new OWLOntologyHelper(config);
}
}
@AfterClass
public static void dispose() {
helper.dispose();
}
@Test(expected = NullPointerException.class)
public void findLabels_nullClass() throws Exception {
helper.findLabels(null);
}
@Test
public void findLabels() throws Exception {
Collection<String> labels = helper.findLabels(TEST_IRI);
assertNotNull(labels);
assertEquals(2, labels.size());
}
@Test(expected = NullPointerException.class)
public void getChildUris_nullClass() throws Exception {
helper.getChildIris(null);
}
@Test
public void getChildUris_noChildren() throws Exception {
Collection<String> childUris = helper.getChildIris("http://www.ebi.ac.uk/efo/PARENTS_001");
assertNotNull(childUris);
assertEquals(0, childUris.size());
}
@Test
public void getChildUris() throws Exception {
Collection<String> childUris = helper.getChildIris(TEST_IRI);
assertNotNull(childUris);
assertEquals(2, childUris.size());
assertTrue(childUris.contains(TEST_CHILD_IRI));
}
@Test(expected = NullPointerException.class)
public void getDescendantUris_nullClass() throws Exception {
helper.getDescendantIris(null);
}
@Test
public void getDescendantUris_noDescendants() throws Exception {
Collection<String> descendantUris = helper.getDescendantIris("http://www.ebi.ac.uk/efo/PARENTS_001");
assertNotNull(descendantUris);
assertEquals(0, descendantUris.size());
}
@Test
public void getDescendantUris() throws Exception {
Collection<String> descendantUris = helper.getDescendantIris(TEST_IRI);
assertNotNull(descendantUris);
assertEquals(2, descendantUris.size());
assertTrue(descendantUris.contains(TEST_CHILD_IRI));
}
@Test(expected = NullPointerException.class)
public void getParentUris_nullClass() throws Exception {
helper.getParentIris(null);
}
@Test
public void getParentUris() throws Exception {
Collection<String> parentUris = helper.getParentIris(TEST_CHILD_IRI);
assertNotNull(parentUris);
assertEquals(1, parentUris.size());
assertTrue(parentUris.contains(TEST_IRI));
}
@Test(expected = NullPointerException.class)
public void getAncestorUris_nullClass() throws Exception {
helper.getAncestorIris(null);
}
@Test
public void getAncestorUris() throws Exception {
Collection<String> ancestorUris = helper.getAncestorIris(TEST_CHILD_IRI);
assertNotNull(ancestorUris);
assertEquals(2, ancestorUris.size());
assertTrue(ancestorUris.contains(TEST_IRI));
assertTrue(ancestorUris.contains(ROOT_IRI));
}
@Test(expected = NullPointerException.class)
public void findLabelsForIRIs_nullCollection() throws Exception {
helper.findLabelsForIRIs(null);
}
@Test
public void findLabelsForIRIs_notIRIs() throws Exception {
final String iri = "blah";
Collection<String> labels = helper.findLabelsForIRIs(Collections.singletonList(iri));
assertNotNull(labels);
assertEquals(0, labels.size());
}
@Test
public void findLabelsForIRIs() throws Exception {
Collection<String> labels = helper.findLabelsForIRIs(Arrays.asList(TEST_IRI, TEST_CHILD_IRI));
assertNotNull(labels);
assertEquals(3, labels.size());
}
@Test(expected = NullPointerException.class)
public void findSynonyms_nullClass() throws Exception {
helper.findSynonyms(null);
}
@Test
public void findSynonyms_noSynonymsInClass() throws Exception {
Collection<String> synonyms = helper.findSynonyms(TEST_CHILD_IRI);
assertNotNull(synonyms);
assertEquals(0, synonyms.size());
}
@Test
public void findSynonyms() throws Exception {
Collection<String> synonyms = helper.findSynonyms(TEST_IRI);
assertNotNull(synonyms);
assertEquals(1, synonyms.size());
}
@Test(expected = NullPointerException.class)
public void findDefinitions_nullClass() throws Exception {
helper.findDefinitions(null);
}
@Test
public void findDefinitions_noDefinitionsInClass() throws Exception {
Collection<String> synonyms = helper.findDefinitions(TEST_CHILD_IRI);
assertNotNull(synonyms);
assertEquals(0, synonyms.size());
}
@Test
public void findDefinitions() throws Exception {
Collection<String> synonyms = helper.findDefinitions(TEST_IRI);
assertNotNull(synonyms);
assertEquals(1, synonyms.size());
}
@Test
public void findParentPaths_noParents() throws Exception {
String iri = TEST_IRI;
Collection<String> parentPaths = helper.getParentPaths(iri, true);
assertNotNull(parentPaths);
assertEquals(1, parentPaths.size());
}
@Test
public void findParentPaths_multiplePaths() throws Exception {
String iri = "http://www.ebi.ac.uk/efo/PARENTS_001";
Collection<String> parentPaths = helper.getParentPaths(iri, true);
assertNotNull(parentPaths);
assertEquals(2, parentPaths.size());
}
}
| apache-2.0 |
JingchengDu/hbase | hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java | 3858 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.spark.example.hbasecontext;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.spark.FamilyHFileWriteOptions;
import org.apache.hadoop.hbase.spark.JavaHBaseContext;
import org.apache.hadoop.hbase.spark.KeyFamilyQualifier;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
* Run this example using command below:
*
* SPARK_HOME/bin/spark-submit --master local[2] --class org.apache.hadoop.hbase.spark.example.hbasecontext.JavaHBaseBulkLoadExample
* path/to/hbase-spark.jar {path/to/output/HFiles}
*
 * This example will write HFiles for the sample Put data to {path/to/output/HFiles}, and the user can run
 * 'hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles' to load those HFiles into the table to verify this example.
*/
final public class JavaHBaseBulkLoadExample {

  /** Utility/example class; not meant to be instantiated. */
  private JavaHBaseBulkLoadExample() {}

  /**
   * Generates HFiles for a small fixed data set via {@code JavaHBaseContext.bulkLoad}.
   *
   * @param args args[0] is the output path where the generated HFiles are written
   */
  public static void main(String[] args) {
    if (args.length < 1) {
      System.out.println("JavaHBaseBulkLoadExample " + "{outputPath}");
      return;
    }

    String tableName = "bulkload-table-test";
    String columnFamily1 = "f1";
    String columnFamily2 = "f2";

    SparkConf sparkConf = new SparkConf().setAppName("JavaHBaseBulkLoadExample " + tableName);
    JavaSparkContext jsc = new JavaSparkContext(sparkConf);

    try {
      // Sample records in "rowKey,family,qualifier,value" CSV form.
      List<String> list = new ArrayList<>();
      // row1
      list.add("1," + columnFamily1 + ",b,1");
      // row3
      list.add("3," + columnFamily1 + ",a,2");
      list.add("3," + columnFamily1 + ",b,1");
      list.add("3," + columnFamily2 + ",a,1");
      /* row2 */
      list.add("2," + columnFamily2 + ",a,3");
      list.add("2," + columnFamily2 + ",b,3");

      JavaRDD<String> rdd = jsc.parallelize(list);

      Configuration conf = HBaseConfiguration.create();
      JavaHBaseContext hbaseContext = new JavaHBaseContext(jsc, conf);

      hbaseContext.bulkLoad(rdd, TableName.valueOf(tableName), new BulkLoadFunction(), args[0],
          new HashMap<byte[], FamilyHFileWriteOptions>(), false, HConstants.DEFAULT_MAX_FILE_SIZE);
    } finally {
      // Always release the Spark context, even if the bulk load fails.
      jsc.stop();
    }
  }

  /**
   * Parses a "rowKey,family,qualifier,value" CSV line into the
   * (rowKey/family/qualifier, value) pair consumed by bulkLoad.
   * Returns null for null or malformed input so such records are skipped.
   */
  public static class BulkLoadFunction implements Function<String, Pair<KeyFamilyQualifier, byte[]>> {
    @Override
    public Pair<KeyFamilyQualifier, byte[]> call(String v1) throws Exception {
      if (v1 == null) {
        return null;
      }
      String[] strs = v1.split(",");
      if (strs.length != 4) {
        return null;
      }
      KeyFamilyQualifier kfq = new KeyFamilyQualifier(Bytes.toBytes(strs[0]), Bytes.toBytes(strs[1]),
          Bytes.toBytes(strs[2]));
      // Parameterized (was a raw 'new Pair(...)') to avoid unchecked-conversion warnings.
      return new Pair<>(kfq, Bytes.toBytes(strs[3]));
    }
  }
}
| apache-2.0 |
sahooamit/bigdata | gobblin-core/src/test/java/gobblin/writer/test/TestPartitioner.java | 1499 | /*
* Copyright (C) 2014-2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.writer.test;
import gobblin.configuration.State;
import gobblin.writer.partitioner.WriterPartitioner;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
/**
 * Test {@link WriterPartitioner} that partitions string records by the
 * lower-cased first character of the record.
 */
public class TestPartitioner implements WriterPartitioner<String> {

  public static final String PARTITION = "partition";

  // Single-field Avro schema: {"partition": string}.
  private static final Schema SCHEMA = SchemaBuilder.record("LetterPartition").namespace("gobblin.test").
      fields().name(PARTITION).
      type(Schema.create(Schema.Type.STRING)).noDefault().endRecord();

  public TestPartitioner(State state, int numBranches, int branchId) {
  }

  @Override public Schema partitionSchema() {
    return SCHEMA;
  }

  /**
   * Returns a partition record keyed by the record's lower-cased first character.
   * Note: assumes a non-empty record — charAt(0) throws on "" (acceptable for a test helper).
   */
  @Override
  public GenericRecord partitionForRecord(String record) {
    GenericRecord partition = new GenericData.Record(SCHEMA);
    // The schema declares PARTITION as STRING, so store a String; the original
    // put a Character, which Avro's generic datum writer rejects for string fields.
    partition.put(PARTITION, String.valueOf(record.toLowerCase().charAt(0)));
    return partition;
  }
}
| apache-2.0 |
Fabryprog/camel | components/camel-micrometer/src/main/java/org/apache/camel/component/micrometer/MicrometerConstants.java | 2934 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.micrometer;
import java.util.function.Predicate;
import io.micrometer.core.instrument.Meter;
/**
 * Constants used by the Camel Micrometer component: message header names,
 * default meter names, common tag keys, and reusable meter-id predicates.
 */
public final class MicrometerConstants {
// Common prefix shared by every Camel Micrometer message header.
public static final String HEADER_PREFIX = "CamelMetrics";
// Headers driving individual meter operations on an exchange.
public static final String HEADER_TIMER_ACTION = HEADER_PREFIX + "TimerAction";
public static final String HEADER_HISTOGRAM_VALUE = HEADER_PREFIX + "HistogramValue";
public static final String HEADER_COUNTER_DECREMENT = HEADER_PREFIX + "CounterDecrement";
public static final String HEADER_COUNTER_INCREMENT = HEADER_PREFIX + "CounterIncrement";
// Headers overriding the metric name and tags for a single exchange.
public static final String HEADER_METRIC_NAME = HEADER_PREFIX + "Name";
public static final String HEADER_METRIC_TAGS = HEADER_PREFIX + "Tags";
// Default meter names used when none is configured explicitly.
public static final String DEFAULT_CAMEL_MESSAGE_HISTORY_METER_NAME = "CamelMessageHistory";
public static final String DEFAULT_CAMEL_ROUTE_POLICY_METER_NAME = "CamelRoutePolicy";
public static final String DEFAULT_CAMEL_EXCHANGE_EVENT_METER_NAME = "CamelExchangeEventNotifier";
public static final String DEFAULT_CAMEL_ROUTES_ADDED = "CamelRoutesAdded";
public static final String DEFAULT_CAMEL_ROUTES_RUNNING = "CamelRoutesRunning";
// Tag keys attached to Camel-originated meters.
public static final String ROUTE_ID_TAG = "routeId";
public static final String NODE_ID_TAG = "nodeId";
public static final String FAILED_TAG = "failed";
public static final String CAMEL_CONTEXT_TAG = "camelContext";
public static final String EVENT_TYPE_TAG = "eventType";
// Lookup keys for the registry and naming strategies.
public static final String METRICS_REGISTRY_NAME = "metricsRegistry";
public static final String SERVICE_NAME = "serviceName";
public static final String ENDPOINT_NAME = "endpointName";
// Reusable Meter.Id filters. A meter counts as Camel-originated exactly when
// it carries the camelContext tag.
public static final Predicate<Meter.Id> CAMEL_METERS = id -> id.getTag(CAMEL_CONTEXT_TAG) != null;
public static final Predicate<Meter.Id> TIMERS = id -> id.getType() == Meter.Type.TIMER;
public static final Predicate<Meter.Id> DISTRIBUTION_SUMMARIES = id -> id.getType() == Meter.Type.DISTRIBUTION_SUMMARY;
// Matches every meter id.
public static final Predicate<Meter.Id> ALWAYS = id -> true;
// Constants holder: no instances.
private MicrometerConstants() {
}
}
| apache-2.0 |