repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
icloudkit/example | net.cloudkit.enterprises/src/main/java/net/cloudkit/experiment/infrastructure/support/messaging/MessageGroup.java | 125 | package net.cloudkit.experiment.infrastructure.support.messaging;
/**
 * Marker interface for message groups.
 * <p>
 * Declares no methods; presumably serves as a type tag that implementing
 * classes use to identify themselves as messaging groups — TODO confirm the
 * intended contract against its implementors.
 */
public interface MessageGroup {
}
| apache-2.0 |
srdo/storm | storm-client/src/jvm/org/apache/storm/trident/topology/MasterBatchCoordinator.java | 12509 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package org.apache.storm.trident.topology;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import org.apache.storm.Config;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.trident.spout.ITridentSpout;
import org.apache.storm.trident.topology.state.TransactionalState;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Values;
import org.apache.storm.utils.WindowedTimeThrottler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Spout that coordinates the lifecycle of Trident batches (transactions) across
 * all managed spouts.
 * <p>
 * New batch attempts are emitted on {@link #BATCH_STREAM_ID}; once a
 * transaction is fully processed and is the current (oldest) active
 * transaction, a commit signal goes out on {@link #COMMIT_STREAM_ID}; after the
 * commit tuple is acked, a success notification is emitted on
 * {@link #SUCCESS_STREAM_ID}. The current transaction id and per-transaction
 * attempt ids are persisted through {@link TransactionalState} so the
 * coordinator can resume consistently after a restart.
 */
public class MasterBatchCoordinator extends BaseRichSpout {
    public static final Logger LOG = LoggerFactory.getLogger(MasterBatchCoordinator.class);
    // Transaction ids start at 1 and increase monotonically (see nextTransactionId).
    public static final long INIT_TXID = 1L;
    public static final String BATCH_STREAM_ID = "$batch";
    public static final String COMMIT_STREAM_ID = "$commit";
    public static final String SUCCESS_STREAM_ID = "$success";
    // Keys under which coordinator state is persisted in TransactionalState.
    private static final String CURRENT_TX = "currtx";
    private static final String CURRENT_ATTEMPTS = "currattempts";
    // Active transactions keyed by txid; a TreeMap keeps them sorted so the
    // oldest transaction can be committed first and later ones pruned on failure.
    TreeMap<Long, TransactionStatus> _activeTx = new TreeMap<Long, TransactionStatus>();
    // Latest attempt id per txid, restored from persisted state in open().
    TreeMap<Long, Integer> _attemptIds;
    // The oldest uncommitted transaction id.
    Long _currTransaction;
    // Maximum number of simultaneously active transactions (from max spout pending).
    int _maxTransactionActive;
    List<ITridentSpout.BatchCoordinator> _coordinators = new ArrayList();
    List<String> _managedSpoutIds;
    List<ITridentSpout> _spouts;
    // Rate-limits batch emission to one per configured interval.
    WindowedTimeThrottler _throttler;
    boolean _active = true;
    // One persisted coordinator state per managed spout.
    private List<TransactionalState> _states = new ArrayList();
    private SpoutOutputCollector _collector;

    /**
     * Creates a coordinator for the given spouts.
     *
     * @param spoutIds ids of the managed spouts; must be non-empty
     * @param spouts   the managed spouts, parallel to {@code spoutIds}
     * @throws IllegalArgumentException if {@code spoutIds} is empty
     */
    public MasterBatchCoordinator(List<String> spoutIds, List<ITridentSpout> spouts) {
        if (spoutIds.isEmpty()) {
            throw new IllegalArgumentException("Must manage at least one spout");
        }
        _managedSpoutIds = spoutIds;
        _spouts = spouts;
        LOG.debug("Created {}", this);
    }

    public List<String> getManagedSpoutIds() {
        return _managedSpoutIds;
    }

    @Override
    public void activate() {
        _active = true;
    }

    @Override
    public void deactivate() {
        _active = false;
    }

    /**
     * Initializes throttling, restores persisted transaction/attempt state and
     * creates one batch coordinator per managed spout.
     */
    @Override
    public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
        _throttler = new WindowedTimeThrottler((Number) conf.get(Config.TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS), 1);
        for (String spoutId : _managedSpoutIds) {
            _states.add(TransactionalState.newCoordinatorState(conf, spoutId));
        }
        _currTransaction = getStoredCurrTransaction();
        _collector = collector;
        Number active = (Number) conf.get(Config.TOPOLOGY_MAX_SPOUT_PENDING);
        if (active == null) {
            // Default to a single in-flight transaction when max spout pending is unset.
            _maxTransactionActive = 1;
        } else {
            _maxTransactionActive = active.intValue();
        }
        _attemptIds = getStoredCurrAttempts(_currTransaction, _maxTransactionActive);
        for (int i = 0; i < _spouts.size(); i++) {
            String txId = _managedSpoutIds.get(i);
            _coordinators.add(_spouts.get(i).getCoordinator(txId, conf, context));
        }
        LOG.debug("Opened {}", this);
    }

    @Override
    public void close() {
        for (TransactionalState state : _states) {
            state.close();
        }
        LOG.debug("Closed {}", this);
    }

    @Override
    public void nextTuple() {
        // All emission decisions (commit + new batches) are made in sync().
        sync();
    }

    /**
     * Advances a transaction's state machine when one of its tuples is acked:
     * PROCESSING -> PROCESSED for batch tuples, and COMMITTING -> done (emit
     * success, persist the new current txid) for commit tuples.
     */
    @Override
    public void ack(Object msgId) {
        TransactionAttempt tx = (TransactionAttempt) msgId;
        TransactionStatus status = _activeTx.get(tx.getTransactionId());
        LOG.debug("Ack. [tx_attempt = {}], [tx_status = {}], [{}]", tx, status, this);
        // Ignore acks for attempts that are no longer current for their txid.
        if (status != null && tx.equals(status.attempt)) {
            if (status.status == AttemptStatus.PROCESSING) {
                status.status = AttemptStatus.PROCESSED;
                LOG.debug("Changed status. [tx_attempt = {}] [tx_status = {}]", tx, status);
            } else if (status.status == AttemptStatus.COMMITTING) {
                _activeTx.remove(tx.getTransactionId());
                _attemptIds.remove(tx.getTransactionId());
                _collector.emit(SUCCESS_STREAM_ID, new Values(tx));
                _currTransaction = nextTransactionId(tx.getTransactionId());
                // Persist the new current txid in every spout's coordinator state.
                for (TransactionalState state : _states) {
                    state.setData(CURRENT_TX, _currTransaction);
                }
                LOG.debug("Emitted on [stream = {}], [tx_attempt = {}], [tx_status = {}], [{}]", SUCCESS_STREAM_ID, tx, status, this);
            }
            sync();
        }
    }

    /**
     * On failure of the current attempt, discards the failed transaction and
     * every later active transaction (they may depend on the failed one); sync()
     * will then re-emit them with fresh attempt ids.
     */
    @Override
    public void fail(Object msgId) {
        TransactionAttempt tx = (TransactionAttempt) msgId;
        TransactionStatus stored = _activeTx.remove(tx.getTransactionId());
        LOG.debug("Fail. [tx_attempt = {}], [tx_status = {}], [{}]", tx, stored, this);
        if (stored != null && tx.equals(stored.attempt)) {
            // Drop all transactions at or after the failed txid.
            _activeTx.tailMap(tx.getTransactionId()).clear();
            sync();
        }
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // in partitioned example, in case an emitter task receives a later transaction than it's emitted so far,
        // when it sees the earlier txid it should know to emit nothing
        declarer.declareStream(BATCH_STREAM_ID, new Fields("tx"));
        declarer.declareStream(COMMIT_STREAM_ID, new Fields("tx"));
        declarer.declareStream(SUCCESS_STREAM_ID, new Fields("tx"));
    }

    /**
     * Core scheduling step: commits the current transaction if it is fully
     * processed, then (while active and under the max) emits new batch attempts
     * for transactions that are ready and not already in flight.
     */
    private void sync() {
        // note that sometimes the tuples active may be less than max_spout_pending, e.g.
        // max_spout_pending = 3
        // tx 1, 2, 3 active, tx 2 is acked. there won't be a commit for tx 2 (because tx 1 isn't committed yet),
        // and there won't be a batch for tx 4 because there's max_spout_pending tx active
        TransactionStatus maybeCommit = _activeTx.get(_currTransaction);
        if (maybeCommit != null && maybeCommit.status == AttemptStatus.PROCESSED) {
            maybeCommit.status = AttemptStatus.COMMITTING;
            _collector.emit(COMMIT_STREAM_ID, new Values(maybeCommit.attempt), maybeCommit.attempt);
            LOG.debug("Emitted on [stream = {}], [tx_status = {}], [{}]", COMMIT_STREAM_ID, maybeCommit, this);
        }
        if (_active) {
            if (_activeTx.size() < _maxTransactionActive) {
                Long curr = _currTransaction;
                for (int i = 0; i < _maxTransactionActive; i++) {
                    if (!_activeTx.containsKey(curr) && isReady(curr)) {
                        // by using a monotonically increasing attempt id, downstream tasks
                        // can be memory efficient by clearing out state for old attempts
                        // as soon as they see a higher attempt id for a transaction
                        Integer attemptId = _attemptIds.get(curr);
                        if (attemptId == null) {
                            attemptId = 0;
                        } else {
                            attemptId++;
                        }
                        _attemptIds.put(curr, attemptId);
                        // Persist attempt ids before emitting so a restart never reuses an id.
                        for (TransactionalState state : _states) {
                            state.setData(CURRENT_ATTEMPTS, _attemptIds);
                        }
                        TransactionAttempt attempt = new TransactionAttempt(curr, attemptId);
                        final TransactionStatus newTransactionStatus = new TransactionStatus(attempt);
                        _activeTx.put(curr, newTransactionStatus);
                        _collector.emit(BATCH_STREAM_ID, new Values(attempt), attempt);
                        LOG.debug("Emitted on [stream = {}], [tx_attempt = {}], [tx_status = {}], [{}]", BATCH_STREAM_ID, attempt,
                                newTransactionStatus, this);
                        _throttler.markEvent();
                    }
                    curr = nextTransactionId(curr);
                }
            }
        }
    }

    /**
     * A transaction is ready when emission is not throttled and at least one
     * spout coordinator reports readiness for the txid.
     */
    private boolean isReady(long txid) {
        if (_throttler.isThrottled()) {
            return false;
        }
        //TODO: make this strategy configurable?... right now it goes if anyone is ready
        for (ITridentSpout.BatchCoordinator coord : _coordinators) {
            if (coord.isReady(txid)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        Config ret = new Config();
        // The coordinator must be a singleton; batches are globally ordered.
        ret.setMaxTaskParallelism(1);
        ret.registerSerialization(TransactionAttempt.class);
        return ret;
    }

    private Long nextTransactionId(Long id) {
        return id + 1;
    }

    /**
     * Returns the highest persisted current-transaction id across all spout
     * states, or INIT_TXID when none is stored.
     */
    private Long getStoredCurrTransaction() {
        Long ret = INIT_TXID;
        for (TransactionalState state : _states) {
            Long curr = (Long) state.getData(CURRENT_TX);
            if (curr != null && curr.compareTo(ret) > 0) {
                ret = curr;
            }
        }
        return ret;
    }

    /**
     * Rebuilds the per-transaction attempt-id map from persisted state, keeping
     * for each txid the highest attempt seen across all spout states, then
     * prunes entries outside the window [currTransaction, currTransaction + maxBatches - 1).
     */
    private TreeMap<Long, Integer> getStoredCurrAttempts(long currTransaction, int maxBatches) {
        TreeMap<Long, Integer> ret = new TreeMap<Long, Integer>();
        for (TransactionalState state : _states) {
            Map<Object, Number> attempts = (Map) state.getData(CURRENT_ATTEMPTS);
            if (attempts == null) {
                attempts = new HashMap();
            }
            for (Entry<Object, Number> e : attempts.entrySet()) {
                // this is because json doesn't allow numbers as keys...
                // TODO: replace json with a better form of encoding
                Number txidObj;
                if (e.getKey() instanceof String) {
                    txidObj = Long.parseLong((String) e.getKey());
                } else {
                    txidObj = (Number) e.getKey();
                }
                long txid = ((Number) txidObj).longValue();
                int attemptId = ((Number) e.getValue()).intValue();
                Integer curr = ret.get(txid);
                // Keep the highest attempt id recorded by any spout state.
                if (curr == null || attemptId > curr) {
                    ret.put(txid, attemptId);
                }
            }
        }
        ret.headMap(currTransaction).clear();
        ret.tailMap(currTransaction + maxBatches - 1).clear();
        return ret;
    }

    @Override
    public String toString() {
        return "MasterBatchCoordinator{" +
                "_states=" + _states +
                ", _activeTx=" + _activeTx +
                ", _attemptIds=" + _attemptIds +
                ", _collector=" + _collector +
                ", _currTransaction=" + _currTransaction +
                ", _maxTransactionActive=" + _maxTransactionActive +
                ", _coordinators=" + _coordinators +
                ", _managedSpoutIds=" + _managedSpoutIds +
                ", _spouts=" + _spouts +
                ", _throttler=" + _throttler +
                ", _active=" + _active +
                "}";
    }

    /** Lifecycle states of a transaction attempt: emitted -> acked -> committing. */
    private static enum AttemptStatus {
        PROCESSING,
        PROCESSED,
        COMMITTING
    }

    /** Pairs the current attempt for a transaction with its lifecycle state. */
    private static class TransactionStatus {
        TransactionAttempt attempt;
        AttemptStatus status;

        public TransactionStatus(TransactionAttempt attempt) {
            this.attempt = attempt;
            this.status = AttemptStatus.PROCESSING;
        }

        @Override
        public String toString() {
            return attempt.toString() + " <" + status.toString() + ">";
        }
    }
}
| apache-2.0 |
BenMcLean/SquidLib | squidlib-util/src/main/java/squidpony/squidai/AimLimit.java | 738 | package squidpony.squidai;
/**
 * Describes how an ability's targeting may be restricted relative to its user.
 * The restriction applies both to single targets and to the centers/aimed-at
 * cells of AOE effects, always within whatever range limit the ability itself
 * imposes.
 * Created by Tommy Ettinger on 12/17/2015.
 */
public enum AimLimit {
    /** No directional restriction; any cell within range may be targeted. */
    FREE,
    /** Targets must lie directly north, south, east, or west of the user. */
    ORTHOGONAL,
    /** Targets must lie directly northeast, southeast, southwest, or northwest of the user. */
    DIAGONAL,
    /** Union of ORTHOGONAL and DIAGONAL: any of the eight compass directions. */
    EIGHT_WAY
}
| apache-2.0 |
drewnoakes/metadata-extractor | Source/com/drew/metadata/xmp/XmpDirectory.java | 4489 | /*
* Copyright 2002-2019 Drew Noakes and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.metadata.xmp;
import com.adobe.internal.xmp.XMPException;
import com.adobe.internal.xmp.XMPIterator;
import com.adobe.internal.xmp.XMPMeta;
import com.adobe.internal.xmp.impl.XMPMetaImpl;
import com.adobe.internal.xmp.options.IteratorOptions;
import com.adobe.internal.xmp.properties.XMPPropertyInfo;
import com.drew.lang.annotations.NotNull;
import com.drew.lang.annotations.Nullable;
import com.drew.metadata.Directory;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
* Wraps an instance of Adobe's {@link XMPMeta} object, which holds XMP data.
* <p />
* XMP uses a namespace and path format for identifying values, which does not map to metadata-extractor's
* integer based tag identifiers. Therefore, XMP data is extracted and exposed via {@link XmpDirectory#getXMPMeta()}
* which returns an instance of Adobe's {@link XMPMeta} which exposes the full XMP data set.
*
* @author Torsten Skadell
* @author Drew Noakes https://drewnoakes.com
*/
@SuppressWarnings("WeakerAccess")
public class XmpDirectory extends Directory
{
    public static final int TAG_XMP_VALUE_COUNT = 0xFFFF;

    @NotNull
    private static final HashMap<Integer, String> _tagNameMap = new HashMap<Integer, String>();

    static {
        _tagNameMap.put(TAG_XMP_VALUE_COUNT, "XMP Value Count");
    }

    @Nullable
    private XMPMeta _xmpMeta;

    /** Creates the directory and attaches its descriptor. */
    public XmpDirectory()
    {
        this.setDescriptor(new XmpDescriptor(this));
    }

    @Override
    @NotNull
    public String getName()
    {
        return "XMP";
    }

    @Override
    @NotNull
    protected HashMap<Integer, String> getTagNameMap()
    {
        return _tagNameMap;
    }

    /**
     * Gets an unmodifiable map of every leaf XMP property in this directory,
     * keyed by property path.
     * <p>
     * This is required because XMP properties are addressed by string paths,
     * whereas the rest of this library uses integers for keys.
     */
    @NotNull
    public Map<String, String> getXmpProperties()
    {
        Map<String, String> pathToValue = new HashMap<String, String>();
        if (_xmpMeta != null)
        {
            try {
                // Iterate leaf nodes only; containers carry no values of their own.
                XMPIterator iterator = _xmpMeta.iterator(new IteratorOptions().setJustLeafnodes(true));
                while (iterator.hasNext()) {
                    XMPPropertyInfo info = (XMPPropertyInfo)iterator.next();
                    String propertyPath = info.getPath();
                    String propertyValue = info.getValue();
                    if (propertyPath != null && propertyValue != null) {
                        pathToValue.put(propertyPath, propertyValue);
                    }
                }
            } catch (XMPException ignored) {
                // Iteration failed; return whatever was collected so far.
            }
        }
        return Collections.unmodifiableMap(pathToValue);
    }

    /**
     * Stores the given XMP metadata and records the number of leaf properties
     * it contains under {@link #TAG_XMP_VALUE_COUNT}.
     */
    public void setXMPMeta(@NotNull XMPMeta xmpMeta)
    {
        _xmpMeta = xmpMeta;
        try {
            int leafCount = 0;
            XMPIterator iterator = _xmpMeta.iterator(new IteratorOptions().setJustLeafnodes(true));
            while (iterator.hasNext()) {
                XMPPropertyInfo info = (XMPPropertyInfo)iterator.next();
                if (info.getPath() != null) {
                    leafCount++;
                }
            }
            setInt(TAG_XMP_VALUE_COUNT, leafCount);
        } catch (XMPException ignored) {
        }
    }

    /**
     * Gets the XMPMeta object used to populate this directory. It can be used for more XMP-oriented operations.
     * If one does not exist it will be created.
     */
    @NotNull
    public XMPMeta getXMPMeta()
    {
        if (_xmpMeta == null)
            _xmpMeta = new XMPMetaImpl();
        return _xmpMeta;
    }
}
| apache-2.0 |
apache/jena | jena-extras/jena-querybuilder/src/test/java/org/apache/jena/arq/querybuilder/handlers/SolutionModifierHandlerTest.java | 8734 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.arq.querybuilder.handlers;
import static org.junit.Assert.assertEquals;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.jena.graph.Node;
import org.apache.jena.query.Query;
import org.apache.jena.query.SortCondition;
import org.apache.jena.sparql.core.Var;
import org.apache.jena.sparql.expr.E_Random;
import org.apache.jena.sparql.lang.sparql_11.ParseException;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@code SolutionModifierHandler}: GROUP BY, HAVING, ORDER BY,
 * LIMIT and OFFSET handling, plus variable substitution via setVars. Each test
 * renders the query to text and matches it with regex helpers from
 * {@code AbstractHandlerTest}.
 */
public class SolutionModifierHandlerTest extends AbstractHandlerTest {
    private Query query;
    private SolutionModifierHandler solutionModifier;

    /** Creates a fresh Query and handler before each test. */
    @Before
    public void setup() {
        query = new Query();
        solutionModifier = new SolutionModifierHandler(query);
    }

    /** addAll should copy every modifier from another handler into this one. */
    @Test
    public void testAddAll() throws ParseException {
        SolutionModifierHandler solutionModifier2 = new SolutionModifierHandler(new Query());
        solutionModifier2.addOrderBy(Var.alloc("orderBy"));
        solutionModifier2.addGroupBy(Var.alloc("groupBy"));
        solutionModifier2.addHaving("?having<10");
        solutionModifier2.setLimit(500);
        solutionModifier2.setOffset(200);
        solutionModifier.addAll(solutionModifier2);
        String[] s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + var("groupBy"), s);
        assertContainsRegex(HAVING + OPEN_PAREN + var("having") + OPT_SPACE + LT + OPT_SPACE + "10" + CLOSE_PAREN, s);
        assertContainsRegex(ORDER_BY + var("orderBy"), s);
        assertContainsRegex(LIMIT + "500", s);
        assertContainsRegex(OFFSET + "200", s);
    }

    /** All five modifier kinds applied directly should render together. */
    @Test
    public void testAll() throws ParseException {
        solutionModifier.addOrderBy(Var.alloc("orderBy"));
        solutionModifier.addGroupBy(Var.alloc("groupBy"));
        solutionModifier.addHaving("SUM(?lprice) > 10");
        solutionModifier.setLimit(500);
        solutionModifier.setOffset(200);
        String[] s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + var("groupBy"), s);
        assertContainsRegex(HAVING + OPEN_PAREN + "SUM" + OPEN_PAREN + var("lprice") + CLOSE_PAREN + OPT_SPACE + GT
                + "10" + CLOSE_PAREN, s);
        assertContainsRegex(ORDER_BY + var("orderBy"), s);
        assertContainsRegex(LIMIT + "500", s);
        assertContainsRegex(OFFSET + "200", s);
    }

    /** ORDER BY conditions should accumulate in insertion order. */
    @Test
    public void testAddOrderBy() {
        solutionModifier.addOrderBy(Var.alloc("orderBy"));
        List<SortCondition> sc = query.getOrderBy();
        assertEquals("Wrong number of conditions", 1, sc.size());
        assertEquals("Wrong value", sc.get(0).expression.asVar(), Var.alloc("orderBy"));
        solutionModifier.addOrderBy(Var.alloc("orderBy2"));
        sc = query.getOrderBy();
        assertEquals("Wrong number of conditions", 2, sc.size());
        assertEquals("Wrong value", sc.get(0).expression.asVar(), Var.alloc("orderBy"));
        assertEquals("Wrong value", sc.get(1).expression.asVar(), Var.alloc("orderBy2"));
    }

    /** GROUP BY variables should accumulate in insertion order. */
    @Test
    public void testAddGroupByVar() {
        solutionModifier.addGroupBy(Var.alloc("groupBy"));
        String[] s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + var("groupBy"), s);
        solutionModifier.addGroupBy(Var.alloc("groupBy2"));
        s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + var("groupBy") + SPACE + var("groupBy2"), s);
    }

    /** GROUP BY should accept a bare expression (rendered as a function call). */
    @Test
    public void testAddGroupByExpr() {
        solutionModifier.addGroupBy(new E_Random());
        String[] s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + "rand" + OPEN_PAREN + CLOSE_PAREN, s);
        solutionModifier.addGroupBy(Var.alloc("groupBy2"));
        s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + "rand" + OPEN_PAREN + CLOSE_PAREN + SPACE + var("groupBy2"), s);
    }

    /** GROUP BY with var + expression should render as (expr AS ?var). */
    @Test
    public void testAddGroupByVarAndExpr() {
        solutionModifier.addGroupBy(Var.alloc("groupBy"), new E_Random());
        String[] s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + OPEN_PAREN + "rand" + OPEN_PAREN + CLOSE_PAREN + SPACE + "AS" + SPACE
                + var("groupBy") + CLOSE_PAREN, s);
        solutionModifier.addGroupBy(Var.alloc("groupBy2"));
        s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + OPEN_PAREN + "rand" + OPEN_PAREN + CLOSE_PAREN + SPACE + "AS" + SPACE
                + var("groupBy") + CLOSE_PAREN + SPACE + var("groupBy2"), s);
    }

    /** HAVING should accept a textual expression and accumulate clauses. */
    @Test
    public void testAddHavingString() throws ParseException {
        solutionModifier.addHaving("?having<10");
        assertContainsRegex(HAVING + OPEN_PAREN + var("having") + OPT_SPACE + LT + 10 + CLOSE_PAREN, query.toString());
        solutionModifier.addHaving("?having2");
        assertContainsRegex(
                HAVING + OPEN_PAREN + var("having") + OPT_SPACE + LT + 10 + CLOSE_PAREN + OPT_SPACE + var("having2"),
                query.toString());
    }

    /** HAVING should accept a bare variable. */
    @Test
    public void testAddHavingVar() throws ParseException {
        solutionModifier.addHaving(Var.alloc("foo"));
        assertContainsRegex(HAVING + var("foo"), query.toString());
        solutionModifier.addHaving("?having2");
        assertContainsRegex(HAVING + var("foo") + SPACE + var("having2"), query.toString());
    }

    /** HAVING should accept an Expr instance. */
    @Test
    public void testAddHavingExpr() throws ParseException {
        solutionModifier.addHaving(new E_Random());
        assertContainsRegex(HAVING + "rand" + OPEN_PAREN + CLOSE_PAREN, query.toString());
        solutionModifier.addHaving("?having2");
        assertContainsRegex(HAVING + "rand" + OPEN_PAREN + CLOSE_PAREN + SPACE + var("having2"), query.toString());
    }

    /** setLimit replaces the previous limit; a negative value removes it. */
    @Test
    public void testSetLimit() {
        solutionModifier.setLimit(500);
        String[] s = byLine(query.toString());
        assertContainsRegex("LIMIT\\s+500", s);
        solutionModifier.setLimit(200);
        s = byLine(query.toString());
        assertContainsRegex("LIMIT\\s+200", s);
        solutionModifier.setLimit(-1);
        s = byLine(query.toString());
        assertNotContainsRegex("LIMIT.*", s);
    }

    /** setOffset replaces the previous offset; a negative value removes it. */
    @Test
    public void testSetOffset() {
        solutionModifier.setOffset(500);
        String[] s = byLine(query.toString());
        assertContainsRegex("OFFSET\\s+500", s);
        solutionModifier.setOffset(200);
        s = byLine(query.toString());
        assertContainsRegex("OFFSET\\s+200", s);
        solutionModifier.setOffset(-1);
        s = byLine(query.toString());
        assertNotContainsRegex("OFFSET.*", s);
    }

    /** setVars should substitute variables inside GROUP BY clauses. */
    @Test
    public void testSetVarsGroupBy() {
        Var v = Var.alloc("v");
        solutionModifier.addGroupBy(v);
        String[] s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + var("v"), s);
        Map<Var, Node> values = new HashMap<>();
        values.put(v, Var.alloc("v2"));
        solutionModifier.setVars(values);
        s = byLine(query.toString());
        assertContainsRegex(GROUP_BY + var("v2"), s);
    }

    /** setVars should substitute variables inside HAVING clauses. */
    @Test
    public void testSetVarsHaving() {
        Var v = Var.alloc("v");
        solutionModifier.addHaving(v);
        String[] s = byLine(query.toString());
        assertContainsRegex(HAVING + var("v"), s);
        Map<Var, Node> values = new HashMap<>();
        values.put(v, Var.alloc("v2"));
        solutionModifier.setVars(values);
        s = byLine(query.toString());
        assertContainsRegex(HAVING + var("v2"), s);
    }

    /** setVars should substitute variables inside ORDER BY clauses. */
    @Test
    public void testSetVarsOrderBy() {
        Var v = Var.alloc("v");
        solutionModifier.addOrderBy(v);
        String[] s = byLine(query.toString());
        assertContainsRegex(ORDER_BY + var("v"), s);
        Map<Var, Node> values = new HashMap<>();
        values.put(v, Var.alloc("v2"));
        solutionModifier.setVars(values);
        s = byLine(query.toString());
        assertContainsRegex(ORDER_BY + var("v2"), s);
    }
}
| apache-2.0 |
prabathabey/carbon-identity | components/user-mgt/org.wso2.carbon.user.mgt.workflow/src/main/java/org/wso2/carbon/user/mgt/workflow/userstore/SetMultipleClaimsWFRequestHandler.java | 9052 | /*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.user.mgt.workflow.userstore;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.identity.workflow.mgt.WorkflowManagementService;
import org.wso2.carbon.identity.workflow.mgt.bean.Entity;
import org.wso2.carbon.identity.workflow.mgt.exception.InternalWorkflowException;
import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowException;
import org.wso2.carbon.identity.workflow.mgt.extension.AbstractWorkflowRequestHandler;
import org.wso2.carbon.identity.workflow.mgt.util.WorkflowDataType;
import org.wso2.carbon.identity.workflow.mgt.util.WorkflowRequestStatus;
import org.wso2.carbon.user.api.UserRealm;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.user.mgt.workflow.internal.IdentityWorkflowDataHolder;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.UUID;
/**
 * Workflow request handler that intercepts multi-claim updates on a user and
 * routes them through the workflow engine. If a workflow is engaged for the
 * event, the actual claim update is deferred until the workflow callback
 * approves (or skips) the request; otherwise the update proceeds directly.
 */
public class SetMultipleClaimsWFRequestHandler extends AbstractWorkflowRequestHandler {

    private static final String FRIENDLY_NAME = "Update User Claims";
    private static final String FRIENDLY_DESCRIPTION = "Triggered when a user updates his claims";
    // Keys of the workflow parameters passed to / read back from the engine.
    private static final String USERNAME = "Username";
    private static final String USER_STORE_DOMAIN = "User Store Domain";
    private static final String CLAIMS = "Claims";
    private static final String PROFILE_NAME = "Profile";

    // Ordered parameter definitions (name -> workflow data type).
    private static final Map<String, String> PARAM_DEFINITION;

    private static Log log = LogFactory.getLog(SetMultipleClaimsWFRequestHandler.class);

    static {
        // LinkedHashMap preserves declaration order for the parameter listing.
        PARAM_DEFINITION = new LinkedHashMap<>();
        PARAM_DEFINITION.put(USERNAME, WorkflowDataType.STRING_TYPE);
        PARAM_DEFINITION.put(USER_STORE_DOMAIN, WorkflowDataType.STRING_TYPE);
        PARAM_DEFINITION.put(CLAIMS, WorkflowDataType.STRING_STRING_MAP_TYPE);
        PARAM_DEFINITION.put(PROFILE_NAME, WorkflowDataType.STRING_TYPE);
    }

    /**
     * Starts the set-multiple-claims workflow for the given user.
     *
     * @param userStoreDomain user store domain of the user
     * @param userName        username (without domain)
     * @param claims          claim URI to value map being set
     * @param profileName     target profile
     * @return the state returned by {@code startWorkFlow} (workflow engaged or not)
     * @throws WorkflowException if the operation conflicts with pending workflows
     */
    public boolean startSetMultipleClaimsWorkflow(String userStoreDomain, String userName, Map<String, String>
            claims, String profileName) throws WorkflowException {
        WorkflowManagementService workflowService = IdentityWorkflowDataHolder.getInstance().getWorkflowService();
        int tenant = CarbonContext.getThreadLocalCarbonContext().getTenantId();
        String fullyQualifiedName = UserCoreUtil.addDomainToName(userName, userStoreDomain);
        Map<String, Object> wfParams = new HashMap<>();
        Map<String, Object> nonWfParams = new HashMap<>();
        wfParams.put(USERNAME, userName);
        wfParams.put(USER_STORE_DOMAIN, userStoreDomain);
        wfParams.put(CLAIMS, claims);
        wfParams.put(PROFILE_NAME, profileName);
        String uuid = UUID.randomUUID().toString();
        // One entity for the user plus one per claim being updated.
        Entity[] entities = new Entity[claims.size() + 1];
        entities[0] = new Entity(fullyQualifiedName, UserStoreWFConstants.ENTITY_TYPE_USER, tenant);
        int i = 1;
        for (String key : claims.keySet()) {
            entities[i] = new Entity(key, UserStoreWFConstants.ENTITY_TYPE_CLAIM, tenant);
            i++;
        }
        // Reject the operation up front when a workflow is engaged and a
        // conflicting pending workflow exists for the user or claims.
        if (workflowService.eventEngagedWithWorkflows(UserStoreWFConstants.SET_MULTIPLE_USER_CLAIMS_EVENT) &&
                !Boolean.TRUE.equals(getWorkFlowCompleted()) && !isValidOperation(entities)) {
            throw new WorkflowException("Operation is not valid.");
        }
        boolean state = startWorkFlow(wfParams, nonWfParams, uuid);
        //WF_REQUEST_ENTITY_RELATIONSHIP table has foreign key to WF_REQUEST, so need to run this after WF_REQUEST is
        // updated
        if (!Boolean.TRUE.equals(getWorkFlowCompleted()) && !state) {
            try {
                workflowService.addRequestEntityRelationships(uuid, entities);
            } catch (InternalWorkflowException e) {
                //debug exception which occurs at DB level since no workflows associated with event
                if (log.isDebugEnabled()) {
                    log.debug("No workflow associated with the operation.", e);
                }
            }
        }
        return state;
    }

    /**
     * Callback invoked when the workflow completes. On APPROVED or SKIPPED the
     * claim update is applied to the user store; otherwise it is aborted (and
     * the thread-local completion flag is reset when retries are needed).
     */
    @Override
    public void onWorkflowCompletion(String status, Map<String, Object> requestParams,
                                     Map<String, Object> responseAdditionalParams, int tenantId)
            throws WorkflowException {
        String userName;
        Object requestUsername = requestParams.get(USERNAME);
        if (requestUsername == null || !(requestUsername instanceof String)) {
            throw new WorkflowException("Callback request for Set User Claim received without the mandatory " +
                    "parameter 'username'");
        }
        String userStoreDomain = (String) requestParams.get(USER_STORE_DOMAIN);
        if (StringUtils.isNotBlank(userStoreDomain)) {
            userName = userStoreDomain + "/" + requestUsername;
        } else {
            userName = (String) requestUsername;
        }
        // NOTE(review): unchecked cast — assumes the engine round-trips the
        // CLAIMS parameter as Map<String, String>; TODO confirm.
        Map<String, String> claims = (Map<String, String>) requestParams.get(CLAIMS);
        String profile = (String) requestParams.get(PROFILE_NAME);
        if (WorkflowRequestStatus.APPROVED.toString().equals(status) ||
                WorkflowRequestStatus.SKIPPED.toString().equals(status)) {
            try {
                RealmService realmService = IdentityWorkflowDataHolder.getInstance().getRealmService();
                UserRealm userRealm = realmService.getTenantUserRealm(tenantId);
                userRealm.getUserStoreManager().setUserClaimValues(userName, claims, profile);
            } catch (UserStoreException e) {
                // Sending e.getMessage() since it is required to give error message to end user.
                throw new WorkflowException(e.getMessage(), e);
            }
        } else {
            if (retryNeedAtCallback()) {
                //unset threadlocal variable
                unsetWorkFlowCompleted();
            }
            if (log.isDebugEnabled()) {
                log.debug("Setting User Claims is aborted for user '" + userName + "', Reason: Workflow response was " +
                        status);
            }
        }
    }

    @Override
    public boolean retryNeedAtCallback() {
        return true;
    }

    @Override
    public String getEventId() {
        return UserStoreWFConstants.SET_MULTIPLE_USER_CLAIMS_EVENT;
    }

    @Override
    public Map<String, String> getParamDefinitions() {
        return PARAM_DEFINITION;
    }

    @Override
    public String getFriendlyName() {
        return FRIENDLY_NAME;
    }

    @Override
    public String getDescription() {
        return FRIENDLY_DESCRIPTION;
    }

    @Override
    public String getCategory() {
        return UserStoreWFConstants.CATEGORY_USERSTORE_OPERATIONS;
    }

    /**
     * Validates that neither the user nor any targeted claim is locked by a
     * pending workflow (user deletion, or a pending delete/update on a claim).
     *
     * @param entities user entity followed by one entity per claim
     * @return {@code true} when no conflicting pending workflow exists
     * @throws WorkflowException when a conflict is found or lookup fails
     */
    @Override
    public boolean isValidOperation(Entity[] entities) throws WorkflowException {
        // NOTE(review): entity types are compared with '==' below — safe only if
        // ENTITY_TYPE_* are enums/interned constants; verify they are not
        // runtime-built Strings (which would make '==' an identity-comparison bug).
        WorkflowManagementService workflowService = IdentityWorkflowDataHolder.getInstance().getWorkflowService();
        for (int i = 0; i < entities.length; i++) {
            try {
                if (entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_USER && workflowService
                        .entityHasPendingWorkflowsOfType(entities[i], UserStoreWFConstants.DELETE_USER_EVENT)) {
                    throw new WorkflowException("User has a delete operation pending.");
                }
                if (entities[i].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_USER) {
                    for (int j = 0; j < entities.length; j++) {
                        if (entities[j].getEntityType() == UserStoreWFConstants.ENTITY_TYPE_CLAIM && workflowService
                                .areTwoEntitiesRelated(entities[i], entities[j])) {
                            throw new WorkflowException(entities[j].getEntityId() + " of user is already in a " +
                                    "workflow to delete or update.");
                        }
                    }
                }
            } catch (InternalWorkflowException e) {
                throw new WorkflowException(e.getMessage(), e);
            }
        }
        return true;
    }
}
| apache-2.0 |
ngs-mtech/drools | drools-compiler/src/test/java/org/drools/compiler/integrationtests/BranchTest.java | 25316 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.integrationtests;
import org.drools.compiler.Cheese;
import org.drools.compiler.CommonTestMethodBase;
import org.junit.Test;
import org.kie.api.KieBase;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import java.util.ArrayList;
import java.util.List;
public class BranchTest extends CommonTestMethodBase {
/**
 * do[t1] fires the named consequence t1 in addition to the default one, so
 * both the stilton (then[t1]) and cheddar (default then) results are expected.
 */
@Test
public void testNamedConsequences() {
    List<String> results = executeTestWithCondition("do[t1]");
    assertEquals( 2, results.size() );
    assertTrue( results.contains( "cheddar" ) );
    assertTrue( results.contains( "stilton" ) );
}
private List<String> executeTestWithCondition(String conditionElement) {
String drl = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" " + conditionElement + "\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
return executeTestWithDRL(drl);
}
private List<String> executeTestWithDRL(String drl) {
KieBase kbase = loadKnowledgeBaseFromString(drl);
KieSession ksession = kbase.newKieSession();
List<String> results = new ArrayList<String>();
ksession.setGlobal( "results", results );
Cheese stilton = new Cheese( "stilton", 5 );
Cheese cheddar = new Cheese( "cheddar", 7 );
Cheese brie = new Cheese( "brie", 5 );
ksession.insert( stilton );
ksession.insert( cheddar );
ksession.insert( brie );
ksession.fireAllRules();
return results;
}
@Test
public void testNonCompilingBreakingConsequences() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" break[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testNonCompilingDuplicatedNamedConsequence() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" do[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testOutOfScopeNamedConsequences() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" do[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $b.getType() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testAllowedIfDo() {
List<String> results = executeTestWithCondition("if ( price < 10 ) do[t1]");
assertEquals( 2, results.size() );
assertTrue( results.contains( "cheddar" ) );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testNotAllowedIfDo() {
List<String> results = executeTestWithCondition("if ( price > 10 ) do[t1]");
assertEquals( 1, results.size() );
assertTrue( results.contains( "cheddar" ) );
}
@Test
public void testAllowedIfBreak() {
List<String> results = executeTestWithCondition("if ( price < 10 ) break[t1]");
assertEquals( 1, results.size() );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testNotAllowedIfBreak() {
List<String> results = executeTestWithCondition("if ( price > 10 ) break[t1]");
assertEquals( 1, results.size() );
assertTrue( results.contains( "cheddar" ) );
}
@Test
public void testNamedConsequencesOutsideOR() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" ( $a: Cheese ( type == \"stilton\" )\n" +
" or\n" +
" $a: Cheese ( type == \"gorgonzola\" ) )\n" +
" do[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 2, results.size() );
assertTrue( results.contains( "cheddar" ) );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testNamedConsequencesInsideOR1() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" ( $a: Cheese ( type == \"stilton\" ) do[t1]\n" +
" or\n" +
" $b: Cheese ( type == \"gorgonzola\" ) )\n" +
" $c: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $c.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 2, results.size() );
assertTrue( results.contains( "cheddar" ) );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testNamedConsequencesInsideOR2() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" ( $a: Cheese ( type == \"stilton\" )\n" +
" or\n" +
" $b: Cheese ( type == \"gorgonzola\" ) do[t1] )\n" +
" $c: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $c.getType() );\n" +
"then[t1]\n" +
" results.add( $b.getType() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 1, results.size() );
assertTrue( results.contains( "cheddar" ) );
}
@Test
public void testOutOfScopeNamedConsequencesWithOr1() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" ( $a: Cheese ( type == \"stilton\" )\n" +
" or\n" +
" $b: Cheese ( type == \"gorgonzola\" ) do[t1] )\n" +
" $c: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $c.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testOutOfScopeNamedConsequencesWithOr2() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" ( $a: Cheese ( type == \"stilton\" )\n" +
" or\n" +
" $b: Cheese ( type == \"gorgonzola\" ) do[t1] )\n" +
" $c: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $c.getType() );\n" +
"then[t1]\n" +
" results.add( $c.getType() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testNonCompilingIFAfterOR() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" ( $a: Cheese ( type == \"stilton\" )\n" +
" or\n" +
" $a: Cheese ( type == \"gorgonzola\" ) )\n" +
" if ( price > 10 ) do[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
assertTrue(kbuilder.hasErrors());
}
@Test
public void testMultipleIfAfterEval() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( )\n" +
" eval( $a.getType().equals(\"stilton\") )\n" +
" if ( $a.getPrice() > 10 ) do[t1]\n" +
" if ( $a.getPrice() < 10 ) do[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
"then[t2]\n" +
" results.add( $a.getType() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 2, results.size() );
assertTrue( results.contains( "cheddar" ) );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testInheritance() {
String str = "dialect \"mvel\"\n" +
"import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R0 when\n" +
" $a: Cheese ( )\n" +
"then\n" +
"end\n" +
"\n" +
"rule R1 extends R0 when\n" +
" eval( $a.getType().equals(\"stilton\") )\n" +
" if ( $a.getPrice() > 10 ) do[t1]\n" +
" if ( $a.getPrice() < 10 ) do[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.type );\n" +
"then[t1]\n" +
" results.add( $a.type.toUpperCase() );\n" +
"then[t2]\n" +
" results.add( $a.type );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 2, results.size() );
assertTrue( results.contains( "cheddar" ) );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testIfElse1() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price > 10 ) do[t1] else do[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"then[t2]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 2, results.size() );
assertTrue( results.contains( "cheddar" ) );
assertTrue( results.contains( "STILTON" ) );
}
@Test
public void testIfElse2() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price < 10 ) do[t1] else do[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t2]\n" +
" results.add( $a.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 2, results.size() );
assertTrue( results.contains( "cheddar" ) );
assertTrue( results.contains( "STILTON" ) );
}
@Test
public void testIfElseBreak() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price > 10 ) do[t1] else break[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"then[t2]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 1, results.size() );
assertTrue( results.contains( "STILTON" ) );
}
@Test
public void testMVELBreak() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 dialect \"mvel\" when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
" if ( 200 < 400 ) break[t1]\n" +
"then\n" +
" results.add( $b.type );\n" +
"then[t1]\n" +
" results.add( $a.type.toUpperCase() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
System.out.println( results );
assertEquals( 1, results.size() );
assertTrue( results.contains( "STILTON" ) );
}
@Test
public void testMVELNoBreak() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 dialect \"mvel\" when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
" if ( 200 > 400 ) break[t1]\n" +
"then\n" +
" results.add( $b.type );\n" +
"then[t1]\n" +
" results.add( $a.type.toUpperCase() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
System.out.println( results );
assertEquals( 1, results.size() );
assertTrue( results.contains( "cheddar" ) );
}
@Test
public void testWrongConsequenceName() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 dialect \"mvel\" when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
" if ( 200 < 400 ) break[t2]\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t1]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
"end\n";
KnowledgeBuilder builder = KnowledgeBuilderFactory.newKnowledgeBuilder();
builder.add( ResourceFactory.newByteArrayResource( str.getBytes() ), ResourceType.DRL);
assertTrue ( builder.hasErrors() );
}
@Test
public void testNestedIfElseBreak() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( type.startsWith(\"a\") ) do[t0] else if ( price > 10 ) do[t1] else break[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $b.getType() );\n" +
"then[t0]\n" +
" results.add( \"WRONG!\" );\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"then[t2]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 1, results.size() );
assertTrue( results.contains( "STILTON" ) );
}
@Test
public void testIfWithModify() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price > 10 ) break[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" modify( $a ) { setPrice(15) };\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 1, results.size() );
assertTrue( results.contains( "stilton" ) );
}
@Test(expected=RuntimeException.class)
public void testEndlessIfWithModify() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price > 10 ) do[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" modify( $a ) { setPrice(15) };\n" +
" results.add( \"modify\" );\n" +
" if (results.size() > 10) throw new RuntimeException();\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
" if (results.size() > 10) throw new RuntimeException();\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
}
@Test
public void testIfWithModify2() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price < 10 ) break[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" results.add( $a.getType() );\n" +
"then[t1]\n" +
" modify( $a ) { setPrice(15) };\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 1, results.size() );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testIfWithModify3() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\", price < 10 )\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" modify( $a ) { setPrice(15) };\n" +
"end\n" +
"rule R2 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price > 10 ) break[t1]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 1, results.size() );
assertTrue( results.contains( "stilton" ) );
}
@Test
public void testIfElseWithModify() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price < 10 ) do[t1] else break[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" modify( $a ) { setPrice(15) };\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"then[t2]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
assertEquals( 2, results.size() );
assertTrue( results.contains( "STILTON" ) );
}
@Test(expected=RuntimeException.class)
public void testEndlessIfElseWithModify() {
String str = "import org.drools.compiler.Cheese;\n " +
"global java.util.List results;\n" +
"\n" +
"rule R1 when\n" +
" $a: Cheese ( type == \"stilton\" )\n" +
" if ( price < 10 ) do[t1] else do[t2]\n" +
" $b: Cheese ( type == \"cheddar\" )\n" +
"then\n" +
" modify( $a ) { setPrice(15) };\n" +
" results.add( \"modify\" );\n" +
" if (results.size() > 10) throw new RuntimeException();\n" +
"then[t1]\n" +
" results.add( $a.getType() );\n" +
"then[t2]\n" +
" results.add( $a.getType().toUpperCase() );\n" +
" if (results.size() > 10) throw new RuntimeException();\n" +
"end\n";
List<String> results = executeTestWithDRL(str);
}
}
| apache-2.0 |
djodjoni/jus | android/jus-android/src/main/java/io/apptik/comm/jus/request/ImageRequest.java | 12413 | /*
* Copyright (C) 2015 Apptik Project
* Copyright (C) 2014 Kalin Maldzhanski
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apptik.comm.jus.request;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.widget.ImageView.ScaleType;
import io.apptik.comm.jus.DefaultRetryPolicy;
import io.apptik.comm.jus.JusLog;
import io.apptik.comm.jus.NetworkResponse;
import io.apptik.comm.jus.ParseError;
import io.apptik.comm.jus.Request;
import io.apptik.comm.jus.Response;
import io.apptik.comm.jus.toolbox.HttpHeaderParser;
import io.apptik.comm.jus.util.BitmapPool;
/**
 * A canned request for getting an image at a given URL and calling
 * back with a decoded Bitmap.
 *
 * <p>Decoding is serialized process-wide (see {@link #DECODE_LOCK}) to bound
 * peak heap usage, and an optional {@link BitmapPool} is consulted so decoded
 * bitmaps can reuse pooled memory via {@code BitmapFactory.Options.inBitmap}.
 */
public class ImageRequest extends Request<Bitmap> {
    /** Passed to {@code inPreferQualityOverSpeed} for bounded decodes. */
    private static final boolean PREFER_QUALITY_OVER_SPEED = false;

    /**
     * Socket timeout in milliseconds for image requests
     */
    private static final int IMAGE_TIMEOUT_MS = 1000;

    /**
     * Default number of retries for image requests
     */
    private static final int IMAGE_MAX_RETRIES = 3;

    /**
     * Default backoff multiplier for image requests
     */
    private static final float IMAGE_BACKOFF_MULT = 2f;

    /** Pixel format to decode into. */
    private final Config decodeConfig;
    /** Maximum decoded width, or 0 for unconstrained. */
    private final int maxWidth;
    /** Maximum decoded height, or 0 for unconstrained. */
    private final int maxHeight;
    /** ScaleType used when computing the target decode dimensions. */
    private ScaleType scaleType;
    /** Optional pool for bitmap reuse; may be null. */
    private BitmapPool bitmapPool;

    /**
     * Decoding lock so that we don't decode more than one image at a time (to avoid OOM's)
     */
    private static final Object DECODE_LOCK = new Object();

    /**
     * Creates a new image request, decoding to a maximum specified width and
     * height. If both width and height are zero, the image will be decoded to
     * its natural size. If one of the two is nonzero, that dimension will be
     * clamped and the other one will be set to preserve the image's aspect
     * ratio. If both width and height are nonzero, the image will be decoded to
     * be fit in the rectangle of dimensions width x height while keeping its
     * aspect ratio.
     *
     * @param url URL of the image
     * @param maxWidth Maximum width to decode this bitmap to, or zero for none
     * @param maxHeight Maximum height to decode this bitmap to, or zero for
     *            none
     * @param scaleType The ImageViews ScaleType used to calculate the needed image size.
     * @param decodeConfig Format to decode the bitmap to
     */
    public ImageRequest(String url, int maxWidth, int maxHeight,
                        ScaleType scaleType, Config decodeConfig) {
        super(Method.GET, url);
        setRetryPolicy(
                new DefaultRetryPolicy(IMAGE_TIMEOUT_MS, IMAGE_MAX_RETRIES, IMAGE_BACKOFF_MULT));
        this.decodeConfig = decodeConfig;
        this.maxWidth = maxWidth;
        this.maxHeight = maxHeight;
        this.scaleType = scaleType;
    }

    /** Returns an independent copy of this request sharing the same bitmap pool. */
    @Override
    public ImageRequest clone() {
        return new ImageRequest(getUrlString(), maxWidth, maxHeight,
                scaleType, decodeConfig).setBitmapPool(bitmapPool);
    }

    public BitmapPool getBitmapPool() {
        return bitmapPool;
    }

    /** Sets the pool used for inBitmap reuse; returns {@code this} for chaining. */
    public ImageRequest setBitmapPool(BitmapPool bitmapPool) {
        this.bitmapPool = bitmapPool;
        return this;
    }

    @Override
    public Priority getPriority() {
        // Images load behind other traffic.
        return Priority.LOW;
    }

    /**
     * Scales one side of a rectangle to fit aspect ratio.
     *
     * @param maxPrimary Maximum size of the primary dimension (i.e. width for
     *            max width), or zero to maintain aspect ratio with secondary
     *            dimension
     * @param maxSecondary Maximum size of the secondary dimension, or zero to
     *            maintain aspect ratio with primary dimension
     * @param actualPrimary Actual size of the primary dimension
     * @param actualSecondary Actual size of the secondary dimension
     * @param scaleType The ScaleType used to calculate the needed image size.
     */
    private static int getResizedDimension(int maxPrimary, int maxSecondary, int actualPrimary,
                                           int actualSecondary, ScaleType scaleType) {
        // If no dominant value at all, just return the actual.
        if ((maxPrimary == 0) && (maxSecondary == 0)) {
            return actualPrimary;
        }

        // If ScaleType.FIT_XY fill the whole rectangle, ignore ratio.
        if (scaleType == ScaleType.FIT_XY) {
            if (maxPrimary == 0) {
                return actualPrimary;
            }
            return maxPrimary;
        }

        // If primary is unspecified, scale primary to match secondary's scaling ratio.
        if (maxPrimary == 0) {
            double ratio = (double) maxSecondary / (double) actualSecondary;
            return (int) (actualPrimary * ratio);
        }

        if (maxSecondary == 0) {
            return maxPrimary;
        }

        double ratio = (double) actualSecondary / (double) actualPrimary;
        int resized = maxPrimary;

        // If ScaleType.CENTER_CROP fill the whole rectangle, preserve aspect ratio.
        if (scaleType == ScaleType.CENTER_CROP) {
            if ((resized * ratio) < maxSecondary) {
                resized = (int) (maxSecondary / ratio);
            }
            return resized;
        }

        if ((resized * ratio) > maxSecondary) {
            resized = (int) (maxSecondary / ratio);
        }
        return resized;
    }

    @Override
    public Response<Bitmap> parseNetworkResponse(NetworkResponse response) {
        // Serialize all decode on a global lock to reduce concurrent heap usage.
        synchronized (DECODE_LOCK) {
            try {
                return doParse(response);
            } catch (OutOfMemoryError e) {
                JusLog.error("Caught OOM for " + response.data.length + " byte image, url=" +
                        getUrl());
                return Response.error(new ParseError(e));
            }
        }
    }

    /**
     * The real guts of parseNetworkResponse. Broken out for readability.
     */
    private Response<Bitmap> doParse(NetworkResponse response) {
        byte[] data = response.data;
        BitmapFactory.Options decodeOptions = new BitmapFactory.Options();
        Bitmap bitmap = null;
        if (maxWidth == 0 && maxHeight == 0) {
            decodeOptions.inPreferredConfig = decodeConfig;
            // We don't have a specific size to find a reusable bitmap for, so
            // decode straight into a fresh one (no pool lookup possible here).
            try {
                bitmap = BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions);
            } catch (IllegalArgumentException ex) {
                JusLog.error("Unbounded decode failed: " + ex.getMessage());
            }
        } else {
            // If we have to resize this image, first get the natural bounds.
            decodeOptions.inJustDecodeBounds = true;
            BitmapFactory.decodeByteArray(data, 0, data.length, decodeOptions);
            int actualWidth = decodeOptions.outWidth;
            int actualHeight = decodeOptions.outHeight;

            // Then compute the dimensions we would ideally like to decode to.
            int desiredWidth = getResizedDimension(maxWidth, maxHeight,
                    actualWidth, actualHeight, scaleType);
            int desiredHeight = getResizedDimension(maxHeight, maxWidth,
                    actualHeight, actualWidth, scaleType);

            // Decode to the nearest power of two scaling factor.
            decodeOptions.inJustDecodeBounds = false;
            decodeOptions.inPreferQualityOverSpeed = PREFER_QUALITY_OVER_SPEED;
            decodeOptions.inSampleSize =
                    findBestSampleSize(actualWidth, actualHeight, desiredWidth, desiredHeight);
            Bitmap tempBitmap = try2decodeByteArray(data, 0, data.length, decodeOptions);

            // If necessary, scale down to the maximal acceptable size and
            // recycle the oversized intermediate into the pool when we have one.
            if (tempBitmap != null && (tempBitmap.getWidth() > desiredWidth ||
                    tempBitmap.getHeight() > desiredHeight)) {
                bitmap = Bitmap.createScaledBitmap(tempBitmap,
                        desiredWidth, desiredHeight, true);
                if (bitmapPool != null) {
                    bitmapPool.addToPool(tempBitmap);
                }
            } else {
                bitmap = tempBitmap;
            }
        }

        if (bitmap == null) {
            return Response.error(new ParseError(response));
        } else {
            return Response.success(bitmap, HttpHeaderParser.parseCacheHeaders(response));
        }
    }

    /**
     * Decodes {@code length} bytes of {@code data} starting at {@code offset},
     * attempting inBitmap reuse from the pool up to two times before falling
     * back to a plain allocation (works around
     * "IllegalArgumentException: Problem decoding into existing bitmap").
     *
     * <p>Fix: previously the {@code offset}/{@code length} parameters were
     * ignored and the whole array was always decoded; callers passing the full
     * array (as {@link #doParse} does) are unaffected.
     *
     * @return the decoded bitmap, or null if all attempts failed
     */
    public Bitmap try2decodeByteArray(byte[] data, int offset, int length, BitmapFactory
            .Options decodeOptions) {
        Bitmap tempBitmap = null;
        addInBitmapOptions(decodeOptions);
        try {
            tempBitmap = BitmapFactory.decodeByteArray(data, offset, length, decodeOptions);
        } catch (IllegalArgumentException ex) {
            JusLog.error("1st decode failed: " + ex.getMessage());
            decodeOptions.inBitmap = null;
        }
        // Retry once with a (possibly different) pooled bitmap.
        if (tempBitmap == null) {
            addInBitmapOptions(decodeOptions);
            try {
                tempBitmap = BitmapFactory.decodeByteArray(data, offset, length, decodeOptions);
            } catch (IllegalArgumentException ex) {
                JusLog.error("2nd decode failed: " + ex.getMessage());
                decodeOptions.inBitmap = null;
            }
        }
        // Give up on reuse and decode without inBitmap.
        if (tempBitmap == null) {
            try {
                // Just in case a previous attempt left a stale reference behind.
                decodeOptions.inBitmap = null;
                tempBitmap = BitmapFactory.decodeByteArray(data, offset, length, decodeOptions);
            } catch (IllegalArgumentException ex) {
                JusLog.error("3rd decode failed: " + ex.getMessage());
            }
        }
        return tempBitmap;
    }

    /** Points {@code options.inBitmap} at a reusable pooled bitmap, if any. */
    private void addInBitmapOptions(BitmapFactory.Options options) {
        // inBitmap only works with mutable bitmaps so force the decoder to
        // return mutable bitmaps.
        options.inMutable = true;
        if (bitmapPool != null) {
            // Try and find a bitmap to use for inBitmap
            options.inBitmap = bitmapPool.getReusableBitmap(options);
        }
    }

    /**
     * Returns the largest power-of-two divisor for use in downscaling a bitmap
     * that will not result in the scaling past the desired dimensions.
     *
     * @param actualWidth Actual width of the bitmap
     * @param actualHeight Actual height of the bitmap
     * @param desiredWidth Desired width of the bitmap
     * @param desiredHeight Desired height of the bitmap
     */
    // Visible for testing.
    static int findBestSampleSize(
            int actualWidth, int actualHeight, int desiredWidth, int desiredHeight) {
        double wr = (double) actualWidth / desiredWidth;
        double hr = (double) actualHeight / desiredHeight;
        double ratio = Math.min(wr, hr);
        float n = 1.0f;
        while ((n * 2) <= ratio) {
            n *= 2;
        }
        return (int) n;
    }

    @Override
    public String toString() {
        return "ImageRequest{" +
                "bitmapPool=" + bitmapPool +
                ", decodeConfig=" + decodeConfig +
                ", maxWidth=" + maxWidth +
                ", maxHeight=" + maxHeight +
                ", scaleType=" + scaleType +
                "} " + super.toString();
    }
}
| apache-2.0 |
nmldiegues/jvm-stm | tests/t5/TestFields.java | 404 | package t5;
public class TestFields{
public static void main(String[] args){
FieldsHolder holder = new FieldsHolder();
Thread t1 = new ThreadFields(holder);
Thread t2 = new ThreadFields(holder);
t1.start();
t2.start();
try{
t1.join();
} catch(InterruptedException e){}
try{
t2.join();
}catch(InterruptedException e) {}
System.out.println(holder.toString());
}
} | apache-2.0 |
bivasdas/ostrich | dropwizard/src/test/java/com/bazaarvoice/ostrich/dropwizard/healthcheck/CachingHealthCheckTest.java | 1538 | package com.bazaarvoice.ostrich.dropwizard.healthcheck;
import com.codahale.metrics.health.HealthCheck;
import org.junit.Test;
import org.mockito.Mockito;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class CachingHealthCheckTest {

    /** Builds a mock HealthCheck whose execute() always yields {@code result}. */
    private static HealthCheck mockCheckReturning(HealthCheck.Result result) {
        HealthCheck check = Mockito.mock(HealthCheck.class);
        when(check.execute()).thenReturn(result);
        return check;
    }

    @Test
    public void testCheckHealthy() {
        HealthCheck inner = mockCheckReturning(HealthCheck.Result.healthy());
        CachingHealthCheck cached = new CachingHealthCheck(inner);
        // Two executions must both report healthy...
        assertTrue(cached.execute().isHealthy());
        assertTrue(cached.execute().isHealthy());
        // ...while the wrapped check is consulted exactly once (cached result).
        verify(inner).execute();
    }

    @Test
    public void testCheckUnhealthy() {
        HealthCheck inner = mockCheckReturning(HealthCheck.Result.unhealthy("test"));
        CachingHealthCheck cached = new CachingHealthCheck(inner);
        // Unhealthy results are cached just like healthy ones.
        assertFalse(cached.execute().isHealthy());
        assertFalse(cached.execute().isHealthy());
        verify(inner).execute();
    }

    interface Service {}
}
| apache-2.0 |
senseidb/sensei | sensei-core/src/main/java/com/senseidb/indexing/activity/deletion/DeletionListener.java | 305 | package com.senseidb.indexing.activity.deletion;
import org.apache.lucene.index.IndexReader;
/**
 * Callback interface notified when documents are deleted from a Zoie index.
 */
public interface DeletionListener {
  /**
   * Invoked after the given documents have been deleted.
   *
   * @param indexReader reader over the index the documents were removed from
   * @param uids UIDs of the deleted documents
   */
  public void onDelete(IndexReader indexReader, long... uids);
}
| apache-2.0 |
iceeer/connectbot-tablet | src/org/connectbot/transport/TransportWrapper.java | 5356 | /*
* ConnectBot: simple, powerful, open-source SSH client for Android
* Copyright 2007 Kenny Root, Jeffrey Sharkey
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.connectbot.transport;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;
import org.connectbot.bean.HostBean;
import org.connectbot.bean.PortForwardBean;
import org.connectbot.service.TerminalBridge;
import org.connectbot.service.TerminalManager;
import android.content.Context;
import android.net.Uri;
import android.util.Log;
/**
* Wraps write(), flush() and close() to be dispatched asynchronously.
*
* A nicer implementation might better abstract out the I/O related APIs and
* wrap those only.
* @author Perry Nguyen
*/
public class TransportWrapper extends AbsTransport implements Runnable {
private final static String TAG = "ConnectBot.TransportWrapper";
private AbsTransport transport;
private LinkedBlockingQueue<Object> queue = new LinkedBlockingQueue<Object>();
private final static String CMD_CLOSE = "close";
private final static String CMD_FLUSH = "flush";
	/** Wraps {@code t}; write/flush/close calls will be queued and replayed asynchronously. */
	public TransportWrapper(AbsTransport t) {
		transport = t;
	}
	/** Starts the dispatcher thread for queued I/O, then opens the wrapped transport. */
	public void connect() {
		new Thread(this, TAG).start();
		transport.connect();
	}
	/** Reads synchronously from the wrapped transport; reads are not queued. */
	public int read(byte[] buffer, int offset, int length) throws IOException {
		return transport.read(buffer, offset, length);
	}
	// Enqueues the buffer; the dispatcher thread performs the actual write.
	// NOTE(review): the caller's array is queued without a defensive copy, so
	// mutating it after this returns races with the async write — confirm
	// callers always hand over freshly allocated buffers.
	public void write(byte[] buffer) throws IOException {
		queue.add(buffer);
	}
	/** Enqueues a single byte (boxed as Integer; distinguished by type in run()). */
	public void write(int c) throws IOException {
		queue.add(c);
	}
	/** Enqueues a flush marker; the actual flush runs on the dispatcher thread. */
	public void flush() throws IOException {
		queue.add(CMD_FLUSH);
	}
	/** Enqueues a close marker; the dispatcher thread closes the transport and exits. */
	public void close() {
		queue.add(CMD_CLOSE);
	}
	/** Delegates directly to the wrapped transport. */
	public void setDimensions(int columns, int rows, int width, int height) {
		transport.setDimensions(columns, rows, width, height);
	}
	/** Delegates directly to the wrapped transport. */
	public void setOptions(Map<String,String> options) {
		transport.setOptions(options);
	}
	/** Delegates directly to the wrapped transport. */
	public Map<String,String> getOptions() {
		return transport.getOptions();
	}
	/** Delegates directly to the wrapped transport. */
	public void setCompression(boolean compression) {
		transport.setCompression(compression);
	}
	/** Delegates directly to the wrapped transport. */
	public void setUseAuthAgent(String useAuthAgent) {
		transport.setUseAuthAgent(useAuthAgent);
	}
public void setEmulation(String emulation) {
super.setEmulation(emulation);
transport.setEmulation(emulation);
}
public String getEmulation() {
return transport.getEmulation();
}
public void setHost(HostBean host) {
super.setHost(host);
transport.setHost(host);
}
public void setBridge(TerminalBridge bridge) {
super.setBridge(bridge);
transport.setBridge(bridge);
}
public void setManager(TerminalManager manager) {
super.setManager(manager);
transport.setManager(manager);
}
public boolean canForwardPorts() {
return transport.canForwardPorts();
}
public boolean addPortForward(PortForwardBean portForward) {
return transport.addPortForward(portForward);
}
public boolean enablePortForward(PortForwardBean portForward) {
return transport.enablePortForward(portForward);
}
public boolean disablePortForward(PortForwardBean portForward) {
return transport.disablePortForward(portForward);
}
public boolean removePortForward(PortForwardBean portForward) {
return transport.removePortForward(portForward);
}
public List<PortForwardBean> getPortForwards() {
return transport.getPortForwards();
}
public boolean isConnected() { return transport.isConnected(); }
public boolean isSessionOpen() { return transport.isSessionOpen(); }
public int getDefaultPort() { return transport.getDefaultPort(); }
public String getDefaultNickname(String username, String hostname, int port) {
return transport.getDefaultNickname(username, hostname, port);
}
public void getSelectionArgs(Uri uri, Map<String, String> selection) {
transport.getSelectionArgs(uri, selection);
}
public HostBean createHost(Uri uri) {
return transport.createHost(uri);
}
public boolean usesNetwork() { return transport.usesNetwork(); }
public void run() {
boolean closed = false;
while (!closed) {
try {
Object o = queue.take();
if (CMD_CLOSE.equals(o)) {
transport.close();
closed = true;
} else if (CMD_FLUSH.equals(o)) {
transport.flush();
} else if (o instanceof byte[]) {
transport.write((byte[]) o);
} else if (o instanceof Integer) {
transport.write((Integer) o);
}
} catch (IOException e) {
Log.e(TAG, "Unable to send deferred data", e);
try {
transport.flush();
} catch (IOException ioe) {
Log.d(TAG, "transport was closed, dispatching disconnect event");
bridge.dispatchDisconnect(false);
closed = true;
}
} catch (InterruptedException e) {
Log.e(TAG, "received an unexpected thread interrupt, exiting", e);
transport.close();
bridge.dispatchDisconnect(false);
closed = true;
}
}
}
}
| apache-2.0 |
backpaper0/doma2 | src/main/java/org/seasar/doma/internal/expr/node/ExpressionLocation.java | 1186 | /*
* Copyright 2004-2010 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.seasar.doma.internal.expr.node;
/**
* @author taedium
*
*/
public class ExpressionLocation {

    /** The expression text this location refers to. */
    protected final String expression;

    /** The character position within the expression. */
    protected final int position;

    /**
     * Creates a location that pairs an expression with a position inside it.
     *
     * @param expression the expression text
     * @param position the position within the expression
     */
    public ExpressionLocation(String expression, int position) {
        this.expression = expression;
        this.position = position;
    }

    /** Returns the expression text. */
    public String getExpression() {
        return expression;
    }

    /** Returns the position within the expression. */
    public int getPosition() {
        return position;
    }

    /** Renders the location as {@code expression:position}. */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder();
        buf.append(expression).append(':').append(position);
        return buf.toString();
    }
}
| apache-2.0 |
ctomc/jboss-jstl-api_spec | src/main/java/org/apache/taglibs/standard/tlv/JstlFmtTLV.java | 10088 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.taglibs.standard.tlv;
import java.util.Set;
import java.util.Stack;
import javax.servlet.jsp.tagext.PageData;
import javax.servlet.jsp.tagext.ValidationMessage;
import org.apache.taglibs.standard.resources.Resources;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
/**
* <p>A SAX-based TagLibraryValidator for the JSTL i18n-capable formatting
* library. Currently implements the following checks:</p>
* <ul>
* <li>Expression syntax validation.
* <li>Tag bodies that must either be empty or non-empty given
* particular attributes.</li>
* </ul>
*
* @author Shawn Bayern
* @author Jan Luehe
*/
public class JstlFmtTLV extends JstlBaseTLV {

    //*********************************************************************
    // Implementation Overview

    /*
     * We essentially just run the page through a SAX parser, handling
     * the callbacks that interest us. We collapse <jsp:text> elements
     * into the text they contain, since this simplifies processing
     * somewhat. Even a quick glance at the implementation shows its
     * necessary, tree-oriented nature: multiple Stacks, an understanding
     * of 'depth', and so on all are important as we recover necessary
     * state upon each callback. This TLV demonstrates various techniques,
     * from the general "how do I use a SAX parser for a TLV?" to
     * "how do I read my init parameters and then validate?" But also,
     * the specific SAX methodology was kept as general as possible to
     * allow for experimentation and flexibility.
     *
     * Much of the code and structure is duplicated from JstlCoreTLV.
     * An effort has been made to re-use code where unambiguously useful.
     * However, splitting logic among parent/child classes isn't
     * necessarily the cleanest approach when writing a parser like the
     * one we need. I'd like to reorganize this somewhat, but it's not
     * a priority.
     */

    //*********************************************************************
    // Constants

    // tag names
    private static final String SETLOCALE = "setLocale";
    private static final String SETBUNDLE = "setBundle";
    private static final String SETTIMEZONE = "setTimeZone";
    private static final String BUNDLE = "bundle";
    private static final String MESSAGE = "message";
    private static final String MESSAGE_PARAM = "param";
    private static final String FORMAT_NUMBER = "formatNumber";
    private static final String PARSE_NUMBER = "parseNumber";
    private static final String PARSE_DATE = "parseDate";
    private static final String JSP_TEXT = "jsp:text";

    // attribute names
    // NOTE(review): EVAL appears unused in this class; retained for
    // compatibility in case subclasses or reflection rely on it.
    private static final String EVAL = "evaluator";
    private static final String MESSAGE_KEY = "key";
    private static final String BUNDLE_PREFIX = "prefix";
    private static final String VALUE = "value";

    //*********************************************************************
    // set its type and delegate validation to super-class

    /**
     * Validates the page as a TYPE_FMT taglib by delegating to the base
     * class with our SAX handler.
     */
    @Override
    public ValidationMessage[] validate(
            String prefix, String uri, PageData page) {
        return super.validate(TYPE_FMT, prefix, uri, page);
    }

    //*********************************************************************
    // Contract fulfillment

    /** Supplies the SAX handler that performs the actual checks. */
    @Override
    protected DefaultHandler getHandler() {
        return new Handler();
    }

    //*********************************************************************
    // SAX event handler

    /**
     * The handler that provides the base of our implementation.
     */
    private class Handler extends DefaultHandler {

        // parser state
        private int depth = 0;
        // depths at which currently-open <fmt:message> elements started;
        // non-empty means we are nested (directly or not) inside a message
        private final Stack<Integer> messageDepths = new Stack<Integer>();
        private String lastElementName = null;
        private boolean bodyNecessary = false;
        private boolean bodyIllegal = false;

        // process under the existing context (state), then modify it
        @Override
        public void startElement(
                String ns, String ln, String qn, Attributes a) {

            // substitute our own parsed 'ln' if it's not provided
            if (ln == null) {
                ln = getLocalPart(qn);
            }

            // for simplicity, we can ignore <jsp:text> for our purposes
            // (don't bother distinguishing between it and its characters)
            if (qn.equals(JSP_TEXT)) {
                return;
            }

            // check body-related constraint
            if (bodyIllegal) {
                fail(Resources.getMessage("TLV_ILLEGAL_BODY",
                        lastElementName));
            }

            // validate expression syntax if we need to
            // (restructured from an assignment-in-condition and a raw Set;
            // behavior is unchanged: config is only consulted for our prefix)
            if (qn.startsWith(prefix + ":")) {
                Set<?> expAtts = (Set<?>) config.get(ln);
                if (expAtts != null) {
                    for (int i = 0; i < a.getLength(); i++) {
                        String attName = a.getLocalName(i);
                        if (expAtts.contains(attName)) {
                            String vMsg =
                                    validateExpression(
                                            ln,
                                            attName,
                                            a.getValue(i));
                            if (vMsg != null) {
                                fail(vMsg);
                            }
                        }
                    }
                }
            }

            // validate attributes
            if (qn.startsWith(prefix + ":") && !hasNoInvalidScope(a)) {
                fail(Resources.getMessage("TLV_INVALID_ATTRIBUTE",
                        SCOPE, qn, a.getValue(SCOPE)));
            }
            if (qn.startsWith(prefix + ":") && hasEmptyVar(a)) {
                fail(Resources.getMessage("TLV_EMPTY_VAR", qn));
            }
            if (qn.startsWith(prefix + ":")
                    && !isFmtTag(ns, ln, SETLOCALE)
                    && !isFmtTag(ns, ln, SETBUNDLE)
                    && !isFmtTag(ns, ln, SETTIMEZONE)
                    && hasDanglingScope(a)) {
                fail(Resources.getMessage("TLV_DANGLING_SCOPE", qn));
            }

            /*
             * Make sure <fmt:param> is nested inside <fmt:message>. Note that
             * <fmt:param> does not need to be a direct child of <fmt:message>.
             * Otherwise, the following would not work:
             *
             * <fmt:message key="..." bundle="...">
             * <c:forEach var="arg" items="...">
             * <fmt:param value="${arg}"/>
             * </c:forEach>
             * </fmt:message>
             */
            if (isFmtTag(ns, ln, MESSAGE_PARAM) && messageDepths.empty()) {
                fail(Resources.getMessage("PARAM_OUTSIDE_MESSAGE"));
            }

            // now, modify state

            // If we're in a <message>, record relevant state
            if (isFmtTag(ns, ln, MESSAGE)) {
                // valueOf replaces the deprecated new Integer(int) constructor
                messageDepths.push(Integer.valueOf(depth));
            }

            // set up a check against illegal attribute/body combinations
            bodyIllegal = false;
            bodyNecessary = false;
            if (isFmtTag(ns, ln, MESSAGE_PARAM)
                    || isFmtTag(ns, ln, FORMAT_NUMBER)
                    || isFmtTag(ns, ln, PARSE_NUMBER)
                    || isFmtTag(ns, ln, PARSE_DATE)) {
                // these tags take their input either from 'value' or from
                // the body, but never both
                if (hasAttribute(a, VALUE)) {
                    bodyIllegal = true;
                } else {
                    bodyNecessary = true;
                }
            } else if (isFmtTag(ns, ln, MESSAGE)
                    && !hasAttribute(a, MESSAGE_KEY)) {
                bodyNecessary = true;
            } else if (isFmtTag(ns, ln, BUNDLE)
                    && hasAttribute(a, BUNDLE_PREFIX)) {
                bodyNecessary = true;
            }

            // record the most recent tag (for error reporting)
            lastElementName = qn;
            lastElementId = a.getValue(JSP, "id");

            // we're a new element, so increase depth
            depth++;
        }

        @Override
        public void characters(char[] ch, int start, int length) {

            bodyNecessary = false; // body is no longer necessary!

            // ignore strings that are just whitespace
            String s = new String(ch, start, length).trim();
            if (s.equals("")) {
                return;
            }

            // check and update body-related constraints
            if (bodyIllegal) {
                fail(Resources.getMessage("TLV_ILLEGAL_BODY",
                        lastElementName));
            }
        }

        @Override
        public void endElement(String ns, String ln, String qn) {

            // consistently, we ignore JSP_TEXT
            if (qn.equals(JSP_TEXT)) {
                return;
            }

            // handle body-related invariant
            if (bodyNecessary) {
                fail(Resources.getMessage("TLV_MISSING_BODY",
                        lastElementName));
            }
            bodyIllegal = false; // reset: we've left the tag

            // update <message>-related state
            if (isFmtTag(ns, ln, MESSAGE)) {
                messageDepths.pop();
            }

            // update our depth
            depth--;
        }
    }
}
| apache-2.0 |
quattor/pan | panc/src/test/java/org/quattor/pan/dml/functions/FormatTest.java | 2950 | /*
Copyright (c) 2006 Charles A. Loomis, Jr, Cedric Duprilot, and
Centre National de la Recherche Scientifique (CNRS).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
$HeadURL: https://svn.lal.in2p3.fr/LCG/QWG/panc/trunk/src/org/quattor/pan/dml/functions/IndexTest.java $
$Id: IndexTest.java 1149 2007-01-20 13:50:39Z loomis $
*/
package org.quattor.pan.dml.functions;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.quattor.pan.dml.data.Element;
import org.quattor.pan.dml.data.LongProperty;
import org.quattor.pan.dml.data.StringProperty;
import org.quattor.pan.exceptions.EvaluationException;
import org.quattor.pan.exceptions.SyntaxException;
public class FormatTest extends BuiltInFunctionTestUtils {

    /** Verify the structural requirements common to all built-in functions. */
    @Test
    public void checkGetInstance() {
        checkClassRequirements(Format.class);
    }

    /**
     * Runs the format() built-in with the given conversion string and a
     * single long argument, asserts the result is a string property, and
     * returns its value.
     */
    private String formatLong(String conversion, long value)
            throws SyntaxException {
        Element result = runDml(Format.getInstance(null,
                StringProperty.getInstance(conversion),
                LongProperty.getInstance(value)));
        assertTrue(result instanceof StringProperty);
        return ((StringProperty) result).getValue();
    }

    /** 15 rendered in octal is "17". */
    @Test
    public void checkOctalConversion() throws SyntaxException {
        assertTrue("17".equals(formatLong("%o", 15L)));
    }

    /** 15 rendered in lower-case hex is "f". */
    @Test
    public void checkLowerHexConversion() throws SyntaxException {
        assertTrue("f".equals(formatLong("%x", 15L)));
    }

    /** 15 rendered in upper-case hex is "F". */
    @Test
    public void checkUpperHexConversion() throws SyntaxException {
        assertTrue("F".equals(formatLong("%X", 15L)));
    }

    /** A non-string format argument must fail at evaluation time. */
    @Test(expected = EvaluationException.class)
    public void illegalArguments1() throws SyntaxException {
        runDml(Format.getInstance(null, LongProperty.getInstance(1L)));
    }

    /** "%d" with no argument to consume must fail at evaluation time. */
    @Test(expected = EvaluationException.class)
    public void illegalArguments2() throws SyntaxException {
        runDml(Format.getInstance(null, StringProperty.getInstance("%d")));
    }

    /** "%d" with a string argument must fail at evaluation time. */
    @Test(expected = EvaluationException.class)
    public void illegalArguments3() throws SyntaxException {
        runDml(Format.getInstance(null, StringProperty.getInstance("%d"),
                StringProperty.getInstance("bad")));
    }

    /** format() requires at least one argument; zero is a syntax error. */
    @Test(expected = SyntaxException.class)
    public void tooFewArguments() throws SyntaxException {
        Format.getInstance(null);
    }
}
| apache-2.0 |
zhangzhaoyu/springdao | src/main/java/cn/bjfu/springdao/jpa/service/impl/ParkingSpaceService.java | 1349 | /**
* Project Name:springdao
* File Name:ParkingSpaceService.java
* Package Name:cn.bjfu.springdao.jpa.service.impl
* Date:2014年7月4日 上午9:43:37
* Copyright (c) 2014, zhangzhaoyu0524@163.com All Rights Reserved.
*
*/
package cn.bjfu.springdao.jpa.service.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import cn.bjfu.springdao.jpa.dao.IParkingSpaceDao;
import cn.bjfu.springdao.jpa.domain.execise.ParkingSpace;
import cn.bjfu.springdao.jpa.service.IParkingSpaceService;
/**
* ClassName:ParkingSpaceService <br/>
* Function: TODO ADD FUNCTION. <br/>
* Reason: TODO ADD REASON. <br/>
* Date: 2014年7月4日 上午9:43:37 <br/>
* @author zhangzhaoyu
* @version
* @since JDK 1.7
* @see
*/
@Service
// Class-level default: all methods run in read-only transactions unless a
// method overrides it (as saveParkingSpace does below).
@Transactional(readOnly = true)
public class ParkingSpaceService implements IParkingSpaceService {

    // DAO injected by Spring; all persistence work is delegated to it.
    @Autowired
    private IParkingSpaceDao parkingSpaceDao;

    /**
     * Persists the given parking space. The method-level @Transactional
     * (read-write) overrides the class-level read-only default.
     */
    @Override
    @Transactional
    public void saveParkingSpace(ParkingSpace parkingSpace) {
        this.parkingSpaceDao.saveParkingSpace(parkingSpace);
    }

    /**
     * Looks up a parking space by its identifier; runs under the class-level
     * read-only transaction.
     */
    @Override
    public ParkingSpace findById(int id) {
        return this.parkingSpaceDao.findById(id);
    }
}
| apache-2.0 |
meetdestiny/geronimo-trader | modules/interop/src/java/org/apache/geronimo/interop/generator/JLocalVariable.java | 822 | /**
*
* Copyright 2004-2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.interop.generator;
public class JLocalVariable extends JVariable {
    /**
     * Creates a local variable of the given type and name. The constructor
     * is protected, so instances are presumably created via a factory in the
     * generator framework rather than directly — TODO confirm against
     * callers outside this file.
     */
    protected JLocalVariable(Class type, String name) {
        super(type, name);
    }
}
| apache-2.0 |
OuZhencong/log4j2 | log4j-core/src/test/java/org/apache/logging/log4j/core/LoggerUpdateTest.java | 3983 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.List;
import java.util.Map;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.ConfigurationFactory;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.status.StatusLogger;
import org.apache.logging.log4j.test.appender.ListAppender;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
*
*/
public class LoggerUpdateTest {

    // Configuration file forced on the LoggerContext via system property.
    private static final String CONFIG = "log4j-test2.xml";
    private static Configuration config;
    // Appenders looked up from the active configuration in before().
    private static ListAppender app;
    private static ListAppender host;
    // Looked up but not asserted on or used by the visible test.
    private static ListAppender noThrown;
    private static LoggerContext ctx;

    // Must set the system property BEFORE obtaining the context so the
    // context initializes from CONFIG.
    @BeforeClass
    public static void setupClass() {
        System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, CONFIG);
        ctx = (LoggerContext) LogManager.getContext(false);
    }

    // Clear the property first, then reconfigure so the context reverts to
    // its default configuration for subsequent test classes.
    @AfterClass
    public static void cleanupClass() {
        System.clearProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY);
        ctx.reconfigure();
        StatusLogger.getLogger().reset();
    }

    // Re-resolve the appenders before every test (the configuration may have
    // been rebuilt) and clear any events captured by a previous test.
    @Before
    public void before() {
        config = ctx.getConfiguration();
        for (final Map.Entry<String, Appender> entry : config.getAppenders().entrySet()) {
            if (entry.getKey().equals("List")) {
                app = (ListAppender) entry.getValue();
            } else if (entry.getKey().equals("HostTest")) {
                host = (ListAppender) entry.getValue();
            } else if (entry.getKey().equals("NoThrowable")) {
                noThrown = (ListAppender) entry.getValue();
            }
        }
        assertNotNull("No Appender", app);
        assertNotNull("No Host Appender", host);
        app.clear();
        host.clear();
    }

    org.apache.logging.log4j.Logger logger = LogManager.getLogger("com.apache.test");

    /**
     * Verifies that raising the root LoggerConfig level from TRACE-visible to
     * DEBUG suppresses entry() (which logs at TRACE): one event before the
     * change, zero after.
     */
    @Test
    public void resetLevel() {
        logger.entry();
        List<LogEvent> events = app.getEvents();
        assertTrue("Incorrect number of events. Expected 1, actual " + events.size(), events.size() == 1);
        app.clear();
        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();
        final LoggerConfig loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
        /* You could also specify the actual logger name as below and it will return the LoggerConfig used by the Logger.
        LoggerConfig loggerConfig = getLoggerConfig("com.apache.test");
        */
        loggerConfig.setLevel(Level.DEBUG);
        ctx.updateLoggers(); // This causes all Loggers to refetch information from their LoggerConfig.
        logger.entry();
        events = app.getEvents();
        assertTrue("Incorrect number of events. Expected 0, actual " + events.size(), events.size() == 0);
        app.clear();
    }
}
| apache-2.0 |
SergeyMakarenko/fbthrift | thrift/compiler/test/fixtures/optionals/gen-android/ModuleLogger.java | 7535 | /**
* Autogenerated by Thrift
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.facebook.thrift.lite.*;
import com.facebook.thrift.lite.protocol.*;
import com.facebook.thrift.lite.annotations.*;
// NOTE(review): generated Thrift code (see file header) — documentation only,
// no functional edits.
public class ModuleLogger {
    // Which struct (Color / Vehicle / Person) this logger serializes.
    public final Module.EventType mEventType;
    // Property values to serialize; only keys matching mEventType's struct
    // are consulted in write().
    private final Map<ThriftProperty<?>, Object> mMap = new HashMap<ThriftProperty<?>, Object>();

    public ModuleLogger(Module.EventType type) {
        mEventType = type;
    }

    // Fluent setter: stores a value for a field and returns this for chaining.
    public <T> ModuleLogger addProperty(ThriftProperty<T> property, T value) {
        mMap.put(property, value);
        return this;
    }

    // Writes the TField header (name, wire type, id) for a property.
    public static <T> void writeFieldBegin(TBinaryProtocol oprot, ThriftProperty<T> field) throws IOException {
        TField tField = new TField(field.key, field.type, field.id);
        oprot.writeFieldBegin(tField);
    }

    /**
     * Serializes the collected properties as the struct selected by
     * mEventType using the Thrift binary protocol. Fields that are absent
     * from the map (or mapped to null) are skipped entirely.
     */
    public void write(TBinaryProtocol oprot) throws IOException {
        switch (mEventType) {
            // Color: four optional double fields.
            case Color: {
                oprot.writeStructBegin(new TStruct("Color"));
                if (mMap.containsKey(Module.Color_red) && mMap.get(Module.Color_red) != null) {
                    writeFieldBegin(oprot, Module.Color_red);
                    oprot.writeDouble((double) mMap.get(Module.Color_red));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Color_green) && mMap.get(Module.Color_green) != null) {
                    writeFieldBegin(oprot, Module.Color_green);
                    oprot.writeDouble((double) mMap.get(Module.Color_green));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Color_blue) && mMap.get(Module.Color_blue) != null) {
                    writeFieldBegin(oprot, Module.Color_blue);
                    oprot.writeDouble((double) mMap.get(Module.Color_blue));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Color_alpha) && mMap.get(Module.Color_alpha) != null) {
                    writeFieldBegin(oprot, Module.Color_alpha);
                    oprot.writeDouble((double) mMap.get(Module.Color_alpha));
                    oprot.writeFieldEnd();
                }
                oprot.writeFieldStop();
                oprot.writeStructEnd();
                break;
            }
            // Vehicle: nested Color struct plus scalar fields.
            case Vehicle: {
                oprot.writeStructBegin(new TStruct("Vehicle"));
                if (mMap.containsKey(Module.Vehicle_color) && mMap.get(Module.Vehicle_color) != null) {
                    writeFieldBegin(oprot, Module.Vehicle_color);
                    // nested struct: delegated to the contained ModuleLogger
                    ((ModuleLogger) mMap.get(Module.Vehicle_color)).write(oprot);
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Vehicle_licensePlate) && mMap.get(Module.Vehicle_licensePlate) != null) {
                    writeFieldBegin(oprot, Module.Vehicle_licensePlate);
                    oprot.writeString((String) mMap.get(Module.Vehicle_licensePlate));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Vehicle_description) && mMap.get(Module.Vehicle_description) != null) {
                    writeFieldBegin(oprot, Module.Vehicle_description);
                    oprot.writeString((String) mMap.get(Module.Vehicle_description));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Vehicle_name) && mMap.get(Module.Vehicle_name) != null) {
                    writeFieldBegin(oprot, Module.Vehicle_name);
                    oprot.writeString((String) mMap.get(Module.Vehicle_name));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Vehicle_hasAC) && mMap.get(Module.Vehicle_hasAC) != null) {
                    writeFieldBegin(oprot, Module.Vehicle_hasAC);
                    oprot.writeBool((boolean) mMap.get(Module.Vehicle_hasAC));
                    oprot.writeFieldEnd();
                }
                oprot.writeFieldStop();
                oprot.writeStructEnd();
                break;
            }
            // Person: scalars plus set/map/list container fields.
            case Person: {
                oprot.writeStructBegin(new TStruct("Person"));
                if (mMap.containsKey(Module.Person_id) && mMap.get(Module.Person_id) != null) {
                    writeFieldBegin(oprot, Module.Person_id);
                    oprot.writeI64((long) mMap.get(Module.Person_id));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_name) && mMap.get(Module.Person_name) != null) {
                    writeFieldBegin(oprot, Module.Person_name);
                    oprot.writeString((String) mMap.get(Module.Person_name));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_age) && mMap.get(Module.Person_age) != null) {
                    writeFieldBegin(oprot, Module.Person_age);
                    oprot.writeI16((short) mMap.get(Module.Person_age));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_address) && mMap.get(Module.Person_address) != null) {
                    writeFieldBegin(oprot, Module.Person_address);
                    oprot.writeString((String) mMap.get(Module.Person_address));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_favoriteColor) && mMap.get(Module.Person_favoriteColor) != null) {
                    writeFieldBegin(oprot, Module.Person_favoriteColor);
                    ((ModuleLogger) mMap.get(Module.Person_favoriteColor)).write(oprot);
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_friends) && mMap.get(Module.Person_friends) != null) {
                    writeFieldBegin(oprot, Module.Person_friends);
                    // set<i64>: header carries element type and size
                    Set<Long> var0 = (Set<Long>) mMap.get(Module.Person_friends);
                    oprot.writeSetBegin(new TSet(TType.I64, var0.size()));
                    for(long iter0 : var0) {
                        oprot.writeI64(iter0);
                    }
                    oprot.writeSetEnd();
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_bestFriend) && mMap.get(Module.Person_bestFriend) != null) {
                    writeFieldBegin(oprot, Module.Person_bestFriend);
                    oprot.writeI64((long) mMap.get(Module.Person_bestFriend));
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_petNames) && mMap.get(Module.Person_petNames) != null) {
                    writeFieldBegin(oprot, Module.Person_petNames);
                    // map<enum,string>: enum keys go on the wire as i32 values
                    Map<ModuleEnum,String> var0 = (Map<ModuleEnum,String>) mMap.get(Module.Person_petNames);
                    oprot.writeMapBegin(new TMap(TType.I32, TType.STRING, var0.size()));
                    for (Map.Entry<ModuleEnum, String> iter0 : var0.entrySet()) {
                        oprot.writeI32(iter0.getKey().getValue());
                        oprot.writeString(iter0.getValue());
                    }
                    oprot.writeMapEnd();
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_afraidOfAnimal) && mMap.get(Module.Person_afraidOfAnimal) != null) {
                    writeFieldBegin(oprot, Module.Person_afraidOfAnimal);
                    oprot.writeI32(((ModuleEnum) mMap.get(Module.Person_afraidOfAnimal)).getValue());
                    oprot.writeFieldEnd();
                }
                if (mMap.containsKey(Module.Person_vehicles) && mMap.get(Module.Person_vehicles) != null) {
                    writeFieldBegin(oprot, Module.Person_vehicles);
                    // list<Vehicle>: each element serialized via its own logger
                    List<ModuleLogger> var0 = (List<ModuleLogger>) mMap.get(Module.Person_vehicles);
                    oprot.writeListBegin(new TList(TType.STRUCT, var0.size()));
                    for (ModuleLogger iter0 : var0) {
                        iter0.write(oprot);
                    }
                    oprot.writeListEnd();
                    oprot.writeFieldEnd();
                }
                oprot.writeFieldStop();
                oprot.writeStructEnd();
                break;
            }
        }
    }
}
| apache-2.0 |
shutkou/ios-driver | server/src/main/java/org/uiautomation/ios/server/command/uiautomation/IsEnabledNHandler.java | 1684 | /*
* Copyright 2012-2013 eBay Software Foundation and ios-driver committers
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.uiautomation.ios.server.command.uiautomation;
import org.json.JSONException;
import org.json.JSONObject;
import org.uiautomation.ios.communication.WebDriverLikeRequest;
import org.uiautomation.ios.server.IOSServerManager;
import org.uiautomation.ios.server.command.UIAScriptHandler;
import org.uiautomation.ios.server.utils.JSTemplate;
public class IsEnabledNHandler extends UIAScriptHandler {

    // UIAutomation JS snippet executed on the device: looks up the cached
    // element by reference, asks it whether it is enabled, and wraps the
    // boolean in a JSON response for the session. The %:name$s placeholders
    // are filled by template.generate() below.
    private static final JSTemplate template = new JSTemplate(
        "var element = UIAutomation.cache.get(%:reference$s, false);" +
        "var result = element.isEnabled();" +
        "UIAutomation.createJSONResponse('%:sessionId$s',0,result)",
        "sessionId", "reference");

    /**
     * Builds the handler for an is-enabled request: instantiates the JS from
     * the session id and the :reference path variable of the request, and
     * hands it to the superclass for execution.
     */
    public IsEnabledNHandler(IOSServerManager driver, WebDriverLikeRequest request) {
        super(driver, request);
        String js = template.generate(
            request.getSession(),
            request.getVariableValue(":reference"));
        setJS(js);
    }

    /** This handler takes no configuration. */
    @Override
    public JSONObject configurationDescription() throws JSONException {
        return super.noConfigDefined();
    }
}
gfyoung/elasticsearch | server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java | 7970 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.settings;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class InternalOrPrivateSettingsPlugin extends Plugin implements ActionPlugin {
    // Index-scoped test settings: one marked InternalIndex, one PrivateIndex,
    // used to exercise how the two property classes are handled.
    static final Setting<String> INDEX_INTERNAL_SETTING =
            Setting.simpleString("index.internal", Setting.Property.IndexScope, Setting.Property.InternalIndex);

    static final Setting<String> INDEX_PRIVATE_SETTING =
            Setting.simpleString("index.private", Setting.Property.IndexScope, Setting.Property.PrivateIndex);

    /** Registers the two test settings with the node. */
    @Override
    public List<Setting<?>> getSettings() {
        return Arrays.asList(INDEX_INTERNAL_SETTING, INDEX_PRIVATE_SETTING);
    }
    /**
     * Action that updates a single internal-or-private index setting
     * (identified by key/value) on a named index via the master node.
     */
    public static class UpdateInternalOrPrivateAction extends Action<UpdateInternalOrPrivateAction.Response> {

        public static final UpdateInternalOrPrivateAction INSTANCE = new UpdateInternalOrPrivateAction();
        private static final String NAME = "indices:admin/settings/update-internal-or-private-index";

        public UpdateInternalOrPrivateAction() {
            super(NAME);
        }

        /** Request carrying the target index plus the setting key and value. */
        public static class Request extends MasterNodeRequest<Request> {

            private String index;
            private String key;
            private String value;

            // No-arg constructor for deserialization via readFrom.
            Request() {
            }

            public Request(final String index, final String key, final String value) {
                this.index = index;
                this.key = key;
                this.value = value;
            }

            /** No request-level validation; any index/key/value is accepted. */
            @Override
            public ActionRequestValidationException validate() {
                return null;
            }

            // Wire format: index, key, value — readFrom and writeTo must stay
            // in the same order.
            @Override
            public void readFrom(final StreamInput in) throws IOException {
                super.readFrom(in);
                index = in.readString();
                key = in.readString();
                value = in.readString();
            }

            @Override
            public void writeTo(final StreamOutput out) throws IOException {
                super.writeTo(out);
                out.writeString(index);
                out.writeString(key);
                out.writeString(value);
            }
        }

        /** Empty response: the action signals only success or failure. */
        static class Response extends ActionResponse {
        }

        @Override
        public UpdateInternalOrPrivateAction.Response newResponse() {
            return new UpdateInternalOrPrivateAction.Response();
        }
    }
public static class TransportUpdateInternalOrPrivateAction
extends TransportMasterNodeAction<UpdateInternalOrPrivateAction.Request, UpdateInternalOrPrivateAction.Response> {
@Inject
public TransportUpdateInternalOrPrivateAction(
final Settings settings,
final TransportService transportService,
final ClusterService clusterService,
final ThreadPool threadPool,
final ActionFilters actionFilters,
final IndexNameExpressionResolver indexNameExpressionResolver) {
super(
settings,
UpdateInternalOrPrivateAction.NAME,
transportService,
clusterService,
threadPool,
actionFilters,
indexNameExpressionResolver,
UpdateInternalOrPrivateAction.Request::new);
}
@Override
protected String executor() {
return ThreadPool.Names.SAME;
}
@Override
protected UpdateInternalOrPrivateAction.Response newResponse() {
return new UpdateInternalOrPrivateAction.Response();
}
@Override
protected void masterOperation(
final UpdateInternalOrPrivateAction.Request request,
final ClusterState state,
final ActionListener<UpdateInternalOrPrivateAction.Response> listener) throws Exception {
clusterService.submitStateUpdateTask("update-index-internal-or-private", new ClusterStateUpdateTask() {
@Override
public ClusterState execute(final ClusterState currentState) throws Exception {
final MetaData.Builder builder = MetaData.builder(currentState.metaData());
final IndexMetaData.Builder imdBuilder = IndexMetaData.builder(currentState.metaData().index(request.index));
final Settings.Builder settingsBuilder =
Settings.builder()
.put(currentState.metaData().index(request.index).getSettings())
.put(request.key, request.value);
imdBuilder.settings(settingsBuilder);
builder.put(imdBuilder.build(), true);
return ClusterState.builder(currentState).metaData(builder).build();
}
@Override
public void clusterStateProcessed(final String source, final ClusterState oldState, final ClusterState newState) {
listener.onResponse(new UpdateInternalOrPrivateAction.Response());
}
@Override
public void onFailure(final String source, final Exception e) {
listener.onFailure(e);
}
});
}
@Override
protected ClusterBlockException checkBlock(final UpdateInternalOrPrivateAction.Request request, final ClusterState state) {
return null;
}
}
@Override
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
return Collections.singletonList(
new ActionHandler<>(UpdateInternalOrPrivateAction.INSTANCE, TransportUpdateInternalOrPrivateAction.class));
}
}
| apache-2.0 |
marcjansen/shogun2 | src/shogun2-core/src/main/java/de/terrestris/shogun2/model/module/Print.java | 2188 | /**
*
*/
package de.terrestris.shogun2.model.module;
import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.Table;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
/**
 * A module which contains a form to print the map with mapfish-print v3.
 * <p>
 * The only state owned by this subclass is the mapfish-print endpoint
 * {@link #url}; everything else is inherited from {@link Module}.
 *
 * @author Kai Volland
 */
@Entity
@Table
@Cacheable
@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class Print extends Module {

    private static final long serialVersionUID = 1L;

    /**
     * The mapfish-print URL.
     */
    private String url;

    /**
     * Explicitly adding the default constructor as this is important, e.g. for
     * Hibernate: http://goo.gl/3Cr1pw
     */
    public Print() {
    }

    /**
     * @return the mapfish-print URL
     */
    public String getUrl() {
        return url;
    }

    /**
     * @param url the mapfish-print URL to set
     */
    public void setUrl(String url) {
        this.url = url;
    }

    /**
     * @see java.lang.Object#hashCode()
     * <p>
     * According to
     * http://stackoverflow.com/questions/27581/overriding-equals
     * -and-hashcode-in-java it is recommended only to use getter-methods
     * when using ORM like Hibernate
     */
    @Override
    public int hashCode() {
        // two randomly chosen prime numbers
        return new HashCodeBuilder(23, 3).
            appendSuper(super.hashCode()).
            append(getUrl()).
            toHashCode();
    }

    /**
     * @see java.lang.Object#equals(java.lang.Object)
     * <p>
     * According to
     * http://stackoverflow.com/questions/27581/overriding-equals
     * -and-hashcode-in-java it is recommended only to use getter-methods
     * when using ORM like Hibernate
     */
    @Override
    public boolean equals(Object obj) {
        // cheap identity shortcut before the field-by-field comparison
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Print)) {
            return false;
        }
        Print other = (Print) obj;
        return new EqualsBuilder().
            appendSuper(super.equals(other)).
            append(getUrl(), other.getUrl()).
            isEquals();
    }
}
| apache-2.0 |
GovernmentCommunicationsHeadquarters/Gaffer | library/hdfs-library/src/main/java/uk/gov/gchq/gaffer/hdfs/operation/package-info.java | 730 | /*
* Copyright 2017-2020 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Classes relating to operations for interacting with data held in HDFS.
*/
package uk.gov.gchq.gaffer.hdfs.operation;
| apache-2.0 |
cppwfs/spring-cloud-task | spring-cloud-task-core/src/test/java/org/springframework/cloud/task/repository/H2TaskRepositoryIntegrationTests.java | 2121 | /*
* Copyright 2022-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.task.repository;
import java.util.UUID;
import javax.sql.DataSource;
import org.h2.engine.Mode.ModeEnum;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.cloud.task.configuration.SimpleTaskAutoConfiguration;
import org.springframework.jdbc.datasource.SimpleDriverDataSource;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Henning Pöttker
*/
class H2TaskRepositoryIntegrationTests {

	/**
	 * Boots a minimal task context against an in-memory H2 database for every
	 * H2 compatibility mode and verifies that exactly one task execution has
	 * been recorded by the auto-configured repository.
	 */
	@ParameterizedTest
	@EnumSource(ModeEnum.class)
	void testTaskRepository(ModeEnum mode) {
		String url = String.format("jdbc:h2:mem:%s;DB_CLOSE_DELAY=-1;MODE=%s", UUID.randomUUID(), mode);
		new ApplicationContextRunner()
				.withUserConfiguration(TestConfiguration.class)
				// fresh, uniquely-named database per parameterized invocation
				.withBean(DataSource.class,
						() -> new SimpleDriverDataSource(new org.h2.Driver(), url, "sa", ""))
				.run(context -> {
					TaskExplorer explorer = context.getBean(TaskExplorer.class);
					assertThat(explorer.getTaskExecutionCount()).isOne();
				});
	}

	@EnableTask
	@ImportAutoConfiguration(SimpleTaskAutoConfiguration.class)
	static class TestConfiguration {
	}
}
| apache-2.0 |
ZhaoKaiQiang/ExceptionDemo | ExceptionDemo/src/com/example/exceptiondemo/AppException.java | 2490 | package com.example.exceptiondemo;
import java.lang.Thread.UncaughtExceptionHandler;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.os.Looper;
import android.widget.Toast;
/**
*
* @ClassName: com.example.exceptiondemo.AppException
* @Description: 应用程序异常类:用于捕获异常
* @author zhaokaiqiang
* @date 2014-11-2 下午10:06:49
*
*/
/**
 * Application-level exception handler: installs itself as the process-wide
 * {@link UncaughtExceptionHandler}, shows a "the app is about to crash"
 * dialog on the current activity, and otherwise delegates to the previously
 * registered default handler.
 */
public class AppException extends Exception implements UncaughtExceptionHandler {

    private static final long serialVersionUID = -6262909398048670705L;

    // Optional override of the message carried by this exception; when unset,
    // getMessage() falls back to the message passed to the superclass.
    private String message;

    // Handler that was installed before this one; used as a fallback when
    // handleException() declines to handle a throwable.
    private final Thread.UncaughtExceptionHandler mDefaultHandler;

    private AppException() {
        super();
        this.mDefaultHandler = Thread.getDefaultUncaughtExceptionHandler();
    }

    public AppException(String message, Exception excp) {
        super(message, excp);
        this.mDefaultHandler = Thread.getDefaultUncaughtExceptionHandler();
    }

    /**
     * Returns the overridden message if one was set via {@link #setMessage},
     * otherwise the message supplied at construction time.
     * <p>
     * BUGFIX: previously this always returned the (never-initialized) field,
     * so the constructor-supplied message was silently lost.
     */
    @Override
    public String getMessage() {
        return message != null ? message : super.getMessage();
    }

    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * Creates the handler instance meant to be registered via
     * {@code Thread.setDefaultUncaughtExceptionHandler}.
     *
     * @return a new {@code AppException} wrapping the current default handler
     */
    public static AppException getAppExceptionHandler() {
        return new AppException();
    }

    @Override
    public void uncaughtException(Thread thread, Throwable ex) {
        // Fall back to the original handler when we cannot (or choose not to)
        // handle the throwable ourselves.
        if (!handleException(ex) && mDefaultHandler != null) {
            mDefaultHandler.uncaughtException(thread, ex);
        }
    }

    /**
     * Custom exception handling.
     *
     * @param ex the throwable delivered to {@link #uncaughtException}
     * @return true if this handler consumed the throwable; false to delegate
     *         to the previous default handler
     */
    private boolean handleException(Throwable ex) {
        if (ex == null) {
            return false;
        }
        final Activity activity = AppManager.getAppManager().currentActivity();
        if (activity == null) {
            return false;
        }
        // UI work must happen on a thread with a Looper; spin one up since
        // uncaughtException may be invoked on an arbitrary crashed thread.
        new Thread() {
            @Override
            public void run() {
                Looper.prepare();
                Toast.makeText(activity, "程序要崩了", Toast.LENGTH_SHORT).show();
                new AlertDialog.Builder(activity).setTitle("提示")
                        .setCancelable(false).setMessage("亲,程序马上崩溃了...")
                        .setNeutralButton("没关系", new OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog,
                                    int which) {
                                AppManager.getAppManager().exitApp(activity);
                            }
                        }).create().show();
                Looper.loop();
            }
        }.start();
        return true;
    }
}
| apache-2.0 |
mgsx-dev/box2d-editor | demo-core/src/net/mgsx/game/examples/td/components/Tower.java | 543 | package net.mgsx.game.examples.td.components;
import com.badlogic.ashley.core.Component;
import com.badlogic.ashley.core.ComponentMapper;
import net.mgsx.game.core.annotations.EditableComponent;
import net.mgsx.game.core.annotations.Storable;
/**
 * Tower is just the basement of a player weapon; it can be mobile or fixed.
 * <p>
 * Marker component with no per-entity state of its own — its presence on an
 * entity is what tags it as a tower.
 *
 * @author mgsx
 */
@Storable("td.tower")
@EditableComponent(autoClone=true)
public class Tower implements Component
{
	// Shared Ashley mapper for fast, allocation-free component retrieval.
	public final static ComponentMapper<Tower> components = ComponentMapper.getFor(Tower.class);
}
| apache-2.0 |
gustavoanatoly/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java | 17353 | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
* This tests the TableInputFormat and its recovery semantics
*
*/
@Category(LargeTests.class)
@Category(LargeTests.class)
public class TestTableInputFormat {
  private static final Log LOG = LogFactory.getLog(TestTableInputFormat.class);
  private final static HBaseTestingUtility UTIL = new HBaseTestingUtility();
  // NOTE(review): mrCluster and columns appear unused within this class —
  // candidates for removal; confirm no reflective/subclass usage first.
  private static MiniMRCluster mrCluster;
  static final byte[] FAMILY = Bytes.toBytes("family");
  private static final byte[][] columns = new byte[][] { FAMILY };
  @BeforeClass
  public static void beforeClass() throws Exception {
    UTIL.startMiniCluster();
  }
  @AfterClass
  public static void afterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }
  @Before
  public void before() throws IOException {
    LOG.info("before");
    UTIL.ensureSomeRegionServersAvailable(1);
    LOG.info("before done");
  }
  /**
   * Setup a table with two rows ("aaa" and "bbb") and values in the default
   * column family.
   *
   * @param tableName name of the table to create
   * @return the created table, populated with two rows
   * @throws IOException on cluster/table failure
   */
  public static Table createTable(byte[] tableName) throws IOException {
    return createTable(tableName, new byte[][] { FAMILY });
  }
  /**
   * Setup a table with two rows ("aaa" and "bbb") and one value per given
   * column family.
   *
   * @param tableName name of the table to create
   * @param families column families to create and populate
   * @return the created table, populated with two rows
   * @throws IOException on cluster/table failure
   */
  public static Table createTable(byte[] tableName, byte[][] families) throws IOException {
    Table table = UTIL.createTable(TableName.valueOf(tableName), families);
    Put p = new Put("aaa".getBytes());
    for (byte[] family : families) {
      p.addColumn(family, null, "value aaa".getBytes());
    }
    table.put(p);
    p = new Put("bbb".getBytes());
    for (byte[] family : families) {
      p.addColumn(family, null, "value bbb".getBytes());
    }
    table.put(p);
    return table;
  }
  /**
   * Verify that the result and key have expected values: asserts the row key
   * matches and the first value of FAMILY equals the expected value.
   *
   * @param r scanned result to check
   * @param key row key produced by the record reader
   * @param expectedKey expected row key bytes
   * @param expectedValue expected cell value bytes
   * @return true if the checks succeed (assertions fire otherwise)
   */
  static boolean checkResult(Result r, ImmutableBytesWritable key,
      byte[] expectedKey, byte[] expectedValue) {
    assertEquals(0, key.compareTo(expectedKey));
    Map<byte[], byte[]> vals = r.getFamilyMap(FAMILY);
    byte[] value = vals.values().iterator().next();
    assertTrue(Arrays.equals(value, expectedValue));
    return true; // if succeed
  }
  /**
   * Create table data and run tests on specified htable using the
   * o.a.h.hbase.mapreduce API: drives a TableRecordReaderImpl over the
   * ["aaa","zzz") range and asserts it yields exactly the two seeded rows.
   *
   * @param table table previously populated by createTable
   * @throws IOException
   * @throws InterruptedException
   */
  static void runTestMapreduce(Table table) throws IOException,
      InterruptedException {
    org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl trr =
        new org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl();
    Scan s = new Scan();
    s.setStartRow("aaa".getBytes());
    s.setStopRow("zzz".getBytes());
    s.addFamily(FAMILY);
    trr.setScan(s);
    trr.setHTable(table);
    trr.initialize(null, null);
    Result r = new Result();
    ImmutableBytesWritable key = new ImmutableBytesWritable();
    boolean more = trr.nextKeyValue();
    assertTrue(more);
    key = trr.getCurrentKey();
    r = trr.getCurrentValue();
    checkResult(r, key, "aaa".getBytes(), "value aaa".getBytes());
    more = trr.nextKeyValue();
    assertTrue(more);
    key = trr.getCurrentKey();
    r = trr.getCurrentValue();
    checkResult(r, key, "bbb".getBytes(), "value bbb".getBytes());
    // no more data
    more = trr.nextKeyValue();
    assertFalse(more);
  }
  /**
   * Create a table that IOE's on first scanner next call. The spy's
   * getScanner() returns a broken mock scanner for the first {@code failCnt}
   * invocations, then delegates to the real table — exercising the record
   * reader's scanner-restart/recovery path.
   *
   * @throws IOException
   */
  static Table createIOEScannerTable(byte[] name, final int failCnt)
      throws IOException {
    // build up a mock scanner stuff to fail the first time
    Answer<ResultScanner> a = new Answer<ResultScanner>() {
      int cnt = 0;
      @Override
      public ResultScanner answer(InvocationOnMock invocation) throws Throwable {
        // first invocation return the busted mock scanner
        if (cnt++ < failCnt) {
          // create mock ResultScanner that always fails.
          Scan scan = mock(Scan.class);
          doReturn("bogus".getBytes()).when(scan).getStartRow(); // avoid npe
          ResultScanner scanner = mock(ResultScanner.class);
          // simulate TimeoutException / IOException
          doThrow(new IOException("Injected exception")).when(scanner).next();
          return scanner;
        }
        // otherwise return the real scanner.
        return (ResultScanner) invocation.callRealMethod();
      }
    };
    Table htable = spy(createTable(name));
    doAnswer(a).when(htable).getScanner((Scan) anyObject());
    return htable;
  }
  /**
   * Create a table that throws a NotServingRegionException on first scanner
   * next call. Same spy pattern as createIOEScannerTable, but note the mock
   * branch also invokes the real getScanner (callRealMethod) before returning
   * the failing scanner.
   *
   * @throws IOException
   */
  static Table createDNRIOEScannerTable(byte[] name, final int failCnt)
      throws IOException {
    // build up a mock scanner stuff to fail the first time
    Answer<ResultScanner> a = new Answer<ResultScanner>() {
      int cnt = 0;
      @Override
      public ResultScanner answer(InvocationOnMock invocation) throws Throwable {
        // first invocation return the busted mock scanner
        if (cnt++ < failCnt) {
          // create mock ResultScanner that always fails.
          Scan scan = mock(Scan.class);
          doReturn("bogus".getBytes()).when(scan).getStartRow(); // avoid npe
          ResultScanner scanner = mock(ResultScanner.class);
          invocation.callRealMethod(); // simulate NotServingRegionException
          doThrow(
              new NotServingRegionException("Injected simulated TimeoutException"))
              .when(scanner).next();
          return scanner;
        }
        // otherwise return the real scanner.
        return (ResultScanner) invocation.callRealMethod();
      }
    };
    Table htable = spy(createTable(name));
    doAnswer(a).when(htable).getScanner((Scan) anyObject());
    return htable;
  }
  /**
   * Run test assuming no errors using newer mapreduce api
   *
   * @throws IOException
   * @throws InterruptedException
   */
  @Test
  public void testTableRecordReaderMapreduce() throws IOException,
      InterruptedException {
    Table table = createTable("table1-mr".getBytes());
    runTestMapreduce(table);
  }
  /**
   * Run test assuming Scanner IOException failure using newer mapreduce api:
   * one injected failure should be recovered from transparently.
   *
   * @throws IOException
   * @throws InterruptedException
   */
  @Test
  public void testTableRecordReaderScannerFailMapreduce() throws IOException,
      InterruptedException {
    Table htable = createIOEScannerTable("table2-mr".getBytes(), 1);
    runTestMapreduce(htable);
  }
  /**
   * Run test assuming Scanner IOException failure using newer mapreduce api:
   * two consecutive failures should surface as an IOException.
   *
   * @throws IOException
   * @throws InterruptedException
   */
  @Test(expected = IOException.class)
  public void testTableRecordReaderScannerFailMapreduceTwice() throws IOException,
      InterruptedException {
    Table htable = createIOEScannerTable("table3-mr".getBytes(), 2);
    runTestMapreduce(htable);
  }
  /**
   * Run test assuming NotServingRegionException using newer mapreduce api:
   * one injected failure should be recovered from transparently.
   *
   * @throws InterruptedException
   * @throws org.apache.hadoop.hbase.DoNotRetryIOException
   */
  @Test
  public void testTableRecordReaderScannerTimeoutMapreduce()
      throws IOException, InterruptedException {
    Table htable = createDNRIOEScannerTable("table4-mr".getBytes(), 1);
    runTestMapreduce(htable);
  }
  /**
   * Run test assuming NotServingRegionException using newer mapreduce api:
   * two consecutive failures should propagate the NotServingRegionException.
   *
   * @throws InterruptedException
   * @throws org.apache.hadoop.hbase.NotServingRegionException
   */
  @Test(expected = org.apache.hadoop.hbase.NotServingRegionException.class)
  public void testTableRecordReaderScannerTimeoutMapreduceTwice()
      throws IOException, InterruptedException {
    Table htable = createDNRIOEScannerTable("table5-mr".getBytes(), 2);
    runTestMapreduce(htable);
  }
  /**
   * Verify the example we present in javadocs on TableInputFormatBase
   */
  @Test
  public void testExtensionOfTableInputFormatBase()
      throws IOException, InterruptedException, ClassNotFoundException {
    LOG.info("testing use of an InputFormat taht extends InputFormatBase");
    final Table htable = createTable(Bytes.toBytes("exampleTable"),
      new byte[][] { Bytes.toBytes("columnA"), Bytes.toBytes("columnB") });
    testInputFormat(ExampleTIF.class);
  }
  @Test
  public void testJobConfigurableExtensionOfTableInputFormatBase()
      throws IOException, InterruptedException, ClassNotFoundException {
    LOG.info("testing use of an InputFormat taht extends InputFormatBase, " +
        "using JobConfigurable.");
    final Table htable = createTable(Bytes.toBytes("exampleJobConfigurableTable"),
      new byte[][] { Bytes.toBytes("columnA"), Bytes.toBytes("columnB") });
    testInputFormat(ExampleJobConfigurableTIF.class);
  }
  @Test
  public void testDeprecatedExtensionOfTableInputFormatBase()
      throws IOException, InterruptedException, ClassNotFoundException {
    LOG.info("testing use of an InputFormat taht extends InputFormatBase, " +
        "using the approach documented in 0.98.");
    final Table htable = createTable(Bytes.toBytes("exampleDeprecatedTable"),
      new byte[][] { Bytes.toBytes("columnA"), Bytes.toBytes("columnB") });
    testInputFormat(ExampleDeprecatedTIF.class);
  }
  // Runs a real MR job with the given input format and verifies, via job
  // counters emitted by ExampleVerifier, that the "aa.*" row filter kept row
  // "aaa" (both families) and dropped row "bbb" entirely.
  void testInputFormat(Class<? extends InputFormat> clazz)
      throws IOException, InterruptedException, ClassNotFoundException {
    final Job job = MapreduceTestingShim.createJob(UTIL.getConfiguration());
    job.setInputFormatClass(clazz);
    job.setOutputFormatClass(NullOutputFormat.class);
    job.setMapperClass(ExampleVerifier.class);
    job.setNumReduceTasks(0);
    LOG.debug("submitting job.");
    assertTrue("job failed!", job.waitForCompletion(true));
    assertEquals("Saw the wrong number of instances of the filtered-for row.", 2, job.getCounters()
      .findCounter(TestTableInputFormat.class.getName() + ":row", "aaa").getValue());
    assertEquals("Saw any instances of the filtered out row.", 0, job.getCounters()
      .findCounter(TestTableInputFormat.class.getName() + ":row", "bbb").getValue());
    assertEquals("Saw the wrong number of instances of columnA.", 1, job.getCounters()
      .findCounter(TestTableInputFormat.class.getName() + ":family", "columnA").getValue());
    assertEquals("Saw the wrong number of instances of columnB.", 1, job.getCounters()
      .findCounter(TestTableInputFormat.class.getName() + ":family", "columnB").getValue());
    assertEquals("Saw the wrong count of values for the filtered-for row.", 2, job.getCounters()
      .findCounter(TestTableInputFormat.class.getName() + ":value", "value aaa").getValue());
    assertEquals("Saw the wrong count of values for the filtered-out row.", 0, job.getCounters()
      .findCounter(TestTableInputFormat.class.getName() + ":value", "value bbb").getValue());
  }
  // Mapper that tallies every observed row key, family, and value into job
  // counters, which testInputFormat() asserts against.
  public static class ExampleVerifier extends TableMapper<NullWritable, NullWritable> {
    @Override
    public void map(ImmutableBytesWritable key, Result value, Context context)
        throws IOException {
      for (Cell cell : value.listCells()) {
        context.getCounter(TestTableInputFormat.class.getName() + ":row",
            Bytes.toString(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength()))
            .increment(1l);
        context.getCounter(TestTableInputFormat.class.getName() + ":family",
            Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength()))
            .increment(1l);
        context.getCounter(TestTableInputFormat.class.getName() + ":value",
            Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()))
            .increment(1l);
      }
    }
  }
  // 0.98-era extension style: configuration happens in JobConfigurable.configure.
  public static class ExampleDeprecatedTIF extends TableInputFormatBase implements JobConfigurable {
    @Override
    public void configure(JobConf job) {
      try {
        Connection connection = ConnectionFactory.createConnection(job);
        Table exampleTable = connection.getTable(TableName.valueOf(("exampleDeprecatedTable")));
        // mandatory
        initializeTable(connection, exampleTable.getName());
        byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
          Bytes.toBytes("columnB") };
        // optional
        Scan scan = new Scan();
        for (byte[] family : inputColumns) {
          scan.addFamily(family);
        }
        Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
        scan.setFilter(exampleFilter);
        setScan(scan);
      } catch (IOException exception) {
        throw new RuntimeException("Failed to configure for job.", exception);
      }
    }
  }
  // JobConfigurable extension style against its own example table.
  public static class ExampleJobConfigurableTIF extends TableInputFormatBase
    implements JobConfigurable {
    @Override
    public void configure(JobConf job) {
      try {
        Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(job));
        TableName tableName = TableName.valueOf("exampleJobConfigurableTable");
        // mandatory
        initializeTable(connection, tableName);
        byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
          Bytes.toBytes("columnB") };
        //optional
        Scan scan = new Scan();
        for (byte[] family : inputColumns) {
          scan.addFamily(family);
        }
        Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
        scan.setFilter(exampleFilter);
        setScan(scan);
      } catch (IOException exception) {
        throw new RuntimeException("Failed to initialize.", exception);
      }
    }
  }
  // Current extension style: configuration happens in initialize(JobContext).
  public static class ExampleTIF extends TableInputFormatBase {
    @Override
    protected void initialize(JobContext job) throws IOException {
      Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(
          job.getConfiguration()));
      TableName tableName = TableName.valueOf("exampleTable");
      // mandatory
      initializeTable(connection, tableName);
      byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
        Bytes.toBytes("columnB") };
      //optional
      Scan scan = new Scan();
      for (byte[] family : inputColumns) {
        scan.addFamily(family);
      }
      Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
      scan.setFilter(exampleFilter);
      setScan(scan);
    }
  }
}
| apache-2.0 |
nikitamarchenko/open-kilda | services/src/messaging/src/main/java/org/openkilda/messaging/error/ErrorMessage.java | 3135 | /* Copyright 2017 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.messaging.error;
import static com.google.common.base.MoreObjects.toStringHelper;
import static org.openkilda.messaging.Utils.CORRELATION_ID;
import static org.openkilda.messaging.Utils.DESTINATION;
import static org.openkilda.messaging.Utils.PAYLOAD;
import static org.openkilda.messaging.Utils.TIMESTAMP;
import org.openkilda.messaging.Destination;
import org.openkilda.messaging.Message;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
/**
* Class represents error message.
*/
@JsonSerialize
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder(value = {
        DESTINATION,
        PAYLOAD,
        TIMESTAMP,
        CORRELATION_ID})
public class ErrorMessage extends Message {
    /**
     * Serialization version number constant.
     */
    private static final long serialVersionUID = 1L;

    /**
     * Data of the error message.
     */
    @JsonProperty(PAYLOAD)
    private ErrorData data;

    /**
     * Instance constructor.
     *
     * @param data error message payload
     * @param timestamp timestamp value
     * @param correlationId message correlation id
     * @param destination message destination
     */
    @JsonCreator
    public ErrorMessage(@JsonProperty(PAYLOAD) final ErrorData data,
                        @JsonProperty(TIMESTAMP) final long timestamp,
                        @JsonProperty(CORRELATION_ID) final String correlationId,
                        @JsonProperty(DESTINATION) final Destination destination) {
        super(timestamp, correlationId, destination);
        // Assign directly rather than calling the overridable setData():
        // invoking an overridable method from a constructor lets a subclass
        // observe a partially constructed instance.
        this.data = data;
    }

    /**
     * Returns payload of the error message.
     *
     * @return error message payload
     */
    public ErrorData getData() {
        return data;
    }

    /**
     * Sets payload of the error message.
     *
     * @param data error message payload
     */
    public void setData(final ErrorData data) {
        this.data = data;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        return toStringHelper(this)
                .add(TIMESTAMP, timestamp)
                .add(CORRELATION_ID, correlationId)
                .add(DESTINATION, destination)
                .add(PAYLOAD, data)
                .toString();
    }
}
| apache-2.0 |
Minoli/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.core/src/main/java/org/wso2/carbon/apimgt/core/configuration/models/FileEncryptionConfigurations.java | 1718 | /*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.core.configuration.models;
import org.wso2.carbon.config.annotation.Configuration;
import org.wso2.carbon.config.annotation.Element;
import java.util.ArrayList;
import java.util.List;
/**
* Class to hold File Encryption configuration parameters
*/
@Configuration(description = "File Encryption Configurations")
public class FileEncryptionConfigurations {

    @Element(description = "enable file encryption")
    private boolean enabled = false;

    @Element(description = "files to encrypt")
    private List<String> filesToEncrypt = new ArrayList<>();

    /**
     * Creates the configuration with encryption disabled and a single
     * placeholder file entry, mirroring the shape of a real configuration.
     */
    public FileEncryptionConfigurations() {
        filesToEncrypt.add("NameOfAFile");
    }

    /** @return whether file encryption is turned on */
    public boolean isEnabled() {
        return enabled;
    }

    /** @param enabled whether file encryption should be turned on */
    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    /** @return the list of file names to encrypt (live internal list) */
    public List<String> getFilesToEncrypt() {
        return filesToEncrypt;
    }

    /** @param filesToEncrypt the list of file names to encrypt */
    public void setFilesToEncrypt(List<String> filesToEncrypt) {
        this.filesToEncrypt = filesToEncrypt;
    }
}
| apache-2.0 |
IllusionRom-deprecated/android_platform_tools_idea | plugins/hg4idea/src/org/zmlx/hg4idea/repo/HgRepository.java | 1480 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zmlx.hg4idea.repo;
import com.intellij.dvcs.repo.Repository;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.zmlx.hg4idea.HgNameWithHashInfo;
import java.util.Collection;
public interface HgRepository extends Repository {

    /** Name of the branch Mercurial uses when none was explicitly created. */
    @NotNull String DEFAULT_BRANCH = "default";

    /** Returns the repository's {@code .hg} directory. */
    @NotNull
    VirtualFile getHgDir();

    /**
     * Returns the current branch of this Hg repository.
     */
    @NotNull
    String getCurrentBranch();

    /** Returns the repository's named branches as name/hash entries. */
    @NotNull
    Collection<HgNameWithHashInfo> getBranches();

    /** Returns the repository's bookmarks as name/hash entries. */
    @NotNull
    Collection<HgNameWithHashInfo> getBookmarks();

    /** Returns the repository's tags as name/hash entries. */
    @NotNull
    Collection<HgNameWithHashInfo> getTags();

    // NOTE(review): presumably these are the tags from Mercurial's localtags
    // mechanism, as opposed to getTags() — confirm against the implementation.
    @NotNull
    Collection<HgNameWithHashInfo> getLocalTags();

    /** Returns the active bookmark name, or {@code null} when none is active. */
    @Nullable
    String getCurrentBookmark();

    /** Returns the parsed configuration of this repository. */
    @NotNull
    HgConfig getRepositoryConfig();

    // NOTE(review): apparently refreshes the cached HgConfig from disk — confirm.
    void updateConfig();
}
| apache-2.0 |
kamransaleem/waltz | waltz-model/src/main/java/com/khartec/waltz/model/user/UserUtilities.java | 957 | /*
* Waltz - Enterprise Architecture
* Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
* See README.md for more information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific
*
*/
package com.khartec.waltz.model.user;
/**
 * Holder of constants describing the anonymous (unauthenticated) user.
 *
 * Created by dwatkins on 03/04/2016.
 */
public class UserUtilities {

    /** User name reported when no authenticated user is available. */
    public static final String ANONYMOUS_USERNAME = "anonymous";

    /** Shared immutable {@link User} instance representing the anonymous user. */
    public static final User ANONYMOUS_USER = ImmutableUser.builder()
            .userName(ANONYMOUS_USERNAME)
            .build();

    private UserUtilities() {
        // Constants-only holder; instantiation is meaningless.
    }
}
| apache-2.0 |
cbeams-archive/spring-framework-2.5.x | test/org/springframework/transaction/TestTransactionManager.java | 2691 | /*
* Copyright 2002-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.transaction;
import org.springframework.transaction.support.AbstractPlatformTransactionManager;
import org.springframework.transaction.support.DefaultTransactionStatus;
/**
 * Stub {@link AbstractPlatformTransactionManager} for tests: records which lifecycle
 * callbacks (begin/commit/rollback/setRollbackOnly) were invoked, and can simulate an
 * already-existing transaction or an inability to create one.
 *
 * @author Juergen Hoeller
 * @since 29.04.2003
 */
class TestTransactionManager extends AbstractPlatformTransactionManager {

    /** The single canonical transaction object handed out by this manager. */
    private static final Object TRANSACTION = "transaction";

    private final boolean existingTransaction;

    private final boolean canCreateTransaction;

    // Flags flipped by the corresponding callbacks; inspected by tests.
    protected boolean begin = false;

    protected boolean commit = false;

    protected boolean rollback = false;

    protected boolean rollbackOnly = false;

    protected TestTransactionManager(boolean existingTransaction, boolean canCreateTransaction) {
        this.existingTransaction = existingTransaction;
        this.canCreateTransaction = canCreateTransaction;
        setTransactionSynchronization(SYNCHRONIZATION_NEVER);
    }

    protected Object doGetTransaction() {
        return TRANSACTION;
    }

    protected boolean isExistingTransaction(Object transaction) {
        return existingTransaction;
    }

    protected void doBegin(Object transaction, TransactionDefinition definition) {
        assertSameTransaction(transaction);
        if (!this.canCreateTransaction) {
            throw new CannotCreateTransactionException("Cannot create transaction");
        }
        this.begin = true;
    }

    protected void doCommit(DefaultTransactionStatus status) {
        assertSameTransaction(status.getTransaction());
        this.commit = true;
    }

    protected void doRollback(DefaultTransactionStatus status) {
        assertSameTransaction(status.getTransaction());
        this.rollback = true;
    }

    protected void doSetRollbackOnly(DefaultTransactionStatus status) {
        assertSameTransaction(status.getTransaction());
        this.rollbackOnly = true;
    }

    /**
     * Fails fast when a callback receives anything other than the canonical test
     * transaction object (identity check previously duplicated in every callback).
     */
    private static void assertSameTransaction(Object transaction) {
        if (!TRANSACTION.equals(transaction)) {
            throw new IllegalArgumentException("Not the same transaction object");
        }
    }
}
| apache-2.0 |
smulikHakipod/zb4osgi | zb4o-cc2480-datalink/src/main/java/com/itaca/ztool/api/zdo/ZDO_BIND_REQ_SRSP.java | 1985 | /*
Copyright 2008-2013 ITACA-TSB, http://www.tsb.upv.es/
Instituto Tecnologico de Aplicaciones de Comunicacion
Avanzadas - Grupo Tecnologias para la Salud y el
Bienestar (TSB)
See the NOTICE file distributed with this work for additional
information regarding copyright ownership
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.itaca.ztool.api.zdo;
import com.itaca.ztool.api.ZToolCMD;
import com.itaca.ztool.api.ZToolPacket;
import com.itaca.ztool.util.DoubleByte;
/**
*
* @author <a href="mailto:alfiva@aaa.upv.es">Alvaro Fides Valero</a>
* @version $LastChangedRevision$ ($LastChangedDate$)
*/
public class ZDO_BIND_REQ_SRSP extends ZToolPacket /*implements IREPONSE,IZDO*/ {

    /** Status byte of the ZDO_BIND_REQ synchronous response; see {@link AF_STATUS}. */
    public int Status;

    /** Creates an empty response packet. */
    public ZDO_BIND_REQ_SRSP() {
    }

    /**
     * Builds the packet from raw frame data; the first byte carries the status code.
     * NOTE(review): assumes {@code framedata} has at least one element — confirm callers.
     */
    public ZDO_BIND_REQ_SRSP(int[] framedata) {
        this.Status = framedata[0];
        super.buildPacket(new DoubleByte(ZToolCMD.ZDO_BIND_REQ_SRSP), framedata);
    }

    /** Status codes used in AF responses. */
    public class AF_STATUS {
        public static final int FAILED = 1;
        public static final int INVALID_PARAMETER = 2;
        public static final int MEM_FAIL = 0x10;
        public static final int SUCCESS = 0;
    }
}
| apache-2.0 |
killbill/killbill | util/src/test/java/org/killbill/billing/util/cache/TestCache.java | 6383 | /*
* Copyright 2010-2012 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.util.cache;
import java.util.UUID;
import org.killbill.billing.ObjectType;
import org.killbill.billing.util.UtilTestSuiteWithEmbeddedDB;
import org.killbill.billing.util.cache.Cachable.CacheType;
import org.killbill.billing.util.dao.TableName;
import org.killbill.billing.util.entity.dao.EntitySqlDaoTransactionWrapper;
import org.killbill.billing.util.entity.dao.EntitySqlDaoTransactionalJdbiWrapper;
import org.killbill.billing.util.entity.dao.EntitySqlDaoWrapperFactory;
import org.killbill.billing.util.tag.dao.TagModelDao;
import org.killbill.billing.util.tag.dao.TagSqlDao;
import org.testng.Assert;
import org.testng.annotations.Test;
public class TestCache extends UtilTestSuiteWithEmbeddedDB {

    // Wrapper that runs DAO calls inside a JDBI transaction against the embedded DB.
    private EntitySqlDaoTransactionalJdbiWrapper transactionalSqlDao;

    /** Returns the number of entries in the given cache, or 0 when the cache is absent. */
    private int getCacheSize(CacheType cacheType) {
        final CacheController<Object, Object> cache = controlCacheDispatcher.getCacheController(cacheType);
        return cache != null ? cache.size() : 0;
    }

    /** Looks up (and thereby possibly loads) the record id cached for the given tag id. */
    private Long retrieveRecordIdFromCache(final UUID tagId) {
        final CacheController<Object, Object> cache = controlCacheDispatcher.getCacheController(CacheType.RECORD_ID);
        Object result = null;
        if (cache != null) {
            result = cache.get(tagId.toString(), new CacheLoaderArgument(ObjectType.TAG));
        }
        return (Long) result;
    }

    /**
     * Verifies that the RECORD_ID cache is populated lazily: inserting a tag leaves the
     * cache empty, while the first record-id lookup both returns the id and caches it.
     */
    @Test(groups = "slow")
    public void testCacheRecordId() throws Exception {
        this.transactionalSqlDao = new EntitySqlDaoTransactionalJdbiWrapper(dbi, roDbi, clock, controlCacheDispatcher, nonEntityDao, internalCallContextFactory);
        final TagModelDao tag = new TagModelDao(clock.getUTCNow(), UUID.randomUUID(), UUID.randomUUID(), ObjectType.TAG);
        // Verify we start with nothing in the cache
        Assert.assertEquals(getCacheSize(CacheType.RECORD_ID), 0);
        insertTag(tag);
        // Inserting must not populate the cache
        Assert.assertEquals(getCacheSize(CacheType.RECORD_ID), 0);
        final Long tagRecordId = tagDao.getRecordId(tag);
        // The lookup above is expected to have populated exactly one entry
        Assert.assertEquals(getCacheSize(CacheType.RECORD_ID), 1);
        final Long recordIdFromCache = retrieveRecordIdFromCache(tag.getId());
        Assert.assertNotNull(recordIdFromCache);
        Assert.assertEquals(recordIdFromCache, tagRecordId);
        // We cannot assume the number to be 1 here as the auto_increment implementation
        // depends on the database.
        // See also http://h2database.com/html/grammar.html#create_sequence
        Assert.assertTrue(recordIdFromCache > 0);
        Assert.assertEquals(getCacheSize(CacheType.RECORD_ID), 1);
    }

    /**
     * Verifies that a getById round trip warms all four id-related caches
     * (RECORD_ID, ACCOUNT_RECORD_ID, TENANT_RECORD_ID, OBJECT_ID) and that each
     * cached value matches the value on the loaded entity.
     */
    @Test(groups = "slow")
    public void testAllCachesAfterGetById() throws Exception {
        this.transactionalSqlDao = new EntitySqlDaoTransactionalJdbiWrapper(dbi, roDbi, clock, controlCacheDispatcher, nonEntityDao, internalCallContextFactory);
        final TagModelDao tag = new TagModelDao(clock.getUTCNow(), UUID.randomUUID(), UUID.randomUUID(), ObjectType.TAG);
        insertTag(tag);
        // Verify we start with nothing in the cache
        Assert.assertEquals(getCacheSize(CacheType.RECORD_ID), 0);
        Assert.assertEquals(getCacheSize(CacheType.ACCOUNT_RECORD_ID), 0);
        Assert.assertEquals(getCacheSize(CacheType.TENANT_RECORD_ID), 0);
        Assert.assertEquals(getCacheSize(CacheType.OBJECT_ID), 0);
        final TagModelDao result = getById(tag.getId());
        // A single getById should have populated one entry in each cache
        Assert.assertEquals(getCacheSize(CacheType.RECORD_ID), 1);
        Assert.assertEquals(getCacheSize(CacheType.ACCOUNT_RECORD_ID), 1);
        Assert.assertEquals(getCacheSize(CacheType.TENANT_RECORD_ID), 1);
        Assert.assertEquals(getCacheSize(CacheType.OBJECT_ID), 1);
        final Long recordId = (Long) controlCacheDispatcher.getCacheController(CacheType.RECORD_ID).get(tag.getId().toString(), new CacheLoaderArgument(ObjectType.TAG));
        Assert.assertEquals(recordId, result.getRecordId());
        final Long tenantRecordId = (Long) controlCacheDispatcher.getCacheController(CacheType.TENANT_RECORD_ID).get(tag.getId().toString(), new CacheLoaderArgument(ObjectType.TAG));
        Assert.assertEquals(tenantRecordId, result.getTenantRecordId());
        final UUID objectId = (UUID) controlCacheDispatcher.getCacheController(CacheType.OBJECT_ID).get(TableName.TAG + CacheControllerDispatcher.CACHE_KEY_SEPARATOR + recordId, new CacheLoaderArgument(ObjectType.TAG));
        Assert.assertEquals(objectId, result.getId());
        final Long accountRecordId = (Long) controlCacheDispatcher.getCacheController(CacheType.ACCOUNT_RECORD_ID).get(tag.getId().toString(), new CacheLoaderArgument(ObjectType.TAG));
        Assert.assertEquals(accountRecordId, result.getAccountRecordId());
    }

    /** Persists the tag inside a (non-read-only) transaction. */
    private void insertTag(final TagModelDao modelDao) {
        transactionalSqlDao.execute(false, new EntitySqlDaoTransactionWrapper<Void>() {
            @Override
            public Void inTransaction(final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory) throws Exception {
                entitySqlDaoWrapperFactory.become(TagSqlDao.class).create(modelDao, internalCallContext);
                return null;
            }
        });
    }

    /** Loads a tag by id inside a read-only transaction. */
    private TagModelDao getById(final UUID id) {
        return transactionalSqlDao.execute(true, new EntitySqlDaoTransactionWrapper<TagModelDao>() {
            @Override
            public TagModelDao inTransaction(final EntitySqlDaoWrapperFactory entitySqlDaoWrapperFactory) throws Exception {
                return entitySqlDaoWrapperFactory.become(TagSqlDao.class).getById(id.toString(), internalCallContext);
            }
        });
    }
}
| apache-2.0 |
JavaMicroService/rapidpm-microservice-examples | modules/example001/src/main/java/org/rapidpm/microservice/demo/servlet/MessageServlet.java | 2157 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.rapidpm.microservice.demo.servlet;
import org.rapidpm.microservice.demo.service.Service;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebInitParam;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
/**
 * Demo servlet that writes a configurable greeting followed by the result of
 * {@link Service#doWork()}.
 * <p>
 * Fix: the original declared {@code @WebInitParam} as a standalone class-level
 * annotation, which containers ignore — the annotation is only meaningful as a
 * member of {@code @WebServlet(initParams = ...)}. As a result the "message"
 * init parameter was never registered and {@link #init} read {@code null}.
 * It is now declared inside {@code @WebServlet}.
 */
@WebServlet(urlPatterns = "/test",
        initParams = @WebInitParam(name = "message", value = "Hello World"))
public class MessageServlet extends HttpServlet {

    /** Name of the init parameter carrying the greeting text. */
    public static final String MESSAGE = "message";

    /** Greeting text, read once from the servlet configuration. */
    private String message;

    @Override
    public void init(final ServletConfig config) throws ServletException {
        super.init(config);
        message = config.getInitParameter(MESSAGE);
    }

    @Override
    protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
        // NOTE(review): with CDI available, Service could be @Inject-ed instead of
        // instantiated per request.
        PrintWriter writer = resp.getWriter();
        writer.write(message + " " + new Service().doWork());
        writer.close();
    }

    /** POST is handled identically to GET. */
    @Override
    protected void doPost(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
        doGet(req, resp);
    }
}
| apache-2.0 |
prasi-in/geode | geode-web-api/src/main/java/org/apache/geode/rest/internal/web/security/RestSecurityService.java | 2159 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.rest.internal.web.security;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.security.GemFireSecurityException;
import org.springframework.stereotype.Component;
/**
 * Thin REST-facing adapter over Geode's {@link SecurityService}: converts
 * {@link GemFireSecurityException} vetoes into boolean results.
 */
@Component("securityService")
public class RestSecurityService {

    private SecurityService securityService = SecurityService.getSecurityService();

    /** Authorizes {@code operation} on {@code resource} with no region or key context. */
    public boolean authorize(String resource, String operation) {
        return authorize(resource, operation, null, null);
    }

    /** Authorizes {@code operation} on {@code resource} within the given region. */
    public boolean authorize(String resource, String operation, String region) {
        return authorize(resource, operation, region, null);
    }

    /**
     * Returns {@code true} when the current subject may perform the operation,
     * {@code false} when the underlying security service rejects it.
     */
    public boolean authorize(String resource, String operation, String region, String key) {
        try {
            securityService.authorize(resource, operation, region, key);
            return true;
        } catch (GemFireSecurityException ex) {
            return false;
        }
    }

    /**
     * Authorizes a DATA operation against every given key, failing fast on the
     * first rejected key. An empty key array is trivially authorized (matching
     * the original behavior).
     */
    public boolean authorize(String operation, String region, String[] keys) {
        for (String key : keys) {
            if (!authorize("DATA", operation, region, key)) {
                return false;
            }
        }
        return true;
    }

    /** Delegates value post-processing to the security service. */
    public Object postProcess(String regionPath, Object key, Object value,
            boolean valueIsSerialized) {
        return securityService.postProcess(regionPath, key, value, valueIsSerialized);
    }
}
| apache-2.0 |
StyleTang/incubator-rocketmq-externals | rocketmq-iot-bridge/src/test/java/org/apache/rocketmq/iot/connection/client/ClientManagerTest.java | 3024 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.iot.connection.client;
import io.netty.channel.Channel;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.apache.rocketmq.iot.connection.client.impl.ClientManagerImpl;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
public class ClientManagerTest {

    // Object under test.
    private ClientManager clientManager;
    // Mock client/channel pair registered in the manager before each test.
    private Client client;
    private Channel channel;
    // Direct handle on the implementation's internal channel->client map,
    // obtained via reflection so tests can observe state without the API.
    private Map<Channel, Client> channel2client;

    @Before
    public void setup() throws IllegalAccessException {
        clientManager = new ClientManagerImpl();
        client = Mockito.mock(Client.class);
        channel = Mockito.mock(Channel.class);
        channel2client = (Map<Channel, Client>) FieldUtils.getDeclaredField(ClientManagerImpl.class, "channel2Client", true).get(clientManager);
        channel2client.put(channel, client);
    }

    @After
    public void teardown() {
    }

    /** get() returns the registered client for a known channel, null for an unknown one. */
    @Test
    public void testGet() {
        /* Known channel */
        Assert.assertEquals(client, clientManager.get(channel));
        /* Unknown channel */
        Channel fakeChannel = Mockito.mock(Channel.class);
        Assert.assertNull(clientManager.get(fakeChannel));
    }

    /** put() overwrites an existing mapping and registers new ones independently. */
    @Test
    public void testPut() {
        Client newClient = Mockito.mock(Client.class);
        clientManager.put(channel, newClient);
        Assert.assertEquals(newClient, channel2client.get(channel));
        Assert.assertNotEquals(client, channel2client.get(channel));
        Channel anotherChannel = Mockito.mock(Channel.class);
        Client anotherClient = Mockito.mock(Client.class);
        clientManager.put(anotherChannel, anotherClient);
        Assert.assertEquals(anotherClient, clientManager.get(anotherChannel));
        Channel channelWithNoClient = Mockito.mock(Channel.class);
        Assert.assertNull(clientManager.get(channelWithNoClient));
    }

    /** remove() drops the mapping so a later get() returns null. */
    @Test
    public void testRemove() {
        clientManager.remove(channel);
        Assert.assertNull(clientManager.get(channel));
    }
}
| apache-2.0 |
zzcclp/carbondata | core/src/main/java/org/apache/carbondata/core/metadata/schema/table/column/CarbonDimension.java | 3872 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.metadata.schema.table.column;
import java.util.ArrayList;
import java.util.List;
import org.apache.carbondata.core.metadata.encoder.Encoding;
/**
 * Represents a dimension column of a Carbon table, extending {@link CarbonColumn}
 * with MDKey ordinal information and, for complex types, the child-dimension tree.
 */
public class CarbonDimension extends CarbonColumn {
  /**
   * serialization version
   */
  private static final long serialVersionUID = 3648269871656322681L;
  /**
   * List of child dimensions for a complex type; null until
   * {@link #initializeChildDimensionsList(int)} is called.
   */
  private List<CarbonDimension> listOfChildDimensions;
  /**
   * In case of a dictionary dimension this stores the ordinal
   * of the dimension in the MDKey.
   */
  private int keyOrdinal;
  /**
   * Parent dimension of this column when it is part of a complex column; null otherwise.
   */
  private CarbonDimension complexParentDimension = null;

  public CarbonDimension(ColumnSchema columnSchema, int ordinal, int keyOrdinal) {
    this(columnSchema, ordinal, keyOrdinal, 0);
  }

  public CarbonDimension(ColumnSchema columnSchema, int ordinal, int keyOrdinal,
      int schemaOrdinal) {
    super(columnSchema, ordinal, schemaOrdinal);
    this.keyOrdinal = keyOrdinal;
  }

  /**
   * Initializes the child-dimension list with the given expected capacity.
   */
  public void initializeChildDimensionsList(int childDimension) {
    listOfChildDimensions = new ArrayList<CarbonDimension>(childDimension);
  }

  /**
   * @return number of children for a complex type
   */
  public int getNumberOfChild() {
    return columnSchema.getNumberOfChild();
  }

  /**
   * @return list of child dimensions for a complex type (null for simple types)
   */
  public List<CarbonDimension> getListOfChildDimensions() {
    return listOfChildDimensions;
  }

  /**
   * @return whether this column carries the given encoding
   */
  public boolean hasEncoding(Encoding encoding) {
    return columnSchema.getEncodingList().contains(encoding);
  }

  /**
   * @return the MDKey ordinal of this dimension
   */
  public int getKeyOrdinal() {
    return keyOrdinal;
  }

  /**
   * Intentionally a no-op. NOTE(review): presumably retained only for API
   * compatibility after complex-type ordinals stopped being tracked here —
   * confirm against callers before removing.
   */
  public void setComplexTypeOrdinal(int complexTypeOrdinal) {
  }

  /**
   * @return whether this column participates in sorting
   */
  public boolean isSortColumn() {
    return this.columnSchema.isSortColumn();
  }

  /**
   * Hash code is derived solely from the underlying column schema,
   * consistent with {@link #equals(Object)}.
   */
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((columnSchema == null) ? 0 : columnSchema.hashCode());
    return result;
  }

  /**
   * Two dimensions are equal when their column schemas are equal.
   * The instanceof check also rejects null, so no separate null check is needed.
   */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof CarbonDimension)) {
      return false;
    }
    CarbonDimension other = (CarbonDimension) obj;
    if (columnSchema == null) {
      return other.columnSchema == null;
    }
    return columnSchema.equals(other.columnSchema);
  }

  public CarbonDimension getComplexParentDimension() {
    return complexParentDimension;
  }

  public void setComplexParentDimension(CarbonDimension complexParentDimension) {
    this.complexParentDimension = complexParentDimension;
  }
}
| apache-2.0 |
punkhorn/camel-upstream | core/camel-core/src/test/java/org/apache/camel/processor/DynamicRouter3Test.java | 2139 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Header;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Test;
public class DynamicRouter3Test extends ContextTestSupport {

    /**
     * Sends one message through the dynamic router and expects it to visit
     * mock:a, mock:b and mock:c exactly once each, in the order dictated by
     * {@link #slip(String, String)}.
     */
    @Test
    public void testDynamicRouter() throws Exception {
        getMockEndpoint("mock:a").expectedMessageCount(1);
        getMockEndpoint("mock:b").expectedMessageCount(1);
        getMockEndpoint("mock:c").expectedMessageCount(1);
        template.sendBody("direct:start", "Hello World");
        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start")
                        // use a bean as the dynamic router
                        .dynamicRouter().method(DynamicRouter3Test.class, "slip");
            }
        };
    }

    /**
     * Routing-slip callback invoked repeatedly by Camel. {@code previous} is the
     * last endpoint visited (the SLIP_ENDPOINT header): on the first call it is
     * null, so the message goes to mock:a then mock:b; after mock:b it is routed
     * to mock:c; returning null afterwards ends the routing.
     */
    public String slip(String body, @Header(Exchange.SLIP_ENDPOINT) String previous) {
        if (previous == null) {
            return "mock:a,mock:b";
        } else if ("mock://b".equals(previous)) {
            return "mock:c";
        }
        // no more so return null
        return null;
    }
}
| apache-2.0 |
porcelli-forks/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-marshalling/src/main/java/org/kie/workbench/common/stunner/bpmn/client/marshall/converters/fromstunner/properties/GenericServiceTaskPropertyWriter.java | 4063 | /*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties;
import org.eclipse.bpmn2.Interface;
import org.eclipse.bpmn2.Operation;
import org.eclipse.bpmn2.ServiceTask;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties.CustomAttribute;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties.CustomElement;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.properties.GenericServiceTaskPropertyReader;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.tostunner.properties.Scripts;
import org.kie.workbench.common.stunner.bpmn.definition.property.service.GenericServiceTaskValue;
import org.kie.workbench.common.stunner.bpmn.definition.property.task.OnEntryAction;
import org.kie.workbench.common.stunner.bpmn.definition.property.task.OnExitAction;
import static org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.Factories.bpmn2;
/**
 * Writes Stunner generic-service-task properties onto a BPMN2 {@link ServiceTask},
 * building the matching {@link Interface}/{@link Operation} definitions and the
 * jBPM custom attributes/elements the runtime expects.
 */
public class GenericServiceTaskPropertyWriter extends MultipleInstanceActivityPropertyWriter {

    private final ServiceTask task;
    private final Interface iface;

    public GenericServiceTaskPropertyWriter(ServiceTask task, VariableScope variableScope) {
        super(task, variableScope);
        this.task = task;
        this.iface = bpmn2.createInterface();
    }

    /**
     * Transfers implementation, interface and operation from the Stunner value onto
     * the BPMN2 task. Each is written both as a standard BPMN2 attribute/reference
     * and as a jBPM custom attribute.
     */
    public void setValue(GenericServiceTaskValue value) {
        // 1. Implementation: written as the task's implementation and mirrored
        // into the custom serviceImplementation attribute.
        String serviceImplementation = value.getServiceImplementation();
        task.setImplementation(GenericServiceTaskPropertyReader.getServiceImplementation(serviceImplementation));
        CustomAttribute.serviceImplementation.of(task).set(serviceImplementation);
        //-------------------------------------------------------------
        // 2. Interface: id derived from the task id for stable references.
        String serviceInterface = value.getServiceInterface();
        // https://issues.jboss.org/browse/KOGITO-418
        // In/Out Messages should not be written now
        CustomAttribute.serviceInterface.of(task).set(serviceInterface);
        iface.setImplementationRef(serviceInterface);
        iface.setName(serviceInterface);
        iface.setId(task.getId() + "_ServiceInterface");
        //-------------------------------------------------------------
        // 3. Operation: attached to the interface and referenced by the task.
        String serviceOperation = value.getServiceOperation();
        CustomAttribute.serviceOperation.of(task).set(serviceOperation);
        Operation operation = bpmn2.createOperation();
        operation.setId(task.getId() + "_ServiceOperation");
        operation.setName(serviceOperation);
        operation.setImplementationRef(serviceOperation);
        iface.getOperations().add(operation);
        task.setOperationRef(operation);
        addInterfaceDefinition(iface);
    }

    /** Marks the task as auto-start for ad-hoc processes (jBPM custom element). */
    public void setAdHocAutostart(boolean autoStart) {
        CustomElement.autoStart.of(task).set(autoStart);
    }

    /** Marks the task as asynchronous (jBPM custom element). */
    public void setAsync(boolean async) {
        CustomElement.async.of(task).set(async);
    }

    /** Sets the SLA due date custom element. */
    public void setSLADueDate(String slaDueDate) {
        CustomElement.slaDueDate.of(task).set(slaDueDate);
    }

    /** Writes the on-entry script action onto the task. */
    public void setOnEntryAction(OnEntryAction onEntryAction) {
        Scripts.setOnEntryAction(task, onEntryAction);
    }

    /** Writes the on-exit script action onto the task. */
    public void setOnExitAction(OnExitAction onExitAction) {
        Scripts.setOnExitAction(task, onExitAction);
    }
}
| apache-2.0 |
cts2/cts2-framework | cts2-service/src/main/java/edu/mayo/cts2/framework/service/command/restriction/TaggedCodeSystemRestriction.java | 1310 | package edu.mayo.cts2.framework.service.command.restriction;
import edu.mayo.cts2.framework.model.service.core.NameOrURI;
public class TaggedCodeSystemRestriction {
private NameOrURI codeSystem;
private String tag;
public NameOrURI getCodeSystem() {
return codeSystem;
}
public void setCodeSystem(NameOrURI codeSystem) {
this.codeSystem = codeSystem;
}
public String getTag() {
return tag;
}
public void setTag(String tag) {
this.tag = tag;
}
@Override
public int hashCode()
{
final int prime = 37;
int result = 1;
result = prime * result + ((codeSystem == null) ? 0 : codeSystem.hashCode());
result = prime * result + ((tag == null) ? 0 : tag.hashCode());
return result;
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
{
return true;
}
if (obj == null)
{
return false;
}
if (getClass() != obj.getClass())
{
return false;
}
TaggedCodeSystemRestriction other = (TaggedCodeSystemRestriction) obj;
if (codeSystem == null)
{
if (other.codeSystem != null)
{
return false;
}
}
else if (!codeSystem.equals(other.codeSystem))
{
return false;
}
if (tag == null)
{
if (other.tag != null)
{
return false;
}
}
else if (!tag.equals(other.tag))
{
return false;
}
return true;
}
} | apache-2.0 |
jk1/intellij-community | platform/testFramework/src/com/intellij/testFramework/propertyBased/IntentionPolicy.java | 4604 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testFramework.propertyBased;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInsight.intention.IntentionActionDelegate;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ex.QuickFixWrapper;
import com.intellij.openapi.editor.Editor;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.List;
/**
* @author peter
*/
public class IntentionPolicy {
  /**
   * Decides whether the property-based test machinery may invoke the given intention.
   * Intentions are rejected when they do not run in a write action (they cannot change
   * the document from here), when their presentation text is black-listed by
   * {@link #shouldSkipIntention(String)}, or when the family name of the underlying
   * action is black-listed by {@link #shouldSkipByFamilyName(String)}.
   */
  public boolean mayInvokeIntention(@NotNull IntentionAction action) {
    // Anything outside a write action cannot modify the document in this framework.
    if (!action.startInWriteAction()) {
      return false;
    }
    if (shouldSkipIntention(action.getText())) {
      return false;
    }
    // Peel off delegation layers to reach the real intention implementation.
    IntentionAction unwrapped = action;
    while (unwrapped instanceof IntentionActionDelegate) {
      unwrapped = ((IntentionActionDelegate)unwrapped).getDelegate();
    }
    // Quick fixes wrapped for the intention UI report their family via the fix itself.
    String familyName = unwrapped instanceof QuickFixWrapper
                        ? ((QuickFixWrapper)unwrapped).getFix().getFamilyName()
                        : unwrapped.getFamilyName();
    return !shouldSkipByFamilyName(familyName);
  }

  /**
   * Returns {@code true} for intention texts that must never be invoked by the tests:
   * "Typo: Change to..." starts a live template and leaves the document untouched,
   * "Optimize imports" is skipped because of https://youtrack.jetbrains.com/issue/IDEA-173801,
   * and "Convert to project line separators" changes VFS content rather than the document.
   */
  protected boolean shouldSkipIntention(@NotNull String actionText) {
    return actionText.startsWith("Typo: Change to...")
           || actionText.startsWith("Optimize imports")
           || actionText.startsWith("Convert to project line separators");
  }

  /**
   * Hook for subclasses to black-list whole intention families; the base policy
   * accepts every family.
   */
  protected boolean shouldSkipByFamilyName(@NotNull String familyName) {
    return false;
  }

  /**
   * Controls whether the given intention (already approved by {@link #mayInvokeIntention})
   * is allowed to introduce new highlighting errors into the code. It's recommended to
   * return false by default, and include found intentions one by one (or make them not
   * break the code).
   */
  public boolean mayBreakCode(@NotNull IntentionAction action, @NotNull Editor editor, @NotNull PsiFile file) {
    // "Flip ','" performs raw text operations without caring about correctness.
    return "Flip ','".equals(action.getText());
  }

  /** Whether comment preservation should be verified after running the given intention. */
  public boolean checkComments(IntentionAction intention) {
    return false;
  }

  /** Whether the given comment should be tracked when comment checking is enabled. */
  public boolean trackComment(PsiComment comment) {
    return true;
  }

  /**
   * Returns elements that could be wrapped with the {@linkplain #getWrapPrefix() wrap prefix}
   * and {@linkplain #getWrapSuffix() wrap suffix} without changing the available intentions.
   *
   * @param currentElement an element caret is positioned at
   * @return list of candidate elements; one of them will be wrapped and it will be checked
   *         that no intentions changed. The default is an empty list, meaning no wrapping
   *         is performed.
   */
  @NotNull
  public List<PsiElement> getElementsToWrap(@NotNull PsiElement currentElement) {
    return Collections.emptyList();
  }

  /**
   * @return a wrap prefix for {@link #getElementsToWrap(PsiElement)}.
   */
  @NotNull
  public String getWrapPrefix() {
    return "";
  }

  /**
   * @return a wrap suffix for {@link #getElementsToWrap(PsiElement)}.
   */
  public String getWrapSuffix() {
    return "";
  }
}
| apache-2.0 |
loolooyyyy/Kommons | src/main/java/cc/koosha/kommons/function/Fun.java | 90 | package cc.koosha.kommons.function;
/**
 * A minimal single-argument function type: maps a value of type {@code T}
 * to a result of type {@code R}.
 *
 * <p>Annotated with {@link FunctionalInterface} so the compiler enforces the
 * single-abstract-method contract and the type can be implemented with a
 * lambda or method reference.</p>
 *
 * @param <T> the type of the input to the function
 * @param <R> the type of the result of the function
 */
@FunctionalInterface
public interface Fun<T, R> {
    /**
     * Applies this function to the given argument.
     *
     * @param t the function argument
     * @return the function result
     */
    R apply(T t);
}
| apache-2.0 |
RichardHightower/qbit | qbit/core/src/test/java/io/advantageous/qbit/service/rest/endpoint/tests/tests/PredicateChainTest.java | 9932 | package io.advantageous.qbit.service.rest.endpoint.tests.tests;
import io.advantageous.boon.core.Lists;
import io.advantageous.qbit.http.request.HttpResponseBuilder;
import io.advantageous.qbit.http.request.HttpTextResponse;
import io.advantageous.qbit.http.request.decorator.HttpBinaryResponseHolder;
import io.advantageous.qbit.http.request.decorator.HttpResponseDecorator;
import io.advantageous.qbit.http.request.decorator.HttpTextResponseHolder;
import io.advantageous.qbit.http.server.HttpServerBuilder;
import io.advantageous.qbit.server.EndpointServerBuilder;
import io.advantageous.qbit.server.ServiceEndpointServer;
import io.advantageous.qbit.service.rest.endpoint.tests.model.Employee;
import io.advantageous.qbit.service.rest.endpoint.tests.services.EmployeeServiceCollectionTestService;
import io.advantageous.qbit.service.rest.endpoint.tests.sim.HttpServerSimulator;
import io.advantageous.qbit.spi.FactorySPI;
import io.advantageous.qbit.util.MultiMap;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicInteger;
import static junit.framework.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
/**
 * Tests for the HTTP-server "should continue request" predicate chain and for
 * response decorators, driven through a simulated HTTP server so no real
 * network endpoint is needed.
 */
public class PredicateChainTest {

    ServiceEndpointServer serviceEndpointServer;
    HttpServerSimulator httpServerSimulator;

    @Before
    public void before() {
        httpServerSimulator = new HttpServerSimulator();

        // Install a factory that hands back the simulator instead of a real HTTP
        // server; whatever decorators the builder configured are forwarded to it.
        // NOTE(review): the factory lambda must match the SPI's 10-argument
        // signature exactly; most arguments are intentionally ignored here.
        FactorySPI.setHttpServerFactory((options, endPointName, systemManager, serviceDiscovery,
                healthServiceAsync, serviceDiscoveryTtl, serviceDiscoveryTtlTimeUnit,
                decorators, httpResponseCreator, z)
                -> {
            httpServerSimulator.setResponseDecorators(decorators);
            return
                    httpServerSimulator;
        });

        // Default server; several tests below replace this with their own build.
        serviceEndpointServer = EndpointServerBuilder.endpointServerBuilder()
                .setEnableHealthEndpoint(true).setEnableStatEndpoint(true)
                .build()
                .initServices(new EmployeeServiceCollectionTestService()).startServer();
    }

    @Test
    public void testChain() {
        EndpointServerBuilder endpointServerBuilder = EndpointServerBuilder.endpointServerBuilder()
                .setEnableHealthEndpoint(true).setEnableStatEndpoint(true);

        HttpServerBuilder httpServerBuilder = endpointServerBuilder.getHttpServerBuilder();

        // A predicate returning false vetoes the request without writing a response...
        httpServerBuilder.addShouldContinueHttpRequestPredicate(httpRequest -> false);

        serviceEndpointServer = endpointServerBuilder.addService(new EmployeeServiceCollectionTestService()).build();
        serviceEndpointServer.startServer();

        final HttpTextResponse httpResponse = httpServerSimulator.postBody("/es/sendEmployees",
                Lists.list(new Employee(1, "Rick"),
                        new Employee(2, "Diana")));

        // ...so the simulator observes no response at all.
        assertNull(httpResponse);
    }

    @Test
    public void testResponseDecorator() {
        EndpointServerBuilder endpointServerBuilder = EndpointServerBuilder.endpointServerBuilder()
                .setEnableHealthEndpoint(true).setEnableStatEndpoint(true);

        HttpServerBuilder httpServerBuilder = endpointServerBuilder.getHttpServerBuilder();

        // Decorator rewrites every text response: new code, content type, an extra
        // header, and the original payload prefixed with "DECORATED".
        httpServerBuilder.addResponseDecorator(new HttpResponseDecorator() {
            @Override
            public boolean decorateTextResponse(HttpTextResponseHolder responseHolder,
                                                String requestPath, String requestMethod,
                                                int code, String contentType, String payload,
                                                MultiMap<String, String> responseHeaders,
                                                MultiMap<String, String> requestHeaders,
                                                MultiMap<String, String> requestParams) {

                responseHolder.setHttpTextResponse((HttpTextResponse) HttpResponseBuilder.httpResponseBuilder()
                        .setCode(999).setContentType("foo/bar").addHeader("foo", "bar").setBody("DECORATED" + payload).build());
                // true signals that the decorated response should replace the original.
                return true;
            }

            @Override
            public boolean decorateBinaryResponse(HttpBinaryResponseHolder responseHolder, String requestPath, String requestMethod,
                                                  int code, String contentType, byte[] payload, MultiMap<String, String> responseHeaders, MultiMap<String, String> requestHeaders, MultiMap<String, String> requestParams) {
                // Binary responses are left untouched by this test.
                return false;
            }
        });

        serviceEndpointServer = endpointServerBuilder.addService(new EmployeeServiceCollectionTestService()).build();
        serviceEndpointServer.startServer();

        final HttpTextResponse httpResponse = httpServerSimulator.postBody("/es/sendEmployees",
                Lists.list(new Employee(1, "Rick"),
                        new Employee(2, "Diana")));

        assertNotNull(httpResponse);
        // The client must see the decorator's code and header, not the service's.
        Assert.assertEquals(999, httpResponse.code());
        Assert.assertTrue(httpResponse.headers().keySet().contains("foo"));
        Assert.assertEquals("bar", httpResponse.headers().getFirst("foo"));

        System.out.println(httpResponse);
    }

    @Test
    public void testChainWithResponse() {
        EndpointServerBuilder endpointServerBuilder = EndpointServerBuilder.endpointServerBuilder()
                .setEnableHealthEndpoint(true).setEnableStatEndpoint(true);

        HttpServerBuilder httpServerBuilder = endpointServerBuilder.getHttpServerBuilder();

        // A vetoing predicate may still answer the request itself before returning false.
        httpServerBuilder.addShouldContinueHttpRequestPredicate(httpRequest -> {
            httpRequest.getReceiver().response(666, "foo-content", "foo");
            return false;
        });

        serviceEndpointServer = endpointServerBuilder.addService(new EmployeeServiceCollectionTestService()).build();
        serviceEndpointServer.startServer();

        final HttpTextResponse httpResponse = httpServerSimulator.postBody("/es/sendEmployees",
                Lists.list(new Employee(1, "Rick"),
                        new Employee(2, "Diana")));

        // The predicate's hand-written response is what the client receives.
        assertEquals(666, httpResponse.code());
        assertEquals("foo", httpResponse.body());
    }

    @Test
    public void testChainContinueTrue() {
        EndpointServerBuilder endpointServerBuilder = EndpointServerBuilder.endpointServerBuilder()
                .setEnableHealthEndpoint(true).setEnableStatEndpoint(true);

        HttpServerBuilder httpServerBuilder = endpointServerBuilder.getHttpServerBuilder();

        // A predicate returning true lets the request through to the service.
        httpServerBuilder.addShouldContinueHttpRequestPredicate(httpRequest -> true);

        serviceEndpointServer = endpointServerBuilder.addService(new EmployeeServiceCollectionTestService()).build();
        serviceEndpointServer.startServer();

        final HttpTextResponse httpResponse = httpServerSimulator.postBody("/es/sendEmployees",
                Lists.list(new Employee(1, "Rick"),
                        new Employee(2, "Diana")));

        // NOTE(review): 202/"success" is presumably the service's normal reply for
        // this endpoint — established by the other passing-through tests below.
        assertEquals(202, httpResponse.code());
        assertEquals("\"success\"", httpResponse.body());
    }

    @Test
    public void testChainLastContinueFalse() {
        EndpointServerBuilder endpointServerBuilder = EndpointServerBuilder.endpointServerBuilder()
                .setEnableHealthEndpoint(true).setEnableStatEndpoint(true);

        AtomicInteger count = new AtomicInteger();

        HttpServerBuilder httpServerBuilder = endpointServerBuilder.getHttpServerBuilder();

        // Ten accepting predicates: all must run, and the request still goes through.
        for (int i = 0; i < 10; i++) {
            httpServerBuilder.addShouldContinueHttpRequestPredicate(httpRequest -> {
                count.incrementAndGet();
                return true;
            });
        }

        serviceEndpointServer = endpointServerBuilder.addService(new EmployeeServiceCollectionTestService()).build();
        serviceEndpointServer.startServer();

        final HttpTextResponse httpResponse = httpServerSimulator.postBody("/es/sendEmployees",
                Lists.list(new Employee(1, "Rick"),
                        new Employee(2, "Diana")));

        assertEquals(202, httpResponse.code());
        assertEquals("\"success\"", httpResponse.body());
        // Every predicate in the chain was evaluated exactly once.
        assertEquals(10, count.get());
    }

    @Test
    public void testChainContinueTrueWithChain() {
        EndpointServerBuilder endpointServerBuilder = EndpointServerBuilder.endpointServerBuilder()
                .setEnableHealthEndpoint(true).setEnableStatEndpoint(true);

        AtomicInteger count = new AtomicInteger();

        HttpServerBuilder httpServerBuilder = endpointServerBuilder.getHttpServerBuilder();

        // Ten accepting predicates...
        for (int i = 0; i < 10; i++) {
            httpServerBuilder.addShouldContinueHttpRequestPredicate(httpRequest -> {
                count.incrementAndGet();
                return true;
            });
        }

        // ...then one that answers and vetoes...
        httpServerBuilder.addShouldContinueHttpRequestPredicate(httpRequest -> {
            count.incrementAndGet();
            httpRequest.getReceiver().response(666, "foo-content", "foo");
            return false;
        });

        // ...followed by ten more that must never run.
        for (int i = 0; i < 10; i++) {
            httpServerBuilder.addShouldContinueHttpRequestPredicate(httpRequest -> {
                count.incrementAndGet();
                return true;
            });
        }

        serviceEndpointServer = endpointServerBuilder.addService(new EmployeeServiceCollectionTestService()).build();
        serviceEndpointServer.startServer();

        final HttpTextResponse httpResponse = httpServerSimulator.postBody("/es/sendEmployees",
                Lists.list(new Employee(1, "Rick"),
                        new Employee(2, "Diana")));

        assertEquals(666, httpResponse.code());
        assertEquals("foo", httpResponse.body());
        // Chain short-circuits at the vetoing predicate: 10 + 1 invocations, not 21.
        assertEquals(11, count.get());
    }
}
| apache-2.0 |
sap-production/hudson-3.x | hudson-core/src/test/java/hudson/model/UpdateCenterTest.java | 2419 | /*******************************************************************************
*
* Copyright (c) 2004-2011 Oracle Corporation.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
*
* Kohsuke Kawaguchi, Nikita Levyankov
*
*
*******************************************************************************/
package hudson.model;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.junit.Test;

import static junit.framework.Assert.assertTrue;
/**
* Quick test for {@link UpdateCenter}.
*
* @author Kohsuke Kawaguchi
*/
public class UpdateCenterTest {

    /**
     * Downloads the live update-center JSONP feed, strips the JSONP wrapper, and
     * checks that the parsed data points at an expected core download URL and
     * contains the "rake" plugin. Skips silently when there is no connectivity.
     *
     * @throws IOException on unexpected I/O failure after connectivity was confirmed
     */
    @Test
    public void testData() throws IOException {
        // Probe connectivity first so the test is skipped rather than failed
        // when offline. See HUDSON-2095.
        try {
            HttpURLConnection con = (HttpURLConnection) new URL("http://hudson-ci.org/").openConnection();
            con.setRequestMethod("HEAD");
            con.setConnectTimeout(10000); // set timeout to 10 seconds
            if (con.getResponseCode() != HttpURLConnection.HTTP_OK) {
                System.out.println("Skipping this test. Page doesn't exists");
                return;
            }
        } catch (java.net.SocketTimeoutException e) {
            System.out.println("Skipping this test. Timeout exception");
            return;
        } catch (IOException e) {
            System.out.println("Skipping this test. No internet connectivity");
            return;
        }

        URL url = new URL("http://hudson-ci.org/update-center3/update-center.json?version=build");
        // FIX: the stream returned by openStream() was previously never closed
        // (resource leak), and IOUtils.toString(InputStream) decoded with the
        // platform default charset; the feed is UTF-8.
        String jsonp;
        InputStream in = url.openStream();
        try {
            jsonp = IOUtils.toString(in, "UTF-8");
        } finally {
            in.close();
        }
        // The payload is JSONP: keep only the JSON between the outermost parentheses.
        String json = jsonp.substring(jsonp.indexOf('(') + 1, jsonp.lastIndexOf(')'));

        UpdateSite us = new UpdateSite("default", url.toExternalForm());
        UpdateSite.Data data = us.new Data(JSONObject.fromObject(json));
        assertTrue(data.core.url.startsWith("http://hudson-ci.org/") || data.core.url.startsWith("http://eclipse.org/") || data.core.url.startsWith("http://download.eclipse.org/"));
        assertTrue(data.plugins.containsKey("rake"));
        System.out.println(data.core.url);
    }
}
| apache-2.0 |
Gadreel/divconq | divconq.core/src/main/java/divconq/www/http/parse/QueryParser.java | 22045 | /* ************************************************************************
#
# DivConq
#
# http://divconq.com/
#
# Copyright:
# Copyright 2014 eTimeline, LLC. All rights reserved.
#
# License:
# See the license.txt file in the project's top-level directory for details.
#
# Authors:
# * Andy White
#
************************************************************************ */
/*
* QueryParser.java December 2002
*
* Copyright (C) 2002, Niall Gallagher <niallg@users.sf.net>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package divconq.www.http.parse;
import divconq.www.http.Query;
import divconq.www.util.parse.MapParser;
import java.net.URLEncoder;
import java.util.Set;
/**
* The <code>ParameterParser</code> is used to parse data encoded in
* the <code>application/x-www-form-urlencoded</code> MIME type. It
* is also used to parse a query string from a HTTP URL, see RFC 2616.
* The parsed parameters are available through the various methods of
* the <code>org.simpleframework.http.net.Query</code> interface. The
* syntax of the parsed parameters is described below in BNF.
* <pre>
*
* params = *(pair [ "&" params])
* pair = name "=" value
* name = *(text | escaped)
* value = *(text | escaped)
* escaped = % HEX HEX
*
* </pre>
* This will consume all data found as a name or value, if the data
* is a "+" character then it is replaced with a space character.
* This regards only "=", "&", and "%" as having special values.
* The "=" character delimits the name from the value and the "&"
* delimits the name value pair. The "%" character represents the
* start of an escaped sequence, which consists of two hex digits.
* All escaped sequences are converted to its character value.
*
* @author Niall Gallagher
*/
public class QueryParser extends MapParser<String> implements Query {
/**
* Used to accumulate the characters for the parameter name.
*/
private Token name;
/**
* Used to accumulate the characters for the parameter value.
*/
private Token value;
/**
* Constructor for the <code>ParameterParser</code>. This creates
* an instance that can be use to parse HTML form data and URL
* query strings encoded as application/x-www-form-urlencoded.
* The parsed parameters are made available through the interface
* <code>org.simpleframework.util.net.Query</code>.
*/
public QueryParser(){
this.name = new Token();
this.value = new Token();
}
/**
* Constructor for the <code>ParameterParser</code>. This creates
* an instance that can be use to parse HTML form data and URL
* query strings encoded as application/x-www-form-urlencoded.
* The parsed parameters are made available through the interface
* <code>org.simpleframework.util.net.Query</code>.
*
* @param text this is the text to parse for the parameters
*/
public QueryParser(String text){
this();
parse(text);
}
/**
* This extracts an integer parameter for the named value. If the
* named parameter does not exist this will return a zero value.
* If however the parameter exists but is not in the format of a
* decimal integer value then this will throw an exception.
*
* @param name the name of the parameter value to retrieve
*
* @return this returns the named parameter value as an integer
*/
public int getInteger(Object name) {
String value = get(name);
if(value != null) {
return Integer.parseInt(value);
}
return 0;
}
/**
* This extracts a float parameter for the named value. If the
* named parameter does not exist this will return a zero value.
* If however the parameter exists but is not in the format of a
* floating point number then this will throw an exception.
*
* @param name the name of the parameter value to retrieve
*
* @return this returns the named parameter value as a float
*/
public float getFloat(Object name) {
String value = get(name);
if(value != null) {
return Float.parseFloat(value);
}
return 0.0f;
}
/**
* This extracts a boolean parameter for the named value. If the
* named parameter does not exist this will return false otherwise
* the value is evaluated. If it is either <code>true</code> or
* <code>false</code> then those boolean values are returned.
*
* @param name the name of the parameter value to retrieve
*
* @return this returns the named parameter value as an float
*/
public boolean getBoolean(Object name) {
Boolean flag = Boolean.FALSE;
String value = get(name);
if(value != null) {
flag = Boolean.valueOf(value);
}
return flag.booleanValue();
}
/**
* This initializes the parser so that it can be used several
* times. This clears any previous parameters extracted. This
* ensures that when the next <code>parse(String)</code> is
* invoked the status of the <code>Query</code> is empty.
*/
protected void init(){
all.clear();
map.clear();
name.len = 0;
value.len = 0;
off = 0;
}
/**
* This performs the actual parsing of the parameter text. The
* parameters parsed from this are taken as "name=value" pairs.
* Multiple pairs within the text are separated by an "&".
* This will parse and insert all parameters into a hashtable.
*/
protected void parse() {
param();
while(skip("&")){
param();
}
}
/**
* This method adds the name and value to a map so that the next
* name and value can be collected. The name and value are added
* to the map as string objects. Once added to the map the
* <code>Token</code> objects are set to have zero length so they
* can be reused to collect further values. This will add the
* values to the map as an array of type string. This is done so
* that if there are multiple values that they can be stored.
*/
private void insert(){
if(name.len > 0){
insert(name,value);
}
name.len = 0;
value.len = 0;
}
/**
* This will add the given name and value to the parameters map.
* If any previous value of the given name has been inserted
* into the map then this will overwrite that value. This is
* used to ensure that the string value is inserted to the map.
*
* @param name this is the name of the value to be inserted
* @param value this is the value of a that is to be inserted
*/
private void insert(Token name, Token value){
put(name.toString(), value.toString());
}
/**
* This is an expression that is defined by RFC 2396 it is used
* in the definition of a segment expression. This is basically
* a list of chars with escaped sequences.
* <p>
* This method has to ensure that no escaped chars go unchecked.
* This ensures that the read offset does not go out of bounds
* and consequently throw an out of bounds exception.
*/
private void param() {
name();
if(skip("=")){ /* in case of error*/
value();
}
insert();
}
/**
* This extracts the name of the parameter from the character
* buffer. The name of a parameter is defined as a set of
* chars including escape sequences. This will extract the
* parameter name and buffer the chars. The name ends when a
* equals character, "=", is encountered.
*/
private void name(){
int mark = off;
int pos = off;
while(off < count){
if(buf[off]=='%'){ /* escaped */
escape();
}else if(buf[off]=='=') {
break;
}else if(buf[off]=='+'){
buf[off] = ' ';
}
buf[pos++] = buf[off++];
}
name.len = pos - mark;
name.off = mark;
}
/**
* This extracts a parameter value from a path segment. The
* parameter value consists of a sequence of chars and some
* escape sequences. The parameter value is buffered so that
* the name and values can be paired. The end of the value
* is determined as the end of the buffer or an ampersand.
*/
private void value(){
int mark = off;
int pos = off;
while(off < count){
if(buf[off]=='%'){ /* escaped */
escape();
}else if(buf[off]=='+'){
buf[off] = ' ';
}else if(buf[off]=='&'){
break;
}
buf[pos++] = buf[off++];
}
value.len = pos - mark;
value.off = mark;
}
/**
* This converts an encountered escaped sequence, that is all
* embedded hexidecimal characters into a native UCS character
* value. This does not take any characters from the stream it
* just prepares the buffer with the correct byte. The escaped
* sequence within the URI will be interpreded as UTF-8.
* <p>
* This will leave the next character to read from the buffer
* as the character encoded from the URI. If there is a fully
* valid escaped sequence, that is <code>"%" HEX HEX</code>.
* This decodes the escaped sequence using UTF-8 encoding, all
* encoded sequences should be in UCS-2 to fit in a Java char.
*/
private void escape() {
int peek = peek(off);
if(!unicode(peek)) {
binary(peek);
}
}
/**
* This method determines, using a peek character, whether the
* sequence of escaped characters within the URI is binary data.
* If the data within the escaped sequence is binary then this
* will ensure that the next character read from the URI is the
* binary octet. This is used strictly for backward compatible
* parsing of URI strings, binary data should never appear.
*
* @param peek this is the first escaped character from the URI
*
* @return currently this implementation always returns true
*/
private boolean binary(int peek) {
if(off + 2 < count) {
off += 2;
buf[off] =bits(peek);
}
return true;
}
/**
* This method determines, using a peek character, whether the
* sequence of escaped characters within the URI is in UTF-8. If
* a UTF-8 character can be successfully decoded from the URI it
* will be the next character read from the buffer. This can
* check for both UCS-2 and UCS-4 characters. However, because
* the Java <code>char</code> can only hold UCS-2, the UCS-4
* characters will have only the low order octets stored.
* <p>
* The WWW Consortium provides a reference implementation of a
* UTF-8 decoding for Java, in this the low order octets in the
* UCS-4 sequence are used for the character. So, in the
* absence of a defined behaviour, the W3C behaviour is assumed.
*
* @param peek this is the first escaped character from the URI
*
* @return this returns true if a UTF-8 character is decoded
*/
private boolean unicode(int peek) {
if((peek & 0x80) == 0x00){
return unicode(peek, 0);
}
if((peek & 0xe0) == 0xc0){
return unicode(peek & 0x1f, 1);
}
if((peek & 0xf0) == 0xe0){
return unicode(peek & 0x0f, 2);
}
if((peek & 0xf8) == 0xf0){
return unicode(peek & 0x07, 3);
}
if((peek & 0xfc) == 0xf8){
return unicode(peek & 0x03, 4);
}
if((peek & 0xfe) == 0xfc){
return unicode(peek & 0x01, 5);
}
return false;
}
/**
* This method will decode the specified amount of escaped
* characters from the URI and convert them into a single Java
* UCS-2 character. If there are not enough characters within
* the URI then this will return false and leave the URI alone.
* <p>
* The number of characters left is determined from the first
* UTF-8 octet, as specified in RFC 2279, and because this is
* a URI there must that number of <code>"%" HEX HEX</code>
* sequences left. If successful the next character read is
* the UTF-8 sequence decoded into a native UCS-2 character.
*
* @param peek contains the bits read from the first UTF octet
* @param more this specifies the number of UTF octets left
*
* @return this returns true if a UTF-8 character is decoded
*/
private boolean unicode(int peek, int more) {
if(off + more * 3 >= count) {
return false;
}
return unicode(peek,more,off);
}
/**
* This will decode the specified amount of trailing UTF-8 bits
* from the URI. The trailing bits are those following the first
* UTF-8 octet, which specifies the length, in octets, of the
* sequence. The trailing octets are of the form 10xxxxxx, for
* each of these octets only the last six bits are valid UCS
* bits. So a conversion is basically an accumulation of these.
* <p>
* If at any point during the accumulation of the UTF-8 bits
* there is a parsing error, then parsing is aborted an false
* is returned, as a result the URI is left unchanged.
*
* @param peek bytes that have been accumulated fron the URI
* @param more this specifies the number of UTF octets left
* @param pos this specifies the position the parsing begins
*
* @return this returns true if a UTF-8 character is decoded
*/
private boolean unicode(int peek, int more, int pos) {
while(more-- > 0) {
if(buf[pos] == '%'){
int next = pos + 3;
int hex = peek(next);
if((hex & 0xc0) == 0x80){
peek = (peek<<6)|(hex&0x3f);
pos = next;
continue;
}
}
return false;
}
if(pos + 2 < count) {
off = pos + 2;
buf[off]= bits(peek);
}
return true;
}
/**
* Defines behaviour for UCS-2 versus UCS-4 conversion from four
* octets. The UTF-8 encoding scheme enables UCS-4 characters to
* be encoded and decodeded. However, Java supports the 16-bit
* UCS-2 character set, and so the 32-bit UCS-4 character set is
* not compatable. This basically decides what to do with UCS-4.
*
* @param data up to four octets to be converted to UCS-2 format
*
* @return this returns a native UCS-2 character from the int
*/
private char bits(int data) {
return (char)data;
}
/**
* This will return the escape expression specified from the URI
* as an integer value of the hexadecimal sequence. This does
* not make any changes to the buffer it simply checks to see if
* the characters at the position specified are an escaped set
* characters of the form <code>"%" HEX HEX</code>, if so, then
* it will convert that hexadecimal string in to an integer
* value, or -1 if the expression is not hexadecimal.
*
* @param pos this is the position the expression starts from
*
* @return the integer value of the hexadecimal expression
*/
private int peek(int pos) {
if(buf[pos] == '%'){
if(count <= pos + 2) {
return -1;
}
char high = buf[pos + 1];
char low = buf[pos + 2];
return convert(high, low);
}
return -1;
}
/**
* This will convert the two hexidecimal characters to a real
* integer value, which is returned. This requires characters
* within the range of 'A' to 'F' and 'a' to 'f', and also
* the digits '0' to '9'. The characters encoded using the
* ISO-8859-1 encoding scheme, if the characters are not with
* in the range specified then this returns -1.
*
* @param high this is the high four bits within the integer
* @param low this is the low four bits within the integer
*
* @return this returns the indeger value of the conversion
*/
private int convert(char high, char low) {
int hex = 0x00;
if(hex(high) && hex(low)){
if('A' <= high && high <= 'F'){
high -= 'A' - 'a';
}
if(high >= 'a') {
hex ^= (high-'a')+10;
} else {
hex ^= high -'0';
}
hex <<= 4;
if('A' <= low && low <= 'F') {
low -= 'A' - 'a';
}
if(low >= 'a') {
hex ^= (low-'a')+10;
} else {
hex ^= low-'0';
}
return hex;
}
return -1;
}
/**
* This is used to determine whether a char is a hexadecimal
* <code>char</code> or not. A hexadecimal character is considered
* to be a character within the range of <code>0 - 9</code> and
* between <code>a - f</code> and <code>A - F</code>. This will
* return <code>true</code> if the character is in this range.
*
* @param ch this is the character which is to be determined here
*
* @return true if the character given has a hexadecimal value
*/
private boolean hex(char ch) {
if(ch >= '0' && ch <= '9') {
return true;
} else if(ch >='a' && ch <= 'f') {
return true;
} else if(ch >= 'A' && ch <= 'F') {
return true;
}
return false;
}
/**
* This <code>encode</code> method will escape the text that
* is provided. This is used to that the parameter pairs can
* be encoded in such a way that it can be transferred over
* HTTP/1.1 using the ISO-8859-1 character set.
*
* @param text this is the text that is to be escaped
*
* @return the text with % HEX HEX UTF-8 escape sequences
*/
private String encode(String text) {
try {
return URLEncoder.encode(text, "UTF-8");
}catch(Exception e){
return text;
}
}
/**
* This <code>encode</code> method will escape the name=value
* pair provided using the UTF-8 character set. This method
* will ensure that the parameters are encoded in such a way
* that they can be transferred via HTTP in ISO-8859-1.
*
* @param name this is the name of that is to be escaped
* @param value this is the value that is to be escaped
*
* @return the pair with % HEX HEX UTF-8 escape sequences
*/
private String encode(String name, String value) {
return encode(name) + "=" + encode(value);
}
/**
* This <code>toString</code> method is used to compose an string
* in the <code>application/x-www-form-urlencoded</code> MIME type.
* This will encode the tokens specified in the <code>Set</code>.
* Each name=value pair acquired is converted into a UTF-8 escape
* sequence so that the parameters can be sent in the IS0-8859-1
* format required via the HTTP/1.1 specification RFC 2616.
*
* @param set this is the set of parameters to be encoded
*
* @return returns a HTTP parameter encoding for the pairs
*/
public String toString(Set<?> set) {
Object[] list = set.toArray();
String text = "";
for(int i = 0; i < list.length; i++){
String name = list[i].toString();
String value = get(name);
if(i > 0) {
text += "&";
}
text += encode(name, value);
}
return text;
}
/**
* This <code>toString</code> method is used to compose an string
* in the <code>application/x-www-form-urlencoded</code> MIME type.
* This will iterate over all tokens that have been added to this
* object, either during parsing, or during use of the instance.
* Each name=value pair acquired is converted into a UTF-8 escape
* sequence so that the parameters can be sent in the IS0-8859-1
* format required via the HTTP/1.1 specification RFC 2616.
*
* @return returns a HTTP parameter encoding for the pairs
*/
public String toString() {
Set<String> set = map.keySet();
if(map.size() > 0) {
return toString(set);
}
return "";
}
/**
 * Marks a region within the parse buffer that forms a valid
 * token, either the name of a parameter or its value. Keeping
 * only an offset and a length avoids the memory allocation a
 * <code>ParseBuffer</code> would need to copy the same data.
 */
private class Token {
   /** Number of characters that make up this token. */
   public int len;
   /** Start offset of this token within the buffer. */
   public int off;
   /**
    * Converts this <code>Token</code> into a usable value by
    * materialising it as a <code>String</code>. A token whose
    * length is zero or negative yields the empty string.
    *
    * @return this returns a value representing the token
    */
   public String toString() {
      return len <= 0 ? "" : new String(buf, off, len);
   }
}
}
| apache-2.0 |
kevinearls/camel | components/camel-google-calendar/src/main/java/org/apache/camel/component/google/calendar/GoogleCalendarConsumer.java | 2262 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.google.calendar;
import java.util.Map;
import java.util.Map.Entry;
import com.google.api.client.googleapis.services.AbstractGoogleClientRequest;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.TypeConverter;
import org.apache.camel.component.google.calendar.internal.GoogleCalendarApiName;
import org.apache.camel.support.IntrospectionSupport;
import org.apache.camel.support.component.AbstractApiConsumer;
/**
 * The GoogleCalendar consumer.
 * <p>
 * Invokes the configured Google Calendar API method, copies each exchange
 * property onto the resulting client request (e.g. paging or filter options)
 * and then executes it, returning the API response object.
 */
public class GoogleCalendarConsumer extends AbstractApiConsumer<GoogleCalendarApiName, GoogleCalendarConfiguration> {
    public GoogleCalendarConsumer(GoogleCalendarEndpoint endpoint, Processor processor) {
        super(endpoint, processor);
    }
    @Override
    protected Object doInvokeMethod(Map<String, Object> properties) throws RuntimeCamelException {
        // Wildcard type parameter instead of the raw type keeps the compiler's
        // generics checks; the concrete payload type is not needed here.
        AbstractGoogleClientRequest<?> request = (AbstractGoogleClientRequest<?>) super.doInvokeMethod(properties);
        try {
            TypeConverter typeConverter = getEndpoint().getCamelContext().getTypeConverter();
            // Apply every supplied property to the request before executing it.
            for (Entry<String, Object> p : properties.entrySet()) {
                IntrospectionSupport.setProperty(typeConverter, request, p.getKey(), p.getValue());
            }
            return request.execute();
        } catch (Exception e) {
            // Wrap and preserve the original failure as the cause.
            throw new RuntimeCamelException(e);
        }
    }
}
| apache-2.0 |
apache/jena | jena-jdbc/jena-jdbc-driver-mem/src/test/java/org/apache/jena/jdbc/mem/connections/DebugMemConnection.java | 1923 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.jdbc.mem.connections;
import java.sql.SQLException;
import org.apache.jena.jdbc.JdbcCompatibility;
import org.apache.jena.jdbc.connections.JenaConnection;
import org.apache.jena.query.Dataset ;
import org.apache.jena.query.DatasetFactory ;
/**
 * A test only variant of {@link MemConnection} which allows the dataset to be
 * changed on the fly, so tests can swap the underlying data without having to
 * open a new JDBC connection.
 */
public class DebugMemConnection extends MemConnection {
    /**
     * Creates a debug dataset connection backed by a fresh, empty in-memory
     * dataset.
     * @throws SQLException if the underlying connection cannot be created
     */
    public DebugMemConnection() throws SQLException {
        this(DatasetFactory.create());
    }
    /**
     * Creates a debug dataset connection over the given dataset using the
     * default holdability, auto-commit, isolation and JDBC compatibility
     * settings.
     * @param ds Dataset
     * @throws SQLException if the underlying connection cannot be created
     */
    public DebugMemConnection(Dataset ds) throws SQLException {
        super(ds, JenaConnection.DEFAULT_HOLDABILITY, JenaConnection.DEFAULT_AUTO_COMMIT, JenaConnection.DEFAULT_ISOLATION_LEVEL, JdbcCompatibility.DEFAULT);
    }
    /**
     * Sets the Jena dataset in use; subsequent operations on this connection
     * see the new dataset.
     * @param ds Dataset
     */
    public void setJenaDataset(Dataset ds) {
        this.ds = ds;
    }
}
| apache-2.0 |
jsrudani/HadoopHDFSProject | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java | 10964 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.web;
import java.io.IOException;
import java.net.URISyntaxException;
import java.security.PrivilegedExceptionAction;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.TestDFSClientRetries;
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.log4j.Level;
import org.junit.Assert;
import org.junit.Test;
/**
 * Test WebHDFS: large-file write/read (seek and pread), chunked directory
 * listing, client retry across a namenode restart, numerical user names and
 * the default-enabled setting, all against an in-process MiniDFSCluster.
 */
public class TestWebHDFS {
  static final Log LOG = LogFactory.getLog(TestWebHDFS.class);
  static final Random RANDOM = new Random();
  // Reference point for the elapsed-minutes figure printed by Ticker.tick().
  static final long systemStartTime = System.nanoTime();
  /** A timer for measuring performance. */
  static class Ticker {
    final String name;
    final long startTime = System.nanoTime();
    private long previousTick = startTime;
    Ticker(final String name, String format, Object... args) {
      this.name = name;
      LOG.info(String.format("\n\n%s START: %s\n",
          name, String.format(format, args)));
    }
    // Logs progress, throttled to at most one line every ten seconds
    // (10_000_000_000 ns) so large transfers do not flood the log.
    void tick(final long nBytes, String format, Object... args) {
      final long now = System.nanoTime();
      if (now - previousTick > 10000000000L) {
        previousTick = now;
        final double mintues = (now - systemStartTime)/60000000000.0;
        LOG.info(String.format("\n\n%s %.2f min) %s %s\n", name, mintues,
            String.format(format, args), toMpsString(nBytes, now)));
      }
    }
    // Logs total duration and throughput for the measured operation.
    void end(final long nBytes) {
      final long now = System.nanoTime();
      final double seconds = (now - startTime)/1000000000.0;
      LOG.info(String.format("\n\n%s END: duration=%.2fs %s\n",
          name, seconds, toMpsString(nBytes, now)));
    }
    // Formats throughput since startTime as "[nBytes=..MB, speed=..MB/s]".
    String toMpsString(final long nBytes, final long now) {
      final double mb = nBytes/(double)(1<<20);
      final double mps = mb*1000000000.0/(now - startTime);
      return String.format("[nBytes=%.2fMB, speed=%.2fMB/s]", mb, mps);
    }
  }
  @Test(timeout=300000)
  public void testLargeFile() throws Exception {
    largeFileTest(200L << 20); //200MB file length
  }
  /** Test read and write large files. */
  static void largeFileTest(final long fileLength) throws Exception {
    final Configuration conf = WebHdfsTestUtil.createConf();
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(3)
        .build();
    try {
      cluster.waitActive();
      final FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME);
      final Path dir = new Path("/test/largeFile");
      Assert.assertTrue(fs.mkdirs(dir));
      // 1MB of random data written repeatedly; "expected" holds two back-to-back
      // copies so a read starting at any offset can be checked against
      // expected[offset % data.length .. ] without wrap-around logic.
      final byte[] data = new byte[1 << 20];
      RANDOM.nextBytes(data);
      final byte[] expected = new byte[2 * data.length];
      System.arraycopy(data, 0, expected, 0, data.length);
      System.arraycopy(data, 0, expected, data.length, data.length);
      final Path p = new Path(dir, "file");
      final Ticker t = new Ticker("WRITE", "fileLength=" + fileLength);
      final FSDataOutputStream out = fs.create(p);
      try {
        long remaining = fileLength;
        for(; remaining > 0;) {
          t.tick(fileLength - remaining, "remaining=%d", remaining);
          final int n = (int)Math.min(remaining, data.length);
          out.write(data, 0, n);
          remaining -= n;
        }
      } finally {
        out.close();
      }
      t.end(fileLength);
      Assert.assertEquals(fileLength, fs.getFileStatus(p).getLen());
      // Verify reads starting both near the end (largeOffset) and near the
      // start (smallOffset) of the file, via seek+read and positional read.
      final long smallOffset = RANDOM.nextInt(1 << 20) + (1 << 20);
      final long largeOffset = fileLength - smallOffset;
      final byte[] buf = new byte[data.length];
      verifySeek(fs, p, largeOffset, fileLength, buf, expected);
      verifySeek(fs, p, smallOffset, fileLength, buf, expected);
      verifyPread(fs, p, largeOffset, fileLength, buf, expected);
    } finally {
      cluster.shutdown();
    }
  }
  // Spot-checks roughly 1% of read chunks against the expected pattern;
  // verifying every byte of a 200MB file would dominate the test's runtime.
  static void checkData(long offset, long remaining, int n,
      byte[] actual, byte[] expected) {
    if (RANDOM.nextInt(100) == 0) {
      int j = (int)(offset % actual.length);
      for(int i = 0; i < n; i++) {
        if (expected[j] != actual[i]) {
          Assert.fail("expected[" + j + "]=" + expected[j]
              + " != actual[" + i + "]=" + actual[i]
              + ", offset=" + offset + ", remaining=" + remaining + ", n=" + n);
        }
        j++;
      }
    }
  }
  /** test seek */
  static void verifySeek(FileSystem fs, Path p, long offset, long length,
      byte[] buf, byte[] expected) throws IOException {
    long remaining = length - offset;
    long checked = 0;
    LOG.info("XXX SEEK: offset=" + offset + ", remaining=" + remaining);
    final Ticker t = new Ticker("SEEK", "offset=%d, remaining=%d",
        offset, remaining);
    final FSDataInputStream in = fs.open(p, 64 << 10);
    in.seek(offset);
    for(; remaining > 0; ) {
      t.tick(checked, "offset=%d, remaining=%d", offset, remaining);
      final int n = (int)Math.min(remaining, buf.length);
      in.readFully(buf, 0, n);
      checkData(offset, remaining, n, buf, expected);
      offset += n;
      remaining -= n;
      checked += n;
    }
    in.close();
    t.end(checked);
  }
  // Like verifySeek, but exercises positional reads (pread) instead of
  // seek followed by sequential reads.
  static void verifyPread(FileSystem fs, Path p, long offset, long length,
      byte[] buf, byte[] expected) throws IOException {
    long remaining = length - offset;
    long checked = 0;
    LOG.info("XXX PREAD: offset=" + offset + ", remaining=" + remaining);
    final Ticker t = new Ticker("PREAD", "offset=%d, remaining=%d",
        offset, remaining);
    final FSDataInputStream in = fs.open(p, 64 << 10);
    for(; remaining > 0; ) {
      t.tick(checked, "offset=%d, remaining=%d", offset, remaining);
      final int n = (int)Math.min(remaining, buf.length);
      in.readFully(offset, buf, 0, n);
      checkData(offset, remaining, n, buf, expected);
      offset += n;
      remaining -= n;
      checked += n;
    }
    in.close();
    t.end(checked);
  }
  /** Test client retry with namenode restarting. */
  @Test(timeout=300000)
  public void testNamenodeRestart() throws Exception {
    ((Log4JLogger)NamenodeWebHdfsMethods.LOG).getLogger().setLevel(Level.ALL);
    final Configuration conf = WebHdfsTestUtil.createConf();
    TestDFSClientRetries.namenodeRestartTest(conf, true);
  }
  @Test(timeout=300000)
  public void testLargeDirectory() throws Exception {
    final Configuration conf = WebHdfsTestUtil.createConf();
    final int listLimit = 2;
    // force small chunking of directory listing
    conf.setInt(DFSConfigKeys.DFS_LIST_LIMIT, listLimit);
    // force paths to be only owner-accessible to ensure ugi isn't changing
    // during listStatus
    FsPermission.setUMask(conf, new FsPermission((short)0077));
    final MiniDFSCluster cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
    try {
      cluster.waitActive();
      WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME)
          .setPermission(new Path("/"),
              new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
      // trick the NN into not believing it's not the superuser so we can
      // tell if the correct user is used by listStatus
      UserGroupInformation.setLoginUser(
          UserGroupInformation.createUserForTesting(
              "not-superuser", new String[]{"not-supergroup"}));
      UserGroupInformation.createUserForTesting("me", new String[]{"my-group"})
        .doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws IOException, URISyntaxException {
            FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
                WebHdfsFileSystem.SCHEME);
            Path d = new Path("/my-dir");
            Assert.assertTrue(fs.mkdirs(d));
            // create enough files that the listing requires several chunks
            for (int i=0; i < listLimit*3; i++) {
              Path p = new Path(d, "file-"+i);
              Assert.assertTrue(fs.createNewFile(p));
            }
            Assert.assertEquals(listLimit*3, fs.listStatus(d).length);
            return null;
          }
        });
    } finally {
      cluster.shutdown();
    }
  }
  @Test(timeout=300000)
  public void testNumericalUserName() throws Exception {
    final Configuration conf = WebHdfsTestUtil.createConf();
    // relax the user name pattern so names may start with a digit
    conf.set(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, "^[A-Za-z0-9_][A-Za-z0-9._-]*[$]?$");
    final MiniDFSCluster cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    try {
      cluster.waitActive();
      WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME)
          .setPermission(new Path("/"),
              new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
      UserGroupInformation.createUserForTesting("123", new String[]{"my-group"})
        .doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws IOException, URISyntaxException {
            FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
                WebHdfsFileSystem.SCHEME);
            Path d = new Path("/my-dir");
            Assert.assertTrue(fs.mkdirs(d));
            return null;
          }
        });
    } finally {
      cluster.shutdown();
    }
  }
  /**
   * WebHdfs should be enabled by default after HDFS-5532
   *
   * @throws Exception
   */
  @Test
  public void testWebHdfsEnabledByDefault() throws Exception {
    Configuration conf = new HdfsConfiguration();
    Assert.assertTrue(conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
        false));
  }
}
| apache-2.0 |
nknize/elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoShapeIntegrationIT.java | 12912 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.geo;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.builders.PointBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.test.ESIntegTestCase;
import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
/**
 * Integration tests for geo_shape fields: orientation persistence across a
 * full cluster restart, ignore_malformed handling, rejection of strategy
 * mapping updates, routed indexed-shape queries, and dateline-crossing
 * polygons on both BKD and prefix-tree (quadtree) backed mappings.
 */
public class GeoShapeIntegrationIT extends ESIntegTestCase {
    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
            // Check that only geo-shape queries on legacy PrefixTree based
            // geo shapes are disallowed.
            .put("search.allow_expensive_queries", false)
            .put(super.nodeSettings(nodeOrdinal))
            .build();
    }
    /**
     * Test that orientation parameter correctly persists across cluster restart
     */
    public void testOrientationPersistence() throws Exception {
        String idxName = "orientation";
        // "left" orientation mapping for the first index
        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
            .startObject("properties").startObject("location")
            .field("type", "geo_shape")
            .field("orientation", "left")
            .endObject()
            .endObject().endObject());
        // create index
        assertAcked(prepareCreate(idxName).setMapping(mapping));
        // "right" orientation mapping for the second index
        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
            .startObject("properties").startObject("location")
            .field("type", "geo_shape")
            .field("orientation", "right")
            .endObject()
            .endObject().endObject());
        assertAcked(prepareCreate(idxName+"2").setMapping(mapping));
        ensureGreen(idxName, idxName+"2");
        // restart every node; the mapping must survive the restart
        internalCluster().fullRestart();
        ensureGreen(idxName, idxName+"2");
        // left orientation test
        IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName));
        IndexService indexService = indicesService.indexService(resolveIndex(idxName));
        MappedFieldType fieldType = indexService.mapperService().fieldType("location");
        assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class));
        GeoShapeFieldMapper.GeoShapeFieldType gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType;
        ShapeBuilder.Orientation orientation = gsfm.orientation();
        // CLOCKWISE, LEFT and CW are aliases for the same orientation
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW));
        // right orientation test
        indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2"));
        indexService = indicesService.indexService(resolveIndex((idxName+"2")));
        fieldType = indexService.mapperService().fieldType("location");
        assertThat(fieldType, instanceOf(GeoShapeFieldMapper.GeoShapeFieldType.class));
        gsfm = (GeoShapeFieldMapper.GeoShapeFieldType)fieldType;
        orientation = gsfm.orientation();
        // COUNTER_CLOCKWISE, RIGHT and CCW are aliases for the same orientation
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT));
        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW));
    }
    /**
     * Test that ignore_malformed on GeoShapeFieldMapper does not fail the entire document
     */
    public void testIgnoreMalformed() throws Exception {
        // create index
        assertAcked(client().admin().indices().prepareCreate("test")
            .setMapping("shape", "type=geo_shape,ignore_malformed=true").get());
        ensureGreen();
        // test self crossing ccw poly not crossing dateline
        String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(-177.0).value(15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .endArray()
            .endObject());
        indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape",
            polygonGeoJson));
        // document must still be indexed despite the malformed shape
        SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
    }
    // Changing the indexing strategy of an existing geo_shape field must be rejected.
    public void testMappingUpdate() throws Exception {
        // create index
        assertAcked(client().admin().indices().prepareCreate("test")
            .setMapping("shape", "type=geo_shape").get());
        ensureGreen();
        String update ="{\n" +
            "  \"properties\": {\n" +
            "    \"shape\": {\n" +
            "      \"type\": \"geo_shape\",\n" +
            "      \"strategy\": \"recursive\"\n" +
            "    }\n" +
            "  }\n" +
            "}";
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().admin().indices()
            .preparePutMapping("test")
            .setSource(update, XContentType.JSON).get());
        assertThat(e.getMessage(), containsString("mapper [shape] of type [geo_shape] cannot change strategy from [BKD] to [recursive]"));
    }
    /**
     * Test that the indexed shape routing can be provided if it is required
     */
    public void testIndexShapeRouting() throws Exception {
        String mapping = "{\"_doc\":{\n" +
            "    \"_routing\": {\n" +
            "      \"required\": true\n" +
            "    },\n" +
            "    \"properties\": {\n" +
            "      \"shape\": {\n" +
            "        \"type\": \"geo_shape\"\n" +
            "      }\n" +
            "    }\n" +
            "  }}";
        // create index
        assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping).get());
        ensureGreen();
        String source = "{\n" +
            "    \"shape\" : {\n" +
            "        \"type\" : \"bbox\",\n" +
            "        \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n" +
            "    }\n" +
            "}";
        indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC"));
        // querying by indexed shape must pass the same routing value used at index time
        SearchResponse searchResponse = client().prepareSearch("test").setQuery(
            geoShapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC")
        ).get();
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
    }
    // Dateline-crossing polygon queries: the quadtree (prefix-tree) index requires
    // allow_expensive_queries=true, the BKD-backed "vector" index does not.
    public void testIndexPolygonDateLine() throws Exception {
        String mappingVector = "{\n" +
            "    \"properties\": {\n" +
            "      \"shape\": {\n" +
            "        \"type\": \"geo_shape\"\n" +
            "      }\n" +
            "    }\n" +
            "  }";
        String mappingQuad = "{\n" +
            "    \"properties\": {\n" +
            "      \"shape\": {\n" +
            "        \"type\": \"geo_shape\",\n" +
            "        \"tree\": \"quadtree\"\n" +
            "      }\n" +
            "    }\n" +
            "  }";
        // create index
        assertAcked(client().admin().indices().prepareCreate("vector").setMapping(mappingVector).get());
        ensureGreen();
        assertAcked(client().admin().indices().prepareCreate("quad").setMapping(mappingQuad).get());
        ensureGreen();
        // polygon spanning the dateline between 179 and -179 degrees longitude
        String source = "{\n" +
            "    \"shape\" : \"POLYGON((179 0, -179 0, -179 2, 179 2, 179 0))\""+
            "}";
        indexRandom(true, client().prepareIndex("quad").setId("0").setSource(source, XContentType.JSON));
        indexRandom(true, client().prepareIndex("vector").setId("0").setSource(source, XContentType.JSON));
        try {
            // temporarily allow expensive queries so the prefix-tree index can be queried
            ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest();
            updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", true));
            assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet());
            SearchResponse searchResponse = client().prepareSearch("quad").setQuery(
                geoShapeQuery("shape", new PointBuilder(-179.75, 1))
            ).get();
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
            searchResponse = client().prepareSearch("quad").setQuery(
                geoShapeQuery("shape", new PointBuilder(90, 1))
            ).get();
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
            searchResponse = client().prepareSearch("quad").setQuery(
                geoShapeQuery("shape", new PointBuilder(-180, 1))
            ).get();
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
            searchResponse = client().prepareSearch("quad").setQuery(
                geoShapeQuery("shape", new PointBuilder(180, 1))
            ).get();
            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
        } finally {
            // restore the cluster-wide default regardless of assertion outcome
            ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest();
            updateSettingsRequest.persistentSettings(Settings.builder().put("search.allow_expensive_queries", (String) null));
            assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet());
        }
        // BKD-backed index works without the expensive-queries setting
        SearchResponse searchResponse = client().prepareSearch("vector").setQuery(
            geoShapeQuery("shape", new PointBuilder(90, 1))
        ).get();
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
        searchResponse = client().prepareSearch("vector").setQuery(
            geoShapeQuery("shape", new PointBuilder(-179.75, 1))
        ).get();
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
        searchResponse = client().prepareSearch("vector").setQuery(
            geoShapeQuery("shape", new PointBuilder(-180, 1))
        ).get();
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
        searchResponse = client().prepareSearch("vector").setQuery(
            geoShapeQuery("shape", new PointBuilder(180, 1))
        ).get();
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
    }
    // Resolves the name of the node hosting shard 0 of the given index.
    private String findNodeName(String index) {
        ClusterState state = client().admin().cluster().prepareState().get().getState();
        IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0);
        String nodeId = shard.assignedShards().get(0).currentNodeId();
        return state.getNodes().get(nodeId).getName();
    }
}
| apache-2.0 |
mra419/code-examples-and-poc | additional-test-resources-in-idea/src/funcTest/java/func/FuncLibraryTest.java | 337 | package func;
import base.Library;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
/** Functional smoke test for {@link Library}. */
public class FuncLibraryTest {
    @Test
    public void testSomeLibraryMethod() {
        final Library library = new Library();
        assertTrue("someLibraryMethod should return 'true'", library.someLibraryMethod());
    }
}
| apache-2.0 |
jexp/idea2 | plugins/cvs/javacvs-src/org/netbeans/lib/cvsclient/command/importcmd/ImportCommand.java | 11568 | /*
* Sun Public License Notice
*
* The contents of this file are subject to the Sun Public License
* Version 1.0 (the "License"). You may not use this file except in
* compliance with the License. A copy of the License is available at
* http://www.sun.com/
*
* The Original Code is NetBeans. The Initial Developer of the Original
* Code is Sun Microsystems, Inc. Portions Copyright 1997-2000 Sun
* Microsystems, Inc. All Rights Reserved.
*/
package org.netbeans.lib.cvsclient.command.importcmd;
import org.netbeans.lib.cvsclient.IClientEnvironment;
import org.netbeans.lib.cvsclient.IRequestProcessor;
import org.netbeans.lib.cvsclient.connection.AuthenticationException;
import org.netbeans.lib.cvsclient.command.*;
import org.netbeans.lib.cvsclient.event.ICvsListenerRegistry;
import org.netbeans.lib.cvsclient.event.IEventSender;
import org.netbeans.lib.cvsclient.file.DirectoryObject;
import org.netbeans.lib.cvsclient.file.FileObject;
import org.netbeans.lib.cvsclient.progress.IProgressViewer;
import org.netbeans.lib.cvsclient.progress.sending.DummyRequestsProgressHandler;
import org.netbeans.lib.cvsclient.request.CommandRequest;
import org.netbeans.lib.cvsclient.request.DirectoryRequest;
import org.netbeans.lib.cvsclient.request.Requests;
import org.netbeans.lib.cvsclient.util.BugLog;
import org.netbeans.lib.cvsclient.util.SimpleStringPattern;
import org.jetbrains.annotations.NonNls;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* The import command imports local directory structures into the repository.
*
* @author Thomas Singer
*/
public final class ImportCommand extends Command {
// Constants ==============================================================
@NonNls public static final String EXAM_DIR = "server: Importing ";
// Fields =================================================================
private final Map wrapperMap = new HashMap();
private String logMessage;
private String module;
private String releaseTag;
private String vendorBranch;
private String vendorTag;
private KeywordSubstitution keywordSubstitutionOption;
// Setup ==================================================================
    /**
     * Creates an import command with no options set. Callers must configure
     * at least the module, release tag and vendor tag before execution.
     */
    public ImportCommand() {
    }
    /**
     * Sends the import to the server: vendor branch, log message, keyword
     * substitution mode, wrapper settings, module/vendor/release tags and one
     * request per local file, then processes the server's response.
     * Module, release tag and vendor tag must have been set beforehand.
     *
     * @return true if the server processed the requests successfully
     */
    public boolean execute(IRequestProcessor requestProcessor, IEventSender eventManager, ICvsListenerRegistry listenerRegistry, IClientEnvironment clientEnvironment, IProgressViewer progressViewer) throws CommandException,
           AuthenticationException {
        // check necessary fields
        BugLog.getInstance().assertNotNull(getModule());
        BugLog.getInstance().assertNotNull(getReleaseTag());
        BugLog.getInstance().assertNotNull(getVendorTag());
        final Requests requests;
        try {
            requests = new Requests(CommandRequest.IMPORT, clientEnvironment);
            requests.addArgumentRequest(getVendorBranchNotNull(), "-b");
            requests.addMessageRequests(CommandUtils.getMessageNotNull(getLogMessage()));
            requests.addArgumentRequest(getKeywordSubstitutionOption(), "-k");
            addWrapperRequests(requests, this.wrapperMap);
            requests.addArgumentRequest(getModule());
            requests.addArgumentRequest(getVendorTag());
            requests.addArgumentRequest(getReleaseTag());
            final File rootDirectory = clientEnvironment.getCvsFileSystem().getLocalFileSystem().getRootDirectory();
            addFileRequests(rootDirectory, requests, requestProcessor, clientEnvironment);
            // This is necessary when importing a directory structure with CVS directories.
            // If requests.addLocalPathDirectoryRequest(); would be used, the repository path
            // would be used from the CVS folders.
            requests.addRequest(new DirectoryRequest(".", getRepositoryRoot(clientEnvironment)));
        }
        catch (IOException ex) {
            throw new IOCommandException(ex);
        }
        return requestProcessor.processRequests(requests, new DummyRequestsProgressHandler());
    }
public void resetCvsCommand() {
super.resetCvsCommand();
setLogMessage(null);
setModule(null);
setReleaseTag(null);
setVendorTag(null);
setVendorBranch(null);
setKeywordSubstitutionOption(null);
if (wrapperMap != null) {
wrapperMap.clear();
}
}
public String getCvsCommandLine() {
@NonNls final StringBuffer cvsArguments = new StringBuffer("import ");
cvsArguments.append(getCvsArguments());
cvsArguments.append(' ');
cvsArguments.append(getModule());
cvsArguments.append(' ');
cvsArguments.append(getVendorTag());
cvsArguments.append(' ');
cvsArguments.append(getReleaseTag());
return cvsArguments.toString();
}
// Accessing ==============================================================
    /**
     * Registers a file name pattern whose matching files should be imported
     * with the given keyword substitution mode (cvs import -W).
     *
     * @param filenamePattern wildcard pattern, e.g. {@code *.png}
     * @param keywordSubstitutionOptions substitution mode to apply; must not be null
     */
    public void addWrapper(String filenamePattern, KeywordSubstitution keywordSubstitutionOptions) {
        BugLog.getInstance().assertNotNull(keywordSubstitutionOptions);
        wrapperMap.put(new SimpleStringPattern(filenamePattern), keywordSubstitutionOptions);
    }
    /** Returns the keyword substitution mode (-k), or null if unset. */
    private KeywordSubstitution getKeywordSubstitutionOption() {
        return keywordSubstitutionOption;
    }
    /** Sets the keyword substitution mode (cvs import -k); null means unset. */
    public void setKeywordSubstitutionOption(KeywordSubstitution keywordSubstitutionOption) {
        this.keywordSubstitutionOption = keywordSubstitutionOption;
    }
    /** Returns the release tag, or null if unset. */
    private String getReleaseTag() {
        return releaseTag;
    }
    /** Sets the release tag, normalized via getTrimmedString. */
    public void setReleaseTag(String releaseTag) {
        this.releaseTag = getTrimmedString(releaseTag);
    }
    /** Returns the import log message (-m), or null if unset. */
    private String getLogMessage() {
        return logMessage;
    }
    /** Sets the import log message (-m); stored as given, without trimming. */
    public void setLogMessage(String logMessage) {
        this.logMessage = logMessage;
    }
    /** Returns the repository module to import into, or null if unset. */
    private String getModule() {
        return module;
    }
    /** Sets the repository module to import into, normalized via getTrimmedString. */
    public void setModule(String module) {
        this.module = getTrimmedString(module);
    }
    /** Returns the vendor branch exactly as set (may be null). */
    private String getVendorBranch() {
        return vendorBranch;
    }
    /**
     * Returns the vendor branch.
     * If not set, the CVS default vendor branch 1.1.1 is returned.
     */
    private String getVendorBranchNotNull() {
        if (vendorBranch == null) {
            return "1.1.1";
        }
        return vendorBranch;
    }
    /**
     * Sets the vendor branch (cvs import -b), normalized via getTrimmedString.
     * If null is set, the default branch 1.1.1 is used automatically.
     */
    public void setVendorBranch(String vendorBranch) {
        this.vendorBranch = getTrimmedString(vendorBranch);
    }
    /** Returns the vendor tag, or null if unset. */
    private String getVendorTag() {
        return vendorTag;
    }
    /** Sets the vendor tag, normalized via getTrimmedString. */
    public void setVendorTag(String vendorTag) {
        this.vendorTag = getTrimmedString(vendorTag);
    }
// Utils ==================================================================
private String getCvsArguments() {
@NonNls final StringBuffer cvsArguments = new StringBuffer();
cvsArguments.append("-m \"");
cvsArguments.append(CommandUtils.getMessageNotNull(getLogMessage()));
cvsArguments.append("\" ");
if (getKeywordSubstitutionOption() != null) {
cvsArguments.append("-k");
cvsArguments.append(getKeywordSubstitutionOption().toString());
cvsArguments.append(" ");
}
if (getVendorBranch() != null) {
cvsArguments.append("-b ");
cvsArguments.append(getVendorBranch());
cvsArguments.append(" ");
}
if (wrapperMap.size() > 0) {
final Iterator it = wrapperMap.keySet().iterator();
while (it.hasNext()) {
final SimpleStringPattern pattern = (SimpleStringPattern)it.next();
final KeywordSubstitution keywordSubstitutionOptions = (KeywordSubstitution)wrapperMap.get(pattern);
cvsArguments.append("-W ");
cvsArguments.append(pattern.toString());
cvsArguments.append(" -k '");
cvsArguments.append(keywordSubstitutionOptions.toString());
cvsArguments.append("' ");
}
}
return cvsArguments.toString();
}
/**
 * Adds one "-W &lt;pattern&gt; -k '&lt;mode&gt;'" argument request per wrapper entry
 * in the specified map to the specified requestList, after overriding the
 * server's ignore list so wrapper-matched files are not skipped.
 */
private static void addWrapperRequests(Requests requests, Map wrapperMap) {
    // override the server's ignore list
    requests.addArgumentRequest("-I !");
    // Iterate over entries directly instead of keySet() + get() double lookups.
    for (final Iterator it = wrapperMap.entrySet().iterator(); it.hasNext();) {
        final Map.Entry entry = (Map.Entry)it.next();
        final SimpleStringPattern pattern = (SimpleStringPattern)entry.getKey();
        final KeywordSubstitution keywordSubstitutionOptions = (KeywordSubstitution)entry.getValue();
        // StringBuilder instead of StringBuffer: purely local, no synchronization needed.
        @NonNls final StringBuilder buffer = new StringBuilder();
        buffer.append(pattern.toString());
        buffer.append(" -k '");
        buffer.append(keywordSubstitutionOptions.toString());
        buffer.append("'");
        requests.addArgumentRequest("-W");
        requests.addArgumentRequest(buffer.toString());
    }
}
/**
 * Recursively adds requests for all non-ignored files and directories under
 * the specified directory to the specified requestList: first a directory
 * request for {@code directory} itself, then one modified-file request per
 * regular file, then a recursive call per subdirectory.
 * NOTE(review): {@code requestProcessor} is only passed through to the
 * recursive calls and never used here — presumably kept for interface
 * symmetry; confirm before removing.
 */
private void addFileRequests(File directory, Requests requests, IRequestProcessor requestProcessor, IClientEnvironment clientEnvironment) throws IOException {
final DirectoryObject directoryObject = clientEnvironment.getCvsFileSystem().getLocalFileSystem().getDirectoryObject(directory);
final String relativePath = directoryObject.toUnixPath();
String repository = getRepositoryRoot(clientEnvironment);
// "." denotes the import root itself; only subpaths extend the repository path.
if (!relativePath.equals(".")) {
repository += '/' + relativePath;
}
requests.addRequest(new DirectoryRequest(relativePath, repository));
final File[] files = directory.listFiles();
// listFiles() returns null when the directory vanished or is unreadable.
if (files == null) {
return;
}
final List subdirectories = new ArrayList();
for (int i = 0; i < files.length; i++) {
final File file = files[i];
if (file.isDirectory()) {
final DirectoryObject subDirObject = clientEnvironment.getCvsFileSystem().getLocalFileSystem().getDirectoryObject(file);
if (clientEnvironment.getIgnoreFileFilter().shouldBeIgnored(subDirObject, clientEnvironment.getCvsFileSystem())) {
continue;
}
// Subdirectories are collected and recursed into only after all files
// of this directory have been added, preserving the request order.
subdirectories.add(file);
}
else {
final FileObject fileObject = clientEnvironment.getCvsFileSystem().getLocalFileSystem().getFileObject(file);
if (clientEnvironment.getIgnoreFileFilter().shouldBeIgnored(fileObject, clientEnvironment.getCvsFileSystem())) {
continue;
}
final KeywordSubstitution keywordSubstMode = getKeywordSubstMode(file.getName());
final boolean writable = clientEnvironment.getLocalFileReader().isWritable(fileObject, clientEnvironment.getCvsFileSystem());
if (keywordSubstMode != null) {
requests.addKoptRequest(keywordSubstMode);
}
requests.addModifiedRequest(fileObject, keywordSubstMode == KeywordSubstitution.BINARY, writable);
}
}
for (Iterator it = subdirectories.iterator(); it.hasNext();) {
final File subdirectory = (File)it.next();
addFileRequests(subdirectory, requests, requestProcessor, clientEnvironment);
}
}
/**
 * Returns the used root path in the repository.
 * It's built from the repository path of the CVS root stored in the
 * client environment plus the configured module.
 */
private String getRepositoryRoot(IClientEnvironment clientEnvironment) {
return clientEnvironment.getCvsRoot().getRepositoryPath() + '/' + getModule();
}
/**
 * Returns the keyword substitution mode to use for the file with the
 * specified name: the first wrapper pattern that matches the file name
 * wins; otherwise the command-wide keyword substitution option is
 * returned (which may be null).
 *
 * (The previous javadoc claimed this returns a boolean "is binary" flag,
 * which was wrong — callers compare the result against
 * {@code KeywordSubstitution.BINARY} themselves.)
 */
private KeywordSubstitution getKeywordSubstMode(String fileName) {
    // Iterate over entries directly instead of keySet() + get() double lookups.
    for (final Iterator it = wrapperMap.entrySet().iterator(); it.hasNext();) {
        final Map.Entry entry = (Map.Entry)it.next();
        final SimpleStringPattern pattern = (SimpleStringPattern)entry.getKey();
        if (pattern.doesMatch(fileName)) {
            return (KeywordSubstitution)entry.getValue();
        }
    }
    return getKeywordSubstitutionOption();
}
}
| apache-2.0 |
supriyantomaftuh/java-client-api | src/main/java/com/marklogic/client/impl/StreamingOutputImpl.java | 1419 | /*
* Copyright 2012-2015 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.client.impl;
import java.io.IOException;
import java.io.OutputStream;
import javax.ws.rs.core.StreamingOutput;
import com.marklogic.client.util.RequestLogger;
import com.marklogic.client.io.OutputStreamSender;
class StreamingOutputImpl implements StreamingOutput {
private OutputStreamSender handle;
private RequestLogger logger;
StreamingOutputImpl(OutputStreamSender handle, RequestLogger logger) {
super();
this.handle = handle;
this.logger = logger;
}
public void write(OutputStream out) throws IOException {
if (logger != null) {
OutputStream tee = logger.getPrintStream();
long max = logger.getContentMax();
if (tee != null && max > 0) {
handle.write(new OutputStreamTee(out, tee, max));
return;
}
}
handle.write(out);
}
}
| apache-2.0 |
apache/jena | jena-geosparql/src/test/java/org/apache/jena/geosparql/spatial/property_functions/box/IntersectBoxGeomPFTest.java | 6083 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.geosparql.spatial.property_functions.box;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.jena.geosparql.configuration.GeoSPARQLConfig;
import org.apache.jena.geosparql.implementation.GeometryWrapper;
import org.apache.jena.geosparql.implementation.vocabulary.SpatialExtension;
import org.apache.jena.geosparql.spatial.ConvertLatLon;
import org.apache.jena.geosparql.spatial.ConvertLatLonBox;
import org.apache.jena.geosparql.spatial.SpatialIndexTestData;
import org.apache.jena.geosparql.spatial.property_functions.SpatialArguments;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.ResultSet;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.sparql.pfunction.PropFuncArg;
import org.junit.After;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Unit tests for the {@code spatial:intersectBoxGeom} property function:
 * verifies the second-stage geometry filter directly and via a SPARQL query
 * against the shared spatial test dataset.
 */
public class IntersectBoxGeomPFTest {
public IntersectBoxGeomPFTest() {
}
@BeforeClass
public static void setUpClass() {
// Run GeoSPARQL without a spatial index so the filter itself is exercised.
GeoSPARQLConfig.setupNoIndex();
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of checkSecondFilter method, of class IntersectBoxGeomPF:
* a point at (1,1) intersects the (0,0)-(2,2) box.
*/
@Test
public void testCheckSecondFilter() {
IntersectBoxGeomPF instance = new IntersectBoxGeomPF();
//Property Function
Node predicate = NodeFactory.createURI(SpatialExtension.INTERSECT_BOX_GEOM_PROP);
//Geometry and Envelope parameters
float lat = 1;
float lon = 1;
float latMin = 0;
float lonMin = 0;
float latMax = 2;
float lonMax = 2;
Literal geometry = ConvertLatLonBox.toLiteral(latMin, lonMin, latMax, lonMax);
Literal targetGeometry = ConvertLatLon.toLiteral(lat, lon);
List<Node> objectNodes = Arrays.asList(geometry.asNode());
PropFuncArg object = new PropFuncArg(objectNodes);
//Function arguments
SpatialArguments spatialArguments = instance.extractObjectArguments(predicate, object, SpatialIndexTestData.WGS_84_SRS_INFO);
GeometryWrapper targetGeometryWrapper = GeometryWrapper.extract(targetGeometry);
//Test arguments
boolean expResult = true;
boolean result = instance.checkSecondFilter(spatialArguments, targetGeometryWrapper);
assertEquals(expResult, result);
}
/**
* Test of checkSecondFilter method, of class IntersectBoxGeomPF:
* a point at (5,5) lies outside the (0,0)-(2,2) box, so the filter fails.
*/
@Test
public void testCheckSecondFilter_fail() {
IntersectBoxGeomPF instance = new IntersectBoxGeomPF();
//Property Function
Node predicate = NodeFactory.createURI(SpatialExtension.INTERSECT_BOX_GEOM_PROP);
//Geometry and Envelope parameters
float lat = 5;
float lon = 5;
float latMin = 0;
float lonMin = 0;
float latMax = 2;
float lonMax = 2;
Literal geometry = ConvertLatLonBox.toLiteral(latMin, lonMin, latMax, lonMax);
Literal targetGeometry = ConvertLatLon.toLiteral(lat, lon);
List<Node> objectNodes = Arrays.asList(geometry.asNode());
PropFuncArg object = new PropFuncArg(objectNodes);
//Function arguments
SpatialArguments spatialArguments = instance.extractObjectArguments(predicate, object, SpatialIndexTestData.WGS_84_SRS_INFO);
GeometryWrapper targetGeometryWrapper = GeometryWrapper.extract(targetGeometry);
//Test arguments
boolean expResult = false;
boolean result = instance.checkSecondFilter(spatialArguments, targetGeometryWrapper);
assertEquals(expResult, result);
}
/**
* Test of execEvaluated method, of class IntersectBoxGeom: a polygon around
* London, queried through SPARQL, should match exactly the London feature.
*/
@Test
public void testExecEvaluated() {
Dataset dataset = SpatialIndexTestData.createTestDataset();
String query = "PREFIX spatial: <http://jena.apache.org/spatial#>\n"
+ "\n"
+ "SELECT ?subj\n"
+ "WHERE{\n"
+ " BIND( \"<http://www.opengis.net/def/crs/EPSG/0/4326> POLYGON((51.4 -0.13, 51.6 -0.13, 51.6 -0.12, 51.4 -0.12, 51.4 -0.13))\"^^<http://www.opengis.net/ont/geosparql#wktLiteral> AS ?geom)"
+ " ?subj spatial:intersectBoxGeom(?geom) .\n"
+ "}ORDER by ?subj";
List<Resource> result = new ArrayList<>();
try (QueryExecution qe = QueryExecutionFactory.create(query, dataset)) {
ResultSet rs = qe.execSelect();
while (rs.hasNext()) {
QuerySolution qs = rs.nextSolution();
Resource feature = qs.getResource("subj");
result.add(feature);
}
}
List<Resource> expResult = Arrays.asList(SpatialIndexTestData.LONDON_FEATURE);
assertEquals(expResult, result);
}
}
| apache-2.0 |
openengsb-attic/openengsb-api | src/main/java/org/openengsb/core/api/security/EncryptionException.java | 1278 | /**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.core.api.security;
/**
 * Thrown when encrypting or decrypting a message fails.
 * Provides the four conventional exception constructors; the serial-version
 * warning is suppressed rather than pinning an explicit serialVersionUID.
 */
@SuppressWarnings("serial")
public class EncryptionException extends OpenEngSBSecurityException {
public EncryptionException() {
}
public EncryptionException(String message) {
super(message);
}
public EncryptionException(Throwable cause) {
super(cause);
}
public EncryptionException(String message, Throwable cause) {
super(message, cause);
}
}
| apache-2.0 |
v7lin/Android_Skin_2.0 | library/src/com/v7lin/android/env/widget/CompatHorizontalScrollView.java | 3095 | package com.v7lin.android.env.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.widget.FrameLayout;
import android.widget.HorizontalScrollView;
import com.v7lin.android.env.EnvCallback;
/**
 * A {@link HorizontalScrollView} that participates in the library's
 * skin/font theming: every background/foreground mutator additionally
 * notifies the {@link EnvUIChanger} so the applied resource can be tracked
 * and re-applied when the skin or font changes.
 *
 * @author v7lin E-mail:v7lin@qq.com
 */
@SuppressWarnings("deprecation")
public class CompatHorizontalScrollView extends HorizontalScrollView implements XFrameLayoutCall, EnvCallback {
// Whether system resources may be resolved by the changer; fixed off here.
private static final boolean ALLOW_SYSRES = false;
// Null until the three-arg constructor runs; every use is null-guarded
// because the setBackground* overrides can be invoked from the super
// constructor before this field is assigned.
private EnvUIChanger<FrameLayout, XFrameLayoutCall> mEnvUIChanger;
public CompatHorizontalScrollView(Context context) {
this(context, null);
}
public CompatHorizontalScrollView(Context context, AttributeSet attrs) {
// this(context, attrs, com.android.internal.R.attr.horizontalScrollViewStyle);
this(context, attrs, InternalTransfer.transferAttr(context, "horizontalScrollViewStyle"));
}
public CompatHorizontalScrollView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
mEnvUIChanger = new EnvFrameLayoutChanger<FrameLayout, XFrameLayoutCall>(context);
mEnvUIChanger.applyStyle(context, attrs, defStyle, 0, ALLOW_SYSRES, isInEditMode());
}
@Override
public void setForeground(Drawable d) {
super.setForeground(d);
// resid 0 = drawable set directly, not via a resource id.
applyAttrForeground(0);
}
private void applyAttrForeground(int resid) {
applyAttr(getContext(), android.R.attr.foreground, resid);
}
@Override
public void setBackgroundColor(int color) {
super.setBackgroundColor(color);
applyAttrBackground(0);
}
@Override
public void setBackgroundResource(int resid) {
super.setBackgroundResource(resid);
// Only this overload knows the real resource id; the others report 0.
applyAttrBackground(resid);
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
@Override
public void setBackground(Drawable background) {
super.setBackground(background);
applyAttrBackground(0);
}
@Override
public void setBackgroundDrawable(Drawable background) {
super.setBackgroundDrawable(background);
applyAttrBackground(0);
}
private void applyAttrBackground(int resid) {
applyAttr(getContext(), android.R.attr.background, resid);
}
private void applyAttr(Context context, int attr, int resid) {
if (mEnvUIChanger != null) {
mEnvUIChanger.applyAttr(context, attr, resid, ALLOW_SYSRES, isInEditMode());
}
}
// XFrameLayoutCall callbacks: apply skin values without re-notifying the changer.
@Override
public void scheduleForeground(Drawable d) {
super.setForeground(d);
}
@Override
public void scheduleBackgroundDrawable(Drawable background) {
super.setBackgroundDrawable(background);
}
@Override
public void scheduleSkin() {
if (mEnvUIChanger != null) {
mEnvUIChanger.scheduleSkin(this, this, isInEditMode());
}
}
@Override
public void scheduleFont() {
if (mEnvUIChanger != null) {
mEnvUIChanger.scheduleFont(this, this, isInEditMode());
}
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
// Re-apply the current skin and font whenever the view (re)enters a window.
if (mEnvUIChanger != null) {
mEnvUIChanger.scheduleSkin(this, this, isInEditMode());
mEnvUIChanger.scheduleFont(this, this, isInEditMode());
}
}
}
| apache-2.0 |
hernsys/uberfire-0.4.0.CR1 | uberfire-widgets/uberfire-widgets-core/uberfire-widgets-core-client/src/main/java/org/uberfire/client/editors/repository/clone/CloneRepositoryForm.java | 12910 | /*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.client.editors.repository.clone;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import com.github.gwtbootstrap.client.ui.Button;
import com.github.gwtbootstrap.client.ui.ControlGroup;
import com.github.gwtbootstrap.client.ui.ControlLabel;
import com.github.gwtbootstrap.client.ui.HelpInline;
import com.github.gwtbootstrap.client.ui.ListBox;
import com.github.gwtbootstrap.client.ui.Modal;
import com.github.gwtbootstrap.client.ui.PasswordTextBox;
import com.github.gwtbootstrap.client.ui.TextBox;
import com.github.gwtbootstrap.client.ui.constants.ControlGroupType;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.KeyPressEvent;
import com.google.gwt.event.dom.client.KeyPressHandler;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.InlineHTML;
import com.google.gwt.user.client.ui.PopupPanel;
import com.google.gwt.user.client.ui.Widget;
import org.jboss.errai.bus.client.api.messaging.Message;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.ErrorCallback;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.uberfire.backend.organizationalunit.OrganizationalUnit;
import org.uberfire.backend.organizationalunit.OrganizationalUnitService;
import org.uberfire.backend.repositories.Repository;
import org.uberfire.backend.repositories.RepositoryAlreadyExistsException;
import org.uberfire.backend.repositories.RepositoryService;
import org.uberfire.client.UberFirePreferences;
import org.uberfire.client.common.BusyPopup;
import org.uberfire.client.common.popups.errors.ErrorPopup;
import org.uberfire.client.mvp.PlaceManager;
import org.uberfire.client.resources.i18n.CoreConstants;
import org.uberfire.mvp.impl.DefaultPlaceRequest;
import org.uberfire.util.URIUtil;
/**
 * Popup form for cloning a remote git repository into the workbench:
 * collects organizational unit, repository name, URL and credentials,
 * validates them, then calls the remote {@link RepositoryService}
 * asynchronously to normalize the name and create the repository.
 */
@Dependent
public class CloneRepositoryForm
extends PopupPanel {
interface CloneRepositoryFormBinder
extends
UiBinder<Widget, CloneRepositoryForm> {
}
private static CloneRepositoryFormBinder uiBinder = GWT.create( CloneRepositoryFormBinder.class );
@Inject
private Caller<RepositoryService> repositoryService;
@Inject
private Caller<OrganizationalUnitService> organizationalUnitService;
@Inject
private PlaceManager placeManager;
@UiField
Button clone;
@UiField
Button cancel;
@UiField
ControlGroup organizationalUnitGroup;
@UiField
ListBox organizationalUnitDropdown;
@UiField
HelpInline organizationalUnitHelpInline;
@UiField
ControlGroup nameGroup;
@UiField
TextBox nameTextBox;
@UiField
HelpInline nameHelpInline;
@UiField
ControlGroup urlGroup;
@UiField
TextBox gitURLTextBox;
@UiField
HelpInline urlHelpInline;
@UiField
TextBox usernameTextBox;
@UiField
PasswordTextBox passwordTextBox;
@UiField
Modal popup;
@UiField
InlineHTML isOUMandatory;
// Name -> unit lookup, filled asynchronously in init().
private Map<String, OrganizationalUnit> availableOrganizationalUnits = new HashMap<String, OrganizationalUnit>();
// True unless the "ou.mandatory.disable" preference is set.
private boolean mandatoryOU = true;
/**
 * Binds the UI, wires validation-reset key handlers, and asynchronously
 * loads the organizational units into the dropdown.
 */
@PostConstruct
public void init() {
mandatoryOU = UberFirePreferences.getProperty( "org.uberfire.client.workbench.clone.ou.mandatory.disable" ) == null;
setWidget( uiBinder.createAndBindUi( this ) );
if ( !mandatoryOU ) {
isOUMandatory.removeFromParent();
}
popup.setDynamicSafe( true );
// Typing clears any previous validation error for the field.
nameTextBox.addKeyPressHandler( new KeyPressHandler() {
@Override
public void onKeyPress( final KeyPressEvent event ) {
nameGroup.setType( ControlGroupType.NONE );
nameHelpInline.setText( "" );
}
} );
gitURLTextBox.addKeyPressHandler( new KeyPressHandler() {
@Override
public void onKeyPress( final KeyPressEvent event ) {
urlGroup.setType( ControlGroupType.NONE );
urlHelpInline.setText( "" );
}
} );
//populate Organizational Units list box
organizationalUnitService.call( new RemoteCallback<Collection<OrganizationalUnit>>() {
@Override
public void callback( Collection<OrganizationalUnit> organizationalUnits ) {
organizationalUnitDropdown.addItem( CoreConstants.INSTANCE.SelectEntry() );
if ( organizationalUnits != null && !organizationalUnits.isEmpty() ) {
for ( OrganizationalUnit organizationalUnit : organizationalUnits ) {
organizationalUnitDropdown.addItem( organizationalUnit.getName(),
organizationalUnit.getName() );
availableOrganizationalUnits.put( organizationalUnit.getName(),
organizationalUnit );
}
}
}
},
new ErrorCallback<Message>() {
@Override
public boolean error( final Message message,
final Throwable throwable ) {
ErrorPopup.showMessage(CoreConstants.INSTANCE.CantLoadOrganizationalUnits()+" \n" + throwable.getMessage() );
return false;
}
}
).getOrganizationalUnits();
}
/**
 * Validates URL, organizational unit and name, then normalizes the name
 * remotely and (after optional user confirmation) creates the repository.
 * The screen is locked during the clone and unlocked on failure.
 */
@UiHandler("clone")
public void onCloneClick( final ClickEvent e ) {
if ( gitURLTextBox.getText() == null || gitURLTextBox.getText().trim().isEmpty() ) {
urlGroup.setType( ControlGroupType.ERROR );
urlHelpInline.setText( CoreConstants.INSTANCE.URLMandatory() );
return;
} else if ( !URIUtil.isValid( gitURLTextBox.getText().trim() ) ) {
urlGroup.setType( ControlGroupType.ERROR );
urlHelpInline.setText( CoreConstants.INSTANCE.InvalidUrlFormat() );
return;
} else {
urlGroup.setType( ControlGroupType.NONE );
}
final String organizationalUnit = organizationalUnitDropdown.getValue( organizationalUnitDropdown.getSelectedIndex() );
if ( mandatoryOU && !availableOrganizationalUnits.containsKey( organizationalUnit ) ) {
organizationalUnitGroup.setType( ControlGroupType.ERROR );
organizationalUnitHelpInline.setText( CoreConstants.INSTANCE.OrganizationalUnitMandatory() );
return;
} else {
organizationalUnitGroup.setType( ControlGroupType.NONE );
}
if ( nameTextBox.getText() == null || nameTextBox.getText().trim().isEmpty() ) {
nameGroup.setType( ControlGroupType.ERROR );
nameHelpInline.setText( CoreConstants.INSTANCE.RepositoryNaneMandatory() );
return;
} else {
repositoryService.call( new RemoteCallback<String>() {
@Override
public void callback( String normalizedName ) {
// Ask the user before silently renaming to the normalized form.
if ( !nameTextBox.getText().equals( normalizedName ) ) {
if ( !Window.confirm( CoreConstants.INSTANCE.RepositoryNameInvalid()+" \"" + normalizedName + "\". "+CoreConstants.INSTANCE.DoYouAgree() ) ) {
return;
}
nameTextBox.setText( normalizedName );
}
lockScreen();
final String scheme = "git";
final String alias = nameTextBox.getText().trim();
final String origin = gitURLTextBox.getText().trim();
final String username = usernameTextBox.getText().trim();
final String password = passwordTextBox.getText().trim();
final Map<String, Object> env = new HashMap<String, Object>( 3 );
env.put( "username", username );
env.put( "crypt:password", password );
env.put( "origin", origin );
repositoryService.call( new RemoteCallback<Repository>() {
@Override
public void callback( Repository o ) {
BusyPopup.close();
Window.alert( CoreConstants.INSTANCE.RepoCloneSuccess() );
hide();
placeManager.goTo( new DefaultPlaceRequest( "RepositoryEditor" ).addParameter( "alias", o.getAlias() ) );
}
},
new ErrorCallback<Message>() {
@Override
public boolean error( final Message message,
final Throwable throwable ) {
// Rethrow to dispatch on the concrete failure type.
try {
throw throwable;
} catch ( RepositoryAlreadyExistsException ex ) {
ErrorPopup.showMessage( CoreConstants.INSTANCE.RepoAlreadyExists() );
} catch ( Throwable ex ) {
ErrorPopup.showMessage( CoreConstants.INSTANCE.RepoCloneFail()+" \n" + throwable.getMessage() );
}
unlockScreen();
return true;
}
}
).createRepository( availableOrganizationalUnits.get( organizationalUnit ), scheme, alias, env );
}
} ).normalizeRepositoryName( nameTextBox.getText() );
}
}
// Disables all inputs and shows the busy popup while the clone runs.
private void lockScreen() {
BusyPopup.showMessage( CoreConstants.INSTANCE.Cloning() );
popup.setCloseVisible( false );
clone.setEnabled( false );
cancel.setEnabled( false );
passwordTextBox.setEnabled( false );
usernameTextBox.setEnabled( false );
gitURLTextBox.setEnabled( false );
organizationalUnitDropdown.setEnabled( false );
nameTextBox.setEnabled( false );
}
// Re-enables all inputs after a failed clone.
private void unlockScreen() {
BusyPopup.close();
popup.setCloseVisible( true );
clone.setEnabled( true );
cancel.setEnabled( true );
passwordTextBox.setEnabled( true );
usernameTextBox.setEnabled( true );
gitURLTextBox.setEnabled( true );
organizationalUnitDropdown.setEnabled( true );
nameTextBox.setEnabled( true );
}
@UiHandler("cancel")
public void onCancelClick( final ClickEvent e ) {
hide();
}
public void hide() {
BusyPopup.close();
popup.hide();
super.hide();
}
public void show() {
popup.show();
}
}
| apache-2.0 |
apache/geronimo-yoko | yoko-core/src/main/java/org/apache/yoko/orb/OAD/ProcessEndpointPOA.java | 4323 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.yoko.orb.OAD;
//
// IDL:orb.yoko.apache.org/OAD/ProcessEndpoint:1.0
//
// NOTE: IDL-compiler-generated POA skeleton; keep edits to comments only.
public abstract class ProcessEndpointPOA
extends org.omg.PortableServer.Servant
implements org.omg.CORBA.portable.InvokeHandler,
ProcessEndpointOperations
{
// Repository IDs implemented by this servant.
static final String[] _ob_ids_ =
{
"IDL:orb.yoko.apache.org/OAD/ProcessEndpoint:1.0",
};
public ProcessEndpoint
_this()
{
return ProcessEndpointHelper.narrow(super._this_object());
}
public ProcessEndpoint
_this(org.omg.CORBA.ORB orb)
{
return ProcessEndpointHelper.narrow(super._this_object(orb));
}
public String[]
_all_interfaces(org.omg.PortableServer.POA poa, byte[] objectId)
{
return _ob_ids_;
}
// Dispatches an incoming request to the matching _OB_op_* handler.
public org.omg.CORBA.portable.OutputStream
_invoke(String opName,
org.omg.CORBA.portable.InputStream in,
org.omg.CORBA.portable.ResponseHandler handler)
{
// Sorted operation-name table, searched with a hand-rolled binary search.
final String[] _ob_names =
{
"reestablish_link",
"stop"
};
int _ob_left = 0;
int _ob_right = _ob_names.length;
int _ob_index = -1;
while(_ob_left < _ob_right)
{
int _ob_m = (_ob_left + _ob_right) / 2;
int _ob_res = _ob_names[_ob_m].compareTo(opName);
if(_ob_res == 0)
{
_ob_index = _ob_m;
break;
}
else if(_ob_res > 0)
_ob_right = _ob_m;
else
_ob_left = _ob_m + 1;
}
// Retry with the leading '_' stripped — presumably to match
// underscore-prefixed variants of the operation name; generated code.
if(_ob_index == -1 && opName.charAt(0) == '_')
{
_ob_left = 0;
_ob_right = _ob_names.length;
String _ob_ami_op =
opName.substring(1);
while(_ob_left < _ob_right)
{
int _ob_m = (_ob_left + _ob_right) / 2;
int _ob_res = _ob_names[_ob_m].compareTo(_ob_ami_op);
if(_ob_res == 0)
{
_ob_index = _ob_m;
break;
}
else if(_ob_res > 0)
_ob_right = _ob_m;
else
_ob_left = _ob_m + 1;
}
}
switch(_ob_index)
{
case 0: // reestablish_link
return _OB_op_reestablish_link(in, handler);
case 1: // stop
return _OB_op_stop(in, handler);
}
// Unknown operation: raise BAD_OPERATION to the client.
throw new org.omg.CORBA.BAD_OPERATION(
org.apache.yoko.orb.OB.MinorCodes
.describeBadOperation(org.apache.yoko.orb.OB.MinorCodes.MinorTypeMismatch),
org.apache.yoko.orb.OB.MinorCodes.MinorTypeMismatch, org.omg.CORBA.CompletionStatus.COMPLETED_NO);
}
// Unmarshals the ProcessEndpointManager argument and delegates to the servant.
private org.omg.CORBA.portable.OutputStream
_OB_op_reestablish_link(org.omg.CORBA.portable.InputStream in,
org.omg.CORBA.portable.ResponseHandler handler)
{
org.omg.CORBA.portable.OutputStream out = null;
ProcessEndpointManager _ob_a0 = ProcessEndpointManagerHelper.read(in);
reestablish_link(_ob_a0);
out = handler.createReply();
return out;
}
// No-argument dispatch for the stop() operation.
private org.omg.CORBA.portable.OutputStream
_OB_op_stop(org.omg.CORBA.portable.InputStream in,
org.omg.CORBA.portable.ResponseHandler handler)
{
org.omg.CORBA.portable.OutputStream out = null;
stop();
out = handler.createReply();
return out;
}
}
| apache-2.0 |
cloudendpoints/endpoints-java | endpoints-framework-tools/src/main/java/com/google/api/server/spi/tools/WebXml.java | 2333 | package com.google.api.server.spi.tools;
import com.google.api.server.spi.EndpointsServlet;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
import java.util.List;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
/**
* Representation of web.xml
*/
/**
 * Read-only representation of a parsed {@code web.xml} deployment descriptor.
 */
public class WebXml {
    // XPath locating the comma-separated "services" init-param of the
    // Endpoints servlet (legacy SystemServiceServlet or EndpointsServlet).
    // Built once instead of being re-concatenated on every call.
    private static final String SERVICES_XPATH = "/web-app/servlet" + "["
        + "servlet-class = 'com.google.api.server.spi.SystemServiceServlet'"
        + " or "
        + "servlet-class = '" + EndpointsServlet.class.getName() + "'"
        + "]/init-param[param-name = 'services']/param-value/text()";

    private final Document document;

    public WebXml(Document document) {
        this.document = document;
    }

    /**
     * Create a instance of WebXml from a file.
     */
    public static WebXml parse(File webXml)
            throws IOException, SAXException, ParserConfigurationException {
        return new WebXml(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(webXml));
    }

    /**
     * Find endpoint service classes defined in the web.xml.
     *
     * Looks for a servlet with servlet-class = SystemServiceServlet or
     * {@link EndpointsServlet}.
     * Does NOT handle the case of multiple mappings to the same servlet-class,
     * it will only return the result to first one that defines the "services" init-param
     * Does NOT validate that the servlet has a corresponding servlet-mapping
     *
     * @return a list of endpoints service classes
     */
    public List<String> endpointsServiceClasses() {
        // XPath objects are not thread-safe, so a fresh one is created per call.
        XPath xpath = XPathFactory.newInstance().newXPath();
        try {
            String servicesString = (String) xpath.evaluate(SERVICES_XPATH, document, XPathConstants.STRING);
            List<String> services = Lists.newArrayList();
            for (String service : servicesString.trim().split(",")) {
                // Trim once; skip empty segments from stray commas/whitespace.
                String trimmed = service.trim();
                if (!trimmed.isEmpty()) {
                    services.add(trimmed);
                }
            }
            return services;
        } catch (XPathExpressionException e) {
            // The expression is a compile-time constant, so failure here is a bug.
            throw new RuntimeException(e);
        }
    }
}
| apache-2.0 |
pivotal-amurmann/geode | geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DeployCommand.java | 7613 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.apache.commons.io.FileUtils.ONE_MB;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.GfshParseResult;
import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
import org.apache.geode.management.internal.cli.functions.DeployFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.remote.CommandExecutionContext;
import org.apache.geode.management.internal.cli.result.FileResult;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.cli.result.TabularResultData;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;
public class DeployCommand implements GfshCommand {
private final DeployFunction deployFunction = new DeployFunction();
/**
* Deploy one or more JAR files to members of a group or all members.
*
* @param groups Group(s) to deploy the JAR to or null for all members
* @param jars JAR file to deploy
* @param dir Directory of JAR files to deploy
* @return The result of the attempt to deploy
*/
@CliCommand(value = {CliStrings.DEPLOY}, help = CliStrings.DEPLOY__HELP)
@CliMetaData(
interceptor = "org.apache.geode.management.internal.cli.commands.DeployCommand$Interceptor",
isFileUploaded = true, relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
@ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
operation = ResourcePermission.Operation.MANAGE, target = ResourcePermission.Target.DEPLOY)
public Result deploy(
@CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS}, help = CliStrings.DEPLOY__GROUP__HELP,
optionContext = ConverterHint.MEMBERGROUP) String[] groups,
@CliOption(key = {CliStrings.JAR, CliStrings.JARS},
help = CliStrings.DEPLOY__JAR__HELP) String[] jars,
@CliOption(key = {CliStrings.DEPLOY__DIR}, help = CliStrings.DEPLOY__DIR__HELP) String dir) {
TabularResultData tabularData = ResultBuilder.createTabularResultData();
byte[][] shellBytesData = CommandExecutionContext.getBytesFromShell();
String[] jarNames = CliUtil.bytesToNames(shellBytesData);
byte[][] jarBytes = CliUtil.bytesToData(shellBytesData);
Set<DistributedMember> targetMembers;
targetMembers = CliUtil.findMembers(groups, null);
if (targetMembers.size() > 0) {
// this deploys the jars to all the matching servers
ResultCollector<?, ?> resultCollector = CliUtil.executeFunction(this.deployFunction,
new Object[] {jarNames, jarBytes}, targetMembers);
List<CliFunctionResult> results =
CliFunctionResult.cleanResults((List<?>) resultCollector.getResult());
for (CliFunctionResult result : results) {
if (result.getThrowable() != null) {
tabularData.accumulate("Member", result.getMemberIdOrName());
tabularData.accumulate("Deployed JAR", "");
tabularData.accumulate("Deployed JAR Location",
"ERROR: " + result.getThrowable().getClass().getName() + ": "
+ result.getThrowable().getMessage());
tabularData.setStatus(Result.Status.ERROR);
} else {
String[] strings = (String[]) result.getSerializables();
for (int i = 0; i < strings.length; i += 2) {
tabularData.accumulate("Member", result.getMemberIdOrName());
tabularData.accumulate("Deployed JAR", strings[i]);
tabularData.accumulate("Deployed JAR Location", strings[i + 1]);
}
}
}
}
Result result = ResultBuilder.buildResult(tabularData);
persistClusterConfiguration(result,
() -> getSharedConfiguration().addJarsToThisLocator(jarNames, jarBytes, groups));
return result;
}
/**
* Interceptor used by gfsh to intercept execution of deploy command at "shell".
*/
public static class Interceptor extends AbstractCliAroundInterceptor {
private final DecimalFormat numFormatter = new DecimalFormat("###,##0.00");
@Override
public Result preExecution(GfshParseResult parseResult) {
// 2nd argument is the jar
String[] jars = (String[]) parseResult.getArguments()[1];
// 3rd argument is the dir
String dir = (String) parseResult.getArguments()[2];
if (ArrayUtils.isEmpty(jars) && StringUtils.isBlank(dir)) {
return ResultBuilder.createUserErrorResult(
"Parameter \"jar\" or \"dir\" is required. Use \"help <command name>\" for assistance.");
}
if (ArrayUtils.isNotEmpty(jars) && StringUtils.isNotBlank(dir)) {
return ResultBuilder
.createUserErrorResult("Parameters \"jar\" and \"dir\" can not both be specified.");
}
FileResult fileResult;
String[] filesToUpload = jars;
if (filesToUpload == null) {
filesToUpload = new String[] {dir};
}
try {
fileResult = new FileResult(filesToUpload);
} catch (FileNotFoundException fnfex) {
return ResultBuilder
.createGemFireErrorResult("'" + Arrays.toString(filesToUpload) + "' not found.");
} catch (IOException ioex) {
return ResultBuilder.createGemFireErrorResult("I/O error when reading jar/dir: "
+ ioex.getClass().getName() + ": " + ioex.getMessage());
}
// Only do this additional check if a dir was provided
if (dir != null) {
String message =
"\nDeploying files: " + fileResult.getFormattedFileList() + "\nTotal file size is: "
+ this.numFormatter.format((double) fileResult.computeFileSizeTotal() / ONE_MB)
+ "MB\n\nContinue? ";
if (readYesNo(message, Response.YES) == Response.NO) {
return ResultBuilder.createShellClientAbortOperationResult(
"Aborted deploy of " + Arrays.toString(filesToUpload) + ".");
}
}
return fileResult;
}
}
}
| apache-2.0 |
exercisecode/alidayu_demo | alidayu/src/main/java/com/taobao/api/internal/toplink/embedded/websocket/frame/draft06/PingFrame.java | 1963 | /*
* The MIT License
*
* Copyright (c) 2011 Takahiro Hashimoto
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.taobao.api.internal.toplink.embedded.websocket.frame.draft06;
import com.taobao.api.internal.toplink.embedded.websocket.frame.FrameHeader;
import com.taobao.api.internal.toplink.embedded.websocket.frame.draft06.FrameBuilderDraft06.Opcode;
/**
* The Class PingFrame.
*
* @author Takahiro Hashimoto
*/
public class PingFrame extends FrameDraft06 {
/**
* Instantiates a new ping frame.
*
* @param header the header
* @param bodyData the contents data
*/
protected PingFrame(FrameHeaderDraft06 header, byte[] bodyData) {
super(header, bodyData);
}
/**
* Instantiates a new ping frame.
*/
public PingFrame(){
FrameHeader header = FrameBuilderDraft06.createFrameHeader(null, false, Opcode.PING);
setHeader(header);
}
}
| apache-2.0 |
ewestfal/rice-svn2git-test | rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/lifecycle/initialize/PopulatePathTask.java | 1703 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.uif.lifecycle.initialize;
import org.kuali.rice.krad.uif.lifecycle.LifecycleElementState;
import org.kuali.rice.krad.uif.lifecycle.ViewLifecycleTaskBase;
import org.kuali.rice.krad.uif.util.LifecycleElement;
/**
* Assigns a lifecycle element's path property {@link LifecycleElement#setViewPath(String)} and
* {@link LifecycleElement#setPath(String)}, based on the paths to the element from
* {@link LifecycleElementState}.
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
public class PopulatePathTask extends ViewLifecycleTaskBase<LifecycleElement> {
/**
* Creates an instance based on element state.
*/
protected PopulatePathTask() {
super(LifecycleElement.class);
}
/**
* {@inheritDoc}
*/
@Override
protected void performLifecycleTask() {
LifecycleElementState elementState = getElementState();
LifecycleElement element = elementState.getElement();
element.setViewPath(elementState.getViewPath());
}
}
| apache-2.0 |
leafclick/intellij-community | platform/vcs-impl/src/com/intellij/openapi/vcs/changes/VcsFreezingProcess.java | 3412 | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.changes;
import com.intellij.configurationStore.StoreReloadManager;
import com.intellij.ide.SaveAndSyncHandler;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.progress.util.BackgroundTaskUtil;
import com.intellij.openapi.project.Project;
import com.intellij.util.messages.Topic;
import org.jetbrains.annotations.NotNull;
/**
* Executes an action surrounding it with freezing-unfreezing of the ChangeListManager
* and blocking/unblocking save/sync on frame de/activation.
*/
public class VcsFreezingProcess {
private static final Logger LOG = Logger.getInstance(VcsFreezingProcess.class);
@NotNull private final Project myProject;
@NotNull private final String myOperationTitle;
@NotNull private final Runnable myRunnable;
@NotNull private final ChangeListManagerEx myChangeListManager;
public VcsFreezingProcess(@NotNull Project project, @NotNull String operationTitle, @NotNull Runnable runnable) {
myProject = project;
myOperationTitle = operationTitle;
myRunnable = runnable;
myChangeListManager = (ChangeListManagerEx)ChangeListManager.getInstance(project);
}
public void execute() {
LOG.debug("starting");
try {
LOG.debug("saving documents, blocking project autosync");
saveAndBlockInAwt();
try {
LOG.debug("freezing the ChangeListManager");
freeze();
LOG.debug("running the operation");
myRunnable.run();
LOG.debug("operation completed.");
}
finally {
LOG.debug("unfreezing the ChangeListManager");
unfreeze();
}
}
finally {
LOG.debug("unblocking project autosync");
unblockInAwt();
}
LOG.debug("finished.");
}
private static void saveAndBlockInAwt() {
ApplicationManager.getApplication().invokeAndWait(() -> {
StoreReloadManager.getInstance().blockReloadingProjectOnExternalChanges();
FileDocumentManager.getInstance().saveAllDocuments();
SaveAndSyncHandler saveAndSyncHandler = SaveAndSyncHandler.getInstance();
saveAndSyncHandler.blockSaveOnFrameDeactivation();
saveAndSyncHandler.blockSyncOnFrameActivation();
});
}
private static void unblockInAwt() {
ApplicationManager.getApplication().invokeAndWait(() -> {
StoreReloadManager.getInstance().unblockReloadingProjectOnExternalChanges();
SaveAndSyncHandler saveAndSyncHandler = SaveAndSyncHandler.getInstance();
saveAndSyncHandler.unblockSaveOnFrameDeactivation();
saveAndSyncHandler.unblockSyncOnFrameActivation();
});
}
private void freeze() {
BackgroundTaskUtil.syncPublisher(myProject, Listener.TOPIC).onFreeze();
myChangeListManager.freeze("Local changes are not available until " + myOperationTitle + " is finished.");
}
private void unfreeze() {
BackgroundTaskUtil.syncPublisher(myProject, Listener.TOPIC).onUnfreeze();
myChangeListManager.unfreeze();
}
public interface Listener {
Topic<Listener> TOPIC = Topic.create("Change List Manager Freeze", Listener.class);
default void onFreeze() {}
default void onUnfreeze() {}
}
}
| apache-2.0 |
prabushi/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.esb.synapse.unit.test/src/org/wso2/developerstudio/eclipse/esb/synapse/unit/test/action/RunSynapseUnitTestAction.java | 5771 | /*
* Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.developerstudio.eclipse.esb.synapse.unit.test.action;
import java.io.File;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.Path;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.ui.IActionDelegate;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PlatformUI;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.Activator;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.component.CustomRunWizardDialog;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.component.CustomWizardDialog;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.constant.Constants;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.wizard.runtest.UnitTestConfigurationWizard;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.wizard.unittest.UnitTestSuiteCreationWizard;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
/**
* Class responsible for running the unit test suites with configuration wizard.
*/
public class RunSynapseUnitTestAction implements IActionDelegate {
private static IDeveloperStudioLog log = Logger.getLog(Activator.PLUGIN_ID);
private IStructuredSelection selection;
/**
* Method of running ILauncher with maven dependencies.
*/
public void run(IAction action) {
if (checkTestSuitesAvailable()) {
UnitTestConfigurationWizard wizard = new UnitTestConfigurationWizard();
wizard.init(PlatformUI.getWorkbench(), selection);
CustomRunWizardDialog testRunWizardDialog = new CustomRunWizardDialog(
PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), wizard);
testRunWizardDialog.open();
}
}
@Override
public void selectionChanged(IAction action, ISelection selection) {
if (selection instanceof IStructuredSelection) {
this.selection = (IStructuredSelection) selection;
}
}
/**
* Method for checking test suites exists in the test folder to proceed testing.
*
* @return boolean value of existence
*/
private boolean checkTestSuitesAvailable() {
boolean isTestFilesExists = true;
try {
IProject project = getSelectedProject(selection);
if (project != null) {
IFolder testIFolder = project.getFolder(new Path(Constants.TEST_FOLDER));
File testFolder = new File(testIFolder.getLocation().toOSString());
File[] filesList = testFolder.listFiles();
boolean isFileExists = false;
for (File file : filesList) {
if (file.isFile()) {
isFileExists = true;
break;
}
}
if (!isFileExists) {
IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
MessageBox messageBox = new MessageBox(window.getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.NO);
messageBox.setText("No Unit Test Suites Found");
messageBox.setMessage("0 unit test suites found! Do you want to create an new unit test suite?");
int response = messageBox.open();
if (response == SWT.YES) {
UnitTestSuiteCreationWizard wizard = new UnitTestSuiteCreationWizard(project);
wizard.init(PlatformUI.getWorkbench(), null);
CustomWizardDialog mockServiceWizardDialog = new CustomWizardDialog(window.getShell(), wizard);
mockServiceWizardDialog.setHelpAvailable(false);
mockServiceWizardDialog.open();
}
isTestFilesExists = false;
}
}
} catch (CoreException e) {
log.error("Error getting session properties", e);
} catch (Exception e) {
log.error("Error reading project", e);
}
return isTestFilesExists;
}
/**
* Method for getting current project from IStructuredSelection.
*
* @param obj
* IStructuredSelection as an object
* @return IProject
*/
private IProject getSelectedProject(Object obj) throws Exception {
if (obj == null) {
return null;
}
if (obj instanceof IResource) {
return ((IResource) obj).getProject();
} else if (obj instanceof IStructuredSelection) {
return getSelectedProject(((IStructuredSelection) obj).getFirstElement());
}
return null;
}
}
| apache-2.0 |
apache/tapestry-5 | tapestry-ioc-jcache/src/test/java/org/apache/tapestry5/jcache/internal/HarnessModule.java | 1905 | // Copyright 2014 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.jcache.internal;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.spi.CachingProvider;
import manager.BlogManager;
import manager.CacheNameOnEachMethodBlogManagerImpl;
import manager.ClassLevelCacheConfigBlogManagerImpl;
import manager.UsingDefaultCacheNameBlogManagerImpl;
import org.apache.tapestry5.ioc.ServiceBinder;
/**
* Tapestry-IoC module class for this test harness.
*/
public class HarnessModule
{
private HarnessModule()
{
}
public static void bind(ServiceBinder binder)
{
bindWithId(binder, UsingDefaultCacheNameBlogManagerImpl.class);
bindWithId(binder, CacheNameOnEachMethodBlogManagerImpl.class);
bindWithId(binder, ClassLevelCacheConfigBlogManagerImpl.class);
}
@SuppressWarnings(
{ "rawtypes", "unchecked" })
private static void bindWithId(ServiceBinder binder, Class implementationClass)
{
binder.bind(BlogManager.class, implementationClass).withId(
implementationClass.getSimpleName());
}
public static CacheManager buildCacheManager()
{
CachingProvider provider = Caching.getCachingProvider();
return provider.getCacheManager(provider.getDefaultURI(), provider.getDefaultClassLoader());
}
}
| apache-2.0 |
papicella/snappy-store | gemfirexd/tools/src/dunit/java/com/pivotal/gemfirexd/internal/engine/distributed/EquiJoinQueryDUnit.java | 57646 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
/**
*
*/
package com.pivotal.gemfirexd.internal.engine.distributed;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.util.HashSet;
import java.util.Set;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.pivotal.gemfirexd.DistributedSQLTestBase;
import com.pivotal.gemfirexd.TestUtil;
import com.pivotal.gemfirexd.internal.engine.GemFireXDQueryObserverAdapter;
import com.pivotal.gemfirexd.internal.engine.GemFireXDQueryObserverHolder;
import com.pivotal.gemfirexd.internal.engine.GfxdConstants;
import com.pivotal.gemfirexd.internal.engine.distributed.metadata.SelectQueryInfo;
import com.pivotal.gemfirexd.internal.iapi.sql.Activation;
import com.pivotal.gemfirexd.internal.iapi.types.SQLInteger;
import com.pivotal.gemfirexd.internal.impl.jdbc.EmbedStatement;
import io.snappydata.test.dunit.SerializableRunnable;
import io.snappydata.test.dunit.VM;
/**
* Tests whether the statementID , connectionID etc are being passed correctly
* or not
*
* @author Asif
* @since 6.0
*
*/
@SuppressWarnings("serial")
public class EquiJoinQueryDUnit extends DistributedSQLTestBase {
public EquiJoinQueryDUnit(String name) {
super(name);
}
public void testColocation() throws Exception {
// Create a table from client using partition by column
// Start one client and three servers
startVMs(1, 3);
clientSQLExecute(
1,
" create table trade.customers " +
"(cid int not null, cust_name varchar(100), since date, " +
"addr varchar(100), tid int, primary key (cid)) partition by column (cid)");
clientSQLExecute(
1,
" create table trade.portfolio " +
"(cid int not null, sid int not null, qty int not null, " +
"availQty int not null, subTotal decimal(30,20), tid int, " +
"constraint portf_pk primary key (cid, sid), " +
"constraint cust_fk foreign key (cid) references trade.customers (cid) on delete restrict, " +
"constraint qty_ck check (qty>=0), constraint avail_ch check (availQty>=0 and availQty<=qty)) " +
"partition by column (cid) colocate with (trade.customers)");
clientSQLExecute(
1,
"create table trade.sellorders (oid int not null constraint orders_pk primary key, " +
"cid int, sid int, qty int, ask decimal (30, 20), order_time timestamp, " +
"status varchar(10) default 'open', tid int, " +
"constraint portf_fk foreign key (cid, sid) references trade.portfolio (cid, sid) " +
"on delete restrict, " +
"constraint status_ch check (status in ('cancelled', 'open', 'filled'))) " +
"partition by column (cid) colocate with (trade.portfolio)");
clientSQLExecute(1, "select * from trade.customers c LEFT OUTER JOIN " +
"trade.portfolio f LEFT OUTER JOIN trade.sellorders so on f.cid = so.cid " +
"on c.cid= f.cid where f.tid = 0");
}
/**
* Tests basic equi join query having a single column partitioning
*
*/
public void testComputeNodesBehaviourWithSingleColumnAsPartitoningKey_1()
throws Exception {
// Create a table from client using partition by column
// Start one client and three servers
startVMs(1, 3);
clientSQLExecute(
1,
"create table TESTTABLE1 (ID1 int not null, "
+ " DESCRIPTION1 varchar(1024) not null, ADDRESS1 varchar(1024) not null, primary key (ID1))"
+ "PARTITION BY COLUMN ( ID1 )"+getOverflowSuffix());
clientSQLExecute(
1,
"create table TESTTABLE2 (ID2 int not null, "
+ " DESCRIPTION2 varchar(1024) not null, ADDRESS2 varchar(1024) not null, primary key (ID2))"
+ "PARTITION BY COLUMN ( ID2 ) Colocate with ( TESTTABLE1 )"+getOverflowSuffix());
// Insert values 1 to 8
for (int i = 0; i <= 8; ++i) {
clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
+ ", 'First1" + (i + 1) + "', 'J1 604')");
clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
+ ", 'First2" + (i + 1) + "', 'J2 604')");
}
Object[][] queries = new Object[][] {
//query 1
{ "select ID1, DESCRIPTION2 from TESTTABLE1, TESTTABLE2 where ID1 = ID2 AND ID1 IN (7,9) ",
//Conditions
new Object[][] {
//each condition1
{ new Integer(NodesPruningHelper.bycolumn),new SQLInteger(7) },
//each condition2
{ new Integer(NodesPruningHelper.bycolumn),new SQLInteger(9), new Integer(NodesPruningHelper.ORing) }
}
}
};
TestUtil.setupConnection();
try {
for( int i=0; i < queries.length; ++i) {
final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
final Activation[] actArr = new Activation[1];
NodesPruningHelper.setupObserverOnClient(sqiArr, actArr);
String queryString = (String)queries[i][0];
EmbedStatement es = (EmbedStatement)TestUtil.jdbcConn.createStatement();
String log="\nexecuting Query "+ (i+1) + " : \""+es.getSQLText() + "\"";
getLogWriter().info(log);
es.executeQuery(queryString);
NodesPruningHelper.validateNodePruningForQuery(queryString, sqiArr[0], (Object[][])queries[i][1],this, actArr[0]);
getLogWriter().info("Query " + (i+1) + " : succeeded");
es.close();
}
}
finally {
GemFireXDQueryObserverHolder
.setInstance(new GemFireXDQueryObserverAdapter());
//clientSQLExecute(1, "Drop table TESTTABLE1 ");
clientSQLExecute(1, "Drop table TESTTABLE2 ");
clientSQLExecute(1, "Drop table TESTTABLE1 ");
}
}
/**
* Tests Exception string
*
*/
public void testException_1()
throws Exception {
// Create a table from client using partition by column
// Start one client and three servers
startVMs(1, 3);
clientSQLExecute(
1,
"create table TESTTABLE1 (ID1 int not null, "
+ " DESCRIPTION1 varchar(1024) not null, ADDRESS1 varchar(1024) not null, primary key (ID1))"
+ "PARTITION BY COLUMN ( ID1 )"+getOverflowSuffix());
clientSQLExecute(
1,
"create table TESTTABLE2 (ID2 int not null, "
+ " DESCRIPTION2 varchar(1024) not null, ADDRESS2 varchar(1024) not null, primary key (ID2))"
+ "PARTITION BY COLUMN ( ID2 ) "+getOverflowSuffix());
// Insert values 1 to 8
for (int i = 0; i <= 8; ++i) {
clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
+ ", 'First1" + (i + 1) + "', 'J1 604')");
clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
+ ", 'First2" + (i + 1) + "', 'J2 604')");
}
Object[][] queries = new Object[][] {
//query 1
{ "select ID1, DESCRIPTION2 from TESTTABLE1, TESTTABLE2 where ID1 = ID2 AND ID1 IN (7,9) ",
//Conditions
new Object[][] {
//each condition1
{ new Integer(NodesPruningHelper.bycolumn),new SQLInteger(7) },
//each condition2
{ new Integer(NodesPruningHelper.bycolumn),new SQLInteger(9), new Integer(NodesPruningHelper.ORing) }
}
}
};
TestUtil.setupConnection();
try {
for( int i=0; i < queries.length; ++i) {
final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
NodesPruningHelper.setupObserverOnClient(sqiArr);
String queryString = (String)queries[i][0];
EmbedStatement es = (EmbedStatement)TestUtil.jdbcConn.createStatement();
String log="\nexecuting Query "+ (i+1) + " : \""+es.getSQLText() + "\"";
getLogWriter().info(log);
es.executeQuery(queryString);
fail("The equijoin query without colocation should have failed");
}
} catch (SQLException sqle) {
getLogWriter().info("Expected exception=" + sqle.getMessage(), sqle);
assertTrue(sqle.getMessage().indexOf("not colocated with ") != -1);
} finally {
GemFireXDQueryObserverHolder
.setInstance(new GemFireXDQueryObserverAdapter());
clientSQLExecute(1, "Drop table TESTTABLE1 ");
clientSQLExecute(1, "Drop table TESTTABLE2 ");
}
}
/**
* Tests Exception string
*
*/
public void testException_2()
throws Exception {
// Create a table from client using partition by column
// Start one client and three servers
startVMs(1, 3);
clientSQLExecute(
1,
"create table TESTTABLE1 (ID1 int not null, "
+ " DESCRIPTION1 varchar(1024) not null, ADDRESS1 varchar(1024) not null, primary key (ID1))"
+ "PARTITION BY COLUMN ( ID1 )"+getOverflowSuffix());
clientSQLExecute(
1,
"create table TESTTABLE2 (ID2 int not null, "
+ " DESCRIPTION2 varchar(1024) not null, ADDRESS2 varchar(1024) not null, primary key (ID2))"
+ "PARTITION BY COLUMN ( ID2 ) Colocate with ( TESTTABLE1 )"+getOverflowSuffix());
// Insert values 1 to 8
for (int i = 0; i <= 8; ++i) {
clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
+ ", 'First1" + (i + 1) + "', 'J1 604')");
clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
+ ", 'First2" + (i + 1) + "', 'J2 604')");
}
Object[][] queries = new Object[][] {
//query 1
{ "select ID1, DESCRIPTION2 from TESTTABLE1, TESTTABLE2 where ID1 IN (7,9) ",
//Conditions
new Object[][] {
//each condition1
{ new Integer(NodesPruningHelper.bycolumn),new SQLInteger(7) },
//each condition2
{ new Integer(NodesPruningHelper.bycolumn),new SQLInteger(9), new Integer(NodesPruningHelper.ORing) }
}
}
};
TestUtil.setupConnection();
try {
for( int i=0; i < queries.length; ++i) {
final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
NodesPruningHelper.setupObserverOnClient(sqiArr);
String queryString = (String)queries[i][0];
EmbedStatement es = (EmbedStatement)TestUtil.jdbcConn.createStatement();
String log="\nexecuting Query "+ (i+1) + " : \""+es.getSQLText() + "\"";
getLogWriter().info(log);
es.executeQuery(queryString);
fail("The equijoin query without colocation should have failed");
}
} catch (SQLException sqle) {
getLogWriter().info("Expected exception=" + sqle.getMessage(), sqle);
assertTrue(sqle.getMessage().indexOf(
"The query cannot be executed as it does not have all the "
+ "required colocation equijoin conditions") != -1);
} finally {
GemFireXDQueryObserverHolder
.setInstance(new GemFireXDQueryObserverAdapter());
//clientSQLExecute(1, "Drop table TESTTABLE1 ");
clientSQLExecute(1, "Drop table TESTTABLE2 ");
clientSQLExecute(1, "Drop table TESTTABLE1 ");
}
}
/**
* Tests the nodes pruning logic when colocated queries are executed using partition by range.
*/
public void testComputeNodesBehaviourWithSingleColumnAsPartitoningKey_2()
throws Exception {
invokeInEveryVM(new SerializableRunnable() {
@Override
public void run() {
System.setProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING, "true");
}
});
System.setProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING, "true");
// Create a table from client using partition by column
// Start one client and three servers
startVMs(1, 3);
clientSQLExecute(
1,
"create table TESTTABLE1 (ID1 int not null, "
+ " DESCRIPTION1 varchar(1024) not null, ADDRESS1 varchar(1024) not null, primary key (ID1))"
+ "PARTITION BY RANGE ( ID1 )"
+ " ( VALUES BETWEEN 1 and 4, VALUES BETWEEN 4 and 6, VALUES BETWEEN 6 and +infinity )"+getOverflowSuffix());
clientSQLExecute(
1,
"create table TESTTABLE2 (ID2 int not null, "
+ " DESCRIPTION2 varchar(1024) not null, ADDRESS2 varchar(1024) not null, primary key (ID2))"
+ "PARTITION BY RANGE ( ID2 )"
+ " ( VALUES BETWEEN 1 and 4, VALUES BETWEEN 4 and 6, VALUES BETWEEN 6 and +infinity )"
+ "Colocate with ( TESTTABLE1 )"+getOverflowSuffix());
// Insert values 1 to 8
for (int i = 0; i <= 8; ++i) {
clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
+ ", 'First1" + (i + 1) + "', 'J1 604')");
clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
+ ", 'First2" + (i + 1) + "', 'J2 604')");
}
Object[][] queries = new Object[][] {
//query 1
{ "select * from TESTTABLE1, TESTTABLE2 where ID1 = ID2 AND ID1 > 1 and ID1 < 6 ",
//Conditions
new Object[][] {
//each condition1
{ new Integer(NodesPruningHelper.byrange),new SQLInteger(1), Boolean.FALSE, new SQLInteger(6),Boolean.FALSE }
}
}
};
TestUtil.setupConnection();
try {
for( int i=0; i < queries.length; ++i) {
final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
NodesPruningHelper.setupObserverOnClient(sqiArr);
String queryString = (String)queries[i][0];
EmbedStatement es = (EmbedStatement)TestUtil.jdbcConn.createStatement();
String log="\nexecuting Query "+ (i+1) + " : \""+es.getSQLText() + "\"";
getLogWriter().info(log);
ResultSet rs = es.executeQuery(queryString);
int cnt =0;
while(rs.next()) {
++cnt;
}
assertEquals(cnt,4);
NodesPruningHelper.validateNodePruningForQuery(queryString, sqiArr[0], (Object[][])queries[i][1],this);
getLogWriter().info("Query " + (i+1) + " : succeeded");
rs.close();
}
}
finally {
GemFireXDQueryObserverHolder
.setInstance(new GemFireXDQueryObserverAdapter());
//clientSQLExecute(1, "Drop table TESTTABLE1 ");
clientSQLExecute(1, "Drop table TESTTABLE2 ");
clientSQLExecute(1, "Drop table TESTTABLE1 ");
invokeInEveryVM(new SerializableRunnable() {
@Override
public void run() {
System.clearProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING);
}
});
System.clearProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING);
}
}
/**
* Tests the nodes pruning logic when colocated queries are executed using partition by range.
* Projection attribute used is not *
*/
public void testBug39862()
throws Exception {
// Create a table from client using partition by column
// Start one client and three servers
invokeInEveryVM(new SerializableRunnable() {
@Override
public void run() {
System.setProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING, "true");
}
});
System.setProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING, "true");
startVMs(1, 3);
clientSQLExecute(
1,
"create table TESTTABLE1 (ID1 int not null, "
+ " DESCRIPTION1 varchar(1024) not null, ADDRESS1 varchar(1024) not null, primary key (ID1))"
+ "PARTITION BY RANGE ( ID1 )"
+ " ( VALUES BETWEEN 1 and 4, VALUES BETWEEN 4 and 6, VALUES BETWEEN 6 and +infinity )"+getOverflowSuffix());
clientSQLExecute(
1,
"create table TESTTABLE2 (ID2 int not null, "
+ " DESCRIPTION2 varchar(1024) not null, ADDRESS2 varchar(1024) not null, primary key (ID2))"
+ "PARTITION BY RANGE ( ID2 )"
+ " ( VALUES BETWEEN 1 and 4, VALUES BETWEEN 4 and 6, VALUES BETWEEN 6 and +infinity )"
+ "Colocate with ( TESTTABLE1 )"+getOverflowSuffix());
// Insert values 1 to 8
for (int i = 0; i <= 8; ++i) {
clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
+ ", 'First1" + (i + 1) + "', 'J1 604')");
clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
+ ", 'First2" + (i + 1) + "', 'J2 604')");
}
Object[][] queries = new Object[][] {
//query 1
{ "select ID1, ID2 from TESTTABLE1, TESTTABLE2 where ID1 = ID2 AND ID1 > 1 and ID1 < 6 ",
//Conditions
new Object[][] {
//each condition1
{ new Integer(NodesPruningHelper.byrange),new SQLInteger(1), Boolean.FALSE, new SQLInteger(6),Boolean.FALSE }
}
}
};
TestUtil.setupConnection();
try {
for( int i=0; i < queries.length; ++i) {
final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
NodesPruningHelper.setupObserverOnClient(sqiArr);
String queryString = (String)queries[i][0];
EmbedStatement es = (EmbedStatement)TestUtil.jdbcConn.createStatement();
String log="\nexecuting Query "+ (i+1) + " : \""+ queryString + "\"";
Set<Integer> rslts = new HashSet<Integer>();
for (int j = 2; j < 6; ++j) {
rslts.add(Integer.valueOf(j));
}
getLogWriter().info(log);
ResultSet rs = es.executeQuery(queryString);
int cnt =0;
while(rs.next()) {
++cnt;
assertTrue(rslts.remove(Integer.valueOf(rs.getInt(1))));
}
assertTrue(rslts.isEmpty());
assertFalse(rs.next());
assertEquals(cnt,4);
NodesPruningHelper.validateNodePruningForQuery(queryString, sqiArr[0], (Object[][])queries[i][1],this);
getLogWriter().info("Query " + (i+1) + " : succeeded");
rs.close();
}
}
finally {
GemFireXDQueryObserverHolder
.setInstance(new GemFireXDQueryObserverAdapter());
//clientSQLExecute(1, "Drop table TESTTABLE1 ");
clientSQLExecute(1, "Drop table TESTTABLE2 ");
clientSQLExecute(1, "Drop table TESTTABLE1 ");
try {
System.clearProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING);
invokeInEveryVM(new SerializableRunnable() {
@Override
public void run() {
System.clearProperty(GfxdConstants.GFXD_DISABLE_STATEMENT_MATCHING);
}
});
}
catch(Throwable t) {
throw new AssertionError("Couldn't clear STATEMENT OPTIMIZATION flag. " + t);
}
}
}
  /**
   * Tests the nodes pruning logic when an equijoin query involves a
   * partitioned (PR) region and a replicated region.
   */
  public void testEquijoinQueryForPRAndReplicatedRegion_1()
      throws Exception {
    // Create a table from client using partition by column
    // Start one client and three servers
    startVMs(1, 3);
    VM dataStore1 = this.serverVMs.get(0);
    VM dataStore2 = this.serverVMs.get(1);
    VM dataStore3 = this.serverVMs.get(2);
    // TESTTABLE1 is partitioned by ID1; TESTTABLE2 is replicated on all nodes.
    clientSQLExecute(
        1,
        "create table TESTTABLE1 (ID1 int not null, "
            + " DESCRIPTION1 varchar(1024) not null, ADDRESS1 varchar(1024) not null, primary key (ID1))"
            + "PARTITION BY Column ( ID1 )" +getOverflowSuffix());
    clientSQLExecute(
        1,
        "create table TESTTABLE2 (ID2 int not null, "
            + " DESCRIPTION2 varchar(1024) not null, ADDRESS2 varchar(1024) not null, primary key (ID2))"
            + " REPLICATE"+getOverflowSuffix());
    // Insert rows with keys 1 to 9 into both tables (loop runs i = 0..8).
    for (int i = 0; i <= 8; ++i) {
      clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
          + ", 'First1" + (i + 1) + "', 'J1 604')");
      clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
          + ", 'First2" + (i + 1) + "', 'J2 604')");
    }
    Object[][] queries = new Object[][] {
        // query 1: the only condition is on the replicated table, so the PR
        // side cannot be pruned and the query must be routed to all nodes
        { "select * from TESTTABLE1, TESTTABLE2 where ID2 = 1 ",
            //Conditions
            new Object[][] {
              //each condition1
              { new Integer(NodesPruningHelper.allnodes) }
            }
        }
    };
    TestUtil.setupConnection();
    try {
      for (int i = 0; i < queries.length; ++i) {
        final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
        setupObservers(new VM[] { dataStore1, dataStore2, dataStore3 }, sqiArr);
        String queryString = (String)queries[i][0];
        EmbedStatement es = (EmbedStatement)TestUtil.jdbcConn.createStatement();
        String log = "\nexecuting Query " + (i + 1) + " : \"" + queryString
            + "\"";
        getLogWriter().info(log);
        ResultSet rs = es.executeQuery(queryString);
        // Drain the results; only the node routing is validated here.
        while (rs.next()) {
        }
        // Compute the nodes the query is expected to target and verify that
        // exactly those (and no others) executed it.
        Set<DistributedMember> expectedPrunedNodes = NodesPruningHelper
            .getExpectedNodes(queryString, sqiArr[0],
                (Object[][])queries[i][1], getLogWriter());
        Set<DistributedMember> allNodes = NodesPruningHelper.getExpectedNodes(
            queryString, sqiArr[0], new Object[][] { { new Integer(
                NodesPruningHelper.allnodes) } }, getLogWriter());
        allNodes.removeAll(expectedPrunedNodes);
        verifyQueryExecution(sqiArr[0], expectedPrunedNodes, allNodes, 3, 0);
        getLogWriter().info("Query " + (i + 1) + " : succeeded");
        es.close();
      }
    }
    finally {
      // Restore a no-op observer and drop the test tables.
      GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter());
      isQueryExecutedOnNode = false;
      //invokeInEveryVM(DistributedSQLTestBase.class, "reset");
      clientSQLExecute(1, "Drop table TESTTABLE1 ");
      clientSQLExecute(1, "Drop table TESTTABLE2 ");
    }
  }
  /**
   * Tests nodes pruning for equijoin queries involving partitioned and
   * replicated regions that belong to the same or incompatible server groups.
   */
  public void testEquijoinQueryForPRAndReplicatedRegionWithServerGroups()
      throws Exception {
    // Create a table from client using partition by column
    // Start three servers in server groups SG1/SG2/SG3 respectively, a fourth
    // server in no group, and one client
    AsyncVM async1 = invokeStartServerVM(1, 0, "SG1", null);
    AsyncVM async2 = invokeStartServerVM(2, 0, "SG2", null);
    AsyncVM async3 = invokeStartServerVM(3, 0, "SG3", null);
    AsyncVM async4 = invokeStartServerVM(4, 0, null, null);
    startClientVMs(1, 0, null);
    joinVMs(true, async1, async2, async3, async4);
    clientSQLExecute(1, "create table EMP.PARTTABLE1 (ID1 int not null, "
        + " DESCRIPTION1 varchar(1024) not null, "
        + "ADDRESS1 varchar(1024) not null, primary key (ID1))"
        + "PARTITION BY Column (ID1) SERVER GROUPS (SG1,SG2)");
    clientSQLExecute(1, "create table EMP.PARTTABLE2 (ID11 int not null, "
        + " DESCRIPTION11 varchar(1024) not null, "
        + "ADDRESS11 varchar(1024) not null, primary key (ID11))"
        + "PARTITION BY Column (ID11)" + getOverflowSuffix());
    // check for syntax error (42X01) when SERVER GROUPS is placed before
    // COLOCATE WITH in the DDL
    try {
      clientSQLExecute(1, "create table EMP.PARTTABLE3 (ID12 int not null, "
          + " DESCRIPTION12 varchar(1024) not null, "
          + "ADDRESS12 varchar(1024) not null, primary key (ID12))"
          + "PARTITION BY Column (ID12) SERVER GROUPS (SG1,SG2) "
          + "COLOCATE WITH (EMP.PARTTABLE1)" + getOverflowSuffix());
    } catch (SQLException ex) {
      if (!"42X01".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    // same DDL with clauses in the correct order succeeds
    clientSQLExecute(1, "create table EMP.PARTTABLE3 (ID12 int not null, "
        + " DESCRIPTION12 varchar(1024) not null, "
        + "ADDRESS12 varchar(1024) not null, primary key (ID12))"
        + "PARTITION BY Column (ID12) COLOCATE WITH (EMP.PARTTABLE1) "
        + "SERVER GROUPS (SG1,SG2)" + getOverflowSuffix());
    clientSQLExecute(1, "create table EMP.TESTTABLE2 (ID2 int not null, "
        + " DESCRIPTION2 varchar(1024) not null, "
        + "ADDRESS2 varchar(1024) not null, primary key (ID2))"
        + " REPLICATE SERVER GROUPS (SG1,SG2, sg3)" + getOverflowSuffix());
    // also check for table in different server groups and matching one
    serverSQLExecute(1, "create table EMP.TESTTABLE3 (ID3 int not null, "
        + " DESCRIPTION3 varchar(1024) not null, "
        + "ADDRESS3 varchar(1024) not null, primary key (ID3))"
        + " REPLICATE SERVER GROUPS (SG1)" + getOverflowSuffix());
    serverSQLExecute(2, "create table EMP.TESTTABLE4 (ID4 int not null, "
        + " DESCRIPTION4 varchar(1024) not null, "
        + "ADDRESS4 varchar(1024) not null, primary key (ID4))"
        + " REPLICATE SERVER GROUPS (SG1, SG2)" + getOverflowSuffix());
    // TESTTABLE5 is replicated with no server groups, i.e. on all servers
    serverSQLExecute(1, "create table EMP.TESTTABLE5 (ID5 int not null, "
        + " DESCRIPTION5 varchar(1024) not null, "
        + "ADDRESS5 varchar(1024) not null, primary key (ID5)) REPLICATE"
        + getOverflowSuffix());
    // Insert rows with keys 1 to 11 into all tables (loop runs i = 0..10)
    for (int i = 0; i <= 10; ++i) {
      clientSQLExecute(1, "insert into EMP.PARTTABLE1 values (" + (i + 1)
          + ", 'First1" + (i + 1) + "', 'J1 604')");
      clientSQLExecute(1, "insert into EMP.PARTTABLE2 values (" + (i + 1)
          + ", 'First2" + (i + 1) + "', 'J2 604')");
      serverSQLExecute(2, "insert into EMP.PARTTABLE3 values (" + (i + 1)
          + ", 'First3" + (i + 1) + "', 'J3 604')");
      clientSQLExecute(1, "insert into EMP.TESTTABLE2 values (" + (i + 1)
          + ", 'First2" + (i + 1) + "', 'J2 604')");
      serverSQLExecute(1, "insert into EMP.TESTTABLE3 values (" + (i + 1)
          + ", 'First3" + (i + 1) + "', 'J3 604')");
      serverSQLExecute(3, "insert into EMP.TESTTABLE4 values (" + (i + 1)
          + ", 'First4" + (i + 1) + "', 'J4 604')");
      serverSQLExecute(2, "insert into EMP.TESTTABLE5 values (" + (i + 1)
          + ", 'First5" + (i + 1) + "', 'J5 604')");
    }
    // check exception (0A000, feature not supported) for joins involving
    // tables in incompatible server groups
    try {
      clientSQLExecute(1, "select * from EMP.PARTTABLE1, EMP.TESTTABLE3 "
          + "where ID1 = 4 and ID3 = 1");
      fail("expected exception in join for tables in incompatible server groups");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    try {
      clientSQLExecute(1, "select * from EMP.PARTTABLE2, EMP.TESTTABLE3 "
          + "where ID11 = 4 and ID3 = 1");
      fail("expected exception in join for tables in incompatible server groups");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    try {
      clientSQLExecute(1, "select * from EMP.PARTTABLE1, EMP.PARTTABLE2 "
          + "where ID1 = 4 and ID11 = 1");
      fail("expected exception in join for tables in incompatible server groups");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    try {
      clientSQLExecute(1, "select * from EMP.PARTTABLE1, EMP.PARTTABLE2 "
          + "where ID1 = ID11");
      fail("expected exception in join for tables in incompatible server groups");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    try {
      clientSQLExecute(1, "select * from EMP.PARTTABLE2, EMP.TESTTABLE4 "
          + "where ID11 = 4 and ID4 = 1");
      fail("expected exception in join for tables in incompatible server groups");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    try {
      clientSQLExecute(1, "select * from EMP.TESTTABLE3, EMP.PARTTABLE2, "
          + "EMP.TESTTABLE4");
      fail("expected exception in join for tables in incompatible server groups");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    // 0A000 is also expected for colocated PRs joined on a non-partitioning
    // column
    try {
      clientSQLExecute(1, "select * from EMP.PARTTABLE1, EMP.PARTTABLE3 "
          + "where ADDRESS1 = ADDRESS12");
      fail("expected exception in join for tables in improper join condition");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    try {
      clientSQLExecute(1, "select * from EMP.TESTTABLE5, EMP.PARTTABLE1, "
          + "EMP.PARTTABLE3");
      fail("expected exception in join for tables in improper join condition");
    } catch (SQLException ex) {
      // expected exception
      if (!"0A000".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    // check success for queries involving same server groups, verifying
    // results against the expected golden files
    sqlExecuteVerify(new int[] { 1 }, new int[] { 1, 2, 3 },
        "select * from EMP.PARTTABLE1, EMP.TESTTABLE2 where ID2 = 1 "
            + "AND ID1 = 4", TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", "sgs_1");
    sqlExecuteVerify(new int[] { 1 }, new int[] { 1, 2, 3 },
        "select * from EMP.PARTTABLE1, EMP.TESTTABLE4 where ID4 = 1 "
            + "AND ID1 = 4", TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", "sgs_2");
    sqlExecuteVerify(new int[] { 1 }, new int[] { 1, 2, 3 },
        "select * from EMP.TESTTABLE2, EMP.TESTTABLE4 where ID2 = 1 "
            + "AND ID4 = 4", TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", "sgs_3");
    // reset execution flags on all VMs before checking node routing
    isQueryExecutedOnNode = false;
    invokeInEveryVM(DistributedSQLTestBase.class, "reset");
    VM dataStore1 = this.serverVMs.get(0);
    VM dataStore2 = this.serverVMs.get(1);
    VM dataStore3 = this.serverVMs.get(2);
    VM dataStore4 = this.serverVMs.get(3);
    final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
    setupObservers(new VM[] { dataStore1, dataStore2, dataStore3, dataStore4 },
        sqiArr);
    try {
      // expect query to succeed when server groups of one table are a subset of
      // another
      clientSQLExecute(1, "select * from EMP.TESTTABLE2, EMP.TESTTABLE3",
          false, false, true);
      checkQueryExecution(true, dataStore1);
      // expect query to succeed when an replicated table has no server groups
      serverSQLExecute(3, "select * from EMP.PARTTABLE1, EMP.TESTTABLE5", true,
          false, true);
      checkQueryExecution(false, dataStore1, dataStore2);
      clientSQLExecute(1, "select * from EMP.TESTTABLE2, EMP.TESTTABLE5", true,
          false, true);
      checkQueryExecution(true, dataStore1, dataStore2, dataStore3);
      clientSQLExecute(1, "select * from EMP.TESTTABLE5", false, false, true);
      checkQueryExecution(true, dataStore1, dataStore2, dataStore3, dataStore4);
      serverSQLExecute(1, "select * from EMP.TESTTABLE5, EMP.PARTTABLE1, "
          + "EMP.PARTTABLE3 where ID1 = ID12", false, false, true);
      checkQueryExecution(false, dataStore1, dataStore2);
      // expect success when server groups of replicated tables are superset of those
      // of partitioned tables
      serverSQLExecute(2, "select * from EMP.TESTTABLE5, EMP.PARTTABLE1, "
          + "EMP.TESTTABLE2", true, false, true);
      checkQueryExecution(false, dataStore1, dataStore2);
      serverSQLExecute(3, "select * from EMP.TESTTABLE5, EMP.PARTTABLE1, "
          + "EMP.TESTTABLE2, EMP.PARTTABLE3 where ID1 = ID12", true, false,
          true);
      checkQueryExecution(false, dataStore1, dataStore2);
      clientSQLExecute(1, "select * from EMP.TESTTABLE2, EMP.PARTTABLE1, "
          + "EMP.TESTTABLE4, EMP.PARTTABLE3 where ID1 = ID12", true, false,
          true);
      checkQueryExecution(false, dataStore1, dataStore2);
      clientSQLExecute(1, "select * from EMP.TESTTABLE4, EMP.PARTTABLE3, "
          + "EMP.TESTTABLE2, EMP.PARTTABLE1, EMP.TESTTABLE5 where ID1 = ID12 "
          + "and ADDRESS1 = ADDRESS12", false, false, true);
      checkQueryExecution(false, dataStore1, dataStore2);
    } finally {
      // restore a no-op observer and reset the local execution flag
      GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter());
      isQueryExecutedOnNode = false;
      //invokeInEveryVM(DistributedSQLTestBase.class, "reset");
    }
  }
  /**
   * Tests equi-join queries on a mix of multiple partitioned and replicated
   * tables (bug #40307), including drop ordering for colocated tables.
   */
  public void testEquiJoinPRAndReplicatedMix_40307() throws Exception {
    startVMs(1, 3);
    // PARTTABLE2 is colocated with PARTTABLE1; REPLTABLE1/2 are replicated.
    clientSQLExecute(1, "create table EMP.PARTTABLE1 (ID1 int not null, "
        + " DESCRIPTION1 varchar(1024) not null, "
        + "ADDRESS1 varchar(1024) not null, primary key (ID1))"
        + "PARTITION BY Column (ID1)"+getOverflowSuffix());
    clientSQLExecute(1, "create table EMP.PARTTABLE2 (ID2 int not null, "
        + " DESCRIPTION2 varchar(1024) not null, "
        + "ADDRESS2 varchar(1024) not null, primary key (ID2))"
        + "PARTITION BY Column (ID2) COLOCATE WITH (EMP.PARTTABLE1)"+getOverflowSuffix());
    serverSQLExecute(1, "create table EMP.REPLTABLE1 (ID1 int not null, "
        + " DESCRIPTION1 varchar(1024) not null, "
        + "ADDRESS1 varchar(1024) not null, primary key (ID1)) REPLICATE"+getOverflowSuffix());
    serverSQLExecute(2, "create table EMP.REPLTABLE2 (ID2 int not null, "
        + " DESCRIPTION2 varchar(1024) not null, "
        + "ADDRESS2 varchar(1024) not null, primary key (ID2)) REPLICATE"+getOverflowSuffix());
    // Insert rows with keys 1 to 4 into all four tables.
    for (int i = 1; i <= 4; ++i) {
      clientSQLExecute(1, "insert into EMP.PARTTABLE1 values (" + i
          + ", 'First1" + i + "', 'J1 604')");
      clientSQLExecute(1, "insert into EMP.PARTTABLE2 values (" + i
          + ", 'First2" + i + "', 'J2 604')");
      serverSQLExecute(1, "insert into EMP.REPLTABLE1 values (" + i
          + ", 'RFirst1" + i + "', 'J1 604')");
      serverSQLExecute(3, "insert into EMP.REPLTABLE2 values (" + i
          + ", 'First2" + i + "', 'RJ2 604')");
    }
    // Verify equi-joins mixing replicated and colocated partitioned tables
    // against the expected golden files, executed from client and servers.
    sqlExecuteVerify(new int[] { 1 }, new int[] { 1, 2, 3 },
        "select * from EMP.REPLTABLE1 r1, EMP.PARTTABLE1 p1, "
            + "EMP.PARTTABLE2 p2 where p1.ID1 = p2.ID2 and "
            + "r1.ADDRESS1 = p1.ADDRESS1 and r1.ID1 = p2.ID2", TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", "pr_repl_mix1");
    sqlExecuteVerify(new int[] { 1 }, new int[] { 1, 2, 3 },
        "select * from EMP.REPLTABLE1 r1, EMP.PARTTABLE1 p1, "
            + "EMP.PARTTABLE2 p2, EMP.REPLTABLE2 r2 where p1.ID1 = p2.ID2 and "
            + "r1.ADDRESS1 = p1.ADDRESS1 and r1.ID1 = p2.ID2 and r1.ID1 = r2.ID2 "
            + "and r2.ID2 = 3", TestUtil.getResourcesDir() +
            "/lib/checkEquiJoinQuery.xml", "pr_repl_mix2");
    sqlExecuteVerify(new int[] { 1 }, new int[] { 1, 2, 3 },
        "select * from EMP.REPLTABLE1 r1, EMP.REPLTABLE2 r2, "
            + "EMP.PARTTABLE1 p1, EMP.PARTTABLE2 p2 where p1.ID1 = p2.ID2 and "
            + "r1.ID1 = 2 and r2.ID2 = 4",
        TestUtil.getResourcesDir() + "/lib/checkEquiJoinQuery.xml",
        "pr_repl_mix3");
    clientSQLExecute(1, "drop table EMP.REPLTABLE1");
    clientSQLExecute(1, "drop table EMP.REPLTABLE2");
    // Dropping the base table of a colocated pair must fail with X0Y98.
    addExpectedException(new int[] { 1 }, null,
        UnsupportedOperationException.class);
    try {
      clientSQLExecute(1, "drop table EMP.PARTTABLE1");
      fail("Expected exception in dropping the base table");
    } catch (SQLException ex) {
      // check for the expected exception
      if (!"X0Y98".equals(ex.getSQLState())) {
        throw ex;
      }
    }
    removeExpectedException(new int[] { 1 }, null,
        UnsupportedOperationException.class);
    // Dropping the child table first makes dropping the base table legal.
    clientSQLExecute(1, "drop table EMP.PARTTABLE2");
    clientSQLExecute(1, "drop table EMP.PARTTABLE1");
  }
/**
* Checks for query execution on exactly one node for queries involving only
* replicated tables. If the "checkFirst" parameter is true then it will
* expect the query to be executed on the first VM and on no other VMs --
* useful for testing local execution on data store nodes.
*/
private void checkReplicatedQueryExecution(boolean checkFirst, VM... vms) {
boolean isExecuted = false;
for (VM vm : vms) {
Boolean isExecutedOnNode = (Boolean)vm.invoke(
DistributedSQLTestBase.class, "getQueryStatus");
// for local execution derby's activation plan is generated and
// query observers are not fired; so just check that query is not
// fired on any of the other nodes while result checking will ensure
// the query was executed locally
if (checkFirst && !isExecuted) {
isExecutedOnNode = true;
}
assertFalse("Did not expect query to execute on this node: "
+ vm.toString(), checkFirst && isExecuted && isExecutedOnNode);
if (isExecutedOnNode) {
getLogWriter().info("Query executed on " + vm.toString());
assertFalse("Did not expect query on replicated tables to execute "
+ "on this node", isExecuted);
isExecuted = true;
vm.invoke(DistributedSQLTestBase.class, "reset");
}
}
assertTrue("Expected query to execute on exactly one node", isExecuted);
}
  /**
   * Tests the result and that only one node should be used for equijoin query
   * involving two replicated tables.
   */
  public void testEquijoinQueryForReplicatedRegions_1() throws Exception {
    // Create a table from client using partition by column
    // Start one client and three servers
    startVMs(1, 3);
    VM dataStore1 = this.serverVMs.get(0);
    VM dataStore2 = this.serverVMs.get(1);
    VM dataStore3 = this.serverVMs.get(2);
    // Both tables are replicated, so any single node can answer the join.
    clientSQLExecute(1, "create table TESTTABLE1 (ID1 int not null, "
        + " DESCRIPTION1 varchar(1024) not null, "
        + "ADDRESS1 varchar(1024) not null, primary key (ID1)) REPLICATE"+getOverflowSuffix());
    clientSQLExecute(1, "create table TESTTABLE2 (ID2 int not null, "
        + " DESCRIPTION2 varchar(1024) not null, "
        + "ADDRESS2 varchar(1024) not null, primary key (ID2)) REPLICATE"+getOverflowSuffix());
    // Insert rows with keys 1 to 9 into both tables (loop runs i = 0..8).
    for (int i = 0; i <= 8; ++i) {
      clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
          + ", 'First1" + (i + 1) + "', 'J1 604')");
      clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
          + ", 'First2" + (i + 1) + "', 'J2 604')");
    }
    // Each entry pairs a query with its golden-file result id.
    String[][] queries = new String[][] {
        // query 1
        { "select * from TESTTABLE1, TESTTABLE2 where ID2 = 1", "query1" },
        // query 2
        { "select * from TESTTABLE1, TESTTABLE2 where ID2 = ID1 AND ID1 = 2",
            "query2" },
        // query 3
        { "select * from TESTTABLE1, TESTTABLE2 where ID2 = 1 AND ID1 = 4",
            "query3" }, };
    try {
      for (int i = 0; i < queries.length; ++i) {
        final SelectQueryInfo[] sqiArr = new SelectQueryInfo[1];
        setupObservers(new VM[] { dataStore1, dataStore2, dataStore3 },
            sqiArr);
        String queryString = queries[i][0];
        String log = "\nexecuting Query " + (i + 1) + " : \"" + queryString
            + "\"";
        getLogWriter().info(log);
        // First execute from client node; the query must go to exactly one
        // of the servers
        sqlExecuteVerify(new int[] { 1 }, null, queryString, TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", queries[i][1]);
        assertFalse("Did not expect query to be executed on this node",
            isQueryExecutedOnNode);
        checkReplicatedQueryExecution(false, dataStore1, dataStore2,
            dataStore3);
        // Next execute from each of the server nodes and check that query
        // gets executed locally
        // First server node
        sqlExecuteVerify(null, new int[] { 1 }, queryString, TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", queries[i][1]);
        assertFalse("Did not expect query to be executed on this node",
            isQueryExecutedOnNode);
        checkReplicatedQueryExecution(true, dataStore1, dataStore2,
            dataStore3);
        // Second server node
        sqlExecuteVerify(null, new int[] { 2 }, queryString, TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", queries[i][1]);
        assertFalse("Did not expect query to be executed on this node",
            isQueryExecutedOnNode);
        checkReplicatedQueryExecution(true, dataStore2, dataStore1,
            dataStore3);
        // Third server node
        sqlExecuteVerify(null, new int[] { 3 }, queryString, TestUtil.getResourcesDir()
            + "/lib/checkEquiJoinQuery.xml", queries[i][1]);
        assertFalse("Did not expect query to be executed on this node",
            isQueryExecutedOnNode);
        checkReplicatedQueryExecution(true, dataStore3, dataStore1,
            dataStore2);
        getLogWriter().info("Query " + (i + 1) + " : succeeded");
      }
    }
    finally {
      // Restore a no-op observer and drop the test tables.
      GemFireXDQueryObserverHolder
          .setInstance(new GemFireXDQueryObserverAdapter());
      isQueryExecutedOnNode = false;
      //invokeInEveryVM(DistributedSQLTestBase.class, "reset");
      clientSQLExecute(1, "Drop table TESTTABLE1");
      clientSQLExecute(1, "Drop table TESTTABLE2");
    }
  }
public void testQueryConvertibleToJoin() throws Exception
{
startVMs(1, 3);
clientSQLExecute(
1,
"create table TESTTABLE1 (ID1 int primary key, "
+ " DESCRIPTION1 varchar(1024) not null, ADDRESS1 varchar(1024) not null)"
+ "PARTITION BY Primary key"+getOverflowSuffix());
clientSQLExecute(
1,
"create table TESTTABLE2 (ID2 int primary key, "
+ " DESCRIPTION2 varchar(1024) not null, ADDRESS2 varchar(1024) not null,ID_FK int, foreign key (ID_FK) references TESTTABLE1(ID1))"+getOverflowSuffix());
// Insert values 1 to 8
for (int i = 0; i < 8; ++i) {
clientSQLExecute(1, "insert into TESTTABLE1 values (" + (i + 1)
+ ", 'First" + (i + 1) + "', 'J 604' )");
}
for (int i = 0; i < 8; ++i) {
clientSQLExecute(1, "insert into TESTTABLE2 values (" + (i + 1)
+ ", 'First" + (i + 1) + "', 'J 604', "+(i+1)+" )");
}
sqlExecuteVerify(new int[] { 1 }, null,
"select ID2 from TESTTABLE2 where ADDRESS2 like '%604' AND ID_FK IN (select ID1 from TESTTABLE1 )", TestUtil.getResourcesDir()
+ "/lib/checkStatementQueryDUnit.xml", "testLikePredicateQuery_2",
false, false);
}
public void testSupportedCorrelatedQueries_1() throws Exception {
startVMs(1, 3);
String derbyDbUrl = "jdbc:derby:newDB;create=true;";
if (TestUtil.currentUserName != null) {
derbyDbUrl += ("user=" + TestUtil.currentUserName + ";password="
+ TestUtil.currentUserPassword + ';');
}
Class.forName("org.apache.derby.jdbc.EmbeddedDriver").newInstance();
Connection derbyConn = DriverManager.getConnection(derbyDbUrl);
Statement derbyStmt = derbyConn.createStatement();
cleanDerbyArtifacts(derbyStmt, new String[0],
new String[0], new String[]{"orders","customers"}, null);
String table1 = "create table Customers (cust_id int primary key , cust_name varchar(1024) not null )";
String table2 = "create table Orders (oid int primary key, "
+ "order_type varchar(1024) not null, order_amount int, ordered_by int )";
clientSQLExecute(1, table1 + "partition by column ( cust_id)"+getOverflowSuffix());
clientSQLExecute(1,table2 + "partition by column ( ordered_by ) colocate with ( Customers) "
+getOverflowSuffix());
derbyStmt.executeUpdate(table1);
derbyStmt.executeUpdate(table2);
Connection conn = TestUtil.getConnection();
PreparedStatement psInsertCust = conn.prepareStatement("insert into customers values(?,?)");
PreparedStatement psInsertCustDerby = derbyConn.prepareStatement("insert into customers values(?,?)");
PreparedStatement ps = null;
for (int k = 0; k < 2; ++k) {
ps = k == 0 ? psInsertCust : psInsertCustDerby;
for (int i = 1; i < 101; ++i) {
ps.setInt(1, i);
ps.setString(2, "name_" + i);
ps.executeUpdate();
}
}
PreparedStatement psInsertOrdersGfxd= conn.prepareStatement("insert into Orders values(?,?,?,?)");
PreparedStatement psInsertOrdersDerby= derbyConn.prepareStatement("insert into Orders values(?,?,?,?)");
PreparedStatement psInsertOrders = null;
for (int l = 0; l < 2; ++l) {
psInsertOrders = l == 0 ? psInsertOrdersGfxd : psInsertOrdersDerby;
int j = 1;
for (int i = 1; i < 101; ++i) {
for (int k = 1; k < 101; ++k) {
psInsertOrders.setInt(1, j++);
psInsertOrders.setString(2, "order_type_" + j);
psInsertOrders.setInt(3, i * k * 10);
psInsertOrders.setInt(4, i);
}
}
}
try {
String query = "Select * from Customers where ( select sum(order_amount)"
+ " from orders where ordered_by = cust_id ) > 50";
validateResults(derbyStmt, query, -1, false);
} finally {
clientSQLExecute(1, "drop table orders");
clientSQLExecute(1, "drop table customers");
cleanDerbyArtifacts(derbyStmt, new String[0], new String[0],
new String[] { "orders", "customers" }, derbyConn);
try {
DriverManager.getConnection("jdbc:derby:;shutdown=true");
}
catch (SQLException sqle) {
if (sqle.getMessage().indexOf("shutdown") == -1) {
sqle.printStackTrace();
throw sqle;
}
}
}
}
public void testSupportedCorrelatedQueries_2() throws Exception
{
startVMs(1, 3);
String derbyDbUrl = "jdbc:derby:newDB;create=true;";
if (TestUtil.currentUserName != null) {
derbyDbUrl += ("user=" + TestUtil.currentUserName + ";password="
+ TestUtil.currentUserPassword + ';');
}
Class.forName("org.apache.derby.jdbc.EmbeddedDriver").newInstance();
Connection derbyConn = DriverManager.getConnection(derbyDbUrl);
Statement derbyStmt = derbyConn.createStatement();
cleanDerbyArtifacts(derbyStmt, new String[0], new String[0], new String[] {
"orders", "customers" }, null);
String table1 = "create table Customers (cust_id int primary key , "
+ "cust_name varchar(1024) not null, address varchar(1024) )";
String table2 = "create table Orders (oid int primary key, "
+ "order_type varchar(1024) not null, order_amount int, ordered_by int, "
+ "orderer_name varchar(1024) )";
String table3 = "create table Suborders (sub_oid int primary key, "
+ "sub_order_type varchar(1024) not null, sub_order_amount int, sub_ordered_by int, "
+ "sub_orderer_name varchar(1024) )";
String table4 = "create table sub_suborders (sub_sub_oid int primary key, "
+ "sub_sub_order_type varchar(1024) not null, sub_sub_order_amount int, sub_sub_ordered_by int, "
+ "sub_sub_orderer_name varchar(1024) )";
clientSQLExecute(1, table1 + "partition by column (cust_id, cust_name) "
+ getOverflowSuffix());
clientSQLExecute(1, table2
+ "partition by column ( ordered_by,orderer_name) "
+ "colocate with (customers) " + getOverflowSuffix());
clientSQLExecute(1, table3
+ "partition by column ( sub_ordered_by,sub_orderer_name ) "
+ "colocate with (orders) " + getOverflowSuffix());
clientSQLExecute(1, table4
+ "partition by column ( sub_sub_ordered_by,sub_sub_orderer_name ) "
+ "colocate with (customers)" + getOverflowSuffix());
derbyStmt.executeUpdate(table1);
derbyStmt.executeUpdate(table2);
derbyStmt.executeUpdate(table3);
derbyStmt.executeUpdate(table4);
String query1 = "Select * from Customers where "
+ " ( select sum(order_amount) from orders where ordered_by = cust_id and "
+ "orderer_name = cust_name ) > 1000 and "
+ "( select sum(sub_order_amount) from suborders where sub_orderer_name = cust_name "
+ " and sub_ordered_by = cust_id and 12 = "
+ "( select Sum(sub_sub_ordered_by) from sub_suborders where " +
"sub_sub_ordered_by = sub_ordered_by and sub_sub_orderer_name = cust_name)" + ") < 500 ";
String query2 = "Select * from Customers where "
+ " ( select sum(order_amount) from orders where ordered_by = cust_id and "
+ "orderer_name = cust_name and "
+ " oid > ( select sum(sub_order_amount) from suborders where "
+ " sub_orderer_name = orderer_name and sub_ordered_by = cust_id and "
+ "sub_oid > ( select sum(sub_sub_order_amount) from sub_suborders where "
+ " sub_sub_orderer_name = orderer_name and sub_sub_ordered_by = sub_ordered_by ) )"
+ ") > 100";
String queries[] = new String[] { query1, query2 };
Connection conn = TestUtil.getConnection();
PreparedStatement psInsertCust = conn
.prepareStatement("insert into customers values(?,?,?)");
PreparedStatement psInsertCustDerby = derbyConn
.prepareStatement("insert into customers values(?,?,?)");
PreparedStatement ps = null;
for (int k = 0; k < 2; ++k) {
ps = k == 0 ? psInsertCust : psInsertCustDerby;
for (int i = 1; i < 101; ++i) {
ps.setInt(1, i);
ps.setString(2, "name_" + i);
ps.setString(3, "address_" + i);
ps.executeUpdate();
}
}
PreparedStatement psInsertOrdersGfxd = conn
.prepareStatement("insert into Orders values(?,?,?,?,?)");
PreparedStatement psInsertOrdersDerby = derbyConn
.prepareStatement("insert into Orders values(?,?,?,?,?)");
for (int l = 0; l < 2; ++l) {
PreparedStatement psInsertOrders = l == 0 ? psInsertOrdersGfxd : psInsertOrdersDerby;
int j = 1;
for (int i = 1; i < 101; ++i) {
for (int k = 1; k < 101; ++k) {
psInsertOrders.setInt(1, j++);
psInsertOrders.setString(2, "order_type_" + j);
psInsertOrders.setInt(3, i * k * 10);
psInsertOrders.setInt(4, i);
psInsertOrders.setString(5, "order_name_" + j);
}
}
}
PreparedStatement psInsertSubOrdersGfxd = conn
.prepareStatement("insert into SubOrders values" + "(?,?,?,?,?)");
PreparedStatement psInsertSubOrdersDerby = derbyConn
.prepareStatement("insert into SubOrders " + "values(?,?,?,?,?)");
for (int l = 0; l < 2; ++l) {
PreparedStatement psInsertSubOrders = l == 0 ? psInsertSubOrdersGfxd : psInsertSubOrdersDerby;
int j = 1;
for (int i = 1; i < 101; ++i) {
for (int k = 1; k < 101; ++k) {
psInsertSubOrders.setInt(1, j++);
psInsertSubOrders.setString(2, "sub_order_type_" + j);
psInsertSubOrders.setInt(3, i * k * 10);
psInsertSubOrders.setInt(4, i);
psInsertSubOrders.setString(5, "sub_order_name_" + j);
}
}
}
PreparedStatement psInsertSubSubOrdersGfxd = conn
.prepareStatement("insert into SubOrders values" + "(?,?,?,?,?)");
PreparedStatement psInsertSubSubOrdersDerby = derbyConn
.prepareStatement("insert into SubOrders " + "values(?,?,?,?,?)");
for (int l = 0; l < 2; ++l) {
PreparedStatement psInsertSubSubOrders = l == 0 ? psInsertSubSubOrdersGfxd : psInsertSubSubOrdersDerby;
int j = 1;
for (int i = 1; i < 101; ++i) {
for (int k = 1; k < 101; ++k) {
psInsertSubSubOrders.setInt(1, j++);
psInsertSubSubOrders.setString(2, "sub_order_type_" + j);
psInsertSubSubOrders.setInt(3, i * k * 10);
psInsertSubSubOrders.setInt(4, i);
psInsertSubSubOrders.setString(5, "sub_order_name_" + j);
}
}
}
try {
for (String query : queries) {
validateResults(derbyStmt, query, -1, false);
}
} finally {
clientSQLExecute(1, "drop table suborders");
clientSQLExecute(1, "drop table sub_suborders");
clientSQLExecute(1, "drop table orders");
clientSQLExecute(1, "drop table customers");
cleanDerbyArtifacts(derbyStmt, new String[0], new String[0],
new String[] { "sub_suborders", "suborders", "orders", "customers" },
derbyConn);
try {
DriverManager.getConnection("jdbc:derby:;shutdown=true");
}
catch (SQLException sqle) {
if (sqle.getMessage().indexOf("shutdown") == -1) {
sqle.printStackTrace();
throw sqle;
}
}
}
}
  /**
   * Verifies that correlated subqueries over partitioned tables that are not
   * (sufficiently) colocated are rejected with a
   * {@link java.sql.SQLFeatureNotSupportedException}.
   *
   * Two cases are exercised:
   * 1. Customers/Orders are partitioned on different columns and declare no
   *    colocation at all.
   * 2. Orders2 is colocated with Customers2, but on the composite key
   *    (ordered_by, orderer_name); the subquery correlates only on
   *    ordered_by, so the required colocation equijoin conditions are
   *    incomplete.
   */
  public void testUnsupportedCorrelatedQueries_1() throws Exception {
    // One client VM and one server VM are enough: the failure is detected at
    // query-planning time, not at execution time.
    startVMs(1, 1);
    String table1 = "create table Customers (cust_id int primary key, "
        + "cust_name varchar(1024) not null)";
    String table2 = "create table Orders (oid int primary key, order_type "
        + "varchar(1024) not null, order_amount int, ordered_by int)";
    String table3 = "create table Customers2 (cust_id int primary key, "
        + "cust_name varchar(1024) not null, address varchar(1024))";
    String table4 = "create table Orders2 (oid int primary key, order_type "
        + "varchar(1024) not null, order_amount int, ordered_by int, "
        + "orderer_name varchar(1024))";
    // Customers/Orders: partitioned on unrelated columns, no colocation.
    clientSQLExecute(1, table1 + "partition by column (cust_id)"
        + getOverflowSuffix());
    clientSQLExecute(1, table2 + "partition by column (ordered_by)"
        + getOverflowSuffix());
    // Customers2/Orders2: colocated, but on a two-column partitioning key.
    clientSQLExecute(1, table3 + "partition by column (cust_id, cust_name)"
        + getOverflowSuffix());
    clientSQLExecute(1, table4 + "partition by column (ordered_by, "
        + "orderer_name) colocate with (Customers2) " + getOverflowSuffix());
    try {
      try {
        // Case 1: correlated subquery across non-colocated tables must fail.
        String query = "Select * from Customers where ( select sum("
            + "order_amount) from orders where ordered_by = cust_id ) > 1000";
        clientSQLExecute(1, query);
        fail("Query execution should have failed "
            + "as it is not having colocated tables");
      } catch (SQLFeatureNotSupportedException sqle) {
        // Only accept the exception if it carries the expected reason.
        if (sqle.toString().indexOf("not colocated with") == -1) {
          fail("Did not get the expected exception. Exception occured: " + sqle);
        }
      }
      try {
        // Case 2: colocated tables, but the correlation covers only one of the
        // two partitioning columns — the equijoin condition set is incomplete.
        String query = "Select * from customers2 where ( select sum("
            + "order_amount) from Orders2 where ordered_by = cust_id ) > 1000";
        clientSQLExecute(1, query);
        fail("Should have failed to execute as tables are not colocated");
      } catch (SQLFeatureNotSupportedException sqle) {
        if (sqle.toString().indexOf(
            "The query cannot be executed as it does not have all the "
                + "required colocation equijoin conditions") == -1) {
          fail("Did not get the expected exception. Exception occured: " + sqle);
        }
      }
    } finally {
      // Drop child tables before their colocation targets.
      clientSQLExecute(1, "drop table orders");
      clientSQLExecute(1, "drop table customers");
      clientSQLExecute(1, "drop table orders2");
      clientSQLExecute(1, "drop table customers2");
    }
  }
  /**
   * Regression test for ticket #43206: joining SYS.SYSTABLES with SYS.MEMBERS
   * on the HOSTDATA column should work whether the predicate compares HOSTDATA
   * as a string ('true') or as a number (1), and should return one row per
   * data-hosting member (three server VMs are started below).
   */
  public void test43206() throws Exception {
    // 2 client VMs, 3 server (data-hosting) VMs.
    startVMs(2, 3);
    Connection conn = TestUtil.getConnection();
    conn.createStatement().execute("create table pizza (i int) ");
    // String-valued comparison: m.hostdata = 'true'.
    ResultSet rs = conn
        .createStatement()
        .executeQuery(
            "select m.ID, m.hostdata from sys.systables t, sys.members m where "
                + "t.tablename='PIZZA' and m.hostdata = 'true'");
    // Exactly three rows — one per server VM.
    assertTrue(rs.next());
    assertTrue(rs.next());
    assertTrue(rs.next());
    assertFalse(rs.next());
    // Numeric comparison: m.hostdata = 1.
    rs = conn
        .createStatement()
        .executeQuery(
            "select m.ID, m.hostdata from sys.systables t, sys.members m where "
                + "t.tablename='PIZZA' and m.hostdata = 1");
    assertTrue(rs.next());
    assertTrue(rs.next());
    assertTrue(rs.next());
    assertFalse(rs.next());
    // NOTE(review): this third query is byte-identical to the second one —
    // presumably a copy/paste leftover; confirm whether a different predicate
    // (or a prepared/parameterized variant) was intended here.
    rs = conn
        .createStatement()
        .executeQuery(
            "select m.ID, m.hostdata from sys.systables t, sys.members m where "
                + "t.tablename='PIZZA' and m.hostdata = 1");
    assertTrue(rs.next());
    assertTrue(rs.next());
    assertTrue(rs.next());
    assertFalse(rs.next());
  }
  /**
   * Suffix appended to every CREATE TABLE statement issued by the tests in
   * this class. The base implementation adds nothing (a single space);
   * presumably subclasses override it to append an eviction/overflow clause —
   * confirm against the subclass hierarchy.
   *
   * @return the clause to append, never {@code null}
   */
  public String getOverflowSuffix() {
    return " ";
  }
  /**
   * Per-VM teardown hook: runs the superclass teardown first, then resets
   * state local to this test class via {@code reset()} (defined elsewhere in
   * this file; not visible here).
   */
  @Override
  protected void vmTearDown() throws Exception {
    super.vmTearDown();
    reset();
  }
}
| apache-2.0 |
AeroGlass/g3m | tools/vectorial-streaming/src/com/glob3mobile/vectorial/processing/SortingLODPointFeaturesStatistics.java | 9356 |
package com.glob3mobile.vectorial.processing;
import java.awt.Color;
import java.awt.Font;
import java.io.File;
import java.io.IOException;
import java.util.List;
import com.glob3mobile.geo.Geodetic2D;
import com.glob3mobile.geo.Sector;
import com.glob3mobile.utils.Progress;
import com.glob3mobile.vectorial.lod.sorting.PointFeatureSortingLODStorage;
import com.glob3mobile.vectorial.lod.sorting.mapdb.PointFeatureSortingLODMapDBStorage;
import com.glob3mobile.vectorial.storage.PointFeature;
import com.glob3mobile.vectorial.storage.QuadKey;
import com.glob3mobile.vectorial.storage.QuadKeyUtils;
import com.glob3mobile.vectorial.utils.GEOBitmap;
/**
 * Command-line experiment tool: opens a read-only point-feature LOD storage,
 * prints its statistics, and renders the LOD tree (node sectors plus the
 * features stored at each node) into a PNG image via {@link GEOBitmap}.
 *
 * The large commented-out sections are ad-hoc probes kept around for manual
 * experimentation; they are intentionally left in place.
 */
public class SortingLODPointFeaturesStatistics {

   /**
    * Depth-first visitor that draws every visited node — its full sector, its
    * minimum (tight) sector, and its features — onto a single world-spanning
    * bitmap, reporting progress as it goes.
    */
   public static final class LODDrawer
      implements
         PointFeatureSortingLODStorage.NodeVisitor {

      private final PointFeatureSortingLODStorage _lodStorage;
      // Total node count, used only to size the progress reporter.
      private final int                           _nodesCount;
      // Created in start(), released in stop().
      private Progress                            _progress;
      private GEOBitmap                           _bitmap;


      private LODDrawer(final PointFeatureSortingLODStorage lodStorage,
                        final int nodesCount) {
         _lodStorage = lodStorage;
         _nodesCount = nodesCount;
      }


      @Override
      public boolean visit(final PointFeatureSortingLODStorage.Node node) {
         // System.out.println(node.getID() + ", features=" + node.getFeaturesCount());

         // Semi-transparent yellow for feature points.
         final Color featureColor = new Color(1, 1, 0, 0.25f);

         final int level = node.getDepth();
         final int maxLevel = 15;
         // Nodes at or below maxLevel are skipped to keep the image readable.
         if (level < maxLevel) {
            // final Font font = new Font(Font.SERIF, Font.BOLD, 12 + ((maxLevel - level) * 3));
            // final Font font = new Font(Font.SERIF, Font.BOLD, 14);
            // final int pointSize = (maxLevel - level);
            final int pointSize = 2;

            // White outline: the node's full quadtree sector.
            final Sector nodeSector = node.getNodeSector();
            _bitmap.drawSector(nodeSector, new Color(1, 1, 1, 0.05f), new Color(1, 1, 1, 0.2f));

            // Cyan outline: the minimum sector actually covered by features.
            final Sector minimumSector = node.getMinimumSector();
            _bitmap.drawSector(minimumSector, new Color(0, 1, 1, 0.05f), new Color(0, 1, 1, 0.2f));

            for (final PointFeature feature : node.getFeatures()) {
               _bitmap.drawPoint(feature._position, pointSize, pointSize, featureColor);
               // final String featureName = (String) feature._properties.get("name");
               // _bitmap.drawPoint(featureName, feature._position, pointSize, pointSize, featureColor, font);
            }
         }

         _progress.stepDone();
         // Keep visiting: never aborts the traversal early.
         return true;
      }


      @Override
      public void start() {
         _progress = new Progress(_nodesCount) {
            @Override
            public void informProgress(final long stepsDone,
                                       final double percent,
                                       final long elapsed,
                                       final long estimatedMsToFinish) {
               System.out.println(_lodStorage.getName() + " - Processing: "
                                  + progressString(stepsDone, percent, elapsed, estimatedMsToFinish));
            }
         };

         final int width = 2048;
         // final Sector sector = _lodStorage.getSector();
         final Sector sector = Sector.FULL_SPHERE;
         // Keep the bitmap's aspect ratio equal to the sector's lat/lon ratio.
         final double sectorFactor = sector._deltaLatitude._radians / sector._deltaLongitude._radians;
         final int height = Math.round((float) sectorFactor * width);
         _bitmap = new GEOBitmap(sector, width, height, Color.BLACK);
      }


      @Override
      public void stop() {
         // Persist the rendering; any I/O failure aborts the run.
         try {
            _bitmap.save(new File("LOD_" + _lodStorage.getName() + ".png"));
         }
         catch (final IOException e) {
            throw new RuntimeException(e);
         }
         _bitmap = null;
         _progress.finish();
         _progress = null;
      }
   }


   public static void main(final String[] args) throws IOException {
      System.out.println("SortingLODPointFeaturesStatistics 0.1");
      System.out.println("-------------------------------------\n");

      final File directory = new File("PointFeaturesLOD");
      // final String name = "Cities1000_LOD";
      // final String name = "AR_LOD";
      // final String name = "ES_LOD";
      final String name = "GEONames-PopulatedPlaces_LOD";

      // try-with-resources: the storage is closed even if rendering fails.
      try (final PointFeatureSortingLODStorage lodStorage = PointFeatureSortingLODMapDBStorage.openReadOnly(directory, name)) {
         final PointFeatureSortingLODStorage.Statistics statistics = lodStorage.getStatistics(true);
         statistics.show();

         final int nodesCount = statistics.getNodesCount();

         // // final String id = "102311321213";
         // //final String id = "100221122202";
         //
         // // drawLODForNode(lodStorage, "100221122200");
         // // drawLODForNode(lodStorage, "100221122201");
         // // drawLODForNode(lodStorage, "100221122202");
         // // drawLODForNode(lodStorage, "100221122203");
         //
         // // drawLODForNode(lodStorage, "100221033310");
         // // drawLODForNode(lodStorage, "100221033311");
         // // drawLODForNode(lodStorage, "100221033312");
         // // drawLODForNode(lodStorage, "100221033313");
         //
         // // drawLODForSector(lodStorage, "Spain", Sector.fromDegrees(-8, 37, 1, 42));
         //
         // // drawLODForNode(lodStorage, "03311101");
         //
         // // final Sector baSector = Sector.fromDegrees( //
         // //          -34.703643764020576157, -58.579915412480858095, //
         // //          -34.519657217233664426, -58.307698526247456527);
         // //
         // // drawLODForSector(lodStorage, "Buenos Aires", baSector);
         //
         // // drawLODForPosition(lodStorage, "Buenos Aires", Geodetic2D.fromDegrees(-34.610202831685171532, -58.385756267343843717));
         // // drawLODForPosition(lodStorage, "Madrid", Geodetic2D.fromDegrees(40.414924394015059761, -3.6980228798493248732));
         // // drawLODForPosition(lodStorage, "Caceres", Geodetic2D.fromDegrees(39.483333, -6.366667));
         //
         // // drawLODForPosition(lodStorage, "DC", Geodetic2D.fromDegrees(38.904722, -77.016389));
         // // drawLODForPosition(lodStorage, "New York City", Geodetic2D.fromDegrees(40.7127, -74.0059));
         // drawLODForPosition(lodStorage, "San Francisco", Geodetic2D.fromDegrees(37.783333, -122.416667));

         lodStorage.acceptDepthFirstVisitor(new LODDrawer(lodStorage, nodesCount));
      }

      System.out.println("\n- done!");
   }


   /**
    * Renders the LOD levels for the quadtree node identified by {@code nodeID}
    * by resolving the node's sector and delegating to
    * {@link #drawLODForSector}.
    */
   public static void drawLODForNode(final PointFeatureSortingLODStorage lodStorage,
                                     final String nodeID) throws IOException {
      final QuadKey root = new QuadKey(new byte[] {}, lodStorage.getSector());
      final Sector searchSector = QuadKey.sectorFor(root, QuadKeyUtils.toBinaryID(nodeID));
      drawLODForSector(lodStorage, nodeID, searchSector);
   }


   /**
    * Renders all LOD nodes intersecting {@code searchSector} into an image
    * named after {@code name}.
    */
   public static void drawLODForSector(final PointFeatureSortingLODStorage lodStorage,
                                       final String name,
                                       final Sector searchSector) throws IOException {
      final List<PointFeatureSortingLODStorage.Node> nodes = lodStorage.getNodesFor(searchSector);
      drawNodes(name, searchSector, nodes);
   }


   /**
    * Renders the chain of LOD nodes covering {@code position}, framed by the
    * sector of the deepest node in the chain.
    */
   public static void drawLODForPosition(final PointFeatureSortingLODStorage lodStorage,
                                         final String name,
                                         final Geodetic2D position) throws IOException {
      final List<PointFeatureSortingLODStorage.Node> nodes = lodStorage.getNodesFor(position);
      // Frame the image with the deepest (last) node's sector.
      final Sector sector = nodes.get(nodes.size() - 1).getNodeSector();
      drawNodes(name, sector, nodes);
   }


   /**
    * Draws the given nodes' labeled features onto a 2048x2048 bitmap and saves
    * it as "LOD-Levels-&lt;name&gt;.png". Shallower nodes (earlier in the
    * list) are drawn with larger points/fonts.
    */
   public static void drawNodes(final String name,
                                final Sector sector,
                                final List<PointFeatureSortingLODStorage.Node> nodes) throws IOException {
      final GEOBitmap bitmap = new GEOBitmap(sector, 2048, 2048, Color.BLACK);

      for (int i = 0; i < nodes.size(); i++) {
         final PointFeatureSortingLODStorage.Node node = nodes.get(i);
         if (node != null) {
            System.out.println("Node #\"" + node.getID() + "\", features=" + node.getFeaturesCount());
            // Larger markers for shallower LOD levels.
            final int pointSize = ((nodes.size() - i) + 1) * 3;
            final Color featureColor = new Color(1, 1, 0, 0.8f);
            final Font featureFont = new Font(Font.SERIF, Font.BOLD, 6 + pointSize);
            for (final PointFeature features : node.getFeatures()) {
               final String featuresName = (String) features._properties.get("name");
               bitmap.drawPoint(featuresName + " LOD:" + i, features._position, pointSize, pointSize, featureColor, featureFont,
                        featureColor);
            }
         }
      }

      bitmap.save(new File("LOD-Levels-" + name + ".png"));
   }
}
| bsd-2-clause |
AeroGlass/g3m | Commons/G3MSharedSDK/src/org/glob3/mobile/generated/WMSBilElevationDataProvider_BufferDownloadListener.java | 1830 | package org.glob3.mobile.generated;
/**
 * Download listener that parses a WMS BIL16 elevation response into a
 * {@link ShortBufferElevationData} and forwards the result (or an error) to an
 * {@link IElevationDataListener}.
 *
 * NOTE: this class lives in {@code org.glob3.mobile.generated} — it is
 * machine-translated from C++ source, which explains the explicit
 * {@code dispose()} calls and null-after-dispose discipline. Do not "clean
 * them up" by hand; regenerate from the C++ side instead.
 */
public class WMSBilElevationDataProvider_BufferDownloadListener extends IBufferDownloadListener
{
  // Defensive copy of the requested geographic sector.
  private final Sector _sector ;
  // Requested raster extent, in samples.
  private final int _width;
  private final int _height;
  // Cleared (and optionally disposed) after delivery when _autodeleteListener is set.
  private IElevationDataListener _listener;
  private final boolean _autodeleteListener;
  // Offset applied to every parsed elevation sample.
  private final double _deltaHeight;

  public WMSBilElevationDataProvider_BufferDownloadListener(Sector sector, Vector2I extent, IElevationDataListener listener, boolean autodeleteListener, double deltaHeight)
  {
     _sector = new Sector(sector);
     _width = extent._x;
     _height = extent._y;
     _listener = listener;
     _autodeleteListener = autodeleteListener;
     _deltaHeight = deltaHeight;
  }

  /**
   * Parses the downloaded BIL16 buffer; reports onData on success, onError if
   * parsing returned null. The buffer is disposed in either case.
   */
  public final void onDownload(URL url, IByteBuffer buffer, boolean expired)
  {
     final Vector2I resolution = new Vector2I(_width, _height);

     ShortBufferElevationData elevationData = BilParser.parseBil16(_sector, resolution, buffer, _deltaHeight);

     // Translated C++ "delete buffer": release the raw bytes once parsed.
     if (buffer != null)
        buffer.dispose();

     if (elevationData == null)
     {
        _listener.onError(_sector, resolution);
     }
     else
     {
        _listener.onData(_sector, resolution, elevationData);
     }

     if (_autodeleteListener)
     {
        if (_listener != null)
           _listener.dispose();
        _listener = null;
     }
  }

  /** Forwards the download failure to the elevation listener. */
  public final void onError(URL url)
  {
     final Vector2I resolution = new Vector2I(_width, _height);
     _listener.onError(_sector, resolution);
     if (_autodeleteListener)
     {
        if (_listener != null)
           _listener.dispose();
        _listener = null;
     }
  }

  /** Cancellation is intentionally ignored. */
  public final void onCancel(URL url)
  {
  }

  /** Canceled-with-data is intentionally ignored. */
  public final void onCanceledDownload(URL url, IByteBuffer data, boolean expired)
  {
  }
}
Minjung-Baek/Dexter | project/dexter-metrics/src/test/com/samsung/sec/dexter/metrics/util/MetricUtilTest.java | 3579 | /**
* Copyright (c) 2017 Samsung Electronics, Inc.,
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.samsung.sec.dexter.metrics.util;
import static org.junit.Assert.assertEquals;
import org.junit.Assert;
//import static org.junit.Assert.*;
import org.junit.Test;
import com.samsung.sec.dexter.core.exception.DexterRuntimeException;
import com.samsung.sec.dexter.metrics.util.MetricUtil;
/**
 * Unit tests for {@link MetricUtil#getFunctionLOCArray(String, int, int)}:
 * error behavior for missing files and inverted line ranges, the
 * empty-file-path convention (returns 0), and LOC counting on a sample file.
 */
public class MetricUtilTest {
	// Sample source file used by the range/LOC tests below (Windows-style path).
	String TestFilePath = ".\\src\\sample\\TestFile_For_MetricUtilTest.java";

	// NOTE(review): this test builds a source snippet but the only call that
	// would exercise it is commented out — as written it asserts nothing.
	@Test
	public void test() {
		final String source = "void main()\n"
				+ "{\n" +
				"//test4\n" +
				"/*test6\n" +
				"THIS IS COMMENT 1\n" +
				"THIS IS COMMENT 2\n" +
				"THIS IS COMMENT 3\n" +
				"*/\n" +
				"print(\"%d\",10);\n" +
				"THIS IS NOT A COMMENT1 //comment \n"+
				"THIS IS NOT A COMMENT2 /*comment*/ \n" +
				"void main3(){ } \n" +
				"void main5(){ \n" +
				"} \n" +
				"}";
		//MetricUtil.getFunctionLOCArray(source, 0, 14);
	}

	// A nonexistent file must raise DexterRuntimeException.
	@Test
	public void MetricUtilTest_ThrowsDexterRuntimeException_GivenNoneExistanceFile () {
		int start = 0;
		int end = 10;
		try {
			MetricUtil.getFunctionLOCArray(".\\ThereIsNoSuchFile.no", start, end);
			Assert.fail();
		}
		catch(DexterRuntimeException e) {
		}
	}

	// An empty file path is treated as "no file": LOC is 0, no exception.
	@Test
	public void MetricUtilTest_ReturnsZero_GivenEmptyFileName () {
		int start = 0;
		int end = 10;
		int loc;
		try {
			loc=MetricUtil.getFunctionLOCArray("", start, end);
			assertEquals(0, loc);
		}
		catch(DexterRuntimeException e) {
			Assert.fail();
		}
	}

	// NOTE(review): duplicates the assertion-free part of
	// MetricUtilTest_ReturnsZero_GivenEmptyFileName and verifies nothing —
	// presumably a leftover; consider removing or adding assertions.
	@Test
	public void MetricUtilTest_EmptyFilePath() {
		int start = 0;
		int end = 10;
		MetricUtil.getFunctionLOCArray("", start, end);
	}

	// start > end must raise DexterRuntimeException with a specific message.
	@Test
	public void MetricUtilTest_ThrowsDexterRuntimeException_GivenStartGreaterThanEnd() {
		int start = 4;
		int end = 2;
		try {
			MetricUtil.getFunctionLOCArray(TestFilePath, start, end);
			Assert.fail();
		}
		catch(DexterRuntimeException e) {
			assertEquals("cannot count line because end line "
					+ "is smaller than start line number. "
					+ "start:" + start + " end:" + end, e.getMessage());
		}
	}

	// Happy path: the sample file's lines 0-48 contain 12 lines of code.
	// NOTE(review): assertEquals arguments are reversed here (expected should
	// come first: assertEquals(12, loc)) — harmless for a passing test, but
	// the failure message would be misleading.
	@Test
	public void MetricUtilTest_ReturnsProperLoc_GivenCorrectExample() {
		int start = 0;
		int end = 48;
		int loc=0;
		loc=MetricUtil.getFunctionLOCArray(TestFilePath, start, end);
		System.out.println(loc);
		assertEquals(loc, 12);
	}
}
TomasMikula/ReactFX | reactfx/src/main/java/org/reactfx/FlatMapStream.java | 1437 | package org.reactfx;
import java.util.Optional;
import java.util.function.Function;
/**
 * Event stream produced by {@link EventStream#flatMap(Function)}.
 *
 * <p>Each event {@code t} from {@code source} is mapped to a new inner
 * {@link EventStream}; events of the most recent inner stream are re-emitted
 * by this stream. Subscribing to a new inner stream first unsubscribes from
 * the previous one, so at most one inner stream is observed at a time.
 */
class FlatMapStream<T, U> extends EventStreamBase<U> {
    private final EventStream<T> source;
    private final Function<? super T, ? extends EventStream<U>> mapper;

    // Subscription to the current inner stream; EMPTY when none is active.
    private Subscription mappedSubscription = Subscription.EMPTY;

    public FlatMapStream(
            EventStream<T> src,
            Function<? super T, ? extends EventStream<U>> f) {
        this.source = src;
        this.mapper = f;
    }

    @Override
    protected Subscription observeInputs() {
        Subscription s = source.subscribe(t -> {
            // Switch to the inner stream produced by the newest source event:
            // drop the old inner subscription, then start observing the new one.
            mappedSubscription.unsubscribe();
            mappedSubscription = mapper.apply(t).subscribe(this::emit);
        });
        return () -> {
            // Tear down both the outer and the current inner subscription,
            // and reset to EMPTY so a later re-observation starts clean.
            s.unsubscribe();
            mappedSubscription.unsubscribe();
            mappedSubscription = Subscription.EMPTY;
        };
    }
}
/**
 * Event stream that passes each source event through an
 * {@link Optional}-returning function and re-emits only the values that are
 * present; events mapped to {@link Optional#empty()} are silently dropped.
 */
class FlatMapOptStream<T, U> extends EventStreamBase<U> {
    private final EventStream<T> input;
    private final Function<? super T, Optional<U>> toOptional;

    public FlatMapOptStream(
            EventStream<T> src,
            Function<? super T, Optional<U>> f) {
        input = src;
        toOptional = f;
    }

    @Override
    protected Subscription observeInputs() {
        // Emit only when the mapping function yields a value.
        return input.subscribe(event -> {
            Optional<U> mapped = toOptional.apply(event);
            mapped.ifPresent(this::emit);
        });
    }
}
| bsd-2-clause |
andrewgaul/jSCSI | bundles/target/src/main/java/org/jscsi/target/util/BinaryLock.java | 1875 | package org.jscsi.target.util;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Instances of {@link BinaryLock} can be used to prevent concurrent access to the same resource, so, in essence, this
* is a very simplified {@link Lock} implementation, however lacking many advanced capabilities.
* <p>
* A {@link BinaryLock} knows only two states, locked and unlocked. Attempts by the lock-holder to lock a
* {@link BinaryLock} when locked or to unlock it when unlocked, will have no effect.
*
* @author Andreas Ergenzinger
*/
public class BinaryLock {

    /**
     * The {@link ReentrantLock} which backs up the {@link BinaryLock} and takes care of suspending and notifying
     * waiting {@link Thread}s.
     */
    private final ReentrantLock lock = new ReentrantLock();

    /**
     * This method is used to acquire the lock. It will block until no other {@link Thread} is holding the lock and then
     * return <code>true</code> to indicate the successful lock acquisition, or return <code>false</code>, if the
     * calling {@link Thread} was interrupted while waiting for the lock (the thread's interrupt status is restored
     * before returning).
     * <p>
     * If the caller is already holding the lock, the method will immediately return <code>true</code> without any
     * changes.
     *
     * @return <code>true</code> if and only if the lock is held by the caller on return
     */
    public boolean lock () {
        // Re-acquiring would silently bump ReentrantLock's hold count, contradicting
        // the documented binary semantics ("no changes" when already locked), so a
        // second lock() by the holder is an explicit no-op.
        if (lock.isHeldByCurrentThread()) return true;
        try {
            lock.lockInterruptibly();
            return true;
        } catch (InterruptedException e) {
            // Do not swallow the interrupt: restore the flag so callers/frameworks
            // further up the stack can still observe it.
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /**
     * Releases the lock when called by the current lock holder; has no effect when called by any other thread or when
     * the lock is not held.
     */
    public void unlock () {
        if (lock.isHeldByCurrentThread()) {
            // Defensive: drain every hold so the lock is fully released even if it
            // was ever acquired re-entrantly (lock() normally prevents that).
            while (lock.getHoldCount() > 0)
                lock.unlock();
        }
    }
}
| bsd-3-clause |
delkyd/Oracle-Cloud | PaaS_SaaS_Accelerator_RESTFulFacade/FusionProxy_SalesLeadsService/src/com/oracle/xmlns/apps/marketing/leadmgmt/leads/leadservice/types/FindSalesLeadProductAsyncResponse.java | 2094 |
package com.oracle.xmlns.apps.marketing.leadmgmt.leads.leadservice.types;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import com.oracle.xmlns.apps.marketing.leadmgmt.leads.leadservice.MklProdAssoc;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="result" type="{http://xmlns.oracle.com/apps/marketing/leadMgmt/leads/leadService/}MklProdAssoc" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"result"
})
@XmlRootElement(name = "findSalesLeadProductAsyncResponse")
public class FindSalesLeadProductAsyncResponse {
protected List<MklProdAssoc> result;
/**
* Gets the value of the result property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the result property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getResult().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link MklProdAssoc }
*
*
*/
public List<MklProdAssoc> getResult() {
if (result == null) {
result = new ArrayList<MklProdAssoc>();
}
return this.result;
}
}
| bsd-3-clause |
eoogbe/api-client-staging | generated/java/proto-google-cloud-dlp-v2beta1/src/main/java/com/google/privacy/dlp/v2beta1/CloudStoragePath.java | 17940 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2beta1/storage.proto
package com.google.privacy.dlp.v2beta1;
/**
* <pre>
* A location in Cloud Storage.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2beta1.CloudStoragePath}
*/
public final class CloudStoragePath extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2beta1.CloudStoragePath)
CloudStoragePathOrBuilder {
// Use CloudStoragePath.newBuilder() to construct.
private CloudStoragePath(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CloudStoragePath() {
path_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private CloudStoragePath(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 10: {
java.lang.String s = input.readStringRequireUtf8();
path_ = s;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_CloudStoragePath_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_CloudStoragePath_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2beta1.CloudStoragePath.class, com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder.class);
}
public static final int PATH_FIELD_NUMBER = 1;
private volatile java.lang.Object path_;
/**
* <pre>
* The url, in the format of `gs://bucket/<path>`.
* </pre>
*
* <code>string path = 1;</code>
*/
public java.lang.String getPath() {
java.lang.Object ref = path_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
path_ = s;
return s;
}
}
/**
* <pre>
* The url, in the format of `gs://bucket/<path>`.
* </pre>
*
* <code>string path = 1;</code>
*/
public com.google.protobuf.ByteString
getPathBytes() {
java.lang.Object ref = path_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
path_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getPathBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, path_);
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getPathBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, path_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.privacy.dlp.v2beta1.CloudStoragePath)) {
return super.equals(obj);
}
com.google.privacy.dlp.v2beta1.CloudStoragePath other = (com.google.privacy.dlp.v2beta1.CloudStoragePath) obj;
boolean result = true;
result = result && getPath()
.equals(other.getPath());
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PATH_FIELD_NUMBER;
hash = (53 * hash) + getPath().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2beta1.CloudStoragePath parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.privacy.dlp.v2beta1.CloudStoragePath prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A location in Cloud Storage.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2beta1.CloudStoragePath}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2beta1.CloudStoragePath)
com.google.privacy.dlp.v2beta1.CloudStoragePathOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_CloudStoragePath_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_CloudStoragePath_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2beta1.CloudStoragePath.class, com.google.privacy.dlp.v2beta1.CloudStoragePath.Builder.class);
}
// Construct using com.google.privacy.dlp.v2beta1.CloudStoragePath.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
path_ = "";
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.privacy.dlp.v2beta1.DlpStorage.internal_static_google_privacy_dlp_v2beta1_CloudStoragePath_descriptor;
}
public com.google.privacy.dlp.v2beta1.CloudStoragePath getDefaultInstanceForType() {
return com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance();
}
public com.google.privacy.dlp.v2beta1.CloudStoragePath build() {
com.google.privacy.dlp.v2beta1.CloudStoragePath result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.privacy.dlp.v2beta1.CloudStoragePath buildPartial() {
com.google.privacy.dlp.v2beta1.CloudStoragePath result = new com.google.privacy.dlp.v2beta1.CloudStoragePath(this);
result.path_ = path_;
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
    // Reflective repeated-field element setter; narrows the return type to Builder.
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    // Reflective repeated-field appender; narrows the return type to Builder.
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    // Type-dispatching merge: uses the field-wise overload when `other` is a
    // CloudStoragePath, otherwise falls back to the reflective superclass merge.
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2beta1.CloudStoragePath) {
        return mergeFrom((com.google.privacy.dlp.v2beta1.CloudStoragePath)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: proto3 scalar fields are only copied when non-default
    // (here: non-empty string). Merging the default instance is a no-op.
    public Builder mergeFrom(com.google.privacy.dlp.v2beta1.CloudStoragePath other) {
      if (other == com.google.privacy.dlp.v2beta1.CloudStoragePath.getDefaultInstance()) return this;
      if (!other.getPath().isEmpty()) {
        path_ = other.path_;
        onChanged();
      }
      // NOTE(review): this trailing onChanged() fires even when nothing was
      // merged — a harmless spurious invalidation emitted by the code generator.
      onChanged();
      return this;
    }
    // Proto3 messages have no required fields, so any instance is initialized.
    public final boolean isInitialized() {
      return true;
    }
    // Parses a serialized CloudStoragePath from the stream and merges it into
    // this builder. On a parse error, any partially-parsed message is still
    // merged (finally block) before the exception is rethrown as an IOException.
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.privacy.dlp.v2beta1.CloudStoragePath parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.privacy.dlp.v2beta1.CloudStoragePath) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Backing store for `path`: holds either a String or a not-yet-decoded
    // ByteString, decoded lazily and memoized on first String access.
    private java.lang.Object path_ = "";
    /**
     * <pre>
     * The url, in the format of `gs://bucket/<path>`.
     * </pre>
     *
     * <code>string path = 1;</code>
     */
    public java.lang.String getPath() {
      java.lang.Object ref = path_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and cache the String form.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        path_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * The url, in the format of `gs://bucket/<path>`.
     * </pre>
     *
     * <code>string path = 1;</code>
     */
    public com.google.protobuf.ByteString
        getPathBytes() {
      java.lang.Object ref = path_;
      if (ref instanceof String) {
        // Encode the cached String once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        path_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * The url, in the format of `gs://bucket/<path>`.
     * </pre>
     *
     * <code>string path = 1;</code>
     */
    // Rejects null (proto3 strings are never null); stores the String directly.
    public Builder setPath(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  
      path_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The url, in the format of `gs://bucket/<path>`.
     * </pre>
     *
     * <code>string path = 1;</code>
     */
    // Resets `path` to the proto3 default (the empty string).
    public Builder clearPath() {
      path_ = getDefaultInstance().getPath();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * The url, in the format of `gs://bucket/<path>`.
     * </pre>
     *
     * <code>string path = 1;</code>
     */
    // Byte-level setter: validates that the bytes are well-formed UTF-8 before
    // storing the ByteString (decoded lazily by getPath()).
    public Builder setPathBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
      
      path_ = value;
      onChanged();
      return this;
    }
    // No-op: this generated class discards unknown fields entirely
    // (behavior of the proto3 codegen version that produced this file).
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    // No-op: unknown fields are discarded rather than merged (see setUnknownFields).
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
// @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2beta1.CloudStoragePath)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2beta1.CloudStoragePath)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.privacy.dlp.v2beta1.CloudStoragePath DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2beta1.CloudStoragePath();
  }
  public static com.google.privacy.dlp.v2beta1.CloudStoragePath getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton; delegates to the message's parsing constructor.
  private static final com.google.protobuf.Parser<CloudStoragePath>
      PARSER = new com.google.protobuf.AbstractParser<CloudStoragePath>() {
    public CloudStoragePath parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new CloudStoragePath(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<CloudStoragePath> parser() {
    return PARSER;
  }
  // Instance-level accessors required by the Message/MessageLite contracts.
  @java.lang.Override
  public com.google.protobuf.Parser<CloudStoragePath> getParserForType() {
    return PARSER;
  }
  public com.google.privacy.dlp.v2beta1.CloudStoragePath getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| bsd-3-clause |
motech-implementations/mim | kilkari/src/main/java/org/motechproject/nms/kilkari/domain/SubscriptionError.java | 3115 | package org.motechproject.nms.kilkari.domain;
import org.motechproject.mds.annotations.Entity;
import org.motechproject.mds.annotations.Field;
import javax.validation.constraints.NotNull;
/**
 * Entity for logging rejected Kilkari subscriptions.
 */
@Entity(tableName = "nms_subscription_errors")
public class SubscriptionError {

    /** Subscriber phone number that the rejected record referred to. */
    @Field
    private long contactNumber;

    /** MctsId or RchId, depending on {@link #importOrigin}. */
    @Field
    private String beneficiaryId;

    /** Why the subscription was rejected. */
    @Field
    private SubscriptionRejectionReason rejectionReason;

    /** Subscription pack the rejected record targeted. */
    @Field
    private SubscriptionPackType packType;

    /** Optional free-text detail accompanying the rejection reason. */
    @Field
    private String rejectionMessage;

    /** Source system of the import; must always be present. */
    @Field
    @NotNull
    private SubscriptionOrigin importOrigin;

    /**
     * Convenience constructor with no rejection message and no beneficiary id.
     */
    public SubscriptionError(long contactNumber, SubscriptionRejectionReason rejectionReason,
                             SubscriptionPackType packType, SubscriptionOrigin importOrigin) {
        this(contactNumber, rejectionReason, packType, null, importOrigin);
    }

    /**
     * Convenience constructor with no beneficiary id.
     * Fixed: previously duplicated the field assignments of the full
     * constructor; now chains to it to keep initialization in one place.
     */
    public SubscriptionError(long contactNumber, SubscriptionRejectionReason rejectionReason,
                             SubscriptionPackType packType, String rejectionMessage, SubscriptionOrigin importOrigin) {
        this(contactNumber, null, rejectionReason, packType, rejectionMessage, importOrigin);
    }

    /**
     * Full constructor; the single place where all fields are assigned.
     */
    public SubscriptionError(long contactNumber, String beneficiaryId, SubscriptionRejectionReason rejectionReason,
                             SubscriptionPackType packType, String rejectionMessage, SubscriptionOrigin importOrigin) {
        this.contactNumber = contactNumber;
        this.beneficiaryId = beneficiaryId;
        this.rejectionReason = rejectionReason;
        this.packType = packType;
        this.rejectionMessage = rejectionMessage;
        this.importOrigin = importOrigin;
    }

    public long getContactNumber() {
        return contactNumber;
    }

    public void setContactNumber(long contactNumber) {
        this.contactNumber = contactNumber;
    }

    public String getBeneficiaryId() {
        return beneficiaryId;
    }

    public void setBeneficiaryId(String beneficiaryId) {
        this.beneficiaryId = beneficiaryId;
    }

    public SubscriptionRejectionReason getRejectionReason() {
        return rejectionReason;
    }

    public void setRejectionReason(SubscriptionRejectionReason rejectionReason) {
        this.rejectionReason = rejectionReason;
    }

    public SubscriptionPackType getPackType() {
        return packType;
    }

    public void setPackType(SubscriptionPackType packType) {
        this.packType = packType;
    }

    public String getRejectionMessage() {
        return rejectionMessage;
    }

    public void setRejectionMessage(String rejectionMessage) {
        this.rejectionMessage = rejectionMessage;
    }

    public SubscriptionOrigin getImportOrigin() {
        return importOrigin;
    }

    public void setImportOrigin(SubscriptionOrigin importOrigin) {
        this.importOrigin = importOrigin;
    }
}
| bsd-3-clause |
shaniaki/ForSyDe-Eclipse | plugins/se.kth.ict.forsyde/src-gen/forsyde/impl/ProcessImpl.java | 5001 | /**
*/
package forsyde.impl;
import forsyde.ForsydePackage;
import forsyde.Port;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Process</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link forsyde.impl.ProcessImpl#getName <em>Name</em>}</li>
* <li>{@link forsyde.impl.ProcessImpl#getPorts <em>Ports</em>}</li>
* </ul>
*
* @generated
*/
// NOTE(review): EMF-generated implementation — members carry "@generated" tags.
// Per EMF conventions, regenerating from the model overwrites methods whose
// @generated tag is intact; do not hand-edit logic here without removing the tag.
public abstract class ProcessImpl extends MinimalEObjectImpl.Container implements forsyde.Process {
	/**
	 * The default value of the '{@link #getName() <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getName()
	 * @generated
	 * @ordered
	 */
	protected static final String NAME_EDEFAULT = null;
	/**
	 * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getName()
	 * @generated
	 * @ordered
	 */
	protected String name = NAME_EDEFAULT;
	/**
	 * The cached value of the '{@link #getPorts() <em>Ports</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getPorts()
	 * @generated
	 * @ordered
	 */
	protected EList<Port> ports;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ProcessImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return ForsydePackage.Literals.PROCESS;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getName() {
		return name;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setName(String newName) {
		// Notify adapters (e.g. editors) of the attribute change when required.
		String oldName = name;
		name = newName;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, ForsydePackage.PROCESS__NAME, oldName, name));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<Port> getPorts() {
		// Lazily created containment list; membership manages the Port's container.
		if (ports == null) {
			ports = new EObjectContainmentEList<Port>(Port.class, this, ForsydePackage.PROCESS__PORTS);
		}
		return ports;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case ForsydePackage.PROCESS__PORTS:
				return ((InternalEList<?>)getPorts()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case ForsydePackage.PROCESS__NAME:
				return getName();
			case ForsydePackage.PROCESS__PORTS:
				return getPorts();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case ForsydePackage.PROCESS__NAME:
				setName((String)newValue);
				return;
			case ForsydePackage.PROCESS__PORTS:
				getPorts().clear();
				getPorts().addAll((Collection<? extends Port>)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case ForsydePackage.PROCESS__NAME:
				setName(NAME_EDEFAULT);
				return;
			case ForsydePackage.PROCESS__PORTS:
				getPorts().clear();
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case ForsydePackage.PROCESS__NAME:
				return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
			case ForsydePackage.PROCESS__PORTS:
				return ports != null && !ports.isEmpty();
		}
		return super.eIsSet(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();
		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (name: ");
		result.append(name);
		result.append(')');
		return result.toString();
	}
} //ProcessImpl
| bsd-3-clause |
standevgd/gooddata-java | src/main/java/com/gooddata/project/ProjectValidationResultItem.java | 1656 | /*
* Copyright (C) 2004-2017, GoodData(R) Corporation. All rights reserved.
* This source code is licensed under the BSD-style license found in the
* LICENSE.txt file in the root directory of this source tree.
*/
package com.gooddata.project;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.gooddata.util.GoodDataToStringBuilder;
import java.util.List;
/**
 * Internal DTO wrapping one item of a project validation response.
 * Used for deserialization only; never exposed to API users.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
class ProjectValidationResultItem {

    private final Body body;
    private final ProjectValidationType validation;

    @JsonCreator
    private ProjectValidationResultItem(@JsonProperty("body") final Body resultBody,
                                        @JsonProperty("from") final ProjectValidationType validationType) {
        this.body = resultBody;
        this.validation = validationType;
    }

    /** Validation log entries carried in the response body. */
    List<ProjectValidationResult> getLogs() {
        return this.body.logs;
    }

    /** The validation type this item originated from. */
    ProjectValidationType getValidation() {
        return this.validation;
    }

    @Override
    public String toString() {
        return GoodDataToStringBuilder.defaultToString(this);
    }

    /** Response body: just the list of log entries. */
    @JsonIgnoreProperties(ignoreUnknown = true)
    private static class Body {

        private final List<ProjectValidationResult> logs;

        @JsonCreator
        private Body(@JsonProperty("log") final List<ProjectValidationResult> logEntries) {
            this.logs = logEntries;
        }

        @Override
        public String toString() {
            return GoodDataToStringBuilder.defaultToString(this);
        }
    }
}
| bsd-3-clause |
xzy256/grpc-java-mips64 | grpclb/src/generated/main/java/io/grpc/grpclb/InitialLoadBalanceResponseOrBuilder.java | 2386 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: load_balancer.proto
package io.grpc.grpclb;
// Generated accessor interface for InitialLoadBalanceResponse; implemented by
// both the message and its builder.
public interface InitialLoadBalanceResponseOrBuilder extends
    // @@protoc_insertion_point(interface_extends:grpc.lb.v1.InitialLoadBalanceResponse)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * This is an application layer redirect that indicates the client should
   * use the specified server for load balancing. When this field is set in
   * the response, the client should open a separate connection to the
   * load_balancer_delegate and call the BalanceLoad method.
   * </pre>
   *
   * <code>optional string load_balancer_delegate = 2;</code>
   */
  java.lang.String getLoadBalancerDelegate();
  /**
   * <pre>
   * This is an application layer redirect that indicates the client should
   * use the specified server for load balancing. When this field is set in
   * the response, the client should open a separate connection to the
   * load_balancer_delegate and call the BalanceLoad method.
   * </pre>
   *
   * <code>optional string load_balancer_delegate = 2;</code>
   */
  com.google.protobuf.ByteString
      getLoadBalancerDelegateBytes();

  /**
   * <pre>
   * This interval defines how often the client should send the client stats
   * to the load balancer. Stats should only be reported when the duration is
   * positive.
   * </pre>
   *
   * <code>optional .google.protobuf.Duration client_stats_report_interval = 3;</code>
   */
  boolean hasClientStatsReportInterval();
  /**
   * <pre>
   * This interval defines how often the client should send the client stats
   * to the load balancer. Stats should only be reported when the duration is
   * positive.
   * </pre>
   *
   * <code>optional .google.protobuf.Duration client_stats_report_interval = 3;</code>
   */
  com.google.protobuf.Duration getClientStatsReportInterval();
  /**
   * <pre>
   * This interval defines how often the client should send the client stats
   * to the load balancer. Stats should only be reported when the duration is
   * positive.
   * </pre>
   *
   * <code>optional .google.protobuf.Duration client_stats_report_interval = 3;</code>
   */
  com.google.protobuf.DurationOrBuilder getClientStatsReportIntervalOrBuilder();

  // Identifies which member of the initial_response_type oneof is set.
  public io.grpc.grpclb.InitialLoadBalanceResponse.InitialResponseTypeCase getInitialResponseTypeCase();
}
| bsd-3-clause |
fukata/xstream-for-android | xstream/src/test/com/thoughtworks/acceptance/someobjects/Protocol.java | 640 | /*
* Copyright (C) 2004 Joe Walnes.
* Copyright (C) 2006, 2007 XStream Committers.
* All rights reserved.
*
* The software in this package is published under the terms of the BSD
* style license a copy of which has been included with this distribution in
* the LICENSE.txt file.
*
* Created on 07. March 2004 by Joe Walnes
*/
package com.thoughtworks.acceptance.someobjects;
/**
 * Acceptance-test fixture: a protocol identified by a single id.
 *
 * @author <a href="mailto:jason@maven.org">Jason van Zyl</a>
 */
public class Protocol {

    // The field name itself is significant: XStream acceptance tests serialize
    // this object by field name, so do not rename it.
    private String id;

    /** Returns the identifier of this protocol, or {@code null} if unset. */
    public String getId() {
        return id;
    }
}
| bsd-3-clause |
NCIP/caaers | caAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/dao/query/PreExistingConditionQuery.java | 978 | /*******************************************************************************
* Copyright SemanticBits, Northwestern University and Akaza Research
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caaers/LICENSE.txt for details.
******************************************************************************/
package gov.nih.nci.cabig.caaers.dao.query;
import gov.nih.nci.cabig.caaers.domain.PreExistingCondition;
/**
 * HQL query builder for {@code PreExistingCondition} entities.
 *
 * @author Biju Joseph
 * @author Ion C. Olaru
 */
public class PreExistingConditionQuery extends AbstractQuery {

    /** Base HQL selecting all pre-existing conditions, aliased as {@code p}. */
    private static final String BASE_HQL = "select p from PreExistingCondition p";

    public PreExistingConditionQuery() {
        super(BASE_HQL);
    }

    /** Restricts results to the given MedDRA LLT code. */
    public void filterByMeddraCode(String meddraCode) {
        andWhere("p.meddraLltCode = :mc");
        setParameter("mc", meddraCode);
    }

    /** Restricts results by retired status. */
    public void filterByRetiredStatus(Boolean status) {
        super.filterByRetiredStatus("p", status);
    }
}
| bsd-3-clause |
patrickianwilson/vijava-contrib | src/main/java/com/vmware/vim25/VirtualDeviceConfigSpec.java | 2778 | /*================================================================================
Copyright (c) 2013 Steve Jin. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of copyright holders nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25;
/**
* @author Steve Jin (http://www.doublecloud.org)
* @version 5.1
*/
@SuppressWarnings("all")
public class VirtualDeviceConfigSpec extends DynamicData {
public VirtualDeviceConfigSpecOperation operation;
public VirtualDeviceConfigSpecFileOperation fileOperation;
public VirtualDevice device;
public VirtualMachineProfileSpec[] profile;
public VirtualDeviceConfigSpecOperation getOperation() {
return this.operation;
}
public VirtualDeviceConfigSpecFileOperation getFileOperation() {
return this.fileOperation;
}
public VirtualDevice getDevice() {
return this.device;
}
public VirtualMachineProfileSpec[] getProfile() {
return this.profile;
}
public void setOperation(VirtualDeviceConfigSpecOperation operation) {
this.operation=operation;
}
public void setFileOperation(VirtualDeviceConfigSpecFileOperation fileOperation) {
this.fileOperation=fileOperation;
}
public void setDevice(VirtualDevice device) {
this.device=device;
}
public void setProfile(VirtualMachineProfileSpec[] profile) {
this.profile=profile;
}
} | bsd-3-clause |
daejunpark/jsaf | third_party/pmd/src/test/java/net/sourceforge/pmd/lang/java/rule/coupling/LawOfDemeterRuleTest.java | 583 | /**
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.java.rule.coupling;
import net.sourceforge.pmd.testframework.SimpleAggregatorTst;
import org.junit.Before;
/** Rule tests for the LawOfDemeter rule in the java-coupling ruleset. */
public class LawOfDemeterRuleTest extends SimpleAggregatorTst {

    private static final String RULESET = "java-coupling";

    /** Bridges these JUnit 4 tests into a JUnit 3 style suite. */
    public static junit.framework.Test suite() {
        return new junit.framework.JUnit4TestAdapter(LawOfDemeterRuleTest.class);
    }

    @Before
    public void setUp() {
        addRule(RULESET, "LawOfDemeter");
    }
}
| bsd-3-clause |
minagri-rwanda/DHIS2-Agriculture | dhis-api/src/main/java/org/hisp/dhis/validation/ValidationRuleGroup.java | 7167 | package org.hisp.dhis.validation;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonView;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.MergeMode;
import org.hisp.dhis.common.annotation.Scanned;
import org.hisp.dhis.common.view.DetailedView;
import org.hisp.dhis.common.view.ExportView;
import org.hisp.dhis.schema.annotation.PropertyRange;
import org.hisp.dhis.user.UserGroup;
import java.util.HashSet;
import java.util.Set;
/**
 * A named group of validation rules, with optional user groups to notify when
 * member rules produce alerts. The member association is bidirectional with
 * {@code ValidationRule#getGroups()} and must be maintained on both sides.
 *
 * @author Lars Helge Overland
 */
@JacksonXmlRootElement( localName = "validationRuleGroup", namespace = DxfNamespaces.DXF_2_0 )
public class ValidationRuleGroup
    extends BaseIdentifiableObject
{
    private String description;

    @Scanned
    private Set<ValidationRule> members = new HashSet<>();

    @Scanned
    private Set<UserGroup> userGroupsToAlert = new HashSet<>();

    private boolean alertByOrgUnits;

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    public ValidationRuleGroup()
    {
    }

    public ValidationRuleGroup( String name, String description, Set<ValidationRule> members )
    {
        this.name = name;
        this.description = description;
        this.members = members;
    }

    // -------------------------------------------------------------------------
    // Logic
    // -------------------------------------------------------------------------

    /**
     * Adds the rule to this group and registers this group on the rule,
     * keeping both sides of the association consistent.
     */
    public void addValidationRule( ValidationRule validationRule )
    {
        members.add( validationRule );
        validationRule.getGroups().add( this );
    }

    /**
     * Removes the rule from this group and unregisters this group from the
     * rule, keeping both sides of the association consistent.
     */
    public void removeValidationRule( ValidationRule validationRule )
    {
        members.remove( validationRule );
        validationRule.getGroups().remove( this );
    }

    /**
     * Removes all member rules.
     * Fixed: previously only cleared this side of the association, leaving a
     * stale reference to this group in each rule's group set; now clears both
     * sides, consistent with {@link #removeValidationRule(ValidationRule)}.
     */
    public void removeAllValidationRules()
    {
        for ( ValidationRule validationRule : members )
        {
            validationRule.getGroups().remove( this );
        }

        members.clear();
    }

    /**
     * Indicates whether this group has user roles to alert.
     */
    public boolean hasUserGroupsToAlert()
    {
        return userGroupsToAlert != null && !userGroupsToAlert.isEmpty();
    }

    // -------------------------------------------------------------------------
    // Getters and setters
    // -------------------------------------------------------------------------

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    @PropertyRange( min = 2 )
    public String getDescription()
    {
        return description;
    }

    public void setDescription( String description )
    {
        this.description = description;
    }

    @JsonProperty( "validationRules" )
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "validationRules", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "validationRule", namespace = DxfNamespaces.DXF_2_0 )
    public Set<ValidationRule> getMembers()
    {
        return members;
    }

    public void setMembers( Set<ValidationRule> members )
    {
        this.members = members;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlElementWrapper( localName = "userGroupsToAlert", namespace = DxfNamespaces.DXF_2_0 )
    @JacksonXmlProperty( localName = "userGroupToAlert", namespace = DxfNamespaces.DXF_2_0 )
    public Set<UserGroup> getUserGroupsToAlert()
    {
        return userGroupsToAlert;
    }

    public void setUserGroupsToAlert( Set<UserGroup> userGroupsToAlert )
    {
        this.userGroupsToAlert = userGroupsToAlert;
    }

    @JsonProperty
    @JsonView( { DetailedView.class, ExportView.class } )
    @JacksonXmlProperty( namespace = DxfNamespaces.DXF_2_0 )
    public boolean isAlertByOrgUnits()
    {
        return alertByOrgUnits;
    }

    public void setAlertByOrgUnits( boolean alertByOrgUnits )
    {
        this.alertByOrgUnits = alertByOrgUnits;
    }

    /**
     * Merges state from another ValidationRuleGroup: copies the description
     * according to the merge mode, then replaces the member association.
     */
    @Override
    public void mergeWith( IdentifiableObject other, MergeMode mergeMode )
    {
        super.mergeWith( other, mergeMode );

        if ( other.getClass().isInstance( this ) )
        {
            ValidationRuleGroup validationRuleGroup = (ValidationRuleGroup) other;

            if ( mergeMode.isReplace() )
            {
                description = validationRuleGroup.getDescription();
            }
            else if ( mergeMode.isMerge() )
            {
                description = validationRuleGroup.getDescription() == null ? description : validationRuleGroup.getDescription();
            }

            removeAllValidationRules();

            for ( ValidationRule validationRule : validationRuleGroup.getMembers() )
            {
                addValidationRule( validationRule );
            }
        }
    }
}
| bsd-3-clause |
stephenc/vijava | src/main/java/com/vmware/vim25/ComputeResourceEventArgument.java | 2114 | /*================================================================================
Copyright (c) 2013 Steve Jin. All Rights Reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the names of copyright holders nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================================================*/
package com.vmware.vim25;
/**
* @author Steve Jin (http://www.doublecloud.org)
* @version 5.1
*/
@SuppressWarnings("all")
public class ComputeResourceEventArgument extends EntityEventArgument {
public ManagedObjectReference computeResource;
public ManagedObjectReference getComputeResource() {
return this.computeResource;
}
public void setComputeResource(ManagedObjectReference computeResource) {
this.computeResource=computeResource;
}
} | bsd-3-clause |
googleapis/gax-java | gax-httpjson/src/test/java/com/google/api/gax/httpjson/ProtoRestSerializerTest.java | 5163 | /*
* Copyright 2020 Google LLC
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google LLC nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.google.api.gax.httpjson;
import com.google.common.truth.Truth;
import com.google.protobuf.Field;
import com.google.protobuf.Field.Cardinality;
import com.google.protobuf.Option;
import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class ProtoRestSerializerTest {
  // Serializer under test; recreated before every test method.
  private ProtoRestSerializer<Field> requestSerializer;
  // Sample protobuf message and its expected JSON rendering, kept in sync.
  private Field field;
  private String fieldJson;
  @Before
  public void setUp() {
    requestSerializer = ProtoRestSerializer.create();
    field =
        Field.newBuilder()
            .setNumber(2)
            .setName("field_name1")
            .addOptions(Option.newBuilder().setName("opt_name1").build())
            .addOptions(Option.newBuilder().setName("opt_name2").build())
            .setCardinality(Cardinality.CARDINALITY_OPTIONAL)
            .build();
    // Expected JSON mirrors the builder calls above (proto3 JSON mapping).
    fieldJson =
        "{\n"
            + "  \"cardinality\": \"CARDINALITY_OPTIONAL\",\n"
            + "  \"number\": 2,\n"
            + "  \"name\": \"field_name1\",\n"
            + "  \"options\": [{\n"
            + "    \"name\": \"opt_name1\"\n"
            + "  }, {\n"
            + "    \"name\": \"opt_name2\"\n"
            + "  }]\n"
            + "}";
  }
@Test
public void toJson() {
String fieldToJson = requestSerializer.toJson(field);
Truth.assertThat(fieldToJson).isEqualTo(fieldJson);
}
@Test
public void fromJson() {
Field fieldFromJson =
requestSerializer.fromJson(new StringReader(fieldJson), Field.newBuilder());
Truth.assertThat(fieldFromJson).isEqualTo(field);
}
@Test
public void fromJsonInvalidJson() {
try {
requestSerializer.fromJson(new StringReader("heh"), Field.newBuilder());
Assert.fail();
} catch (RestSerializationException e) {
Truth.assertThat(e.getCause()).isInstanceOf(IOException.class);
}
}
@Test
public void putPathParam() {
Map<String, String> fields = new HashMap<>();
requestSerializer.putPathParam(fields, "optName1", 1);
requestSerializer.putPathParam(fields, "optName2", 0);
requestSerializer.putPathParam(fields, "optName3", "three");
requestSerializer.putPathParam(fields, "optName4", "");
Map<String, String> expectedFields = new HashMap<>();
expectedFields.put("optName1", "1");
expectedFields.put("optName2", "0");
expectedFields.put("optName3", "three");
expectedFields.put("optName4", "");
Truth.assertThat(fields).isEqualTo(expectedFields);
}
@Test
public void putQueryParam() {
Map<String, List<String>> fields = new HashMap<>();
requestSerializer.putQueryParam(fields, "optName1", 1);
requestSerializer.putQueryParam(fields, "optName2", 0);
requestSerializer.putQueryParam(fields, "optName3", "three");
requestSerializer.putQueryParam(fields, "optName4", "");
requestSerializer.putQueryParam(fields, "optName5", Arrays.asList("four", "five"));
Map<String, List<String>> expectedFields = new HashMap<>();
expectedFields.put("optName1", Arrays.asList("1"));
expectedFields.put("optName2", Arrays.asList("0"));
expectedFields.put("optName3", Arrays.asList("three"));
expectedFields.put("optName4", Arrays.asList(""));
expectedFields.put("optName5", Arrays.asList("four", "five"));
Truth.assertThat(fields).isEqualTo(expectedFields);
}
@Test
public void toBody() {
String body = requestSerializer.toBody("bodyField1", field);
Truth.assertThat(body).isEqualTo(fieldJson);
}
}
| bsd-3-clause |
pgesek/modules | dhis2/src/main/java/org/motechproject/dhis2/rest/domain/ProgramTrackedEntityAttributeDto.java | 743 | package org.motechproject.dhis2.rest.domain;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.map.annotate.JsonSerialize;
/**
 * Models a single program tracked entity attribute as returned by the DHIS2 API.
 */
@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
public class ProgramTrackedEntityAttributeDto {

    // The attribute definition nested inside the program attribute payload.
    private TrackedEntityAttributeDto trackedEntityAttribute;

    public TrackedEntityAttributeDto getTrackedEntityAttribute() {
        return this.trackedEntityAttribute;
    }

    public void setTrackedEntityAttribute(TrackedEntityAttributeDto trackedEntityAttribute) {
        this.trackedEntityAttribute = trackedEntityAttribute;
    }
}
| bsd-3-clause |
knopflerfish/knopflerfish.org | osgi/bundles_test/regression_tests/restart_test/test_target_bundles/bundleC_test/src/org/knopflerfish/bundle/bundleC_test/BundleActivator.java | 2907 | /*
* Copyright (c) 2004, KNOPFLERFISH project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* - Neither the name of the KNOPFLERFISH project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.knopflerfish.bundle.bundleC_test;
import java.util.*;
import org.osgi.framework.*;
import org.knopflerfish.service.bundleC_test.*;
/*
The start method tries to install the bundle(s) needed to do the test
The user has to fill in the bundles array with the bundles needed
to do the test.
To facilitate that the test may take place in another location
the POSGRUNDIR property is prepended before the bundle names
before an attempt to load them is made
*/
/**
 * Activator for the bundleC test bundle: on start it registers a {@link BundC}
 * instance under two service names with two fixed properties, then looks the
 * service back up; on stop it ungets that reference.
 */
public class BundleActivator implements org.osgi.framework.BundleActivator {
    BundleContext bc;
    BundC s;
    ServiceReference sr1;
    String []serviceDescription = {"org.knopflerfish.service.bundleC_test.BundleC", "java.lang.Object"};
    ServiceRegistration sreg;

    public void start(BundleContext bc) {
        this.bc = bc;
        s = new BundC(bc);
        // Fixed service properties checked by the restart test harness.
        Hashtable props = new Hashtable();
        props.put("key1", "value1");
        props.put("key2", "value2");
        try {
            sreg = bc.registerService(serviceDescription, s, props);
        } catch (RuntimeException ru) {
            // Registration failure is reported but deliberately not rethrown.
            System.out.println("Exception " + ru + " in BundleC start");
            ru.printStackTrace();
        }
        sr1 = bc.getServiceReference("org.knopflerfish.service.bundleC_test.BundleC");
    }

    public void stop(BundleContext bc) {
        bc.ungetService(sr1);
    }
}
| bsd-3-clause |
Azure/azure-sdk-for-java | sdk/batch/microsoft-azure-batch/src/main/java/com/microsoft/azure/batch/protocol/models/AccountListSupportedImagesNextOptions.java | 3680 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.batch.protocol.models;
import java.util.UUID;
import com.microsoft.rest.DateTimeRfc1123;
import org.joda.time.DateTime;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Optional request headers for the listSupportedImagesNext operation.
 * Autogenerated shape; every setter returns {@code this} for chaining.
 */
public class AccountListSupportedImagesNextOptions {
    /**
     * Caller-generated request identity: a bare GUID with no decoration such
     * as curly braces, e.g. 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
     */
    @JsonProperty(value = "")
    private UUID clientRequestId;

    /** Whether the server should return the client-request-id in the response. */
    @JsonProperty(value = "")
    private Boolean returnClientRequestId;

    /**
     * Time the request was issued. Client libraries typically set this to the
     * current system clock time; set it explicitly when calling the REST API
     * directly.
     */
    @JsonProperty(value = "")
    private DateTimeRfc1123 ocpDate;

    /**
     * Returns the caller-generated request identity, or {@code null} if unset.
     *
     * @return the clientRequestId value
     */
    public UUID clientRequestId() {
        return clientRequestId;
    }

    /**
     * Sets the caller-generated request identity.
     *
     * @param clientRequestId the clientRequestId value to set
     * @return this options object, for chaining
     */
    public AccountListSupportedImagesNextOptions withClientRequestId(UUID clientRequestId) {
        this.clientRequestId = clientRequestId;
        return this;
    }

    /**
     * Returns whether the server should echo the client-request-id.
     *
     * @return the returnClientRequestId value
     */
    public Boolean returnClientRequestId() {
        return returnClientRequestId;
    }

    /**
     * Sets whether the server should echo the client-request-id.
     *
     * @param returnClientRequestId the returnClientRequestId value to set
     * @return this options object, for chaining
     */
    public AccountListSupportedImagesNextOptions withReturnClientRequestId(Boolean returnClientRequestId) {
        this.returnClientRequestId = returnClientRequestId;
        return this;
    }

    /**
     * Returns the time the request was issued, or {@code null} if unset.
     *
     * @return the ocpDate value
     */
    public DateTime ocpDate() {
        // Stored in RFC 1123 form for the wire; unwrap lazily on read.
        return ocpDate == null ? null : ocpDate.dateTime();
    }

    /**
     * Sets the time the request was issued.
     *
     * @param ocpDate the ocpDate value to set
     * @return this options object, for chaining
     */
    public AccountListSupportedImagesNextOptions withOcpDate(DateTime ocpDate) {
        this.ocpDate = ocpDate == null ? null : new DateTimeRfc1123(ocpDate);
        return this;
    }
}
| mit |
navalev/azure-sdk-for-java | sdk/applicationinsights/mgmt-v2015_05_01/src/main/java/com/microsoft/azure/management/applicationinsights/v2015_05_01/implementation/ApplicationInsightsComponentExportConfigurationImpl.java | 3094 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.applicationinsights.v2015_05_01.implementation;
import com.microsoft.azure.management.applicationinsights.v2015_05_01.ApplicationInsightsComponentExportConfiguration;
import com.microsoft.azure.arm.model.implementation.WrapperImpl;
/**
 * Read-only wrapper around {@link ApplicationInsightsComponentExportConfigurationInner};
 * every accessor simply delegates to the wrapped inner model.
 */
class ApplicationInsightsComponentExportConfigurationImpl extends WrapperImpl<ApplicationInsightsComponentExportConfigurationInner> implements ApplicationInsightsComponentExportConfiguration {
    // Manager handle exposed verbatim via manager().
    private final InsightsManager manager;

    ApplicationInsightsComponentExportConfigurationImpl(ApplicationInsightsComponentExportConfigurationInner inner, InsightsManager manager) {
        super(inner);
        this.manager = manager;
    }

    @Override
    public InsightsManager manager() {
        return manager;
    }

    @Override
    public String applicationName() {
        return inner().applicationName();
    }

    @Override
    public String containerName() {
        return inner().containerName();
    }

    @Override
    public String destinationAccountId() {
        return inner().destinationAccountId();
    }

    @Override
    public String destinationStorageLocationId() {
        return inner().destinationStorageLocationId();
    }

    @Override
    public String destinationStorageSubscriptionId() {
        return inner().destinationStorageSubscriptionId();
    }

    @Override
    public String destinationType() {
        return inner().destinationType();
    }

    @Override
    public String exportId() {
        return inner().exportId();
    }

    @Override
    public String exportStatus() {
        return inner().exportStatus();
    }

    @Override
    public String instrumentationKey() {
        return inner().instrumentationKey();
    }

    @Override
    public String isUserEnabled() {
        return inner().isUserEnabled();
    }

    @Override
    public String lastGapTime() {
        return inner().lastGapTime();
    }

    @Override
    public String lastSuccessTime() {
        return inner().lastSuccessTime();
    }

    @Override
    public String lastUserUpdate() {
        return inner().lastUserUpdate();
    }

    @Override
    public String notificationQueueEnabled() {
        return inner().notificationQueueEnabled();
    }

    @Override
    public String permanentErrorReason() {
        return inner().permanentErrorReason();
    }

    @Override
    public String recordTypes() {
        return inner().recordTypes();
    }

    @Override
    public String resourceGroup() {
        return inner().resourceGroup();
    }

    @Override
    public String storageName() {
        return inner().storageName();
    }

    @Override
    public String subscriptionId() {
        return inner().subscriptionId();
    }
}
| mit |
selvasingh/azure-sdk-for-java | sdk/network/mgmt-v2017_10_01/src/main/java/com/microsoft/azure/management/network/v2017_10_01/ConnectionMonitors.java | 4039 | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2017_10_01;
import com.microsoft.azure.arm.collection.SupportsCreating;
import rx.Completable;
import rx.Observable;
import com.microsoft.azure.management.network.v2017_10_01.implementation.ConnectionMonitorsInner;
import com.microsoft.azure.arm.model.HasInner;
/**
 * Type representing ConnectionMonitors: async operations for creating,
 * querying, starting, stopping, listing and deleting Network Watcher
 * connection monitors.
 */
public interface ConnectionMonitors extends SupportsCreating<ConnectionMonitorResult.DefinitionStages.Blank>, HasInner<ConnectionMonitorsInner> {
    /**
     * Stops the specified connection monitor.
     *
     * @param resourceGroupName The name of the resource group containing Network Watcher.
     * @param networkWatcherName The name of the Network Watcher resource.
     * @param connectionMonitorName The name of the connection monitor.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return a Completable that signals when the stop request finishes
     */
    Completable stopAsync(String resourceGroupName, String networkWatcherName, String connectionMonitorName);
    /**
     * Starts the specified connection monitor.
     *
     * @param resourceGroupName The name of the resource group containing Network Watcher.
     * @param networkWatcherName The name of the Network Watcher resource.
     * @param connectionMonitorName The name of the connection monitor.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return a Completable that signals when the start request finishes
     */
    Completable startAsync(String resourceGroupName, String networkWatcherName, String connectionMonitorName);
    /**
     * Query a snapshot of the most recent connection states.
     *
     * @param resourceGroupName The name of the resource group containing Network Watcher.
     * @param networkWatcherName The name of the Network Watcher resource.
     * @param connectionMonitorName The name given to the connection monitor.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return an Observable emitting the query result
     */
    Observable<ConnectionMonitorQueryResult> queryAsync(String resourceGroupName, String networkWatcherName, String connectionMonitorName);
    /**
     * Gets a connection monitor by name.
     *
     * @param resourceGroupName The name of the resource group containing Network Watcher.
     * @param networkWatcherName The name of the Network Watcher resource.
     * @param connectionMonitorName The name of the connection monitor.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return an Observable emitting the requested connection monitor
     */
    Observable<ConnectionMonitorResult> getAsync(String resourceGroupName, String networkWatcherName, String connectionMonitorName);
    /**
     * Lists all connection monitors for the specified Network Watcher.
     *
     * @param resourceGroupName The name of the resource group containing Network Watcher.
     * @param networkWatcherName The name of the Network Watcher resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return an Observable emitting each connection monitor
     */
    Observable<ConnectionMonitorResult> listAsync(String resourceGroupName, String networkWatcherName);
    /**
     * Deletes the specified connection monitor.
     *
     * @param resourceGroupName The name of the resource group containing Network Watcher.
     * @param networkWatcherName The name of the Network Watcher resource.
     * @param connectionMonitorName The name of the connection monitor.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return a Completable that signals when the delete request finishes
     */
    Completable deleteAsync(String resourceGroupName, String networkWatcherName, String connectionMonitorName);
}
| mit |
ShyHi/ShyHi_Android | dev/ShyHi/src/dev/rug/shyhi/randomNameGen.java | 749 | package dev.rug.shyhi;
import java.util.Random;
/**
 * Generates random two-word display names of the form "&lt;adjective&gt; &lt;noun&gt;".
 * (Class name kept lowercase for compatibility with existing callers.)
 */
public class randomNameGen {
    private static String[] adjArr= {"numerous","flagrant","wild","comfortable","incandescent","rare","pushy","insidious","workable","dirty","doubtful","cruel","friendly","productive","busy","tough","thinkable","repulsive","colorful","excellent","succinct","honorable","aggressive","flawless","narrow"};
    private static String[] nounArr = {"zebra","ocelot","mongoose","turtle","potato","moose"};

    // Shared generator: one instance for all calls instead of one per call.
    private static final Random RAND = new Random();

    /**
     * Returns a random adjective-noun pair separated by a single space.
     *
     * <p>Bug fix: {@code Random.nextInt(bound)} already excludes {@code bound},
     * so the previous {@code nextInt(length - 1)} could never select the last
     * entry of either array ("narrow" / "moose").
     *
     * @return a name such as {@code "wild turtle"}; never null or empty
     */
    public static String randomName() {
        String adj = adjArr[RAND.nextInt(adjArr.length)];
        String noun = nounArr[RAND.nextInt(nounArr.length)];
        return adj + " " + noun;
    }
}
| mit |
kebenxiaoming/cocos2d-java | cocos/src/cocos2d/actions/action_intervals/CCMoveTo.java | 3318 | /******************************************************************************
* Copyright (c) 2014 cocos2d-java.org
*
* http://www.cocos2d-java.org
*
* The MIT License (MIT)
*
*Permission is hereby granted, free of charge, to any person obtaining a copy
*of this software and associated documentation files (the "Software"), to deal
*in the Software without restriction, including without limitation the rights
*to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
*copies of the Software, and to permit persons to whom the Software is
*furnished to do so, subject to the following conditions:
*The above copyright notice and this permission notice shall be included in
*all copies or substantial portions of the Software.
*THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
*IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
*FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
*AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
*LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
*OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
*THE SOFTWARE.
*******************************************************************************/
package cocos2d.actions.action_intervals;
import cocos2d.base_nodes.CCNode;
import cocos2d.cocoa.CCPoint;
import cocos2d.cocoa.ICCCopyable;
/**
 * Interval action that moves a node to an absolute end position over a fixed
 * duration, interpolating linearly each frame. Extends {@link CCMoveBy} but
 * stores an absolute target; the per-frame delta is computed when the action
 * starts on a concrete node.
 */
public class CCMoveTo extends CCMoveBy
{
    // Scratch points reused every update() tick, presumably to avoid
    // per-frame allocation — note this makes instances non-thread-safe.
    CCPoint currentPos = new CCPoint();
    CCPoint newPos = new CCPoint();

    /**
     * Creates a move action.
     *
     * @param duration total running time of the action (seconds, by cocos2d convention — confirm)
     * @param position absolute destination position
     */
    public CCMoveTo(float duration, CCPoint position)
    {
        super(duration, position);
    }

    /** Copy constructor used by copy(null). */
    protected CCMoveTo(CCMoveTo moveTo)
    {
        super(moveTo);
        initWithDuration(moveTo.m_fDuration, moveTo.m_endPosition);
    }

    /**
     * Initializes duration and takes a defensive copy of the end position.
     *
     * @return true if the base-class duration init succeeded
     */
    @Override
    protected boolean initWithDuration(float duration, CCPoint position)
    {
        if (super.initWithDuration(duration))
        {
            m_endPosition = new CCPoint(position);
            return true;
        }
        return false;
    }

    /**
     * Copies this action either into the supplied target (when zone is
     * non-null) or into a brand-new instance.
     */
    @Override
    public Object copy(ICCCopyable zone)
    {
        if (zone != null)
        {
            CCMoveTo ret = (CCMoveTo) zone;
            super.copy(zone);
            ret.initWithDuration(m_fDuration, m_endPosition);
            return ret;
        }
        else
        {
            return new CCMoveTo(this);
        }
    }

    /**
     * Advances the action: places the target at start + delta * time, where
     * time is the normalized progress supplied by the action runner.
     * Unlike CCMoveBy, external moves of the node are NOT compensated here —
     * the diff-based correction below is intentionally commented out.
     */
    @Override
    public void update(float time)
    {
        if (m_pTarget != null)
        {
            currentPos.set(m_pTarget.getPosition());
            //CCPoint diff = new CCPoint(CCPoint.ccpSub(currentPos, m_previousPosition));
            //m_startPosition = m_startPosition + diff;
            //newPos.set(CCPoint.ccpAdd(m_startPosition, CCPoint.ccpMult(m_positionDelta, time)));
            // Inlined arithmetic avoids allocating intermediate CCPoints.
            newPos.set(m_startPosition.x + m_positionDelta.x * time, m_startPosition.y + m_positionDelta.y * time);
            m_pTarget.setPosition(newPos);
            m_previousPosition.set(newPos);
        }
    }

    /**
     * Captures the node's current position as the start point and derives the
     * constant delta (end - start) used by update().
     */
    @Override
    public void startWithTarget(CCNode target)
    {
        super.startWithTarget(target);
        m_startPosition = new CCPoint(target.getPosition());
        m_positionDelta = new CCPoint(CCPoint.ccpSub(m_endPosition, target.getPosition()));
    }
}
zbsz/robolectric | robolectric/src/test/java/org/robolectric/shadows/ShadowParcelFileDescriptorTest.java | 1823 | package org.robolectric.shadows;
import android.os.ParcelFileDescriptor;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.TestRunners;
import java.io.File;
import java.io.FileOutputStream;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(TestRunners.MultiApiWithDefaults.class)
public class ShadowParcelFileDescriptorTest {

  // Empty scratch file recreated before every test.
  private File testFile;

  @Before
  public void setup() throws Exception {
    testFile = new File(RuntimeEnvironment.application.getFilesDir(), "test");
    new FileOutputStream(testFile).close();
  }

  @Test
  public void testOpens() throws Exception {
    ParcelFileDescriptor descriptor = ParcelFileDescriptor.open(testFile, -1);
    assertThat(descriptor).isNotNull();
    assertThat(descriptor.getFileDescriptor().valid()).isTrue();
    descriptor.close();
  }

  @Test
  public void testCloses() throws Exception {
    ParcelFileDescriptor descriptor = ParcelFileDescriptor.open(testFile, -1);
    descriptor.close();
    assertThat(descriptor.getFileDescriptor().valid()).isFalse();
  }

  @Test
  public void testAutoCloseInputStream() throws Exception {
    ParcelFileDescriptor descriptor = ParcelFileDescriptor.open(testFile, -1);
    ParcelFileDescriptor.AutoCloseInputStream input =
        new ParcelFileDescriptor.AutoCloseInputStream(descriptor);
    // Closing the stream must close the underlying descriptor too.
    input.close();
    assertThat(descriptor.getFileDescriptor().valid()).isFalse();
  }

  @Test
  public void testAutoCloseOutputStream() throws Exception {
    File outFile = new File(RuntimeEnvironment.application.getFilesDir(), "outfile");
    ParcelFileDescriptor descriptor = ParcelFileDescriptor.open(outFile, -1);
    ParcelFileDescriptor.AutoCloseOutputStream output =
        new ParcelFileDescriptor.AutoCloseOutputStream(descriptor);
    // Closing the stream must close the underlying descriptor too.
    output.close();
    assertThat(descriptor.getFileDescriptor().valid()).isFalse();
  }
}
| mit |
ikhvostenkov/takes | src/test/java/org/takes/http/BkTimeableTest.java | 3796 | /**
* The MIT License (MIT)
*
* Copyright (c) 2015 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.takes.http;
import com.jcabi.http.request.JdkRequest;
import com.jcabi.http.response.RestResponse;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.util.concurrent.TimeUnit;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.takes.Request;
import org.takes.Response;
import org.takes.Take;
import org.takes.rs.RsText;
/**
 * Test case for {@link BkTimeable}: verifies that a request handler running
 * longer than the configured max latency is interrupted rather than allowed
 * to finish.
 *
 * @author Dmitry Zaytsev (dmitry.zaytsev@gmail.com)
 * @version $Id$
 * @since 0.14.2
 */
@SuppressWarnings("PMD.DoNotUseThreads")
public final class BkTimeableTest {
    /**
     * BkTimeable can stop long running Back.
     * The take sleeps far longer (10s) than --max-latency (100ms); a correct
     * BkTimeable interrupts it, so the client must see "interrupted".
     * @throws java.lang.Exception If some problem inside
     */
    @Test
    public void stopsLongRunningBack() throws Exception {
        final int port = new Ports().allocate();
        final String response = "interrupted";
        final Take take = new Take() {
            @Override
            public Response act(final Request req)
                throws IOException {
                try {
                    // Deliberately exceeds --max-latency so the thread gets interrupted.
                    // @checkstyle MagicNumberCheck (1 line)
                    TimeUnit.SECONDS.sleep(10L);
                    return new RsText("finish");
                } catch (final InterruptedException ex) {
                    // Restore the interrupt flag and report the expected body.
                    Thread.currentThread().interrupt();
                    return new RsText(response);
                }
            }
        };
        // Server runs on its own thread; --lifetime=3000 makes it exit on its own.
        final Thread thread = new Thread(
            new Runnable() {
                @Override
                public void run() {
                    try {
                        new FtCLI(
                            take,
                            String.format("--port=%d", port),
                            "--threads=1",
                            "--lifetime=3000",
                            "--max-latency=100"
                        ).start(Exit.NEVER);
                    } catch (final IOException ex) {
                        throw new IllegalStateException(ex);
                    }
                }
            }
        );
        thread.start();
        // Give the server time to bind before issuing the request.
        // @checkstyle MagicNumberCheck (1 line)
        TimeUnit.MILLISECONDS.sleep(1500L);
        new JdkRequest(String.format("http://localhost:%d", port))
            .fetch()
            .as(RestResponse.class)
            .assertStatus(HttpURLConnection.HTTP_OK)
            .assertBody(Matchers.startsWith(response));
        // Wait for the server thread to hit its --lifetime and exit cleanly.
        try {
            thread.join();
        } catch (final InterruptedException ex) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException(ex);
        }
        new Ports().release(port);
    }
}
| mit |
Skywalker-11/spongycastle | pg/src/main/java/org/spongycastle/bcpg/sig/EmbeddedSignature.java | 458 | package org.spongycastle.bcpg.sig;
import org.spongycastle.bcpg.SignatureSubpacket;
import org.spongycastle.bcpg.SignatureSubpacketTags;
/**
 * Packet embedded signature subpacket (signature subpacket tag
 * EMBEDDED_SIGNATURE) carrying a raw signature packet body.
 */
public class EmbeddedSignature
    extends SignatureSubpacket
{
    /**
     * @param critical true if this subpacket is marked critical.
     * @param isLongLength true if the subpacket length was encoded in long form.
     * @param data the raw embedded signature data, stored as-is.
     */
    public EmbeddedSignature(
        boolean critical,
        boolean isLongLength,
        byte[] data)
    {
        super(SignatureSubpacketTags.EMBEDDED_SIGNATURE, critical, isLongLength, data);
    }
}
robertsmieja/litle-sdk-for-java | samples/auth/src/main/java/AuthExample.java | 1499 | package com.litle.sdk.samples;
import com.litle.sdk.*;
import com.litle.sdk.generate.*;
/**
 * Sample: submits a card Authorization through the Litle online API and
 * prints the response, failing loudly if the transaction is not approved.
 */
public class AuthExample {
    public static void main(String[] args) {
        Authorization authRequest = new Authorization();
        authRequest.setOrderId("1");
        authRequest.setAmount(10010L);
        authRequest.setOrderSource(OrderSourceType.ECOMMERCE);

        // Billing contact attached to the authorization.
        Contact billing = new Contact();
        billing.setName("John Smith");
        billing.setAddressLine1("1 Main St.");
        billing.setCity("Burlington");
        billing.setState("MA");
        billing.setCountry(CountryTypeEnum.US);
        billing.setZip("01803-3747");
        authRequest.setBillToAddress(billing);

        // Test AmEx card details.
        CardType paymentCard = new CardType();
        paymentCard.setNumber("375001010000003");
        paymentCard.setExpDate("0112");
        paymentCard.setCardValidationNum("349");
        paymentCard.setType(MethodOfPaymentTypeEnum.AX);
        authRequest.setCard(paymentCard);

        AuthorizationResponse authResponse = new LitleOnline().authorize(authRequest);

        // Display Results
        System.out.println("Response: " + authResponse.getResponse());
        System.out.println("Message: " + authResponse.getMessage());
        System.out.println("Litle Transaction ID: " + authResponse.getLitleTxnId());

        // In your sample, you can ignore this
        if (!authResponse.getMessage().equals("Approved")) {
            throw new RuntimeException(" The AuthSample does not give the right response");
        }
    }
}
| mit |
csmith/DMDirc-Parser | xmpp/src/main/java/com/dmdirc/parser/xmpp/XmppChannelClientInfo.java | 2678 | /*
* Copyright (c) 2006-2017 DMDirc Developers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dmdirc.parser.xmpp;
import com.dmdirc.parser.common.BaseChannelClientInfo;
import com.dmdirc.parser.interfaces.ChannelClientInfo;
import com.dmdirc.parser.interfaces.ChannelInfo;
import com.dmdirc.parser.interfaces.ClientInfo;
import java.util.Comparator;
import javax.annotation.Nonnull;
/**
 * An XMPP-specific channel client info object. Most mode-related methods are
 * stubs returning empty strings until XMPP MUC roles are mapped onto modes.
 */
public class XmppChannelClientInfo extends BaseChannelClientInfo {
    /**
     * Creates a new client info object for the specified channel and client.
     *
     * @param channel The channel the association is with
     * @param client The user that holds the association
     */
    public XmppChannelClientInfo(final ChannelInfo channel, final ClientInfo client) {
        super(channel, client);
    }
    @Override
    public String getImportantModePrefix() {
        return ""; // TODO: Implement — no mode prefixes mapped for XMPP yet
    }
    @Override
    public String getImportantMode() {
        return ""; // TODO: Implement — no modes mapped for XMPP yet
    }
    @Override
    public String getAllModes() {
        return ""; // TODO: Implement — no modes mapped for XMPP yet
    }
    @Override
    public String getAllModesPrefix() {
        return ""; // TODO: Implement — no mode prefixes mapped for XMPP yet
    }
    @Override
    public void kick(final String message) {
        // Kicking is not available for this implementation.
        throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public Comparator<String> getImportantModeComparator() {
        // No mode ordering defined; plain natural string order is used.
        return Comparator.naturalOrder();
    }
    @Override
    public int compareTo(@Nonnull final ChannelClientInfo o) {
        return 0; // TODO: Implement — currently treats all clients as equal in ordering
    }
}
| mit |
SoundBite/johm | src/main/java/redis/clients/johm/MissingIdException.java | 186 | package redis.clients.johm;
/**
 * Unchecked exception raised when a persisted entity is missing its id.
 * Adds message/cause constructors (previously only the implicit no-arg
 * constructor existed, so no diagnostic context could be attached); the
 * no-arg constructor is kept for backward compatibility.
 */
public class MissingIdException extends RuntimeException {
    private static final long serialVersionUID = 431576167757996845L;

    /** Creates an exception with no detail message (legacy behavior). */
    public MissingIdException() {
        super();
    }

    /**
     * Creates an exception with the given detail message.
     *
     * @param message description of which id was missing
     */
    public MissingIdException(String message) {
        super(message);
    }

    /**
     * Creates an exception with the given detail message and cause.
     *
     * @param message description of which id was missing
     * @param cause underlying failure, preserved for the stack trace
     */
    public MissingIdException(String message, Throwable cause) {
        super(message, cause);
    }
}
| mit |
lionell/labs | eom/processor/src/processor/memory/Memory.java | 1470 | package processor.memory;
import processor.exceptions.MemoryException;
import processor.utils.io.Logger;
/**
 * Fixed-size bank of equally-wide registers.
 *
 * Improvements over the original: the duplicated bounds-check in
 * getRegister/setRegister is factored into one helper, printDump consistently
 * uses registerCount (it mixed registers.length and registerCount), and all
 * fields are final since nothing reassigns them.
 *
 * Created by lionell on 24.02.16.
 *
 * @author Ruslan Sakevych
 */
public class Memory {
    // Bit-width (addressing size) of every register in the bank.
    private final int registerAddressing;
    // Number of registers in the bank.
    private final int registerCount;
    private final Register[] registers;

    /**
     * Allocates {@code registerCount} registers, each of width
     * {@code registerAddressing}.
     */
    public Memory(int registerAddressing, int registerCount) {
        this.registerAddressing = registerAddressing;
        this.registerCount = registerCount;
        registers = new Register[registerCount];
        for (int i = 0; i < registerCount; i++) {
            registers[i] = new Register(registerAddressing);
        }
    }

    /** Logs every register as "R<i> = <contents>" via the shared Logger. */
    public void printDump() {
        for (int i = 0; i < registerCount; i++) {
            Logger.println("R" + i + " = " + registers[i]);
        }
    }

    // Single bounds check shared by all indexed accessors.
    private void checkIndex(int index) {
        if (index < 0 || index >= registerCount) {
            throw new MemoryException("Invalid index.");
        }
    }

    /**
     * Returns the register at {@code index}.
     *
     * @throws MemoryException if the index is out of range
     */
    public Register getRegister(int index) {
        checkIndex(index);
        return registers[index];
    }

    /**
     * Returns the raw contents of the register at {@code index}.
     *
     * @throws MemoryException if the index is out of range
     */
    public int[] getData(int index) {
        return getRegister(index).getData();
    }

    /**
     * Overwrites the contents of the register at {@code index}.
     *
     * @throws MemoryException if the index is out of range
     */
    public void setRegister(int index, int[] data) {
        checkIndex(index);
        registers[index].setData(data);
    }

    public int getRegisterAddressing() {
        return registerAddressing;
    }

    public int getRegisterCount() {
        return registerCount;
    }
}
| mit |
angelozerr/typescript.java | core/ts.core/src/ts/cmd/tsc/CompilerOptionCapability.java | 605 | package ts.cmd.tsc;
import ts.utils.VersionHelper;
/**
 * tsc compiler options that only exist from a given TypeScript version
 * onwards; each constant records the first version that supports it.
 */
public enum CompilerOptionCapability {

    // --listEmittedFiles became available in TypeScript 2.0.0.
    listEmittedFiles("2.0.0");

    // Minimum TypeScript version that supports this option.
    private String sinceVersion;

    private CompilerOptionCapability(String version) {
        this.sinceVersion = version;
    }

    /**
     * Return true if the tsc compiler option support the given version and
     * false otherwise.
     *
     * @param version the TypeScript version in use
     * @return true if the tsc compiler option support the given version and
     *         false otherwise.
     */
    public boolean canSupport(String version) {
        return VersionHelper.canSupport(version, sinceVersion);
    }
}
| mit |
CCSU-CS416F17/CS416F17CourseInfo | Lec12Demos/src/java/edu/ccsu/customtags/LabeledTextField.java | 1318 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edu.ccsu.customtags;
import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.SimpleTagSupport;
/**
 * Simple tag that renders a table row with a label cell and a text-input
 * cell. Attribute values are now HTML-escaped so user-supplied text (for
 * example a value containing a double quote) cannot break out of the
 * generated attribute or inject markup.
 *
 * @author cw1491
 */
public class LabeledTextField extends SimpleTagSupport {
    private String label = "";
    private String name = "";
    private String value = "";

    /**
     * Called by the container to invoke this tag. Writes
     * {@code <tr><th>label</th><td><input .../></td></tr>} to the page.
     *
     * @throws JspException if writing to the page output fails
     */
    @Override
    public void doTag() throws JspException {
        JspWriter out = getJspContext().getOut();
        try {
            out.println("<tr><th>" + escape(label)
                    + "</th><td><input type=\"text\" name=\"" + escape(name)
                    + "\" value=\"" + escape(value) + "\"/></td></tr>");
        } catch (java.io.IOException ex) {
            throw new JspException("Error in LabeledTextField tag", ex);
        }
    }

    // Minimal HTML/attribute escaping; '&' must be replaced first so the
    // later entity substitutions are not double-escaped.
    private static String escape(String text) {
        if (text == null) {
            return "";
        }
        return text.replace("&", "&amp;")
                .replace("<", "&lt;")
                .replace(">", "&gt;")
                .replace("\"", "&quot;");
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setValue(String value) {
        this.value = value;
    }
}
| mit |