repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
viapp/httpcore-nio-android | src/main/java/org/apache/http/impl/nio/codecs/LengthDelimitedDecoder.java | 5989 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.impl.nio.codecs;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import org.apache.http.ConnectionClosedException;
import org.apache.http.annotation.NotThreadSafe;
import org.apache.http.impl.io.HttpTransportMetricsImpl;
import org.apache.http.nio.FileContentDecoder;
import org.apache.http.nio.reactor.SessionInputBuffer;
import org.apache.http.util.Args;
/**
* Content decoder that cuts off after a defined number of bytes. This class
* is used to receive content of HTTP messages where the end of the content
* entity is determined by the value of the <code>Content-Length header</code>.
* Entities transferred using this stream can be maximum {@link Long#MAX_VALUE}
* long.
* <p>
* This decoder is optimized to transfer data directly from the underlying
* I/O session's channel to a {@link FileChannel}, whenever
* possible avoiding intermediate buffering in the session buffer.
*
* @since 4.0
*/
@NotThreadSafe
public class LengthDelimitedDecoder extends AbstractContentDecoder
        implements FileContentDecoder {

    /** Total number of entity bytes promised by the Content-Length header. */
    private final long contentLength;

    /** Number of entity bytes consumed so far. */
    private long len;

    /**
     * Creates a decoder that delivers exactly {@code contentLength} bytes of
     * entity content from the given channel / session buffer.
     *
     * @param channel the underlying channel to read from
     * @param buffer session input buffer that may already hold buffered bytes
     * @param metrics transport metrics updated as bytes are transferred
     * @param contentLength expected entity length; must not be negative
     */
    public LengthDelimitedDecoder(
            final ReadableByteChannel channel,
            final SessionInputBuffer buffer,
            final HttpTransportMetricsImpl metrics,
            final long contentLength) {
        super(channel, buffer, metrics);
        Args.notNegative(contentLength, "Content length");
        this.contentLength = contentLength;
    }

    /**
     * Reads up to the number of bytes still owed by Content-Length into
     * {@code dst}, draining the session buffer before touching the channel.
     *
     * @return number of bytes read, or -1 if the body is fully consumed
     * @throws ConnectionClosedException if the connection closes before the
     *         full Content-Length has been received
     */
    public int read(final ByteBuffer dst) throws IOException {
        Args.notNull(dst, "Byte buffer");
        if (this.completed) {
            return -1;
        }
        // Never request more than the remaining entity bytes; clamp to int
        // because ByteBuffer reads are int-sized.
        final int chunk = (int) Math.min((this.contentLength - this.len), Integer.MAX_VALUE);

        final int bytesRead;
        if (this.buffer.hasData()) {
            // Session buffer may hold bytes read past the message head;
            // consume those first so we never over-read into the next message.
            final int maxLen = Math.min(chunk, this.buffer.length());
            bytesRead = this.buffer.read(dst, maxLen);
        } else {
            bytesRead = readFromChannel(dst, chunk);
        }
        if (bytesRead == -1) {
            this.completed = true;
            if (this.len < this.contentLength) {
                // Peer closed before delivering the promised byte count.
                throw new ConnectionClosedException(
                        "Premature end of Content-Length delimited message body (expected: "
                        + this.contentLength + "; received: " + this.len);
            }
        }
        // NOTE(review): if bytesRead == -1 reaches this line (i.e. len already
        // equals contentLength) it decrements len by one; presumably
        // unreachable because 'completed' short-circuits at the top once the
        // full length was consumed — confirm for the contentLength == 0 case.
        this.len += bytesRead;
        if (this.len >= this.contentLength) {
            this.completed = true;
        }
        if (this.completed && bytesRead == 0) {
            return -1;
        } else {
            return bytesRead;
        }
    }

    /**
     * Transfers body content directly into a {@link FileChannel} at
     * {@code position}, bypassing intermediate buffering when the session
     * buffer is empty.
     *
     * @param dst target file channel; a null target transfers nothing
     * @param position file offset to write at; must not be past end of file
     * @param count maximum number of bytes to transfer
     * @return number of bytes transferred, or -1 when the body is consumed
     * @throws ConnectionClosedException if the connection closes early
     */
    public long transfer(
            final FileChannel dst,
            final long position,
            final long count) throws IOException {
        if (dst == null) {
            return 0;
        }
        if (this.completed) {
            return -1;
        }
        // Remaining entity bytes, clamped to int (same bound as read()).
        final int chunk = (int) Math.min((this.contentLength - this.len), Integer.MAX_VALUE);

        final long bytesRead;
        if (this.buffer.hasData()) {
            // Flush already-buffered bytes into the file at the requested offset.
            final int maxLen = Math.min(chunk, this.buffer.length());
            dst.position(position);
            bytesRead = this.buffer.read(dst, maxLen);
        } else {
            if (this.channel.isOpen()) {
                // transferFrom silently writes nothing past EOF, so fail loudly instead.
                if (position > dst.size()) {
                    throw new IOException("Position past end of file [" + position +
                            " > " + dst.size() + "]");
                }
                // min(count, chunk): honor both the caller's limit and Content-Length.
                bytesRead = dst.transferFrom(this.channel, position, count < chunk ? count : chunk);
            } else {
                bytesRead = -1;
            }
            // Direct channel transfers bypass the session buffer, so account
            // for them in the metrics here.
            if (bytesRead > 0) {
                this.metrics.incrementBytesTransferred(bytesRead);
            }
        }
        if (bytesRead == -1) {
            this.completed = true;
            if (this.len < this.contentLength) {
                throw new ConnectionClosedException(
                        "Premature end of Content-Length delimited message body (expected: "
                        + this.contentLength + "; received: " + this.len);
            }
        }
        // NOTE(review): same -1 accumulation caveat as in read() above.
        this.len += bytesRead;
        if (this.len >= this.contentLength) {
            this.completed = true;
        }
        return bytesRead;
    }

    /** Debug representation: expected length, current position, completion flag. */
    @Override
    public String toString() {
        final StringBuilder buffer = new StringBuilder();
        buffer.append("[content length: ");
        buffer.append(this.contentLength);
        buffer.append("; pos: ");
        buffer.append(this.len);
        buffer.append("; completed: ");
        buffer.append(this.completed);
        buffer.append("]");
        return buffer.toString();
    }
}
| apache-2.0 |
adufilie/flex-falcon | compiler/src/org/apache/flex/compiler/internal/embedding/transcoders/ScalableTranscoder.java | 5748 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.internal.embedding.transcoders;
import java.util.Collection;
import java.util.List;
import org.apache.flex.compiler.common.ISourceLocation;
import org.apache.flex.compiler.internal.embedding.EmbedAttribute;
import org.apache.flex.compiler.internal.embedding.EmbedData;
import org.apache.flex.compiler.internal.workspaces.Workspace;
import org.apache.flex.compiler.problems.EmbedScalingGridProblem;
import org.apache.flex.compiler.problems.ICompilerProblem;
import org.apache.flex.swf.tags.DefineScalingGridTag;
import org.apache.flex.swf.tags.DefineSpriteTag;
import org.apache.flex.swf.tags.ITag;
import org.apache.flex.swf.types.Rect;
/**
* Abstract class which should be extended by any embedding transcoder
* which supports scaling. Currently images and movies.
*/
public abstract class ScalableTranscoder extends TranscoderBase
{
    /** Set to true once a complete, validated scale grid has been supplied. */
    protected boolean scaling;

    // The four edges of the 9-slice scaling grid; all null unless specified.
    protected Integer scaleGridBottom;
    protected Integer scaleGridLeft;
    protected Integer scaleGridRight;
    protected Integer scaleGridTop;

    /**
     * @param data embed data being transcoded
     * @param workspace owning workspace
     */
    protected ScalableTranscoder(EmbedData data, Workspace workspace)
    {
        super(data, workspace);
        this.scaling = false;
    }

    /**
     * Records a scale-grid attribute if it is one of the four edges,
     * otherwise defers to the base class.
     *
     * @return true if the attribute is supported
     */
    @Override
    protected boolean setAttribute(EmbedAttribute attribute)
    {
        switch (attribute)
        {
            case SCALE_GRID_BOTTOM:
                scaleGridBottom = (Integer)data.getAttribute(EmbedAttribute.SCALE_GRID_BOTTOM);
                return true;
            case SCALE_GRID_LEFT:
                scaleGridLeft = (Integer)data.getAttribute(EmbedAttribute.SCALE_GRID_LEFT);
                return true;
            case SCALE_GRID_RIGHT:
                scaleGridRight = (Integer)data.getAttribute(EmbedAttribute.SCALE_GRID_RIGHT);
                return true;
            case SCALE_GRID_TOP:
                scaleGridTop = (Integer)data.getAttribute(EmbedAttribute.SCALE_GRID_TOP);
                return true;
            default:
                return super.setAttribute(attribute);
        }
    }

    /**
     * Validates attribute values; the scale grid is all-or-nothing, so a
     * partially specified grid is reported as a problem. A complete grid
     * turns scaling on.
     */
    @Override
    protected boolean checkAttributeValues(ISourceLocation location, Collection<ICompilerProblem> problems)
    {
        if (!super.checkAttributeValues(location, problems))
            return false;

        final boolean anyEdgeSet = scaleGridBottom != null || scaleGridLeft != null
                || scaleGridRight != null || scaleGridTop != null;
        if (!anyEdgeSet)
            return true;

        final boolean everyEdgeSet = scaleGridBottom != null && scaleGridLeft != null
                && scaleGridRight != null && scaleGridTop != null;
        if (!everyEdgeSet)
        {
            problems.add(new EmbedScalingGridProblem(location));
            return false;
        }

        // All four edges present and valid; enable scaling.
        scaling = true;
        return true;
    }

    /** Builds the SWF scaling-grid tag from the four validated edge values. */
    protected DefineScalingGridTag buildScalingGrid()
    {
        DefineScalingGridTag grid = new DefineScalingGridTag();
        grid.setSplitter(new Rect(scaleGridLeft, scaleGridRight, scaleGridTop, scaleGridBottom));
        return grid;
    }

    /**
     * Builds a sprite tag and, when a scaling grid is supplied, attaches the
     * grid to the sprite and appends the grid to the output tag list.
     */
    protected DefineSpriteTag buildSprite(List<ITag> spriteTags, int frameCount, DefineScalingGridTag scalingGrid, Collection<ITag> tags)
    {
        DefineSpriteTag sprite = new DefineSpriteTag(frameCount, spriteTags);
        if (scalingGrid == null)
            return sprite;

        scalingGrid.setCharacter(sprite);
        tags.add(scalingGrid);
        return sprite;
    }

    @Override
    public boolean equals(Object o)
    {
        if (!super.equals(o) || !(o instanceof ScalableTranscoder))
            return false;

        ScalableTranscoder other = (ScalableTranscoder)o;
        if (scaling != other.scaling)
            return false;

        // When scaling is off the grid values are irrelevant to equality.
        if (!scaling)
            return true;

        return scaleGridBottom.equals(other.scaleGridBottom)
            && scaleGridLeft.equals(other.scaleGridLeft)
            && scaleGridRight.equals(other.scaleGridRight)
            && scaleGridTop.equals(other.scaleGridTop);
    }

    @Override
    public int hashCode()
    {
        int result = super.hashCode() + (scaling ? 1 : 0);
        // XOR with 0 is the identity, so null edges contribute nothing.
        result ^= (scaleGridBottom != null) ? scaleGridBottom.hashCode() : 0;
        result ^= (scaleGridLeft != null) ? scaleGridLeft.hashCode() : 0;
        result ^= (scaleGridRight != null) ? scaleGridRight.hashCode() : 0;
        result ^= (scaleGridTop != null) ? scaleGridTop.hashCode() : 0;
        return result;
    }
}
| apache-2.0 |
LegNeato/buck | src-gen/com/facebook/buck/artifact_cache/thrift/ContainsResult.java | 16135 | /**
* Autogenerated by Thrift Compiler (0.10.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.facebook.buck.artifact_cache.thrift;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.10.0)")
public class ContainsResult implements org.apache.thrift.TBase<ContainsResult, ContainsResult._Fields>, java.io.Serializable, Cloneable, Comparable<ContainsResult> {
  // Wire-level descriptors for this struct and its two optional fields.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ContainsResult");
  private static final org.apache.thrift.protocol.TField RESULT_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("resultType", org.apache.thrift.protocol.TType.I32, (short)1);
  private static final org.apache.thrift.protocol.TField DEBUG_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("debugInfo", org.apache.thrift.protocol.TType.STRUCT, (short)2);

  // Two serialization strategies: field-tagged "standard" and bit-packed "tuple".
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ContainsResultStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ContainsResultTupleSchemeFactory();

  /**
   *
   * @see ContainsResultType
   */
  public ContainsResultType resultType; // optional
  public ContainsDebugInfo debugInfo; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    /**
     *
     * @see ContainsResultType
     */
    RESULT_TYPE((short)1, "resultType"),
    DEBUG_INFO((short)2, "debugInfo");

    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // RESULT_TYPE
          return RESULT_TYPE;
        case 2: // DEBUG_INFO
          return DEBUG_INFO;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  private static final _Fields optionals[] = {_Fields.RESULT_TYPE,_Fields.DEBUG_INFO};
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.RESULT_TYPE, new org.apache.thrift.meta_data.FieldMetaData("resultType", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, ContainsResultType.class)));
    tmpMap.put(_Fields.DEBUG_INFO, new org.apache.thrift.meta_data.FieldMetaData("debugInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ContainsDebugInfo.class)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ContainsResult.class, metaDataMap);
  }

  public ContainsResult() {
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public ContainsResult(ContainsResult other) {
    if (other.isSetResultType()) {
      this.resultType = other.resultType;
    }
    if (other.isSetDebugInfo()) {
      this.debugInfo = new ContainsDebugInfo(other.debugInfo);
    }
  }

  public ContainsResult deepCopy() {
    return new ContainsResult(this);
  }

  @Override
  public void clear() {
    this.resultType = null;
    this.debugInfo = null;
  }

  /**
   *
   * @see ContainsResultType
   */
  public ContainsResultType getResultType() {
    return this.resultType;
  }

  /**
   *
   * @see ContainsResultType
   */
  public ContainsResult setResultType(ContainsResultType resultType) {
    this.resultType = resultType;
    return this;
  }

  public void unsetResultType() {
    this.resultType = null;
  }

  /** Returns true if field resultType is set (has been assigned a value) and false otherwise */
  public boolean isSetResultType() {
    return this.resultType != null;
  }

  public void setResultTypeIsSet(boolean value) {
    if (!value) {
      this.resultType = null;
    }
  }

  public ContainsDebugInfo getDebugInfo() {
    return this.debugInfo;
  }

  public ContainsResult setDebugInfo(ContainsDebugInfo debugInfo) {
    this.debugInfo = debugInfo;
    return this;
  }

  public void unsetDebugInfo() {
    this.debugInfo = null;
  }

  /** Returns true if field debugInfo is set (has been assigned a value) and false otherwise */
  public boolean isSetDebugInfo() {
    return this.debugInfo != null;
  }

  public void setDebugInfoIsSet(boolean value) {
    if (!value) {
      this.debugInfo = null;
    }
  }

  public void setFieldValue(_Fields field, java.lang.Object value) {
    switch (field) {
    case RESULT_TYPE:
      if (value == null) {
        unsetResultType();
      } else {
        setResultType((ContainsResultType)value);
      }
      break;
    case DEBUG_INFO:
      if (value == null) {
        unsetDebugInfo();
      } else {
        setDebugInfo((ContainsDebugInfo)value);
      }
      break;
    }
  }

  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case RESULT_TYPE:
      return getResultType();
    case DEBUG_INFO:
      return getDebugInfo();
    }
    throw new java.lang.IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case RESULT_TYPE:
      return isSetResultType();
    case DEBUG_INFO:
      return isSetDebugInfo();
    }
    throw new java.lang.IllegalStateException();
  }

  @Override
  public boolean equals(java.lang.Object that) {
    if (that == null)
      return false;
    if (that instanceof ContainsResult)
      return this.equals((ContainsResult)that);
    return false;
  }

  public boolean equals(ContainsResult that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    boolean this_present_resultType = true && this.isSetResultType();
    boolean that_present_resultType = true && that.isSetResultType();
    if (this_present_resultType || that_present_resultType) {
      if (!(this_present_resultType && that_present_resultType))
        return false;
      if (!this.resultType.equals(that.resultType))
        return false;
    }
    boolean this_present_debugInfo = true && this.isSetDebugInfo();
    boolean that_present_debugInfo = true && that.isSetDebugInfo();
    if (this_present_debugInfo || that_present_debugInfo) {
      if (!(this_present_debugInfo && that_present_debugInfo))
        return false;
      if (!this.debugInfo.equals(that.debugInfo))
        return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetResultType()) ? 131071 : 524287);
    if (isSetResultType())
      hashCode = hashCode * 8191 + resultType.getValue();
    hashCode = hashCode * 8191 + ((isSetDebugInfo()) ? 131071 : 524287);
    if (isSetDebugInfo())
      hashCode = hashCode * 8191 + debugInfo.hashCode();
    return hashCode;
  }

  @Override
  public int compareTo(ContainsResult other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.valueOf(isSetResultType()).compareTo(other.isSetResultType());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetResultType()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.resultType, other.resultType);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(isSetDebugInfo()).compareTo(other.isSetDebugInfo());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetDebugInfo()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.debugInfo, other.debugInfo);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }

  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("ContainsResult(");
    boolean first = true;
    if (isSetResultType()) {
      sb.append("resultType:");
      if (this.resultType == null) {
        sb.append("null");
      } else {
        sb.append(this.resultType);
      }
      first = false;
    }
    if (isSetDebugInfo()) {
      if (!first) sb.append(", ");
      sb.append("debugInfo:");
      if (this.debugInfo == null) {
        sb.append("null");
      } else {
        sb.append(this.debugInfo);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
    if (debugInfo != null) {
      debugInfo.validate();
    }
  }

  // Java serialization delegates to the Thrift compact protocol so the
  // serialized form stays stable regardless of field layout changes.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class ContainsResultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public ContainsResultStandardScheme getScheme() {
      return new ContainsResultStandardScheme();
    }
  }

  // Standard scheme: each field is written with its id/type header and
  // unknown fields are skipped on read (forward compatibility).
  private static class ContainsResultStandardScheme extends org.apache.thrift.scheme.StandardScheme<ContainsResult> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, ContainsResult struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // RESULT_TYPE
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.resultType = com.facebook.buck.artifact_cache.thrift.ContainsResultType.findByValue(iprot.readI32());
              struct.setResultTypeIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // DEBUG_INFO
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.debugInfo = new ContainsDebugInfo();
              struct.debugInfo.read(iprot);
              struct.setDebugInfoIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, ContainsResult struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.resultType != null) {
        if (struct.isSetResultType()) {
          oprot.writeFieldBegin(RESULT_TYPE_FIELD_DESC);
          oprot.writeI32(struct.resultType.getValue());
          oprot.writeFieldEnd();
        }
      }
      if (struct.debugInfo != null) {
        if (struct.isSetDebugInfo()) {
          oprot.writeFieldBegin(DEBUG_INFO_FIELD_DESC);
          struct.debugInfo.write(oprot);
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class ContainsResultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public ContainsResultTupleScheme getScheme() {
      return new ContainsResultTupleScheme();
    }
  }

  // Tuple scheme: optional-field presence is encoded in a leading bit set,
  // then set fields are written in order without per-field headers.
  private static class ContainsResultTupleScheme extends org.apache.thrift.scheme.TupleScheme<ContainsResult> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, ContainsResult struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetResultType()) {
        optionals.set(0);
      }
      if (struct.isSetDebugInfo()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetResultType()) {
        oprot.writeI32(struct.resultType.getValue());
      }
      if (struct.isSetDebugInfo()) {
        struct.debugInfo.write(oprot);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, ContainsResult struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.resultType = com.facebook.buck.artifact_cache.thrift.ContainsResultType.findByValue(iprot.readI32());
        struct.setResultTypeIsSet(true);
      }
      if (incoming.get(1)) {
        struct.debugInfo = new ContainsDebugInfo();
        struct.debugInfo.read(iprot);
        struct.setDebugInfoIsSet(true);
      }
    }
  }

  // Picks the scheme implementation matching the protocol in use.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
| apache-2.0 |
gosu-lang/gosu-lang | gosu-core/src/main/java/gw/internal/gosu/parser/ParameterizedDynamicPropertySymbol.java | 1882 | /*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.lang.parser.IReducedDynamicPropertySymbol;
import gw.lang.parser.TypeVarToTypeMap;
import gw.lang.reflect.IType;
import gw.lang.parser.ScriptPartId;
/**
*/
/**
 * A {@link DynamicPropertySymbol} specialized for a parameterized owner class:
 * it wraps a delegate (generic) property symbol and re-resolves the property's
 * type and getter/setter functions against the owner's actual type arguments.
 */
public class ParameterizedDynamicPropertySymbol extends DynamicPropertySymbol
{
  // Final: assigned once in the constructor and never rebound.
  private final DynamicPropertySymbol _delegate;

  /**
   * @param dpsDelegate the raw (generic) property symbol being specialized
   * @param owner the (possibly parameterized) class that owns this property
   */
  public ParameterizedDynamicPropertySymbol( DynamicPropertySymbol dpsDelegate, IGosuClassInternal owner )
  {
    super( dpsDelegate );
    _delegate = dpsDelegate;
    setModifierInfo( _delegate.getModifierInfo() );
    assignPossibleParameterizedGetterAndSetter( owner );
    setType( getActualType( dpsDelegate.getType(), owner ) );
    setScriptPart( new ScriptPartId( owner, null ) );
  }

  /** @return the underlying generic property symbol this one specializes */
  public DynamicPropertySymbol getDelegate()
  {
    return _delegate;
  }

  /**
   * Replaces the getter/setter function symbols with versions parameterized by
   * the owner, when the owner is a parameterized type and the accessors were
   * not already declared directly on it.
   */
  private void assignPossibleParameterizedGetterAndSetter( IGosuClassInternal owner )
  {
    if( owner == null || !owner.isParameterizedType() )
    {
      return;
    }
    if( _dfsGetter != null && _dfsGetter.getGosuClass() != owner )
    {
      _dfsGetter = _dfsGetter.getParameterizedVersion( owner );
    }
    if( _dfsSetter != null && _dfsSetter.getGosuClass() != owner )
    {
      _dfsSetter = _dfsSetter.getParameterizedVersion( owner );
    }
  }

  /**
   * Resolves the property's declared type against the owner's actual type
   * parameters, e.g. {@code T} becomes {@code String} for {@code Foo<String>}.
   * A null or non-parameterized owner leaves the type unchanged.
   */
  private IType getActualType( IType propType, IGosuClassInternal ownerType )
  {
    // Null guard mirrors assignPossibleParameterizedGetterAndSetter(), which
    // explicitly tolerates a null owner; previously this would have thrown NPE.
    if( ownerType != null && ownerType.isParameterizedType() )
    {
      TypeVarToTypeMap actualParamByVarName = TypeLord.mapTypeByVarName( ownerType, ownerType );
      propType = TypeLord.getActualType( propType, actualParamByVarName, true );
    }
    return propType;
  }

  @Override
  public IReducedDynamicPropertySymbol createReducedSymbol() {
    return new ReducedParameterizedDynamicPropertySymbol(
        (ReducedDynamicPropertySymbol) getDelegate().createReducedSymbol(), this);
  }
}
| apache-2.0 |
jyemin/mongo-java-driver | driver-sync/src/main/com/mongodb/client/ListIndexesIterable.java | 1519 | /*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.client;
import java.util.concurrent.TimeUnit;
/**
* Iterable for ListIndexes.
*
* @param <TResult> The type of the result.
* @since 3.0
*/
public interface ListIndexesIterable<TResult> extends MongoIterable<TResult> {
    /**
     * Sets the maximum execution time on the server for this operation.
     *
     * @param maxTime the max time; a value of 0 (the server default) means no limit
     * @param timeUnit the time unit, which may not be null
     * @return this
     * @mongodb.driver.manual reference/operator/meta/maxTimeMS/ Max Time
     */
    ListIndexesIterable<TResult> maxTime(long maxTime, TimeUnit timeUnit);

    /**
     * Sets the number of documents to return per batch.
     *
     * @param batchSize the batch size
     * @return this
     * @mongodb.driver.manual reference/method/cursor.batchSize/#cursor.batchSize Batch Size
     */
    @Override
    ListIndexesIterable<TResult> batchSize(int batchSize);
}
| apache-2.0 |
marques-work/gocd | config/config-server/src/main/java/com/thoughtworks/go/config/GoConfigMigration.java | 6712 | /*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.domain.GoConfigRevision;
import com.thoughtworks.go.util.TimeProvider;
import org.apache.commons.io.FileUtils;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.input.SAXBuilder;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.io.*;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static com.thoughtworks.go.util.ExceptionUtils.bombIfNull;
import static com.thoughtworks.go.util.XmlUtils.buildXmlDocument;
/**
* @understands how to migrate from a previous version of config
*/
@Component
public class GoConfigMigration {
private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(GoConfigMigration.class.getName());
private final String schemaVersion = "schemaVersion";
private final TimeProvider timeProvider;
private final ConfigElementImplementationRegistry registry;
@Autowired
public GoConfigMigration(final TimeProvider timeProvider, ConfigElementImplementationRegistry registry) {
this.timeProvider = timeProvider;
this.registry = registry;
}
public File revertFileToVersion(File configFile, GoConfigRevision currentConfigRevision) {
File backupFile = getBackupFile(configFile, "invalid.");
try {
backup(configFile, backupFile);
FileUtils.writeStringToFile(configFile, currentConfigRevision.getContent());
} catch (IOException e1) {
throw new RuntimeException(String.format("Could not write to config file '%s'.", configFile.getAbsolutePath()), e1);
}
return backupFile;
}
public String upgradeIfNecessary(String content) {
return upgrade(content, getCurrentSchemaVersion(content));
}
private void backup(File configFile, File backupFile) throws IOException {
FileUtils.copyFile(configFile, backupFile);
LOG.info("Config file is backed up, location: {}", backupFile.getAbsolutePath());
}
File getBackupFile(File configFile, final String prefix) {
String timestamp = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(timeProvider.currentTime());
return new File(configFile + "." + prefix + timestamp);
}
private String upgrade(String content, int currentVersion) {
int targetVersion = GoConfigSchema.currentSchemaVersion();
return upgrade(content, currentVersion, targetVersion);
}
public String upgrade(String content, int currentVersion, int targetVersion) {
LOG.info("Upgrading config file from version {} to version {}", currentVersion, targetVersion);
List<URL> upgradeScripts = upgradeScripts(currentVersion, targetVersion);
for (URL upgradeScript : upgradeScripts) {
validate(content);
content = upgrade(content, upgradeScript);
}
validate(content);
LOG.info("Finished upgrading config file");
return content;
}
private void validate(String content) {
int currentVersion = getCurrentSchemaVersion(content);
try {
buildXmlDocument(new ByteArrayInputStream(content.getBytes()), GoConfigSchema.getResource(currentVersion), registry.xsds());
} catch (Exception e) {
throw bomb("Cruise config file with version " + currentVersion + " is invalid. Unable to upgrade.", e);
}
}
/**
 * Runs a single XSLT upgrade script over the config content and returns the
 * transformed content.
 */
private String upgrade(String originalContent, URL upgradeScript) {
    try (InputStream xslt = upgradeScript.openStream()) {
        ByteArrayOutputStream convertedConfig = new ByteArrayOutputStream();
        // Use an explicit charset on both sides of the transform; the no-arg
        // getBytes()/toString() pair depends on the platform default encoding.
        transformer(upgradeScript.getPath(), xslt)
                .transform(new StreamSource(new ByteArrayInputStream(originalContent.getBytes(java.nio.charset.StandardCharsets.UTF_8))), new StreamResult(convertedConfig));
        // UnsupportedEncodingException (an IOException) can never fire for UTF-8.
        return convertedConfig.toString("UTF-8");
    } catch (TransformerException e) {
        throw bomb("Couldn't transform configuration file using upgrade script " + upgradeScript.getPath(), e);
    } catch (IOException e) {
        throw bomb("Couldn't write converted config file", e);
    }
}
/**
 * Collects the XSL upgrade scripts needed to move from {@code currentVersion}
 * to {@code targetVersion}, in ascending version order. Bombs if any script is
 * missing from the classpath.
 */
private List<URL> upgradeScripts(int currentVersion, int targetVersion) {
    List<URL> scripts = new ArrayList<>();
    for (int version = currentVersion + 1; version <= targetVersion; version++) {
        URL script = getResource("/upgrades/" + version + ".xsl");
        bombIfNull(script, "Config File upgrade script named " + version + ".xsl is missing. Unable to perform upgrade.");
        scripts.add(script);
    }
    return scripts;
}
// Resolves a classpath resource (e.g. an upgrade XSL) relative to this class;
// returns null when the resource does not exist.
private URL getResource(String script) {
    return GoConfigMigration.class.getResource(script);
}
/**
 * Creates a {@link Transformer} for the given XSL template. Templates come from
 * the application classpath (see {@code upgradeScripts}), i.e. trusted input.
 */
private Transformer transformer(String xsltName, InputStream xslt) {
    try {
        TransformerFactory factory = TransformerFactory.newInstance();
        return factory.newTransformer(new StreamSource(xslt));
    } catch (TransformerConfigurationException e) {
        throw bomb("Couldn't parse XSL template " + xsltName, e);
    }
}
/**
 * Reads the {@code schemaVersion} attribute from the root element of the config
 * XML; content without the attribute is treated as version 0.
 */
private int getCurrentSchemaVersion(String content) {
    try {
        // NOTE(review): SAXBuilder is used with default parser settings; if config
        // content can ever come from an untrusted source, consider disabling
        // external entity resolution (XXE).
        SAXBuilder builder = new SAXBuilder();
        // Decode with an explicit charset instead of the platform default.
        Document document = builder.build(new ByteArrayInputStream(content.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
        Element root = document.getRootElement();
        // Read the attribute once instead of twice.
        String currentVersion = root.getAttributeValue(schemaVersion);
        return Integer.parseInt(currentVersion == null ? "0" : currentVersion);
    } catch (Exception e) {
        throw bomb(e);
    }
}
}
| apache-2.0 |
dayatang/hrm-demo | dayatang-hrm-organisation/src/main/java/org/dayatang/hrm/organisation/domain/PostHolding.java | 1601 | package org.dayatang.hrm.organisation.domain;
import java.util.Date;
import java.util.List;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import org.apache.commons.lang3.builder.ToStringBuilder;
/**
 * Accountability linking an {@link Employee} (responsible) to a {@link Post}
 * (commissioner) over a period of time, persisted as the "PostHolding"
 * discriminator of the shared accountability table.
 */
@Entity
@DiscriminatorValue("PostHolding")
@NamedQueries({
        @NamedQuery(name = "PostHolding.getPostsOfEmployee", query = "select o.commissioner from PostHolding o where o.responsible = :employee and o.fromDate <= :date and o.toDate > :date"),
        @NamedQuery(name = "PostHolding.getEmployeesOfPost", query = "select o.responsible from PostHolding o where o.commissioner = :post and o.fromDate <= :date and o.toDate > :date") })
public class PostHolding extends Accountability<Post, Employee> {

    private static final long serialVersionUID = 7390804525640459582L;

    /** Required by JPA. */
    protected PostHolding() {
    }

    /** Creates a holding of {@code post} by {@code employee} starting at {@code date}. */
    public PostHolding(Post post, Employee employee, Date date) {
        super(post, employee, date);
    }

    /** Posts held by the given employee on the given date. */
    public static List<Post> findPostsOfEmployee(Employee employee, Date date) {
        return getRepository()
                .createNamedQuery("PostHolding.getPostsOfEmployee")
                .addParameter("employee", employee)
                .addParameter("date", date)
                .list();
    }

    /** Employees holding the given post on the given date. */
    public static List<Employee> findEmployeesOfPost(Post post, Date date) {
        return getRepository()
                .createNamedQuery("PostHolding.getEmployeesOfPost")
                .addParameter("post", post)
                .addParameter("date", date)
                .list();
    }

    @Override
    public String toString() {
        ToStringBuilder builder = new ToStringBuilder(this);
        builder.append(getCommissioner());
        builder.append(getResponsible());
        return builder.build();
    }
}
| apache-2.0 |
massakam/pulsar | pulsar-broker/src/test/java/org/apache/pulsar/common/naming/NamespaceBundlesTest.java | 15610 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.common.naming;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import com.github.benmanes.caffeine.cache.AsyncLoadingCache;
import com.google.common.collect.BoundType;
import com.google.common.collect.Range;
import com.google.common.collect.Sets;
import com.google.common.hash.Hashing;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.SortedSet;
import java.util.concurrent.CompletableFuture;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.broker.resources.LocalPoliciesResources;
import org.apache.pulsar.broker.resources.NamespaceResources;
import org.apache.pulsar.broker.resources.PulsarResources;
import org.apache.pulsar.metadata.api.extended.MetadataStoreExtended;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "broker-naming")
public class NamespaceBundlesTest {

    // Recreated before every test method; backed entirely by mocked Pulsar resources.
    private NamespaceBundleFactory factory;

    @BeforeMethod(alwaysRun = true)
    protected void initializeFactory() {
        factory = getNamespaceBundleFactory();
    }

    /**
     * Verifies via reflection that the NamespaceBundles constructor stores the
     * partition boundaries, namespace name, and derived per-range bundles.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testConstructor() throws Exception {
        long[] partitions = new long[]{0L, 0x10000000L, 0x40000000L, 0xffffffffL};
        NamespaceBundles bundles = new NamespaceBundles(NamespaceName.get("pulsar/use/ns2"), factory, Optional.empty(), partitions);
        Field partitionField = NamespaceBundles.class.getDeclaredField("partitions");
        Field nsField = NamespaceBundles.class.getDeclaredField("nsname");
        Field bundlesField = NamespaceBundles.class.getDeclaredField("bundles");
        partitionField.setAccessible(true);
        nsField.setAccessible(true);
        bundlesField.setAccessible(true);
        long[] partFld = (long[]) partitionField.get(bundles);
        // the same instance
        assertEquals(partitions.length, partFld.length);
        NamespaceName nsFld = (NamespaceName) nsField.get(bundles);
        assertEquals(nsFld.toString(), "pulsar/use/ns2");
        // N+1 boundaries produce N bundles; only the last range is closed on both ends.
        ArrayList<NamespaceBundle> bundleList = (ArrayList<NamespaceBundle>) bundlesField.get(bundles);
        assertEquals(bundleList.size(), 3);
        assertEquals(bundleList.get(0),
                factory.getBundle(nsFld, Range.range(0L, BoundType.CLOSED, 0x10000000L, BoundType.OPEN)));
        assertEquals(bundleList.get(1),
                factory.getBundle(nsFld, Range.range(0x10000000L, BoundType.CLOSED, 0x40000000L, BoundType.OPEN)));
        assertEquals(bundleList.get(2),
                factory.getBundle(nsFld, Range.range(0x40000000L, BoundType.CLOSED, 0xffffffffL, BoundType.CLOSED)));
    }

    /**
     * Builds a CRC32-hashing bundle factory over a fully mocked PulsarService so
     * tests need no broker, metadata store, or policies.
     */
    @SuppressWarnings("unchecked")
    private NamespaceBundleFactory getNamespaceBundleFactory() {
        PulsarService pulsar = mock(PulsarService.class);
        MetadataStoreExtended store = mock(MetadataStoreExtended.class);
        when(pulsar.getLocalMetadataStore()).thenReturn(store);
        when(pulsar.getConfigurationMetadataStore()).thenReturn(store);
        PulsarResources resources = mock(PulsarResources.class);
        when(pulsar.getPulsarResources()).thenReturn(resources);
        // Both policy lookups resolve to "absent" so the factory uses defaults.
        when(resources.getLocalPolicies()).thenReturn(mock(LocalPoliciesResources.class));
        when(resources.getLocalPolicies().getLocalPoliciesWithVersion(any())).thenReturn(
                CompletableFuture.completedFuture(Optional.empty()));
        when(resources.getNamespaceResources()).thenReturn(mock(NamespaceResources.class));
        when(resources.getNamespaceResources().getPoliciesAsync(any())).thenReturn(
                CompletableFuture.completedFuture(Optional.empty()));
        return NamespaceBundleFactory.createFactory(pulsar, Hashing.crc32());
    }

    /**
     * findBundle must return a bundle containing the topic, reject topics from a
     * different namespace, and fail when the topic's hash falls outside the
     * partition boundaries.
     */
    @Test
    public void testFindBundle() throws Exception {
        SortedSet<Long> partitions = Sets.newTreeSet();
        partitions.add(0L);
        partitions.add(0x40000000L);
        partitions.add(0xa0000000L);
        partitions.add(0xb0000000L);
        partitions.add(0xc0000000L);
        partitions.add(0xffffffffL);
        NamespaceBundles bundles = new NamespaceBundles(NamespaceName.get("pulsar/global/ns1"),
                factory, Optional.empty(), partitions);
        TopicName topicName = TopicName.get("persistent://pulsar/global/ns1/topic-1");
        NamespaceBundle bundle = bundles.findBundle(topicName);
        assertTrue(bundle.includes(topicName));
        topicName = TopicName.get("persistent://pulsar/use/ns2/topic-2");
        try {
            bundles.findBundle(topicName);
            fail("Should have failed due to mismatched namespace name");
        } catch (IllegalArgumentException iae) {
            // OK, expected
        }
        Long hashKey = factory.getLongHashCode(topicName.toString());
        // The following code guarantees that we have at least two ranges after the hashKey till the end
        SortedSet<Long> tailSet = partitions.tailSet(hashKey);
        tailSet.add(hashKey);
        // Now, remove the first range to ensure the hashKey is not included in <code>newPar</code>
        Iterator<Long> iter = tailSet.iterator();
        iter.next();
        SortedSet<Long> newPar = tailSet.tailSet(iter.next());
        try {
            bundles = new NamespaceBundles(topicName.getNamespaceObject(), factory, Optional.empty(), newPar);
            bundles.findBundle(topicName);
            fail("Should have failed due to out-of-range");
        } catch (IndexOutOfBoundsException iae) {
            // OK, expected
        }
    }

    /**
     * Splitting a bundle into N pieces must grow the bundle count by N-1 and the
     * children must exactly cover the parent's key range; repeated for nested splits.
     */
    @Test
    public void testSplitBundles() throws Exception {
        NamespaceName nsname = NamespaceName.get("pulsar/global/ns1");
        TopicName topicName = TopicName.get("persistent://pulsar/global/ns1/topic-1");
        NamespaceBundles bundles = factory.getBundles(nsname);
        NamespaceBundle bundle = bundles.findBundle(topicName);
        final int numberSplitBundles = 4;
        // (1) split in 4
        Pair<NamespaceBundles, List<NamespaceBundle>> splitBundles = factory.splitBundles(bundle, numberSplitBundles,
                null).join();
        // existing_no_bundles(1) +
        // additional_new_split_bundle(4) -
        // parent_target_bundle(1)
        int totalExpectedSplitBundles = bundles.getBundles().size() + numberSplitBundles - 1;
        validateSplitBundlesRange(bundles.getFullBundle(), splitBundles.getRight());
        assertEquals(totalExpectedSplitBundles, splitBundles.getLeft().getBundles().size());
        // (2) split in 4: first bundle from above split bundles
        NamespaceBundleFactory utilityFactory = getNamespaceBundleFactory();
        NamespaceBundles bundles2 = splitBundles.getLeft();
        NamespaceBundle testChildBundle = bundles2.getBundles().get(0);
        Pair<NamespaceBundles, List<NamespaceBundle>> splitChildBundles =
                splitBundlesUtilFactory(
                        utilityFactory,
                        nsname,
                        bundles2,
                        testChildBundle,
                        numberSplitBundles);
        // existing_no_bundles(4) +
        // additional_new_split_bundle(4) -
        // parent_target_bundle(1)
        totalExpectedSplitBundles = bundles2.getBundles().size() + numberSplitBundles - 1;
        validateSplitBundlesRange(testChildBundle, splitChildBundles.getRight());
        assertEquals(totalExpectedSplitBundles, splitChildBundles.getLeft().getBundles().size());
        // (3) split in 3: second bundle from above split bundles
        NamespaceBundle testChildBundl2 = bundles2.getBundles().get(1);
        Pair<NamespaceBundles, List<NamespaceBundle>> splitChildBundles2 =
                splitBundlesUtilFactory(
                        utilityFactory,
                        nsname,
                        bundles2,
                        testChildBundl2,
                        3);
        // existing_no_bundles(4) +
        // additional_new_split_bundle(3) -
        // parent_target_bundle(1)
        totalExpectedSplitBundles = bundles2.getBundles().size() + 3 - 1;
        validateSplitBundlesRange(testChildBundl2, splitChildBundles2.getRight());
        assertEquals(totalExpectedSplitBundles, splitChildBundles2.getLeft().getBundles().size());
    }

    /**
     * Two-way splits must cut each range exactly at its midpoint, recursively.
     */
    @Test
    public void testSplitBundleInTwo() throws Exception {
        final int NO_BUNDLES = 2;
        NamespaceName nsname = NamespaceName.get("pulsar/global/ns1");
        TopicName topicName = TopicName.get("persistent://pulsar/global/ns1/topic-1");
        NamespaceBundles bundles = factory.getBundles(nsname);
        NamespaceBundle bundle = bundles.findBundle(topicName);
        // (1) split : [0x00000000,0xffffffff] => [0x00000000_0x7fffffff,0x7fffffff_0xffffffff]
        Pair<NamespaceBundles, List<NamespaceBundle>> splitBundles = factory.splitBundles(bundle, NO_BUNDLES,
                null).join();
        assertNotNull(splitBundles);
        assertBundleDivideInTwo(bundle, splitBundles.getRight());
        // (2) split: [0x00000000,0x7fffffff] => [0x00000000_0x3fffffff,0x3fffffff_0x7fffffff],
        // [0x7fffffff,0xffffffff] => [0x7fffffff_0xbfffffff,0xbfffffff_0xffffffff]
        NamespaceBundleFactory utilityFactory = getNamespaceBundleFactory();
        assertBundles(utilityFactory, nsname, bundle, splitBundles, NO_BUNDLES);
        // (3) split: [0x00000000,0x3fffffff] => [0x00000000_0x1fffffff,0x1fffffff_0x3fffffff],
        // [0x3fffffff,0x7fffffff] => [0x3fffffff_0x5fffffff,0x5fffffff_0x7fffffff]
        Pair<NamespaceBundles, List<NamespaceBundle>> splitChildBundles = splitBundlesUtilFactory(utilityFactory,
                nsname, splitBundles.getLeft(), splitBundles.getRight().get(0), NO_BUNDLES);
        assertBundles(utilityFactory, nsname, splitBundles.getRight().get(0), splitChildBundles, NO_BUNDLES);
        // (4) split: [0x7fffffff,0xbfffffff] => [0x7fffffff_0x9fffffff,0x9fffffff_0xbfffffff],
        // [0xbfffffff,0xffffffff] => [0xbfffffff_0xdfffffff,0xdfffffff_0xffffffff]
        splitChildBundles = splitBundlesUtilFactory(utilityFactory, nsname, splitBundles.getLeft(),
                splitBundles.getRight().get(1), NO_BUNDLES);
        assertBundles(utilityFactory, nsname, splitBundles.getRight().get(1), splitChildBundles, NO_BUNDLES);
    }

    /**
     * Splitting at a fixed boundary must reject boundaries equal to either
     * endpoint and honor an in-range boundary exactly.
     */
    @Test
    public void testSplitBundleByFixBoundary() throws Exception {
        NamespaceName nsname = NamespaceName.get("pulsar/global/ns1");
        NamespaceBundles bundles = factory.getBundles(nsname);
        NamespaceBundle bundleToSplit = bundles.getBundles().get(0);
        try {
            factory.splitBundles(bundleToSplit, 0, bundleToSplit.getLowerEndpoint());
        } catch (IllegalArgumentException e) {
            //No-op
        }
        try {
            factory.splitBundles(bundleToSplit, 0, bundleToSplit.getUpperEndpoint());
        } catch (IllegalArgumentException e) {
            //No-op
        }
        Long fixBoundary = bundleToSplit.getLowerEndpoint() + 10;
        Pair<NamespaceBundles, List<NamespaceBundle>> splitBundles = factory.splitBundles(bundleToSplit,
                0, fixBoundary).join();
        assertEquals(splitBundles.getRight().get(0).getLowerEndpoint(), bundleToSplit.getLowerEndpoint());
        assertEquals(splitBundles.getRight().get(1).getLowerEndpoint().longValue(), bundleToSplit.getLowerEndpoint() + fixBoundary);
    }

    // Asserts that the union of the children's key ranges is exactly the parent's range.
    private void validateSplitBundlesRange(NamespaceBundle fullBundle, List<NamespaceBundle> splitBundles) {
        assertNotNull(fullBundle);
        assertNotNull(splitBundles);
        Range<Long> fullRange = fullBundle.getKeyRange();
        Range<Long> span = splitBundles.get(0).getKeyRange();
        for (NamespaceBundle bundle : splitBundles) {
            span = span.span(bundle.getKeyRange());
        }
        assertEquals(span, fullRange);
    }

    // Seeds the utility factory's private bundle cache via reflection so the split
    // operates on the given (possibly already-split) bundle set.
    @SuppressWarnings("unchecked")
    private Pair<NamespaceBundles, List<NamespaceBundle>> splitBundlesUtilFactory(NamespaceBundleFactory utilityFactory,
            NamespaceName nsname, NamespaceBundles bundles, NamespaceBundle targetBundle, int numBundles)
            throws Exception {
        Field bCacheField = NamespaceBundleFactory.class.getDeclaredField("bundlesCache");
        bCacheField.setAccessible(true);
        ((AsyncLoadingCache<NamespaceName, NamespaceBundles>) bCacheField.get(utilityFactory)).put(nsname,
                CompletableFuture.completedFuture(bundles));
        return utilityFactory.splitBundles(targetBundle, numBundles, null).join();
    }

    // Splits both children of a previous split and checks each divides at its midpoint.
    private void assertBundles(NamespaceBundleFactory utilityFactory,
                               NamespaceName nsname,
                               NamespaceBundle bundle,
                               Pair<NamespaceBundles, List<NamespaceBundle>> splitBundles,
                               int numBundles) throws Exception {
        NamespaceBundle bundle1 = splitBundles.getRight().get(0);
        NamespaceBundle bundle2 = splitBundles.getRight().get(1);
        NamespaceBundles nspaceBundles = splitBundles.getLeft();
        Pair<NamespaceBundles, List<NamespaceBundle>> bundle1Split = splitBundlesUtilFactory(utilityFactory, nsname,
                nspaceBundles, bundle1, numBundles);
        assertBundleDivideInTwo(bundle1, bundle1Split.getRight());
        Pair<NamespaceBundles, List<NamespaceBundle>> bundle2Split = splitBundlesUtilFactory(utilityFactory, nsname,
                nspaceBundles, bundle2, numBundles);
        assertBundleDivideInTwo(bundle2, bundle2Split.getRight());
    }

    // Parses the parent's "0xLOWER_0xUPPER" range string and checks the two children
    // meet exactly at the integer midpoint.
    private void assertBundleDivideInTwo(NamespaceBundle bundle,
                                         List<NamespaceBundle> bundles) {
        assertEquals(bundles.size(), 2);
        String[] range = bundle.getBundleRange().split("_");
        long lower = Long.decode(range[0]);
        long upper = Long.decode(range[1]);
        long middle = ((upper - lower) / 2) + lower;
        String lRange = String.format("0x%08x_0x%08x", lower, middle);
        String uRange = String.format("0x%08x_0x%08x", middle, upper);
        assertEquals(lRange, bundles.get(0).getBundleRange());
        assertEquals(uRange, bundles.get(1).getBundleRange());
        log.info("[{},{}] => [{},{}]", range[0], range[1], lRange, uRange);
    }

    private static final Logger log = LoggerFactory.getLogger(NamespaceBundlesTest.class);
}
| apache-2.0 |
rakesh4u/springside-sub | tiny-examples/spring-turtorial-classpath-scanning-and-managed-components/src/test/java/st/cp/AppTest.java | 633 | package st.cp;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
public class AppTest extends TestCase {

    /**
     * Creates the test case.
     *
     * @param testName name of the test case
     */
    public AppTest(String testName) {
        super(testName);
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite() {
        return new TestSuite(AppTest.class);
    }

    /**
     * Rigorous test :-)
     */
    public void testApp() {
        assertTrue(true);
    }
}
| apache-2.0 |
eladnava/redalert-android | app/src/main/java/com/betomaluje/miband/bluetooth/WriteAction.java | 1930 | package com.betomaluje.miband.bluetooth;
import com.betomaluje.miband.ActionCallback;
import java.util.UUID;
/**
* Created by Lewis on 10/01/15.
*/
public class WriteAction implements BLEAction {
private UUID service;
private final UUID characteristic;
private final byte[] payload;
private ActionCallback callback;
public WriteAction(final UUID service, final UUID characteristic, final byte[] payload) {
this.service = service;
this.characteristic = characteristic;
this.payload = payload;
}
public WriteAction(final UUID service, final UUID characteristic, final byte[] payload, ActionCallback callback) {
this.service = service;
this.characteristic = characteristic;
this.payload = payload;
this.callback = callback;
}
public WriteAction(final UUID characteristic, final byte[] payload) {
this.characteristic = characteristic;
this.payload = payload;
}
public WriteAction(final UUID characteristic, final byte[] payload, ActionCallback callback) {
this.characteristic = characteristic;
this.payload = payload;
this.callback = callback;
}
public UUID getCharacteristic() {
return characteristic;
}
public byte[] getPayload() {
return payload;
}
public ActionCallback getCallback() {
return callback;
}
public UUID getService() {
return service;
}
@Override
public boolean expectsResult() {
return true;
}
@Override
public boolean run(BTCommandManager btCommandManager) {
if (service == null)
return btCommandManager.writeCharacteristicWithResponse(getCharacteristic(), getPayload(), getCallback());
else
return btCommandManager.writeCharacteristicWithResponse(getService(), getCharacteristic(), getPayload(), getCallback());
}
}
| apache-2.0 |
levi-h/aluminumproject | tests/src/test/java/com/googlecode/aluminumproject/utilities/finders/FieldFinderTest.java | 2845 | /*
* Copyright 2010-2012 Aluminum project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.aluminumproject.utilities.finders;
import com.googlecode.aluminumproject.utilities.finders.FieldFinder.FieldFilter;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.util.List;
import org.testng.annotations.Test;
@SuppressWarnings("javadoc")
@Test(groups = {"utilities", "fast"})
public class FieldFinderTest {

    /** A null target type must be rejected even when the filter accepts everything. */
    @Test(expectedExceptions = IllegalArgumentException.class)
    public void notSupplyingTypeShouldCauseException() {
        FieldFinder.find(new FieldFilter() {
            public boolean accepts(Field field) {
                return true;
            }
        }, null);
    }

    /** Fields declared directly on the class itself are found. */
    public void fieldsOfClassShouldBeFindable() throws NoSuchFieldException {
        List<Field> textualFields = FieldFinder.find(new FieldFilter() {
            public boolean accepts(Field field) {
                return field.getType() == String.class;
            }
        }, Car.class);
        assert textualFields != null;
        assert textualFields.size() == 1;
        assert textualFields.contains(Car.class.getDeclaredField("manufacturer"));
    }

    /** Fields inherited from a superclass are found too. */
    public void fieldsOfSuperclassShouldBeFindable() throws NoSuchFieldException {
        List<Field> numericFields = FieldFinder.find(new FieldFilter() {
            public boolean accepts(Field field) {
                return field.getType() == Integer.TYPE;
            }
        }, Car.class);
        assert numericFields != null;
        assert numericFields.size() == 1;
        assert numericFields.contains(Vehicle.class.getDeclaredField("maximumSpeed"));
    }

    /** Constants declared on implemented interfaces (implicitly static) are found. */
    public void fieldsOfImplementedInterfaceShouldBeFindable() throws NoSuchFieldException {
        List<Field> constants = FieldFinder.find(new FieldFilter() {
            public boolean accepts(Field field) {
                return Modifier.isStatic(field.getModifiers());
            }
        }, Car.class);
        assert constants != null;
        assert constants.size() == 1;
        assert constants.contains(TrafficParticipant.class.getField("MINIMUM_SPEEDING_FINE_AMOUNT"));
    }

    // Fixture hierarchy: Car extends Vehicle and implements TrafficParticipant,
    // exercising declared, inherited, and interface-constant fields.
    private static class Vehicle {
        @SuppressWarnings("unused")
        private int maximumSpeed;
    }

    private static interface TrafficParticipant {
        @SuppressWarnings("unused")
        BigDecimal MINIMUM_SPEEDING_FINE_AMOUNT = new BigDecimal("25");
    }

    private static class Car extends Vehicle implements TrafficParticipant {
        @SuppressWarnings("unused")
        private String manufacturer;
    }
}
apache/incubator-shardingsphere | shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-suite/src/test/java/org/apache/shardingsphere/test/integration/framework/container/atomic/ITContainer.java | 1058 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.test.integration.framework.container.atomic;
import org.testcontainers.lifecycle.Startable;
/**
* IT container.
*/
public interface ITContainer extends Startable {

    /**
     * Overrides {@link Startable#stop()} as a no-op.
     * NOTE(review): presumably containers are intentionally kept running for reuse
     * across the integration-test suite — confirm before relying on this default.
     */
    @Override
    default void stop() {
    }
}
| apache-2.0 |
leochuan/CustomLayoutManager | app/src/main/java/rouchuan/viewpagerlayoutmanager/gallery/GalleryLayoutActivity.java | 664 | package rouchuan.viewpagerlayoutmanager.gallery;
import com.leochuan.GalleryLayoutManager;
import rouchuan.viewpagerlayoutmanager.BaseActivity;
import rouchuan.viewpagerlayoutmanager.Util;
/**
* Created by Dajavu on 27/10/2017.
*/
/**
 * Demo screen wiring a {@link GalleryLayoutManager} and its settings pop-up into
 * the shared {@link BaseActivity} scaffolding.
 */
public class GalleryLayoutActivity extends BaseActivity<GalleryLayoutManager, GalleryPopUpWindow> {

    @Override
    protected GalleryLayoutManager createLayoutManager() {
        // 10dp item spacing, converted to pixels for the current display density.
        return new GalleryLayoutManager(this, Util.Dp2px(this, 10));
    }

    @Override
    protected GalleryPopUpWindow createSettingPopUpWindow() {
        return new GalleryPopUpWindow(this, getViewPagerLayoutManager(), getRecyclerView());
    }
}
| apache-2.0 |
hushengjun/FastAndroid | main/me/src/main/java/com/hsj/me/base/MeConstant.java | 830 | /*
* Copyright (c) 2017. HSJ
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hsj.me.base;
/**
* @Author:HSJ
* @E-mail:shengjunhu@foxmail.com
* @Date:2017/12/5/11:19
* @Class:MeConstant
* @Description:
*/
public interface MeConstant {
    // NOTE(review): this interface is currently empty. If it is meant to hold shared
    // constants, prefer a final class with a private constructor (or an enum) over
    // the constant-interface pattern; if it is unused, consider removing it.
}
| apache-2.0 |
feesa/easyrec-parent | easyrec-web/src/test/java/org/easyrec/utils/Movielens100kImporter.java | 20974 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.easyrec.utils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.easyrec.exception.core.ClusterException;
import org.easyrec.model.core.web.Item;
import org.easyrec.model.core.TenantVO;
import org.easyrec.model.core.web.Operator;
import org.easyrec.model.core.web.RemoteTenant;
import org.easyrec.model.core.web.Session;
import org.easyrec.service.core.ClusterService;
import org.easyrec.service.core.TenantService;
import org.easyrec.service.domain.TypeMappingService;
import org.easyrec.service.web.NamedConfigurationService;
import org.easyrec.service.web.nodomain.ShopRecommenderService;
import org.easyrec.store.dao.IDMappingDAO;
import org.easyrec.store.dao.core.types.ItemTypeDAO;
import org.easyrec.store.dao.web.OperatorDAO;
import org.easyrec.store.dao.web.RemoteTenantDAO;
import org.easyrec.utils.spring.cli.AbstractDependencyInjectionSpringCLI;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.net.URI;
import java.net.URLDecoder;
import java.util.*;
import org.easyrec.plugin.container.PluginRegistry;
import org.easyrec.plugin.model.Version;
/**
* DOCUMENT ME!
*
* @author pmarschik
*/
@SuppressWarnings({"UnusedDeclaration"})
public class Movielens100kImporter extends AbstractDependencyInjectionSpringCLI {
private static final Log logger = LogFactory.getLog(Movielens1MImporter.class);
private static final double RATING_COUNT = 100000;
private static final double MOVIE_COUNT = 1682;
private static final double USER_COUNT = 943;
private OperatorDAO operatorDAO;
private RemoteTenantDAO remoteTenantDAO;
private ShopRecommenderService shopRecommenderService;
private TenantService tenantService;
private ClusterService clusterService;
private TypeMappingService typeMappingService;
private IDMappingDAO idMappingDAO;
private ItemTypeDAO itemTypeDAO;
private NamedConfigurationService namedConfigurationService;
private PluginRegistry pluginRegistry;
private HashMap<Integer, String> clusters;
/**
 * CLI entry point: runs the MovieLens 100k importer with the given arguments.
 */
public static void main(String[] args) {
    Movielens100kImporter importer = new Movielens100kImporter();
    importer.processCommandLineCall(args);
    // Explicit exit — presumably to terminate non-daemon threads left by the
    // Spring context; TODO confirm.
    System.exit(0);
}
// --- Spring dependency-injection accessors -------------------------------------
// Plain setters/getters used by the XML application context to wire collaborators.

public void setNamedConfigurationService(NamedConfigurationService namedConfigurationService) {
    this.namedConfigurationService = namedConfigurationService;
}

public void setItemTypeDAO(ItemTypeDAO itemTypeDAO) {
    this.itemTypeDAO = itemTypeDAO;
}

public void setOperatorDAO(OperatorDAO operatorDAO) {
    this.operatorDAO = operatorDAO;
}

public void setRemoteTenantDAO(RemoteTenantDAO remoteTenantDAO) {
    this.remoteTenantDAO = remoteTenantDAO;
}

public void setShopRecommenderService(ShopRecommenderService shopRecommenderService) {
    this.shopRecommenderService = shopRecommenderService;
}

public void setTenantService(TenantService tenantService) {
    this.tenantService = tenantService;
}

public ClusterService getClusterService() {
    return clusterService;
}

public void setClusterService(ClusterService clusterService) {
    this.clusterService = clusterService;
}

public TypeMappingService getTypeMappingService() {
    return typeMappingService;
}

public void setTypeMappingService(TypeMappingService typeMappingService) {
    this.typeMappingService = typeMappingService;
}

// NOTE(review): getidMappingDAO/setidMappingDAO and getIdMappingDAO/setIdMappingDAO
// are duplicate accessor pairs for the same field (differing only in capitalization).
// Both are kept because the Spring XML config may reference either bean property
// name; consolidate once the config is confirmed to use only one of them.
public IDMappingDAO getidMappingDAO() {
    return idMappingDAO;
}

public void setidMappingDAO(IDMappingDAO idMappingDAO) {
    this.idMappingDAO = idMappingDAO;
}

public IDMappingDAO getIdMappingDAO() {
    return idMappingDAO;
}

public void setIdMappingDAO(IDMappingDAO idMappingDAO) {
    this.idMappingDAO = idMappingDAO;
}

public PluginRegistry getPluginRegistry() {
    return pluginRegistry;
}

public void setPluginRegistry(PluginRegistry pluginRegistry) {
    this.pluginRegistry = pluginRegistry;
}
// Spring context definition used to wire this importer's collaborators.
@Override
protected String[] getConfigLocations() {
    return new String[]{"spring/web/importer/movielens/AllInOne_Movielens100k.xml"};
}
/**
 * Runs the import and maps the framework's int-status contract onto it:
 * 0 on success, any failure rethrown as an unchecked exception (cause preserved).
 */
@Override
protected int processCommandLineCall(String[] args) {
    try {
        processCommandLineCallEx(args);
        return 0;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Initializes the genre-id to genre-name lookup (ids 0-18, as defined by the
 * MovieLens 100k data set) used to map genre flags to cluster/item-type names.
 */
public void initClusters() {
    // Index in this array == MovieLens genre id.
    final String[] genreNames = {
            "Unknown", "Action", "Adventure", "Animation", "Children", "Comedy",
            "Crime", "Documentary", "Drama", "Fantasy", "Filmnoir", "Horror",
            "Musical", "Mystery", "Romance", "Scifi", "Thriller", "War", "Western",
    };
    clusters = new HashMap<Integer, String>();
    for (int genreId = 0; genreId < genreNames.length; genreId++) {
        clusters.put(genreId, genreNames[genreId]);
    }
}
// Prints CLI usage: data-set directory path plus a flag controlling cluster creation.
@Override
protected void usage() {
    System.out.println("Usage: java -...ImporterCLI <path_to_movielens_100k> <useClusters>");
}
/**
 * Imports the MovieLens 100k rating file into a freshly created tenant.
 * <p>
 * If a tenant named {@code tenantName} already exists the import is skipped
 * entirely. Otherwise a tenant is created and configured (archiver off,
 * backtracking on, scheduler off), every rating row is replayed as a rate
 * action plus a view action, and — when requested — one cluster per genre is
 * created and filled with the imported movies.
 *
 * @param operator the easyrec operator that owns the new tenant
 * @param ratingsFile tab separated rating file (columns: userId, movieId, rating, timestamp)
 * @param movies movie id to {@link Movie} lookup, as produced by {@code parseMovies}
 * @param tenantName name of the tenant to create
 * @param session session attached to all generated rate/view actions
 * @param useClusters when {@code true}, create one cluster per genre and add each movie to
 *        the clusters of all its genres
 * @param useItemTypes when {@code true}, register one item type per genre and tag each
 *        rating with the type derived from the movie's first set genre flag
 * @throws FileNotFoundException if {@code ratingsFile} cannot be opened
 * @throws NumberFormatException if a timestamp column is not a parseable long
 */
private void parseData(Operator operator, File ratingsFile, Map<Integer, Movie> movies, String tenantName,
                       Session session, boolean useClusters, boolean useItemTypes)
        throws FileNotFoundException, NumberFormatException {
    if (remoteTenantDAO.exists(tenantName)) {
        System.out.println("Tenant " + tenantName + " already exists. SKIPPING import.");
        return;
    }
    String tenantDescription = "MovieLens data sets were collected by the GroupLens Research Project\n" +
            "at the University of Minnesota.\n" + "\n" + "This data set consists of:\n" +
            "\t* 100,000 ratings (1-5) from 943 users on 1682 movies.\n" +
            "\t* Each user has rated at least 20 movies.";
    // rating scale 1..5 with a neutral value of 2.5
    TenantVO tenant = new TenantVO(tenantName, tenantDescription, 1, 5, 2.5);
    tenantService.insertTenantWithTypes(tenant, null);
    System.out.println("\nTenant got id: " + tenant.getId());
    remoteTenantDAO.update(operator.getOperatorId(), tenant.getId(), "", tenantDescription);
    tenantService.updateConfigProperty(tenant.getId(), RemoteTenant.AUTO_ARCHIVER_ENABLED, "false");
    tenantService.updateConfigProperty(tenant.getId(), RemoteTenant.AUTO_ARCHIVER_TIME_RANGE,
            RemoteTenant.AUTO_ARCHIVER_DEFAULT_TIME_RANGE);
    // enable backtracking by default
    tenantService.updateConfigProperty(tenant.getId(), RemoteTenant.BACKTRACKING, "true");
    // the scheduler (auto rule mining) is explicitly disabled for the import
    tenantService.updateConfigProperty(tenant.getId(), RemoteTenant.SCHEDULER_ENABLED, "false");
    tenantService.updateConfigProperty(tenant.getId(), RemoteTenant.SCHEDULER_EXECUTION_TIME,
            RemoteTenant.SCHEDULER_DEFAULT_EXECUTION_TIME);
    namedConfigurationService.setupDefaultConfiguration(tenant.getId());
    RemoteTenant remoteTenant = remoteTenantDAO.get(tenant.getId());
    Scanner ratings = new Scanner(ratingsFile);
    ratings.useDelimiter("\\t|(\\r)?\\n");
    System.out.println("\nLoading ratings ...");
    int line = 0;
    int lastPerc = 0;
    if (useItemTypes) {
        for (String itemTypeName : clusters.values()) {
            itemTypeDAO.insertOrUpdate(tenant.getId(), "GENRE_" + itemTypeName.toUpperCase(), true);
        }
    }
    try {
        do {
            line++;
            // emit a progress marker at every full 10% step
            double percentage = (line * 100.0) / RATING_COUNT;
            if (((Math.floor(percentage) % 10) == 0) && ((int) percentage != lastPerc)) {
                lastPerc = (int) percentage;
                System.out.print(lastPerc + "% ");
            }
            // u.data columns: userId \t movieId \t rating \t timestamp
            int userId = ratings.nextInt();
            int movieId = ratings.nextInt();
            int rating = ratings.nextInt();
            String timestampStr = ratings.next();
            Date timestamp = new Date(Long.parseLong(timestampStr));
            Movie movie = movies.get(movieId);
            String itemType = Item.DEFAULT_STRING_ITEM_TYPE;
            if (useItemTypes) {
                // only the first set genre flag determines the item type
                int genreId = movie.getGenres().nextSetBit(0);
                if (genreId >= 0 && clusters.containsKey(genreId)) {
                    String clusterName = clusters.get(genreId);
                    if (clusterName != null)
                        itemType = "GENRE_" + clusterName.toUpperCase();
                }
            }
            shopRecommenderService.rateItem(remoteTenant, "" + userId, "" + movieId, itemType,
                    movie.getName() + " Genres: " + movie.getGenres(), movie.getImdbUrl(), movie.getGeneratedImageUrl(),
                    rating, timestamp, session, null);
            // always also view with type item
            shopRecommenderService.viewItem(remoteTenant, "" + userId, "" + movieId, Item.DEFAULT_STRING_ITEM_TYPE,
                    movie.getName() + " Genres: " + movie.getGenres(), movie.getImdbUrl(), movie.getGeneratedImageUrl(),
                    timestamp, session, null);
        } while (ratings.hasNextInt());
    } finally {
        // release the scanner (and underlying file handle) even if parsing fails;
        // previously the scanner was never closed at all
        ratings.close();
    }
    if (useClusters) {
        System.out.println("Creating Clusters for tenant!\n");
        for (String clusterName : clusters.values()) {
            try {
                clusterService.addCluster(remoteTenant.getId(), clusterName, "The Genre " + clusterName,
                        clusterService.getClustersForTenant(remoteTenant.getId()).getRoot().getName());
            } catch (ClusterException ce) {
                System.out
                        .println("An error occured creating the clusters for tenant " + remoteTenant.getStringId() +
                                ": " + ce.getMessage());
            }
        }
        System.out.println("Done!");
        System.out.println("Adding movies to clusters:\n");
        for (Movie movie : movies.values()) {
            // iterate over every set genre bit of the movie
            for (int i = movie.getGenres().nextSetBit(0); i >= 0; i = movie.getGenres().nextSetBit(i + 1)) {
                try {
                    if ((i < 0) || (i > 18)) {
                        System.out.println("Unknown Genre: " + i + " " + movie.getName());
                    } else {
                        clusterService.addItemToCluster(remoteTenant.getId(), clusters.get(i),
                                idMappingDAO.lookup(Integer.toString(movie.getId())),
                                typeMappingService.getIdOfItemType(remoteTenant.getId(),
                                        Item.DEFAULT_STRING_ITEM_TYPE));
                    }
                } catch (ClusterException ce) {
                    System.out
                            .println("An error occured adding item " + movie.getName() + " to cluster " +
                                    clusters.get(i) + ": " + ce.getMessage());
                }
            }
        }
        System.out.println("Done!");
    }
}
/**
 * Parses the MovieLens {@code u.item} file into {@link Movie} objects keyed by movie id.
 * <p>
 * Each row is pipe-separated: id, title, release date, video release date, IMDb URL,
 * followed by 19 one-hot genre flags.
 *
 * @param moviesFile the {@code u.item} file of the MovieLens 100k data set
 * @return movie id to {@link Movie}, sorted by id
 * @throws FileNotFoundException if {@code moviesFile} cannot be opened
 */
private Map<Integer, Movie> parseMovies(File moviesFile) throws FileNotFoundException {
    Map<Integer, Movie> movies = new TreeMap<Integer, Movie>();
    FileInputStream fsi = new FileInputStream(moviesFile);
    Scanner movieScanner = new Scanner(fsi, "UTF-8");
    try {
        movieScanner.useDelimiter("\\||\\r?\\n");
        System.out.println("Loading movies ...");
        int line = 0;
        int lastPerc = 0;
        do {
            line++;
            // emit a progress marker at every full 10% step
            double percentage = (line * 100.0) / MOVIE_COUNT;
            if (((Math.floor(percentage) % 10) == 0) && ((int) percentage != lastPerc)) {
                lastPerc = (int) percentage;
                System.out.print(lastPerc + "% ");
            }
            int id = movieScanner.nextInt();
            String name = movieScanner.next();
            String releaseDate = movieScanner.next();
            String videoReleaseDate = movieScanner.next();
            String imdbUrl = movieScanner.next();
            try {
                imdbUrl = URLDecoder.decode(imdbUrl, "UTF-8").replaceAll(" ", "+");
            } catch (Exception ignored) {
                // best effort: keep the raw URL if decoding fails
            }
            // 19 one-hot genre flags in fixed u.item column order:
            // unknown, action, adventure, animation, childrens, comedy, crime,
            // documentary, drama, fantasy, filmnoir, horror, musical, mystery,
            // romance, scifi, thriller, war, western
            BitSet genres = new BitSet();
            for (int genreIdx = 0; genreIdx < 19; genreIdx++) {
                genres.set(genreIdx, movieScanner.nextInt() == 1);
            }
            // NOTE: an experimental Freebase thumbnail lookup used to live here;
            // removed as dead (commented-out) code — the Freebase API is defunct.
            String imageUrl = "";
            movies.put(id, new Movie(id, name, releaseDate, videoReleaseDate, imdbUrl, genres, imageUrl));
        } while (movieScanner.hasNextInt());
    } finally {
        // closing the scanner also closes the underlying FileInputStream;
        // previously this was skipped when a parse exception was thrown
        movieScanner.close();
    }
    return movies;
}
/**
 * CLI entry point: imports the MovieLens 100k data set.
 * <p>
 * Expects the path to the extracted data set as the single argument (falls back
 * to a hard-coded developer path otherwise). Imports the full rating file as
 * tenant "Movielens_100k" and the five cross-validation base splits
 * ({@code u1.base} .. {@code u5.base}) as separate tenants.
 *
 * @param args command line arguments; {@code args[0]} is the data set directory
 * @throws Exception if parsing or importing fails
 */
private void processCommandLineCallEx(String[] args) throws Exception {
    // fallback path used when no argument is supplied
    String pathToDataset = "C://projects//easyrec//movielens//small//ml-data"; //"C:\\DATA\\datasets\\ml100k";
    boolean useClusters = true;
    boolean useItemTypes = true;
    if (args.length != 1) {
        if (!new File(pathToDataset).exists()) usage();
    } else pathToDataset = args[0];
    File datasetFile = new File(pathToDataset);
    File moviesFile = new File(datasetFile.getAbsolutePath() + File.separator + "u.item");
    File ratingsFile = new File(datasetFile.getAbsolutePath() + File.separator + "u.data");
    if (!datasetFile.exists()) {
        System.err.println("Path \"" + pathToDataset + "\" doesn't exist.");
        return;
    }
    if (!moviesFile.exists() || !ratingsFile.exists()) {
        logger.info(moviesFile.toString());
        logger.info(ratingsFile.toString());
        // report the file names actually looked up (the old message referred to
        // "movies.dat or ratings.dat", which are not part of the 100k data set)
        System.err.println("u.item or u.data not found.");
        return;
    }
    pluginRegistry.installPlugin(URI.create("http://www.easyrec.org/plugins/ARM"), new Version("0.98"));
    Map<Integer, Movie> movies = parseMovies(moviesFile);
    //noinspection ConstantConditions
    if (useClusters || useItemTypes)
        initClusters();
    Operator operator = operatorDAO.get("easyrec");
    Session session = new Session("ml100k-import-session", "127.0.0.1");
    parseData(operator, ratingsFile, movies, "Movielens_100k", session, useClusters, useItemTypes);
    // the five 80/20 cross-validation training splits become separate tenants
    for (int i = 0; i < 5; i++) {
        ratingsFile = new File(datasetFile.getAbsolutePath() + File.separator + "u" + (i + 1) + ".base");
        parseData(operator, ratingsFile, movies, "Movielens_100k_" + (i + 1), session, useClusters, useItemTypes);
    }
}
/**
 * Immutable value holder for one row of the MovieLens {@code u.item} file.
 * Instances are created once by {@code parseMovies} and never modified.
 */
private static class Movie {
    // all fields are final: the class exposes no setters, so this documents
    // (and enforces) the existing read-only usage
    private final BitSet genres;
    private final String imdbUrl;
    private final String name;
    private final String releaseDate;
    private final String videoReleaseDate;
    private final String generatedImageUrl;
    private final int id;

    /**
     * @param id numeric MovieLens movie id
     * @param name movie title column as read from u.item
     * @param releaseDate release date column as read from u.item
     * @param videoReleaseDate video release date column as read from u.item
     * @param imdbUrl link to the IMDb page of the movie
     * @param genres one bit per genre flag, in u.item column order
     * @param generatedImageUrl thumbnail URL; may be empty
     */
    public Movie(int id, String name, String releaseDate, String videoReleaseDate, String imdbUrl, BitSet genres,
                 String generatedImageUrl) {
        this.id = id;
        this.name = name;
        this.releaseDate = releaseDate;
        this.videoReleaseDate = videoReleaseDate;
        this.imdbUrl = imdbUrl;
        this.genres = genres;
        this.generatedImageUrl = generatedImageUrl;
    }

    public BitSet getGenres() {
        return genres;
    }

    public int getId() {
        return id;
    }

    public String getImdbUrl() {
        return imdbUrl;
    }

    public String getName() {
        return name;
    }

    public String getReleaseDate() {
        return releaseDate;
    }

    public String getVideoReleaseDate() {
        return videoReleaseDate;
    }

    public String getGeneratedImageUrl() {
        return generatedImageUrl;
    }
}
}
| apache-2.0 |
jwang98052/reef | lang/java/reef-runtime-mock/src/main/java/org/apache/reef/mock/runtime/package-info.java | 899 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
/**
* mock runtime implementation.
*/
package org.apache.reef.mock.runtime;
| apache-2.0 |
apache/directory-server | protocol-dns/src/main/java/org/apache/directory/server/dns/service/DomainNameService.java | 7828 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.dns.service;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.directory.server.dns.DnsException;
import org.apache.directory.server.dns.messages.DnsMessage;
import org.apache.directory.server.dns.messages.DnsMessageModifier;
import org.apache.directory.server.dns.messages.MessageType;
import org.apache.directory.server.dns.messages.OpCode;
import org.apache.directory.server.dns.messages.QuestionRecord;
import org.apache.directory.server.dns.messages.ResourceRecord;
import org.apache.directory.server.dns.messages.ResponseCode;
import org.apache.directory.server.dns.store.RecordStore;
import org.apache.directory.server.i18n.I18n;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Domain Name Service (DNS) Protocol (RFC 1034, 1035)
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public final class DomainNameService
{
    /** the log for this class */
    private static final Logger LOG = LoggerFactory.getLogger( DomainNameService.class );


    private DomainNameService()
    {
        // utility class, never instantiated
    }


    /**
     * Processes a DNS request: resolves the resource records matching the
     * request's question(s) and writes the reply into the context. Monitoring
     * output is produced at each stage when debug logging is enabled.
     * (The previous javadoc, "Creates a new instance of DomainNameService.",
     * was a copy-paste error.)
     *
     * @param dnsContext the context holding the record store and receiving the reply
     * @param request the DNS request to answer
     * @throws Exception if the records cannot be resolved
     */
    public static void execute( DnsContext dnsContext, DnsMessage request ) throws Exception
    {
        if ( LOG.isDebugEnabled() )
        {
            monitorRequest( request );
        }

        getResourceRecords( dnsContext, request );

        if ( LOG.isDebugEnabled() )
        {
            monitorContext( dnsContext );
        }

        buildReply( dnsContext, request );

        if ( LOG.isDebugEnabled() )
        {
            monitorReply( dnsContext );
        }
    }


    private static void monitorRequest( DnsMessage request )
    {
        try
        {
            if ( LOG.isDebugEnabled() )
            {
                LOG.debug( monitorMessage( request, "request" ) );
            }
        }
        catch ( Exception e )
        {
            // This is a monitor. No exceptions should bubble up.
            LOG.error( I18n.err( I18n.ERR_153 ), e );
        }
    }


    /**
     * Resolves the records for every question of the request and adds them to
     * the context.
     */
    private static void getResourceRecords( DnsContext dnsContext, DnsMessage request ) throws DnsException
    {
        RecordStore store = dnsContext.getStore();

        for ( QuestionRecord question : request.getQuestionRecords() )
        {
            dnsContext.addResourceRecords( getEntry( store, question ) );
        }
    }


    /**
     * Returns a set of {@link ResourceRecord}s from a {@link RecordStore}, given a DNS {@link QuestionRecord}.
     *
     * @param store the record store to query
     * @param question the question to resolve
     * @return The set of {@link ResourceRecord}s.
     * @throws DnsException with {@link ResponseCode#NAME_ERROR} if no record matches
     */
    private static Set<ResourceRecord> getEntry( RecordStore store, QuestionRecord question ) throws DnsException
    {
        Set<ResourceRecord> records = store.getRecords( question );

        if ( records == null || records.isEmpty() )
        {
            LOG.debug( "The domain name referenced in the query does not exist." );
            throw new DnsException( ResponseCode.NAME_ERROR );
        }

        return records;
    }


    private static void monitorContext( DnsContext dnsContext )
    {
        try
        {
            RecordStore store = dnsContext.getStore();
            List<ResourceRecord> records = dnsContext.getResourceRecords();

            StringBuilder sb = new StringBuilder();
            sb.append( "Monitoring context:" );
            sb.append( "\n\tstore: " ).append( store );
            sb.append( "\n\trecords: " ).append( records );

            LOG.debug( sb.toString() );
        }
        catch ( Exception e )
        {
            // This is a monitor. No exceptions should bubble up.
            LOG.error( I18n.err( I18n.ERR_154 ), e );
        }
    }


    /**
     * Builds a non-authoritative QUERY response carrying the resolved records
     * and stores it as the context's reply.
     */
    private static void buildReply( DnsContext dnsContext, DnsMessage request )
    {
        List<ResourceRecord> records = dnsContext.getResourceRecords();

        DnsMessageModifier modifier = new DnsMessageModifier();
        modifier.setTransactionId( request.getTransactionId() );
        modifier.setMessageType( MessageType.RESPONSE );
        modifier.setOpCode( OpCode.QUERY );
        modifier.setAuthoritativeAnswer( false );
        modifier.setTruncated( false );
        modifier.setRecursionDesired( request.isRecursionDesired() );
        modifier.setRecursionAvailable( false );
        modifier.setReserved( false );
        modifier.setAcceptNonAuthenticatedData( false );
        modifier.setResponseCode( ResponseCode.NO_ERROR );
        modifier.setQuestionRecords( request.getQuestionRecords() );
        modifier.setAnswerRecords( records );
        modifier.setAuthorityRecords( new ArrayList<ResourceRecord>() );
        modifier.setAdditionalRecords( new ArrayList<ResourceRecord>() );

        dnsContext.setReply( modifier.getDnsMessage() );
    }


    private static void monitorReply( DnsContext dnsContext )
    {
        try
        {
            DnsMessage reply = dnsContext.getReply();

            if ( LOG.isDebugEnabled() )
            {
                LOG.debug( monitorMessage( reply, "reply" ) );
            }
        }
        catch ( Exception e )
        {
            // This is a monitor. No exceptions should bubble up.
            LOG.error( I18n.err( I18n.ERR_155 ), e );
        }
    }


    /**
     * Renders a human readable dump of a DNS message for debug logging.
     */
    private static String monitorMessage( DnsMessage message, String direction )
    {
        MessageType messageType = message.getMessageType();
        OpCode opCode = message.getOpCode();
        ResponseCode responseCode = message.getResponseCode();
        int transactionId = message.getTransactionId();

        StringBuilder sb = new StringBuilder();
        sb.append( "Monitoring " ).append( direction ).append( ":" );
        sb.append( "\n\tmessageType " ).append( messageType );
        sb.append( "\n\topCode " ).append( opCode );
        sb.append( "\n\tresponseCode " ).append( responseCode );
        sb.append( "\n\ttransactionId " ).append( transactionId );
        sb.append( "\n\tauthoritativeAnswer " ).append( message.isAuthoritativeAnswer() );
        sb.append( "\n\ttruncated " ).append( message.isTruncated() );
        sb.append( "\n\trecursionDesired " ).append( message.isRecursionDesired() );
        sb.append( "\n\trecursionAvailable " ).append( message.isRecursionAvailable() );
        sb.append( "\n\treserved " ).append( message.isReserved() );
        sb.append( "\n\tacceptNonAuthenticatedData " ).append( message.isAcceptNonAuthenticatedData() );

        List<QuestionRecord> questions = message.getQuestionRecords();
        sb.append( "\n\tquestions: " ).append( questions );

        return sb.toString();
    }
}
| apache-2.0 |
stari4ek/androidtv-sample-inputs | library/src/test/java/com/google/android/media/tv/companionlibrary/sync/SyncStatusBroadcastReceiverTest.java | 5278 | /*
* Copyright 2017 The Android Open Source Project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.media.tv.companionlibrary.sync;
/** Tests for {@link SyncStatusBroadcastReceiver}. */
import android.content.Intent;
import com.google.android.media.tv.companionlibrary.BuildConfig;
import com.google.android.media.tv.companionlibrary.sync.SyncStatusBroadcastReceiver.SyncListener;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
/**
 * Tests for {@link SyncStatusBroadcastReceiver}: verifies that sync status
 * broadcast intents from {@link EpgSyncJobService} are routed to the correct
 * {@link SyncListener} callbacks, and that events after a finished sync, for a
 * different input id, or with a missing input id are ignored.
 * (The previous javadoc described channel Builder tests — a copy-paste error.)
 */
@RunWith(RobolectricTestRunner.class)
@Config(
        constants = BuildConfig.class,
        sdk = 23,
        manifest =
                "src/main/AndroidManifest.xml"
)
public class SyncStatusBroadcastReceiverTest {
    private static final String TEST_INPUT = "test_input";

    // fixture: a listener that records every callback, wired into the receiver under test
    private final TestSyncListener mSyncListener = new TestSyncListener();
    private final SyncStatusBroadcastReceiver mReceiver =
            new SyncStatusBroadcastReceiver(TEST_INPUT, mSyncListener);

    @Test
    public void receiveSyncScannedIntent() {
        Intent intent =
                EpgSyncJobService.createSyncScannedIntent(TEST_INPUT, 5, 10, "display", "1.1");
        mReceiver.onReceive(null, intent);
        Assert.assertEquals("finished", false, mSyncListener.mFinished);
        Assert.assertEquals("scanned", 5, mSyncListener.mChannelsScanned);
        Assert.assertEquals("count", 10, mSyncListener.mChannelCount);
        Assert.assertEquals("displayName", "display", mSyncListener.mDisplayName);
        Assert.assertEquals("displayNumber", "1.1", mSyncListener.mDisplayNumber);
    }

    @Test
    public void receiveSyncFinishedIntent() {
        Intent intent = EpgSyncJobService.createSyncFinishedIntent(TEST_INPUT);
        mReceiver.onReceive(null, intent);
        Assert.assertEquals("finished", true, mSyncListener.mFinished);
    }

    @Test
    public void receiveSyncErrorIntent() {
        Intent intent = EpgSyncJobService.createSyncErrorIntent(TEST_INPUT, 42);
        mReceiver.onReceive(null, intent);
        Assert.assertEquals("finished", false, mSyncListener.mFinished);
        Assert.assertEquals("errorCode", 42, mSyncListener.mErrorCode);
    }

    @Test
    public void receiveSyncFinishedIntent_noOtherChanges() {
        Intent intent = EpgSyncJobService.createSyncFinishedIntent(TEST_INPUT);
        mReceiver.onReceive(null, intent);
        Assert.assertEquals("finished", true, mSyncListener.mFinished);
        // scan events arriving after the sync finished must be ignored
        Intent after =
                EpgSyncJobService.createSyncScannedIntent(TEST_INPUT, 5, 10, "display", "1.1");
        mReceiver.onReceive(null, after);
        Assert.assertEquals("finished", true, mSyncListener.mFinished);
        Assert.assertEquals("scanned", 0, mSyncListener.mChannelsScanned);
        Assert.assertEquals("count", 0, mSyncListener.mChannelCount);
        Assert.assertEquals("displayName", null, mSyncListener.mDisplayName);
        Assert.assertEquals("displayNumber", null, mSyncListener.mDisplayNumber);
    }

    @Test
    public void receiveSyncFinishedIntent_otherInput() {
        // events for a different input id must not trigger callbacks
        Intent intent = EpgSyncJobService.createSyncFinishedIntent("other");
        mReceiver.onReceive(null, intent);
        Assert.assertEquals("finished", false, mSyncListener.mFinished);
    }

    @Test
    public void receiveSyncFinishedIntent_nullInput() {
        Intent intent = new Intent(EpgSyncJobService.ACTION_SYNC_STATUS_CHANGED);
        // NOTE BUNDLE_KEY_INPUT_ID is not set
        intent.putExtra(EpgSyncJobService.SYNC_STATUS, EpgSyncJobService.SYNC_FINISHED);
        mReceiver.onReceive(null, intent);
        Assert.assertEquals("finished", false, mSyncListener.mFinished);
    }

    /** Records every {@link SyncListener} callback so tests can assert on it. */
    private static final class TestSyncListener implements SyncListener {
        private int mChannelsScanned;
        private int mChannelCount;
        private CharSequence mDisplayName;
        private CharSequence mDisplayNumber;
        private boolean mFinished = false;
        private int mErrorCode = 0;

        @Override
        public void onScanStepCompleted(int completedStep, int totalSteps) {
            this.mChannelsScanned = completedStep;
            this.mChannelCount = totalSteps;
        }

        @Override
        public void onScannedChannel(CharSequence displayName, CharSequence displayNumber) {
            mDisplayName = displayName;
            mDisplayNumber = displayNumber;
        }

        @Override
        public void onScanFinished() {
            mFinished = true;
        }

        @Override
        public void onScanError(int errorCode) {
            mErrorCode = errorCode;
        }
    }
}
| apache-2.0 |
ChrisCanCompute/assertj-core | src/main/java/org/assertj/core/util/TextFileWriter.java | 1497 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.util;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.Charset;
/**
 * Singleton helper that writes lines of text to a file, replacing any previous
 * content. Each element of the supplied content array becomes one line,
 * terminated with the platform line separator.
 *
 * @author Yvonne Wang
 * @author Olivier Michallat
 */
public class TextFileWriter {
  private static final TextFileWriter INSTANCE = new TextFileWriter();

  /** @return the shared {@code TextFileWriter} instance. */
  public static TextFileWriter instance() {
    return INSTANCE;
  }

  /**
   * Writes the given lines to {@code file} using the platform default charset.
   *
   * @param file the destination file; created or truncated as needed
   * @param content the lines to write
   * @throws IOException if the file cannot be written
   */
  public void write(File file, String... content) throws IOException {
    write(file, Charset.defaultCharset(), content);
  }

  /**
   * Writes the given lines to {@code file}, encoded with {@code charset}.
   *
   * @param file the destination file; created or truncated as needed
   * @param charset the charset used to encode the lines
   * @param content the lines to write
   * @throws IOException if the file cannot be written
   */
  public void write(File file, Charset charset, String... content) throws IOException {
    try (PrintWriter out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(file), charset))) {
      for (int i = 0; i < content.length; i++) {
        out.println(content[i]);
      }
    }
  }

  private TextFileWriter() {
  }
}
| apache-2.0 |
tommyettinger/SquidSetup | src/main/java/com/github/czyzby/lml/vis/parser/impl/attribute/grid/ItemWidthLmlAttribute.java | 747 | package com.github.czyzby.lml.vis.parser.impl.attribute.grid;
import com.github.czyzby.lml.parser.LmlParser;
import com.github.czyzby.lml.parser.tag.LmlAttribute;
import com.github.czyzby.lml.parser.tag.LmlTag;
import com.kotcrab.vis.ui.layout.GridGroup;
/** Handles the "itemWidth" attribute of a {@link GridGroup}: evaluates the raw
 * attribute value as a float and applies it through
 * {@link GridGroup#setItemWidth(float)}.
 *
 * @author MJ */
public class ItemWidthLmlAttribute implements LmlAttribute<GridGroup> {
    @Override
    public Class<GridGroup> getHandledType() {
        return GridGroup.class;
    }

    @Override
    public void process(final LmlParser parser, final LmlTag tag, final GridGroup actor,
            final String rawAttributeData) {
        // Let the parser evaluate the attribute before applying it to the group.
        final float itemWidth = parser.parseFloat(rawAttributeData, actor);
        actor.setItemWidth(itemWidth);
    }
}
| apache-2.0 |
apache/directory-server | protocol-ldap/src/main/java/org/apache/directory/server/ldap/handlers/request/ModifyDnRequestHandler.java | 5056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.ldap.handlers.request;
import org.apache.directory.api.ldap.model.message.LdapResult;
import org.apache.directory.api.ldap.model.message.ModifyDnRequest;
import org.apache.directory.api.ldap.model.message.ModifyDnResponse;
import org.apache.directory.api.ldap.model.message.ResultCodeEnum;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.server.core.api.CoreSession;
import org.apache.directory.server.ldap.LdapSession;
import org.apache.directory.server.ldap.handlers.LdapRequestHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A single reply MessageReceived handler for {@link org.apache.directory.api.ldap.model.message.ModifyDnRequest}s.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class ModifyDnRequestHandler extends LdapRequestHandler<ModifyDnRequest>
{
private static final Logger LOG = LoggerFactory.getLogger( ModifyDnRequestHandler.class );
/**
* Deal with a ModifyDN request received from a client.
*
* A ModifyDN operation has more than one semantic, depending on its parameters.
*
* In any case, the first argument is the Dn entry to be changed. We then
* have the new relative Dn for this entry.
*
* Two other arguments can be provided :
* - deleteOldRdn : if the old Rdn attributes should be removed from the
* new entry or not (for instance, if the old Rdn was cn=acme, and the new
* one is sn=acme, then we may have to remove the cn: acme from the attributes
* list)
* - newSuperior : this is a move operation. The entry is removed from its
* current location, and created in the new one.
*/
public void handle( LdapSession session, ModifyDnRequest modifyDnRequest )
{
ModifyDnResponse modifyDnResponse = ( ModifyDnResponse ) modifyDnRequest.getResultResponse();
LdapResult result = modifyDnResponse.getLdapResult();
LOG.debug( "Handling modify dn request while ignoring referrals: {}", modifyDnRequest );
if ( modifyDnRequest.getName().isEmpty() )
{
// it is not allowed to modify the name of the Root DSE
String msg = "Modify Dn is not allowed on Root DSE.";
result.setResultCode( ResultCodeEnum.PROTOCOL_ERROR );
result.setDiagnosticMessage( msg );
session.getIoSession().write( modifyDnResponse );
return;
}
try
{
SchemaManager schemaManager = session.getCoreSession().getDirectoryService().getSchemaManager();
Dn newRdn = new Dn( schemaManager, modifyDnRequest.getNewRdn().getName() );
Dn oldRdn = new Dn( schemaManager, modifyDnRequest.getName().getRdn().getName() );
boolean rdnChanged = modifyDnRequest.getNewRdn() != null && !newRdn.equals( oldRdn );
CoreSession coreSession = session.getCoreSession();
if ( rdnChanged )
{
if ( modifyDnRequest.getNewSuperior() != null )
{
coreSession.moveAndRename( modifyDnRequest );
}
else
{
coreSession.rename( modifyDnRequest );
}
}
else if ( modifyDnRequest.getNewSuperior() != null )
{
modifyDnRequest.setNewRdn( null );
coreSession.move( modifyDnRequest );
}
else
{
result.setDiagnosticMessage( "Attempt to move entry onto itself." );
result.setResultCode( ResultCodeEnum.ENTRY_ALREADY_EXISTS );
result.setMatchedDn( modifyDnRequest.getName() );
session.getIoSession().write( modifyDnResponse );
return;
}
result.setResultCode( ResultCodeEnum.SUCCESS );
session.getIoSession().write( modifyDnResponse );
}
catch ( Exception e )
{
handleException( session, modifyDnRequest, modifyDnResponse, e );
}
}
} | apache-2.0 |
nagyistoce/camunda-bpm-elasticsearch | elasticsearch-cockpit-plugin/src/main/java/org/camunda/bpm/cockpit/plugin/dashboards/resources/DateHistogramBucketPair.java | 557 | package org.camunda.bpm.cockpit.plugin.dashboards.resources;
/**
 * Mutable (key, document count) pair describing one bucket of a date
 * histogram aggregation result.
 */
public class DateHistogramBucketPair {

    // Bucket key — presumably the timestamp of the interval start; confirm
    // against the aggregation that produces these pairs.
    protected Number key;
    // Number of documents counted in this bucket.
    protected long docCount;

    /**
     * @param key the bucket key
     * @param docCount the number of documents in the bucket
     */
    public DateHistogramBucketPair(Number key, long docCount) {
        this.key = key;
        this.docCount = docCount;
    }

    public Number getKey() {
        return key;
    }

    public void setKey(Number key) {
        this.key = key;
    }

    public long getDocCount() {
        return docCount;
    }

    public void setDocCount(long docCount) {
        this.docCount = docCount;
    }
}
| apache-2.0 |
punkhorn/camel-upstream | core/camel-core/src/main/java/org/apache/camel/model/language/SimpleExpression.java | 3485 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model.language;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.CamelContext;
import org.apache.camel.Expression;
import org.apache.camel.Predicate;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.builder.SimpleBuilder;
import org.apache.camel.spi.Metadata;
/**
 * To use Camels built-in Simple language in Camel expressions or predicates.
 */
@Metadata(firstVersion = "1.1.0", label = "language,core,java", title = "Simple")
@XmlRootElement(name = "simple")
@XmlAccessorType(XmlAccessType.FIELD)
public class SimpleExpression extends ExpressionDefinition {
    @XmlAttribute(name = "resultType")
    private String resultTypeName;
    @XmlTransient
    private Class<?> resultType;

    public SimpleExpression() {
    }

    public SimpleExpression(String expression) {
        super(expression);
    }

    public SimpleExpression(Expression expression) {
        super(expression);
    }

    public String getLanguage() {
        return "simple";
    }

    public Class<?> getResultType() {
        return resultType;
    }

    /**
     * Sets the class of the result type (type from output)
     */
    public void setResultType(Class<?> resultType) {
        this.resultType = resultType;
    }

    public String getResultTypeName() {
        return resultTypeName;
    }

    /**
     * Sets the class name of the result type (type from output)
     */
    public void setResultTypeName(String resultTypeName) {
        this.resultTypeName = resultTypeName;
    }

    @Override
    public Expression createExpression(CamelContext camelContext) {
        // Lazily resolve the result type when only its class name was configured.
        if (resultType == null && resultTypeName != null) {
            try {
                resultType = camelContext.getClassResolver().resolveMandatoryClass(resultTypeName);
            } catch (ClassNotFoundException e) {
                throw RuntimeCamelException.wrapRuntimeCamelException(e);
            }
        }
        String text = getExpression();
        // Trimming is on unless it has been explicitly disabled.
        if (text != null && (getTrim() == null || getTrim())) {
            text = text.trim();
        }
        SimpleBuilder builder = new SimpleBuilder(text);
        builder.setResultType(resultType);
        return builder;
    }

    @Override
    public Predicate createPredicate(CamelContext camelContext) {
        // SimpleBuilder is also a Predicate
        return (Predicate) createExpression(camelContext);
    }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-servicenetworking/v1beta/1.31.0/com/google/api/services/servicenetworking/v1beta/model/OAuthRequirements.java | 3827 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.servicenetworking.v1beta.model;
/**
* OAuth scopes are a way to define data and permissions on data. For example, there are scopes
* defined for "Read-only access to Google Calendar" and "Access to Cloud Platform". Users can
* consent to a scope for an application, giving it permission to access that data on their behalf.
* OAuth scope specifications should be fairly coarse grained; a user will need to see and
* understand the text description of what your scope means. In most cases: use one or at most two
* OAuth scopes for an entire family of products. If your product has multiple APIs, you should
* probably be sharing the OAuth scope across all of those APIs. When you need finer grained OAuth
* consent screens: talk with your product management about how developers will use them in
* practice. Please note that even though each of the canonical scopes is enough for a request to be
* accepted and passed to the backend, a request can still fail due to the backend requiring
* additional scopes or permissions.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Service Networking API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class OAuthRequirements extends com.google.api.client.json.GenericJson {

  /**
   * The list of publicly documented OAuth scopes that are allowed access. An OAuth token containing
   * any of these scopes will be accepted. Example: canonical_scopes:
   * https://www.googleapis.com/auth/calendar, https://www.googleapis.com/auth/calendar.read
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String canonicalScopes;

  /**
   * Returns the publicly documented OAuth scopes that are allowed access, or {@code null} for
   * none. See {@link #canonicalScopes} for the expected format.
   */
  public java.lang.String getCanonicalScopes() {
    return this.canonicalScopes;
  }

  /**
   * Sets the publicly documented OAuth scopes that are allowed access ({@code null} for none) and
   * returns this object for chaining.
   */
  public OAuthRequirements setCanonicalScopes(java.lang.String canonicalScopes) {
    this.canonicalScopes = canonicalScopes;
    return this;
  }

  @Override
  public OAuthRequirements set(String fieldName, Object value) {
    return (OAuthRequirements) super.set(fieldName, value);
  }

  @Override
  public OAuthRequirements clone() {
    return (OAuthRequirements) super.clone();
  }
}
| apache-2.0 |
cushon/bazel | src/main/java/com/google/devtools/build/lib/rules/java/JavaStarlarkCommon.java | 10286 | // Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.PlatformOptions;
import com.google.devtools.build.lib.analysis.platform.ConstraintValueInfo;
import com.google.devtools.build.lib.analysis.platform.ToolchainInfo;
import com.google.devtools.build.lib.analysis.starlark.StarlarkActionFactory;
import com.google.devtools.build.lib.analysis.starlark.StarlarkRuleContext;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.Depset;
import com.google.devtools.build.lib.packages.Provider;
import com.google.devtools.build.lib.starlarkbuildapi.core.ProviderApi;
import com.google.devtools.build.lib.starlarkbuildapi.java.JavaCommonApi;
import com.google.devtools.build.lib.starlarkbuildapi.java.JavaToolchainStarlarkApiProviderApi;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.Sequence;
import net.starlark.java.eval.Starlark;
import net.starlark.java.eval.StarlarkList;
import net.starlark.java.eval.StarlarkThread;
/**
 * A module that contains Starlark utilities for Java support.
 *
 * <p>Backs the {@code java_common} Starlark module. Several methods below are intentionally
 * unimplemented in Bazel and return {@code null} or empty values; they are only callable from
 * Starlark through the (discouraged) {@code --experimental_google_legacy_api} flag.
 */
public class JavaStarlarkCommon
    implements JavaCommonApi<
        Artifact,
        JavaInfo,
        JavaToolchainProvider,
        ConstraintValueInfo,
        StarlarkRuleContext,
        StarlarkActionFactory> {

  // Language-specific behavior hooks, threaded through to compile actions.
  private final JavaSemantics javaSemantics;

  public JavaStarlarkCommon(JavaSemantics javaSemantics) {
    this.javaSemantics = javaSemantics;
  }

  /** Returns the {@code JavaInfo} provider constructor. */
  @Override
  public Provider getJavaProvider() {
    return JavaInfo.PROVIDER;
  }

  /**
   * Registers a Java compilation action from Starlark.
   *
   * <p>Validates and casts each untyped Starlark {@link Sequence} to its expected element type
   * (an {@link EvalException} is raised on mismatch), then delegates to
   * {@link JavaInfoBuildHelper}.
   */
  @Override
  public JavaInfo createJavaCompileAction(
      StarlarkRuleContext starlarkRuleContext,
      Sequence<?> sourceJars, // <Artifact> expected
      Sequence<?> sourceFiles, // <Artifact> expected
      Artifact outputJar,
      Object outputSourceJar,
      Sequence<?> javacOpts, // <String> expected
      Sequence<?> deps, // <JavaInfo> expected
      Sequence<?> experimentalLocalCompileTimeDeps, // <JavaInfo> expected
      Sequence<?> exports, // <JavaInfo> expected
      Sequence<?> plugins, // <JavaInfo> expected
      Sequence<?> exportedPlugins, // <JavaInfo> expected
      Sequence<?> annotationProcessorAdditionalInputs, // <Artifact> expected
      Sequence<?> annotationProcessorAdditionalOutputs, // <Artifact> expected
      String strictDepsMode,
      JavaToolchainProvider javaToolchain,
      Object hostJavabase,
      Sequence<?> sourcepathEntries, // <Artifact> expected
      Sequence<?> resources, // <Artifact> expected
      Boolean neverlink,
      StarlarkThread thread)
      throws EvalException, InterruptedException {
    return JavaInfoBuildHelper.getInstance()
        .createJavaCompileAction(
            starlarkRuleContext,
            Sequence.cast(sourceJars, Artifact.class, "source_jars"),
            Sequence.cast(sourceFiles, Artifact.class, "source_files"),
            outputJar,
            // output_source_jar is optional on the Starlark side; NONE means "do not emit one".
            outputSourceJar == Starlark.NONE ? null : (Artifact) outputSourceJar,
            Sequence.cast(javacOpts, String.class, "javac_opts"),
            Sequence.cast(deps, JavaInfo.class, "deps"),
            Sequence.cast(
                experimentalLocalCompileTimeDeps,
                JavaInfo.class,
                "experimental_local_compile_time_deps"),
            Sequence.cast(exports, JavaInfo.class, "exports"),
            Sequence.cast(plugins, JavaInfo.class, "plugins"),
            Sequence.cast(exportedPlugins, JavaInfo.class, "exported_plugins"),
            Sequence.cast(
                annotationProcessorAdditionalInputs,
                Artifact.class,
                "annotation_processor_additional_inputs"),
            Sequence.cast(
                annotationProcessorAdditionalOutputs,
                Artifact.class,
                "annotation_processor_additional_outputs"),
            strictDepsMode,
            javaToolchain,
            ImmutableList.copyOf(Sequence.cast(sourcepathEntries, Artifact.class, "sourcepath")),
            Sequence.cast(resources, Artifact.class, "resources"),
            neverlink,
            javaSemantics,
            thread);
  }

  /**
   * Builds an interface jar (ijar) from {@code jar}. {@code targetLabel} may be Starlark NONE, in
   * which case no label is recorded.
   */
  @Override
  public Artifact runIjar(
      StarlarkActionFactory actions,
      Artifact jar,
      Object targetLabel,
      JavaToolchainProvider javaToolchain)
      throws EvalException {
    return JavaInfoBuildHelper.getInstance()
        .buildIjar(
            actions, jar, targetLabel != Starlark.NONE ? (Label) targetLabel : null, javaToolchain);
  }

  /** Stamps {@code jar} with {@code targetLabel}; delegates to {@link JavaInfoBuildHelper}. */
  @Override
  public Artifact stampJar(
      StarlarkActionFactory actions,
      Artifact jar,
      Label targetLabel,
      JavaToolchainProvider javaToolchain)
      throws EvalException {
    return JavaInfoBuildHelper.getInstance().stampJar(actions, jar, targetLabel, javaToolchain);
  }

  /**
   * Packs source files and source jars into a single source jar. {@code outputJar} and
   * {@code outputSourceJar} are only used when they are actual {@link Artifact}s (they arrive from
   * Starlark as untyped objects and may be NONE).
   */
  @Override
  public Artifact packSources(
      StarlarkActionFactory actions,
      Object outputJar,
      Object outputSourceJar,
      Sequence<?> sourceFiles, // <Artifact> expected.
      Sequence<?> sourceJars, // <Artifact> expected.
      JavaToolchainProvider javaToolchain,
      Object hostJavabase)
      throws EvalException {
    return JavaInfoBuildHelper.getInstance()
        .packSourceFiles(
            actions,
            outputJar instanceof Artifact ? (Artifact) outputJar : null,
            outputSourceJar instanceof Artifact ? (Artifact) outputSourceJar : null,
            Sequence.cast(sourceFiles, Artifact.class, "sources"),
            Sequence.cast(sourceJars, Artifact.class, "source_jars"),
            javaToolchain);
  }

  /** Returns the toolchain's default javac options. */
  @Override
  // TODO(b/78512644): migrate callers to passing explicit javacopts or using custom toolchains, and
  // delete
  public ImmutableList<String> getDefaultJavacOpts(JavaToolchainProvider javaToolchain)
      throws EvalException {
    // We don't have a rule context if the default_javac_opts.java_toolchain parameter is set
    // (the parameter is already a JavaToolchainProvider, so no cast is needed).
    return javaToolchain.getJavacOptions(/* ruleContext= */ null);
  }

  /** Merges a sequence of {@code JavaInfo} providers into a single one. */
  @Override
  public JavaInfo mergeJavaProviders(Sequence<?> providers /* <JavaInfo> expected. */)
      throws EvalException {
    return JavaInfo.merge(Sequence.cast(providers, JavaInfo.class, "providers"));
  }

  // TODO(b/65113771): Remove this method because it's incorrect.
  @Override
  public JavaInfo makeNonStrict(JavaInfo javaInfo) {
    return JavaInfo.Builder.copyOf(javaInfo)
        // Overwrites the old provider.
        .addProvider(
            JavaCompilationArgsProvider.class,
            JavaCompilationArgsProvider.makeNonStrict(
                javaInfo.getProvider(JavaCompilationArgsProvider.class)))
        .build();
  }

  /** Returns the generic toolchain provider used for Java toolchains. */
  @Override
  public Provider getJavaToolchainProvider() {
    return ToolchainInfo.PROVIDER;
  }

  /** Returns the generic toolchain provider used for Java runtimes. */
  @Override
  public Provider getJavaRuntimeProvider() {
    return ToolchainInfo.PROVIDER;
  }

  /** Whether toolchain resolution is enabled for Java rules in this configuration. */
  @Override
  public boolean isJavaToolchainResolutionEnabled(StarlarkRuleContext ruleContext)
      throws EvalException {
    return ruleContext
        .getConfiguration()
        .getOptions()
        .get(PlatformOptions.class)
        .useToolchainResolutionForJavaRules;
  }

  @Override
  public ProviderApi getMessageBundleInfo() {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return null;
  }

  @Override
  public JavaInfo addConstraints(JavaInfo javaInfo, Sequence<?> constraints) throws EvalException {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return null;
  }

  @Override
  public Sequence<String> getConstraints(JavaInfo javaInfo) {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return StarlarkList.empty();
  }

  @Override
  public JavaInfo removeAnnotationProcessors(JavaInfo javaInfo) {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return null;
  }

  @Override
  public JavaInfo setAnnotationProcessing(
      JavaInfo javaInfo,
      boolean enabled,
      Sequence<?> processorClassnames,
      Object processorClasspath,
      Object classJar,
      Object sourceJar)
      throws EvalException {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return null;
  }

  @Override
  public Depset /*<Artifact>*/ getCompileTimeJavaDependencyArtifacts(JavaInfo javaInfo) {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return null;
  }

  @Override
  public JavaInfo addCompileTimeJavaDependencyArtifacts(
      JavaInfo javaInfo, Sequence<?> compileTimeJavaDependencyArtifacts) throws EvalException {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return null;
  }

  @Override
  public Label getJavaToolchainLabel(JavaToolchainStarlarkApiProviderApi toolchain)
      throws EvalException {
    // No implementation in Bazel. This method not callable in Starlark except through
    // (discouraged) use of --experimental_google_legacy_api.
    return null;
  }

  /** Returns the {@code BootClassPathInfo} provider constructor. */
  @Override
  public ProviderApi getBootClassPathInfo() {
    return BootClassPathInfo.PROVIDER;
  }
}
| apache-2.0 |
obattalov/jrivets-common | src/test/java/org/jrivets/log/AbstractLoggerTest.java | 3401 | package org.jrivets.log;
import java.util.HashSet;
import java.util.Set;
import org.jrivets.log.AbstractLogger.LogLevel;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class AbstractLoggerTest {

    private TestLogger logger;

    /**
     * Minimal {@link AbstractLogger} subclass that records the last emitted
     * record as {@code "LEVEL message"} and lets tests enable levels one by one.
     */
    private static class TestLogger extends AbstractLogger {

        private String lastRecord;

        private final Set<LogLevel> enabledLevels = new HashSet<LogLevel>();

        TestLogger(String formatString, Object marker) {
            super(formatString, marker);
        }

        @Override
        public boolean isEnabled(LogLevel logLevel) {
            return enabledLevels.contains(logLevel);
        }

        @Override
        protected void log(LogLevel logLevel, String message) {
            lastRecord = logLevel.name() + " " + message;
        }

        void setEnabled(LogLevel logLevel) {
            enabledLevels.add(logLevel);
        }

        String getLogMessage() {
            return lastRecord;
        }
    }

    @BeforeMethod
    public void initSimple() {
        logger = new TestLogger(null, null);
    }

    /** Asserts that the last captured record equals {@code expected}. */
    private void assertLogged(String expected) {
        assertEquals(expected, logger.getLogMessage());
    }

    /** Asserts that nothing has been captured. */
    private void assertNotLogged() {
        assertEquals(null, logger.getLogMessage());
    }

    @Test
    public void infoEnabled() {
        logger.setEnabled(LogLevel.INFO);
        logger.info("test");
        assertLogged("INFO test");
    }

    @Test
    public void infoDisabled() {
        logger.info("test");
        assertNotLogged();
    }

    @Test
    public void warnEnabled() {
        logger.setEnabled(LogLevel.WARN);
        logger.warn("test");
        assertLogged("WARN test");
    }

    @Test
    public void warnDisabled() {
        logger.warn("test");
        assertNotLogged();
    }

    @Test
    public void debugEnabled() {
        logger.setEnabled(LogLevel.DEBUG);
        logger.debug("test");
        assertLogged("DEBUG test");
    }

    @Test
    public void debugDisabled() {
        logger.debug("test");
        assertNotLogged();
    }

    @Test
    public void traceEnabled() {
        logger.setEnabled(LogLevel.TRACE);
        logger.trace("test");
        assertLogged("TRACE test");
    }

    @Test
    public void traceDisabled() {
        logger.trace("test");
        assertNotLogged();
    }

    @Test
    public void errorEnabled() {
        logger.setEnabled(LogLevel.ERROR);
        logger.error("test");
        assertLogged("ERROR test");
    }

    @Test
    public void errorDisabled() {
        logger.error("test");
        assertNotLogged();
    }

    @Test
    public void fatalEnabled() {
        logger.setEnabled(LogLevel.FATAL);
        logger.fatal("test");
        assertLogged("FATAL test");
    }

    @Test
    public void fatalDisabled() {
        logger.fatal("test");
        assertNotLogged();
    }

    @Test
    public void testFormatter() {
        logger = new TestLogger("%1$s %2$s", "test");
        logger.setEnabled(LogLevel.INFO);
        logger.info("message");
        assertLogged("INFO test message");
    }
}
| apache-2.0 |
lhong375/aura | aura-util/src/test/java/org/auraframework/util/text/HashTest.java | 6843 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.util.text;
import java.io.StringReader;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import org.auraframework.test.UnitTestCase;
import org.auraframework.util.text.Hash.StringBuilder;
public class HashTest extends UnitTestCase {

    /** Exposes the protected default constructor of {@link Hash} so tests can build unset hashes. */
    public static class ExposedHash extends Hash {
        // expose the default c'tor
        public ExposedHash() {
            super();
        }
    }

    /** A Hash starts unset and, once set from bytes, equals a hash built directly from them. */
    public void testAsPromise() {
        byte[] bytes = { 12, 34, 56, 78, 90 };
        Hash hash = new ExposedHash();
        assertFalse(hash.isSet());
        hash.setHash(bytes);
        assertTrue(hash.isSet());
        assertEquals(new Hash(bytes), hash);
    }

    /**
     * Returns "" when {@code val} is entirely printable ASCII; otherwise returns a copy of
     * {@code val} with every non-printable character replaced by {@code [[[codepoint]]]}.
     */
    private String findNonPrint(String val) {
        // NOTE: this file imports org.auraframework.util.text.Hash.StringBuilder, which shadows
        // java.lang.StringBuilder, so the JDK builder must be fully qualified here. It replaces
        // the previous StringBuffer, whose synchronization is unnecessary in a single-threaded test.
        java.lang.StringBuilder sb = new java.lang.StringBuilder();
        boolean error = false;
        for (int i = 0; i < val.length(); i++) {
            char x = val.charAt(i);
            if (x < 128 && x >= 32) {
                sb.append(x);
            } else {
                sb.append("[[[");
                sb.append((int) x);
                sb.append("]]]");
                error = true;
            }
        }
        return error ? sb.toString() : "";
    }

    /** toString() must produce non-trivial, printable-ASCII output for several inputs. */
    public void testToString() throws Exception {
        byte[] bytes = { 12, 34, 56, 78, 90 };
        Hash hash = new Hash(bytes);
        String val = hash.toString();
        assertTrue(val.length() > 1);
        assertEquals("Bad character in string", "", findNonPrint(val));
        hash = new Hash(new StringReader("a test for all eternity"));
        val = hash.toString();
        assertTrue(val.length() > 1);
        assertEquals("Bad character in string", "", findNonPrint(val));
        hash = new Hash(new StringReader("a different test for all eternity"));
        val = hash.toString();
        assertTrue(val.length() > 1);
        assertEquals("Bad character in string", "", findNonPrint(val));
        hash = new Hash(new StringReader("why are you looking at this anyway"));
        val = hash.toString();
        assertTrue(val.length() > 1);
        assertEquals("Bad character in string", "", findNonPrint(val));
    }

    /** Hashes built from different byte arrays are set but not equal. */
    public void testFromBytes() {
        byte[] bytes1 = { 12, 34, 56, 78, 90 };
        byte[] bytes2 = { 12, 43, 56, 78, 90 };
        Hash hash1 = new Hash(bytes1);
        Hash hash2 = new Hash(bytes2);
        assertTrue(hash1.isSet());
        assertTrue(hash2.isSet());
        assertFalse(hash1.equals(hash2));
        assertFalse(hash2.equals(hash1));
    }

    /** A hash can be built from a class name (bytecode-based constructor). */
    public void testFromBytecode() throws Exception {
        Hash hash = new Hash(HashTest.class.getName());
        assertTrue(hash.isSet());
    }

    /** Hashing via a Reader matches an MD5 digest of the same text set directly. */
    public void testFromReader() throws Exception {
        String text = "Some text to be read by the reader and hashed";
        Hash readerHash = new Hash(new StringReader(text));
        assertTrue(readerHash.isSet());
        Hash setHash = new ExposedHash();
        MessageDigest digest = MessageDigest.getInstance("MD5");
        digest.update(text.getBytes());
        setHash.setHash(digest.digest());
        assertEquals(readerHash, setHash);
        assertEquals(readerHash.hashCode(), setHash.hashCode());
    }

    /** Once set, a Hash rejects further setHash() calls of either form. */
    public void testCannotReset() throws Exception {
        byte[] bytes = { 12, 34, 56, 78, 90 };
        Hash hash = new ExposedHash();
        hash.setHash(bytes);
        assertTrue(hash.isSet());
        try {
            hash.setHash(bytes);
            fail("Hash shouldn't accept a second setHash() call");
        } catch (IllegalStateException e) {
            // expected.
        }
        try {
            hash.setHash(new StringReader("foo"));
            fail("Hash shouldn't accept a second setHash() call");
        } catch (IllegalStateException e) {
            // expected.
        }
    }

    /** equals() and hashCode() agree: same bytes are equal, different bytes are not. */
    public void testEqualsAndHashcode() {
        byte[] bytes1 = { 12, 34, 56, 78, 90 };
        byte[] bytes2 = { 12, 43, 56, 78, 90 };
        Hash hash1 = new Hash(bytes1);
        Hash hash2 = new Hash(bytes2);
        Hash hash3 = new Hash(bytes1);
        assertEquals(hash1, hash3);
        assertEquals(hash1.hashCode(), hash3.hashCode());
        assertFalse(hash1.equals(hash2));
        assertFalse(hash1.hashCode() == hash2.hashCode());
    }

    /** Computes the expected hashCode for a string: Arrays.hashCode of its MD5 digest. */
    private int getHashCode(String string) throws NoSuchAlgorithmException {
        return Arrays.hashCode(MessageDigest.getInstance("MD5").digest(string.getBytes()));
    }

    /** Asserts the given hash's set-state and hashCode. */
    private void assertHash(Hash hash, boolean isSet, int hashCode) throws Exception {
        assertEquals(isSet, hash.isSet());
        assertEquals(hashCode, hash.hashCode());
    }

    /** Builds a hash from one string via Hash.StringBuilder and checks it against getHashCode. */
    private void assertStringBuilderHash(String toHash) throws Exception {
        int expected = getHashCode(toHash);
        // This is Hash.StringBuilder (imported), not java.lang.StringBuilder.
        StringBuilder builder = new StringBuilder();
        builder.addString(toHash);
        assertHash(builder.build(), true, expected);
    }

    public void testStringBuilderEmptyString() throws Exception {
        assertStringBuilderHash("");
    }

    public void testStringBuilderSingleString() throws Exception {
        assertStringBuilderHash("never can say goodbye");
    }

    /** Splitting the input across multiple addString calls must not change the hash. */
    public void testStringBuilderMultipleStrings() throws Exception {
        String toHash = "never can say goodbye";
        int expected = getHashCode(toHash);
        StringBuilder builder = new StringBuilder();
        // split string by word boundaries
        for (String part : toHash.split("\\b")) {
            builder.addString(part);
        }
        assertHash(builder.build(), true, expected);
    }

    /** Building with no input yields the digest of zero bytes. */
    public void testStringBuilderNoStrings() throws Exception {
        int expected = Arrays.hashCode(MessageDigest.getInstance("MD5").digest());
        StringBuilder builder = new StringBuilder();
        assertHash(builder.build(), true, expected);
    }

    /** A null addString argument is ignored, matching the no-input digest. */
    public void testStringBuilderNull() throws Exception {
        int expected = Arrays.hashCode(MessageDigest.getInstance("MD5").digest());
        StringBuilder builder = new StringBuilder();
        builder.addString(null);
        assertHash(builder.build(), true, expected);
    }
}
| apache-2.0 |
obourgain/elasticsearch | core/src/main/java/org/elasticsearch/script/StoredScriptSource.java | 18576 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* {@link StoredScriptSource} represents user-defined parameters for a script
* saved in the {@link ClusterState}.
*/
public class StoredScriptSource extends AbstractDiffable<StoredScriptSource> implements Writeable, ToXContent {
/**
* Standard {@link ParseField} for outer level of stored script source.
*/
public static final ParseField SCRIPT_PARSE_FIELD = new ParseField("script");
/**
* Standard {@link ParseField} for outer level of stored script source.
*/
public static final ParseField TEMPLATE_PARSE_FIELD = new ParseField("template");
/**
* Standard {@link ParseField} for lang on the inner level.
*/
public static final ParseField LANG_PARSE_FIELD = new ParseField("lang");
/**
* Standard {@link ParseField} for code on the inner level.
*/
public static final ParseField CODE_PARSE_FIELD = new ParseField("code");
/**
* Standard {@link ParseField} for options on the inner level.
*/
public static final ParseField OPTIONS_PARSE_FIELD = new ParseField("options");
/**
* Helper class used by {@link ObjectParser} to store mutable {@link StoredScriptSource} variables and then
* construct an immutable {@link StoredScriptSource} object based on parsed XContent.
*/
    private static final class Builder {
        // Required script language; validated in build().
        private String lang;
        // Required script/template source; validated in build().
        private String code;
        // Compiler options; mutable because entries may arrive from both setCode
        // (the internal content-type option for object templates) and setOptions.
        private Map<String, String> options;

        private Builder() {
            // This cannot default to an empty map because options are potentially added at multiple points.
            this.options = new HashMap<>();
        }

        /** Sets the script language; required before {@link #build()}. */
        private void setLang(String lang) {
            this.lang = lang;
        }

        /**
         * Since stored scripts can accept templates rather than just scripts, they must also be able
         * to handle template parsing, hence the need for custom parsing code. Templates can
         * consist of either an {@link String} or a JSON object. If a JSON object is discovered
         * then the content type option must also be saved as a compiler option.
         */
        private void setCode(XContentParser parser) {
            try {
                if (parser.currentToken() == Token.START_OBJECT) {
                    // Object template: serialize it back to a string and remember its content type.
                    XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
                    code = builder.copyCurrentStructure(parser).bytes().utf8ToString();
                    options.put(Script.CONTENT_TYPE_OPTION, parser.contentType().mediaType());
                } else {
                    code = parser.text();
                }
            } catch (IOException exception) {
                throw new UncheckedIOException(exception);
            }
        }

        /**
         * Options may have already been added if a template was specified.
         * Appends the user-defined compiler options with the internal compiler options.
         *
         * @throws IllegalArgumentException if the user tries to set the reserved content-type option
         */
        private void setOptions(Map<String, String> options) {
            if (options.containsKey(Script.CONTENT_TYPE_OPTION)) {
                throw new IllegalArgumentException(Script.CONTENT_TYPE_OPTION + " cannot be user-specified");
            }
            this.options.putAll(options);
        }

        /**
         * Validates the parameters and creates an {@link StoredScriptSource}.
         */
        private StoredScriptSource build() {
            if (lang == null) {
                throw new IllegalArgumentException("must specify lang for stored script");
            } else if (lang.isEmpty()) {
                throw new IllegalArgumentException("lang cannot be empty");
            }

            if (code == null) {
                throw new IllegalArgumentException("must specify code for stored script");
            } else if (code.isEmpty()) {
                throw new IllegalArgumentException("code cannot be empty");
            }

            // At this point the only legal option is the internally-set content type.
            if (options.size() > 1 || options.size() == 1 && options.get(Script.CONTENT_TYPE_OPTION) == null) {
                throw new IllegalArgumentException("illegal compiler options [" + options + "] specified");
            }

            return new StoredScriptSource(lang, code, options);
        }
    }
    // Parses the inner object of the complex stored-script format into a Builder.
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>("stored script source", Builder::new);

    static {
        // Defines the fields necessary to parse a Script as XContent using an ObjectParser.
        PARSER.declareString(Builder::setLang, LANG_PARSE_FIELD);
        // "code" may be a plain string or an object template, so the raw parser is handed to setCode.
        PARSER.declareField(Builder::setCode, parser -> parser, CODE_PARSE_FIELD, ValueType.OBJECT_OR_STRING);
        PARSER.declareField(Builder::setOptions, XContentParser::mapStrings, OPTIONS_PARSE_FIELD, ValueType.OBJECT);
    }
/**
 * This will parse XContent into a {@link StoredScriptSource}. The following formats can be parsed:
 *
 * The simple script format with no compiler options or user-defined params:
 *
 * Example:
 * {@code
 * {"script": "return Math.log(doc.popularity) * 100;"}
 * }
 *
 * The above format requires the lang to be specified using the deprecated stored script namespace
 * (as a url parameter during a put request). See {@link ScriptMetaData} for more information about
 * the stored script namespaces.
 *
 * The complex script format using the new stored script namespace
 * where lang and code are required but options is optional:
 *
 * {@code
 * {
 *     "script" : {
 *         "lang" : "<lang>",
 *         "code" : "<code>",
 *         "options" : {
 *             "option0" : "<option0>",
 *             "option1" : "<option1>",
 *             ...
 *         }
 *     }
 * }
 * }
 *
 * Example:
 * {@code
 * {
 *     "script": {
 *         "lang" : "painless",
 *         "code" : "return Math.log(doc.popularity) * params.multiplier"
 *     }
 * }
 * }
 *
 * The simple template format:
 *
 * {@code
 * {
 *     "query" : ...
 * }
 * }
 *
 * The complex template format:
 *
 * {@code
 * {
 *     "template": {
 *         "query" : ...
 *     }
 * }
 * }
 *
 * Note that templates can be handled as both strings and complex JSON objects.
 * Also templates may be part of the 'code' parameter in a script. The Parser
 * can handle this case as well.
 *
 * @param lang An optional parameter to allow for use of the deprecated stored
 *             script namespace. This will be used to specify the language
 *             coming in as a url parameter from a request or for stored templates.
 * @param content The content from the request to be parsed as described above.
 * @param xContentType The content type (e.g. JSON, YAML) of {@code content}.
 * @return The parsed {@link StoredScriptSource}.
 * @throws ParsingException if the content is not structured as described above
 * @throws IllegalArgumentException if the deprecated namespace is used without a lang
 */
public static StoredScriptSource parse(String lang, BytesReference content, XContentType xContentType) {
    try (XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, content)) {
        Token token = parser.nextToken();
        if (token != Token.START_OBJECT) {
            throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [{]");
        }
        token = parser.nextToken();
        if (token != Token.FIELD_NAME) {
            // FIX: the message previously dropped the closing "]" after the token and around the expected names.
            throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [" +
                SCRIPT_PARSE_FIELD.getPreferredName() + ", " + TEMPLATE_PARSE_FIELD.getPreferredName() + "]");
        }
        String name = parser.currentName();
        if (SCRIPT_PARSE_FIELD.getPreferredName().equals(name)) {
            token = parser.nextToken();
            if (token == Token.VALUE_STRING) {
                // "script": "<code>" is the deprecated form: lang must come in via the url parameter.
                if (lang == null) {
                    throw new IllegalArgumentException(
                        "must specify lang as a url parameter when using the deprecated stored script namespace");
                }
                return new StoredScriptSource(lang, parser.text(), Collections.emptyMap());
            } else if (token == Token.START_OBJECT) {
                if (lang == null) {
                    // New namespace: delegate to the ObjectParser which reads lang/code/options.
                    return PARSER.apply(parser, null).build();
                } else {
                    // Deprecated namespace with an object body: treat the whole object as template code,
                    // preserving the original content type so it can be compiled correctly later.
                    try (XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType())) {
                        builder.copyCurrentStructure(parser);
                        return new StoredScriptSource(lang, builder.string(),
                            Collections.singletonMap(Script.CONTENT_TYPE_OPTION, parser.contentType().mediaType()));
                    }
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [{, <code>]");
            }
        } else {
            if (lang == null) {
                throw new IllegalArgumentException("unexpected stored script format");
            }
            if (TEMPLATE_PARSE_FIELD.getPreferredName().equals(name)) {
                token = parser.nextToken();
                if (token == Token.VALUE_STRING) {
                    // "template": "<code>" — store the raw string as the template source.
                    return new StoredScriptSource(lang, parser.text(),
                        Collections.singletonMap(Script.CONTENT_TYPE_OPTION, parser.contentType().mediaType()));
                }
            }
            // Template given as an object (or an arbitrary top-level field such as "query"):
            // re-serialize it so the stored code is exactly the JSON/YAML the user supplied.
            try (XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType())) {
                if (token != Token.START_OBJECT) {
                    builder.startObject();
                    builder.copyCurrentStructure(parser);
                    builder.endObject();
                } else {
                    builder.copyCurrentStructure(parser);
                }
                return new StoredScriptSource(lang, builder.string(),
                    Collections.singletonMap(Script.CONTENT_TYPE_OPTION, parser.contentType().mediaType()));
            }
        }
    } catch (IOException ioe) {
        throw new UncheckedIOException(ioe);
    }
}
/**
 * Parses XContent into a {@link StoredScriptSource}. Expects the object form:
 *
 * {@code
 * {
 *     "script" : {
 *         "lang" : "<lang>",
 *         "code" : "<code>",
 *         "options" : {
 *             "option0" : "<option0>",
 *             "option1" : "<option1>",
 *             ...
 *         }
 *     }
 * }
 * }
 *
 * The "code" parameter may also contain a template, including one written as a
 * complex JSON object.
 *
 * @param parser the parser positioned at the script object
 * @return the parsed {@link StoredScriptSource}
 */
public static StoredScriptSource fromXContent(XContentParser parser) throws IOException {
    Builder builder = PARSER.apply(parser, null);
    return builder.build();
}
/**
 * Required for {@link ScriptMetaData.ScriptMetadataDiff}. Uses
 * the {@link StoredScriptSource#StoredScriptSource(StreamInput)}
 * constructor.
 *
 * @param in the stream to read the diff from
 * @return a diff whose target values are deserialized via the stream constructor
 */
public static Diff<StoredScriptSource> readDiffFrom(StreamInput in) throws IOException {
    return readDiffFrom(StoredScriptSource::new, in);
}
// Language the script is written in; null only for the deprecated namespace form (see StoredScriptSource(String)).
private final String lang;
// Source code of the script; never null.
private final String code;
// Compiler options (wrapped unmodifiable by the main constructor); null only for the deprecated namespace form.
private final Map<String, String> options;
/**
 * Deprecated-namespace constructor used by {@link GetStoredScriptResponse}:
 * only the source code is known, so lang and options are left null.
 *
 * @param code the script source; must not be {@code null}
 */
public StoredScriptSource(String code) {
    this.code = Objects.requireNonNull(code);
    this.lang = null;
    this.options = null;
}
/**
* Standard StoredScriptSource constructor.
* @param lang The language to compile the script with. Must not be {@code null}.
* @param code The source code to compile with. Must not be {@code null}.
* @param options Compiler options to be compiled with. Must not be {@code null},
* use an empty {@link Map} to represent no options.
*/
public StoredScriptSource(String lang, String code, Map<String, String> options) {
this.lang = Objects.requireNonNull(lang);
this.code = Objects.requireNonNull(code);
this.options = Collections.unmodifiableMap(Objects.requireNonNull(options));
}
/**
 * Reads a {@link StoredScriptSource} from a stream. Version 5.3+ will read
 * all of the lang, code, and options parameters. For versions prior to 5.3,
 * only the code parameter will be read in as a bytes reference.
 *
 * @param in the stream to read from; its version gates the wire format
 */
public StoredScriptSource(StreamInput in) throws IOException {
    if (in.getVersion().onOrAfter(Version.V_5_3_0_UNRELEASED)) {
        this.lang = in.readString();
        this.code = in.readString();
        // readMap() is typed Map<String, Object>; the double cast narrows it to
        // Map<String, String>, which writeTo guarantees for this version.
        @SuppressWarnings("unchecked")
        Map<String, String> options = (Map<String, String>)(Map)in.readMap();
        this.options = options;
    } else {
        // Pre-5.3 wire format carried only the raw script bytes.
        this.lang = null;
        this.code = in.readBytesReference().utf8ToString();
        this.options = null;
    }
}
/**
 * Writes a {@link StoredScriptSource} to a stream. Version 5.3+ will write
 * all of the lang, code, and options parameters. For versions prior to 5.3,
 * only the code parameter will be written as a bytes reference.
 *
 * @param out the stream to write to; its version gates the wire format
 */
@Override
public void writeTo(StreamOutput out) throws IOException {
    if (out.getVersion().onOrAfter(Version.V_5_3_0_UNRELEASED)) {
        out.writeString(lang);
        out.writeString(code);
        // writeMap expects Map<String, Object>; the double cast widens the
        // String-valued options map, which is safe for writing.
        @SuppressWarnings("unchecked")
        Map<String, Object> options = (Map<String, Object>)(Map)this.options;
        out.writeMap(options);
    } else {
        // Pre-5.3 wire format carried only the raw script bytes.
        out.writeBytesReference(new BytesArray(code));
    }
}
/**
 * Writes this {@link StoredScriptSource} out as XContent in the form:
 *
 * {@code
 * {
 *     "script" : {
 *         "lang" : "<lang>",
 *         "code" : "<code>",
 *         "options" : {
 *             "option0" : "<option0>",
 *             "option1" : "<option1>",
 *             ...
 *         }
 *     }
 * }
 * }
 *
 * Note that the 'code' parameter can also handle templates written as complex JSON.
 *
 * @param builder the builder to append to
 * @param params  ignored
 * @return the same builder, for chaining
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    // XContentBuilder is fluent, so the fields are emitted as one chain
    // in the documented order: lang, code, options.
    builder.startObject()
        .field(LANG_PARSE_FIELD.getPreferredName(), lang)
        .field(CODE_PARSE_FIELD.getPreferredName(), code)
        .field(OPTIONS_PARSE_FIELD.getPreferredName(), options)
        .endObject();
    return builder;
}
// This object renders a complete XContent object (with its own start/end braces),
// not a fragment to be embedded in an enclosing object.
@Override
public boolean isFragment() {
    return false;
}
/**
 * @return The language used for compiling this script; may be {@code null}
 *         when constructed via the deprecated namespace.
 */
public String getLang() {
    return lang;
}
/**
 * @return The code used for compiling this script; never {@code null}.
 */
public String getCode() {
    return code;
}
/**
 * @return The compiler options used for this script (unmodifiable); may be
 *         {@code null} when constructed via the deprecated namespace.
 */
public Map<String, String> getOptions() {
    return options;
}
/**
 * Two sources are equal when lang, code, and options are all equal
 * (null-safe comparison on each field).
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    StoredScriptSource that = (StoredScriptSource)o;
    // Objects.equals replaces the hand-rolled null-guard chain; behavior is identical.
    return Objects.equals(lang, that.lang)
        && Objects.equals(code, that.code)
        && Objects.equals(options, that.options);
}
/**
 * Hash code combining lang, code, and options with the conventional
 * 31-multiplier scheme; consistent with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
    int hash = (lang == null) ? 0 : lang.hashCode();
    hash = 31 * hash + ((code == null) ? 0 : code.hashCode());
    hash = 31 * hash + ((options == null) ? 0 : options.hashCode());
    return hash;
}
/**
 * Debug representation listing lang, code, and options.
 */
@Override
public String toString() {
    // %s renders null as "null", matching the behavior of string concatenation.
    return String.format("StoredScriptSource{lang='%s', code='%s', options=%s}", lang, code, options);
}
}
| apache-2.0 |
markfisher/spring-cloud-data | spring-cloud-dataflow-server-core/src/test/java/org/springframework/cloud/dataflow/server/controller/StreamControllerTests.java | 50347 | /*
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.server.controller;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.dataflow.core.BindingPropertyKeys;
import org.springframework.cloud.dataflow.core.StreamAppDefinition;
import org.springframework.cloud.dataflow.core.StreamDefinition;
import org.springframework.cloud.dataflow.core.StreamPropertyKeys;
import org.springframework.cloud.dataflow.server.config.apps.CommonApplicationProperties;
import org.springframework.cloud.dataflow.server.configuration.TestDependencies;
import org.springframework.cloud.dataflow.server.repository.DeploymentIdRepository;
import org.springframework.cloud.dataflow.server.repository.DeploymentKey;
import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository;
import org.springframework.cloud.dataflow.server.stream.StreamDeployerUtil;
import org.springframework.cloud.deployer.resource.maven.MavenResource;
import org.springframework.cloud.deployer.spi.app.AppDeployer;
import org.springframework.cloud.deployer.spi.app.AppInstanceStatus;
import org.springframework.cloud.deployer.spi.app.AppStatus;
import org.springframework.cloud.deployer.spi.app.DeploymentState;
import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest;
import org.springframework.http.MediaType;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* @author Mark Fisher
* @author Ilayaperumal Gopinathan
* @author Janne Valkealahti
* @author Gunnar Hillert
* @author Glenn Renfro
* @author Andy Clement
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = TestDependencies.class)
@DirtiesContext(classMode = ClassMode.AFTER_EACH_TEST_METHOD)
public class StreamControllerTests {
// Repository holding the stream definitions created by each test; wiped in tearDown().
@Autowired
private StreamDefinitionRepository repository;
// Maps stream/app deployment keys to deployer ids.
@Autowired
private DeploymentIdRepository deploymentIdRepository;
// Built per test in setupMocks() from the web application context.
private MockMvc mockMvc;
@Autowired
private WebApplicationContext wac;
// Mock deployer (see TestDependencies); stubbed in setupMocks().
@Autowired
private AppDeployer appDeployer;
// Common (global) application properties applied to deployed apps.
@Autowired
private CommonApplicationProperties appsProperties;
// Builds a MockMvc bound to the web application context (defaulting every request
// to accept JSON) and stubs the mock deployer so any deployment "succeeds" with a fixed id.
@Before
public void setupMocks() {
    this.mockMvc = MockMvcBuilders.webAppContextSetup(wac)
            .defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build();
    when(appDeployer.deploy(any(AppDeploymentRequest.class))).thenReturn("testID");
}
// Clears all stream definitions after each test and verifies the wipe took effect,
// so tests remain independent of one another.
@After
public void tearDown() {
    repository.deleteAll();
    assertEquals(0, repository.count());
}
// The controller must reject construction with a null stream service.
@Test(expected = IllegalArgumentException.class)
public void testConstructorMissingStreamService() {
    new StreamDefinitionController(null);
}
// Saving "time | log" should create one definition whose two apps are wired
// together via the expected binding destination/group properties.
@Test
public void testSave() throws Exception {
    assertEquals(0, repository.count());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", "time | log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(1, repository.count());
    StreamDefinition myStream = repository.findOne("myStream");
    assertEquals("time | log", myStream.getDslText());
    assertEquals("myStream", myStream.getName());
    assertEquals(2, myStream.getAppDefinitions().size());
    StreamAppDefinition timeDefinition = myStream.getAppDefinitions().get(0);
    StreamAppDefinition logDefinition = myStream.getAppDefinitions().get(1);
    // Source output is bound to the "myStream.time" destination with the stream name as required group.
    assertEquals(2, timeDefinition.getProperties().size());
    assertEquals("myStream.time", timeDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
    assertEquals("myStream", timeDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS));
    // Sink input consumes from the same destination within the stream's consumer group.
    assertEquals(2, logDefinition.getProperties().size());
    assertEquals("myStream.time", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
    assertEquals("myStream", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
}
// The "related" endpoint (non-nested) should return the stream itself plus the
// streams that tap it directly, but not taps on unrelated streams.
@Test
public void testFindRelatedStreams() throws Exception {
    assertEquals(0, repository.count());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream1").param("definition", "time | log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myAnotherStream1")
            .param("definition", "time | log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream2").param("definition", ":myStream1 > log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream3")
            .param("definition", ":myStream1.time > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream4")
            .param("definition", ":myAnotherStream1 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    assertEquals(5, repository.count());
    String response = mockMvc
            .perform(get("/streams/definitions/myStream1/related").accept(MediaType.APPLICATION_JSON)).andReturn()
            .getResponse().getContentAsString();
    // Expect myStream1 plus its two direct taps (myStream2, myStream3) — 3 results.
    assertTrue(response.contains(":myStream1 > log"));
    assertTrue(response.contains(":myStream1.time > log"));
    assertTrue(response.contains("time | log"));
    assertTrue(response.contains("\"totalElements\":3"));
}
// Regression test for gh-2150: a stream that (incorrectly) taps itself must not
// send the nested "related" traversal into infinite recursion.
@Test
public void testFindRelatedStreams_gh2150() throws Exception {
    assertEquals(0, repository.count());
    // Bad definition, recursive reference
    mockMvc.perform(post("/streams/definitions/").param("name", "mapper")
            .param("definition", ":mapper.time > log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(1, repository.count());
    String response = mockMvc
            .perform(get("/streams/definitions/mapper/related").param("nested", "true").accept(MediaType.APPLICATION_JSON)).andReturn()
            .getResponse().getContentAsString();
    assertTrue(response.contains(":mapper.time > log"));
    assertTrue(response.contains("\"totalElements\":1"));
}
// Regression test for gh-2150: two streams that tap each other form a cycle;
// the nested traversal must terminate and report both, exactly once each.
@Test
public void testFindRelatedStreams2_gh2150() throws Exception {
    // bad streams, recursively referencing via each other
    mockMvc.perform(post("/streams/definitions/").param("name", "foo")
            .param("definition", ":bar.time > log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "bar")
            .param("definition", ":foo.time > log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(2, repository.count());
    String response = mockMvc
            .perform(get("/streams/definitions/foo/related").param("nested", "true").accept(MediaType.APPLICATION_JSON)).andReturn()
            .getResponse().getContentAsString();
    assertTrue(response.contains(":foo.time > log"));
    assertTrue(response.contains(":bar.time > log"));
    assertTrue(response.contains("\"totalElements\":2"));
}
// A non-boolean value for the "nested" query parameter should produce a 4xx
// client error rather than a server error.
@Test
public void testMethodArgumentTypeMismatchFailure() throws Exception {
    mockMvc.perform(get("/streams/definitions/myStream1/related").param("nested", "in-correct-value")
            .accept(MediaType.APPLICATION_JSON)).andExpect(status().is4xxClientError());
}
// With nested=true the "related" endpoint must follow tap chains transitively
// (taps of taps), and sensitive properties (password/secret) must be masked
// in the returned DSL.
@Test
public void testFindRelatedAndNestedStreams() throws Exception {
    assertEquals(0, repository.count());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream1").param("definition", "time | log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myAnotherStream1")
            .param("definition", "time | log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream2").param("definition", ":myStream1 > log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "TapOnmyStream2")
            .param("definition", ":myStream2 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream3")
            .param("definition", ":myStream1.time > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "TapOnMyStream3")
            .param("definition", ":myStream3 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "MultipleNestedTaps")
            .param("definition", ":TapOnMyStream3 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream4")
            .param("definition", ":myAnotherStream1 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream5").param("definition", "time | log --secret=foo")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream6")
            .param("definition", ":myStream5.time > log --password=bar").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    assertEquals(10, repository.count());
    // myStream1's nested closure: itself plus the full tap chain — 6 results.
    String response = mockMvc
            .perform(get("/streams/definitions/myStream1/related?nested=true").accept(MediaType.APPLICATION_JSON))
            .andReturn().getResponse().getContentAsString();
    assertTrue(response.contains(":myStream1 > log"));
    assertTrue(response.contains(":myStream1.time > log"));
    assertTrue(response.contains("time | log"));
    assertTrue(response.contains("\"totalElements\":6"));
    // Password/secret values must be rendered masked as '******'.
    response = mockMvc
            .perform(get("/streams/definitions/myStream5/related?nested=true").accept(MediaType.APPLICATION_JSON))
            .andReturn().getResponse().getContentAsString();
    assertTrue(response.contains(":myStream5.time > log --password='******'"));
    assertTrue(response.contains("time | log --secret='******'"));
    assertTrue(response.contains("\"totalElements\":2"));
    String response2 = mockMvc.perform(
            get("/streams/definitions/myAnotherStream1/related?nested=true").accept(MediaType.APPLICATION_JSON))
            .andReturn().getResponse().getContentAsString();
    assertTrue(response2.contains(":myAnotherStream1 > log"));
    assertTrue(response2.contains("time | log"));
    assertTrue(response2.contains("\"totalElements\":2"));
    String response3 = mockMvc
            .perform(get("/streams/definitions/myStream2/related?nested=true").accept(MediaType.APPLICATION_JSON))
            .andReturn().getResponse().getContentAsString();
    assertTrue(response3.contains(":myStream1 > log"));
    assertTrue(response3.contains(":myStream2 > log"));
    assertTrue(response3.contains("\"totalElements\":2"));
}
// Listing all definitions must return every stored stream with sensitive
// properties (password) masked and quote styles normalized. Cleanup of the
// original: four duplicated assertTrue checks removed, and an assertion for
// the previously-unverified MultipleNestedTaps definition added.
@Test
public void testFindAll() throws Exception {
    assertEquals(0, repository.count());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream1").param("definition", "time --password=foo| log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream1A").param("definition", "time --foo=bar| log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myAnotherStream1")
            .param("definition", "time | log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream2").param("definition", ":myStream1 > log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "TapOnmyStream2")
            .param("definition", ":myStream2 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream3")
            .param("definition", ":myStream1.time > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "TapOnMyStream3")
            .param("definition", ":myStream3 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "MultipleNestedTaps")
            .param("definition", ":TapOnMyStream3 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream4")
            .param("definition", ":myAnotherStream1 > log").accept(MediaType.APPLICATION_JSON)).andDo(print())
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions")
            .param("name", "timelogSingleTick")
            .param("definition", "time --format='YYYY MM DD' | log")
            .param("deploy", "false"))
            .andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions").param("name", "timelogDoubleTick")
            .param("definition", "a: time --format=\"YYYY MM DD\" | log")
            .param("deploy", "false")).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "twoPassword")
            .param("definition", "time --password='foo'| log --password=bar")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "nameChannelPassword")
            .param("definition", "time --password='foo'> :foobar")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "twoParam").param("definition", "time --password=foo --arg=foo | log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    mockMvc.perform(post("/streams/definitions/").param("name", "twoPipeInQuotes").param("definition", "time --password='fo|o' --arg=bar | log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(15, repository.count());
    String response = mockMvc
            .perform(get("/streams/definitions/").accept(MediaType.APPLICATION_JSON))
            .andReturn().getResponse().getContentAsString();
    // One assertion per distinct expected DSL rendering; passwords appear as '******'.
    assertTrue(response.contains("time --password='******' | log"));
    assertTrue(response.contains("time --foo=bar | log"));
    assertTrue(response.contains("time | log"));
    assertTrue(response.contains(":myStream1 > log"));
    assertTrue(response.contains(":myStream1.time > log"));
    assertTrue(response.contains(":myStream2 > log"));
    assertTrue(response.contains(":myStream3 > log"));
    assertTrue(response.contains(":TapOnMyStream3 > log"));
    assertTrue(response.contains(":myAnotherStream1 > log"));
    assertTrue(response.contains("time --format='YYYY MM DD' | log"));
    assertTrue(response.contains("a: time --format='YYYY MM DD' | log"));
    assertTrue(response.contains("time --password='******' | log --password='******'"));
    assertTrue(response.contains("time --password='******' > :foobar"));
    assertTrue(response.contains("time --password='******' --arg=foo | log"));
    assertTrue(response.contains("time --password='******' --arg=bar | log"));
    assertTrue(response.contains("\"totalElements\":15"));
}
// Saving a stream whose apps are not in the registry must fail with a 400 and a
// message naming each unresolvable app with its type.
@Test
public void testSaveInvalidAppDefinitions() throws Exception {
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", "foo | bar")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isBadRequest())
            .andExpect(jsonPath("$[0].logref", is("InvalidStreamDefinitionException")))
            .andExpect(jsonPath("$[0].message",
                    is("Application name 'foo' with type 'source' does not exist in the " + "app "
                            + "registry.\nApplication name 'bar' with type 'sink' does not exist in the app "
                            + "registry.")));
}
// A syntactically invalid DSL (here, a property name starting with '.') must be
// rejected with a 400 carrying the parser's positional error message.
@Test
public void testSaveInvalidAppDefinitionsDueToParseException() throws Exception {
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream")
            .param("definition", "foo --.spring.cloud.stream.metrics.properties=spring* | bar")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isBadRequest())
            .andExpect(jsonPath("$[0].logref", is("InvalidStreamDefinitionException"))).andExpect(
                    jsonPath("$[0].message", startsWith("111E:(pos 6): Unexpected token. Expected '.' but was")));
}
// Saving a stream whose name already exists must return 409 Conflict and leave
// the existing definition untouched.
@Test
public void testSaveDuplicate() throws Exception {
    repository.save(new StreamDefinition("myStream", "time | log"));
    assertEquals(1, repository.count());
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", "time | log")
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isConflict());
    assertEquals(1, repository.count());
}
// App-level parameters in the DSL must be captured as properties on the
// corresponding app definition, and the original DSL text preserved.
@Test
public void testSaveWithParameters() throws Exception {
    assertEquals(0, repository.count());
    String definition = "time --fixedDelay=500 --timeUnit=milliseconds | log";
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", definition)
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(1, repository.count());
    StreamDefinition myStream = repository.findOne("myStream");
    StreamAppDefinition timeDefinition = myStream.getAppDefinitions().get(0);
    StreamAppDefinition logDefinition = myStream.getAppDefinitions().get(1);
    assertEquals("time", timeDefinition.getName());
    assertEquals("log", logDefinition.getName());
    assertEquals("500", timeDefinition.getProperties().get("fixedDelay"));
    assertEquals("milliseconds", timeDefinition.getProperties().get("timeUnit"));
    assertEquals(definition, myStream.getDslText());
    assertEquals("myStream", myStream.getName());
}
// A three-app stream (source | processor | sink) must wire the processor with
// both input and output bindings, chaining source -> processor -> sink.
@Test
public void testStreamWithProcessor() throws Exception {
    assertEquals(0, repository.count());
    String definition = "time | filter | log";
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", definition)
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(1, repository.count());
    StreamDefinition myStream = repository.findOne("myStream");
    assertEquals(definition, myStream.getDslText());
    assertEquals("myStream", myStream.getName());
    assertEquals(3, myStream.getAppDefinitions().size());
    StreamAppDefinition timeDefinition = myStream.getAppDefinitions().get(0);
    StreamAppDefinition filterDefinition = myStream.getAppDefinitions().get(1);
    StreamAppDefinition logDefinition = myStream.getAppDefinitions().get(2);
    // Source: output binding only.
    assertEquals(2, timeDefinition.getProperties().size());
    assertEquals("myStream.time", timeDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
    assertEquals("myStream", timeDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS));
    // Processor: consumes from the source's destination, produces to its own.
    assertEquals(4, filterDefinition.getProperties().size());
    assertEquals("myStream.time", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
    assertEquals("myStream", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
    assertEquals("myStream.filter", filterDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
    assertEquals("myStream", filterDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS));
    // Sink: input binding only, fed by the processor.
    assertEquals(2, logDefinition.getProperties().size());
    assertEquals("myStream.filter", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
    assertEquals("myStream", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
}
// A named source destination (":foo > log") must bind the single sink app's
// input directly to that destination.
@Test
public void testSourceDestinationWithSingleApp() throws Exception {
    assertEquals(0, repository.count());
    String definition = ":foo > log";
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", definition)
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(1, repository.count());
    StreamDefinition myStream = repository.findOne("myStream");
    assertEquals(definition, myStream.getDslText());
    assertEquals("myStream", myStream.getName());
    assertEquals(1, myStream.getAppDefinitions().size());
    StreamAppDefinition logDefinition = myStream.getAppDefinitions().get(0);
    assertEquals(2, logDefinition.getProperties().size());
    // Input comes from the named destination "foo" within the stream's consumer group.
    assertEquals("foo", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
    assertEquals("myStream", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
}
// A named source destination feeding a two-app pipeline (":foo > filter | log"):
// the first app reads from "foo" and forwards to the second via an internal destination.
@Test
public void testSourceDestinationWithTwoApps() throws Exception {
    assertEquals(0, repository.count());
    String definition = ":foo > filter | log";
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", definition)
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(1, repository.count());
    StreamDefinition myStream = repository.findOne("myStream");
    assertEquals(definition, myStream.getDslText());
    assertEquals("myStream", myStream.getName());
    assertEquals(2, myStream.getAppDefinitions().size());
    StreamAppDefinition filterDefinition = myStream.getAppDefinitions().get(0);
    assertEquals(4, filterDefinition.getProperties().size());
    assertEquals("foo", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
    assertEquals("myStream", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
    assertEquals("myStream.filter", filterDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
    assertEquals("myStream", filterDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS));
    StreamAppDefinition logDefinition = myStream.getAppDefinitions().get(1);
    assertEquals(2, logDefinition.getProperties().size());
    assertEquals("myStream.filter", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
    assertEquals("myStream", logDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
}
// A named sink destination ("time > :foo") must bind the single source app's
// output directly to that destination (no required-groups property is set).
@Test
public void testSinkDestinationWithSingleApp() throws Exception {
    assertEquals(0, repository.count());
    String definition = "time > :foo";
    mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", definition)
            .accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
    assertEquals(1, repository.count());
    StreamDefinition myStream = repository.findOne("myStream");
    assertEquals(definition, myStream.getDslText());
    assertEquals("myStream", myStream.getName());
    assertEquals(1, myStream.getAppDefinitions().size());
    StreamAppDefinition timeDefinition = myStream.getAppDefinitions().get(0);
    assertEquals(1, timeDefinition.getProperties().size());
    assertEquals("foo", timeDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
}
/**
 * Verifies that a two-app stream writing to a named sink destination
 * ({@code time | filter > :foo}) wires time to filter through the internal
 * {@code myStream.time} destination, while the filter's output goes to the external "foo".
 */
@Test
public void testSinkDestinationWithTwoApps() throws Exception {
assertEquals(0, repository.count());
String definition = "time | filter > :foo";
mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", definition)
.accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
assertEquals(1, repository.count());
StreamDefinition myStream = repository.findOne("myStream");
assertEquals(definition, myStream.getDslText());
assertEquals("myStream", myStream.getName());
assertEquals(2, myStream.getAppDefinitions().size());
// Time: produces to the internal destination consumed by the filter.
StreamAppDefinition timeDefinition = myStream.getAppDefinitions().get(0);
assertEquals(2, timeDefinition.getProperties().size());
assertEquals("myStream.time", timeDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
assertEquals("myStream", timeDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_REQUIRED_GROUPS));
// Filter: consumes the internal destination and produces to the external "foo".
StreamAppDefinition filterDefinition = myStream.getAppDefinitions().get(1);
assertEquals(3, filterDefinition.getProperties().size());
assertEquals("myStream.time", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
assertEquals("myStream", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
assertEquals("foo", filterDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
}
/**
 * Verifies a single-app stream bridging two named destinations ({@code :bar > filter > :foo}),
 * created with {@code deploy=true}: the definition is saved, the filter app is wired to both
 * external destinations, and exactly one deployment request is issued for the filter app.
 */
@Test
public void testDestinationsOnBothSides() throws Exception {
assertEquals(0, repository.count());
String definition = ":bar > filter > :foo";
// Stub the deployer status so the immediate deployment can proceed.
AppStatus status = mock(AppStatus.class);
when(status.getState()).thenReturn(DeploymentState.unknown);
when(appDeployer.status("testID")).thenReturn(status);
mockMvc.perform(post("/streams/definitions/").param("name", "myStream").param("definition", definition)
.param("deploy", "true").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isCreated());
assertEquals(1, repository.count());
StreamDefinition myStream = repository.findOne("myStream");
assertEquals(definition, myStream.getDslText());
assertEquals("myStream", myStream.getName());
assertEquals(1, myStream.getAppDefinitions().size());
StreamAppDefinition filterDefinition = myStream.getAppDefinitions().get(0);
assertEquals(3, filterDefinition.getProperties().size());
assertEquals("bar", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_DESTINATION));
assertEquals("myStream", filterDefinition.getProperties().get(BindingPropertyKeys.INPUT_GROUP));
assertEquals("foo", filterDefinition.getProperties().get(BindingPropertyKeys.OUTPUT_DESTINATION));
// Capture the single deployment request triggered by deploy=true.
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer).deploy(captor.capture());
AppDeploymentRequest request = captor.getValue();
assertThat(request.getDefinition().getName(), is("filter"));
// The resource resolves to the registered Maven artifact for the filter processor.
assertThat(request.getResource(), instanceOf(MavenResource.class));
assertThat(((MavenResource) request.getResource()).getArtifactId(), is("filter-processor-rabbit"));
}
/**
 * Verifies that DELETE on a saved stream definition returns 200 and removes the
 * definition from the repository. Deployment ids are registered for each app so the
 * controller can query (stubbed) deployer status during destroy.
 */
@Test
public void testDestroyStream() throws Exception {
StreamDefinition streamDefinition1 = new StreamDefinition("myStream", "time | log");
repository.save(streamDefinition1);
// Register a deployment id per app so status lookups resolve.
for (StreamAppDefinition appDefinition : streamDefinition1.getAppDefinitions()) {
deploymentIdRepository.save(DeploymentKey.forStreamAppDefinition(appDefinition),
streamDefinition1.getName() + "." + appDefinition.getName());
}
assertEquals(1, repository.count());
AppStatus status = mock(AppStatus.class);
when(status.getState()).thenReturn(DeploymentState.unknown);
when(appDeployer.status("myStream.time")).thenReturn(status);
when(appDeployer.status("myStream.log")).thenReturn(status);
mockMvc.perform(delete("/streams/definitions/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isOk());
// The definition must be gone after a successful destroy.
assertEquals(0, repository.count());
}
/**
 * Verifies that destroying one stream removes only that definition and leaves
 * other saved stream definitions untouched.
 */
@Test
public void testDestroySingleStream() throws Exception {
StreamDefinition streamDefinition1 = new StreamDefinition("myStream", "time | log");
StreamDefinition streamDefinition2 = new StreamDefinition("myStream1", "time | log");
repository.save(streamDefinition1);
repository.save(streamDefinition2);
// Only the stream being destroyed needs deployment ids registered.
for (StreamAppDefinition appDefinition : streamDefinition1.getAppDefinitions()) {
deploymentIdRepository.save(DeploymentKey.forStreamAppDefinition(appDefinition),
streamDefinition1.getName() + "." + appDefinition.getName());
}
assertEquals(2, repository.count());
AppStatus status = mock(AppStatus.class);
when(status.getState()).thenReturn(DeploymentState.unknown);
when(appDeployer.status("myStream.time")).thenReturn(status);
when(appDeployer.status("myStream.log")).thenReturn(status);
mockMvc.perform(delete("/streams/definitions/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isOk());
// "myStream1" must survive the destroy of "myStream".
assertEquals(1, repository.count());
}
/**
 * Verifies that GET on a single stream definition returns its name and the
 * original (unredacted) DSL text as JSON.
 */
@Test
public void testDisplaySingleStream() throws Exception {
StreamDefinition streamDefinition1 = new StreamDefinition("myStream", "time | log");
for (StreamAppDefinition appDefinition : streamDefinition1.getAppDefinitions()) {
deploymentIdRepository.save(DeploymentKey.forStreamAppDefinition(appDefinition),
streamDefinition1.getName() + "." + appDefinition.getName());
}
repository.save(streamDefinition1);
assertEquals(1, repository.count());
AppStatus status = mock(AppStatus.class);
when(status.getState()).thenReturn(DeploymentState.unknown);
when(appDeployer.status("myStream.time")).thenReturn(status);
when(appDeployer.status("myStream.log")).thenReturn(status);
// The response JSON must echo both the stream name and its DSL.
mockMvc.perform(get("/streams/definitions/myStream").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk()).andExpect(content().json("{name: \"myStream\"}"))
.andExpect(content().json("{dslText: \"time | log\"}"));
}
/**
 * Verifies that sensitive property values in the DSL (here {@code --secret=foo}) are
 * redacted to {@code '******'} when a stream definition is displayed.
 */
@Test
public void testDisplaySingleStreamWithRedaction() throws Exception {
StreamDefinition streamDefinition1 = new StreamDefinition("myStream", "time --secret=foo | log");
for (StreamAppDefinition appDefinition : streamDefinition1.getAppDefinitions()) {
deploymentIdRepository.save(DeploymentKey.forStreamAppDefinition(appDefinition),
streamDefinition1.getName() + "." + appDefinition.getName());
}
repository.save(streamDefinition1);
assertEquals(1, repository.count());
AppStatus status = mock(AppStatus.class);
when(status.getState()).thenReturn(DeploymentState.unknown);
when(appDeployer.status("myStream.time")).thenReturn(status);
when(appDeployer.status("myStream.log")).thenReturn(status);
// The returned DSL must hide the secret value but keep the rest of the definition intact.
mockMvc.perform(get("/streams/definitions/myStream").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk()).andExpect(content().json("{name: \"myStream\"}"))
.andExpect(content().json("{dslText: \"time --secret='******' | log\"}"));
}
/**
 * Verifies that deleting a stream definition that was never saved returns 404
 * and leaves the (empty) repository unchanged.
 */
@Test
public void testDestroyStreamNotFound() throws Exception {
mockMvc.perform(delete("/streams/definitions/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isNotFound());
assertEquals(0, repository.count());
}
/**
 * Verifies that deploying a two-app stream issues exactly two deployment requests,
 * and that apps are deployed downstream-first (log before time) — presumably so the
 * consumer is ready before the producer starts emitting; confirm against the deployer impl.
 */
@Test
public void testDeploy() throws Exception {
repository.save(new StreamDefinition("myStream", "time | log"));
mockMvc.perform(post("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
List<AppDeploymentRequest> requests = captor.getAllValues();
assertEquals(2, requests.size());
// The sink (log) is deployed first, the source (time) second.
AppDeploymentRequest logRequest = requests.get(0);
assertThat(logRequest.getDefinition().getName(), is("log"));
AppDeploymentRequest timeRequest = requests.get(1);
assertThat(timeRequest.getDefinition().getName(), is("time"));
}
/**
 * Verifies that short-form DSL properties (e.g. {@code --level=WARN}) are expanded to their
 * fully-qualified app property names ({@code log.level}, {@code trigger.fixed-delay}) in the
 * deployment requests, and that the short-form keys themselves are not propagated.
 */
@Test
public void testStreamWithShortformProperties() throws Exception {
repository.save(new StreamDefinition("myStream", "time --fixed-delay=2 | log --level=WARN"));
mockMvc.perform(post("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
List<AppDeploymentRequest> requests = captor.getAllValues();
assertEquals(2, requests.size());
AppDeploymentRequest logRequest = requests.get(0);
assertThat(logRequest.getDefinition().getName(), is("log"));
Map<String, String> logAppProps = logRequest.getDefinition().getProperties();
// Short form "level" becomes "log.level"; the bare key must be absent.
assertEquals("WARN", logAppProps.get("log.level"));
assertNull(logAppProps.get("level"));
AppDeploymentRequest timeRequest = requests.get(1);
assertThat(timeRequest.getDefinition().getName(), is("time"));
Map<String, String> timeAppProps = timeRequest.getDefinition().getProperties();
// Short form "fixed-delay" becomes "trigger.fixed-delay".
assertEquals("2", timeAppProps.get("trigger.fixed-delay"));
assertNull(timeAppProps.get("fixed-delay"));
}
/**
 * Verifies that deployment-time {@code app.<name>.*} properties override the values
 * declared in the stream DSL (fixed-delay 2 -> 4, level WARN -> ERROR), and that a
 * partition key expression on the producer marks the consumer side as indexed.
 */
@Test
public void testDeployWithAppPropertiesOverride() throws Exception {
repository.save(new StreamDefinition("myStream", "time --fixed-delay=2 | log --level=WARN"));
Map<String, String> properties = new HashMap<>();
properties.put("app.time.fixed-delay", "4");
properties.put("app.log.level", "ERROR");
properties.put("app.time.producer.partitionKeyExpression", "payload");
mockMvc.perform(post("/streams/deployments/myStream").content(new ObjectMapper().writeValueAsBytes(properties))
.contentType(MediaType.APPLICATION_JSON)).andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
List<AppDeploymentRequest> requests = captor.getAllValues();
assertEquals(2, requests.size());
AppDeploymentRequest logRequest = requests.get(0);
assertThat(logRequest.getDefinition().getName(), is("log"));
Map<String, String> logAppProps = logRequest.getDefinition().getProperties();
// Partitioned upstream producer implies the downstream app is deployed indexed.
assertEquals("true", logRequest.getDeploymentProperties().get(AppDeployer.INDEXED_PROPERTY_KEY));
assertEquals("ERROR", logAppProps.get("log.level"));
AppDeploymentRequest timeRequest = requests.get(1);
assertThat(timeRequest.getDefinition().getName(), is("time"));
Map<String, String> timeAppProps = timeRequest.getDefinition().getProperties();
assertEquals("4", timeAppProps.get("trigger.fixed-delay"));
}
/**
 * Verifies that {@code app.<label>.*} deployment properties are resolved against app
 * labels ({@code a:}, {@code b:}) rather than app names, overriding the DSL-declared values.
 */
@Test
public void testDeployWithAppPropertiesOverrideWithLabel() throws Exception {
repository.save(new StreamDefinition("myStream", "a: time --fixed-delay=2 | b: log --level=WARN"));
Map<String, String> properties = new HashMap<>();
// Keys use the labels "a" and "b", not the app names "time" and "log".
properties.put("app.a.fixed-delay", "4");
properties.put("app.b.level", "ERROR");
mockMvc.perform(post("/streams/deployments/myStream").content(new ObjectMapper().writeValueAsBytes(properties))
.contentType(MediaType.APPLICATION_JSON)).andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
List<AppDeploymentRequest> requests = captor.getAllValues();
assertEquals(2, requests.size());
// Deployed definitions carry the labels as their names.
AppDeploymentRequest logRequest = requests.get(0);
assertThat(logRequest.getDefinition().getName(), is("b"));
Map<String, String> logAppProps = logRequest.getDefinition().getProperties();
assertEquals("ERROR", logAppProps.get("log.level"));
AppDeploymentRequest timeRequest = requests.get(1);
assertThat(timeRequest.getDefinition().getName(), is("a"));
Map<String, String> timeAppProps = timeRequest.getDefinition().getProperties();
assertEquals("4", timeAppProps.get("trigger.fixed-delay"));
}
/**
 * Verifies that attempting to deploy a stream that is already deployed is rejected
 * with HTTP 409 (Conflict).
 */
@Test
public void testDuplicateDeploy() throws Exception {
repository.save(new StreamDefinition("myStream", "time | log"));
// First deployment succeeds and issues one request per app.
mockMvc.perform(post("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
// Make subsequent status queries report the stream as deployed.
when(appDeployer.status("testID")).thenReturn(AppStatus.of("testID").with(new AppInstanceStatus() {
@Override
public String getId() {
return "testID";
}
@Override
public DeploymentState getState() {
// Use the enum constant directly; valueOf("deployed") defers a typo to runtime.
return DeploymentState.deployed;
}
@Override
public Map<String, String> getAttributes() {
return null;
}
}).build());
// Second deployment of the already-deployed stream must conflict.
mockMvc.perform(post("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isConflict());
}
/**
 * Verifies that a deploy request for a stream that is still in the process of being
 * deployed (state {@code deploying}) is also rejected with HTTP 409 (Conflict).
 */
@Test
public void testDuplicateDeployWhenStreamIsBeingDeployed() throws Exception {
repository.save(new StreamDefinition("myStream", "time | log"));
mockMvc.perform(post("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
// Make subsequent status queries report the stream as still deploying.
when(appDeployer.status("testID")).thenReturn(AppStatus.of("testID").with(new AppInstanceStatus() {
@Override
public String getId() {
return "testID";
}
@Override
public DeploymentState getState() {
// Use the enum constant directly; valueOf("deploying") defers a typo to runtime.
return DeploymentState.deploying;
}
@Override
public Map<String, String> getAttributes() {
return null;
}
}).build());
// A concurrent deploy attempt must conflict while deployment is in flight.
mockMvc.perform(post("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isConflict());
}
/**
 * Verifies that undeploying a stream that was never deployed returns 200 and never
 * invokes the deployer.
 */
@Test
public void testUndeployNonDeployedStream() throws Exception {
repository.save(new StreamDefinition("myStream", "time | log"));
mockMvc.perform(delete("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isOk());
// NOTE(review): capturing on a times(0) verification is pointless — the captor can
// never capture anything; verify(appDeployer, never()).undeploy(anyString()) would be clearer.
ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
verify(appDeployer, times(0)).undeploy(captor.capture());
}
/**
 * Verifies that the bulk undeploy endpoint returns 200 when no stream is deployed
 * and never invokes the deployer.
 */
@Test
public void testUndeployAllNonDeployedStream() throws Exception {
repository.save(new StreamDefinition("myStream1", "time | log"));
repository.save(new StreamDefinition("myStream2", "time | log"));
mockMvc.perform(delete("/streams/deployments").accept(MediaType.APPLICATION_JSON)).andDo(print())
.andExpect(status().isOk());
// NOTE(review): capturing on a times(0) verification is pointless — see testUndeployNonDeployedStream.
ArgumentCaptor<String> captor = ArgumentCaptor.forClass(String.class);
verify(appDeployer, times(0)).undeploy(captor.capture());
}
/**
 * Verifies that explicit deployment properties (wildcard partition key expression,
 * per-app deployer count, wildcard consumer concurrency) are translated into the
 * expected app and deployer properties for each app in the stream.
 */
@Test
public void testDeployWithProperties() throws Exception {
repository.save(new StreamDefinition("myStream", "time | log"));
Map<String, String> properties = new HashMap<>();
properties.put("app.*.producer.partitionKeyExpression", "payload");
properties.put("deployer.log.count", "2");
properties.put("app.*.consumer.concurrency", "3");
mockMvc.perform(post("/streams/deployments/myStream").content(new ObjectMapper().writeValueAsBytes(properties))
.contentType(MediaType.APPLICATION_JSON)).andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
List<AppDeploymentRequest> requests = captor.getAllValues();
assertEquals(2, requests.size());
AppDeploymentRequest logRequest = requests.get(0);
assertThat(logRequest.getDefinition().getName(), is("log"));
Map<String, String> logAppProps = logRequest.getDefinition().getProperties();
// Partitioned input plus scaled instance count propagate into the app properties.
assertEquals("true", logAppProps.get("spring.cloud.stream.bindings.input.consumer.partitioned"));
assertEquals("3", logAppProps.get("spring.cloud.stream.bindings.input.consumer.concurrency"));
assertEquals("2", logAppProps.get(StreamPropertyKeys.INSTANCE_COUNT));
Map<String, String> logDeploymentProps = logRequest.getDeploymentProperties();
// Fixed: the original asserted INDEXED_PROPERTY_KEY twice, once with the
// expected/actual arguments reversed; one correctly-ordered assertion suffices.
assertEquals("2", logDeploymentProps.get(AppDeployer.COUNT_PROPERTY_KEY));
assertEquals("myStream", logDeploymentProps.get(AppDeployer.GROUP_PROPERTY_KEY));
assertEquals("true", logDeploymentProps.get(AppDeployer.INDEXED_PROPERTY_KEY));
AppDeploymentRequest timeRequest = requests.get(1);
assertThat(timeRequest.getDefinition().getName(), is("time"));
Map<String, String> timeAppProps = timeRequest.getDefinition().getProperties();
assertEquals("2", timeAppProps.get("spring.cloud.stream.bindings.output.producer.partitionCount"));
assertEquals("payload",
timeAppProps.get("spring.cloud.stream.bindings.output.producer.partitionKeyExpression"));
Map<String, String> timeDeploymentProps = timeRequest.getDeploymentProperties();
// The count was scoped to "log" only, so time gets neither a count nor the indexed flag.
assertNull(timeDeploymentProps.get(AppDeployer.COUNT_PROPERTY_KEY));
assertEquals("myStream", timeDeploymentProps.get(AppDeployer.GROUP_PROPERTY_KEY));
assertNull(timeDeploymentProps.get(AppDeployer.INDEXED_PROPERTY_KEY));
}
/**
 * Verifies that wildcard deployment properties ({@code deployer.*.count},
 * {@code app.*.producer...}, {@code app.*.consumer...}) are applied to every app in the
 * stream, unlike the app-scoped variant exercised in testDeployWithProperties.
 */
@Test
public void testDeployWithWildcardProperties() throws Exception {
repository.save(new StreamDefinition("myStream", "time | log"));
Map<String, String> properties = new HashMap<>();
properties.put("app.*.producer.partitionKeyExpression", "payload");
properties.put("deployer.*.count", "2");
properties.put("app.*.consumer.concurrency", "3");
mockMvc.perform(post("/streams/deployments/myStream").content(new ObjectMapper().writeValueAsBytes(properties))
.contentType(MediaType.APPLICATION_JSON)).andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
List<AppDeploymentRequest> requests = captor.getAllValues();
assertEquals(2, requests.size());
AppDeploymentRequest logRequest = requests.get(0);
assertThat(logRequest.getDefinition().getName(), is("log"));
Map<String, String> logAppProps = logRequest.getDefinition().getProperties();
assertEquals("2", logAppProps.get(StreamPropertyKeys.INSTANCE_COUNT));
assertEquals("true", logAppProps.get("spring.cloud.stream.bindings.input.consumer.partitioned"));
assertEquals("3", logAppProps.get("spring.cloud.stream.bindings.input.consumer.concurrency"));
Map<String, String> logDeploymentProps = logRequest.getDeploymentProperties();
assertEquals("2", logDeploymentProps.get(AppDeployer.COUNT_PROPERTY_KEY));
assertEquals("myStream", logDeploymentProps.get(AppDeployer.GROUP_PROPERTY_KEY));
assertEquals("true", logDeploymentProps.get(AppDeployer.INDEXED_PROPERTY_KEY));
AppDeploymentRequest timeRequest = requests.get(1);
assertThat(timeRequest.getDefinition().getName(), is("time"));
Map<String, String> timeAppProps = timeRequest.getDefinition().getProperties();
assertEquals("2", timeAppProps.get("spring.cloud.stream.bindings.output.producer.partitionCount"));
assertEquals("payload",
timeAppProps.get("spring.cloud.stream.bindings.output.producer" + ".partitionKeyExpression"));
Map<String, String> timeDeploymentProps = timeRequest.getDeploymentProperties();
// The wildcard count applies to time as well, but only consumers are deployed indexed.
assertEquals("2", timeDeploymentProps.get(AppDeployer.COUNT_PROPERTY_KEY));
assertEquals("myStream", timeDeploymentProps.get(AppDeployer.GROUP_PROPERTY_KEY));
assertNull(timeDeploymentProps.get(AppDeployer.INDEXED_PROPERTY_KEY));
}
/**
 * Verifies that common application properties configured on the server (fake binder
 * host/port) are merged into every deployed app's properties alongside the explicit
 * deployment properties. The shared {@code appsProperties} map is restored in a
 * finally block so a failed assertion cannot leak state into other tests.
 */
@Test
public void testDeployWithCommonApplicationProperties() throws Exception {
repository.save(new StreamDefinition("myStream", "time | log"));
assertThat(appsProperties.getStream().values(), empty());
appsProperties.getStream().put("spring.cloud.stream.fake.binder.host", "fakeHost");
appsProperties.getStream().put("spring.cloud.stream.fake.binder.port", "fakePort");
try {
Map<String, String> properties = new HashMap<>();
properties.put("app.*.producer.partitionKeyExpression", "payload");
properties.put("deployer.*.count", "2");
properties.put("app.*.consumer.concurrency", "3");
mockMvc.perform(post("/streams/deployments/myStream").content(new ObjectMapper().writeValueAsBytes(properties))
.contentType(MediaType.APPLICATION_JSON)).andExpect(status().isCreated());
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
List<AppDeploymentRequest> requests = captor.getAllValues();
assertEquals(2, requests.size());
AppDeploymentRequest logRequest = requests.get(0);
assertThat(logRequest.getDefinition().getName(), is("log"));
Map<String, String> logAppProps = logRequest.getDefinition().getProperties();
assertEquals("2", logAppProps.get(StreamPropertyKeys.INSTANCE_COUNT));
// The server-wide common properties must reach the deployed app.
assertEquals("fakeHost", logAppProps.get("spring.cloud.stream.fake.binder.host"));
assertEquals("fakePort", logAppProps.get("spring.cloud.stream.fake.binder.port"));
assertEquals("true", logAppProps.get("spring.cloud.stream.bindings.input.consumer.partitioned"));
assertEquals("3", logAppProps.get("spring.cloud.stream.bindings.input.consumer.concurrency"));
Map<String, String> logDeploymentProps = logRequest.getDeploymentProperties();
assertEquals("2", logDeploymentProps.get(AppDeployer.COUNT_PROPERTY_KEY));
assertEquals("myStream", logDeploymentProps.get(AppDeployer.GROUP_PROPERTY_KEY));
assertEquals("true", logDeploymentProps.get(AppDeployer.INDEXED_PROPERTY_KEY));
AppDeploymentRequest timeRequest = requests.get(1);
assertThat(timeRequest.getDefinition().getName(), is("time"));
Map<String, String> timeAppProps = timeRequest.getDefinition().getProperties();
assertEquals("2", timeAppProps.get("spring.cloud.stream.bindings.output.producer.partitionCount"));
assertEquals("payload",
timeAppProps.get("spring.cloud.stream.bindings.output.producer.partitionKeyExpression"));
Map<String, String> timeDeploymentProps = timeRequest.getDeploymentProperties();
assertEquals("2", timeDeploymentProps.get(AppDeployer.COUNT_PROPERTY_KEY));
assertEquals("myStream", timeDeploymentProps.get(AppDeployer.GROUP_PROPERTY_KEY));
assertNull(timeDeploymentProps.get(AppDeployer.INDEXED_PROPERTY_KEY));
}
finally {
// Restore shared state even when an assertion above fails (the original cleared
// only on success, which could pollute subsequent tests).
appsProperties.getStream().clear();
}
}
/**
 * Verifies the state-aggregation rules of {@link StreamDeployerUtil#aggregateState}:
 * mixed states collapse to {@code partial}, error dominates, and unknown-only means
 * undeployed.
 */
@Test
public void testAggregateState() {
// deployed + failed: some apps are up, some are not -> partial.
EnumSet<DeploymentState> deployedAndFailed = EnumSet.of(DeploymentState.deployed, DeploymentState.failed);
assertThat(StreamDeployerUtil.aggregateState(deployedAndFailed), is(DeploymentState.partial));
// unknown + failed: nothing is confirmed running -> failed.
EnumSet<DeploymentState> unknownAndFailed = EnumSet.of(DeploymentState.unknown, DeploymentState.failed);
assertThat(StreamDeployerUtil.aggregateState(unknownAndFailed), is(DeploymentState.failed));
// Any error state dominates the aggregate.
EnumSet<DeploymentState> withError = EnumSet.of(DeploymentState.deployed, DeploymentState.failed, DeploymentState.error);
assertThat(StreamDeployerUtil.aggregateState(withError), is(DeploymentState.error));
// deployed mixed with undeployed or unknown -> partial.
EnumSet<DeploymentState> deployedAndUndeployed = EnumSet.of(DeploymentState.deployed, DeploymentState.undeployed);
assertThat(StreamDeployerUtil.aggregateState(deployedAndUndeployed), is(DeploymentState.partial));
EnumSet<DeploymentState> deployedAndUnknown = EnumSet.of(DeploymentState.deployed, DeploymentState.unknown);
assertThat(StreamDeployerUtil.aggregateState(deployedAndUnknown), is(DeploymentState.partial));
EnumSet<DeploymentState> undeployedAndUnknown = EnumSet.of(DeploymentState.undeployed, DeploymentState.unknown);
assertThat(StreamDeployerUtil.aggregateState(undeployedAndUnknown), is(DeploymentState.partial));
// unknown alone is reported as undeployed.
EnumSet<DeploymentState> onlyUnknown = EnumSet.of(DeploymentState.unknown);
assertThat(StreamDeployerUtil.aggregateState(onlyUnknown), is(DeploymentState.undeployed));
}
/**
 * Verifies that a RuntimeException thrown by the deployer does not fail the REST call:
 * the endpoint still returns 201 and attempts to deploy both apps. Presumably the
 * controller logs deployment errors instead of propagating them — confirm against the
 * deployment controller implementation.
 */
@Test
public void testAppDeploymentFailure() throws Exception {
when(appDeployer.deploy(any(AppDeploymentRequest.class))).thenThrow(new RuntimeException());
repository.save(new StreamDefinition("myStream", "time | log"));
mockMvc.perform(post("/streams/deployments/myStream").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isCreated());
// Both apps were still attempted despite the first failure.
ArgumentCaptor<AppDeploymentRequest> captor = ArgumentCaptor.forClass(AppDeploymentRequest.class);
verify(appDeployer, times(2)).deploy(captor.capture());
}
/**
 * Verifies the stream validation endpoint: after registering {@code time | log},
 * GET /streams/validation reports both apps as valid along with the stream's DSL.
 */
@Test
public void testValidateStream() throws Exception {
assertEquals(0, repository.count());
mockMvc.perform(post("/streams/definitions/").param("name", "myStream1").param("definition", "time | log")
.accept(MediaType.APPLICATION_JSON)).andDo(print()).andExpect(status().isCreated());
mockMvc.perform(get("/streams/validation/myStream1").accept(MediaType.APPLICATION_JSON))
.andExpect(status().isOk()).andDo(print()).andExpect(content()
.json("{\"appName\":\"myStream1\",\"appStatuses\":{\"source:time\":\"valid\",\"sink:log\":\"valid\"},\"dsl\":\"time | log\",\"links\":[]}"));
}
}
| apache-2.0 |
shyTNT/googleads-java-lib | examples/adwords_axis/src/main/java/adwords/axis/v201506/targeting/AddDemographicTargetingCriteria.java | 4724 | // Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201506.targeting;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.v201506.cm.AdGroupCriterion;
import com.google.api.ads.adwords.axis.v201506.cm.AdGroupCriterionOperation;
import com.google.api.ads.adwords.axis.v201506.cm.AdGroupCriterionReturnValue;
import com.google.api.ads.adwords.axis.v201506.cm.AdGroupCriterionServiceInterface;
import com.google.api.ads.adwords.axis.v201506.cm.AgeRange;
import com.google.api.ads.adwords.axis.v201506.cm.BiddableAdGroupCriterion;
import com.google.api.ads.adwords.axis.v201506.cm.Gender;
import com.google.api.ads.adwords.axis.v201506.cm.NegativeAdGroupCriterion;
import com.google.api.ads.adwords.axis.v201506.cm.Operator;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.client.auth.oauth2.Credential;
/**
* This example adds demographic criteria to an ad group. To get ad groups, run
* GetAdGroups.java
*
* Credentials and properties in {@code fromFile()} are pulled from the
* "ads.properties" file. See README for more info.
*/
public class AddDemographicTargetingCriteria {

  /**
   * Entry point: builds OAuth2 credentials and an AdWords session from the
   * "ads.properties" file, then runs the example against the ad group id supplied below.
   *
   * @param args unused
   * @throws Exception if credential generation, session creation, or the API call fails
   */
  public static void main(String[] args) throws Exception {
    // Generate a refreshable OAuth2 credential similar to a ClientLogin token
    // and can be used in place of a service account.
    Credential oAuth2Credential = new OfflineCredentials.Builder()
        .forApi(Api.ADWORDS)
        .fromFile()
        .build()
        .generateCredential();

    // Construct an AdWordsSession.
    AdWordsSession session = new AdWordsSession.Builder()
        .fromFile()
        .withOAuth2Credential(oAuth2Credential)
        .build();

    long adGroupId = Long.parseLong("INSERT_AD_GROUP_ID_HERE");

    AdWordsServices adWordsServices = new AdWordsServices();

    runExample(adWordsServices, session, adGroupId);
  }

  /**
   * Adds two demographic criteria to the given ad group: a positive (biddable) criterion
   * targeting the "male" gender, and a negative criterion excluding the "undetermined"
   * age range. Prints the criteria returned by the mutate call.
   *
   * @param adWordsServices the services factory
   * @param session the AdWords session to use
   * @param adGroupId the id of the ad group to add criteria to
   * @throws Exception if the AdWords API call fails
   */
  public static void runExample(
      AdWordsServices adWordsServices, AdWordsSession session, Long adGroupId) throws Exception {
    // Get the AdGroupCriterionService.
    AdGroupCriterionServiceInterface adGroupCriterionService =
        adWordsServices.get(session, AdGroupCriterionServiceInterface.class);

    // Criterion id 10 = "Male"; see
    // https://developers.google.com/adwords/api/docs/appendix/genders
    Gender male = new Gender();
    male.setId(10L);

    BiddableAdGroupCriterion genderBiddableAdGroupCriterion = new BiddableAdGroupCriterion();
    genderBiddableAdGroupCriterion.setAdGroupId(adGroupId);
    genderBiddableAdGroupCriterion.setCriterion(male);

    // Criterion id 503999 = "Undetermined" age range; see
    // https://developers.google.com/adwords/api/docs/appendix/ages
    AgeRange undetermined = new AgeRange();
    undetermined.setId(503999L);

    // Exclude the undetermined age range via a negative criterion.
    NegativeAdGroupCriterion ageRangeNegativeAdGroupCriterion = new NegativeAdGroupCriterion();
    ageRangeNegativeAdGroupCriterion.setAdGroupId(adGroupId);
    ageRangeNegativeAdGroupCriterion.setCriterion(undetermined);

    AdGroupCriterionOperation genderAdGroupCriterionOperation = new AdGroupCriterionOperation();
    genderAdGroupCriterionOperation.setOperand(genderBiddableAdGroupCriterion);
    genderAdGroupCriterionOperation.setOperator(Operator.ADD);

    AdGroupCriterionOperation ageRangeNegativeAdGroupCriterionOperation =
        new AdGroupCriterionOperation();
    ageRangeNegativeAdGroupCriterionOperation.setOperand(ageRangeNegativeAdGroupCriterion);
    ageRangeNegativeAdGroupCriterionOperation.setOperator(Operator.ADD);

    AdGroupCriterionReturnValue result =
        adGroupCriterionService.mutate(new AdGroupCriterionOperation[] {
            genderAdGroupCriterionOperation, ageRangeNegativeAdGroupCriterionOperation});

    // Display the added ad group criteria (the original comment said "campaigns",
    // which was inaccurate). %n is the platform-independent line separator for printf.
    for (AdGroupCriterion adGroupCriterion : result.getValue()) {
      System.out.printf("AdGroup criterion with adGroup id '%s', criterion id '%s', "
          + "and type '%s' was added.%n", adGroupCriterion.getAdGroupId(),
          adGroupCriterion.getCriterion().getId(),
          adGroupCriterion.getCriterion().getCriterionType());
    }
  }
}
| apache-2.0 |
jakubmalek/guava | guava/src/com/google/common/escape/UnicodeEscaper.java | 13258 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.escape;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
/**
* An {@link Escaper} that converts literal text into a format safe for inclusion in a particular
* context (such as an XML document). Typically (but not always), the inverse process of
* "unescaping" the text is performed automatically by the relevant parser.
*
* <p>For example, an XML escaper would convert the literal string {@code
* "Foo<Bar>"} into {@code "Foo<Bar>"} to prevent {@code "<Bar>"} from being confused with an
* XML tag. When the resulting XML document is parsed, the parser API will return this text as the
* original literal string {@code
* "Foo<Bar>"}.
*
* <p><b>Note:</b> This class is similar to {@link CharEscaper} but with one very important
* difference. A CharEscaper can only process Java
* <a href="http://en.wikipedia.org/wiki/UTF-16">UTF16</a> characters in isolation and may not cope
* when it encounters surrogate pairs. This class facilitates the correct escaping of all Unicode
* characters.
*
* <p>As there are important reasons, including potential security issues, to handle Unicode
* correctly if you are considering implementing a new escaper you should favor using UnicodeEscaper
* wherever possible.
*
* <p>A {@code UnicodeEscaper} instance is required to be stateless, and safe when used concurrently
* by multiple threads.
*
* <p>Popular escapers are defined as constants in classes like
* {@link com.google.common.html.HtmlEscapers} and {@link com.google.common.xml.XmlEscapers}. To
* create your own escapers extend this class and implement the {@link #escape(int)} method.
*
* @author David Beaumont
* @since 15.0
*/
@Beta
@GwtCompatible
public abstract class UnicodeEscaper extends Escaper {
/**
 * The amount of padding (in chars) to add when growing the escape buffer — presumably to
 * amortize reallocations when several consecutive characters need escaping; the buffer-growth
 * code is outside this excerpt, so confirm there.
 */
private static final int DEST_PAD = 32;
/** Constructor for use by subclasses. Instances carry no state (the class contract requires
 * implementations to be stateless and thread-safe). */
protected UnicodeEscaper() {}
/**
* Returns the escaped form of the given Unicode code point, or {@code null} if this code point
* does not need to be escaped. When called as part of an escaping operation, the given code point
* is guaranteed to be in the range {@code 0 <= cp <= Character#MAX_CODE_POINT}.
*
* <p>If an empty array is returned, this effectively strips the input character from the
* resulting text.
*
* <p>If the character does not need to be escaped, this method should return {@code null}, rather
* than an array containing the character representation of the code point. This enables the
* escaping algorithm to perform more efficiently.
*
* <p>If the implementation of this method cannot correctly handle a particular code point then it
* should either throw an appropriate runtime exception or return a suitable replacement
* character. It must never silently discard invalid input as this may constitute a security risk.
*
* @param cp the Unicode code point to escape if necessary
* @return the replacement characters, or {@code null} if no escaping was needed
*/
protected abstract char[] escape(int cp);
/**
* Scans a sub-sequence of characters from a given {@link CharSequence}, returning the index of
* the next character that requires escaping.
*
* <p><b>Note:</b> When implementing an escaper, it is a good idea to override this method for
* efficiency. The base class implementation determines successive Unicode code points and invokes
* {@link #escape(int)} for each of them. If the semantics of your escaper are such that code
* points in the supplementary range are either all escaped or all unescaped, this method can be
* implemented more efficiently using {@link CharSequence#charAt(int)}.
*
* <p>Note however that if your escaper does not escape characters in the supplementary range, you
* should either continue to validate the correctness of any surrogate characters encountered or
* provide a clear warning to users that your escaper does not validate its input.
*
* <p>See {@link com.google.common.net.PercentEscaper} for an example.
*
* @param csq a sequence of characters
* @param start the index of the first character to be scanned
* @param end the index immediately after the last character to be scanned
* @throws IllegalArgumentException if the scanned sub-sequence of {@code csq} contains invalid
* surrogate pairs
*/
protected int nextEscapeIndex(CharSequence csq, int start, int end) {
int index = start;
while (index < end) {
int cp = codePointAt(csq, index, end);
if (cp < 0 || escape(cp) != null) {
break;
}
index += Character.isSupplementaryCodePoint(cp) ? 2 : 1;
}
return index;
}
/**
* Returns the escaped form of a given literal string.
*
* <p>If you are escaping input in arbitrary successive chunks, then it is not generally safe to
* use this method. If an input string ends with an unmatched high surrogate character, then this
* method will throw {@link IllegalArgumentException}. You should ensure your input is valid
* <a href="http://en.wikipedia.org/wiki/UTF-16">UTF-16</a> before calling this method.
*
* <p><b>Note:</b> When implementing an escaper it is a good idea to override this method for
* efficiency by inlining the implementation of {@link #nextEscapeIndex(CharSequence, int, int)}
* directly. Doing this for {@link com.google.common.net.PercentEscaper} more than doubled the
* performance for unescaped strings (as measured by {@link CharEscapersBenchmark}).
*
* @param string the literal string to be escaped
* @return the escaped form of {@code string}
* @throws NullPointerException if {@code string} is null
* @throws IllegalArgumentException if invalid surrogate characters are encountered
*/
@Override
public String escape(String string) {
checkNotNull(string);
int end = string.length();
int index = nextEscapeIndex(string, 0, end);
return index == end ? string : escapeSlow(string, index);
}
/**
* Returns the escaped form of a given literal string, starting at the given index. This method is
* called by the {@link #escape(String)} method when it discovers that escaping is required. It is
* protected to allow subclasses to override the fastpath escaping function to inline their
* escaping test. See {@link CharEscaperBuilder} for an example usage.
*
* <p>This method is not reentrant and may only be invoked by the top level
* {@link #escape(String)} method.
*
* @param s the literal string to be escaped
* @param index the index to start escaping from
* @return the escaped form of {@code string}
* @throws NullPointerException if {@code string} is null
* @throws IllegalArgumentException if invalid surrogate characters are encountered
*/
protected final String escapeSlow(String s, int index) {
int end = s.length();
// Get a destination buffer and setup some loop variables.
char[] dest = Platform.charBufferFromThreadLocal();
int destIndex = 0;
int unescapedChunkStart = 0;
while (index < end) {
int cp = codePointAt(s, index, end);
if (cp < 0) {
throw new IllegalArgumentException("Trailing high surrogate at end of input");
}
// It is possible for this to return null because nextEscapeIndex() may
// (for performance reasons) yield some false positives but it must never
// give false negatives.
char[] escaped = escape(cp);
int nextIndex = index + (Character.isSupplementaryCodePoint(cp) ? 2 : 1);
if (escaped != null) {
int charsSkipped = index - unescapedChunkStart;
// This is the size needed to add the replacement, not the full
// size needed by the string. We only regrow when we absolutely must.
int sizeNeeded = destIndex + charsSkipped + escaped.length;
if (dest.length < sizeNeeded) {
int destLength = sizeNeeded + (end - index) + DEST_PAD;
dest = growBuffer(dest, destIndex, destLength);
}
// If we have skipped any characters, we need to copy them now.
if (charsSkipped > 0) {
s.getChars(unescapedChunkStart, index, dest, destIndex);
destIndex += charsSkipped;
}
if (escaped.length > 0) {
System.arraycopy(escaped, 0, dest, destIndex, escaped.length);
destIndex += escaped.length;
}
// If we dealt with an escaped character, reset the unescaped range.
unescapedChunkStart = nextIndex;
}
index = nextEscapeIndex(s, nextIndex, end);
}
// Process trailing unescaped characters - no need to account for escaped
// length or padding the allocation.
int charsSkipped = end - unescapedChunkStart;
if (charsSkipped > 0) {
int endIndex = destIndex + charsSkipped;
if (dest.length < endIndex) {
dest = growBuffer(dest, destIndex, endIndex);
}
s.getChars(unescapedChunkStart, end, dest, destIndex);
destIndex = endIndex;
}
return new String(dest, 0, destIndex);
}
/**
* Returns the Unicode code point of the character at the given index.
*
* <p>Unlike {@link Character#codePointAt(CharSequence, int)} or {@link String#codePointAt(int)}
* this method will never fail silently when encountering an invalid surrogate pair.
*
* <p>The behaviour of this method is as follows:
* <ol>
* <li>If {@code index >= end}, {@link IndexOutOfBoundsException} is thrown.
* <li><b>If the character at the specified index is not a surrogate, it is returned.</b>
* <li>If the first character was a high surrogate value, then an attempt is made to read the next
* character.
* <ol>
* <li><b>If the end of the sequence was reached, the negated value of the trailing high
* surrogate is returned.</b>
* <li><b>If the next character was a valid low surrogate, the code point value of the
* high/low surrogate pair is returned.</b>
* <li>If the next character was not a low surrogate value, then {@link
* IllegalArgumentException} is thrown.
* </ol>
* <li>If the first character was a low surrogate value, {@link IllegalArgumentException} is
* thrown.
* </ol>
*
* @param seq the sequence of characters from which to decode the code point
* @param index the index of the first character to decode
* @param end the index beyond the last valid character to decode
* @return the Unicode code point for the given index or the negated value of the trailing high
* surrogate character at the end of the sequence
*/
protected static int codePointAt(CharSequence seq, int index, int end) {
checkNotNull(seq);
if (index < end) {
char c1 = seq.charAt(index++);
if (c1 < Character.MIN_HIGH_SURROGATE || c1 > Character.MAX_LOW_SURROGATE) {
// Fast path (first test is probably all we need to do)
return c1;
} else if (c1 <= Character.MAX_HIGH_SURROGATE) {
// If the high surrogate was the last character, return its inverse
if (index == end) {
return -c1;
}
// Otherwise look for the low surrogate following it
char c2 = seq.charAt(index);
if (Character.isLowSurrogate(c2)) {
return Character.toCodePoint(c1, c2);
}
throw new IllegalArgumentException(
"Expected low surrogate but got char '"
+ c2
+ "' with value "
+ (int) c2
+ " at index "
+ index
+ " in '"
+ seq
+ "'");
} else {
throw new IllegalArgumentException(
"Unexpected low surrogate character '"
+ c1
+ "' with value "
+ (int) c1
+ " at index "
+ (index - 1)
+ " in '"
+ seq
+ "'");
}
}
throw new IndexOutOfBoundsException("Index exceeds specified range");
}
/**
* Helper method to grow the character buffer as needed, this only happens once in a while so it's
* ok if it's in a method call. If the index passed in is 0 then no copying will be done.
*/
private static char[] growBuffer(char[] dest, int index, int size) {
char[] copy = new char[size];
if (index > 0) {
System.arraycopy(dest, 0, copy, 0, index);
}
return copy;
}
}
| apache-2.0 |
medicayun/medicayundicom | dcm4jboss-all/tags/DCM4CHEE_2_10_9/dcm4jboss-ejb/src/java/org/dcm4chex/archive/ejb/session/MPPSManagerBean.java | 17033 | /* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.ejb.session;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.ejb.CreateException;
import javax.ejb.EJBException;
import javax.ejb.FinderException;
import javax.ejb.ObjectNotFoundException;
import javax.ejb.SessionBean;
import javax.ejb.SessionContext;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.log4j.Logger;
import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmElement;
import org.dcm4che.data.DcmObjectFactory;
import org.dcm4che.dict.Status;
import org.dcm4che.dict.Tags;
import org.dcm4che.net.DcmServiceException;
import org.dcm4chex.archive.ejb.interfaces.InstanceLocal;
import org.dcm4chex.archive.ejb.interfaces.MPPSLocal;
import org.dcm4chex.archive.ejb.interfaces.MPPSLocalHome;
import org.dcm4chex.archive.ejb.interfaces.MWLItemLocal;
import org.dcm4chex.archive.ejb.interfaces.MWLItemLocalHome;
import org.dcm4chex.archive.ejb.interfaces.PatientLocal;
import org.dcm4chex.archive.ejb.interfaces.PatientLocalHome;
import org.dcm4chex.archive.ejb.interfaces.SeriesLocal;
import org.dcm4chex.archive.ejb.interfaces.SeriesLocalHome;
import org.dcm4chex.archive.ejb.interfaces.StudyLocal;
/**
* @author gunter.zeilinter@tiani.com
* @version $Revision: 2936 $ $Date: 2006-11-07 17:37:13 +0800 (周二, 07 11月 2006) $
* @since 21.03.2004
*
* @ejb.bean name="MPPSManager" type="Stateless" view-type="remote"
* jndi-name="ejb/MPPSManager"
* @ejb.transaction-type type="Container"
* @ejb.transaction type="Required"
* @ejb.ejb-ref ejb-name="Patient" view-type="local" ref-name="ejb/Patient"
* @ejb.ejb-ref ejb-name="MPPS" view-type="local" ref-name="ejb/MPPS"
* @ejb.ejb-ref ejb-name="MWLItem" view-type="local" ref-name="ejb/MWLItem"
* @ejb.ejb-ref ejb-name="Series" view-type="local" ref-name="ejb/Series"
*
*/
// Stateless session bean managing DICOM Modality Performed Procedure Step
// (MPPS) objects: creation, retrieval, update, (un)linking with Modality
// Worklist (MWL) entries, deletion, and series/study synchronization.
public abstract class MPPSManagerBean implements SessionBean {
private static Logger log = Logger.getLogger(MPPSManagerBean.class);
// Error reported when a client tries to N-SET an MPPS that already left the
// "IN PROGRESS" state (see updateMPPS below).
private static final String NO_LONGER_BE_UPDATED_ERR_MSG =
"Performed Procedure Step Object may no longer be updated";
private static final int NO_LONGER_BE_UPDATED_ERR_ID = 0xA710;
// NOTE(review): DELETED is not referenced anywhere in this class — confirm
// whether it is used via generated code before removing it.
private static final int DELETED = 1;
// Patient-level attribute tags; the EXC variant is passed to
// Dataset.subSet(tags, true, true) in createMPPS — presumably to exclude
// these patient attributes from the stored MPPS dataset (TODO confirm the
// subSet flag semantics against the dcm4che Dataset API).
private static final int[] PATIENT_ATTRS_EXC = {
Tags.PatientName,
Tags.PatientID,
Tags.PatientBirthDate,
Tags.PatientSex,
Tags.RefPatientSeq,
};
// Patient attributes copied into a newly created Patient record.
private static final int[] PATIENT_ATTRS_INC = {
Tags.PatientName,
Tags.PatientID,
Tags.PatientBirthDate,
Tags.PatientSex,
};
private PatientLocalHome patHome;
private SeriesLocalHome seriesHome;
private MPPSLocalHome mppsHome;
private MWLItemLocalHome mwlItemHome;
private SessionContext sessionCtx;
// Resolves the EJB local home interfaces from JNDI when the container
// associates this bean instance with a session context.
public void setSessionContext(SessionContext ctx) {
sessionCtx = ctx;
Context jndiCtx = null;
try {
jndiCtx = new InitialContext();
patHome =
(PatientLocalHome) jndiCtx.lookup("java:comp/env/ejb/Patient");
seriesHome = (SeriesLocalHome) jndiCtx.lookup("java:comp/env/ejb/Series");
mppsHome = (MPPSLocalHome) jndiCtx.lookup("java:comp/env/ejb/MPPS");
mwlItemHome = (MWLItemLocalHome) jndiCtx.lookup("java:comp/env/ejb/MWLItem");
} catch (NamingException e) {
throw new EJBException(e);
} finally {
if (jndiCtx != null) {
try {
jndiCtx.close();
} catch (NamingException ignore) {}
}
}
}
// Releases home references when the bean is passivated/destroyed.
// NOTE(review): mwlItemHome is not nulled here, unlike the other homes.
public void unsetSessionContext() {
sessionCtx = null;
mppsHome = null;
patHome = null;
seriesHome = null;
}
/**
* Creates an MPPS entity (N-CREATE) for the given dataset, attaching it to
* an existing matching Patient or creating a new one.
*
* @ejb.interface-method
*/
public void createMPPS(Dataset ds)
throws DcmServiceException {
try {
PatientLocal pat = getPatient(ds);
mppsHome.create(ds.subSet(PATIENT_ATTRS_EXC, true, true), pat);
} catch (CreateException ce) {
// Creation failed: if an MPPS with this SOP Instance UID already
// exists, report DuplicateSOPInstance; otherwise a generic failure.
try {
mppsHome.findBySopIuid(ds.getString(Tags.SOPInstanceUID));
throw new DcmServiceException(Status.DuplicateSOPInstance);
} catch (FinderException fe) {
throw new DcmServiceException(Status.ProcessingFailure, ce);
} finally {
// Roll back the transaction in both failure paths.
sessionCtx.setRollbackOnly();
}
}
}
// Finds the Patient matching the dataset's Patient ID, following the
// merged-with chain to the dominant record; creates a new Patient from
// PATIENT_ATTRS_INC if no candidate matches.
private PatientLocal getPatient(Dataset ds) throws DcmServiceException {
try {
final String id = ds.getString(Tags.PatientID);
Collection c = patHome.findByPatientId(id);
for (Iterator it = c.iterator(); it.hasNext();) {
PatientLocal patient = (PatientLocal) it.next();
if (equals(patient, ds)) {
PatientLocal mergedWith;
while ((mergedWith = patient.getMergedWith()) != null) {
patient = mergedWith;
}
return patient;
}
}
PatientLocal patient =
patHome.create(ds.subSet(PATIENT_ATTRS_INC));
return patient;
} catch (Exception e) {
throw new DcmServiceException(Status.ProcessingFailure, e);
}
}
// NOTE(review): this always returns true, so every patient returned by
// findByPatientId is accepted — matching is effectively by Patient ID only.
// Confirm whether demographic cross-checking was intended here.
private boolean equals(PatientLocal patient, Dataset ds) {
return true;
}
/**
* Returns the MPPS attributes for the given SOP Instance UID, merged with
* the owning patient's attributes.
*
* @ejb.interface-method
*/
public Dataset getMPPS(String iuid) throws FinderException {
final MPPSLocal mpps = mppsHome.findBySopIuid(iuid);
final PatientLocal pat = mpps.getPatient();
Dataset attrs = mpps.getAttributes();
attrs.putAll(pat.getAttributes(false));
return attrs;
}
/**
* Updates (N-SET) an existing MPPS. Fails with error 0xA710 if the MPPS
* status is no longer "IN PROGRESS".
*
* @ejb.interface-method
*/
public void updateMPPS(Dataset ds)
throws DcmServiceException {
MPPSLocal mpps;
try {
mpps = mppsHome.findBySopIuid(ds.getString(Tags.SOPInstanceUID));
} catch (ObjectNotFoundException e) {
throw new DcmServiceException(Status.NoSuchObjectInstance);
} catch (FinderException e) {
throw new DcmServiceException(Status.ProcessingFailure, e);
}
// Per the MPPS state model, only an IN PROGRESS step may be modified.
if (!"IN PROGRESS".equals(mpps.getPpsStatus())) {
DcmServiceException e =
new DcmServiceException(
Status.ProcessingFailure,
NO_LONGER_BE_UPDATED_ERR_MSG);
e.setErrorID(NO_LONGER_BE_UPDATED_ERR_ID);
throw e;
}
// Merge the incoming attributes over the stored ones.
Dataset attrs = mpps.getAttributes();
attrs.putAll(ds);
mpps.setAttributes(attrs);
}
/**
* Links a mpps to a mwl entry.
* <p>
* Sets SpsID and AccessionNumber from mwl entry.
* <P>
* Returns a Map with following key/value pairs.
* <dl>
* <dt>mppsAttrs: (Dataset)</dt>
* <dd> Attributes of mpps entry. (for notification)</dd>
* <dt>mwlPat: (Dataset)</dt>
* <dd> Patient of MWL entry.</dd>
* <dd> (The dominant patient of patient merge).</dd>
* <dt>mppsPat: (Dataset)</dt>
* <dd> Patient of MPPS entry.</dd>
* <dd> (The merged patient).</dd>
* <dt>userAction: (Boolean)</dt>
* <dd> Indicates that a user action is necessary.</dd>
* <dd> (the MPPS patient has more than one Study!)
* </dl>
* @param spsID spsID to select MWL entry
* @param mppsIUID Instance UID of mpps.
*
* @return A map with mpps attributes and patient attributes to merge.
*
* @ejb.interface-method
*/
public Map linkMppsToMwl(String spsID, String mppsIUID) throws DcmServiceException {
log.info("linkMppsToMwl sps:"+spsID+" mpps:"+mppsIUID);
MWLItemLocal mwlItem;
MPPSLocal mpps;
Map map = new HashMap();
try {
mwlItem = mwlItemHome.findBySpsId(spsID);
mpps = mppsHome.findBySopIuid(mppsIUID);
String accNo = mwlItem.getAccessionNumber();
PatientLocal mwlPat = mwlItem.getPatient();
PatientLocal mppsPat = mpps.getPatient();
Dataset mwlAttrs = mwlItem.getAttributes();
Dataset mppsAttrs = mpps.getAttributes();
Dataset ssa;
DcmElement ssaSQ = mppsAttrs.get(Tags.ScheduledStepAttributesSeq);
String ssaSpsID, studyIUID = null;
boolean spsNotInList = true;
// Walk the existing Scheduled Step Attributes items: align the MWL
// entry's Study Instance UID with the MPPS one, and stamp SPS ID /
// Accession Number on matching (or not-yet-assigned) items.
for ( int i = 0, len = ssaSQ.countItems() ; i < len ; i++ ) {
ssa = ssaSQ.getItem(i);
if ( ssa != null ) {
if ( studyIUID == null ) {
studyIUID = ssa.getString(Tags.StudyInstanceUID);
if ( !studyIUID.equals(
mwlAttrs.getString(Tags.StudyInstanceUID) ) ) {
log.info("StudyInstanceUID corrected for spsID "+spsID);
mwlAttrs.putUI(Tags.StudyInstanceUID, ssa.getString(Tags.StudyInstanceUID) );
mwlItem.setAttributes( mwlAttrs );
}
}
ssaSpsID = ssa.getString(Tags.SPSID);
if ( ssaSpsID == null || spsID.equals(ssaSpsID) ) {
ssa.putSH(Tags.AccessionNumber,accNo);
ssa.putSH(Tags.SPSID, spsID);
ssa.putUI(Tags.StudyInstanceUID, studyIUID);
spsNotInList = false;
}
}
}
// If no existing item referenced this SPS, append a new item built
// from the MWL entry's Scheduled Procedure Step data.
if ( spsNotInList ) {
ssa = ssaSQ.addNewItem();
Dataset spsDS = mwlAttrs.getItem(Tags.SPSSeq);
ssa.putUI(Tags.StudyInstanceUID, studyIUID);
ssa.putSH(Tags.SPSID, spsID);
ssa.putSH(Tags.AccessionNumber, accNo);
ssa.putSQ(Tags.RefStudySeq);
ssa.putSH(Tags.RequestedProcedureID, mwlAttrs.getString(Tags.RequestedProcedureID));
ssa.putLO(Tags.SPSDescription, spsDS.getString(Tags.SPSDescription));
DcmElement mppsSPCSQ = ssa.putSQ(Tags.ScheduledProtocolCodeSeq);
DcmElement mwlSPCSQ = spsDS.get(Tags.ScheduledProtocolCodeSeq);
if ( mwlSPCSQ != null && mwlSPCSQ.countItems() > 0 ) {
for ( int i = 0, len = mwlSPCSQ.countItems() ; i < len ; i++ ) {
mppsSPCSQ.addNewItem().putAll(mwlSPCSQ.getItem(i));
}
}
// NOTE(review): mixed log levels — the label is logged at DEBUG
// but the item at INFO; confirm which level was intended.
log.debug("add new scheduledStepAttribute item:");log.info(ssa);
log.debug("new mppsAttrs:");log.debug(mppsAttrs);
}
mpps.setAttributes(mppsAttrs);
// Enrich the returned attributes with the MPPS patient's demographics
// (after persisting, so patient attributes are not stored on the MPPS).
mppsAttrs.putAll(mppsPat.getAttributes(false));
map.put("mppsAttrs",mppsAttrs);
map.put("mwlAttrs",mwlAttrs);
// Differing patients signal that a patient merge may be required.
if ( ! mwlPat.equals(mppsPat) ) {
map.put( "mwlPat", mwlPat.getAttributes(true));
map.put( "mppsPat",mppsPat.getAttributes(true));
}
return map;
} catch (ObjectNotFoundException e) {
throw new DcmServiceException(Status.NoSuchObjectInstance);
} catch (FinderException e) {
throw new DcmServiceException(Status.ProcessingFailure, e);
}
}
/**
* Unlinks an MPPS from its worklist entries: resets each referenced SPS
* back to "SCHEDULED" and replaces the Scheduled Step Attributes sequence
* with a single item carrying only the Study Instance UID plus empty
* type-2 attributes.
*
* @ejb.interface-method
*/
public void unlinkMpps( String mppsIUID ) throws FinderException {
MPPSLocal mpps = mppsHome.findBySopIuid(mppsIUID);
Dataset mppsAttrs = mpps.getAttributes();
DcmElement ssaSQ = mppsAttrs.get(Tags.ScheduledStepAttributesSeq);
Dataset ds = null;
String spsID;
for ( int i = ssaSQ.countItems()-1 ; i >= 0 ; i-- ) {
ds = ssaSQ.getItem(i);
spsID = ds.getString(Tags.SPSID);
if ( spsID != null ) {
try {
MWLItemLocal mwlItem = mwlItemHome.findBySpsId(spsID);
Dataset mwlDS = mwlItem.getAttributes();
mwlDS.getItem(Tags.SPSSeq).putCS(Tags.SPSStatus, "SCHEDULED");
mwlItem.setAttributes(mwlDS);
} catch ( FinderException ignore ) {}
}
}
// NOTE(review): if ScheduledStepAttributesSeq has no items, ds is still
// null here and the next line throws NullPointerException — confirm the
// sequence is guaranteed non-empty for linked MPPS objects.
String studyIUID = ds.getString(Tags.StudyInstanceUID);
ds.clear();
ds.putUI(Tags.StudyInstanceUID, studyIUID);
//add empty type 2 attributes.
ds.putSH(Tags.SPSID, (String)null);
ds.putSH(Tags.AccessionNumber, (String)null);
ds.putSQ(Tags.RefStudySeq);
ds.putSH(Tags.RequestedProcedureID, (String)null);
ds.putLO(Tags.SPSDescription, (String)null);
ds.putSQ(Tags.ScheduledProtocolCodeSeq);
mppsAttrs.putSQ(Tags.ScheduledStepAttributesSeq).addItem(ds);
mpps.setAttributes(mppsAttrs);
}
/**
* Delete a list of mpps entries.
* Failures are logged and skipped; the method always returns true.
*
* @ejb.interface-method
*/
public boolean deleteMPPSEntries( String[] iuids ) {
for ( int i = 0 ; i < iuids.length ; i++ ) {
try {
mppsHome.findBySopIuid( iuids[i] ).remove();
} catch (Exception x) {
log.error("Cant delete mpps:"+iuids[i], x);
}
}
return true;
}
/**
* Returns the Series Instance UIDs of all series that reference the given
* MPPS Instance UID.
*
* @ejb.interface-method
*/
public Collection getSeriesIUIDs(String mppsIUID) throws FinderException {
Collection col = new ArrayList();
Collection series = seriesHome.findByPpsIuid(mppsIUID);
for ( Iterator iter = series.iterator() ; iter.hasNext() ; ) {
col.add( ( (SeriesLocal) iter.next()).getSeriesIuid() );
}
return col;
}
/**
* Returns one merged Dataset (series attributes overlaid with the owning
* study's attributes) per series referencing the given MPPS Instance UID.
*
* @ejb.interface-method
*/
public Collection getSeriesAndStudyDS(String mppsIUID) throws FinderException {
Collection col = new ArrayList();
Collection seriess = seriesHome.findByPpsIuid(mppsIUID);
SeriesLocal series;
Dataset ds;
for ( Iterator iter = seriess.iterator() ; iter.hasNext() ; ) {
series = (SeriesLocal) iter.next();
ds = series.getAttributes(true);
ds.putAll(series.getStudy().getAttributes(true));
col.add( ds );
}
return col;
}
/**
* Returns a StudyMgt Dataset.
* Applies each incoming series dataset to the matching Series entity and
* builds a Referenced Series Sequence (with Referenced SOP items per
* instance). The study of the LAST processed series is updated from the
* last dataset and merged into the returned Dataset.
*
* @ejb.interface-method
*/
public Dataset updateSeriesAndStudy(Collection seriesDS) throws FinderException {
Dataset ds = null;
String iuid;
SeriesLocal series = null;
Dataset dsN = DcmObjectFactory.getInstance().newDataset();
DcmElement refSeriesSeq = dsN.putSQ( Tags.RefSeriesSeq );
Dataset dsSer;
for ( Iterator iter = seriesDS.iterator() ; iter.hasNext() ; ) {
ds = (Dataset) iter.next();
iuid = ds.getString(Tags.SeriesInstanceUID);
series = seriesHome.findBySeriesIuid(iuid);
series.setAttributes(ds);
dsSer = refSeriesSeq.addNewItem();
dsSer.putAll(series.getAttributes(true));
Iterator iter2 = series.getInstances().iterator();
if ( iter2.hasNext()) {
DcmElement refSopSeq = dsSer.putSQ( Tags.RefSOPSeq );
InstanceLocal il;
Dataset dsInst;
while ( iter2.hasNext() ) {
il = (InstanceLocal) iter2.next();
dsInst = refSopSeq.addNewItem();
dsInst.putUI( Tags.RefSOPClassUID, il.getSopCuid() );
dsInst.putUI( Tags.RefSOPInstanceUID, il.getSopIuid() );
dsInst.putAE( Tags.RetrieveAET, il.getRetrieveAETs() );
}
}
}
// Update the study using the last series dataset seen in the loop.
if ( series != null ) {
StudyLocal study = series.getStudy();
study.setAttributes(ds);
dsN.putAll( study.getAttributes(true) );
}
return dsN;
}
}
| apache-2.0 |
bordoley-legacy/java-restlib | restlib-core/src/restlib/impl/Registry.java | 1052 | package restlib.impl;
import java.util.concurrent.ConcurrentMap;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
/**
 * A thread-safe interning registry: at most one canonical instance is kept per
 * distinct value (by equals/hashCode), and an immutable snapshot of all
 * registered values can be iterated without locking.
 */
@ThreadSafe
public final class Registry<T> {
    /** Canonical instance per distinct value; every key maps to itself. */
    private final ConcurrentMap<T, T> registered = Maps.newConcurrentMap();

    /** Immutable snapshot of the registered values, rebuilt on every register(). */
    @GuardedBy("this")
    private volatile Iterable<T> registeredList = ImmutableList.of();

    /**
     * Registers {@code item} unless an equal value is already present, and
     * returns the canonical instance for it.
     *
     * @throws NullPointerException if {@code item} is null
     */
    public synchronized T register(final T item) {
        Preconditions.checkNotNull(item);
        final T previous = registered.putIfAbsent(item, item);
        final T canonical = Objects.firstNonNull(previous, item);
        registeredList = ImmutableList.copyOf(registered.keySet());
        return canonical;
    }

    /**
     * Returns the canonical registered instance equal to {@code item}, or
     * {@code item} itself if nothing equal has been registered.
     */
    public T getIfPresent(final T item) {
        final T canonical = registered.get(item);
        return Objects.firstNonNull(canonical, item);
    }

    /** Returns the most recent immutable snapshot of all registered values. */
    public Iterable<T> registered() {
        return registeredList;
    }
}
| apache-2.0 |
rLadia-demo/AttacknidPatch | decompiled_src/Procyon/org/anddev/andengine/entity/shape/modifier/RotationModifier.java | 1535 | package org.anddev.andengine.entity.shape.modifier;
import org.anddev.andengine.util.modifier.ease.*;
import org.anddev.andengine.util.modifier.*;
import org.anddev.andengine.entity.shape.*;
// Decompiled AndEngine modifier that animates a shape's rotation over a time
// span by delegating to SingleValueSpanShapeModifier.
// NOTE(review): parameter names (n, n2, n3) are decompiler artifacts; by the
// AndEngine API convention the arguments are presumably
// (duration, fromRotation, toRotation) — TODO confirm against original source.
public class RotationModifier extends SingleValueSpanShapeModifier
{
// Uses no listener and the default ease function.
public RotationModifier(final float n, final float n2, final float n3) {
this(n, n2, n3, null, IEaseFunction.DEFAULT);
}
// Uses the given listener and the default ease function.
public RotationModifier(final float n, final float n2, final float n3, final IShapeModifierListener shapeModifierListener) {
super(n, n2, n3, shapeModifierListener, IEaseFunction.DEFAULT);
}
// Fully parameterized constructor: listener plus explicit ease function.
public RotationModifier(final float n, final float n2, final float n3, final IShapeModifierListener shapeModifierListener, final IEaseFunction easeFunction) {
super(n, n2, n3, shapeModifierListener, easeFunction);
}
// Uses no listener but an explicit ease function.
public RotationModifier(final float n, final float n2, final float n3, final IEaseFunction easeFunction) {
this(n, n2, n3, null, easeFunction);
}
// Copy constructor used by clone().
protected RotationModifier(final RotationModifier rotationModifier) {
super(rotationModifier);
}
@Override
public RotationModifier clone() {
return new RotationModifier(this);
}
// Applies the initial rotation value to the shape when the modifier starts.
@Override
protected void onSetInitialValue(final IShape shape, final float rotation) {
shape.setRotation(rotation);
}
// Applies the interpolated rotation value on each update; the unused float
// parameter is presumably the elapsed/normalized time — TODO confirm.
@Override
protected void onSetValue(final IShape shape, final float n, final float rotation) {
shape.setRotation(rotation);
}
}
| apache-2.0 |
finaorepo/ica-plugin | src/main/java/de/jkitberatung/ica/wsh/MouseButton.java | 333 | package de.jkitberatung.ica.wsh ;
import com4j.*;
/**
* Mouse buttons
*/
public enum MouseButton implements ComEnum {
    /** Left mouse button (COM value 1). */
    MouseButtonLeft(1),
    /** Right mouse button (COM value 2). */
    MouseButtonRight(2),
    /** Middle mouse button (COM value 4). */
    MouseButtonMiddle(4),
    ;

    /** Raw COM enumeration value backing this constant. */
    private final int value;

    MouseButton(final int value) {
        this.value = value;
    }

    /** Returns the native COM value for this button. */
    public int comEnumValue() {
        return value;
    }
}
| apache-2.0 |
bptlab/processeditor | src/com/inubit/research/layouter/gridLayouter/FlowObjectWrapper.java | 8987 | /**
*
* Process Editor
*
* (C) 2009, 2010 inubit AG
* (C) 2014 the authors
*
*/
package com.inubit.research.layouter.gridLayouter;
import java.awt.Dimension;
import java.awt.Point;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import com.inubit.research.layouter.LayoutHelper;
import com.inubit.research.layouter.interfaces.AbstractModelAdapter;
import com.inubit.research.layouter.interfaces.BPMNNodeInterface;
import com.inubit.research.layouter.interfaces.EdgeInterface;
import com.inubit.research.layouter.interfaces.NodeInterface;
import com.inubit.research.layouter.preprocessor.DummyEdge;
/**
 * Wraps a BPMN flow object and provides additional information that is used
 * by the "Grid Layout" algorithm.
 * @author ff
 *
 */
public class FlowObjectWrapper{
// Map which holds all singeltons
private static HashMap<NodeInterface, FlowObjectWrapper> f_wrappedObjects = new HashMap<NodeInterface, FlowObjectWrapper>();
private ArrayList<FlowObjectWrapper> f_predecessors = new ArrayList<FlowObjectWrapper>();
private ArrayList<FlowObjectWrapper> f_successors = new ArrayList<FlowObjectWrapper>();
private ArrayList<EdgeInterface> f_predecessorsEdges = new ArrayList<EdgeInterface>();
private ArrayList<EdgeInterface> f_successorsEdges = new ArrayList<EdgeInterface>();
private NodeInterface f_node;
private AbstractModelAdapter f_model;
private int moveMode = 0; //0 = do not move; +1 = move right; -1 = move left;
private Point f_Pos;
// needed if a split has connections with 2 join, so
//only one of them (the first) gets placed into the same row
private boolean f_joinAlreadyPlaced = false;
private Point f_recommendedPosDelta = null;
private FlowObjectWrapper f_recommendedPositionParent = null;
private boolean f_needToAddRow = true;
private static int spacingX = 20;
private static int spacingY = 10;
private int f_gridNumber = 0;
private Comparator<Object> f_successorSorter = new YPositionComparator();
/**
* Private constructor: use {@link #getFlowObjectWrapper(NodeInterface, AbstractModelAdapter)}
* instead. Constructing wrappers directly would lead to infinite recursion,
* because {@link #buildLinks()} wraps neighbor nodes while this node is still
* being wrapped. For that reason buildLinks() is deliberately NOT invoked
* here; the factory calls it only after registering the new wrapper.
*/
private FlowObjectWrapper(NodeInterface node, AbstractModelAdapter model) {
f_node = node;
f_model = model;
}
/**
* Fills the predecessor/successor node and edge lists from the model's edges.
* Successors are inserted in the order given by {@link #getIndex}, keeping the
* node list and the edge list index-aligned; predecessors are simply appended.
* Finally, successors reached via {@link DummyEdge}s (inserted by the
* preprocessor) are moved to the back of both lists.
*/
private void buildLinks() {
//building predecessor and successor lists
for(EdgeInterface edge : f_model.getEdges()) {
if(edge.getSource().equals(f_node)) {
FlowObjectWrapper _w = getFlowObjectWrapper((NodeInterface)edge.getTarget(),f_model);
int _idx = getIndex(f_successors,_w);
f_successors.add(_idx,_w);
f_successorsEdges.add(_idx,edge);
}else if(edge.getTarget().equals(f_node)) {
f_predecessors.add(getFlowObjectWrapper((NodeInterface) edge.getSource(),f_model));
f_predecessorsEdges.add(edge);
}
}
//moving nodes that are connected through dummy edges to the back
// Iterate backwards so remove() does not disturb indices not yet visited.
for(int i = f_successorsEdges.size()-1;i>=0;i--) {
if(f_successorsEdges.get(i) instanceof DummyEdge) {
EdgeInterface e = f_successorsEdges.get(i);
FlowObjectWrapper n = f_successors.get(i);
f_successorsEdges.remove(i);
f_successors.remove(i);
f_successorsEdges.add(e);
f_successors.add(n);
}
}
}
/**
 * Determines the position at which {@code w} should be inserted into
 * {@code list} so that the list stays ordered with respect to
 * {@code f_successorSorter}: the index of the first element that does not
 * compare strictly less than {@code w} (or the list size if none).
 */
private int getIndex(ArrayList<FlowObjectWrapper> list, FlowObjectWrapper w) {
    int insertAt = 0;
    for (FlowObjectWrapper existing : list) {
        if (f_successorSorter.compare(w, existing) <= 0) {
            break;
        }
        insertAt++;
    }
    return insertAt;
}
// True if placing this node requires creating a new row in the layout grid
// (set via recommendPosition(..)).
public boolean getNeedToAddRow() {
return f_needToAddRow;
}
// Returns the live list of predecessor wrappers (not a defensive copy).
public ArrayList<FlowObjectWrapper> getPredecessors() {
return f_predecessors;
}
// Returns the live list of incoming edges, index-aligned with getPredecessors().
public ArrayList<EdgeInterface> getPredecessorEdges() {
return f_predecessorsEdges;
}
// Number of predecessors relevant for grid placement: total minus data
// objects and minus predecessors lying in other pools/grids.
public int getPredecessorsSizeInGrid() {
return f_predecessors.size()-countDataObjects(f_predecessors)-countObjectsInOtherPools(f_predecessors);
}
// Returns the live list of successor wrappers (not a defensive copy).
public ArrayList<FlowObjectWrapper> getSuccessors() {
return f_successors;
}
// Returns the live list of outgoing edges, index-aligned with getSuccessors().
public ArrayList<EdgeInterface> getSuccessorEdges() {
return f_successorsEdges;
}
// Returns the underlying model node wrapped by this object.
public NodeInterface getWrappedObject() {
return f_node;
}
// True once recommendPosition(..) has stored a position delta for this node.
public boolean hasRecommendedPosition() {
return f_recommendedPosDelta != null;
}
// Convenience overload: recommends a position relative to parent and
// requests a new row (createRow = true).
public void recommendPosition(int x, int y,FlowObjectWrapper parent) {
recommendPosition(x, y, true,parent);
}
/**
* Stores a recommended position for this node, expressed as a delta relative
* to the given parent wrapper's position (resolved lazily in
* {@link #getRecommendedPosition()}).
* @param dx horizontal offset from the parent's position
* @param dy vertical offset from the parent's position
* @param createRow whether placing this node should allocate a new grid row
* @param parent the wrapper whose position the delta is relative to
*/
public void recommendPosition(int dx, int dy, boolean createRow,FlowObjectWrapper parent) {
f_recommendedPosDelta = new Point(dx,dy);
f_recommendedPositionParent = parent;
f_needToAddRow = createRow;
}
// Stores a defensive clone of the given position, so later changes by the
// caller do not affect this wrapper.
public void setPosition(Point pos) {
f_Pos = (Point) pos.clone();
}
/**
* sets the amount of pixels which are left free
* on the sides of a node. (This value gets added to their width while layouting)
* Applies globally to all wrappers, since the spacing is static.
* @param value horizontal spacing in pixels
*/
public static void setSpacingX(int value) {
spacingX = value;
}
/**
* sets the amount of pixels which are left free
* on the sides of a node. (This value gets added to their height while layouting)
* Applies globally to all wrappers, since the spacing is static.
* @param value vertical spacing in pixels
*/
public static void setSpacingY(int value) {
spacingY = value;
}
/**
 * Tells whether this wrapper wraps exactly the given model node
 * (delegates to the node's equals()).
 */
public boolean wraps(NodeInterface f) {
    return f_node.equals(f);
}
/**
* Singleton factory: returns the existing wrapper for {@code obj} or creates,
* registers and links a new one.
* @param obj the model node to wrap
* @param model the model the node belongs to
* @return the unique wrapper for {@code obj}
*/
public static FlowObjectWrapper getFlowObjectWrapper(NodeInterface obj,AbstractModelAdapter model) {
FlowObjectWrapper _result = f_wrappedObjects.get(obj);
if(_result == null) {
_result = new FlowObjectWrapper(obj,model);
// Register BEFORE buildLinks(): linking wraps neighbor nodes, which may
// recursively look this node up — the map entry breaks the recursion.
f_wrappedObjects.put(obj, _result);
_result.buildLinks();
}
return _result;
}
/** Empties the static wrapper cache used by {@link #getFlowObjectWrapper}. */
public static void clear() {
    f_wrappedObjects.clear();
}
/**
 * Determines whether this node is a split: more than one successor that
 * actually counts for this grid (data objects, nodes in other pools and
 * dummy edges are excluded).
 * @return {@code true} if more than one relevant successor exists
 */
public boolean isSplit() {
    // a dead if-branch that contained only commented-out code was removed
    return f_successors.size()-countDataObjects(f_successors)
        -countObjectsInOtherPools(f_successors)-countDummyEdges(f_successorsEdges) > 1;
}
/**
 * Counts how many of the given edges are {@code DummyEdge} instances.
 * @param edges the edges to inspect
 * @return the number of dummy edges in the list
 */
private int countDummyEdges(ArrayList<EdgeInterface> edges) {
    int dummies = 0;
    for (int i = 0; i < edges.size(); i++) {
        if (edges.get(i) instanceof DummyEdge) {
            dummies++;
        }
    }
    return dummies;
}
/** @return {@code true} if more than one predecessor occupies this grid (see {@link #getPredecessorsSizeInGrid()}) */
public boolean isJoin() {
    return getPredecessorsSizeInGrid() > 1;
}
/**
 * Counts the non-data-object wrappers in the given list whose grid number
 * differs from this wrapper's grid (i.e. nodes lying in other pools).
 * @param wrappers the wrappers to inspect
 * @return the number of non-data objects in another grid/pool
 */
private int countObjectsInOtherPools(ArrayList<FlowObjectWrapper> wrappers) {
    int count = 0;
    for (FlowObjectWrapper f : wrappers) {
        // the formerly unbraced nested ifs are merged into one condition
        if (!f.isDataObject() && f.getGrid() != this.getGrid()) {
            count++;
        }
    }
    return count;
}
/**
 * Counts how many wrappers in the given list represent data objects
 * (as classified by {@code LayoutHelper.isDataObject}).
 * @param list the wrappers to inspect
 * @return the number of data-object wrappers
 */
private int countDataObjects(ArrayList<FlowObjectWrapper> list) {
    int dataObjects = 0;
    for (int i = 0; i < list.size(); i++) {
        if (LayoutHelper.isDataObject(list.get(i))) {
            dataObjects++;
        }
    }
    return dataObjects;
}
/**
 * Returns a defensive copy of this node's position, or a fresh origin
 * point (0,0) if no position has been set yet.
 * @return the node position; never {@code null}
 */
public Point getPosition() {
    if(f_Pos == null) {
        return new Point();
    }
    return (Point) f_Pos.clone();
}
/**
 * Resolves the recommended absolute position: the parent's current position
 * translated by the recorded delta.
 * Call only after a position has been recommended (see
 * {@link #hasRecommendedPosition()}); otherwise the fields are null and
 * this method throws a NullPointerException.
 * @return the absolute recommended position
 */
public Point getRecommendedPosition() {
    Point _result = f_recommendedPositionParent.getPosition();
    _result.translate(f_recommendedPosDelta.x, f_recommendedPosDelta.y);
    return _result;
}
/**
 * Returns the size used for layouting: the wrapped node's size enlarged by
 * the static spacing and the node's own padding in each dimension.
 * @return the effective layout size of this node
 */
public Dimension getSize() {
    return new Dimension(
            (int)f_node.getSize().getWidth()+spacingX+f_node.getPaddingX(),
            (int)f_node.getSize().getHeight()+spacingY+f_node.getPaddingY());
}
/**
 * Marks whether the join this node represents has already been placed
 * by the layouter.
 * @param joinAlreadyPlaced the joinAlreadyPlaced to set
 */
public void setJoinAlreadyPlaced(boolean joinAlreadyPlaced) {
    this.f_joinAlreadyPlaced = joinAlreadyPlaced;
}
/**
 * @return the joinAlreadyPlaced flag set via {@link #setJoinAlreadyPlaced(boolean)}
 */
public boolean isJoinAlreadyPlaced() {
    return f_joinAlreadyPlaced;
}
/** Debug representation: the wrapped node's string form inside a "FOW (...)" marker. */
@Override
public String toString() {
    return String.format("FOW (%s)", f_node.toString());
}
/**
 * Assigns this node to a grid (pool) by number.
 * @param i the grid number
 */
public void setGrid(int i) {
    f_gridNumber = i;
}
/** @return the number of the grid (pool) this node belongs to */
public int getGrid() {
    return f_gridNumber;
}
/** @param moveMode the move mode for this node (semantics defined by the layouter — see callers) */
public void setMoveMode(int moveMode) {
    this.moveMode = moveMode;
}
/** @return the current move mode set via {@link #setMoveMode(int)} */
public int getMoveMode() {
    return moveMode;
}
/**
 * Tells whether the wrapped node is a BPMN data object.
 * @return {@code true} if the wrapped node is a {@code BPMNNodeInterface}
 *         that reports itself as a data object
 */
public boolean isDataObject() {
    NodeInterface wrapped = getWrappedObject();
    return wrapped instanceof BPMNNodeInterface
            && ((BPMNNodeInterface) wrapped).isDataObject();
}
/**
 * returns the width offset which gets added to nodes while layouting
 * @return the horizontal spacing in pixels
 */
public static int getSpacingX() {
    return spacingX;
}
/**
 * returns the height offset which gets added to nodes while layouting
 * @return the vertical spacing in pixels
 */
public static int getSpacingY() {
    return spacingY;
}
}
| apache-2.0 |
GabiAxel/spring-social-google | src/main/java/org/springframework/social/google/api/calendar/impl/NotificationMethodDeserializer.java | 1109 | /**
* Copyright 2011-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.google.api.calendar.impl;
import org.springframework.social.google.api.calendar.NotificationMethod;
import org.springframework.social.google.api.impl.ApiEnumDeserializer;
/**
 * {@link ApiEnumDeserializer} for {@link NotificationMethod}.
 *
 * @author Martin Wink
 */
public class NotificationMethodDeserializer extends ApiEnumDeserializer<NotificationMethod> {

    /** Binds the generic enum deserializer to the {@link NotificationMethod} type. */
    public NotificationMethodDeserializer() {
        super(NotificationMethod.class);
    }
}
| apache-2.0 |
kbachl/brix-cms | brix-plugin-menu/src/main/java/org/brixcms/plugin/menu/editor/cell/SwitcherCellPanel.java | 4171 | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.brixcms.plugin.menu.editor.cell;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.extensions.ajax.markup.html.AjaxEditableMultiLineLabel;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.brixcms.plugin.menu.Menu;
import org.brixcms.plugin.menu.editor.ReferenceColumnPanel;
import org.brixcms.plugin.site.picker.reference.ReferenceEditorConfiguration;
import org.brixcms.web.reference.Reference;
import java.util.Arrays;
/**
 * A Panel that is used to change the type of a MenuType to any of the possible options.
 * A drop-down selects the {@code Menu.ChildEntry.MenuType}; the matching edit
 * panel is swapped in via Ajax whenever the selection changes.
 * <p>
 * Created by IntelliJ IDEA. User: korbinianbachl Date: 08.09.2010 Time: 20:55:51
 */
public abstract class SwitcherCellPanel extends Panel {
    IModel<Menu.ChildEntry.MenuType> typeModel;
    IModel<Reference> referenceModel;
    IModel<String> labelOrCodeModel;
    ReferenceEditorConfiguration conf;
    // re-rendered via Ajax whenever the selected type changes
    WebMarkupContainer container;

    /**
     * @param id ComponentID
     * @param typeModel Model of the MenuType
     * @param referenceModel Model of the Reference (backwards compatible)
     * @param labelOrCodeModel Model of the "Label" or "Code" String
     * @param conf ReferenceEditorConfiguration for ReferenceEditor
     */
    public SwitcherCellPanel(String id,
                             IModel<Menu.ChildEntry.MenuType> typeModel,
                             IModel<Reference> referenceModel,
                             IModel<String> labelOrCodeModel,
                             ReferenceEditorConfiguration conf) {
        super(id);
        this.typeModel = typeModel;
        this.referenceModel = referenceModel;
        this.labelOrCodeModel = labelOrCodeModel;
        this.conf = conf;

        container = new WebMarkupContainer("container");
        container.setOutputMarkupId(true);
        container.add(getEditPanel());
        add(container);

        // typed components — these were raw Form/DropDownChoice before
        Form<Void> form = new Form<Void>("form");
        DropDownChoice<Menu.ChildEntry.MenuType> choice =
                new DropDownChoice<Menu.ChildEntry.MenuType>("typeChoice", typeModel,
                        Arrays.asList(Menu.ChildEntry.MenuType.values()));
        choice.add(new AjaxFormComponentUpdatingBehavior("change") {
            @Override
            protected void onUpdate(AjaxRequestTarget target) {
                // swap the edit panel to match the newly selected type
                container.addOrReplace(getEditPanel());
                target.add(container);
            }
        });
        form.add(choice);
        add(form);
    }

    /**
     * derives the right editPanel for the currently selected type
     *
     * @return a Component that is to be attached
     */
    private Component getEditPanel() {
        String id = "editPanel";
        Component returnComponent;
        if (typeModel.getObject() == Menu.ChildEntry.MenuType.REFERENCE) {
            returnComponent = new ReferenceColumnPanel(id, referenceModel) {
                @Override
                public ReferenceEditorConfiguration getConfiguration() {
                    return conf;
                }

                @Override
                protected boolean isEditing() {
                    return SwitcherCellPanel.this.isEditing();
                }
            };
        } else {
            // LABEL / CODE types share a plain editable multi-line label
            returnComponent = new AjaxEditableMultiLineLabel<String>(id, labelOrCodeModel);
        }
        return returnComponent;
    }

    /** @return whether the surrounding editor is currently in edit mode */
    abstract boolean isEditing();
}
| apache-2.0 |
apache/geronimo-yoko | yoko-spec-corba/src/main/java/org/omg/CORBA/BAD_TYPECODE.java | 1335 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.omg.CORBA;
/**
 * The standard CORBA {@code BAD_TYPECODE} system exception.
 * Omitted arguments default to an empty reason, minor code 0 and
 * {@code CompletionStatus.COMPLETED_NO}.
 */
public final class BAD_TYPECODE extends org.omg.CORBA.SystemException {
    public BAD_TYPECODE() {
        this("", 0, CompletionStatus.COMPLETED_NO);
    }

    public BAD_TYPECODE(int minor, CompletionStatus completed) {
        this("", minor, completed);
    }

    public BAD_TYPECODE(String reason) {
        this(reason, 0, CompletionStatus.COMPLETED_NO);
    }

    /** All other constructors delegate here. */
    public BAD_TYPECODE(String reason, int minor, CompletionStatus completed) {
        super(reason, minor, completed);
    }
}
| apache-2.0 |
HebaKhaled/bposs | src/com.mentor.nucleus.bp.core.test/src/com/mentor/nucleus/bp/core/test/CoreTest.java | 3190 | //=====================================================================
//
//File: $RCSfile: CoreTest.java,v $
//Version: $Revision: 1.21 $
//Modified: $Date: 2013/03/14 02:37:43 $
//
//(c) Copyright 2004-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
package com.mentor.nucleus.bp.core.test;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.swt.widgets.Display;
import com.mentor.nucleus.bp.core.common.PersistableModelComponent;
import com.mentor.nucleus.bp.core.util.WorkspaceUtil;
import com.mentor.nucleus.bp.test.common.BaseTest;
import com.mentor.nucleus.bp.test.common.TestingUtilities;
import com.mentor.nucleus.bp.utilities.ui.ProjectUtilities;
/**
 * Base class for nucleus core tests: switches to the core perspective,
 * creates the test project once per JVM and loads the model component
 * under test.
 */
public class CoreTest extends BaseTest
{
    // one-time project setup guard, shared by all instances/subclasses
    protected static boolean initialized = false;
    public CoreTest(String projectName, String name) {
        super(projectName, name);
    }
    public CoreTest() {
        super(null, null);
    }
    // Disables workspace auto-building so the tests control build timing.
    protected void setUp() throws Exception {
        super.setUp();
        WorkspaceUtil.setAutobuilding(false);
    }
    /** Convenience overload: initializes with {@code loadDomainOnly = true}. */
    protected void initialize(String name) throws Exception {
        initialize(name, true);
    }
    /**
     * Prepares the workspace and (on first call only) recreates the named
     * project and loads its model component.
     * @param name project name
     * @param loadDomainOnly whether only the domain part is loaded
     * @return the loaded component, or {@code null} on every call after the
     *         first (the {@code initialized} guard skips the whole block)
     */
    protected PersistableModelComponent initialize(String name, boolean loadDomainOnly) throws Exception {
        PersistableModelComponent pmc = null;
        switchPerspective("com.mentor.nucleus.bp.core.perspective");
        m_wp.activate(m_bp_view);
        ProjectUtilities.allowJobCompletion();
        if(!initialized){
            project = ResourcesPlugin.getWorkspace().getRoot().getProject(name);
            if (project.exists()) {
                // start from a clean slate: delete any leftover project
                TestingUtilities.deleteProject(name);
                ProjectUtilities.allowJobCompletion();
                // drain pending SWT UI events before continuing
                while ( Display.getCurrent().readAndDispatch() ) ;
            }
            if(!project.exists()) {
                try {
                    project = TestingUtilities.createProject(name);
                } catch (CoreException e) {
                    fail(e.getMessage());
                }
                // get the SystemModel_c instance related to the
                // newly created project
                m_sys = getSystemModel(name);
            }
            pmc = ensureAvailableAndLoaded(name, loadDomainOnly, true);
            initialized = true;
        }
        m_bp_tree.expandAll();
        ProjectUtilities.allowJobCompletion();
        // drain pending SWT UI events before returning to the test
        while ( Display.getCurrent().readAndDispatch() ) ;
        return pmc;
    }
}
| apache-2.0 |
Fabryprog/camel | core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithRouteIdTest.java | 2148 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.interceptor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.reifier.RouteReifier;
import org.junit.Test;
public class AdviceWithRouteIdTest extends AdviceWithTest {
@Test
public void testAdvised() throws Exception {
    // Advise the route with id "myRoute": intercept sends to mock:foo,
    // skip the original endpoint and detour to log:foo + mock:advised.
    RouteReifier.adviceWith(context.getRouteDefinition("myRoute"), context, new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            interceptSendToEndpoint("mock:foo")
                .skipSendToOriginalEndpoint()
                .to("log:foo")
                .to("mock:advised");
        }
    });

    // original endpoint must be bypassed, the detour endpoints must be hit
    getMockEndpoint("mock:foo").expectedMessageCount(0);
    getMockEndpoint("mock:advised").expectedMessageCount(1);
    getMockEndpoint("mock:result").expectedMessageCount(1);

    template.sendBody("direct:start", "Hello World");

    assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
    return new RouteBuilder() {
        @Override
        public void configure() throws Exception {
            // "myRoute" is the id targeted by the advice in testAdvised()
            from("direct:start").routeId("myRoute").to("mock:foo").to("mock:result");

            from("direct:bar").to("mock:bar");
        }
    };
}
} | apache-2.0 |
USEF-Foundation/ri.usef.energy | usef-build/usef-workflow/usef-brp/src/main/java/energy/usef/brp/controller/FlexOfferRevocationController.java | 1703 | /*
* Copyright 2015-2016 USEF Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package energy.usef.brp.controller;
import energy.usef.brp.workflow.plan.flexoffer.revoke.FlexOfferRevocationEvent;
import energy.usef.core.controller.BaseIncomingMessageController;
import energy.usef.core.data.xml.bean.message.FlexOfferRevocation;
import energy.usef.core.exception.BusinessException;
import energy.usef.core.model.Message;
import javax.enterprise.event.Event;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Controller class for the {@link FlexOfferRevocation} messages.
*/
public class FlexOfferRevocationController extends BaseIncomingMessageController<FlexOfferRevocation> {
    private static final Logger LOGGER = LoggerFactory.getLogger(FlexOfferRevocationController.class);

    // CDI event channel; the actual revocation handling is done by observers
    @Inject
    private Event<FlexOfferRevocationEvent> eventManager;

    /**
     * {@inheritDoc}
     *
     * Fires a {@link FlexOfferRevocationEvent} carrying the incoming message;
     * the persisted {@code savedMessage} parameter is not used here.
     */
    public void action(FlexOfferRevocation message, Message savedMessage) throws BusinessException {
        LOGGER.info("FlexOfferRevocation received.");
        eventManager.fire(new FlexOfferRevocationEvent(message));
    }
}
| apache-2.0 |
Youbetme/youbetme-sdk | Samples/Basic/AndroidSample/AndroidSample/src/com/youbetme/androidsample/tasks/PerformCustomerSearchTask.java | 703 | package com.youbetme.androidsample.tasks;
import com.youbetme.androidsample.proxies.Proxy;
public class PerformCustomerSearchTask extends SuperTask {

    /** Creates a task bound to the given authentication token. */
    public PerformCustomerSearchTask(String token) {
        super(token);
    }

    /** Performs one customer search per request string, returning the raw responses in order. */
    protected String[] doInBackground(String... request) {
        final String[] responses = new String[request.length];
        final Proxy proxy = new Proxy();
        for (int i = 0; i < responses.length; i++) {
            responses[i] = proxy.PerformCustomerSearch(this.token, request[i]);
        }
        return responses;
    }

    /** Intentionally empty — no progress reporting. */
    protected void onProgressUpdate(Integer... progress) {
    }

    /** Intentionally empty — no post-processing. */
    protected void onPostExecute(String result) {
    }
}
| apache-2.0 |
bretthshelley/Maven-IIB9-Plug-In | src/test/java/ch/sbb/maven/plugins/iib/mojos/PrepareBarBuildWorkspaceMojoTest.java | 1690 | package ch.sbb.maven.plugins.iib.mojos;
import static ch.sbb.maven.plugins.iib.mojos.PrepareBarBuildWorkspaceMojo.verifyIibIncludeTypes;
import junit.framework.Assert;
import org.junit.Test;
public class PrepareBarBuildWorkspaceMojoTest {

    /** Asserts that verifyIibIncludeTypes maps {@code types} to {@code expected}. */
    private static void assertVerified(String expected, String types) {
        Assert.assertEquals(expected, verifyIibIncludeTypes(types));
    }

    @Test
    public void verifyIncludeTypes()
    {
        // "jar" is always stripped; null/blank input falls back to "zip"
        assertVerified("zip", "zip,jar");
        assertVerified("zip,war", "jar,zip,war");
        assertVerified("zip", null);
        assertVerified("zip", "");
        assertVerified("zip", "JAR,,zip, ,");
        assertVerified("ear,zip", "ear,,JAR,,zip, ,");
    }

    @Test
    public void checkUnpackIIbDependencies()
    {
        // / verify that the value is false
        PrepareBarBuildWorkspaceMojo mojo = new PrepareBarBuildWorkspaceMojo();
        Boolean expected = null;
        Boolean actual = mojo.unpackIibDependenciesIntoWorkspace;
        String message = "The unpackIibDependenciesIntoWorkspace should be by default null, but was " + actual;
        Assert.assertEquals(message, expected, actual);
    }
}
| apache-2.0 |
stefan-ziel/Activiti | modules/activiti-admin/src/main/java/com/activiti/repository/UserRepository.java | 823 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.activiti.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import com.activiti.domain.User;
/**
 * Spring Data JPA repository for the User entity.
 * Inherits the standard CRUD and paging operations from
 * {@link JpaRepository}; the entity id type is {@code String}.
 */
public interface UserRepository extends JpaRepository<User, String> {
}
| apache-2.0 |
sergecodd/FireFox-OS | B2G/gecko/mobile/android/base/MenuItemDefault.java | 3178 | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.View;
import android.widget.AbsListView;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
/**
 * Default list-row layout for a Gecko menu item: optional icon, title,
 * checkbox (for checkable leaf items) and a submenu indicator.
 */
public class MenuItemDefault extends LinearLayout
                             implements GeckoMenuItem.Layout {
    private static final String LOGTAG = "GeckoMenuItemDefault";

    // Color filter applied to the icon/submenu images while the item is
    // disabled (was a magic number duplicated in setEnabled()).
    private static final int DISABLED_COLOR_FILTER = 0xFF999999;

    private final ImageView mIcon;
    private final TextView mTitle;
    private final CheckBox mCheck;
    private final ImageView mMore;

    // flags default to false; the former explicit ctor assignments were redundant
    private boolean mCheckable;
    private boolean mChecked;
    private boolean mHasSubMenu;

    public MenuItemDefault(Context context, AttributeSet attrs) {
        super(context, attrs);

        Resources res = context.getResources();
        setLayoutParams(new AbsListView.LayoutParams((int) (res.getDimension(R.dimen.menu_item_row_width)),
                                                     (int) (res.getDimension(R.dimen.menu_item_row_height))));

        inflate(context, R.layout.menu_item, this);

        mIcon = (ImageView) findViewById(R.id.icon);
        mTitle = (TextView) findViewById(R.id.title);
        mCheck = (CheckBox) findViewById(R.id.check);
        mMore = (ImageView) findViewById(R.id.more);
    }

    @Override
    public View getLayout() {
        return this;
    }

    @Override
    public void setIcon(Drawable icon) {
        if (icon != null) {
            mIcon.setImageDrawable(icon);
            mIcon.setVisibility(VISIBLE);
        } else {
            mIcon.setVisibility(GONE);
        }
    }

    @Override
    public void setIcon(int icon) {
        // resource id 0 means "no icon"
        if (icon != 0) {
            mIcon.setImageResource(icon);
            mIcon.setVisibility(VISIBLE);
        } else {
            mIcon.setVisibility(GONE);
        }
    }

    @Override
    public void setTitle(CharSequence title) {
        mTitle.setText(title);
    }

    @Override
    public void setEnabled(boolean enabled) {
        super.setEnabled(enabled);
        mTitle.setEnabled(enabled);
        mCheck.setEnabled(enabled);
        // gray out the images while disabled; 0 clears the filter
        mIcon.setColorFilter(enabled ? 0 : DISABLED_COLOR_FILTER);
        mMore.setColorFilter(enabled ? 0 : DISABLED_COLOR_FILTER);
    }

    @Override
    public void setCheckable(boolean checkable) {
        mCheckable = checkable;
        // the checkbox is only shown for checkable items without a submenu
        mCheck.setVisibility(mCheckable && !mHasSubMenu ? VISIBLE : GONE);
    }

    @Override
    public void setChecked(boolean checked) {
        mChecked = checked;
        mCheck.setChecked(mChecked);
    }

    @Override
    public void setSubMenuIndicator(boolean hasSubMenu) {
        mHasSubMenu = hasSubMenu;
        // a submenu arrow replaces the checkbox
        mMore.setVisibility(mHasSubMenu ? VISIBLE : GONE);
        mCheck.setVisibility(mCheckable && !mHasSubMenu ? VISIBLE : GONE);
    }
}
| apache-2.0 |
pfirmstone/river-internet | qa/src/org/apache/river/test/spec/url/httpmd/handler/OpenConnection.java | 16177 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.test.spec.url.httpmd.handler;
import java.util.logging.Level;
// org.apache.river.qa
import org.apache.river.qa.harness.QATestEnvironment;
import org.apache.river.qa.harness.QAConfig;
import org.apache.river.qa.harness.Test;
// org.apache.river.qa.harness
import org.apache.river.qa.harness.QAConfig; // base class for QAConfig
import org.apache.river.qa.harness.TestException;
// java.util
import java.util.logging.Level;
// davis packages
import net.jini.url.httpmd.Handler;
// java.net
import java.net.URL;
import java.net.URLConnection;
import java.net.NetPermission;
// java.io
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.BufferedInputStream;
import java.io.IOException;
// java.security
import java.security.MessageDigest;
import java.security.DigestInputStream;
// java.util
import java.util.Vector;
// TestHandler extends url.httpmd.Handler class
import org.apache.river.test.spec.url.httpmd.util.TestHandler;
/**
* <pre>
*
* Purpose:
* This test verifies the behavior of {@link Handler#openConnection(URL)}
* method.
* {@link Handler#openConnection(URL)} method should create a HTTPMD URL
* connection for an HTTPMD URL object or throw IOException if an I/O error
* occurs while opening the connection.
*
* Test Cases:
* This test tries to create a HTTPMD URL connection for an HTTPMD URL object.
* The cases:
* - OpenConnectionValidURL
* - URLConnection connect = openConnection(URL),
* - it's verified that URLConnection object is created;
* - InputStream in = connect.getInputStream(),
* - read data from InputStream and compare them with
* data retrived from the file corresponding to the HTTPMD URL object;
* - OpenConnectionNonExistFile
* - URLConnection connect = openConnection(URL),
* where URL references to non-existent file,
* - it's verified that URLConnection object is created;
* - InputStream in = connect.getInputStream(),
* - try to read data from InputStream;
* - it's expected that IOException is thrown while reading data;
* Notes: At least on SPARC/Solaris 8:
* 1. IOException isn't thrown while opening a HTTPMD URL connection,
* 2. Instead IOException is thrown while trying to read from the open
* HTTPMD URL connection.
*
* Infrastructure:
* - TestHandler
* extends {@link Handler} class
* - OpenConnection
* performs actions
*
* Actions:
* Jini Harness does the following before running the test:
* - setting java.protocol.handler.pkgs property to
* net.jini.url to enable HTTPMD urls creating
* ({@link Handler} is used as HTTPMD Protocol handler),
* - launching HTTP Server.
* Test performs the following steps:
* - setting the ability to specify a TestHandler stream handler
* when constructing a HTTPMD URL object,
* - getting test parameters,
* - creating HTTPMD URL object,
* - creating TestHandler object,
* - implementing each case.
*
* </pre>
*/
public class OpenConnection extends QATestEnvironment implements Test {
QAConfig config;
final static int BUFSIZE = 8;
/** HTTPMD protocol handler */
protected TestHandler handler;
/**
* HTTP Server source directory.
* The value is specified by testClassServer.dir test property.
*/
protected String classServerSrcDir;
/**
* HTTP Server port number.
* The value is specified by testClassServer.port test property.
*/
protected int classServerPort;
/**
* HTTPMD URL object.
* It's created from the String representation specified by
* OpenConnection.Url test property.
*/
protected URL httpmdURL;
/** Contents of file specified by HTTPMD URL object */
protected Vector realFileContents = new Vector();
/** Expected contents of file specified by HTTPMD URL object */
protected Vector expectedFileContents = new Vector();
/**
* Expected Exception as a String
* The value is specified by OpenConnection.ExpResult test property.
*/
protected String expClassStr;
/** Expected result */
protected Object expectedResult;
/**
* <pre>
* This method performs all preparations.
* These preparations include the following:
* - setting the ability to specify a TestHandler stream handler
* when constructing a HTTPMD URL object,
* - getting test parameters,
* - creating HTTPMD URL object,
* - creating TestHandler object.
* Test parameters:
* OpenConnection.Url - String representation of HTTPMD URL
* OpenConnection.ExpResult - expected result
* testClassServer.dir - HTTP Server source directory
* testClassServer.port - HTTP Server port number
* </pre>
*/
public Test construct(QAConfig config) throws Exception {
    super.construct(config);
    this.config = (QAConfig) config; // or this.config = getConfig();

    /*
     * Setting the ability to specify a TestHandler stream handler
     * when constructing a HTTPMD URL object
     */
    NetPermission np = new NetPermission("specifyStreamHandler");

    /*
     * Checking the ability to specify a TestHandler stream handler
     * when constructing a HTTPMD URL object.
     * Throws SecurityException when denied — or NullPointerException when
     * no SecurityManager is installed — aborting construct().
     */
    (System.getSecurityManager()).checkPermission(np);

    /* Instantiating TestHandler object */
    handler = new TestHandler();

    /* Getting test parameters */
    String u = config.getStringConfigVal("OpenConnection.Url", "");
    expClassStr = config.getStringConfigVal("OpenConnection.ExpResult",
            null);

    /* Getting the source directory of HTTP Server */
    classServerSrcDir = config.getStringConfigVal("testClassServer.dir",
            null);

    /* Getting the port number of HTTP Server */
    classServerPort = config.getIntConfigVal("testClassServer.port", 0);

    /* Creating URL object according to the specified spec */
    httpmdURL = new URL(u);

    if (expClassStr != null) {
        /* Exception is expected as result of openConnection() method */

        /*
         * Creating Class object associated with the class with the
         * given string name
         */
        expectedResult = Class.forName(expClassStr);
    } else {
        /* No Exception is expected as result of openConnection() */

        /* Correcting HTTPMD URL object (port + recomputed digest) */
        httpmdURL = correctURL(httpmdURL);

        /* Absolute filename: the URL path before the ";alg=digest" suffix */
        String absfn = httpmdURL.getFile().substring(0,
                httpmdURL.getFile().indexOf(";"));

        /* Obtaining the contents of the file into Vector of Bytes */
        getFileContents(classServerSrcDir + absfn);
        expectedResult = expectedFileContents;
    }
    return this;
}
/**
* <pre>
* Correcting the specified HTTPMD URL object.
* The new HTTPMD URL object features:
* port is equal to HTTP Server port,
* the message digest is corrected.
* </pre>
* @param u HTTPMD URL object
* @throws Exception if the specified HTTPMD URL object can't be corrected
* @return HTTPMD URL object with the corrected message digest
*/
protected URL correctURL(URL u) throws Exception {
    // The HTTPMD URL file part has the shape "path;algorithm=digest,comments";
    // the code below picks it apart around the ';', '=' and ',' separators.

    /*
     * Obtaining relative pathname of the file specified in the created
     * URL object
     */
    String relfileName = u.getFile().substring(0, u.getFile().indexOf(";"));

    /* Getting absolute pathname of the file */
    String absfileName = classServerSrcDir + relfileName;

    /* Obtaining the message digest algorithm from the created URL object */
    String urltail = u.getFile().substring(u.getFile().indexOf(";"),
            u.getFile().length());
    String alg = urltail.substring(urltail.indexOf(";") + 1,
            urltail.indexOf("="));

    /*
     * Computing message digest for the file specified in the created
     * URL object
     */
    String MD = computeFileMD(absfileName, alg);

    /* Obtaining comments from the created URL object */
    urltail = urltail.substring(urltail.indexOf("=") + 1, urltail.length());
    String comments = urltail.substring(urltail.indexOf(",") + 1,
            urltail.length());

    /*
     * Creating HTTPMD URL object with the corrected message digest and
     * HTTP Server port. TestHandler is used as protocol handler.
     */
    URL url = new URL(u.getProtocol(), u.getHost(), classServerPort,
            relfileName + ";" + alg + "=" + MD + "," + comments, handler);
    return url;
}
/**
* Computing the message digest as a String in hexadecimal format
* for the specified file and message digest algorithm.
*
* @param filename filename.
* @param algorithm message digest algorithm.
* @throws Exception if it's impossible to compute the message digest
* for the specified file according to the specified
* algorithm
* @return the message digest as a String in hexadecimal format
*/
public String computeFileMD(String filename, String algorithm)
        throws Exception {
    MessageDigest MDigest = MessageDigest.getInstance(algorithm);

    /* Computing the message digest while draining the file */
    DigestInputStream in = new DigestInputStream(
            new BufferedInputStream(new FileInputStream(filename), BUFSIZE),
            MDigest);
    try {
        in.on(true);
        byte[] buf = new byte[BUFSIZE];

        // each read() updates the digest as a side effect
        while (in.read(buf, 0, buf.length) >= 0) {
            // nothing to do with the bytes themselves
        }
        MDigest = in.getMessageDigest();
    } finally {
        // fix: the stream used to leak when read() threw an IOException
        in.close();
    }
    byte[] digest = MDigest.digest();

    /* The message digest as a String in hexadecimal format */
    return digestString(digest);
}
/**
 * Renders a message digest as a hexadecimal string, two lower-case
 * hex characters per input byte.
 *
 * @param digest a message digest as a byte[].
 * @return the message digest as a String in hexadecimal format.
 */
protected String digestString(byte[] digest) {
    StringBuilder hex = new StringBuilder(digest.length * 2);
    for (byte b : digest) {
        hex.append(Character.forDigit((b >> 4) & 0xf, 16));
        hex.append(Character.forDigit(b & 0xf, 16));
    }
    return hex.toString();
}
/**
* Obtaining the contents of the file into Vector of Bytes.
*
* @param filename file name
* @throws IOException if it's impossible to get contents of
* the specified file
*/
public void getFileContents(String filename) throws IOException {
    byte[] buf = new byte[BUFSIZE];
    FileInputStream fin = new FileInputStream(filename);
    try {
        while (true) {
            int n = fin.read(buf);

            if (n < 0) {
                break;
            }

            // accumulate into the expectedFileContents field, one Byte per byte
            for (int i = 0; i < n; i++) {
                expectedFileContents.add(new Byte(buf[i]));
            }
        }
    } finally {
        // fix: the stream was never closed, leaking a file handle per call
        fin.close();
    }
}
/**
 * This method performs all actions mentioned in class description:
 * verifies that the handler produces a URLConnection for the HTTPMD URL,
 * reads the content through the connection and compares either the data
 * or the raised exception with the expected result.
 */
public void run() throws Exception {
    logger.log(Level.FINE, "HTTPMD URL: " + httpmdURL);
    /* Checking that openConnection(url) returns URLConnection object */
    if (!(handler.openConnection(httpmdURL) instanceof URLConnection)) {
        throw new TestException(
                ""
                + " test failed: openConnection() returns"
                + " non-instance of URLConnection class");
    }
    try {
        /* Creating URLConnection object */
        URLConnection connect = handler.openConnection(httpmdURL);
        /* Getting contents from URLConnection object */
        // try-with-resources: the original leaked the input stream.
        try (InputStream in = connect.getInputStream()) {
            byte[] buf = new byte[BUFSIZE];
            int n;
            while ((n = in.read(buf)) >= 0) {
                for (int i = 0; i < n; i++) {
                    realFileContents.add(Byte.valueOf(buf[i]));
                }
            }
        }
        /* Compare got contents with expected one */
        compareResults(realFileContents);
    } catch (Exception e) {
        /* Compare got Exception with expected one */
        compareResults(e);
    }
}
/**
 * Checking if the Vector represented by expectedResult variable is
 * the same as the Vector specified by the Vector parameter.
 *
 * @param realres an Vector.
 * @throws TestException if an exception was expected instead of file
 *         contents, or if the two Vectors differ.
 */
public void compareResults(Vector realres) throws TestException {
    // A non-null expClassStr means an exception, not file contents,
    // was the expected outcome.
    if (expClassStr != null) {
        throw new TestException(
                "" + " test failed:"
                + " Expected result: " + expClassStr
                + " Returned result: FILE CONTENTS");
    }
    logger.log(Level.FINE, "Comparing byte arrays ...");
    Vector expectedVector = (Vector) expectedResult;
    logger.log(Level.FINE, "Expected result: <FILE CONTENTS>");
    logger.log(Level.FINE, "Returned result: <FILE CONTENTS>");
    // NOTE: the original also built String renderings of both Vectors
    // that were never used; they have been removed.
    if (!realres.equals(expectedVector)) {
        throw new TestException(
                "" + " test failed:\n"
                + " The data read from HTTPMD URL isn't equal to"
                + " the data retrived from the file corresponding"
                + " to the HTTPMD URL");
    }
}
/**
 * Checking if the class represented by expectedResult variable is
 * either the same as, or is a superclass of the class
 * whose instance is specified by the Exception parameter.
 *
 * @param realres an Exception.
 * @throws TestException if file contents were expected instead of an
 *         exception, or if the raised exception is not of the expected type.
 */
public void compareResults(Exception realres) throws TestException {
    // A null expClassStr means file contents, not an exception,
    // was the expected outcome.
    if (expClassStr == null) {
        throw new TestException(
                "" + " test failed:"
                + " Expected result: FILE CONTENTS" + " Returned result: "
                + realres);
    }
    logger.log(Level.FINE, "Comparing exceptions ...");
    Class expectedClass = (Class) expectedResult;
    // getClass() already returns Class; the original's cast was redundant.
    Class exceptionClass = realres.getClass();
    logger.log(Level.FINE, "Expected result: " + expectedClass);
    logger.log(Level.FINE, "Returned result: " + exceptionClass);
    if (!expectedClass.isAssignableFrom(exceptionClass)) {
        // Printed to aid debugging of unexpected exception types.
        realres.printStackTrace();
        throw new TestException(
                "" + " test failed:\n"
                + " Expected result: " + expectedClass + " Returned result: "
                + exceptionClass);
    }
}
}
| apache-2.0 |
ryanemerson/activemq-artemis | examples/jms/last-value-queue/src/main/java/org/apache/activemq/artemis/jms/example/LastValueQueueExample.java | 5226 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jms.example;
import java.util.Enumeration;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.QueueBrowser;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.naming.InitialContext;
import org.apache.activemq.artemis.common.example.ActiveMQExample;
/**
 * This example shows how to configure and use <em>Last-Value</em> queues.
 * Only the last message with a well-defined property is held by the queue.
 */
public class LastValueQueueExample extends ActiveMQExample
{
   public static void main(final String[] args)
   {
      new LastValueQueueExample().run(args);
   }

   @Override
   public boolean runExample() throws Exception
   {
      Connection connection = null;
      InitialContext initialContext = null;
      try
      {
         // Step 1. Create an initial context to perform the JNDI lookup.
         initialContext = new InitialContext();

         // Step 2. Perfom a lookup on the queue
         Queue queue = (Queue)initialContext.lookup("queue/exampleQueue");

         // Step 3. Perform a lookup on the Connection Factory
         ConnectionFactory cf = (ConnectionFactory)initialContext.lookup("ConnectionFactory");

         // Step 4.Create a JMS Connection, session and producer on the queue
         connection = cf.createConnection();
         Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
         MessageProducer producer = session.createProducer(queue);

         // Step 5. Create and send a text message with the Last-Value header set
         TextMessage message = session.createTextMessage("1st message with Last-Value property set");
         message.setStringProperty("_AMQ_LVQ_NAME", "STOCK_NAME");
         producer.send(message);
         System.out.format("Sent message: %s%n", message.getText());

         // Step 6. Create and send a second text message with the Last-Value header set
         message = session.createTextMessage("2nd message with Last-Value property set");
         message.setStringProperty("_AMQ_LVQ_NAME", "STOCK_NAME");
         producer.send(message);
         System.out.format("Sent message: %s%n", message.getText());

         // Step 7. Create and send a third text message with the Last-Value header set
         message = session.createTextMessage("3rd message with Last-Value property set");
         message.setStringProperty("_AMQ_LVQ_NAME", "STOCK_NAME");
         producer.send(message);
         System.out.format("Sent message: %s%n", message.getText());

         // Step 8. Browse the queue. There is only 1 message in it, the last sent
         QueueBrowser browser = session.createBrowser(queue);
         Enumeration enumeration = browser.getEnumeration();
         while (enumeration.hasMoreElements())
         {
            TextMessage messageInTheQueue = (TextMessage)enumeration.nextElement();
            System.out.format("Message in the queue: %s%n", messageInTheQueue.getText());
         }
         browser.close();

         // Step 9. Create a JMS Message Consumer for the queue
         MessageConsumer messageConsumer = session.createConsumer(queue);

         // Step 10. Start the Connection
         connection.start();

         // Step 11. Trying to receive a message. Since the queue is configured to keep only the
         // last message with the Last-Value header set, the message received is the last sent
         TextMessage messageReceived = (TextMessage)messageConsumer.receive(5000);
         if (messageReceived == null)
         {
            // Guard against a receive timeout: calling getText() on null would NPE.
            System.out.println("No message received within the timeout");
            return false;
         }
         System.out.format("Received message: %s%n", messageReceived.getText());

         // Step 12. Trying to receive another message but there will be none.
         // The 1st message was discarded when the 2nd was sent to the queue.
         // The 2nd message was in turn discarded when the 3trd was sent to the queue
         messageReceived = (TextMessage)messageConsumer.receive(5000);
         System.out.format("Received message: %s%n", messageReceived);

         // NOTE: the original also closed initialContext here; the finally
         // block below already does that, so the duplicate close was removed.
         return true;
      }
      finally
      {
         // Step 13. Be sure to close our JMS resources!
         if (initialContext != null)
         {
            initialContext.close();
         }
         if (connection != null)
         {
            connection.close();
         }
      }
   }
}
| apache-2.0 |
paulk-asert/groovy | src/main/java/org/codehaus/groovy/transform/sc/transformers/ConstructorCallTransformer.java | 9741 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.transform.sc.transformers;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.ConstructorNode;
import org.codehaus.groovy.ast.GroovyCodeVisitor;
import org.codehaus.groovy.ast.InnerClassNode;
import org.codehaus.groovy.ast.Parameter;
import org.codehaus.groovy.ast.expr.BinaryExpression;
import org.codehaus.groovy.ast.expr.ConstructorCallExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.ast.expr.ExpressionTransformer;
import org.codehaus.groovy.ast.expr.MapEntryExpression;
import org.codehaus.groovy.ast.expr.MapExpression;
import org.codehaus.groovy.ast.expr.TupleExpression;
import org.codehaus.groovy.classgen.AsmClassGenerator;
import org.codehaus.groovy.classgen.BytecodeExpression;
import org.codehaus.groovy.classgen.asm.BytecodeHelper;
import org.codehaus.groovy.classgen.asm.CompileStack;
import org.codehaus.groovy.classgen.asm.OperandStack;
import org.codehaus.groovy.classgen.asm.WriterController;
import org.codehaus.groovy.syntax.Token;
import org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport;
import org.codehaus.groovy.transform.stc.StaticTypeCheckingVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import java.util.List;
import static org.codehaus.groovy.ast.tools.GeneralUtils.binX;
import static org.codehaus.groovy.ast.tools.GeneralUtils.bytecodeX;
import static org.codehaus.groovy.ast.tools.GeneralUtils.propX;
import static org.codehaus.groovy.transform.stc.StaticTypesMarker.DIRECT_METHOD_CALL_TARGET;
/**
 * Static-compilation transformer that rewrites Groovy map-style constructor
 * calls, e.g. {@code new Foo(x: 1, y: 2)}, into a plain no-arg constructor
 * call followed by individual property assignments, avoiding the dynamic
 * Map-based constructor at runtime.
 */
public class ConstructorCallTransformer {
    private final StaticCompilationTransformer staticCompilationTransformer;

    public ConstructorCallTransformer(final StaticCompilationTransformer staticCompilationTransformer) {
        this.staticCompilationTransformer = staticCompilationTransformer;
    }

    // Returns either the original (super-transformed) call, or a
    // MapStyleConstructorCall bytecode expression when the call is the
    // synthetic <init>(Map) produced by the static type checker.
    Expression transformConstructorCall(final ConstructorCallExpression expr) {
        ConstructorNode node = expr.getNodeMetaData(DIRECT_METHOD_CALL_TARGET);
        if (node == null) return expr;
        Parameter[] params = node.getParameters();
        // Only rewrite the synthetic Map constructor generated by the type
        // checker (its code is GENERATED_EMPTY_STATEMENT), never a real one.
        if ((params.length == 1 || params.length == 2) // 2 is for inner class case
                && StaticTypeCheckingSupport.implementsInterfaceOrIsSubclassOf(params[params.length - 1].getType(), ClassHelper.MAP_TYPE)
                && node.getCode() == StaticTypeCheckingVisitor.GENERATED_EMPTY_STATEMENT) {
            Expression arguments = expr.getArguments();
            if (arguments instanceof TupleExpression) {
                TupleExpression tupleExpression = (TupleExpression) arguments;
                List<Expression> expressions = tupleExpression.getExpressions();
                if (expressions.size() == 1 || expressions.size() == 2) { // 2 = inner class case
                    // The map literal is always the last argument (the first
                    // one, if present, is the outer-class instance).
                    Expression expression = expressions.get(expressions.size() - 1);
                    if (expression instanceof MapExpression) {
                        MapExpression map = (MapExpression) expression;
                        // check that the node doesn't belong to the list of declared constructors
                        ClassNode declaringClass = node.getDeclaringClass();
                        for (ConstructorNode constructorNode : declaringClass.getDeclaredConstructors()) {
                            if (constructorNode == node) {
                                return staticCompilationTransformer.superTransform(expr);
                            }
                        }
                        // replace call to <init>(Map) or <init>(this, Map)
                        // with a call to <init>() or <init>(this) + appropriate setters
                        // for example, foo(x:1, y:2) is replaced with:
                        // { def tmp = new Foo(); tmp.x = 1; tmp.y = 2; return tmp }()
                        MapStyleConstructorCall result = new MapStyleConstructorCall(
                                staticCompilationTransformer,
                                declaringClass,
                                map,
                                expr
                        );
                        return result;
                    }
                }
            }
        }
        return staticCompilationTransformer.superTransform(expr);
    }

    /**
     * Bytecode expression that emits {@code new T(); tmp.k = v; ...; tmp}
     * directly, standing in for the original map-style constructor call.
     */
    private static class MapStyleConstructorCall extends BytecodeExpression implements Opcodes {
        private final StaticCompilationTransformer staticCompilationTransformer;
        // Captured when an AsmClassGenerator visits this node; needed later
        // by visit(MethodVisitor) to emit the property assignments.
        private AsmClassGenerator acg;
        private final ClassNode declaringClass;
        private final MapExpression map;
        private final ConstructorCallExpression originalCall;
        // True when the original call carried an outer-class instance
        // argument in addition to the map (inner-class construction).
        private final boolean innerClassCall;

        public MapStyleConstructorCall(
                final StaticCompilationTransformer transformer,
                final ClassNode declaringClass,
                final MapExpression map,
                final ConstructorCallExpression originalCall) {
            super(declaringClass);
            this.staticCompilationTransformer = transformer;
            this.declaringClass = declaringClass;
            this.map = map;
            this.originalCall = originalCall;
            this.setSourcePosition(originalCall);
            this.copyNodeMetaData(originalCall);
            List<Expression> originalExpressions = originalCall.getArguments() instanceof TupleExpression
                    ? ((TupleExpression) originalCall.getArguments()).getExpressions()
                    : null;
            this.innerClassCall = originalExpressions != null && originalExpressions.size() == 2;
        }

        @Override
        public Expression transformExpression(final ExpressionTransformer transformer) {
            Expression result = new MapStyleConstructorCall(
                    staticCompilationTransformer, declaringClass,
                    (MapExpression) map.transformExpression(transformer),
                    (ConstructorCallExpression) originalCall.transformExpression(transformer)
            );
            result.copyNodeMetaData(this);
            result.setSourcePosition(this);
            return result;
        }

        @Override
        public void visit(final GroovyCodeVisitor visitor) {
            if (visitor instanceof AsmClassGenerator) {
                // Remember the generator so visit(MethodVisitor) can use it.
                acg = (AsmClassGenerator) visitor;
            } else {
                originalCall.visit(visitor);
            }
            super.visit(visitor);
        }

        @Override
        public void visit(final MethodVisitor mv) {
            WriterController controller = acg.getController();
            CompileStack compileStack = controller.getCompileStack();
            OperandStack operandStack = controller.getOperandStack();

            // create a temporary variable to store the constructed object
            int tmpObj = compileStack.defineTemporaryVariable("tmpObj", declaringClass, false);
            String classInternalName = BytecodeHelper.getClassInternalName(declaringClass);
            mv.visitTypeInsn(NEW, classInternalName);
            mv.visitInsn(DUP);
            String desc = "()V";
            if (innerClassCall && declaringClass.isRedirectNode() && declaringClass.redirect() instanceof InnerClassNode) {
                // load "this"
                mv.visitVarInsn(ALOAD, 0);
                InnerClassNode icn = (InnerClassNode) declaringClass.redirect();
                Parameter[] params = {new Parameter(icn.getOuterClass(), "$p$")};
                desc = BytecodeHelper.getMethodDescriptor(ClassHelper.VOID_TYPE, params);
            }
            mv.visitMethodInsn(INVOKESPECIAL, classInternalName, "<init>", desc, false);
            mv.visitVarInsn(ASTORE, tmpObj); // store it into tmp variable

            // load every field
            for (MapEntryExpression entryExpression : map.getMapEntryExpressions()) {
                Expression keyExpression = staticCompilationTransformer.transform(entryExpression.getKeyExpression());
                Expression valueExpression = staticCompilationTransformer.transform(entryExpression.getValueExpression());
                // tmpObj.<key> = <value>, with tmpObj reloaded from its slot
                // via a synthetic bytecode expression
                BinaryExpression bexp = binX(
                        propX(
                                bytecodeX(declaringClass, v -> v.visitVarInsn(ALOAD, tmpObj)),
                                keyExpression
                        ),
                        Token.newSymbol("=", entryExpression.getLineNumber(), entryExpression.getColumnNumber()),
                        valueExpression
                );
                bexp.setSourcePosition(entryExpression);
                bexp.visit(acg);
                operandStack.pop(); // consume argument
            }

            // load object
            mv.visitVarInsn(ALOAD, tmpObj);
            // cleanup stack
            compileStack.removeVar(tmpObj);
        }
    }
}
| apache-2.0 |
lordjone/libgdx | extensions/gdx-bullet/jni/swig-src/collision/com/badlogic/gdx/physics/bullet/collision/btConvexPenetrationDepthSolver.java | 2348 | /* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 3.0.0
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.badlogic.gdx.physics.bullet.collision;
import com.badlogic.gdx.physics.bullet.BulletBase;
import com.badlogic.gdx.physics.bullet.linearmath.*;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.math.Quaternion;
import com.badlogic.gdx.math.Matrix3;
import com.badlogic.gdx.math.Matrix4;
/**
 * SWIG-generated JNI wrapper around Bullet's btConvexPenetrationDepthSolver.
 * NOTE: this file is auto-generated; regenerate via SWIG rather than editing
 * the wrapper logic by hand.
 */
public class btConvexPenetrationDepthSolver extends BulletBase {
    // Raw pointer to the wrapped native C++ object.
    private long swigCPtr;

    protected btConvexPenetrationDepthSolver(final String className, long cPtr, boolean cMemoryOwn) {
        super(className, cPtr, cMemoryOwn);
        swigCPtr = cPtr;
    }

    /** Construct a new btConvexPenetrationDepthSolver, normally you should not need this constructor it's intended for low-level usage. */
    public btConvexPenetrationDepthSolver(long cPtr, boolean cMemoryOwn) {
        this("btConvexPenetrationDepthSolver", cPtr, cMemoryOwn);
        construct();
    }

    @Override
    protected void reset(long cPtr, boolean cMemoryOwn) {
        // Release the current native object before pointing at a new one.
        if (!destroyed)
            destroy();
        super.reset(swigCPtr = cPtr, cMemoryOwn);
    }

    /** Returns the native pointer of the given wrapper, or 0 for null. */
    public static long getCPtr(btConvexPenetrationDepthSolver obj) {
        return (obj == null) ? 0 : obj.swigCPtr;
    }

    @Override
    protected void finalize() throws Throwable {
        // Safety net: free the native object if destroy() was never called.
        if (!destroyed)
            destroy();
        super.finalize();
    }

    @Override protected synchronized void delete() {
        if (swigCPtr != 0) {
            if (swigCMemOwn) {
                // Only delete the C++ object when this wrapper owns it.
                swigCMemOwn = false;
                CollisionJNI.delete_btConvexPenetrationDepthSolver(swigCPtr);
            }
            swigCPtr = 0;
        }
        super.delete();
    }

    // Delegates to the native penetration-depth computation; v/pa/pb are
    // out-parameters filled in by the native side.
    public boolean calcPenDepth(btVoronoiSimplexSolver simplexSolver, btConvexShape convexA, btConvexShape convexB, Matrix4 transA, Matrix4 transB, Vector3 v, Vector3 pa, Vector3 pb, btIDebugDraw debugDraw) {
        return CollisionJNI.btConvexPenetrationDepthSolver_calcPenDepth(swigCPtr, this, btVoronoiSimplexSolver.getCPtr(simplexSolver), simplexSolver, btConvexShape.getCPtr(convexA), convexA, btConvexShape.getCPtr(convexB), convexB, transA, transB, v, pa, pb, btIDebugDraw.getCPtr(debugDraw), debugDraw);
    }
}
| apache-2.0 |
tangrui/zyeeda-framework | core/src/main/java/com/zyeeda/framework/web/OpenSessionInViewFilter.java | 3683 | /*
* Copyright 2010 Zyeeda Co. Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zyeeda.framework.web;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.transaction.UserTransaction;
import org.apache.tapestry5.ioc.Registry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.zyeeda.framework.FrameworkConstants;
import com.zyeeda.framework.persistence.PersistenceService;
import com.zyeeda.framework.persistence.internal.DefaultPersistenceServiceProvider;
import com.zyeeda.framework.transaction.TransactionService;
import com.zyeeda.framework.transaction.internal.DefaultTransactionServiceProvider;
import com.zyeeda.framework.utils.IocUtils;
/**
* Open session in view servlet filter.
*
* @author Rui Tang
* @version %I%, %G%
* @since 1.0
*/
/**
 * Open session in view servlet filter.
 *
 * <p>Wraps each request in a JTA transaction and a persistence session:
 * begin / open session / delegate to the filter chain / commit, rolling
 * back on any failure and always closing the session afterwards.
 *
 * @author Rui Tang
 * @version %I%, %G%
 * @since 1.0
 */
public class OpenSessionInViewFilter implements Filter {

    private final static Logger logger = LoggerFactory.getLogger(OpenSessionInViewFilter.class);

    // Kept so doFilter can reach the servlet context and the service registry.
    private FilterConfig filterConfig;

    @Override
    public void init(FilterConfig config) throws ServletException {
        this.filterConfig = config;
    }

    @Override
    public void destroy() {
        // Nothing to release.
    }

    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        Registry registry = (Registry) this.filterConfig.getServletContext().getAttribute(FrameworkConstants.SERVICE_REGISTRY);
        PersistenceService persistenceService = null;
        UserTransaction tx = null;
        try {
            // Resolve the transaction and persistence services from the registry.
            TransactionService transactionService = registry.getService(IocUtils.getServiceId(DefaultTransactionServiceProvider.class), TransactionService.class);
            persistenceService = registry.getService(IocUtils.getServiceId(DefaultPersistenceServiceProvider.class), PersistenceService.class);

            tx = transactionService.getTransaction();
            logger.debug("tx status before begin = {}", tx.getStatus());
            tx.begin();
            logger.debug("tx status after begin = {}", tx.getStatus());

            // Session stays open for the whole downstream chain ("open session in view").
            persistenceService.openSession();
            chain.doFilter(request, response);

            logger.debug("tx status before commit = {}", tx.getStatus());
            tx.commit();
            logger.debug("tx status after commit = {}", tx.getStatus());
        } catch (Throwable cause) {
            // Roll back best-effort, then surface the original failure.
            try {
                if (tx != null) {
                    logger.debug("tx status before rollback = {}", tx.getStatus());
                    tx.rollback();
                    logger.debug("tx status after successfully rollback = {}", tx.getStatus());
                }
            } catch (Throwable rollbackFailure) {
                logger.error("Cannot rollback transaction.", rollbackFailure);
            }
            throw new ServletException(cause);
        } finally {
            // Always close the session, even on failure.
            if (persistenceService != null) {
                persistenceService.closeSession();
            }
        }
    }
}
| apache-2.0 |
ShikaSD/realm-java | realm/realm-library/src/androidTest/java/io/realm/entities/PrimaryKeyAsInteger.java | 1051 | /*
* Copyright 2016 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.entities;
import io.realm.RealmObject;
import io.realm.annotations.PrimaryKey;
/**
 * Test model whose {@code @PrimaryKey} field is a plain {@code int},
 * plus an ordinary {@code String} field.
 */
public class PrimaryKeyAsInteger extends RealmObject {

    @PrimaryKey
    private int id;

    private String name;

    public int getId() {
        return this.id;
    }

    public void setId(int value) {
        this.id = value;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String value) {
        this.name = value;
    }
}
| apache-2.0 |
vadimv/PlatypusJS | web-client/src/platypus/src/com/bearsoft/gwt/ui/RadioGroup.java | 3783 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.bearsoft.gwt.ui;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.EventHandler;
import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HandlerManager;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.HasName;
import com.google.gwt.user.client.ui.HasValue;
import com.google.gwt.user.client.ui.UIObject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author mg
*/
/**
 * Groups a set of boolean-valued items (e.g. radio buttons) so that at most
 * one holds {@code true} at a time; when one item becomes {@code true} all
 * the others are set to {@code false} without firing events.
 *
 * @author mg
 */
public class RadioGroup extends UIObject implements ValueChangeHandler<Boolean> {

    // One value-change handler registration per grouped item; entries must be
    // removed together with the item to avoid leaks.
    protected Map<HasValue<Boolean>, HandlerRegistration> groupedHandlers = new HashMap<>();
    protected List<HasValue<Boolean>> grouped = new ArrayList<>();
    // Unique HTML "name" shared by all grouped items so the browser also
    // treats them as one radio group.
    protected String groupName = "group-" + DOM.createUniqueId();
    //
    private HandlerManager handlerManager;

    public RadioGroup() {
        super();
    }

    public String getGroupName() {
        return groupName;
    }

    public HasValue<Boolean> get(int aIndex) {
        return grouped.get(aIndex);
    }

    /** Adds an item to the group; duplicates are ignored. */
    public void add(HasValue<Boolean> aItem) {
        if (!grouped.contains(aItem)) {
            if (aItem instanceof HasName) {
                ((HasName) aItem).setName(groupName);
            }
            groupedHandlers.put(aItem, aItem.addValueChangeHandler(this));
            grouped.add(aItem);
        }
    }

    /** Removes an item from the group, detaching its value-change handler. */
    public boolean remove(HasValue<Boolean> aItem) {
        // Bug fix: use Map.remove instead of Map.get so the registration is
        // also dropped from the map; the original leaked one entry per
        // removed item.
        HandlerRegistration handler = groupedHandlers.remove(aItem);
        if (handler != null) {
            handler.removeHandler();
        }
        if (aItem instanceof HasName) {
            ((HasName) aItem).setName("");
        }
        return grouped.remove(aItem);
    }

    /** Removes all items, detaching every handler and clearing names. */
    public void clear() {
        for (HandlerRegistration handler : groupedHandlers.values()) {
            if (handler != null) {
                handler.removeHandler();
            }
        }
        groupedHandlers.clear();
        for (HasValue<Boolean> item : grouped) {
            if (item instanceof HasName) {
                ((HasName) item).setName("");
            }
        }
        grouped.clear();
    }

    public int size() {
        return grouped.size();
    }

    @Override
    public void onValueChange(ValueChangeEvent<Boolean> event) {
        // When one item turns true, silently (fireEvents=false) turn off all
        // the others so the group stays mutually exclusive.
        if (Boolean.TRUE.equals(event.getValue())) {
            for (HasValue<Boolean> hv : grouped) {
                if (hv != event.getSource()) {
                    hv.setValue(Boolean.FALSE, false);
                }
            }
        }
    }

    /**
     * Adds this handler to the widget.
     *
     * @param <H>
     *            the type of handler to add
     * @param type
     *            the event type
     * @param handler
     *            the handler
     * @return {@link HandlerRegistration} used to remove the handler
     */
    public final <H extends EventHandler> HandlerRegistration addHandler(final H handler, GwtEvent.Type<H> type) {
        return ensureHandlers().addHandler(type, handler);
    }

    /**
     * Ensures the existence of the handler manager.
     *
     * @return the handler manager
     * */
    HandlerManager ensureHandlers() {
        return handlerManager == null ? handlerManager = createHandlerManager() : handlerManager;
    }

    HandlerManager getHandlerManager() {
        return handlerManager;
    }

    public void fireEvent(GwtEvent<?> event) {
        ensureHandlers().fireEvent(event);
    }

    /**
     * Creates the {@link HandlerManager} used by this Widget. You can override
     * this method to create a custom {@link HandlerManager}.
     *
     * @return the {@link HandlerManager} you want to use
     */
    protected HandlerManager createHandlerManager() {
        return new HandlerManager(this);
    }
}
| apache-2.0 |
facaiy/spark | core/src/test/java/org/apache/spark/unsafe/map/AbstractBytesToBytesMapSuite.java | 25900 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.unsafe.map;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import scala.Tuple2$;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.apache.spark.SparkConf;
import org.apache.spark.executor.ShuffleWriteMetrics;
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.memory.TestMemoryConsumer;
import org.apache.spark.memory.TaskMemoryManager;
import org.apache.spark.memory.TestMemoryManager;
import org.apache.spark.network.util.JavaUtils;
import org.apache.spark.serializer.JavaSerializer;
import org.apache.spark.serializer.SerializerInstance;
import org.apache.spark.serializer.SerializerManager;
import org.apache.spark.storage.*;
import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.array.ByteArrayMethods;
import org.apache.spark.util.Utils;
import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.Answers.RETURNS_SMART_NULLS;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.when;
public abstract class AbstractBytesToBytesMapSuite {
private final Random rand = new Random(42);
private TestMemoryManager memoryManager;
private TaskMemoryManager taskMemoryManager;
private SerializerManager serializerManager = new SerializerManager(
new JavaSerializer(new SparkConf()),
new SparkConf().set("spark.shuffle.spill.compress", "false"));
private static final long PAGE_SIZE_BYTES = 1L << 26; // 64 megabytes
final LinkedList<File> spillFilesCreated = new LinkedList<>();
File tempDir;
@Mock(answer = RETURNS_SMART_NULLS) BlockManager blockManager;
@Mock(answer = RETURNS_SMART_NULLS) DiskBlockManager diskBlockManager;
@Before
public void setup() {
    // Test memory manager with a fixed 256 MB off-heap budget; on/off-heap
    // mode is chosen by the concrete subclass.
    memoryManager =
        new TestMemoryManager(
            new SparkConf()
                .set("spark.memory.offHeap.enabled", "" + useOffHeapMemoryAllocator())
                .set("spark.memory.offHeap.size", "256mb")
                .set("spark.shuffle.spill.compress", "false")
                .set("spark.shuffle.compress", "false"));
    taskMemoryManager = new TaskMemoryManager(memoryManager, 0);
    tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
    spillFilesCreated.clear();
    MockitoAnnotations.initMocks(this);
    when(blockManager.diskBlockManager()).thenReturn(diskBlockManager);
    // Mocked disk block manager hands out real temp files (tracked in
    // spillFilesCreated) so spilling code paths can run against disk.
    when(diskBlockManager.createTempLocalBlock()).thenAnswer(invocationOnMock -> {
        TempLocalBlockId blockId = new TempLocalBlockId(UUID.randomUUID());
        File file = File.createTempFile("spillFile", ".spill", tempDir);
        spillFilesCreated.add(file);
        return Tuple2$.MODULE$.apply(blockId, file);
    });
    // Mocked getDiskWriter builds a real DiskBlockObjectWriter from the
    // arguments of each call (syncWrites is forced to false).
    when(blockManager.getDiskWriter(
        any(BlockId.class),
        any(File.class),
        any(SerializerInstance.class),
        anyInt(),
        any(ShuffleWriteMetrics.class))).thenAnswer(invocationOnMock -> {
        Object[] args = invocationOnMock.getArguments();
        return new DiskBlockObjectWriter(
            (File) args[1],
            serializerManager,
            (SerializerInstance) args[2],
            (Integer) args[3],
            false,
            (ShuffleWriteMetrics) args[4],
            (BlockId) args[0]
        );
    });
}
@After
public void tearDown() {
    Utils.deleteRecursively(tempDir);
    tempDir = null;

    if (taskMemoryManager != null) {
        // Every test must release all memory it acquired; both the
        // cleanup pass and the per-task accounting must report zero.
        Assert.assertEquals(0L, taskMemoryManager.cleanUpAllAllocatedMemory());
        long leakedMemory = taskMemoryManager.getMemoryConsumptionForThisTask();
        // Null the field before asserting so a failed assertion doesn't
        // re-trigger cleanup on a later test.
        taskMemoryManager = null;
        Assert.assertEquals(0L, leakedMemory);
    }
}
/** Whether the map under test should allocate memory off-heap; decided by each concrete suite. */
protected abstract boolean useOffHeapMemoryAllocator();
/** Copies {@code size} bytes starting at (base, offset) into a fresh byte array. */
private static byte[] getByteArray(Object base, long offset, int size) {
    final byte[] copy = new byte[size];
    Platform.copyMemory(base, offset, copy, Platform.BYTE_ARRAY_OFFSET, size);
    return copy;
}
/** Returns {@code numWords * 8} random bytes from the suite's seeded RNG. */
private byte[] getRandomByteArray(int numWords) {
    Assert.assertTrue(numWords >= 0);
    // One word is 8 bytes.
    final byte[] result = new byte[numWords * 8];
    rand.nextBytes(result);
    return result;
}
/**
 * Fast equality checking for byte arrays, since these comparisons are a bottleneck
 * in our stress tests.
 */
private static boolean arrayEquals(
    byte[] expected,
    Object base,
    long offset,
    long actualLengthBytes) {
    // Lengths must match before any byte comparison is worthwhile.
    if (actualLengthBytes != expected.length) {
        return false;
    }
    return ByteArrayMethods.arrayEquals(
        expected,
        Platform.BYTE_ARRAY_OFFSET,
        base,
        offset,
        expected.length
    );
}
@Test
public void emptyMap() {
    // A fresh map must report zero keys, miss on any lookup, and yield an
    // empty iterator. free() is always called so tearDown's leak checks pass.
    BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, 64, PAGE_SIZE_BYTES);
    try {
        Assert.assertEquals(0, map.numKeys());
        final int keyLengthInWords = 10;
        final int keyLengthInBytes = keyLengthInWords * 8;
        final byte[] key = getRandomByteArray(keyLengthInWords);
        Assert.assertFalse(map.lookup(key, Platform.BYTE_ARRAY_OFFSET, keyLengthInBytes).isDefined());
        Assert.assertFalse(map.iterator().hasNext());
    } finally {
        map.free();
    }
}
@Test
public void setAndRetrieveAKey() {
    // Stores one key/value pair and verifies the Location reflects it both
    // immediately after append() and again after a fresh lookup(); finally
    // checks that appending the same key twice trips the internal assertion.
    BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, 64, PAGE_SIZE_BYTES);
    final int recordLengthWords = 10;
    final int recordLengthBytes = recordLengthWords * 8;
    final byte[] keyData = getRandomByteArray(recordLengthWords);
    final byte[] valueData = getRandomByteArray(recordLengthWords);
    try {
        final BytesToBytesMap.Location loc =
            map.lookup(keyData, Platform.BYTE_ARRAY_OFFSET, recordLengthBytes);
        Assert.assertFalse(loc.isDefined());
        Assert.assertTrue(loc.append(
            keyData,
            Platform.BYTE_ARRAY_OFFSET,
            recordLengthBytes,
            valueData,
            Platform.BYTE_ARRAY_OFFSET,
            recordLengthBytes
        ));
        // After storing the key and value, the other location methods should return results that
        // reflect the result of this store without us having to call lookup() again on the same key.
        Assert.assertEquals(recordLengthBytes, loc.getKeyLength());
        Assert.assertEquals(recordLengthBytes, loc.getValueLength());
        Assert.assertArrayEquals(keyData,
            getByteArray(loc.getKeyBase(), loc.getKeyOffset(), recordLengthBytes));
        Assert.assertArrayEquals(valueData,
            getByteArray(loc.getValueBase(), loc.getValueOffset(), recordLengthBytes));

        // After calling lookup() the location should still point to the correct data.
        Assert.assertTrue(
            map.lookup(keyData, Platform.BYTE_ARRAY_OFFSET, recordLengthBytes).isDefined());
        Assert.assertEquals(recordLengthBytes, loc.getKeyLength());
        Assert.assertEquals(recordLengthBytes, loc.getValueLength());
        Assert.assertArrayEquals(keyData,
            getByteArray(loc.getKeyBase(), loc.getKeyOffset(), recordLengthBytes));
        Assert.assertArrayEquals(valueData,
            getByteArray(loc.getValueBase(), loc.getValueOffset(), recordLengthBytes));

        // Appending a second value for an existing key is disallowed and is
        // expected to fail with an AssertionError from the map internals.
        try {
            Assert.assertTrue(loc.append(
                keyData,
                Platform.BYTE_ARRAY_OFFSET,
                recordLengthBytes,
                valueData,
                Platform.BYTE_ARRAY_OFFSET,
                recordLengthBytes
            ));
            Assert.fail("Should not be able to set a new value for a key");
        } catch (AssertionError e) {
            // Expected exception; do nothing.
        }
    } finally {
        map.free();
    }
}
  /**
   * Inserts {@code size} records (every fifth one with a zero-length key),
   * iterates over the map, and checks that every value is seen exactly once.
   *
   * @param destructive whether to use the destructive iterator, which is
   *   expected to free each data page once iteration moves past it
   */
  private void iteratorTestBase(boolean destructive) throws Exception {
    final int size = 4096;
    BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, size / 2, PAGE_SIZE_BYTES);
    try {
      for (long i = 0; i < size; i++) {
        final long[] value = new long[] { i };
        final BytesToBytesMap.Location loc =
          map.lookup(value, Platform.LONG_ARRAY_OFFSET, 8);
        Assert.assertFalse(loc.isDefined());
        // Ensure that we store some zero-length keys
        if (i % 5 == 0) {
          Assert.assertTrue(loc.append(
            null,
            Platform.LONG_ARRAY_OFFSET,
            0,
            value,
            Platform.LONG_ARRAY_OFFSET,
            8
          ));
        } else {
          Assert.assertTrue(loc.append(
            value,
            Platform.LONG_ARRAY_OFFSET,
            8,
            value,
            Platform.LONG_ARRAY_OFFSET,
            8
          ));
        }
      }
      // Track which values have been produced by the iterator; each index must
      // end up set exactly once for cardinality() to equal size.
      final java.util.BitSet valuesSeen = new java.util.BitSet(size);
      final Iterator<BytesToBytesMap.Location> iter;
      if (destructive) {
        iter = map.destructiveIterator();
      } else {
        iter = map.iterator();
      }
      int numPages = map.getNumDataPages();
      int countFreedPages = 0;
      while (iter.hasNext()) {
        final BytesToBytesMap.Location loc = iter.next();
        Assert.assertTrue(loc.isDefined());
        final long value = Platform.getLong(loc.getValueBase(), loc.getValueOffset());
        final long keyLength = loc.getKeyLength();
        // Zero-length keys were only written for values divisible by 5 above;
        // all other records store the value as their key too.
        if (keyLength == 0) {
          Assert.assertTrue("value " + value + " was not divisible by 5", value % 5 == 0);
        } else {
          final long key = Platform.getLong(loc.getKeyBase(), loc.getKeyOffset());
          Assert.assertEquals(value, key);
        }
        valuesSeen.set((int) value);
        if (destructive) {
          // The iterator moves onto next page and frees previous page
          if (map.getNumDataPages() < numPages) {
            numPages = map.getNumDataPages();
            countFreedPages++;
          }
        }
      }
      if (destructive) {
        // Latest page is not freed by iterator but by map itself
        Assert.assertEquals(countFreedPages, numPages - 1);
      }
      Assert.assertEquals(size, valuesSeen.cardinality());
    } finally {
      map.free();
    }
  }
  /** Exercises {@link #iteratorTestBase} with the non-destructive iterator. */
  @Test
  public void iteratorTest() throws Exception {
    iteratorTestBase(false);
  }
  /** Exercises {@link #iteratorTestBase} with the destructive (page-freeing) iterator. */
  @Test
  public void destructiveIteratorTest() throws Exception {
    iteratorTestBase(true);
  }
  /**
   * Stresses iteration across multiple data pages whose capacity is not an
   * exact multiple of the 72-byte record size, so each page ends with wasted
   * space that the iterator must skip over correctly.
   */
  @Test
  public void iteratingOverDataPagesWithWastedSpace() throws Exception {
    final int NUM_ENTRIES = 1000 * 1000;
    final int KEY_LENGTH = 24;
    final int VALUE_LENGTH = 40;
    final BytesToBytesMap map =
      new BytesToBytesMap(taskMemoryManager, NUM_ENTRIES, PAGE_SIZE_BYTES);
    // Each record will take 8 + 24 + 40 = 72 bytes of space in the data page. Our 64-megabyte
    // pages won't be evenly-divisible by records of this size, which will cause us to waste some
    // space at the end of the page. This is necessary in order for us to take the end-of-record
    // handling branch in iterator().
    try {
      for (int i = 0; i < NUM_ENTRIES; i++) {
        final long[] key = new long[] { i, i, i }; // 3 * 8 = 24 bytes
        final long[] value = new long[] { i, i, i, i, i }; // 5 * 8 = 40 bytes
        final BytesToBytesMap.Location loc = map.lookup(
          key,
          Platform.LONG_ARRAY_OFFSET,
          KEY_LENGTH
        );
        Assert.assertFalse(loc.isDefined());
        Assert.assertTrue(loc.append(
          key,
          Platform.LONG_ARRAY_OFFSET,
          KEY_LENGTH,
          value,
          Platform.LONG_ARRAY_OFFSET,
          VALUE_LENGTH
        ));
      }
      // The dataset above should have spilled onto a second page.
      Assert.assertEquals(2, map.getNumDataPages());
      final java.util.BitSet valuesSeen = new java.util.BitSet(NUM_ENTRIES);
      final Iterator<BytesToBytesMap.Location> iter = map.iterator();
      // Reusable scratch buffers for copying each record out of the map.
      final long[] key = new long[KEY_LENGTH / 8];
      final long[] value = new long[VALUE_LENGTH / 8];
      while (iter.hasNext()) {
        final BytesToBytesMap.Location loc = iter.next();
        Assert.assertTrue(loc.isDefined());
        Assert.assertEquals(KEY_LENGTH, loc.getKeyLength());
        Assert.assertEquals(VALUE_LENGTH, loc.getValueLength());
        Platform.copyMemory(
          loc.getKeyBase(),
          loc.getKeyOffset(),
          key,
          Platform.LONG_ARRAY_OFFSET,
          KEY_LENGTH
        );
        Platform.copyMemory(
          loc.getValueBase(),
          loc.getValueOffset(),
          value,
          Platform.LONG_ARRAY_OFFSET,
          VALUE_LENGTH
        );
        // Every word of both key and value was written as the same integer i.
        for (long j : key) {
          Assert.assertEquals(key[0], j);
        }
        for (long j : value) {
          Assert.assertEquals(key[0], j);
        }
        valuesSeen.set((int) key[0]);
      }
      Assert.assertEquals(NUM_ENTRIES, valuesSeen.cardinality());
    } finally {
      map.free();
    }
  }
@Test
public void randomizedStressTest() {
final int size = 32768;
// Java arrays' hashCodes() aren't based on the arrays' contents, so we need to wrap arrays
// into ByteBuffers in order to use them as keys here.
final Map<ByteBuffer, byte[]> expected = new HashMap<>();
final BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, size, PAGE_SIZE_BYTES);
try {
// Fill the map to 90% full so that we can trigger probing
for (int i = 0; i < size * 0.9; i++) {
final byte[] key = getRandomByteArray(rand.nextInt(256) + 1);
final byte[] value = getRandomByteArray(rand.nextInt(256) + 1);
if (!expected.containsKey(ByteBuffer.wrap(key))) {
expected.put(ByteBuffer.wrap(key), value);
final BytesToBytesMap.Location loc = map.lookup(
key,
Platform.BYTE_ARRAY_OFFSET,
key.length
);
Assert.assertFalse(loc.isDefined());
Assert.assertTrue(loc.append(
key,
Platform.BYTE_ARRAY_OFFSET,
key.length,
value,
Platform.BYTE_ARRAY_OFFSET,
value.length
));
// After calling putNewKey, the following should be true, even before calling
// lookup():
Assert.assertTrue(loc.isDefined());
Assert.assertEquals(key.length, loc.getKeyLength());
Assert.assertEquals(value.length, loc.getValueLength());
Assert.assertTrue(arrayEquals(key, loc.getKeyBase(), loc.getKeyOffset(), key.length));
Assert.assertTrue(
arrayEquals(value, loc.getValueBase(), loc.getValueOffset(), value.length));
}
}
for (Map.Entry<ByteBuffer, byte[]> entry : expected.entrySet()) {
final byte[] key = JavaUtils.bufferToArray(entry.getKey());
final byte[] value = entry.getValue();
final BytesToBytesMap.Location loc =
map.lookup(key, Platform.BYTE_ARRAY_OFFSET, key.length);
Assert.assertTrue(loc.isDefined());
Assert.assertTrue(
arrayEquals(key, loc.getKeyBase(), loc.getKeyOffset(), loc.getKeyLength()));
Assert.assertTrue(
arrayEquals(value, loc.getValueBase(), loc.getValueOffset(), loc.getValueLength()));
}
} finally {
map.free();
}
}
@Test
public void randomizedTestWithRecordsLargerThanPageSize() {
final long pageSizeBytes = 128;
final BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, 64, pageSizeBytes);
// Java arrays' hashCodes() aren't based on the arrays' contents, so we need to wrap arrays
// into ByteBuffers in order to use them as keys here.
final Map<ByteBuffer, byte[]> expected = new HashMap<>();
try {
for (int i = 0; i < 1000; i++) {
final byte[] key = getRandomByteArray(rand.nextInt(128));
final byte[] value = getRandomByteArray(rand.nextInt(128));
if (!expected.containsKey(ByteBuffer.wrap(key))) {
expected.put(ByteBuffer.wrap(key), value);
final BytesToBytesMap.Location loc = map.lookup(
key,
Platform.BYTE_ARRAY_OFFSET,
key.length
);
Assert.assertFalse(loc.isDefined());
Assert.assertTrue(loc.append(
key,
Platform.BYTE_ARRAY_OFFSET,
key.length,
value,
Platform.BYTE_ARRAY_OFFSET,
value.length
));
// After calling putNewKey, the following should be true, even before calling
// lookup():
Assert.assertTrue(loc.isDefined());
Assert.assertEquals(key.length, loc.getKeyLength());
Assert.assertEquals(value.length, loc.getValueLength());
Assert.assertTrue(arrayEquals(key, loc.getKeyBase(), loc.getKeyOffset(), key.length));
Assert.assertTrue(
arrayEquals(value, loc.getValueBase(), loc.getValueOffset(), value.length));
}
}
for (Map.Entry<ByteBuffer, byte[]> entry : expected.entrySet()) {
final byte[] key = JavaUtils.bufferToArray(entry.getKey());
final byte[] value = entry.getValue();
final BytesToBytesMap.Location loc =
map.lookup(key, Platform.BYTE_ARRAY_OFFSET, key.length);
Assert.assertTrue(loc.isDefined());
Assert.assertTrue(
arrayEquals(key, loc.getKeyBase(), loc.getKeyOffset(), loc.getKeyLength()));
Assert.assertTrue(
arrayEquals(value, loc.getValueBase(), loc.getValueOffset(), loc.getValueLength()));
}
} finally {
map.free();
}
}
@Test
public void failureToAllocateFirstPage() {
memoryManager.limit(1024); // longArray
BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, 1, PAGE_SIZE_BYTES);
try {
final long[] emptyArray = new long[0];
final BytesToBytesMap.Location loc =
map.lookup(emptyArray, Platform.LONG_ARRAY_OFFSET, 0);
Assert.assertFalse(loc.isDefined());
Assert.assertFalse(loc.append(
emptyArray, Platform.LONG_ARRAY_OFFSET, 0, emptyArray, Platform.LONG_ARRAY_OFFSET, 0));
} finally {
map.free();
}
}
@Test
public void failureToGrow() {
BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, 1, 1024);
try {
boolean success = true;
int i;
for (i = 0; i < 127; i++) {
if (i > 0) {
memoryManager.limit(0);
}
final long[] arr = new long[]{i};
final BytesToBytesMap.Location loc = map.lookup(arr, Platform.LONG_ARRAY_OFFSET, 8);
success =
loc.append(arr, Platform.LONG_ARRAY_OFFSET, 8, arr, Platform.LONG_ARRAY_OFFSET, 8);
if (!success) {
break;
}
}
Assert.assertThat(i, greaterThan(0));
Assert.assertFalse(success);
} finally {
map.free();
}
}
  /**
   * Inserts 1024 records, then checks that a non-destructive iterator refuses
   * to spill while a destructive iterator can spill mid-iteration and still
   * finish, and that all spill files are cleaned up afterwards.
   */
  @Test
  public void spillInIterator() throws IOException {
    BytesToBytesMap map = new BytesToBytesMap(
      taskMemoryManager, blockManager, serializerManager, 1, 0.75, 1024);
    try {
      int i;
      for (i = 0; i < 1024; i++) {
        final long[] arr = new long[]{i};
        final BytesToBytesMap.Location loc = map.lookup(arr, Platform.LONG_ARRAY_OFFSET, 8);
        loc.append(arr, Platform.LONG_ARRAY_OFFSET, 8, arr, Platform.LONG_ARRAY_OFFSET, 8);
      }
      BytesToBytesMap.MapIterator iter = map.iterator();
      for (i = 0; i < 100; i++) {
        iter.next();
      }
      // Non-destructive iterator is not spillable
      Assert.assertEquals(0, iter.spill(1024L * 10));
      for (i = 100; i < 1024; i++) {
        iter.next();
      }
      // The destructive iterator should release at least the requested amount,
      // part-way through iteration, and still produce the remaining records.
      BytesToBytesMap.MapIterator iter2 = map.destructiveIterator();
      for (i = 0; i < 100; i++) {
        iter2.next();
      }
      Assert.assertTrue(iter2.spill(1024) >= 1024);
      for (i = 100; i < 1024; i++) {
        iter2.next();
      }
      assertFalse(iter2.hasNext());
    } finally {
      map.free();
      // Spilling must clean up after itself: no spill files may remain on disk.
      for (File spillFile : spillFilesCreated) {
        assertFalse("Spill file " + spillFile.getPath() + " was not cleaned up",
          spillFile.exists());
      }
    }
  }
@Test
public void multipleValuesForSameKey() {
BytesToBytesMap map =
new BytesToBytesMap(taskMemoryManager, blockManager, serializerManager, 1, 0.5, 1024);
try {
int i;
for (i = 0; i < 1024; i++) {
final long[] arr = new long[]{i};
map.lookup(arr, Platform.LONG_ARRAY_OFFSET, 8)
.append(arr, Platform.LONG_ARRAY_OFFSET, 8, arr, Platform.LONG_ARRAY_OFFSET, 8);
}
assert map.numKeys() == 1024;
assert map.numValues() == 1024;
for (i = 0; i < 1024; i++) {
final long[] arr = new long[]{i};
map.lookup(arr, Platform.LONG_ARRAY_OFFSET, 8)
.append(arr, Platform.LONG_ARRAY_OFFSET, 8, arr, Platform.LONG_ARRAY_OFFSET, 8);
}
assert map.numKeys() == 1024;
assert map.numValues() == 2048;
for (i = 0; i < 1024; i++) {
final long[] arr = new long[]{i};
final BytesToBytesMap.Location loc = map.lookup(arr, Platform.LONG_ARRAY_OFFSET, 8);
assert loc.isDefined();
assert loc.nextValue();
assert !loc.nextValue();
}
BytesToBytesMap.MapIterator iter = map.iterator();
for (i = 0; i < 2048; i++) {
assert iter.hasNext();
final BytesToBytesMap.Location loc = iter.next();
assert loc.isDefined();
}
} finally {
map.free();
}
}
@Test
public void initialCapacityBoundsChecking() {
try {
new BytesToBytesMap(taskMemoryManager, 0, PAGE_SIZE_BYTES);
Assert.fail("Expected IllegalArgumentException to be thrown");
} catch (IllegalArgumentException e) {
// expected exception
}
try {
new BytesToBytesMap(
taskMemoryManager,
BytesToBytesMap.MAX_CAPACITY + 1,
PAGE_SIZE_BYTES);
Assert.fail("Expected IllegalArgumentException to be thrown");
} catch (IllegalArgumentException e) {
// expected exception
}
try {
new BytesToBytesMap(
taskMemoryManager,
1,
TaskMemoryManager.MAXIMUM_PAGE_SIZE_BYTES + 1);
Assert.fail("Expected IllegalArgumentException to be thrown");
} catch (IllegalArgumentException e) {
// expected exception
}
}
  /**
   * Tracks getPeakMemoryUsedBytes() as records are appended: the peak should
   * jump by exactly one page size whenever a new page is allocated and stay
   * flat otherwise, and freeing the map must not change the reported peak.
   */
  @Test
  public void testPeakMemoryUsed() {
    final long recordLengthBytes = 32;
    final long pageSizeBytes = 256 + 8; // 8 bytes for end-of-page marker
    final long numRecordsPerPage = (pageSizeBytes - 8) / recordLengthBytes;
    final BytesToBytesMap map = new BytesToBytesMap(taskMemoryManager, 1024, pageSizeBytes);
    // Since BytesToBytesMap is append-only, we expect the total memory consumption to be
    // monotonically increasing. More specifically, every time we allocate a new page it
    // should increase by exactly the size of the page. In this regard, the memory usage
    // at any given time is also the peak memory used.
    long previousPeakMemory = map.getPeakMemoryUsedBytes();
    long newPeakMemory;
    try {
      for (long i = 0; i < numRecordsPerPage * 10; i++) {
        final long[] value = new long[]{i};
        map.lookup(value, Platform.LONG_ARRAY_OFFSET, 8).append(
          value,
          Platform.LONG_ARRAY_OFFSET,
          8,
          value,
          Platform.LONG_ARRAY_OFFSET,
          8);
        newPeakMemory = map.getPeakMemoryUsedBytes();
        if (i % numRecordsPerPage == 0) {
          // We allocated a new page for this record, so peak memory should change
          assertEquals(previousPeakMemory + pageSizeBytes, newPeakMemory);
        } else {
          assertEquals(previousPeakMemory, newPeakMemory);
        }
        previousPeakMemory = newPeakMemory;
      }
      // Freeing the map should not change the peak memory
      map.free();
      newPeakMemory = map.getPeakMemoryUsedBytes();
      assertEquals(previousPeakMemory, newPeakMemory);
    } finally {
      // NOTE(review): free() is invoked a second time here after the explicit
      // free above — presumably idempotent; confirm double-free is safe.
      map.free();
    }
  }
  /**
   * Regression test: destructively iterating the map while another memory
   * consumer on the same TaskMemoryManager keeps requesting memory must
   * complete rather than deadlock.
   */
  @Test
  public void avoidDeadlock() throws InterruptedException {
    memoryManager.limit(PAGE_SIZE_BYTES);
    MemoryMode mode = useOffHeapMemoryAllocator() ? MemoryMode.OFF_HEAP: MemoryMode.ON_HEAP;
    TestMemoryConsumer c1 = new TestMemoryConsumer(taskMemoryManager, mode);
    BytesToBytesMap map =
      new BytesToBytesMap(taskMemoryManager, blockManager, serializerManager, 1, 0.5, 1024);
    // Background thread that repeatedly acquires memory through c1 and then
    // releases everything it took.
    Thread thread = new Thread(() -> {
      int i = 0;
      long used = 0;
      while (i < 10) {
        c1.use(10000000);
        used += 10000000;
        i++;
      }
      c1.free(used);
    });
    try {
      int i;
      for (i = 0; i < 1024; i++) {
        final long[] arr = new long[]{i};
        final BytesToBytesMap.Location loc = map.lookup(arr, Platform.LONG_ARRAY_OFFSET, 8);
        loc.append(arr, Platform.LONG_ARRAY_OFFSET, 8, arr, Platform.LONG_ARRAY_OFFSET, 8);
      }
      // Starts to require memory at another memory consumer.
      thread.start();
      BytesToBytesMap.MapIterator iter = map.destructiveIterator();
      for (i = 0; i < 1024; i++) {
        iter.next();
      }
      assertFalse(iter.hasNext());
    } finally {
      map.free();
      thread.join();
      // Any spill files produced while memory was contended must be gone.
      for (File spillFile : spillFilesCreated) {
        assertFalse("Spill file " + spillFile.getPath() + " was not cleaned up",
          spillFile.exists());
      }
    }
  }
}
| apache-2.0 |
aozarov/gcloud-java | gcloud-java-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java | 10342 | /*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigquery;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.captureLong;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.cloud.RestorableState;
import com.google.cloud.WriteChannel;
import com.google.cloud.bigquery.spi.BigQueryRpc;
import com.google.cloud.bigquery.spi.BigQueryRpcFactory;
import org.easymock.Capture;
import org.easymock.CaptureType;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Random;
public class TableDataWriteChannelTest {
private static final String UPLOAD_ID = "uploadid";
private static final TableId TABLE_ID = TableId.of("dataset", "table");
private static final WriteChannelConfiguration LOAD_CONFIGURATION =
WriteChannelConfiguration.builder(TABLE_ID)
.createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
.writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND)
.formatOptions(FormatOptions.json())
.ignoreUnknownValues(true)
.maxBadRecords(10)
.build();
private static final int MIN_CHUNK_SIZE = 256 * 1024;
private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE;
private static final int CUSTOM_CHUNK_SIZE = 4 * MIN_CHUNK_SIZE;
private static final Random RANDOM = new Random();
private BigQueryOptions options;
private BigQueryRpcFactory rpcFactoryMock;
private BigQueryRpc bigqueryRpcMock;
private TableDataWriteChannel writer;
@Before
public void setUp() {
rpcFactoryMock = createMock(BigQueryRpcFactory.class);
bigqueryRpcMock = createMock(BigQueryRpc.class);
expect(rpcFactoryMock.create(anyObject(BigQueryOptions.class)))
.andReturn(bigqueryRpcMock);
replay(rpcFactoryMock);
options = BigQueryOptions.builder()
.projectId("projectid")
.serviceRpcFactory(rpcFactoryMock)
.build();
}
@After
public void tearDown() throws Exception {
verify(rpcFactoryMock, bigqueryRpcMock);
}
@Test
public void testCreate() {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
assertTrue(writer.isOpen());
}
@Test
public void testWriteWithoutFlush() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)));
}
@Test
public void testWriteWithFlush() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
Capture<byte[]> capturedBuffer = Capture.newInstance();
bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L),
eq(CUSTOM_CHUNK_SIZE), eq(false));
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
writer.chunkSize(CUSTOM_CHUNK_SIZE);
ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE);
assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer));
assertArrayEquals(buffer.array(), capturedBuffer.getValue());
}
@Test
public void testWritesAndFlush() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
Capture<byte[]> capturedBuffer = Capture.newInstance();
bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L),
eq(DEFAULT_CHUNK_SIZE), eq(false));
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE];
for (int i = 0; i < buffers.length; i++) {
buffers[i] = randomBuffer(MIN_CHUNK_SIZE);
assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i]));
}
for (int i = 0; i < buffers.length; i++) {
assertArrayEquals(
buffers[i].array(),
Arrays.copyOfRange(
capturedBuffer.getValue(), MIN_CHUNK_SIZE * i, MIN_CHUNK_SIZE * (i + 1)));
}
}
@Test
public void testCloseWithoutFlush() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
Capture<byte[]> capturedBuffer = Capture.newInstance();
bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true));
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
assertTrue(writer.isOpen());
writer.close();
assertArrayEquals(new byte[0], capturedBuffer.getValue());
assertTrue(!writer.isOpen());
}
@Test
public void testCloseWithFlush() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
Capture<byte[]> capturedBuffer = Capture.newInstance();
ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE);
bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE),
eq(true));
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
assertTrue(writer.isOpen());
writer.write(buffer);
writer.close();
assertEquals(DEFAULT_CHUNK_SIZE, capturedBuffer.getValue().length);
assertArrayEquals(buffer.array(), Arrays.copyOf(capturedBuffer.getValue(), MIN_CHUNK_SIZE));
assertTrue(!writer.isOpen());
}
@Test
public void testWriteClosed() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
Capture<byte[]> capturedBuffer = Capture.newInstance();
bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true));
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
writer.close();
try {
writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE));
fail("Expected TableDataWriteChannel write to throw IOException");
} catch (IOException ex) {
// expected
}
}
@Test
public void testSaveAndRestore() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
Capture<byte[]> capturedBuffer = Capture.newInstance(CaptureType.ALL);
Capture<Long> capturedPosition = Capture.newInstance(CaptureType.ALL);
bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0),
captureLong(capturedPosition), eq(DEFAULT_CHUNK_SIZE), eq(false));
expectLastCall().times(2);
replay(bigqueryRpcMock);
ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE);
ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1));
assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0));
assertEquals(new Long(0L), capturedPosition.getValues().get(0));
RestorableState<WriteChannel> writerState = writer.capture();
WriteChannel restoredWriter = writerState.restore();
assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2));
assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1));
assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1));
}
@Test
public void testSaveAndRestoreClosed() throws IOException {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID);
Capture<byte[]> capturedBuffer = Capture.newInstance();
bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true));
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
writer.close();
RestorableState<WriteChannel> writerState = writer.capture();
RestorableState<WriteChannel> expectedWriterState =
TableDataWriteChannel.StateImpl.builder(options, LOAD_CONFIGURATION, UPLOAD_ID)
.buffer(null)
.chunkSize(DEFAULT_CHUNK_SIZE)
.isOpen(false)
.position(0)
.build();
WriteChannel restoredWriter = writerState.restore();
assertArrayEquals(new byte[0], capturedBuffer.getValue());
assertEquals(expectedWriterState, restoredWriter.capture());
}
@Test
public void testStateEquals() {
expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID).times(2);
replay(bigqueryRpcMock);
writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
// avoid closing when you don't want partial writes upon failure
@SuppressWarnings("resource")
WriteChannel writer2 = new TableDataWriteChannel(options, LOAD_CONFIGURATION);
RestorableState<WriteChannel> state = writer.capture();
RestorableState<WriteChannel> state2 = writer2.capture();
assertEquals(state, state2);
assertEquals(state.hashCode(), state2.hashCode());
assertEquals(state.toString(), state2.toString());
}
private static ByteBuffer randomBuffer(int size) {
byte[] byteArray = new byte[size];
RANDOM.nextBytes(byteArray);
return ByteBuffer.wrap(byteArray);
}
}
| apache-2.0 |
apache/batik | batik-anim/src/main/java/org/apache/batik/anim/TransformAnimation.java | 10546 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.anim;
import org.apache.batik.anim.dom.AnimatableElement;
import org.apache.batik.anim.timing.TimedElement;
import org.apache.batik.anim.values.AnimatableValue;
import org.apache.batik.anim.values.AnimatableTransformListValue;
import org.w3c.dom.svg.SVGTransform;
/**
* An animation class for 'animateTransform' animations.
*
* @author <a href="mailto:cam%40mcc%2eid%2eau">Cameron McCormack</a>
* @version $Id$
*/
public class TransformAnimation extends SimpleAnimation {

    /**
     * The transform type. This should take one of the constants defined
     * in {@link org.w3c.dom.svg.SVGTransform}.
     */
    protected short type;

    /**
     * Time values to control the pacing of the second component of the
     * animation.
     */
    protected float[] keyTimes2;

    /**
     * Time values to control the pacing of the third component of the
     * animation.
     */
    protected float[] keyTimes3;

    /**
     * Creates a new TransformAnimation.
     */
    public TransformAnimation(TimedElement timedElement,
                              AnimatableElement animatableElement,
                              int calcMode,
                              float[] keyTimes,
                              float[] keySplines,
                              boolean additive,
                              boolean cumulative,
                              AnimatableValue[] values,
                              AnimatableValue from,
                              AnimatableValue to,
                              AnimatableValue by,
                              short type) {
        // pretend we didn't get a calcMode="paced", since we need specialised
        // behaviour in sampledAtUnitTime.
        super(timedElement, animatableElement,
              calcMode == CALC_MODE_PACED ? CALC_MODE_LINEAR : calcMode,
              calcMode == CALC_MODE_PACED ? null : keyTimes,
              keySplines, additive, cumulative, values, from, to, by);
        this.calcMode = calcMode;
        this.type = type;
        if (calcMode != CALC_MODE_PACED) {
            return;
        }
        // Determine the equivalent keyTimes for the individual components
        // of the transforms for CALC_MODE_PACED.
        int count = this.values.length;
        float[] cumulativeDistances1;
        float[] cumulativeDistances2 = null;
        float[] cumulativeDistances3 = null;
        // Intentional fall-through in all switches below: rotate transforms are
        // paced over three components (so all three arrays are allocated),
        // scale/translate over two, and every other type over a single one.
        switch (type) {
            case SVGTransform.SVG_TRANSFORM_ROTATE:
                cumulativeDistances3 = new float[count];
                cumulativeDistances3[0] = 0f;
                // fall through
            case SVGTransform.SVG_TRANSFORM_SCALE:
            case SVGTransform.SVG_TRANSFORM_TRANSLATE:
                cumulativeDistances2 = new float[count];
                cumulativeDistances2[0] = 0f;
                // fall through
            default:
                cumulativeDistances1 = new float[count];
                cumulativeDistances1[0] = 0f;
        }
        // Accumulate, per component, the distance between each pair of
        // consecutive animation values (distanceTo1/2/3 measure the first,
        // second and third transform components respectively).
        for (int i = 1; i < this.values.length; i++) {
            switch (type) {
                case SVGTransform.SVG_TRANSFORM_ROTATE:
                    cumulativeDistances3[i] =
                        cumulativeDistances3[i - 1]
                            + ((AnimatableTransformListValue)
                                this.values[i - 1]).distanceTo3(this.values[i]);
                    // fall through
                case SVGTransform.SVG_TRANSFORM_SCALE:
                case SVGTransform.SVG_TRANSFORM_TRANSLATE:
                    cumulativeDistances2[i] =
                        cumulativeDistances2[i - 1]
                            + ((AnimatableTransformListValue)
                                this.values[i - 1]).distanceTo2(this.values[i]);
                    // fall through
                default:
                    cumulativeDistances1[i] =
                        cumulativeDistances1[i - 1]
                            + ((AnimatableTransformListValue)
                                this.values[i - 1]).distanceTo1(this.values[i]);
            }
        }
        // Normalize each cumulative distance array into keyTimes in [0, 1],
        // pinning the first entry to 0 and the last to 1.
        switch (type) {
            case SVGTransform.SVG_TRANSFORM_ROTATE:
                float totalLength = cumulativeDistances3[count - 1];
                keyTimes3 = new float[count];
                keyTimes3[0] = 0f;
                for (int i = 1; i < count - 1; i++) {
                    keyTimes3[i] = cumulativeDistances3[i] / totalLength;
                }
                keyTimes3[count - 1] = 1f;
                // fall through
            case SVGTransform.SVG_TRANSFORM_SCALE:
            case SVGTransform.SVG_TRANSFORM_TRANSLATE:
                totalLength = cumulativeDistances2[count - 1];
                keyTimes2 = new float[count];
                keyTimes2[0] = 0f;
                for (int i = 1; i < count - 1; i++) {
                    keyTimes2[i] = cumulativeDistances2[i] / totalLength;
                }
                keyTimes2[count - 1] = 1f;
                // fall through
            default:
                totalLength = cumulativeDistances1[count - 1];
                this.keyTimes = new float[count];
                this.keyTimes[0] = 0f;
                for (int i = 1; i < count - 1; i++) {
                    this.keyTimes[i] = cumulativeDistances1[i] / totalLength;
                }
                this.keyTimes[count - 1] = 1f;
        }
    }

    /**
     * Called when the element is sampled at the given unit time. This updates
     * the {@link #value} of the animation if active.
     */
    protected void sampledAtUnitTime(float unitTime, int repeatIteration) {
        // Note that skews are handled by SimpleAnimation and not here, since
        // they need just the one component of interpolation.
        if (calcMode != CALC_MODE_PACED
                || type == SVGTransform.SVG_TRANSFORM_SKEWX
                || type == SVGTransform.SVG_TRANSFORM_SKEWY) {
            super.sampledAtUnitTime(unitTime, repeatIteration);
            return;
        }
        AnimatableTransformListValue
            value1, value2, value3 = null, nextValue1, nextValue2,
            nextValue3 = null, accumulation;
        float interpolation1 = 0f, interpolation2 = 0f, interpolation3 = 0f;
        if (unitTime != 1) {
            // For each paced component, locate the keyTime interval containing
            // unitTime and compute the interpolation fraction within it. The
            // rotate case handles its third component, then falls through to
            // the second and first, which all paced types share.
            switch (type) {
                case SVGTransform.SVG_TRANSFORM_ROTATE:
                    int keyTimeIndex = 0;
                    while (keyTimeIndex < keyTimes3.length - 1
                            && unitTime >= keyTimes3[keyTimeIndex + 1]) {
                        keyTimeIndex++;
                    }
                    value3 = (AnimatableTransformListValue)
                        this.values[keyTimeIndex];
                    nextValue3 = (AnimatableTransformListValue)
                        this.values[keyTimeIndex + 1];
                    interpolation3 = (unitTime - keyTimes3[keyTimeIndex])
                        / (keyTimes3[keyTimeIndex + 1] -
                            keyTimes3[keyTimeIndex]);
                    // fall through
                default:
                    keyTimeIndex = 0;
                    while (keyTimeIndex < keyTimes2.length - 1
                            && unitTime >= keyTimes2[keyTimeIndex + 1]) {
                        keyTimeIndex++;
                    }
                    value2 = (AnimatableTransformListValue)
                        this.values[keyTimeIndex];
                    nextValue2 = (AnimatableTransformListValue)
                        this.values[keyTimeIndex + 1];
                    interpolation2 = (unitTime - keyTimes2[keyTimeIndex])
                        / (keyTimes2[keyTimeIndex + 1] -
                            keyTimes2[keyTimeIndex]);
                    keyTimeIndex = 0;
                    while (keyTimeIndex < keyTimes.length - 1
                            && unitTime >= keyTimes[keyTimeIndex + 1]) {
                        keyTimeIndex++;
                    }
                    value1 = (AnimatableTransformListValue)
                        this.values[keyTimeIndex];
                    nextValue1 = (AnimatableTransformListValue)
                        this.values[keyTimeIndex + 1];
                    interpolation1 = (unitTime - keyTimes[keyTimeIndex])
                        / (keyTimes[keyTimeIndex + 1] -
                            keyTimes[keyTimeIndex]);
            }
        } else {
            // At the exact end of the interval, snap every component to the
            // final animation value.
            value1 = value2 = value3 = (AnimatableTransformListValue)
                this.values[this.values.length - 1];
            nextValue1 = nextValue2 = nextValue3 = null;
            interpolation1 = interpolation2 = interpolation3 = 1f;
        }
        if (cumulative) {
            // For cumulative animation, the final value is handed to
            // interpolate() so each repeat iteration can build on it.
            accumulation = (AnimatableTransformListValue)
                this.values[this.values.length - 1];
        } else {
            accumulation = null;
        }
        // Rotate interpolates three components; all other paced types two.
        switch (type) {
            case SVGTransform.SVG_TRANSFORM_ROTATE:
                this.value = AnimatableTransformListValue.interpolate
                    ((AnimatableTransformListValue) this.value, value1, value2,
                     value3, nextValue1, nextValue2, nextValue3, interpolation1,
                     interpolation2, interpolation3, accumulation,
                     repeatIteration);
                break;
            default:
                this.value = AnimatableTransformListValue.interpolate
                    ((AnimatableTransformListValue) this.value, value1, value2,
                     nextValue1, nextValue2, interpolation1, interpolation2,
                     accumulation, repeatIteration);
                break;
        }
        if (this.value.hasChanged()) {
            markDirty();
        }
    }
}
| apache-2.0 |
jasongzcity/algorithm | leetcode/src/LowestCommonAncestor_236/Solution.java | 2989 | package LowestCommonAncestor_236;
import BinaryTree.TreeNode;
import java.util.Deque;
import java.util.LinkedList;
/**
* Given a binary tree, find the lowest common ancestor (LCA) of
* two given nodes in the tree.
*
* According to the definition of LCA on Wikipedia:
* “The lowest common ancestor is defined between two nodes
* v and w as the lowest node in T that has both v and w as descendants
* (where we allow a node to be a descendant of itself).”
*
* _______3______
* / \
* ___5__ ___1__
* / \ / \
* 6 2 0 8
* / \
* 7 4
* For example, the lowest common ancestor (LCA) of nodes 5 and 1 is 3.
* Another example is LCA of nodes 5 and 4 is 5
*/
public class Solution {
    // Second session
    /** Recursive LCA entry point (second attempt); delegates to {@link #find}. */
    public TreeNode lowestCommonAncestorII(TreeNode root, TreeNode p, TreeNode q){
        return find(root,p,q);
    }
    /**
     * Post-order search: returns p or q when the subtree root matches one of
     * them, the LCA node when p and q are found on different sides, or null
     * when neither target appears in this subtree.
     */
    private TreeNode find(TreeNode root, TreeNode p, TreeNode q){
        if(root==null) return null;
        if(root==p||root==q) return root;
        TreeNode left = find(root.left,p,q), right = find(root.right,p,q);
        // One target on each side: this node is the lowest common ancestor.
        if(left!=null&&right!=null) return root;
        if(left==right) return null; // both child not found
        // Exactly one side found something: propagate it upwards.
        return left==null?right:left;
    }
    // iterative solution
    // may look ugly but it got accepted :-))))
    /**
     * Iterative LCA via explicit-stack in-order traversal.  Assumes both p
     * and q are present in the tree (per the problem statement); otherwise
     * the ancestor stack could be exhausted.
     */
    public TreeNode lowestCommonAncestor2(TreeNode root, TreeNode p, TreeNode q) {
        if (p == q) return p;
        Deque<TreeNode> stack = new LinkedList<>();
        TreeNode n = root, target;
        // Phase 1: in-order traversal until the first of p/q is reached; at
        // that point `stack` holds the found node's unvisited ancestors and
        // `target` is the other node still to be located.
        while (true) {
            while (n != null) {
                stack.push(n);
                n = n.left;
            }
            n = stack.pop();
            if (n == p || n == q) {
                if (n == p) target = q;
                else target = p;
                break;
            }
            n = n.right;
        }
        // find target in right trees.
        // Phase 2: continue the traversal; `rs` is the current LCA candidate.
        // While stack2 is non-empty we are inside the candidate's subtree;
        // when it empties, climb the saved ancestor stack past fully-visited
        // right branches to the next candidate.
        TreeNode prev, rs = n;
        n = n.right;
        Deque<TreeNode> stack2 = new LinkedList<>();
        while (true) {
            while (n != null) {
                stack2.push(n);
                n = n.left;
            }
            if (stack2.isEmpty()) {
                // Candidate subtree exhausted: pop ancestors until one is
                // reached whose right subtree has not been visited yet.
                do {
                    prev = rs;
                    rs = stack.pop();
                } while (rs.right == prev);
                n = rs;
            } else {
                n = stack2.pop();
            }
            if (n == target) return rs;
            n = n.right;
        }
    }
    // most voted solution on leetcode
    // recursive solution
    // This solution is super fast
    /**
     * Classic recursion: a null/p/q root answers itself; otherwise the answer
     * is whichever side found a target, or this node when both sides did.
     */
    public TreeNode lowestCommonAncestor(TreeNode root, TreeNode p, TreeNode q) {
        if (root == null || root == p || root == q) return root;
        TreeNode left = lowestCommonAncestor(root.left, p, q);
        TreeNode right = lowestCommonAncestor(root.right, p, q);
        return left == null ? right : (right == null ? left : root);
    }
}
| apache-2.0 |
PeterIJia/android_xlight | app/src/main/java/com/umarbhutta/xlightcompanion/Tools/ToastUtil.java | 723 | package com.umarbhutta.xlightcompanion.Tools;
import android.content.Context;
import android.text.TextUtils;
import android.view.Gravity;
import android.widget.Toast;
public class ToastUtil {
private static Toast mToast = null;
public static void showToast(Context context, int Stringid) {
showToast(context, context.getString(Stringid));
}
public static void showToast(Context context, String string) {
if (TextUtils.isEmpty(string))
return;
try {
if (null == mToast) {
mToast = Toast.makeText(context, string, Toast.LENGTH_SHORT);
} else {
mToast.setText(string);
}
mToast.setGravity(Gravity.CENTER, 0, 0);
mToast.show();
} catch (Exception e) {
e.printStackTrace();
}
}
} | apache-2.0 |
ralgond/paxoskeeper | src/java/main/org/apache/zookeeper/server/quorum/QuorumPeer.java | 73988 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.server.quorum;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import javax.security.sasl.SaslException;
import org.apache.zookeeper.KeeperException.BadArgumentsException;
import org.apache.zookeeper.common.AtomicFileWritingIdiom;
import org.apache.zookeeper.common.AtomicFileWritingIdiom.WriterStatement;
import org.apache.zookeeper.common.Time;
import org.apache.zookeeper.jmx.MBeanRegistry;
import org.apache.zookeeper.jmx.ZKMBeanInfo;
import org.apache.zookeeper.server.ServerCnxnFactory;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.ZooKeeperThread;
import org.apache.zookeeper.server.quorum.auth.QuorumAuth;
import org.apache.zookeeper.server.quorum.auth.QuorumAuthLearner;
import org.apache.zookeeper.server.quorum.auth.QuorumAuthServer;
import org.apache.zookeeper.server.quorum.auth.SaslQuorumAuthLearner;
import org.apache.zookeeper.server.quorum.auth.SaslQuorumAuthServer;
import org.apache.zookeeper.server.quorum.auth.NullQuorumAuthLearner;
import org.apache.zookeeper.server.quorum.auth.NullQuorumAuthServer;
import org.apache.zookeeper.server.admin.AdminServer;
import org.apache.zookeeper.server.admin.AdminServer.AdminServerException;
import org.apache.zookeeper.server.admin.AdminServerFactory;
import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig.ConfigException;
import org.apache.zookeeper.server.quorum.flexible.QuorumMaj;
import org.apache.zookeeper.server.quorum.flexible.QuorumVerifier;
import org.apache.zookeeper.server.util.ZxidUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class manages the quorum protocol. There are three states this server
* can be in:
* <ol>
* <li>Leader election - each server will elect a leader (proposing itself as a
* leader initially).</li>
* <li>Follower - the server will synchronize with the leader and replicate any
* transactions.</li>
* <li>Leader - the server will process requests and forward them to followers.
* A majority of followers must log the request before it can be accepted.
* </ol>
*
* This class will setup a datagram socket that will always respond with its
* view of the current leader. The response will take the form of:
*
* <pre>
* int xid;
*
* long myid;
*
* long leader_id;
*
* long leader_zxid;
* </pre>
*
* The request for the current leader will consist solely of an xid: int xid;
*/
public class QuorumPeer extends ZooKeeperThread implements QuorumStats.Provider {
private static final Logger LOG = LoggerFactory.getLogger(QuorumPeer.class);
private QuorumBean jmxQuorumBean;
LocalPeerBean jmxLocalPeerBean;
private Map<Long, RemotePeerBean> jmxRemotePeerBean;
LeaderElectionBean jmxLeaderElectionBean;
private QuorumCnxManager qcm;
QuorumAuthServer authServer;
QuorumAuthLearner authLearner;
/**
* ZKDatabase is a top level member of quorumpeer
* which will be used in all the zookeeperservers
* instantiated later. Also, it is created once on
* bootup and only thrown away in case of a truncate
* message from the leader
*/
private ZKDatabase zkDb;
public static class QuorumServer {
public InetSocketAddress addr = null;
public InetSocketAddress electionAddr = null;
public InetSocketAddress clientAddr = null;
public long id;
public String hostname;
public LearnerType type = LearnerType.PARTICIPANT;
private List<InetSocketAddress> myAddrs;
public QuorumServer(long id, InetSocketAddress addr,
InetSocketAddress electionAddr, InetSocketAddress clientAddr) {
this(id, addr, electionAddr, clientAddr, LearnerType.PARTICIPANT);
}
public QuorumServer(long id, InetSocketAddress addr,
InetSocketAddress electionAddr) {
this(id, addr, electionAddr, (InetSocketAddress)null, LearnerType.PARTICIPANT);
}
// VisibleForTesting
public QuorumServer(long id, InetSocketAddress addr) {
this(id, addr, (InetSocketAddress)null, (InetSocketAddress)null, LearnerType.PARTICIPANT);
}
/**
* Performs a DNS lookup for server address and election address.
*
* If the DNS lookup fails, this.addr and electionAddr remain
* unmodified.
*/
public void recreateSocketAddresses() {
if (this.addr == null) {
LOG.warn("Server address has not been initialized");
return;
}
if (this.electionAddr == null) {
LOG.warn("Election address has not been initialized");
return;
}
String host = this.addr.getHostString();
InetAddress address = null;
try {
address = InetAddress.getByName(host);
} catch (UnknownHostException ex) {
LOG.warn("Failed to resolve address: {}", host, ex);
return;
}
LOG.debug("Resolved address for {}: {}", host, address);
int port = this.addr.getPort();
this.addr = new InetSocketAddress(address, port);
port = this.electionAddr.getPort();
this.electionAddr = new InetSocketAddress(address, port);
}
private void setType(String s) throws ConfigException {
if (s.toLowerCase().equals("observer")) {
type = LearnerType.OBSERVER;
} else if (s.toLowerCase().equals("participant")) {
type = LearnerType.PARTICIPANT;
} else {
throw new ConfigException("Unrecognised peertype: " + s);
}
}
private static String[] splitWithLeadingHostname(String s)
throws ConfigException
{
/* Does it start with an IPv6 literal? */
if (s.startsWith("[")) {
int i = s.indexOf("]:");
if (i < 0) {
throw new ConfigException(s + " starts with '[' but has no matching ']:'");
}
String[] sa = s.substring(i + 2).split(":");
String[] nsa = new String[sa.length + 1];
nsa[0] = s.substring(1, i);
System.arraycopy(sa, 0, nsa, 1, sa.length);
return nsa;
} else {
return s.split(":");
}
}
private static final String wrongFormat = " does not have the form server_config or server_config;client_config"+
" where server_config is host:port:port or host:port:port:type and client_config is port or host:port";
public QuorumServer(long sid, String addressStr) throws ConfigException {
// LOG.warn("sid = " + sid + " addressStr = " + addressStr);
this.id = sid;
String serverClientParts[] = addressStr.split(";");
String serverParts[] = splitWithLeadingHostname(serverClientParts[0]);
if ((serverClientParts.length > 2) || (serverParts.length < 3)
|| (serverParts.length > 4)) {
throw new ConfigException(addressStr + wrongFormat);
}
if (serverClientParts.length == 2) {
//LOG.warn("ClientParts: " + serverClientParts[1]);
String clientParts[] = splitWithLeadingHostname(serverClientParts[1]);
if (clientParts.length > 2) {
throw new ConfigException(addressStr + wrongFormat);
}
// is client_config a host:port or just a port
hostname = (clientParts.length == 2) ? clientParts[0] : "0.0.0.0";
try {
clientAddr = new InetSocketAddress(hostname,
Integer.parseInt(clientParts[clientParts.length - 1]));
//LOG.warn("Set clientAddr to " + clientAddr);
} catch (NumberFormatException e) {
throw new ConfigException("Address unresolved: " + hostname + ":" + clientParts[clientParts.length - 1]);
}
}
// server_config should be either host:port:port or host:port:port:type
try {
addr = new InetSocketAddress(serverParts[0],
Integer.parseInt(serverParts[1]));
} catch (NumberFormatException e) {
throw new ConfigException("Address unresolved: " + serverParts[0] + ":" + serverParts[1]);
}
try {
electionAddr = new InetSocketAddress(serverParts[0],
Integer.parseInt(serverParts[2]));
} catch (NumberFormatException e) {
throw new ConfigException("Address unresolved: " + serverParts[0] + ":" + serverParts[2]);
}
if (serverParts.length == 4) {
setType(serverParts[3]);
}
this.hostname = serverParts[0];
setMyAddrs();
}
public QuorumServer(long id, InetSocketAddress addr,
InetSocketAddress electionAddr, LearnerType type) {
this(id, addr, electionAddr, (InetSocketAddress)null, type);
}
public QuorumServer(long id, InetSocketAddress addr,
InetSocketAddress electionAddr, InetSocketAddress clientAddr, LearnerType type) {
this.id = id;
this.addr = addr;
this.electionAddr = electionAddr;
this.type = type;
this.clientAddr = clientAddr;
setMyAddrs();
}
private void setMyAddrs() {
this.myAddrs = new ArrayList<InetSocketAddress>();
this.myAddrs.add(this.addr);
this.myAddrs.add(this.clientAddr);
this.myAddrs.add(this.electionAddr);
this.myAddrs = excludedSpecialAddresses(this.myAddrs);
}
private static String delimitedHostString(InetSocketAddress addr)
{
String host = addr.getHostString();
if (host.contains(":")) {
return "[" + host + "]";
} else {
return host;
}
}
public String toString(){
StringWriter sw = new StringWriter();
//addr should never be null, but just to make sure
if (addr !=null) {
sw.append(delimitedHostString(addr));
sw.append(":");
sw.append(String.valueOf(addr.getPort()));
}
if (electionAddr!=null){
sw.append(":");
sw.append(String.valueOf(electionAddr.getPort()));
}
if (type == LearnerType.OBSERVER) sw.append(":observer");
else if (type == LearnerType.PARTICIPANT) sw.append(":participant");
if (clientAddr!=null){
sw.append(";");
sw.append(delimitedHostString(clientAddr));
sw.append(":");
sw.append(String.valueOf(clientAddr.getPort()));
}
return sw.toString();
}
public int hashCode() {
assert false : "hashCode not designed";
return 42; // any arbitrary constant will do
}
private boolean checkAddressesEqual(InetSocketAddress addr1, InetSocketAddress addr2){
if ((addr1 == null && addr2!=null) ||
(addr1!=null && addr2==null) ||
(addr1!=null && addr2!=null && !addr1.equals(addr2))) return false;
return true;
}
public boolean equals(Object o){
if (!(o instanceof QuorumServer)) return false;
QuorumServer qs = (QuorumServer)o;
if ((qs.id != id) || (qs.type != type)) return false;
if (!checkAddressesEqual(addr, qs.addr)) return false;
if (!checkAddressesEqual(electionAddr, qs.electionAddr)) return false;
if (!checkAddressesEqual(clientAddr, qs.clientAddr)) return false;
return true;
}
public void checkAddressDuplicate(QuorumServer s) throws BadArgumentsException {
List<InetSocketAddress> otherAddrs = new ArrayList<InetSocketAddress>();
otherAddrs.add(s.addr);
otherAddrs.add(s.clientAddr);
otherAddrs.add(s.electionAddr);
otherAddrs = excludedSpecialAddresses(otherAddrs);
for (InetSocketAddress my: this.myAddrs) {
for (InetSocketAddress other: otherAddrs) {
if (my.equals(other)) {
String error = String.format("%s of server.%d conflicts %s of server.%d", my, this.id, other, s.id);
throw new BadArgumentsException(error);
}
}
}
}
private List<InetSocketAddress> excludedSpecialAddresses(List<InetSocketAddress> addrs) {
List<InetSocketAddress> included = new ArrayList<InetSocketAddress>();
InetAddress wcAddr = new InetSocketAddress(0).getAddress();
for (InetSocketAddress addr : addrs) {
if (addr == null) {
continue;
}
InetAddress inetaddr = addr.getAddress();
if (inetaddr == null ||
inetaddr.equals(wcAddr) || // wildCard address(0.0.0.0)
inetaddr.isLoopbackAddress()) { // loopback address(localhost/127.0.0.1)
continue;
}
included.add(addr);
}
return included;
}
}
public enum ServerState {
LOOKING, FOLLOWING, LEADING, OBSERVING;
}
/*
* A peer can either be participating, which implies that it is willing to
* both vote in instances of consensus and to elect or become a Leader, or
* it may be observing in which case it isn't.
*
* We need this distinction to decide which ServerState to move to when
* conditions change (e.g. which state to become after LOOKING).
*/
public enum LearnerType {
PARTICIPANT, OBSERVER;
}
/*
* To enable observers to have no identifier, we need a generic identifier
* at least for QuorumCnxManager. We use the following constant to as the
* value of such a generic identifier.
*/
static final long OBSERVER_ID = Long.MAX_VALUE;
/*
* Record leader election time
*/
public long start_fle, end_fle; // fle = fast leader election
public static final String FLE_TIME_UNIT = "MS";
/*
* Default value of peer is participant
*/
private LearnerType learnerType = LearnerType.PARTICIPANT;
public LearnerType getLearnerType() {
return learnerType;
}
/**
* Sets the LearnerType
*/
public void setLearnerType(LearnerType p) {
learnerType = p;
}
protected synchronized void setConfigFileName(String s) {
configFilename = s;
}
private String configFilename = null;
public int getQuorumSize(){
return getVotingView().size();
}
/**
* QuorumVerifier implementation; default (majority).
*/
//last committed quorum verifier
public QuorumVerifier quorumVerifier;
//last proposed quorum verifier
public QuorumVerifier lastSeenQuorumVerifier = null;
// Lock object that guard access to quorumVerifier and lastSeenQuorumVerifier.
final Object QV_LOCK = new Object();
/**
* My id
*/
private long myid;
/**
* get the id of this quorum peer.
*/
public long getId() {
return myid;
}
/**
* This is who I think the leader currently is.
*/
volatile private Vote currentVote;
public synchronized Vote getCurrentVote(){
return currentVote;
}
public synchronized void setCurrentVote(Vote v){
currentVote = v;
}
private volatile boolean running = true;
/**
* The number of milliseconds of each tick
*/
protected int tickTime;
/**
* Whether learners in this quorum should create new sessions as local.
* False by default to preserve existing behavior.
*/
protected boolean localSessionsEnabled = false;
/**
* Whether learners in this quorum should upgrade local sessions to
* global. Only matters if local sessions are enabled.
*/
protected boolean localSessionsUpgradingEnabled = true;
/**
* Minimum number of milliseconds to allow for session timeout.
* A value of -1 indicates unset, use default.
*/
protected int minSessionTimeout = -1;
/**
* Maximum number of milliseconds to allow for session timeout.
* A value of -1 indicates unset, use default.
*/
protected int maxSessionTimeout = -1;
/**
* The number of ticks that the initial synchronization phase can take
*/
protected int initLimit;
/**
* The number of ticks that can pass between sending a request and getting
* an acknowledgment
*/
protected int syncLimit;
/**
* Enables/Disables sync request processor. This option is enabled
* by default and is to be used with observers.
*/
protected boolean syncEnabled = true;
/**
* The current tick
*/
protected AtomicInteger tick = new AtomicInteger();
/**
* Whether or not to listen on all IPs for the two quorum ports
* (broadcast and fast leader election).
*/
protected boolean quorumListenOnAllIPs = false;
/**
* Keeps time taken for leader election in milliseconds. Sets the value to
* this variable only after the completion of leader election.
*/
private long electionTimeTaken = -1;
/**
* Enable/Disables quorum authentication using sasl. Defaulting to false.
*/
protected boolean quorumSaslEnableAuth;
/**
* If this is false, quorum peer server will accept another quorum peer client
* connection even if the authentication did not succeed. This can be used while
* upgrading ZooKeeper server. Defaulting to false (required).
*/
protected boolean quorumServerSaslAuthRequired;
/**
* If this is false, quorum peer learner will talk to quorum peer server
* without authentication. This can be used while upgrading ZooKeeper
* server. Defaulting to false (required).
*/
protected boolean quorumLearnerSaslAuthRequired;
/**
* Kerberos quorum service principal. Defaulting to 'zkquorum/localhost'.
*/
protected String quorumServicePrincipal;
/**
* Quorum learner login context name in jaas-conf file to read the kerberos
* security details. Defaulting to 'QuorumLearner'.
*/
protected String quorumLearnerLoginContext;
/**
* Quorum server login context name in jaas-conf file to read the kerberos
* security details. Defaulting to 'QuorumServer'.
*/
protected String quorumServerLoginContext;
// TODO: need to tune the default value of thread size
private static final int QUORUM_CNXN_THREADS_SIZE_DEFAULT_VALUE = 20;
/**
* The maximum number of threads to allow in the connectionExecutors thread
* pool which will be used to initiate quorum server connections.
*/
protected int quorumCnxnThreadsSize = QUORUM_CNXN_THREADS_SIZE_DEFAULT_VALUE;
    /**
     * @deprecated As of release 3.4.0, this class has been deprecated, since
     * it is used with one of the udp-based versions of leader election, which
     * we are also deprecating.
     *
     * This class simply responds to requests for the current leader of this
     * node.
     * <p>
     * The request contains just an xid generated by the requestor.
     * <p>
     * The response has the xid, the id of this server, the id of the leader,
     * and the zxid of the leader.
     *
     *
     */
    @Deprecated
    class ResponderThread extends ZooKeeperThread {
        ResponderThread() {
            super("ResponderThread");
        }

        // Cleared by stopLeaderElection() to shut this thread down.
        volatile boolean running = true;

        @Override
        public void run() {
            try {
                // Fixed-size buffer reused for both the 4-byte request and
                // the response written back into the same array.
                byte b[] = new byte[36];
                ByteBuffer responseBuffer = ByteBuffer.wrap(b);
                DatagramPacket packet = new DatagramPacket(b, b.length);
                while (running) {
                    udpSocket.receive(packet);
                    // A valid request is exactly one int (the xid).
                    if (packet.getLength() != 4) {
                        LOG.warn("Got more than just an xid! Len = "
                                + packet.getLength());
                    } else {
                        responseBuffer.clear();
                        responseBuffer.getInt(); // Skip the xid
                        responseBuffer.putLong(myid);
                        Vote current = getCurrentVote();
                        // What follows the xid+myid depends on this peer's
                        // current role.
                        switch (getPeerState()) {
                        case LOOKING:
                            responseBuffer.putLong(current.getId());
                            responseBuffer.putLong(current.getZxid());
                            break;
                        case LEADING:
                            responseBuffer.putLong(myid);
                            try {
                                long proposed;
                                // Read lastProposed under the leader's lock
                                // for a consistent snapshot.
                                synchronized(leader) {
                                    proposed = leader.lastProposed;
                                }
                                responseBuffer.putLong(proposed);
                            } catch (NullPointerException npe) {
                                // This can happen in state transitions,
                                // just ignore the request
                            }
                            break;
                        case FOLLOWING:
                            responseBuffer.putLong(current.getId());
                            try {
                                responseBuffer.putLong(follower.getZxid());
                            } catch (NullPointerException npe) {
                                // This can happen in state transitions,
                                // just ignore the request
                            }
                            break;
                        case OBSERVING:
                            // Do nothing, Observers keep themselves to
                            // themselves.
                            break;
                        }
                        packet.setData(b);
                        udpSocket.send(packet);
                    }
                    packet.setLength(b.length);
                }
            } catch (RuntimeException e) {
                LOG.warn("Unexpected runtime exception in ResponderThread",e);
            } catch (IOException e) {
                LOG.warn("Unexpected IO exception in ResponderThread",e);
            } finally {
                LOG.warn("QuorumPeer responder thread exited");
            }
        }
    }
private ServerState state = ServerState.LOOKING;
private boolean reconfigFlag = false; // indicates that a reconfig just committed
public synchronized void setPeerState(ServerState newState){
state=newState;
}
public synchronized void reconfigFlagSet(){
reconfigFlag = true;
}
public synchronized void reconfigFlagClear(){
reconfigFlag = false;
}
public synchronized boolean isReconfigStateChange(){
return reconfigFlag;
}
public synchronized ServerState getPeerState(){
return state;
}
DatagramSocket udpSocket;
private InetSocketAddress myQuorumAddr;
private InetSocketAddress myElectionAddr = null;
private InetSocketAddress myClientAddr = null;
    /**
     * Resolves hostname for a given server ID.
     *
     * This method resolves hostname for a given server ID in both
     * quorumVerifier and lastSeenQuorumVerifier. If the server ID matches the
     * local server ID, it also updates myQuorumAddr and myElectionAddr.
     */
    public void recreateSocketAddresses(long id) {
        // Re-resolve in the last committed verifier first.
        QuorumVerifier qv = getQuorumVerifier();
        if (qv != null) {
            QuorumServer qs = qv.getAllMembers().get(id);
            if (qs != null) {
                qs.recreateSocketAddresses();
                if (id == getId()) {
                    // It is this peer's own entry: refresh the cached local
                    // quorum and election addresses as well.
                    setQuorumAddress(qs.addr);
                    setElectionAddress(qs.electionAddr);
                }
            }
        }
        // Then in the last proposed (not yet committed) verifier, if any.
        qv = getLastSeenQuorumVerifier();
        if (qv != null) {
            QuorumServer qs = qv.getAllMembers().get(id);
            if (qs != null) {
                qs.recreateSocketAddresses();
            }
        }
    }
public InetSocketAddress getQuorumAddress(){
synchronized (QV_LOCK) {
return myQuorumAddr;
}
}
public void setQuorumAddress(InetSocketAddress addr){
synchronized (QV_LOCK) {
myQuorumAddr = addr;
}
}
public InetSocketAddress getElectionAddress(){
synchronized (QV_LOCK) {
return myElectionAddr;
}
}
public void setElectionAddress(InetSocketAddress addr){
synchronized (QV_LOCK) {
myElectionAddr = addr;
}
}
public InetSocketAddress getClientAddress(){
synchronized (QV_LOCK) {
return myClientAddr;
}
}
public void setClientAddress(InetSocketAddress addr){
synchronized (QV_LOCK) {
myClientAddr = addr;
}
}
private int electionType;
Election electionAlg;
ServerCnxnFactory cnxnFactory;
ServerCnxnFactory secureCnxnFactory;
private FileTxnSnapLog logFactory = null;
private final QuorumStats quorumStats;
AdminServer adminServer;
public static QuorumPeer testingQuorumPeer() throws SaslException {
return new QuorumPeer();
}
    /**
     * Creates a bare QuorumPeer (thread named "QuorumPeer") with stats, the
     * JMX remote-peer bean map and the AdminServer constructed, and quorum
     * authentication set up via {@link #initialize()}.  Quorum configuration
     * is expected to be supplied afterwards.
     *
     * @throws SaslException if quorum SASL authentication cannot be set up
     */
    public QuorumPeer() throws SaslException {
        super("QuorumPeer");
        quorumStats = new QuorumStats(this);
        jmxRemotePeerBean = new HashMap<Long, RemotePeerBean>();
        adminServer = AdminServerFactory.createAdminServer();
        initialize();
    }
    /**
     * For backward compatibility purposes, we instantiate QuorumMaj by default.
     */
    public QuorumPeer(Map<Long, QuorumServer> quorumPeers, File dataDir,
            File dataLogDir, int electionType,
            long myid, int tickTime, int initLimit, int syncLimit,
            ServerCnxnFactory cnxnFactory) throws IOException {
        // Delegates to the full constructor with quorumListenOnAllIPs=false
        // and a majority-based verifier built from the given peer map.
        this(quorumPeers, dataDir, dataLogDir, electionType, myid, tickTime,
                initLimit, syncLimit, false, cnxnFactory,
                new QuorumMaj(quorumPeers));
    }
    /**
     * Fully-configured constructor: records timing/limit parameters, opens
     * the transaction log + snapshot directories and wraps them in a
     * ZKDatabase, then installs the quorum verifier (defaulting to simple
     * majority when none is given).
     *
     * @throws IOException if the transaction log / snapshot store cannot be opened
     */
    public QuorumPeer(Map<Long, QuorumServer> quorumPeers, File dataDir,
            File dataLogDir, int electionType,
            long myid, int tickTime, int initLimit, int syncLimit,
            boolean quorumListenOnAllIPs,
            ServerCnxnFactory cnxnFactory,
            QuorumVerifier quorumConfig) throws IOException {
        this();
        this.cnxnFactory = cnxnFactory;
        this.electionType = electionType;
        this.myid = myid;
        this.tickTime = tickTime;
        this.initLimit = initLimit;
        this.syncLimit = syncLimit;
        this.quorumListenOnAllIPs = quorumListenOnAllIPs;
        this.logFactory = new FileTxnSnapLog(dataLogDir, dataDir);
        this.zkDb = new ZKDatabase(this.logFactory);
        if(quorumConfig == null) quorumConfig = new QuorumMaj(quorumPeers);
        setQuorumVerifier(quorumConfig, false);
        // NOTE(review): this() above already created an AdminServer; it is
        // created a second time here, which looks redundant — confirm intent.
        adminServer = AdminServerFactory.createAdminServer();
    }
    /**
     * Sets up the quorum authentication server/learner pair.  When SASL
     * quorum auth is enabled, the hostnames of all known peers form the set
     * of hosts authorized to connect; otherwise no-op implementations are
     * installed.
     *
     * @throws SaslException if the SASL auth server/learner cannot be created
     */
    public void initialize() throws SaslException {
        // init quorum auth server & learner
        if (isQuorumSaslAuthEnabled()) {
            Set<String> authzHosts = new HashSet<String>();
            for (QuorumServer qs : getView().values()) {
                authzHosts.add(qs.hostname);
            }
            authServer = new SaslQuorumAuthServer(isQuorumServerSaslAuthRequired(),
                    quorumServerLoginContext, authzHosts);
            authLearner = new SaslQuorumAuthLearner(isQuorumLearnerSaslAuthRequired(),
                    quorumServicePrincipal, quorumLearnerLoginContext);
        } else {
            // Auth disabled: null-object implementations accept everything.
            authServer = new NullQuorumAuthServer();
            authLearner = new NullQuorumAuthLearner();
        }
    }
QuorumStats quorumStats() {
return quorumStats;
}
@Override
public synchronized void start() {
if (!getView().containsKey(myid)) {
throw new RuntimeException("My id " + myid + " not in the peer list");
}
loadDataBase();
startServerCnxnFactory();
try {
adminServer.start();
} catch (AdminServerException e) {
LOG.warn("Problem starting AdminServer", e);
System.out.println(e);
}
startLeaderElection();
super.start();
}
    /**
     * Loads the on-disk database into memory and restores the current and
     * accepted epoch values from their files.  When an epoch file is missing
     * (first start after upgrading from a version without epoch files), a
     * value is derived from the last processed zxid and written out.  Sanity
     * checks reject an epoch older than the last zxid's epoch and an accepted
     * epoch behind the current one.
     *
     * @throws RuntimeException wrapping any IOException, since the peer
     *         cannot run without its database
     */
    private void loadDataBase() {
        try {
            zkDb.loadDataBase();

            // load the epochs
            long lastProcessedZxid = zkDb.getDataTree().lastProcessedZxid;
            long epochOfZxid = ZxidUtils.getEpochFromZxid(lastProcessedZxid);
            try {
                currentEpoch = readLongFromFile(CURRENT_EPOCH_FILENAME);
            } catch(FileNotFoundException e) {
                // pick a reasonable epoch number
                // this should only happen once when moving to a
                // new code version
                currentEpoch = epochOfZxid;
                LOG.info(CURRENT_EPOCH_FILENAME
                        + " not found! Creating with a reasonable default of {}. This should only happen when you are upgrading your installation",
                        currentEpoch);
                writeLongToFile(CURRENT_EPOCH_FILENAME, currentEpoch);
            }
            if (epochOfZxid > currentEpoch) {
                throw new IOException("The current epoch, " + ZxidUtils.zxidToString(currentEpoch) + ", is older than the last zxid, " + lastProcessedZxid);
            }
            try {
                acceptedEpoch = readLongFromFile(ACCEPTED_EPOCH_FILENAME);
            } catch(FileNotFoundException e) {
                // pick a reasonable epoch number
                // this should only happen once when moving to a
                // new code version
                acceptedEpoch = epochOfZxid;
                LOG.info(ACCEPTED_EPOCH_FILENAME
                        + " not found! Creating with a reasonable default of {}. This should only happen when you are upgrading your installation",
                        acceptedEpoch);
                writeLongToFile(ACCEPTED_EPOCH_FILENAME, acceptedEpoch);
            }
            if (acceptedEpoch < currentEpoch) {
                throw new IOException("The accepted epoch, " + ZxidUtils.zxidToString(acceptedEpoch) + " is less than the current epoch, " + ZxidUtils.zxidToString(currentEpoch));
            }
        } catch(IOException ie) {
            LOG.error("Unable to load database on disk", ie);
            throw new RuntimeException("Unable to run quorum server ", ie);
        }
    }
ResponderThread responder;
    /**
     * Stops the UDP responder used by the deprecated election algorithms.
     * NOTE(review): {@code responder} is only ever created by those legacy
     * paths; calling this when none was started will NPE — confirm callers
     * only use it with the UDP-based election types.
     */
    synchronized public void stopLeaderElection() {
        responder.running = false;
        responder.interrupt();
    }
synchronized public void startLeaderElection() {
try {
if (getPeerState() == ServerState.LOOKING) {
currentVote = new Vote(myid, getLastLoggedZxid(), getCurrentEpoch());
}
} catch(IOException e) {
RuntimeException re = new RuntimeException(e.getMessage());
re.setStackTrace(e.getStackTrace());
throw re;
}
this.electionAlg = createElectionAlgorithm(electionType);
}
/**
* Count the number of nodes in the map that could be followers.
* @param peers
* @return The number of followers in the map
*/
protected static int countParticipants(Map<Long,QuorumServer> peers) {
int count = 0;
for (QuorumServer q : peers.values()) {
if (q.type == LearnerType.PARTICIPANT) {
count++;
}
}
return count;
}
/**
 * This constructor is only used by the existing unit test code.
 * It defaults to FileLogProvider persistence provider and a
 * simple-majority {@link QuorumMaj} quorum verifier.
 */
public QuorumPeer(Map<Long,QuorumServer> quorumPeers, File snapDir,
                  File logDir, int clientPort, int electionAlg,
                  long myid, int tickTime, int initLimit, int syncLimit)
    throws IOException
{
    this(quorumPeers, snapDir, logDir, electionAlg, myid, tickTime, initLimit, syncLimit, false,
         ServerCnxnFactory.createFactory(getClientAddress(quorumPeers, myid, clientPort), -1),
         new QuorumMaj(quorumPeers));
}

/**
 * This constructor is only used by the existing unit test code.
 * It defaults to FileLogProvider persistence provider but lets the
 * caller supply the quorum verifier.
 */
public QuorumPeer(Map<Long,QuorumServer> quorumPeers, File snapDir,
                  File logDir, int clientPort, int electionAlg,
                  long myid, int tickTime, int initLimit, int syncLimit,
                  QuorumVerifier quorumConfig)
    throws IOException
{
    this(quorumPeers, snapDir, logDir, electionAlg,
         myid, tickTime, initLimit, syncLimit, false,
         ServerCnxnFactory.createFactory(getClientAddress(quorumPeers, myid, clientPort), -1),
         quorumConfig);
}
/**
 * Resolves the client-facing socket address for the server with the
 * given id, validating it against the supplied client port.
 *
 * @throws IOException if the id is unknown or the configured client
 *         port disagrees with the supplied one
 */
private static InetSocketAddress getClientAddress(Map<Long, QuorumServer> quorumPeers, long myid, int clientPort)
        throws IOException {
    QuorumServer quorumServer = quorumPeers.get(myid);
    if (null == quorumServer) {
        // Fixed typo in the error message ("correspoding").
        throw new IOException("No QuorumServer corresponding to myid " + myid);
    }
    if (null == quorumServer.clientAddr) {
        // No explicit client address configured; bind the wildcard
        // address on the supplied port.
        return new InetSocketAddress(clientPort);
    }
    if (quorumServer.clientAddr.getPort() != clientPort) {
        throw new IOException("QuorumServer port " + quorumServer.clientAddr.getPort()
                + " does not match with given port " + clientPort);
    }
    return quorumServer.clientAddr;
}
/**
 * returns the highest zxid that this host has seen, loading the
 * database from disk first if it has not been initialized yet
 *
 * @return the highest zxid for this host
 */
public long getLastLoggedZxid() {
    if (!zkDb.isInitialized()) {
        loadDataBase();
    }
    return zkDb.getDataTreeLastProcessedZxid();
}

// Role objects for the current round; which one is non-null depends on
// this peer's state (set/cleared by the main loop in run()).
public Follower follower;
public Leader leader;
public Observer observer;
// Factory methods creating the role-specific server wrappers around the
// shared ZKDatabase; protected so subclasses can substitute their own.
protected Follower makeFollower(FileTxnSnapLog logFactory) throws IOException {
    return new Follower(this, new FollowerZooKeeperServer(logFactory, this, this.zkDb));
}

protected Leader makeLeader(FileTxnSnapLog logFactory) throws IOException {
    return new Leader(this, new LeaderZooKeeperServer(logFactory, this, this.zkDb));
}

protected Observer makeObserver(FileTxnSnapLog logFactory) throws IOException {
    return new Observer(this, new ObserverZooKeeperServer(logFactory, this, this.zkDb));
}
/**
 * Creates the leader-election implementation selected by
 * {@code electionAlgorithm}: 1 and 2 are the deprecated auth-based
 * variants, 3 is {@link FastLeaderElection} over the quorum connection
 * manager. Returns null if algorithm 3 cannot start its listener.
 */
@SuppressWarnings("deprecation")
protected Election createElectionAlgorithm(int electionAlgorithm){
    Election le=null;
    //TODO: use a factory rather than a switch
    switch (electionAlgorithm) {
    case 1:
        le = new AuthFastLeaderElection(this);
        break;
    case 2:
        le = new AuthFastLeaderElection(this, true);
        break;
    case 3:
        qcm = createCnxnManager();
        QuorumCnxManager.Listener listener = qcm.listener;
        if(listener != null){
            // Start accepting connections from other peers before the
            // election itself begins exchanging notifications.
            listener.start();
            FastLeaderElection fle = new FastLeaderElection(this, qcm);
            fle.start();
            le = fle;
        } else {
            LOG.error("Null listener when initializing cnx manager");
        }
        break;
    default:
        assert false;
    }
    return le;
}
// Returns the election algorithm created by startLeaderElection().
@SuppressWarnings("deprecation")
protected Election makeLEStrategy(){
    LOG.debug("Initializing leader election protocol...");
    return electionAlg;
}

// Synchronized setters for the role objects so transitions are visible
// to concurrent readers such as getActiveServer().
synchronized protected void setLeader(Leader newLeader){
    leader=newLeader;
}

synchronized protected void setFollower(Follower newFollower){
    follower=newFollower;
}

synchronized protected void setObserver(Observer newObserver){
    observer=newObserver;
}
/**
 * Returns the ZooKeeperServer of whichever role (leader, follower or
 * observer) is currently active, or null when none is.
 */
synchronized public ZooKeeperServer getActiveServer(){
    if(leader!=null)
        return leader.zk;
    else if(follower!=null)
        return follower.zk;
    else if (observer != null)
        return observer.zk;
    return null;
}

// When set, the main loop restarts leader election before the next
// lookForLeader() call (see the LOOKING branch in run()).
boolean shuttingDownLE = false;
/**
 * Main lifecycle loop: registers JMX beans, then cycles through
 * LOOKING / OBSERVING / FOLLOWING / LEADING until {@code running} is
 * cleared, finally unregistering all JMX beans.
 */
@Override
public void run() {
    updateThreadName();
    LOG.debug("Starting quorum peer");
    try {
        // Register this peer plus one bean per remote peer with JMX.
        // Registration failures only disable monitoring; never fatal.
        jmxQuorumBean = new QuorumBean(this);
        MBeanRegistry.getInstance().register(jmxQuorumBean, null);
        for(QuorumServer s: getView().values()){
            ZKMBeanInfo p;
            if (getId() == s.id) {
                p = jmxLocalPeerBean = new LocalPeerBean(this);
                try {
                    MBeanRegistry.getInstance().register(p, jmxQuorumBean);
                } catch (Exception e) {
                    LOG.warn("Failed to register with JMX", e);
                    jmxLocalPeerBean = null;
                }
            } else {
                RemotePeerBean rBean = new RemotePeerBean(s);
                try {
                    MBeanRegistry.getInstance().register(rBean, jmxQuorumBean);
                    jmxRemotePeerBean.put(s.id, rBean);
                } catch (Exception e) {
                    LOG.warn("Failed to register with JMX", e);
                }
            }
        }
    } catch (Exception e) {
        LOG.warn("Failed to register with JMX", e);
        jmxQuorumBean = null;
    }
    try {
        /*
         * Main loop
         */
        while (running) {
            switch (getPeerState()) {
            case LOOKING:
                LOG.info("LOOKING");
                if (Boolean.getBoolean("readonlymode.enabled")) {
                    LOG.info("Attempting to start ReadOnlyZooKeeperServer");
                    // Create read-only server but don't start it immediately
                    final ReadOnlyZooKeeperServer roZk =
                        new ReadOnlyZooKeeperServer(logFactory, this, this.zkDb);
                    // Instead of starting roZk immediately, wait some grace
                    // period before we decide we're partitioned.
                    //
                    // Thread is used here because otherwise it would require
                    // changes in each of election strategy classes which is
                    // unnecessary code coupling.
                    Thread roZkMgr = new Thread() {
                        public void run() {
                            try {
                                // lower-bound grace period to 2 secs
                                sleep(Math.max(2000, tickTime));
                                if (ServerState.LOOKING.equals(getPeerState())) {
                                    roZk.startup();
                                }
                            } catch (InterruptedException e) {
                                LOG.info("Interrupted while attempting to start ReadOnlyZooKeeperServer, not started");
                            } catch (Exception e) {
                                LOG.error("FAILED to start ReadOnlyZooKeeperServer", e);
                            }
                        }
                    };
                    try {
                        roZkMgr.start();
                        reconfigFlagClear();
                        if (shuttingDownLE) {
                            shuttingDownLE = false;
                            startLeaderElection();
                        }
                        setCurrentVote(makeLEStrategy().lookForLeader());
                    } catch (Exception e) {
                        LOG.warn("Unexpected exception", e);
                        setPeerState(ServerState.LOOKING);
                    } finally {
                        // If the thread is in the grace period, interrupt
                        // to come out of waiting.
                        roZkMgr.interrupt();
                        roZk.shutdown();
                    }
                } else {
                    try {
                        reconfigFlagClear();
                        if (shuttingDownLE) {
                            shuttingDownLE = false;
                            startLeaderElection();
                        }
                        setCurrentVote(makeLEStrategy().lookForLeader());
                    } catch (Exception e) {
                        LOG.warn("Unexpected exception", e);
                        setPeerState(ServerState.LOOKING);
                    }
                }
                break;
            case OBSERVING:
                try {
                    LOG.info("OBSERVING");
                    setObserver(makeObserver(logFactory));
                    observer.observeLeader();
                } catch (Exception e) {
                    LOG.warn("Unexpected exception", e);
                } finally {
                    observer.shutdown();
                    setObserver(null);
                    updateServerState();
                }
                break;
            case FOLLOWING:
                try {
                    LOG.info("FOLLOWING");
                    setFollower(makeFollower(logFactory));
                    follower.followLeader();
                } catch (Exception e) {
                    LOG.warn("Unexpected exception", e);
                } finally {
                    follower.shutdown();
                    setFollower(null);
                    updateServerState();
                }
                break;
            case LEADING:
                LOG.info("LEADING");
                try {
                    setLeader(makeLeader(logFactory));
                    leader.lead();
                    setLeader(null);
                } catch (Exception e) {
                    LOG.warn("Unexpected exception", e);
                } finally {
                    // lead() returning normally clears the field above;
                    // a non-null leader here means it ended abnormally.
                    if (leader != null) {
                        leader.shutdown("Forcing shutdown");
                        setLeader(null);
                    }
                    updateServerState();
                }
                break;
            }
            // Timestamp for the start of the next leader-election round.
            start_fle = Time.currentElapsedTime();
        }
    } finally {
        LOG.warn("QuorumPeer main thread exited");
        // Best-effort JMX cleanup on the way out.
        MBeanRegistry instance = MBeanRegistry.getInstance();
        instance.unregister(jmxQuorumBean);
        instance.unregister(jmxLocalPeerBean);
        for (RemotePeerBean remotePeerBean : jmxRemotePeerBean.values()) {
            instance.unregister(remotePeerBean);
        }
        jmxQuorumBean = null;
        jmxLocalPeerBean = null;
        jmxRemotePeerBean = null;
    }
}
/**
 * Decides the next peer state after a role ends. Without a pending
 * reconfig we simply go back to LOOKING; after a reconfig the new state
 * is derived from the current vote and this peer's learner type.
 */
private synchronized void updateServerState(){
    if (!reconfigFlag) {
        setPeerState(ServerState.LOOKING);
        LOG.warn("PeerState set to LOOKING");
        return;
    }
    if (getId() == getCurrentVote().getId()) {
        setPeerState(ServerState.LEADING);
        LOG.debug("PeerState set to LEADING");
    } else if (getLearnerType() == LearnerType.PARTICIPANT) {
        setPeerState(ServerState.FOLLOWING);
        LOG.debug("PeerState set to FOLLOWING");
    } else if (getLearnerType() == LearnerType.OBSERVER) {
        setPeerState(ServerState.OBSERVING);
        LOG.debug("PeerState set to OBSERVER");
    } else { // currently shouldn't happen since there are only 2 learner types
        setPeerState(ServerState.LOOKING);
        LOG.debug("Shouldn't be here");
    }
    reconfigFlag = false;
}
/**
 * Shuts this peer down: stops the active role, the client connection
 * factories, the UDP socket, the admin server, leader election and
 * finally the database. Each step is best-effort.
 */
public void shutdown() {
    running = false;
    if (leader != null) {
        leader.shutdown("quorum Peer shutdown");
    }
    if (follower != null) {
        follower.shutdown();
    }
    shutdownServerCnxnFactory();
    if(udpSocket != null) {
        udpSocket.close();
    }
    try {
        adminServer.shutdown();
    } catch (AdminServerException e) {
        LOG.warn("Problem stopping AdminServer", e);
    }
    if(getElectionAlg() != null){
        // Interrupt ourselves first so a main loop blocked inside
        // lookForLeader() can exit.
        this.interrupt();
        getElectionAlg().shutdown();
    }
    try {
        zkDb.close();
    } catch (IOException ie) {
        LOG.warn("Error closing logs ", ie);
    }
}
/**
 * A 'view' is a node's current opinion of the membership of the entire
 * ensemble. The returned map is unmodifiable.
 */
public Map<Long,QuorumPeer.QuorumServer> getView() {
    return Collections.unmodifiableMap(getQuorumVerifier().getAllMembers());
}

/**
 * Observers are not contained in this view, only nodes with
 * PeerType=PARTICIPANT.
 */
public Map<Long,QuorumPeer.QuorumServer> getVotingView() {
    return getQuorumVerifier().getVotingMembers();
}

/**
 * Returns only observers, no followers.
 */
public Map<Long,QuorumPeer.QuorumServer> getObservingView() {
    return getQuorumVerifier().getObservingMembers();
}

/**
 * Returns the union of voter ids in the committed configuration and,
 * when a newer configuration has been proposed, in that one as well.
 */
public synchronized Set<Long> getCurrentAndNextConfigVoters() {
    Set<Long> voterIds = new HashSet<Long>(getQuorumVerifier()
            .getVotingMembers().keySet());
    if (getLastSeenQuorumVerifier() != null) {
        voterIds.addAll(getLastSeenQuorumVerifier().getVotingMembers()
                .keySet());
    }
    return voterIds;
}
/**
 * Check if a node is in the current view. With static membership, the
 * result of this check will never change; only when dynamic membership
 * is introduced will this be more useful.
 */
public boolean viewContains(Long sid) {
    return this.getView().containsKey(sid);
}

/**
 * Only used by QuorumStats at the moment. When leading, returns the
 * remote addresses of connected learners (a '*' suffix marks a synced
 * learner); when following, returns the leader's address.
 */
public String[] getQuorumPeers() {
    List<String> l = new ArrayList<String>();
    synchronized (this) {
        if (leader != null) {
            for (LearnerHandler fh : leader.getLearners()) {
                if (fh.getSocket() != null) {
                    String s = fh.getSocket().getRemoteSocketAddress().toString();
                    if (leader.isLearnerSynced(fh))
                        s += "*";
                    l.add(s);
                }
            }
        } else if (follower != null) {
            l.add(follower.sock.getRemoteSocketAddress().toString());
        }
    }
    return l.toArray(new String[0]);
}

/** Maps the current peer state to its QuorumStats string constant. */
public String getServerState() {
    switch (getPeerState()) {
    case LOOKING:
        return QuorumStats.Provider.LOOKING_STATE;
    case LEADING:
        return QuorumStats.Provider.LEADING_STATE;
    case FOLLOWING:
        return QuorumStats.Provider.FOLLOWING_STATE;
    case OBSERVING:
        return QuorumStats.Provider.OBSERVING_STATE;
    }
    return QuorumStats.Provider.UNKNOWN_STATE;
}
/**
 * set the id of this quorum peer.
 */
public void setMyid(long myid) {
    this.myid = myid;
}

/**
 * Get the number of milliseconds of each tick
 */
public int getTickTime() {
    return tickTime;
}

/**
 * Set the number of milliseconds of each tick
 */
public void setTickTime(int tickTime) {
    LOG.info("tickTime set to " + tickTime);
    this.tickTime = tickTime;
}

/**
 * Maximum number of connections allowed from particular host (ip),
 * taken from whichever connection factory is configured; -1 when
 * neither factory exists.
 */
public int getMaxClientCnxnsPerHost() {
    if (cnxnFactory != null) {
        return cnxnFactory.getMaxClientCnxnsPerHost();
    }
    if (secureCnxnFactory != null) {
        return secureCnxnFactory.getMaxClientCnxnsPerHost();
    }
    return -1;
}

/** Whether local sessions are enabled */
public boolean areLocalSessionsEnabled() {
    return localSessionsEnabled;
}

/** Whether to enable local sessions */
public void enableLocalSessions(boolean flag) {
    LOG.info("Local sessions " + (flag ? "enabled" : "disabled"));
    localSessionsEnabled = flag;
}

/** Whether local sessions are allowed to upgrade to global sessions */
public boolean isLocalSessionsUpgradingEnabled() {
    return localSessionsUpgradingEnabled;
}

/** Whether to allow local sessions to upgrade to global sessions */
public void enableLocalSessionsUpgrading(boolean flag) {
    LOG.info("Local session upgrading " + (flag ? "enabled" : "disabled"));
    localSessionsUpgradingEnabled = flag;
}

/** minimum session timeout in milliseconds */
public int getMinSessionTimeout() {
    return minSessionTimeout;
}

/** Sets the minimum session timeout in milliseconds */
public void setMinSessionTimeout(int min) {
    LOG.info("minSessionTimeout set to " + min);
    this.minSessionTimeout = min;
}

/** maximum session timeout in milliseconds */
public int getMaxSessionTimeout() {
    return maxSessionTimeout;
}

/** Sets the maximum session timeout in milliseconds */
public void setMaxSessionTimeout(int max) {
    LOG.info("maxSessionTimeout set to " + max);
    this.maxSessionTimeout = max;
}

/**
 * Get the number of ticks that the initial synchronization phase can take
 */
public int getInitLimit() {
    return initLimit;
}

/**
 * Set the number of ticks that the initial synchronization phase can take
 */
public void setInitLimit(int initLimit) {
    LOG.info("initLimit set to " + initLimit);
    this.initLimit = initLimit;
}

/**
 * Get the current tick
 */
public int getTick() {
    return tick.get();
}

/**
 * Parses a dynamic-configuration string into a QuorumVerifier.
 *
 * @throws ConfigException if the configuration is invalid
 */
public QuorumVerifier configFromString(String s) throws IOException, ConfigException{
    Properties props = new Properties();
    props.load(new StringReader(s));
    return QuorumPeerConfig.parseDynamicConfig(props, electionType, false, false);
}
/**
 * Return QuorumVerifier object for the last committed configuration.
 */
public QuorumVerifier getQuorumVerifier(){
    synchronized (QV_LOCK) {
        return quorumVerifier;
    }
}

/**
 * Return QuorumVerifier object for the last proposed configuration.
 */
public QuorumVerifier getLastSeenQuorumVerifier(){
    synchronized (QV_LOCK) {
        return lastSeenQuorumVerifier;
    }
}

/**
 * Opens quorum connections to servers that appear in the proposed
 * configuration but not in the committed one (excluding ourselves).
 */
private void connectNewPeers(){
    synchronized (QV_LOCK) {
        if (qcm != null && quorumVerifier != null && lastSeenQuorumVerifier != null) {
            Map<Long, QuorumServer> committedView = quorumVerifier.getAllMembers();
            for (Entry<Long, QuorumServer> e : lastSeenQuorumVerifier.getAllMembers().entrySet()) {
                if (e.getKey() != getId() && !committedView.containsKey(e.getKey()))
                    qcm.connectOne(e.getKey());
            }
        }
    }
}

/** Restarts leader election, but only if the configuration actually changed. */
public synchronized void restartLeaderElection(QuorumVerifier qvOLD, QuorumVerifier qvNEW){
    if (qvOLD == null || !qvOLD.equals(qvNEW)) {
        LOG.warn("Restarting Leader Election");
        getElectionAlg().shutdown();
        shuttingDownLE = false;
        startLeaderElection();
    }
}

/**
 * Returns the filename used for the next (proposed) dynamic config,
 * or null when no static config filename is known (tests only).
 */
public String getNextDynamicConfigFilename() {
    if (configFilename == null) {
        LOG.warn("configFilename is null! This should only happen in tests.");
        return null;
    }
    return configFilename + QuorumPeerConfig.nextDynamicConfigFileSuffix;
}
/**
 * Records the most recent proposed configuration, connects to any new
 * peers it introduces and optionally persists it to the "next" dynamic
 * config file. Stale and already-known versions are ignored.
 */
public void setLastSeenQuorumVerifier(QuorumVerifier qv, boolean writeToDisk){
    synchronized (QV_LOCK) {
        if (lastSeenQuorumVerifier != null && lastSeenQuorumVerifier.getVersion() > qv.getVersion()) {
            LOG.error("setLastSeenQuorumVerifier called with stale config " + qv.getVersion() +
                    ". Current version: " + quorumVerifier.getVersion());
        }
        // assuming that a version uniquely identifies a configuration, so if
        // version is the same, nothing to do here.
        if (lastSeenQuorumVerifier != null &&
                lastSeenQuorumVerifier.getVersion() == qv.getVersion()) {
            return;
        }
        lastSeenQuorumVerifier = qv;
        connectNewPeers();
        if (writeToDisk) {
            try {
                String fileName = getNextDynamicConfigFilename();
                if (fileName != null) {
                    QuorumPeerConfig.writeDynamicConfig(fileName, qv, true);
                }
            } catch (IOException e) {
                // Pass the exception itself so SLF4J logs its stack trace.
                // Previously e.getMessage() was supplied to a message with
                // no placeholder, so the failure detail was dropped.
                LOG.error("Error writing next dynamic config file to disk", e);
            }
        }
    }
}
/**
 * Installs a newly committed configuration, optionally persisting it
 * (and trimming the static config), then adopts our own addresses from
 * the new membership.
 *
 * @return the previously committed verifier, or the current one when
 *         the supplied config is known or stale
 */
public QuorumVerifier setQuorumVerifier(QuorumVerifier qv, boolean writeToDisk){
    synchronized (QV_LOCK) {
        if ((quorumVerifier != null) && (quorumVerifier.getVersion() >= qv.getVersion())) {
            // this is normal. For example - server found out about new config through FastLeaderElection gossiping
            // and then got the same config in UPTODATE message so its already known
            LOG.debug(getId() + " setQuorumVerifier called with known or old config " + qv.getVersion() +
                    ". Current version: " + quorumVerifier.getVersion());
            return quorumVerifier;
        }
        QuorumVerifier prevQV = quorumVerifier;
        quorumVerifier = qv;
        if (lastSeenQuorumVerifier == null || (qv.getVersion() > lastSeenQuorumVerifier.getVersion()))
            lastSeenQuorumVerifier = qv;
        if (writeToDisk) {
            // some tests initialize QuorumPeer without a static config file
            if (configFilename != null) {
                try {
                    String dynamicConfigFilename = makeDynamicConfigFilename(
                            qv.getVersion());
                    QuorumPeerConfig.writeDynamicConfig(
                            dynamicConfigFilename, qv, false);
                    QuorumPeerConfig.editStaticConfig(configFilename,
                            dynamicConfigFilename,
                            needEraseClientInfoFromStaticConfig());
                } catch (IOException e) {
                    // Log the exception itself: the old call passed
                    // e.getMessage() to a message with no placeholder
                    // (and the message wrongly said "Error closing file").
                    LOG.error("Error writing dynamic config file to disk", e);
                }
            } else {
                LOG.info("writeToDisk == true but configFilename == null");
            }
        }
        if (qv.getVersion() == lastSeenQuorumVerifier.getVersion()) {
            // The proposed config is now committed; its staging file is
            // no longer needed.
            QuorumPeerConfig.deleteFile(getNextDynamicConfigFilename());
        }
        QuorumServer qs = qv.getAllMembers().get(getId());
        if (qs != null) {
            setQuorumAddress(qs.addr);
            setElectionAddress(qs.electionAddr);
            setClientAddress(qs.clientAddr);
        }
        return prevQV;
    }
}
// Builds the versioned dynamic-config filename, e.g. "<cfg>.dynamic.1a2b".
private String makeDynamicConfigFilename(long version) {
    return configFilename + ".dynamic." + Long.toHexString(version);
}

// True when this server's entry in the committed config carries a client
// address, in which case client info is erased from the static config.
private boolean needEraseClientInfoFromStaticConfig() {
    QuorumServer server = quorumVerifier.getAllMembers().get(getId());
    return (server != null && server.clientAddr != null);
}
/**
 * Get an instance of LeaderElection
 */
public Election getElectionAlg(){
    return electionAlg;
}

/**
 * Get the synclimit
 */
public int getSyncLimit() {
    return syncLimit;
}

/**
 * Set the synclimit
 */
public void setSyncLimit(int syncLimit) {
    this.syncLimit = syncLimit;
}

/**
 * The syncEnabled can also be set via a system property.
 */
public static final String SYNC_ENABLED = "zookeeper.observer.syncEnabled";

/**
 * Return syncEnabled. The system property, when set, takes precedence
 * over the configured value.
 *
 * @return whether observer syncing is enabled
 */
public boolean getSyncEnabled() {
    if (System.getProperty(SYNC_ENABLED) != null) {
        LOG.info(SYNC_ENABLED + "=" + Boolean.getBoolean(SYNC_ENABLED));
        return Boolean.getBoolean(SYNC_ENABLED);
    } else {
        return syncEnabled;
    }
}

/**
 * Set syncEnabled.
 *
 * @param syncEnabled the configured value (may be overridden by the
 *        system property, see {@link #getSyncEnabled()})
 */
public void setSyncEnabled(boolean syncEnabled) {
    this.syncEnabled = syncEnabled;
}
/**
 * Gets the election type
 */
public int getElectionType() {
    return electionType;
}

/**
 * Sets the election type
 */
public void setElectionType(int electionType) {
    this.electionType = electionType;
}

/** Whether the quorum/election ports bind to all local IPs. */
public boolean getQuorumListenOnAllIPs() {
    return quorumListenOnAllIPs;
}

public void setQuorumListenOnAllIPs(boolean quorumListenOnAllIPs) {
    this.quorumListenOnAllIPs = quorumListenOnAllIPs;
}

/** Sets the factory handling plaintext client connections. */
public void setCnxnFactory(ServerCnxnFactory cnxnFactory) {
    this.cnxnFactory = cnxnFactory;
}

/** Sets the factory handling secure client connections. */
public void setSecureCnxnFactory(ServerCnxnFactory secureCnxnFactory) {
    this.secureCnxnFactory = secureCnxnFactory;
}

// Starts whichever client connection factories are configured.
private void startServerCnxnFactory() {
    if (cnxnFactory != null) {
        cnxnFactory.start();
    }
    if (secureCnxnFactory != null) {
        secureCnxnFactory.start();
    }
}

// Stops whichever client connection factories are configured.
private void shutdownServerCnxnFactory() {
    if (cnxnFactory != null) {
        cnxnFactory.shutdown();
    }
    if (secureCnxnFactory != null) {
        secureCnxnFactory.shutdown();
    }
}

// Leader and learner will control the zookeeper server and pass it into QuorumPeer.
public void setZooKeeperServer(ZooKeeperServer zks) {
    if (cnxnFactory != null) {
        cnxnFactory.setZooKeeperServer(zks);
    }
    if (secureCnxnFactory != null) {
        secureCnxnFactory.setZooKeeperServer(zks);
    }
}

/** Closes all currently open client connections on both factories. */
public void closeAllConnections() {
    if (cnxnFactory != null) {
        cnxnFactory.closeAll();
    }
    if (secureCnxnFactory != null) {
        secureCnxnFactory.closeAll();
    }
}

/** Returns the plaintext client port, or -1 when no plain factory exists. */
public int getClientPort() {
    if (cnxnFactory != null) {
        return cnxnFactory.getLocalPort();
    }
    return -1;
}
/** Sets the transaction log / snapshot factory. */
public void setTxnFactory(FileTxnSnapLog factory) {
    this.logFactory = factory;
}

public FileTxnSnapLog getTxnFactory() {
    return this.logFactory;
}

/**
 * set zk database for this node
 * @param database the in-memory database to use
 */
public void setZKDatabase(ZKDatabase database) {
    this.zkDb = database;
}

protected ZKDatabase getZkDb() {
    return zkDb;
}

// Publishes the committed configuration into the database (backs the
// /zookeeper/config node).
public synchronized void initConfigInZKDatabase() {
    if (zkDb != null) zkDb.initConfigInZKDatabase(getQuorumVerifier());
}

public boolean isRunning() {
    return running;
}

/**
 * get reference to QuorumCnxManager
 */
public QuorumCnxManager getQuorumCnxManager() {
    return qcm;
}
/**
 * Reads a single long from the named file in the snapshot directory.
 *
 * @param name file name relative to the snapshot dir
 * @return the parsed long value
 * @throws IOException if the file is missing, empty or not a number
 */
private long readLongFromFile(String name) throws IOException {
    File file = new File(logFactory.getSnapDir(), name);
    // try-with-resources guarantees the reader is closed on all paths.
    try (BufferedReader br = new BufferedReader(new FileReader(file))) {
        String line = br.readLine();
        if (line == null) {
            // Previously an empty file fell through to Long.parseLong(null)
            // and produced the misleading message "Found null in ...".
            throw new IOException("Found empty file " + file);
        }
        try {
            return Long.parseLong(line.trim());
        } catch (NumberFormatException e) {
            // Chain the cause so the original parse failure is preserved.
            throw new IOException("Found " + line + " in " + file, e);
        }
    }
}
// Cached epoch values; -1 means "not yet read from disk".
private long acceptedEpoch = -1;
private long currentEpoch = -1;

// Names of the epoch files kept in the snapshot directory.
public static final String CURRENT_EPOCH_FILENAME = "currentEpoch";
public static final String ACCEPTED_EPOCH_FILENAME = "acceptedEpoch";

/**
 * Write a long value to disk atomically. Either succeeds or an exception
 * is thrown.
 * @param name file name to write the long to
 * @param value the long value to write to the named file
 * @throws IOException if the file cannot be written atomically
 */
private void writeLongToFile(String name, final long value) throws IOException {
    File file = new File(logFactory.getSnapDir(), name);
    new AtomicFileWritingIdiom(file, new WriterStatement() {
        @Override
        public void write(Writer bw) throws IOException {
            bw.write(Long.toString(value));
        }
    });
}
/** Returns the current epoch, reading it from disk on first access. */
public long getCurrentEpoch() throws IOException {
    if (currentEpoch == -1) {
        currentEpoch = readLongFromFile(CURRENT_EPOCH_FILENAME);
    }
    return currentEpoch;
}

/** Returns the accepted epoch, reading it from disk on first access. */
public long getAcceptedEpoch() throws IOException {
    if (acceptedEpoch == -1) {
        acceptedEpoch = readLongFromFile(ACCEPTED_EPOCH_FILENAME);
    }
    return acceptedEpoch;
}

/** Updates the current epoch both in memory and on disk. */
public void setCurrentEpoch(long e) throws IOException {
    currentEpoch = e;
    writeLongToFile(CURRENT_EPOCH_FILENAME, e);
}

/** Updates the accepted epoch both in memory and on disk. */
public void setAcceptedEpoch(long e) throws IOException {
    acceptedEpoch = e;
    writeLongToFile(ACCEPTED_EPOCH_FILENAME, e);
}
/**
 * Applies a newly committed configuration: persists it, refreshes JMX
 * beans, possibly restarts leader election, rebinds the client port,
 * updates this peer's learner type and its vote.
 *
 * @return true when the role or leader changed and the caller must
 *         abandon the current round; false otherwise
 */
public boolean processReconfig(QuorumVerifier qv, Long suggestedLeaderId, Long zxid, boolean restartLE) {
    if (!QuorumPeerConfig.isReconfigEnabled()) {
        LOG.debug("Reconfig feature is disabled, skip reconfig processing.");
        return false;
    }
    InetSocketAddress oldClientAddr = getClientAddress();
    // update last committed quorum verifier, write the new config to disk
    // and restart leader election if config changed.
    QuorumVerifier prevQV = setQuorumVerifier(qv, true);
    // There is no log record for the initial config, thus after syncing
    // with leader
    // /zookeeper/config is empty! it is also possible that last committed
    // config is propagated during leader election
    // without the propagation the corresponding log records.
    // so we should explicitly do this (this is not necessary when we're
    // already a Follower/Observer, only
    // for Learner):
    initConfigInZKDatabase();
    if (prevQV.getVersion() < qv.getVersion() && !prevQV.equals(qv)) {
        Map<Long, QuorumServer> newMembers = qv.getAllMembers();
        updateRemotePeerMXBeans(newMembers);
        if (restartLE) restartLeaderElection(prevQV, qv);
        QuorumServer myNewQS = newMembers.get(getId());
        if (myNewQS != null && myNewQS.clientAddr != null
                && !myNewQS.clientAddr.equals(oldClientAddr)) {
            // Our client address changed: rebind and rename this thread.
            cnxnFactory.reconfigure(myNewQS.clientAddr);
            updateThreadName();
        }
        boolean roleChange = updateLearnerType(qv);
        boolean leaderChange = false;
        if (suggestedLeaderId != null) {
            // zxid should be non-null too
            leaderChange = updateVote(suggestedLeaderId, zxid);
        } else {
            long currentLeaderId = getCurrentVote().getId();
            QuorumServer myleaderInCurQV = prevQV.getVotingMembers().get(currentLeaderId);
            QuorumServer myleaderInNewQV = qv.getVotingMembers().get(currentLeaderId);
            leaderChange = (myleaderInCurQV == null || myleaderInCurQV.addr == null ||
                    myleaderInNewQV == null || !myleaderInCurQV.addr.equals(myleaderInNewQV.addr));
            // we don't have a designated leader - need to go into leader
            // election
            reconfigFlagClear();
        }
        if (roleChange || leaderChange) {
            return true;
        }
    }
    return false;
}
/**
 * Reconciles the per-remote-peer JMX beans with a new membership map:
 * updates beans for servers that remain, registers beans for joiners
 * (excluding ourselves) and unregisters beans for leavers.
 */
private void updateRemotePeerMXBeans(Map<Long, QuorumServer> newMembers) {
    // Servers present both before and after: refresh their bean data.
    Set<Long> existingMembers = new HashSet<Long>(newMembers.keySet());
    existingMembers.retainAll(jmxRemotePeerBean.keySet());
    for (Long id : existingMembers) {
        RemotePeerBean rBean = jmxRemotePeerBean.get(id);
        rBean.setQuorumServer(newMembers.get(id));
    }
    // Newly added servers: register a bean for each.
    Set<Long> joiningMembers = new HashSet<Long>(newMembers.keySet());
    joiningMembers.removeAll(jmxRemotePeerBean.keySet());
    joiningMembers.remove(getId()); // remove self as it is local bean
    for (Long id : joiningMembers) {
        QuorumServer qs = newMembers.get(id);
        RemotePeerBean rBean = new RemotePeerBean(qs);
        try {
            MBeanRegistry.getInstance().register(rBean, jmxQuorumBean);
            jmxRemotePeerBean.put(qs.id, rBean);
        } catch (Exception e) {
            LOG.warn("Failed to register with JMX", e);
        }
    }
    // Removed servers: drop and unregister their beans.
    Set<Long> leavingMembers = new HashSet<Long>(jmxRemotePeerBean.keySet());
    leavingMembers.removeAll(newMembers.keySet());
    for (Long id : leavingMembers) {
        RemotePeerBean rBean = jmxRemotePeerBean.remove(id);
        try {
            MBeanRegistry.getInstance().unregister(rBean);
        } catch (Exception e) {
            LOG.warn("Failed to unregister with JMX", e);
        }
    }
}
/**
 * Aligns this peer's learner type with its role in the new
 * configuration: OBSERVER when listed as an observer, PARTICIPANT
 * otherwise (voting or not). Sets the reconfig flag on any change.
 *
 * @return true when the learner type actually changed
 */
private boolean updateLearnerType(QuorumVerifier newQV) {
    final LearnerType targetType;
    final String announcement;
    if (newQV.getObservingMembers().containsKey(getId())) {
        targetType = LearnerType.OBSERVER;
        announcement = "Becoming an observer";
    } else if (newQV.getVotingMembers().containsKey(getId())) {
        targetType = LearnerType.PARTICIPANT;
        announcement = "Becoming a voting participant";
    } else {
        // Not listed in the new view at all.
        targetType = LearnerType.PARTICIPANT;
        announcement = "Becoming a non-voting participant";
    }
    if (getLearnerType() == targetType) {
        return false;
    }
    setLearnerType(targetType);
    LOG.info(announcement);
    reconfigFlagSet();
    return true;
}
/**
 * Adopts the designated leader as our current vote when it differs
 * from the server we voted for, flagging the reconfig outcome.
 *
 * @return true when the vote was changed
 */
private boolean updateVote(long designatedLeader, long zxid){
    Vote currentVote = getCurrentVote();
    if (currentVote!=null && designatedLeader != currentVote.getId()) {
        setCurrentVote(new Vote(designatedLeader, zxid));
        reconfigFlagSet();
        LOG.warn("Suggested leader: " + designatedLeader);
        return true;
    }
    return false;
}

/**
 * Updates leader election info to avoid inconsistencies when
 * a new server tries to join the ensemble.
 *
 * See ZOOKEEPER-1732
 * (https://issues.apache.org/jira/browse/ZOOKEEPER-1732).
 */
protected void updateElectionVote(long newEpoch) {
    Vote currentVote = getCurrentVote();
    if (currentVote != null) {
        setCurrentVote(new Vote(currentVote.getId(),
                currentVote.getZxid(),
                currentVote.getElectionEpoch(),
                newEpoch,
                currentVote.getState()));
    }
}
/**
 * Renames this thread to include myid and the plain/secure client bind
 * addresses, e.g. {@code QuorumPeer[myid=1](plain=/0.0.0.0:2181)(secure=disabled)}.
 */
private void updateThreadName() {
    String plain = describeBindAddress(cnxnFactory);
    String secure = describeBindAddress(secureCnxnFactory);
    setName(String.format("QuorumPeer[myid=%d](plain=%s)(secure=%s)", getId(), plain, secure));
}

/**
 * Returns the factory's local address, or "disabled" when the factory
 * or its address is unavailable. Previously only the plain factory was
 * null-checked for its address, so a secure factory with no local
 * address caused a NullPointerException.
 */
private static String describeBindAddress(ServerCnxnFactory factory) {
    if (factory == null || factory.getLocalAddress() == null) {
        return "disabled";
    }
    return factory.getLocalAddress().toString();
}
/**
 * Sets the time taken for leader election in milliseconds.
 *
 * @param electionTimeTaken time taken for leader election
 */
void setElectionTimeTaken(long electionTimeTaken) {
    this.electionTimeTaken = electionTimeTaken;
}

/**
 * @return the time taken for leader election in milliseconds.
 */
long getElectionTimeTaken() {
    return electionTimeTaken;
}
/** Requires SASL authentication for quorum servers when true. */
void setQuorumServerSaslRequired(boolean serverSaslRequired) {
    quorumServerSaslAuthRequired = serverSaslRequired;
    LOG.info("{} set to {}", QuorumAuth.QUORUM_SERVER_SASL_AUTH_REQUIRED,
            serverSaslRequired);
}

/** Requires SASL authentication for learners when true. */
void setQuorumLearnerSaslRequired(boolean learnerSaslRequired) {
    quorumLearnerSaslAuthRequired = learnerSaslRequired;
    LOG.info("{} set to {}", QuorumAuth.QUORUM_LEARNER_SASL_AUTH_REQUIRED,
            learnerSaslRequired);
}

/** Enables or disables SASL authentication on quorum communication. */
void setQuorumSaslEnabled(boolean enableAuth) {
    quorumSaslEnableAuth = enableAuth;
    if (!quorumSaslEnableAuth) {
        LOG.info("QuorumPeer communication is not secured!");
    } else {
        LOG.info("{} set to {}",
                QuorumAuth.QUORUM_SASL_AUTH_ENABLED, enableAuth);
    }
}

/** Sets the Kerberos service principal used for quorum authentication. */
void setQuorumServicePrincipal(String servicePrincipal) {
    quorumServicePrincipal = servicePrincipal;
    LOG.info("{} set to {}", QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL,
            quorumServicePrincipal);
}

/** Sets the SASL login context name used by learners. */
void setQuorumLearnerLoginContext(String learnerContext) {
    quorumLearnerLoginContext = learnerContext;
    LOG.info("{} set to {}", QuorumAuth.QUORUM_LEARNER_SASL_LOGIN_CONTEXT,
            quorumLearnerLoginContext);
}

/** Sets the SASL login context name used by servers. */
void setQuorumServerLoginContext(String serverContext) {
    quorumServerLoginContext = serverContext;
    LOG.info("{} set to {}", QuorumAuth.QUORUM_SERVER_SASL_LOGIN_CONTEXT,
            quorumServerLoginContext);
}

/**
 * Sets the quorum connection thread pool size; values at or below the
 * default are ignored.
 */
void setQuorumCnxnThreadsSize(int qCnxnThreadsSize) {
    if (qCnxnThreadsSize > QUORUM_CNXN_THREADS_SIZE_DEFAULT_VALUE) {
        quorumCnxnThreadsSize = qCnxnThreadsSize;
    }
    LOG.info("quorum.cnxn.threads.size set to {}", quorumCnxnThreadsSize);
}

boolean isQuorumSaslAuthEnabled() {
    return quorumSaslEnableAuth;
}

private boolean isQuorumServerSaslAuthRequired() {
    return quorumServerSaslAuthRequired;
}

private boolean isQuorumLearnerSaslAuthRequired() {
    return quorumLearnerSaslAuthRequired;
}
/**
 * Builds a QuorumCnxManager wired to this peer's id, view, auth
 * handlers and a timeout of tickTime * syncLimit.
 */
public QuorumCnxManager createCnxnManager() {
    return new QuorumCnxManager(this,
            this.getId(),
            this.getView(),
            this.authServer,
            this.authLearner,
            this.tickTime * this.syncLimit,
            this.getQuorumListenOnAllIPs(),
            this.quorumCnxnThreadsSize,
            this.isQuorumSaslAuthEnabled());
}
}
| apache-2.0 |
CloudSlang/cs-actions | cs-date-time/src/main/java/io/cloudslang/content/datetime/utils/DatetimeInputs.java | 1537 | /*
* (c) Copyright 2019 EntIT Software LLC, a Micro Focus company, L.P.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0 which accompany this distribution.
*
* The Apache License is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.content.datetime.utils;
import io.cloudslang.content.constants.InputNames;
/**
* Created by victor on 17.10.2016.
*/
/**
 * Input-name constants used by the date/time operations; extends the
 * shared {@link InputNames} set.
 *
 * Created by victor on 17.10.2016.
 */
public class DatetimeInputs extends InputNames {
    // Locale of the current operation.
    public static final String LOCALE_LANG = "localeLang";
    public static final String LOCALE_COUNTRY = "localeCountry";
    // The date value itself and an offset input.
    public static final String LOCALE_DATE = "date";
    public static final String LOCALE_OFFSET = "offset";
    // Format and locale of the incoming date.
    public static final String DATE_FORMAT = "dateFormat";
    public static final String DATE_LOCALE_LANG = "dateLocaleLang";
    public static final String DATE_LOCALE_COUNTRY = "dateLocaleCountry";
    // Format and locale of the produced date.
    public static final String OUT_FORMAT = "outFormat";
    public static final String OUT_LOCALE_LANG = "outLocaleLang";
    public static final String OUT_LOCALE_COUNTRY = "outLocaleCountry";
    public static final String TIMEZONE = "timezone";
}
| apache-2.0 |
yafengguo/Apache-beam | sdks/java/extensions/sorter/src/test/java/org/apache/beam/sdk/extensions/sorter/InMemorySorterTest.java | 4860 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sorter;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.fail;
import org.apache.beam.sdk.extensions.sorter.SorterTestUtils.SorterGenerator;
import org.apache.beam.sdk.values.KV;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link InMemorySorter}. */
@RunWith(JUnit4.class)
public class InMemorySorterTest {
// Expected-exception rule used by the tests that exercise illegal call
// sequences on the sorter.
@Rule public ExpectedException thrown = ExpectedException.none();

// Basic sorter behaviors, each delegated to a SorterTestUtils helper
// against a freshly created InMemorySorter with default options.
@Test
public void testEmpty() throws Exception {
    SorterTestUtils.testEmpty(InMemorySorter.create(new InMemorySorter.Options()));
}

@Test
public void testSingleElement() throws Exception {
    SorterTestUtils.testSingleElement(InMemorySorter.create(new InMemorySorter.Options()));
}

@Test
public void testEmptyKeyValueElement() throws Exception {
    SorterTestUtils.testEmptyKeyValueElement(InMemorySorter.create(new InMemorySorter.Options()));
}

@Test
public void testMultipleIterations() throws Exception {
    SorterTestUtils.testMultipleIterations(InMemorySorter.create(new InMemorySorter.Options()));
}
@Test
public void testManySorters() throws Exception {
SorterTestUtils.testRandom(
new SorterGenerator() {
@Override
public Sorter generateSorter() throws Exception {
return InMemorySorter.create(new InMemorySorter.Options());
}
},
1000000,
10);
}
@Test
public void testAddAfterSort() throws Exception {
SorterTestUtils.testAddAfterSort(InMemorySorter.create(new InMemorySorter.Options()), thrown);
fail();
}
@Test
public void testSortTwice() throws Exception {
SorterTestUtils.testSortTwice(InMemorySorter.create(new InMemorySorter.Options()), thrown);
fail();
}
/**
* Verify an exception is thrown when the in memory sorter runs out of space.
*
* @throws Exception
*/
@Test
public void testOutOfSpace() throws Exception {
thrown.expect(IllegalStateException.class);
thrown.expectMessage(is("No space remaining for in memory sorting"));
SorterTestUtils.testRandom(
new SorterGenerator() {
@Override
public Sorter generateSorter() throws Exception {
InMemorySorter.Options options = new InMemorySorter.Options();
options.setMemoryMB(1);
return InMemorySorter.create(options);
}
},
1,
10000000);
}
@Test
public void testAddIfRoom() throws Exception {
InMemorySorter.Options options = new InMemorySorter.Options();
options.setMemoryMB(1);
InMemorySorter sorter = InMemorySorter.create(options);
// Should be a few kb less than what the total buffer supports
KV<byte[], byte[]> bigRecord = KV.of(new byte[1024 * 500], new byte[1024 * 500]);
// First add should succeed, second add should fail due to insufficient room
Assert.assertTrue(sorter.addIfRoom(bigRecord));
Assert.assertFalse(sorter.addIfRoom(bigRecord));
}
@Test
public void testAddIfRoomOverhead() throws Exception {
InMemorySorter.Options options = new InMemorySorter.Options();
options.setMemoryMB(1);
InMemorySorter sorter = InMemorySorter.create(options);
// No bytes within record, should still run out of room due to memory overhead of record
KV<byte[], byte[]> tinyRecord = KV.of(new byte[0], new byte[0]);
// Verify we can't insert one million records into this one megabyte buffer
boolean stillRoom = true;
for (int i = 0; (i < 1000000) && stillRoom; i++) {
stillRoom = sorter.addIfRoom(tinyRecord);
}
Assert.assertFalse(stillRoom);
}
@Test
public void testNegativeMemory() throws Exception {
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("memoryMB must be greater than zero");
InMemorySorter.Options options = new InMemorySorter.Options();
options.setMemoryMB(-1);
}
}
| apache-2.0 |
jimmylai/slideshare | storm_demo/storm-starter/src/jvm/storm/starter/bolt/PrinterBolt.java | 491 | package storm.starter.bolt;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;
/** A trivial sink bolt that writes every incoming tuple to standard output. */
public class PrinterBolt extends BaseBasicBolt {
  @Override
  public void execute(Tuple input, BasicOutputCollector collector) {
    // Rely on Tuple's own toString() for a human-readable dump.
    System.out.println(input);
  }

  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // This bolt emits nothing downstream, so no output fields are declared.
  }
}
| apache-2.0 |
NikoYuwono/retrofit | retrofit/src/main/java/retrofit2/Callback.java | 1684 | /*
* Copyright (C) 2012 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package retrofit2;
/**
* Communicates responses from a server or offline requests. One and only one method will be
* invoked in response to a given request.
* <p>
* Callback methods are executed using the {@link Retrofit} callback executor. When none is
* specified, the following defaults are used:
* <ul>
* <li>Android: Callbacks are executed on the application's main (UI) thread.</li>
* <li>JVM: Callbacks are executed on the background thread which performed the request.</li>
* </ul>
*
* @param <T> Successful response body type.
*/
public interface Callback<T> {
  /**
   * Invoked for a received HTTP response.
   * <p>
   * Note: An HTTP response may still indicate an application-level failure such as a 404 or 500.
   * Call {@link Response#isSuccess()} to determine if the response indicates success.
   *
   * @param response the deserialized HTTP response, never {@code null}
   */
  void onResponse(Response<T> response);
  /**
   * Invoked when a network exception occurred talking to the server or when an unexpected
   * exception occurred creating the request or processing the response.
   *
   * @param t the cause of the failure (e.g. an {@code IOException} for network errors)
   */
  void onFailure(Throwable t);
}
| apache-2.0 |
q474818917/solr-5.2.0 | solr/core/src/test/org/apache/solr/cloud/AsyncMigrateRouteKeyTest.java | 4508 | package org.apache.solr.cloud;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.junit.Test;
import java.io.IOException;
public class AsyncMigrateRouteKeyTest extends MigrateRouteKeyTest {

  /** Upper bound, in seconds, on how long we poll for async task completion. */
  private static final int MAX_WAIT_SECONDS = 2 * 60;

  public AsyncMigrateRouteKeyTest() {
    schemaString = "schema15.xml"; // we need a string id
  }

  @Test
  public void test() throws Exception {
    waitForThingsToLevelOut(15);
    multipleShardMigrateTest();
    printLayout();
  }

  /**
   * Polls the collections REQUESTSTATUS API until the task identified by
   * {@code asyncId} is first observed running and then observed completed.
   */
  protected void checkAsyncRequestForCompletion(String asyncId) throws SolrServerException, IOException {
    ModifiableSolrParams params;
    String message;
    params = new ModifiableSolrParams();
    params.set("action", CollectionParams.CollectionAction.REQUESTSTATUS.toString());
    params.set(OverseerCollectionProcessor.REQUESTID, asyncId);
    // This task takes long enough to run. Also check for the current state of the task to be running.
    message = sendStatusRequestWithRetry(params, 5);
    assertEquals("found " + asyncId + " in running tasks", message);
    // Now wait until the task actually completes successfully/fails.
    message = sendStatusRequestWithRetry(params, MAX_WAIT_SECONDS);
    assertEquals("Task " + asyncId + " not found in completed tasks.",
        "found " + asyncId + " in completed tasks", message);
  }

  /** Issues the MIGRATE API call with an async id and waits for it to complete. */
  @Override
  protected void invokeMigrateApi(String sourceCollection, String splitKey, String targetCollection) throws SolrServerException, IOException {
    ModifiableSolrParams params = new ModifiableSolrParams();
    String asyncId = "20140128";
    params.set(CollectionParams.ACTION, CollectionParams.CollectionAction.MIGRATE.toString());
    params.set("collection", sourceCollection);
    params.set("target.collection", targetCollection);
    params.set("split.key", splitKey);
    params.set("forward.timeout", 45);
    params.set("async", asyncId);
    invoke(params);
    checkAsyncRequestForCompletion(asyncId);
  }

  /**
   * Helper method to send a status request with specific retry limit and return
   * the message/null from the success response.
   *
   * @param params the REQUESTSTATUS request parameters
   * @param maxCounter maximum number of polls (roughly one per second)
   * @return the status message of a terminal state, or the last message seen
   *     (possibly null) when retries are exhausted or the thread is interrupted
   */
  private String sendStatusRequestWithRetry(ModifiableSolrParams params, int maxCounter)
      throws SolrServerException, IOException {
    NamedList status;
    String state;
    String message = null;
    NamedList r;
    while (maxCounter-- > 0) {
      r = sendRequest(params);
      status = (NamedList) r.get("status");
      state = (String) status.get("state");
      message = (String) status.get("msg");
      // Terminal states end the polling loop immediately.
      if (state.equals("completed") || state.equals("failed")) {
        return message;
      }
      try {
        Thread.sleep(1000);
      } catch (InterruptedException e) {
        // Fix: the interrupt was previously swallowed and polling continued.
        // Restore the thread's interrupt flag and stop retrying instead.
        Thread.currentThread().interrupt();
        break;
      }
    }
    // Retries exhausted (or interrupted): report the last state message seen.
    return message;
  }

  /** Sends the given request to the collections admin endpoint of shard1's node. */
  protected NamedList sendRequest(ModifiableSolrParams params) throws SolrServerException, IOException {
    SolrRequest request = new QueryRequest(params);
    request.setPath("/admin/collections");
    String baseUrl = ((HttpSolrClient) shardToJetty.get(SHARD1).get(0).client.solrClient)
        .getBaseURL();
    // Strip the default core name so the URL targets the node-level admin handler.
    baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
    try (HttpSolrClient baseServer = new HttpSolrClient(baseUrl)) {
      baseServer.setConnectionTimeout(15000);
      return baseServer.request(request);
    }
  }
}
| apache-2.0 |
treejames/LoadingComponentView | src/mss/activities/LauncherMainActivity.java | 533 | package mss.activities;
import mss.fragments.sample.LoadingComponentFragment;
import android.app.Activity;
import android.os.Bundle;
/**
*
* @author Juan Aguilar Guisado
* @version 1.0
* @since 1.0
*
*/
public class LauncherMainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.layout_launcher_main);

        // Host the sample loading-component fragment inside this activity's container.
        LoadingComponentFragment fragment = new LoadingComponentFragment();
        getFragmentManager()
                .beginTransaction()
                .add(R.id.container, fragment)
                .commit();
    }
}
| apache-2.0 |
sankarh/hive | standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java | 20789 | /**
* Autogenerated by Thrift Compiler (0.14.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class Schema implements org.apache.thrift.TBase<Schema, Schema._Fields>, java.io.Serializable, Cloneable, Comparable<Schema> {
  // NOTE(review): generated Thrift code ("DO NOT EDIT" header above); the
  // comments added below are review annotations only — all code is unchanged.
  // Wire-level descriptors: the struct name plus its two fields
  // (id 1: LIST of FieldSchema, id 2: MAP<String,String>).
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Schema");
  private static final org.apache.thrift.protocol.TField FIELD_SCHEMAS_FIELD_DESC = new org.apache.thrift.protocol.TField("fieldSchemas", org.apache.thrift.protocol.TType.LIST, (short)1);
  private static final org.apache.thrift.protocol.TField PROPERTIES_FIELD_DESC = new org.apache.thrift.protocol.TField("properties", org.apache.thrift.protocol.TType.MAP, (short)2);
  // Factories for the two serialization strategies Thrift supports for this struct.
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new SchemaStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new SchemaTupleSchemeFactory();
  private @org.apache.thrift.annotation.Nullable java.util.List<FieldSchema> fieldSchemas; // required
  private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> properties; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    FIELD_SCHEMAS((short)1, "fieldSchemas"),
    PROPERTIES((short)2, "properties");
    // Lookup table from field name to enum constant, populated once at class load.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // FIELD_SCHEMAS
          return FIELD_SCHEMAS;
        case 2: // PROPERTIES
          return PROPERTIES;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final java.lang.String _fieldName;
    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Static field metadata, registered with Thrift's global struct-metadata map.
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.FIELD_SCHEMAS, new org.apache.thrift.meta_data.FieldMetaData("fieldSchemas", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, FieldSchema.class))));
    tmpMap.put(_Fields.PROPERTIES, new org.apache.thrift.meta_data.FieldMetaData("properties", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Schema.class, metaDataMap);
  }
  public Schema() {
  }
  public Schema(
    java.util.List<FieldSchema> fieldSchemas,
    java.util.Map<java.lang.String,java.lang.String> properties)
  {
    this();
    this.fieldSchemas = fieldSchemas;
    this.properties = properties;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public Schema(Schema other) {
    if (other.isSetFieldSchemas()) {
      // Deep copy: each contained FieldSchema is copied via its copy constructor.
      java.util.List<FieldSchema> __this__fieldSchemas = new java.util.ArrayList<FieldSchema>(other.fieldSchemas.size());
      for (FieldSchema other_element : other.fieldSchemas) {
        __this__fieldSchemas.add(new FieldSchema(other_element));
      }
      this.fieldSchemas = __this__fieldSchemas;
    }
    if (other.isSetProperties()) {
      // Strings are immutable, so a shallow map copy suffices here.
      java.util.Map<java.lang.String,java.lang.String> __this__properties = new java.util.HashMap<java.lang.String,java.lang.String>(other.properties);
      this.properties = __this__properties;
    }
  }
  public Schema deepCopy() {
    return new Schema(this);
  }
  @Override
  public void clear() {
    this.fieldSchemas = null;
    this.properties = null;
  }
  public int getFieldSchemasSize() {
    return (this.fieldSchemas == null) ? 0 : this.fieldSchemas.size();
  }
  @org.apache.thrift.annotation.Nullable
  public java.util.Iterator<FieldSchema> getFieldSchemasIterator() {
    return (this.fieldSchemas == null) ? null : this.fieldSchemas.iterator();
  }
  public void addToFieldSchemas(FieldSchema elem) {
    if (this.fieldSchemas == null) {
      this.fieldSchemas = new java.util.ArrayList<FieldSchema>();
    }
    this.fieldSchemas.add(elem);
  }
  @org.apache.thrift.annotation.Nullable
  public java.util.List<FieldSchema> getFieldSchemas() {
    return this.fieldSchemas;
  }
  public void setFieldSchemas(@org.apache.thrift.annotation.Nullable java.util.List<FieldSchema> fieldSchemas) {
    this.fieldSchemas = fieldSchemas;
  }
  public void unsetFieldSchemas() {
    this.fieldSchemas = null;
  }
  /** Returns true if field fieldSchemas is set (has been assigned a value) and false otherwise */
  public boolean isSetFieldSchemas() {
    return this.fieldSchemas != null;
  }
  public void setFieldSchemasIsSet(boolean value) {
    if (!value) {
      this.fieldSchemas = null;
    }
  }
  public int getPropertiesSize() {
    return (this.properties == null) ? 0 : this.properties.size();
  }
  public void putToProperties(java.lang.String key, java.lang.String val) {
    if (this.properties == null) {
      this.properties = new java.util.HashMap<java.lang.String,java.lang.String>();
    }
    this.properties.put(key, val);
  }
  @org.apache.thrift.annotation.Nullable
  public java.util.Map<java.lang.String,java.lang.String> getProperties() {
    return this.properties;
  }
  public void setProperties(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> properties) {
    this.properties = properties;
  }
  public void unsetProperties() {
    this.properties = null;
  }
  /** Returns true if field properties is set (has been assigned a value) and false otherwise */
  public boolean isSetProperties() {
    return this.properties != null;
  }
  public void setPropertiesIsSet(boolean value) {
    if (!value) {
      this.properties = null;
    }
  }
  // Generic, enum-driven field access used by Thrift's reflective machinery.
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case FIELD_SCHEMAS:
      if (value == null) {
        unsetFieldSchemas();
      } else {
        setFieldSchemas((java.util.List<FieldSchema>)value);
      }
      break;
    case PROPERTIES:
      if (value == null) {
        unsetProperties();
      } else {
        setProperties((java.util.Map<java.lang.String,java.lang.String>)value);
      }
      break;
    }
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case FIELD_SCHEMAS:
      return getFieldSchemas();
    case PROPERTIES:
      return getProperties();
    }
    throw new java.lang.IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case FIELD_SCHEMAS:
      return isSetFieldSchemas();
    case PROPERTIES:
      return isSetProperties();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof Schema)
      return this.equals((Schema)that);
    return false;
  }
  public boolean equals(Schema that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    // Two fields are equal when both are unset, or both set and deep-equal.
    boolean this_present_fieldSchemas = true && this.isSetFieldSchemas();
    boolean that_present_fieldSchemas = true && that.isSetFieldSchemas();
    if (this_present_fieldSchemas || that_present_fieldSchemas) {
      if (!(this_present_fieldSchemas && that_present_fieldSchemas))
        return false;
      if (!this.fieldSchemas.equals(that.fieldSchemas))
        return false;
    }
    boolean this_present_properties = true && this.isSetProperties();
    boolean that_present_properties = true && that.isSetProperties();
    if (this_present_properties || that_present_properties) {
      if (!(this_present_properties && that_present_properties))
        return false;
      if (!this.properties.equals(that.properties))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetFieldSchemas()) ? 131071 : 524287);
    if (isSetFieldSchemas())
      hashCode = hashCode * 8191 + fieldSchemas.hashCode();
    hashCode = hashCode * 8191 + ((isSetProperties()) ? 131071 : 524287);
    if (isSetProperties())
      hashCode = hashCode * 8191 + properties.hashCode();
    return hashCode;
  }
  @Override
  public int compareTo(Schema other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    // Ordering: first by isSet flags, then by field values, field id order.
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.compare(isSetFieldSchemas(), other.isSetFieldSchemas());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetFieldSchemas()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fieldSchemas, other.fieldSchemas);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetProperties(), other.isSetProperties());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetProperties()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.properties, other.properties);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  // read/write delegate to the scheme selected by the protocol (standard vs tuple).
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("Schema(");
    boolean first = true;
    sb.append("fieldSchemas:");
    if (this.fieldSchemas == null) {
      sb.append("null");
    } else {
      sb.append(this.fieldSchemas);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("properties:");
    if (this.properties == null) {
      sb.append("null");
    } else {
      sb.append(this.properties);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is bridged through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class SchemaStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public SchemaStandardScheme getScheme() {
      return new SchemaStandardScheme();
    }
  }
  // Field-tagged (self-describing) encoding: tolerates unknown/missing fields.
  private static class SchemaStandardScheme extends org.apache.thrift.scheme.StandardScheme<Schema> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, Schema struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // FIELD_SCHEMAS
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list416 = iprot.readListBegin();
                struct.fieldSchemas = new java.util.ArrayList<FieldSchema>(_list416.size);
                @org.apache.thrift.annotation.Nullable FieldSchema _elem417;
                for (int _i418 = 0; _i418 < _list416.size; ++_i418)
                {
                  _elem417 = new FieldSchema();
                  _elem417.read(iprot);
                  struct.fieldSchemas.add(_elem417);
                }
                iprot.readListEnd();
              }
              struct.setFieldSchemasIsSet(true);
            } else {
              // Type mismatch on the wire: skip the value rather than fail.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // PROPERTIES
            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
              {
                org.apache.thrift.protocol.TMap _map419 = iprot.readMapBegin();
                struct.properties = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map419.size);
                @org.apache.thrift.annotation.Nullable java.lang.String _key420;
                @org.apache.thrift.annotation.Nullable java.lang.String _val421;
                for (int _i422 = 0; _i422 < _map419.size; ++_i422)
                {
                  _key420 = iprot.readString();
                  _val421 = iprot.readString();
                  struct.properties.put(_key420, _val421);
                }
                iprot.readMapEnd();
              }
              struct.setPropertiesIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, Schema struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.fieldSchemas != null) {
        oprot.writeFieldBegin(FIELD_SCHEMAS_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.fieldSchemas.size()));
          for (FieldSchema _iter423 : struct.fieldSchemas)
          {
            _iter423.write(oprot);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.properties != null) {
        oprot.writeFieldBegin(PROPERTIES_FIELD_DESC);
        {
          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.properties.size()));
          for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter424 : struct.properties.entrySet())
          {
            oprot.writeString(_iter424.getKey());
            oprot.writeString(_iter424.getValue());
          }
          oprot.writeMapEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class SchemaTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public SchemaTupleScheme getScheme() {
      return new SchemaTupleScheme();
    }
  }
  // Compact encoding: a presence bitset followed by the set fields in id order.
  private static class SchemaTupleScheme extends org.apache.thrift.scheme.TupleScheme<Schema> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, Schema struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetFieldSchemas()) {
        optionals.set(0);
      }
      if (struct.isSetProperties()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetFieldSchemas()) {
        {
          oprot.writeI32(struct.fieldSchemas.size());
          for (FieldSchema _iter425 : struct.fieldSchemas)
          {
            _iter425.write(oprot);
          }
        }
      }
      if (struct.isSetProperties()) {
        {
          oprot.writeI32(struct.properties.size());
          for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter426 : struct.properties.entrySet())
          {
            oprot.writeString(_iter426.getKey());
            oprot.writeString(_iter426.getValue());
          }
        }
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, Schema struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        {
          org.apache.thrift.protocol.TList _list427 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT);
          struct.fieldSchemas = new java.util.ArrayList<FieldSchema>(_list427.size);
          @org.apache.thrift.annotation.Nullable FieldSchema _elem428;
          for (int _i429 = 0; _i429 < _list427.size; ++_i429)
          {
            _elem428 = new FieldSchema();
            _elem428.read(iprot);
            struct.fieldSchemas.add(_elem428);
          }
        }
        struct.setFieldSchemasIsSet(true);
      }
      if (incoming.get(1)) {
        {
          org.apache.thrift.protocol.TMap _map430 = iprot.readMapBegin(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING);
          struct.properties = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map430.size);
          @org.apache.thrift.annotation.Nullable java.lang.String _key431;
          @org.apache.thrift.annotation.Nullable java.lang.String _val432;
          for (int _i433 = 0; _i433 < _map430.size; ++_i433)
          {
            _key431 = iprot.readString();
            _val432 = iprot.readString();
            struct.properties.put(_key431, _val432);
          }
        }
        struct.setPropertiesIsSet(true);
      }
    }
  }
  // Picks the scheme implementation matching the protocol's preferred scheme.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-content/v2/1.29.2/com/google/api/services/content/model/Account.java | 11392 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.content.model;
/**
* Account data. After the creation of a new account it may take a few minutes before it is fully
* operational. The methods delete, insert, and update require the admin role.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Content API for Shopping. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Account extends com.google.api.client.json.GenericJson {
/**
* Indicates whether the merchant sells adult content.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean adultContent;
/**
* List of linked AdWords accounts that are active or pending approval. To create a new link
* request, add a new link with status active to the list. It will remain in a pending state until
* approved or rejected either in the AdWords interface or through the AdWords API. To delete an
* active link, or to cancel a link request, remove it from the list.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<AccountAdwordsLink> adwordsLinks;
/**
* The business information of the account.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private AccountBusinessInformation businessInformation;
  /**
   * The GMB account which is linked or in the process of being linked with the Merchant Center
   * account.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private AccountGoogleMyBusinessLink googleMyBusinessLink;

  /**
   * Merchant Center account ID.
   * {@code @JsonString} serializes this as a JSON string so 64-bit values survive JSON
   * parsers that only support double-precision numbers.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.math.BigInteger id;

  /**
   * Identifies what kind of resource this is. Value: the fixed string "content#account".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * Display name for the account.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * [DEPRECATED] This field is never returned and will be ignored if provided.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String reviewsUrl;

  /**
   * Client-specific, locally-unique, internal ID for the child account.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sellerId;

  /**
   * Users with access to the account. Every account (except for subaccounts) must have at least one
   * admin user.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<AccountUser> users;

  /**
   * The merchant's website.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String websiteUrl;

  /**
   * List of linked YouTube channels that are active or pending approval. To create a new link
   * request, add a new link with status active to the list. It will remain in a pending state until
   * approved or rejected in the YT Creator Studio interface. To delete an active link, or to cancel
   * a link request, remove it from the list.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<AccountYouTubeChannelLink> youtubeChannelLinks;
/**
* Indicates whether the merchant sells adult content.
* @return value or {@code null} for none
*/
public java.lang.Boolean getAdultContent() {
return adultContent;
}
/**
* Indicates whether the merchant sells adult content.
* @param adultContent adultContent or {@code null} for none
*/
public Account setAdultContent(java.lang.Boolean adultContent) {
this.adultContent = adultContent;
return this;
}
/**
* List of linked AdWords accounts that are active or pending approval. To create a new link
* request, add a new link with status active to the list. It will remain in a pending state until
* approved or rejected either in the AdWords interface or through the AdWords API. To delete an
* active link, or to cancel a link request, remove it from the list.
* @return value or {@code null} for none
*/
public java.util.List<AccountAdwordsLink> getAdwordsLinks() {
return adwordsLinks;
}
/**
* List of linked AdWords accounts that are active or pending approval. To create a new link
* request, add a new link with status active to the list. It will remain in a pending state until
* approved or rejected either in the AdWords interface or through the AdWords API. To delete an
* active link, or to cancel a link request, remove it from the list.
* @param adwordsLinks adwordsLinks or {@code null} for none
*/
public Account setAdwordsLinks(java.util.List<AccountAdwordsLink> adwordsLinks) {
this.adwordsLinks = adwordsLinks;
return this;
}
/**
* The business information of the account.
* @return value or {@code null} for none
*/
public AccountBusinessInformation getBusinessInformation() {
return businessInformation;
}
/**
* The business information of the account.
* @param businessInformation businessInformation or {@code null} for none
*/
public Account setBusinessInformation(AccountBusinessInformation businessInformation) {
this.businessInformation = businessInformation;
return this;
}
/**
* The GMB account which is linked or in the process of being linked with the Merchant Center
* account.
* @return value or {@code null} for none
*/
public AccountGoogleMyBusinessLink getGoogleMyBusinessLink() {
return googleMyBusinessLink;
}
/**
* The GMB account which is linked or in the process of being linked with the Merchant Center
* account.
* @param googleMyBusinessLink googleMyBusinessLink or {@code null} for none
*/
public Account setGoogleMyBusinessLink(AccountGoogleMyBusinessLink googleMyBusinessLink) {
this.googleMyBusinessLink = googleMyBusinessLink;
return this;
}
/**
* Merchant Center account ID.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* Merchant Center account ID.
* @param id id or {@code null} for none
*/
public Account setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "content#account".
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Identifies what kind of resource this is. Value: the fixed string "content#account".
* @param kind kind or {@code null} for none
*/
public Account setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* Display name for the account.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Display name for the account.
* @param name name or {@code null} for none
*/
public Account setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* [DEPRECATED] This field is never returned and will be ignored if provided.
* @return value or {@code null} for none
*/
public java.lang.String getReviewsUrl() {
return reviewsUrl;
}
/**
* [DEPRECATED] This field is never returned and will be ignored if provided.
* @param reviewsUrl reviewsUrl or {@code null} for none
*/
public Account setReviewsUrl(java.lang.String reviewsUrl) {
this.reviewsUrl = reviewsUrl;
return this;
}
/**
* Client-specific, locally-unique, internal ID for the child account.
* @return value or {@code null} for none
*/
public java.lang.String getSellerId() {
return sellerId;
}
/**
* Client-specific, locally-unique, internal ID for the child account.
* @param sellerId sellerId or {@code null} for none
*/
public Account setSellerId(java.lang.String sellerId) {
this.sellerId = sellerId;
return this;
}
/**
* Users with access to the account. Every account (except for subaccounts) must have at least one
* admin user.
* @return value or {@code null} for none
*/
public java.util.List<AccountUser> getUsers() {
return users;
}
/**
* Users with access to the account. Every account (except for subaccounts) must have at least one
* admin user.
* @param users users or {@code null} for none
*/
public Account setUsers(java.util.List<AccountUser> users) {
this.users = users;
return this;
}
/**
* The merchant's website.
* @return value or {@code null} for none
*/
public java.lang.String getWebsiteUrl() {
return websiteUrl;
}
/**
* The merchant's website.
* @param websiteUrl websiteUrl or {@code null} for none
*/
public Account setWebsiteUrl(java.lang.String websiteUrl) {
this.websiteUrl = websiteUrl;
return this;
}
/**
* List of linked YouTube channels that are active or pending approval. To create a new link
* request, add a new link with status active to the list. It will remain in a pending state until
* approved or rejected in the YT Creator Studio interface. To delete an active link, or to cancel
* a link request, remove it from the list.
* @return value or {@code null} for none
*/
public java.util.List<AccountYouTubeChannelLink> getYoutubeChannelLinks() {
return youtubeChannelLinks;
}
/**
* List of linked YouTube channels that are active or pending approval. To create a new link
* request, add a new link with status active to the list. It will remain in a pending state until
* approved or rejected in the YT Creator Studio interface. To delete an active link, or to cancel
* a link request, remove it from the list.
* @param youtubeChannelLinks youtubeChannelLinks or {@code null} for none
*/
public Account setYoutubeChannelLinks(java.util.List<AccountYouTubeChannelLink> youtubeChannelLinks) {
this.youtubeChannelLinks = youtubeChannelLinks;
return this;
}
@Override
public Account set(String fieldName, Object value) {
return (Account) super.set(fieldName, value);
}
@Override
public Account clone() {
return (Account) super.clone();
}
}
| apache-2.0 |
michaelliao/openweixin | robot/src/main/java/com/itranswarp/wxapi/sample/controller/AbstractController.java | 384 | package com.itranswarp.wxapi.sample.controller;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.itranswarp.wxapi.WeixinClient;
/**
 * Base class for the sample Weixin controllers. Supplies a per-subclass logger
 * and the Spring-injected Weixin API client shared by every controller.
 */
abstract class AbstractController {

    // Logger is bound to the concrete subclass (getClass()), so log lines
    // carry the real controller's name rather than AbstractController.
    protected final Log log = LogFactory.getLog(getClass());

    // Weixin API client, injected by Spring; available to all subclasses.
    @Autowired
    protected WeixinClient client;

}
| apache-2.0 |
nimble-platform/catalog-service-backend | libs/alibaba/composition-object/src/main/java/org/openrdf/repository/object/traits/FloatMessage.java | 2005 | /*
* Copyright (c) 2011 Talis Inc., Some rights reserved.
* Copyright (c) 2012, 3 Round Stones Inc. Some rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the openrdf.org nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.openrdf.repository.object.traits;
/**
* Represents a method call with a primitive float return type.
*
* @author James Leigh
*/
public interface FloatMessage extends MessageContext {

    /**
     * Called to allow the message to proceed to the next implementation method.
     *
     * @return the {@code float} result produced by the next implementation in the chain
     * @throws Exception if the next implementation method throws
     */
    float proceed() throws Exception;
}
| apache-2.0 |
ManfredTremmel/mt-bean-validators | src/main/java/de/knightsoftnet/validators/shared/MustNotBeEqual.java | 3325 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.knightsoftnet.validators.shared;
import de.knightsoftnet.validators.shared.impl.MustNotBeEqualValidator;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.validation.Constraint;
import javax.validation.Payload;
/**
 * The annotated bean must contain at least two properties:
 * <ul>
 * <li>a field to compare (option <code>field1</code>)</li>
 * <li>another field to compare it against (option <code>field2</code>)</li>
 * <li>add the error to field1 (option <code>addErrorToField1</code>, default true)</li>
 * <li>add the error to field2 (option <code>addErrorToField2</code>, default true)</li>
 * </ul>
 * the entry of <code>field1</code> must not be equal to the entry of <code>field2</code>, can be
 * used e.g. for password old and password new fields.<br>
 * Supported types are beans, <code>null</code> elements are considered valid.<br>
 *
 * @author Manfred Tremmel
 *
 */
@Documented
@Constraint(validatedBy = MustNotBeEqualValidator.class)
@Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface MustNotBeEqual {
  /**
   * localized message shown when validation fails.
   *
   * @return localized validation message
   */
  String message() default "{deKnightsoftnetValidatorsSharedValidationMustNotBeEqualMessage}";

  /**
   * groups to use.
   *
   * @return array of validation groups
   */
  Class<?>[] groups() default {};

  /**
   * field1 name to compare.
   *
   * @return field/path containing the first value to compare
   */
  String field1();

  /**
   * field2 name to compare.
   *
   * @return field/path containing the second value to compare
   */
  String field2();

  /**
   * add error to field1 (default true).
   *
   * @return true if the constraint violation should be reported on field1
   */
  boolean addErrorToField1() default true;

  /**
   * add error to field2 (default true).
   *
   * @return true if the constraint violation should be reported on field2
   */
  boolean addErrorToField2() default true;

  /**
   * payload whatever.
   *
   * @return payload class
   */
  Class<? extends Payload>[] payload() default {};

  /**
   * Defines several {@code MustNotBeEqual} annotations on the same element.
   */
  @Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
  @Retention(RetentionPolicy.RUNTIME)
  @Documented
  @interface List {
    /**
     * the repeated {@code MustNotBeEqual} constraints.
     *
     * @return value
     */
    MustNotBeEqual[] value();
  }
}
| apache-2.0 |
Fabryprog/camel | core/camel-core/src/test/java/org/apache/camel/issues/AdviceWithStartTargetIssueTest.java | 3753 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.issues;
import org.apache.camel.CamelContext;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.NamedNode;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.reifier.RouteReifier;
import org.apache.camel.spi.InterceptStrategy;
import org.apache.camel.support.processor.DelegateAsyncProcessor;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Verifies that advice-with can divert an existing route's target endpoint
 * while a container-wide intercept strategy is also installed on the context.
 */
public class AdviceWithStartTargetIssueTest extends ContextTestSupport {

    @Test
    public void testAdvised() throws Exception {
        // Advise the first (and only) route: anything sent to mock:foo is
        // skipped and re-routed to log:foo + mock:advised instead.
        RouteReifier.adviceWith(context.getRouteDefinitions().get(0), context,
            new RouteBuilder() {
                @Override
                public void configure() throws Exception {
                    interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint()
                        .to("log:foo")
                        .to("mock:advised");
                }
            });

        // The original endpoint must see nothing; the diverted endpoints see one message each.
        getMockEndpoint("mock:foo").expectedMessageCount(0);
        getMockEndpoint("mock:advised").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Install the interceptor for every processor in the context
                // before the route is defined.
                getContext().adapt(ExtendedCamelContext.class).addInterceptStrategy(new ContainerWideInterceptor());

                from("direct:start").to("mock:foo").to("mock:result");
            }
        };
    }

    /**
     * Intercept strategy that wraps every processor in the context and counts
     * how many times any wrapped processor is invoked.
     */
    static class ContainerWideInterceptor implements InterceptStrategy {

        private static final Logger LOG = LoggerFactory.getLogger(ContainerWideInterceptor.class);
        // NOTE(review): static and unsynchronized — adequate for this
        // single-threaded test, but not a thread-safe counter in general.
        private static int count;

        public Processor wrapProcessorInInterceptors(final CamelContext context, final NamedNode definition,
                final Processor target, final Processor nextTarget) throws Exception {
            return new DelegateAsyncProcessor(new Processor() {
                public void process(Exchange exchange) throws Exception {
                    // we just count number of interceptions
                    count++;
                    LOG.info("I am the container wide interceptor. Intercepted total count: " + count);
                    // delegate to the real processor so route behavior is unchanged
                    target.process(exchange);
                }

                @Override
                public String toString() {
                    return "ContainerWideInterceptor[" + target + "]";
                }
            });
        }

        public int getCount() {
            return count;
        }
    }
}
| apache-2.0 |
massakam/pulsar | pulsar-broker/src/test/java/org/apache/pulsar/client/api/MessageDispatchThrottlingTest.java | 54645 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.api;
import static org.testng.Assert.assertNotNull;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.bookkeeper.mledger.impl.ManagedLedgerImpl;
import org.apache.pulsar.broker.service.BrokerService;
import org.apache.pulsar.broker.service.persistent.DispatchRateLimiter;
import org.apache.pulsar.broker.service.persistent.PersistentTopic;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.policies.data.DispatchRate;
import org.apache.pulsar.common.policies.data.Policies;
import org.apache.pulsar.common.policies.data.impl.DispatchRateImpl;
import org.awaitility.Awaitility;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
@Test(groups = "flaky")
public class MessageDispatchThrottlingTest extends ProducerConsumerBase {
private static final Logger log = LoggerFactory.getLogger(MessageDispatchThrottlingTest.class);
    // Runs before every test: boots a fresh broker under cluster name "test"
    // (the cluster referenced by each test's namespace policies) and prepares
    // the producer-side fixtures.
    @BeforeMethod
    @Override
    protected void setup() throws Exception {
        this.conf.setClusterName("test");
        super.internalSetup();
        super.producerBaseSetup();
    }
    // Runs after every test, even on failure: tears the broker down and
    // restores the service configuration that individual tests mutate.
    @AfterMethod(alwaysRun = true)
    @Override
    protected void cleanup() throws Exception {
        super.internalCleanup();
        super.resetConfig();
    }
@DataProvider(name = "subscriptions")
public Object[][] subscriptionsProvider() {
return new Object[][] { new Object[] { SubscriptionType.Shared }, { SubscriptionType.Exclusive } };
}
@DataProvider(name = "dispatchRateType")
public Object[][] dispatchRateProvider() {
return new Object[][] { { DispatchRateType.messageRate }, { DispatchRateType.byteRate } };
}
@DataProvider(name = "subscriptionAndDispatchRateType")
public Object[][] subDisTypeProvider() {
List<Object[]> mergeList = new LinkedList<>();
for (Object[] sub : subscriptionsProvider()) {
for (Object[] dispatch : dispatchRateProvider()) {
mergeList.add(merge(sub, dispatch));
}
}
return mergeList.toArray(new Object[0][0]);
}
public static <T> T[] merge(T[] first, T[] last) {
int totalLength = first.length + last.length;
T[] result = Arrays.copyOf(first, totalLength);
int offset = first.length;
System.arraycopy(last, 0, result, offset, first.length);
return result;
}
enum DispatchRateType {
messageRate, byteRate;
}
    /**
     * verifies: message-rate change gets reflected immediately into topic at runtime
     *
     * @throws Exception
     */
    @SuppressWarnings("deprecation")
    @Test
    public void testMessageRateDynamicallyChange() throws Exception {
        log.info("-- Starting {} test --", methodName);

        final String namespace = "my-property/throttling_ns";
        final String topicName = "persistent://" + namespace + "/throttlingBlock";
        admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));

        // create producer and topic
        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
        PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();

        // (1) verify message-rate is -1 initially — no rate limiter is created
        // until a dispatch-rate policy is configured
        Assert.assertFalse(topic.getDispatchRateLimiter().isPresent());

        // (2) change to 100 msg/period
        int messageRate = 100;
        DispatchRate dispatchRate = DispatchRate.builder()
                .dispatchThrottlingRateInMsg(messageRate)
                .dispatchThrottlingRateInByte(-1)
                .ratePeriodInSecond(360)
                .build();
        admin.namespaces().setDispatchRate(namespace, dispatchRate);
        // the policy change reaches the topic asynchronously, hence the bounded polling loop
        boolean isDispatchRateUpdate = false;
        int retry = 5;
        for (int i = 0; i < retry; i++) {
            if (topic.getDispatchRateLimiter().isPresent()) {
                isDispatchRateUpdate = true;
                break;
            } else {
                if (i != retry - 1) {
                    Thread.sleep(100);
                }
            }
        }
        Assert.assertTrue(isDispatchRateUpdate);
        Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
        // the policy must be recorded for this cluster ("test") in both maps
        Policies policies = admin.namespaces().getPolicies(namespace);
        Map<String, DispatchRate> dispatchRateMap = Maps.newHashMap();
        dispatchRateMap.put("test", dispatchRate);
        Assert.assertEquals(policies.clusterDispatchRate, dispatchRateMap);
        Assert.assertEquals(policies.topicDispatchRate, dispatchRateMap);

        // (3) change to 500 — this time as a byte rate, msg rate disabled
        messageRate = 500;
        dispatchRate = DispatchRate.builder()
                .dispatchThrottlingRateInMsg(-1)
                .dispatchThrottlingRateInByte(messageRate)
                .ratePeriodInSecond(360)
                .build();
        admin.namespaces().setDispatchRate(namespace, dispatchRate);
        // poll again until the new byte-rate is visible on the topic's limiter
        isDispatchRateUpdate = false;
        for (int i = 0; i < retry; i++) {
            if (topic.getDispatchRateLimiter().get().getDispatchRateOnByte() == messageRate) {
                isDispatchRateUpdate = true;
                break;
            } else {
                if (i != retry - 1) {
                    Thread.sleep(100);
                }
            }
        }
        Assert.assertTrue(isDispatchRateUpdate);
        Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
        policies = admin.namespaces().getPolicies(namespace);
        dispatchRateMap.put("test", dispatchRate);
        Assert.assertEquals(policies.clusterDispatchRate, dispatchRateMap);
        Assert.assertEquals(policies.topicDispatchRate, dispatchRateMap);

        producer.close();
    }
    /**
     * verify: consumer should not receive all messages due to message-rate throttling
     *
     * @param subscription subscription type under test
     * @param dispatchRateType whether the limit is message-count or byte based
     * @throws Exception
     */
    @Test(dataProvider = "subscriptionAndDispatchRateType", timeOut = 5000)
    public void testMessageRateLimitingNotReceiveAllMessages(SubscriptionType subscription,
            DispatchRateType dispatchRateType) throws Exception {
        log.info("-- Starting {} test --", methodName);

        final String namespace = "my-property/throttling_ns";
        final String topicName = "persistent://" + namespace + "/throttlingBlock";
        final int messageRate = 100;
        // Rate period of 360s is far longer than the test, so once the quota of
        // 100 (messages or bytes) is spent it should not refresh mid-test.
        DispatchRate dispatchRate = null;
        if (DispatchRateType.messageRate.equals(dispatchRateType)) {
            dispatchRate = DispatchRate.builder()
                    .dispatchThrottlingRateInMsg(messageRate)
                    .dispatchThrottlingRateInByte(-1)
                    .ratePeriodInSecond(360)
                    .build();
        } else {
            dispatchRate = DispatchRate.builder()
                    .dispatchThrottlingRateInMsg(-1)
                    .dispatchThrottlingRateInByte(messageRate)
                    .ratePeriodInSecond(360)
                    .build();
        }
        admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
        admin.namespaces().setDispatchRate(namespace, dispatchRate);
        // create producer and topic
        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
        PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
        // the namespace policy is applied to the topic asynchronously — poll briefly
        boolean isMessageRateUpdate = false;
        int retry = 5;
        for (int i = 0; i < retry; i++) {
            if (topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0
                    || topic.getDispatchRateLimiter().get().getDispatchRateOnByte() > 0) {
                isMessageRateUpdate = true;
                break;
            } else {
                if (i != retry - 1) {
                    Thread.sleep(100);
                }
            }
        }
        Assert.assertTrue(isMessageRateUpdate);
        Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);

        int numMessages = 500;
        final AtomicInteger totalReceived = new AtomicInteger(0);

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-subscriber-name")
                .subscriptionType(subscription).messageListener((c1, msg) -> {
                    Assert.assertNotNull(msg, "Message cannot be null");
                    String receivedMessage = new String(msg.getData());
                    log.debug("Received message [{}] in the listener", receivedMessage);
                    totalReceived.incrementAndGet();
                }).subscribe();
        // deactivate cursors (presumably so throttling applies as for backlogged
        // consumers — deactiveCursors is defined elsewhere in this file)
        deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());

        // Asynchronously produce messages (80-byte payloads, relevant for the byte-rate case)
        for (int i = 0; i < numMessages; i++) {
            producer.send(new byte[80]);
        }

        // consumer should not have received all published message due to message-rate throttling
        Assert.assertTrue(totalReceived.get() < messageRate * 2);

        consumer.close();
        producer.close();
        log.info("-- Exiting {} test --", methodName);
    }
    /**
     * It verifies dispatch-rate throttling configured via broker-level (cluster) dynamic
     * configuration, rather than per-namespace policies.
     *
     * @throws Exception
     */
    @Test
    public void testClusterMsgByteRateLimitingClusterConfig() throws Exception {
        log.info("-- Starting {} test --", methodName);
        final String namespace = "my-property/throttling_ns";
        final String topicName = "persistent://" + namespace + "/throttlingBlock";
        final int messageRate = 5;
        final long byteRate = 1024 * 1024;// 1MB rate enough to let all msg to be delivered
        // remember the original value so it can be restored at the end of the test
        int initValue = pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg();
        // (1) Update message-dispatch-rate limit
        admin.brokers().updateDynamicConfiguration("dispatchThrottlingRatePerTopicInMsg",
                Integer.toString(messageRate));
        admin.brokers().updateDynamicConfiguration("dispatchThrottlingRatePerTopicInByte", Long.toString(byteRate));
        // sleep incrementally as zk-watch notification is async and may take some time
        for (int i = 0; i < 5; i++) {
            if (pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg() == initValue) {
                Thread.sleep(50 + (i * 10));
            }
        }
        Assert.assertNotEquals(pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg(), initValue);

        admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
        // create producer and topic
        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
        PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
        int numMessages = 500;
        final AtomicInteger totalReceived = new AtomicInteger(0);

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-subscriber-name")
                .subscriptionType(SubscriptionType.Shared).messageListener((c1, msg) -> {
                    Assert.assertNotNull(msg, "Message cannot be null");
                    String receivedMessage = new String(msg.getData());
                    log.debug("Received message [{}] in the listener", receivedMessage);
                    totalReceived.incrementAndGet();
                }).subscribe();
        // deactivate cursors (helper defined elsewhere in this file)
        deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());

        // Asynchronously produce messages
        for (int i = 0; i < numMessages; i++) {
            final String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        // it can make sure that consumer had enough time to consume message but couldn't consume due to throttling
        Thread.sleep(500);

        // consumer should not have received all published message due to message-rate throttling
        Assert.assertNotEquals(totalReceived.get(), numMessages);

        consumer.close();
        producer.close();
        // restore the broker-level setting mutated above
        pulsar.getConfiguration().setDispatchThrottlingRatePerTopicInMsg(initValue);
        log.info("-- Exiting {} test --", methodName);
    }
    /**
     * verify rate-limiting should throttle message-dispatching based on message-rate
     *
     * <pre>
     * 1. dispatch-msg-rate = 10 msg/sec
     * 2. send 20 msgs
     * 3. it should take up to 2 second to receive all messages
     * </pre>
     *
     * @param subscription subscription type under test
     * @throws Exception
     */
    @Test(dataProvider = "subscriptions", timeOut = 5000)
    public void testMessageRateLimitingReceiveAllMessagesAfterThrottling(SubscriptionType subscription)
            throws Exception {
        log.info("-- Starting {} test --", methodName);

        final String namespace = "my-property/throttling_ns";
        final String topicName = "persistent://" + namespace + "/throttlingAll";

        // 10 msg per 1-second period: throttled, but the quota refills, so all
        // messages eventually arrive (unlike the "NotReceiveAllMessages" test).
        final int messageRate = 10;
        DispatchRate dispatchRate = DispatchRate.builder()
                .dispatchThrottlingRateInMsg(messageRate)
                .dispatchThrottlingRateInByte(-1)
                .ratePeriodInSecond(1)
                .build();
        admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
        admin.namespaces().setDispatchRate(namespace, dispatchRate);
        // create producer and topic
        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
        PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
        // the namespace policy is applied to the topic asynchronously — poll briefly
        boolean isMessageRateUpdate = false;
        int retry = 5;
        for (int i = 0; i < retry; i++) {
            if (topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0) {
                isMessageRateUpdate = true;
                break;
            } else {
                if (i != retry - 1) {
                    Thread.sleep(100);
                }
            }
        }
        Assert.assertTrue(isMessageRateUpdate);
        Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);

        final int numProducedMessages = 20;
        final CountDownLatch latch = new CountDownLatch(numProducedMessages);
        final AtomicInteger totalReceived = new AtomicInteger(0);

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-subscriber-name")
                .subscriptionType(subscription).messageListener((c1, msg) -> {
                    Assert.assertNotNull(msg, "Message cannot be null");
                    String receivedMessage = new String(msg.getData());
                    log.debug("Received message [{}] in the listener", receivedMessage);
                    totalReceived.incrementAndGet();
                    latch.countDown();
                }).subscribe();
        // deactivate cursors (helper defined elsewhere in this file)
        deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());

        // Asynchronously produce messages
        for (int i = 0; i < numProducedMessages; i++) {
            final String message = "my-message-" + i;
            producer.send(message.getBytes());
        }

        // despite throttling, every message must eventually be delivered
        latch.await();
        Assert.assertEquals(totalReceived.get(), numProducedMessages);

        consumer.close();
        producer.close();
        log.info("-- Exiting {} test --", methodName);
    }
    /**
     * verify rate-limiting should throttle message-dispatching based on byte-rate
     *
     * <pre>
     * 1. dispatch-byte-rate = 250 bytes/sec
     * 2. send 20 msgs : each with 99-byte payload
     * 3. the dispatcher can deliver only a couple of messages per second,
     *    so after a few seconds the consumer has received some but not all
     * </pre>
     * (Javadoc updated to match the constants actually used below.)
     *
     * @param subscription subscription type under test
     * @throws Exception
     */
    @Test(dataProvider = "subscriptions", timeOut = 5000)
    public void testBytesRateLimitingReceiveAllMessagesAfterThrottling(SubscriptionType subscription) throws Exception {
        conf.setDispatchThrottlingOnNonBacklogConsumerEnabled(true);
        log.info("-- Starting {} test --", methodName);

        final String namespace = "my-property/throttling_ns";
        final String topicName = "persistent://" + namespace + "/throttlingAll";
        final String subscriptionName = "my-subscriber-name";

        // 250 bytes per 1-second period against 99-byte messages: roughly 2-3
        // messages can be dispatched per second
        final int byteRate = 250;
        DispatchRate dispatchRate = DispatchRate.builder()
                .dispatchThrottlingRateInMsg(-1)
                .dispatchThrottlingRateInByte(byteRate)
                .ratePeriodInSecond(1)
                .build();
        admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
        admin.namespaces().setDispatchRate(namespace, dispatchRate);
        // create the subscription up-front so messages published below are retained as backlog
        admin.topics().createSubscription(topicName, subscriptionName, MessageId.earliest);
        // create producer and topic
        Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).enableBatching(false).create();
        PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
        // wait for the (asynchronously applied) byte-rate limit to reach the topic
        Awaitility.await().until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnByte() > 0);

        Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);

        final int numProducedMessages = 20;
        final AtomicInteger totalReceived = new AtomicInteger(0);

        for (int i = 0; i < numProducedMessages; i++) {
            producer.send(new byte[99]);
        }

        Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriptionName)
                .subscriptionType(subscription).messageListener((c1, msg) -> {
                    Assert.assertNotNull(msg, "Message cannot be null");
                    String receivedMessage = new String(msg.getData());
                    log.debug("Received message [{}] in the listener", receivedMessage);
                    totalReceived.incrementAndGet();
                }).subscribe();
        // after 3-5 seconds of throttled dispatch only a partial window (7-9 msgs) may have arrived
        Awaitility.await().atLeast(3, TimeUnit.SECONDS)
                .atMost(5, TimeUnit.SECONDS).until(() -> totalReceived.get() > 6 && totalReceived.get() < 10);

        consumer.close();
        producer.close();
        log.info("-- Exiting {} test --", methodName);
    }
/**
 * Verifies message-rate throttling on multiple consumers with a shared subscription:
 * with a dispatch rate of 5 msgs per 360s the five consumers must not be able to
 * drain all 500 produced messages within the test window.
 *
 * @throws Exception
 */
@Test(timeOut = 5000)
public void testRateLimitingMultipleConsumers() throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/throttling_ns";
    final String topicName = "persistent://" + namespace + "/throttlingMultipleConsumers";
    final int messageRate = 5;
    DispatchRate dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(messageRate)
            .dispatchThrottlingRateInByte(-1)
            .ratePeriodInSecond(360)
            .build();
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    // The namespace policy reaches the topic asynchronously: wait for the rate-limiter
    // update instead of a hand-rolled sleep/retry loop, consistent with the Awaitility
    // usage elsewhere in this class.
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0);
    Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
    final int numProducedMessages = 500;
    final AtomicInteger totalReceived = new AtomicInteger(0);
    ConsumerBuilder<byte[]> consumerBuilder = pulsarClient.newConsumer().topic(topicName)
            .subscriptionName("my-subscriber-name").subscriptionType(SubscriptionType.Shared)
            .messageListener((c1, msg) -> {
                Assert.assertNotNull(msg, "Message cannot be null");
                String receivedMessage = new String(msg.getData());
                log.debug("Received message [{}] in the listener", receivedMessage);
                totalReceived.incrementAndGet();
            });
    Consumer<byte[]> consumer1 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer2 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer3 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer4 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer5 = consumerBuilder.subscribe();
    // deactivate cursors so the consumers are treated as having a backlog
    deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());
    // produce messages
    for (int i = 0; i < numProducedMessages; i++) {
        final String message = "my-message-" + i;
        producer.send(message.getBytes());
    }
    // give the consumers enough time to consume; throttling must still hold them back
    Thread.sleep(500);
    // consumers should not have received all published messages due to message-rate throttling
    Assert.assertNotEquals(totalReceived.get(), numProducedMessages);
    consumer1.close();
    consumer2.close();
    consumer3.close();
    consumer4.close();
    consumer5.close();
    producer.close();
    log.info("-- Exiting {} test --", methodName);
}
/**
 * Verifies dispatch throttling with batching enabled: when
 * dispatchThrottlingOnBatchMessageEnabled is true the limiter appears to count
 * batch entries rather than individual messages (TODO confirm against broker
 * semantics), so the 5 produced batches fit the 5-msg rate and every message
 * is eventually delivered.
 */
@Test
public void testRateLimitingWithBatchMsgEnabled() throws Exception {
    log.info("-- Starting {} test --", methodName);
    conf.setDispatchThrottlingOnBatchMessageEnabled(true);
    final String namespace = "my-property/throttling_ns";
    final String topicName = "persistent://" + namespace + "/throttlingMultipleConsumers";
    final int messageRate = 5;
    DispatchRate dispatchRate = DispatchRate.builder().dispatchThrottlingRateInMsg(messageRate)
            .dispatchThrottlingRateInByte(-1).ratePeriodInSecond(360).build();
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    final int messagesPerBatch = 100;
    final int numProducedMessages = messageRate * messagesPerBatch;
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).enableBatching(true)
            .batchingMaxPublishDelay(1, TimeUnit.SECONDS).batchingMaxMessages(messagesPerBatch).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    // wait for the async rate-limiter update instead of a hand-rolled retry loop,
    // consistent with the Awaitility usage elsewhere in this class
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0);
    Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
    final AtomicInteger totalReceived = new AtomicInteger(0);
    ConsumerBuilder<byte[]> consumerBuilder = pulsarClient.newConsumer().topic(topicName)
            .subscriptionName("my-subscriber-name").subscriptionType(SubscriptionType.Shared)
            .messageListener((c1, msg) -> {
                Assert.assertNotNull(msg, "Message cannot be null");
                String receivedMessage = new String(msg.getData());
                log.debug("Received message [{}] in the listener", receivedMessage);
                totalReceived.incrementAndGet();
            });
    Consumer<byte[]> consumer1 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer2 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer3 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer4 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer5 = consumerBuilder.subscribe();
    // deactivate cursors so the consumers are treated as having a backlog
    deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());
    // Asynchronously produce messages and wait until all sends have completed
    CountDownLatch latch = new CountDownLatch(numProducedMessages);
    for (int i = 0; i < numProducedMessages; i++) {
        final String message = "my-message-" + i;
        producer.sendAsync(message.getBytes()).thenAccept(__ -> latch.countDown());
    }
    latch.await();
    Awaitility.await().atMost(10, TimeUnit.SECONDS).until(() -> totalReceived.get() == numProducedMessages);
    // all messages should have been received: batch-level throttling does not block
    // full delivery of the 5 batches (comment previously claimed the opposite)
    Assert.assertEquals(totalReceived.get(), numProducedMessages);
    consumer1.close();
    consumer2.close();
    consumer3.close();
    consumer4.close();
    consumer5.close();
    producer.close();
    log.info("-- Exiting {} test --", methodName);
}
/**
 * Verifies that the broker-level (cluster) dynamic configuration
 * dispatchThrottlingRatePerTopicInMsg throttles dispatch on a topic with no
 * namespace-level override. The original value is restored at the end.
 */
@Test(dataProvider = "subscriptions", timeOut = 5000)
public void testClusterRateLimitingConfiguration(SubscriptionType subscription) throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/throttling_ns";
    final String topicName = "persistent://" + namespace + "/throttlingBlock";
    final int messageRate = 5;
    int initValue = pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg();
    // (1) Update message-dispatch-rate limit
    admin.brokers().updateDynamicConfiguration("dispatchThrottlingRatePerTopicInMsg",
            Integer.toString(messageRate));
    // dynamic-config propagation is async (zk watch): wait for it instead of sleeping
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg() != initValue);
    Assert.assertNotEquals(pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg(), initValue);
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    int numMessages = 500;
    final AtomicInteger totalReceived = new AtomicInteger(0);
    Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-subscriber-name")
            .subscriptionType(subscription).messageListener((c1, msg) -> {
                Assert.assertNotNull(msg, "Message cannot be null");
                String receivedMessage = new String(msg.getData());
                log.debug("Received message [{}] in the listener", receivedMessage);
                totalReceived.incrementAndGet();
            }).subscribe();
    // deactivate cursors so the consumer is treated as having a backlog
    deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());
    // produce messages
    for (int i = 0; i < numMessages; i++) {
        final String message = "my-message-" + i;
        producer.send(message.getBytes());
    }
    // give the consumer enough time to consume; throttling must still hold it back
    Thread.sleep(500);
    // consumer should not have received all published messages due to message-rate throttling
    Assert.assertNotEquals(totalReceived.get(), numMessages);
    consumer.close();
    producer.close();
    // restore the original broker-level limit so later tests are unaffected
    pulsar.getConfiguration().setDispatchThrottlingRatePerTopicInMsg(initValue);
    log.info("-- Exiting {} test --", methodName);
}
/**
 * It verifies that dispatch-throttling considers both msg/byte rate if both of them are configured together
 *
 * @param subscription
 * @throws Exception
 */
@Test(dataProvider = "subscriptions", timeOut = 5000)
public void testMessageByteRateThrottlingCombined(SubscriptionType subscription) throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/throttling_ns";
    final String topicName = "persistent://" + namespace + "/throttlingAll";
    final int messageRate = 5; // 5 msgs per second
    final long byteRate = 10; // 10 bytes per second
    DispatchRate dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(messageRate)
            .dispatchThrottlingRateInByte(byteRate)
            .ratePeriodInSecond(360)
            .build();
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    // wait until BOTH msg and byte limits have been applied to the topic's rate limiter,
    // replacing the hand-rolled sleep/retry loop with the Awaitility style used elsewhere
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0
                    && topic.getDispatchRateLimiter().get().getDispatchRateOnByte() > 0);
    Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
    final int numProducedMessages = 200;
    final AtomicInteger totalReceived = new AtomicInteger(0);
    ConsumerBuilder<byte[]> consumerBuilder = pulsarClient.newConsumer().topic(topicName)
            .subscriptionName("my-subscriber-name").subscriptionType(subscription).messageListener((c1, msg) -> {
                Assert.assertNotNull(msg, "Message cannot be null");
                String receivedMessage = new String(msg.getData());
                log.debug("Received message [{}] in the listener", receivedMessage);
                totalReceived.incrementAndGet();
            });
    Consumer<byte[]> consumer = consumerBuilder.subscribe();
    // deactivate cursors so the consumer is treated as having a backlog
    deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());
    // NOTE(review): the consumer is closed before producing and re-subscribed afterwards,
    // presumably so that all messages are dispatched in one throttled burst — confirm intent
    consumer.close();
    // produce messages
    final int dataSize = 50;
    final byte[] data = new byte[dataSize];
    for (int i = 0; i < numProducedMessages; i++) {
        producer.send(data);
    }
    consumer = consumerBuilder.subscribe();
    // byte-rate throttling should prevent dispatching at exactly twice the byte rate
    final int totalReceivedBytes = dataSize * totalReceived.get();
    Assert.assertNotEquals(totalReceivedBytes, byteRate * 2);
    consumer.close();
    producer.close();
    log.info("-- Exiting {} test --", methodName);
}
/**
 * <pre>
 * Verifies setting dispatch-rate on global namespace.
 * 1. It sets dispatch-rate for a local cluster into global-zk.policies
 * 2. Topic fetches dispatch-rate for the local cluster from policies
 * 3. applies dispatch rate
 *
 * </pre>
 *
 * @throws Exception
 */
@Test
public void testGlobalNamespaceThrottling() throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/throttling_ns";
    final String topicName = "persistent://" + namespace + "/throttlingBlock";
    final int messageRate = 5;
    DispatchRate dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(messageRate)
            .dispatchThrottlingRateInByte(-1)
            .ratePeriodInSecond(360)
            .build();
    admin.clusters().createCluster("global", ClusterData.builder().serviceUrl("http://global:8080").build());
    admin.namespaces().createNamespace(namespace);
    admin.namespaces().setNamespaceReplicationClusters(namespace, Sets.newHashSet("test"));
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    // wait for either limit to be applied, replacing the hand-rolled sleep/retry loop
    // with the Awaitility style used elsewhere in this class
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0
                    || topic.getDispatchRateLimiter().get().getDispatchRateOnByte() > 0);
    Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
    int numMessages = 500;
    final AtomicInteger totalReceived = new AtomicInteger(0);
    Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-subscriber-name")
            .subscriptionType(SubscriptionType.Shared).messageListener((c1, msg) -> {
                Assert.assertNotNull(msg, "Message cannot be null");
                String receivedMessage = new String(msg.getData());
                log.debug("Received message [{}] in the listener", receivedMessage);
                totalReceived.incrementAndGet();
            }).subscribe();
    // deactivate cursors so the consumer is treated as having a backlog
    deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());
    // produce messages
    for (int i = 0; i < numMessages; i++) {
        producer.send(new byte[80]);
    }
    // give the consumer enough time to consume; throttling must still hold it back
    Thread.sleep(500);
    // consumer should not have received all published messages due to message-rate throttling
    Assert.assertNotEquals(totalReceived.get(), numMessages);
    consumer.close();
    producer.close();
    log.info("-- Exiting {} test --", methodName);
}
/**
 * It verifies that broker throttles already caught-up consumer which doesn't have backlog if the flag is enabled
 *
 * @param subscription
 * @throws Exception
 */
@Test(dataProvider = "subscriptions", timeOut = 5000)
public void testNonBacklogConsumerWithThrottlingEnabled(SubscriptionType subscription) throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/throttling_ns";
    final String topicName = "persistent://" + namespace + "/throttlingBlock";
    final int messageRate = 10;
    DispatchRate dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(messageRate)
            .dispatchThrottlingRateInByte(-1)
            .ratePeriodInSecond(360)
            .build();
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    admin.brokers().updateDynamicConfiguration("dispatchThrottlingOnNonBacklogConsumerEnabled",
            Boolean.TRUE.toString());
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    // wait for the async rate-limiter update instead of a hand-rolled retry loop,
    // consistent with the Awaitility usage elsewhere in this class
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0);
    Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
    // enable throttling for nonBacklog consumers
    conf.setDispatchThrottlingOnNonBacklogConsumerEnabled(true);
    int numMessages = 500;
    final AtomicInteger totalReceived = new AtomicInteger(0);
    Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-subscriber-name")
            .subscriptionType(subscription).messageListener((c1, msg) -> {
                Assert.assertNotNull(msg, "Message cannot be null");
                String receivedMessage = new String(msg.getData());
                log.debug("Received message [{}] in the listener", receivedMessage);
                totalReceived.incrementAndGet();
            }).subscribe();
    // note: cursors are intentionally NOT deactivated here — the consumer stays caught-up
    // and must still be throttled because the non-backlog flag is enabled
    for (int i = 0; i < numMessages; i++) {
        producer.send(new byte[80]);
    }
    // consumer should not have received all published messages due to message-rate throttling
    Assert.assertTrue(totalReceived.get() < messageRate * 2);
    consumer.close();
    producer.close();
    // revert default value
    this.conf.setDispatchThrottlingOnNonBacklogConsumerEnabled(false);
    log.info("-- Exiting {} test --", methodName);
}
/**
 * <pre>
 * It verifies that cluster-throttling value gets considered when namespace-policy throttling is disabled.
 *
 * 1. Update cluster-throttling-config: topic rate-limiter has cluster-config
 * 2. Update namespace-throttling-config: topic rate-limiter has namespace-config
 * 3. Disable namespace-throttling-config: topic rate-limiter has cluster-config
 * 4. Create new topic with disable namespace-config and enabled cluster-config: it takes cluster-config
 *
 * </pre>
 *
 * @throws Exception
 */
@Test
public void testClusterPolicyOverrideConfiguration() throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/throttling_ns";
    final String topicName1 = "persistent://" + namespace + "/throttlingOverride1";
    final String topicName2 = "persistent://" + namespace + "/throttlingOverride2";
    final int clusterMessageRate = 100;
    int initValue = pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg();
    // (1) Update message-dispatch-rate limit
    admin.brokers().updateDynamicConfiguration("dispatchThrottlingRatePerTopicInMsg",
            Integer.toString(clusterMessageRate));
    // dynamic-config propagation is async (zk watch): wait for it instead of sleeping
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg() != initValue);
    Assert.assertNotEquals(pulsar.getConfiguration().getDispatchThrottlingRatePerTopicInMsg(), initValue);
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName1).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName1).get();
    // (1) Update dispatch rate on cluster-config update
    Assert.assertEquals(clusterMessageRate, topic.getDispatchRateLimiter().get().getDispatchRateOnMsg());
    // (2) Update namespace throttling limit
    int nsMessageRate = 500;
    DispatchRate dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(nsMessageRate)
            .dispatchThrottlingRateInByte(0)
            .ratePeriodInSecond(1)
            .build();
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() == nsMessageRate);
    Assert.assertEquals(nsMessageRate, topic.getDispatchRateLimiter().get().getDispatchRateOnMsg());
    // (3) Disable namespace throttling limit will force to take cluster-config
    dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(0)
            .dispatchThrottlingRateInByte(0)
            .ratePeriodInSecond(1)
            .build();
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    Awaitility.await().atMost(1, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() != nsMessageRate);
    Assert.assertEquals(clusterMessageRate, topic.getDispatchRateLimiter().get().getDispatchRateOnMsg());
    // (4) Namespace throttling is disabled so, new topic should take cluster throttling limit
    Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName2).create();
    PersistentTopic topic2 = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName2).get();
    Assert.assertEquals(clusterMessageRate, topic2.getDispatchRateLimiter().get().getDispatchRateOnMsg());
    producer.close();
    producer2.close();
    // restore the original broker-level limit so later tests are unaffected
    // (previously this test leaked the cluster-config change; the sibling
    // testClusterRateLimitingConfiguration already restores it the same way)
    pulsar.getConfiguration().setDispatchThrottlingRatePerTopicInMsg(initValue);
    log.info("-- Exiting {} test --", methodName);
}
/**
 * Verifies that once the topic is closed its dispatch rate limiter is closed as well:
 * a closed limiter reports -1 for both the msg and the byte rate.
 */
@Test(dataProvider = "subscriptions", timeOut = 10000)
public void testClosingRateLimiter(SubscriptionType subscription) throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/throttling_ns";
    final String topicName = "persistent://" + namespace + "/closingRateLimiter" + subscription.name();
    final String subName = "mySubscription" + subscription.name();
    final int numProducedMessages = 10;
    DispatchRate rate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(10)
            .dispatchThrottlingRateInByte(1024)
            .ratePeriodInSecond(1)
            .build();
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    admin.namespaces().setDispatchRate(namespace, rate);
    Producer<byte[]> msgProducer = pulsarClient.newProducer().topic(topicName).create();
    Consumer<byte[]> msgConsumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subName)
            .subscriptionType(subscription).subscribe();
    PersistentTopic persistentTopic =
            (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    // publish a handful of messages, then drain and ack them all
    for (int idx = 0; idx < numProducedMessages; idx++) {
        msgProducer.send(("my-message-" + idx).getBytes());
    }
    for (int idx = 0; idx < numProducedMessages; idx++) {
        msgConsumer.acknowledge(msgConsumer.receive());
    }
    Assert.assertTrue(persistentTopic.getDispatchRateLimiter().isPresent());
    // keep a handle on the limiter so we can inspect it after the topic is gone
    DispatchRateLimiter limiter = persistentTopic.getDispatchRateLimiter().get();
    msgProducer.close();
    msgConsumer.unsubscribe();
    msgConsumer.close();
    persistentTopic.close().get();
    // a closed rate limiter reports -1 for both rates
    Assert.assertEquals(limiter.getDispatchRateOnMsg(), -1);
    Assert.assertEquals(limiter.getDispatchRateOnByte(), -1);
    log.info("-- Exiting {} test --", methodName);
}
/**
 * Verifies the precedence rules of {@code DispatchRateLimiter.getPoliciesDispatchRate}
 * for the deprecated per-cluster policy maps: no policy → null, cluster-level policy
 * applies when no topic-level one exists, and a topic-level policy wins over it.
 */
@SuppressWarnings("deprecation")
@Test
public void testDispatchRateCompatibility1() throws Exception {
    final String cluster = "test";
    Optional<Policies> policies = Optional.of(new Policies());
    DispatchRateImpl clusterLevel = DispatchRateImpl.builder()
            .dispatchThrottlingRateInMsg(10)
            .dispatchThrottlingRateInByte(512)
            .ratePeriodInSecond(1)
            .build();
    DispatchRateImpl topicLevel = DispatchRateImpl.builder()
            .dispatchThrottlingRateInMsg(200)
            .dispatchThrottlingRateInByte(1024)
            .ratePeriodInSecond(1)
            .build();
    // (1) neither cluster- nor topic-level rate configured -> throttling disabled (null)
    DispatchRateImpl effective =
            DispatchRateLimiter.getPoliciesDispatchRate(cluster, policies, DispatchRateLimiter.Type.TOPIC);
    Assert.assertNull(effective);
    // (2) only the cluster-level rate configured -> it is effective
    policies.get().clusterDispatchRate.put(cluster, clusterLevel);
    effective = DispatchRateLimiter.getPoliciesDispatchRate(cluster, policies, DispatchRateLimiter.Type.TOPIC);
    Assert.assertEquals(effective, clusterLevel);
    // (3) topic-level rate configured too -> it overrides the cluster-level one
    policies.get().topicDispatchRate.put(cluster, topicLevel);
    effective = DispatchRateLimiter.getPoliciesDispatchRate(cluster, policies, DispatchRateLimiter.Type.TOPIC);
    Assert.assertEquals(effective, topicLevel);
}
/**
 * Verifies {@code DispatchRateLimiter.onPoliciesUpdate} against the deprecated
 * per-cluster policy maps: with no policy the limiter stays disabled (-1/-1),
 * a cluster-level rate is picked up, and a topic-level rate then overrides it.
 */
@SuppressWarnings("deprecation")
@Test
public void testDispatchRateCompatibility2() throws Exception {
    final String namespace = "my-property/dispatch-rate-compatibility";
    final String topicName = "persistent://" + namespace + "/t1";
    final String cluster = "test";
    admin.namespaces().createNamespace(namespace, Sets.newHashSet(cluster));
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
    PersistentTopic persistentTopic =
            (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    DispatchRateLimiter limiter = new DispatchRateLimiter(persistentTopic, DispatchRateLimiter.Type.TOPIC);
    Policies policies = new Policies();
    DispatchRateImpl clusterLevel = DispatchRateImpl.builder()
            .dispatchThrottlingRateInMsg(100)
            .dispatchThrottlingRateInByte(512)
            .ratePeriodInSecond(1)
            .build();
    DispatchRateImpl topicLevel = DispatchRateImpl.builder()
            .dispatchThrottlingRateInMsg(200)
            .dispatchThrottlingRateInByte(1024)
            .ratePeriodInSecond(1)
            .build();
    // (1) empty policies -> throttling disabled
    limiter.onPoliciesUpdate(policies);
    Assert.assertEquals(limiter.getDispatchRateOnMsg(), -1);
    Assert.assertEquals(limiter.getDispatchRateOnByte(), -1);
    // (2) only cluster-level rate set -> it is effective
    policies.clusterDispatchRate.put(cluster, clusterLevel);
    limiter.onPoliciesUpdate(policies);
    Assert.assertEquals(limiter.getDispatchRateOnMsg(), 100);
    Assert.assertEquals(limiter.getDispatchRateOnByte(), 512);
    // (3) topic-level rate set too -> it overrides the cluster-level one
    policies.topicDispatchRate.put(cluster, topicLevel);
    limiter.onPoliciesUpdate(policies);
    Assert.assertEquals(limiter.getDispatchRateOnMsg(), 200);
    Assert.assertEquals(limiter.getDispatchRateOnByte(), 1024);
    producer.close();
    persistentTopic.close().get();
}
/**
 * Deactivates every cursor on the given ledger. The broker's background stats
 * updater is shut down first (via reflection on the private "statsUpdater"
 * field) so it cannot re-activate the cursors afterwards.
 */
protected void deactiveCursors(ManagedLedgerImpl ledger) throws Exception {
    Field field = BrokerService.class.getDeclaredField("statsUpdater");
    field.setAccessible(true);
    ((ScheduledExecutorService) field.get(pulsar.getBrokerService())).shutdownNow();
    ledger.getCursors().forEach(ledger::deactivateCursor);
}
/**
 * It verifies that relative throttling at least dispatch messages as publish-rate.
 *
 * @param subscription
 * @throws Exception
 */
@Test(dataProvider = "subscriptions")
public void testRelativeMessageRateLimitingThrottling(SubscriptionType subscription) throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String namespace = "my-property/relative_throttling_ns";
    final String topicName = "persistent://" + namespace + "/relative-throttle" + subscription;
    final int messageRate = 1;
    DispatchRate dispatchRate = DispatchRate.builder()
            .dispatchThrottlingRateInMsg(messageRate)
            .dispatchThrottlingRateInByte(-1)
            .ratePeriodInSecond(1)
            .relativeToPublishRate(true)
            .build();
    admin.namespaces().createNamespace(namespace, Sets.newHashSet("test"));
    admin.namespaces().setDispatchRate(namespace, dispatchRate);
    // create producer and topic
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).enableBatching(false).create();
    PersistentTopic topic = (PersistentTopic) pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    // wait for the async rate-limiter update instead of a hand-rolled retry loop,
    // consistent with the Awaitility usage elsewhere in this class
    Awaitility.await().atMost(2, TimeUnit.SECONDS)
            .until(() -> topic.getDispatchRateLimiter().get().getDispatchRateOnMsg() > 0);
    Assert.assertEquals(admin.namespaces().getDispatchRate(namespace), dispatchRate);
    Thread.sleep(2000);
    final int numProducedMessages = 1000;
    Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-subscriber-name")
            .subscriptionType(subscription).subscribe();
    // deactivate cursors so the consumer is treated as having a backlog
    deactiveCursors((ManagedLedgerImpl) topic.getManagedLedger());
    // send a message, which will make dispatcher-ratelimiter initialize and schedule renew task
    producer.send("test".getBytes());
    assertNotNull(consumer.receive());
    // force a high observed publish rate so relative throttling allows a large dispatch rate
    Field lastUpdatedMsgRateIn = PersistentTopic.class.getDeclaredField("lastUpdatedAvgPublishRateInMsg");
    lastUpdatedMsgRateIn.setAccessible(true);
    lastUpdatedMsgRateIn.set(topic, numProducedMessages);
    for (int i = 0; i < numProducedMessages; i++) {
        final String message = "my-message-" + i;
        producer.send(message.getBytes());
    }
    int totalReceived = 0;
    // Relative throttling will let it drain immediately because it allows to dispatch = (publish-rate +
    // dispatch-rate)
    // All messages should be received in the next 1.1 seconds. 100 millis should be enough for the actual delivery,
    // while the previous call to receive above may have thrown the dispatcher into a read backoff, as nothing
    // may have been produced before the call to readNext() and the permits for dispatch had already been used.
    // The backoff is 1 second, so we expect to be able to receive all messages in at most 1.1 seconds, while the
    // basic dispatch rate limit would only allow one message in that time.
    long maxTimeNanos = TimeUnit.MILLISECONDS.toNanos(1100);
    long startNanos = System.nanoTime();
    for (int i = 0; i < numProducedMessages; i++) {
        Message<byte[]> msg = consumer.receive((int) maxTimeNanos, TimeUnit.NANOSECONDS);
        totalReceived++;
        assertNotNull(msg);
        long elapsedNanos = System.nanoTime() - startNanos;
        if (elapsedNanos > maxTimeNanos) { // fail fast
            log.info("Test has only received {} messages in {}ms, {} expected",
                    totalReceived, TimeUnit.NANOSECONDS.toMillis(elapsedNanos), numProducedMessages);
            Assert.fail("Messages not received in time");
        }
        log.info("Received {}-{}", msg.getMessageId(), new String(msg.getData()));
    }
    Assert.assertEquals(totalReceived, numProducedMessages);
    long elapsedNanos = System.nanoTime() - startNanos;
    Assert.assertTrue(elapsedNanos < maxTimeNanos);
    consumer.close();
    producer.close();
    log.info("-- Exiting {} test --", methodName);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.filemgr.cli;
//JDK imports
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Properties;
//JUnit imports
import junit.framework.TestCase;
//Apache imports
import org.apache.commons.io.FileUtils;
//OODT imports
import org.apache.oodt.cas.cli.CmdLineUtility;
import org.apache.oodt.cas.cli.util.OptionPropertyRegister;
import org.apache.oodt.cas.filemgr.datatransfer.InPlaceDataTransferFactory;
import org.apache.oodt.cas.filemgr.structs.Product;
import org.apache.oodt.cas.filemgr.structs.ProductPage;
import org.apache.oodt.cas.filemgr.structs.ProductType;
import org.apache.oodt.cas.filemgr.structs.Query;
import org.apache.oodt.cas.filemgr.structs.Reference;
import org.apache.oodt.cas.filemgr.structs.TermQueryCriteria;
import org.apache.oodt.cas.filemgr.structs.exceptions.ConnectionException;
import org.apache.oodt.cas.filemgr.structs.query.ComplexQuery;
import org.apache.oodt.cas.filemgr.system.MockXmlRpcFileManagerClient;
import org.apache.oodt.cas.filemgr.system.MockXmlRpcFileManagerClient.MethodCallDetails;
import org.apache.oodt.cas.metadata.Metadata;
//Google imports
import com.google.common.collect.Lists;
/**
* Tests File Manager Clients Command-line interface.
*
* @author bfoster (Brian Foster)
*/
public class TestFileManagerCli extends TestCase {
private CmdLineUtility cmdLineUtility;
private MockXmlRpcFileManagerClient client;
private Properties initialProperties = new Properties(
System.getProperties());
@Override
public void setUp() throws Exception {
    super.setUp();
    // Point the CLI at the test action/option Spring configs and enable debug output.
    Properties props = new Properties(System.getProperties());
    props.setProperty("org.apache.oodt.cas.cli.debug", "true");
    URL actionsUrl = this.getClass().getResource("/cmd-line-actions.xml");
    URL optionsUrl = this.getClass().getResource("/cmd-line-options.xml");
    props.setProperty("org.apache.oodt.cas.cli.action.spring.config",
        "file:" + new File(actionsUrl.getFile()).getAbsolutePath());
    props.setProperty("org.apache.oodt.cas.cli.option.spring.config",
        "file:" + new File(optionsUrl.getFile()).getAbsolutePath());
    System.setProperties(props);
    // Route all CLI actions through the mock client so RPC calls can be inspected.
    cmdLineUtility = new CmdLineUtility();
    UseMockClientCmdLineActionStore actionStore = new UseMockClientCmdLineActionStore();
    client = actionStore.getClient();
    cmdLineUtility.setActionStore(actionStore);
}
@Override
public void tearDown() throws Exception {
    // Undo per-test option registrations and restore the original JVM properties.
    OptionPropertyRegister.clearRegister();
    System.setProperties(initialProperties);
}
public void testAddProductType() throws MalformedURLException,
ConnectionException {
String productTypeName = "TestProductType";
String productTypeDesc = "ProductTypeDesc";
String productTypeRepo = "ProductTypeRepo";
String productTypeVersioner = "ProductTypeVersioner";
String productTypeId = "MyProductTypeId";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --addProductType"
+ " --typeName " + productTypeName + " --typeId "+productTypeId+" --typeDesc "
+ productTypeDesc + " --repository " + productTypeRepo
+ " --versionClass " + productTypeVersioner).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("addProductType", methodCallDetails.getMethodName());
assertEquals(1, methodCallDetails.getArgs().size());
ProductType pt = (ProductType) methodCallDetails.getArgs().get(0);
assertEquals(productTypeName, pt.getName());
assertEquals(productTypeDesc, pt.getDescription());
assertEquals(productTypeRepo, pt.getProductRepositoryPath());
assertEquals(productTypeVersioner, pt.getVersioner());
}
public void testDeleteProductById() {
String productId = "TestProductId";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --deleteProductById"
+ " --productId " + productId).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("removeProduct", methodCallDetails.getMethodName());
assertEquals(1, methodCallDetails.getArgs().size());
assertEquals(productId,
((Product) methodCallDetails.getArgs().get(0)).getProductId());
}
public void testDeleteProductByName() {
String productName = "TestProductName";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --deleteProductByName"
+ " --productName " + productName).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("removeProduct", methodCallDetails.getMethodName());
assertEquals(1, methodCallDetails.getArgs().size());
assertEquals(productName,
((Product) methodCallDetails.getArgs().get(0)).getProductName());
}
public void testGetCurrentTransfer() {
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getCurrentTransfer")
.split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getCurrentFileTransfer", methodCallDetails.getMethodName());
assertEquals(0, methodCallDetails.getArgs().size());
}
public void testGetCurrentTransfers() {
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getCurrentTransfers")
.split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getCurrentFileTransfers", methodCallDetails.getMethodName());
assertEquals(0, methodCallDetails.getArgs().size());
}
public void testGetFilePercentTransferred() {
URL refUrl = this.getClass().getResource("/ingest/test.txt");
String refPath = new File(refUrl.getFile()).getAbsolutePath();
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getFilePctTransferred"
+ " --origRef " + refPath).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getRefPctTransferred", methodCallDetails.getMethodName());
assertTrue(((Reference) methodCallDetails.getArgs().get(0))
.getOrigReference().endsWith("test.txt"));
}
public void testGetFirstPage() {
String productTypeName = "ProductTypeName";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getFirstPage"
+ " --productTypeName " + productTypeName).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getFirstPage", methodCallDetails.getMethodName());
assertEquals(productTypeName, ((ProductType) methodCallDetails.getArgs()
.get(0)).getName());
}
public void testGetLastPage() {
String productTypeName = "ProductTypeName";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getLastPage"
+ " --productTypeName " + productTypeName).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getLastPage", methodCallDetails.getMethodName());
assertEquals(productTypeName, ((ProductType) methodCallDetails.getArgs()
.get(0)).getName());
}
public void testGetNextPage() {
String productTypeName = "ProductTypeName";
int curPage = 1;
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getNextPage"
+ " --productTypeName " + productTypeName + " --curPage " + curPage)
.split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getNextPage", methodCallDetails.getMethodName());
assertEquals(productTypeName, ((ProductType) methodCallDetails.getArgs()
.get(0)).getName());
assertEquals(curPage,
((ProductPage) methodCallDetails.getArgs().get(1)).getPageNum());
}
public void testGetPrevPage() {
String productTypeName = "ProductTypeName";
int curPage = 1;
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getPrevPage"
+ " --productTypeName " + productTypeName + " --curPage " + curPage)
.split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getPrevPage", methodCallDetails.getMethodName());
assertEquals(productTypeName, ((ProductType) methodCallDetails.getArgs()
.get(0)).getName());
assertEquals(curPage,
((ProductPage) methodCallDetails.getArgs().get(1)).getPageNum());
}
public void testGetNumProducts() {
String productTypeName = "ProductTypeName";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getNumProducts"
+ " --productTypeName " + productTypeName).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getNumProducts", methodCallDetails.getMethodName());
assertEquals(productTypeName, ((ProductType) methodCallDetails.getArgs()
.get(0)).getName());
}
public void testGetProductById() {
String productId = "TestProductId";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getProductById"
+ " --productId " + productId).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getProductById", methodCallDetails.getMethodName());
assertEquals(productId, methodCallDetails.getArgs().get(0));
}
public void testGetProductByName() {
String productName = "TestProductName";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getProductByName"
+ " --productName " + productName).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getProductByName", methodCallDetails.getMethodName());
assertEquals(productName, methodCallDetails.getArgs().get(0));
}
public void testGetProductPercnetTransferred() {
String productId = "TestProductId";
String productTypeName = "TestProductType";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getProductPctTransferred"
+ " --productId " + productId + " --productTypeName " + productTypeName)
.split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getProductPctTransferred",
methodCallDetails.getMethodName());
assertEquals(productId,
((Product) methodCallDetails.getArgs().get(0)).getProductId());
}
public void testGetProductTypeByName() {
String productTypeName = "TestProductType";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --getProductTypeByName"
+ " --productTypeName " + productTypeName).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getProductTypeByName", methodCallDetails.getMethodName());
assertEquals(productTypeName, methodCallDetails.getArgs().get(0));
}
public void testHasProduct() {
String productName = "TestProductName";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --hasProduct"
+ " --productName " + productName).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("hasProduct", methodCallDetails.getMethodName());
assertEquals(productName, methodCallDetails.getArgs().get(0));
}
public void testIngestProduct() {
URL refUrl = this.getClass().getResource("/ingest/test.txt");
URL metUrl = this.getClass().getResource("/ingest/test.txt.met");
String productName = "TestProductName";
String structure = Product.STRUCTURE_FLAT;
String productTypeName = "TestProductType";
String metadataFile = new File(metUrl.getFile()).getAbsolutePath();
String dataTransferer = InPlaceDataTransferFactory.class
.getCanonicalName();
String ref = new File(refUrl.getFile()).getAbsolutePath();
cmdLineUtility
.run(("--url http://localhost:9000 --operation --ingestProduct"
+ " --productName " + productName + " --productStructure "
+ structure + " --productTypeName " + productTypeName
+ " --metadataFile " + metadataFile + " --refs " + ref
+ " --clientTransfer --dataTransfer " + dataTransferer)
.split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("ingestProduct", methodCallDetails.getMethodName());
assertEquals(productName,
((Product) methodCallDetails.getArgs().get(0)).getProductName());
assertEquals(structure,
((Product) methodCallDetails.getArgs().get(0))
.getProductStructure());
assertEquals(productTypeName,
((Product) methodCallDetails.getArgs().get(0)).getProductType()
.getName());
assertTrue(((Product) methodCallDetails.getArgs().get(0))
.getProductReferences().get(0).getOrigReference().endsWith(ref));
assertEquals("test.txt",
((Metadata) methodCallDetails.getArgs().get(1))
.getMetadata("Filename"));
assertEquals("GenericFile", ((Metadata) methodCallDetails.getArgs()
.get(1)).getMetadata("ProductType"));
assertEquals(true, methodCallDetails.getArgs().get(2));
}
public void testDumpMetadata() throws IOException {
String productId = "TestProductId";
File bogusFile = File.createTempFile("bogus", "bogus");
File tmpFile = new File(bogusFile.getParentFile(), "CliDumpMetadata");
tmpFile.mkdirs();
bogusFile.delete();
cmdLineUtility
.run(("--url http://localhost:9000 --operation --dumpMetadata"
+ " --productId " + productId).split(" "));
OptionPropertyRegister.clearRegister();
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getMetadata", methodCallDetails.getMethodName());
assertEquals(productId,
((Product) methodCallDetails.getArgs().get(0)).getProductId());
cmdLineUtility
.run(("--url http://localhost:9000 --operation --dumpMetadata"
+ " --productId " + productId + " --outputDir " + tmpFile
.getAbsolutePath()).split(" "));
FileUtils.forceDelete(tmpFile);
}
public void testLuceneQuery() {
String query = "ProductId:TestProductId";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --luceneQuery"
+ " --query " + query).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("complexQuery", methodCallDetails.getMethodName());
assertEquals("ProductId", ((TermQueryCriteria) ((Query) methodCallDetails
.getArgs().get(0)).getCriteria().get(0)).getElementName());
assertEquals("TestProductId",
((TermQueryCriteria) ((Query) methodCallDetails.getArgs().get(0))
.getCriteria().get(0)).getValue());
OptionPropertyRegister.clearRegister();
String reducedMetadataKeys = "ProductId ProductType";
String outputFormat = "$ProductId";
String reducedProductTypes = "TestProductType";
String sortBy = "ProductId";
String delimiter = ",";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --luceneQuery"
+ " --query " + query + " --reducedMetadataKeys " + reducedMetadataKeys
+ " --outputFormat " + outputFormat
+ " --reducedProductTypes " + reducedProductTypes
+ " --sortBy " + sortBy
+ " --delimiter " + delimiter).split(" "));
methodCallDetails = client.getLastMethodCallDetails();
assertEquals("complexQuery", methodCallDetails.getMethodName());
ComplexQuery complexQuery = (ComplexQuery) methodCallDetails.getArgs().get(0);
assertEquals("ProductId", ((TermQueryCriteria) complexQuery.getCriteria().get(0)).getElementName());
assertEquals("TestProductId",
((TermQueryCriteria) complexQuery.getCriteria().get(0)).getValue());
assertEquals(Lists.newArrayList(reducedMetadataKeys.split(" ")), complexQuery.getReducedMetadata());
assertEquals(outputFormat, complexQuery.getToStringResultFormat());
assertEquals(Lists.newArrayList(reducedProductTypes.split(" ")), complexQuery.getReducedProductTypeNames());
assertEquals(sortBy, complexQuery.getSortByMetKey());
}
public void testRetrieveFilesById() {
String productId = "TestProductId";
String destination = "/tmp/toDir";
String transferer = "org.apache.oodt.cas.filemgr.datatransfer.InPlaceDataTransferFactory";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --retrieveFilesById"
+ " --productId " + productId + " --destination " + destination
+ " --transferer " + transferer).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getProductReferences", methodCallDetails.getMethodName());
assertEquals(productId, ((Product) methodCallDetails.getArgs().get(0)).getProductId());
}
public void testRetrieveFilesByName() {
String productName = "TestProductName";
String destination = "/tmp/toDir";
String transferer = "org.apache.oodt.cas.filemgr.datatransfer.InPlaceDataTransferFactory";
cmdLineUtility
.run(("--url http://localhost:9000 --operation --retrieveFilesByName"
+ " --productName " + productName + " --destination " + destination
+ " --transferer " + transferer).split(" "));
MethodCallDetails methodCallDetails = client.getLastMethodCallDetails();
assertEquals("getProductReferences", methodCallDetails.getMethodName());
assertEquals(productName, ((Product) methodCallDetails.getArgs().get(0)).getProductName());
}
}
| apache-2.0 |
vivekpaypal/PCVC-Organization | AndroidApp/Vidiyal/app/src/main/java/com/vidiyal/servicelayer/IApiService.java | 312 | package com.vidiyal.servicelayer;
import retrofit.Callback;
import retrofit.http.GET;
import retrofit.http.Path;
import retrofit.http.Query;
/**
* Created by Rifan on 11/28/2015.
*/
/**
 * Retrofit service definition for the Vidiyal REST API.
 */
public interface IApiService {
    /**
     * Performs a GET against the relative URL named by {@code node}.
     *
     * Fix: {@code {node}} in the {@code @GET} value is a URL *path*
     * placeholder, so the parameter must be bound with {@code @Path};
     * the original {@code @Query} binding would leave the placeholder
     * unresolved and append {@code ?node=...} instead, causing Retrofit
     * to fail at call time. (The {@code Path} import was already present
     * but unused — evidence of the intended binding.)
     *
     * @param node     path segment identifying the resource to fetch
     * @param callback asynchronous callback receiving the raw response body
     */
    @GET("{node}")
    void callApi(@Path("node") String node, Callback<String> callback);
}
| apache-2.0 |
xschildw/Synapse-Repository-Services | lib/jdomodels/src/main/java/org/sagebionetworks/repo/model/dbo/dao/table/DBOColumnModelDAOImpl.java | 11807 | package org.sagebionetworks.repo.model.dbo.dao.table;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_BOUND_CM_ORD_COLUMN_ID;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_BOUND_CM_ORD_OBJECT_ID;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_BOUND_CM_ORD_OBJECT_VERSION;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_BOUND_CM_ORD_ORDINAL;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_BOUND_OWNER_ETAG;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_BOUND_OWNER_OBJECT_ID;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_CM_HASH;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_CM_ID;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_CM_NAME;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.TABLE_BOUND_COLUMN_ORDINAL;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.TABLE_BOUND_COLUMN_OWNER;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.TABLE_COLUMN_MODEL;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.sagebionetworks.StackConfigurationSingleton;
import org.sagebionetworks.evaluation.dbo.DBOConstants;
import org.sagebionetworks.ids.IdGenerator;
import org.sagebionetworks.ids.IdType;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.dao.table.ColumnModelDAO;
import org.sagebionetworks.repo.model.dbo.DBOBasicDao;
import org.sagebionetworks.repo.model.dbo.SinglePrimaryKeySqlParameterSource;
import org.sagebionetworks.repo.model.dbo.persistence.table.ColumnModelUtils;
import org.sagebionetworks.repo.model.dbo.persistence.table.DBOBoundColumnOrdinal;
import org.sagebionetworks.repo.model.dbo.persistence.table.DBOBoundColumnOwner;
import org.sagebionetworks.repo.model.dbo.persistence.table.DBOColumnModel;
import org.sagebionetworks.repo.model.entity.IdAndVersion;
import org.sagebionetworks.repo.model.jdo.KeyFactory;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.transactions.WriteTransaction;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.util.ValidateArgument;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowCallbackHandler;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.SingleColumnRowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
/**
* Database implementation of the ColumnModelDAO interface.
*
* @author John
*
*/
public class DBOColumnModelDAOImpl implements ColumnModelDAO {
	private static final String INPUT = "input";
	private static final String SELECT_COLUMN_NAME = "SELECT "+ COL_CM_ID+","+COL_CM_NAME+" FROM "+TABLE_COLUMN_MODEL+" WHERE "+COL_CM_ID+" IN (:"+INPUT+")";
	private static final String SQL_SELECT_OWNER_ETAG_FOR_UPDATE = "SELECT "+COL_BOUND_OWNER_ETAG+" FROM "+TABLE_BOUND_COLUMN_OWNER+" WHERE "+COL_BOUND_OWNER_OBJECT_ID+" = ? FOR UPDATE";
	private static final String SQL_GET_COLUMN_MODELS_FOR_OBJECT = "SELECT CM.* FROM " + TABLE_BOUND_COLUMN_ORDINAL
			+ " BO JOIN " + TABLE_COLUMN_MODEL + " CM ON (BO." + COL_BOUND_CM_ORD_COLUMN_ID + " = CM." + COL_CM_ID + ")"
			+ " WHERE BO." + COL_BOUND_CM_ORD_OBJECT_ID + " = ? AND BO." + COL_BOUND_CM_ORD_OBJECT_VERSION
			+ " = ? ORDER BY BO." + COL_BOUND_CM_ORD_ORDINAL + " ASC";
	private static final String SQL_GET_COLUMN_ID_FOR_OBJECT = "SELECT " + COL_BOUND_CM_ORD_COLUMN_ID + " FROM "
			+ TABLE_BOUND_COLUMN_ORDINAL + " BO WHERE BO." + COL_BOUND_CM_ORD_OBJECT_ID + " = ? AND BO."
			+ COL_BOUND_CM_ORD_OBJECT_VERSION + " = ? ORDER BY BO." + COL_BOUND_CM_ORD_ORDINAL + " ASC";
	private static final String SQL_DELETE_BOUND_ORDINAL = "DELETE FROM "+TABLE_BOUND_COLUMN_ORDINAL+" WHERE "+COL_BOUND_CM_ORD_OBJECT_ID+" = ? AND "+COL_BOUND_CM_ORD_OBJECT_VERSION+" = ?";
	private static final String SQL_DELETE_COLUMN_MODEL = "DELETE FROM "+TABLE_COLUMN_MODEL+" WHERE "+COL_CM_ID+" = ?";
	private static final String SQL_SELECT_COLUMNS_WITH_NAME_PREFIX_COUNT = "SELECT COUNT(*) FROM "+TABLE_COLUMN_MODEL+" WHERE "+COL_CM_NAME+" LIKE ? ";
	private static final String SQL_SELECT_COLUMNS_WITH_NAME_PREFIX = "SELECT * FROM "+TABLE_COLUMN_MODEL+" WHERE "+COL_CM_NAME+" LIKE ? ORDER BY "+COL_CM_NAME+" LIMIT ? OFFSET ?";
	private static final String SQL_TRUNCATE_BOUND_COLUMN_ORDINAL = "DELETE FROM "+TABLE_BOUND_COLUMN_ORDINAL+" WHERE "+COL_BOUND_CM_ORD_ORDINAL+" >= 0";
	private static final String SQL_TRUNCATE_COLUMN_MODEL= "DELETE FROM "+TABLE_COLUMN_MODEL+" WHERE "+COL_CM_ID+" >= 0";
	private static final String SQL_SELECT_COLUMNS_FOR_IDS = "SELECT * FROM "+TABLE_COLUMN_MODEL+" WHERE "+COL_CM_ID+" IN ( :ids ) ORDER BY "+COL_CM_NAME;
	private static final String SQL_SELECT_ID_WHERE_HASH_EQUALS = "SELECT "+COL_CM_ID+" FROM "+TABLE_COLUMN_MODEL+" WHERE "+COL_CM_HASH+" = ?";
	@Autowired
	private DBOBasicDao basicDao;
	@Autowired
	private JdbcTemplate jdbcTemplate;
	@Autowired
	private NamedParameterJdbcTemplate namedJdbcTemplate;
	@Autowired
	private IdGenerator idGenerator;
	// Fix: declared final — RowMapper is stateless and this shared constant
	// must never be reassigned.
	private static final RowMapper<DBOColumnModel> ROW_MAPPER = new DBOColumnModel().getTableMapping();
	/**
	 * List column models whose name starts with the given (case-insensitive)
	 * prefix, paginated by limit/offset.
	 */
	@Override
	public List<ColumnModel> listColumnModels(String namePrefix, long limit, long offset) {
		String likeString = preparePrefix(namePrefix);
		List<DBOColumnModel> dbos = jdbcTemplate.query(SQL_SELECT_COLUMNS_WITH_NAME_PREFIX, ROW_MAPPER, likeString, limit, offset);
		// Convert to DTOs
		return ColumnModelUtils.createDTOFromDBO(dbos);
	}
	/**
	 * Get the column models bound to the given object/version, in ordinal order.
	 */
	@Override
	public List<ColumnModel> getColumnModelsForObject(IdAndVersion idAndVersion) throws DatastoreException {
		List<DBOColumnModel> dbos = jdbcTemplate.query(SQL_GET_COLUMN_MODELS_FOR_OBJECT, ROW_MAPPER,
				idAndVersion.getId(), idAndVersion.getVersion().orElse(DBOBoundColumnOrdinal.DEFAULT_NULL_VERSION));
		// Convert to DTOs
		return ColumnModelUtils.createDTOFromDBO(dbos);
	}
	/**
	 * Get only the IDs of the columns bound to the given object/version, in
	 * ordinal order.
	 */
	@Override
	public List<String> getColumnModelIdsForObject(IdAndVersion idAndVersion) {
		return jdbcTemplate.queryForList(SQL_GET_COLUMN_ID_FOR_OBJECT, String.class, idAndVersion.getId(),
				idAndVersion.getVersion().orElse(DBOBoundColumnOrdinal.DEFAULT_NULL_VERSION));
	}
	/**
	 * Build the SQL LIKE pattern for a name-prefix search. A null prefix
	 * matches everything; matching is done on the lower-cased prefix.
	 *
	 * @param namePrefix prefix to search for, may be null
	 * @return the LIKE pattern, e.g. "foo%"
	 */
	public String preparePrefix(String namePrefix) {
		if(namePrefix == null){
			namePrefix = "";
		}
		String likeString = namePrefix.toLowerCase()+"%";
		return likeString;
	}
	/**
	 * Count the column models whose name starts with the given prefix.
	 */
	@Override
	public long listColumnModelsCount(String namePrefix) {
		String likeString = preparePrefix(namePrefix);
		return jdbcTemplate.queryForObject(SQL_SELECT_COLUMNS_WITH_NAME_PREFIX_COUNT,new SingleColumnRowMapper<Long>(), likeString);
	}
	/**
	 * Create a column model, deduplicating by content hash: if an identical
	 * column already exists its existing row is returned instead of inserting
	 * a duplicate.
	 */
	@WriteTransaction
	@Override
	public ColumnModel createColumnModel(ColumnModel model) throws DatastoreException, NotFoundException {
		// Convert to the DBO
		DBOColumnModel dbo = ColumnModelUtils.createDBOFromDTO(model, StackConfigurationSingleton.singleton().getTableMaxEnumValues());
		// check to see if a column model already exists with this hash.
		String existingId = getColumnForHash(dbo.getHash());
		if(existingId != null){
			// a column already exists with this same hash.
			return getColumnModel(existingId);
		}
		// This is a new unique hash.
		Long id = idGenerator.generateNewId(IdType.COLUMN_MODEL_ID);
		dbo.setId(id);
		// Save it.
		basicDao.createNew(dbo);
		return getColumnModel(Long.toString(id));
	}
	/**
	 * Fetch a single column model by its ID.
	 */
	@Override
	public ColumnModel getColumnModel(String id) throws DatastoreException, NotFoundException {
		MapSqlParameterSource param = new MapSqlParameterSource();
		// NOTE(review): PARAM_EVALUATION_ID comes from the evaluation DBO
		// constants — verify it matches DBOColumnModel's primary-key parameter
		// name; a column-model-specific constant would be clearer.
		param.addValue(DBOConstants.PARAM_EVALUATION_ID, id);
		DBOColumnModel dbo = basicDao.getObjectByPrimaryKey(DBOColumnModel.class, param);
		return ColumnModelUtils.createDTOFromDBO(dbo);
	}
	/**
	 * Delete a column model row by ID.
	 *
	 * @return number of rows deleted (0 or 1)
	 */
	@WriteTransaction
	@Override
	public int deleteColumModel(String id) {
		if(id == null) throw new IllegalArgumentException("id cannot be null");
		return jdbcTemplate.update(SQL_DELETE_COLUMN_MODEL, id);
	}
	/**
	 * Delete the bound-column owner row for the given object.
	 */
	@WriteTransaction
	@Override
	public void deleteOwner(String objectId) {
		basicDao.deleteObjectByPrimaryKey(DBOBoundColumnOwner.class, new SinglePrimaryKeySqlParameterSource(KeyFactory.stringToKey(objectId)));
	}
	/**
	 * Replace the ordered column binding for the given object/version. The
	 * owner row's etag is rotated, the existing ordinal rows are removed, and
	 * the new columns (if any) are inserted in order.
	 */
	@WriteTransaction
	@Override
	public void bindColumnToObject(final List<ColumnModel> newColumns, final IdAndVersion idAndVersion) throws NotFoundException {
		ValidateArgument.required(idAndVersion, "idAndVersion");
		// Create or update the owner.
		DBOBoundColumnOwner owner = new DBOBoundColumnOwner();
		owner.setObjectId(idAndVersion.getId());
		owner.setEtag(UUID.randomUUID().toString());
		basicDao.createOrUpdate(owner);
		// Now replace the current current ordinal binding for this object.
		jdbcTemplate.update(SQL_DELETE_BOUND_ORDINAL, idAndVersion.getId(), idAndVersion.getVersion().orElse(DBOBoundColumnOrdinal.DEFAULT_NULL_VERSION));
		// bind the new columns if provided.
		if(newColumns != null && !newColumns.isEmpty()) {
			// Now insert the ordinal values
			List<DBOBoundColumnOrdinal> ordinal = ColumnModelUtils.createDBOBoundColumnOrdinalList(idAndVersion, newColumns);
			// this is just an insert
			basicDao.createBatch(ordinal);
		}
	}
	/**
	 * Look up the ID of the column model with the given content hash.
	 *
	 * @return the ID as a string, or null if no column has this hash
	 */
	@Override
	public String getColumnForHash(String hash) {
		try {
			long id = jdbcTemplate.queryForObject(SQL_SELECT_ID_WHERE_HASH_EQUALS,new SingleColumnRowMapper<Long>(), hash);
			return Long.toString(id);
		} catch (EmptyResultDataAccessException e) {
			return null;
		}
	}
	/**
	 * Fetch the column models for the given IDs, ordered by column name.
	 */
	@Override
	public List<ColumnModel> getColumnModels(List<String> ids) throws DatastoreException, NotFoundException {
		if(ids == null) throw new IllegalArgumentException("Ids cannot be null");
		if(ids.isEmpty()){
			return new LinkedList<ColumnModel>();
		}
		MapSqlParameterSource parameters = new MapSqlParameterSource("ids", ids);
		// Fix: use the injected NamedParameterJdbcTemplate (as getColumnNames
		// already does) instead of constructing a new one per call.
		List<DBOColumnModel> dbos = namedJdbcTemplate.query(SQL_SELECT_COLUMNS_FOR_IDS, parameters,ROW_MAPPER);
		// Convert to DTOs
		return ColumnModelUtils.createDTOFromDBO(dbos);
	}
	/**
	 * Delete all bound-column and column-model rows. Intended for test/bootstrap
	 * cleanup only.
	 *
	 * @return true if any rows were deleted
	 */
	@WriteTransaction
	@Override
	public boolean truncateAllColumnData() {
		int count = jdbcTemplate.update(SQL_TRUNCATE_BOUND_COLUMN_ORDINAL);
		count += jdbcTemplate.update(SQL_TRUNCATE_COLUMN_MODEL);
		return count >0;
	}
	/**
	 * Acquire a row lock on the owner row (SELECT ... FOR UPDATE) for the
	 * duration of the surrounding transaction and return its current etag.
	 */
	@WriteTransaction
	@Override
	public String lockOnOwner(String objectIdString) {
		Long objectId = KeyFactory.stringToKey(objectIdString);
		return jdbcTemplate.queryForObject(SQL_SELECT_OWNER_ETAG_FOR_UPDATE, new RowMapper<String>() {
			@Override
			public String mapRow(ResultSet rs, int rowNum) throws SQLException {
				return rs.getString(COL_BOUND_OWNER_ETAG);
			}
		}, objectId);
	}
	/**
	 * Resolve column IDs to their names. IDs with no matching row are simply
	 * absent from the returned map.
	 */
	@Override
	public Map<Long, String> getColumnNames(Set<Long> columnIds) {
		ValidateArgument.required(columnIds, "columnIds");
		final Map<Long, String> results = new HashMap<>(columnIds.size());
		if(columnIds.isEmpty()) {
			return results;
		}
		MapSqlParameterSource param = new MapSqlParameterSource();
		param.addValue(INPUT, columnIds);
		namedJdbcTemplate.query(SELECT_COLUMN_NAME, param, new RowCallbackHandler() {
			@Override
			public void processRow(ResultSet rs) throws SQLException {
				long id = rs.getLong(COL_CM_ID);
				String name = rs.getString(COL_CM_NAME);
				results.put(id, name);
			}
		});
		return results;
	}
}
| apache-2.0 |
roalva1/opencga | opencga-catalog/src/main/java/org/opencb/opencga/catalog/exceptions/CatalogException.java | 988 | /*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.catalog.exceptions;
/**
* Created by jacobo on 12/12/14.
*/
/**
 * Base checked exception for OpenCGA catalog operations. Mirrors the three
 * standard {@link Exception} constructors so callers can attach a message,
 * a cause, or both.
 */
public class CatalogException extends Exception {
    // Exceptions are Serializable; pin the serial form explicitly so adding
    // fields later does not silently break deserialization compatibility.
    private static final long serialVersionUID = 1L;

    /**
     * @param message detail message describing the catalog failure
     */
    public CatalogException(String message) {
        super(message);
    }

    /**
     * @param message detail message describing the catalog failure
     * @param cause   underlying exception that triggered this one
     */
    public CatalogException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * @param cause underlying exception that triggered this one
     */
    public CatalogException(Throwable cause) {
        super(cause);
    }
}
| apache-2.0 |
jexp/idea2 | platform/platform-impl/src/com/intellij/openapi/editor/actions/PreviousWordAction.java | 1405 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: max
* Date: May 14, 2002
* Time: 6:49:27 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.intellij.openapi.editor.actions;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.actionSystem.EditorAction;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.actionSystem.DataContext;
public class PreviousWordAction extends TextComponentEditorAction {
public PreviousWordAction() {
super(new Handler());
}
private static class Handler extends EditorActionHandler {
public void execute(Editor editor, DataContext dataContext) {
EditorActionUtil.moveCaretToPreviousWord(editor, false);
}
}
}
| apache-2.0 |
lizhanhui/rocketmq-storm | src/main/java/com/alibaba/rocketmq/storm/MessageConsumerManager.java | 2840 | package com.alibaba.rocketmq.storm;
import com.alibaba.rocketmq.common.MixAll;
import org.apache.commons.lang.BooleanUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alibaba.rocketmq.client.consumer.DefaultMQPullConsumer;
import com.alibaba.rocketmq.client.consumer.DefaultMQPushConsumer;
import com.alibaba.rocketmq.client.consumer.MQConsumer;
import com.alibaba.rocketmq.client.consumer.listener.MessageListener;
import com.alibaba.rocketmq.client.exception.MQClientException;
import com.alibaba.rocketmq.common.consumer.ConsumeFromWhere;
import com.alibaba.rocketmq.common.protocol.heartbeat.MessageModel;
import com.alibaba.rocketmq.storm.domain.RocketMQConfig;
import com.alibaba.rocketmq.storm.internal.tools.FastBeanUtils;
import com.google.common.collect.Sets;
/**
* @author Von Gosling
*/
/**
 * Factory for RocketMQ consumers (push or pull) configured from a
 * {@link RocketMQConfig}.
 *
 * NOTE(review): the created consumers are cached in static fields that are
 * overwritten on every call — this is not thread-safe and assumes a single
 * consumer per JVM; confirm that assumption before reusing from multiple
 * threads.
 *
 * @author Von Gosling
 */
public class MessageConsumerManager {
    private static final Logger LOG = LoggerFactory
            .getLogger(MessageConsumerManager.class);

    private static DefaultMQPushConsumer pushConsumer;
    private static DefaultMQPullConsumer pullConsumer;

    MessageConsumerManager() {
    }

    /**
     * Builds either a push or a pull consumer from the given configuration.
     *
     * @param config    source of consumer settings (group, topic, instance name, ...)
     * @param listener  message listener, registered only on push consumers
     * @param isPushlet true for a push consumer, false/null for a pull consumer
     * @return the configured (not yet started) consumer
     * @throws MQClientException if subscription setup fails
     */
    public static MQConsumer getConsumerInstance(RocketMQConfig config, MessageListener listener,
                                                 Boolean isPushlet) throws MQClientException {
        // SLF4J takes varargs; no need to wrap the arguments in an Object[].
        LOG.info("Begin to init consumer,instanceName->{},configuration->{}",
                config.getInstanceName(), config);
        LOG.info("----------------------------------------------- Enable SSL: " + System.getProperty("enable_ssl"));
        LOG.info("----------------------------------------------- rocketmq.namesrv.domain: " + MixAll.WS_DOMAIN_NAME);
        if (BooleanUtils.isTrue(isPushlet)) {
            // Copy the shared configuration properties onto a push consumer,
            // then apply push-specific settings and the listener.
            pushConsumer = (DefaultMQPushConsumer) FastBeanUtils.copyProperties(config,
                    DefaultMQPushConsumer.class);
            pushConsumer.setConsumerGroup(config.getGroupId());
            pushConsumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET);
            pushConsumer.subscribe(config.getTopic(), config.getTopicTag());
            pushConsumer.setMessageModel(MessageModel.CLUSTERING);
            pushConsumer.registerMessageListener(listener);
            return pushConsumer;
        } else {
            // Pull consumers poll explicitly, so no listener is registered.
            pullConsumer = (DefaultMQPullConsumer) FastBeanUtils.copyProperties(config,
                    DefaultMQPullConsumer.class);
            pullConsumer.setConsumerGroup(config.getGroupId());
            pullConsumer.setMessageModel(MessageModel.CLUSTERING);
            return pullConsumer;
        }
    }
}
| apache-2.0 |
ButterflyNetwork/bazel | src/main/java/com/google/devtools/build/lib/rules/java/JavaImplicitAttributes.java | 1022 | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
/**
* Implicit attribute names that Java rules use, such as the JDK target name.
*/
/**
 * Implicit attribute names that Java rules use, such as the JDK target name.
 */
public class JavaImplicitAttributes {
  /**
   * Label of the default target JDK.
   */
  public static final String JDK_LABEL = "//tools/jdk:jdk";

  /**
   * Label of the default host JDK.
   */
  public static final String HOST_JDK_LABEL = "//tools/jdk:host_jdk";

  // Constants holder: prevent instantiation.
  private JavaImplicitAttributes() {
  }
}
| apache-2.0 |
GenericBreakGlass/GenericBreakGlass-XACML | src/com.sun.xacml/src/main/java/com/sun/xacml/cond/GeneralSetFunction.java | 9691 |
/*
* @(#)GeneralSetFunction.java
*
* Copyright 2004-2006 Sun Microsystems, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistribution of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING
* ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
* OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN")
* AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
* AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST
* REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
* INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY
* OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed or intended for use in
* the design, construction, operation or maintenance of any nuclear facility.
*/
package com.sun.xacml.cond;
import com.sun.xacml.EvaluationCtx;
import com.sun.xacml.attr.AttributeValue;
import com.sun.xacml.attr.BagAttribute;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Specific <code>SetFunction</code> class that supports all of the
* general-purpose set functions: type-intersection and type-union.
*
* @since 1.2
* @author Seth Proctor
*/
public class GeneralSetFunction extends SetFunction
{
// private identifiers for the supported functions
private static final int ID_BASE_INTERSECTION = 0;
private static final int ID_BASE_UNION = 1;
// mapping of function name to its associated id and parameter type
private static HashMap<String, Integer> idMap;
private static HashMap<String, String> typeMap;
/**
* Static initializer that sets up the parameter info for all the
* supported functions.
*/
static {
idMap = new HashMap<String, Integer>();
typeMap = new HashMap<String, String>();
idMap.put(NAME_BASE_INTERSECTION, Integer.valueOf(ID_BASE_INTERSECTION));
idMap.put(NAME_BASE_UNION, Integer.valueOf(ID_BASE_UNION));
for (int i = 0; i < baseTypes.length; i++) {
String baseName = FUNCTION_NS + simpleTypes[i];
String baseType = baseTypes[i];
idMap.put(baseName + NAME_BASE_INTERSECTION,
Integer.valueOf(ID_BASE_INTERSECTION));
idMap.put(baseName + NAME_BASE_UNION,
Integer.valueOf(ID_BASE_UNION));
typeMap.put(baseName + NAME_BASE_INTERSECTION, baseType);
typeMap.put(baseName + NAME_BASE_UNION, baseType);
}
for (int i = 0; i < baseTypes2.length; i++) {
String baseName = FUNCTION_NS_2 + simpleTypes2[i];
String baseType = baseTypes2[i];
idMap.put(baseName + NAME_BASE_INTERSECTION,
Integer.valueOf(ID_BASE_INTERSECTION));
idMap.put(baseName + NAME_BASE_UNION,
Integer.valueOf(ID_BASE_UNION));
typeMap.put(baseName + NAME_BASE_INTERSECTION, baseType);
typeMap.put(baseName + NAME_BASE_UNION, baseType);
}
}
/**
* Constructor that is used to create one of the general-purpose standard
* set functions. The name supplied must be one of the standard XACML
* functions supported by this class, including the full namespace,
* otherwise an exception is thrown. Look in <code>SetFunction</code>
* for details about the supported names.
*
* @param functionName the name of the function to create
*
* @throws IllegalArgumentException if the function is unknown
*/
public GeneralSetFunction(String functionName) {
super(functionName, getId(functionName), getArgumentType(functionName),
getArgumentType(functionName), true);
}
/**
* Constructor that is used to create instances of general-purpose set
* functions for new (non-standard) datatypes. This is equivalent to
* using the <code>getInstance</code> methods in <code>SetFunction</code>
* and is generally only used by the run-time configuration code.
*
* @param functionName the name of the new function
* @param datatype the full identifier for the supported datatype
* @param functionType which kind of Set function, based on the
* <code>NAME_BASE_*</code> fields
*/
public GeneralSetFunction(String functionName, String datatype,
String functionType) {
super(functionName, getId(functionType), datatype, datatype, true);
}
/**
* Private helper that returns the internal identifier used for the
* given standard function.
*/
private static int getId(String functionName) {
Integer id = (Integer)(idMap.get(functionName));
if (id == null) {
throw new IllegalArgumentException("unknown set function " +
functionName);
}
return id.intValue();
}
/**
* Private helper that returns the argument type for the given standard
* function. Note that this doesn't check on the return value since the
* method always is called after getId, so we assume that the function
* is present.
*/
private static String getArgumentType(String functionName) {
return (String)(typeMap.get(functionName));
}
/**
* Returns a <code>Set</code> containing all the function identifiers
* supported by this class.
*
* @return a <code>Set</code> of <code>String</code>s
*/
public static Set<String> getSupportedIdentifiers() {
return Collections.unmodifiableSet(idMap.keySet());
}
/**
* Evaluates the function, using the specified parameters.
*
* @param inputs a <code>List</code> of <code>Evaluatable</code>
* objects representing the arguments passed to the function
* @param context an <code>EvaluationCtx</code> so that the
* <code>Evaluatable</code> objects can be evaluated
* @return an <code>EvaluationResult</code> representing the
* function's result
*/
public EvaluationResult evaluate(List<Expression> inputs, EvaluationCtx context) {
// Evaluate the arguments
AttributeValue [] argValues = new AttributeValue[inputs.size()];
EvaluationResult evalResult = evalArgs(inputs, context, argValues);
if (evalResult != null) {
return evalResult;
}
// setup the two bags we'll be using
BagAttribute [] bags = new BagAttribute[2];
bags[0] = (BagAttribute)(argValues[0]);
bags[1] = (BagAttribute)(argValues[1]);
AttributeValue result = null;
Set<AttributeValue> set = new HashSet<AttributeValue>();
if (getFunctionId() == ID_BASE_INTERSECTION) {
// *-intersection takes two bags of the same type and returns
// a bag of that type
// create a bag with the common elements of both inputs, removing
// all duplicate values
Iterator<AttributeValue> it = bags[0].iterator();
// find all the things in bags[0] that are also in bags[1]
while (it.hasNext()) {
AttributeValue value = it.next();
if (bags[1].contains(value)) {
// sets won't allow duplicates, so this addition is ok
set.add(value);
}
}
result = new BagAttribute(bags[0].getType(), set);
} else if (getFunctionId() == ID_BASE_UNION) {
// *-union takes two bags of the same type and returns a bag of
// that type
// create a bag with all the elements from both inputs, removing
// all duplicate values
Iterator<AttributeValue> it0 = bags[0].iterator();
while (it0.hasNext()) {
// first off, add all elements from the first bag...the set
// will ignore all duplicates
set.add(it0.next());
}
Iterator<AttributeValue> it1 = bags[1].iterator();
while (it1.hasNext()) {
// now add all the elements from the second bag...again, all
// duplicates will be ignored by the set
set.add(it1.next());
}
result = new BagAttribute(bags[0].getType(), set);
}
return new EvaluationResult(result);
}
}
| apache-2.0 |
gdgvietnam/android_app | app/src/main/java/org/gdg/frisbee/android/view/MyView.java | 11096 | /*
* Copyright 2013 The GDG Frisbee Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gdg.frisbee.android.view;
import android.content.Context;
import android.database.DataSetObserver;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.*;
import org.gdg.frisbee.android.adapter.NewsAdapter;
import java.util.ArrayList;
import java.util.Map;
/**
* GDG Aachen
* org.gdg.frisbee.android.view
* <p/>
* User: maui
* Date: 29.04.13
* Time: 16:56
*/
public class MyView extends AbsListView {
private final static String LOG_TAG = "GDG-MyView";
private ListAdapter mAdapter;
private LinearLayout mRoot;
private ArrayList<WrapAdapter> mAdapters;
private ArrayList<ListView> mColumns;
private int mColumnCount = 1;
public MyView(Context context) {
super(context);
mColumns = new ArrayList<ListView>();
mAdapters = new ArrayList<WrapAdapter>();
mColumns = new ArrayList<ListView>();
mAdapters = new ArrayList<WrapAdapter>();
}
public MyView(Context context, AttributeSet attrs) {
super(context, attrs);
int columns = attrs.getAttributeIntValue("http://schemas.android.com/apk/res-auto","numColumns",1);
mColumns = new ArrayList<ListView>(columns);
mAdapters = new ArrayList<WrapAdapter>(columns);
mColumnCount = columns;
}
public MyView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
int columns = attrs.getAttributeIntValue("http://schemas.android.com/apk/res-auto","numColumns",1);
mColumns = new ArrayList<ListView>(columns);
mAdapters = new ArrayList<WrapAdapter>(columns);
mColumnCount = columns;
}
private void initLayout() {
mRoot = new LinearLayout(getContext());
mRoot.setOrientation(LinearLayout.HORIZONTAL);
// Defining the LinearLayout layout parameters to fill the parent.
LinearLayout.LayoutParams llp = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.MATCH_PARENT);
mRoot.setLayoutParams(llp);
for(int i = 0; i < mColumnCount; i++) {
ListView list = new InnerListView(getContext());
list.setDivider(null);
list.setDividerHeight(0);
list.setVerticalScrollBarEnabled(false);
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
list.setScrollBarSize(0);
//list.setScrollIndicators(null, null);
}
mColumns.add(list);
LinearLayout.LayoutParams l = new LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.MATCH_PARENT);
l.weight = 1;
mRoot.addView(list, i, l);
list.setAdapter(mAdapters.get(i));
}
addViewInLayout(mRoot, -1, llp, false);
mRoot.measure(MeasureSpec.makeMeasureSpec(getWidth(), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(getHeight(), MeasureSpec.EXACTLY));
mRoot.layout(0,0,getWidth(),getHeight());
Log.d(LOG_TAG, "layoutInit()");
}
@Override
public void setAdapter(ListAdapter adapter) {
mAdapter = adapter;
for(int i = 0; i < mColumnCount; i++) {
mAdapters.add(new WrapAdapter(mAdapter));
}
mAdapter.registerDataSetObserver(new DataSetObserver() {
@Override
public void onInvalidated() {
super.onInvalidated();
Log.d(LOG_TAG, "onInvalidated()");
}
@Override
public void onChanged() {
super.onChanged();
applyAdapterUpdate();
}
});
Log.d(LOG_TAG, "setAdapter()");
}
/*
@Override
public int getChildCount() {
if(mRoot == null)
return 0;
int children = 0;
return mColumns.size();
}
@Override
public View getChildAt(int index) {
View v = null;
ViewGroup vg = null;
if(mRoot == null) {
return null;
} else {
vg = (ViewGroup) mRoot.getChildAt(0);
v = vg.getChildAt(index);
if(v == null) {
vg = (ViewGroup) mRoot.getChildAt(1);
v = vg.getChildAt(index);
}
}
if(v == null) {
Log.d(LOG_TAG, "Not so good..."+index);
}
return v;
}
*/
private void applyAdapterUpdate() {
for(WrapAdapter a : mAdapters) {
a.clear();
}
for(int i = 0; i <mAdapter.getCount(); i++) {
View v = mAdapter.getView(i, null, null);
if(mAdapter instanceof NewsAdapter) {
((NewsAdapter)mAdapter).getItemInternal(i).setConsumed(false);
}
v.measure(MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED), MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
getShortestAdapter().add(i, v.getMeasuredHeight());
}
}
private WrapAdapter getShortestAdapter() {
WrapAdapter a = null;
int minHeight = Integer.MAX_VALUE;
for(WrapAdapter adapter : mAdapters) {
if(adapter.getHeight() < minHeight) {
a = adapter;
minHeight = adapter.getHeight();
}
}
return a;
}
@Override
public int getFirstVisiblePosition() {
int val = 0;
for(int i = 0; i < mColumnCount; i++) {
ListView lv = (ListView) mRoot.getChildAt(i);
if(lv.getFirstVisiblePosition() > val)
val = lv.getFirstVisiblePosition();
}
Log.d(LOG_TAG, "Visible: "+ val);
return val;
}
@Override
public ListAdapter getAdapter() {
return mAdapter;
}
@Override
public void setSelection(int i) {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int widthMode = MeasureSpec.getMode(widthMeasureSpec);
int heightMode = MeasureSpec.getMode(heightMeasureSpec);
int widthSize = MeasureSpec.getSize(widthMeasureSpec);
int heightSize = MeasureSpec.getSize(heightMeasureSpec);
if (widthMode != MeasureSpec.EXACTLY) {
widthMode = MeasureSpec.EXACTLY;
}
if (heightMode != MeasureSpec.EXACTLY) {
heightMode = MeasureSpec.EXACTLY;
}
setMeasuredDimension(widthSize, heightSize);
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
super.onLayout(changed, l, t, r, b);
initLayout();
}
@Override
public void requestLayout() {
if(mRoot != null) {
//mRoot.requestLayout();
}
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
//return super.onInterceptTouchEvent(ev);
return true;
}
@Override
public int getCount() {
return mAdapter.getCount();
}
public class WrapAdapter extends BaseAdapter {
private ListAdapter mBaseAdapter;
private ArrayList<Integer> mItems;
private int mHeight = 0;
public WrapAdapter(ListAdapter adapter) {
mBaseAdapter = adapter;
mItems = new ArrayList<Integer>();
}
public int getHeight() {
return mHeight;
}
public void add(int baseItem, int height) {
mHeight += height;
mItems.add(baseItem);
notifyDataSetChanged();
}
public void addAll(Map<Integer,Integer> items) {
for(Map.Entry<Integer,Integer> item : items.entrySet()) {
mHeight += item.getValue();
mItems.add(item.getKey());
}
notifyDataSetChanged();
}
public void clear() {
mItems.clear();
mHeight = 0;
notifyDataSetChanged();
}
@Override
public int getCount() {
return mItems.size();
}
@Override
public Object getItem(int i) {
return mBaseAdapter.getItem(mItems.get(i));
}
@Override
public long getItemId(int i) {
return mBaseAdapter.getItemId(mItems.get(i));
}
@Override
public int getViewTypeCount() {
return mBaseAdapter.getViewTypeCount();
}
@Override
public int getItemViewType(int position) {
return mBaseAdapter.getItemViewType(mItems.get(position));
}
@Override
public View getView(int i, View view, ViewGroup viewGroup) {
View v = mBaseAdapter.getView(mItems.get(i), view, viewGroup);
return v;
}
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
/*Rect hit = new Rect();
for(ListView v : mColumns) {
v.getDrawingRect(hit);
if(hit.contains((int) ev.getX(), (int) ev.getY()))
return v.onTouchEvent(ev);
} */
for(ListView v : mColumns) {
v.dispatchTouchEvent(ev);
}
return true;
}
public class InnerListView extends ListView {
public InnerListView(Context context) {
super(context);
}
@Override
protected boolean overScrollBy(int deltaX, int deltaY, int scrollX, int scrollY, int scrollRangeX, int scrollRangeY, int maxOverScrollX, int maxOverScrollY, boolean isTouchEvent)
{
//This is where the magic happens, we have replaced the incoming maxOverScrollY with our own custom variable mMaxYOverscrollDistance;
return super.overScrollBy(deltaX, deltaY, scrollX, scrollY, scrollRangeX, scrollRangeY, maxOverScrollX, maxOverScrollY, isTouchEvent);
}
}
}
| apache-2.0 |
DimitrisAndreou/flexigraph | test/gr/forth/ics/graph/event/GraphEventTest.java | 1642 | package gr.forth.ics.graph.event;
import gr.forth.ics.graph.Graph;
import gr.forth.ics.graph.PrimaryGraph;
import junit.framework.*;
import static gr.forth.ics.util.Sample.*;
public class GraphEventTest extends TestCase {

    public GraphEventTest(String testName) {
        super(testName);
    }

    protected void setUp() throws Exception {
        reinitData();
    }

    public static Test suite() {
        return new TestSuite(GraphEventTest.class);
    }

    /** A GraphEvent must accept data matching its type and reject mismatches. */
    public void testGetData() {
        try {
            new GraphEvent(graph, GraphEvent.Type.NODE_ADDED, node);
        } catch (Exception e) {
            fail(e.getMessage());
        }
        callWrong(GraphEvent.Type.NODE_ADDED, edge);
        callWrong(GraphEvent.Type.EDGE_REMOVED, node);
        callWrong(GraphEvent.Type.NODE_REORDERED, edge);
        callWrong(GraphEvent.Type.EDGE_REORDERED, node);
    }

    /** Asserts that constructing an event with mismatched data throws CCE. */
    private void callWrong(GraphEvent.Type type, Object data) {
        boolean thrown = false;
        try {
            new GraphEvent(graph, type, data);
        } catch (ClassCastException expected) {
            thrown = true;
        }
        if (!thrown) {
            fail("Did not throw class cast exception on input: " + type + ", " + data);
        }
    }

    /** Removing a listener while it is being notified must not blow up. */
    public void testRemoveHotFiredListener() {
        final Graph g = new PrimaryGraph();
        g.addGraphListener(new EmptyGraphListener() {
            public void nodeAdded(GraphEvent e) {
                // Unsubscribe from inside the callback.
                g.removeGraphListener(this);
            }
        });
        // should NOT throw ConcurrentModificationException
        g.newNode();
    }
}
| apache-2.0 |
theoriginalbit/MoarPeripherals | src/main/java/com/moarperipherals/integration/mount/MountMoarP.java | 2277 | /**
* Copyright 2014-2015 Joshua Asbury (@theoriginalbit)
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.moarperipherals.integration.mount;
import com.theoriginalbit.framework.peripheral.interfaces.IPFMount;
import com.moarperipherals.ModInfo;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
/**
 * Read-only mount that exposes the Lua files extracted to disk under the
 * fixed mount location {@code moarp}.
 *
 * @author theoriginalbit
 * @since 13/11/14
 */
public class MountMoarP implements IPFMount {
    private static final File BASE_DIR = new File(ModInfo.EXTRACTED_LUA_PATH);
    private static final String LUA_NAME = "moarp";

    /** Maps a mount-relative path onto the extracted Lua directory. */
    private static File resolve(String path) {
        return new File(BASE_DIR, path);
    }

    @Override
    public String getMountLocation() {
        return LUA_NAME;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean exists(String path) throws IOException {
        return resolve(path).exists();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isDirectory(String path) throws IOException {
        return resolve(path).isDirectory();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void list(String path, List<String> contents) throws IOException {
        final File[] children = resolve(path).listFiles();
        if (children == null) {
            // not a directory (or an I/O error): nothing to add
            return;
        }
        for (final File child : children) {
            contents.add(child.getName());
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long getSize(String path) throws IOException {
        return resolve(path).length();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public InputStream openForRead(String path) throws IOException {
        return new FileInputStream(resolve(path));
    }
}
| apache-2.0 |
jblankendaal/effektif | effektif-workflow-impl/src/main/java/com/effektif/workflow/impl/bpmn/BpmnFieldName.java | 1070 | /* Copyright (c) 2014, Effektif GmbH.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package com.effektif.workflow.impl.bpmn;
/**
 * Qualified name (local name + namespace URI) of a BPMN field.
 *
 * @author Tom Baeyens
 */
public class BpmnFieldName {

  protected String name;
  protected String namespaceUri;

  /**
   * @param name the local name of the field
   * @param namespaceUri the namespace URI; when it matches the BPMN namespace
   *        the canonical {@link Bpmn#BPMN_URI} constant is stored instead, so
   *        repeated instances share a single string reference
   */
  public BpmnFieldName(String name, String namespaceUri) {
    this.name = name;
    this.namespaceUri = Bpmn.BPMN_URI.equals(namespaceUri) ? Bpmn.BPMN_URI : namespaceUri;
  }

  /**
   * Returns whether this field belongs to the BPMN namespace.
   * <p>
   * FIX: uses value equality instead of the original reference comparison
   * ({@code ==}), which only worked because of the constructor's interning
   * and would silently break if a subclass assigned the protected
   * {@code namespaceUri} field directly. The result is identical for
   * instances built through the constructor.
   */
  public boolean isBpmn() {
    return Bpmn.BPMN_URI.equals(namespaceUri);
  }
}
| apache-2.0 |
runepeter/maven-deploy-plugin-2.8.1 | maven-core/src/main/java/org/apache/maven/plugin/version/PluginVersionRequest.java | 3596 | package org.apache.maven.plugin.version;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.model.Model;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.repository.RemoteRepository;
/**
 * Collects settings required to resolve the version for a plugin.
 *
 * @since 3.0
 * @author Benjamin Bentmann
 */
public interface PluginVersionRequest
{

    /**
     * Gets the group id of the plugin.
     *
     * @return The group id of the plugin.
     */
    String getGroupId();

    /**
     * Sets the group id of the plugin.
     *
     * @param groupId The group id of the plugin.
     * @return This request, never {@code null}.
     */
    PluginVersionRequest setGroupId( String groupId );

    /**
     * Gets the artifact id of the plugin.
     *
     * @return The artifact id of the plugin.
     */
    String getArtifactId();

    /**
     * Sets the artifact id of the plugin.
     *
     * @param artifactId The artifact id of the plugin.
     * @return This request, never {@code null}.
     */
    PluginVersionRequest setArtifactId( String artifactId );

    /**
     * Gets the POM whose build plugins are to be scanned for the version.
     *
     * @return The POM whose build plugins are to be scanned for the version or {@code null} to only search the plugin
     *         repositories.
     */
    Model getPom();

    /**
     * Sets the POM whose build plugins are to be scanned for the version.
     *
     * @param pom The POM whose build plugins are to be scanned for the version, may be {@code null} to only search the
     *            plugin repositories.
     * @return This request, never {@code null}.
     */
    PluginVersionRequest setPom( Model pom );

    /**
     * Gets the remote repositories to use.
     *
     * @return The remote repositories to use, never {@code null}.
     */
    List<RemoteRepository> getRepositories();

    /**
     * Sets the remote repositories to use. <em>Note:</em> When creating a request from a project, be sure to use the
     * plugin repositories and not the regular project repositories.
     *
     * @param repositories The remote repositories to use.
     * @return This request, never {@code null}.
     */
    PluginVersionRequest setRepositories( List<RemoteRepository> repositories );

    /**
     * Gets the session to use for repository access.
     *
     * @return The repository session or {@code null} if not set.
     */
    RepositorySystemSession getRepositorySession();

    /**
     * Sets the session to use for repository access.
     *
     * @param repositorySession The repository session to use.
     * @return This request, never {@code null}.
     */
    PluginVersionRequest setRepositorySession( RepositorySystemSession repositorySession );

}
| apache-2.0 |
etirelli/drools-wb | drools-wb-services/drools-wb-verifier/drools-wb-verifier-core/src/main/java/org/drools/workbench/services/verifier/core/checks/base/CheckRunManager.java | 2649 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.services.verifier.core.checks.base;
import java.util.HashSet;
import java.util.Set;
import com.google.gwt.user.client.Command;
import org.drools.workbench.services.verifier.api.client.StatusUpdate;
import org.drools.workbench.services.verifier.api.client.configuration.RunnerType;
import org.drools.workbench.services.verifier.core.cache.inspectors.RuleInspector;
/**
 * Owns the queue of verifier checks scheduled for (re-)execution and delegates
 * actual execution to a {@link CheckRunner} created for the configured runner type.
 */
public class CheckRunManager {

    /** Checks queued to run on the next call to {@link #run}. */
    protected final Set<Check> rechecks = new HashSet<>();

    private final CheckRunner checkRunner;

    public CheckRunManager( final RunnerType runnerType ) {
        checkRunner = CheckRunnerFactory.make( runnerType );
    }

    /**
     * Run analysis with feedback
     * @param onStatus Command executed repeatedly receiving status update
     * @param onCompletion Command executed on completion, may be null
     */
    public void run( final StatusUpdate onStatus,
                     final Command onCompletion ) {
        //Ensure active analysis is cancelled
        cancelExistingAnalysis();

        //If there are no checks to run simply return.
        //FIX: the early return used to sit inside the onCompletion null-check,
        //so a null callback caused an empty check set to be submitted to the
        //runner instead of returning as the comment promised.
        if ( rechecks.isEmpty() ) {
            if ( onCompletion != null ) {
                onCompletion.execute();
            }
            return;
        }

        checkRunner.run( rechecks,
                         onStatus,
                         onCompletion );
        rechecks.clear();
    }

    /** Queues additional checks, cancelling any analysis currently running. */
    public void addChecks( final Set<Check> checks ) {
        //Ensure active analysis is cancelled
        cancelExistingAnalysis();

        //Add new checks
        rechecks.addAll( checks );
    }

    /** @return true when no checks are queued */
    public boolean isEmpty() {
        return rechecks.isEmpty();
    }

    /** Drops the checks owned by the removed rule, cancelling any running analysis. */
    public void remove( final RuleInspector removedRuleInspector ) {
        //Ensure active analysis is cancelled
        cancelExistingAnalysis();

        final Set<Check> checks = removedRuleInspector.clearChecks();
        rechecks.removeAll( checks );
    }

    public void cancelExistingAnalysis() {
        checkRunner.cancelExistingAnalysis();
    }
}
| apache-2.0 |
vjuranek/radargun | core/src/main/java/org/radargun/SlaveBase.java | 7265 | package org.radargun;
import java.io.IOException;
import java.util.Map;
import org.radargun.config.Cluster;
import org.radargun.config.Configuration;
import org.radargun.config.InitHelper;
import org.radargun.config.PropertyHelper;
import org.radargun.config.Scenario;
import org.radargun.logging.Log;
import org.radargun.logging.LogFactory;
import org.radargun.reporting.Timeline;
import org.radargun.stages.ScenarioCleanupStage;
import org.radargun.state.SlaveState;
import org.radargun.traits.TraitHelper;
import org.radargun.utils.TimeService;
/**
* Base class for both standalone slave and slave integrated in master node (local cluster).
*
* @author Radim Vansa <rvansa@redhat.com>
*/
public abstract class SlaveBase {
protected final Log log = LogFactory.getLog(getClass());
protected SlaveState state = new SlaveState();
protected Configuration configuration;
protected Cluster cluster;
protected Scenario scenario;
protected void scenarioLoop() throws IOException {
Cluster.Group group = cluster.getGroup(state.getSlaveIndex());
Configuration.Setup setup = configuration.getSetup(group.name);
state.setCluster(cluster);
state.setPlugin(setup.plugin);
state.setService(setup.service);
state.setTimeline(new Timeline(state.getSlaveIndex()));
Map<String, String> extras = getCurrentExtras(configuration, cluster);
ServiceHelper.setServiceContext(setup.plugin, configuration.name, state.getSlaveIndex());
Object service = ServiceHelper.createService(setup.plugin, setup.service, setup.getProperties(), extras);
Map<Class<?>, Object> traits = null;
try {
log.info("Service is " + service.getClass().getSimpleName() + PropertyHelper.toString(service));
traits = TraitHelper.retrieve(service);
state.setTraits(traits);
for (;;) {
int stageId = getNextStageId();
Map<String, Object> masterData = getNextMasterData();
for (Map.Entry<String, Object> entry : masterData.entrySet()) {
state.put(entry.getKey(), entry.getValue());
}
log.trace("Received stage ID " + stageId);
DistStage stage = (DistStage) scenario.getStage(stageId, state, extras, null);
if (stage instanceof ScenarioCleanupStage) {
// this is always the last stage and is ran in main thread (not sc-main)
break;
}
TraitHelper.InjectResult result = null;
DistStageAck response;
Exception initException = null;
try {
result = TraitHelper.inject(stage, traits);
InitHelper.init(stage);
stage.initOnSlave(state);
} catch (Exception e) {
log.error("Stage '" + stage.getName() + "' initialization has failed", e);
initException = e;
}
if (initException != null) {
response = new DistStageAck(state).error("Stage '" + stage.getName() + "' initialization has failed",
initException);
} else if (!stage.shouldExecute()) {
log.info("Stage '" + stage.getName() + "' should not be executed");
response = new DistStageAck(state);
} else if (result == TraitHelper.InjectResult.SKIP) {
log.info("Stage '" + stage.getName() + "' was skipped because it was missing some traits");
response = new DistStageAck(state);
} else if (result == TraitHelper.InjectResult.FAILURE) {
String message = "The stage '" + stage.getName()
+ "' was not executed because it missed some mandatory traits.";
log.error(message);
response = new DistStageAck(state).error(message, null);
} else {
String stageName = stage.getName();
log.info("Starting stage " + (log.isDebugEnabled() ? stage.toString() : stageName));
long start = TimeService.currentTimeMillis();
long end;
try {
response = stage.executeOnSlave();
end = TimeService.currentTimeMillis();
if (response == null) {
response = new DistStageAck(state).error("Stage returned null response", null);
}
log.info("Finished stage " + stageName);
response.setDuration(end - start);
} catch (Exception e) {
end = TimeService.currentTimeMillis();
log.error("Stage execution has failed", e);
response = new DistStageAck(state).error("Stage execution has failed", e);
} finally {
InitHelper.destroy(stage);
}
state.getTimeline().addEvent(Stage.STAGE, new Timeline.IntervalEvent(start, stageName, end - start));
}
sendResponse(response);
}
} finally {
if (traits != null) {
for (Object trait : traits.values()) {
InitHelper.destroy(trait);
}
}
InitHelper.destroy(service);
}
}
protected abstract int getNextStageId() throws IOException;
protected abstract Map<String, Object> getNextMasterData() throws IOException;
protected abstract void sendResponse(DistStageAck response) throws IOException;
protected void runCleanup() throws IOException {
DistStageAck response = null;
try {
Map<String, String> extras = getCurrentExtras(configuration, cluster);
ScenarioCleanupStage stage = (ScenarioCleanupStage) scenario.getStage(scenario.getStageCount() - 1, state,
extras, null);
InitHelper.init(stage);
stage.initOnSlave(state);
log.info("Starting stage " + (log.isDebugEnabled() ? stage.toString() : stage.getName()));
response = stage.executeOnSlave();
} catch (Exception e) {
log.error("Stage execution has failed", e);
response = new DistStageAck(state).error("Stage execution has failed", e);
} finally {
if (response == null) {
response = new DistStageAck(state).error("Stage returned null response", null);
}
sendResponse(response);
}
}
   /**
    * Provides stage property overrides ("extras") for the given configuration
    * and cluster; these are passed to {@code scenario.getStage(...)} when a
    * stage is instantiated.
    *
    * @param configuration current benchmark configuration
    * @param cluster       cluster this slave belongs to
    * @return map of extra properties to apply to the stage
    */
   protected abstract Map<String, String> getCurrentExtras(Configuration configuration, Cluster cluster);
// In RadarGun 2.0, we had to run each service in new thread in order to prevent
// classloader leaking through thread locals. This is not necessary anymore,
// but we still do checks in ScenarioCleanupStage
protected class ScenarioRunner extends Thread {
protected ScenarioRunner() {
super("sc-main");
}
@Override
public void run() {
try {
scenarioLoop();
} catch (IOException e) {
log.error("Communication with master failed", e);
e.printStackTrace();
ShutDownHook.exit(127);
} catch (Throwable t) {
log.error("Unexpected error in scenario", t);
t.printStackTrace();
ShutDownHook.exit(127);
}
}
}
}
| apache-2.0 |
jkacer/pac4j | pac4j-core/src/test/java/org/pac4j/core/authorization/checker/DefaultAuthorizationCheckerTests.java | 13408 | package org.pac4j.core.authorization.checker;
import org.junit.Before;
import org.junit.Test;
import org.pac4j.core.authorization.authorizer.Authorizer;
import org.pac4j.core.authorization.authorizer.RequireAnyRoleAuthorizer;
import org.pac4j.core.authorization.authorizer.csrf.DefaultCsrfTokenGenerator;
import org.pac4j.core.context.*;
import org.pac4j.core.exception.TechnicalException;
import org.pac4j.core.profile.AnonymousProfile;
import org.pac4j.core.profile.CommonProfile;
import org.pac4j.core.util.TestsConstants;
import java.util.*;
import java.util.stream.Collectors;
import static org.junit.Assert.*;
import static org.pac4j.core.context.HttpConstants.*;
/**
 * Tests the {@link DefaultAuthorizationChecker}.
 *
 * <p>Covers named-authorizer lookup (including trimming/case-insensitivity),
 * the list-based overloads, the built-in security-header authorizers and the
 * CSRF token/check authorizers.</p>
 *
 * @author Jerome Leleu
 * @since 1.8.0
 */
public final class DefaultAuthorizationCheckerTests implements TestsConstants {
    private final DefaultAuthorizationChecker checker = new DefaultAuthorizationChecker();
    private List<CommonProfile> profiles;
    private CommonProfile profile;
    @Before
    public void setUp() {
        profile = new CommonProfile();
        profiles = new ArrayList<>();
        profiles.add(profile);
    }
    /** Grants access iff the first profile's id equals {@code VALUE}. */
    private static class IdAuthorizer implements Authorizer<CommonProfile> {
        @Override
        public boolean isAuthorized(final WebContext context, final List<CommonProfile> profiles) {
            return VALUE.equals(profiles.get(0).getId());
        }
    }
    @Test
    public void testBlankAuthorizerNameAProfile() {
        assertTrue(checker.isAuthorized(null, profiles, null, null));
    }
    @Test
    public void testOneExistingAuthorizerProfileMatch() {
        profile.setId(VALUE);
        final Map<String, Authorizer> authorizers = new HashMap<>();
        authorizers.put(NAME, new IdAuthorizer());
        assertTrue(checker.isAuthorized(null, profiles, NAME, authorizers));
    }
    @Test
    public void testOneExistingAuthorizerProfileDoesNotMatch() {
        internalTestOneExistingAuthorizerProfileDoesNotMatch(NAME);
    }
    @Test
    public void testOneExistingAuthorizerProfileDoesNotMatchCasTrim() {
        // Authorizer names are matched case-insensitively and trimmed.
        internalTestOneExistingAuthorizerProfileDoesNotMatch(" NaME ");
    }
    private void internalTestOneExistingAuthorizerProfileDoesNotMatch(final String name) {
        final Map<String, Authorizer> authorizers = new HashMap<>();
        authorizers.put(NAME, new IdAuthorizer());
        assertFalse(checker.isAuthorized(null, profiles, name, authorizers));
    }
    @Test(expected = TechnicalException.class)
    public void testOneAuthorizerDoesNotExist() {
        final Map<String, Authorizer> authorizers = new HashMap<>();
        authorizers.put(NAME, new IdAuthorizer());
        checker.isAuthorized(null, profiles, VALUE, authorizers);
    }
    @Test
    public void testTwoExistingAuthorizerProfileMatch() {
        profile.setId(VALUE);
        profile.addRole(ROLE);
        final Map<String, Authorizer> authorizers = new HashMap<>();
        authorizers.put(NAME, new IdAuthorizer());
        authorizers.put(VALUE, new RequireAnyRoleAuthorizer(ROLE));
        assertTrue(checker.isAuthorized(null, profiles, NAME + Pac4jConstants.ELEMENT_SEPRATOR + VALUE, authorizers));
    }
    @Test
    public void testTwoExistingAuthorizerProfileDoesNotMatch() {
        profile.addRole(ROLE);
        final Map<String, Authorizer> authorizers = new HashMap<>();
        authorizers.put(NAME, new IdAuthorizer());
        authorizers.put(VALUE, new RequireAnyRoleAuthorizer(ROLE));
        assertFalse(checker.isAuthorized(null, profiles, NAME + Pac4jConstants.ELEMENT_SEPRATOR + VALUE, authorizers));
    }
    @Test(expected = TechnicalException.class)
    public void testTwoAuthorizerOneDoesNotExist() {
        final Map<String, Authorizer> authorizers = new HashMap<>();
        authorizers.put(NAME, new IdAuthorizer());
        checker.isAuthorized(null, profiles, NAME + Pac4jConstants.ELEMENT_SEPRATOR + VALUE, authorizers);
    }
    @Test(expected = TechnicalException.class)
    public void testNullAuthorizers() {
        // A null authorizers *list* is accepted...
        assertTrue(checker.isAuthorized(null, profiles, null));
        // ...but a named authorizer with a null map must fail.
        checker.isAuthorized(null, profiles, "auth1", null);
    }
    @Test
    public void testZeroAuthorizers() {
        assertTrue(checker.isAuthorized(null, profiles, new ArrayList<>()));
        assertTrue(checker.isAuthorized(null, profiles, "", new HashMap<>()));
    }
    @Test
    public void testOneExistingAuthorizerProfileMatch2() {
        profile.setId(VALUE);
        final List<Authorizer> authorizers = new ArrayList<>();
        authorizers.add(new IdAuthorizer());
        assertTrue(checker.isAuthorized(null, profiles, authorizers));
    }
    @Test
    public void testOneExistingAuthorizerProfileDoesNotMatch2() {
        final List<Authorizer> authorizers = new ArrayList<>();
        authorizers.add(new IdAuthorizer());
        assertFalse(checker.isAuthorized(null, profiles, authorizers));
    }
    @Test
    public void testTwoExistingAuthorizerProfileMatch2() {
        profile.setId(VALUE);
        profile.addRole(ROLE);
        final List<Authorizer> authorizers = new ArrayList<>();
        authorizers.add(new IdAuthorizer());
        authorizers.add(new RequireAnyRoleAuthorizer(ROLE));
        assertTrue(checker.isAuthorized(null, profiles, authorizers));
    }
    @Test
    public void testTwoExistingAuthorizerProfileDoesNotMatch2() {
        profile.addRole(ROLE);
        final List<Authorizer> authorizers = new ArrayList<>();
        authorizers.add(new IdAuthorizer());
        authorizers.add(new RequireAnyRoleAuthorizer(ROLE));
        assertFalse(checker.isAuthorized(null, profiles, authorizers));
    }
    @Test(expected = TechnicalException.class)
    public void testNullProfile() {
        checker.isAuthorized(null, null, new ArrayList<>());
    }
    @Test
    public void testHsts() {
        final MockWebContext context = MockWebContext.create();
        context.setScheme(SCHEME_HTTPS);
        checker.isAuthorized(context, profiles, DefaultAuthorizers.HSTS, null);
        assertNotNull(context.getResponseHeaders().get("Strict-Transport-Security"));
    }
    @Test
    public void testHstsCaseTrim() {
        final MockWebContext context = MockWebContext.create();
        context.setScheme(SCHEME_HTTPS);
        checker.isAuthorized(context, profiles, " HSTS ", null);
        assertNotNull(context.getResponseHeaders().get("Strict-Transport-Security"));
    }
    @Test
    public void testNosniff() {
        final MockWebContext context = MockWebContext.create();
        checker.isAuthorized(context, profiles, DefaultAuthorizers.NOSNIFF, null);
        assertNotNull(context.getResponseHeaders().get("X-Content-Type-Options"));
    }
    @Test
    public void testNoframe() {
        final MockWebContext context = MockWebContext.create();
        checker.isAuthorized(context, profiles, DefaultAuthorizers.NOFRAME, null);
        assertNotNull(context.getResponseHeaders().get("X-Frame-Options"));
    }
    @Test
    public void testXssprotection() {
        final MockWebContext context = MockWebContext.create();
        checker.isAuthorized(context, profiles, DefaultAuthorizers.XSSPROTECTION, null);
        assertNotNull(context.getResponseHeaders().get("X-XSS-Protection"));
    }
    @Test
    public void testNocache() {
        final MockWebContext context = MockWebContext.create();
        checker.isAuthorized(context, profiles, DefaultAuthorizers.NOCACHE, null);
        assertNotNull(context.getResponseHeaders().get("Cache-Control"));
        assertNotNull(context.getResponseHeaders().get("Pragma"));
        assertNotNull(context.getResponseHeaders().get("Expires"));
    }
    @Test
    public void testAllowAjaxRequests() {
        final MockWebContext context = MockWebContext.create();
        checker.isAuthorized(context, profiles, DefaultAuthorizers.ALLOW_AJAX_REQUESTS, null);
        assertEquals("*", context.getResponseHeaders().get(ACCESS_CONTROL_ALLOW_ORIGIN_HEADER));
        assertEquals("true", context.getResponseHeaders().get(ACCESS_CONTROL_ALLOW_CREDENTIALS_HEADER));
        final String methods = context.getResponseHeaders().get(ACCESS_CONTROL_ALLOW_METHODS_HEADER);
        final List<String> methodArray = Arrays.asList(methods.split(",")).stream().map(String::trim).collect(Collectors.toList());
        assertTrue(methodArray.contains(HTTP_METHOD.POST.name()));
        assertTrue(methodArray.contains(HTTP_METHOD.PUT.name()));
        assertTrue(methodArray.contains(HTTP_METHOD.DELETE.name()));
        assertTrue(methodArray.contains(HTTP_METHOD.OPTIONS.name()));
        assertTrue(methodArray.contains(HTTP_METHOD.GET.name()));
    }
    @Test
    public void testSecurityHeaders() {
        final MockWebContext context = MockWebContext.create();
        context.setScheme(SCHEME_HTTPS);
        checker.isAuthorized(context, profiles, DefaultAuthorizers.SECURITYHEADERS, null);
        assertNotNull(context.getResponseHeaders().get("Strict-Transport-Security"));
        assertNotNull(context.getResponseHeaders().get("X-Content-Type-Options"));
        // Fixed: this assertion previously duplicated the X-Content-Type-Options
        // check, leaving the frame-busting header unverified (cf. testNoframe).
        assertNotNull(context.getResponseHeaders().get("X-Frame-Options"));
        assertNotNull(context.getResponseHeaders().get("X-XSS-Protection"));
        assertNotNull(context.getResponseHeaders().get("Cache-Control"));
        assertNotNull(context.getResponseHeaders().get("Pragma"));
        assertNotNull(context.getResponseHeaders().get("Expires"));
    }
    @Test
    public void testCsrf() {
        final MockWebContext context = MockWebContext.create();
        assertTrue(checker.isAuthorized(context, profiles, DefaultAuthorizers.CSRF, null));
        assertNotNull(context.getRequestAttribute(Pac4jConstants.CSRF_TOKEN));
        assertNotNull(ContextHelper.getCookie(context.getResponseCookies(), Pac4jConstants.CSRF_TOKEN));
    }
    @Test
    public void testCsrfToken() {
        final MockWebContext context = MockWebContext.create();
        assertTrue(checker.isAuthorized(context, profiles, DefaultAuthorizers.CSRF_TOKEN, null));
        assertNotNull(context.getRequestAttribute(Pac4jConstants.CSRF_TOKEN));
        assertNotNull(ContextHelper.getCookie(context.getResponseCookies(), Pac4jConstants.CSRF_TOKEN));
    }
    @Test
    public void testCsrfPost() {
        // A POST without a token must be rejected by the CSRF authorizer...
        final MockWebContext context = MockWebContext.create().setRequestMethod(HTTP_METHOD.POST.name());
        assertFalse(checker.isAuthorized(context, profiles, DefaultAuthorizers.CSRF, null));
        assertNotNull(context.getRequestAttribute(Pac4jConstants.CSRF_TOKEN));
        assertNotNull(ContextHelper.getCookie(context.getResponseCookies(), Pac4jConstants.CSRF_TOKEN));
    }
    @Test
    public void testCsrfTokenPost() {
        // ...while the token-generating authorizer alone always accepts.
        final MockWebContext context = MockWebContext.create().setRequestMethod(HTTP_METHOD.POST.name());
        assertTrue(checker.isAuthorized(context, profiles, DefaultAuthorizers.CSRF_TOKEN, null));
        assertNotNull(context.getRequestAttribute(Pac4jConstants.CSRF_TOKEN));
        assertNotNull(ContextHelper.getCookie(context.getResponseCookies(), Pac4jConstants.CSRF_TOKEN));
    }
    @Test
    public void testCsrfPostTokenParameter() {
        final MockWebContext context = MockWebContext.create().setRequestMethod(HTTP_METHOD.POST.name());
        final DefaultCsrfTokenGenerator generator = new DefaultCsrfTokenGenerator();
        final String token = generator.get(context);
        context.addRequestParameter(Pac4jConstants.CSRF_TOKEN, token);
        assertTrue(checker.isAuthorized(context, profiles, DefaultAuthorizers.CSRF, null));
        assertNotNull(context.getRequestAttribute(Pac4jConstants.CSRF_TOKEN));
        assertNotNull(ContextHelper.getCookie(context.getResponseCookies(), Pac4jConstants.CSRF_TOKEN));
    }
    @Test
    public void testCsrfCheckPost() {
        final MockWebContext context = MockWebContext.create().setRequestMethod(HTTP_METHOD.POST.name());
        final DefaultCsrfTokenGenerator generator = new DefaultCsrfTokenGenerator();
        generator.get(context);
        assertFalse(checker.isAuthorized(context, profiles, DefaultAuthorizers.CSRF_CHECK, null));
    }
    @Test
    public void testCsrfCheckPostTokenParameter() {
        final MockWebContext context = MockWebContext.create().setRequestMethod(HTTP_METHOD.POST.name());
        final DefaultCsrfTokenGenerator generator = new DefaultCsrfTokenGenerator();
        final String token = generator.get(context);
        context.addRequestParameter(Pac4jConstants.CSRF_TOKEN, token);
        assertTrue(checker.isAuthorized(context, profiles, DefaultAuthorizers.CSRF_CHECK, null));
    }
    @Test
    public void testIsAnonymous() {
        profiles.clear();
        profiles.add(new AnonymousProfile());
        assertTrue(checker.isAuthorized(null, profiles, DefaultAuthorizers.IS_ANONYMOUS, null));
    }
    @Test
    public void testIsAuthenticated() {
        assertTrue(checker.isAuthorized(null, profiles, DefaultAuthorizers.IS_AUTHENTICATED, null));
    }
    @Test
    public void testIsFullyAuthenticated() {
        assertTrue(checker.isAuthorized(null, profiles, DefaultAuthorizers.IS_FULLY_AUTHENTICATED, null));
    }
    @Test
    public void testIsRemembered() {
        profile.setRemembered(true);
        assertTrue(checker.isAuthorized(null, profiles, DefaultAuthorizers.IS_REMEMBERED, null));
    }
}
| apache-2.0 |
coolcrowd/worker-service | src/main/java/edu/kit/ipd/crowdcontrol/workerservice/database/model/tables/records/NotificationTokenRecord.java | 5258 | /**
* This class is generated by jOOQ
*/
package edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.records;
import edu.kit.ipd.crowdcontrol.workerservice.database.model.tables.NotificationToken;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record4;
import org.jooq.Row4;
import org.jooq.impl.UpdatableRecordImpl;
/**
 * jOOQ updatable record for the <code>crowdcontrol.Notification_Token</code>
 * table (columns: id_notification_Token, result_id, result_token, notification).
 *
 * <p>This class is generated by jOOQ. Do not edit it by hand: re-run the jOOQ
 * code generator instead, or manual changes will be lost on regeneration.</p>
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.7.3"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class NotificationTokenRecord extends UpdatableRecordImpl<NotificationTokenRecord> implements Record4<Integer, Integer, String, Integer> {
    // Generated serial version id; regenerated together with the schema.
    private static final long serialVersionUID = -1512938238;
    /**
     * Setter for <code>crowdcontrol.Notification_Token.id_notification_Token</code>.
     */
    public void setIdNotificationToken(Integer value) {
        setValue(0, value);
    }
    /**
     * Getter for <code>crowdcontrol.Notification_Token.id_notification_Token</code>.
     */
    public Integer getIdNotificationToken() {
        return (Integer) getValue(0);
    }
    /**
     * Setter for <code>crowdcontrol.Notification_Token.result_id</code>.
     */
    public void setResultId(Integer value) {
        setValue(1, value);
    }
    /**
     * Getter for <code>crowdcontrol.Notification_Token.result_id</code>.
     */
    public Integer getResultId() {
        return (Integer) getValue(1);
    }
    /**
     * Setter for <code>crowdcontrol.Notification_Token.result_token</code>.
     */
    public void setResultToken(String value) {
        setValue(2, value);
    }
    /**
     * Getter for <code>crowdcontrol.Notification_Token.result_token</code>.
     */
    public String getResultToken() {
        return (String) getValue(2);
    }
    /**
     * Setter for <code>crowdcontrol.Notification_Token.notification</code>.
     */
    public void setNotification(Integer value) {
        setValue(3, value);
    }
    /**
     * Getter for <code>crowdcontrol.Notification_Token.notification</code>.
     */
    public Integer getNotification() {
        return (Integer) getValue(3);
    }
    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------
    /**
     * {@inheritDoc}
     */
    @Override
    public Record1<Integer> key() {
        return (Record1) super.key();
    }
    // -------------------------------------------------------------------------
    // Record4 type implementation
    // -------------------------------------------------------------------------
    /**
     * {@inheritDoc}
     */
    @Override
    public Row4<Integer, Integer, String, Integer> fieldsRow() {
        return (Row4) super.fieldsRow();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Row4<Integer, Integer, String, Integer> valuesRow() {
        return (Row4) super.valuesRow();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field1() {
        return NotificationToken.NOTIFICATION_TOKEN.ID_NOTIFICATION_TOKEN;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field2() {
        return NotificationToken.NOTIFICATION_TOKEN.RESULT_ID;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field3() {
        return NotificationToken.NOTIFICATION_TOKEN.RESULT_TOKEN;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Integer> field4() {
        return NotificationToken.NOTIFICATION_TOKEN.NOTIFICATION;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value1() {
        return getIdNotificationToken();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value2() {
        return getResultId();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public String value3() {
        return getResultToken();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public Integer value4() {
        return getNotification();
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public NotificationTokenRecord value1(Integer value) {
        setIdNotificationToken(value);
        return this;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public NotificationTokenRecord value2(Integer value) {
        setResultId(value);
        return this;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public NotificationTokenRecord value3(String value) {
        setResultToken(value);
        return this;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public NotificationTokenRecord value4(Integer value) {
        setNotification(value);
        return this;
    }
    /**
     * {@inheritDoc}
     */
    @Override
    public NotificationTokenRecord values(Integer value1, Integer value2, String value3, Integer value4) {
        value1(value1);
        value2(value2);
        value3(value3);
        value4(value4);
        return this;
    }
    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------
    /**
     * Create a detached NotificationTokenRecord
     */
    public NotificationTokenRecord() {
        super(NotificationToken.NOTIFICATION_TOKEN);
    }
    /**
     * Create a detached, initialised NotificationTokenRecord
     */
    public NotificationTokenRecord(Integer idNotificationToken, Integer resultId, String resultToken, Integer notification) {
        super(NotificationToken.NOTIFICATION_TOKEN);
        setValue(0, idNotificationToken);
        setValue(1, resultId);
        setValue(2, resultToken);
        setValue(3, notification);
    }
}
| apache-2.0 |
seanzwx/tmp | seatalk/im/im-client/src/main/java/com/sean/im/client/tray/TrayManager.java | 7308 | package com.sean.im.client.tray;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import com.sean.im.client.constant.Global;
import com.sean.im.client.core.ApplicationContext;
import com.sean.im.client.form.ChatForm;
import com.sean.im.client.form.MainForm;
import com.sean.im.client.form.flock.ChatRoomForm;
import com.sean.im.client.util.ChatFormCache;
import com.sean.im.commom.constant.StatusEnum;
import com.sean.im.commom.core.HttpUtil;
import com.sean.im.commom.core.Request;
import com.sean.im.commom.entity.Flock;
import com.sean.im.commom.entity.Friend;
import com.sean.im.commom.entity.Message;
/**
 * System tray manager: shows the application icon in the OS tray, reflects the
 * user's on-line status and blinks the icon while unread messages are pending.
 *
 * <p>Fields shared between the AWT event thread and the blink thread
 * ({@code trayThread}, {@code isLight}, {@code TrayThread.run}) are declared
 * {@code volatile} so that a stop request is actually seen by the blink loop.</p>
 *
 * @author sean
 */
public class TrayManager
{
    private static TrayIcon trayIcon;
    private SystemTray tray;
    // Written by the blink thread (clears itself on exit) and read/created by
    // startLight(); volatile for cross-thread visibility.
    private volatile TrayThread trayThread;
    private volatile boolean isLight = false;
    // Sentinel head id meaning "use the generic message icon" instead of an avatar.
    private static final int MSG = 0;
    private MenuItem show, exit;
    private static ImageIcon online = new ImageIcon(Global.Root + "resource/image/icon.png");
    private static ImageIcon leave = new ImageIcon(Global.Root + "resource/image/leave.png");
    private static ImageIcon offline = new ImageIcon(Global.Root + "resource/image/offline.png");
    private static ImageIcon curr = online;
    private static TrayManager instance = new TrayManager();

    private TrayManager()
    {
    }

    /** @return the process-wide singleton instance */
    public static TrayManager getInstance()
    {
        return instance;
    }

    /**
     * Updates the tray icon to reflect the given status.
     *
     * @param state one of {@link StatusEnum}.Online / Leave / Hide / OffLine
     *              (Hide and OffLine both show the offline icon)
     */
    public static void setStatus(int state)
    {
        switch (state)
        {
            case StatusEnum.Online:
                curr = online;
                break;
            case StatusEnum.Leave:
                curr = leave;
                break;
            case StatusEnum.Hide:
                curr = offline;
                break;
            case StatusEnum.OffLine:
                curr = offline;
                break;
        }
        if (trayIcon != null)
        {
            trayIcon.setImage(curr.getImage());
        }
    }

    /**
     * Installs the tray icon with its popup menu ("show main" / "exit") and the
     * double-click handler that pops pending messages.
     */
    public void init()
    {
        tray = SystemTray.getSystemTray();
        PopupMenu pop = new PopupMenu();
        show = new MenuItem("show main");
        exit = new MenuItem("exit");
        MenuItemListener ml = new MenuItemListener();
        show.addActionListener(ml);
        exit.addActionListener(ml);
        pop.add(show);
        pop.add(exit);
        trayIcon = new TrayIcon(curr.getImage(), "Seatalk", pop);
        trayIcon.setImageAutoSize(true);
        try
        {
            tray.add(trayIcon);
        }
        catch (Exception e)
        {
            // No logger is available here; at least keep the failure visible.
            e.printStackTrace();
        }
        trayIcon.addMouseListener(new MouseClickListener());
    }

    /**
     * Starts blinking the tray icon with the head image identified by
     * {@code headId}, or the generic message icon when {@code headId == MSG}.
     * No-op if blinking is already active.
     */
    public void startLight(long headId)
    {
        if (!isLight)
        {
            isLight = true;
            // Short pause so a preceding icon change is visible before blinking.
            sleepQuietly(600);
            if (this.trayThread == null)
            {
                trayThread = new TrayThread(headId);
                new Thread(trayThread).start();
            }
        }
    }

    /**
     * Starts blinking the tray icon with the head image loaded from the given
     * file path. No-op if blinking is already active.
     */
    public void startLight(String headPath)
    {
        if (!isLight)
        {
            isLight = true;
            // Short pause so a preceding icon change is visible before blinking.
            sleepQuietly(600);
            if (this.trayThread == null)
            {
                trayThread = new TrayThread(headPath);
                new Thread(trayThread).start();
            }
        }
    }

    /** Stops the blinking, restoring the current status icon. */
    public void shutdownLight()
    {
        if (isLight)
        {
            isLight = false;
            if (this.trayThread != null)
            {
                trayThread.shutdown();
            }
        }
    }

    /**
     * Sleeps for the given number of milliseconds. If interrupted, re-asserts
     * the thread's interrupt flag instead of swallowing the interruption
     * (the original code called the static Thread.sleep via an instance and
     * discarded the InterruptedException).
     */
    private static void sleepQuietly(long millis)
    {
        try
        {
            Thread.sleep(millis);
        }
        catch (InterruptedException e)
        {
            Thread.currentThread().interrupt();
        }
    }

    /** Handles the tray popup menu: show the main window or exit the client. */
    private class MenuItemListener implements ActionListener
    {
        @Override
        public void actionPerformed(ActionEvent e)
        {
            if (e.getSource() == show)
            {
                MainForm.FORM.setVisible(true);
                MainForm.FORM.setState(JFrame.NORMAL);
            }
            else if (e.getSource() == exit)
            {
                // Tell the server we are leaving before terminating the JVM.
                Request request = new Request("ExitAction");
                HttpUtil.requestBlock(request, null);
                System.exit(0);
            }
        }
    }

    /**
     * On double-click: pops the oldest pending message plus all queued messages
     * from the same sender/flock, opens the matching chat window, and restarts
     * blinking if more messages remain; with an empty queue it simply restores
     * the main window.
     */
    private class MouseClickListener extends MouseAdapter
    {
        @Override
        public void mouseClicked(MouseEvent e)
        {
            if (e.getClickCount() == 2)
            {
                List<Message> msgs = ApplicationContext.CTX.getMessageQueue();
                List<Message> topMsgs = new ArrayList<Message>(msgs.size());
                Message tmp;
                // If there are unread messages
                if (!msgs.isEmpty())
                {
                    final Message top = msgs.remove(0);
                    topMsgs.add(top);
                    // Plain chat message (text, image or voice)
                    if (top.isChatMessage())
                    {
                        // Also collect the queued chat messages from the same sender
                        for (int i = 0; i < msgs.size(); i++)
                        {
                            tmp = msgs.get(i);
                            if (tmp.getSenderId() == top.getSenderId() && (tmp.isChatMessage()))
                            {
                                topMsgs.add(msgs.remove(i));
                                i--;
                            }
                        }
                        // Open the chat window and display the messages
                        Friend friend = MainForm.FORM.getFriendList().getFriendByUserId(top.getSenderId());
                        ChatForm chatForm = ChatFormCache.getChatForm(friend);
                        chatForm.open();
                        for (Message item : topMsgs)
                        {
                            chatForm.appendRightMessage(item, ApplicationContext.getSender(friend));
                        }
                    }
                    // Flock (group) message
                    else if (top.isFlockChatMessage())
                    {
                        // Also collect the queued chat messages from the same flock
                        for (int i = 0; i < msgs.size(); i++)
                        {
                            tmp = msgs.get(i);
                            if (tmp.getFlockId() == top.getFlockId() && (tmp.isChatMessage()))
                            {
                                topMsgs.add(msgs.remove(i));
                                i--;
                            }
                        }
                        // Open the chat-room window and display the messages
                        Flock flock = MainForm.FORM.getFlockList().getFlock(top.getFlockId());
                        ChatRoomForm room = ChatFormCache.getChatRoomForm(flock);
                        room.open();
                        room.initData();
                        for (Message item : topMsgs)
                        {
                            room.appendRightMessage(item, ApplicationContext.getSender(room.getFlockMember(item.getSenderId())));
                        }
                    }
                    else
                    {
                        ApplicationContext.CTX.doMessage(top);
                    }
                    // Stop blinking for the handled messages
                    shutdownLight();
                    if (msgs.size() > 0)
                    {
                        Message top1 = msgs.get(0);
                        if (top1.isChatMessage())
                        {
                            Friend friend = MainForm.FORM.getFriendList().getFriendByUserId(top1.getSenderId());
                            startLight(friend.getHead());
                        }
                        else
                        {
                            startLight(0);
                        }
                    }
                }
                else
                {
                    MainForm.FORM.setVisible(true);
                    MainForm.FORM.setState(JFrame.NORMAL);
                }
            }
        }
    }

    /**
     * Alternates the tray icon between a head image and the blink background
     * every 500 ms until {@link #shutdown()} is called; then restores the
     * current status icon and unregisters itself.
     */
    private class TrayThread implements Runnable
    {
        private int flag = 0;
        // Read by the blink loop, written by shutdown() from another thread.
        private volatile boolean run = true;
        private ImageIcon head, bg;

        public TrayThread(long headId)
        {
            if (headId == MSG)
            {
                head = new ImageIcon(Global.Root + "resource/image/msg.png");
            }
            else
            {
                head = new ImageIcon(Global.Root + "resource/image/head/" + headId + ".jpg");
            }
            bg = new ImageIcon(Global.Root + "resource/image/tray_bg.png");
        }

        public TrayThread(String headPath)
        {
            head = new ImageIcon(headPath);
            bg = new ImageIcon(Global.Root + "resource/image/tray_bg.png");
        }

        public void shutdown()
        {
            run = false;
        }

        @Override
        public void run()
        {
            while (run)
            {
                if (flag % 2 == 0)
                {
                    trayIcon.setImage(head.getImage());
                }
                else
                {
                    trayIcon.setImage(bg.getImage());
                }
                try
                {
                    Thread.sleep(500);
                }
                catch (InterruptedException e)
                {
                    // Treat interruption as a stop request; restore the flag for callers.
                    Thread.currentThread().interrupt();
                    break;
                }
                flag++;
            }
            trayThread = null;
            trayIcon.setImage(curr.getImage());
        }
    }
}
| apache-2.0 |
mityung/XERUNG | Andriod/Xerung/Xerung/src/main/java/com/example/contactplusgroup/crop/CropImageView.java | 6349 | package com.example.contactplusgroup.crop;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.view.MotionEvent;
import java.util.ArrayList;
/**
 * ImageView that overlays one or more {@link HighlightView} crop rectangles and
 * lets the user move/resize them with touch gestures.
 */
public class CropImageView extends ImageViewTouchBase {
    ArrayList<HighlightView> highlightViews = new ArrayList<HighlightView>();
    HighlightView motionHighlightView;
    // NOTE(review): never assigned inside this class; presumably injected by
    // CropImageActivity after construction -- confirm, otherwise the cast in
    // onTouchEvent() throws a NullPointerException.
    Context context;
    private float lastX;
    private float lastY;
    private int motionEdge;
    // Pointer id captured on ACTION_DOWN so multi-touch cannot hijack the drag.
    private int validPointerId;

    public CropImageView(Context context) {
        super(context);
    }

    public CropImageView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public CropImageView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /** Re-applies the current image matrix to every highlight view and redraws it. */
    private void syncHighlightViews() {
        for (HighlightView hv : highlightViews) {
            hv.matrix.set(getUnrotatedMatrix());
            hv.invalidate();
        }
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        if (bitmapDisplayed.getBitmap() != null) {
            for (HighlightView hv : highlightViews) {
                hv.matrix.set(getUnrotatedMatrix());
                hv.invalidate();
                // Keep the focused crop rectangle centered after layout changes.
                if (hv.hasFocus()) {
                    centerBasedOnHighlightView(hv);
                }
            }
        }
    }

    @Override
    protected void zoomTo(float scale, float centerX, float centerY) {
        super.zoomTo(scale, centerX, centerY);
        syncHighlightViews();
    }

    @Override
    protected void zoomIn() {
        super.zoomIn();
        syncHighlightViews();
    }

    @Override
    protected void zoomOut() {
        super.zoomOut();
        syncHighlightViews();
    }

    @Override
    protected void postTranslate(float deltaX, float deltaY) {
        super.postTranslate(deltaX, deltaY);
        // Translation is incremental, so post it rather than resetting the matrix.
        for (HighlightView hv : highlightViews) {
            hv.matrix.postTranslate(deltaX, deltaY);
            hv.invalidate();
        }
    }

    @Override
    public boolean onTouchEvent(@NonNull MotionEvent event) {
        CropImageActivity cropImageActivity = (CropImageActivity) context;
        if (cropImageActivity.isSaving()) {
            return false;
        }
        switch (event.getAction()) {
        case MotionEvent.ACTION_DOWN:
            for (HighlightView hv : highlightViews) {
                int edge = hv.getHit(event.getX(), event.getY());
                if (edge != HighlightView.GROW_NONE) {
                    motionEdge = edge;
                    motionHighlightView = hv;
                    lastX = event.getX();
                    lastY = event.getY();
                    // Prevent multiple touches from interfering with crop area re-sizing
                    validPointerId = event.getPointerId(event.getActionIndex());
                    motionHighlightView.setMode((edge == HighlightView.MOVE)
                            ? HighlightView.ModifyMode.Move
                            : HighlightView.ModifyMode.Grow);
                    break;
                }
            }
            break;
        case MotionEvent.ACTION_UP:
            if (motionHighlightView != null) {
                centerBasedOnHighlightView(motionHighlightView);
                motionHighlightView.setMode(HighlightView.ModifyMode.None);
            }
            motionHighlightView = null;
            center();
            break;
        case MotionEvent.ACTION_MOVE:
            if (motionHighlightView != null && event.getPointerId(event.getActionIndex()) == validPointerId) {
                motionHighlightView.handleMotion(motionEdge, event.getX()
                        - lastX, event.getY() - lastY);
                lastX = event.getX();
                lastY = event.getY();
            }
            // If we're not zoomed then there's no point in even allowing the user to move the image around.
            // This call to center puts it back to the normalized location.
            if (getScale() == 1F) {
                center();
            }
            break;
        }
        return true;
    }

    // Pan the displayed image to make sure the cropping rectangle is visible.
    private void ensureVisible(HighlightView hv) {
        Rect r = hv.drawRect;
        int panDeltaX1 = Math.max(0, getLeft() - r.left);
        int panDeltaX2 = Math.min(0, getRight() - r.right);
        int panDeltaY1 = Math.max(0, getTop() - r.top);
        int panDeltaY2 = Math.min(0, getBottom() - r.bottom);
        int panDeltaX = panDeltaX1 != 0 ? panDeltaX1 : panDeltaX2;
        int panDeltaY = panDeltaY1 != 0 ? panDeltaY1 : panDeltaY2;
        if (panDeltaX != 0 || panDeltaY != 0) {
            panBy(panDeltaX, panDeltaY);
        }
    }

    // If the cropping rectangle's size changed significantly, change the
    // view's center and scale according to the cropping rectangle.
    private void centerBasedOnHighlightView(HighlightView hv) {
        Rect drawRect = hv.drawRect;
        float width = drawRect.width();
        float height = drawRect.height();
        float thisWidth = getWidth();
        float thisHeight = getHeight();
        // Target zoom so the crop rectangle occupies ~60% of the view.
        float z1 = thisWidth / width * .6F;
        float z2 = thisHeight / height * .6F;
        float zoom = Math.min(z1, z2);
        zoom = zoom * this.getScale();
        zoom = Math.max(1F, zoom);
        // Only animate when the zoom changes by more than 10%.
        if ((Math.abs(zoom - getScale()) / zoom) > .1) {
            float[] coordinates = new float[] { hv.cropRect.centerX(), hv.cropRect.centerY() };
            getUnrotatedMatrix().mapPoints(coordinates);
            zoomTo(zoom, coordinates[0], coordinates[1], 300F);
        }
        ensureVisible(hv);
    }

    @Override
    protected void onDraw(@NonNull Canvas canvas) {
        super.onDraw(canvas);
        for (HighlightView highlightView : highlightViews) {
            highlightView.draw(canvas);
        }
    }

    /** Registers a crop rectangle overlay and schedules a redraw. */
    public void add(HighlightView hv) {
        highlightViews.add(hv);
        invalidate();
    }
}
| apache-2.0 |
seanzwx/tmp | seatalk/im/im-friend/im-friend-impl/src/main/java/com/sean/im/friend/dic/UserDicImpl.java | 1009 | package com.sean.im.friend.dic;
import java.util.Map;
import com.sean.im.account.dic.UserDic;
import com.sean.im.friend.entity.UserInfoEntity;
import com.sean.im.friend.service.UserServiceImpl;
import com.sean.persist.dictionary.Dictionary;
import com.sean.persist.dictionary.DictionaryProviderConfig;
import com.sean.service.core.ApplicationContext;
/**
 * Dictionary provider that resolves a user id to the user's display attributes
 * (username, nickname, head, signature, status). Unknown ids leave the target
 * map untouched.
 *
 * @author sean
 */
@DictionaryProviderConfig(description = "UserDic implemention")
public class UserDicImpl extends Dictionary implements UserDic
{
	@Override
	public void getDicVal(Object id, Map<String, String> dic)
	{
		UserServiceImpl userService = ApplicationContext.CTX.getBean(UserServiceImpl.class);
		UserInfoEntity info = userService.getUserById((long) id);
		if (info == null)
		{
			// No such user: contribute nothing to the dictionary.
			return;
		}
		dic.put("username", info.getUsername());
		dic.put("nickname", info.getNickname());
		dic.put("head", String.valueOf(info.getHead()));
		dic.put("signature", info.getSignature());
		dic.put("status", String.valueOf(info.getStatus()));
	}
}
| apache-2.0 |
apache/bval | bval-jsr/src/main/java/org/apache/bval/jsr/valueextraction/IterableElementExtractor.java | 1266 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bval.jsr.valueextraction;
import javax.validation.valueextraction.ExtractedValue;
import javax.validation.valueextraction.ValueExtractor;
public class IterableElementExtractor implements ValueExtractor<Iterable<@ExtractedValue ?>> {
    /**
     * Feeds every element of the iterable to the receiver under the node name
     * {@code <iterable element>}.
     */
    @Override
    public void extractValues(Iterable<?> originalValue, ValueExtractor.ValueReceiver receiver) {
        for (Object element : originalValue) {
            receiver.iterableValue("<iterable element>", element);
        }
    }
}
| apache-2.0 |
jwagenleitner/incubator-groovy | src/main/java/org/codehaus/groovy/ast/ASTNode.java | 4567 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.ast;
import org.codehaus.groovy.util.ListHashMap;
import java.util.Map;
import java.util.Objects;
/**
* Base class for any AST node. This class supports basic information used in all nodes of the AST:
* <ul>
* <li> line and column number information. Usually a node represents a certain
* area in a text file determined by a starting position and an ending position.
* For nodes that do not represent this, this information will be -1. A node can
* also be configured in its line/col information using another node through
* setSourcePosition(otherNode).</li>
* <li> every node can store meta data. A phase operation or transform can use
* this to transport arbitrary information to another phase operation or
* transform. The only requirement is that the other phase operation or transform
* runs after the part storing the information. If the information transport is
* done it is strongly recommended to remove that meta data.</li>
 * <li> a text representation of this node through getText(). This was in the
* past used for assertion messages. Since the usage of power asserts this
* method will not be called for this purpose anymore and might be removed in
* future versions of Groovy</li>
* </ul>
*/
public class ASTNode implements NodeMetaDataHandler {
    // Start of the source span covered by this node; -1 when the node carries
    // no position information.
    private int lineNumber = -1;
    private int columnNumber = -1;
    // End of the source span covered by this node; -1 when unknown.
    private int lastLineNumber = -1;
    private int lastColumnNumber = -1;
    // Lazily populated per-node metadata (see NodeMetaDataHandler). Declared as
    // a raw Map, but getMetaDataMap() casts it to ListHashMap — passing any
    // other Map implementation to setMetaDataMap() will surface later as a
    // ClassCastException.
    private Map metaDataMap = null;
    /**
     * Dispatches this node to the given visitor. The base implementation
     * always throws; subclasses that participate in visiting must override it.
     */
    public void visit(GroovyCodeVisitor visitor) {
        throw new RuntimeException("No visit() method implemented for class: " + getClass().getName());
    }
    /**
     * Returns a placeholder textual representation; subclasses override this
     * with a real rendering of the node.
     */
    public String getText() {
        return "<not implemented yet for class: " + getClass().getName() + ">";
    }
    /** Returns the starting line number, or -1 if not set. */
    public int getLineNumber() {
        return lineNumber;
    }
    /** Sets the starting line number. */
    public void setLineNumber(int lineNumber) {
        this.lineNumber = lineNumber;
    }
    /** Returns the starting column number, or -1 if not set. */
    public int getColumnNumber() {
        return columnNumber;
    }
    /** Sets the starting column number. */
    public void setColumnNumber(int columnNumber) {
        this.columnNumber = columnNumber;
    }
    /** Returns the ending line number, or -1 if not set. */
    public int getLastLineNumber() {
        return lastLineNumber;
    }
    /** Sets the ending line number. */
    public void setLastLineNumber(int lastLineNumber) {
        this.lastLineNumber = lastLineNumber;
    }
    /** Returns the ending column number, or -1 if not set. */
    public int getLastColumnNumber() {
        return lastColumnNumber;
    }
    /** Sets the ending column number. */
    public void setLastColumnNumber(int lastColumnNumber) {
        this.lastColumnNumber = lastColumnNumber;
    }
    /**
     * Sets the source position using another ASTNode.
     * The sourcePosition consists of a line/column pair for
     * the start and a line/column pair for the end of the
     * expression or statement
     *
     * @param node - the node used to configure the position information
     */
    public void setSourcePosition(ASTNode node) {
        this.columnNumber = node.getColumnNumber();
        this.lastLineNumber = node.getLastLineNumber();
        this.lastColumnNumber = node.getLastColumnNumber();
        this.lineNumber = node.getLineNumber();
    }
    /**
     * Copies all node meta data from the other node to this one
     * @param other - the other node
     */
    public void copyNodeMetaData(ASTNode other) {
        copyNodeMetaData((NodeMetaDataHandler) other);
    }
    /** Returns the backing metadata map (may be null); cast to ListHashMap, see field note. */
    @Override
    public ListHashMap getMetaDataMap() {
        return (ListHashMap) metaDataMap;
    }
    /** Replaces the backing metadata map; expected to be a ListHashMap (see field note). */
    @Override
    public void setMetaDataMap(Map<?, ?> metaDataMap) {
        this.metaDataMap = metaDataMap;
    }
    /**
     * Identity-based equality: two AST nodes are equal only if they are the
     * same object.
     */
    @Override
    public boolean equals(Object o) {
        return this == o;
    }
    /**
     * NOTE(review): hashCode is derived from the mutable position fields while
     * equals(Object) is identity-based. The equal-objects-equal-hash contract
     * still holds, but a node stored in a hash-based collection becomes
     * unfindable if its position is mutated afterwards — confirm this pairing
     * is intentional.
     */
    @Override
    public int hashCode() {
        return Objects.hash(lineNumber, columnNumber, lastLineNumber, lastColumnNumber);
    }
}
| apache-2.0 |
dubex/concourse | concourse-integration-tests/src/test/java/com/cinchapi/concourse/FixedConnectionPoolTest.java | 2962 | /*
* Copyright (c) 2013-2017 Cinchapi Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cinchapi.concourse;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.junit.Assert;
import org.junit.Test;
import com.cinchapi.concourse.Concourse;
import com.cinchapi.concourse.ConnectionPool;
import com.cinchapi.concourse.FixedConnectionPool;
import com.cinchapi.concourse.util.StandardActions;
import com.google.common.collect.Lists;
/**
* Unit tests for {@link FixedConnectionPool}.
*
* @author Jeff Nelson
*/
public class FixedConnectionPoolTest extends ConnectionPoolTest {
    /**
     * Verifies that {@link ConnectionPool#request()} blocks while the pool is
     * exhausted and completes once a connection is released.
     *
     * <p>Note: the method name contains a historical typo ("Unitl"); it is kept
     * unchanged so any tooling that selects tests by name keeps working.
     */
    @Test
    public void testBlockUnitlConnectionAvailable() throws InterruptedException {
        List<Concourse> toReturn = Lists.newArrayList();
        for (int i = 0; i < POOL_SIZE; i++) {
            toReturn.add(connections.request());
        }
        Thread thread = new Thread(new Runnable() {
            @Override
            public void run() {
                System.out.println("Waiting for next available connection...");
                Concourse connection = connections.request();
                System.out.println("Finally acquired connection");
                connections.release(connection);
            }
        });
        thread.start();
        // Give the spawned thread a moment to start blocking inside request().
        StandardActions.wait(60, TimeUnit.MILLISECONDS);
        for (Concourse concourse : toReturn) {
            // must return all the connections so the pool can shutdown after
            // the test
            connections.release(concourse);
        }
        // BUG FIX: the original test never joined the spawned thread, so the
        // test method could finish (and the fixture could shut the pool down)
        // while the thread was still blocked in request(), making the test
        // flaky — and it never actually verified the blocked request completed.
        thread.join(TimeUnit.SECONDS.toMillis(10));
        Assert.assertFalse("Blocked request() did not complete after a connection was released",
                thread.isAlive());
    }
    /** With every connection checked out, the pool must report none available. */
    @Test
    public void testNotHasAvailableConnectionWhenAllInUse() {
        List<Concourse> toReturn = Lists.newArrayList();
        for (int i = 0; i < POOL_SIZE; i++) {
            toReturn.add(connections.request());
        }
        Assert.assertFalse(connections.hasAvailableConnection());
        for (Concourse concourse : toReturn) {
            // must return all the connections so the pool can shutdown after
            // the test
            connections.release(concourse);
        }
    }
    /** Supplies the fixed-size pool variant exercised by the base-class tests. */
    @Override
    protected ConnectionPool getConnectionPool() {
        return ConnectionPool.newFixedConnectionPool(SERVER_HOST, SERVER_PORT,
                USERNAME, PASSWORD, POOL_SIZE);
    }
    /** Same as {@link #getConnectionPool()} but bound to a specific environment. */
    @Override
    protected ConnectionPool getConnectionPool(String env) {
        return ConnectionPool.newFixedConnectionPool(SERVER_HOST, SERVER_PORT,
                USERNAME, PASSWORD, env, POOL_SIZE);
    }
}
| apache-2.0 |
Primosbookkeeping/gxp | java/test/com/google/gxp/compiler/functests/annotate/BuzAnnotation.java | 809 | /*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gxp.compiler.functests.annotate;
import java.lang.annotation.*;
/**
 * Marker annotation used by the annotation function tests.
 *
 * <p>Retained at runtime (so it can be read back reflectively) and applicable
 * to types only. Declares no elements.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface BuzAnnotation {
}
| apache-2.0 |
yanzhijun/jclouds-aliyun | providers/ultradns-ws/src/main/java/org/jclouds/ultradns/ws/binders/DirectionalRecordAndGeoGroupToXML.java | 5833 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.ultradns.ws.binders;
import static java.lang.String.format;
import java.util.Collection;
import java.util.Map;
import java.util.Map.Entry;
import org.jclouds.http.HttpRequest;
import org.jclouds.rest.MapBinder;
import org.jclouds.ultradns.ws.domain.DirectionalGroup;
import org.jclouds.ultradns.ws.domain.DirectionalPoolRecord;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
public class DirectionalRecordAndGeoGroupToXML implements MapBinder {
   /**
    * Serializes the {@code record} and {@code group} entries of
    * {@code postParams} into the UltraDNS add/update directional-record SOAP
    * payload and attaches it to the request. Also reads the optional
    * {@code poolId}, {@code dirPoolRecordId} and {@code groupId} entries.
    */
   @SuppressWarnings("unchecked")
   @Override
   public <R extends HttpRequest> R bindToRequest(R request, Map<String, Object> postParams) {
      DirectionalPoolRecord record = DirectionalPoolRecord.class.cast(postParams.get("record"));
      DirectionalGroup group = DirectionalGroup.class.cast(postParams.get("group"));
      String xml = toXML(postParams.get("poolId"), record, group, postParams.get("dirPoolRecordId"),
            postParams.get("groupId"));
      return (R) request.toBuilder().payload(xml).build();
   }
   // Outer envelopes for the add and update operations, and the two group
   // wrappers (define a brand-new group vs. assign an existing one by id).
   private static final String ADD_TEMPLATE = "<v01:addDirectionalPoolRecord><transactionID />%s%s%s</AddDirectionalRecordData></v01:addDirectionalPoolRecord>";
   private static final String UPDATE_TEMPLATE = "<v01:updateDirectionalPoolRecord><transactionID /><UpdateDirectionalRecordData directionalPoolRecordId=\"%s\">%s%s</UpdateDirectionalRecordData></v01:updateDirectionalPoolRecord>";
   private static final String NEWGROUP_TEMPLATE = "<GeolocationGroupData><GroupData groupingType=\"DEFINE_NEW_GROUP\" />%s</GeolocationGroupData>";
   private static final String EXISTINGGROUP_TEMPLATE = "<GeolocationGroupData><GroupData groupingType=\"ASSIGN_EXISTING_GROUP\" assignExistingGroupId=\"%s\" />%s</GeolocationGroupData>";
   /**
    * Builds the SOAP body. A null {@code poolId} selects an update of the
    * existing record {@code recordId}; otherwise an add into {@code poolId},
    * optionally attached to an existing group ({@code groupId}) or a newly
    * defined one ({@code group}).
    */
   @VisibleForTesting
   static String toXML(Object poolId, DirectionalPoolRecord record, DirectionalGroup group, Object recordId,
         Object groupId) {
      if (poolId == null) {
         if (group != null)
            return format(UPDATE_TEMPLATE, recordId, updateRecord(record), geo(group));
         return format(UPDATE_TEMPLATE, recordId, updateRecord(record), "");
      }
      // No group info at all: ask the service to create non-configured groups.
      if (group == null && groupId == null) {
         return format(
               ADD_TEMPLATE,
               format("<AddDirectionalRecordData directionalPoolId=\"%s\" createAllNonConfiguredGrp=\"true\">", poolId),
               createRecord(record), "");
      }
      String addRecordToPool = format("<AddDirectionalRecordData directionalPoolId=\"%s\">", poolId);
      if (groupId != null) {
         return format(ADD_TEMPLATE, addRecordToPool, createRecord(record), format(EXISTINGGROUP_TEMPLATE, groupId, ""));
      }
      return format(ADD_TEMPLATE, addRecordToPool, createRecord(record), format(NEWGROUP_TEMPLATE, geo(group)));
   }
   /** Renders a full record configuration element (type, TTL, flags, values) for adds. */
   private static String createRecord(DirectionalPoolRecord record) {
      StringBuilder recordConfig = new StringBuilder();
      recordConfig.append("<DirectionalRecordConfiguration recordType=\"").append(record.getType()).append('"');
      recordConfig.append(" TTL=\"").append(record.getTTL()).append('"');
      recordConfig.append(" noResponseRecord=\"").append(record.isNoResponseRecord()).append("\" >");
      recordConfig.append(values(record));
      recordConfig.append("</DirectionalRecordConfiguration>");
      return recordConfig.toString();
   }
   /**
    * don't pass type or is no response when updating
    */
   private static String updateRecord(DirectionalPoolRecord record) {
      return format("<DirectionalRecordConfiguration TTL=\"%s\" >%s</DirectionalRecordConfiguration>", record.getTTL(),
            values(record));
   }
   /** Renders the record's rdata as Info1Value/Info2Value/... attributes. */
   private static String values(DirectionalPoolRecord record) {
      StringBuilder values = new StringBuilder("<InfoValues");
      for (int i = 0; i < record.getRData().size(); i++) {
         // BUG FIX: rdata is free text (e.g. TXT record content) and used to be
         // interpolated verbatim; escape it so quotes and ampersands cannot
         // break out of the attribute value.
         values.append(' ').append("Info").append(i + 1).append("Value=").append('"')
               .append(xmlEscape(record.getRData().get(i))).append('"');
      }
      values.append(" />");
      return values.toString();
   }
   /** Renders the geolocation group details (name, description, regions/territories). */
   private static String geo(DirectionalGroup group) {
      StringBuilder groupData = new StringBuilder();
      // BUG FIX: group names, descriptions, region names and territory lists
      // are user-supplied free text; escape them so XML metacharacters cannot
      // corrupt the payload.
      groupData.append("<GeolocationGroupDetails groupName=\"").append(xmlEscape(group.getName())).append('"');
      if (group.getDescription().isPresent())
         groupData.append(" description=\"").append(xmlEscape(group.getDescription().get())).append('"');
      groupData.append(" >");
      for (Entry<String, Collection<String>> region : group.asMap().entrySet()) {
         groupData.append("<GeolocationGroupDefinitionData regionName=\"").append(xmlEscape(region.getKey())).append('"');
         groupData.append(" territoryNames=\"").append(xmlEscape(Joiner.on(';').join(region.getValue()))).append("\" />");
      }
      groupData.append("</GeolocationGroupDetails>");
      return groupData.toString();
   }
   /**
    * Escapes the five XML metacharacters so a value can be embedded safely in
    * an attribute value or element body.
    */
   private static String xmlEscape(String value) {
      StringBuilder escaped = new StringBuilder(value.length());
      for (int i = 0; i < value.length(); i++) {
         char c = value.charAt(i);
         switch (c) {
         case '&':
            escaped.append("&amp;");
            break;
         case '<':
            escaped.append("&lt;");
            break;
         case '>':
            escaped.append("&gt;");
            break;
         case '"':
            escaped.append("&quot;");
            break;
         case '\'':
            escaped.append("&apos;");
            break;
         default:
            escaped.append(c);
         }
      }
      return escaped.toString();
   }
   /** Map form only; see {@link #bindToRequest(HttpRequest, Map)}. */
   @Override
   public <R extends HttpRequest> R bindToRequest(R request, Object input) {
      throw new UnsupportedOperationException("use map form");
   }
}
| apache-2.0 |
apache/continuum | continuum-commons/src/main/java/org/apache/maven/continuum/utils/DefaultWorkingDirectoryService.java | 6128 | package org.apache.maven.continuum.utils;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.continuum.configuration.ConfigurationService;
import org.apache.maven.continuum.model.project.Project;
import org.codehaus.plexus.util.StringUtils;
import org.springframework.stereotype.Service;
import java.io.File;
import java.util.List;
import javax.annotation.Resource;
/**
 * Default {@link WorkingDirectoryService}: resolves the directory a project is
 * checked out into, deriving a relative path from the project id (and, for
 * projects checked out in a single directory, from the SCM URLs) whenever the
 * project has no working directory configured.
 *
 * @author <a href="mailto:trygvis@inamo.no">Trygve Laugstøl</a>
 */
@Service( "workingDirectoryService" )
public class DefaultWorkingDirectoryService
    implements WorkingDirectoryService
{
    // Supplies the global (base) working directory that relative project
    // directories are resolved against.
    @Resource
    private ConfigurationService configurationService;
    public void setConfigurationService( ConfigurationService configurationService )
    {
        this.configurationService = configurationService;
    }
    public ConfigurationService getConfigurationService()
    {
        return configurationService;
    }
    // ----------------------------------------------------------------------
    // WorkingDirectoryService Implementation
    // ----------------------------------------------------------------------
    /** Convenience overload: no SCM root, no sibling projects, path is stored on the project. */
    public File getWorkingDirectory( Project project )
    {
        return getWorkingDirectory( project, null, null );
    }
    /** Convenience overload: no SCM root, no sibling projects. */
    public File getWorkingDirectory( Project project, boolean shouldSet )
    {
        return getWorkingDirectory( project, null, null, shouldSet );
    }
    /**
     * @param project
     * @param projectScmRoot
     * @param projects projects under the same projectScmRoot
     * @return
     */
    public File getWorkingDirectory( Project project, String projectScmRoot, List<Project> projects )
    {
        return getWorkingDirectory( project, projectScmRoot, projects, true );
    }
    /**
     * Resolves the working directory for {@code project}, computing one when
     * none is configured, and (when {@code shouldSet}) storing the computed —
     * possibly relativized — path back on the project.
     *
     * @param project
     * @param projectScmRoot common SCM root URL shared by {@code projects}; may be null
     * @param projects projects under the same projectScmRoot
     * @param shouldSet whether the resolved path is written back to the project
     * @return the working directory; absolute when configured absolute, else
     *         resolved against the global working directory
     */
    public File getWorkingDirectory( Project project, String projectScmRoot, List<Project> projects, boolean shouldSet )
    {
        // TODO: Enable, this is what we really want
        // ContinuumProjectGroup projectGroup = project.getProjectGroup();
        //
        // return new File( projectGroup.getWorkingDirectory(),
        // project.getPath() );
        String workingDirectory = project.getWorkingDirectory();
        // Only derive a directory when the project has none configured.
        if ( project.getWorkingDirectory() == null || "".equals( project.getWorkingDirectory() ) )
        {
            if ( project.isCheckedOutInSingleDirectory() && projectScmRoot != null && !"".equals( projectScmRoot ) )
            {
                // All projects sharing this SCM root live under the root
                // project's directory; the project's position inside it is the
                // part of its SCM URL beyond the common root.
                Project rootProject = project;
                if ( projects != null )
                {
                    // the root project should have the lowest id since it's always added first
                    for ( Project projectUnderScmRoot : projects )
                    {
                        if ( projectUnderScmRoot.getId() < rootProject.getId() )
                        {
                            rootProject = projectUnderScmRoot;
                        }
                    }
                }
                // determine the path
                String projectScmUrl = project.getScmUrl();
                // NOTE(review): assumes StringUtils.differenceAt returns the
                // index at which the two URLs first differ (-1 when identical)
                // — confirm against the Plexus StringUtils documentation.
                int indexDiff = StringUtils.differenceAt( projectScmUrl, projectScmRoot );
                String pathToProject = "";
                if ( indexDiff != -1 )
                {
                    pathToProject = projectScmUrl.substring( indexDiff );
                }
                // Join "<rootProjectId>/<pathToProject>", avoiding a doubled
                // separator when the SCM-URL remainder already starts with one.
                if ( pathToProject.startsWith( "\\" ) || pathToProject.startsWith( "/" ) )
                {
                    workingDirectory = Integer.toString( rootProject.getId() ) + pathToProject;
                }
                else
                {
                    workingDirectory = Integer.toString( rootProject.getId() ) + File.separatorChar + pathToProject;
                }
            }
            else
            {
                // Simple case: each project gets a directory named by its id.
                workingDirectory = Integer.toString( project.getId() );
            }
        }
        if ( shouldSet )
        {
            project.setWorkingDirectory( workingDirectory );
        }
        File workDir;
        File projectWorkingDirectory = new File( workingDirectory );
        if ( projectWorkingDirectory.isAbsolute() )
        {
            // clean the project working directory path if it's a subdirectory of the global working directory
            // (the project keeps the relative form, but the absolute path is still returned)
            if ( projectWorkingDirectory.getAbsolutePath().startsWith(
                getConfigurationService().getWorkingDirectory().getAbsolutePath() ) )
            {
                String pwd = projectWorkingDirectory.getAbsolutePath().substring(
                    getConfigurationService().getWorkingDirectory().getAbsolutePath().length() );
                if ( pwd.startsWith( "/" ) || pwd.startsWith( "\\" ) )
                {
                    pwd = pwd.substring( 1 );
                }
                if ( shouldSet )
                {
                    project.setWorkingDirectory( pwd );
                }
            }
            workDir = projectWorkingDirectory;
        }
        else
        {
            // Relative paths are resolved against the global working directory.
            File baseWorkingDir = getConfigurationService().getWorkingDirectory();
            workDir = new File( baseWorkingDir, workingDirectory );
        }
        return workDir;
    }
}
| apache-2.0 |
xhoong/incubator-calcite | core/src/main/java/org/apache/calcite/plan/RelOptQuery.java | 4002 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.plan;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.CorrelationId;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A <code>RelOptQuery</code> represents a set of
* {@link RelNode relational expressions} which derive from the same
* <code>select</code> statement.
*/
public class RelOptQuery {
  //~ Static fields/initializers ---------------------------------------------

  /**
   * Prefix shared by the names of all correlating variables.
   */
  public static final String CORREL_PREFIX = CorrelationId.CORREL_PREFIX;

  //~ Instance fields --------------------------------------------------------

  /**
   * Registry of correlating variables: maps a name such as "$cor3" to the
   * {@link RelNode} that populates it.
   */
  final Map<String, RelNode> mapCorrelToRel;

  private final RelOptPlanner planner;

  /** Source of ordinals for freshly created correlating variables. */
  final AtomicInteger nextCorrel;

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a query.
   *
   * @param planner Planner
   */
  @Deprecated // to be removed before 2.0
  public RelOptQuery(RelOptPlanner planner) {
    this(planner, new AtomicInteger(0), new HashMap<>());
  }

  /** For use by RelOptCluster only. */
  RelOptQuery(RelOptPlanner planner, AtomicInteger nextCorrel,
      Map<String, RelNode> mapCorrelToRel) {
    this.planner = planner;
    this.nextCorrel = nextCorrel;
    this.mapCorrelToRel = mapCorrelToRel;
  }

  //~ Methods ----------------------------------------------------------------

  /**
   * Extracts the query-unique ordinal encoded in the name of a correlating
   * variable.
   *
   * @param correlName Name of correlating variable
   * @return Correlating variable ordinal
   */
  @Deprecated // to be removed before 2.0
  public static int getCorrelOrdinal(String correlName) {
    assert correlName.startsWith(CORREL_PREFIX);
    final String suffix = correlName.substring(CORREL_PREFIX.length());
    return Integer.parseInt(suffix);
  }

  /**
   * Creates a cluster that shares this query's planner and correlating
   * variable state.
   *
   * @param typeFactory Type factory
   * @param rexBuilder Expression builder
   * @return New cluster
   */
  @Deprecated // to be removed before 2.0
  public RelOptCluster createCluster(
      RelDataTypeFactory typeFactory,
      RexBuilder rexBuilder) {
    return new RelOptCluster(planner, typeFactory, rexBuilder, nextCorrel,
        mapCorrelToRel);
  }

  /**
   * Constructs a new name for a correlating variable, unique within the whole
   * query.
   *
   * @deprecated Use {@link RelOptCluster#createCorrel()}
   */
  @Deprecated // to be removed before 2.0
  public String createCorrel() {
    return CORREL_PREFIX + nextCorrel.getAndIncrement();
  }

  /**
   * Returns the relational expression registered for a correlating variable,
   * or null if none was registered.
   */
  public RelNode lookupCorrel(String name) {
    return mapCorrelToRel.get(name);
  }

  /**
   * Registers the {@link RelNode} that populates a correlating variable.
   */
  public void mapCorrel(
      String name,
      RelNode rel) {
    mapCorrelToRel.put(name, rel);
  }
}
// End RelOptQuery.java
| apache-2.0 |
royclarkson/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/web/servlet/error/ErrorMvcAutoConfigurationTests.java | 4738 | /*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web.servlet.error;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.web.servlet.DispatcherServletAutoConfiguration;
import org.springframework.boot.test.context.runner.WebApplicationContextRunner;
import org.springframework.boot.test.system.CapturedOutput;
import org.springframework.boot.test.system.OutputCaptureExtension;
import org.springframework.boot.web.error.ErrorAttributeOptions;
import org.springframework.boot.web.error.ErrorAttributeOptions.Include;
import org.springframework.boot.web.servlet.error.ErrorAttributes;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.web.context.request.RequestAttributes;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.handler.DispatcherServletWebRequest;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link ErrorMvcAutoConfiguration}.
*
* @author Brian Clozel
* @author Scott Frederick
*/
@ExtendWith(OutputCaptureExtension.class)
class ErrorMvcAutoConfigurationTests {
	// Minimal web context containing only the dispatcher-servlet and error-MVC
	// auto-configurations under test.
	private final WebApplicationContextRunner contextRunner = new WebApplicationContextRunner().withConfiguration(
			AutoConfigurations.of(DispatcherServletAutoConfiguration.class, ErrorMvcAutoConfiguration.class));
	/**
	 * The fallback "error" view must render the exception message and stack trace
	 * into the HTML body when the response has not been committed yet.
	 */
	@Test
	void renderContainsViewWithExceptionDetails() {
		this.contextRunner.run((context) -> {
			View errorView = context.getBean("error", View.class);
			ErrorAttributes errorAttributes = context.getBean(ErrorAttributes.class);
			DispatcherServletWebRequest webRequest = createWebRequest(new IllegalStateException("Exception message"),
					false);
			errorView.render(errorAttributes.getErrorAttributes(webRequest, withAllOptions()), webRequest.getRequest(),
					webRequest.getResponse());
			assertThat(webRequest.getResponse().getContentType()).isEqualTo("text/html;charset=UTF-8");
			String responseString = ((MockHttpServletResponse) webRequest.getResponse()).getContentAsString();
			assertThat(responseString).contains(
					"<p>This application has no explicit mapping for /error, so you are seeing this as a fallback.</p>")
					.contains("<div>Exception message</div>")
					.contains("<div style='white-space:pre-wrap;'>java.lang.IllegalStateException");
		});
	}
	/**
	 * When the response is already committed, rendering is skipped and a message
	 * explaining the (possibly wrong) status code is logged instead.
	 */
	@Test
	void renderWhenAlreadyCommittedLogsMessage(CapturedOutput output) {
		this.contextRunner.run((context) -> {
			View errorView = context.getBean("error", View.class);
			ErrorAttributes errorAttributes = context.getBean(ErrorAttributes.class);
			DispatcherServletWebRequest webRequest = createWebRequest(new IllegalStateException("Exception message"),
					true);
			errorView.render(errorAttributes.getErrorAttributes(webRequest, withAllOptions()), webRequest.getRequest(),
					webRequest.getResponse());
			assertThat(output).contains("Cannot render error page for request [/path] "
					+ "and exception [Exception message] as the response has "
					+ "already been committed. As a result, the response may have the wrong status code.");
		});
	}
	// Builds a mock GET /path request/response pair carrying the given exception
	// under the standard servlet error attributes. When committed is true, the
	// response is flagged committed and its writer/stream access disabled, which
	// forces the error view down its "already committed" path.
	private DispatcherServletWebRequest createWebRequest(Exception ex, boolean committed) {
		MockHttpServletRequest request = new MockHttpServletRequest("GET", "/path");
		MockHttpServletResponse response = new MockHttpServletResponse();
		DispatcherServletWebRequest webRequest = new DispatcherServletWebRequest(request, response);
		webRequest.setAttribute("javax.servlet.error.exception", ex, RequestAttributes.SCOPE_REQUEST);
		webRequest.setAttribute("javax.servlet.error.request_uri", "/path", RequestAttributes.SCOPE_REQUEST);
		response.setCommitted(committed);
		response.setOutputStreamAccessAllowed(!committed);
		response.setWriterAccessAllowed(!committed);
		return webRequest;
	}
	// Requests every optional error attribute: exception class, stack trace,
	// message and binding errors.
	private ErrorAttributeOptions withAllOptions() {
		return ErrorAttributeOptions.of(Include.EXCEPTION, Include.STACK_TRACE, Include.MESSAGE,
				Include.BINDING_ERRORS);
	}
}
| apache-2.0 |