| repo_name (string, 5–108 chars) | path (string, 6–333 chars) | size (string, 1–6 chars) | content (string, 4–977k chars) | license (15 classes) |
|---|---|---|---|---|
Alexey1Gavrilov/dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/errors/ErrorEntityWriterTest.java | 2886 | package io.dropwizard.jersey.errors;
import io.dropwizard.jersey.AbstractJerseyTest;
import io.dropwizard.jersey.DropwizardResourceConfig;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletProperties;
import org.glassfish.jersey.test.DeploymentContext;
import org.glassfish.jersey.test.ServletDeploymentContext;
import org.glassfish.jersey.test.TestProperties;
import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory;
import org.glassfish.jersey.test.spi.TestContainerException;
import org.glassfish.jersey.test.spi.TestContainerFactory;
import org.junit.jupiter.api.Test;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
public class ErrorEntityWriterTest extends AbstractJerseyTest {

    /**
     * Resource config that registers an anonymous {@link ErrorEntityWriter} which renders
     * {@link ErrorMessage} entities as a minimal HTML page for {@code text/html} requests.
     */
    public static class ErrorEntityWriterTestResourceConfig extends DropwizardResourceConfig {
        public ErrorEntityWriterTestResourceConfig() {
            super();
            property(TestProperties.CONTAINER_PORT, "0");
            register(DefaultLoggingExceptionMapper.class);
            register(DefaultJacksonMessageBodyProvider.class);
            register(ExceptionResource.class);
            register(new ErrorEntityWriter<ErrorMessage, String>(MediaType.TEXT_HTML_TYPE, String.class) {
                @Override
                protected String getRepresentation(ErrorMessage entity) {
                    return "<!DOCTYPE html><html><body>" + entity.getMessage() + "</body></html>";
                }
            });
        }
    }

    @Override
    protected TestContainerFactory getTestContainerFactory() throws TestContainerException {
        return new GrizzlyWebTestContainerFactory();
    }

    @Override
    protected DeploymentContext configureDeployment() {
        // Deploy through the servlet container so the error writer is exercised end-to-end.
        final ResourceConfig config = new ErrorEntityWriterTestResourceConfig();
        return ServletDeploymentContext.builder(config)
                .initParam(ServletProperties.JAXRS_APPLICATION_CLASS,
                        ErrorEntityWriterTestResourceConfig.class.getName())
                .build();
    }

    @Test
    public void formatsErrorsAsHtml() {
        // The resource is expected to fail with a 400; capture the exception for inspection.
        WebApplicationException caught = null;
        try {
            target("/exception/html-exception")
                    .request(MediaType.TEXT_HTML_TYPE)
                    .get(String.class);
        } catch (WebApplicationException e) {
            caught = e;
        }
        if (caught == null) {
            failBecauseExceptionWasNotThrown(WebApplicationException.class);
        }
        final Response response = caught.getResponse();
        assertThat(response.getStatus()).isEqualTo(400);
        assertThat(response.getMediaType()).isEqualTo(MediaType.TEXT_HTML_TYPE);
        assertThat(response.readEntity(String.class))
                .isEqualTo("<!DOCTYPE html><html><body>BIFF</body></html>");
    }
}
| apache-2.0 |
intrigus/VisEditor | Plugins/SpineRuntime/src/com/esotericsoftware/spine/attachments/Attachment.java | 2037 | /*
* Spine Runtimes Software License
* Version 2.3
*
* Copyright (c) 2013-2015, Esoteric Software
* All rights reserved.
*
* You are granted a perpetual, non-exclusive, non-sublicensable and
* non-transferable license to use, install, execute and perform the Spine
* Runtimes Software (the "Software") and derivative works solely for personal
* or internal use. Without the written permission of Esoteric Software (see
* Section 2 of the Spine Software License Agreement), you may not (a) modify,
* translate, adapt or otherwise create derivative works, improvements of the
* Software or develop new applications using the Software or (b) remove,
* delete, alter or obscure any trademarks or any copyright, trademark, patent
* or other intellectual property or proprietary rights notices on or in the
* Software, including any copy thereof. Redistributions in binary or source
* form must include this license and terms.
*
* THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.esotericsoftware.spine.attachments;
abstract public class Attachment {
final String name;
public Attachment (String name) {
if (name == null) throw new IllegalArgumentException("name cannot be null.");
this.name = name;
}
public String getName () {
return name;
}
public String toString () {
return getName();
}
}
| apache-2.0 |
msebire/intellij-community | python/gen/com/jetbrains/python/console/protocol/RowHeader.java | 11511 | /**
* Autogenerated by Thrift Compiler (0.11.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.jetbrains.python.console.protocol;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.11.0)", date = "2018-08-07")
// NOTE(review): this class is marked autogenerated by the Thrift compiler ("DO NOT EDIT");
// the comments below are review annotations only — make real changes in the .thrift IDL
// and regenerate. The struct carries a single String field, "label" (field id 1).
public class RowHeader implements org.apache.thrift.TBase<RowHeader, RowHeader._Fields>, java.io.Serializable, Cloneable, Comparable<RowHeader> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RowHeader");
  private static final org.apache.thrift.protocol.TField LABEL_FIELD_DESC = new org.apache.thrift.protocol.TField("label", org.apache.thrift.protocol.TType.STRING, (short)1);
  // Two wire encodings: the standard field-tagged scheme and the compact tuple scheme.
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new RowHeaderStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new RowHeaderTupleSchemeFactory();
  // "unset" is represented by null; see isSetLabel()/unsetLabel().
  public java.lang.String label; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    LABEL((short)1, "label");
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // LABEL
          return LABEL;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final java.lang.String _fieldName;
    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Field metadata exposed to Thrift's reflection/meta-data utilities, keyed by _Fields.
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.LABEL, new org.apache.thrift.meta_data.FieldMetaData("label", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RowHeader.class, metaDataMap);
  }
  public RowHeader() {
  }
  public RowHeader(
    java.lang.String label)
  {
    this();
    this.label = label;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public RowHeader(RowHeader other) {
    if (other.isSetLabel()) {
      this.label = other.label;
    }
  }
  public RowHeader deepCopy() {
    return new RowHeader(this);
  }
  @Override
  public void clear() {
    this.label = null;
  }
  public java.lang.String getLabel() {
    return this.label;
  }
  // Fluent setter: returns this for chaining.
  public RowHeader setLabel(java.lang.String label) {
    this.label = label;
    return this;
  }
  public void unsetLabel() {
    this.label = null;
  }
  /** Returns true if field label is set (has been assigned a value) and false otherwise */
  public boolean isSetLabel() {
    return this.label != null;
  }
  public void setLabelIsSet(boolean value) {
    if (!value) {
      this.label = null;
    }
  }
  public void setFieldValue(_Fields field, java.lang.Object value) {
    switch (field) {
    case LABEL:
      if (value == null) {
        unsetLabel();
      } else {
        setLabel((java.lang.String)value);
      }
      break;
    }
  }
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case LABEL:
      return getLabel();
    }
    throw new java.lang.IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case LABEL:
      return isSetLabel();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that == null)
      return false;
    if (that instanceof RowHeader)
      return this.equals((RowHeader)that);
    return false;
  }
  // Two RowHeaders are equal when both have label unset, or both set to equal strings.
  public boolean equals(RowHeader that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    boolean this_present_label = true && this.isSetLabel();
    boolean that_present_label = true && that.isSetLabel();
    if (this_present_label || that_present_label) {
      if (!(this_present_label && that_present_label))
        return false;
      if (!this.label.equals(that.label))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetLabel()) ? 131071 : 524287);
    if (isSetLabel())
      hashCode = hashCode * 8191 + label.hashCode();
    return hashCode;
  }
  // Orders first by whether label is set, then by the label value itself.
  @Override
  public int compareTo(RowHeader other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.valueOf(isSetLabel()).compareTo(other.isSetLabel());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetLabel()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.label, other.label);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("RowHeader(");
    boolean first = true;
    sb.append("label:");
    if (this.label == null) {
      sb.append("null");
    } else {
      sb.append(this.label);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  // No required fields and no sub-structs, so this generated check is intentionally empty.
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is delegated to the Thrift compact protocol; TException is wrapped as IOException.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class RowHeaderStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public RowHeaderStandardScheme getScheme() {
      return new RowHeaderStandardScheme();
    }
  }
  private static class RowHeaderStandardScheme extends org.apache.thrift.scheme.StandardScheme<RowHeader> {
    // Reads field-tagged wire data; unknown or mistyped fields are skipped for forward compatibility.
    public void read(org.apache.thrift.protocol.TProtocol iprot, RowHeader struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // LABEL
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.label = iprot.readString();
              struct.setLabelIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, RowHeader struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.label != null) {
        oprot.writeFieldBegin(LABEL_FIELD_DESC);
        oprot.writeString(struct.label);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class RowHeaderTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public RowHeaderTupleScheme getScheme() {
      return new RowHeaderTupleScheme();
    }
  }
  // Tuple scheme: a presence BitSet followed by the set fields only (more compact, less tolerant).
  private static class RowHeaderTupleScheme extends org.apache.thrift.scheme.TupleScheme<RowHeader> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, RowHeader struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetLabel()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetLabel()) {
        oprot.writeString(struct.label);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, RowHeader struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.label = iprot.readString();
        struct.setLabelIsSet(true);
      }
    }
  }
  // Selects the serialization scheme matching the protocol's configured scheme class.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
| apache-2.0 |
sdole/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/DeleteNetworkAclEntryRequestMarshaller.java | 2365 | /*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.internal.ListWithAutoConstructFlag;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
* Delete Network Acl Entry Request Marshaller
*/
/**
 * Marshals a {@link DeleteNetworkAclEntryRequest} into an EC2 Query-protocol request:
 * the fixed Action/Version parameters plus each non-null request attribute.
 */
public class DeleteNetworkAclEntryRequestMarshaller implements Marshaller<Request<DeleteNetworkAclEntryRequest>, DeleteNetworkAclEntryRequest> {

    public Request<DeleteNetworkAclEntryRequest> marshall(DeleteNetworkAclEntryRequest input) {
        if (input == null) {
            throw new AmazonClientException("Invalid argument passed to marshall(...)");
        }

        final Request<DeleteNetworkAclEntryRequest> marshalled =
                new DefaultRequest<DeleteNetworkAclEntryRequest>(input, "AmazonEC2");
        marshalled.addParameter("Action", "DeleteNetworkAclEntry");
        marshalled.addParameter("Version", "2015-10-01");

        // Optional attributes are only emitted when present on the request.
        final String networkAclId = input.getNetworkAclId();
        if (networkAclId != null) {
            marshalled.addParameter("NetworkAclId", StringUtils.fromString(networkAclId));
        }

        final Integer ruleNumber = input.getRuleNumber();
        if (ruleNumber != null) {
            marshalled.addParameter("RuleNumber", StringUtils.fromInteger(ruleNumber));
        }

        final Boolean egress = input.isEgress();
        if (egress != null) {
            marshalled.addParameter("Egress", StringUtils.fromBoolean(egress));
        }

        return marshalled;
    }
}
| apache-2.0 |
yuri0x7c1/ofbiz-explorer | src/test/resources/apache-ofbiz-16.11.03/framework/base/src/main/java/org/apache/ofbiz/base/util/collections/MapStack.java | 4203 | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.apache.ofbiz.base.util.collections;
import java.util.Locale;
import java.util.Map;
import org.apache.ofbiz.base.util.Debug;
/**
* Map Stack
*
*/
/**
 * Map Stack
 *
 * A {@link MapContext} specialization whose factory methods manage a stack of Maps and
 * which exposes itself under the reserved key {@code "context"}.
 */
public class MapStack<K> extends MapContext<K, Object> {

    public static final String module = MapStack.class.getName();

    /** Creates a new MapStack initialized with a single (empty) entry on the stack. */
    public static <K> MapStack<K> create() {
        MapStack<K> newValue = new MapStack<K>();
        // initialize with a single entry
        newValue.push();
        return newValue;
    }

    /**
     * Creates a MapStack backed by the given Map. If the Map is itself a MapStack, its
     * internal stack is shared (shallow copy); otherwise the Map becomes the top entry.
     */
    @SuppressWarnings("unchecked")
    public static <K> MapStack<K> create(Map<K, Object> baseMap) {
        MapStack<K> newValue = new MapStack<K>();
        if (baseMap instanceof MapStack) {
            // fixed: use the parameterized type instead of the raw MapStack in the cast
            newValue.stackList.addAll(((MapStack<K>) baseMap).stackList);
        } else {
            newValue.stackList.add(0, baseMap);
        }
        return newValue;
    }

    /** Does a shallow copy of the internal stack of the passed MapStack; enables simultaneous stacks that share common parent Maps */
    public static <K> MapStack<K> create(MapStack<K> source) {
        MapStack<K> newValue = new MapStack<K>();
        newValue.stackList.addAll(source.stackList);
        return newValue;
    }

    protected MapStack() {
        super();
    }

    /**
     * Creates a MapStack object that has the same Map objects on its stack;
     * meant to be used to enable a
     * situation where a parent and child context are operating simultaneously
     * using two different MapStack objects, but sharing the Maps in common
     */
    @Override
    public MapStack<K> standAloneStack() {
        return MapStack.create(this);
    }

    /**
     * Creates a MapStack object that has the same Map objects on its stack,
     * but with a new Map pushed on the top; meant to be used to enable a
     * situation where a parent and child context are operating simultaneously
     * using two different MapStack objects, but sharing the Maps in common
     */
    @Override
    public MapStack<K> standAloneChildStack() {
        MapStack<K> standAloneChild = MapStack.create(this);
        standAloneChild.push();
        return standAloneChild;
    }

    /**
     * {@inheritDoc}
     * The reserved key {@code "context"} always resolves to this MapStack itself.
     */
    @Override
    public Object get(Object key) {
        if ("context".equals(key)) {
            return this;
        }
        return super.get(key);
    }

    /**
     * {@inheritDoc}
     * The reserved name {@code "context"} always resolves to this MapStack itself.
     */
    @Override
    public Object get(String name, Locale locale) {
        if ("context".equals(name)) {
            return this;
        }
        return super.get(name, locale);
    }

    /**
     * {@inheritDoc}
     * Warns when a value other than this MapStack is stored under the reserved
     * {@code "context"} key, since lookups of that key are answered by the self-reference.
     */
    @Override
    public Object put(K key, Object value) {
        if ("context".equals(key)) {
            if (value == null || this != value) {
                Debug.logWarning("Putting a value in a MapStack with key [context] that is not this MapStack, will be hidden by the current MapStack self-reference: " + value, module);
            }
        }
        return super.put(key, value);
    }
}
| apache-2.0 |
WillJiang/WillJiang | src/apps/mailreader/src/main/java/mailreader2/Login.java | 1397 | /*
* $Id: Login.java 471756 2006-11-06 15:01:43Z husted $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package mailreader2;
import org.apache.struts.apps.mailreader.dao.User;
import org.apache.struts.apps.mailreader.dao.ExpiredPasswordException;
/**
* <p> Validate a user login. </p>
*/
/**
 * <p> Validate a user login. </p>
 *
 * Looks up the user by the submitted credentials, stores it in the session state on a
 * match, and reports INPUT when any errors were recorded, SUCCESS otherwise.
 */
public final class Login extends MailreaderSupport {
    public String execute() throws ExpiredPasswordException {
        final User authenticated = findUser(getUsername(), getPassword());
        if (authenticated != null) {
            setUser(authenticated);
        }
        // Any validation/lookup errors were registered by the helpers above.
        return hasErrors() ? INPUT : SUCCESS;
    }
}
| apache-2.0 |
jamespatriot/shiro-root | web/src/main/java/org/apache/shiro/web/env/EnvironmentLoader.java | 11381 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.shiro.web.env;
import org.apache.shiro.config.ConfigurationException;
import org.apache.shiro.config.ResourceConfigurable;
import org.apache.shiro.util.ClassUtils;
import org.apache.shiro.util.LifecycleUtils;
import org.apache.shiro.util.StringUtils;
import org.apache.shiro.util.UnknownClassException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletContext;
/**
* An {@code EnvironmentLoader} is responsible for loading a web application's Shiro {@link WebEnvironment}
* (which includes the web app's {@link org.apache.shiro.web.mgt.WebSecurityManager WebSecurityManager}) into the
* {@code ServletContext} at application startup.
* <p/>
* In Shiro 1.1 and earlier, the Shiro ServletFilter was responsible for creating the {@code WebSecurityManager} and
* any additional objects (security filters, etc). However, any component not filtered by the Shiro Filter (such
* as other context listeners) was not able to easily acquire the these objects to perform security operations.
* <p/>
* Due to this, in Shiro 1.2 and later, this {@code EnvironmentLoader} (or more likely, the
* {@link EnvironmentLoaderListener} subclass) is the preferred mechanism to initialize
* a Shiro environment. The Shiro Filter, while still required for request filtering, will not perform this
* initialization at startup if the {@code EnvironmentLoader} (or listener) runs first.
* <h2>Usage</h2>
* This implementation will look for two servlet context {@code context-param}s in {@code web.xml}:
* {@code shiroEnvironmentClass} and {@code shiroConfigLocations} that customize how the {@code WebEnvironment} instance
* will be initialized.
* <h3>shiroEnvironmentClass</h3>
* The {@code shiroEnvironmentClass} {@code context-param}, if it exists, allows you to specify the
* fully-qualified implementation class name of the {@link WebEnvironment} to instantiate. For example:
* <pre>
* <context-param>
* <param-name>shiroEnvironmentClass</param-name>
* <param-value>com.foo.bar.shiro.MyWebEnvironment</param-value>
* </context-param>
* </pre>
* If not specified, the default value is the {@link IniWebEnvironment} class, which assumes Shiro's default
* <a href="http://shiro.apache.org/configuration.html">INI configuration format</a>
* <h3>shiroConfigLocations</h3>
* The {@code shiroConfigLocations} {@code context-param}, if it exists, allows you to specify the config location(s)
* (resource path(s)) that will be relayed to the instantiated {@link WebEnvironment}. For example:
* <pre>
* <context-param>
* <param-name>shiroConfigLocations</param-name>
* <param-value>/WEB-INF/someLocation/shiro.ini</param-value>
* </context-param>
* </pre>
* The {@code WebEnvironment} implementation must implement the {@link ResourceConfigurable} interface if it is to
* acquire the {@code shiroConfigLocations} value.
* <p/>
* If this {@code context-param} is not specified, the {@code WebEnvironment} instance determines default resource
* lookup behavior. For example, the {@link IniWebEnvironment} will check the following two locations for INI config
* by default (in order):
* <ol>
* <li>/WEB-INF/shiro.ini</li>
* <li>classpath:shiro.ini</li>
* </ol>
* <h2>Web Security Enforcement</h2>
* Using this loader will only initialize Shiro's environment in a web application - it will not filter web requests or
* perform web-specific security operations. To do this, you must ensure that you have also configured the
* {@link org.apache.shiro.web.servlet.ShiroFilter ShiroFilter} in {@code web.xml}.
* <p/>
* Finally, it should be noted that this implementation was based on ideas in Spring 3's
* {@code org.springframework.web.context.ContextLoader} implementation - no need to reinvent the wheel for this common
* behavior.
*
* @see EnvironmentLoaderListener
* @see org.apache.shiro.web.servlet.ShiroFilter ShiroFilter
* @since 1.2
*/
public class EnvironmentLoader {
/**
* Servlet Context config param for specifying the {@link WebEnvironment} implementation class to use:
* {@code shiroEnvironmentClass}
*/
public static final String ENVIRONMENT_CLASS_PARAM = "shiroEnvironmentClass";
/**
* Servlet Context config param for the resource path to use for configuring the {@link WebEnvironment} instance:
* {@code shiroConfigLocations}
*/
public static final String CONFIG_LOCATIONS_PARAM = "shiroConfigLocations";
public static final String ENVIRONMENT_ATTRIBUTE_KEY = EnvironmentLoader.class.getName() + ".ENVIRONMENT_ATTRIBUTE_KEY";
private static final Logger log = LoggerFactory.getLogger(EnvironmentLoader.class);
/**
* Initializes Shiro's {@link WebEnvironment} instance for the specified {@code ServletContext} based on the
* {@link #CONFIG_LOCATIONS_PARAM} value.
*
* @param servletContext current servlet context
* @return the new Shiro {@code WebEnvironment} instance.
* @throws IllegalStateException if an existing WebEnvironment has already been initialized and associated with
* the specified {@code ServletContext}.
*/
public WebEnvironment initEnvironment(ServletContext servletContext) throws IllegalStateException {
if (servletContext.getAttribute(ENVIRONMENT_ATTRIBUTE_KEY) != null) {
String msg = "There is already a Shiro environment associated with the current ServletContext. " +
"Check if you have multiple EnvironmentLoader* definitions in your web.xml!";
throw new IllegalStateException(msg);
}
servletContext.log("Initializing Shiro environment");
log.info("Starting Shiro environment initialization.");
long startTime = System.currentTimeMillis();
try {
WebEnvironment environment = createEnvironment(servletContext);
servletContext.setAttribute(ENVIRONMENT_ATTRIBUTE_KEY, environment);
log.debug("Published WebEnvironment as ServletContext attribute with name [{}]",
ENVIRONMENT_ATTRIBUTE_KEY);
if (log.isInfoEnabled()) {
long elapsed = System.currentTimeMillis() - startTime;
log.info("Shiro environment initialized in {} ms.", elapsed);
}
return environment;
} catch (RuntimeException ex) {
log.error("Shiro environment initialization failed", ex);
servletContext.setAttribute(ENVIRONMENT_ATTRIBUTE_KEY, ex);
throw ex;
} catch (Error err) {
log.error("Shiro environment initialization failed", err);
servletContext.setAttribute(ENVIRONMENT_ATTRIBUTE_KEY, err);
throw err;
}
}
/**
* Return the WebEnvironment implementation class to use, either the default
* {@link IniWebEnvironment} or a custom class if specified.
*
* @param servletContext current servlet context
* @return the WebEnvironment implementation class to use
* @see #ENVIRONMENT_CLASS_PARAM
* @see IniWebEnvironment
*/
protected Class<?> determineWebEnvironmentClass(ServletContext servletContext) {
String className = servletContext.getInitParameter(ENVIRONMENT_CLASS_PARAM);
if (className != null) {
try {
return ClassUtils.forName(className);
} catch (UnknownClassException ex) {
throw new ConfigurationException(
"Failed to load custom WebEnvironment class [" + className + "]", ex);
}
} else {
return IniWebEnvironment.class;
}
}
/**
* Instantiates a {@link WebEnvironment} based on the specified ServletContext.
* <p/>
* This implementation {@link #determineWebEnvironmentClass(javax.servlet.ServletContext) determines} a
* {@link WebEnvironment} implementation class to use. That class is instantiated, configured, and returned.
* <p/>
* This allows custom {@code WebEnvironment} implementations to be specified via a ServletContext init-param if
* desired. If not specified, the default {@link IniWebEnvironment} implementation will be used.
*
* @param sc current servlet context
* @return the constructed Shiro WebEnvironment instance
* @see MutableWebEnvironment
* @see ResourceConfigurable
*/
protected WebEnvironment createEnvironment(ServletContext sc) {
Class<?> clazz = determineWebEnvironmentClass(sc);
if (!MutableWebEnvironment.class.isAssignableFrom(clazz)) {
throw new ConfigurationException("Custom WebEnvironment class [" + clazz.getName() +
"] is not of required type [" + WebEnvironment.class.getName() + "]");
}
String configLocations = sc.getInitParameter(CONFIG_LOCATIONS_PARAM);
boolean configSpecified = StringUtils.hasText(configLocations);
if (configSpecified && !(ResourceConfigurable.class.isAssignableFrom(clazz))) {
String msg = "WebEnvironment class [" + clazz.getName() + "] does not implement the " +
ResourceConfigurable.class.getName() + "interface. This is required to accept any " +
"configured " + CONFIG_LOCATIONS_PARAM + "value(s).";
throw new ConfigurationException(msg);
}
MutableWebEnvironment environment = (MutableWebEnvironment) ClassUtils.newInstance(clazz);
environment.setServletContext(sc);
if (configSpecified && (environment instanceof ResourceConfigurable)) {
((ResourceConfigurable) environment).setConfigLocations(configLocations);
}
customizeEnvironment(environment);
LifecycleUtils.init(environment);
return environment;
}
/**
 * Template hook for subclasses to customize the {@link WebEnvironment} instance
 * before it is initialized via {@code LifecycleUtils.init} in
 * {@link #createEnvironment(ServletContext)}. The default implementation does nothing.
 *
 * @param environment the environment about to be initialized
 */
protected void customizeEnvironment(WebEnvironment environment) {
}
/**
 * Destroys the {@link WebEnvironment} for the given servlet context and removes it
 * from the context's attributes.
 *
 * @param servletContext the ServletContext attributed to the WebSecurityManager
 */
public void destroyEnvironment(ServletContext servletContext) {
    servletContext.log("Cleaning up Shiro Environment");
    try {
        final Object env = servletContext.getAttribute(ENVIRONMENT_ATTRIBUTE_KEY);
        LifecycleUtils.destroy(env);
    } finally {
        // Detach the attribute even if destroy() throws.
        servletContext.removeAttribute(ENVIRONMENT_ATTRIBUTE_KEY);
    }
}
}
| apache-2.0 |
goodwinnk/intellij-community | xml/xml-psi-impl/src/com/intellij/psi/impl/source/xml/TagNameReference.java | 6957 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.xml;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.meta.PsiMetaData;
import com.intellij.psi.meta.PsiMetaOwner;
import com.intellij.psi.xml.XmlElement;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.IncorrectOperationException;
import com.intellij.xml.XmlElementDescriptor;
import com.intellij.xml.XmlExtension;
import com.intellij.xml.impl.schema.AnyXmlElementDescriptor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * A {@link PsiReference} anchored on an XML tag name (start or end tag) that resolves
 * to the element declaring the tag (e.g. a DTD/schema declaration), via the tag's
 * {@link XmlElementDescriptor}.
 */
public class TagNameReference implements PsiReference {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.xml.TagNameReference");

  /** {@code true} when this reference sits on the start-tag name, {@code false} for the end tag. */
  protected final boolean myStartTagFlag;
  /** AST node of the tag-name token this reference is anchored to. */
  private final ASTNode myNameElement;

  public TagNameReference(ASTNode nameElement, boolean startTagFlag) {
    myStartTagFlag = startTagFlag;
    myNameElement = nameElement;
  }

  /**
   * Returns the owning {@link XmlTag} when the name token's parent is a tag,
   * otherwise the bare name PSI element itself.
   */
  @NotNull
  @Override
  public PsiElement getElement() {
    PsiElement element = myNameElement.getPsi();
    final PsiElement parent = element.getParent();
    return parent instanceof XmlTag ? parent : element;
  }

  /** @return the tag owning the name element, or {@code null} if the name is not inside a tag. */
  @Nullable
  protected XmlTag getTagElement() {
    final PsiElement element = getElement();
    if(element == myNameElement.getPsi()) return null;
    return (XmlTag)element;
  }

  /**
   * Range of the (local, prefix-less) tag name inside {@link #getElement()}.
   * For end tags the offset is computed by walking backwards from the last child,
   * since the name token is not the first child of the tag.
   */
  @NotNull
  @Override
  public TextRange getRangeInElement() {
    final ASTNode nameElement = getNameElement();
    if (nameElement == null){
      return TextRange.EMPTY_RANGE;
    }
    // Skip past "prefix:" if present; colon == 0 when there is no prefix.
    int colon = getPrefixIndex(nameElement.getText()) + 1;
    if (myStartTagFlag) {
      final int parentOffset = ((TreeElement)nameElement).getStartOffsetInParent();
      return new TextRange(parentOffset + colon, parentOffset + nameElement.getTextLength());
    }
    else {
      final PsiElement element = getElement();
      // NOTE(review): comparing a PsiElement against an ASTNode field; relies on nodes
      // that implement both interfaces — confirm intent before changing.
      if (element == myNameElement) return new TextRange(colon, myNameElement.getTextLength());

      final int elementLength = element.getTextLength();
      int diffFromEnd = 0;
      // Sum lengths of trailing siblings after the name token to find the name's end offset.
      for(ASTNode node = element.getNode().getLastChildNode(); node != nameElement && node != null; node = node.getTreePrev()) {
        diffFromEnd += node.getTextLength();
      }
      final int nameEnd = elementLength - diffFromEnd;
      return new TextRange(nameEnd - nameElement.getTextLength() + colon, nameEnd);
    }
  }

  /** Index of the namespace-prefix separator in {@code name}, or -1 if there is none. */
  protected int getPrefixIndex(@NotNull String name) {
    return name.indexOf(":");
  }

  public ASTNode getNameElement() {
    return myNameElement;
  }

  /**
   * Resolves to the declaration provided by the tag's descriptor; for
   * {@link AnyXmlElementDescriptor} the tag itself is returned.
   */
  @Override
  public PsiElement resolve() {
    final XmlTag tag = getTagElement();
    final XmlElementDescriptor descriptor = tag != null ? tag.getDescriptor():null;

    if (LOG.isDebugEnabled()) {
      LOG.debug("Descriptor for tag " +
                (tag != null ? tag.getName() : "NULL") +
                " is " +
                (descriptor != null ? (descriptor.toString() + ": " + descriptor.getClass().getCanonicalName()) : "NULL"));
    }

    if (descriptor != null){
      return descriptor instanceof AnyXmlElementDescriptor ? tag : descriptor.getDeclaration();
    }
    return null;
  }

  @Override
  @NotNull
  public String getCanonicalText() {
    return getNameElement().getText();
  }

  /**
   * Renames the tag. Only acts on start tags; preserves the existing namespace
   * prefix and, when resolving to a file, strips a trailing file extension.
   */
  @Override
  @Nullable
  public PsiElement handleElementRename(@NotNull String newElementName) throws IncorrectOperationException {
    final XmlTag element = getTagElement();
    if (element == null || !myStartTagFlag) return element;

    if (getPrefixIndex(newElementName) == -1) {
      final String namespacePrefix = element.getNamespacePrefix();
      final int index = newElementName.lastIndexOf('.');
      if (index != -1) {
        final PsiElement psiElement = resolve();
        // Drop a file extension only when the reference resolves to a file.
        if (psiElement instanceof PsiFile || (psiElement != null && psiElement.isEquivalentTo(psiElement.getContainingFile()))) {
          newElementName = newElementName.substring(0, index);
        }
      }
      newElementName = prependNamespacePrefix(newElementName, namespacePrefix);
    }
    element.setName(newElementName);
    return element;
  }

  /** Prepends {@code namespacePrefix + ":"} when a prefix is present. */
  protected String prependNamespacePrefix(String newElementName, String namespacePrefix) {
    newElementName = (!namespacePrefix.isEmpty() ? namespacePrefix + ":":namespacePrefix) + newElementName;
    return newElementName;
  }

  /**
   * Binds this reference to a meta-data owner or a file.
   *
   * @throws IncorrectOperationException if the target is neither, or if there is no
   *                                     enclosing tag to rename (previously an NPE)
   */
  @Override
  public PsiElement bindToElement(@NotNull PsiElement element) throws IncorrectOperationException {
    PsiMetaData metaData = null;
    if (element instanceof PsiMetaOwner){
      final PsiMetaOwner owner = (PsiMetaOwner)element;
      metaData = owner.getMetaData();
      if (metaData instanceof XmlElementDescriptor){
        final XmlTag boundTag = getTagElement();
        if (boundTag == null) {
          // getTagElement() is @Nullable; fail with the declared exception instead of an NPE.
          throw new IncorrectOperationException("Cannot bind tag name reference: no enclosing tag for " + element);
        }
        return boundTag.setName(metaData.getName(getElement())); // TODO: need to evaluate new ns prefix
      }
    } else if (element instanceof PsiFile) {
      final XmlTag tagElement = getTagElement();
      if (tagElement == null || !myStartTagFlag) return tagElement;

      String newElementName = ((PsiFile)element).getName();
      final int index = newElementName.lastIndexOf('.');

      // TODO: need to evaluate new ns prefix
      newElementName = prependNamespacePrefix(newElementName.substring(0, index), tagElement.getNamespacePrefix());

      // Reuse the already null-checked tag instead of re-fetching it.
      return tagElement.setName(newElementName);
    }

    final XmlTag tag = getTagElement();
    throw new IncorrectOperationException("Cant bind to not a xml element definition!"+element+","+metaData + "," + tag + "," + (tag != null ? tag.getDescriptor() : "unknown descriptor"));
  }

  @Override
  public boolean isReferenceTo(@NotNull PsiElement element) {
    return getElement().getManager().areElementsEquivalent(element, resolve());
  }

  @Override
  public boolean isSoft() {
    return false;
  }

  /** Creates a tag-name reference via the {@link XmlExtension} applicable to {@code element}. */
  @Nullable
  static TagNameReference createTagNameReference(XmlElement element, @NotNull ASTNode nameElement, boolean startTagFlag) {
    final XmlExtension extension = XmlExtension.getExtensionByElement(element);
    return extension == null ? null : extension.createTagNameReference(nameElement, startTagFlag);
  }

  public boolean isStartTagFlag() {
    return myStartTagFlag;
  }
}
| apache-2.0 |
anomaly/closure-compiler | test/com/google/javascript/jscomp/NewTypeInferenceWithTypeSyntaxTranspilationTest.java | 6823 | /*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
import com.google.javascript.jscomp.newtypes.JSTypeCreatorFromJSDoc;
/**
* Tests for the new type inference on transpiled code that includes
* type annotations in the language syntax.
*
* <p>We will eventually type check it natively, without transpiling.
*
* @author dimvar@google.com (Dimitris Vardoulakis)
*/
public final class NewTypeInferenceWithTypeSyntaxTranspilationTest
    extends NewTypeInferenceTestBase {

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Accept TypeScript-style annotated ES6 input and transpile to ES3 before checking.
    compilerOptions.setLanguageIn(LanguageMode.ECMASCRIPT6_TYPED);
    compilerOptions.setLanguageOut(LanguageMode.ECMASCRIPT3);
  }

  // Well-typed declarations of primitives, arrays, classes, structural types and
  // type aliases must produce no warnings.
  public void testSimpleAnnotationsNoWarnings() {
    typeCheck("var x: number = 123;");
    typeCheck("var x: string = 'adsf';");
    typeCheck("var x: boolean = true;");
    typeCheck("var x: number[] = [1, 2, 3];");
    typeCheck("function f(): void { return undefined; }");
    typeCheck(LINE_JOINER.join(
        "class Foo {}",
        "var x: Foo = new Foo;"));
    typeCheck("var x: {p: string; q: number};");
    typeCheck("type Foo = number; var x: Foo = 3;");
  }

  // Mismatched initializers against annotated types must be flagged.
  public void testSimpleAnnotationsWarnings() {
    typeCheck("var x: number[] = ['hello'];", NewTypeInference.MISTYPED_ASSIGN_RHS);
    typeCheck("var x: {p: string; q: number}; x = {p: 3, q: 3}",
        NewTypeInference.MISTYPED_ASSIGN_RHS);
    typeCheck("type Foo = number; var x: Foo = '3';", NewTypeInference.MISTYPED_ASSIGN_RHS);
  }

  // Annotated parameter and return types are checked at call sites.
  public void testSimpleFunctions() {
    typeCheck(LINE_JOINER.join(
        "function f(x: number) {}",
        "f(123);"));
    typeCheck(LINE_JOINER.join(
        "function f(x: number) {}",
        "f('asdf');"),
        NewTypeInference.INVALID_ARGUMENT_TYPE);
    typeCheck(LINE_JOINER.join(
        "function f(x): string { return x; }",
        "f(123);"),
        NewTypeInference.INVALID_ARGUMENT_TYPE);
  }

  public void testSimpleClasses() {
    typeCheck(LINE_JOINER.join(
        "class Foo {}",
        // Nominal types are non-nullable by default
        "var x: Foo = null;"),
        NewTypeInference.MISTYPED_ASSIGN_RHS);
    typeCheck(LINE_JOINER.join(
        "class Foo {}",
        "class Bar {}",
        "var x: Bar = new Foo;"),
        NewTypeInference.MISTYPED_ASSIGN_RHS);
  }

  // Typed class member declarations, both instance and static, are enforced.
  public void testClassPropertyDeclarations() {
    typeCheck(LINE_JOINER.join(
        "class Foo {",
        "  prop: number;",
        "  constructor() { this.prop = 'asdf'; }",
        "}"),
        NewTypeInference.MISTYPED_ASSIGN_RHS);
    typeCheck(LINE_JOINER.join(
        "class Foo {",
        "  prop: string;",
        "}",
        "(new Foo).prop - 5;"),
        NewTypeInference.INVALID_OPERAND_TYPE);
    typeCheck(LINE_JOINER.join(
        "class Foo {",
        "  static prop: number;",
        "}",
        "Foo.prop = 'asdf';"),
        NewTypeInference.MISTYPED_ASSIGN_RHS);
    // TODO(dimvar): up to ES5, prop decls use dot.
    // Should we start allowing [] for @unrestricted classes?
    typeCheck(LINE_JOINER.join(
        "/** @unrestricted */ class Foo {",
        "  ['prop']: string;",
        "}",
        "(new Foo).prop - 5;"),
        NewTypeInference.INEXISTENT_PROPERTY);
  }

  public void testOptionalParameter() {
    typeCheck(LINE_JOINER.join(
        "function foo(p1?: string) {}",
        "foo(); foo('str');"));
    typeCheck(LINE_JOINER.join(
        "function foo(p0, p1?: string) {}",
        "foo('2', 3)"),
        NewTypeInference.INVALID_ARGUMENT_TYPE);
  }

  // Rest params are typed as arrays; both call sites and the array variable are checked.
  public void testRestParameter() {
    typeCheck(LINE_JOINER.join(
        "function foo(...p1: number[]) {}",
        "foo(); foo(3); foo(3, 4);"));
    typeCheck(LINE_JOINER.join(
        "function foo(...p1: number[]) {}",
        "foo('3')"),
        NewTypeInference.INVALID_ARGUMENT_TYPE);
    typeCheck("function foo(...p1: number[]) { var s:string = p1[0]; }",
        NewTypeInference.MISTYPED_ASSIGN_RHS);
    typeCheck("function foo(...p1: number[]) { p1 = ['3']; }",
        NewTypeInference.MISTYPED_ASSIGN_RHS);
  }

  // Inherited property types and inheritance cycles.
  public void testClass() {
    typeCheck(LINE_JOINER.join(
        "class Foo {",
        "  prop: number;",
        "}",
        "class Bar extends Foo {",
        "}",
        "(new Bar).prop = '3'"),
        NewTypeInference.MISTYPED_ASSIGN_RHS);
    typeCheck(
        "class Foo extends Foo {}",
        JSTypeCreatorFromJSDoc.INHERITANCE_CYCLE,
        NewTypeInference.UNDEFINED_SUPER_CLASS);
  }

  // Interface instantiation, implementation, extension and conflicts.
  public void testInterface() {
    typeCheck(LINE_JOINER.join(
        "interface Foo {}",
        "(new Foo);"),
        NewTypeInference.NOT_A_CONSTRUCTOR);
    typeCheck(LINE_JOINER.join(
        "interface Foo {",
        "  prop: number;",
        "}",
        "class Bar implements Foo {",
        "}"),
        GlobalTypeInfo.INTERFACE_METHOD_NOT_IMPLEMENTED);
    typeCheck(LINE_JOINER.join(
        "interface Foo {",
        "  prop: number;",
        "}",
        "class Bar extends Foo {",
        "}"),
        JSTypeCreatorFromJSDoc.CONFLICTING_EXTENDED_TYPE);
    typeCheck("interface Foo extends Foo {}",
        JSTypeCreatorFromJSDoc.INHERITANCE_CYCLE);
    typeCheck(LINE_JOINER.join(
        "interface Foo {",
        "  prop: number;",
        "}",
        "interface Bar {",
        "  prop: string;",
        "}",
        "interface Baz extends Foo, Bar {}"),
        GlobalTypeInfo.SUPER_INTERFACES_HAVE_INCOMPATIBLE_PROPERTIES);
  }

  // "declare" is only legal in externs, not in regular code.
  public void testAmbientDeclarationsInCode() {
    typeCheck("declare var x: number;", Es6TypedToEs6Converter.DECLARE_IN_NON_EXTERNS);
    typeCheck("declare function f(): void;", Es6TypedToEs6Converter.DECLARE_IN_NON_EXTERNS);
    typeCheck("declare class C { constructor(); }", Es6TypedToEs6Converter.DECLARE_IN_NON_EXTERNS);
    typeCheck("declare enum Foo { BAR }", Es6TypedToEs6Converter.DECLARE_IN_NON_EXTERNS);
  }

  public void testGetterReturnNonDeclaredType() {
    // getters cannot be transpiled to EC3
    compilerOptions.setLanguageOut(LanguageMode.ECMASCRIPT5);
    typeCheck(
        "var x = {get a(): number { return 'str'; }}",
        NewTypeInference.RETURN_NONDECLARED_TYPE);
  }
}
| apache-2.0 |
armstrongli/etcd4j | src/test/java/mousio/etcd4j/responses/EtcdKeysResponseParserTest.java | 16035 | package mousio.etcd4j.responses;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.HttpHeaders;
import org.junit.Before;
import org.junit.Test;
import static mousio.etcd4j.EtcdUtil.convertDate;
import static mousio.etcd4j.responses.EtcdResponseDecoders.*;
import static org.junit.Assert.*;
/**
* Examples are taken out of the api.md of etcd project.
*/
public class EtcdKeysResponseParserTest {
  // HTTP headers attached to every decoded response; populated in setup().
  private HttpHeaders headers;

  @Before
  public void setup() {
    // Cluster/index/raft headers that the decoder copies onto the parsed response.
    this.headers = new DefaultHttpHeaders();
    this.headers.add(X_ETCD_CLUSTER_ID, "test");
    this.headers.add(X_ETCD_INDEX, 208);
    this.headers.add(X_RAFT_INDEX, 5);
    this.headers.add(X_RAFT_TERM, 15);
  }

  // "set" action: node fields and all four header-derived fields are parsed.
  @Test
  public void testParseSetKey() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"set\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 2,\n" +
        "        \"key\": \"/message\",\n" +
        "        \"modifiedIndex\": 2,\n" +
        "        \"value\": \"Hello world\"\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.set, action.action);
    assertEquals(2, action.node.createdIndex.intValue());
    assertEquals("/message", action.node.key);
    assertEquals(2, action.node.modifiedIndex.intValue());
    assertEquals("Hello world", action.node.value);

    assertEquals("test", action.etcdClusterId);
    assertEquals(208, action.etcdIndex.longValue());
    assertEquals(5, action.raftIndex.longValue());
    assertEquals(15, action.raftTerm.longValue());
  }

  // "get" action with a plain value node.
  @Test
  public void testParseGetKey() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"get\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 2,\n" +
        "        \"key\": \"/message\",\n" +
        "        \"modifiedIndex\": 2,\n" +
        "        \"value\": \"Hello world\"\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.get, action.action);
    assertEquals(2, action.node.createdIndex.intValue());
    assertEquals("/message", action.node.key);
    assertEquals(2, action.node.modifiedIndex.intValue());
    assertEquals("Hello world", action.node.value);
  }

  // "set" over an existing key: both node and prevNode are populated.
  @Test
  public void testParseChangeKey() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"set\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 3,\n" +
        "        \"key\": \"/message\",\n" +
        "        \"modifiedIndex\": 3,\n" +
        "        \"value\": \"Hello etcd\"\n" +
        "    },\n" +
        "    \"prevNode\": {\n" +
        "        \"createdIndex\": 2,\n" +
        "        \"key\": \"/message\",\n" +
        "        \"value\": \"Hello world\",\n" +
        "        \"modifiedIndex\": 2\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.set, action.action);
    assertEquals(3, action.node.createdIndex.intValue());
    assertEquals("/message", action.node.key);
    assertEquals(3, action.node.modifiedIndex.intValue());
    assertEquals("Hello etcd", action.node.value);

    assertEquals(2, action.prevNode.createdIndex.intValue());
    assertEquals("/message", action.prevNode.key);
    assertEquals(2, action.prevNode.modifiedIndex.intValue());
    assertEquals("Hello world", action.prevNode.value);
  }

  // "delete": node has no value; prevNode carries the removed value.
  @Test
  public void testParseDeleteKey() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"delete\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 3,\n" +
        "        \"key\": \"/message\",\n" +
        "        \"modifiedIndex\": 4\n" +
        "    },\n" +
        "    \"prevNode\": {\n" +
        "        \"key\": \"/message\",\n" +
        "        \"value\": \"Hello etcd\",\n" +
        "        \"modifiedIndex\": 3,\n" +
        "        \"createdIndex\": 3\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.delete, action.action);
    assertEquals(3, action.node.createdIndex.intValue());
    assertEquals("/message", action.node.key);
    assertEquals(4, action.node.modifiedIndex.intValue());

    assertEquals(3, action.prevNode.createdIndex.intValue());
    assertEquals("/message", action.prevNode.key);
    assertEquals(3, action.prevNode.modifiedIndex.intValue());
    assertEquals("Hello etcd", action.prevNode.value);
  }

  // TTL and RFC 3339 expiration timestamp are parsed on the node.
  @Test
  public void testParseSetKeyTtl() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"set\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 5,\n" +
        "        \"expiration\": \"2013-12-04T12:01:21.874888581-08:00\",\n" +
        "        \"key\": \"/foo\",\n" +
        "        \"modifiedIndex\": 5,\n" +
        "        \"ttl\": 5,\n" +
        "        \"value\": \"bar\"\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.set, action.action);
    assertEquals(5, action.node.createdIndex.intValue());
    assertEquals("/foo", action.node.key);
    assertEquals(5, action.node.modifiedIndex.intValue());
    assertEquals("bar", action.node.value);
    assertEquals(5, action.node.ttl.intValue());
    assertEquals(convertDate("2013-12-04T12:01:21.874888581-08:00"), action.node.expiration);
  }

  // Error payloads decode into EtcdException with code/cause/index/message.
  @Test
  public void testParseTtlExpiredException() throws Exception {
    EtcdException e = EtcdException.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"cause\": \"/foo\",\n" +
        "    \"errorCode\": 100,\n" +
        "    \"index\": 6,\n" +
        "    \"message\": \"Key Not Found\"\n" +
        "}").getBytes()));

    assertEquals(100, e.errorCode);
    assertEquals("/foo", e.etcdCause);
    assertEquals(6, e.index.intValue());
    assertEquals("Key Not Found", e.etcdMessage);
  }

  // "update": prevNode keeps its old ttl/expiration, node does not.
  @Test
  public void testParseUpdateKeyTtl() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"update\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 5,\n" +
        "        \"key\": \"/foo\",\n" +
        "        \"modifiedIndex\": 6,\n" +
        "        \"value\": \"bar\"\n" +
        "    },\n" +
        "    \"prevNode\": {\n" +
        "        \"createdIndex\": 5,\n" +
        "        \"expiration\": \"2013-12-04T12:01:21.874888581-08:00\",\n" +
        "        \"key\": \"/foo\",\n" +
        "        \"modifiedIndex\": 5,\n" +
        "        \"ttl\": 3,\n" +
        "        \"value\": \"bar\"\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.update, action.action);
    assertEquals(5, action.node.createdIndex.intValue());
    assertEquals("/foo", action.node.key);
    assertEquals(6, action.node.modifiedIndex.intValue());
    assertEquals("bar", action.node.value);

    assertEquals(5, action.prevNode.createdIndex.intValue());
    assertEquals("/foo", action.prevNode.key);
    assertEquals(5, action.prevNode.modifiedIndex.intValue());
    assertEquals("bar", action.prevNode.value);
    assertEquals(3, action.prevNode.ttl.intValue());
    assertEquals(convertDate("2013-12-04T12:01:21.874888581-08:00"), action.prevNode.expiration);
  }

  // "create" action (in-order keys).
  @Test
  public void testParseCreateKey() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"create\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 6,\n" +
        "        \"key\": \"/queue/6\",\n" +
        "        \"modifiedIndex\": 6,\n" +
        "        \"value\": \"Job1\"\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.create, action.action);
    assertEquals(6, action.node.createdIndex.intValue());
    assertEquals("/queue/6", action.node.key);
    assertEquals(6, action.node.modifiedIndex.intValue());
    assertEquals("Job1", action.node.value);
  }

  // Directory node with child "nodes" array preserved in order.
  @Test
  public void testParseGetOrderedKeys() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"get\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 2,\n" +
        "        \"dir\": true,\n" +
        "        \"key\": \"/queue\",\n" +
        "        \"modifiedIndex\": 2,\n" +
        "        \"nodes\": [\n" +
        "            {\n" +
        "                \"createdIndex\": 2,\n" +
        "                \"key\": \"/queue/2\",\n" +
        "                \"modifiedIndex\": 2,\n" +
        "                \"value\": \"Job1\"\n" +
        "            },\n" +
        "            {\n" +
        "                \"createdIndex\": 3,\n" +
        "                \"key\": \"/queue/3\",\n" +
        "                \"modifiedIndex\": 3,\n" +
        "                \"value\": \"Job2\"\n" +
        "            }\n" +
        "        ]\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.get, action.action);
    assertEquals(2, action.node.createdIndex.intValue());
    assertEquals("/queue", action.node.key);
    assertEquals(2, action.node.modifiedIndex.intValue());
    assertTrue(action.node.dir);

    assertEquals(2, action.node.nodes.size());
    assertEquals(2, action.node.nodes.get(0).createdIndex.intValue());
    assertEquals(2, action.node.nodes.get(0).modifiedIndex.intValue());
    assertEquals("Job1", action.node.nodes.get(0).value);
    assertEquals("/queue/2", action.node.nodes.get(0).key);
    assertEquals(3, action.node.nodes.get(1).createdIndex.intValue());
    assertEquals(3, action.node.nodes.get(1).modifiedIndex.intValue());
    assertEquals("Job2", action.node.nodes.get(1).value);
    assertEquals("/queue/3", action.node.nodes.get(1).key);
  }

  // "expire" on a directory: prevNode is the expired dir with its expiration time.
  @Test
  public void testParseExpiredDir() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"expire\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 8,\n" +
        "        \"key\": \"/dir\",\n" +
        "        \"modifiedIndex\": 15\n" +
        "    },\n" +
        "    \"prevNode\": {\n" +
        "        \"createdIndex\": 8,\n" +
        "        \"key\": \"/dir\",\n" +
        "        \"dir\":true,\n" +
        "        \"modifiedIndex\": 17,\n" +
        "        \"expiration\": \"2013-12-11T10:39:35.689275857-08:00\"\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.expire, action.action);
    assertEquals(8, action.node.createdIndex.intValue());
    assertEquals("/dir", action.node.key);
    assertEquals(15, action.node.modifiedIndex.intValue());

    assertEquals(8, action.prevNode.createdIndex.intValue());
    assertEquals("/dir", action.prevNode.key);
    assertEquals(17, action.prevNode.modifiedIndex.intValue());
    assertEquals(convertDate("2013-12-11T10:39:35.689275857-08:00"), action.prevNode.expiration);
    assertTrue(action.prevNode.dir);
  }

  // Compare-and-swap: new value in node, compared-against value in prevNode.
  @Test
  public void testParseCompareAndSwap() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"compareAndSwap\",\n" +
        "    \"node\": {\n" +
        "        \"createdIndex\": 8,\n" +
        "        \"key\": \"/foo\",\n" +
        "        \"modifiedIndex\": 9,\n" +
        "        \"value\": \"two\"\n" +
        "    },\n" +
        "    \"prevNode\": {\n" +
        "        \"createdIndex\": 8,\n" +
        "        \"key\": \"/foo\",\n" +
        "        \"modifiedIndex\": 8,\n" +
        "        \"value\": \"one\"\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.compareAndSwap, action.action);
    assertEquals(8, action.node.createdIndex.intValue());
    assertEquals("/foo", action.node.key);
    assertEquals(9, action.node.modifiedIndex.intValue());
    assertEquals("two", action.node.value);

    assertEquals(8, action.prevNode.createdIndex.intValue());
    assertEquals("/foo", action.prevNode.key);
    assertEquals(8, action.prevNode.modifiedIndex.intValue());
    assertEquals("one", action.prevNode.value);
  }

  // Compare-and-delete: node has no value; prevNode carries the deleted one.
  @Test
  public void testParseCompareAndDelete() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"compareAndDelete\",\n" +
        "    \"node\": {\n" +
        "        \"key\": \"/foo\",\n" +
        "        \"modifiedIndex\": 9,\n" +
        "        \"createdIndex\": 8\n" +
        "    },\n" +
        "    \"prevNode\": {\n" +
        "        \"key\": \"/foo\",\n" +
        "        \"value\": \"one\",\n" +
        "        \"modifiedIndex\": 8,\n" +
        "        \"createdIndex\": 8\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.compareAndDelete, action.action);
    assertEquals(8, action.node.createdIndex.intValue());
    assertEquals("/foo", action.node.key);
    assertEquals(9, action.node.modifiedIndex.intValue());

    assertEquals(8, action.prevNode.createdIndex.intValue());
    assertEquals("/foo", action.prevNode.key);
    assertEquals(8, action.prevNode.modifiedIndex.intValue());
    assertEquals("one", action.prevNode.value);
  }

  // Recursive get: nested directory nodes are parsed depth-first.
  @Test
  public void testParseRecursiveGet() throws Exception {
    EtcdKeysResponse action = EtcdKeysResponse.DECODER.decode(headers, Unpooled.copiedBuffer(("{\n" +
        "    \"action\": \"get\",\n" +
        "    \"node\": {\n" +
        "        \"dir\": true,\n" +
        "        \"key\": \"/\",\n" +
        "        \"nodes\": [\n" +
        "            {\n" +
        "                \"createdIndex\": 2,\n" +
        "                \"dir\": true,\n" +
        "                \"key\": \"/foo_dir\",\n" +
        "                \"modifiedIndex\": 2,\n" +
        "                \"nodes\": [\n" +
        "                    {\n" +
        "                        \"createdIndex\": 2,\n" +
        "                        \"key\": \"/foo_dir/foo\",\n" +
        "                        \"modifiedIndex\": 2,\n" +
        "                        \"value\": \"bar\"\n" +
        "                    }\n" +
        "                ]\n" +
        "            }\n" +
        "        ]\n" +
        "    }\n" +
        "}").getBytes()));

    assertEquals(EtcdKeyAction.get, action.action);
    assertTrue(action.node.dir);

    assertEquals(1, action.node.nodes.size());
    assertEquals(2, action.node.nodes.get(0).createdIndex.intValue());
    assertEquals("/foo_dir", action.node.nodes.get(0).key);
    assertEquals(2, action.node.nodes.get(0).modifiedIndex.intValue());
    assertTrue(action.node.nodes.get(0).dir);

    assertEquals(2, action.node.nodes.get(0).nodes.get(0).createdIndex.intValue());
    assertEquals("/foo_dir/foo", action.node.nodes.get(0).nodes.get(0).key);
    assertEquals(2, action.node.nodes.get(0).nodes.get(0).modifiedIndex.intValue());
    assertEquals("bar", action.node.nodes.get(0).nodes.get(0).value);
  }

  // Error code helpers: isErrorCode / getErrorCode round-trip the numeric code.
  @Test
  public void testErrorCode() throws Exception {
    EtcdException e = EtcdException.DECODER.decode(headers, Unpooled.copiedBuffer((
        "{\n" +
        "    \"errorCode\": 105,\n" +
        "    \"message\": \"Key already exists\",\n" +
        "    \"cause\": \"/foo/bar\",\n" +
        "    \"index\": 1024\n" +
        "}").getBytes()));

    assertTrue(e.isErrorCode(EtcdErrorCode.NodeExist));
    assertNotEquals(e.getErrorCode(), EtcdErrorCode.KeyNotFound);
  }
}
| apache-2.0 |
michaelcapizzi/processors | src/main/java/org/maltparserx/core/syntaxgraph/reader/NegraReader.java | 16507 | package org.maltparserx.core.syntaxgraph.reader;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.util.Iterator;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.PatternSyntaxException;
import org.maltparserx.core.exception.MaltChainedException;
import org.maltparserx.core.io.dataformat.ColumnDescription;
import org.maltparserx.core.io.dataformat.DataFormatException;
import org.maltparserx.core.io.dataformat.DataFormatInstance;
import org.maltparserx.core.syntaxgraph.MappablePhraseStructureGraph;
import org.maltparserx.core.syntaxgraph.PhraseStructure;
import org.maltparserx.core.syntaxgraph.TokenStructure;
import org.maltparserx.core.syntaxgraph.edge.Edge;
import org.maltparserx.core.syntaxgraph.node.PhraseStructureNode;
/**
*
*
* @author Johan Hall
*/
public class NegraReader implements SyntaxGraphReader {
/** Section markers of the NEGRA export format; SENTENCE is the body, UNDEF means "outside any table". */
private enum NegraTables {
	ORIGIN, EDITOR, WORDTAG, MORPHTAG, NODETAG, EDGETAG, SECEDGETAG, SENTENCE, UNDEF
};
private BufferedReader reader;
private DataFormatInstance dataFormatInstance;
// Number of sentences read so far.
private int sentenceCount;
private String optionString;
private int formatVersion;
// Table currently being scanned while reading the export file.
private NegraTables currentHeaderTable;
private int currentTerminalSize;
private int currentNonTerminalSize;
// Maps NEGRA non-terminal ids (>= START_ID_OF_NONTERMINALS) to created phrase-structure nodes.
private SortedMap<Integer,PhraseStructureNode> nonterminals;
// Scratch buffers for the edge label symbol/table name parsed from the current column.
private StringBuilder edgelabelSymbol;
private StringBuilder edgelabelTableName;
// NEGRA convention: ids below 500 denote terminals, non-terminals start at 500.
private int START_ID_OF_NONTERMINALS = 500;
// Source of the input: exactly one of fileName/url is set when reopening is possible.
private String fileName = null;
private URL url = null;
private String charsetName;
// Requested / completed number of passes over the input.
private int nIterations;
private int cIterations;
// Set to false for System.in so the underlying stream is not closed — see open(InputStream, String).
private boolean closeStream = true;
/**
 * Creates a reader configured for a single pass over the input, with empty
 * parse state: no current header table, empty edge-label buffers, and no
 * non-terminal nodes recorded yet.
 */
public NegraReader() {
	nIterations = 1;
	cIterations = 1;
	nonterminals = new TreeMap<Integer,PhraseStructureNode>();
	edgelabelTableName = new StringBuilder();
	edgelabelSymbol = new StringBuilder();
	currentHeaderTable = NegraTables.UNDEF;
}
/**
 * Closes the current stream and opens the same source again; used when the
 * input must be read in more than one pass.
 *
 * @throws MaltChainedException if neither a file name nor a URL is known
 */
private void reopen() throws MaltChainedException {
	close();
	if (fileName != null) {
		open(fileName, charsetName);
		return;
	}
	if (url != null) {
		open(url, charsetName);
		return;
	}
	throw new DataFormatException("The input stream cannot be reopen. ");
}
/**
 * Opens the given file for reading with the given character encoding.
 *
 * @throws MaltChainedException if the file does not exist
 */
public void open(String fileName, String charsetName) throws MaltChainedException {
	setCharsetName(charsetName);
	setFileName(fileName);
	final FileInputStream stream;
	try {
		stream = new FileInputStream(fileName);
	} catch (FileNotFoundException e) {
		throw new DataFormatException("The input file '"+fileName+"' cannot be found. ", e);
	}
	open(stream, charsetName);
}
/**
 * Opens the given URL for reading with the given character encoding.
 *
 * @throws MaltChainedException if the URL cannot be opened
 */
public void open(URL url, String charsetName) throws MaltChainedException {
	setCharsetName(charsetName);
	setUrl(url);
	final InputStream stream;
	try {
		stream = url.openStream();
	} catch (IOException e) {
		throw new DataFormatException("The URL '"+url.toString()+"' cannot be opened. ", e);
	}
	open(stream, charsetName);
}
/**
 * Opens the given stream for reading with the given character encoding.
 * Standard input is flagged so it will not be closed later.
 *
 * @throws MaltChainedException if the encoding is not supported
 */
public void open(InputStream is, String charsetName) throws MaltChainedException {
	if (is == System.in) {
		// Never close stdin on behalf of the caller.
		closeStream = false;
	}
	final InputStreamReader isr;
	try {
		isr = new InputStreamReader(is, charsetName);
	} catch (UnsupportedEncodingException e) {
		throw new DataFormatException("The character encoding set '"+charsetName+"' isn't supported. ", e);
	}
	open(isr);
}
// Installs a buffered reader over the given source and resets the sentence counter.
private void open(InputStreamReader isr) throws MaltChainedException {
	setReader(new BufferedReader(isr));
	setSentenceCount(0);
}
/**
 * No prolog handling is required for this input format; intentionally a no-op.
 */
public void readProlog() throws MaltChainedException {
}
public boolean readSentence(TokenStructure syntaxGraph) throws MaltChainedException {
if (syntaxGraph == null || !(syntaxGraph instanceof PhraseStructure)) {
return false;
}
syntaxGraph.clear();
final PhraseStructure phraseStructure = (PhraseStructure)syntaxGraph;
PhraseStructureNode parent = null;
PhraseStructureNode child = null;
currentHeaderTable = NegraTables.UNDEF;
String line = null;
syntaxGraph.clear();
nonterminals.clear();
try {
while (true) {
line = reader.readLine();
if (line == null) {
if (syntaxGraph.hasTokens()) {
sentenceCount++;
if (syntaxGraph instanceof MappablePhraseStructureGraph) {
((MappablePhraseStructureGraph)syntaxGraph).getMapping().updateDependenyGraph(((MappablePhraseStructureGraph)syntaxGraph), ((PhraseStructure)syntaxGraph).getPhraseStructureRoot());
}
}
if (cIterations < nIterations) {
cIterations++;
reopen();
return true;
}
return false;
} else if (line.startsWith("#EOS")) {
currentTerminalSize = 0;
currentNonTerminalSize = 0;
currentHeaderTable = NegraTables.UNDEF;
if (syntaxGraph instanceof MappablePhraseStructureGraph) {
((MappablePhraseStructureGraph)syntaxGraph).getMapping().updateDependenyGraph(((MappablePhraseStructureGraph)syntaxGraph), ((PhraseStructure)syntaxGraph).getPhraseStructureRoot());
}
return true;
} else if (line.startsWith("#BOS")) {
currentHeaderTable = NegraTables.SENTENCE;
int s = -1, e = -1;
for (int i = 5, n = line.length(); i < n; i++) {
if (Character.isDigit(line.charAt(i)) && s == -1) {
s = i;
}
if (line.charAt(i) == ' ') {
e = i;
break;
}
}
if (s != e && s != -1 && e != -1) {
phraseStructure.setSentenceID(Integer.parseInt(line.substring(s,e)));
}
sentenceCount++;
} else if (currentHeaderTable == NegraTables.SENTENCE) {
if (line.length() >= 2 && line.charAt(0) == '#' && Character.isDigit(line.charAt(1))) { // Non-terminal
Iterator<ColumnDescription> columns = dataFormatInstance.iterator();
ColumnDescription column = null;
currentNonTerminalSize++;
char[] lineChars = line.toCharArray();
int start = 0;
int secedgecounter = 0;
for (int i = 0, n = lineChars.length; i < n; i++) {
if (lineChars[i] == '\t' && start == i) {
start++;
} else if (lineChars[i] == '\t' || i == n - 1) {
if (columns.hasNext()) {
column = columns.next();
}
if (column.getPosition() == 0) {
int index = Integer.parseInt((i == n - 1)?line.substring(start+1):line.substring(start+1, i));
child = nonterminals.get(index);
if (child == null) {
if (index != 0) {
child = ((PhraseStructure)syntaxGraph).addNonTerminalNode(index-START_ID_OF_NONTERMINALS+1);
}
nonterminals.put(index,child);
}
} else if (column.getPosition() == 2 && child != null) {
syntaxGraph.addLabel(child, "CAT", (i == n - 1)?line.substring(start):line.substring(start, i));
} else if (column.getCategory() == ColumnDescription.PHRASE_STRUCTURE_EDGE_LABEL) {
edgelabelSymbol.setLength(0);
edgelabelSymbol.append((i == n - 1)?line.substring(start):line.substring(start, i));
edgelabelTableName.setLength(0);
edgelabelTableName.append(column.getName());
} else if (column.getCategory() == ColumnDescription.PHRASE_STRUCTURE_NODE_LABEL && child != null) {
int index = Integer.parseInt((i == n - 1)?line.substring(start):line.substring(start, i));
parent = nonterminals.get(index);
if (parent == null) {
if (index == 0) {
parent = phraseStructure.getPhraseStructureRoot();
} else {
parent = phraseStructure.addNonTerminalNode(index-START_ID_OF_NONTERMINALS+1);
}
nonterminals.put(index,parent);
}
Edge e = phraseStructure.addPhraseStructureEdge(parent, child);
syntaxGraph.addLabel(e, edgelabelTableName.toString(), edgelabelSymbol.toString());
} else if (column.getCategory() == ColumnDescription.SECONDARY_EDGE_LABEL && child != null) {
if (secedgecounter % 2 == 0) {
edgelabelSymbol.setLength(0);
edgelabelSymbol.append((i == n - 1)?line.substring(start):line.substring(start, i));
secedgecounter++;
} else {
int index = Integer.parseInt((i == n - 1)?line.substring(start):line.substring(start, i));
if (index == 0) {
parent = phraseStructure.getPhraseStructureRoot();
} else if (index < START_ID_OF_NONTERMINALS) {
parent = phraseStructure.getTokenNode(index);
} else {
parent = nonterminals.get(index);
if (parent == null) {
parent = phraseStructure.addNonTerminalNode(index-START_ID_OF_NONTERMINALS+1);
nonterminals.put(index,parent);
}
}
Edge e = phraseStructure.addSecondaryEdge(parent, child);
e.addLabel(column.getSymbolTable(), edgelabelSymbol.toString());
secedgecounter++;
}
}
start = i + 1;
}
}
} else { // Terminal
Iterator<ColumnDescription> columns = dataFormatInstance.iterator();
ColumnDescription column = null;
currentTerminalSize++;
child = syntaxGraph.addTokenNode(currentTerminalSize);
char[] lineChars = line.toCharArray();
int start = 0;
int secedgecounter = 0;
for (int i = 0, n = lineChars.length; i < n; i++) {
if (lineChars[i] == '\t' && start == i) {
start++;
} else if (lineChars[i] == '\t' || i == n - 1) {
if (columns.hasNext()) {
column = columns.next();
}
if (column.getCategory() == ColumnDescription.INPUT && child != null) {
syntaxGraph.addLabel(child, column.getName(), (i == n - 1)?line.substring(start):line.substring(start, i));
} else if (column.getCategory() == ColumnDescription.PHRASE_STRUCTURE_EDGE_LABEL && child != null) { // && column.getName().equals("EDGELABEL")) {
edgelabelSymbol.setLength(0);
edgelabelSymbol.append((i == n - 1)?line.substring(start):line.substring(start, i));
edgelabelTableName.setLength(0);
edgelabelTableName.append(column.getName());
} else if (column.getCategory() == ColumnDescription.PHRASE_STRUCTURE_NODE_LABEL && child != null) {
int index = Integer.parseInt((i == n - 1)?line.substring(start):line.substring(start, i));
parent = nonterminals.get(index);
if (parent == null) {
if (index == 0) {
parent = phraseStructure.getPhraseStructureRoot();
} else {
parent = phraseStructure.addNonTerminalNode(index-START_ID_OF_NONTERMINALS+1);
}
nonterminals.put(index,parent);
}
Edge e = phraseStructure.addPhraseStructureEdge(parent, child);
syntaxGraph.addLabel(e, edgelabelTableName.toString(), edgelabelSymbol.toString());
} else if (column.getCategory() == ColumnDescription.SECONDARY_EDGE_LABEL && child != null) {
if (secedgecounter % 2 == 0) {
edgelabelSymbol.setLength(0);
edgelabelSymbol.append((i == n - 1)?line.substring(start):line.substring(start, i));
secedgecounter++;
} else {
int index = Integer.parseInt((i == n - 1)?line.substring(start):line.substring(start, i));
if (index == 0) {
parent = phraseStructure.getPhraseStructureRoot();
} else if (index < START_ID_OF_NONTERMINALS) {
parent = phraseStructure.getTokenNode(index);
} else {
parent = nonterminals.get(index);
if (parent == null) {
parent = phraseStructure.addNonTerminalNode(index-START_ID_OF_NONTERMINALS+1);
nonterminals.put(index,parent);
}
}
Edge e = phraseStructure.addSecondaryEdge(parent, child);
e.addLabel(column.getSymbolTable(), edgelabelSymbol.toString());
secedgecounter++;
}
}
start = i + 1;
}
}
}
} else if (line.startsWith("%%")) { // comment skip
} else if (line.startsWith("#FORMAT")) {
// int index = line.indexOf(' ');
// if (index > -1) {
// try {
// formatVersion = Integer.parseInt(line.substring(index+1));
// } catch (NumberFormatException e) {
//
// }
// }
} else if (line.startsWith("#BOT")) {
// int index = line.indexOf(' ');
// if (index > -1) {
// if (line.substring(index+1).equals("ORIGIN")) {
// currentHeaderTable = NegraTables.ORIGIN;
// } else if (line.substring(index+1).equals("EDITOR")) {
// currentHeaderTable = NegraTables.EDITOR;
// } else if (line.substring(index+1).equals("WORDTAG")) {
// currentHeaderTable = NegraTables.WORDTAG;
// } else if (line.substring(index+1).equals("MORPHTAG")) {
// currentHeaderTable = NegraTables.MORPHTAG;
// } else if (line.substring(index+1).equals("NODETAG")) {
// currentHeaderTable = NegraTables.NODETAG;
// } else if (line.substring(index+1).equals("EDGETAG")) {
// currentHeaderTable = NegraTables.EDGETAG;
// } else if (line.substring(index+1).equals("SECEDGETAG")) {
// currentHeaderTable = NegraTables.SECEDGETAG;
// } else {
// currentHeaderTable = NegraTables.UNDEF;
// }
// }
} else if (line.startsWith("#EOT")) {
currentHeaderTable = NegraTables.UNDEF;
}
}
} catch (IOException e) {
throw new DataFormatException("Error when reading from the input file. ", e);
}
}
/** The Negra export format has no epilog section, so this is intentionally a no-op. */
public void readEpilog() throws MaltChainedException {
}
/** @return the reader the sentences are read from */
public BufferedReader getReader() {
return reader;
}
/** Sets the reader the sentences are read from. */
public void setReader(BufferedReader reader) {
this.reader = reader;
}
/** @return the number of sentences read so far */
public int getSentenceCount() {
return sentenceCount;
}
public void setSentenceCount(int sentenceCount) {
this.sentenceCount = sentenceCount;
}
/** @return the Negra export format version declared by the input */
public int getFormatVersion() {
return formatVersion;
}
public void setFormatVersion(int formatVersion) {
this.formatVersion = formatVersion;
}
/** @return the data format instance describing the input columns */
public DataFormatInstance getDataFormatInstance() {
return dataFormatInstance;
}
public void setDataFormatInstance(DataFormatInstance inputDataFormatInstance) {
this.dataFormatInstance = inputDataFormatInstance;
}
/** @return the raw option string passed to {@link #setOptions(String)} */
public String getOptions() {
return optionString;
}
/**
 * Parses the reader option string (tokens separated by '_' or blanks) as
 * flag/value pairs. The only recognized flag is -s, which sets the first
 * identifier used for nonterminal nodes (START_ID_OF_NONTERMINALS).
 *
 * Note: the original error messages referred to the "penn writer" and the
 * "TigerXML Reader"; they have been corrected to name this Negra reader.
 *
 * @param optionString the option string, e.g. "-s 500"
 * @throws MaltChainedException if the options cannot be split or a value is not an integer
 */
public void setOptions(String optionString) throws MaltChainedException {
    this.optionString = optionString;
    String[] argv;
    try {
        argv = optionString.split("[_\\p{Blank}]");
    } catch (PatternSyntaxException e) {
        throw new DataFormatException("Could not split the Negra reader option '"+optionString+"'. ", e);
    }
    for (int i=0; i < argv.length-1; i++) {
        if(argv[i].charAt(0) != '-') {
            throw new DataFormatException("The argument flag should start with the following character '-', not with "+argv[i].charAt(0));
        }
        // Advance to the flag's value. The loop bound (i < argv.length-1)
        // guarantees the value exists, so this check is a defensive no-op.
        if(++i>=argv.length) {
            throw new DataFormatException("The last argument does not have any value. ");
        }
        switch(argv[i-1].charAt(1)) {
        case 's':
            try {
                START_ID_OF_NONTERMINALS = Integer.parseInt(argv[i]);
            } catch (NumberFormatException e){
                throw new MaltChainedException("The NegraReader option -s must be an integer value. ");
            }
            break;
        default:
            throw new DataFormatException("Unknown NegraReader parameter: '"+argv[i-1]+"' with value '"+argv[i]+"'. ");
        }
    }
}
/** @return the name of the input file, if reading from a file */
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
/** @return the URL of the input source, if reading from a URL */
public URL getUrl() {
return url;
}
public void setUrl(URL url) {
this.url = url;
}
/** @return the character encoding used to decode the input */
public String getCharsetName() {
return charsetName;
}
public void setCharsetName(String charsetName) {
this.charsetName = charsetName;
}
/** @return the total number of passes to make over the input */
public int getNIterations() {
return nIterations;
}
public void setNIterations(int iterations) {
nIterations = iterations;
}
/** @return the number of passes over the input completed so far */
public int getIterationCounter() {
return cIterations;
}
/**
 * Releases the input source. The underlying stream is closed only when this
 * reader owns it (closeStream is true); on success the reference is dropped
 * so the reader cannot be reused accidentally.
 *
 * @throws MaltChainedException if closing the underlying stream fails
 */
public void close() throws MaltChainedException {
    if (reader == null) {
        return;
    }
    try {
        if (closeStream) {
            reader.close();
        }
        reader = null;
    } catch (IOException e) {
        throw new DataFormatException("Error when closing the input file.", e);
    }
}
}
| apache-2.0 |
objectiser/camel | examples/camel-example-transformer-cdi/src/main/java/org/apache/camel/example/transformer/cdi/MyRoutes.java | 1699 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.example.transformer.cdi;
import org.apache.camel.builder.RouteBuilder;
/**
* Configures all our Camel routes, components, endpoints and beans
*/
public class MyRoutes extends RouteBuilder {
@Override
public void configure() {
// Declare a transformer that converts xml:MyRequest payloads into
// xml:MyResponse by running them through the transform.xsl stylesheet.
transformer()
.fromType("xml:MyRequest")
.toType("xml:MyResponse")
.withUri("xslt:transform.xsl");
// Every 5 seconds build a MyRequest document and send it to direct:a;
// the transformer declared above converts it en route, so the reply
// logged here is already a MyResponse.
from("timer:foo?period=5000").id("timer-route")
.log("start -->")
.setBody(constant("<MyRequest>foobar</MyRequest>"))
.log("--> Sending:[${body}]")
.to("direct:a")
.log("--> Received:[${body}]")
.log("<-- end");
// Declaring input/output types on this route is what triggers the
// data-type transformation for messages arriving on direct:a.
from("direct:a").id("xslt-route")
.inputType("xml:MyRequest")
.outputType("xml:MyResponse")
.log("----> Received:[${body}]");
}
}
| apache-2.0 |
jludvice/fabric8 | fabric/fabric-zookeeper-commands/src/main/java/io/fabric8/zookeeper/commands/Create.java | 2701 | /**
* Copyright 2005-2016 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.zookeeper.commands;
import io.fabric8.boot.commands.support.AbstractCommandComponent;
import io.fabric8.commands.support.ZNodeCompleter;
import io.fabric8.zookeeper.curator.CuratorFrameworkLocator;
import org.apache.curator.framework.CuratorFramework;
import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.basic.AbstractCommand;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.felix.service.command.Function;
@Component(immediate = true)
@Service({Function.class, AbstractCommand.class})
@org.apache.felix.scr.annotations.Properties({
@Property(name = "osgi.command.scope", value = Create.SCOPE_VALUE),
@Property(name = "osgi.command.function", value = Create.FUNCTION_VALUE)
})
// Implements the "zk:create" console command, which creates a ZooKeeper znode.
public class Create extends AbstractCommandComponent {
public static final String SCOPE_VALUE = "zk";
public static final String FUNCTION_VALUE = "create";
public static final String DESCRIPTION = "Create a znode";
// Completers
@Reference(referenceInterface = ZNodeCompleter.class, bind = "bindZnodeCompleter", unbind = "unbindZnodeCompleter")
private ZNodeCompleter zNodeCompleter; // dummy field
// SCR lifecycle: mark the component valid once its dependencies are bound.
@Activate
void activate() {
activateComponent();
}
@Deactivate
void deactivate() {
deactivateComponent();
}
// A fresh Action is created per command invocation, so no state is shared
// between executions of the command.
@Override
public Action createNewAction() {
assertValid();
// this is how we get hold of the curator framework
CuratorFramework curator = CuratorFrameworkLocator.getCuratorFramework();
return new CreateAction(curator);
}
void bindZnodeCompleter(ZNodeCompleter completer) {
bindCompleter(completer);
}
void unbindZnodeCompleter(ZNodeCompleter completer) {
unbindCompleter(completer);
}
}
| apache-2.0 |
gastaldi/wildfly-swarm | fractions/javaee/ejb/src/main/java/org/wildfly/swarm/ejb/detect/EJBPackageDetector.java | 974 | /**
* Copyright 2017 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.ejb.detect;
import org.wildfly.swarm.spi.meta.PackageFractionDetector;
/**
 * Detects usage of the EJB API by looking for any reference to the
 * {@code javax.ejb} package, and maps such usage to the "ejb" fraction.
 *
 * @author Ken Finnigan
 */
public class EJBPackageDetector extends PackageFractionDetector {

    /** The fraction artifact this detector resolves to. */
    private static final String EJB_ARTIFACT = "ejb";

    public EJBPackageDetector() {
        anyPackageOf("javax.ejb");
    }

    @Override
    public String artifactId() {
        return EJB_ARTIFACT;
    }
}
| apache-2.0 |
apache/oodt | resource/src/main/java/org/apache/oodt/cas/resource/mux/BackendManager.java | 2489 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.resource.mux;
import java.util.List;
import org.apache.oodt.cas.resource.batchmgr.Batchmgr;
import org.apache.oodt.cas.resource.monitor.Monitor;
import org.apache.oodt.cas.resource.scheduler.Scheduler;
import org.apache.oodt.cas.resource.structs.exceptions.QueueManagerException;
/**
 * Manages per-queue backend sets, where each set consists of the monitor,
 * batch manager and scheduler that serve one resource queue.
 *
 * @author starchmd
 */
public interface BackendManager {
/**
 * Add in a backend set to this manager.
 * @param queue - queue that maps to the given monitor, batchmgr, and scheduler
 * @param monitor - monitor used for this set
 * @param batchmgr - batch manager for this set
 * @param scheduler - scheduler for this set
 */
void addSet(String queue, Monitor monitor, Batchmgr batchmgr, Scheduler scheduler);
/**
 * Return monitor for the given queue.
 * @param queue - queue to check
 * @return the monitor registered for the queue
 * @throws QueueManagerException when queue does not exist
 */
Monitor getMonitor(String queue) throws QueueManagerException;
/**
 * Return batch manager for the given queue.
 * @param queue - queue to check
 * @return the batch manager registered for the queue
 * @throws QueueManagerException when queue does not exist
 */
Batchmgr getBatchmgr(String queue) throws QueueManagerException;
/**
 * Return scheduler for the given queue.
 * @param queue - queue to check
 * @return the scheduler registered for the queue
 * @throws QueueManagerException when queue does not exist
 */
Scheduler getScheduler(String queue) throws QueueManagerException;
/**
 * Return a list of all monitors.
 * @return list of all registered monitors, across every queue
 */
List<Monitor> getMonitors();
}
| apache-2.0 |
zhenyuy-fb/airlift | http-client/src/main/java/io/airlift/http/client/spnego/SpnegoAuthenticationStore.java | 2775 | package io.airlift.http.client.spnego;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import io.airlift.units.Duration;
import org.eclipse.jetty.client.api.Authentication;
import org.eclipse.jetty.client.api.AuthenticationStore;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import static java.util.Objects.requireNonNull;
/**
 * An {@link AuthenticationStore} fixed to a single pre-configured
 * {@link SpnegoAuthentication}. Authentications cannot be added, removed or
 * cleared; only authentication results are mutable, and those are kept in a
 * bounded, time-expiring cache keyed by normalized URI.
 */
public class SpnegoAuthenticationStore
        implements AuthenticationStore
{
    private static final int CACHE_SIZE = 10000;
    private static final Duration CACHE_EXPIRE_TIME = new Duration(5, TimeUnit.MINUTES);
    private static final int CONCURRENCY_LEVEL = 16;

    private final SpnegoAuthentication authentication;
    private final Cache<URI, Authentication.Result> results;

    public SpnegoAuthenticationStore(SpnegoAuthentication authentication)
    {
        this.authentication = requireNonNull(authentication, "authentication is null");
        this.results = CacheBuilder.newBuilder()
                .concurrencyLevel(CONCURRENCY_LEVEL)
                .maximumSize(CACHE_SIZE)
                .expireAfterWrite(CACHE_EXPIRE_TIME.roundTo(TimeUnit.MINUTES), TimeUnit.MINUTES)
                .build();
    }

    /** The single authentication is fixed at construction time. */
    @Override
    public void addAuthentication(Authentication authentication)
    {
        throw new UnsupportedOperationException("addAuthentication is not supported");
    }

    @Override
    public void removeAuthentication(Authentication authentication)
    {
        throw new UnsupportedOperationException("removeAuthentication is not supported");
    }

    @Override
    public void clearAuthentications()
    {
        throw new UnsupportedOperationException("clearAuthentications is not supported");
    }

    @Override
    public Authentication findAuthentication(String type, URI uri, String realm)
    {
        return authentication.matches(type, uri, realm) ? authentication : null;
    }

    @Override
    public void addAuthenticationResult(Authentication.Result result)
    {
        results.put(UriUtil.normalizedUri(result.getURI()), result);
    }

    @Override
    public void removeAuthenticationResult(Authentication.Result result)
    {
        results.invalidate(UriUtil.normalizedUri(result.getURI()));
    }

    @Override
    public void clearAuthenticationResults()
    {
        results.invalidateAll();
    }

    @Override
    public Authentication.Result findAuthenticationResult(URI uri)
    {
        requireNonNull(uri, "uri is null");
        if (!"https".equalsIgnoreCase(uri.getScheme())) {
            return null;
        }
        // TODO: match the longest URI based on Trie for fine grained control
        return results.getIfPresent(UriUtil.normalizedUri(uri));
    }
}
| apache-2.0 |
ifunny/SmoothProgressBar | library-circular/src/main/java/fr.castorflex.android.circularprogressbar/PowerSaveModeDelegate.java | 1396 | package fr.castorflex.android.circularprogressbar;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.os.SystemClock;
import android.support.annotation.NonNull;
import java.util.concurrent.TimeUnit;
/**
* Created by castorflex on 9/12/15.
*/
// Power-save drawing delegate: instead of a continuous animation, the arc is
// redrawn once per second, advancing by a fixed step each tick.
public class PowerSaveModeDelegate implements PBDelegate {
// Redraw period: one step per second.
private static final long REFRESH_RATE = TimeUnit.SECONDS.toMillis(1L);
private final CircularProgressDrawable mParent;
// Current start angle of the arc, in degrees [0, 360).
private int mCurrentRotation;
public PowerSaveModeDelegate(@NonNull CircularProgressDrawable parent) {
mParent = parent;
}
// Draws a fixed 300-degree arc starting at the current rotation.
@Override
public void draw(Canvas canvas, Paint paint) {
canvas.drawArc(mParent.getDrawableBounds(), mCurrentRotation, 300, false, paint);
}
@Override
public void start() {
mParent.invalidate();
// Schedule the first rotation step one refresh period from now.
mParent.scheduleSelf(mRunnable, SystemClock.uptimeMillis() + REFRESH_RATE);
}
@Override
public void stop() {
mParent.unscheduleSelf(mRunnable);
}
// In power-save mode there is no progressive stop animation; stop at once.
// NOTE(review): the OnEndListener is never invoked here — confirm callers tolerate that.
@Override
public void progressiveStop(CircularProgressDrawable.OnEndListener listener) {
mParent.stop();
}
// Tick: advance the arc by 50 degrees, reschedule while still running, and
// request a redraw.
private final Runnable mRunnable = new Runnable() {
@Override
public void run() {
mCurrentRotation += 50;
mCurrentRotation %= 360;
if (mParent.isRunning())
mParent.scheduleSelf(this, SystemClock.uptimeMillis() + REFRESH_RATE);
mParent.invalidate();
}
};
}
| apache-2.0 |
prabushi/devstudio-tooling-esb | plugins/org.wso2.developerstudio.visualdatamapper.diagram/src/org/wso2/developerstudio/datamapper/diagram/providers/assistants/DataMapperModelingAssistantProviderOfDataMapperRootEditPart.java | 2706 | package org.wso2.developerstudio.datamapper.diagram.providers.assistants;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.wso2.developerstudio.datamapper.diagram.providers.DataMapperElementTypes;
import org.wso2.developerstudio.datamapper.diagram.providers.DataMapperModelingAssistantProvider;
/**
* @generated
*/
public class DataMapperModelingAssistantProviderOfDataMapperRootEditPart extends DataMapperModelingAssistantProvider {
/**
 * Returns every element type that can be created from the popup bar of the
 * data-mapper root edit part (inputs, outputs and all operator nodes).
 * NOTE: this class is tool-generated; manual edits may be overwritten.
 * @generated
 */
@Override
public List<IElementType> getTypesForPopupBar(IAdaptable host) {
// Initial capacity matches the number of entries added below.
List<IElementType> types = new ArrayList<IElementType>(39);
types.add(DataMapperElementTypes.Input_2002);
types.add(DataMapperElementTypes.Output_2003);
types.add(DataMapperElementTypes.Equal_2005);
types.add(DataMapperElementTypes.Subtract_2013);
types.add(DataMapperElementTypes.Concat_2006);
types.add(DataMapperElementTypes.Add_2012);
types.add(DataMapperElementTypes.Split_2007);
types.add(DataMapperElementTypes.Constant_2008);
types.add(DataMapperElementTypes.LowerCase_2009);
types.add(DataMapperElementTypes.Contains_2010);
types.add(DataMapperElementTypes.UpperCase_2011);
types.add(DataMapperElementTypes.Multiply_2014);
types.add(DataMapperElementTypes.Divide_2015);
types.add(DataMapperElementTypes.Celi_2016);
types.add(DataMapperElementTypes.Floor_2017);
types.add(DataMapperElementTypes.Round_2018);
types.add(DataMapperElementTypes.SetPrecision_2019);
types.add(DataMapperElementTypes.AbsoluteValue_2020);
types.add(DataMapperElementTypes.StringLength_2021);
types.add(DataMapperElementTypes.StartsWith_2022);
types.add(DataMapperElementTypes.EndsWith_2023);
types.add(DataMapperElementTypes.Substring_2024);
types.add(DataMapperElementTypes.IfElse_2025);
types.add(DataMapperElementTypes.AND_2026);
types.add(DataMapperElementTypes.OR_2027);
types.add(DataMapperElementTypes.NOT_2028);
types.add(DataMapperElementTypes.Trim_2029);
types.add(DataMapperElementTypes.Replace_2030);
types.add(DataMapperElementTypes.Match_2031);
types.add(DataMapperElementTypes.Min_2032);
types.add(DataMapperElementTypes.Max_2033);
types.add(DataMapperElementTypes.CustomFunction_2034);
types.add(DataMapperElementTypes.Properties_2035);
types.add(DataMapperElementTypes.Compare_2036);
types.add(DataMapperElementTypes.StringToNumber_2037);
types.add(DataMapperElementTypes.StringToBoolean_2038);
types.add(DataMapperElementTypes.Clone_2039);
types.add(DataMapperElementTypes.ToString_2040);
types.add(DataMapperElementTypes.GlobalVariable_2041);
return types;
}
}
| apache-2.0 |
reactualize/Force.com-Toolkit-for-Android | SampleApp/src/com/sforce/android/sample/SforceOAuthLogin.java | 2252 | package com.sforce.android.sample;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import com.sforce.android.soap.partner.BaseResponseListener;
import com.sforce.android.soap.partner.OAuthConnectorConfig;
import com.sforce.android.soap.partner.OAuthLoginResult;
import com.sforce.android.soap.partner.Salesforce;
import com.sforce.android.soap.partner.fault.ApiFault;
import com.sforce.android.soap.partner.fault.ExceptionCode;
// Login screen that drives the Salesforce OAuth flow: a single button starts
// the flow, and LoginResponseListener receives the asynchronous outcome.
public class SforceOAuthLogin extends Activity implements OnClickListener{
// OAuth consumer key and callback URL, loaded from string resources in onCreate.
String consumerKey=null;
String callbackUrl=null;
Button loginButton;
Context context;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.oauth_login_layout);
setTitle("Sforce Toolkit Demo - OAuth Login");
loginButton = (Button)this.findViewById(R.id.loginButton);
loginButton.setOnClickListener(this);
context=getApplicationContext();
consumerKey=(this.getResources().getString(R.string.consumerKey).toString());
callbackUrl=(this.getResources().getString(R.string.callbackUrl).toString());
Salesforce.init(context);
}
/** Starts the OAuth login flow when the login button is clicked. */
public void onClick(View v) {
OAuthConnectorConfig parameters=new OAuthConnectorConfig(consumerKey, callbackUrl);
try {
Salesforce.loginOAuth(this, parameters, new LoginResponseListener());
} catch (Exception e){
// NOTE(review): failures are only printed to the log; consider surfacing
// an error to the user.
e.printStackTrace();
}
}
/** Receives the result of the asynchronous OAuth login. */
public class LoginResponseListener extends BaseResponseListener{
// On success: finish this activity with RESULT_OK.
@Override
public void onComplete(Object sObjects) {
OAuthLoginResult result = (OAuthLoginResult) sObjects;
String id = result.getUserId(); // NOTE(review): value is unused — remove or use it.
setResult(RESULT_OK);
finish();
}
@Override
public void onSforceError(ApiFault apiFault){
String msg = apiFault.getExceptionMessage(); // NOTE(review): value is unused.
String code = apiFault.getExceptionCode().getValue();
if (code.equals(ExceptionCode._ACCESS_DENIED))
{
System.out.println("User didn't grant access");
}
}
// NOTE(review): transport-level exceptions are silently ignored here.
@Override
public void onException(Exception e){
}
}
}
| bsd-3-clause |
bhav0904/eclipse-collections | eclipse-collections/src/main/java/org/eclipse/collections/impl/lazy/parallel/set/AbstractParallelUnsortedSetIterable.java | 4263 | /*
* Copyright (c) 2016 Goldman Sachs.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v. 1.0 which accompany this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*/
package org.eclipse.collections.impl.lazy.parallel.set;
import org.eclipse.collections.api.ParallelIterable;
import org.eclipse.collections.api.annotation.Beta;
import org.eclipse.collections.api.block.function.Function;
import org.eclipse.collections.api.block.function.Function2;
import org.eclipse.collections.api.block.predicate.Predicate;
import org.eclipse.collections.api.block.predicate.Predicate2;
import org.eclipse.collections.api.multimap.set.MutableSetMultimap;
import org.eclipse.collections.api.multimap.set.UnsortedSetMultimap;
import org.eclipse.collections.api.set.ParallelUnsortedSetIterable;
import org.eclipse.collections.impl.block.factory.Functions;
import org.eclipse.collections.impl.block.factory.Predicates;
import org.eclipse.collections.impl.lazy.parallel.AbstractParallelIterable;
import org.eclipse.collections.impl.multimap.set.SynchronizedPutUnifiedSetMultimap;
@Beta
public abstract class AbstractParallelUnsortedSetIterable<T, B extends UnsortedSetBatch<T>> extends AbstractParallelIterable<T, B> implements ParallelUnsortedSetIterable<T>
{
// Unsorted sets have no defined iteration order.
@Override
protected boolean isOrdered()
{
return false;
}
// A set already contains unique elements, so it can be returned unchanged.
@Override
public ParallelUnsortedSetIterable<T> asUnique()
{
return this;
}
@Override
public ParallelUnsortedSetIterable<T> select(Predicate<? super T> predicate)
{
return new ParallelSelectUnsortedSetIterable<>(this, predicate);
}
// selectWith/rejectWith/selectInstancesOf/reject are all expressed in terms
// of select() with an adapted predicate.
@Override
public <P> ParallelUnsortedSetIterable<T> selectWith(Predicate2<? super T, ? super P> predicate, P parameter)
{
return this.select(Predicates.bind(predicate, parameter));
}
@Override
public <S> ParallelUnsortedSetIterable<S> selectInstancesOf(Class<S> clazz)
{
return (ParallelUnsortedSetIterable<S>) this.select(Predicates.instanceOf(clazz));
}
@Override
public ParallelUnsortedSetIterable<T> reject(Predicate<? super T> predicate)
{
return this.select(Predicates.not(predicate));
}
@Override
public <P> ParallelUnsortedSetIterable<T> rejectWith(Predicate2<? super T, ? super P> predicate, P parameter)
{
return this.reject(Predicates.bind(predicate, parameter));
}
// collect may produce duplicates, so the result is a plain ParallelIterable,
// not a set iterable.
@Override
public <V> ParallelIterable<V> collect(Function<? super T, ? extends V> function)
{
return new ParallelCollectIterable<>(this, function);
}
@Override
public <P, V> ParallelIterable<V> collectWith(Function2<? super T, ? super P, ? extends V> function, P parameter)
{
return this.collect(Functions.bind(function, parameter));
}
@Override
public <V> ParallelIterable<V> collectIf(Predicate<? super T> predicate, Function<? super T, ? extends V> function)
{
return this.select(predicate).collect(function);
}
@Override
public <V> ParallelIterable<V> flatCollect(Function<? super T, ? extends Iterable<V>> function)
{
return new ParallelFlatCollectIterable<>(this, function);
}
// groupBy/groupByEach accumulate into a synchronized-put multimap so that
// forEach tasks can publish entries safely when run concurrently.
@Override
public <V> UnsortedSetMultimap<V, T> groupBy(Function<? super T, ? extends V> function)
{
MutableSetMultimap<V, T> result = SynchronizedPutUnifiedSetMultimap.newMultimap();
this.forEach(each -> {
V key = function.valueOf(each);
result.put(key, each);
});
return result;
}
@Override
public <V> UnsortedSetMultimap<V, T> groupByEach(Function<? super T, ? extends Iterable<V>> function)
{
MutableSetMultimap<V, T> result = SynchronizedPutUnifiedSetMultimap.newMultimap();
this.forEach(each -> {
Iterable<V> keys = function.valueOf(each);
for (V key : keys)
{
result.put(key, each);
}
});
return result;
}
}
| bsd-3-clause |
brunyuriy/quick-fix-scout | org.eclipse.jdt.ui_3.7.1.r371_v20110824-0800/src/org/eclipse/jdt/ui/actions/NavigateActionGroup.java | 4070 | /*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.ui.actions;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.viewers.ISelectionProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbenchPartSite;
import org.eclipse.ui.actions.ActionContext;
import org.eclipse.ui.actions.ActionGroup;
/**
* Action group that adds the open and show actions to a context menu and
* the action bar's navigate menu. This action group reuses the <code>
* OpenEditorActionGroup</code> and <code>OpenViewActionGroup</code>.
*
* <p>
* This class may be instantiated; it is not intended to be subclassed.
* </p>
*
* @since 2.0
*
* @noextend This class is not intended to be subclassed by clients.
*/
public class NavigateActionGroup extends ActionGroup {
// Delegate groups: this group only composes and forwards to these two.
private OpenEditorActionGroup fOpenEditorActionGroup;
private OpenViewActionGroup fOpenViewActionGroup;
	/**
	 * Creates a new <code>NavigateActionGroup</code>. The group requires
	 * that the selection provided by the part's selection provider is of type <code>
	 * org.eclipse.jface.viewers.IStructuredSelection</code>.
	 *
	 * @param part the view part that owns this action group
	 */
public NavigateActionGroup(IViewPart part) {
fOpenEditorActionGroup= new OpenEditorActionGroup(part);
fOpenViewActionGroup= new OpenViewActionGroup(part);
}
	/**
	 * Creates a new <code>NavigateActionGroup</code>. The group requires
	 * that the selection provided by the given selection provider is of type
	 * {@link IStructuredSelection}.
	 *
	 * @param site the site that will own the action group.
	 * @param specialSelectionProvider the selection provider used instead of the
	 *  sites selection provider.
	 *
	 * @since 3.4
	 */
public NavigateActionGroup(IWorkbenchPartSite site, ISelectionProvider specialSelectionProvider) {
fOpenEditorActionGroup= new OpenEditorActionGroup(site, specialSelectionProvider);
fOpenViewActionGroup= new OpenViewActionGroup(site, specialSelectionProvider);
}
	/**
	 * Returns the open action managed by this action group.
	 *
	 * @return the open action. Returns <code>null</code> if the group
	 * 	doesn't provide any open action
	 */
public IAction getOpenAction() {
return fOpenEditorActionGroup.getOpenAction();
}
/* (non-Javadoc)
 * Method declared in ActionGroup
 */
// Each lifecycle method below calls super first, then forwards to both
// delegate groups in the same fixed order.
@Override
public void dispose() {
super.dispose();
fOpenEditorActionGroup.dispose();
fOpenViewActionGroup.dispose();
}
/* (non-Javadoc)
 * Method declared in ActionGroup
 */
@Override
public void fillActionBars(IActionBars actionBars) {
super.fillActionBars(actionBars);
fOpenEditorActionGroup.fillActionBars(actionBars);
fOpenViewActionGroup.fillActionBars(actionBars);
}
/* (non-Javadoc)
 * Method declared in ActionGroup
 */
@Override
public void fillContextMenu(IMenuManager menu) {
super.fillContextMenu(menu);
fOpenEditorActionGroup.fillContextMenu(menu);
fOpenViewActionGroup.fillContextMenu(menu);
}
/* (non-Javadoc)
 * Method declared in ActionGroup
 */
@Override
public void setContext(ActionContext context) {
super.setContext(context);
fOpenEditorActionGroup.setContext(context);
fOpenViewActionGroup.setContext(context);
}
/* (non-Javadoc)
 * Method declared in ActionGroup
 */
@Override
public void updateActionBars() {
super.updateActionBars();
fOpenEditorActionGroup.updateActionBars();
fOpenViewActionGroup.updateActionBars();
}
}
| mit |
bpowers/doppio | classes/special_test/StdIn.java | 482 | package classes.special_test;
import java.io.IOException;
public class StdIn {
    /**
     * Prompts three times for a character on standard input and echoes each
     * one back; read failures are reported but do not abort the loop.
     */
    public static void main(String[] args) {
        for (int remaining = 2; remaining >= 0; remaining--) {
            System.out.println("Enter a Character (" + remaining + " left):");
            try {
                int raw = System.in.read();
                System.out.print("You entered ");
                System.out.println((char) raw);
            } catch (IOException e) {
                System.out.println("Error reading from user");
            }
        }
    }
}
| mit |
MarAvFe/Footballer | mysql-connector-java-5.1.37/src/com/mysql/jdbc/LoadBalancedConnection.java | 1420 | /*
Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
The MySQL Connector/J is licensed under the terms of the GPLv2
<http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most MySQL Connectors.
There are special exceptions to the terms and conditions of the GPLv2 as it is applied to
this software, see the FOSS License Exception
<http://www.mysql.com/about/legal/licensing/foss-exception.html>.
This program is free software; you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation; version 2
of the License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this
program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth
Floor, Boston, MA 02110-1301 USA
*/
package com.mysql.jdbc;
import java.sql.SQLException;
/**
 * A MySQL connection whose work is spread over a set of load-balanced hosts,
 * allowing the host list to be modified while the connection is in use.
 */
public interface LoadBalancedConnection extends MySQLConnection {

    // NOTE(review): the meaning of the boolean result is not visible here —
    // presumably whether the host was newly added; confirm against the implementation.
    public boolean addHost(String host) throws SQLException;

    /** Removes the given host from the load-balanced host list. */
    public void removeHost(String host) throws SQLException;

    /** Removes the given host, deferring removal until it is no longer in use. */
    public void removeHostWhenNotInUse(String host) throws SQLException;
}
| mit |
brunyuriy/quick-fix-scout | org.eclipse.jdt.ui_3.7.1.r371_v20110824-0800/src/org/eclipse/jdt/internal/ui/preferences/cleanup/CleanUpProfileVersioner.java | 3278 | /*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.preferences.cleanup;
import java.util.Iterator;
import java.util.Map;
import org.eclipse.jdt.internal.corext.fix.CleanUpConstants;
import org.eclipse.jdt.ui.cleanup.CleanUpOptions;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.preferences.formatter.IProfileVersioner;
import org.eclipse.jdt.internal.ui.preferences.formatter.ProfileManager.CustomProfile;
public class CleanUpProfileVersioner implements IProfileVersioner {
public static final String PROFILE_KIND= "CleanUpProfile"; //$NON-NLS-1$
private static final int VERSION_1= 1; // 3.3M2
private static final int VERSION_2= 2; // 3.3M3 Added ORGANIZE_IMPORTS
public static final int CURRENT_VERSION= VERSION_2;
/* (non-Javadoc)
* @see org.eclipse.jdt.internal.ui.preferences.cleanup.IProfileVersioner#getFirstVersion()
*/
public int getFirstVersion() {
return VERSION_1;
}
/* (non-Javadoc)
* @see org.eclipse.jdt.internal.ui.preferences.cleanup.IProfileVersioner#getCurrentVersion()
*/
public int getCurrentVersion() {
return CURRENT_VERSION;
}
/* (non-Javadoc)
* @see org.eclipse.jdt.internal.ui.preferences.cleanup.IProfileVersioner#updateAndComplete(org.eclipse.jdt.internal.ui.preferences.cleanup.ProfileManager.CustomProfile)
*/
public void update(CustomProfile profile) {
final Map<String, String> oldSettings= profile.getSettings();
Map<String, String> newSettings= updateAndComplete(oldSettings, profile.getVersion());
profile.setVersion(CURRENT_VERSION);
profile.setSettings(newSettings);
}
private Map<String, String> updateAndComplete(Map<String, String> oldSettings, int version) {
final Map<String, String> newSettings= JavaPlugin.getDefault().getCleanUpRegistry().getDefaultOptions(CleanUpConstants.DEFAULT_CLEAN_UP_OPTIONS).getMap();
switch (version) {
case VERSION_1:
updateFrom1To2(oldSettings);
//$FALL-THROUGH$
default:
for (final Iterator<String> iter= oldSettings.keySet().iterator(); iter.hasNext();) {
final String key= iter.next();
if (!newSettings.containsKey(key))
continue;
final String value= oldSettings.get(key);
if (value != null) {
newSettings.put(key, value);
}
}
}
return newSettings;
}
/**
* {@inheritDoc}
*/
public String getProfileKind() {
return PROFILE_KIND;
}
private static void updateFrom1To2(Map<String, String> settings) {
CleanUpOptions defaultSettings= JavaPlugin.getDefault().getCleanUpRegistry().getDefaultOptions(CleanUpConstants.DEFAULT_CLEAN_UP_OPTIONS);
settings.put(CleanUpConstants.ORGANIZE_IMPORTS, defaultSettings.getValue(CleanUpConstants.ORGANIZE_IMPORTS));
}
}
| mit |
gcc2ge/ethereumj | ethereumj-core/src/test/java/org/ethereum/core/BloomTest.java | 1992 | package org.ethereum.core;
import org.ethereum.crypto.HashUtil;
import org.junit.Assert;
import org.junit.Test;
import org.spongycastle.util.encoders.Hex;
/**
* @author Roman Mandeleil
* @since 20.11.2014
*/
public class BloomTest {

    // Builds a bloom from the hashes of a log address and a topic, ORs them
    // together, and checks: the combined filter's exact bit pattern, that it
    // matches both component blooms, and that it rejects unrelated inputs.
    @Test /// based on http://bit.ly/1MtXxFg
    public void test1(){

        byte[] address = Hex.decode("095e7baea6a6c7c4c2dfeb977efac326af552d87");
        Bloom addressBloom = Bloom.create(HashUtil.sha3(address));

        byte[] topic = Hex.decode("0000000000000000000000000000000000000000000000000000000000000000");
        Bloom topicBloom = Bloom.create(HashUtil.sha3(topic));

        // Union of the two filters; matching is subset-of-bits, so the union
        // must match each part.
        Bloom totalBloom = new Bloom();
        totalBloom.or(addressBloom);
        totalBloom.or(topicBloom);

        Assert.assertEquals(
                "00000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000040000000000000000000000000000000000000000000000000000000",
                totalBloom.toString()
        );

        Assert.assertTrue(totalBloom.matches(addressBloom));
        Assert.assertTrue(totalBloom.matches(topicBloom));
        // Near-miss inputs (flipped leading nibble) must not match.
        Assert.assertFalse(totalBloom.matches(Bloom.create(HashUtil.sha3(Hex.decode("1000000000000000000000000000000000000000000000000000000000000000")))));
        Assert.assertFalse(totalBloom.matches(Bloom.create(HashUtil.sha3(Hex.decode("195e7baea6a6c7c4c2dfeb977efac326af552d87")))));
    }

    @Test
    public void test2() {
        // todo: more testing
    }

    @Test
    public void test3() {
        // todo: more testing
    }

    @Test
    public void test4() {
        // todo: more testing
    }
}
| mit |
Mr-Eskildsen/openhab2-addons | addons/binding/org.openhab.binding.rfxcom.test/src/test/java/org/openhab/binding/rfxcom/internal/messages/RFXComTemperatureRainMessageTest.java | 1633 | /**
* Copyright (c) 2010-2018 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.rfxcom.internal.messages;
import static org.junit.Assert.assertEquals;
import static org.openhab.binding.rfxcom.internal.messages.RFXComTemperatureRainMessage.SubType.WS1200;
import org.eclipse.smarthome.core.util.HexUtils;
import org.junit.Test;
import org.openhab.binding.rfxcom.internal.exceptions.RFXComException;
/**
* Test for RFXCom-binding
*
* @author Martin van Wingerden
* @since 1.9.0
*/
public class RFXComTemperatureRainMessageTest {
    // Round-trip test: decode a known WS1200 temperature/rain frame from hex,
    // verify every decoded field, then re-encode and expect the identical bytes.
    @Test
    public void testSomeMessages() throws RFXComException {
        String hexMessage = "0A4F01CCF001004F03B759";
        byte[] message = HexUtils.hexToBytes(hexMessage);
        RFXComTemperatureRainMessage msg = (RFXComTemperatureRainMessage) RFXComMessageFactory.createMessage(message);
        assertEquals("SubType", WS1200, msg.subType);
        // Mask to an unsigned byte before comparing the sequence number.
        assertEquals("Seq Number", 204, (short) (msg.seqNbr & 0xFF));
        assertEquals("Sensor Id", "61441", msg.getDeviceId());
        assertEquals("Temperature", 7.9, msg.temperature, 0.001);
        assertEquals("Rain total", 95.1, msg.rainTotal, 0.001);
        assertEquals("Signal Level", (byte) 5, msg.signalLevel);

        byte[] decoded = msg.decodeMessage();

        assertEquals("Message converted back", hexMessage, HexUtils.bytesToHex(decoded));
    }
}
| epl-1.0 |
WelcomeHUME/svn-caucho-com-resin | modules/resin/src/com/caucho/jsp/java/JspOutput.java | 5096 | /*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
*
* Free Software Foundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.jsp.java;
import com.caucho.jsp.JspParseException;
import com.caucho.util.L10N;
import com.caucho.vfs.WriteStream;
import com.caucho.xml.QName;
import java.io.IOException;
/**
 * Represents a jsp:output element (only valid in JSP/XML "jspx" documents).
 * Validates the omit-xml-declaration and doctype-* attributes and forwards
 * them to the page generator; produces no output itself.
 */
public class JspOutput extends JspNode {
  static final L10N L = new L10N(JspOutput.class);

  // Recognized jsp:output attribute names.
  static final private QName OMIT_XML_DECLARATION =
    new QName("omit-xml-declaration");
  static final private QName DOCTYPE_SYSTEM =
    new QName("doctype-system");
  static final private QName DOCTYPE_PUBLIC =
    new QName("doctype-public");
  static final private QName DOCTYPE_ROOT_ELEMENT =
    new QName("doctype-root-element");

  // Doctype values seen on this element; cross-checked in endElement()
  // before being (re)applied to the generator.
  private String _doctypeSystem;
  private String _doctypePublic;
  private String _doctypeRootElement;

  /**
   * Adds an attribute.
   *
   * @param name the attribute name
   * @param value the attribute value
   *
   * @throws JspParseException if the attribute is unknown, duplicated with a
   *   conflicting value, or the page is not an XML (jspx) page
   */
  public void addAttribute(QName name, String value)
    throws JspParseException
  {
    if (! _gen.isXml())
      throw error(L.l("jsp:output is only allowed in jspx files."));

    if (OMIT_XML_DECLARATION.equals(name)) {
      _gen.setOmitXmlDeclaration(attributeToBoolean(name.getName(), value));
    }
    else if (DOCTYPE_SYSTEM.equals(name)) {
      // Each doctype attribute may be set at most once (with the same value).
      String oldValue = _gen.getDoctypeSystem();

      if (oldValue != null && ! oldValue.equals(value)) {
        throw error(L.l("jsp:output doctype-system '{0}' conflicts with previous value '{1}'.  The doctype-system attribute may only be specified once.",
                        value, oldValue));
      }

      _gen.setDoctypeSystem(value);
      _doctypeSystem = value;
    }
    else if (DOCTYPE_PUBLIC.equals(name)) {
      String oldValue = _gen.getDoctypePublic();

      if (oldValue != null && ! oldValue.equals(value)) {
        throw error(L.l("jsp:output doctype-public '{0}' conflicts with previous value '{1}'.  The doctype-public attribute may only be specified once.",
                        value, oldValue));
      }

      _gen.setDoctypePublic(value);
      _doctypePublic = value;
    }
    else if (DOCTYPE_ROOT_ELEMENT.equals(name)) {
      String oldValue = _gen.getDoctypeRootElement();

      if (oldValue != null && ! oldValue.equals(value)) {
        throw error(L.l("jsp:output doctype-root-element '{0}' conflicts with previous value '{1}'.  The doctype-root-element attribute may only be specified once.",
                        value, oldValue));
      }

      _gen.setDoctypeRootElement(value);
      _doctypeRootElement = value;
    }
    else {
      // Fixed message: previously said "Value attributes" and omitted the
      // valid omit-xml-declaration attribute.
      throw error(L.l("'{0}' is an unknown jsp:output attribute.  Valid attributes are: omit-xml-declaration, doctype-public, doctype-system, doctype-root-element.",
                      name.getName()));
    }
  }

  /**
   * When the element completes: enforces the doctype attribute dependency
   * rules and pushes the validated values to the generator.
   */
  public void endElement()
    throws JspParseException
  {
    if (_doctypeSystem != null && _doctypeRootElement == null) {
      throw error(L.l("<jsp:output> with a 'doctype-system' attribute requires a 'doctype-root-element' attribute."));
    }

    if (_doctypePublic != null && _doctypeSystem == null) {
      throw error(L.l("<jsp:output> with a 'doctype-public' attribute requires a 'doctype-system' attribute."));
    }

    if (_doctypeRootElement != null && _doctypeSystem == null) {
      throw error(L.l("<jsp:output> with a 'doctype-root-element' attribute requires a 'doctype-system' attribute."));
    }

    _gen.setDoctypeSystem(_doctypeSystem);
    _gen.setDoctypePublic(_doctypePublic);
    _gen.setDoctypeRootElement(_doctypeRootElement);
  }

  /**
   * Return true if the node only has static text.
   */
  public boolean isStatic()
  {
    return true;
  }

  /**
   * Generates the XML text representation for the tag validation.
   * jsp:output contributes nothing here.
   *
   * @param os write stream to the generated XML.
   */
  public void printXml(WriteStream os)
    throws IOException
  {
  }

  /**
   * Generates the code for the tag.  jsp:output emits no code; its effect
   * is entirely through the generator settings made in addAttribute/endElement.
   *
   * @param out the output writer for the generated java.
   */
  public void generate(JspJavaWriter out)
    throws Exception
  {
  }
}
| gpl-2.0 |
winiceo/my-oscgit-android | mygitosc/src/main/java/com/bill/mygitosc/utils/StringUtils.java | 244 | package com.bill.mygitosc.utils;
/**
* Created by liaobb on 2015/8/3.
*/
/**
 * Small string helper utilities.
 */
public class StringUtils {

    /**
     * Returns whether {@code s1} contains {@code s2}, ignoring case.
     * Uses {@link String#contains} instead of an {@code indexOf >= 0} check.
     * NOTE: lower-casing uses the default locale, matching the original behavior.
     *
     * @param s1 the string to search in (must not be null)
     * @param s2 the substring to look for (must not be null; "" always matches)
     * @return true if s1 contains s2 case-insensitively
     */
    public static boolean ignoreCaseContain(String s1, String s2) {
        return s1.toLowerCase().contains(s2.toLowerCase());
    }
}
| gpl-2.0 |
mdaniel/svn-caucho-com-resin | modules/resin/src/com/caucho/db/sql/DoubleNeqExpr.java | 2255 | /*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
*
* Free Software Foundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.db.sql;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.logging.Logger;
/**
 * SQL binary expression comparing two sub-expressions as doubles for
 * inequality, with SQL three-valued logic (NULL operands yield UNKNOWN).
 */
final class DoubleNeqExpr extends AbstractBinaryBooleanExpr {
  private final Expr _left;
  private final Expr _right;

  DoubleNeqExpr(Expr left, Expr right)
  {
    _left = left;
    _right = right;
  }

  /** Returns the left operand. */
  @Override
  public final Expr getLeft()
  {
    return _left;
  }

  /** Returns the right operand. */
  @Override
  public final Expr getRight()
  {
    return _right;
  }

  /** Rebuilds this expression with new operands. */
  @Override
  public Expr create(Expr left, Expr right)
  {
    return new DoubleNeqExpr(left, right);
  }

  /** True when both operands are non-null and compare unequal as doubles. */
  @Override
  public final boolean isSelect(final QueryContext context)
    throws SQLException
  {
    boolean eitherNull = _left.isNull(context) || _right.isNull(context);

    if (eitherNull)
      return false;

    double lhs = _left.evalDouble(context);
    double rhs = _right.evalDouble(context);

    return lhs != rhs;
  }

  /** Three-valued evaluation: UNKNOWN if either side is NULL, else TRUE/FALSE. */
  @Override
  public final int evalBoolean(final QueryContext context)
    throws SQLException
  {
    boolean eitherNull = _left.isNull(context) || _right.isNull(context);

    if (eitherNull)
      return UNKNOWN;

    return (_left.evalDouble(context) != _right.evalDouble(context)) ? TRUE : FALSE;
  }

  @Override
  public String toString()
  {
    return "(" + _left + " != " + _right + ")";
  }
}
| gpl-2.0 |
mdaniel/svn-caucho-com-resin | modules/resin/src/com/caucho/rewrite/IfSecure.java | 1979 | /*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
*
* Free Software Foundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Sam
*/
package com.caucho.rewrite;
import javax.servlet.http.HttpServletRequest;
import com.caucho.config.Configurable;
/**
* Match if the request is secure, i.e. if request.isSecure() matches.
*
* <pre>
* <resin:Allow url-pattern="/admin/*">
* xmlns:resin="urn:java:com.caucho.resin">
* <resin:IfSecure/>
* </resin:Allow>
* </pre>
*
* <p>RequestPredicates may be used for security and rewrite actions.
*/
@Configurable
public class IfSecure implements RequestPredicate
{
  // Expected value of request.isSecure(); defaults to requiring a secure request.
  private boolean _expectedSecure = true;

  /**
   * Configures the secure value to test against.  When true (the default),
   * the predicate matches secure requests; when false, insecure ones.
   */
  @Configurable
  public void setValue(boolean value)
  {
    _expectedSecure = value;
  }

  /**
   * Matches when the request's security state equals the configured value.
   *
   * @param request the servlet request to test
   */
  @Override
  public boolean isMatch(HttpServletRequest request)
  {
    return request.isSecure() == _expectedSecure;
  }
}
| gpl-2.0 |
nicodelpiano/prism | src/pta/parser/PTAParser.java | 23388 | /* Generated By:JavaCC: Do not edit this line. PTAParser.java */
package pta.parser;
import java.io.*;
import java.util.*;
import pta.*;
import prism.PrismLangException;
public class PTAParser implements PTAParserConstants {
//-----------------------------------------------------------------------------------
// Main method for testing purposes
//-----------------------------------------------------------------------------------
// Standalone test driver: parses a PTA from the file named in args[0]
// (or stdin if no argument) and prints the resulting data structure.
public static void main(String[] args)
{
    PTAParser p = null;
    InputStream str = null;
    String src = null;
    try {
        p = new PTAParser();
        str = (args.length > 0) ? new FileInputStream(args[0]) : System.in;
        src = (args.length > 0) ? "file "+args[0] : "stdin";
        System.out.println("Reading from "+src+"...\u005cn");
        PTA pta = p.parsePTA(str);
        System.out.print(pta);
    }
    catch (PrismLangException e) {
        // Parse errors exit with a non-zero status after reporting the source.
        System.out.println("Error in "+src+": " + e.getMessage()+"."); System.exit(1);
    }
    catch (FileNotFoundException e) {
        System.out.println(e); System.exit(1);
    }
}
//-----------------------------------------------------------------------------------
// Methods called by Prism
//-----------------------------------------------------------------------------------
// Constructor
// Default constructor: initializes the (static) JavaCC parser over stdin;
// parsePTA() re-inits with the real stream before each parse.
public PTAParser()
{
    // Call default constructor
    this(System.in);
}
// Parse PTA
// Parses a PTA description from the given stream, converting JavaCC
// ParseExceptions into PrismLangExceptions and resolving the AST into
// the pta.PTA data structures.
public PTA parsePTA(InputStream str) throws PrismLangException
{
    astPTA pta = null;
    // (Re)start parser
    ReInit(str);
    // Parse
    try {
        pta = PTA();
    }
    catch (ParseException e) {
        throw new PrismLangException(e.getMessage());
    }
    return pta.createDataStructures();
}
//------------------------------------------------------------------------------
// Abstract syntax tree classes
//------------------------------------------------------------------------------
// Classes used to build AST representing PTA.
// Note: locations are indexed by name here, not integer as in the normal PTA class
// (this is the main reason for needing separate AST classes here).
// For clocks, this approach is not needed: we just take the ordering of the clocks
// to be as they appear in the file (unlike locations, which have an explicit ordering
// combined with possible forward references).
// AST for a whole PTA: clock names, location names, and per-location
// invariants/transitions.  Locations are keyed by *name* here (allowing
// forward references in the input); numeric indices are resolved in
// createDataStructures().
static class astPTA
{
    // Data
    public ArrayList<String> clockNames;
    public ArrayList<String> locationNames;
    public HashMap<String,LinkedHashSet<Constraint>> invariants;
    public HashMap<String,ArrayList<astTransition>> transitions;
    // Methods
    public astPTA() {
        clockNames = new ArrayList<String>();
        locationNames = new ArrayList<String>();
        invariants = new HashMap<String,LinkedHashSet<Constraint>>();
        transitions = new HashMap<String,ArrayList<astTransition>>();
    }
    // Returns the 1-based index of the named clock, adding it if unseen.
    // (Indices are 1-based here — presumably index 0 is reserved by the
    // Constraint/PTA classes; confirm against pta.PTA.)
    public int getOrAddClock(String name) {
        int i = clockNames.indexOf(name);
        if (i == -1) { clockNames.add(name); return clockNames.size(); }
        else return i+1;
    }
    // Registers a new location with empty invariant and transition lists.
    public void addLocation(String name) {
        locationNames.add(name);
        invariants.put(name, new LinkedHashSet<Constraint>());
        transitions.put(name, new ArrayList<astTransition>());
    }
    public void addInvariantCondition(String locName, Constraint c) {
        invariants.get(locName).add(c);
    }
    // Replaces (rather than extends) the location's invariant set.
    public void setInvariantConditions(String locName, LinkedHashSet<Constraint> cs) {
        invariants.put(locName, cs);
    }
    // Creates, stores and returns a new outgoing transition for the location.
    public astTransition addTransition(String locName) {
        astTransition t = new astTransition();
        transitions.get(locName).add(t);
        return t;
    }
    // Returns the 0-based index of a location name, or -1 if unknown.
    public int getLocationIndex(String name) {
        return locationNames.indexOf(name);
    }
    // Conversion to pta classes
    public PTA createDataStructures()
    {
        int i, n;
        String name;
        PTA pta;
        Transition trans;
        pta = new PTA();
        // Add all clocks
        n = clockNames.size();
        for (i = 0; i < n; i++)
            pta.addClock(clockNames.get(i));
        // Add all locations
        n = locationNames.size();
        for (i = 0; i < n; i++)
            pta.addLocation(locationNames.get(i));
        // Add invariants/transitions to locations
        n = locationNames.size();
        for (i = 0; i < n; i++) {
            name = locationNames.get(i);
            pta.setInvariantConditions(i, invariants.get(name));
            ArrayList<astTransition> tt = transitions.get(name);
            if (tt == null || tt.isEmpty()) continue;
            for (astTransition t : tt) {
                // Transitions with no edges are silently dropped.
                if (!(t.edges.isEmpty())) {
                    trans = pta.addTransition(i, t.action);
                    t.createDataStructures(this, trans);
                }
            }
        }
        return pta;
    }
}
// AST for one transition: optional action label, guard constraints, and a
// list of probabilistic edges.
static class astTransition
{
    // Data
    private String action = null;
    private ArrayList<Constraint> guard;
    public ArrayList<astEdge> edges;
    // Methods
    public astTransition() { guard = new ArrayList<Constraint>(); edges = new ArrayList<astEdge>(); }
    public void setAction(String action) { this.action = action; }
    public void addGuardConstraint(Constraint c) { guard.add(c); }
    // Creates, stores and returns a new edge with the given probability/destination.
    public astEdge addEdge(double prob, String dest) { astEdge e = new astEdge(prob, dest); edges.add(e); return e; }
    // Conversion to pta classes
    public void createDataStructures(astPTA pta, Transition trans)
    {
        for (Constraint c : guard)
            trans.addGuardConstraint(c);
        for (astEdge e : edges)
            e.createDataStructures(pta, trans);
    }
}
// AST for one probabilistic edge: probability, destination location (by
// name, resolved later) and a map of clock resets (clock index -> value).
static class astEdge
{
    // Data
    public double prob;
    public String dest;
    public HashMap<Integer,Integer> resets;
    // Methods
    public astEdge(double prob, String dest) { this.prob = prob; this.dest = dest; resets = new HashMap<Integer,Integer>(); }
    public void addReset(int clock, int val) { resets.put(clock, val); }
    // Conversion to pta classes
    public void createDataStructures(astPTA pta, Transition trans)
    {
        // Resolve the destination name to its index; unknown names are fatal.
        int d = pta.getLocationIndex(dest);
        if (d == -1) { System.err.println("Error: Location \u005c""+dest+"\u005c" does not exist"); System.exit(1); }
        Edge edge = trans.addEdge(prob, d);
        for (Map.Entry<Integer,Integer> e : resets.entrySet()) edge.addReset(e.getKey(), e.getValue());
    }
}
//-----------------------------------------------------------------------------------
// Top-level production
//-----------------------------------------------------------------------------------
// PTA
// Generated (JavaCC) top-level production: PTA ::= Location* <EOF>.
// Builds and returns the AST for the whole input.
static final public astPTA PTA() throws ParseException {
    astPTA pta = new astPTA();
    label_1:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case LBRACE:
            ;
            break;
        default:
            jj_la1[0] = jj_gen;
            break label_1;
        }
        Location(pta);
    }
    jj_consume_token(0);
    {if (true) return pta;}
    throw new Error("Missing return statement in function");
}
// Generated (JavaCC) production for one location block:
// "{" [init] node <name> ";" <invariant constraints> Transition* ("}" | "*").
static final public void Location(astPTA pta) throws ParseException {
    String name;
    LinkedHashSet<Constraint> constrs;
    jj_consume_token(LBRACE);
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case INIT:
        jj_consume_token(INIT);
        break;
    default:
        jj_la1[1] = jj_gen;
        ;
    }
    jj_consume_token(NODE);
    name = Identifier();
    pta.addLocation(name);
    jj_consume_token(SEMICOLON);
    constrs = ConstraintList(pta);
    pta.setInvariantConditions(name, constrs);
    label_2:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case LBRACKET:
            ;
            break;
        default:
            jj_la1[2] = jj_gen;
            break label_2;
        }
        Transition(pta, name);
    }
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case RBRACE:
        jj_consume_token(RBRACE);
        break;
    case TIMES:
        jj_consume_token(TIMES);
        break;
    default:
        jj_la1[3] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
    }
}
// Generated (JavaCC) production for one transition: "[" Edge* "]".
// The transition is attached to the given source location.
static final public void Transition(astPTA pta, String locName) throws ParseException {
    astTransition tr;
    jj_consume_token(LBRACKET);
    tr = pta.addTransition(locName);
    label_3:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case TRAN:
        case REG_IDENT:
            ;
            break;
        default:
            jj_la1[4] = jj_gen;
            break label_3;
        }
        Edge(pta, tr);
    }
    jj_consume_token(RBRACKET);
}
// Generated (JavaCC) production for one edge:
// [action "||"] TRAN dest ";" guard ";" resets ";" probability.
// Populates the transition's action/guard and adds one probabilistic edge.
static final public void Edge(astPTA pta, astTransition tr) throws ParseException {
    LinkedHashSet<Constraint> constrs;
    String action = null, dest;
    double p;
    astEdge edge;
    HashMap<Integer,Integer> resets;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case REG_IDENT:
        action = Identifier();
        jj_consume_token(OR);
        jj_consume_token(OR);
        break;
    default:
        jj_la1[5] = jj_gen;
        ;
    }
    jj_consume_token(TRAN);
    dest = Identifier();
    jj_consume_token(SEMICOLON);
    constrs = ConstraintList(pta);
    jj_consume_token(SEMICOLON);
    resets = Resets(pta);
    jj_consume_token(SEMICOLON);
    p = Probability();
    // NOTE: each edge (re)sets the transition's action and appends its own
    // guard constraints to the shared transition guard.
    tr.setAction(action);
    edge = tr.addEdge(p, dest);
    for (Map.Entry<Integer,Integer> e : resets.entrySet()) edge.addReset(e.getKey(), e.getValue());
    for (Constraint c : constrs) tr.addGuardConstraint(c);
}
// Generated (JavaCC) production: a comma-separated list of clock
// constraints, or the literal "true" (yielding an empty set).
static final public LinkedHashSet<Constraint> ConstraintList(astPTA pta) throws ParseException {
    LinkedHashSet<Constraint> constrs = new LinkedHashSet<Constraint>();
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case REG_IDENT:
        Constraint(pta, constrs);
        label_4:
        while (true) {
            switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case COMMA:
                ;
                break;
            default:
                jj_la1[6] = jj_gen;
                break label_4;
            }
            jj_consume_token(COMMA);
            Constraint(pta, constrs);
        }
        break;
    case TRUE:
        jj_consume_token(TRUE);
        break;
    default:
        jj_la1[7] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
    }
    {if (true) return constrs;}
    throw new Error("Missing return statement in function");
}
// Generated (JavaCC) production for a single constraint:
// clock <op> int  (any of < <= > >= =; "=" becomes <= plus >=), or
// clock < clock   (only strict less-than is supported between clocks).
static final public void Constraint(astPTA pta, LinkedHashSet<Constraint> constrs) throws ParseException {
    String clock1Name, clock2Name;
    int clock1, clock2, val;
    Token t;
    clock1Name = Identifier();
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case LT:
        t = jj_consume_token(LT);
        break;
    case LE:
        t = jj_consume_token(LE);
        break;
    case GT:
        t = jj_consume_token(GT);
        break;
    case GE:
        t = jj_consume_token(GE);
        break;
    case EQ:
        t = jj_consume_token(EQ);
        break;
    default:
        jj_la1[8] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
    }
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case REG_INT:
        val = Integer();
        clock1 = pta.getOrAddClock(clock1Name);
        switch (t.kind) {
        case PTAParserConstants.LT:
            constrs.add(Constraint.buildLt(clock1, val)); break;
        case PTAParserConstants.LE:
            constrs.add(Constraint.buildLeq(clock1, val)); break;
        case PTAParserConstants.GT:
            constrs.add(Constraint.buildGt(clock1, val)); break;
        case PTAParserConstants.GE:
            constrs.add(Constraint.buildGeq(clock1, val)); break;
        case PTAParserConstants.EQ:
            // Equality is encoded as the conjunction of <= and >=.
            constrs.add(Constraint.buildLeq(clock1, val));
            constrs.add(Constraint.buildGeq(clock1, val)); break;
        }
        break;
    case REG_IDENT:
        clock2Name = Identifier();
        clock1 = pta.getOrAddClock(clock1Name);
        clock2 = pta.getOrAddClock(clock2Name);
        switch (t.kind) {
        case PTAParserConstants.LT:
            constrs.add(Constraint.buildLt(clock1, clock2)); break;
        default:
            // Clock-vs-clock with any other operator is a hard error.
            System.err.println("Error: Unsupported constraint type"); System.exit(1);
        }
        break;
    default:
        jj_la1[9] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
    }
}
// Generated (JavaCC) production: a comma-separated list of clock resets,
// or the NULL token (meaning no resets; yields an empty map).
static final public HashMap<Integer,Integer> Resets(astPTA pta) throws ParseException {
    HashMap<Integer,Integer> resets = new HashMap<Integer,Integer>();
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case REG_IDENT:
        Reset(pta, resets);
        label_5:
        while (true) {
            switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case COMMA:
                ;
                break;
            default:
                jj_la1[10] = jj_gen;
                break label_5;
            }
            jj_consume_token(COMMA);
            Reset(pta, resets);
        }
        break;
    case NULL:
        jj_consume_token(NULL);
        break;
    default:
        jj_la1[11] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
    }
    {if (true) return resets;}
    throw new Error("Missing return statement in function");
}
// Generated (JavaCC) production for one reset "clock = int"; stores the
// (clock index -> value) pair into the supplied map.
static final public void Reset(astPTA pta, HashMap<Integer,Integer> resets) throws ParseException {
    String clockName;
    int clock;
    int val;
    clockName = Identifier();
    jj_consume_token(EQ);
    val = Integer();
    clock = pta.getOrAddClock(clockName);
    resets.put(clock, val);
}
// Generated (JavaCC) production: a probability literal, either a double or
// an int token, parsed via Double.parseDouble.  (Local 'd' is generated
// boilerplate and unused.)
static final public double Probability() throws ParseException {
    Token t;
    double d;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case REG_DOUBLE:
        t = jj_consume_token(REG_DOUBLE);
        break;
    case REG_INT:
        t = jj_consume_token(REG_INT);
        break;
    default:
        jj_la1[12] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
    }
    {if (true) return Double.parseDouble(t.image);}
    throw new Error("Missing return statement in function");
}
//-----------------------------------------------------------------------------------
// Miscellaneous stuff
//-----------------------------------------------------------------------------------
// Identifier (returns String)
// Generated (JavaCC) production: consumes one identifier token and returns
// its image.
static final public String Identifier() throws ParseException {
    jj_consume_token(REG_IDENT);
    {if (true) return getToken(0).image;}
    throw new Error("Missing return statement in function");
}
// Integer
// Generated (JavaCC) production: consumes one integer token and returns
// its parsed value.
static final public int Integer() throws ParseException {
    jj_consume_token(REG_INT);
    {if (true) return Integer.parseInt(getToken(0).image);}
    throw new Error("Missing return statement in function");
}
  // Guards against a second instantiation of this STATIC parser (see the
  // constructors, which abort if it is already true).
  static private boolean jj_initialized_once = false;
  /** Generated Token Manager. */
  static public PTAParserTokenManager token_source;
  static SimpleCharStream jj_input_stream;
  /** Current token. */
  static public Token token;
  /** Next token. */
  static public Token jj_nt;
  // Kind of the next token, or -1 if it has not yet been fetched.
  static private int jj_ntk;
  // Generation counter; bumped on every consumed token and used by
  // generateParseException to decide which expectations are current.
  static private int jj_gen;
  // One slot per lookahead decision point in the grammar (13 of them).
  static final private int[] jj_la1 = new int[13];
  // Bit masks of the token kinds expected at each decision point:
  // jj_la1_0 covers token kinds 0-31, jj_la1_1 covers kinds 32-63.
  static private int[] jj_la1_0;
  static private int[] jj_la1_1;
  static {
    jj_la1_init_0();
    jj_la1_init_1();
  }
  private static void jj_la1_init_0() {
    jj_la1_0 = new int[] {0x200000,0x8,0x80000,0x80400000,0x40,0x0,0x8000,0x80,0x1e800000,0x0,0x8000,0x20,0x0,};
  }
  private static void jj_la1_init_1() {
    jj_la1_1 = new int[] {0x0,0x0,0x0,0x0,0x100,0x100,0x0,0x100,0x0,0x120,0x0,0x100,0x60,};
  }
  /** Constructor with InputStream. */
  public PTAParser(java.io.InputStream stream) {
     this(stream, null);
  }
  /** Constructor with InputStream and supplied encoding */
  public PTAParser(java.io.InputStream stream, String encoding) {
    // The parser was generated with STATIC=true, so all state is class-level
    // and only a single construction is ever legal.
    if (jj_initialized_once) {
      System.out.println("ERROR: Second call to constructor of static parser.  ");
      System.out.println("       You must either use ReInit() or set the JavaCC option STATIC to false");
      System.out.println("       during parser generation.");
      throw new Error();
    }
    jj_initialized_once = true;
    try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source = new PTAParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // -1 marks every lookahead slot as "not hit yet" for error reporting.
    for (int i = 0; i < 13; i++) jj_la1[i] = -1;
  }
  /** Reinitialise. */
  static public void ReInit(java.io.InputStream stream) {
     ReInit(stream, null);
  }
  /** Reinitialise. */
  static public void ReInit(java.io.InputStream stream, String encoding) {
    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 13; i++) jj_la1[i] = -1;
  }
  /** Constructor. */
  public PTAParser(java.io.Reader stream) {
    // Single construction only: all parser state is static (STATIC=true).
    if (jj_initialized_once) {
      System.out.println("ERROR: Second call to constructor of static parser. ");
      System.out.println("       You must either use ReInit() or set the JavaCC option STATIC to false");
      System.out.println("       during parser generation.");
      throw new Error();
    }
    jj_initialized_once = true;
    jj_input_stream = new SimpleCharStream(stream, 1, 1);
    token_source = new PTAParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 13; i++) jj_la1[i] = -1;
  }
  /** Reinitialise. */
  static public void ReInit(java.io.Reader stream) {
    jj_input_stream.ReInit(stream, 1, 1);
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 13; i++) jj_la1[i] = -1;
  }
  /** Constructor with generated Token Manager. */
  public PTAParser(PTAParserTokenManager tm) {
    // Single construction only: all parser state is static (STATIC=true).
    if (jj_initialized_once) {
      System.out.println("ERROR: Second call to constructor of static parser. ");
      System.out.println("       You must either use ReInit() or set the JavaCC option STATIC to false");
      System.out.println("       during parser generation.");
      throw new Error();
    }
    jj_initialized_once = true;
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 13; i++) jj_la1[i] = -1;
  }
  /** Reinitialise. */
  public void ReInit(PTAParserTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 13; i++) jj_la1[i] = -1;
  }
  // Consumes the next token if it matches `kind`; otherwise restores the
  // previous position, records the expected kind, and throws ParseException.
  static private Token jj_consume_token(int kind) throws ParseException {
    Token oldToken;
    // Tokens form a linked list; reuse an already-fetched token if available,
    // otherwise pull the next one from the token manager.
    if ((oldToken = token).next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    if (token.kind == kind) {
      jj_gen++;
      return token;
    }
    // Mismatch: back out so error reporting sees the pre-consume position.
    token = oldToken;
    jj_kind = kind;
    throw generateParseException();
  }
  /** Get the next Token. */
  static final public Token getNextToken() {
    // Advance along the cached token list, fetching from the token manager
    // only when the lookahead chain is exhausted.
    if (token.next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    jj_gen++;
    return token;
  }
  /** Get the specific Token. */
  static final public Token getToken(int index) {
    // index 0 is the current token; larger indices look ahead without
    // consuming, extending the token chain as needed.
    Token t = token;
    for (int i = 0; i < index; i++) {
      if (t.next != null) t = t.next;
      else t = t.next = token_source.getNextToken();
    }
    return t;
  }
  // Returns (and caches in jj_ntk) the kind of the next unconsumed token.
  static private int jj_ntk() {
    if ((jj_nt=token.next) == null)
      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
    else
      return (jj_ntk = jj_nt.kind);
  }
  // Scratch state for building the "expected token sequences" reported
  // inside a ParseException.
  static private java.util.List<int[]> jj_expentries = new java.util.ArrayList<int[]>();
  static private int[] jj_expentry;
  // Token kind recorded by jj_consume_token on a mismatch, or -1.
  static private int jj_kind = -1;
  /** Generate ParseException. */
  static public ParseException generateParseException() {
    jj_expentries.clear();
    // One flag per token kind (42 kinds in this grammar).
    boolean[] la1tokens = new boolean[42];
    if (jj_kind >= 0) {
      la1tokens[jj_kind] = true;
      jj_kind = -1;
    }
    // Collect the expectations of every decision point reached at the
    // current generation (i.e. decisions taken at the failure position).
    for (int i = 0; i < 13; i++) {
      if (jj_la1[i] == jj_gen) {
        for (int j = 0; j < 32; j++) {
          if ((jj_la1_0[i] & (1<<j)) != 0) {
            la1tokens[j] = true;
          }
          if ((jj_la1_1[i] & (1<<j)) != 0) {
            la1tokens[32+j] = true;
          }
        }
      }
    }
    for (int i = 0; i < 42; i++) {
      if (la1tokens[i]) {
        jj_expentry = new int[1];
        jj_expentry[0] = i;
        jj_expentries.add(jj_expentry);
      }
    }
    int[][] exptokseq = new int[jj_expentries.size()][];
    for (int i = 0; i < jj_expentries.size(); i++) {
      exptokseq[i] = jj_expentries.get(i);
    }
    return new ParseException(token, exptokseq, tokenImage);
  }
  // Tracing hooks are empty because the parser was generated without
  // DEBUG_PARSER; they exist so callers compile regardless of that option.
  /** Enable tracing. */
  static final public void enable_tracing() {
  }
  /** Disable tracing. */
  static final public void disable_tracing() {
  }
}
| gpl-2.0 |
lawrancej/logisim | src/main/java/com/cburch/logisim/tools/AddTool.java | 18224 | /* Copyright (c) 2010, Carl Burch. License information is located in the
* com.cburch.logisim.Main source code and at www.cburch.com/logisim/. */
package com.cburch.logisim.tools;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Graphics;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import javax.swing.Icon;
import javax.swing.JOptionPane;
import com.cburch.logisim.LogisimVersion;
import com.cburch.logisim.circuit.Circuit;
import com.cburch.logisim.circuit.CircuitException;
import com.cburch.logisim.circuit.CircuitMutation;
import com.cburch.logisim.circuit.SubcircuitFactory;
import com.cburch.logisim.comp.Component;
import com.cburch.logisim.comp.ComponentFactory;
import com.cburch.logisim.comp.ComponentDrawContext;
import com.cburch.logisim.data.Attribute;
import com.cburch.logisim.data.AttributeEvent;
import com.cburch.logisim.data.AttributeListener;
import com.cburch.logisim.data.AttributeSet;
import com.cburch.logisim.data.Bounds;
import com.cburch.logisim.data.Direction;
import com.cburch.logisim.data.Location;
import com.cburch.logisim.gui.main.Canvas;
import com.cburch.logisim.gui.main.SelectionActions;
import com.cburch.logisim.gui.main.ToolAttributeAction;
import com.cburch.logisim.prefs.AppPreferences;
import com.cburch.logisim.proj.Action;
import com.cburch.logisim.proj.Dependencies;
import com.cburch.logisim.proj.Project;
import com.cburch.logisim.tools.key.KeyConfigurationEvent;
import com.cburch.logisim.tools.key.KeyConfigurator;
import com.cburch.logisim.tools.key.KeyConfigurationResult;
import static com.cburch.logisim.util.LocaleString.*;
/**
 * Tool that adds new instances of a single {@link ComponentFactory} to the
 * circuit shown in the canvas. While selected, it follows the mouse with a
 * "ghost" preview of the component and drops a real component on release.
 * The factory may be supplied directly or lazily via a
 * {@link FactoryDescription} (loaded on first use).
 */
public class AddTool extends Tool {
    // Sentinel coordinate meaning "mouse location unknown / off canvas".
    private static final int INVALID_COORD = Integer.MIN_VALUE;
    // Ghost-display states (see setState and draw): nothing shown, gray ghost
    // following the mouse, black about-to-add ghost, or add pending while the
    // mouse is outside the canvas.
    private static final int SHOW_NONE = 0;
    private static final int SHOW_GHOST = 1;
    private static final int SHOW_ADD = 2;
    private static final int SHOW_ADD_NO = 3;

    private static final Cursor cursor
        = Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR);

    /** Invalidates the cached ghost bounds whenever the tool's attributes change. */
    private class MyAttributeListener implements AttributeListener {
        @Override
        public void attributeListChanged(AttributeEvent e) {
            bounds = null;
        }
        @Override
        public void attributeValueChanged(AttributeEvent e) {
            bounds = null;
        }
    }

    // Library class and description used for lazy factory loading; null when
    // the factory was supplied directly.
    private Class<? extends Library> descriptionBase;
    private FactoryDescription description;
    // True once we have tried to resolve `factory` (even if loading failed).
    private boolean sourceLoadAttempted;
    private ComponentFactory factory;
    private AttributeSet attrs;
    // Cached expanded ghost bounds; null means "recompute on next use".
    private Bounds bounds;
    // Whether mouse coordinates are snapped to the grid before use.
    private boolean shouldSnap;
    private int lastX = INVALID_COORD;
    private int lastY = INVALID_COORD;
    private int state = SHOW_GHOST;
    // Most recent add action, so Backspace can undo it while still current.
    private Action lastAddition;
    private boolean keyHandlerTried;
    private KeyConfigurator keyHandler;

    public AddTool(Class<? extends Library> base, FactoryDescription description) {
        this.descriptionBase = base;
        this.description = description;
        this.sourceLoadAttempted = false;
        this.shouldSnap = true;
        this.attrs = new FactoryAttributes(base, description);
        attrs.addAttributeListener(new MyAttributeListener());
        this.keyHandlerTried = false;
    }

    public AddTool(ComponentFactory source) {
        this.description = null;
        this.sourceLoadAttempted = true;
        this.factory = source;
        this.bounds = null;
        this.attrs = new FactoryAttributes(source);
        attrs.addAttributeListener(new MyAttributeListener());
        Boolean value = (Boolean) source.getFeature(ComponentFactory.SHOULD_SNAP, attrs);
        // Snap to grid unless the factory explicitly opts out.
        this.shouldSnap = value == null || value.booleanValue();
    }

    /** Copy constructor used by cloneTool; shares the factory, clones the attributes. */
    private AddTool(AddTool base) {
        this.descriptionBase = base.descriptionBase;
        this.description = base.description;
        this.sourceLoadAttempted = base.sourceLoadAttempted;
        this.factory = base.factory;
        this.bounds = base.bounds;
        this.shouldSnap = base.shouldSnap;
        this.attrs = (AttributeSet) base.attrs.clone();
        attrs.addAttributeListener(new MyAttributeListener());
    }

    @Override
    public boolean equals(Object other) {
        if (!(other instanceof AddTool)) {
            return false;
        }
        AddTool o = (AddTool) other;
        // Tools created from a description compare by description; tools
        // created from a factory compare by factory. hashCode matches.
        if (this.description != null) {
            return this.descriptionBase == o.descriptionBase
                && this.description.equals(o.description);
        } else {
            return this.factory.equals(o.factory);
        }
    }

    @Override
    public int hashCode() {
        FactoryDescription desc = description;
        return desc != null ? desc.hashCode() : factory.hashCode();
    }

    @Override
    public boolean sharesSource(Tool other) {
        if (!(other instanceof AddTool)) {
            return false;
        }
        AddTool o = (AddTool) other;
        if (this.sourceLoadAttempted && o.sourceLoadAttempted) {
            return this.factory.equals(o.factory);
        }
        if (this.description == null) {
            return o.description == null;
        }
        return this.description.equals(o.description);
    }

    /** Returns the factory, loading it on demand only when forceLoad is set. */
    public ComponentFactory getFactory(boolean forceLoad) {
        return forceLoad ? getFactory() : factory;
    }

    /**
     * Returns the factory, resolving it from the description on first call.
     * May return null if the description fails to load; the failed attempt is
     * remembered and not retried.
     */
    public ComponentFactory getFactory() {
        ComponentFactory ret = factory;
        if (ret != null || sourceLoadAttempted) {
            return ret;
        }
        ret = description.getFactory(descriptionBase);
        if (ret != null) {
            AttributeSet base = getBaseAttributes();
            Boolean value = (Boolean) ret.getFeature(ComponentFactory.SHOULD_SNAP, base);
            shouldSnap = value == null || value.booleanValue();
        }
        factory = ret;
        sourceLoadAttempted = true;
        return ret;
    }

    @Override
    public String getName() {
        FactoryDescription desc = description;
        return desc == null ? factory.getName() : desc.getName();
    }

    @Override
    public String getDisplayName() {
        FactoryDescription desc = description;
        return desc == null ? factory.getDisplayName() : desc.getDisplayName();
    }

    @Override
    public String getDescription() {
        String ret;
        FactoryDescription desc = description;
        if (desc != null) {
            ret = desc.getToolTip();
        } else {
            ComponentFactory source = getFactory();
            if (source != null) {
                ret = (String) source.getFeature(ComponentFactory.TOOL_TIP,
                        getAttributeSet());
            } else {
                ret = null;
            }
        }
        if (ret == null) {
            ret = getFromLocale("addToolText", getDisplayName());
        }
        return ret;
    }

    @Override
    public Tool cloneTool() {
        return new AddTool(this);
    }

    @Override
    public AttributeSet getAttributeSet() {
        return attrs;
    }

    @Override
    public boolean isAllDefaultValues(AttributeSet attrs, LogisimVersion ver) {
        // If the factory was never instantiated, no attribute can have been
        // customized, so everything is still at its default.
        return this.attrs == attrs && attrs instanceof FactoryAttributes
            && !((FactoryAttributes) attrs).isFactoryInstantiated();
    }

    @Override
    public Object getDefaultAttributeValue(Attribute<?> attr, LogisimVersion ver) {
        return getFactory().getDefaultAttributeValue(attr, ver);
    }

    @Override
    public void draw(Canvas canvas, ComponentDrawContext context) {
        // next "if" suggested roughly by Kevin Walsh of Cornell to take care of
        // repaint problems on OpenJDK under Ubuntu
        int x = lastX;
        int y = lastY;
        if (x == INVALID_COORD || y == INVALID_COORD) {
            return;
        }
        ComponentFactory source = getFactory();
        if (source == null) {
            return;
        }
        // Gray ghost while hovering; black ghost once the press committed.
        if (state == SHOW_GHOST) {
            source.drawGhost(context, Color.GRAY, x, y, getBaseAttributes());
        } else if (state == SHOW_ADD) {
            source.drawGhost(context, Color.BLACK, x, y, getBaseAttributes());
        }
    }

    /** Unwraps FactoryAttributes so factories see their own attribute set. */
    private AttributeSet getBaseAttributes() {
        AttributeSet ret = attrs;
        if (ret instanceof FactoryAttributes) {
            ret = ((FactoryAttributes) ret).getBase();
        }
        return ret;
    }

    public void cancelOp() { }

    @Override
    public void select(Canvas canvas) {
        setState(canvas, SHOW_GHOST);
        bounds = null;
    }

    @Override
    public void deselect(Canvas canvas) {
        setState(canvas, SHOW_GHOST);
        moveTo(canvas, canvas.getGraphics(), INVALID_COORD, INVALID_COORD);
        bounds = null;
        lastAddition = null;
    }

    // Moves the ghost, repainting its old and new locations.
    private synchronized void moveTo(Canvas canvas, Graphics g,
            int x, int y) {
        if (state != SHOW_NONE) {
            expose(canvas, lastX, lastY);
        }
        lastX = x;
        lastY = y;
        if (state != SHOW_NONE) {
            expose(canvas, lastX, lastY);
        }
    }

    @Override
    public void mouseEntered(Canvas canvas, Graphics g,
            MouseEvent e) {
        if (state == SHOW_GHOST || state == SHOW_NONE) {
            setState(canvas, SHOW_GHOST);
            canvas.requestFocusInWindow();
        } else if (state == SHOW_ADD_NO) {
            setState(canvas, SHOW_ADD);
            canvas.requestFocusInWindow();
        }
    }

    @Override
    public void mouseExited(Canvas canvas, Graphics g,
            MouseEvent e) {
        // Hide the ghost but remember whether an add was in progress.
        if (state == SHOW_GHOST) {
            moveTo(canvas, canvas.getGraphics(), INVALID_COORD, INVALID_COORD);
            setState(canvas, SHOW_NONE);
        } else if (state == SHOW_ADD) {
            moveTo(canvas, canvas.getGraphics(), INVALID_COORD, INVALID_COORD);
            setState(canvas, SHOW_ADD_NO);
        }
    }

    @Override
    public void mouseMoved(Canvas canvas, Graphics g, MouseEvent e) {
        if (state != SHOW_NONE) {
            if (shouldSnap) {
                Canvas.snapToGrid(e);
            }
            moveTo(canvas, g, e.getX(), e.getY());
        }
    }

    @Override
    public void mousePressed(Canvas canvas, Graphics g, MouseEvent e) {
        // verify the addition would be valid
        Circuit circ = canvas.getCircuit();
        if (!canvas.getProject().getLogisimFile().contains(circ)) {
            canvas.setErrorMessage(getFromLocale("cannotModifyError"), 0, 0);
            return;
        }
        if (factory instanceof SubcircuitFactory) {
            // Refuse subcircuit placements that would create a dependency cycle.
            SubcircuitFactory circFact = (SubcircuitFactory) factory;
            Dependencies depends = canvas.getProject().getDependencies();
            if (!depends.canAdd(circ, circFact.getSubcircuit())) {
                canvas.setErrorMessage(getFromLocale("circularError"), 0, 0);
                return;
            }
        }
        if (shouldSnap) {
            Canvas.snapToGrid(e);
        }
        moveTo(canvas, g, e.getX(), e.getY());
        setState(canvas, SHOW_ADD);
    }

    @Override
    public void mouseDragged(Canvas canvas, Graphics g, MouseEvent e) {
        if (state != SHOW_NONE) {
            if (shouldSnap) {
                Canvas.snapToGrid(e);
            }
            moveTo(canvas, g, e.getX(), e.getY());
        }
    }

    @Override
    public void mouseReleased(Canvas canvas, Graphics g,
            MouseEvent e) {
        Component added = null;
        if (state == SHOW_ADD) {
            Circuit circ = canvas.getCircuit();
            if (!canvas.getProject().getLogisimFile().contains(circ)) {
                return;
            }
            if (shouldSnap) {
                Canvas.snapToGrid(e);
            }
            moveTo(canvas, g, e.getX(), e.getY());
            Location loc = Location.create(e.getX(), e.getY());
            // Clone the attributes so later tool edits don't mutate the
            // component that was just placed.
            AttributeSet attrsCopy = (AttributeSet) attrs.clone();
            ComponentFactory source = getFactory();
            if (source == null) {
                return;
            }
            Component c = source.createComponent(loc, attrsCopy);
            if (circ.hasConflict(c)) {
                canvas.setErrorMessage(getFromLocale("exclusiveError"), 0, 0);
                return;
            }
            Bounds bds = c.getBounds(g);
            if (bds.getX() < 0 || bds.getY() < 0) {
                canvas.setErrorMessage(getFromLocale("negativeCoordError"), 0, 0);
                return;
            }
            try {
                CircuitMutation mutation = new CircuitMutation(circ);
                mutation.add(c);
                Action action = mutation.toAction(getFromLocale("addComponentAction", factory.getDisplayGetter()));
                canvas.getProject().doAction(action);
                lastAddition = action;
                added = c;
            } catch (CircuitException ex) {
                JOptionPane.showMessageDialog(canvas.getProject().getFrame(),
                        ex.getMessage());
            }
            setState(canvas, SHOW_GHOST);
        } else if (state == SHOW_ADD_NO) {
            setState(canvas, SHOW_NONE);
        }

        // Optionally switch to the edit tool after a placement, selecting the
        // component that was just added (per user preference).
        Project proj = canvas.getProject();
        Tool next = determineNext(proj);
        if (next != null) {
            proj.setTool(next);
            Action act = SelectionActions.dropAll(canvas.getSelection());
            if (act != null) {
                proj.doAction(act);
            }
            if (added != null) {
                canvas.getSelection().add(added);
            }
        }
    }

    /**
     * Returns the tool to switch to after an add, or null to keep this tool
     * selected (controlled by the ADD_AFTER preference).
     */
    private Tool determineNext(Project proj) {
        String afterAdd = AppPreferences.ADD_AFTER.get();
        if (afterAdd.equals(AppPreferences.ADD_AFTER_UNCHANGED)) {
            return null;
        // switch to Edit Tool
        } else {
            Library base = proj.getLogisimFile().getLibrary("Base");
            if (base == null) {
                return null;
            } else {
                return base.getTool("Edit Tool");
            }
        }
    }

    @Override
    public void keyPressed(Canvas canvas, KeyEvent event) {
        processKeyEvent(canvas, event, KeyConfigurationEvent.KEY_PRESSED);

        // Fallback key bindings: arrows rotate the ghost, Backspace undoes the
        // most recent add (only while it is still the last action).
        if (!event.isConsumed() && event.getModifiersEx() == 0) {
            switch (event.getKeyCode()) {
            case KeyEvent.VK_UP: setFacing(canvas, Direction.NORTH); break;
            case KeyEvent.VK_DOWN: setFacing(canvas, Direction.SOUTH); break;
            case KeyEvent.VK_LEFT: setFacing(canvas, Direction.WEST); break;
            case KeyEvent.VK_RIGHT: setFacing(canvas, Direction.EAST); break;
            case KeyEvent.VK_BACK_SPACE:
                if (lastAddition != null && canvas.getProject().getLastAction() == lastAddition) {
                    canvas.getProject().undoAction();
                    lastAddition = null;
                }
            }
        }
    }

    @Override
    public void keyReleased(Canvas canvas, KeyEvent event) {
        processKeyEvent(canvas, event, KeyConfigurationEvent.KEY_RELEASED);
    }

    @Override
    public void keyTyped(Canvas canvas, KeyEvent event) {
        processKeyEvent(canvas, event, KeyConfigurationEvent.KEY_TYPED);
    }

    // Forwards key events to the factory's KeyConfigurator (resolved lazily,
    // at most once) and applies any resulting attribute changes.
    private void processKeyEvent(Canvas canvas, KeyEvent event, int type) {
        KeyConfigurator handler = keyHandler;
        if (!keyHandlerTried) {
            ComponentFactory source = getFactory();
            AttributeSet baseAttrs = getBaseAttributes();
            handler = (KeyConfigurator) source.getFeature(KeyConfigurator.class, baseAttrs);
            keyHandler = handler;
            keyHandlerTried = true;
        }

        if (handler != null) {
            AttributeSet baseAttrs = getBaseAttributes();
            KeyConfigurationEvent e = new KeyConfigurationEvent(type, baseAttrs, event, this);
            KeyConfigurationResult r = handler.keyEventReceived(e);
            if (r != null) {
                Action act = ToolAttributeAction.create(r);
                canvas.getProject().doAction(act);
            }
        }
    }

    // Sets the ghost's facing attribute, if the factory exposes one.
    private void setFacing(Canvas canvas, Direction facing) {
        ComponentFactory source = getFactory();
        if (source == null) {
            return;
        }
        AttributeSet base = getBaseAttributes();
        Object feature = source.getFeature(ComponentFactory.FACING_ATTRIBUTE_KEY, base);
        @SuppressWarnings("unchecked")
        Attribute<Direction> attr = (Attribute<Direction>) feature;
        if (attr != null) {
            Action act = ToolAttributeAction.create(this, attr, facing);
            canvas.getProject().doAction(act);
        }
    }

    @Override
    public void paintIcon(ComponentDrawContext c, int x, int y) {
        // Prefer the description's lightweight icon so painting the toolbar
        // doesn't force the factory class to load.
        FactoryDescription desc = description;
        if (desc != null && !desc.isFactoryLoaded()) {
            Icon icon = desc.getIcon();
            if (icon != null) {
                icon.paintIcon(c.getDestination(), c.getGraphics(), x + 2, y + 2);
                return;
            }
        }

        ComponentFactory source = getFactory();
        if (source != null) {
            AttributeSet base = getBaseAttributes();
            source.paintIcon(c, x, y, base);
        }
    }

    // Requests a repaint of the ghost's bounding box at the given location.
    private void expose(java.awt.Component c, int x, int y) {
        Bounds bds = getBounds();
        c.repaint(x + bds.getX(), y + bds.getY(),
            bds.getWidth(), bds.getHeight());
    }

    @Override
    public Cursor getCursor() { return cursor; }

    // Transitions the display state; a request for SHOW_GHOST is downgraded to
    // SHOW_NONE when ghosts are disabled or the circuit isn't editable.
    private void setState(Canvas canvas, int value) {
        if (value == SHOW_GHOST) {
            if (canvas.getProject().getLogisimFile().contains(canvas.getCircuit())
                    && AppPreferences.ADD_SHOW_GHOSTS.getBoolean()) {
                state = SHOW_GHOST;
            } else {
                state = SHOW_NONE;
            }
        } else {
            state = value;
        }
    }

    // Returns (computing and caching if needed) the ghost's bounds, expanded
    // by a 5-pixel margin for clean repaints.
    private Bounds getBounds() {
        Bounds ret = bounds;
        if (ret == null) {
            ComponentFactory source = getFactory();
            if (source == null) {
                ret = Bounds.EMPTY_BOUNDS;
            } else {
                AttributeSet base = getBaseAttributes();
                ret = source.getOffsetBounds(base).expand(5);
            }
            bounds = ret;
        }
        return ret;
    }
}
| gpl-3.0 |
testIT-LivingDoc/livingdoc-core | livingdoc-client/src/main/java/info/novatec/testit/livingdoc/server/rest/requests/GetSpecificationRepositoriesOfAssociatedProjectRequest.java | 446 | package info.novatec.testit.livingdoc.server.rest.requests;
import info.novatec.testit.livingdoc.server.domain.Repository;
/**
 * REST request payload asking for the specification repositories of the
 * project associated with the given repository. Plain DTO: the field is
 * public and mutable for serialization frameworks.
 */
public class GetSpecificationRepositoriesOfAssociatedProjectRequest {

    /** Repository whose associated project's specification repositories are requested. */
    public Repository repository;

    /** No-arg constructor required by (de)serialization. */
    public GetSpecificationRepositoriesOfAssociatedProjectRequest() {
        this(null);
    }

    public GetSpecificationRepositoriesOfAssociatedProjectRequest(Repository repository) {
        this.repository = repository;
    }
}
| gpl-3.0 |
mukadder/kc | coeus-impl/src/main/java/org/kuali/kra/institutionalproposal/dao/ojb/AllFundingProposalQueryCustomizer.java | 2282 | /*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2016 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kra.institutionalproposal.dao.ojb;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.ojb.broker.PersistenceBroker;
import org.apache.ojb.broker.accesslayer.QueryCustomizerDefaultImpl;
import org.apache.ojb.broker.metadata.CollectionDescriptor;
import org.apache.ojb.broker.query.Criteria;
import org.apache.ojb.broker.query.Query;
import org.apache.ojb.broker.query.QueryByCriteria;
import org.kuali.coeus.common.framework.version.VersionStatus;
import org.kuali.kra.institutionalproposal.home.InstitutionalProposal;
/**
 * OJB query customizer that restricts an institutional proposal's
 * "all funding proposals" collection to entries which share the owning
 * proposal's number, carry an ACTIVE, PENDING, or ARCHIVED sequence status,
 * and are flagged active.
 */
public class AllFundingProposalQueryCustomizer extends QueryCustomizerDefaultImpl {

    private static final String ACTIVE = "active";
    private static final String PROPOSAL_SEQUENCE_STATUS = "proposal.proposalSequenceStatus";
    private static final String PROPOSAL_NUMBER = "proposal.proposalNumber";

    public Query customizeQuery(Object anObject,
            PersistenceBroker aBroker,
            CollectionDescriptor aCod, QueryByCriteria aQuery){
        final InstitutionalProposal owner = (InstitutionalProposal) anObject;

        // Statuses that should remain visible in the funding-proposal list.
        final java.util.List<String> visibleStatuses = Stream
                .of(VersionStatus.ACTIVE, VersionStatus.PENDING, VersionStatus.ARCHIVED)
                .map(VersionStatus::toString)
                .collect(Collectors.toList());

        final Criteria criteria = new Criteria();
        criteria.addEqualTo(PROPOSAL_NUMBER, owner.getProposalNumber());
        criteria.addIn(PROPOSAL_SEQUENCE_STATUS, visibleStatuses);
        criteria.addEqualTo(ACTIVE, Boolean.TRUE);

        aQuery.setCriteria(criteria);
        return aQuery;
    }
}
| agpl-3.0 |
sanjupolus/KC6.oLatest | coeus-impl/src/main/java/org/kuali/coeus/common/framework/impl/LineItemObject.java | 2593 | /*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2015 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.coeus.common.framework.impl;
import org.kuali.coeus.sys.api.model.ScaleTwoDecimal;
import java.io.Serializable;
/**
* A line item object to be used by LineItemTable which represents an amount. The line item objects are matched
* by the table for row by lineItemId, so the lineItemId MUST match across Periods to create a row.
*
* @author Kuali Coeus
*/
/**
 * A line item entry carrying an amount, consumed by LineItemTable. The table
 * matches entries into rows by {@code lineItemId}, so entries that belong on
 * the same row across periods MUST share the same id.
 *
 * @author Kuali Coeus
 */
public class LineItemObject extends LineItem implements Serializable {

    private static final long serialVersionUID = 2763265895600649723L;

    // Row key: unique within a period, identical across periods for one row.
    private String lineItemId;
    // Label displayed for the row.
    private String name;
    // Amount shown for this period, two-decimal precision.
    private ScaleTwoDecimal amount;

    public LineItemObject(String lineItemId, String name, ScaleTwoDecimal amount) {
        this.lineItemId = lineItemId;
        this.name = name;
        this.amount = amount;
    }

    /** Row-matching identifier; see the class comment. */
    public String getLineItemId() {
        return lineItemId;
    }

    public void setLineItemId(String lineItemId) {
        this.lineItemId = lineItemId;
    }

    /** Display name for this line item. */
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** The amount for this line item. */
    public ScaleTwoDecimal getAmount() {
        return amount;
    }

    public void setAmount(ScaleTwoDecimal amount) {
        this.amount = amount;
    }
}
| agpl-3.0 |
deerwalk/voltdb | third_party/java/src/com/google_voltpatches/common/collect/AbstractSortedKeySortedSetMultimap.java | 1658 | /*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google_voltpatches.common.collect;
import com.google_voltpatches.common.annotations.GwtCompatible;
import java.util.Collection;
import java.util.SortedMap;
import java.util.SortedSet;
/**
* Basic implementation of a {@link SortedSetMultimap} with a sorted key set.
*
* <p>This superclass allows {@code TreeMultimap} to override methods to return
* navigable set and map types in non-GWT only, while GWT code will inherit the
* SortedMap/SortedSet overrides.
*
* @author Louis Wasserman
*/
@GwtCompatible
abstract class AbstractSortedKeySortedSetMultimap<K, V> extends AbstractSortedSetMultimap<K, V> {
  AbstractSortedKeySortedSetMultimap(SortedMap<K, Collection<V>> map) {
    super(map);
  }

  // The constructor guarantees the backing map is a SortedMap, so these
  // overrides only narrow the superclass return types via unchecked-safe casts.
  @Override
  public SortedMap<K, Collection<V>> asMap() {
    return (SortedMap<K, Collection<V>>) super.asMap();
  }

  @Override
  SortedMap<K, Collection<V>> backingMap() {
    return (SortedMap<K, Collection<V>>) super.backingMap();
  }

  @Override
  public SortedSet<K> keySet() {
    return (SortedSet<K>) super.keySet();
  }
}
| agpl-3.0 |
sanjupolus/KC6.oLatest | coeus-impl/src/main/java/org/kuali/coeus/common/budget/framework/personnel/BudgetPersonSalaryDetails.java | 3833 | /*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2015 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.coeus.common.budget.framework.personnel;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinColumns;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.kuali.coeus.sys.framework.model.KcPersistableBusinessObjectBase;
import org.kuali.coeus.sys.api.model.ScaleTwoDecimal;
import org.kuali.rice.krad.data.jpa.PortableSequenceGenerator;
@Entity
@Table(name = "BUDGET_PERSON_SALARY_DETAILS")
public class BudgetPersonSalaryDetails extends KcPersistableBusinessObjectBase {
@PortableSequenceGenerator(name = "SEQ_BUDGET_PER_SAL_DET_ID")
@GeneratedValue(generator = "SEQ_BUDGET_PER_SAL_DET_ID")
@Id
@Column(name = "BUDGET_PERSON_SALARY_DETAIL_ID")
private Long budgetPersonSalaryDetailId;
@Column(name = "PERSON_SEQUENCE_NUMBER", insertable = false, updatable = false)
private Integer personSequenceNumber;
@Column(name = "BUDGET_ID", insertable = false, updatable = false)
private Long budgetId;
@Column(name = "BUDGET_PERIOD")
private Integer budgetPeriod;
@Column(name = "PERSON_ID")
private String personId;
@Column(name = "BASE_SALARY")
private ScaleTwoDecimal baseSalary = ScaleTwoDecimal.ZERO;
@ManyToOne(cascade = { CascadeType.REFRESH })
@JoinColumns({ @JoinColumn(name = "BUDGET_ID", referencedColumnName = "BUDGET_ID"),
@JoinColumn(name = "PERSON_SEQUENCE_NUMBER", referencedColumnName = "PERSON_SEQUENCE_NUMBER") })
private BudgetPerson budgetPerson;
public Long getBudgetPersonSalaryDetailId() {
return budgetPersonSalaryDetailId;
}
public void setBudgetPersonSalaryDetailId(Long budgetPersonSalaryDetailId) {
this.budgetPersonSalaryDetailId = budgetPersonSalaryDetailId;
}
public Integer getPersonSequenceNumber() {
return personSequenceNumber;
}
public void setPersonSequenceNumber(Integer personSequenceNumber) {
this.personSequenceNumber = personSequenceNumber;
}
public Long getBudgetId() {
return budgetId;
}
public void setBudgetId(Long budgetId) {
this.budgetId = budgetId;
}
public Integer getBudgetPeriod() {
return budgetPeriod;
}
public void setBudgetPeriod(Integer budgetPeriod) {
this.budgetPeriod = budgetPeriod;
}
public ScaleTwoDecimal getBaseSalary() {
return baseSalary;
}
public void setBaseSalary(ScaleTwoDecimal baseSalary) {
this.baseSalary = baseSalary;
}
public void setPersonId(String personId) {
this.personId = personId;
}
public String getPersonId() {
return personId;
}
public BudgetPerson getBudgetPerson() {
return budgetPerson;
}
public void setBudgetPerson(BudgetPerson budgetPerson) {
this.budgetPerson = budgetPerson;
}
}
| agpl-3.0 |
kumarrus/voltdb | tests/testprocs/org/voltdb_testprocs/regressionsuites/catchexceptions/SPBigBatchOnPartitionTable.java | 2707 | /* This file is part of VoltDB.
* Copyright (C) 2008-2015 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb_testprocs.regressionsuites.catchexceptions;
import org.voltdb.ProcInfo;
import org.voltdb.SQLStmt;
import org.voltdb.VoltProcedure;
@ProcInfo
(
    singlePartition = true,
    partitionInfo = "P1.NUM: 0"
)
public class SPBigBatchOnPartitionTable extends VoltProcedure {
    public final SQLStmt insertP1 = new SQLStmt("insert into P1 (ID, ratio) values (?, ?)");

    /** Interprets the client's int flags: 0 means false, anything else true. */
    private boolean isTrue(int value) {
        return value != 0;
    }

    /**
     * Single-partition test procedure that queues a 300+ statement batch so it
     * is split into multiple internal units (VoltDB executes batches in chunks
     * of 200), optionally surrounded by smaller batches, to exercise exception
     * handling across batch boundaries.
     *
     * @param partitionKey routes all inserts to one partition (P1.NUM)
     * @param hasPreviousBatch nonzero to run a one-row batch first
     * @param duplicatedID id to insert twice inside the big batch to trigger a
     *        constraint violation, or out-of-range (e.g. -1) for none
     * @param hasFollowingBatch nonzero to run a one-row batch afterwards
     * @param followingBatchHasException nonzero to make the following batch
     *        raise an (uncaught) constraint violation
     * @return 0 if the big batch succeeded, -1 if it threw
     */
    // use a partition key here to put all data insert into one partition
    public long run(int partitionKey, int hasPreviousBatch, int duplicatedID,
            int hasFollowingBatch, int followingBatchHasException) {
        int result = 0;

        if (isTrue(hasPreviousBatch)) {
            voltQueueSQL(insertP1, 0, 0.1);
            voltExecuteSQL();
        }

        // VoltDB break large batch with 200 units
        // 300 here will be be broke into two batches at least
        try {
            for (int i = 1; i <= 300; i++) {
                voltQueueSQL(insertP1, i, 10.1);
                if (i == duplicatedID) {
                    // Duplicate primary key: forces a failure mid-batch.
                    voltQueueSQL(insertP1, i, 10.2);
                }
            }
            voltExecuteSQL();
        } catch (Exception e) {
            // Deliberately swallowed: the test inspects the -1 return value.
            result = -1;
        }

        if (isTrue(hasFollowingBatch)) {
            voltQueueSQL(insertP1, 500, 500.1);
            if (isTrue(followingBatchHasException)) {
                // Second insert of id 500 violates the constraint; this one is
                // intentionally NOT caught so it aborts the procedure.
                voltQueueSQL(insertP1, 500, 500.2);
            }
            voltExecuteSQL();
        }
        return result;
    }
}
| agpl-3.0 |
AsherBond/MondocosmOS | wonderland/modules/tools/phone/src/classes/org/jdesktop/wonderland/modules/phone/server/cell/PhoneStatusListener.java | 6625 | /**
* Project Wonderland
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., All Rights Reserved
*
* Redistributions in source code form must reproduce the above
* copyright and this condition.
*
* The contents of this file are subject to the GNU General Public
* License, Version 2 (the "License"); you may not use this file
* except in compliance with the License. A copy of the License is
* available at http://www.opensource.org/licenses/gpl-license.php.
*
* Sun designates this particular file as subject to the "Classpath"
* exception as provided by Sun in the License file that accompanied
* this code.
*/
package org.jdesktop.wonderland.modules.phone.server.cell;
import com.sun.sgs.app.ManagedReference;
import org.jdesktop.wonderland.modules.phone.common.CallListing;
import org.jdesktop.wonderland.modules.phone.common.messages.CallInvitedResponseMessage;
import org.jdesktop.wonderland.modules.phone.common.messages.CallEndedResponseMessage;
import org.jdesktop.wonderland.modules.phone.common.messages.CallEstablishedResponseMessage;
import com.sun.mpk20.voicelib.app.AudioGroup;
import com.sun.mpk20.voicelib.app.Call;
import com.sun.mpk20.voicelib.app.ManagedCallStatusListener;
import com.sun.mpk20.voicelib.app.Player;
import com.sun.mpk20.voicelib.app.VoiceManager;
import com.sun.sgs.app.AppContext;
import com.sun.sgs.app.ManagedObject;
import com.sun.voip.CallParticipant;
import com.sun.voip.client.connector.CallStatus;
import org.jdesktop.wonderland.common.cell.messages.CellMessage;
import org.jdesktop.wonderland.server.WonderlandContext;
import org.jdesktop.wonderland.server.comms.WonderlandClientID;
import org.jdesktop.wonderland.server.comms.WonderlandClientSender;
import org.jdesktop.wonderland.server.comms.CommsManager;
import org.jdesktop.wonderland.common.cell.CellChannelConnectionType;
import java.io.IOException;
import java.io.Serializable;
import java.util.logging.Logger;
import org.jdesktop.wonderland.common.messages.Message;
import org.jdesktop.wonderland.common.cell.CellID;
import org.jdesktop.wonderland.server.cell.CellMO;
import com.jme.math.Vector3f;
/**
* A server cell that provides conference phone functionality
* @author jprovino
*/
public class PhoneStatusListener implements ManagedCallStatusListener,
        Serializable {

    private static final Logger logger =
        Logger.getLogger(PhoneStatusListener.class.getName());

    // Cell that owns the phone; used to address response messages back to it.
    private CellID cellID;

    // The call listing (softphone leg + external leg) this listener tracks.
    private CallListing listing;

    // Client that placed the call; receives the INVITED/ESTABLISHED/ENDED responses.
    private WonderlandClientID clientID;

    /**
     * Creates a listener for one phone call and registers it with the voice
     * manager for status changes on the listing's external call ID.
     *
     * @param phoneCellMO the phone cell the call belongs to
     * @param listing identifies the softphone and external call legs
     * @param clientID the client to notify of call status changes
     */
    public PhoneStatusListener(PhoneCellMO phoneCellMO, CallListing listing,
            WonderlandClientID clientID) {
        cellID = phoneCellMO.getCellID();
        this.listing = listing;
        this.clientID = clientID;
        AppContext.getManager(VoiceManager.class).addCallStatusListener(this,
            listing.getExternalCallID());
    }

    // Guards against processing CallStatus.ENDED more than once.
    private boolean ended;

    /**
     * Voice-manager callback: translates call status changes into response
     * messages sent to the originating client, and plays/stops the ring tone
     * treatment for private calls.
     */
    public void callStatusChanged(CallStatus status) {
        logger.finer("got status " + status);
        WonderlandClientSender sender = WonderlandContext.getCommsManager().getSender(
            CellChannelConnectionType.CLIENT_TYPE);
        VoiceManager vm = AppContext.getManager(VoiceManager.class);
        Call externalCall = null;
        if (listing.getExternalCallID() != null) {
            externalCall = vm.getCall(listing.getExternalCallID());
        }
        // NOTE(review): softphoneCall is only null-checked in the ENDED case
        // below; the INVITED/ESTABLISHED branches assume the lookup succeeds
        // when the listing is private — confirm that invariant holds.
        Call softphoneCall = vm.getCall(listing.getSoftphoneCallID());
        logger.fine("external call: " + externalCall);
        logger.fine("softphone call: " + softphoneCall);
        switch(status.getCode()) {
        case CallStatus.INVITED:
            //The call has been placed, the phone should be ringing
            /** HARRISDEBUG: It should be tested whether or not we'll catch
             * callStatus changes for calls which we've just set up.
             * If not, this code will have to be moved back to the
             * "messageReceived->PlaceCall" function.
             **/
            if (listing.isPrivate()) {
                //Start playing the phone ringing sound
                try {
                    softphoneCall.playTreatment("ring_tone.au");
                } catch (IOException e) {
                    logger.warning("Unable to play treatment " + softphoneCall + ": "
                        + e.getMessage());
                }
            }
            sender.send(clientID, new CallInvitedResponseMessage(cellID, listing, true));
            break;

        //Something's picked up, the call has been connected
        case CallStatus.ESTABLISHED:
            if (listing.isPrivate()) {
                //Stop playing the phone ringing sound
                try {
                    softphoneCall.stopTreatment("ring_tone.au");
                } catch (IOException e) {
                    logger.warning("Unable to stop treatment " + softphoneCall + ": "
                        + e.getMessage());
                }
            }
            sender.send(clientID, new CallEstablishedResponseMessage(cellID, listing, true));
            break;

        case CallStatus.ENDED:
            // ENDED can be reported for both call legs; handle it only once.
            if (ended) {
                return;
            }
            ended = true;
            vm.removeCallStatusListener(this);

            //Stop the ringing
            if (softphoneCall != null) {
                try {
                    softphoneCall.stopTreatment("ring_tone.au");
                } catch (IOException e) {
                    logger.warning(
                        "Unable to stop treatment " + softphoneCall + ": "
                        + e.getMessage());
                }
            }
            String softphoneCallID = listing.getSoftphoneCallID();

            //This may appear redundant, but it's necessary for the VoiceManager
            // to remove its internal data structures.
            if (listing.simulateCalls() == false) {
                if (externalCall != null) {
                    try {
                        vm.endCall(externalCall, true);
                    } catch (IOException e) {
                        logger.warning(
                            "Unable to end call " + externalCall + ": "
                            + e.getMessage());
                    }
                }

                if (listing.isPrivate()) {
                    // Tear down the private audio group created for this
                    // softphone/client pair and restore default attenuation.
                    String audioGroupId = softphoneCallID + "_"
                        + listing.getPrivateClientName();
                    AudioGroup audioGroup = vm.getAudioGroup(audioGroupId);
                    if (audioGroup != null) {
                        if (softphoneCall.getPlayer() != null) {
                            softphoneCall.getPlayer().attenuateOtherGroups(audioGroup,
                                AudioGroup.DEFAULT_SPEAKING_ATTENUATION,
                                AudioGroup.DEFAULT_LISTEN_ATTENUATION);
                        }
                        vm.removeAudioGroup(audioGroup);
                    }
                }
            }
            sender.send(clientID, new CallEndedResponseMessage(cellID,
                listing, true, status.getOption("Reason")));
            break;
        }
    }
}
| agpl-3.0 |
apetresc/JCommon | src/main/java/org/jfree/threads/ReaderWriterLock.java | 5871 | /* ========================================================================
* JCommon : a free general purpose class library for the Java(tm) platform
* ========================================================================
*
* (C) Copyright 2000-2005, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jcommon/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Java is a trademark or registered trademark of Sun Microsystems, Inc.
* in the United States and other countries.]
*
* ---------------------
* ReaderWriterLock.java
* ---------------------
*
* $Id: ReaderWriterLock.java,v 1.3 2005/10/18 13:18:34 mungady Exp $
*
* Changes
* -------
* 29-Jan-2003 : Added standard header (DG);
*
*/
package org.jfree.threads;
import java.util.ArrayList;
import java.util.Iterator;
/**
* A reader-writer lock from "Java Threads" by Scott Oak and Henry Wong.
*
* @author Scott Oak and Henry Wong
*/
/**
 * A re-entrant reader-writer lock from "Java Threads" by Scott Oak and Henry Wong.
 * <p>
 * Threads queue in arrival order; a reader may proceed when no writer is queued
 * ahead of it, a writer only when it is at the head of the queue. Upgrading a
 * held read lock to a write lock is rejected with an exception.
 *
 * @author Scott Oak and Henry Wong
 */
public class ReaderWriterLock {

    /**
     * A node for the waiting list.
     *
     * @author Scott Oak and Henry Wong
     */
    private static class ReaderWriterNode {

        /** A reader. */
        protected static final int READER = 0;

        /** A writer. */
        protected static final int WRITER = 1;

        /** The thread. */
        protected Thread t;

        /** The state (READER or WRITER). */
        protected int state;

        /** The number of re-entrant acquires by {@link #t}. */
        protected int nAcquires;

        /**
         * Creates a new node.
         *
         * @param t the thread.
         * @param state the state.
         */
        private ReaderWriterNode(final Thread t, final int state) {
            this.t = t;
            this.state = state;
            this.nAcquires = 0;
        }
    }

    /** The waiting threads, in arrival order. Guarded by {@code this}. */
    private final ArrayList<ReaderWriterNode> waiters;

    /**
     * Default constructor.
     */
    public ReaderWriterLock() {
        this.waiters = new ArrayList<ReaderWriterNode>();
    }

    /**
     * Grab the read lock, blocking until no writer is queued ahead of the
     * calling thread. Re-entrant. If the thread is interrupted while waiting,
     * it keeps waiting and the interrupt status is restored before returning.
     */
    public synchronized void lockRead() {
        final ReaderWriterNode node;
        final Thread me = Thread.currentThread();
        final int index = getIndex(me);
        if (index == -1) {
            node = new ReaderWriterNode(me, ReaderWriterNode.READER);
            this.waiters.add(node);
        }
        else {
            node = this.waiters.get(index);
        }
        boolean interrupted = false;
        // A reader may proceed once every node ahead of it is also a reader.
        while (getIndex(me) > firstWriter()) {
            try {
                wait();
            }
            catch (InterruptedException e) {
                // Keep waiting for the lock; re-assert the interrupt later so
                // the caller can observe it.
                interrupted = true;
            }
        }
        node.nAcquires++;
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Grab the write lock, blocking until the calling thread is at the head of
     * the queue. Re-entrant for a thread that already holds the write lock.
     *
     * @throws IllegalArgumentException if the caller holds the read lock
     *         (lock upgrades are not supported).
     */
    public synchronized void lockWrite() {
        final ReaderWriterNode node;
        final Thread me = Thread.currentThread();
        final int index = getIndex(me);
        if (index == -1) {
            node = new ReaderWriterNode(me, ReaderWriterNode.WRITER);
            this.waiters.add(node);
        }
        else {
            node = this.waiters.get(index);
            if (node.state == ReaderWriterNode.READER) {
                throw new IllegalArgumentException("Upgrade lock");
            }
            node.state = ReaderWriterNode.WRITER;
        }
        boolean interrupted = false;
        while (getIndex(me) != 0) {
            try {
                wait();
            }
            catch (InterruptedException e) {
                interrupted = true;
            }
        }
        node.nAcquires++;
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Release one hold on the lock; the node is removed from the queue once
     * its re-entrant acquire count reaches zero.
     *
     * @throws IllegalArgumentException if the calling thread does not hold
     *         the lock.
     */
    public synchronized void unlock() {
        final ReaderWriterNode node;
        final Thread me = Thread.currentThread();
        final int index = getIndex(me);
        // index == -1: the thread never queued at all. index > firstWriter():
        // the thread is queued behind a writer and so cannot hold the lock.
        // (The original code missed the -1 case and threw
        // IndexOutOfBoundsException from waiters.get(-1).)
        if (index == -1 || index > firstWriter()) {
            throw new IllegalArgumentException("Lock not held");
        }
        node = this.waiters.get(index);
        node.nAcquires--;
        if (node.nAcquires == 0) {
            this.waiters.remove(index);
        }
        notifyAll();
    }

    /**
     * Returns the index of the first waiting writer.
     *
     * @return The index, or {@code Integer.MAX_VALUE} if no writer is queued.
     */
    private int firstWriter() {
        int index = 0;
        for (final ReaderWriterNode node : this.waiters) {
            if (node.state == ReaderWriterNode.WRITER) {
                return index;
            }
            index += 1;
        }
        return Integer.MAX_VALUE;
    }

    /**
     * Returns the queue index of a thread.
     *
     * @param t the thread.
     *
     * @return The index, or -1 if the thread is not queued.
     */
    private int getIndex(final Thread t) {
        int index = 0;
        for (final ReaderWriterNode node : this.waiters) {
            if (node.t == t) {
                return index;
            }
            index += 1;
        }
        return -1;
    }
}
| lgpl-2.1 |
hungerburg/exist | extensions/expath/src/org/expath/httpclient/model/exist/EXistResult.java | 5815 | /*
* eXist EXPath
* Copyright (C) 2013 Adam Retter <adam@existsolutions.com>
* www.existsolutions.com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id$
*/
package org.expath.httpclient.model.exist;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import javax.xml.transform.Source;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.exist.dom.memtree.DocumentImpl;
import org.exist.util.io.TemporaryFileManager;
import org.exist.xquery.NodeTest;
import org.exist.xquery.TypeTest;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQueryContext;
import org.exist.xquery.modules.ModuleUtils;
import org.exist.xquery.value.*;
import org.expath.httpclient.HttpClientException;
import org.expath.httpclient.HttpResponse;
import org.expath.httpclient.model.Result;
import org.xml.sax.SAXException;
/**
* @author Adam Retter <adam@existsolutions.com>
*/
/**
 * {@link Result} implementation that accumulates HTTP client response parts
 * into an eXist {@link ValueSequence}.
 *
 * @author Adam Retter <adam@existsolutions.com>
 */
public class EXistResult implements Result {

    private static final Logger logger = LogManager.getLogger(EXistResult.class);

    ValueSequence result = new ValueSequence();

    private final XQueryContext context;

    public EXistResult(final XQueryContext context) {
        this.context = context;
    }

    /** Creates an empty result bound to a copy of this result's query context. */
    @Override
    public Result makeNewResult() throws HttpClientException {
        return new EXistResult(context.copyContext());
    }

    /** Drains the reader into a string value and appends it to the result. */
    @Override
    public void add(final Reader reader, final Charset charset) throws HttpClientException {
        // START TEMP
        //TODO(AR) - replace with a deferred StringReader when eXist has this soon.
        final StringBuilder accumulated = new StringBuilder();
        try {
            final char[] buf = new char[4096];
            for (int charsRead = reader.read(buf); charsRead > -1; charsRead = reader.read(buf)) {
                accumulated.append(buf, 0, charsRead);
            }
        } catch (final IOException ioe) {
            throw new HttpClientException("Unable to add string value to result: " + ioe.getMessage(), ioe);
        } finally {
            try {
                reader.close();
            } catch (final IOException ioe) {
                logger.warn(ioe.getMessage(), ioe);
            }
        }
        // END TEMP
        result.add(new StringValue(accumulated.toString()));
    }

    /** Spools the stream to a temporary file and appends it as a Base64 binary value. */
    @Override
    public void add(final InputStream is) throws HttpClientException {
        try {
            // the socket backing the stream will be closed shortly, so the
            // data must be copied to a temporary file first
            final TemporaryFileManager temporaryFileManager = TemporaryFileManager.getInstance();
            final Path spoolFile = temporaryFileManager.getTemporaryFile();
            Files.copy(is, spoolFile, StandardCopyOption.REPLACE_EXISTING);
            // the temporary file is handed back to the manager once the value is closed
            result.add(BinaryValueFromFile.getInstance(context, new Base64BinaryValueType(), spoolFile,
                    (isClosed, file) -> temporaryFileManager.returnTemporaryFile(file)));
        } catch (final XPathException | IOException e) {
            throw new HttpClientException("Unable to add binary value to result:" + e.getMessage(), e);
        } finally {
            try {
                is.close();
            } catch (final IOException e) {
                logger.warn(e.getMessage(), e);
            }
        }
    }

    /** Parses the XML source and appends the resulting node to the result. */
    @Override
    public void add(final Source src) throws HttpClientException {
        try {
            result.add(ModuleUtils.sourceToXML(context, src));
        } catch (final SAXException | IOException e) {
            throw new HttpClientException("Unable to add Source to result:" + e.getMessage(), e);
        }
    }

    /**
     * Serializes the HTTP response into a document and prepends its root
     * element to the result sequence.
     */
    @Override
    public void add(final HttpResponse response) throws HttpClientException {
        final EXistTreeBuilder builder = new EXistTreeBuilder(context);
        response.outputResponseElement(builder);
        final DocumentImpl doc = builder.close();
        try {
            // we add the root *element* to the result sequence
            final NodeTest elementsOnly = new TypeTest(Type.ELEMENT);
            if (result.isEmpty()) {
                doc.selectChildren(elementsOnly, result);
            } else {
                // the elem must always be added at the front, so if there are
                // already other items, we create a new one, add the elem, then
                // add the original items after
                final ValueSequence reordered = new ValueSequence();
                doc.selectChildren(elementsOnly, reordered);
                reordered.addAll(result);
                result = reordered;
            }
        } catch (final XPathException xpe) {
            throw new HttpClientException("Unable to add HttpResponse to result:" + xpe.getMessage(), xpe);
        }
    }

    /** @return the accumulated result sequence */
    public Sequence getResult() {
        return result;
    }
}
| lgpl-2.1 |
nhminus/jaudiotagger-androidpatch | src/org/jaudiotagger/audio/ogg/util/OggInfoReader.java | 6507 | /*
* Entagged Audio Tag library
* Copyright (c) 2003-2005 Raphaël Slinckx <raphael@slinckx.net>
* Copyright (c) 2004-2005 Christian Laireiter <liree@web.de>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jaudiotagger.audio.ogg.util;
import org.jaudiotagger.audio.exceptions.CannotReadException;
import org.jaudiotagger.audio.generic.GenericAudioHeader;
import org.jaudiotagger.logging.ErrorMessage;
import org.jaudiotagger.tag.id3.AbstractID3v2Tag;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.logging.Logger;
import java.util.Arrays;
/**
* Read encoding info, only implemented for vorbis streams
*/
/**
 * Read encoding info, only implemented for vorbis streams
 */
public class OggInfoReader
{
    // Logger Object
    public static Logger logger = Logger.getLogger("org.jaudiotagger.audio.ogg.atom");

    /**
     * Reads the audio header (length, bitrate, channels, sample rate) from a
     * Vorbis-in-Ogg file.
     *
     * Strategy: scan backwards from the end of the file for the last Ogg page
     * and take its granule position (total PCM samples for Vorbis), then read
     * the Vorbis identification header from the first page and derive the
     * duration and bitrate from the two.
     *
     * @param raf the file, positioned at the start of the Ogg data
     * @return the populated generic audio header
     * @throws CannotReadException if no Ogg capture pattern or valid setup is found
     * @throws IOException on a read failure
     */
    public GenericAudioHeader read(RandomAccessFile raf) throws CannotReadException, IOException
    {
        long start = raf.getFilePointer();
        GenericAudioHeader info = new GenericAudioHeader();
        logger.fine("Started");
        long oldPos;

        //Check start of file does it have Ogg pattern
        byte[] b = new byte[OggPageHeader.CAPTURE_PATTERN.length];
        raf.read(b);
        if (!(Arrays.equals(b, OggPageHeader.CAPTURE_PATTERN)))
        {
            raf.seek(0);
            // The capture pattern may be preceded by an ID3v2 tag that was
            // prepended to the Ogg data; skip over it and retry.
            if(AbstractID3v2Tag.isId3Tag(raf))
            {
                raf.read(b);
                if ((Arrays.equals(b, OggPageHeader.CAPTURE_PATTERN)))
                {
                    start=raf.getFilePointer();
                }
            }
            else
            {
                throw new CannotReadException(ErrorMessage.OGG_HEADER_CANNOT_BE_FOUND.getMsg(new String(b)));
            }
        }

        //Now work backwards from file looking for the last ogg page, it reads the granule position for this last page
        //which must be set.
        //TODO should do buffering to cut down the number of file reads
        raf.seek(start);
        double pcmSamplesNumber = -1;
        raf.seek(raf.length() - 2);
        while (raf.getFilePointer() >= 4)
        {
            // Scan backwards one byte at a time for the final byte of the
            // capture pattern; on a hit, verify the three preceding bytes.
            if (raf.read() == OggPageHeader.CAPTURE_PATTERN[3])
            {
                raf.seek(raf.getFilePointer() - OggPageHeader.FIELD_CAPTURE_PATTERN_LENGTH);
                byte[] ogg = new byte[3];
                raf.readFully(ogg);
                if (ogg[0] == OggPageHeader.CAPTURE_PATTERN[0] && ogg[1] == OggPageHeader.CAPTURE_PATTERN[1] && ogg[2] == OggPageHeader.CAPTURE_PATTERN[2])
                {
                    raf.seek(raf.getFilePointer() - 3);
                    oldPos = raf.getFilePointer();
                    // Read the page-segment count first so the full
                    // variable-length page header can be read in one go.
                    raf.seek(raf.getFilePointer() + OggPageHeader.FIELD_PAGE_SEGMENTS_POS);
                    int pageSegments = raf.readByte() & 0xFF; //Unsigned
                    raf.seek(oldPos);
                    b = new byte[OggPageHeader.OGG_PAGE_HEADER_FIXED_LENGTH + pageSegments];
                    raf.readFully(b);
                    OggPageHeader pageHeader = new OggPageHeader(b);
                    raf.seek(0);
                    // For Vorbis, the granule position of the final page is
                    // the total PCM sample count of the stream.
                    pcmSamplesNumber = pageHeader.getAbsoluteGranulePosition();
                    break;
                }
            }
            // Step back past the byte just read plus one more, so every byte
            // position is tested exactly once.
            raf.seek(raf.getFilePointer() - 2);
        }

        if (pcmSamplesNumber == -1)
        {
            //According to spec a value of -1 indicates no packet finished on this page, this should not occur
            throw new CannotReadException(ErrorMessage.OGG_VORBIS_NO_SETUP_BLOCK.getMsg());
        }

        //1st page = Identification Header
        OggPageHeader pageHeader = OggPageHeader.read(raf);
        byte[] vorbisData = new byte[pageHeader.getPageLength()];
        raf.read(vorbisData);
        VorbisIdentificationHeader vorbisIdentificationHeader = new VorbisIdentificationHeader(vorbisData);

        //Map to generic encodingInfo
        info.setPreciseLength((float) (pcmSamplesNumber / vorbisIdentificationHeader.getSamplingRate()));
        info.setChannelNumber(vorbisIdentificationHeader.getChannelNumber());
        info.setSamplingRate(vorbisIdentificationHeader.getSamplingRate());
        info.setEncodingType(vorbisIdentificationHeader.getEncodingType());
        info.setExtraEncodingInfos("");

        //According to Wikipedia Vorbis Page, Vorbis only works on 16bits 44khz
        info.setBitsPerSample(16);

        //TODO this calculation should be done within identification header
        if (vorbisIdentificationHeader.getNominalBitrate() != 0 && vorbisIdentificationHeader.getMaxBitrate() == vorbisIdentificationHeader.getNominalBitrate() && vorbisIdentificationHeader.getMinBitrate() == vorbisIdentificationHeader.getNominalBitrate())
        {
            //CBR (in kbps)
            info.setBitrate(vorbisIdentificationHeader.getNominalBitrate() / 1000);
            info.setVariableBitRate(false);
        }
        else
        if (vorbisIdentificationHeader.getNominalBitrate() != 0 && vorbisIdentificationHeader.getMaxBitrate() == 0 && vorbisIdentificationHeader.getMinBitrate() == 0)
        {
            //Average vbr (in kpbs)
            info.setBitrate(vorbisIdentificationHeader.getNominalBitrate() / 1000);
            info.setVariableBitRate(true);
        }
        else
        {
            // No usable bitrate fields in the header: estimate from file size
            // and duration instead.
            //TODO need to remove comment from raf.getLength()
            info.setBitrate(computeBitrate(info.getTrackLength(), raf.length()));
            info.setVariableBitRate(true);
        }
        logger.fine("Finished");
        return info;
    }

    /**
     * Computes an average bitrate in kbps from the track length (in seconds)
     * and the total file size (in bytes).
     */
    private int computeBitrate(int length, long size)
    {
        //Protect against audio less than 0.5 seconds that can be rounded to zero causing Arithmetic Exception
        if(length==0)
        {
            length=1;
        }
        return (int) ((size / 1000) * 8 / length);
    }
}
| lgpl-2.1 |
PandaTeam/HydroBlocks | src/buildcraft/api/power/PowerHandler.java | 10105 | /**
* Copyright (c) SpaceToad, 2011 http://www.mod-buildcraft.com
*
* BuildCraft is distributed under the terms of the Minecraft Mod Public License
* 1.0, or MMPL. Please check the contents of the license located in
* http://www.mod-buildcraft.com/MMPL-1.0.txt
*/
package buildcraft.api.power;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.common.ForgeDirection;
import buildcraft.api.core.SafeTimeTracker;
public final class PowerHandler
{
    /**
     * The kind of block this handler belongs to; controls how pipes and
     * engines interact with it.
     */
    public static enum Type
    {
        ENGINE, GATE, MACHINE, PIPE, STORAGE;

        /** @return true if pipes are allowed to push power into this kind of block */
        public boolean canReceiveFromPipes()
        {
            switch (this)
            {
                case MACHINE:
                case STORAGE:
                    return true;
                default:
                    return false;
            }
        }

        /**
         * @return true if this kind of block absorbs any excess power an
         *         engine supplies beyond what was actually stored
         */
        public boolean eatsEngineExcess()
        {
            switch (this)
            {
                case MACHINE:
                case STORAGE:
                    return true;
                default:
                    return false;
            }
        }
    }

    /**
     * Computes passive power loss ("perdition") over time. Subclass to
     * customize the loss curve.
     */
    public static class PerditionCalculator
    {
        /** Default flat loss per tick. */
        public static final float DEFAULT_POWERLOSS = 1F;

        /** Smallest permitted loss per tick; smaller values are clamped up. */
        public static final float MIN_POWERLOSS = 0.01F;

        private final float powerLoss;

        public PerditionCalculator()
        {
            powerLoss = DEFAULT_POWERLOSS;
        }

        public PerditionCalculator(float powerLoss)
        {
            if (powerLoss < MIN_POWERLOSS)
            {
                powerLoss = MIN_POWERLOSS;
            }
            this.powerLoss = powerLoss;
        }

        /**
         * Apply the perdition algorithm to the current stored energy. This function can only be
         * called once per tick, but it might not be called every tick. It is triggered by any
         * manipulation of the stored energy.
         *
         * @param powerHandler the PowerHandler requesting the perdition update
         * @param current the current stored energy
         * @param ticksPassed ticks since the last time this function was called
         * @return the energy remaining after the loss, never negative
         */
        public float applyPerdition(PowerHandler powerHandler, float current, long ticksPassed)
        {
            current -= powerLoss * ticksPassed;
            if (current < 0)
            {
                current = 0;
            }
            return current;
        }
    }

    /** Fallback calculator used when none (or an invalid one) is configured. */
    public static final PerditionCalculator DEFAULT_PERDITION = new PerditionCalculator();

    private float minEnergyReceived;
    private float maxEnergyReceived;
    private float maxEnergyStored;
    private float activationEnergy;
    private float energyStored = 0;

    // Rate-limiters: each gates its action to at most once per game tick.
    private final SafeTimeTracker doWorkTracker = new SafeTimeTracker();
    private final SafeTimeTracker sourcesTracker = new SafeTimeTracker();
    private final SafeTimeTracker perditionTracker = new SafeTimeTracker();

    // Per-side countdown (indexed by ForgeDirection ordinal); non-zero means
    // that side recently supplied power.
    public final int[] powerSources = new int[6];

    /** The machine this handler powers; receives the doWork() callback. */
    public final IPowerReceptor receptor;

    private PerditionCalculator perdition;
    private final PowerReceiver receiver;
    private final Type type;

    public PowerHandler(IPowerReceptor receptor, Type type)
    {
        this.receptor = receptor;
        this.type = type;
        this.receiver = new PowerReceiver();
        this.perdition = DEFAULT_PERDITION;
    }

    /** @return the public-facing receiver through which other blocks supply power */
    public PowerReceiver getPowerReceiver()
    {
        return receiver;
    }

    /** @return the minimum energy accepted per transfer */
    public float getMinEnergyReceived()
    {
        return minEnergyReceived;
    }

    /** @return the maximum energy accepted per transfer */
    public float getMaxEnergyReceived()
    {
        return maxEnergyReceived;
    }

    /** @return the storage capacity */
    public float getMaxEnergyStored()
    {
        return maxEnergyStored;
    }

    /** @return the stored-energy threshold at which doWork() fires */
    public float getActivationEnergy()
    {
        return activationEnergy;
    }

    /** @return the currently stored energy */
    public float getEnergyStored()
    {
        return energyStored;
    }

    /**
     * Setup your PowerHandler's settings.
     *
     * @param minEnergyReceived This is the minimum about of power that will be accepted by the
     * PowerHandler. This should generally be greater than the activationEnergy if you plan to use
     * the doWork() callback. Anything greater than 1 will prevent Redstone Engines from powering
     * this Provider.
     * @param maxEnergyReceived The maximum amount of power accepted by the PowerHandler. This
     * should generally be less than 500. Too low and larger engines will overheat while trying to
     * power the machine. Too high, and the engines will never warm up. Greater values also place
     * greater strain on the power net.
     * @param activationEnergy If the stored energy is greater than this value, the doWork()
     * callback is called (once per tick).
     * @param maxStoredEnergy The maximum amount of power this PowerHandler can store. Values tend
     * to range between 100 and 5000. With 1000 and 1500 being common.
     */
    public void configure(float minEnergyReceived, float maxEnergyReceived, float activationEnergy, float maxStoredEnergy)
    {
        // Inconsistent bounds are resolved in favor of the minimum.
        if (minEnergyReceived > maxEnergyReceived)
        {
            maxEnergyReceived = minEnergyReceived;
        }
        this.minEnergyReceived = minEnergyReceived;
        this.maxEnergyReceived = maxEnergyReceived;
        this.maxEnergyStored = maxStoredEnergy;
        this.activationEnergy = activationEnergy;
    }

    /**
     * Configures a flat perdition rate of powerLoss/powerLossRegularity per
     * tick; zero for either argument disables perdition entirely.
     */
    public void configurePowerPerdition(int powerLoss, int powerLossRegularity)
    {
        if (powerLoss == 0 || powerLossRegularity == 0)
        {
            perdition = new PerditionCalculator(0);
            return;
        }
        perdition = new PerditionCalculator((float) powerLoss / (float) powerLossRegularity);
    }

    /**
     * Allows you to define a new PerditionCalculator class to handler perdition calculations.
     *
     * For example if you want exponentially increasing loss based on amount stored.
     *
     * @param perdition the calculator to use; null restores the default
     */
    public void setPerdition(PerditionCalculator perdition)
    {
        if (perdition == null)
            perdition = DEFAULT_PERDITION;
        this.perdition = perdition;
    }

    /** @return the active perdition calculator, never null */
    public PerditionCalculator getPerdition()
    {
        if (perdition == null)
            return DEFAULT_PERDITION;
        return perdition;
    }

    /**
     * Ticks the power handler. You should call this if you can, but its not required.
     *
     * If you don't call it, the possibility exists for some weirdness with the perdition algorithm
     * and work callback as its possible they will not be called on every tick they otherwise would
     * be. You should be able to design around this though if you are aware of the limitations.
     */
    public void update()
    {
        applyPerdition();
        applyWork();
        validateEnergy();
    }

    // Applies passive loss at most once per tick. If a custom calculator
    // misbehaves (returns a value that is not a decrease), the default
    // calculator is used for that tick instead.
    private void applyPerdition()
    {
        if (perditionTracker.markTimeIfDelay(receptor.getWorld(), 1) && energyStored > 0)
        {
            float newEnergy = getPerdition().applyPerdition(this, energyStored, perditionTracker.durationOfLastDelay());
            if (newEnergy == 0 || newEnergy < energyStored)
                energyStored = newEnergy;
            else
                energyStored = DEFAULT_PERDITION.applyPerdition(this, energyStored, perditionTracker.durationOfLastDelay());
            validateEnergy();
        }
    }

    // Invokes the receptor's doWork() callback at most once per tick, and only
    // while the stored energy meets the activation threshold.
    private void applyWork()
    {
        if (energyStored >= activationEnergy)
        {
            if (doWorkTracker.markTimeIfDelay(receptor.getWorld(), 1))
            {
                receptor.doWork(this);
            }
        }
    }

    // Ages the per-side source counters (once per tick) and refreshes the
    // counter for the side that just supplied power.
    private void updateSources(ForgeDirection source)
    {
        if (sourcesTracker.markTimeIfDelay(receptor.getWorld(), 1))
        {
            for (int i = 0; i < 6; ++i)
            {
                powerSources[i] -= sourcesTracker.durationOfLastDelay();
                if (powerSources[i] < 0)
                {
                    powerSources[i] = 0;
                }
            }
        }
        if (source != null)
            powerSources[source.ordinal()] = 10;
    }

    /**
     * Extract energy from the PowerHandler. You must call this even if doWork() triggers.
     *
     * @param min the minimum amount required; if less is stored, nothing is used
     * @param max the maximum amount to take
     * @param doUse true to actually deduct the energy, false to only query
     * @return amount used
     */
    public float useEnergy(float min, float max, boolean doUse)
    {
        applyPerdition();
        float result = 0;
        if (energyStored >= min)
        {
            if (energyStored <= max)
            {
                result = energyStored;
                if (doUse)
                {
                    energyStored = 0;
                }
            }
            else
            {
                result = max;
                if (doUse)
                {
                    energyStored -= max;
                }
            }
        }
        validateEnergy();
        return result;
    }

    /** Restores stored energy from NBT under the default "powerProvider" tag. */
    public void readFromNBT(NBTTagCompound data)
    {
        readFromNBT(data, "powerProvider");
    }

    /** Restores stored energy from NBT under the given tag. */
    public void readFromNBT(NBTTagCompound data, String tag)
    {
        NBTTagCompound nbt = data.getCompoundTag(tag);
        energyStored = nbt.getFloat("storedEnergy");
    }

    /** Saves stored energy to NBT under the default "powerProvider" tag. */
    public void writeToNBT(NBTTagCompound data)
    {
        writeToNBT(data, "powerProvider");
    }

    /** Saves stored energy to NBT under the given tag. */
    public void writeToNBT(NBTTagCompound data, String tag)
    {
        NBTTagCompound nbt = new NBTTagCompound();
        nbt.setFloat("storedEnergy", energyStored);
        data.setCompoundTag(tag, nbt);
    }

    /**
     * The external interface handed to neighboring blocks: exposes read-only
     * state plus receiveEnergy(), delegating everything to the enclosing
     * PowerHandler.
     */
    public final class PowerReceiver
    {
        private PowerReceiver()
        {
        }

        public float getMinEnergyReceived()
        {
            return minEnergyReceived;
        }

        public float getMaxEnergyReceived()
        {
            return maxEnergyReceived;
        }

        public float getMaxEnergyStored()
        {
            return maxEnergyStored;
        }

        public float getActivationEnergy()
        {
            return activationEnergy;
        }

        public float getEnergyStored()
        {
            return energyStored;
        }

        public Type getType()
        {
            return type;
        }

        public void update()
        {
            PowerHandler.this.update();
        }

        /**
         * The amount of power that this PowerHandler currently needs.
         *
         * @return remaining capacity, capped at the per-transfer maximum
         */
        public float powerRequest()
        {
            update();
            return Math.min(maxEnergyReceived, maxEnergyStored - energyStored);
        }

        /**
         * Add power to the PowerReceiver from an external source.
         *
         * @param source the kind of block supplying the power
         * @param quantity the amount offered
         * @param from the side the power arrives on
         * @return the amount of power used
         */
        public float receiveEnergy(Type source, final float quantity, ForgeDirection from)
        {
            float used = quantity;
            if (source == Type.ENGINE)
            {
                // Engines must respect the receiver's min/max window.
                if (used < minEnergyReceived)
                {
                    return 0;
                }
                else if (used > maxEnergyReceived)
                {
                    used = maxEnergyReceived;
                }
            }
            updateSources(from);
            used = addEnergy(used);
            applyWork();
            // Machines/storage report the full accepted amount to engines so
            // excess engine output is consumed rather than backing up.
            if (source == Type.ENGINE && type.eatsEngineExcess())
            {
                return Math.min(quantity, maxEnergyReceived);
            }
            return used;
        }
    }

    /**
     * Adds (or, if negative, removes) energy, clamping to [0, maxEnergyStored].
     *
     * @return the amount the power changed by
     */
    public float addEnergy(float quantity)
    {
        energyStored += quantity;
        if (energyStored > maxEnergyStored)
        {
            quantity -= energyStored - maxEnergyStored;
            energyStored = maxEnergyStored;
        }
        else if (energyStored < 0)
        {
            quantity -= energyStored;
            energyStored = 0;
        }
        applyPerdition();
        return quantity;
    }

    /** Sets the stored energy directly, clamped to [0, maxEnergyStored]. */
    public void setEnergy(float quantity)
    {
        this.energyStored = quantity;
        validateEnergy();
    }

    /** @return true if the given side has supplied power recently */
    public boolean isPowerSource(ForgeDirection from)
    {
        return powerSources[from.ordinal()] != 0;
    }

    // Clamps the stored energy into [0, maxEnergyStored].
    private void validateEnergy()
    {
        if (energyStored < 0)
        {
            energyStored = 0;
        }
        if (energyStored > maxEnergyStored)
        {
            energyStored = maxEnergyStored;
        }
    }
}
| lgpl-3.0 |
Alfresco/alfresco-repository | src/main/java/org/alfresco/repo/domain/permissions/ibatis/AclCrudDAOImpl.java | 21316 | /*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.domain.permissions.ibatis;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.alfresco.repo.domain.permissions.AbstractAclCrudDAOImpl;
import org.alfresco.repo.domain.permissions.Ace;
import org.alfresco.repo.domain.permissions.AceContextEntity;
import org.alfresco.repo.domain.permissions.AceEntity;
import org.alfresco.repo.domain.permissions.AclChangeSetEntity;
import org.alfresco.repo.domain.permissions.AclEntity;
import org.alfresco.repo.domain.permissions.AclMemberEntity;
import org.alfresco.repo.domain.permissions.Authority;
import org.alfresco.repo.domain.permissions.AuthorityAliasEntity;
import org.alfresco.repo.domain.permissions.AuthorityEntity;
import org.alfresco.repo.domain.permissions.PermissionEntity;
import org.alfresco.repo.security.permissions.ACEType;
import org.apache.ibatis.session.RowBounds;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.util.Assert;
/**
* iBatis-specific implementation of the ACL Crud DAO.
*
* @author janv
* @since 3.4
*/
public class AclCrudDAOImpl extends AbstractAclCrudDAOImpl
{
    // MyBatis statement identifiers, resolved against the permissions SqlMap namespace.
    private static final String INSERT_ACL = "alfresco.permissions.insert.insert_Acl";
    private static final String SELECT_ACL_BY_ID = "alfresco.permissions.select_AclById";
    private static final String SELECT_ACLS_THAT_INHERIT_FROM_ACL = "alfresco.permissions.select_AclsThatInheritFromAcl";
    private static final String SELECT_LATEST_ACL_BY_GUID = "alfresco.permissions.select_LatestAclByGuid";
    private static final String SELECT_ADM_NODES_BY_ACL = "alfresco.permissions.select_ADMNodesByAclId";
    private static final String UPDATE_ACL = "alfresco.permissions.update_Acl";
    private static final String DELETE_ACL = "alfresco.permissions.delete_Acl";
    private static final String INSERT_ACL_MEMBER = "alfresco.permissions.insert.insert_AclMember";
    private static final String SELECT_ACL_MEMBERS_BY_ACL = "alfresco.permissions.select_AclMembersByAclId";
    private static final String SELECT_ACL_MEMBER = "alfresco.permissions.select_AclMember";
    private static final String SELECT_ACL_MEMBERS_BY_AUTHORITY = "alfresco.permissions.select_AclMembersByAuthorityName";
    private static final String UPDATE_ACL_MEMBER = "alfresco.permissions.update_AclMember";
    private static final String DELETE_ACL_MEMBERS_LIST = "alfresco.permissions.delete_AclMembersList";
    private static final String DELETE_ACL_MEMBERS_BY_ACL = "alfresco.permissions.delete_AclMembersByAclId";
    private static final String INSERT_ACL_CHANGESET = "alfresco.permissions.insert.insert_AclChangeSet";
    private static final String UPDATE_ACL_CHANGESET = "alfresco.permissions.update_AclChangeSet";
    private static final String SELECT_ACL_CHANGESET_BY_ID = "alfresco.permissions.select_AclChangeSetById";
    private static final String DELETE_ACL_CHANGESET = "alfresco.permissions.delete_AclChangeSet";
    private static final String INSERT_ACE = "alfresco.permissions.insert.insert_Ace";
    private static final String SELECT_ACE_BY_ID = "alfresco.permissions.select_AceById";
    private static final String SELECT_ACES_BY_AUTHORITY = "alfresco.permissions.select_AcesByAuthorityId";
    private static final String SELECT_ACES_AND_AUTHORIES_BY_ACL = "alfresco.permissions.select_AcesAndAuthoritiesByAclId";
    private static final String SELECT_ACE_WITH_NO_CONTEXT = "alfresco.permissions.select_AceWithNoContext";
    private static final String DELETE_ACES_LIST = "alfresco.permissions.delete_AcesList";
    private static final String UPDATE_ACE = "alfresco.permissions.update_Ace";
    private static final String INSERT_ACE_CONTEXT = "alfresco.permissions.insert.insert_AceContext";
    private static final String SELECT_ACE_CONTEXT_BY_ID = "alfresco.permissions.select_AceContextById";
    private static final String DELETE_ACE_CONTEXT = "alfresco.permissions.delete_AceContext";
    private static final String INSERT_PERMISSION = "alfresco.permissions.insert.insert_Permission";
    private static final String SELECT_PERMISSION_BY_ID = "alfresco.permissions.select_PermissionById";
    private static final String SELECT_PERMISSION_BY_TYPE_AND_NAME = "alfresco.permissions.select_PermissionByTypeAndName";
    private static final String UPDATE_PERMISSION = "alfresco.permissions.update_Permission";
    private static final String DELETE_PERMISSION = "alfresco.permissions.delete_Permission";
    private static final String INSERT_AUTHORITY = "alfresco.permissions.insert.insert_Authority";
    private static final String SELECT_AUTHORITY_BY_ID = "alfresco.permissions.select_AuthorityById";
    private static final String SELECT_AUTHORITY_BY_NAME = "alfresco.permissions.select_AuthorityByName";
    private static final String UPDATE_AUTHORITY = "alfresco.permissions.update_Authority";
    private static final String DELETE_AUTHORITY = "alfresco.permissions.delete_Authority";
    private static final String INSERT_AUTHORITY_ALIAS = "alfresco.permissions.insert.insert_AuthorityAlias";
    private static final String DELETE_AUTHORITY_ALIAS = "alfresco.permissions.delete_AuthorityAlias";
    private static final String SELECT_CHANGE_SET_LAST = "alfresco.permissions.select_ChangeSetLast";
    // Fix: removed stray duplicate ';' that previously terminated this declaration.
    private static final String SELECT_CHANGE_SET_MAX_COMMIT_TIME = "alfresco.permissions.select_ChangeSetMaxCommitTime";

    private SqlSessionTemplate template;

    public final void setSqlSessionTemplate(SqlSessionTemplate sqlSessionTemplate)
    {
        this.template = sqlSessionTemplate;
    }

    /**
     * Builds the single-entry parameter map used by most statements in this DAO.
     * Extracted to remove the copy-pasted map-population boilerplate.
     */
    private static Map<String, Object> singleParam(String key, Object value)
    {
        Map<String, Object> params = new HashMap<String, Object>(1);
        params.put(key, value);
        return params;
    }

    /** Inserts a new ACL row (version 0) and returns the populated entity. */
    @Override
    protected AclEntity createAclEntity(AclEntity entity)
    {
        entity.setVersion(0L);
        template.insert(INSERT_ACL, entity);
        return entity;
    }

    /** Fetches an ACL by primary key, or null if not found. */
    @Override
    protected AclEntity getAclEntity(long aclEntityId)
    {
        return template.selectOne(SELECT_ACL_BY_ID, singleParam("id", aclEntityId));
    }

    /** Returns the ids of all ACLs that inherit from the given ACL. */
    @SuppressWarnings("unchecked")
    @Override
    protected List<Long> getAclEntitiesThatInheritFromAcl(long aclEntityId)
    {
        Map<String, Object> params = new HashMap<String, Object>(2);
        params.put("id", aclEntityId);
        params.put("bool", true);
        return template.selectList(SELECT_ACLS_THAT_INHERIT_FROM_ACL, params);
    }

    /** Returns the id of the latest ACL with the given GUID, or null. */
    @Override
    protected Long getLatestAclEntityByGuid(String aclGuid)
    {
        Map<String, Object> params = new HashMap<String, Object>(2);
        params.put("name", aclGuid);
        params.put("bool", true);
        return template.selectOne(SELECT_LATEST_ACL_BY_GUID, params);
    }

    /**
     * Returns up to {@code maxResults} ADM node ids referencing the ACL.
     * A negative {@code maxResults} means "no limit".
     */
    @SuppressWarnings("unchecked")
    @Override
    protected List<Long> getADMNodeEntityIdsByAcl(long aclEntityId, int maxResults)
    {
        if (maxResults < 0)
        {
            maxResults = RowBounds.NO_ROW_LIMIT;
        }
        return template.selectList(SELECT_ADM_NODES_BY_ACL, singleParam("id", aclEntityId), new RowBounds(0, maxResults));
    }

    /** Updates an ACL row, bumping its optimistic-lock version first. */
    @Override
    protected int updateAclEntity(AclEntity updatedAclEntity)
    {
        updatedAclEntity.incrementVersion();
        return template.update(UPDATE_ACL, updatedAclEntity);
    }

    /**
     * Updates an ACE. If an equivalent ACE (same permission/authority/allowed/type)
     * already exists, its ACL memberships are repointed at the updated ACE and the
     * duplicate row is deleted before the update is applied.
     */
    @Override
    protected int updateAceEntity(AceEntity updatedAceEntity)
    {
        AceEntity existingAceEntity = getAceEntity(updatedAceEntity.getPermissionId(), updatedAceEntity.getAuthorityId(), updatedAceEntity.isAllowed(), updatedAceEntity.getAceType());
        if (existingAceEntity != null)
        {
            for (AclMemberEntity aclMemberEntity : getAclMemberEntitiesByAuthority(getAuthority(updatedAceEntity.getAuthorityId()).getAuthority()))
            {
                aclMemberEntity.setAceId(updatedAceEntity.getId());
                updateAclMember(aclMemberEntity);
            }
            deleteAceEntities(Collections.singletonList(existingAceEntity.getId()));
        }
        updatedAceEntity.incrementVersion();
        return template.update(UPDATE_ACE, updatedAceEntity);
    }

    /** Deletes an ACL by id; returns the number of rows removed. */
    @Override
    protected int deleteAclEntity(long aclEntityId)
    {
        return template.delete(DELETE_ACL, singleParam("id", aclEntityId));
    }

    /** Inserts a new ACL member row (version 0) and returns the populated entity. */
    @Override
    protected AclMemberEntity createAclMemberEntity(AclMemberEntity entity)
    {
        entity.setVersion(0L);
        template.insert(INSERT_ACL_MEMBER, entity);
        return entity;
    }

    /** Lists the members of the given ACL. */
    @SuppressWarnings("unchecked")
    @Override
    protected List<AclMemberEntity> getAclMemberEntitiesByAcl(long aclEntityId)
    {
        return template.selectList(SELECT_ACL_MEMBERS_BY_ACL, singleParam("id", aclEntityId));
    }

    /** Lists ACL members whose ACE belongs to the named authority. */
    @SuppressWarnings("unchecked")
    @Override
    protected List<AclMemberEntity> getAclMemberEntitiesByAuthority(String authorityName)
    {
        return template.selectList(SELECT_ACL_MEMBERS_BY_AUTHORITY, singleParam("name", authorityName));
    }

    /**
     * Updates an ACL member. A pre-existing member with the same (acl, ace, pos)
     * key is deleted first to avoid a duplicate.
     */
    @Override
    protected int updateAclMemberEntity(AclMemberEntity updatedAclMemberEntity)
    {
        AclMemberEntity existingAclMemberEntity = getAclMemberEntity(updatedAclMemberEntity.getAclId(), updatedAclMemberEntity.getAceId(), updatedAclMemberEntity.getPos());
        if (existingAclMemberEntity != null)
        {
            deleteAclMemberEntities(Collections.singletonList(existingAclMemberEntity.getId()));
        }
        updatedAclMemberEntity.incrementVersion();
        return template.update(UPDATE_ACL_MEMBER, updatedAclMemberEntity);
    }

    /** Bulk-deletes ACL members by id list; returns the number of rows removed. */
    @Override
    protected int deleteAclMemberEntities(List<Long> aclMemberEntityIds)
    {
        return template.delete(DELETE_ACL_MEMBERS_LIST, aclMemberEntityIds);
    }

    /** Deletes all members of the given ACL. */
    @Override
    protected int deleteAclMemberEntitiesByAcl(long aclEntityId)
    {
        return template.delete(DELETE_ACL_MEMBERS_BY_ACL, singleParam("id", aclEntityId));
    }

    /** Inserts a new, empty change set and returns its generated id. */
    @Override
    protected Long createAclChangeSetEntity()
    {
        AclChangeSetEntity entity = new AclChangeSetEntity();
        template.insert(INSERT_ACL_CHANGESET, entity);
        return entity.getId();
    }

    /** Fetches a change set by id, or null if not found. */
    @Override
    protected AclChangeSetEntity getAclChangeSetEntity(Long aclChangeSetEntityId)
    {
        return template.selectOne(SELECT_ACL_CHANGESET_BY_ID, singleParam("id", aclChangeSetEntityId));
    }

    /** Deletes a change set by id; returns the number of rows removed. */
    @Override
    protected int deleteAclChangeSetEntity(Long aclChangeSetEntityId)
    {
        return template.delete(DELETE_ACL_CHANGESET, singleParam("id", aclChangeSetEntityId));
    }

    /** Stamps the given change set with its commit time. */
    @Override
    protected int updateChangeSetEntity(Long id, long commitTimeMs)
    {
        AclChangeSetEntity entity = new AclChangeSetEntity();
        entity.setId(id);
        entity.setCommitTimeMs(commitTimeMs);
        return template.update(UPDATE_ACL_CHANGESET, entity);
    }

    /** Inserts a new ACE (version 0); returns its generated id, or -1 if none. */
    @Override
    protected long createAceEntity(AceEntity entity)
    {
        entity.setVersion(0L);
        template.insert(INSERT_ACE, entity);
        Long id = entity.getId();
        return (id != null ? id : -1);
    }

    /** Fetches an ACE by primary key, or null if not found. */
    @Override
    protected AceEntity getAceEntity(long aceEntityId)
    {
        return template.selectOne(SELECT_ACE_BY_ID, singleParam("id", aceEntityId));
    }

    /** Fetches the context-free ACE matching the given key, or null. */
    @Override
    protected AceEntity getAceEntity(long permissionId, long authorityId, boolean allowed, ACEType type)
    {
        Map<String, Object> params = new HashMap<String, Object>(4);
        params.put("id1", permissionId);
        params.put("id2", authorityId);
        params.put("bool", allowed);
        params.put("int", type.getId());
        return template.selectOne(SELECT_ACE_WITH_NO_CONTEXT, params);
    }

    /** Fetches the ACL member with the given (acl, ace, pos) key, or null. */
    @Override
    protected AclMemberEntity getAclMemberEntity(long aclId, long aceId, int pos)
    {
        // Map presized for its actual 3 entries (was 4); redundant result cast removed.
        Map<String, Object> params = new HashMap<String, Object>(3);
        params.put("id1", aclId);
        params.put("id2", aceId);
        params.put("int", pos);
        return template.selectOne(SELECT_ACL_MEMBER, params);
    }

    /** Lists the ACEs granted to the given authority. */
    @SuppressWarnings("unchecked")
    @Override
    protected List<Ace> getAceEntitiesByAuthority(long authorityEntityId)
    {
        return template.selectList(SELECT_ACES_BY_AUTHORITY, singleParam("id", authorityEntityId));
    }

    /** Lists joined ACE + authority rows for the given ACL. */
    @SuppressWarnings("unchecked")
    @Override
    protected List<Map<String, Object>> getAceAndAuthorityEntitiesByAcl(long aclEntityId)
    {
        return template.selectList(SELECT_ACES_AND_AUTHORIES_BY_ACL, singleParam("id", aclEntityId));
    }

    /** Bulk-deletes ACEs by id list; returns the number of rows removed. */
    @Override
    protected int deleteAceEntities(List<Long> aceEntityIds)
    {
        return template.delete(DELETE_ACES_LIST, aceEntityIds);
    }

    /** Inserts a new ACE context (version 0); returns its generated id, or -1. */
    @Override
    protected long createAceContextEntity(AceContextEntity entity)
    {
        entity.setVersion(0L);
        template.insert(INSERT_ACE_CONTEXT, entity);
        Long id = entity.getId();
        return (id != null ? id : -1);
    }

    /** Fetches an ACE context by id, or null if not found. */
    @Override
    protected AceContextEntity getAceContextEntity(long aceContextEntityId)
    {
        return template.selectOne(SELECT_ACE_CONTEXT_BY_ID, singleParam("id", aceContextEntityId));
    }

    /** Deletes an ACE context by id; returns the number of rows removed. */
    @Override
    protected int deleteAceContextEntity(long aceContextEntityId)
    {
        return template.delete(DELETE_ACE_CONTEXT, singleParam("id", aceContextEntityId));
    }

    /** Inserts a new permission row (version 0) and returns the populated entity. */
    @Override
    protected PermissionEntity createPermissionEntity(PermissionEntity entity)
    {
        entity.setVersion(0L);
        template.insert(INSERT_PERMISSION, entity);
        return entity;
    }

    /** Fetches a permission by primary key, or null if not found. */
    @Override
    protected PermissionEntity getPermissionEntity(long permissionEntityId)
    {
        return template.selectOne(SELECT_PERMISSION_BY_ID, singleParam("id", permissionEntityId));
    }

    /** Fetches a permission by its type QName id and name, or null. */
    @Override
    protected PermissionEntity getPermissionEntity(long qnameId, String name)
    {
        Map<String, Object> params = new HashMap<String, Object>(2);
        params.put("id", qnameId);
        params.put("name", name);
        return template.selectOne(SELECT_PERMISSION_BY_TYPE_AND_NAME, params);
    }

    /** Updates a permission row, bumping its optimistic-lock version first. */
    @Override
    protected int updatePermissionEntity(PermissionEntity permissionEntity)
    {
        permissionEntity.incrementVersion();
        return template.update(UPDATE_PERMISSION, permissionEntity);
    }

    /** Deletes a permission by id; returns the number of rows removed. */
    @Override
    protected int deletePermissionEntity(long permissionEntityId)
    {
        return template.delete(DELETE_PERMISSION, singleParam("id", permissionEntityId));
    }

    /** Inserts a new authority row (version 0) and returns the populated entity. */
    @Override
    protected AuthorityEntity createAuthorityEntity(AuthorityEntity entity)
    {
        entity.setVersion(0L);
        template.insert(INSERT_AUTHORITY, entity);
        return entity;
    }

    /** Fetches an authority by primary key, or null if not found. */
    @Override
    protected AuthorityEntity getAuthorityEntity(long authorityEntityId)
    {
        return template.selectOne(SELECT_AUTHORITY_BY_ID, singleParam("id", authorityEntityId));
    }

    /**
     * Fetches an authority by exact name. A list query is used deliberately:
     * upgraded older schemas may hold case-variant duplicates, so the rows are
     * scanned for the case-sensitive match.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected AuthorityEntity getAuthorityEntity(String authorityName)
    {
        AuthorityEntity result = null;
        List<AuthorityEntity> authorities = template.selectList(SELECT_AUTHORITY_BY_NAME, singleParam("name", authorityName));
        for (AuthorityEntity found : authorities)
        {
            if (found.getAuthority().equals(authorityName))
            {
                result = found;
                break;
            }
        }
        return result;
    }

    /**
     * Updates an authority. If another row already carries the same name, its
     * ACEs are repointed at the updated authority and the old row is deleted
     * before the update is applied.
     */
    @Override
    protected int updateAuthorityEntity(AuthorityEntity authorityEntity)
    {
        Authority existingAuthorityEntity = getAuthority(authorityEntity.getAuthority());
        if (existingAuthorityEntity != null)
        {
            for (Ace ace : getAceEntitiesByAuthority(existingAuthorityEntity.getId()))
            {
                AceEntity aceEntity = getAceEntity(ace.getId());
                aceEntity.setAuthorityId(authorityEntity.getId());
                updateAceEntity(aceEntity);
            }
            deleteAuthority(existingAuthorityEntity.getId());
        }
        authorityEntity.incrementVersion();
        return template.update(UPDATE_AUTHORITY, authorityEntity);
    }

    /** Deletes an authority by id; returns the number of rows removed. */
    @Override
    protected int deleteAuthorityEntity(long authorityEntityId)
    {
        return template.delete(DELETE_AUTHORITY, singleParam("id", authorityEntityId));
    }

    /** Inserts a new authority alias (version 0); returns its generated id, or -1. */
    @Override
    protected long createAuthorityAliasEntity(AuthorityAliasEntity entity)
    {
        entity.setVersion(0L);
        template.insert(INSERT_AUTHORITY_ALIAS, entity);
        Long id = entity.getId();
        return (id != null ? id : -1);
    }

    /** Deletes an authority alias by id; returns the number of rows removed. */
    @Override
    protected int deleteAuthorityAliasEntity(long authorityAliasEntityId)
    {
        return template.delete(DELETE_AUTHORITY_ALIAS, singleParam("id", authorityAliasEntityId));
    }

    /**
     * @see org.alfresco.repo.domain.permissions.AbstractAclCrudDAOImpl#selectMaxChangeSetCommitTime()
     */
    @Override
    protected Long selectMaxChangeSetCommitTime()
    {
        return template.selectOne(SELECT_CHANGE_SET_MAX_COMMIT_TIME);
    }

    /**
     * @see org.alfresco.repo.domain.permissions.AbstractAclCrudDAOImpl#selectMaxChangeSetIdBeforeCommitTime(long)
     */
    @Override
    protected Long selectMaxChangeSetIdBeforeCommitTime(long maxCommitTime)
    {
        // Note: the former Assert.notNull(maxCommitTime) was removed - a primitive
        // long is autoboxed and can never be null, so the check could never fail.
        List<Long> sets = template.selectList(SELECT_CHANGE_SET_LAST, singleParam("commit_time_ms", maxCommitTime), new RowBounds(0, 1));
        return sets.isEmpty() ? null : sets.get(0);
    }
}
| lgpl-3.0 |
mmaracic/elasticsearch | core/src/test/java/org/elasticsearch/cluster/routing/RoutingServiceTests.java | 15018 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.test.ESAllocationTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import static java.util.Collections.singleton;
import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.elasticsearch.cluster.service.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.cluster.service.ClusterServiceUtils.setState;
import static org.hamcrest.Matchers.equalTo;
/**
 * Tests for {@code RoutingService}: verifies that reroutes are (or are not)
 * scheduled when shards become unassigned, honouring the per-index
 * {@code index.unassigned.node_left.delayed_timeout} setting.
 */
public class RoutingServiceTests extends ESAllocationTestCase {
    // Replaced fresh for every test by createRoutingService(); records reroute calls.
    private TestRoutingService routingService;
    @Before
    public void createRoutingService() {
        routingService = new TestRoutingService();
    }
    @After
    public void shutdownRoutingService() throws Exception {
        routingService.shutdown();
    }
    // Sanity check: reroute() triggers performReroute exactly once.
    public void testReroute() {
        assertThat(routingService.hasReroutedAndClear(), equalTo(false));
        routingService.reroute("test");
        assertThat(routingService.hasReroutedAndClear(), equalTo(true));
    }
    // With a zero delayed_timeout, losing a node must NOT schedule a delayed reroute.
    public void testNoDelayedUnassigned() throws Exception {
        AllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "0"))
                        .numberOfShards(1).numberOfReplicas(1))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test")).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")).localNodeId("node1").masterNodeId("node1")).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // starting replicas
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        assertThat(clusterState.getRoutingNodes().unassigned().size() > 0, equalTo(false));
        // remove node2 and reroute
        ClusterState prevState = clusterState;
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        ClusterState newState = clusterState;
        // Long.MAX_VALUE is the sentinel for "no delayed reroute scheduled".
        assertThat(routingService.getMinDelaySettingAtLastSchedulingNanos(), equalTo(Long.MAX_VALUE));
        routingService.clusterChanged(new ClusterChangedEvent("test", newState, prevState));
        assertThat(routingService.getMinDelaySettingAtLastSchedulingNanos(), equalTo(Long.MAX_VALUE));
        assertThat(routingService.hasReroutedAndClear(), equalTo(false));
    }
    // With a 100ms delayed_timeout, losing the node holding the replica must
    // schedule a delayed reroute, which then resets the scheduling sentinel.
    public void testDelayedUnassignedScheduleReroute() throws Exception {
        MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
        MetaData metaData = MetaData.builder()
                .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"))
                        .numberOfShards(1).numberOfReplicas(1))
                .build();
        ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT)
                .metaData(metaData)
                .routingTable(RoutingTable.builder().addAsNew(metaData.index("test")).build()).build();
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2")).localNodeId("node1").masterNodeId("node1")).build();
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        // starting primaries
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        // starting replicas
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
        assertFalse("no shards should be unassigned", clusterState.getRoutingNodes().unassigned().size() > 0);
        String nodeId = null;
        final List<ShardRouting> allShards = clusterState.getRoutingNodes().routingTable().allShards("test");
        // we need to find the node with the replica otherwise we will not reroute
        for (ShardRouting shardRouting : allShards) {
            if (shardRouting.primary() == false) {
                nodeId = shardRouting.currentNodeId();
                break;
            }
        }
        assertNotNull(nodeId);
        // remove nodeId and reroute
        ClusterState prevState = clusterState;
        clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove(nodeId)).build();
        // make sure the replica is marked as delayed (i.e. not reallocated)
        clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
        assertEquals(1, clusterState.getRoutingNodes().unassigned().size());
        ClusterState newState = clusterState;
        routingService.clusterChanged(new ClusterChangedEvent("test", newState, prevState));
        assertBusy(() -> assertTrue("routing service should have run a reroute", routingService.hasReroutedAndClear()));
        // verify the registration has been reset
        assertThat(routingService.getMinDelaySettingAtLastSchedulingNanos(), equalTo(Long.MAX_VALUE));
    }
    /**
     * This tests that a new delayed reroute is scheduled right after a delayed reroute was run
     */
    public void testDelayedUnassignedScheduleRerouteAfterDelayedReroute() throws Exception {
        final ThreadPool testThreadPool = new ThreadPool(getTestName());
        ClusterService clusterService = createClusterService(testThreadPool);
        try {
            // Two indices with different delays: the short one (100ms) should be
            // handled first, leaving the long one (10s) to be re-scheduled.
            MockAllocationService allocation = createAllocationService(Settings.EMPTY, new DelayedShardsMockGatewayAllocator());
            MetaData metaData = MetaData.builder()
                    .put(IndexMetaData.builder("short_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"))
                            .numberOfShards(1).numberOfReplicas(1))
                    .put(IndexMetaData.builder("long_delay").settings(settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "10s"))
                            .numberOfShards(1).numberOfReplicas(1))
                    .build();
            ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metaData(metaData)
                    .routingTable(RoutingTable.builder().addAsNew(metaData.index("short_delay")).addAsNew(metaData.index("long_delay")).build())
                    .nodes(DiscoveryNodes.builder()
                            .put(newNode("node0", singleton(DiscoveryNode.Role.MASTER))).localNodeId("node0").masterNodeId("node0")
                            .put(newNode("node1")).put(newNode("node2")).put(newNode("node3")).put(newNode("node4"))).build();
            // allocate shards
            clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
            // start primaries
            clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
            // start replicas
            clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING))).build();
            assertThat("all shards should be started", clusterState.getRoutingNodes().shardsWithState(STARTED).size(), equalTo(4));
            // find replica of short_delay
            ShardRouting shortDelayReplica = null;
            for (ShardRouting shardRouting : clusterState.getRoutingNodes().routingTable().allShards("short_delay")) {
                if (shardRouting.primary() == false) {
                    shortDelayReplica = shardRouting;
                    break;
                }
            }
            assertNotNull(shortDelayReplica);
            // find replica of long_delay
            ShardRouting longDelayReplica = null;
            for (ShardRouting shardRouting : clusterState.getRoutingNodes().routingTable().allShards("long_delay")) {
                if (shardRouting.primary() == false) {
                    longDelayReplica = shardRouting;
                    break;
                }
            }
            assertNotNull(longDelayReplica);
            // Fixed base for the nano-time override so delays are deterministic.
            final long baseTime = System.nanoTime();
            // remove node of shortDelayReplica and node of longDelayReplica and reroute
            ClusterState prevState = clusterState;
            clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove(shortDelayReplica.currentNodeId()).remove(longDelayReplica.currentNodeId())).build();
            // make sure both replicas are marked as delayed (i.e. not reallocated)
            allocation.setNanoTimeOverride(baseTime);
            clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState, "reroute")).build();
            // check that shortDelayReplica and longDelayReplica have been marked unassigned
            RoutingNodes.UnassignedShards unassigned = clusterState.getRoutingNodes().unassigned();
            assertEquals(2, unassigned.size());
            // update shortDelayReplica and longDelayReplica variables with new shard routing
            ShardRouting shortDelayUnassignedReplica = null;
            ShardRouting longDelayUnassignedReplica = null;
            for (ShardRouting shr : unassigned) {
                if (shr.getIndexName().equals("short_delay")) {
                    shortDelayUnassignedReplica = shr;
                } else {
                    longDelayUnassignedReplica = shr;
                }
            }
            assertTrue(shortDelayReplica.isSameShard(shortDelayUnassignedReplica));
            assertTrue(longDelayReplica.isSameShard(longDelayUnassignedReplica));
            // manually trigger a clusterChanged event on routingService
            ClusterState newState = clusterState;
            setState(clusterService, newState);
            // create routing service, also registers listener on cluster service
            RoutingService routingService = new RoutingService(Settings.EMPTY, testThreadPool, clusterService, allocation);
            routingService.start(); // just so performReroute does not prematurely return
            // next (delayed) reroute should only delay longDelayReplica/longDelayUnassignedReplica, simulate that we are now 1 second after shards became unassigned
            allocation.setNanoTimeOverride(baseTime + TimeValue.timeValueSeconds(1).nanos());
            // register listener on cluster state so we know when cluster state has been changed
            CountDownLatch latch = new CountDownLatch(1);
            clusterService.addLast(event -> latch.countDown());
            // instead of clusterService calling clusterChanged, we call it directly here
            routingService.clusterChanged(new ClusterChangedEvent("test", newState, prevState));
            // cluster service should have updated state and called routingService with clusterChanged
            latch.await();
            // verify the registration has been set to the delay of longDelayReplica/longDelayUnassignedReplica
            assertThat(routingService.getMinDelaySettingAtLastSchedulingNanos(), equalTo(TimeValue.timeValueSeconds(10).nanos()));
        } finally {
            clusterService.stop();
            terminate(testThreadPool);
        }
    }
    /**
     * Test double for RoutingService that records reroute requests instead of
     * performing real allocation work.
     */
    private class TestRoutingService extends RoutingService {
        private AtomicBoolean rerouted = new AtomicBoolean();
        public TestRoutingService() {
            super(Settings.EMPTY, new ThreadPool(getTestName()), null, null);
        }
        void shutdown() throws Exception {
            terminate(threadPool);
        }
        // Returns whether a reroute happened since the last call, and resets the flag.
        public boolean hasReroutedAndClear() {
            return rerouted.getAndSet(false);
        }
        @Override
        protected void performReroute(String reason) {
            logger.info("--> performing fake reroute [{}]", reason);
            rerouted.set(true);
        }
    }
}
| apache-2.0 |
pwong-mapr/incubator-drill | exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet2/TestDrillParquetReader.java | 11749 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet2;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Arrays;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.test.BaseTestQuery;
import org.joda.time.Period;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestDrillParquetReader extends BaseTestQuery {
// enable decimal data type
/** Enables the (session-scoped) decimal data type before any test in this class runs. */
@BeforeClass
public static void enableDecimalDataType() throws Exception {
    test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
}
/** Restores the default (disabled) decimal data type after all tests have run. */
@AfterClass
public static void disableDecimalDataType() throws Exception {
    test(String.format("alter session set `%s` = false", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
}
/**
 * Queries a single column of {@code parquet2/decimal28_38.parquet} with the
 * "new" Parquet reader enabled and verifies it reads the expected decimal
 * value. The reader setting is restored afterwards so other tests are
 * unaffected.
 */
private void testColumn(String columnName) throws Exception {
    // Force the second-generation Parquet reader for this query.
    testNoResult("alter session set `store.parquet.use_new_reader` = true");
    // Every decimal column in the fixture file holds this value.
    BigDecimal result = new BigDecimal("1.20000000");
    testBuilder()
        .ordered()
        .sqlQuery("select %s from cp.`parquet2/decimal28_38.parquet`", columnName)
        .baselineColumns(columnName)
        .baselineValues(result)
        .go();
    // Restore the default reader.
    testNoResult("alter session set `store.parquet.use_new_reader` = false");
}
/** Required (non-nullable) DECIMAL(28) column reads correctly. */
@Test
public void testRequiredDecimal28() throws Exception {
    testColumn("d28_req");
}
/** Required (non-nullable) DECIMAL(38) column reads correctly. */
@Test
public void testRequiredDecimal38() throws Exception {
    testColumn("d38_req");
}
/** Optional (nullable) DECIMAL(28) column reads correctly. */
@Test
public void testOptionalDecimal28() throws Exception {
    testColumn("d28_opt");
}
@Test
public void testOptionalDecimal38() throws Exception {
testColumn("d38_opt");
}
@Test
public void test4349() throws Exception {
// start by creating a parquet file from the input csv file
runSQL("CREATE TABLE dfs.tmp.`4349` AS SELECT columns[0] id, CAST(NULLIF(columns[1], '') AS DOUBLE) val FROM cp.`parquet2/4349.csv.gz`");
// querying the parquet file should return the same results found in the csv file
testBuilder()
.unOrdered()
.sqlQuery("SELECT * FROM dfs.tmp.`4349` WHERE id = 'b'")
.sqlBaselineQuery("SELECT columns[0] id, CAST(NULLIF(columns[1], '') AS DOUBLE) val FROM cp.`parquet2/4349.csv.gz` WHERE columns[0] = 'b'")
.go();
}
@Test
public void testUnsignedAndSignedIntTypes() throws Exception {
testBuilder()
.unOrdered()
.sqlQuery("select * from cp.`parquet/uint_types.parquet`")
.baselineColumns("uint8_field", "uint16_field", "uint32_field", "uint64_field", "int8_field", "int16_field",
"required_uint8_field", "required_uint16_field", "required_uint32_field", "required_uint64_field",
"required_int8_field", "required_int16_field")
.baselineValues(255, 65535, 2147483647, 9223372036854775807L, 255, 65535, -1, -1, -1, -1L, -2147483648, -2147483648)
.baselineValues(-1, -1, -1, -1L, -2147483648, -2147483648, 255, 65535, 2147483647, 9223372036854775807L, 255, 65535)
.baselineValues(null, null, null, null, null, null, 0, 0, 0, 0L, 0, 0)
.go();
}
@Test
public void testLogicalIntTypes() throws Exception {
String query = String.format("select " +
"t.uint_64 as uint_64, t.uint_32 as uint_32, t.uint_16 as uint_16, t.uint_8 as uint_8, " +
"t.int_64 as int_64, t.int_32 as int_32, t.int_16 as int_16, t.int_8 as int_8 " +
"from cp.`parquet/logical_int.parquet` t" );
String[] columns = {"uint_64", "uint_32", "uint_16", "uint_8", "int_64", "int_32", "int_16", "int_8" };
testBuilder()
.sqlQuery(query)
.unOrdered()
.baselineColumns(columns)
.baselineValues( 0L , 0 , 0 , 0 , 0L , 0 , 0 ,0 )
.baselineValues( -1L , -1 , -1 , -1 , -1L , -1 , -1 , -1 )
.baselineValues( 1L , 1 , 1 , 1 , -9223372036854775808L , 1 , 1 , 1 )
.baselineValues( 9223372036854775807L , 2147483647 , 65535 , 255 , 9223372036854775807L , -2147483648 , -32768 , -128 )
.build()
.run();
}
@Test //DRILL-5971
public void testLogicalIntTypes2() throws Exception {
byte[] bytes12 = {'1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b' };
byte[] bytesOnes = new byte[12]; Arrays.fill(bytesOnes, (byte)1);
byte[] bytesZeros = new byte[12];
String query = String.format(
" select " +
" t.rowKey as rowKey, " +
" t._UTF8 as _UTF8, " +
" t._Enum as _Enum, " +
" t._INT32_RAW as _INT32_RAW, " +
" t._INT_8 as _INT_8, " +
" t._INT_16 as _INT_16, " +
" t._INT_32 as _INT_32, " +
" t._UINT_8 as _UINT_8, " +
" t._UINT_16 as _UINT_16, " +
" t._UINT_32 as _UINT_32, " +
" t._INT64_RAW as _INT64_RAW, " +
" t._INT_64 as _INT_64, " +
" t._UINT_64 as _UINT_64, " +
" t._DATE_int32 as _DATE_int32, " +
" t._TIME_MILLIS_int32 as _TIME_MILLIS_int32, " +
" t._TIMESTAMP_MILLIS_int64 as _TIMESTAMP_MILLIS_int64, " +
" t._INTERVAL_fixed_len_byte_array_12 as _INTERVAL_fixed_len_byte_array_12, " +
" t._INT96_RAW as _INT96_RAW " +
" from " +
" cp.`parquet/parquet_logical_types_simple.parquet` t " +
" order by t.rowKey "
);
String[] columns = {
"rowKey " ,
"_UTF8" ,
"_Enum" ,
"_INT32_RAW" ,
"_INT_8" ,
"_INT_16" ,
"_INT_32" ,
"_UINT_8" ,
"_UINT_16" ,
"_UINT_32" ,
"_INT64_RAW" ,
"_INT_64" ,
"_UINT_64" ,
"_DATE_int32" ,
"_TIME_MILLIS_int32" ,
"_TIMESTAMP_MILLIS_int64" ,
"_INTERVAL_fixed_len_byte_array_12" ,
"_INT96_RAW"
};
testBuilder()
.sqlQuery(query)
.ordered()
.baselineColumns(columns)
.baselineValues(1, "UTF8 string1", "RANDOM_VALUE", 1234567, 123, 12345, 1234567, 123, 1234, 1234567,
1234567890123456L, 1234567890123456L, 1234567890123456L, LocalDate.parse("5350-02-17"),
ZonedDateTime.ofInstant(Instant.ofEpochMilli(1234567), ZoneOffset.UTC).toLocalTime(),
LocalDateTime.parse("1973-11-29T21:33:09.012"),
new Period().plusMonths(875770417).plusDays(943142453).plusMillis(1650536505),
bytes12)
.baselineValues(2, "UTF8 string2", "MAX_VALUE", 2147483647, 127, 32767, 2147483647, 255, 65535, -1,
9223372036854775807L, 9223372036854775807L, -1L, LocalDate.parse("1969-12-31"),
ZonedDateTime.ofInstant(Instant.ofEpochMilli(0xFFFFFFFF), ZoneOffset.UTC).toLocalTime(),
LocalDateTime.parse("2038-01-19T03:14:07.999"),
new Period().plusMonths(16843009).plusDays(16843009).plusMillis(16843009),
bytesOnes)
.baselineValues(3, "UTF8 string3", "MIN_VALUE", -2147483648, -128, -32768, -2147483648, 0, 0, 0,
-9223372036854775808L, -9223372036854775808L, 0L, LocalDate.parse("1970-01-01"),
ZonedDateTime.ofInstant(Instant.ofEpochMilli(0), ZoneOffset.UTC).toLocalTime(),
LocalDateTime.parse("1970-01-01T00:00:00.0"), new Period("PT0S"), bytesZeros)
.build()
.run();
}
@Test //DRILL-5971
public void testLogicalIntTypes3() throws Exception {
byte[] bytes12 = {'1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b' };
byte[] bytesOnes = new byte[12]; Arrays.fill(bytesOnes, (byte)1);
byte[] bytesZeros = new byte[12];
String query = String.format(
" select " +
" t.rowKey as rowKey, " +
" t._UTF8 as _UTF8, " +
" t._Enum as _Enum, " +
" t._INT32_RAW as _INT32_RAW, " +
" t._INT_8 as _INT_8, " +
" t._INT_16 as _INT_16, " +
" t._INT_32 as _INT_32, " +
" t._UINT_8 as _UINT_8, " +
" t._UINT_16 as _UINT_16, " +
" t._UINT_32 as _UINT_32, " +
" t._INT64_RAW as _INT64_RAW, " +
" t._INT_64 as _INT_64, " +
" t._UINT_64 as _UINT_64, " +
" t._DATE_int32 as _DATE_int32, " +
" t._TIME_MILLIS_int32 as _TIME_MILLIS_int32, " +
" t._TIMESTAMP_MILLIS_int64 as _TIMESTAMP_MILLIS_int64, " +
" t._INTERVAL_fixed_len_byte_array_12 as _INTERVAL_fixed_len_byte_array_12, " +
" t._INT96_RAW as _INT96_RAW " +
" from " +
" cp.`parquet/parquet_logical_types_simple_nullable.parquet` t " +
" order by t.rowKey "
);
String[] columns = {
"rowKey " ,
"_UTF8" ,
"_Enum" ,
"_INT32_RAW" ,
"_INT_8" ,
"_INT_16" ,
"_INT_32" ,
"_UINT_8" ,
"_UINT_16" ,
"_UINT_32" ,
"_INT64_RAW" ,
"_INT_64" ,
"_UINT_64" ,
"_DATE_int32" ,
"_TIME_MILLIS_int32" ,
"_TIMESTAMP_MILLIS_int64" ,
"_INTERVAL_fixed_len_byte_array_12" ,
"_INT96_RAW"
};
testBuilder()
.sqlQuery(query)
.ordered()
.baselineColumns(columns)
.baselineValues(1, "UTF8 string1", "RANDOM_VALUE", 1234567, 123, 12345, 1234567, 123, 1234, 1234567,
1234567890123456L, 1234567890123456L, 1234567890123456L, LocalDate.parse("5350-02-17"),
ZonedDateTime.ofInstant(Instant.ofEpochMilli(1234567), ZoneOffset.UTC).toLocalTime(),
LocalDateTime.parse("1973-11-29T21:33:09.012"),
new Period().plusMonths(875770417).plusDays(943142453).plusMillis(1650536505),
bytes12)
.baselineValues(2, "UTF8 string2", "MAX_VALUE", 2147483647, 127, 32767, 2147483647, 255, 65535, -1,
9223372036854775807L, 9223372036854775807L, -1L, LocalDate.parse("1969-12-31"),
ZonedDateTime.ofInstant(Instant.ofEpochMilli(0xFFFFFFFF), ZoneOffset.UTC).toLocalTime(),
LocalDateTime.parse("2038-01-19T03:14:07.999"),
new Period().plusMonths(16843009).plusDays(16843009).plusMillis(16843009),
bytesOnes)
.baselineValues(3, "UTF8 string3", "MIN_VALUE", -2147483648, -128, -32768, -2147483648, 0, 0, 0,
-9223372036854775808L, -9223372036854775808L, 0L, LocalDate.parse("1970-01-01"),
ZonedDateTime.ofInstant(Instant.ofEpochMilli(0), ZoneOffset.UTC).toLocalTime(),
LocalDateTime.parse("1970-01-01T00:00:00.0"), new Period("PT0S"), bytesZeros)
.baselineValues(4, null, null, null, null, null, null, null, null, null, null, null, null, null,
null, null, null, null)
.build().run();
}
}
| apache-2.0 |
ddraj/hbase-mttr | hbase-server/src/main/java/org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java | 1526 | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.DoNotRetryIOException;
/**
 * Raised when an operation needs the root and all meta regions to be online
 * but at least one of them is not. Extends {@link DoNotRetryIOException}, so
 * callers should not retry the failed operation.
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class NotAllMetaRegionsOnlineException extends DoNotRetryIOException {
  private static final long serialVersionUID = 6439786157874827523L;

  /**
   * Creates the exception without a detail message.
   */
  public NotAllMetaRegionsOnlineException() {
  }

  /**
   * Creates the exception with the given detail message.
   *
   * @param message describes which regions were found offline
   */
  public NotAllMetaRegionsOnlineException(String message) {
    super(message);
  }
}
| apache-2.0 |
pavel-sakun/pentaho-kettle | engine/src/test/java/org/pentaho/di/trans/steps/streamlookup/StreamLookupMetaTest.java | 5864 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.streamlookup;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
import org.pentaho.di.trans.steps.loadsave.initializer.InitializerInterface;
import org.pentaho.di.trans.steps.loadsave.validator.ArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.IntLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.PrimitiveIntArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.StringLoadSaveValidator;
/**
 * Unit tests for {@link StreamLookupMeta}: load/save round-tripping, cloning
 * of info steps, and XML generation with mismatched array lengths.
 */
public class StreamLookupMetaTest implements InitializerInterface<StepMetaInterface> {
  LoadSaveTester loadSaveTester;
  Class<StreamLookupMeta> testMetaClass = StreamLookupMeta.class;

  @Before
  public void setUpLoadSave() throws Exception {
    KettleEnvironment.init();
    PluginRegistry.init( true );

    List<String> attributes =
        Arrays.asList( "inputSorted", "memoryPreservationActive", "usingSortedList", "usingIntegerPair", "keystream",
            "keylookup", "value", "valueName", "valueDefault", "valueDefaultType" );

    // One shared validator produces five-element string arrays for every
    // string-array attribute of the meta.
    FieldLoadSaveValidator<String[]> stringArrayValidator =
        new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );

    Map<String, FieldLoadSaveValidator<?>> fieldValidators = new HashMap<String, FieldLoadSaveValidator<?>>();
    for ( String attr : Arrays.asList( "keystream", "keylookup", "value", "valueName", "valueDefault" ) ) {
      fieldValidators.put( attr, stringArrayValidator );
    }
    fieldValidators.put( "valueDefaultType", new PrimitiveIntArrayLoadSaveValidator( new IntLoadSaveValidator( 7 ), 5 ) );

    Map<String, FieldLoadSaveValidator<?>> typeValidators = new HashMap<String, FieldLoadSaveValidator<?>>();

    loadSaveTester =
        new LoadSaveTester( testMetaClass, attributes, new ArrayList<String>(), new ArrayList<String>(),
            new HashMap<String, String>(), new HashMap<String, String>(), fieldValidators, typeValidators, this );
  }

  // Invoked by the LoadSaveTester so the meta's arrays are sized before the
  // validators fill them in.
  @Override
  public void modify( StepMetaInterface someMeta ) {
    if ( !( someMeta instanceof StreamLookupMeta ) ) {
      return;
    }
    ( (StreamLookupMeta) someMeta ).allocate( 5, 5 );
  }

  @Test
  public void testSerialization() throws KettleException {
    loadSaveTester.testSerialization();
  }

  @Test
  public void testCloneInfoSteps() {
    StreamLookupMeta meta = new StreamLookupMeta();
    meta.setDefault();

    final String infoStepName = UUID.randomUUID().toString();
    StepMeta infoStepMeta = mock( StepMeta.class );
    when( infoStepMeta.getName() ).thenReturn( infoStepName );
    meta.getStepIOMeta().getInfoStreams().get( 0 ).setStepMeta( infoStepMeta );

    StreamLookupMeta copy = (StreamLookupMeta) meta.clone();
    // The clone must carry the info step's name but reference its own stream
    // instance, not the original's.
    assertEquals( infoStepName, copy.getStepIOMeta().getInfoStreams().get( 0 ).getStepname() );
    assertNotSame( meta.getStepIOMeta().getInfoStreams().get( 0 ),
        copy.getStepIOMeta().getInfoStreams().get( 0 ) );
  }

  // PDI-16110: getXML() with value arrays shorter than keystream.
  @Test
  public void testGetXML() {
    StreamLookupMeta meta = new StreamLookupMeta();
    meta.setKeystream( new String[] { "testKeyStreamValue" } );
    meta.setKeylookup( new String[] { "testKeyLookupValue" } );
    meta.setValue( new String[] { "testValue" } );
    meta.setValueName( new String[] {} );
    meta.setValueDefault( new String[] {} );
    meta.setValueDefaultType( new int[] {} );

    // Both calls must complete without throwing.
    meta.afterInjectionSynchronization();
    meta.getXML();

    // After synchronization every value array matches the keystream length.
    Assert.assertEquals( meta.getKeystream().length, meta.getValueName().length );
    Assert.assertEquals( meta.getKeystream().length, meta.getValueDefault().length );
    Assert.assertEquals( meta.getKeystream().length, meta.getValueDefaultType().length );
  }
}
| apache-2.0 |
dain/presto | core/trino-main/src/main/java/io/trino/execution/TaskStatus.java | 9616 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.execution;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import java.net.URI;
import java.util.List;
import java.util.Set;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static io.trino.execution.DynamicFiltersCollector.INITIAL_DYNAMIC_FILTERS_VERSION;
import static io.trino.execution.TaskState.PLANNED;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
/**
 * Immutable, versioned snapshot of a task's execution state. Versions allow
 * receivers to discard stale or out-of-order status updates; see
 * {@link #STARTING_VERSION} and the internal max version used by
 * {@link #failWith}.
 */
public class TaskStatus
{
    /**
     * Version of task status that can be used to create an initial local task
     * that is always older or equal than any remote task.
     */
    public static final long STARTING_VERSION = 0;

    /**
     * A value larger than any valid value. This value can be used to create
     * a final local task that is always newer than any remote task.
     */
    private static final long MAX_VERSION = Long.MAX_VALUE;

    private final TaskId taskId;
    private final String taskInstanceId;
    private final long version;
    private final TaskState state;
    private final URI self;
    private final String nodeId;
    private final Set<Lifespan> completedDriverGroups;
    private final int queuedPartitionedDrivers;
    private final int runningPartitionedDrivers;
    private final boolean outputBufferOverutilized;
    private final DataSize physicalWrittenDataSize;
    private final DataSize memoryReservation;
    private final DataSize systemMemoryReservation;
    private final DataSize revocableMemoryReservation;
    private final long fullGcCount;
    private final Duration fullGcTime;
    private final List<ExecutionFailureInfo> failures;
    private final long dynamicFiltersVersion;

    @JsonCreator
    public TaskStatus(
            @JsonProperty("taskId") TaskId taskId,
            @JsonProperty("taskInstanceId") String taskInstanceId,
            @JsonProperty("version") long version,
            @JsonProperty("state") TaskState state,
            @JsonProperty("self") URI self,
            @JsonProperty("nodeId") String nodeId,
            @JsonProperty("completedDriverGroups") Set<Lifespan> completedDriverGroups,
            @JsonProperty("failures") List<ExecutionFailureInfo> failures,
            @JsonProperty("queuedPartitionedDrivers") int queuedPartitionedDrivers,
            @JsonProperty("runningPartitionedDrivers") int runningPartitionedDrivers,
            @JsonProperty("outputBufferOverutilized") boolean outputBufferOverutilized,
            @JsonProperty("physicalWrittenDataSize") DataSize physicalWrittenDataSize,
            @JsonProperty("memoryReservation") DataSize memoryReservation,
            @JsonProperty("systemMemoryReservation") DataSize systemMemoryReservation,
            @JsonProperty("revocableMemoryReservation") DataSize revocableMemoryReservation,
            @JsonProperty("fullGcCount") long fullGcCount,
            @JsonProperty("fullGcTime") Duration fullGcTime,
            @JsonProperty("dynamicFiltersVersion") long dynamicFiltersVersion)
    {
        this.taskId = requireNonNull(taskId, "taskId is null");
        this.taskInstanceId = requireNonNull(taskInstanceId, "taskInstanceId is null");
        // NOTE(review): version is a constructor argument, so checkArgument would
        // be conventional here; checkState is kept to preserve the exception
        // type (IllegalStateException) existing callers may depend on.
        checkState(version >= STARTING_VERSION, "version must be >= STARTING_VERSION");
        this.version = version;
        this.state = requireNonNull(state, "state is null");
        this.self = requireNonNull(self, "self is null");
        this.nodeId = requireNonNull(nodeId, "nodeId is null");
        this.completedDriverGroups = requireNonNull(completedDriverGroups, "completedDriverGroups is null");
        // Guards below allow zero, so the messages say "non-negative", not "positive".
        checkArgument(queuedPartitionedDrivers >= 0, "queuedPartitionedDrivers must be non-negative");
        this.queuedPartitionedDrivers = queuedPartitionedDrivers;
        checkArgument(runningPartitionedDrivers >= 0, "runningPartitionedDrivers must be non-negative");
        this.runningPartitionedDrivers = runningPartitionedDrivers;
        this.outputBufferOverutilized = outputBufferOverutilized;
        this.physicalWrittenDataSize = requireNonNull(physicalWrittenDataSize, "physicalWrittenDataSize is null");
        this.memoryReservation = requireNonNull(memoryReservation, "memoryReservation is null");
        this.systemMemoryReservation = requireNonNull(systemMemoryReservation, "systemMemoryReservation is null");
        this.revocableMemoryReservation = requireNonNull(revocableMemoryReservation, "revocableMemoryReservation is null");
        this.failures = ImmutableList.copyOf(requireNonNull(failures, "failures is null"));
        checkArgument(fullGcCount >= 0, "fullGcCount is negative");
        this.fullGcCount = fullGcCount;
        this.fullGcTime = requireNonNull(fullGcTime, "fullGcTime is null");
        checkArgument(dynamicFiltersVersion >= INITIAL_DYNAMIC_FILTERS_VERSION, "dynamicFiltersVersion must be >= INITIAL_DYNAMIC_FILTERS_VERSION");
        this.dynamicFiltersVersion = dynamicFiltersVersion;
    }

    @JsonProperty
    public TaskId getTaskId()
    {
        return taskId;
    }

    @JsonProperty
    public String getTaskInstanceId()
    {
        return taskInstanceId;
    }

    @JsonProperty
    public long getVersion()
    {
        return version;
    }

    @JsonProperty
    public TaskState getState()
    {
        return state;
    }

    @JsonProperty
    public URI getSelf()
    {
        return self;
    }

    @JsonProperty
    public String getNodeId()
    {
        return nodeId;
    }

    @JsonProperty
    public Set<Lifespan> getCompletedDriverGroups()
    {
        return completedDriverGroups;
    }

    @JsonProperty
    public List<ExecutionFailureInfo> getFailures()
    {
        return failures;
    }

    @JsonProperty
    public int getQueuedPartitionedDrivers()
    {
        return queuedPartitionedDrivers;
    }

    @JsonProperty
    public int getRunningPartitionedDrivers()
    {
        return runningPartitionedDrivers;
    }

    @JsonProperty
    public DataSize getPhysicalWrittenDataSize()
    {
        return physicalWrittenDataSize;
    }

    @JsonProperty
    public boolean isOutputBufferOverutilized()
    {
        return outputBufferOverutilized;
    }

    @JsonProperty
    public DataSize getMemoryReservation()
    {
        return memoryReservation;
    }

    @JsonProperty
    public DataSize getSystemMemoryReservation()
    {
        return systemMemoryReservation;
    }

    @JsonProperty
    public DataSize getRevocableMemoryReservation()
    {
        return revocableMemoryReservation;
    }

    @JsonProperty
    public long getFullGcCount()
    {
        return fullGcCount;
    }

    @JsonProperty
    public Duration getFullGcTime()
    {
        return fullGcTime;
    }

    @JsonProperty
    public long getDynamicFiltersVersion()
    {
        return dynamicFiltersVersion;
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("taskId", taskId)
                .add("state", state)
                .toString();
    }

    /**
     * Creates the PLANNED status a local task starts with: version
     * {@link #STARTING_VERSION}, empty collections, and zeroed counters.
     */
    public static TaskStatus initialTaskStatus(TaskId taskId, URI location, String nodeId)
    {
        return new TaskStatus(
                taskId,
                "",
                STARTING_VERSION,
                PLANNED,
                location,
                nodeId,
                ImmutableSet.of(),
                ImmutableList.of(),
                0,
                0,
                false,
                DataSize.ofBytes(0),
                DataSize.ofBytes(0),
                DataSize.ofBytes(0),
                DataSize.ofBytes(0),
                0,
                new Duration(0, MILLISECONDS),
                INITIAL_DYNAMIC_FILTERS_VERSION);
    }

    /**
     * Derives a terminal status from an existing one, carrying the given
     * failures and using the max version so it supersedes any remote update.
     */
    public static TaskStatus failWith(TaskStatus taskStatus, TaskState state, List<ExecutionFailureInfo> exceptions)
    {
        return new TaskStatus(
                taskStatus.getTaskId(),
                taskStatus.getTaskInstanceId(),
                MAX_VERSION,
                state,
                taskStatus.getSelf(),
                taskStatus.getNodeId(),
                taskStatus.getCompletedDriverGroups(),
                exceptions,
                taskStatus.getQueuedPartitionedDrivers(),
                taskStatus.getRunningPartitionedDrivers(),
                taskStatus.isOutputBufferOverutilized(),
                taskStatus.getPhysicalWrittenDataSize(),
                taskStatus.getMemoryReservation(),
                taskStatus.getSystemMemoryReservation(),
                taskStatus.getRevocableMemoryReservation(),
                taskStatus.getFullGcCount(),
                taskStatus.getFullGcTime(),
                taskStatus.getDynamicFiltersVersion());
    }
}
| apache-2.0 |
creamer/cas | core/cas-server-core-configuration/src/main/java/org/apereo/cas/configuration/model/core/sso/SsoProperties.java | 668 | package org.apereo.cas.configuration.model.core.sso;
/**
 * Configuration properties class for {@code create.sso}.
 *
 * @author Dmitriy Kopylenko
 * @since 5.0.0
 */
public class SsoProperties {

    /** Backing flag for the {@code missingService} property; defaults to {@code true}. */
    private boolean missingService = true;

    /** Backing flag for the {@code renewedAuthn} property; defaults to {@code true}. */
    private boolean renewedAuthn = true;

    /**
     * @return the current {@code missingService} flag
     */
    public boolean isMissingService() {
        return missingService;
    }

    /**
     * @param missingService new value for the {@code missingService} flag
     */
    public void setMissingService(final boolean missingService) {
        this.missingService = missingService;
    }

    /**
     * @return the current {@code renewedAuthn} flag
     */
    public boolean isRenewedAuthn() {
        return renewedAuthn;
    }

    /**
     * @param renewedAuthn new value for the {@code renewedAuthn} flag
     */
    public void setRenewedAuthn(final boolean renewedAuthn) {
        this.renewedAuthn = renewedAuthn;
    }
}
| apache-2.0 |
rangadi/incubator-beam | sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/KinesisMockWriteTest.java | 10531 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.kinesis;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.amazonaws.http.SdkHttpMetadata;
import com.amazonaws.services.cloudwatch.AmazonCloudWatch;
import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream;
import com.amazonaws.services.kinesis.model.DescribeStreamResult;
import com.amazonaws.services.kinesis.producer.IKinesisProducer;
import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Properties;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link KinesisIO.Write}. */
@RunWith(JUnit4.class)
public class KinesisMockWriteTest {
  private static final String STREAM = "BEAM";
  private static final String PARTITION_KEY = "partitionKey";

  // Two pipelines: p performs the writes; p2 reads the data back in the
  // write-then-read round-trip test.
  @Rule public final transient TestPipeline p = TestPipeline.create();
  @Rule public final transient TestPipeline p2 = TestPipeline.create();
  @Rule public ExpectedException thrown = ExpectedException.none();

  @Before
  public void beforeTest() {
    // Re-initialize the shared singleton mock before each test; the second
    // argument is presumably the shard count -- confirm in KinesisServiceMock.
    KinesisServiceMock kinesisService = KinesisServiceMock.getInstance();
    kinesisService.init(STREAM, 1);
  }

  // Every builder option set on KinesisIO.write() must be readable back
  // unchanged, including producer properties.
  @Test
  public void testWriteBuildsCorrectly() {
    Properties properties = new Properties();
    properties.setProperty("KinesisEndpoint", "localhost");
    properties.setProperty("KinesisPort", "4567");

    KinesisIO.Write write =
        KinesisIO.write()
            .withStreamName(STREAM)
            .withPartitionKey(PARTITION_KEY)
            .withPartitioner(new BasicKinesisPartitioner())
            .withAWSClientsProvider(new FakeKinesisProvider())
            .withProducerProperties(properties)
            .withRetries(10);

    assertEquals(STREAM, write.getStreamName());
    assertEquals(PARTITION_KEY, write.getPartitionKey());
    assertEquals(properties, write.getProducerProperties());
    assertEquals(FakeKinesisProvider.class, write.getAWSClientsProvider().getClass());
    assertEquals(BasicKinesisPartitioner.class, write.getPartitioner().getClass());
    assertEquals(10, write.getRetries());
    assertEquals("localhost", write.getProducerProperties().getProperty("KinesisEndpoint"));
    assertEquals("4567", write.getProducerProperties().getProperty("KinesisPort"));
  }

  // Validation: a transform without a stream name must be rejected at expand().
  @Test
  public void testWriteValidationFailsMissingStreamName() {
    KinesisIO.Write write =
        KinesisIO.write()
            .withPartitionKey(PARTITION_KEY)
            .withAWSClientsProvider(new FakeKinesisProvider());
    thrown.expect(IllegalArgumentException.class);
    write.expand(null);
  }

  // Validation: neither a partition key nor a partitioner was supplied.
  @Test
  public void testWriteValidationFailsMissingPartitioner() {
    KinesisIO.Write write =
        KinesisIO.write().withStreamName(STREAM).withAWSClientsProvider(new FakeKinesisProvider());
    thrown.expect(IllegalArgumentException.class);
    write.expand(null);
  }

  // Validation: supplying both a partition key and a partitioner is ambiguous
  // and must be rejected.
  // NOTE(review): "PartitioneKey" in the method name looks like a typo for
  // "PartitionerKey"/"PartitionKey" -- harmless to JUnit, but worth renaming.
  @Test
  public void testWriteValidationFailsPartitionerAndPartitioneKey() {
    KinesisIO.Write write =
        KinesisIO.write()
            .withStreamName(STREAM)
            .withPartitionKey(PARTITION_KEY)
            .withPartitioner(new BasicKinesisPartitioner())
            .withAWSClientsProvider(new FakeKinesisProvider());
    thrown.expect(IllegalArgumentException.class);
    write.expand(null);
  }

  // Validation: an AWS clients provider is mandatory.
  @Test
  public void testWriteValidationFailsMissingAWSClientsProvider() {
    KinesisIO.Write write =
        KinesisIO.write().withPartitionKey(PARTITION_KEY).withStreamName(STREAM);
    thrown.expect(IllegalArgumentException.class);
    write.expand(null);
  }

  // Writing to a stream the (mocked) service reports as absent fails at run time.
  @Test
  public void testNotExistedStream() {
    Iterable<byte[]> data = ImmutableList.of("1".getBytes(StandardCharsets.UTF_8));
    p.apply(Create.of(data))
        .apply(
            KinesisIO.write()
                .withStreamName(STREAM)
                .withPartitionKey(PARTITION_KEY)
                .withAWSClientsProvider(new FakeKinesisProvider(false)));
    thrown.expect(RuntimeException.class);
    p.run().waitUntilFinish();
  }

  // A non-numeric KinesisPort producer property must surface as a runtime failure.
  @Test
  public void testSetInvalidProperty() {
    Properties properties = new Properties();
    properties.setProperty("KinesisPort", "qwe");

    Iterable<byte[]> data = ImmutableList.of("1".getBytes(StandardCharsets.UTF_8));
    p.apply(Create.of(data))
        .apply(
            KinesisIO.write()
                .withStreamName(STREAM)
                .withPartitionKey(PARTITION_KEY)
                .withAWSClientsProvider(new FakeKinesisProvider())
                .withProducerProperties(properties));
    thrown.expect(RuntimeException.class);
    p.run().waitUntilFinish();
  }

  // Happy path: all three records are observed by the mocked Kinesis service.
  @Test
  public void testWrite() {
    KinesisServiceMock kinesisService = KinesisServiceMock.getInstance();

    Properties properties = new Properties();
    properties.setProperty("KinesisEndpoint", "localhost");
    properties.setProperty("KinesisPort", "4567");
    properties.setProperty("VerifyCertificate", "false");

    Iterable<byte[]> data =
        ImmutableList.of(
            "1".getBytes(StandardCharsets.UTF_8),
            "2".getBytes(StandardCharsets.UTF_8),
            "3".getBytes(StandardCharsets.UTF_8));
    p.apply(Create.of(data))
        .apply(
            KinesisIO.write()
                .withStreamName(STREAM)
                .withPartitionKey(PARTITION_KEY)
                .withAWSClientsProvider(new FakeKinesisProvider())
                .withProducerProperties(properties));
    p.run().waitUntilFinish();

    assertEquals(3, kinesisService.getAddedRecords().get());
  }

  // A producer whose flush fails must propagate the failure after the single
  // configured retry is exhausted.
  @Test
  public void testWriteFailed() {
    Iterable<byte[]> data = ImmutableList.of("1".getBytes(StandardCharsets.UTF_8));
    p.apply(Create.of(data))
        .apply(
            KinesisIO.write()
                .withStreamName(STREAM)
                .withPartitionKey(PARTITION_KEY)
                .withAWSClientsProvider(new FakeKinesisProvider().setFailedFlush(true))
                .withRetries(1));
    thrown.expect(RuntimeException.class);
    p.run().waitUntilFinish();
  }

  // Round trip: write via pipeline p, then read the mock's sharded data back
  // via pipeline p2 and compare.
  @Test
  public void testWriteAndReadFromMockKinesis() {
    KinesisServiceMock kinesisService = KinesisServiceMock.getInstance();

    Iterable<byte[]> data =
        ImmutableList.of(
            "1".getBytes(StandardCharsets.UTF_8), "2".getBytes(StandardCharsets.UTF_8));
    p.apply(Create.of(data))
        .apply(
            KinesisIO.write()
                .withStreamName(STREAM)
                .withPartitionKey(PARTITION_KEY)
                .withAWSClientsProvider(new FakeKinesisProvider()));
    p.run().waitUntilFinish();
    assertEquals(2, kinesisService.getAddedRecords().get());

    List<List<AmazonKinesisMock.TestData>> testData = kinesisService.getShardedData();

    int noOfShards = 1;
    int noOfEventsPerShard = 2;
    PCollection<AmazonKinesisMock.TestData> result =
        p2.apply(
                KinesisIO.read()
                    .withStreamName(STREAM)
                    .withInitialPositionInStream(InitialPositionInStream.TRIM_HORIZON)
                    .withAWSClientsProvider(new AmazonKinesisMock.Provider(testData, 10))
                    .withMaxNumRecords(noOfShards * noOfEventsPerShard))
            .apply(ParDo.of(new KinesisMockReadTest.KinesisRecordToTestData()));
    PAssert.that(result).containsInAnyOrder(Iterables.concat(testData));
    p2.run().waitUntilFinish();
  }

  /** Partitioner used by the validation tests; keys records by payload length. */
  private static final class BasicKinesisPartitioner implements KinesisPartitioner {
    @Override
    public String getPartitionKey(byte[] value) {
      return String.valueOf(value.length);
    }

    @Override
    public String getExplicitHashKey(byte[] value) {
      return null;
    }
  }

  /**
   * AWSClientsProvider stub: hands out a mocked AmazonKinesis client whose
   * describeStream reports 200 (stream exists) or 404, and a KinesisProducerMock
   * that can be configured to fail its flush.
   */
  private static final class FakeKinesisProvider implements AWSClientsProvider {
    private boolean isExistingStream = true;
    private boolean isFailedFlush = false;

    public FakeKinesisProvider() {}

    public FakeKinesisProvider(boolean isExistingStream) {
      this.isExistingStream = isExistingStream;
    }

    // Fluent toggle consumed by testWriteFailed.
    public FakeKinesisProvider setFailedFlush(boolean failedFlush) {
      isFailedFlush = failedFlush;
      return this;
    }

    @Override
    public AmazonKinesis getKinesisClient() {
      return getMockedAmazonKinesisClient();
    }

    @Override
    public AmazonCloudWatch getCloudWatchClient() {
      // No test exercises CloudWatch; fail loudly if something does.
      throw new RuntimeException("Not implemented");
    }

    @Override
    public IKinesisProducer createKinesisProducer(KinesisProducerConfiguration config) {
      return new KinesisProducerMock(config, isFailedFlush);
    }

    private AmazonKinesis getMockedAmazonKinesisClient() {
      // 200 when the stream should look present, 404 otherwise.
      int statusCode = isExistingStream ? 200 : 404;
      SdkHttpMetadata httpMetadata = mock(SdkHttpMetadata.class);
      when(httpMetadata.getHttpStatusCode()).thenReturn(statusCode);

      DescribeStreamResult streamResult = mock(DescribeStreamResult.class);
      when(streamResult.getSdkHttpMetadata()).thenReturn(httpMetadata);

      AmazonKinesis client = mock(AmazonKinesis.class);
      when(client.describeStream(any(String.class))).thenReturn(streamResult);
      return client;
    }
  }
}
| apache-2.0 |
curso007/camel | components/camel-mail/src/test/java/org/apache/camel/component/mail/RawMailMessageTest.java | 6656 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mail;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import javax.mail.Folder;
import javax.mail.Message;
import javax.mail.Store;
import javax.mail.internet.MimeMessage;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.jvnet.mock_javamail.Mailbox;
/**
 * Unit test for consuming mail either as the raw {@code javax.mail.Message}
 * (routes configured with {@code mapMailMessage=false}) or via Camel's default
 * mail-body mapping, over both POP3 and IMAP.
 */
public class RawMailMessageTest extends CamelTestSupport {

    @Override
    public void setUp() throws Exception {
        // Start from clean mailboxes and pre-load one signed test mail into
        // each of the four accounts consumed by the routes below.
        Mailbox.clearAll();
        prepareMailbox("jonesPop3", "pop3");
        prepareMailbox("jonesRawPop3", "pop3");
        prepareMailbox("jonesImap", "imap");
        prepareMailbox("jonesRawImap", "imap");
        super.setUp();
    }

    @Test
    public void testGetRawJavaMailMessage() throws Exception {
        Mailbox.clearAll();

        // Headers for the outgoing SMTP message.
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("To", "davsclaus@apache.org");
        map.put("From", "jstrachan@apache.org");
        map.put("Subject", "Camel rocks");

        String body = "Hello Claus.\nYes it does.\n\nRegards James.";

        getMockEndpoint("mock:mail").expectedMessageCount(1);
        template.sendBodyAndHeaders("smtp://davsclaus@apache.org", body, map);
        assertMockEndpointsSatisfied();

        Exchange exchange = getMockEndpoint("mock:mail").getReceivedExchanges().get(0);

        // START SNIPPET: e1
        // get access to the raw javax.mail.Message as shown below
        Message javaMailMessage = exchange.getIn(MailMessage.class).getMessage();
        assertNotNull(javaMailMessage);
        assertEquals("Camel rocks", javaMailMessage.getSubject());
        // END SNIPPET: e1
    }

    @Test
    public void testRawMessageConsumerPop3() throws Exception {
        testRawMessageConsumer("Pop3");
    }

    @Test
    public void testRawMessageConsumerImap() throws Exception {
        testRawMessageConsumer("Imap");
    }

    // Consumes from the jonesRaw<type> route (mapMailMessage=false) and checks
    // the exchange body is still the raw javax.mail.Message, subject intact.
    private void testRawMessageConsumer(String type) throws Exception {
        Mailbox mailboxRaw = Mailbox.get("jonesRaw" + type + "@localhost");
        assertEquals(1, mailboxRaw.size());

        MockEndpoint mock = getMockEndpoint("mock://rawMessage" + type);
        mock.expectedMessageCount(1);
        mock.expectedBodyReceived().body().isNotNull();
        assertMockEndpointsSatisfied();

        Message mailMessage = mock.getExchanges().get(0).getIn().getBody(Message.class);
        assertNotNull("mail subject should not be null", mailMessage.getSubject());
        assertEquals("mail subject should be hurz", "hurz", mailMessage.getSubject());

        Map<String, Object> headers = mock.getExchanges().get(0).getIn().getHeaders();
        assertNotNull(headers);
        assertTrue(!headers.isEmpty());
    }

    @Test
    public void testNormalMessageConsumerPop3() throws Exception {
        testNormalMessageConsumer("Pop3");
    }

    @Test
    public void testNormalMessageConsumerImap() throws Exception {
        testNormalMessageConsumer("Imap");
    }

    // Consumes from the jones<type> route (default mail mapping); re-parsing
    // the mapped String body as MIME must NOT yield a subject header, which is
    // exactly what distinguishes it from the raw-message routes above.
    private void testNormalMessageConsumer(String type) throws Exception {
        Mailbox mailbox = Mailbox.get("jones" + type + "@localhost");
        assertEquals(1, mailbox.size());

        MockEndpoint mock = getMockEndpoint("mock://normalMessage" + type);
        mock.expectedMessageCount(1);
        mock.expectedBodyReceived().body().isNotNull();
        assertMockEndpointsSatisfied();

        String body = mock.getExchanges().get(0).getIn().getBody(String.class);
        MimeMessage mm = new MimeMessage(null, new ByteArrayInputStream(body.getBytes()));
        String subject = mm.getSubject();
        assertNull("mail subject should not be available", subject);

        Map<String, Object> headers = mock.getExchanges().get(0).getIn().getHeaders();
        assertNotNull(headers);
        assertTrue(!headers.isEmpty());
    }

    // Connects to the given account's INBOX (mock mail server) and appends the
    // signed test mail consumed by the tests above.
    private void prepareMailbox(String user, String type) throws Exception {
        // connect to mailbox
        JavaMailSender sender = new DefaultJavaMailSender();
        Store store = sender.getSession().getStore(type);
        store.connect("localhost", 25, user, "secret");
        Folder folder = store.getFolder("INBOX");
        folder.open(Folder.READ_WRITE);
        folder.expunge();

        InputStream is = getClass().getResourceAsStream("/SignedMailTestCaseHurz.elm");
        Message hurzMsg = new MimeMessage(sender.getSession(), is);
        Message[] messages = new Message[] {hurzMsg};

        // insert one signed message
        folder.appendMessages(messages);
        folder.close(true);
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                // Raw routes keep the javax.mail.Message (mapMailMessage=false);
                // normal routes use Camel's default mail-to-body mapping.
                from("pop3://davsclaus@apache.org").to("mock:mail");

                from("pop3://jonesRawPop3@localhost?password=secret&consumer.initialDelay=100&consumer.delay=100&delete=true&mapMailMessage=false")
                    .to("mock://rawMessagePop3");

                from("imap://jonesRawImap@localhost?password=secret&consumer.initialDelay=100&consumer.delay=100&delete=true&mapMailMessage=false")
                    .to("mock://rawMessageImap");

                from("pop3://jonesPop3@localhost?password=secret&consumer.initialDelay=100&consumer.delay=100&delete=true")
                    .to("mock://normalMessagePop3");

                from("imap://jonesImap@localhost?password=secret&consumer.initialDelay=100&consumer.delay=100&delete=true")
                    .to("mock://normalMessageImap");
            }
        };
    }
}
MichaelNedzelsky/intellij-community | platform/xdebugger-impl/src/com/intellij/xdebugger/impl/breakpoints/XBreakpointPanelProvider.java | 6449 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xdebugger.impl.breakpoints;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xdebugger.XDebuggerManager;
import com.intellij.xdebugger.XDebuggerUtil;
import com.intellij.xdebugger.breakpoints.*;
import com.intellij.xdebugger.breakpoints.ui.XBreakpointGroupingRule;
import com.intellij.xdebugger.impl.breakpoints.ui.BreakpointItem;
import com.intellij.xdebugger.impl.breakpoints.ui.BreakpointPanelProvider;
import com.intellij.xdebugger.impl.breakpoints.ui.grouping.XBreakpointCustomGroupingRule;
import com.intellij.xdebugger.impl.breakpoints.ui.grouping.XBreakpointFileGroupingRule;
import com.intellij.xdebugger.impl.breakpoints.ui.grouping.XBreakpointGroupingByTypeRule;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.List;
/**
 * {@code BreakpointPanelProvider} backed by the XDebugger breakpoint model:
 * supplies grouping rules, exposes XBreakpoints as {@code BreakpointItem}s,
 * and bridges XBreakpointManager change events to {@code BreakpointsListener}s.
 *
 * @author nik
 */
public class XBreakpointPanelProvider extends BreakpointPanelProvider<XBreakpoint> {

  // Wrappers registered via addListener(); a lock-free copy-on-write list so
  // removal can happen concurrently with iteration in removeListener().
  private final List<MyXBreakpointListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

  @Override
  public void createBreakpointsGroupingRules(Collection<XBreakpointGroupingRule> rules) {
    // Order here determines the order the rules are offered in the UI.
    rules.add(new XBreakpointGroupingByTypeRule());
    rules.add(new XBreakpointFileGroupingRule());
    rules.add(new XBreakpointCustomGroupingRule());
  }

  @Override
  public void addListener(final BreakpointsListener listener, Project project, Disposable disposable) {
    // Wrap the generic listener so we can later find it (and the manager it
    // was registered with) again in removeListener().
    XBreakpointManager breakpointManager = XDebuggerManager.getInstance(project).getBreakpointManager();
    final MyXBreakpointListener listener1 = new MyXBreakpointListener(listener, breakpointManager);
    breakpointManager.addBreakpointListener(listener1);
    myListeners.add(listener1);
    // Auto-unsubscribe when the owning disposable is disposed.
    Disposer.register(disposable, new Disposable() {
      @Override
      public void dispose() {
        removeListener(listener);
      }
    });
  }

  @Override
  protected void removeListener(BreakpointsListener listener) {
    // Find the wrapper created for this listener, detach it from the manager
    // it was registered with, and drop it from our bookkeeping list.
    for (MyXBreakpointListener breakpointListener : myListeners) {
      if (breakpointListener.myListener == listener) {
        XBreakpointManager manager = breakpointListener.myBreakpointManager;
        manager.removeBreakpointListener(breakpointListener);
        myListeners.remove(breakpointListener);
        break;
      }
    }
  }

  // Relative priority among breakpoint panel providers; 0 is the baseline.
  public int getPriority() {
    return 0;
  }

  /**
   * Finds a line breakpoint (of any registered line-breakpoint type) at the
   * line containing {@code offset}, or returns {@code null} if none exists.
   */
  @Nullable
  public XBreakpoint<?> findBreakpoint(@NotNull final Project project, @NotNull final Document document, final int offset) {
    XBreakpointManager breakpointManager = XDebuggerManager.getInstance(project).getBreakpointManager();
    int line = document.getLineNumber(offset);
    VirtualFile file = FileDocumentManager.getInstance().getFile(document);
    if (file == null) {
      return null;
    }
    for (XLineBreakpointType<?> type : XDebuggerUtil.getInstance().getLineBreakpointTypes()) {
      XLineBreakpoint<? extends XBreakpointProperties> breakpoint = breakpointManager.findBreakpointAtLine(type, file, line);
      if (breakpoint != null) {
        return breakpoint;
      }
    }
    return null;
  }

  @Override
  public GutterIconRenderer getBreakpointGutterIconRenderer(Object breakpoint) {
    // Only line breakpoints carry an editor highlighter with a gutter icon.
    if (breakpoint instanceof XLineBreakpointImpl) {
      RangeHighlighter highlighter = ((XLineBreakpointImpl)breakpoint).getHighlighter();
      if (highlighter != null) {
        return highlighter.getGutterIconRenderer();
      }
    }
    return null;
  }

  // Intentionally a no-op: nothing to persist or clean up when the breakpoints
  // dialog is closed for XDebugger breakpoints.
  public void onDialogClosed(final Project project) {
  }

  @Override
  public void provideBreakpointItems(Project project, Collection<BreakpointItem> items) {
    // Collect every breakpoint of every registered type into UI items.
    final XBreakpointType<?, ?>[] types = XBreakpointUtil.getBreakpointTypes();
    final XBreakpointManager manager = XDebuggerManager.getInstance(project).getBreakpointManager();
    for (XBreakpointType<?, ?> type : types) {
      final Collection<? extends XBreakpoint<?>> breakpoints = manager.getBreakpoints(type);
      if (breakpoints.isEmpty()) continue;

      for (XBreakpoint<?> breakpoint : breakpoints) {
        items.add(new XBreakpointItem(breakpoint));
      }
    }
  }

  /**
   * Adapts a {@code BreakpointsListener} to the XBreakpointListener interface;
   * any add/remove/change event is reported as a generic "breakpoints changed".
   * Also remembers the manager it was registered with so removeListener() can
   * detach it again.
   */
  private static class MyXBreakpointListener implements XBreakpointListener<XBreakpoint<?>> {
    public final BreakpointsListener myListener;
    public final XBreakpointManager myBreakpointManager;

    public MyXBreakpointListener(BreakpointsListener listener, XBreakpointManager breakpointManager) {
      myListener = listener;
      myBreakpointManager = breakpointManager;
    }

    @Override
    public void breakpointAdded(@NotNull XBreakpoint<?> breakpoint) {
      myListener.breakpointsChanged();
    }

    @Override
    public void breakpointRemoved(@NotNull XBreakpoint<?> breakpoint) {
      myListener.breakpointsChanged();
    }

    @Override
    public void breakpointChanged(@NotNull XBreakpoint<?> breakpoint) {
      myListener.breakpointsChanged();
    }
  }

  // NOTE(review): no usages of this action are visible in this file — it may
  // be dead code; confirm before removing.
  private static class AddXBreakpointAction extends AnAction {
    private final XBreakpointType<?, ?> myType;

    public AddXBreakpointAction(XBreakpointType<?, ?> type) {
      myType = type;
      getTemplatePresentation().setIcon(type.getEnabledIcon());
      getTemplatePresentation().setText(type.getTitle());
    }

    @Override
    public void actionPerformed(AnActionEvent e) {
      myType.addBreakpoint(getEventProject(e), null);
    }
  }
}
| apache-2.0 |
psiroky/uberfire | uberfire-workbench/uberfire-workbench-client/src/test/java/org/uberfire/client/workbench/panels/impl/SimpleWorkbenchPanelViewUnitTestWrapper.java | 1045 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.client.workbench.panels.impl;
/**
 * Test double for {@code SimpleWorkbenchPanelView} that lets unit tests force
 * the value reported by {@link #isAttached()} instead of relying on a real
 * widget hierarchy.
 */
public class SimpleWorkbenchPanelViewUnitTestWrapper extends SimpleWorkbenchPanelView {

    // Value returned by isAttached(); controlled entirely by the test.
    private boolean attachedState;

    @Override
    public boolean isAttached() {
        return attachedState;
    }

    public void forceAttachedState( boolean attached ) {
        this.attachedState = attached;
    }
}
| apache-2.0 |
domix/spring-boot | spring-boot-cli/src/it/java/org/springframework/boot/cli/CommandLineIT.java | 2817 | /*
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.cli;
import java.io.IOException;
import org.junit.Test;
import org.springframework.boot.cli.infrastructure.CommandLineInvoker;
import org.springframework.boot.cli.infrastructure.CommandLineInvoker.Invocation;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.Assert.assertThat;
/**
 * Integration Tests for the command line application. Each test spawns the
 * real CLI via {@code CommandLineInvoker} and asserts on exit code, stdout
 * and stderr.
 *
 * @author Andy Wilkinson
 * @author Phillip Webb
 */
public class CommandLineIT {

    private final CommandLineInvoker cli = new CommandLineInvoker();

    @Test
    public void hintProducesListOfValidCommands() throws IOException,
            InterruptedException {
        Invocation cli = this.cli.invoke("hint");
        // Exit 0, clean stderr, and exactly one output line per known command.
        assertThat(cli.await(), equalTo(0));
        assertThat(cli.getErrorOutput().length(), equalTo(0));
        assertThat(cli.getStandardOutputLines().size(), equalTo(10));
    }

    @Test
    public void invokingWithNoArgumentsDisplaysHelp() throws IOException,
            InterruptedException {
        // No arguments is a usage error (exit 1) but prints usage help.
        Invocation cli = this.cli.invoke();
        assertThat(cli.await(), equalTo(1));
        assertThat(cli.getErrorOutput().length(), equalTo(0));
        assertThat(cli.getStandardOutput(), startsWith("usage:"));
    }

    @Test
    public void unrecognizedCommandsAreHandledGracefully() throws IOException,
            InterruptedException {
        // Unknown command: exit 1 with a diagnostic on stderr, nothing on stdout.
        Invocation cli = this.cli.invoke("not-a-real-command");
        assertThat(cli.await(), equalTo(1));
        assertThat(cli.getErrorOutput(),
                containsString("'not-a-real-command' is not a valid command"));
        assertThat(cli.getStandardOutput().length(), equalTo(0));
    }

    @Test
    public void version() throws IOException, InterruptedException {
        Invocation cli = this.cli.invoke("version");
        assertThat(cli.await(), equalTo(0));
        assertThat(cli.getErrorOutput().length(), equalTo(0));
        assertThat(cli.getStandardOutput(), startsWith("Spring CLI v"));
    }

    @Test
    public void help() throws IOException, InterruptedException {
        // NOTE(review): "help" exits 1 here, like the no-argument case —
        // presumably intentional since it is treated as a usage request.
        Invocation cli = this.cli.invoke("help");
        assertThat(cli.await(), equalTo(1));
        assertThat(cli.getErrorOutput().length(), equalTo(0));
        assertThat(cli.getStandardOutput(), startsWith("usage:"));
    }
}
| apache-2.0 |
kunickiaj/datacollector | kudu-protolib/src/test/java/com/streamsets/pipeline/stage/destination/kudu/TestKuduOperationType.java | 1508 | /*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.destination.kudu;
import org.junit.Test;
import org.junit.Assert;
public class TestKuduOperationType {

  /**
   * Verifies the conversion contract of {@code KuduOperationType.convertToIntCode}:
   * supported numeric codes map to themselves, unsupported numeric codes map to
   * -1, and non-integer input propagates {@link NumberFormatException}.
   *
   * <p>Renamed from {@code TestconvertToIntCode} to follow Java/JUnit naming
   * conventions; JUnit 4 discovers tests via the {@code @Test} annotation, so
   * the rename does not affect execution.
   */
  @Test
  public void testConvertToIntCode() throws Exception {
    // Supported operation codes are passed through unchanged.
    int insert = KuduOperationType.convertToIntCode("1");
    Assert.assertEquals(1, insert);
    int delete = KuduOperationType.convertToIntCode("2");
    Assert.assertEquals(2, delete);

    // Numeric but unsupported codes are signalled with -1.
    int unsupported1 = KuduOperationType.convertToIntCode("10");
    Assert.assertEquals(-1, unsupported1);
    int unsupported2 = KuduOperationType.convertToIntCode("-10");
    Assert.assertEquals(-1, unsupported2);

    // Non-numeric input must raise NumberFormatException.
    try {
      KuduOperationType.convertToIntCode("insert");
      Assert.fail("Expected NumberFormatException for non-numeric input");
    } catch (NumberFormatException ex) {
      // expected
    }

    // Non-integer numeric input must also raise NumberFormatException.
    try {
      KuduOperationType.convertToIntCode("0.5");
      Assert.fail("Expected NumberFormatException for non-integer input");
    } catch (NumberFormatException ex) {
      // expected
    }
  }
}
| apache-2.0 |
tmess567/syncope | common/lib/src/main/java/org/apache/syncope/common/lib/to/ProvisionTO.java | 2858 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.common.lib.to;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.syncope.common.lib.AbstractBaseBean;
/**
 * Transfer object describing how an any type is provisioned to an external
 * resource: target object class, auxiliary classes, sync token, attribute
 * mapping and virtual schemas.
 */
@XmlRootElement(name = "provision")
@XmlType
public class ProvisionTO extends AbstractBaseBean implements EntityTO {

    private static final long serialVersionUID = 8298910216218007927L;

    // Entity key (see EntityTO contract).
    private String key;

    // Name of the any type this provision applies to.
    private String anyType;

    // Target object class on the external resource.
    private String objectClass;

    // Auxiliary classes contributing extra attributes to the provision.
    private final List<String> auxClasses = new ArrayList<>();

    // Opaque token tracking the last synchronization point.
    private String syncToken;

    // Attribute mapping between internal and external representations.
    private MappingTO mapping;

    // Virtual schemas linked to this provision.
    private final List<String> virSchemas = new ArrayList<>();

    @Override
    public String getKey() {
        return key;
    }

    @Override
    public void setKey(final String key) {
        this.key = key;
    }

    public String getAnyType() {
        return anyType;
    }

    public void setAnyType(final String anyType) {
        this.anyType = anyType;
    }

    public String getObjectClass() {
        return objectClass;
    }

    public void setObjectClass(final String objectClass) {
        this.objectClass = objectClass;
    }

    // Live list: mutations through the returned reference alter this bean,
    // which is what the JAXB/Jackson bindings rely on.
    @XmlElementWrapper(name = "auxClasses")
    @XmlElement(name = "class")
    @JsonProperty("auxClasses")
    public List<String> getAuxClasses() {
        return auxClasses;
    }

    public String getSyncToken() {
        return syncToken;
    }

    public void setSyncToken(final String syncToken) {
        this.syncToken = syncToken;
    }

    public MappingTO getMapping() {
        return mapping;
    }

    public void setMapping(final MappingTO mapping) {
        this.mapping = mapping;
    }

    // Live list, same contract as getAuxClasses().
    @XmlElementWrapper(name = "virSchemas")
    @XmlElement(name = "virSchema")
    @JsonProperty("virSchemas")
    public List<String> getVirSchemas() {
        return virSchemas;
    }
}
| apache-2.0 |
zhangdian/solr4.6.0 | solr/core/src/test/org/apache/solr/core/TestSolrXMLSerializer.java | 6577 | package org.apache.solr.core;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.solr.core.SolrXMLSerializer.SolrCoreXMLDef;
import org.apache.solr.core.SolrXMLSerializer.SolrXMLDef;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
 * Tests that SolrXMLSerializer round-trips a SolrXMLDef to solr.xml (both via a
 * Writer and via a file) with all expected solr/cores/core attributes present.
 *
 * <p>Fixes: removed a duplicated {@code loggingAttribs} assignment and renamed
 * the misspelled private constant {@code peristentKey} to {@code persistentKey}.
 */
public class TestSolrXMLSerializer extends LuceneTestCase {
  private static final XPathFactory xpathFactory = XPathFactory.newInstance();

  // Attribute key/value pairs expected to survive serialization.
  private static final String defaultCoreNameKey = "defaultCoreName";
  private static final String defaultCoreNameVal = "collection1";
  private static final String persistentKey = "persistent";
  private static final String persistentVal = "true";
  private static final String sharedLibKey = "sharedLib";
  private static final String sharedLibVal = "true";
  private static final String adminPathKey = "adminPath";
  private static final String adminPathVal = "/admin";
  // NOTE(review): the shareSchema attribute is written under the key "admin" —
  // looks like a copy/paste slip in the original test data, but both sides of
  // the round trip use the same constant so the test is self-consistent.
  private static final String shareSchemaKey = "admin";
  private static final String shareSchemaVal = "true";
  private static final String instanceDirKey = "instanceDir";
  private static final String instanceDirVal = "core1";

  @Test
  public void basicUsageTest() throws Exception {
    SolrXMLSerializer serializer = new SolrXMLSerializer();

    SolrXMLDef solrXMLDef = getTestSolrXMLDef(defaultCoreNameKey,
        defaultCoreNameVal, persistentKey, persistentVal, sharedLibKey,
        sharedLibVal, adminPathKey, adminPathVal, shareSchemaKey,
        shareSchemaVal, instanceDirKey, instanceDirVal);

    // Serialize to an in-memory Writer and verify the produced XML.
    Writer w = new StringWriter();
    try {
      serializer.persist(w, solrXMLDef);
    } finally {
      w.close();
    }
    assertResults(((StringWriter) w).getBuffer().toString().getBytes("UTF-8"));

    // again with default file
    File tmpFile = _TestUtil.createTempFile("solr.xml", null, TEMP_DIR);
    serializer.persistFile(tmpFile, solrXMLDef);
    assertResults(FileUtils.readFileToString(tmpFile, "UTF-8").getBytes("UTF-8"));
    tmpFile.delete();
  }

  // Parses the serialized bytes and asserts the expected attributes via XPath.
  private void assertResults(byte[] bytes)
      throws ParserConfigurationException, IOException, SAXException, XPathExpressionException {
    DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
    BufferedInputStream is = new BufferedInputStream(new ByteArrayInputStream(bytes));
    Document document;
    try {
      // is.mark(0);
      // System.out.println("SolrXML:" + IOUtils.toString(is, "UTF-8"));
      // is.reset();
      document = builder.parse(is);
    } finally {
      is.close();
    }
    assertTrue(exists("/solr[@" + persistentKey + "='" + persistentVal + "']", document));
    assertTrue(exists("/solr[@" + sharedLibKey + "='" + sharedLibVal + "']", document));
    assertTrue(exists("/solr/cores[@" + defaultCoreNameKey + "='" + defaultCoreNameVal + "']", document));
    assertTrue(exists("/solr/cores[@" + adminPathKey + "='" + adminPathVal + "']", document));
    assertTrue(exists("/solr/cores/core[@" + instanceDirKey + "='" + instanceDirVal + "']", document));
  }

  // Builds the <solr><cores><core> definition used by basicUsageTest().
  private SolrXMLDef getTestSolrXMLDef(String defaultCoreNameKey,
      String defaultCoreNameVal, String persistentKey, String persistentVal,
      String sharedLibKey, String sharedLibVal, String adminPathKey,
      String adminPathVal, String shareSchemaKey, String shareSchemaVal,
      String instanceDirKey, String instanceDirVal) {
    // <solr attrib="value">
    Map<String,String> rootSolrAttribs = new HashMap<String,String>();
    rootSolrAttribs.put(sharedLibKey, sharedLibVal);
    rootSolrAttribs.put(persistentKey, persistentVal);

    // <solr attrib="value"> <cores attrib="value">
    Map<String,String> coresAttribs = new HashMap<String,String>();
    coresAttribs.put(adminPathKey, adminPathVal);
    coresAttribs.put(shareSchemaKey, shareSchemaVal);
    coresAttribs.put(defaultCoreNameKey, defaultCoreNameVal);

    SolrXMLDef solrXMLDef = new SolrXMLDef();

    // <solr attrib="value"> <cores attrib="value"> <core attrib="value">
    List<SolrCoreXMLDef> solrCoreXMLDefs = new ArrayList<SolrCoreXMLDef>();
    SolrCoreXMLDef coreDef = new SolrCoreXMLDef();
    Map<String,String> coreAttribs = new HashMap<String,String>();
    coreAttribs.put(instanceDirKey, instanceDirVal);
    coreDef.coreAttribs = coreAttribs;
    coreDef.coreProperties = new Properties();
    solrCoreXMLDefs.add(coreDef);
    solrXMLDef.coresDefs = solrCoreXMLDefs;

    Properties containerProperties = new Properties();
    solrXMLDef.containerProperties = containerProperties;
    solrXMLDef.solrAttribs = rootSolrAttribs;
    solrXMLDef.coresAttribs = coresAttribs;
    // Fixed: loggingAttribs was assigned twice in the original; presumably the
    // serializer requires these maps non-null even when empty — confirm.
    solrXMLDef.loggingAttribs = new HashMap<String, String>();
    solrXMLDef.watcherAttribs = new HashMap<String, String>();
    return solrXMLDef;
  }

  // XPath helper: true when the expression matches at least one node.
  public static boolean exists(String xpathStr, Node node)
      throws XPathExpressionException {
    XPath xpath = xpathFactory.newXPath();
    return (Boolean) xpath.evaluate(xpathStr, node, XPathConstants.BOOLEAN);
  }
}
| apache-2.0 |
stoksey69/googleads-java-lib | modules/adwords_axis/src/main/java/com/google/api/ads/adwords/axis/v201502/o/DisplayAdSpec.java | 10202 | /**
* DisplayAdSpec.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201502.o;
/**
* Placement request/response object which provides details about
* display
* ad types, options, and other available configuration variables.
*/
public class DisplayAdSpec implements java.io.Serializable {
/* Display ad subtypes requested. Only one of each DisplayType
* is allowed and multiples will result in an error being returned from
* the
* service. Parameters specific to each display type
* may be set within this
* set. */
private com.google.api.ads.adwords.axis.v201502.o.DisplayType[] displayTypes;
private com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecActivationOption[] activationOptions;
private com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecAdSizeSpec[] adSizeSpecs;
    // NOTE: Axis-generated bean code — edits below are comments only; the
    // serialized wire format depends on these exact constructors/accessors.

    public DisplayAdSpec() {
    }

    public DisplayAdSpec(
           com.google.api.ads.adwords.axis.v201502.o.DisplayType[] displayTypes,
           com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecActivationOption[] activationOptions,
           com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecAdSizeSpec[] adSizeSpecs) {
           this.displayTypes = displayTypes;
           this.activationOptions = activationOptions;
           this.adSizeSpecs = adSizeSpecs;
    }


    /**
     * Gets the displayTypes value for this DisplayAdSpec.
     *
     * @return displayTypes   * Display ad subtypes requested. Only one of each DisplayType
     *                 is allowed and multiples will result in an error being returned from
     *                 the service. Parameters specific to each display type
     *                 may be set within this set.
     */
    public com.google.api.ads.adwords.axis.v201502.o.DisplayType[] getDisplayTypes() {
        return displayTypes;
    }


    /**
     * Sets the displayTypes value for this DisplayAdSpec.
     *
     * @param displayTypes   * Display ad subtypes requested. Only one of each DisplayType
     *                 is allowed and multiples will result in an error being returned from
     *                 the service. Parameters specific to each display type
     *                 may be set within this set.
     */
    public void setDisplayTypes(com.google.api.ads.adwords.axis.v201502.o.DisplayType[] displayTypes) {
        this.displayTypes = displayTypes;
    }

    // Indexed accessors below perform no null/bounds checks (generated code):
    // calling them before the corresponding array is set throws NPE, and an
    // out-of-range index throws ArrayIndexOutOfBoundsException.

    public com.google.api.ads.adwords.axis.v201502.o.DisplayType getDisplayTypes(int i) {
        return this.displayTypes[i];
    }

    public void setDisplayTypes(int i, com.google.api.ads.adwords.axis.v201502.o.DisplayType _value) {
        this.displayTypes[i] = _value;
    }


    /**
     * Gets the activationOptions value for this DisplayAdSpec.
     *
     * @return activationOptions
     */
    public com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecActivationOption[] getActivationOptions() {
        return activationOptions;
    }


    /**
     * Sets the activationOptions value for this DisplayAdSpec.
     *
     * @param activationOptions
     */
    public void setActivationOptions(com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecActivationOption[] activationOptions) {
        this.activationOptions = activationOptions;
    }

    public com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecActivationOption getActivationOptions(int i) {
        return this.activationOptions[i];
    }

    public void setActivationOptions(int i, com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecActivationOption _value) {
        this.activationOptions[i] = _value;
    }


    /**
     * Gets the adSizeSpecs value for this DisplayAdSpec.
     *
     * @return adSizeSpecs
     */
    public com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecAdSizeSpec[] getAdSizeSpecs() {
        return adSizeSpecs;
    }


    /**
     * Sets the adSizeSpecs value for this DisplayAdSpec.
     *
     * @param adSizeSpecs
     */
    public void setAdSizeSpecs(com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecAdSizeSpec[] adSizeSpecs) {
        this.adSizeSpecs = adSizeSpecs;
    }

    public com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecAdSizeSpec getAdSizeSpecs(int i) {
        return this.adSizeSpecs[i];
    }

    public void setAdSizeSpecs(int i, com.google.api.ads.adwords.axis.v201502.o.DisplayAdSpecAdSizeSpec _value) {
        this.adSizeSpecs[i] = _value;
    }
    // Cycle guard used by the Axis-generated equals(): remembers the object
    // currently being compared so cyclic object graphs do not recurse forever.
    private java.lang.Object __equalsCalc = null;

    /**
     * Axis-generated structural equality: two DisplayAdSpec instances are equal
     * when their displayTypes, activationOptions and adSizeSpecs arrays are
     * element-wise equal. Synchronized because the __equalsCalc cycle guard is
     * instance state.
     */
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof DisplayAdSpec)) return false;
        DisplayAdSpec other = (DisplayAdSpec) obj;
        // NOTE(review): dead check — instanceof above is already false for null.
        // Kept as-is: this is generated code.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Already comparing against something: equal only if it is the same
            // object (breaks recursion on cyclic graphs).
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.displayTypes==null && other.getDisplayTypes()==null) ||
             (this.displayTypes!=null &&
              java.util.Arrays.equals(this.displayTypes, other.getDisplayTypes()))) &&
            ((this.activationOptions==null && other.getActivationOptions()==null) ||
             (this.activationOptions!=null &&
              java.util.Arrays.equals(this.activationOptions, other.getActivationOptions()))) &&
            ((this.adSizeSpecs==null && other.getAdSizeSpecs()==null) ||
             (this.adSizeSpecs!=null &&
              java.util.Arrays.equals(this.adSizeSpecs, other.getAdSizeSpecs())));
        __equalsCalc = null;
        return _equals;
    }
// Re-entrancy flag for hashCode(): true while a hash computation on this
// instance is in progress, so cyclic object graphs do not recurse forever.
// Guarded by the synchronized hashCode() method.
private boolean __hashCodeCalc = false;
/**
 * Hash code consistent with equals(): 1 plus the hash codes of every
 * non-null, non-array element of the three member arrays.
 *
 * @return the computed hash, or 0 for a re-entrant call
 */
public synchronized int hashCode() {
    if (__hashCodeCalc) {
        // Already computing this object's hash further up the stack.
        return 0;
    }
    __hashCodeCalc = true;
    int _hashCode = 1;
    _hashCode += sumOfElementHashes(getDisplayTypes());
    _hashCode += sumOfElementHashes(getActivationOptions());
    _hashCode += sumOfElementHashes(getAdSizeSpecs());
    __hashCodeCalc = false;
    return _hashCode;
}
/**
 * Sums the hash codes of all non-null, non-array elements of the given
 * array; returns 0 when the array itself is null.
 */
private static int sumOfElementHashes(java.lang.Object[] array) {
    int sum = 0;
    if (array != null) {
        for (int i = 0; i < array.length; i++) {
            java.lang.Object element = array[i];
            if (element != null && !element.getClass().isArray()) {
                sum += element.hashCode();
            }
        }
    }
    return sum;
}
// Type metadata
// Axis type-mapping descriptor: declares how DisplayAdSpec and its three
// array-valued fields map onto the AdWords o/v201502 XML schema.
private static org.apache.axis.description.TypeDesc typeDesc =
    new org.apache.axis.description.TypeDesc(DisplayAdSpec.class, true);
static {
    typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201502", "DisplayAdSpec"));
    // displayTypes: optional (minOccurs=0), repeated (maxOccurs unbounded),
    // non-nillable element of schema type DisplayType.
    org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
    elemField.setFieldName("displayTypes");
    elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201502", "displayTypes"));
    elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201502", "DisplayType"));
    elemField.setMinOccurs(0);
    elemField.setNillable(false);
    elemField.setMaxOccursUnbounded(true);
    typeDesc.addFieldDesc(elemField);
    // activationOptions: same cardinality, schema type DisplayAdSpec.ActivationOption.
    elemField = new org.apache.axis.description.ElementDesc();
    elemField.setFieldName("activationOptions");
    elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201502", "activationOptions"));
    elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201502", "DisplayAdSpec.ActivationOption"));
    elemField.setMinOccurs(0);
    elemField.setNillable(false);
    elemField.setMaxOccursUnbounded(true);
    typeDesc.addFieldDesc(elemField);
    // adSizeSpecs: same cardinality, schema type DisplayAdSpec.AdSizeSpec.
    elemField = new org.apache.axis.description.ElementDesc();
    elemField.setFieldName("adSizeSpecs");
    elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201502", "adSizeSpecs"));
    elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201502", "DisplayAdSpec.AdSizeSpec"));
    elemField.setMinOccurs(0);
    elemField.setNillable(false);
    elemField.setMaxOccursUnbounded(true);
    typeDesc.addFieldDesc(elemField);
}
/**
 * Return type metadata object
 *
 * @return the shared, statically-initialized Axis type descriptor for
 *         DisplayAdSpec
 */
public static org.apache.axis.description.TypeDesc getTypeDesc() {
    return typeDesc;
}
/**
 * Get Custom Serializer
 *
 * Factory hook called by Axis; builds a bean serializer backed by this
 * class's type descriptor. The mechType argument is part of the Axis
 * factory signature and is not used here.
 */
public static org.apache.axis.encoding.Serializer getSerializer(
    java.lang.String mechType,
    java.lang.Class _javaType,
    javax.xml.namespace.QName _xmlType) {
    return
        new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
}
/**
 * Get Custom Deserializer
 *
 * Factory hook called by Axis; builds a bean deserializer backed by this
 * class's type descriptor. The mechType argument is part of the Axis
 * factory signature and is not used here.
 */
public static org.apache.axis.encoding.Deserializer getDeserializer(
    java.lang.String mechType,
    java.lang.Class _javaType,
    javax.xml.namespace.QName _xmlType) {
    return
        new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
}
}
| apache-2.0 |
rangadi/beam | sdks/java/core/src/test/java/org/apache/beam/sdk/testing/CoderPropertiesTest.java | 9270 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.testing;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import com.google.common.base.Strings;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.beam.sdk.coders.AtomicCoder;
import org.apache.beam.sdk.coders.Coder.Context;
import org.apache.beam.sdk.coders.CoderException;
import org.apache.beam.sdk.coders.CustomCoder;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.hamcrest.CoreMatchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for {@link CoderProperties}. */
@RunWith(JUnit4.class)
public class CoderPropertiesTest {
@Rule public ExpectedException expectedException = ExpectedException.none();
@Test
public void testGoodCoderIsDeterministic() throws Exception {
CoderProperties.coderDeterministic(StringUtf8Coder.of(), "TestData", "TestData");
}
/** A coder that says it is not deterministic but actually is. */
public static class NonDeterministicCoder extends AtomicCoder<String> {
@Override
public void encode(String value, OutputStream outStream) throws CoderException, IOException {
StringUtf8Coder.of().encode(value, outStream);
}
@Override
public String decode(InputStream inStream) throws CoderException, IOException {
return StringUtf8Coder.of().decode(inStream);
}
@Override
public void verifyDeterministic() throws NonDeterministicException {
throw new NonDeterministicException(this, "Not Deterministic");
}
}
@Test
public void testNonDeterministicCoder() throws Exception {
try {
CoderProperties.coderDeterministic(new NonDeterministicCoder(), "TestData", "TestData");
} catch (AssertionError error) {
assertThat(
error.getMessage(),
CoreMatchers.containsString("Expected that the coder is deterministic"));
// success!
return;
}
fail("Expected AssertionError");
}
@Test
public void testPassingInNonEqualValuesWithDeterministicCoder() throws Exception {
AssertionError error = null;
try {
CoderProperties.coderDeterministic(StringUtf8Coder.of(), "AAA", "BBB");
} catch (AssertionError e) {
error = e;
}
assertNotNull("Expected AssertionError", error);
assertThat(
error.getMessage(), CoreMatchers.containsString("Expected that the passed in values"));
}
/** A coder that is non-deterministic because it adds a string to the value. */
private static class BadDeterminsticCoder extends AtomicCoder<String> {
public BadDeterminsticCoder() {}
@Override
public void encode(String value, OutputStream outStream) throws IOException, CoderException {
StringUtf8Coder.of().encode(value + System.nanoTime(), outStream);
}
@Override
public String decode(InputStream inStream) throws CoderException, IOException {
return StringUtf8Coder.of().decode(inStream);
}
@Override
public void verifyDeterministic() throws NonDeterministicException {}
}
@Test
public void testBadCoderIsNotDeterministic() throws Exception {
AssertionError error = null;
try {
CoderProperties.coderDeterministic(new BadDeterminsticCoder(), "TestData", "TestData");
} catch (AssertionError e) {
error = e;
}
assertNotNull("Expected AssertionError", error);
assertThat(error.getMessage(), CoreMatchers.containsString("<84>, <101>, <115>, <116>, <68>"));
}
@Test
public void testGoodCoderEncodesEqualValues() throws Exception {
CoderProperties.coderDecodeEncodeEqual(StringUtf8Coder.of(), "TestData");
}
/** This coder changes state during encoding/decoding. */
private static class StateChangingSerializingCoder extends CustomCoder<String> {
private int changedState;
public StateChangingSerializingCoder() {
changedState = 10;
}
@Override
public void encode(String value, OutputStream outStream) throws CoderException, IOException {
changedState += 1;
StringUtf8Coder.of().encode(value + Strings.repeat("A", changedState), outStream);
}
@Override
public String decode(InputStream inStream) throws CoderException, IOException {
String decodedValue = StringUtf8Coder.of().decode(inStream);
return decodedValue.substring(0, decodedValue.length() - changedState);
}
@Override
public boolean equals(Object other) {
return other instanceof StateChangingSerializingCoder
&& ((StateChangingSerializingCoder) other).changedState == this.changedState;
}
@Override
public int hashCode() {
return changedState;
}
}
@Test
public void testBadCoderThatDependsOnChangingState() throws Exception {
AssertionError error = null;
try {
CoderProperties.coderDecodeEncodeEqual(new StateChangingSerializingCoder(), "TestData");
} catch (AssertionError e) {
error = e;
}
assertNotNull("Expected AssertionError", error);
assertThat(error.getMessage(), CoreMatchers.containsString("TestData"));
}
/** This coder loses information critical to its operation. */
private static class ForgetfulSerializingCoder extends CustomCoder<String> {
private transient int lostState;
public ForgetfulSerializingCoder(int lostState) {
this.lostState = lostState;
}
@Override
public void encode(String value, OutputStream outStream) throws CoderException, IOException {
if (lostState == 0) {
throw new RuntimeException("I forgot something...");
}
StringUtf8Coder.of().encode(value, outStream);
}
@Override
public String decode(InputStream inStream) throws CoderException, IOException {
return StringUtf8Coder.of().decode(inStream);
}
@Override
public boolean equals(Object other) {
return (other instanceof ForgetfulSerializingCoder)
&& ((ForgetfulSerializingCoder) other).lostState == lostState;
}
@Override
public int hashCode() {
return lostState;
}
}
@Test
public void testBadCoderThatDependsOnStateThatIsLost() throws Exception {
expectedException.expect(RuntimeException.class);
expectedException.expectMessage("I forgot something...");
CoderProperties.coderDecodeEncodeEqual(new ForgetfulSerializingCoder(1), "TestData");
}
/** A coder which closes the underlying stream during encoding and decoding. */
public static class ClosingCoder extends AtomicCoder<String> {
@Override
public void encode(String value, OutputStream outStream) throws IOException {
outStream.close();
}
@Override
public String decode(InputStream inStream) throws IOException {
inStream.close();
return null;
}
}
@Test
public void testClosingCoderFailsWhenDecoding() throws Exception {
expectedException.expect(UnsupportedOperationException.class);
expectedException.expectMessage("Caller does not own the underlying");
CoderProperties.decode(new ClosingCoder(), Context.NESTED, new byte[0]);
}
@Test
public void testClosingCoderFailsWhenEncoding() throws Exception {
expectedException.expect(UnsupportedOperationException.class);
expectedException.expectMessage("Caller does not own the underlying");
CoderProperties.encode(new ClosingCoder(), Context.NESTED, "test-value");
}
/** Coder that consumes more bytes while decoding than required. */
public static class BadCoderThatConsumesMoreBytes extends NonDeterministicCoder {
@Override
public String decode(InputStream inStream, Context context) throws IOException {
String value = super.decode(inStream, context);
inStream.read();
return value;
}
}
@Test
public void testCoderWhichConsumesMoreBytesThanItProducesFail() throws IOException {
AssertionError error = null;
try {
BadCoderThatConsumesMoreBytes coder = new BadCoderThatConsumesMoreBytes();
byte[] bytes = CoderProperties.encode(coder, Context.NESTED, "TestData");
CoderProperties.decode(coder, Context.NESTED, bytes);
} catch (AssertionError e) {
error = e;
}
assertNotNull("Expected Assertion Error", error);
assertThat(
error.getMessage(), CoreMatchers.containsString("consumed bytes equal to encoded bytes"));
}
}
| apache-2.0 |
psiroky/uberfire | uberfire-workbench/uberfire-workbench-client/src/test/java/org/uberfire/client/workbench/widgets/dnd/CompassDropControllerUnitTestWrapper.java | 2481 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.client.workbench.widgets.dnd;
import org.uberfire.client.workbench.PanelManager;
import org.uberfire.client.workbench.panels.WorkbenchPanelPresenter;
import org.uberfire.client.workbench.panels.WorkbenchPanelView;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.workbench.model.CompassPosition;
import org.uberfire.workbench.model.PanelDefinition;
import org.uberfire.workbench.model.Position;
import static org.mockito.Mockito.*;
/**
 * Test double for {@link CompassDropController}: replaces its collaborators
 * with Mockito mocks so unit tests can script drop-position scenarios without
 * a real workbench, and stubs out event firing.
 */
public class CompassDropControllerUnitTestWrapper extends CompassDropController {

    // Mocked drag context handed back by the mocked dndManager.
    WorkbenchDragContext workDragContextMock;

    // Mocked panel definition used as the drag source in mockSamePositionDrag().
    PanelDefinition positionMock;

    /** Wires mocked collaborators into the inherited protected fields. */
    public void setupMocks( WorkbenchDragAndDropManager dndManager, PanelManager panelManager ) {
        this.dndManager = dndManager;
        this.panelManager = panelManager;

        workDragContextMock = mock( WorkbenchDragContext.class );
        when( dndManager.getWorkbenchContext() ).thenReturn( workDragContextMock );

        this.compass = mock( CompassWidget.class );
    }

    /** Overridden to do nothing so tests never fire the real event. */
    @Override
    void firePartDroppedEvent( PlaceRequest place ) {
    }

    /** Makes the compass report no drop position. */
    public void mockDropTargetPositionNone() {
        when( this.compass.getDropPosition() ).thenReturn( CompassPosition.NONE );
    }

    /** Makes the compass report the given drop position. */
    public void mockDropTargetPosition(Position position) {
        when( this.compass.getDropPosition() ).thenReturn( position);
    }

    /**
     * Sets up a drag whose source panel is the same definition as the drop
     * target's presenter definition, i.e. a drop back onto its own position.
     */
    public void mockSamePositionDrag( WorkbenchPanelView dropTarget ) {
        this.dropTarget = dropTarget;

        positionMock = mock( PanelDefinition.class );
        when( workDragContextMock.getSourcePanel() ).thenReturn( positionMock );

        when( dropTarget.getPresenter() ).thenReturn( mock( WorkbenchPanelPresenter.class ) );
        WorkbenchPanelPresenter presenter = dropTarget.getPresenter();
        when( presenter.getDefinition() ).thenReturn( positionMock );
    }
}
| apache-2.0 |
enricopal/STEM | lib/Duke/duke-core/src/test/java/no/priv/garshol/duke/databases/InMemoryBlockingDatabaseTest.java | 751 |
package no.priv.garshol.duke.databases;
import java.util.ArrayList;
import java.util.Collection;
import no.priv.garshol.duke.Configuration;
import no.priv.garshol.duke.Database;
import no.priv.garshol.duke.Record;
public class InMemoryBlockingDatabaseTest extends DatabaseTest {
/**
 * Builds the in-memory blocking database exercised by the shared
 * DatabaseTest suite, keyed on each record's NAME property.
 *
 * @param config the Duke configuration under test
 * @return a configured {@link InMemoryBlockingDatabase}
 */
public Database createDatabase(Configuration config) {
    InMemoryBlockingDatabase db = new InMemoryBlockingDatabase();
    db.setConfiguration(config);
    // Parameterize the collection: the original used the raw ArrayList type,
    // which compiled only via an unchecked conversion.
    Collection<KeyFunction> functions = new ArrayList<KeyFunction>();
    functions.add(new TestKeyFunction());
    db.setKeyFunctions(functions);
    return db;
}
/** Key function that blocks records on their NAME property. */
private static class TestKeyFunction implements KeyFunction {
    public String makeKey(Record record) {
        // Records with equal NAME values land in the same block.
        return record.getValue("NAME");
    }
}
} | apache-2.0 |
shun634501730/java_source_cn | src_en/javax/management/modelmbean/ModelMBean.java | 4920 | /*
* Copyright (c) 2000, 2006, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
/*
* @author IBM Corp.
*
* Copyright IBM Corp. 1999-2000. All rights reserved.
*/
package javax.management.modelmbean;
import javax.management.DynamicMBean;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanException;
import javax.management.PersistentMBean;
import javax.management.RuntimeOperationsException;
/**
* This interface must be implemented by the ModelMBeans. An implementation of this interface
* must be shipped with every JMX Agent.
* <P>
* Java resources wishing to be manageable instantiate the ModelMBean using the MBeanServer's
* createMBean method. The resource then sets the ModelMBeanInfo (with Descriptors) for the ModelMBean
* instance. The attributes and operations exposed via the ModelMBeanInfo for the ModelMBean are accessible
* from MBeans, connectors/adaptors like other MBeans. Through the ModelMBeanInfo Descriptors, values and methods in
* the managed application can be defined and mapped to attributes and operations of the ModelMBean.
* This mapping can be defined during development in an XML formatted file or dynamically and
* programmatically at runtime.
* <P>
* Every ModelMBean which is instantiated in the MBeanServer becomes manageable:
* its attributes and operations
* become remotely accessible through the connectors/adaptors connected to that MBeanServer.
* A Java object cannot be registered in the MBeanServer unless it is a JMX compliant MBean.
* By instantiating a ModelMBean, resources are guaranteed that the MBean is valid.
* <P>
* MBeanException and RuntimeOperationsException must be thrown on every public method. This allows
* for wrapping exceptions from distributed communications (RMI, EJB, etc.). These exceptions do
* not have to be thrown by the implementation except in the scenarios described in the specification
* and javadoc.
*
* @since 1.5
*/
public interface ModelMBean extends
    DynamicMBean,
    PersistentMBean,
    ModelMBeanNotificationBroadcaster
{

    /**
     * Initializes a ModelMBean object using the ModelMBeanInfo passed in.
     * This method makes it possible to set a customized ModelMBeanInfo on
     * the ModelMBean as long as it is not registered with the MBeanServer.
     * <br>
     * Once the ModelMBean's ModelMBeanInfo (with Descriptors) are
     * customized and set on the ModelMBean, the ModelMBean can be
     * registered with the MBeanServer.
     * <P>
     * If the ModelMBean is currently registered, this method throws
     * a {@link javax.management.RuntimeOperationsException} wrapping an
     * {@link IllegalStateException}
     *
     * @param inModelMBeanInfo The ModelMBeanInfo object to be used
     *        by the ModelMBean.
     *
     * @exception MBeanException Wraps a distributed communication
     *        Exception.
     * @exception RuntimeOperationsException
     *      <ul><li>Wraps an {@link IllegalArgumentException} if
     *          the MBeanInfo passed in parameter is null.</li>
     *          <li>Wraps an {@link IllegalStateException} if the ModelMBean
     *          is currently registered in the MBeanServer.</li>
     *      </ul>
     *
     **/
    public void setModelMBeanInfo(ModelMBeanInfo inModelMBeanInfo)
            throws MBeanException, RuntimeOperationsException;

    /**
     * Sets the instance handle of the object against which to
     * execute all methods in this ModelMBean management interface
     * (MBeanInfo and Descriptors).
     *
     * @param mr Object that is the managed resource
     * @param mr_type The type of reference for the managed resource.
     *        Can be: ObjectReference, Handle, IOR, EJBHandle, or
     *        RMIReference.
     *        If the MBeanServer cannot process the mr_type passed in,
     *        an InvalidTargetObjectTypeException will be thrown.
     *
     *
     * @exception MBeanException The initializer of the object has thrown an exception.
     * @exception RuntimeOperationsException Wraps an IllegalArgumentException:
     *        The managed resource type passed in parameter is null.
     * @exception InstanceNotFoundException The managed resource object could not be found
     * @exception InvalidTargetObjectTypeException The managed resource type cannot be processed by the
     *        ModelMBean or JMX Agent.
     */
    public void setManagedResource(Object mr, String mr_type)
            throws MBeanException, RuntimeOperationsException,
            InstanceNotFoundException, InvalidTargetObjectTypeException ;

}
| apache-2.0 |
codyhoag/blade.tools | blade.migrate.liferay70/projects/opensocial-portlet-6.2.x/docroot/WEB-INF/src/com/liferay/opensocial/admin/lar/GadgetStagedModelDataHandler.java | 3523 | /**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.opensocial.admin.lar;
import com.liferay.opensocial.model.Gadget;
import com.liferay.opensocial.service.GadgetLocalServiceUtil;
import com.liferay.portal.kernel.exception.PortalException;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.lar.BaseStagedModelDataHandler;
import com.liferay.portal.kernel.lar.ExportImportPathUtil;
import com.liferay.portal.kernel.lar.PortletDataContext;
import com.liferay.portal.kernel.xml.Element;
import com.liferay.portal.model.Group;
import com.liferay.portal.service.GroupLocalServiceUtil;
import com.liferay.portal.service.ServiceContext;
/**
* @author Michael C. Han
*/
public class GadgetStagedModelDataHandler
extends BaseStagedModelDataHandler<Gadget> {
// Staged-model class names this data handler is registered for.
public static final String[] CLASS_NAMES = {Gadget.class.getName()};

/**
 * Deletes the gadget identified by its UUID within the group's company
 * scope; does nothing if no such gadget exists.
 */
@Override
public void deleteStagedModel(
        String uuid, long groupId, String className, String extraData)
    throws PortalException, SystemException {

    // Gadgets are fetched by UUID and company, so resolve the group to its
    // company first.
    Group group = GroupLocalServiceUtil.getGroup(groupId);

    Gadget gadget = GadgetLocalServiceUtil.fetchGadgetByUuidAndCompanyId(
        uuid, group.getCompanyId());

    if (gadget != null) {
        GadgetLocalServiceUtil.deleteGadget(gadget);
    }
}
/** Returns the staged-model class names handled by this data handler. */
@Override
public String[] getClassNames() {
    return CLASS_NAMES;
}
/** Uses the gadget's name as its human-readable display name. */
@Override
public String getDisplayName(Gadget gadget) {
    return gadget.getName();
}
/**
 * Exports the gadget into the LAR data element for its model path.
 */
@Override
protected void doExportStagedModel(
        PortletDataContext portletDataContext, Gadget gadget)
    throws Exception {

    Element gadgetElement = portletDataContext.getExportDataElement(gadget);

    portletDataContext.addClassedModel(
        gadgetElement, ExportImportPathUtil.getModelPath(gadget), gadget);
}
/**
 * Imports a gadget into the target company. Under the mirror data strategy
 * an existing gadget (matched by UUID and company) is updated in place;
 * in every other case a new gadget is created.
 */
@Override
protected void doImportStagedModel(
        PortletDataContext portletDataContext, Gadget gadget)
    throws Exception {

    ServiceContext serviceContext = portletDataContext.createServiceContext(
        gadget);

    Gadget importedGadget;

    if (portletDataContext.isDataStrategyMirror()) {
        Gadget targetGadget =
            GadgetLocalServiceUtil.fetchGadgetByUuidAndCompanyId(
                gadget.getUuid(), portletDataContext.getCompanyId());

        if (targetGadget != null) {
            // Mirror an existing gadget: copy the staged values onto it.
            targetGadget.setName(gadget.getName());
            targetGadget.setUrl(gadget.getUrl());
            targetGadget.setPortletCategoryNames(
                gadget.getPortletCategoryNames());

            importedGadget = GadgetLocalServiceUtil.updateGadget(
                targetGadget);
        }
        else {
            // No local counterpart yet: create one, preserving the UUID.
            serviceContext.setUuid(gadget.getUuid());

            importedGadget = GadgetLocalServiceUtil.addGadget(
                portletDataContext.getCompanyId(), gadget.getUrl(),
                gadget.getPortletCategoryNames(), serviceContext);
        }
    }
    else {
        importedGadget = GadgetLocalServiceUtil.addGadget(
            portletDataContext.getCompanyId(), gadget.getUrl(),
            gadget.getPortletCategoryNames(), serviceContext);
    }

    portletDataContext.importClassedModel(gadget, importedGadget);
}
} | apache-2.0 |
hekonsek/fabric8 | sandbox/fabric/fabric-commands/src/main/java/io/fabric8/commands/ContainerInfo.java | 3562 | /**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.commands;
import io.fabric8.api.FabricService;
import io.fabric8.api.RuntimeProperties;
import io.fabric8.api.scr.ValidatingReference;
import io.fabric8.boot.commands.support.AbstractCommandComponent;
import io.fabric8.boot.commands.support.ContainerCompleter;
import org.apache.felix.gogo.commands.Action;
import org.apache.felix.gogo.commands.basic.AbstractCommand;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.felix.service.command.Function;
// Declarative Services component exposing the "fabric:container-info" shell
// command. The bind/unbind methods below are SCR reference callbacks.
@Component(immediate = true)
@Service({ Function.class, AbstractCommand.class })
@org.apache.felix.scr.annotations.Properties({
    @Property(name = "osgi.command.scope", value = ContainerInfo.SCOPE_VALUE),
    @Property(name = "osgi.command.function", value = ContainerInfo.FUNCTION_VALUE)
})
public class ContainerInfo extends AbstractCommandComponent {

    public static final String SCOPE_VALUE = "fabric";
    public static final String FUNCTION_VALUE = "container-info";
    public static final String DESCRIPTION = "Displays information about the containers";

    // Validated holders: assertValid()/the ValidatingReference guard access
    // until the references have been bound.
    @Reference(referenceInterface = FabricService.class)
    private final ValidatingReference<FabricService> fabricService = new ValidatingReference<FabricService>();
    @Reference(referenceInterface = RuntimeProperties.class)
    private final ValidatingReference<RuntimeProperties> runtimeProperties = new ValidatingReference<RuntimeProperties>();

    // Completers
    // dummy field: exists only so SCR injects the completer via the
    // bind/unbind callbacks below; never read directly.
    @Reference(referenceInterface = ContainerCompleter.class, bind = "bindContainerCompleter", unbind = "unbindContainerCompleter")
    private ContainerCompleter containerCompleter; // dummy field

    @Activate
    void activate() {
        activateComponent();
    }

    @Deactivate
    void deactivate() {
        deactivateComponent();
    }

    /** Creates a fresh action per invocation, wired to the bound services. */
    @Override
    public Action createNewAction() {
        assertValid();
        return new ContainerInfoAction(fabricService.get(), runtimeProperties.get());
    }

    void bindFabricService(FabricService fabricService) {
        this.fabricService.bind(fabricService);
    }

    void unbindFabricService(FabricService fabricService) {
        this.fabricService.unbind(fabricService);
    }

    void bindRuntimeProperties(RuntimeProperties runtimeProperties) {
        this.runtimeProperties.bind(runtimeProperties);
    }

    void unbindRuntimeProperties(RuntimeProperties runtimeProperties) {
        this.runtimeProperties.unbind(runtimeProperties);
    }

    void bindContainerCompleter(ContainerCompleter completer) {
        bindCompleter(completer);
    }

    void unbindContainerCompleter(ContainerCompleter completer) {
        unbindCompleter(completer);
    }

}
| apache-2.0 |
kishorvpatil/incubator-storm | external/storm-kafka-client/src/test/java/org/apache/storm/kafka/spout/KafkaSpoutReactivationTest.java | 7711 | /*
* Copyright 2017 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.kafka.spout;
import static org.apache.storm.kafka.spout.KafkaSpout.TIMER_DELAY_MS;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.clearInvocations;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.storm.kafka.KafkaUnitExtension;
import org.apache.storm.kafka.spout.config.builder.SingleTopicKafkaSpoutConfiguration;
import org.apache.storm.kafka.spout.internal.ConsumerFactory;
import org.apache.storm.kafka.spout.internal.ConsumerFactoryDefault;
import org.apache.storm.kafka.spout.subscription.TopicAssigner;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.utils.Time;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class KafkaSpoutReactivationTest {
@RegisterExtension
public KafkaUnitExtension kafkaUnitExtension = new KafkaUnitExtension();
@Captor
private ArgumentCaptor<Map<TopicPartition, OffsetAndMetadata>> commitCapture;
private final TopologyContext topologyContext = mock(TopologyContext.class);
private final Map<String, Object> conf = new HashMap<>();
private final SpoutOutputCollector collector = mock(SpoutOutputCollector.class);
private final long commitOffsetPeriodMs = 2_000;
private Consumer<String, String> consumerSpy;
private KafkaSpout<String, String> spout;
private final int maxPollRecords = 10;
public void prepareSpout(int messageCount, FirstPollOffsetStrategy firstPollOffsetStrategy) throws Exception {
KafkaSpoutConfig<String, String> spoutConfig =
SingleTopicKafkaSpoutConfiguration.setCommonSpoutConfig(KafkaSpoutConfig.builder("127.0.0.1:" + kafkaUnitExtension.getKafkaUnit().getKafkaPort(),
SingleTopicKafkaSpoutConfiguration.TOPIC))
.setFirstPollOffsetStrategy(firstPollOffsetStrategy)
.setOffsetCommitPeriodMs(commitOffsetPeriodMs)
.setProp(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxPollRecords)
.build();
ConsumerFactory<String, String> consumerFactory = new ConsumerFactoryDefault<>();
this.consumerSpy = spy(consumerFactory.createConsumer(spoutConfig.getKafkaProps()));
ConsumerFactory<String, String> consumerFactoryMock = mock(ConsumerFactory.class);
when(consumerFactoryMock.createConsumer(any()))
.thenReturn(consumerSpy);
this.spout = new KafkaSpout<>(spoutConfig, consumerFactoryMock, new TopicAssigner());
SingleTopicKafkaUnitSetupHelper.populateTopicData(kafkaUnitExtension.getKafkaUnit(), SingleTopicKafkaSpoutConfiguration.TOPIC, messageCount);
SingleTopicKafkaUnitSetupHelper.initializeSpout(spout, conf, topologyContext, collector);
}
private KafkaSpoutMessageId emitOne() {
ArgumentCaptor<KafkaSpoutMessageId> messageId = ArgumentCaptor.forClass(KafkaSpoutMessageId.class);
spout.nextTuple();
verify(collector).emit(anyString(), anyList(), messageId.capture());
clearInvocations(collector);
return messageId.getValue();
}
/**
 * Verifies that the spout resumes emitting where it left off after a
 * deactivate/activate cycle, for the given first-poll offset strategy.
 * Emits fewer than maxPollRecords tuples before deactivating, so some polled
 * records are still cached in the spout when it is reactivated.
 */
private void doReactivationTest(FirstPollOffsetStrategy firstPollOffsetStrategy) throws Exception {
    try (Time.SimulatedTime time = new Time.SimulatedTime()) {
        int messageCount = maxPollRecords * 2;
        prepareSpout(messageCount, firstPollOffsetStrategy);
        //Emit and ack some tuples, ensure that some polled tuples remain cached in the spout by emitting less than maxPollRecords
        int beforeReactivationEmits = maxPollRecords - 3;
        for (int i = 0; i < beforeReactivationEmits - 1; i++) {
            KafkaSpoutMessageId msgId = emitOne();
            spout.ack(msgId);
        }
        // One tuple is deliberately left un-acked until after deactivation (see below).
        KafkaSpoutMessageId ackAfterDeactivateMessageId = emitOne();
        //Cycle spout activation
        spout.deactivate();
        // Deactivation commits the acked tuples; only the acked ones should be committed.
        SingleTopicKafkaUnitSetupHelper.verifyAllMessagesCommitted(consumerSpy, commitCapture, beforeReactivationEmits - 1);
        clearInvocations(consumerSpy);
        //Tuples may be acked/failed after the spout deactivates, so we have to be able to handle this too
        spout.ack(ackAfterDeactivateMessageId);
        spout.activate();
        //Emit and ack the rest
        for (int i = beforeReactivationEmits; i < messageCount; i++) {
            KafkaSpoutMessageId msgId = emitOne();
            spout.ack(msgId);
        }
        //Commit
        Time.advanceTime(TIMER_DELAY_MS + commitOffsetPeriodMs);
        spout.nextTuple();
        //Verify that no more tuples are emitted and all tuples are committed
        SingleTopicKafkaUnitSetupHelper.verifyAllMessagesCommitted(consumerSpy, commitCapture, messageCount);
        clearInvocations(collector);
        spout.nextTuple();
        verify(collector, never()).emit(any(), any(), any());
    }
}
/** With UNCOMMITTED_EARLIEST the spout should pick up where it left off when reactivating. */
@Test
public void testSpoutShouldResumeWhereItLeftOffWithUncommittedEarliestStrategy() throws Exception {
    //With uncommitted earliest the spout should pick up where it left off when reactivating.
    doReactivationTest(FirstPollOffsetStrategy.UNCOMMITTED_EARLIEST);
}
/** With EARLIEST the spout should also resume, not restart at the earliest offset. */
@Test
public void testSpoutShouldResumeWhereItLeftOffWithEarliestStrategy() throws Exception {
    //With earliest, the spout should also resume where it left off, rather than restart at the earliest offset.
    doReactivationTest(FirstPollOffsetStrategy.EARLIEST);
}
/**
 * Storm will try to get metrics from the spout even while deactivated;
 * the spout must be able to handle this without throwing.
 */
@Test
public void testSpoutMustHandleGettingMetricsWhileDeactivated() throws Exception {
    //Storm will try to get metrics from the spout even while deactivated, the spout must be able to handle this
    prepareSpout(10, FirstPollOffsetStrategy.UNCOMMITTED_EARLIEST);
    for (int i = 0; i < 5; i++) {
        KafkaSpoutMessageId msgId = emitOne();
        spout.ack(msgId);
    }
    spout.deactivate();
    // Unchecked cast: getValueAndReset() is declared to return Object; assumes the
    // offset metric is a Map<String, Long> keyed by "<topic>/<metricName>" — TODO confirm.
    Map<String, Long> offsetMetric = (Map<String, Long>) spout.getKafkaOffsetMetric().getValueAndReset();
    // 10 messages in the topic, 5 acked/committed => lag of 5 expected.
    assertThat(offsetMetric.get(SingleTopicKafkaSpoutConfiguration.TOPIC + "/totalSpoutLag"), is(5L));
}
}
| apache-2.0 |
atuljangra/neo4j-apoc-procedures | src/main/java/apoc/result/RowResult.java | 234 | package apoc.result;
import java.util.Map;
/**
* @author mh
* @since 26.02.16
*/
public class RowResult {
    // The result row, exposed as a public final field (presumably read directly
    // by the procedure result mapping — verify against the framework's conventions).
    public final Map<String, Object> row;

    // Stores the given map as-is; no defensive copy is made, so callers must not
    // mutate it afterwards if immutability is expected.
    public RowResult(Map<String, Object> row) {
        this.row = row;
    }
}
| apache-2.0 |
hawky-4s-/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/application/impl/event/ProcessApplicationEventListenerDelegate.java | 5414 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.application.impl.event;
import java.util.concurrent.Callable;
import org.camunda.bpm.application.ProcessApplicationInterface;
import org.camunda.bpm.application.ProcessApplicationReference;
import org.camunda.bpm.application.ProcessApplicationUnavailableException;
import org.camunda.bpm.application.impl.ProcessApplicationLogger;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.DelegateTask;
import org.camunda.bpm.engine.delegate.ExecutionListener;
import org.camunda.bpm.engine.delegate.TaskListener;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.context.ProcessApplicationContextUtil;
import org.camunda.bpm.engine.impl.persistence.entity.ExecutionEntity;
/**
* <p>{@link ExecutionListener} and {@link TaskListener} implementation delegating to
* the {@link ExecutionListener} and {@link TaskListener} provided by a
* {@link ProcessApplicationInterface ProcessApplication}.</p>
*
* <p>If the process application does not provide an execution listener (ie.
* {@link ProcessApplicationInterface#getExecutionListener()} returns null), the
* request is silently ignored.</p>
*
* <p>If the process application does not provide a task listener (ie.
* {@link ProcessApplicationInterface#getTaskListener()} returns null), the
* request is silently ignored.</p>
*
*
* @author Daniel Meyer
* @see ProcessApplicationInterface#getExecutionListener()
* @see ProcessApplicationInterface#getTaskListener()
*
*/
public class ProcessApplicationEventListenerDelegate implements ExecutionListener, TaskListener {

  // The logger is a constant; declared final so it cannot be reassigned.
  private static final ProcessApplicationLogger LOG = ProcessApplicationLogger.PROCESS_APPLICATION_LOGGER;

  /**
   * Notifies the execution listener provided by the target process application,
   * switching into the process application context if required.
   *
   * @param execution the execution that produced the event
   * @throws Exception if the delegated listener invocation fails
   */
  public void notify(final DelegateExecution execution) throws Exception {
    Callable<Void> notification = new Callable<Void>() {
      public Void call() throws Exception {
        notifyExecutionListener(execution);
        return null;
      }
    };
    performNotification(execution, notification);
  }

  /**
   * Notifies the task listener provided by the target process application.
   * Tasks without an execution are logged and ignored; exceptions from the
   * notification are wrapped by the process application logger.
   *
   * @param delegateTask the task that produced the event
   */
  public void notify(final DelegateTask delegateTask) {
    if (delegateTask.getExecution() == null) {
      LOG.taskNotRelatedToExecution(delegateTask);
    } else {
      final DelegateExecution execution = delegateTask.getExecution();
      Callable<Void> notification = new Callable<Void>() {
        public Void call() throws Exception {
          notifyTaskListener(delegateTask);
          return null;
        }
      };
      try {
        performNotification(execution, notification);
      } catch (Exception e) {
        throw LOG.exceptionWhileNotifyingPaTaskListener(e);
      }
    }
  }

  /**
   * Runs the notification callable, switching into the target process
   * application's context when a context switch is required. If no target
   * process application can be resolved the event is silently ignored.
   */
  protected void performNotification(final DelegateExecution execution, Callable<Void> notification) throws Exception {
    final ProcessApplicationReference processApp = ProcessApplicationContextUtil.getTargetProcessApplication((ExecutionEntity) execution);
    if (processApp == null) {
      // ignore silently
      LOG.noTargetProcessApplicationForExecution(execution);
    } else {
      if (ProcessApplicationContextUtil.requiresContextSwitch(processApp)) {
        // this should not be necessary since context switch is already performed by OperationContext and / or DelegateInterceptor
        Context.executeWithinProcessApplication(notification, processApp);
      } else {
        // context switch already performed
        notification.call();
      }
    }
  }

  /**
   * Invokes the execution listener exposed by the current process application,
   * if it provides one. An unavailable process application is ignored silently.
   */
  protected void notifyExecutionListener(DelegateExecution execution) throws Exception {
    ProcessApplicationReference processApp = Context.getCurrentProcessApplication();
    try {
      ProcessApplicationInterface processApplication = processApp.getProcessApplication();
      ExecutionListener executionListener = processApplication.getExecutionListener();
      if (executionListener != null) {
        executionListener.notify(execution);
      } else {
        LOG.paDoesNotProvideExecutionListener(processApp.getName());
      }
    } catch (ProcessApplicationUnavailableException e) {
      // Process Application unavailable => ignore silently
      LOG.cannotInvokeListenerPaUnavailable(processApp.getName(), e);
    }
  }

  /**
   * Invokes the task listener exposed by the current process application,
   * if it provides one. An unavailable process application is ignored silently.
   */
  protected void notifyTaskListener(DelegateTask task) throws Exception {
    ProcessApplicationReference processApp = Context.getCurrentProcessApplication();
    try {
      ProcessApplicationInterface processApplication = processApp.getProcessApplication();
      TaskListener taskListener = processApplication.getTaskListener();
      if (taskListener != null) {
        taskListener.notify(task);
      } else {
        LOG.paDoesNotProvideTaskListener(processApp.getName());
      }
    } catch (ProcessApplicationUnavailableException e) {
      // Process Application unavailable => ignore silently
      LOG.cannotInvokeListenerPaUnavailable(processApp.getName(), e);
    }
  }
}
| apache-2.0 |
moreus/hadoop | hadoop-0.11.2/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java | 10738 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.security.DigestException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
/**
* A class that allows a map/red job to work on a sample of sequence files.
* The sample is decided by the filter class set by the job.
*
* @author hairong
*
*/
public class SequenceFileInputFilter extends SequenceFileInputFormat {

  // Configuration keys selecting the filter implementation and its parameters.
  final private static String FILTER_CLASS = "sequencefile.filter.class";
  final private static String FILTER_FREQUENCY
    = "sequencefile.filter.frequency";
  final private static String FILTER_REGEX = "sequencefile.filter.regex";

  public SequenceFileInputFilter() {
  }

  /** Create a record reader for the given split
   * @param split file split
   * @param job job configuration
   * @param reporter reporter who sends report to task tracker
   * @return RecordReader
   */
  public RecordReader getRecordReader(InputSplit split,
                                      JobConf job, Reporter reporter)
      throws IOException {
    reporter.setStatus(split.toString());
    return new FilterRecordReader(job, (FileSplit) split);
  }

  /** set the filter class
   * @param conf application configuration
   * @param filterClass filter class
   */
  public static void setFilterClass(Configuration conf, Class filterClass) {
    conf.set(FILTER_CLASS, filterClass.getName());
  }

  /**
   * filter interface
   */
  public interface Filter extends Configurable {
    /** filter function
     * Decide if a record should be filtered or not
     * @param key record key
     * @return true if a record is accepted; return false otherwise
     */
    public abstract boolean accept(Writable key);
  }

  /**
   * base class for Filters
   */
  public static abstract class FilterBase implements Filter {
    Configuration conf;

    public Configuration getConf() {
      return conf;
    }
  }

  /** Records filter by matching key to regex
   */
  public static class RegexFilter extends FilterBase {
    private Pattern p;

    /** Define the filtering regex and stores it in conf
     * @param conf where the regex is set
     * @param regex regex used as a filter
     */
    public static void setPattern(Configuration conf, String regex)
        throws PatternSyntaxException {
      try {
        // Validate the regex eagerly so a bad pattern fails at job setup time.
        Pattern.compile(regex);
      } catch (PatternSyntaxException e) {
        throw new IllegalArgumentException("Invalid pattern: " + regex);
      }
      conf.set(FILTER_REGEX, regex);
    }

    public RegexFilter() { }

    /** configure the Filter by checking the configuration
     */
    public void setConf(Configuration conf) {
      String regex = conf.get(FILTER_REGEX);
      if (regex == null)
        // Fixed error message: previously read "<key>not set" (missing space).
        throw new RuntimeException(FILTER_REGEX + " not set");
      this.p = Pattern.compile(regex);
      this.conf = conf;
    }

    /** Filtering method
     * If key matches the regex, return true; otherwise return false
     * @see org.apache.hadoop.mapred.SequenceFileInputFilter.Filter#accept(org.apache.hadoop.io.Writable)
     */
    public boolean accept(Writable key) {
      return p.matcher(key.toString()).matches();
    }
  }

  /** This class returns a percentage of records
   * The percentage is determined by a filtering frequency <i>f</i> using
   * the criteria record# % f == 0.
   * For example, if the frequency is 10, one out of 10 records is returned.
   */
  public static class PercentFilter extends FilterBase {
    private int frequency;
    private int count;

    /** set the frequency and stores it in conf
     * @param conf configuration
     * @param frequency filtering frequency
     */
    public static void setFrequency(Configuration conf, int frequency) {
      if (frequency <= 0)
        throw new IllegalArgumentException(
            "Negative " + FILTER_FREQUENCY + ": " + frequency);
      conf.setInt(FILTER_FREQUENCY, frequency);
    }

    public PercentFilter() { }

    /** configure the filter by checking the configuration
     *
     * @param conf configuration
     */
    public void setConf(Configuration conf) {
      // Use the shared FILTER_FREQUENCY constant instead of duplicating the key
      // as a string literal (same key value; keeps the key defined in one place,
      // consistent with MD5Filter.setConf).
      this.frequency = conf.getInt(FILTER_FREQUENCY, 10);
      if (this.frequency <= 0) {
        throw new RuntimeException(
            "Negative " + FILTER_FREQUENCY + ": " + this.frequency);
      }
      this.conf = conf;
    }

    /** Filtering method
     * If record# % frequency==0, return true; otherwise return false
     * @see org.apache.hadoop.mapred.SequenceFileInputFilter.Filter#accept(org.apache.hadoop.io.Writable)
     */
    public boolean accept(Writable key) {
      boolean accepted = false;
      if (count == 0)
        accepted = true;
      if (++count == frequency) {
        count = 0;
      }
      return accepted;
    }
  }

  /** This class returns a set of records by examining the MD5 digest of its
   * key against a filtering frequency <i>f</i>. The filtering criteria is
   * MD5(key) % f == 0.
   */
  public static class MD5Filter extends FilterBase {
    private int frequency;
    private static final MessageDigest DIGESTER;
    public static final int MD5_LEN = 16;
    private byte[] digest = new byte[MD5_LEN];

    static {
      try {
        DIGESTER = MessageDigest.getInstance("MD5");
      } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException(e);
      }
    }

    /** set the filtering frequency in configuration
     *
     * @param conf configuration
     * @param frequency filtering frequency
     */
    public static void setFrequency(Configuration conf, int frequency) {
      if (frequency <= 0)
        throw new IllegalArgumentException(
            "Negative " + FILTER_FREQUENCY + ": " + frequency);
      conf.setInt(FILTER_FREQUENCY, frequency);
    }

    public MD5Filter() { }

    /** configure the filter according to configuration
     *
     * @param conf configuration
     */
    public void setConf(Configuration conf) {
      this.frequency = conf.getInt(FILTER_FREQUENCY, 10);
      if (this.frequency <= 0) {
        throw new RuntimeException(
            "Negative " + FILTER_FREQUENCY + ": " + this.frequency);
      }
      this.conf = conf;
    }

    /** Filtering method
     * If MD5(key) % frequency==0, return true; otherwise return false
     * @see org.apache.hadoop.mapred.SequenceFileInputFilter.Filter#accept(org.apache.hadoop.io.Writable)
     */
    public boolean accept(Writable key) {
      try {
        long hashcode;
        if (key instanceof Text) {
          hashcode = MD5Hashcode((Text) key);
        } else if (key instanceof BytesWritable) {
          hashcode = MD5Hashcode((BytesWritable) key);
        } else {
          ByteBuffer bb;
          bb = Text.encode(key.toString());
          hashcode = MD5Hashcode(bb.array(), 0, bb.limit());
        }
        // Equivalent to hashcode % frequency == 0 (integer division truncates).
        if (hashcode / frequency * frequency == hashcode)
          return true;
      } catch (Exception e) {
        LOG.warn(e);
        throw new RuntimeException(e);
      }
      return false;
    }

    private long MD5Hashcode(Text key) throws DigestException {
      return MD5Hashcode(key.getBytes(), 0, key.getLength());
    }

    private long MD5Hashcode(BytesWritable key) throws DigestException {
      return MD5Hashcode(key.get(), 0, key.getSize());
    }

    // synchronized: DIGESTER and the digest buffer are shared mutable state.
    synchronized private long MD5Hashcode(byte[] bytes,
        int start, int length) throws DigestException {
      DIGESTER.update(bytes, 0, length);
      DIGESTER.digest(digest, 0, MD5_LEN);
      long hashcode = 0;
      // Fold the first 8 digest bytes into a big-endian long.
      for (int i = 0; i < 8; i++)
        hashcode |= ((digest[i] & 0xffL) << (8 * (7 - i)));
      return hashcode;
    }
  }

  // Record reader that skips records rejected by the configured Filter.
  private static class FilterRecordReader extends SequenceFileRecordReader {
    private Filter filter;

    public FilterRecordReader(Configuration conf, FileSplit split)
        throws IOException {
      super(conf, split);
      // instantiate filter
      filter = (Filter) ReflectionUtils.newInstance(
          conf.getClass(FILTER_CLASS, PercentFilter.class),
          conf);
    }

    public synchronized boolean next(Writable key, Writable value)
        throws IOException {
      // Advance past filtered-out keys; only fetch the value for accepted keys.
      while (next(key)) {
        if (filter.accept(key)) {
          getCurrentValue(value);
          return true;
        }
      }
      return false;
    }
  }
}
| apache-2.0 |
pascalrobert/aribaweb | src/expr/src/main/java/ariba/util/expr/NullHandler.java | 2451 | //--------------------------------------------------------------------------
// Copyright (c) 1998-2004, Drew Davidson and Luke Blanshard
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// Neither the name of the Drew Davidson nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
// OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
// AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//--------------------------------------------------------------------------
package ariba.util.expr;
import java.util.Map;
/**
* Interface for handling null results from Chains.
* Object has the opportunity to substitute an object for the
* null and continue.
* @author Luke Blanshard (blanshlu@netscape.net)
* @author Drew Davidson (drew@ognl.org)
*/
public interface NullHandler
{
    /**
     * Called when invoking {@code methodName} on {@code target} returned null.
     * The handler may return a substitute object to continue evaluation with.
     */
    public Object nullMethodResult(Map context, Object target, String methodName, Object[] args);

    /**
     * Called when a property of {@code target} evaluated to null. The property
     * can be a constant String property name or a DynamicSubscript. The handler
     * may return a substitute object to continue evaluation with.
     */
    public Object nullPropertyValue(Map context, Object target, Object property);
}
| apache-2.0 |
kostya-sh/parquet-format | src/main/java/org/apache/parquet/format/event/Consumers.java | 6605 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.format.event;
import static java.util.Collections.unmodifiableMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.protocol.TList;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.protocol.TProtocolUtil;
import org.apache.parquet.format.event.Consumers.Consumer;
import org.apache.parquet.format.event.TypedConsumer.BoolConsumer;
import org.apache.parquet.format.event.TypedConsumer.ListConsumer;
import org.apache.parquet.format.event.TypedConsumer.StructConsumer;
/**
* Entry point for reading thrift in a streaming fashion
*
* @author Julien Le Dem
*
*/
public class Consumers {

  /**
   * To consume objects coming from a DelegatingFieldConsumer
   * @author Julien Le Dem
   *
   * @param <T> the type of consumed objects
   */
  public static interface Consumer<T> {
    void consume(T t);
  }

  /**
   * Delegates reading the field to TypedConsumers.
   * There is one TypedConsumer per thrift type.
   * use {@link DelegatingFieldConsumer#onField(TFieldIdEnum, BoolConsumer)} et al. to consume specific thrift fields.
   * Instances are immutable: onField returns a new consumer.
   * @see Consumers#fieldConsumer()
   * @author Julien Le Dem
   *
   */
  public static class DelegatingFieldConsumer implements FieldConsumer {

    // Field id -> consumer for that field; wrapped unmodifiable on construction.
    private final Map<Short, TypedConsumer> contexts;
    // Fallback for field ids with no registered consumer.
    private final FieldConsumer defaultFieldEventConsumer;

    private DelegatingFieldConsumer(FieldConsumer defaultFieldEventConsumer, Map<Short, TypedConsumer> contexts) {
      this.defaultFieldEventConsumer = defaultFieldEventConsumer;
      this.contexts = unmodifiableMap(contexts);
    }

    private DelegatingFieldConsumer() {
      this(new SkippingFieldConsumer());
    }

    private DelegatingFieldConsumer(FieldConsumer defaultFieldEventConsumer) {
      this(defaultFieldEventConsumer, Collections.<Short, TypedConsumer>emptyMap());
    }

    // Returns a NEW DelegatingFieldConsumer; callers must use the returned
    // instance (this one is not modified).
    public DelegatingFieldConsumer onField(TFieldIdEnum e, TypedConsumer typedConsumer) {
      Map<Short, TypedConsumer> newContexts = new HashMap<Short, TypedConsumer>(contexts);
      newContexts.put(e.getThriftFieldId(), typedConsumer);
      return new DelegatingFieldConsumer(defaultFieldEventConsumer, newContexts);
    }

    @Override
    public void consumeField(
        TProtocol protocol, EventBasedThriftReader reader,
        short id, byte type) throws TException {
      TypedConsumer delegate = contexts.get(id);
      if (delegate != null) {
        delegate.read(protocol, reader, type);
      } else {
        defaultFieldEventConsumer.consumeField(protocol, reader, id, type);
      }
    }
  }

  /**
   * call onField on the resulting DelegatingFieldConsumer to handle individual fields
   * @return a new DelegatingFieldConsumer
   */
  public static DelegatingFieldConsumer fieldConsumer() {
    return new DelegatingFieldConsumer();
  }

  /**
   * To consume a list of elements
   * @param c the type of the list content
   * @param consumer the consumer that will receive the list
   * @return a ListConsumer that can be passed to the DelegatingFieldConsumer
   */
  public static <T extends TBase<T,? extends TFieldIdEnum>> ListConsumer listOf(Class<T> c, final Consumer<List<T>> consumer) {
    // NOTE: this local class shadows TypedConsumer.ListConsumer inside the
    // method body only; the method's return type is still the imported one.
    class ListConsumer implements Consumer<T> {
      List<T> list;
      @Override
      public void consume(T t) {
        list.add(t);
      }
    }
    final ListConsumer co = new ListConsumer();
    // Collect elements into a fresh list per consumeList call, then hand the
    // completed list to the caller's consumer.
    return new DelegatingListElementsConsumer(struct(c, co)) {
      @Override
      public void consumeList(TProtocol protocol,
          EventBasedThriftReader reader, TList tList) throws TException {
        co.list = new ArrayList<T>();
        super.consumeList(protocol, reader, tList);
        consumer.consume(co.list);
      }
    };
  }

  /**
   * To consume list elements one by one
   * @param consumer the consumer that will read the elements
   * @return a ListConsumer that can be passed to the DelegatingFieldConsumer
   */
  public static ListConsumer listElementsOf(TypedConsumer consumer) {
    return new DelegatingListElementsConsumer(consumer);
  }

  // Builds a StructConsumer that materializes structs of type c and passes
  // each one to the given consumer.
  public static <T extends TBase<T,? extends TFieldIdEnum>> StructConsumer struct(final Class<T> c, final Consumer<T> consumer) {
    return new TBaseStructConsumer<T>(c, consumer);
  }
}
// Default FieldConsumer: skips over any field it is asked to consume.
class SkippingFieldConsumer implements FieldConsumer {
  @Override
  public void consumeField(TProtocol protocol, EventBasedThriftReader reader, short id, byte type) throws TException {
    TProtocolUtil.skip(protocol, type);
  }
}
/**
 * A ListConsumer that forwards the consumption of every list element to a
 * single TypedConsumer.
 */
class DelegatingListElementsConsumer extends ListConsumer {

  // Assigned only in the constructor; declared final so it cannot be reassigned.
  private final TypedConsumer elementConsumer;

  protected DelegatingListElementsConsumer(TypedConsumer consumer) {
    this.elementConsumer = consumer;
  }

  /** Reads one list element from the protocol and hands it to the element consumer. */
  @Override
  public void consumeElement(TProtocol protocol, EventBasedThriftReader reader, byte elemType) throws TException {
    elementConsumer.read(protocol, reader, elemType);
  }
}
class TBaseStructConsumer<T extends TBase<T, ? extends TFieldIdEnum>> extends StructConsumer {
private final Class<T> c;
private Consumer<T> consumer;
public TBaseStructConsumer(Class<T> c, Consumer<T> consumer) {
this.c = c;
this.consumer = consumer;
}
@Override
public void consumeStruct(TProtocol protocol, EventBasedThriftReader reader) throws TException {
T o = newObject();
o.read(protocol);
consumer.consume(o);
}
protected T newObject() {
try {
return c.newInstance();
} catch (InstantiationException e) {
throw new RuntimeException(c.getName(), e);
} catch (IllegalAccessException e) {
throw new RuntimeException(c.getName(), e);
}
}
} | apache-2.0 |
threefish/acre | webapp/WEB-INF/src/com/google/acre/script/AnnotatedForJS.java | 995 | // Copyright 2007-2010 Google, Inc.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.acre.script;
import org.mozilla.javascript.Scriptable;
/**
* Interface for types/objects to be processed for JS compatibility.<br>
* NOTE: properties marked with JS_prop... should have corresponding getters
* Scope is necessary to create NativeObjects while populating js properties
*
* @author Yuiry
*
*/
public interface AnnotatedForJS {
    /**
     * @return the JavaScript scope used when creating native objects while
     *         populating JS properties for this object
     */
    public Scriptable getScope();
}
| apache-2.0 |
sverkera/camel | components/camel-as2/camel-as2-api/src/main/java/org/apache/camel/component/as2/api/util/AS2HeaderUtils.java | 5793 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.as2.api.util;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.List;
import org.apache.camel.component.as2.api.entity.Importance;
import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.ParserCursor;
import org.apache.http.message.TokenParser;
import org.apache.http.util.Args;
import org.apache.http.util.CharArrayBuffer;
public final class AS2HeaderUtils {

    /**
     * A parsed header parameter: an attribute name, an optional importance
     * value, and any values that follow.
     */
    public static class Parameter {

        private final String attribute;
        private final Importance importance;
        private final String[] values;

        public Parameter(String attribute, String importance, String[] values) {
            this.attribute = Args.notNull(attribute, "attribute");
            this.importance = Importance.get(importance);
            this.values = values;
        }

        public String getAttribute() {
            return attribute;
        }

        public Importance getImportance() {
            return importance;
        }

        // NOTE(review): returns the internal array without a defensive copy, so
        // callers can mutate this Parameter's state — confirm this is intended.
        public String[] getValues() {
            return values;
        }

        // Renders as: attribute[=importance][,value1,value2,...]
        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append(attribute);
            if (importance != null) {
                sb.append("=" + importance.toString());
            }
            if (values != null) {
                for (String value : values) {
                    sb.append("," + value);
                }
            }
            return sb.toString();
        }
    }

    // Delimiters used in structured AS2 header values.
    private static final char PARAM_DELIMITER = ',';
    private static final char ELEM_DELIMITER = ';';
    private static final char NAME_VALUE_DELIMITER = '=';

    private static final TokenParser TOKEN_PARSER = TokenParser.INSTANCE;

    private static final BitSet TOKEN_DELIMS = TokenParser.INIT_BITSET(NAME_VALUE_DELIMITER, PARAM_DELIMITER,
            ELEM_DELIMITER);
    private static final BitSet VALUE_DELIMS = TokenParser.INIT_BITSET(PARAM_DELIMITER, ELEM_DELIMITER);

    // Utility class: no instances.
    private AS2HeaderUtils() {
    }

    /**
     * Builds a header whose value is a ';'-separated list of elements, where
     * each element is either "name" or "name=value". Empty element arrays are
     * skipped; only the first two entries of each element array are used.
     */
    public static Header createHeader(String headerName, String[]... elements) {
        StringBuilder sb = new StringBuilder();
        boolean firstElement = true;
        for (String[] element: elements) {
            if (element.length == 0) {
                continue;
            }
            if (firstElement) {
                firstElement = false;
            } else {
                sb.append(ELEM_DELIMITER);
            }
            sb.append(element[0]);
            if (element.length > 1) {
                // char + String concatenation yields "name=value".
                sb.append(NAME_VALUE_DELIMITER + element[1]);
            }
        }
        BasicHeader header = new BasicHeader(headerName, sb.toString());
        return header;
    }

    /**
     * Parses one parameter ("attribute[=importance[,value...]]") from the
     * buffer, advancing the cursor past the parameter and its trailing
     * delimiter. Importance and values are null when absent.
     */
    public static Parameter parseParameter(final CharArrayBuffer buffer, final ParserCursor cursor) {
        Args.notNull(buffer, "Char array buffer");
        Args.notNull(cursor, "Parser cursor");
        final String name = TOKEN_PARSER.parseToken(buffer, cursor, TOKEN_DELIMS);
        if (cursor.atEnd()) {
            return new Parameter(name, null, null);
        }
        final int delim = buffer.charAt(cursor.getPos());
        // Consume the delimiter character in either case.
        cursor.updatePos(cursor.getPos() + 1);
        if (delim != NAME_VALUE_DELIMITER) {
            return new Parameter(name, null, null);
        }
        final String importance = TOKEN_PARSER.parseValue(buffer, cursor, VALUE_DELIMS);
        if (!cursor.atEnd()) {
            cursor.updatePos(cursor.getPos() + 1);
        }
        List<String> values = new ArrayList<>();
        // Collect values until end of buffer or an element delimiter (';').
        while (!cursor.atEnd()) {
            String value = TOKEN_PARSER.parseValue(buffer, cursor, VALUE_DELIMS);
            values.add(value);
            if (cursor.atEnd()) {
                break;
            }
            final int delimiter = buffer.charAt(cursor.getPos());
            if (!cursor.atEnd()) {
                cursor.updatePos(cursor.getPos() + 1);
            }
            if (delimiter == ELEM_DELIMITER) {
                break;
            }
        }
        return new Parameter(name, importance, values.toArray(new String[values.size()]));
    }

    /**
     * Scans the given headers for one named {@code headerName} and returns the
     * value of its "boundary" parameter, or null if not found.
     */
    public static String getBoundaryParameterValue(Header[] headers, String headerName) {
        Args.notNull(headers, "headers");
        Args.notNull(headerName, "headerName");
        for (Header header : headers) {
            if (header.getName().equalsIgnoreCase(headerName)) {
                for (HeaderElement headerElement : header.getElements()) {
                    for (NameValuePair nameValuePair : headerElement.getParameters()) {
                        if (nameValuePair.getName().equalsIgnoreCase("boundary")) {
                            return nameValuePair.getValue();
                        }
                    }
                }
            }
        }
        return null;
    }
}
| apache-2.0 |
hurricup/intellij-community | java/java-tests/testSrc/com/intellij/codeInspection/JavaAPIUsagesInspectionTest.java | 5146 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* User: anna
* Date: 11-Sep-2007
*/
package com.intellij.codeInspection;
import com.intellij.JavaTestUtil;
import com.intellij.codeInspection.ex.LocalInspectionToolWrapper;
import com.intellij.codeInspection.java15api.Java15APIUsageInspection;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.testFramework.IdeaTestUtil;
import com.intellij.testFramework.InspectionTestCase;
public class JavaAPIUsagesInspectionTest extends InspectionTestCase {
// Root of the inspection test data used by doTest().
@Override
protected String getTestDataPath() {
    return JavaTestUtil.getJavaTestDataPath() + "/inspection";
}
// Runs the Java 1.5 API usage inspection over the test data directory named
// after the current (camelCased) test method, under usage1.5/.
private void doTest() {
    final Java15APIUsageInspection usageInspection = new Java15APIUsageInspection();
    doTest("usage1.5/" + getTestName(true), new LocalInspectionToolWrapper(usageInspection), "java 1.5");
}
// Runs the inspection with the module language level forced to JDK 1.4.
public void testConstructor() throws Exception {
    IdeaTestUtil.withLevel(getModule(), LanguageLevel.JDK_1_4, this::doTest);
}
// Uses the project's default language level (no explicit override).
public void testIgnored() throws Exception {
    doTest();
}
// Runs the inspection with the module language level forced to JDK 1.6.
public void testAnnotation() throws Exception {
    IdeaTestUtil.withLevel(getModule(), LanguageLevel.JDK_1_6, this::doTest);
}
// Runs the inspection with the module language level forced to JDK 1.6.
public void testDefaultMethods() throws Exception {
    IdeaTestUtil.withLevel(getModule(), LanguageLevel.JDK_1_6, this::doTest);
}
public void testOverrideAnnotation() throws Exception {
IdeaTestUtil.withLevel(getModule(), LanguageLevel.JDK_1_6, this::doTest);
}
public void testRawInheritFromNewlyGenerified() throws Exception {
IdeaTestUtil.withLevel(getModule(), LanguageLevel.JDK_1_6, this::doTest);
}
//generate apiXXX.txt
/*
//todo exclude inheritors of ConcurrentMap#putIfAbsent
public void testCollectSinceApiUsages() {
final String version = "1.8";
final LinkedHashSet<String> notDocumented = new LinkedHashSet<String>();
final ContentIterator contentIterator = new ContentIterator() {
@Override
public boolean processFile(VirtualFile fileOrDir) {
final PsiFile file = PsiManager.getInstance(getProject()).findFile(fileOrDir);
if (file instanceof PsiJavaFile) {
file.accept(new JavaRecursiveElementVisitor() {
@Override
public void visitElement(PsiElement element) {
super.visitElement(element);
if (isDocumentedSinceApi(element)) {
System.out.println(Java15APIUsageInspection.getSignature((PsiMember)element));
if (element instanceof PsiMethod) {
OverridingMethodsSearch.search((PsiMethod)element, GlobalSearchScope.notScope(GlobalSearchScope.projectScope(getProject())), true).forEach(
new Processor<PsiMethod>() {
@Override
public boolean process(PsiMethod method) {
if (isDocumentedSinceApi(method.getNavigationElement())) {
return true;
}
notDocumented.add(Java15APIUsageInspection.getSignature(method));
return true;
}
});
}
}
}
public boolean isDocumentedSinceApi(PsiElement element) {
if (element instanceof PsiDocCommentOwner) {
final PsiDocComment comment = ((PsiDocCommentOwner)element).getDocComment();
if (comment != null) {
for (PsiDocTag tag : comment.getTags()) {
if (Comparing.strEqual(tag.getName(), "since")) {
final PsiDocTagValue value = tag.getValueElement();
if (value != null && value.getText().equals(version)) {
return true;
}
break;
}
}
}
}
return false;
}
});
}
return true;
}
};
final VirtualFile srcFile = JarFileSystem.getInstance().findFileByPath("c:/tools/jdk8/src.zip!/");
assert srcFile != null;
VfsUtilCore.iterateChildrenRecursively(srcFile, VirtualFileFilter.ALL, contentIterator);
notDocumented.forEach(System.out::println);
}
@Override
protected void setUpJdk() {
Module[] modules = ModuleManager.getInstance(myProject).getModules();
final Sdk sdk = JavaSdk.getInstance().createJdk("1.8", "c:/tools/jdk8/", false);
for (Module module : modules) {
ModuleRootModificationUtil.setModuleSdk(module, sdk);
}
}*/
}
| apache-2.0 |
walteryang47/ovirt-engine | frontend/webadmin/modules/userportal-gwtp/src/main/java/org/ovirt/engine/ui/userportal/widget/resources/VmTable.java | 19033 | package org.ovirt.engine.ui.userportal.widget.resources;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.ui.common.SubTableResources;
import org.ovirt.engine.ui.common.widget.HasEditorDriver;
import org.ovirt.engine.ui.common.widget.editor.EntityModelCellTable;
import org.ovirt.engine.ui.common.widget.table.ActionCellTable;
import org.ovirt.engine.ui.common.widget.table.cell.CompositeCell;
import org.ovirt.engine.ui.common.widget.table.column.AbstractColumn;
import org.ovirt.engine.ui.common.widget.table.column.AbstractImageResourceColumn;
import org.ovirt.engine.ui.common.widget.table.column.AbstractTextColumn;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.ListModel;
import org.ovirt.engine.ui.uicommonweb.models.resources.ResourcesModel;
import org.ovirt.engine.ui.userportal.ApplicationConstants;
import org.ovirt.engine.ui.userportal.ApplicationResources;
import org.ovirt.engine.ui.userportal.gin.AssetProvider;
import org.ovirt.engine.ui.userportal.uicommon.model.UserPortalDataBoundModelProvider;
import com.google.gwt.cell.client.HasCell;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.logical.shared.CloseEvent;
import com.google.gwt.event.logical.shared.CloseHandler;
import com.google.gwt.event.logical.shared.OpenEvent;
import com.google.gwt.event.logical.shared.OpenHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.safehtml.client.SafeHtmlTemplates;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Tree;
import com.google.gwt.user.client.ui.TreeItem;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.view.client.SingleSelectionModel;
/**
 * Tree widget for the user-portal "resources" view. Each top-level node is a
 * single-row table summarizing one VM (name, disk count, virtual/actual size,
 * snapshot count) and expands into one child row per disk. Implements
 * {@link HasEditorDriver} so the surrounding view can push a
 * {@link ResourcesModel} into it via {@link #edit} and read it back via
 * {@link #flush}.
 */
public class VmTable extends Composite implements HasEditorDriver<ResourcesModel> {
    // Header-less CellTable styling for VM rows and disk rows respectively.
    private static final VmRowHeaderlessTableResources vmRowResources =
            GWT.create(VmRowHeaderlessTableResources.class);
    private static final DiskRowHeaderlessTableResources diskRowResources =
            GWT.create(DiskRowHeaderlessTableResources.class);
    // Registrations of the tree open/close handlers; kept so edit() can detach
    // them before rebuilding the tree and re-attach them afterwards.
    private HandlerRegistration openHandler = null;
    private HandlerRegistration closeHandler = null;
    // Supplies the backing ResourcesModel and receives selection updates.
    private final UserPortalDataBoundModelProvider<VM, ResourcesModel> modelProvider;
    // Table used only to render the column headers; it gets no data rows (see initTable()).
    @UiField(provided = true)
    ActionCellTable<VM> tableHeader;
    @UiField
    Tree vmTree;
    private final static ApplicationResources resources = AssetProvider.getResources();
    private final static ApplicationConstants constants = AssetProvider.getConstants();
    // Single shared selection model so at most one VM row is selected across
    // all per-VM row tables (see VmSingleSelectionModel below).
    private final VmSingleSelectionModel vmSelectionModel = new VmSingleSelectionModel();

    interface WidgetUiBinder extends UiBinder<Widget, VmTable> {
        WidgetUiBinder uiBinder = GWT.create(WidgetUiBinder.class);
    }

    public VmTable(UserPortalDataBoundModelProvider<VM, ResourcesModel> modelProvider,
            SubTableResources headerResources) {
        this.modelProvider = modelProvider;
        tableHeader = new ActionCellTable<VM>(modelProvider, headerResources);
        initWidget(WidgetUiBinder.uiBinder.createAndBindUi(this));
        initTable();
    }

    /** Sets up the header-only table: column titles and widths, no data rows. */
    private void initTable() {
        tableHeader.addColumn(new EmptyColumn(), constants.virtualMachineSnapshotCreatePopupDescriptionLabel());
        tableHeader.addColumn(new EmptyColumn(), constants.disksVm());
        tableHeader.addColumn(new EmptyColumn(), constants.virtualSizeVm());
        tableHeader.addColumn(new EmptyColumn(), constants.actualSizeVm());
        tableHeader.addColumn(new EmptyColumn(), constants.snapshotsVm());
        setHeaderColumnWidths(Arrays.asList("40%", "10%", "10%", "10%", "30%")); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ //$NON-NLS-5$
        tableHeader.setRowData(new ArrayList<VM>());
    }

    /**
     * Rebuilds the tree from {@code model}: one {@link VmTreeItem} per VM with
     * a plain {@link TreeItem} child per disk, then restores the selection and
     * re-attaches the open/close handlers.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void edit(ResourcesModel model) {
        // Detach stale handlers first so rebuilding does not fire selection updates.
        if (openHandler != null) {
            openHandler.removeHandler();
        }
        if (closeHandler != null) {
            closeHandler.removeHandler();
        }
        clearTreeItems();
        vmTree.clear();
        for (VM vm : (Iterable<VM>) model.getItems()) {
            VmTreeItem vmItem = createVmItem(vm);
            if (vm.getDiskList() != null){
                for (DiskImage disk : vm.getDiskList()) {
                    TreeItem diskItem = createDiskItem(disk);
                    vmItem.addItem(diskItem);
                }
            }
            vmTree.addItem(vmItem);
        }
        updateSelection(model);
        listenOnSelectionChange();
    }

    /** Resets every {@link VmTreeItem} in the tree before it is rebuilt. */
    private void clearTreeItems() {
        int nodeCount = vmTree.getItemCount();
        for (int i = 0; i < nodeCount; i++) {
            clearTreeItems(vmTree.getItem(i));
        }
    }

    // Recursively resets VmTreeItem nodes. NOTE(review): only children that are
    // themselves VmTreeItems are descended into, and disk children are plain
    // TreeItems — confirm whether VM nodes can actually nest; otherwise the
    // recursion never descends.
    private void clearTreeItems(TreeItem node) {
        int nodeCount = node.getChildCount();
        for (int i = 0; i < nodeCount; i++) {
            TreeItem child = node.getChild(i);
            if (child instanceof VmTreeItem) {
                clearTreeItems(child);
            }
        }
        if (node instanceof VmTreeItem) {
            ((VmTreeItem) node).reset();
        }
    }

    @Override
    public ResourcesModel flush() {
        return modelProvider.getModel();
    }

    /** Applies {@code widths} to the header table's columns, in column order. */
    private void setHeaderColumnWidths(List<String> widths) {
        for (int i = 0; i < tableHeader.getColumnCount(); i++) {
            tableHeader.setColumnWidth(tableHeader.getColumn(i), widths.get(i));
        }
    }

    /** Pushes the tree's expansion state into the model whenever a node opens or closes. */
    protected void listenOnSelectionChange() {
        openHandler = vmTree.addOpenHandler(new OpenHandler<TreeItem>() {
            @Override
            public void onOpen(OpenEvent<TreeItem> event) {
                setSelectionToModel();
            }
        });
        closeHandler = vmTree.addCloseHandler(new CloseHandler<TreeItem>() {
            @Override
            public void onClose(CloseEvent<TreeItem> event) {
                setSelectionToModel();
            }
        });
    }

    // Collects the VMs of all expanded (open) top-level nodes and reports them
    // to the provider as the current selection.
    private void setSelectionToModel() {
        List<VM> selectedVMs = new ArrayList<VM>();
        for (int i = 0; i < vmTree.getItemCount(); i++) {
            if (vmTree.getItem(i) instanceof VmTreeItem) {
                if (vmTree.getItem(i).getState()) {
                    selectedVMs.add(((VmTreeItem) vmTree.getItem(i)).getVm());
                }
            }
        }
        modelProvider.setSelectedItems(selectedVMs);
    }

    /** Re-expands the tree nodes corresponding to the model's currently selected VMs. */
    @SuppressWarnings("unchecked")
    protected void updateSelection(final ResourcesModel model) {
        if (model.getSelectedItems() == null || model.getSelectedItems().size() == 0) {
            return;
        }
        for (int i = 0; i < vmTree.getItemCount(); i++) {
            if (vmTree.getItem(i) instanceof VmTreeItem) {
                ((VmTreeItem) vmTree.getItem(i)).setState(model.getSelectedItems());
            }
        }
    }

    /**
     * Builds a single-row, header-less table describing one disk (icon + alias,
     * padding, virtual size, actual size, snapshot count) wrapped in a TreeItem
     * so it can hang under its VM's node.
     */
    private TreeItem createDiskItem(DiskImage disk) {
        EntityModelCellTable<ListModel> table =
                new EntityModelCellTable<ListModel>(false,
                        diskRowResources,
                        true);
        Column<EntityModel, EntityModel> diskWithMappingColumn =
                new Column<EntityModel, EntityModel>(createDiskImageWithMappingComoisiteCell()) {
                    @Override
                    public EntityModel getValue(EntityModel object) {
                        return object;
                    }
                };
        AbstractTextColumn<EntityModel> paddingColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return asDisk(entity).getDiskAlias(); //$NON-NLS-1$
            }
        };
        AbstractTextColumn<EntityModel> virtualSizeColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return asDisk(entity).getSizeInGigabytes() + "GB"; //$NON-NLS-1$
            }
        };
        AbstractTextColumn<EntityModel> actualSizeColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return ((Double) asDisk(entity).getActualDiskWithSnapshotsSize()).intValue() + "GB"; //$NON-NLS-1$
            }
        };
        AbstractTextColumn<EntityModel> snapshotsColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return asDisk(entity).getSnapshots().size() + ""; //$NON-NLS-1$
            }
        };
        // Column widths roughly mirror the header table's 40/10/10/10/30 split.
        table.addColumn(diskWithMappingColumn, "", "39%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(paddingColumn, "", "10%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(virtualSizeColumn, "", "10%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(actualSizeColumn, "", "10%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(snapshotsColumn, "", "31%"); //$NON-NLS-1$ //$NON-NLS-2$
        EntityModel entityModel = new EntityModel();
        entityModel.setEntity(disk);
        table.setRowData(Arrays.asList(entityModel));
        return new TreeItem(table);
    }

    /**
     * Builds a single-row, header-less table summarizing one VM (icon + name,
     * disk count, total virtual/actual size, total snapshot count), hooked to
     * the shared selection model, wrapped in a {@link VmTreeItem}.
     */
    private VmTreeItem createVmItem(VM vm) {
        EntityModelCellTable<ListModel> table =
                new EntityModelCellTable<ListModel>(false,
                        vmRowResources,
                        true);
        AbstractColumn<EntityModel, EntityModel> vmImageWithNameColumn =
                new AbstractColumn<EntityModel, EntityModel>(createVmImageWithNameCompositeCell()) {
                    @Override
                    public EntityModel getValue(EntityModel object) {
                        return object;
                    }
                };
        AbstractTextColumn<EntityModel> diskSizeColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                ArrayList<DiskImage> diskImages = asVm(entity).getDiskList();
                return diskImages != null ? diskImages.size() + "" : "0"; //$NON-NLS-1$ //$NON-NLS-2$
            }
        };
        AbstractTextColumn<EntityModel> virtualSizeColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return ((Double) asVm(entity).getDiskSize()).intValue() + "GB"; //$NON-NLS-1$
            }
        };
        AbstractTextColumn<EntityModel> actualSizeColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return ((Double) asVm(entity).getActualDiskWithSnapshotsSize()).intValue() + "GB"; //$NON-NLS-1$
            }
        };
        AbstractTextColumn<EntityModel> snapshotsColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                // Sum the snapshot counts over all of the VM's disks.
                ArrayList<DiskImage> diskImages = asVm(entity).getDiskList();
                if(diskImages != null && diskImages.size() > 0){
                    int numOfSnapshot = 0;
                    for(DiskImage diskImage : diskImages){
                        numOfSnapshot += diskImage.getSnapshots().size();
                    }
                    return "" + numOfSnapshot;//$NON-NLS-1$
                }
                return "0"; //$NON-NLS-1$
            }
        };
        table.addColumn(vmImageWithNameColumn, "", "39%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(diskSizeColumn, "", "10%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(virtualSizeColumn, "", "10%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(actualSizeColumn, "", "10%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.addColumn(snapshotsColumn, "", "31%"); //$NON-NLS-1$ //$NON-NLS-2$
        table.setSelectionModel(vmSelectionModel);
        EntityModel entityModel = new EntityModel();
        entityModel.setEntity(vm);
        table.setRowData(Arrays.asList(entityModel));
        return new VmTreeItem(table, vm);
    }

    // Composite cell showing the disk icon followed by the disk alias, both
    // floated left so they render side by side. (Method name typo "Comoisite"
    // is pre-existing; kept to avoid touching call sites in a doc-only change.)
    private CompositeCell<EntityModel> createDiskImageWithMappingComoisiteCell() {
        final AbstractImageResourceColumn<EntityModel> diskImageColumn = new AbstractImageResourceColumn<EntityModel>() {
            @Override
            public ImageResource getValue(EntityModel object) {
                return resources.vmDiskIcon();
            }
        };
        final AbstractTextColumn<EntityModel> driveMappingColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return asDisk(entity).getDiskAlias(); //$NON-NLS-1$
            }
        };
        return new StyledCompositeCell<EntityModel>(
                new ArrayList<HasCell<EntityModel, ?>>(Arrays.asList(diskImageColumn, driveMappingColumn)),
                new StyledCompositeCell.StyledProvider<EntityModel>() {
                    @Override
                    public String styleStringOf(HasCell<EntityModel, ?> cell) {
                        if (cell == diskImageColumn) {
                            return "float: left"; //$NON-NLS-1$
                        } else if (cell == driveMappingColumn) {
                            return "float: left; padding-top: 4px; padding-left: 5px;"; //$NON-NLS-1$
                        }
                        return null;
                    }
                });
    }

    // Composite cell showing the VM icon followed by the VM name, floated left.
    private CompositeCell<EntityModel> createVmImageWithNameCompositeCell() {
        final AbstractImageResourceColumn<EntityModel> vmImageColumn = new AbstractImageResourceColumn<EntityModel>() {
            @Override
            public ImageResource getValue(EntityModel object) {
                return resources.vmIconWithVmTextInside();
            }
        };
        final AbstractTextColumn<EntityModel> nameColumn = new AbstractTextColumn<EntityModel>() {
            @Override
            public String getValue(EntityModel entity) {
                return asVm(entity).getName();
            }
        };
        return new StyledCompositeCell<EntityModel>(
                new ArrayList<HasCell<EntityModel, ?>>(Arrays.asList(vmImageColumn, nameColumn)),
                new StyledCompositeCell.StyledProvider<EntityModel>() {
                    @Override
                    public String styleStringOf(HasCell<EntityModel, ?> cell) {
                        if (cell == vmImageColumn) {
                            return "float: left"; //$NON-NLS-1$
                        } else if (cell == nameColumn) {
                            return "float: left; padding-top: 4px;"; //$NON-NLS-1$
                        }
                        return null;
                    }
                });
    }

    /**
     * This class guards, that only one row can be selected at a given time and the selection survives the refresh. The
     * single instance of this class should be used for more instances of EntityModelCellTable
     */
    static class VmSingleSelectionModel extends SingleSelectionModel<EntityModel> {
        // The VM currently selected, remembered by value so the selection can be
        // re-established after the row tables are rebuilt on refresh.
        private VM selectedVM = null;

        @Override
        public void setSelected(EntityModel object, boolean selected) {
            if (object.getEntity() instanceof VM) {
                if (selected) {
                    selectedVM = (VM) object.getEntity();
                    super.setSelected(object, true);
                } else {
                    selectedVM = null;
                    super.setSelected(object, false);
                }
            } else {
                super.setSelected(object, selected);
            }
        }

        @Override
        public boolean isSelected(EntityModel object) {
            if (selectedVM == null || !(object.getEntity() instanceof VM)) {
                return super.isSelected(object);
            }
            // Compare by VM id so a freshly created row model for the same VM
            // still reads as selected.
            VM vm = (VM) object.getEntity();
            if (vm.getId().equals(selectedVM.getId())) {
                return true;
            } else {
                return false;
            }
        }
    }

    private VM asVm(EntityModel entity) {
        return (VM) entity.getEntity();
    }

    private DiskImage asDisk(EntityModel entity) {
        return (DiskImage) entity.getEntity();
    }

    // CellTable resources binding the VM-row stylesheet.
    public interface VmRowHeaderlessTableResources extends CellTable.Resources {
        interface TableStyle extends CellTable.Style {
        }

        @Override
        @Source({ CellTable.Style.DEFAULT_CSS, "org/ovirt/engine/ui/userportal/css/VmListHeaderlessTable.css" })
        TableStyle cellTableStyle();
    }

    // CellTable resources binding the disk-row stylesheet.
    public interface DiskRowHeaderlessTableResources extends CellTable.Resources {
        interface TableStyle extends CellTable.Style {
        }

        @Override
        @Source({ CellTable.Style.DEFAULT_CSS, "org/ovirt/engine/ui/userportal/css/DiskListHeaderlessTable.css" })
        TableStyle cellTableStyle();
    }

    /**
     * An empty column - only for the header
     */
    private static class EmptyColumn extends AbstractTextColumn<VM> {
        @Override
        public String getValue(VM object) {
            return null;
        }
    }
}
/**
 * A {@link CompositeCell} that wraps each child cell's output in its own
 * {@code <div>} carrying an id suffix and an inline style obtained from a
 * {@link StyledProvider}.
 */
class StyledCompositeCell<T> extends CompositeCell<T> {
    interface CellTemplate extends SafeHtmlTemplates {
        @Template("<div id=\"{0}\" style=\"{1}\">")
        SafeHtml div(String id, String style);
    }

    private static final CellTemplate templates = GWT.create(CellTemplate.class);

    private final List<HasCell<T, ?>> childCells;
    private final StyledProvider<T> styleProvider;

    public StyledCompositeCell(List<HasCell<T, ?>> hasCells, StyledProvider<T> styleProvider) {
        super(hasCells);
        this.childCells = hasCells;
        this.styleProvider = styleProvider;
    }

    @Override
    public void render(Context context, T value, SafeHtmlBuilder sb, String id) {
        for (int index = 0; index < childCells.size(); index++) {
            HasCell<T, ?> child = childCells.get(index);
            // Wrapper div ids are 1-based: id_1, id_2, ...
            String childId = id + "_" + (index + 1); //$NON-NLS-1$
            String style = styleProvider.styleStringOf(child);
            sb.append(templates.div(childId, style == null ? "" : style)); //$NON-NLS-1$
            // NOTE(review): the parent id (not childId) is forwarded to the inner
            // render, matching the original behavior — confirm this is intended.
            render(context, value, sb, child, id);
            sb.appendHtmlConstant("</div>"); //$NON-NLS-1$
        }
    }

    /** Supplies the inline CSS for a given child cell, or null for no style. */
    interface StyledProvider<T> {
        String styleStringOf(HasCell<T, ?> cell);
    }
}
| apache-2.0 |
mbogoevici/spring-cloud-data | spring-cloud-dataflow-completion/src/main/java/org/springframework/cloud/dataflow/completion/TaskCompletionProvider.java | 2632 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.completion;
import java.util.ArrayList;
import java.util.List;
import org.springframework.cloud.dataflow.core.TaskDefinition;
/**
* Provides code completion on a (maybe ill-formed) task definition.
*
* @author Eric Bottard
* @author Ilayaperumal Gopinathan
* @author Andy Clement
*/
public class TaskCompletionProvider {

	private final List<RecoveryStrategy<?>> recoveryStrategies;

	private final List<TaskExpansionStrategy> expansionStrategies;

	public TaskCompletionProvider(List<RecoveryStrategy<?>> completionRecoveryStrategies,
			List<TaskExpansionStrategy> completionExpansionStrategies) {
		// Defensive copies: addCompletionRecoveryStrategy() mutates the list later.
		this.recoveryStrategies = new ArrayList<>(completionRecoveryStrategies);
		this.expansionStrategies = new ArrayList<>(completionExpansionStrategies);
	}

	/**
	 * Computes completion proposals for the (possibly ill-formed) task DSL text the
	 * user has typed so far. If the text parses as a task definition, expansion
	 * strategies propose ways to extend it; if parsing fails, recovery strategies
	 * that recognize the failure propose fixes instead.
	 *
	 * @param dslStart the DSL text typed so far
	 * @param detailLevel how detailed the proposals should be
	 * @return the collected proposals (possibly empty, never null)
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public List<CompletionProposal> complete(String dslStart, int detailLevel) {
		List<CompletionProposal> proposals = new ArrayList<>();
		TaskDefinition taskDefinition;
		try {
			taskDefinition = new TaskDefinition("__dummy", dslStart);
		}
		catch (Exception recoverable) {
			// Ill-formed input: delegate to the recovery strategies that apply.
			for (RecoveryStrategy strategy : recoveryStrategies) {
				if (strategy.shouldTrigger(dslStart, recoverable)) {
					strategy.addProposals(dslStart, recoverable, detailLevel, proposals);
				}
			}
			return proposals;
		}
		// Well-formed so far: propose expansions of what was already typed.
		for (TaskExpansionStrategy strategy : expansionStrategies) {
			strategy.addProposals(dslStart, taskDefinition, detailLevel, proposals);
		}
		return proposals;
	}

	/** Registers an additional strategy consulted when the DSL fails to parse. */
	public void addCompletionRecoveryStrategy(RecoveryStrategy<?> recoveryStrategy) {
		this.recoveryStrategies.add(recoveryStrategy);
	}

}
| apache-2.0 |
madrob/incubator-htrace | htrace-zipkin/src/main/java/com/twitter/zipkin/gen/zipkinCoreConstants.java | 1332 | /**
* Autogenerated by Thrift Compiler (0.9.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.twitter.zipkin.gen;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Thrift-generated constants (see header: DO NOT EDIT the code itself).
// Short annotation values recorded on Zipkin spans; names spell out each code.
public class zipkinCoreConstants {
  public static final String CLIENT_SEND = "cs"; // client send
  public static final String CLIENT_RECV = "cr"; // client receive
  public static final String SERVER_SEND = "ss"; // server send
  public static final String SERVER_RECV = "sr"; // server receive
  public static final String WIRE_SEND = "ws"; // wire send
  public static final String WIRE_RECV = "wr"; // wire receive
  public static final String CLIENT_ADDR = "ca"; // client address
  public static final String SERVER_ADDR = "sa"; // server address
}
| apache-2.0 |
davidzchen/bazel | src/test/java/com/google/devtools/build/lib/packages/util/Crosstool.java | 24181 | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages.util;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.packages.util.Crosstool.CcToolchainConfig;
import com.google.devtools.build.lib.rules.cpp.CppRuleClasses;
import com.google.devtools.build.lib.testutil.TestConstants;
import com.google.devtools.build.lib.util.Pair;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
/**
* A helper class to create a crosstool package containing a CROSSTOOL file, and the various rules
* needed for a mock - use this only for configured target tests, not for execution tests.
*/
public final class Crosstool {
  // Names of the mock compilation-tool filegroups generated per supported architecture.
  private static final ImmutableList<String> CROSSTOOL_BINARIES =
      ImmutableList.of("ar", "as", "compile", "dwp", "link", "objcopy", "llvm-profdata");
/**
* A class that contains relevant fields from either the CROSSTOOL file or the Starlark rule
* implementation that are needed in order to generate the BUILD file.
*/
  public static final class CcToolchainConfig {
    // Toolchain identification fields (mirror CROSSTOOL / cc_toolchain_config attributes).
    private final String cpu;
    private final String compiler;
    private final String toolchainIdentifier;
    private final String hostSystemName;
    private final String targetSystemName;
    private final String abiVersion;
    private final String abiLibcVersion;
    private final String targetLibc;
    private final String builtinSysroot;
    private final String ccTargetOs;
    // Feature/action-config names enabled for this toolchain.
    private final ImmutableList<String> features;
    private final ImmutableList<String> actionConfigs;
    // Each pattern is a (category_name, prefix, extension) triple.
    private final ImmutableList<ImmutableList<String>> artifactNamePatterns;
    // (tool name, path) pairs.
    private final ImmutableList<Pair<String, String>> toolPaths;
    private final ImmutableList<String> cxxBuiltinIncludeDirectories;
    // (make variable name, value) pairs.
    private final ImmutableList<Pair<String, String>> makeVariables;

    private CcToolchainConfig(
        String cpu,
        String compiler,
        String toolchainIdentifier,
        String hostSystemName,
        String targetSystemName,
        String abiVersion,
        String abiLibcVersion,
        String targetLibc,
        String builtinSysroot,
        String ccTargetOs,
        ImmutableList<String> features,
        ImmutableList<String> actionConfigs,
        ImmutableList<ImmutableList<String>> artifactNamePatterns,
        ImmutableList<Pair<String, String>> toolPaths,
        ImmutableList<String> cxxBuiltinIncludeDirectories,
        ImmutableList<Pair<String, String>> makeVariables) {
      this.cpu = cpu;
      this.compiler = compiler;
      this.toolchainIdentifier = toolchainIdentifier;
      this.hostSystemName = hostSystemName;
      this.targetSystemName = targetSystemName;
      this.abiVersion = abiVersion;
      this.abiLibcVersion = abiLibcVersion;
      this.targetLibc = targetLibc;
      this.features = features;
      this.actionConfigs = actionConfigs;
      this.artifactNamePatterns = artifactNamePatterns;
      this.toolPaths = toolPaths;
      this.builtinSysroot = builtinSysroot;
      this.cxxBuiltinIncludeDirectories = cxxBuiltinIncludeDirectories;
      this.makeVariables = makeVariables;
      this.ccTargetOs = ccTargetOs;
    }

    public static Builder builder() {
      return new Builder();
    }

    /** A Builder for {@link CcToolchainConfig}. */
    public static class Builder {
      // Defaults produce a plausible mock "k8" toolchain unless overridden.
      private ImmutableList<String> features = ImmutableList.of();
      private ImmutableList<String> actionConfigs = ImmutableList.of();
      private ImmutableList<ImmutableList<String>> artifactNamePatterns = ImmutableList.of();
      private ImmutableList<Pair<String, String>> toolPaths = ImmutableList.of();
      private String builtinSysroot = "/usr/grte/v1";
      private ImmutableList<String> cxxBuiltinIncludeDirectories = ImmutableList.of();
      private ImmutableList<Pair<String, String>> makeVariables = ImmutableList.of();
      private String ccTargetOs = "";
      private String cpu = "k8";
      private String compiler = "compiler";
      private String toolchainIdentifier = "mock-llvm-toolchain-k8";
      private String hostSystemName = "local";
      private String targetSystemName = "local";
      private String targetLibc = "local";
      private String abiVersion = "local";
      private String abiLibcVersion = "local";

      public Builder withCpu(String cpu) {
        this.cpu = cpu;
        return this;
      }

      public Builder withCompiler(String compiler) {
        this.compiler = compiler;
        return this;
      }

      public Builder withToolchainIdentifier(String toolchainIdentifier) {
        this.toolchainIdentifier = toolchainIdentifier;
        return this;
      }

      public Builder withHostSystemName(String hostSystemName) {
        this.hostSystemName = hostSystemName;
        return this;
      }

      public Builder withTargetSystemName(String targetSystemName) {
        this.targetSystemName = targetSystemName;
        return this;
      }

      public Builder withTargetLibc(String targetLibc) {
        this.targetLibc = targetLibc;
        return this;
      }

      public Builder withAbiVersion(String abiVersion) {
        this.abiVersion = abiVersion;
        return this;
      }

      public Builder withAbiLibcVersion(String abiLibcVersion) {
        this.abiLibcVersion = abiLibcVersion;
        return this;
      }

      public Builder withFeatures(String... features) {
        this.features = ImmutableList.copyOf(features);
        return this;
      }

      public Builder withActionConfigs(String... actionConfigs) {
        this.actionConfigs = ImmutableList.copyOf(actionConfigs);
        return this;
      }

      /**
       * Sets the artifact name patterns; each pattern must be a
       * (category_name, prefix, extension) triple.
       */
      public Builder withArtifactNamePatterns(ImmutableList<String>... artifactNamePatterns) {
        for (ImmutableList<String> pattern : artifactNamePatterns) {
          Preconditions.checkArgument(
              pattern.size() == 3,
              "Artifact name pattern should have three attributes: category_name, prefix and"
                  + " extension");
        }
        this.artifactNamePatterns = ImmutableList.copyOf(artifactNamePatterns);
        return this;
      }

      public Builder withToolPaths(Pair<String, String>... toolPaths) {
        this.toolPaths = ImmutableList.copyOf(toolPaths);
        return this;
      }

      public Builder withSysroot(String sysroot) {
        this.builtinSysroot = sysroot;
        return this;
      }

      public Builder withCcTargetOs(String ccTargetOs) {
        this.ccTargetOs = ccTargetOs;
        return this;
      }

      public Builder withCxxBuiltinIncludeDirectories(String... directories) {
        this.cxxBuiltinIncludeDirectories = ImmutableList.copyOf(directories);
        return this;
      }

      public Builder withMakeVariables(Pair<String, String>... makeVariables) {
        this.makeVariables = ImmutableList.copyOf(makeVariables);
        return this;
      }

      public CcToolchainConfig build() {
        return new CcToolchainConfig(
            cpu,
            compiler,
            toolchainIdentifier,
            hostSystemName,
            targetSystemName,
            abiVersion,
            abiLibcVersion,
            targetLibc,
            builtinSysroot,
            ccTargetOs,
            features,
            actionConfigs,
            artifactNamePatterns,
            toolPaths,
            cxxBuiltinIncludeDirectories,
            makeVariables);
      }
    }

    public String getToolchainIdentifier() {
      return toolchainIdentifier;
    }

    public String getTargetCpu() {
      return cpu;
    }

    public String getCompiler() {
      return compiler;
    }

    /** Whether this toolchain enables the static_link_cpp_runtimes feature. */
    public boolean hasStaticLinkCppRuntimesFeature() {
      return features.contains(CppRuleClasses.STATIC_LINK_CPP_RUNTIMES);
    }

    /** Creates a config whose identification fields are all mock values derived from {@code cpu}. */
    public static CcToolchainConfig getCcToolchainConfigForCpu(String cpu) {
      return new CcToolchainConfig(
          /* cpu= */ cpu,
          /* compiler= */ "mock-compiler-for-" + cpu,
          /* toolchainIdentifier= */ "mock-llvm-toolchain-for-" + cpu,
          /* hostSystemName= */ "mock-system-name-for-" + cpu,
          /* targetSystemName= */ "mock-target-system-name-for-" + cpu,
          /* abiVersion= */ "mock-abi-version-for-" + cpu,
          /* abiLibcVersion= */ "mock-abi-libc-for-" + cpu,
          /* targetLibc= */ "mock-libc-for-" + cpu,
          /* builtinSysroot= */ "",
          /* ccTargetOs= */ "",
          /* features= */ ImmutableList.of(),
          /* actionConfigs= */ ImmutableList.of(),
          /* artifactNamePatterns= */ ImmutableList.of(),
          /* toolPaths= */ ImmutableList.of(),
          /* cxxBuiltinIncludeDirectories= */ ImmutableList.of(),
          /* makeVariables= */ ImmutableList.of());
    }

    public static CcToolchainConfig getDefaultCcToolchainConfig() {
      return getCcToolchainConfigForCpu("k8");
    }

    /**
     * Renders this config as the text of a {@code cc_toolchain_config} rule instance
     * (named "<cpu>-<compiler>_config") for inclusion in a mock BUILD file.
     */
    public String getCcToolchainConfigRule() {
      // Quote/format each list element for Starlark before joining.
      ImmutableList<String> featuresList =
          features.stream()
              .map(feature -> "'" + feature + "'")
              .collect(ImmutableList.toImmutableList());
      ImmutableList<String> actionConfigsList =
          actionConfigs.stream()
              .map(config -> "'" + config + "'")
              .collect(ImmutableList.toImmutableList());
      ImmutableList<String> patternsList =
          artifactNamePatterns.stream()
              .map(
                  pattern ->
                      String.format(
                          "'%s': ['%s', '%s']", pattern.get(0), pattern.get(1), pattern.get(2)))
              .collect(ImmutableList.toImmutableList());
      ImmutableList<String> toolPathsList =
          toolPaths.stream()
              .map(toolPath -> String.format("'%s': '%s'", toolPath.first, toolPath.second))
              .collect(ImmutableList.toImmutableList());
      ImmutableList<String> directoriesList =
          cxxBuiltinIncludeDirectories.stream()
              .map(directory -> "'" + directory + "'")
              .collect(ImmutableList.toImmutableList());
      ImmutableList<String> makeVariablesList =
          makeVariables.stream()
              .map(variable -> String.format("'%s': '%s'", variable.first, variable.second))
              .collect(ImmutableList.toImmutableList());
      return Joiner.on("\n")
          .join(
              "cc_toolchain_config(",
              "    name = '" + cpu + "-" + compiler + "_config',",
              "    toolchain_identifier = '" + toolchainIdentifier + "',",
              "    cpu = '" + cpu + "',",
              "    compiler = '" + compiler + "',",
              "    host_system_name = '" + hostSystemName + "',",
              "    target_system_name = '" + targetSystemName + "',",
              "    target_libc = '" + targetLibc + "',",
              "    abi_version = '" + abiVersion + "',",
              "    abi_libc_version = '" + abiLibcVersion + "',",
              String.format("    feature_names = [%s],", Joiner.on(",\n        ").join(featuresList)),
              String.format(
                  "    action_configs = [%s],", Joiner.on(",\n        ").join(actionConfigsList)),
              String.format(
                  "    artifact_name_patterns = {%s},", Joiner.on(",\n        ").join(patternsList)),
              String.format("    tool_paths = {%s},", Joiner.on(",\n        ").join(toolPathsList)),
              "    builtin_sysroot = '" + builtinSysroot + "',",
              "    cc_target_os = '" + ccTargetOs + "',",
              String.format(
                  "    cxx_builtin_include_directories = [%s],",
                  Joiner.on(",\n        ").join(directoriesList)),
              String.format(
                  "    make_variables = {%s},", Joiner.on(",\n        ").join(makeVariablesList)),
              "    )");
    }
  }
  // Mock file-system abstraction through which all crosstool files are created/overwritten.
  private final MockToolsConfig config;
  // Package path under which BUILD, cc_toolchain_config.bzl, etc. are written.
  private final String crosstoolTop;
  // Raw contents written verbatim to cc_toolchain_config.bzl by write()/writeOSX().
  private String ccToolchainConfigFileContents;
  // Architectures used to expand per-binary filegroups in write(); must be set before write().
  private ImmutableList<String> archs;
  // When true, generated cc_toolchain targets get "supports_header_parsing = 1".
  private boolean supportsHeaderParsing;
  // Toolchain configs to emit; defaults to empty so write() without configs is safe.
  private ImmutableList<CcToolchainConfig> ccToolchainConfigList = ImmutableList.of();
  /**
   * Creates a crosstool writer rooted at {@code crosstoolTop} that emits files through the given
   * mock config.
   *
   * @param config mock file-system abstraction used to create/overwrite files
   * @param crosstoolTop package path under which the crosstool files are written
   */
  Crosstool(MockToolsConfig config, String crosstoolTop) {
    this.config = config;
    this.crosstoolTop = crosstoolTop;
  }
  /**
   * Sets the verbatim contents of the generated {@code cc_toolchain_config.bzl} file.
   *
   * @return this, for call chaining
   */
  public Crosstool setCcToolchainFile(String ccToolchainConfigFileContents) {
    this.ccToolchainConfigFileContents = ccToolchainConfigFileContents;
    return this;
  }
  /**
   * Sets the architectures used to expand the per-binary filegroups in {@link #write()}.
   *
   * @return this, for call chaining
   */
  public Crosstool setSupportedArchs(ImmutableList<String> archs) {
    this.archs = archs;
    return this;
  }
  /**
   * If true, generated {@code cc_toolchain} targets include {@code supports_header_parsing = 1}.
   *
   * @return this, for call chaining
   */
  public Crosstool setSupportsHeaderParsing(boolean supportsHeaderParsing) {
    this.supportsHeaderParsing = supportsHeaderParsing;
    return this;
  }
  /**
   * Sets the toolchain configurations for which {@code cc_toolchain}/{@code toolchain} targets
   * are generated.
   *
   * @return this, for call chaining
   */
  public Crosstool setToolchainConfigs(ImmutableList<CcToolchainConfig> ccToolchainConfigs) {
    this.ccToolchainConfigList = ccToolchainConfigs;
    return this;
  }
  /**
   * Generates and writes the mock crosstool package: a BUILD file containing per-binary
   * filegroups, optional static/dynamic runtime filegroups, one {@code toolchain} +
   * {@code cc_toolchain} pair per configured toolchain, and a {@code cc_toolchain_suite};
   * plus the {@code cc_toolchain_config.bzl} file and a trivial module map.
   */
  public void write() throws IOException {
    Set<String> runtimes = new HashSet<>();
    StringBuilder compilationTools = new StringBuilder();
    // For each crosstool binary (e.g. "ar", "link"), emit one aggregate filegroup plus one
    // per-arch filegroup pointing at :everything-multilib.
    for (String compilationTool : CROSSTOOL_BINARIES) {
      Collection<String> archTargets = new ArrayList<>();
      for (String arch : archs) {
        archTargets.add(compilationTool + '-' + arch);
      }
      compilationTools.append(
          String.format(
              "filegroup(name = '%s', srcs = ['%s'])\n",
              compilationTool,
              Joiner.on("', '").join(archTargets)));
      for (String archTarget : archTargets) {
        compilationTools.append(
            String.format("filegroup(name = '%s', srcs = [':everything-multilib'])\n", archTarget));
      }
    }
    // LinkedHashSet keeps the cpu-only suite entries in first-seen order.
    Set<String> seenCpus = new LinkedHashSet<>();
    StringBuilder compilerMap = new StringBuilder();
    for (CcToolchainConfig toolchain : ccToolchainConfigList) {
      // Runtime filegroups are only generated for toolchains with the static-link-cpp-runtimes
      // feature; otherwise the cc_toolchain omits the *_runtime_lib attributes entirely.
      String staticRuntimeLabel =
          toolchain.hasStaticLinkCppRuntimesFeature()
              ? "mock-static-runtimes-target-for-" + toolchain.getToolchainIdentifier()
              : null;
      String dynamicRuntimeLabel =
          toolchain.hasStaticLinkCppRuntimesFeature()
              ? "mock-dynamic-runtimes-target-for-" + toolchain.getToolchainIdentifier()
              : null;
      if (staticRuntimeLabel != null) {
        runtimes.add(
            Joiner.on('\n')
                .join(
                    "filegroup(",
                    " name = '" + staticRuntimeLabel + "',",
                    " licenses = ['unencumbered'],",
                    " srcs = ['libstatic-runtime-lib-source.a'])",
                    ""));
      }
      if (dynamicRuntimeLabel != null) {
        runtimes.add(
            Joiner.on('\n')
                .join(
                    "filegroup(",
                    " name = '" + dynamicRuntimeLabel + "',",
                    " licenses = ['unencumbered'],",
                    " srcs = ['libdynamic-runtime-lib-source.so'])",
                    ""));
      }
      // Generate entry to cc_toolchain_suite.toolchains: a cpu-only key for the first toolchain
      // seen for a cpu, then always a 'cpu|compiler' key.
      if (seenCpus.add(toolchain.getTargetCpu())) {
        compilerMap.append(
            String.format(
                "'%s': ':cc-compiler-%s-%s',\n",
                toolchain.getTargetCpu(), toolchain.getTargetCpu(), toolchain.getCompiler()));
      }
      compilerMap.append(
          String.format(
              "'%s|%s': ':cc-compiler-%s-%s',\n",
              toolchain.getTargetCpu(),
              toolchain.getCompiler(),
              toolchain.getTargetCpu(),
              toolchain.getCompiler()));
      // Generate cc_toolchain target
      String suffix = toolchain.getTargetCpu() + "-" + toolchain.getCompiler();
      compilationTools.append(
          Joiner.on("\n")
              .join(
                  "toolchain(",
                  " name = 'cc-toolchain-" + suffix + "',",
                  " toolchain_type = ':toolchain_type',",
                  " toolchain = ':cc-compiler-" + suffix + "',",
                  ")",
                  toolchain.getCcToolchainConfigRule(),
                  "cc_toolchain(",
                  " name = 'cc-compiler-" + suffix + "',",
                  " toolchain_identifier = '" + toolchain.getToolchainIdentifier() + "',",
                  " toolchain_config = ':" + suffix + "_config',",
                  " output_licenses = ['unencumbered'],",
                  " module_map = 'crosstool.cppmap',",
                  " ar_files = 'ar-" + toolchain.getTargetCpu() + "',",
                  " as_files = 'as-" + toolchain.getTargetCpu() + "',",
                  " compiler_files = 'compile-" + toolchain.getTargetCpu() + "',",
                  " dwp_files = 'dwp-" + toolchain.getTargetCpu() + "',",
                  " linker_files = 'link-" + toolchain.getTargetCpu() + "',",
                  " strip_files = ':every-file',",
                  " objcopy_files = 'objcopy-" + toolchain.getTargetCpu() + "',",
                  " all_files = ':every-file',",
                  " licenses = ['unencumbered'],",
                  // Conditional attributes collapse to an empty line when not applicable.
                  supportsHeaderParsing ? " supports_header_parsing = 1," : "",
                  dynamicRuntimeLabel == null
                      ? ""
                      : " dynamic_runtime_lib = '" + dynamicRuntimeLabel + "',",
                  staticRuntimeLabel == null
                      ? ""
                      : " static_runtime_lib = '" + staticRuntimeLabel + "',",
                  ")",
                  ""));
    }
    // Assemble the whole BUILD file: boilerplate, suite, aggregate filegroup, generated
    // toolchains, and runtime filegroups.
    String build =
        Joiner.on("\n")
            .join(
                "package(default_visibility=['//visibility:public'])",
                "licenses(['restricted'])",
                "",
                "load(':cc_toolchain_config.bzl', 'cc_toolchain_config')",
                "load('"
                    + TestConstants.TOOLS_REPOSITORY
                    + "//third_party/cc_rules/macros:defs.bzl', 'cc_library', 'cc_toolchain',"
                    + " 'cc_toolchain_suite')",
                "toolchain_type(name = 'toolchain_type')",
                "cc_toolchain_alias(name = 'current_cc_toolchain')",
                "alias(name = 'toolchain', actual = 'everything')",
                "filegroup(name = 'everything-multilib',",
                " srcs = glob(['mock_version/**/*'],",
                " exclude_directories = 1),",
                " output_licenses = ['unencumbered'])",
                "",
                String.format(
                    "cc_toolchain_suite(name = 'everything', toolchains = {%s})", compilerMap),
                "",
                String.format(
                    "filegroup(name = 'every-file', srcs = ['%s'])",
                    Joiner.on("', '").join(CROSSTOOL_BINARIES)),
                "",
                compilationTools.toString(),
                Joiner.on("\n").join(runtimes),
                "",
                "filegroup(",
                " name = 'interface_library_builder',",
                " srcs = ['build_interface_so'],",
                ")",
                // We add an empty :malloc target in case we need it.
                "cc_library(name = 'malloc')");
    // Materialize the files: mock tool binaries, the BUILD file, the starlark config, and an
    // (intentionally trivial) module map referenced by the cc_toolchain targets above.
    config.create(crosstoolTop + "/mock_version/x86/bin/gcc");
    config.create(crosstoolTop + "/mock_version/x86/bin/ld");
    config.overwrite(crosstoolTop + "/BUILD", build);
    config.overwrite(crosstoolTop + "/cc_toolchain_config.bzl", ccToolchainConfigFileContents);
    config.create(crosstoolTop + "/crosstool.cppmap", "module crosstool {}");
  }
  /**
   * Writes the OSX variant of the mock crosstool: a BUILD file under
   * {@code MockObjcSupport.DEFAULT_OSX_CROSSTOOL_DIR} with an {@code apple_cc_toolchain} +
   * {@code toolchain} pair per configured toolchain, plus the shared
   * {@code cc_toolchain_config.bzl} under {@code crosstoolTop}.
   */
  public void writeOSX() throws IOException {
    // Create special lines specifying the compiler map entry for
    // each toolchain.
    StringBuilder compilerMap =
        new StringBuilder()
            .append("'k8': ':cc-compiler-darwin_x86_64',\n")
            .append("'aarch64': ':cc-compiler-darwin_x86_64',\n")
            .append("'darwin': ':cc-compiler-darwin_x86_64',\n");
    // As in write(): one cpu-only entry for the first toolchain of each cpu, then always a
    // 'cpu|compiler' entry.
    Set<String> seenCpus = new LinkedHashSet<>();
    for (CcToolchainConfig toolchain : ccToolchainConfigList) {
      if (seenCpus.add(toolchain.getTargetCpu())) {
        compilerMap.append(
            String.format(
                "'%s': ':cc-compiler-%s',\n", toolchain.getTargetCpu(), toolchain.getTargetCpu()));
      }
      compilerMap.append(
          String.format(
              "'%s|%s': ':cc-compiler-%s',\n",
              toolchain.getTargetCpu(), toolchain.getCompiler(), toolchain.getTargetCpu()));
    }
    // Create the test BUILD file.
    ImmutableList.Builder<String> crosstoolBuild =
        ImmutableList.<String>builder()
            .add(
                "package(default_visibility=['//visibility:public'])",
                "load(':cc_toolchain_config.bzl', 'cc_toolchain_config')",
                "load('"
                    + TestConstants.TOOLS_REPOSITORY
                    + "//third_party/cc_rules/macros:defs.bzl', 'cc_library',"
                    + " 'cc_toolchain_suite')",
                "exports_files(glob(['**']))",
                "cc_toolchain_suite(",
                " name = 'crosstool',",
                " toolchains = { " + compilerMap + " },",
                ")",
                "",
                "cc_library(",
                " name = 'custom_malloc',",
                ")",
                "",
                "filegroup(",
                " name = 'empty',",
                " srcs = [],",
                ")",
                "",
                "filegroup(",
                " name = 'link',",
                " srcs = [",
                " 'ar',",
                " 'libempty.a',",
                String.format(" '%s//tools/objc:libtool'", TestConstants.TOOLS_REPOSITORY),
                " ],",
                ")");
    for (CcToolchainConfig toolchainConfig : ccToolchainConfigList) {
      crosstoolBuild.add(
          "apple_cc_toolchain(",
          " name = 'cc-compiler-" + toolchainConfig.getTargetCpu() + "',",
          " toolchain_identifier = '" + toolchainConfig.getTargetCpu() + "',",
          " toolchain_config = ':"
              + toolchainConfig.getTargetCpu()
              + "-"
              + toolchainConfig.getCompiler()
              + "_config',",
          " all_files = ':empty',",
          " ar_files = ':link',",
          " as_files = ':empty',",
          " compiler_files = ':empty',",
          " dwp_files = ':empty',",
          " linker_files = ':link',",
          " objcopy_files = ':empty',",
          " strip_files = ':empty',",
          " supports_param_files = 0,",
          supportsHeaderParsing ? " supports_header_parsing = 1," : "",
          ")",
          "toolchain(name = 'cc-toolchain-" + toolchainConfig.getTargetCpu() + "',",
          " exec_compatible_with = [],",
          " target_compatible_with = [],",
          " toolchain = ':cc-compiler-" + toolchainConfig.getTargetCpu() + "',",
          " toolchain_type = '" + TestConstants.TOOLS_REPOSITORY + "//tools/cpp:toolchain_type'",
          ")");
      crosstoolBuild.add(toolchainConfig.getCcToolchainConfigRule());
      // Add the newly-created toolchain to the WORKSPACE.
      // NOTE(review): this append runs once per toolchain but registers the same ':all' pattern
      // each time — looks redundant after the first iteration; confirm whether duplicate
      // register_toolchains lines are harmless here.
      config.append(
          "WORKSPACE",
          "register_toolchains('//" + MockObjcSupport.DEFAULT_OSX_CROSSTOOL_DIR + ":all')");
    }
    config.overwrite(
        MockObjcSupport.DEFAULT_OSX_CROSSTOOL_DIR + "/BUILD",
        Joiner.on("\n").join(crosstoolBuild.build()));
    config.overwrite(crosstoolTop + "/cc_toolchain_config.bzl", ccToolchainConfigFileContents);
  }
}
| apache-2.0 |
GerritCodeReview/buck | src/com/facebook/buck/java/Classpaths.java | 2041 | /*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.java;
import com.facebook.buck.rules.BuildRule;
import com.google.common.collect.ImmutableSetMultimap;
import java.util.Set;
/** Static helpers for computing Java classpaths from build-rule dependency sets. */
public class Classpaths {

  /** Utility class: not instantiable. */
  private Classpaths() {
    // Utility class
  }

  /**
   * Collects the transitive classpath entries contributed by each {@link JavaLibraryRule} that
   * appears directly in {@code deps}.
   * <p>
   * Only direct dependencies that are themselves Java libraries contribute anything: a non-Java
   * rule (for example a genrule) in {@code deps} is skipped entirely, even if rules it depends on
   * are Java libraries. Those transitive Java libraries only reach the result if they are also
   * reachable through an unbroken chain of JavaLibraryRules starting at some element of
   * {@code deps}.
   *
   * @param deps the direct dependencies to inspect
   * @return a multimap from each contributing JavaLibraryRule to its classpath entries
   */
  public static ImmutableSetMultimap<JavaLibraryRule, String> getClasspathEntries(Set<BuildRule> deps) {
    ImmutableSetMultimap.Builder<JavaLibraryRule, String> result =
        ImmutableSetMultimap.builder();
    for (BuildRule candidate : deps) {
      if (!(candidate instanceof JavaLibraryRule)) {
        continue; // Non-Java rules contribute no .class files to our callers.
      }
      result.putAll(((JavaLibraryRule) candidate).getTransitiveClasspathEntries());
    }
    return result.build();
  }
}
| apache-2.0 |
dotunolafunmiloye/hadoop-common | src/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java | 3250 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.retry;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.StringUtils;
/**
 * A dynamic-proxy {@link InvocationHandler} that delegates every call to a wrapped
 * implementation object and retries failed invocations according to a {@link RetryPolicy}.
 * The policy may be chosen per method name; methods without an entry use the default policy.
 */
class RetryInvocationHandler implements InvocationHandler {
  public static final Log LOG = LogFactory.getLog(RetryInvocationHandler.class);

  /** Target object whose methods are actually invoked. */
  private Object implementation;
  /** Policy applied to methods that have no entry in {@link #methodNameToPolicyMap}. */
  private RetryPolicy defaultPolicy;
  /** Per-method-name retry-policy overrides. */
  private Map<String,RetryPolicy> methodNameToPolicyMap;

  /**
   * Wraps {@code implementation}, retrying every method with the single given policy.
   */
  public RetryInvocationHandler(Object implementation, RetryPolicy retryPolicy) {
    this.implementation = implementation;
    this.defaultPolicy = retryPolicy;
    this.methodNameToPolicyMap = Collections.emptyMap();
  }

  /**
   * Wraps {@code implementation} with per-method policies; methods absent from the map are
   * attempted exactly once.
   */
  public RetryInvocationHandler(Object implementation, Map<String, RetryPolicy> methodNameToPolicyMap) {
    this.implementation = implementation;
    this.defaultPolicy = RetryPolicies.TRY_ONCE_THEN_FAIL;
    this.methodNameToPolicyMap = methodNameToPolicyMap;
  }

  /**
   * Invokes {@code method} on the wrapped object, retrying while the applicable policy allows.
   * When the policy gives up, the exception is logged and rethrown — except for void methods,
   * which return null instead so the failure is reported only through the log.
   */
  public Object invoke(Object proxy, Method method, Object[] args)
    throws Throwable {
    RetryPolicy policy = methodNameToPolicyMap.get(method.getName());
    if (policy == null) {
      policy = defaultPolicy;
    }
    // 'failures' counts completed failed attempts; the policy sees the count *before* the
    // current failure is added, matching shouldRetry(e, retries++) semantics.
    for (int failures = 0; ; failures++) {
      try {
        return invokeMethod(method, args);
      } catch (Exception e) {
        if (policy.shouldRetry(e, failures)) {
          if(LOG.isDebugEnabled()) {
            LOG.debug("Exception while invoking " + method.getName()
                + " of " + implementation.getClass() + ". Retrying."
                + StringUtils.stringifyException(e));
          }
          continue;
        }
        LOG.info("Exception while invoking " + method.getName()
            + " of " + implementation.getClass() + ". Not retrying."
            + StringUtils.stringifyException(e));
        if (method.getReturnType().equals(Void.TYPE)) {
          return null; // non-void methods can't fail without an exception
        }
        throw e;
      }
    }
  }

  /**
   * Reflectively invokes the method on the wrapped object, forcing accessibility if needed and
   * unwrapping {@link InvocationTargetException} so callers see the original cause.
   */
  private Object invokeMethod(Method method, Object[] args) throws Throwable {
    try {
      if (!method.isAccessible()) {
        method.setAccessible(true);
      }
      return method.invoke(implementation, args);
    } catch (InvocationTargetException e) {
      throw e.getCause();
    }
  }
}
| apache-2.0 |
ravikumaran2015/ravikumaran201504 | core/api/src/main/java/org/onosproject/net/resource/DefaultLinkResourceAllocations.java | 2858 | /*
* Copyright 2014 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.resource;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.onosproject.net.Link;
import org.onosproject.net.intent.IntentId;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* Implementation of {@link LinkResourceAllocations}.
*/
/**
 * Implementation of {@link LinkResourceAllocations}: an immutable pairing of the original
 * {@link LinkResourceRequest} with the per-link resources actually allocated for it.
 */
public class DefaultLinkResourceAllocations implements LinkResourceAllocations {
    private final LinkResourceRequest request;
    // TODO: probably should be using LinkKey instead
    private final Map<Link, Set<ResourceAllocation>> allocations;

    /**
     * Creates a new link resource allocations.
     *
     * @param request requested resources
     * @param allocations allocated resources
     */
    public DefaultLinkResourceAllocations(LinkResourceRequest request,
            Map<Link, Set<ResourceAllocation>> allocations) {
        this.request = checkNotNull(request);
        // Defensively deep-copy into immutable collections so later mutation of the caller's
        // map or sets cannot leak into this instance.
        ImmutableMap.Builder<Link, Set<ResourceAllocation>> copy = ImmutableMap.builder();
        for (Entry<Link, Set<ResourceAllocation>> entry : allocations.entrySet()) {
            copy.put(entry.getKey(), ImmutableSet.copyOf(entry.getValue()));
        }
        this.allocations = copy.build();
    }

    @Override
    public IntentId intendId() {
        return request.intendId();
    }

    @Override
    public Collection<Link> links() {
        return request.links();
    }

    @Override
    public Set<ResourceRequest> resources() {
        return request.resources();
    }

    @Override
    public ResourceType type() {
        // NOTE(review): deliberately returns null in the original implementation — confirm
        // callers tolerate a null resource type before changing this.
        return null;
    }

    @Override
    public Set<ResourceAllocation> getResourceAllocation(Link link) {
        // Unknown links yield an empty set rather than null.
        Set<ResourceAllocation> allocated = allocations.get(link);
        return allocated == null ? Collections.emptySet() : allocated;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("allocations", allocations)
                .toString();
    }
}
| apache-2.0 |
trimnguye/JavaChatServer | src/plugins/ofmeet/src/java/org/jitsi/protocol/xmpp/XmppConnection.java | 783 | /*
* Jicofo, the Jitsi Conference Focus.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package org.jitsi.protocol.xmpp;
import org.jivesoftware.smack.packet.*;
/**
* The interface for Smack XMPP connection.
*
* @author Pawel Domas
*/
public interface XmppConnection
{
    /**
     * Sends given XMPP packet through this connection.
     *
     * @param packet the packet to be sent.
     */
    void sendPacket(Packet packet);

    /**
     * Sends the packet and waits for the reply in blocking mode.
     * The calling thread is blocked until a response arrives or the time limit
     * expires.
     *
     * @param packet the packet to be sent.
     *
     * @return the response packet received within the time limit
     *         or <tt>null</tt> if no response was collected.
     */
    Packet sendPacketAndGetReply(Packet packet);
}
| apache-2.0 |
plusplusjiajia/directory-kerby | kerby-kerb/kerb-server/src/main/java/org/apache/kerby/kerberos/kerb/server/impl/DefaultKdcHandler.java | 2751 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.kerby.kerberos.kerb.server.impl;
import org.apache.kerby.kerberos.kerb.server.KdcContext;
import org.apache.kerby.kerberos.kerb.server.KdcHandler;
import org.apache.kerby.kerberos.kerb.transport.KrbTcpTransport;
import org.apache.kerby.kerberos.kerb.transport.KrbTransport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetAddress;
import java.nio.ByteBuffer;
public class DefaultKdcHandler extends KdcHandler implements Runnable {
    // Shared SLF4J logger for all handler instances.
    private static Logger logger = LoggerFactory.getLogger(DefaultKdcHandler.class);
    // Transport served by this handler; released when the connection ends or errors.
    private final KrbTransport transport;
    /**
     * Creates a handler that serves KDC requests arriving on the given transport.
     *
     * @param kdcContext shared KDC context passed to the base handler
     * @param transport the client connection to read requests from and write replies to
     */
    public DefaultKdcHandler(KdcContext kdcContext, KrbTransport transport) {
        super(kdcContext);
        this.transport = transport;
    }
@Override
public void run() {
while (true) {
try {
ByteBuffer message = transport.receiveMessage();
if (message == null) {
logger.debug("No valid request recved. Disconnect actively");
transport.release();
break;
}
handleMessage(message);
} catch (IOException e) {
transport.release();
logger.debug("Transport or decoding error occurred, "
+ "disconnecting abnormally", e);
break;
}
}
}
protected void handleMessage(ByteBuffer message) {
InetAddress clientAddress = transport.getRemoteAddress();
boolean isTcp = transport instanceof KrbTcpTransport;
try {
ByteBuffer krbResponse = handleMessage(message, isTcp, clientAddress);
transport.sendMessage(krbResponse);
} catch (Exception e) {
transport.release();
logger.error("Error occured while processing request:", e);
}
}
} | apache-2.0 |
baishuo/hbase-1.0.0-cdh5.4.7_baishuo | hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotMetadata.java | 12842 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
* Test class to verify that metadata is consistent before and after a snapshot attempt.
*/
@Category(MediumTests.class)
public class TestSnapshotMetadata {
  private static final Log LOG = LogFactory.getLog(TestSnapshotMetadata.class);
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  // Number of region servers in the mini cluster.
  private static final int NUM_RS = 2;
  private static final String STRING_TABLE_NAME = "TestSnapshotMetadata";

  // Column families, each configured with one non-default property so that snapshot
  // restore/clone can be checked to preserve that property.
  private static final String MAX_VERSIONS_FAM_STR = "fam_max_columns";
  private static final byte[] MAX_VERSIONS_FAM = Bytes.toBytes(MAX_VERSIONS_FAM_STR);
  private static final String COMPRESSED_FAM_STR = "fam_compressed";
  private static final byte[] COMPRESSED_FAM = Bytes.toBytes(COMPRESSED_FAM_STR);
  private static final String BLOCKSIZE_FAM_STR = "fam_blocksize";
  private static final byte[] BLOCKSIZE_FAM = Bytes.toBytes(BLOCKSIZE_FAM_STR);
  private static final String BLOOMFILTER_FAM_STR = "fam_bloomfilter";
  private static final byte[] BLOOMFILTER_FAM = Bytes.toBytes(BLOOMFILTER_FAM_STR);
  // Custom table-descriptor value/configuration keys, also expected to survive snapshots.
  private static final String TEST_CONF_CUSTOM_VALUE = "TestCustomConf";
  private static final String TEST_CUSTOM_VALUE = "TestCustomValue";
  private static final byte[][] families = {
      MAX_VERSIONS_FAM, BLOOMFILTER_FAM, COMPRESSED_FAM, BLOCKSIZE_FAM
  };
  // Non-default per-family property values used when creating the source table.
  private static final DataBlockEncoding DATA_BLOCK_ENCODING_TYPE = DataBlockEncoding.FAST_DIFF;
  private static final BloomType BLOOM_TYPE = BloomType.ROW;
  private static final int BLOCK_SIZE = 98;
  private static final int MAX_VERSIONS = 8;

  private Admin admin;
  // Snapshot of the source table's descriptor/description taken right after creation,
  // used as the expected value in the assertions below.
  private String originalTableDescription;
  private HTableDescriptor originalTableDescriptor;
  TableName originalTableName;

  private static FileSystem fs;
  private static Path rootDir;

  // Boots the mini cluster once for the whole class and captures its FS and root dir.
  @BeforeClass
  public static void setupCluster() throws Exception {
    setupConf(UTIL.getConfiguration());
    UTIL.startMiniCluster(NUM_RS);
    fs = UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getFileSystem();
    rootDir = UTIL.getHBaseCluster().getMaster().getMasterFileSystem().getRootDir();
  }

  @AfterClass
  public static void cleanupTest() throws Exception {
    try {
      UTIL.shutdownMiniCluster();
    } catch (Exception e) {
      // Best-effort shutdown: a teardown failure should not fail the test run.
      LOG.warn("failure shutting down cluster", e);
    }
  }

  // Cluster tuning used by all tests in this class.
  private static void setupConf(Configuration conf) {
    // enable snapshot support
    conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
    // disable the ui
    conf.setInt("hbase.regionsever.info.port", -1);
    // change the flush size to a small amount, regulating number of store files
    conf.setInt("hbase.hregion.memstore.flush.size", 25000);
    // so make sure we get a compaction when doing a load, but keep around
    // some files in the store
    conf.setInt("hbase.hstore.compaction.min", 10);
    conf.setInt("hbase.hstore.compactionThreshold", 10);
    // block writes if we get to 12 store files
    conf.setInt("hbase.hstore.blockingStoreFiles", 12);
    conf.setInt("hbase.regionserver.msginterval", 100);
    conf.setBoolean("hbase.master.enabletable.roundrobin", true);
    // Avoid potentially aggressive splitting which would cause snapshot to fail
    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
        ConstantSizeRegionSplitPolicy.class.getName());
  }

  // Each test gets a freshly created source table with non-default metadata.
  @Before
  public void setup() throws Exception {
    admin = UTIL.getHBaseAdmin();
    createTableWithNonDefaultProperties();
  }

  @After
  public void tearDown() throws Exception {
    SnapshotTestingUtils.deleteAllSnapshots(admin);
  }

  /*
   * Create a table that has non-default properties so we can see if they hold
   */
  private void createTableWithNonDefaultProperties() throws Exception {
    final long startTime = System.currentTimeMillis();
    final String sourceTableNameAsString = STRING_TABLE_NAME + startTime;
    originalTableName = TableName.valueOf(sourceTableNameAsString);

    // Build column families, each carrying one non-default property.
    // NOTE(review): an earlier comment here claimed replication is enabled, but no
    // replication scope is actually set on any family.
    HColumnDescriptor maxVersionsColumn = new HColumnDescriptor(MAX_VERSIONS_FAM);
    HColumnDescriptor bloomFilterColumn = new HColumnDescriptor(BLOOMFILTER_FAM);
    HColumnDescriptor dataBlockColumn = new HColumnDescriptor(COMPRESSED_FAM);
    HColumnDescriptor blockSizeColumn = new HColumnDescriptor(BLOCKSIZE_FAM);

    maxVersionsColumn.setMaxVersions(MAX_VERSIONS);
    bloomFilterColumn.setBloomFilterType(BLOOM_TYPE);
    dataBlockColumn.setDataBlockEncoding(DATA_BLOCK_ENCODING_TYPE);
    blockSizeColumn.setBlocksize(BLOCK_SIZE);

    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(sourceTableNameAsString));
    htd.addFamily(maxVersionsColumn);
    htd.addFamily(bloomFilterColumn);
    htd.addFamily(dataBlockColumn);
    htd.addFamily(blockSizeColumn);
    // Custom descriptor value and configuration entry, both expected to round-trip
    // through snapshot clone/restore.
    htd.setValue(TEST_CUSTOM_VALUE, TEST_CUSTOM_VALUE);
    htd.setConfiguration(TEST_CONF_CUSTOM_VALUE, TEST_CONF_CUSTOM_VALUE);
    assertTrue(htd.getConfiguration().size() > 0);

    admin.createTable(htd);
    Table original = new HTable(UTIL.getConfiguration(), originalTableName);
    // NOTE(review): duplicate assignment — originalTableName was already set to the same
    // value above; this line appears redundant.
    originalTableName = TableName.valueOf(sourceTableNameAsString);
    // Capture the as-created descriptor for later comparison.
    originalTableDescriptor = admin.getTableDescriptor(originalTableName);
    originalTableDescription = originalTableDescriptor.toStringCustomizedValues();

    original.close();
  }

  /**
   * Verify that the describe for a cloned table matches the describe from the original.
   */
  @Test (timeout=300000)
  public void testDescribeMatchesAfterClone() throws Exception {
    // Clone the original table
    final String clonedTableNameAsString = "clone" + originalTableName;
    final TableName clonedTableName = TableName.valueOf(clonedTableNameAsString);
    final String snapshotNameAsString = "snapshot" + originalTableName
        + System.currentTimeMillis();
    final byte[] snapshotName = Bytes.toBytes(snapshotNameAsString);

    // restore the snapshot into a cloned table and examine the output
    List<byte[]> familiesList = new ArrayList<byte[]>();
    Collections.addAll(familiesList, families);

    // Create a snapshot in which all families are empty
    SnapshotTestingUtils.createSnapshotAndValidate(admin, originalTableName, null,
        familiesList, snapshotNameAsString, rootDir, fs, /* onlineSnapshot= */ false);

    admin.cloneSnapshot(snapshotName, clonedTableName);
    Table clonedTable = new HTable(UTIL.getConfiguration(), clonedTableName);
    HTableDescriptor cloneHtd = admin.getTableDescriptor(clonedTableName);
    // The clone's description must equal the original's, modulo the table name.
    assertEquals(
        originalTableDescription.replace(originalTableName.getNameAsString(),clonedTableNameAsString),
        cloneHtd.toStringCustomizedValues());

    // Verify the custom fields
    assertEquals(originalTableDescriptor.getValues().size(),
        cloneHtd.getValues().size());
    assertEquals(originalTableDescriptor.getConfiguration().size(),
        cloneHtd.getConfiguration().size());
    // NOTE(review): the next two asserts pass the actual value as the "expected" argument —
    // argument order looks swapped relative to the assertEquals(expected, actual) convention.
    assertEquals(cloneHtd.getValue(TEST_CUSTOM_VALUE), TEST_CUSTOM_VALUE);
    assertEquals(cloneHtd.getConfigurationValue(TEST_CONF_CUSTOM_VALUE), TEST_CONF_CUSTOM_VALUE);
    assertEquals(originalTableDescriptor.getValues(), cloneHtd.getValues());
    assertEquals(originalTableDescriptor.getConfiguration(), cloneHtd.getConfiguration());
    admin.enableTable(originalTableName);
    clonedTable.close();
  }

  /**
   * Verify that the describe for a restored table matches the describe for one the original.
   */
  @Test (timeout=300000)
  public void testDescribeMatchesAfterRestore() throws Exception {
    runRestoreWithAdditionalMetadata(false);
  }

  /**
   * Verify that if metadata changed after a snapshot was taken, that the old metadata replaces the
   * new metadata during a restore
   */
  @Test (timeout=300000)
  public void testDescribeMatchesAfterMetadataChangeAndRestore() throws Exception {
    runRestoreWithAdditionalMetadata(true);
  }

  /**
   * Verify that when the table is empty, making metadata changes after the restore does not affect
   * the restored table's original metadata
   * @throws Exception
   */
  @Test (timeout=300000)
  public void testDescribeOnEmptyTableMatchesAfterMetadataChangeAndRestore() throws Exception {
    runRestoreWithAdditionalMetadata(true, false);
  }

  private void runRestoreWithAdditionalMetadata(boolean changeMetadata) throws Exception {
    runRestoreWithAdditionalMetadata(changeMetadata, true);
  }

  /**
   * Shared restore scenario: optionally load data, snapshot the (disabled) table, optionally
   * mutate its metadata afterwards, restore the snapshot, and assert the descriptor reverted
   * to the pre-snapshot state.
   */
  private void runRestoreWithAdditionalMetadata(boolean changeMetadata, boolean addData)
      throws Exception {
    if (admin.isTableDisabled(originalTableName)) {
      admin.enableTable(originalTableName);
    }

    // populate it with data
    final byte[] familyForUpdate = BLOCKSIZE_FAM;

    List<byte[]> familiesWithDataList = new ArrayList<byte[]>();
    List<byte[]> emptyFamiliesList = new ArrayList<byte[]>();
    if (addData) {
      HTable original = new HTable(UTIL.getConfiguration(), originalTableName);
      UTIL.loadTable(original, familyForUpdate); // family arbitrarily chosen
      original.close();

      for (byte[] family : families) {
        if (family != familyForUpdate) {
          emptyFamiliesList.add(family);
        }
      }
      familiesWithDataList.add(familyForUpdate);
    } else {
      Collections.addAll(emptyFamiliesList, families);
    }

    // take a "disabled" snapshot
    final String snapshotNameAsString = "snapshot" + originalTableName
        + System.currentTimeMillis();
    final byte[] snapshotName = Bytes.toBytes(snapshotNameAsString);

    SnapshotTestingUtils.createSnapshotAndValidate(admin, originalTableName,
        familiesWithDataList, emptyFamiliesList, snapshotNameAsString, rootDir, fs,
        /* onlineSnapshot= */ false);

    admin.enableTable(originalTableName);

    if (changeMetadata) {
      // Add a new family after the snapshot; the restore below must remove it again.
      final String newFamilyNameAsString = "newFamily" + System.currentTimeMillis();
      final byte[] newFamilyName = Bytes.toBytes(newFamilyNameAsString);

      admin.disableTable(originalTableName);
      HColumnDescriptor hcd = new HColumnDescriptor(newFamilyName);
      admin.addColumn(originalTableName, hcd);
      assertTrue("New column family was not added.",
          admin.getTableDescriptor(originalTableName).toString().contains(newFamilyNameAsString));
    }

    // restore it
    if (!admin.isTableDisabled(originalTableName)) {
      admin.disableTable(originalTableName);
    }

    admin.restoreSnapshot(snapshotName);
    admin.enableTable(originalTableName);

    // verify that the description is reverted
    Table original = new HTable(UTIL.getConfiguration(), originalTableName);
    try {
      assertTrue(originalTableDescriptor.equals(admin.getTableDescriptor(originalTableName)));
      assertTrue(originalTableDescriptor.equals(original.getTableDescriptor()));
    } finally {
      original.close();
    }
  }
}
| apache-2.0 |
emre-aydin/hazelcast | hazelcast/src/main/java/com/hazelcast/internal/config/PredicateConfigReadOnly.java | 1570 | /*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.config;
import com.hazelcast.config.PredicateConfig;
import com.hazelcast.query.Predicate;
/**
 * Read-only view of the configuration for a {@link Predicate};
 * all mutating methods throw {@link UnsupportedOperationException}.
 *
 * @since 3.5
 */
public class PredicateConfigReadOnly extends PredicateConfig {

    /**
     * Wraps the given configuration in a read-only view.
     *
     * @param config the configuration whose state is copied into this view
     */
    public PredicateConfigReadOnly(PredicateConfig config) {
        super(config);
    }

    /** Creates the exception thrown by every mutating method of this read-only view. */
    private static UnsupportedOperationException readOnlyViolation() {
        return new UnsupportedOperationException("This config is read-only");
    }

    @Override
    public PredicateConfig setClassName(String className) {
        throw readOnlyViolation();
    }

    @Override
    public PredicateConfig setImplementation(Predicate implementation) {
        throw readOnlyViolation();
    }

    @Override
    public PredicateConfig setSql(String sql) {
        throw readOnlyViolation();
    }

    @Override
    public String toString() {
        return "PredicateConfigReadOnly{} " + super.toString();
    }
}
| apache-2.0 |
baldimir/uberfire | uberfire-workbench/uberfire-workbench-client-views-bs2/src/test/java/org/uberfire/client/views/bs2/tab/MultiTabWorkbenchPanelViewTest.java | 1539 | package org.uberfire.client.views.bs2.tab;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.logical.shared.SelectionHandler;
import com.google.gwt.user.client.Element;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.uberfire.client.views.bs2.maximize.MaximizeToggleButton;
import org.uberfire.client.workbench.widgets.listbar.ResizeFlowPanel;
import org.uberfire.mvp.Command;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.*;
@RunWith(GwtMockitoTestRunner.class)
public class MultiTabWorkbenchPanelViewTest {

    @InjectMocks
    private MultiTabWorkbenchPanelView view;

    @Mock
    private UberTabPanel uberTabPanel;

    @Mock
    private MaximizeToggleButton maximizeButton;

    @Mock
    private ResizeFlowPanel partViewContainer;

    @Before
    public void setup() {
        // The view touches the tab panel's DOM element style during setup,
        // so both the element and its style must be stubbed.
        final Element tabPanelElement = mock( Element.class );
        final Style tabPanelStyle = mock( Style.class );
        when( uberTabPanel.getElement() ).thenReturn( tabPanelElement );
        when( tabPanelElement.getStyle() ).thenReturn( tabPanelStyle );
    }

    @Test
    public void setupWidget() {
        view.setupWidget();

        // The widget must wire both a selection handler and a focus handler to the tab panel.
        verify( uberTabPanel ).addSelectionHandler( any( SelectionHandler.class ) );
        verify( uberTabPanel ).addOnFocusHandler( any( Command.class ) );
    }
}
| apache-2.0 |
ravikumaran2015/ravikumaran201504 | core/store/dist/src/main/java/org/onosproject/store/impl/WallClockTimestamp.java | 2334 | /*
* Copyright 2014 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.impl;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.Objects;
import org.onosproject.store.Timestamp;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ComparisonChain;
/**
* A Timestamp that derives its value from the prevailing
* wallclock time on the controller where it is generated.
*/
public class WallClockTimestamp implements Timestamp {

    private final long unixTimestamp;

    /**
     * Creates a timestamp from the current wall-clock time of this controller.
     */
    public WallClockTimestamp() {
        unixTimestamp = System.currentTimeMillis();
    }

    /**
     * Creates a timestamp from an explicit unix time in milliseconds, e.g. for
     * tests or for reconstructing a timestamp value received from a peer.
     *
     * @param timestamp unix time in milliseconds
     */
    public WallClockTimestamp(long timestamp) {
        unixTimestamp = timestamp;
    }

    @Override
    public int compareTo(Timestamp o) {
        checkArgument(o instanceof WallClockTimestamp,
                "Must be WallClockTimestamp", o);
        WallClockTimestamp that = (WallClockTimestamp) o;
        return ComparisonChain.start()
                .compare(this.unixTimestamp, that.unixTimestamp)
                .result();
    }

    @Override
    public int hashCode() {
        return Objects.hash(unixTimestamp);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof WallClockTimestamp)) {
            return false;
        }
        WallClockTimestamp that = (WallClockTimestamp) obj;
        // Direct primitive comparison; Objects.equals would needlessly box both longs.
        return this.unixTimestamp == that.unixTimestamp;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("unixTimestamp", unixTimestamp)
                .toString();
    }

    /**
     * Returns the unix time (milliseconds since the epoch) held by this timestamp.
     *
     * @return unix timestamp
     */
    public long unixTimestamp() {
        return unixTimestamp;
    }
}
| apache-2.0 |
rmetzger/flink | flink-connectors/flink-connector-jdbc/src/main/java/org/apache/flink/connector/jdbc/table/JdbcDynamicOutputFormatBuilder.java | 12200 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.jdbc.table;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.dialect.JdbcDialect;
import org.apache.flink.connector.jdbc.internal.JdbcBatchingOutputFormat;
import org.apache.flink.connector.jdbc.internal.connection.SimpleJdbcConnectionProvider;
import org.apache.flink.connector.jdbc.internal.converter.JdbcRowConverter;
import org.apache.flink.connector.jdbc.internal.executor.JdbcBatchStatementExecutor;
import org.apache.flink.connector.jdbc.internal.executor.TableBufferReducedStatementExecutor;
import org.apache.flink.connector.jdbc.internal.executor.TableBufferedStatementExecutor;
import org.apache.flink.connector.jdbc.internal.executor.TableInsertOrUpdateStatementExecutor;
import org.apache.flink.connector.jdbc.internal.executor.TableSimpleStatementExecutor;
import org.apache.flink.connector.jdbc.internal.options.JdbcDmlOptions;
import org.apache.flink.connector.jdbc.internal.options.JdbcOptions;
import org.apache.flink.connector.jdbc.statement.FieldNamedPreparedStatement;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import java.io.Serializable;
import java.util.Arrays;
import java.util.function.Function;
import static org.apache.flink.table.data.RowData.createFieldGetter;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** Builder for {@link JdbcBatchingOutputFormat} for Table/SQL. */
public class JdbcDynamicOutputFormatBuilder implements Serializable {

    private static final long serialVersionUID = 1L;

    private JdbcOptions jdbcOptions;
    private JdbcExecutionOptions executionOptions;
    private JdbcDmlOptions dmlOptions;
    private TypeInformation<RowData> rowDataTypeInformation;
    private DataType[] fieldDataTypes;

    public JdbcDynamicOutputFormatBuilder() {}

    public JdbcDynamicOutputFormatBuilder setJdbcOptions(JdbcOptions jdbcOptions) {
        this.jdbcOptions = jdbcOptions;
        return this;
    }

    public JdbcDynamicOutputFormatBuilder setJdbcExecutionOptions(
            JdbcExecutionOptions executionOptions) {
        this.executionOptions = executionOptions;
        return this;
    }

    public JdbcDynamicOutputFormatBuilder setJdbcDmlOptions(JdbcDmlOptions dmlOptions) {
        this.dmlOptions = dmlOptions;
        return this;
    }

    public JdbcDynamicOutputFormatBuilder setRowDataTypeInfo(
            TypeInformation<RowData> rowDataTypeInfo) {
        this.rowDataTypeInformation = rowDataTypeInfo;
        return this;
    }

    public JdbcDynamicOutputFormatBuilder setFieldDataTypes(DataType[] fieldDataTypes) {
        this.fieldDataTypes = fieldDataTypes;
        return this;
    }

    /**
     * Builds the batching output format. If key fields are configured, an upsert/delete
     * (buffer-reducing) executor is used; otherwise a plain append-only insert executor.
     *
     * @throws NullPointerException if any of the required options/type fields is unset
     */
    public JdbcBatchingOutputFormat<RowData, ?, ?> build() {
        checkNotNull(jdbcOptions, "jdbc options can not be null");
        checkNotNull(dmlOptions, "jdbc dml options can not be null");
        checkNotNull(executionOptions, "jdbc execution options can not be null");
        // Fail fast with a clear message instead of an anonymous NPE below.
        checkNotNull(fieldDataTypes, "field data types can not be null");
        checkNotNull(rowDataTypeInformation, "row data type information can not be null");

        final LogicalType[] logicalTypes =
                Arrays.stream(fieldDataTypes)
                        .map(DataType::getLogicalType)
                        .toArray(LogicalType[]::new);
        if (dmlOptions.getKeyFields().isPresent() && dmlOptions.getKeyFields().get().length > 0) {
            // upsert query
            return new JdbcBatchingOutputFormat<>(
                    new SimpleJdbcConnectionProvider(jdbcOptions),
                    executionOptions,
                    ctx ->
                            createBufferReduceExecutor(
                                    dmlOptions, ctx, rowDataTypeInformation, logicalTypes),
                    JdbcBatchingOutputFormat.RecordExtractor.identity());
        } else {
            // append only query
            final String sql =
                    dmlOptions
                            .getDialect()
                            .getInsertIntoStatement(
                                    dmlOptions.getTableName(), dmlOptions.getFieldNames());
            return new JdbcBatchingOutputFormat<>(
                    new SimpleJdbcConnectionProvider(jdbcOptions),
                    executionOptions,
                    ctx ->
                            createSimpleBufferedExecutor(
                                    ctx,
                                    dmlOptions.getDialect(),
                                    dmlOptions.getFieldNames(),
                                    logicalTypes,
                                    sql,
                                    rowDataTypeInformation),
                    JdbcBatchingOutputFormat.RecordExtractor.identity());
        }
    }

    /**
     * Creates an executor that buffers changes per key and flushes a reduced set of
     * upserts/deletes, keyed by the configured primary-key fields.
     */
    private static JdbcBatchStatementExecutor<RowData> createBufferReduceExecutor(
            JdbcDmlOptions opt,
            RuntimeContext ctx,
            TypeInformation<RowData> rowDataTypeInfo,
            LogicalType[] fieldTypes) {
        checkArgument(opt.getKeyFields().isPresent());
        JdbcDialect dialect = opt.getDialect();
        String tableName = opt.getTableName();
        String[] pkNames = opt.getKeyFields().get();
        int[] pkFields =
                Arrays.stream(pkNames)
                        .mapToInt(Arrays.asList(opt.getFieldNames())::indexOf)
                        .toArray();
        LogicalType[] pkTypes =
                Arrays.stream(pkFields).mapToObj(f -> fieldTypes[f]).toArray(LogicalType[]::new);
        final TypeSerializer<RowData> typeSerializer =
                rowDataTypeInfo.createSerializer(ctx.getExecutionConfig());
        // With object reuse enabled, buffered rows must be deep-copied before they are stored.
        final Function<RowData, RowData> valueTransform =
                ctx.getExecutionConfig().isObjectReuseEnabled()
                        ? typeSerializer::copy
                        : Function.identity();
        return new TableBufferReducedStatementExecutor(
                createUpsertRowExecutor(
                        dialect,
                        tableName,
                        opt.getFieldNames(),
                        fieldTypes,
                        pkFields,
                        pkNames,
                        pkTypes),
                createDeleteExecutor(dialect, tableName, pkNames, pkTypes),
                createRowKeyExtractor(fieldTypes, pkFields),
                valueTransform);
    }

    /** Creates a buffering executor around a single INSERT statement (append-only path). */
    private static JdbcBatchStatementExecutor<RowData> createSimpleBufferedExecutor(
            RuntimeContext ctx,
            JdbcDialect dialect,
            String[] fieldNames,
            LogicalType[] fieldTypes,
            String sql,
            TypeInformation<RowData> rowDataTypeInfo) {
        final TypeSerializer<RowData> typeSerializer =
                rowDataTypeInfo.createSerializer(ctx.getExecutionConfig());
        return new TableBufferedStatementExecutor(
                createSimpleRowExecutor(dialect, fieldNames, fieldTypes, sql),
                ctx.getExecutionConfig().isObjectReuseEnabled()
                        ? typeSerializer::copy
                        : Function.identity());
    }

    /**
     * Prefers the dialect's native upsert statement; falls back to an
     * exists-check + insert-or-update executor when the dialect has none.
     */
    private static JdbcBatchStatementExecutor<RowData> createUpsertRowExecutor(
            JdbcDialect dialect,
            String tableName,
            String[] fieldNames,
            LogicalType[] fieldTypes,
            int[] pkFields,
            String[] pkNames,
            LogicalType[] pkTypes) {
        return dialect.getUpsertStatement(tableName, fieldNames, pkNames)
                .map(sql -> createSimpleRowExecutor(dialect, fieldNames, fieldTypes, sql))
                .orElseGet(
                        () ->
                                createInsertOrUpdateExecutor(
                                        dialect,
                                        tableName,
                                        fieldNames,
                                        fieldTypes,
                                        pkFields,
                                        pkNames,
                                        pkTypes));
    }

    /** Creates an executor for DELETE statements keyed by the primary-key columns. */
    private static JdbcBatchStatementExecutor<RowData> createDeleteExecutor(
            JdbcDialect dialect, String tableName, String[] pkNames, LogicalType[] pkTypes) {
        String deleteSql = dialect.getDeleteStatement(tableName, pkNames);
        return createSimpleRowExecutor(dialect, pkNames, pkTypes, deleteSql);
    }

    /** Creates an executor that binds whole rows to a single prepared statement. */
    private static JdbcBatchStatementExecutor<RowData> createSimpleRowExecutor(
            JdbcDialect dialect, String[] fieldNames, LogicalType[] fieldTypes, final String sql) {
        final JdbcRowConverter rowConverter = dialect.getRowConverter(RowType.of(fieldTypes));
        return new TableSimpleStatementExecutor(
                connection ->
                        FieldNamedPreparedStatement.prepareStatement(connection, sql, fieldNames),
                rowConverter);
    }

    /**
     * Creates the fallback upsert executor: probe with a row-exists statement,
     * then issue either an INSERT or an UPDATE.
     */
    private static JdbcBatchStatementExecutor<RowData> createInsertOrUpdateExecutor(
            JdbcDialect dialect,
            String tableName,
            String[] fieldNames,
            LogicalType[] fieldTypes,
            int[] pkFields,
            String[] pkNames,
            LogicalType[] pkTypes) {
        final String existStmt = dialect.getRowExistsStatement(tableName, pkNames);
        final String insertStmt = dialect.getInsertIntoStatement(tableName, fieldNames);
        final String updateStmt = dialect.getUpdateStatement(tableName, fieldNames, pkNames);
        return new TableInsertOrUpdateStatementExecutor(
                connection ->
                        FieldNamedPreparedStatement.prepareStatement(
                                connection, existStmt, pkNames),
                connection ->
                        FieldNamedPreparedStatement.prepareStatement(
                                connection, insertStmt, fieldNames),
                connection ->
                        FieldNamedPreparedStatement.prepareStatement(
                                connection, updateStmt, fieldNames),
                dialect.getRowConverter(RowType.of(pkTypes)),
                dialect.getRowConverter(RowType.of(fieldTypes)),
                dialect.getRowConverter(RowType.of(fieldTypes)),
                createRowKeyExtractor(fieldTypes, pkFields));
    }

    /** Builds a function projecting a full row to a row containing only its key fields. */
    private static Function<RowData, RowData> createRowKeyExtractor(
            LogicalType[] logicalTypes, int[] pkFields) {
        final RowData.FieldGetter[] fieldGetters = new RowData.FieldGetter[pkFields.length];
        for (int i = 0; i < pkFields.length; i++) {
            fieldGetters[i] = createFieldGetter(logicalTypes[pkFields[i]], pkFields[i]);
        }
        return row -> getPrimaryKey(row, fieldGetters);
    }

    /** Copies the key fields of {@code row} into a fresh {@link GenericRowData}. */
    private static RowData getPrimaryKey(RowData row, RowData.FieldGetter[] fieldGetters) {
        GenericRowData pkRow = new GenericRowData(fieldGetters.length);
        for (int i = 0; i < fieldGetters.length; i++) {
            pkRow.setField(i, fieldGetters[i].getFieldOrNull(row));
        }
        return pkRow;
    }
}
| apache-2.0 |
rekhajoshm/pigfork | test/org/apache/pig/test/TestUnion.java | 16092 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.Properties;
import org.apache.pig.ExecType;
import org.apache.pig.FuncSpec;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POFilter;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POUnion;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultBagFactory;
import org.apache.pig.data.DefaultTuple;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.test.utils.GenPhyOp;
import org.apache.pig.test.utils.TestHelper;
import org.junit.Before;
import org.junit.Test;
/**
* Start Plan - --4430968173902769765
* |
* |---Filter - -3398344075398026874
* | |
* | |---For Each - --3361918026493682288
* | |
* | |---Load - --7021057205133896020
* |
* |---Filter - -4449068980110713814
* |
* |---For Each - -7192652407897311774
* |
* |---Load - --3816247505117325386
*
* Tests the Start Plan operator with the above plan.
* The verification is done as follows:
* Both loads load the same file(/etc/passwd).
* The filters cover the input. Here the filters used
* are $2<=50 & $2>50
* The bags coming out of Start Plan is checked against
* the projected input bag.
* Since types are not supported yet, there is an explicit
* conversion from DataByteArray to native types for computation
* and back to DataByteArray for comparison with input.
*/
public class TestUnion {
    POUnion sp;
    DataBag expBag;
    PigContext pc;
    PigServer pigServer;

    @Before
    public void setUp() throws Exception {
        pigServer = new PigServer(ExecType.LOCAL, new Properties());
        pc = pigServer.getPigContext();
        pc.connect();
        GenPhyOp.setPc(pc);

        // Two loads of the same file feed two complementary filters ($1 <= 50 and $1 > 50),
        // so the union of their outputs must equal the projected input.
        POLoad ld1 = GenPhyOp.topLoadOp();
        String curDir = System.getProperty("user.dir");
        String inpDir = curDir + File.separatorChar + "test/org/apache/pig/test/data/InputFiles/";
        FileSpec fSpec = new FileSpec(Util.generateURI(Util.encodeEscape(inpDir + "passwd"), pc),
                new FuncSpec(PigStorage.class.getName(), new String[]{":"}));
        ld1.setLFile(fSpec);
        POLoad ld2 = GenPhyOp.topLoadOp();
        ld2.setLFile(fSpec);
        POFilter fl1 = GenPhyOp.topFilterOpWithProj(1, 50, GenPhyOp.LTE);
        POFilter fl2 = GenPhyOp.topFilterOpWithProj(1, 50, GenPhyOp.GT);
        int[] flds = {0, 2};
        // Sample tuple supplying the shape/types for the projection plans of the foreachs.
        Tuple sample = new DefaultTuple();
        sample.append("S");
        sample.append("x");
        sample.append(Integer.valueOf(10));
        sample.append(Integer.valueOf(20));
        sample.append("S");
        sample.append("x");
        sample.append("S");
        sample.append("x");
        POForEach fe1 = GenPhyOp.topForEachOPWithPlan(flds, sample);
        POForEach fe2 = GenPhyOp.topForEachOPWithPlan(flds, sample);
        sp = GenPhyOp.topUnionOp();
        PhysicalPlan plan = new PhysicalPlan();
        plan.add(ld1);
        plan.add(ld2);
        plan.add(fl1);
        plan.add(fl2);
        plan.add(fe1);
        plan.add(fe2);
        plan.add(sp);
        plan.connect(ld1, fe1);
        plan.connect(fe1, fl1);
        plan.connect(ld2, fe2);
        plan.connect(fe2, fl2);
        plan.connect(fl1, sp);
        plan.connect(fl2, sp);

        // Build the expected bag by loading the same file once more
        // and projecting fields 0 and 2.
        POLoad ld3 = GenPhyOp.topLoadOp();
        ld3.setLFile(fSpec);
        DataBag fullBag = DefaultBagFactory.getInstance().newDefaultBag();
        for (Result res = ld3.getNextTuple(); res.returnStatus != POStatus.STATUS_EOP; res = ld3.getNextTuple()) {
            fullBag.add((Tuple) res.result);
        }
        int[] fields = {0, 2};
        expBag = TestHelper.projectBag(fullBag, fields);
    }

    // Casts every field of the input tuple to DataByteArray so the operator output
    // can be compared against the raw (untyped) loaded input.
    private Tuple castToDBA(Tuple in) throws ExecException {
        Tuple res = new DefaultTuple();
        for (int i = 0; i < in.size(); i++) {
            res.append(new DataByteArray(in.get(i).toString()));
        }
        return res;
    }

    @Test
    public void testGetNextTuple() throws ExecException, IOException {
        DataBag outBag = DefaultBagFactory.getInstance().newDefaultBag();
        for (Result res = sp.getNextTuple(); res.returnStatus != POStatus.STATUS_EOP; res = sp.getNextTuple()) {
            outBag.add(castToDBA((Tuple) res.result));
        }
        assertTrue(TestHelper.compareBags(expBag, outBag));
    }

    // Test the case when POUnion is one of the roots in a map reduce
    // plan and the input to it can be null
    // This can happen when we have
    // a plan like below
    // POUnion
    // |
    // |--POLocalRearrange
    // |  |
    // |  |-POUnion (root 2)--> This union's getNext() can lead the code here
    // |
    // |--POLocalRearrange (root 1)
    // The inner POUnion above is a root in the plan which has 2 roots.
    // So these 2 roots would have input coming from different input
    // sources (dfs files). So certain maps would be working on input only
    // meant for "root 1" above and some maps would work on input
    // meant only for "root 2". In the former case, "root 2" would
    // neither get input attached to it nor does it have predecessors
    @Test
    public void testGetNextNullInput() throws Exception {
        File f1 = Util.createInputFile("tmp", "a.txt", new String[] {"1\t2\t3", "4\t5\t6"});
        File f2 = Util.createInputFile("tmp", "b.txt", new String[] {"7\t8\t9", "1\t200\t300"});
        File f3 = Util.createInputFile("tmp", "c.txt", new String[] {"1\t20\t30"});
        pigServer.registerQuery("a = load '" + Util.encodeEscape(f1.getAbsolutePath()) + "' ;");
        pigServer.registerQuery("b = load '" + Util.encodeEscape(f2.getAbsolutePath()) + "';");
        pigServer.registerQuery("c = union a, b;");
        pigServer.registerQuery("d = load '" + Util.encodeEscape(f3.getAbsolutePath()) + "' ;");
        pigServer.registerQuery("e = cogroup c by $0 inner, d by $0 inner;");
        pigServer.explain("e", System.err);
        // output should be
        // (1,{(1,2,3),(1,200,300)},{(1,20,30)})
        Tuple expectedResult = new DefaultTuple();
        expectedResult.append(new DataByteArray("1"));
        Tuple[] secondFieldContents = new DefaultTuple[2];
        secondFieldContents[0] = Util.createTuple(Util.toDataByteArrays(new String[] {"1", "2", "3"}));
        secondFieldContents[1] = Util.createTuple(Util.toDataByteArrays(new String[] {"1", "200", "300"}));
        DataBag secondField = Util.createBag(secondFieldContents);
        expectedResult.append(secondField);
        DataBag thirdField = Util.createBag(new Tuple[]{Util.createTuple(Util.toDataByteArrays(new String[]{"1", "20", "30"}))});
        expectedResult.append(thirdField);
        Iterator<Tuple> it = pigServer.openIterator("e");
        assertEquals(expectedResult, it.next());
        assertFalse(it.hasNext());
    }

    // Test schema merge in union when one of the fields is a bag
    @Test
    public void testSchemaMergeWithBag() throws Exception {
        File f1 = Util.createInputFile("tmp", "input1.txt", new String[] {"dummy"});
        File f2 = Util.createInputFile("tmp", "input2.txt", new String[] {"dummy"});
        Util.registerMultiLineQuery(pigServer, "a = load '" + Util.encodeEscape(f1.getAbsolutePath()) + "';" +
                "b = load '" + Util.encodeEscape(f2.getAbsolutePath()) + "';" +
                "c = foreach a generate 1, {(1, 'str1')};" +
                "d = foreach b generate 2, {(2, 'str2')};" +
                "e = union c,d;" +
                "");
        Iterator<Tuple> it = pigServer.openIterator("e");
        Object[] expected = new Object[] { Util.getPigConstant("(1, {(1, 'str1')})"),
                Util.getPigConstant("(2, {(2, 'str2')})")};
        Object[] results = new Object[2];
        int i = 0;
        while (it.hasNext()) {
            if (i == 2) {
                fail("Got more tuples than expected!");
            }
            Tuple t = it.next();
            if (t.get(0).equals(1)) {
                // this is the first tuple
                results[0] = t;
            } else {
                results[1] = t;
            }
            i++;
        }
        for (int j = 0; j < expected.length; j++) {
            assertEquals(expected[j], results[j]);
        }
    }

    @Test
    public void testCastingAfterUnion() throws Exception {
        File f1 = Util.createInputFile("tmp", "i1.txt", new String[] {"aaa\t111"});
        File f2 = Util.createInputFile("tmp", "i2.txt", new String[] {"bbb\t222"});
        PigServer ps = new PigServer(ExecType.LOCAL, new Properties());
        ps.registerQuery("A = load '" + Util.encodeEscape(f1.getAbsolutePath()) + "' as (a,b);");
        ps.registerQuery("B = load '" + Util.encodeEscape(f2.getAbsolutePath()) + "' as (a,b);");
        ps.registerQuery("C = union A,B;");
        ps.registerQuery("D = foreach C generate (chararray)a as a,(int)b as b;");
        Schema dumpSchema = ps.dumpSchema("D");
        Schema expected = new Schema();
        expected.add(new Schema.FieldSchema("a", DataType.CHARARRAY));
        expected.add(new Schema.FieldSchema("b", DataType.INTEGER));
        assertEquals(expected, dumpSchema);
        Iterator<Tuple> itr = ps.openIterator("D");
        int recordCount = 0;
        // openIterator's iterator signals exhaustion by returning null from next().
        while (itr.next() != null) {
            ++recordCount;
        }
        assertEquals(2, recordCount);
    }

    @Test
    public void testCastingAfterUnionWithMultipleLoadersDifferentCasters()
            throws Exception {
        // Note that different caster case only works when each field is still coming
        // from the single Loader.
        // In the case below, 'a' is coming from A(PigStorage)
        // and 'b' is coming from B(TextLoader). No overlaps.
        File f1 = Util.createInputFile("tmp", "i1.txt", new String[] {"1","2","3"});
        File f2 = Util.createInputFile("tmp", "i2.txt", new String[] {"a","b","c"});
        PigServer ps = new PigServer(ExecType.LOCAL, new Properties());
        //PigStorage and TextLoader have different LoadCasters
        ps.registerQuery("A = load '" + Util.encodeEscape(f1.getAbsolutePath()) + "' as (a:bytearray);");
        ps.registerQuery("B = load '" + Util.encodeEscape(f2.getAbsolutePath()) + "' using TextLoader() as (b:bytearray);");
        ps.registerQuery("C = union onschema A,B;");
        ps.registerQuery("D = foreach C generate (int)a as a,(chararray)b as b;");
        Schema dumpSchema = ps.dumpSchema("D");
        Schema expected = new Schema();
        expected.add(new Schema.FieldSchema("a", DataType.INTEGER));
        expected.add(new Schema.FieldSchema("b", DataType.CHARARRAY));
        assertEquals(expected, dumpSchema);
        Iterator<Tuple> itr = ps.openIterator("D");
        int recordCount = 0;
        while (itr.next() != null) {
            ++recordCount;
        }
        assertEquals(6, recordCount);
    }

    @Test
    public void testCastingAfterUnionWithMultipleLoadersDifferentCasters2()
            throws Exception {
        // A bit more complicated pattern but still same requirement of each
        // field coming from the same Loader.
        // 'a' is coming from A(PigStorage)
        // 'i' is coming from B and C but both from the TextLoader.
        File f1 = Util.createInputFile("tmp", "i1.txt", new String[] {"b","c", "1", "3"});
        File f2 = Util.createInputFile("tmp", "i2.txt", new String[] {"a","b","c"});
        File f3 = Util.createInputFile("tmp", "i3.txt", new String[] {"1","2","3"});
        PigServer ps = new PigServer(ExecType.LOCAL, new Properties());
        ps.registerQuery("A = load '" + Util.encodeEscape(f1.getAbsolutePath()) + "' as (a:bytearray);"); // Using PigStorage()
        ps.registerQuery("B = load '" + Util.encodeEscape(f2.getAbsolutePath()) + "' using TextLoader() as (i:bytearray);");
        ps.registerQuery("C = load '" + Util.encodeEscape(f3.getAbsolutePath()) + "' using TextLoader() as (i:bytearray);");
        ps.registerQuery("B2 = join B by i, A by a;"); //{A::a: bytearray,B::i: bytearray}
        ps.registerQuery("B3 = foreach B2 generate a, B::i as i;"); //{A::a: bytearray,i: bytearray}
        ps.registerQuery("C2 = join C by i, A by a;"); //{A::a: bytearray,C::i: bytearray}
        ps.registerQuery("C3 = foreach C2 generate a, C::i as i;"); //{A::a: bytearray,i: bytearray}
        ps.registerQuery("D = union onschema B3,C3;"); // {A::a: bytearray,i: bytearray}
        ps.registerQuery("E = foreach D generate (chararray) a, (chararray) i;");//{A::a: chararray,i: chararray}
        Iterator<Tuple> itr = ps.openIterator("E");
        int recordCount = 0;
        while (itr.next() != null) {
            ++recordCount;
        }
        assertEquals(4, recordCount);
    }

    @Test
    public void testCastingAfterUnionWithMultipleLoadersSameCaster()
            throws Exception {
        // Fields coming from different loaders but
        // having the same LoadCaster.
        File f1 = Util.createInputFile("tmp", "i1.txt", new String[] {"1\ta","2\tb","3\tc"});
        PigServer ps = new PigServer(ExecType.LOCAL, new Properties());
        // PigStorage and PigStorageWithStatistics have the same
        // LoadCaster(== Utf8StorageConverter)
        ps.registerQuery("A = load '" + Util.encodeEscape(f1.getAbsolutePath()) + "' as (a:bytearray, b:bytearray);");
        ps.registerQuery("B = load '" + Util.encodeEscape(f1.getAbsolutePath()) +
                "' using org.apache.pig.test.PigStorageWithStatistics() as (a:bytearray, b:bytearray);");
        ps.registerQuery("C = union onschema A,B;");
        ps.registerQuery("D = foreach C generate (int)a as a,(chararray)b as b;");
        // 'a' is coming from A and 'b' is coming from B; No overlaps.
        Schema dumpSchema = ps.dumpSchema("D");
        Schema expected = new Schema();
        expected.add(new Schema.FieldSchema("a", DataType.INTEGER));
        expected.add(new Schema.FieldSchema("b", DataType.CHARARRAY));
        assertEquals(expected, dumpSchema);
        Iterator<Tuple> itr = ps.openIterator("D");
        int recordCount = 0;
        while (itr.next() != null) {
            ++recordCount;
        }
        assertEquals(6, recordCount);
    }
}
| apache-2.0 |
apache/incubator-asterixdb-hyracks | hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/RangePredicate.java | 4390 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.btree.impls;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.common.impls.AbstractSearchPredicate;
import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
public class RangePredicate extends AbstractSearchPredicate {
private static final long serialVersionUID = 1L;
protected ITupleReference lowKey = null;
protected ITupleReference highKey = null;
protected boolean lowKeyInclusive = true;
protected boolean highKeyInclusive = true;
protected MultiComparator lowKeyCmp;
protected MultiComparator highKeyCmp;
public RangePredicate() {
}
public RangePredicate(ITupleReference lowKey, ITupleReference highKey, boolean lowKeyInclusive,
boolean highKeyInclusive, MultiComparator lowKeyCmp, MultiComparator highKeyCmp) {
this.lowKey = lowKey;
this.highKey = highKey;
this.lowKeyInclusive = lowKeyInclusive;
this.highKeyInclusive = highKeyInclusive;
this.lowKeyCmp = lowKeyCmp;
this.highKeyCmp = highKeyCmp;
}
public RangePredicate(ITupleReference lowKey, ITupleReference highKey, boolean lowKeyInclusive,
boolean highKeyInclusive, MultiComparator lowKeyCmp, MultiComparator highKeyCmp,
ITupleReference minFilterTuple, ITupleReference maxFilterTuple) {
super(minFilterTuple, maxFilterTuple);
this.lowKey = lowKey;
this.highKey = highKey;
this.lowKeyInclusive = lowKeyInclusive;
this.highKeyInclusive = highKeyInclusive;
this.lowKeyCmp = lowKeyCmp;
this.highKeyCmp = highKeyCmp;
}
public void reset(ITupleReference lowKey, ITupleReference highKey, boolean lowKeyInclusive,
boolean highKeyInclusive, MultiComparator lowKeyCmp, MultiComparator highKeyCmp) {
this.lowKey = lowKey;
this.highKey = highKey;
this.lowKeyInclusive = lowKeyInclusive;
this.highKeyInclusive = highKeyInclusive;
this.lowKeyCmp = lowKeyCmp;
this.highKeyCmp = highKeyCmp;
}
@Override
public MultiComparator getLowKeyComparator() {
return lowKeyCmp;
}
@Override
public MultiComparator getHighKeyComparator() {
return highKeyCmp;
}
public void setLowKeyComparator(MultiComparator lowKeyCmp) {
this.lowKeyCmp = lowKeyCmp;
}
public void setHighKeyComparator(MultiComparator highKeyCmp) {
this.highKeyCmp = highKeyCmp;
}
@Override
public ITupleReference getLowKey() {
return lowKey;
}
public ITupleReference getHighKey() {
return highKey;
}
public void setLowKey(ITupleReference lowKey, boolean lowKeyInclusive) {
this.lowKey = lowKey;
this.lowKeyInclusive = lowKeyInclusive;
}
public void setHighKey(ITupleReference highKey, boolean highKeyInclusive) {
this.highKey = highKey;
this.highKeyInclusive = highKeyInclusive;
}
public boolean isLowKeyInclusive() {
return lowKeyInclusive;
}
public boolean isHighKeyInclusive() {
return highKeyInclusive;
}
public void setLowKey(ITupleReference lowKey) {
this.lowKey = lowKey;
}
public void setHighKey(ITupleReference highKey) {
this.highKey = highKey;
}
// Alias-style setter for the low-key comparator (same effect as setLowKeyComparator).
public void setLowKeyCmp(MultiComparator lowKeyCmp) {
    this.lowKeyCmp = lowKeyCmp;
}
// Alias-style setter for the high-key comparator (same effect as setHighKeyComparator).
public void setHighKeyCmp(MultiComparator highKeyCmp) {
    this.highKeyCmp = highKeyCmp;
}
}
| apache-2.0 |
smmribeiro/intellij-community | java/java-psi-impl/src/com/intellij/psi/impl/java/stubs/impl/PsiClassStubImpl.java | 7808 | // Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl.java.stubs.impl;
import com.intellij.extapi.psi.StubBasedPsiElementBase;
import com.intellij.psi.PsiClass;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.impl.java.stubs.JavaClassElementType;
import com.intellij.psi.impl.java.stubs.PsiClassStub;
import com.intellij.psi.stubs.StubBase;
import com.intellij.psi.stubs.StubElement;
import com.intellij.util.BitUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Default {@link PsiClassStub} implementation: a lightweight, index-friendly snapshot of a
 * class declaration. It stores the names needed for resolve (short name, qualified name,
 * base-class reference text) and packs all boolean properties of the class into a single
 * {@code short} bit set.
 */
public class PsiClassStubImpl<T extends PsiClass> extends StubBase<T> implements PsiClassStub<T> {
  // Bit masks for the packed myFlags field.
  // NOTE(review): the packed short round-trips through packFlags()/getFlags() and appears to be
  // persisted with serialized stubs — confirm before renumbering or reusing existing bits.
  private static final int DEPRECATED = 0x01;
  private static final int INTERFACE = 0x02;
  private static final int ENUM = 0x04;
  private static final int ENUM_CONSTANT_INITIALIZER = 0x08;
  private static final int ANONYMOUS = 0x10;
  private static final int ANON_TYPE = 0x20;
  private static final int IN_QUALIFIED_NEW = 0x40;
  private static final int DEPRECATED_ANNOTATION = 0x80;
  private static final int ANONYMOUS_INNER = 0x100;
  private static final int LOCAL_CLASS_INNER = 0x200;
  private static final int HAS_DOC_COMMENT = 0x400;
  private static final int RECORD = 0x800;

  private final String myQualifiedName; // may be null (see @Nullable ctor parameter)
  private final String myName;          // may be null (see @Nullable ctor parameter)
  private final String myBaseRefText;   // may be null (see @Nullable ctor parameter)
  private final short myFlags;          // packed boolean properties, see packFlags()
  private String mySourceFileName;      // mutable: filled in later via setSourceFileName()

  /**
   * @param type          stub element type of this class stub
   * @param parent        parent stub, if any
   * @param qualifiedName fully qualified class name, if known
   * @param name          short class name, if any
   * @param baseRefText   base class reference text, if any
   * @param flags         packed boolean properties produced by {@link #packFlags}
   */
  public PsiClassStubImpl(@NotNull JavaClassElementType type,
                          final StubElement parent,
                          @Nullable final String qualifiedName,
                          @Nullable final String name,
                          @Nullable final String baseRefText,
                          final short flags) {
    super(parent, type);
    myQualifiedName = qualifiedName;
    myName = name;
    myBaseRefText = baseRefText;
    myFlags = flags;
    if (StubBasedPsiElementBase.ourTraceStubAstBinding) {
      // Remember where this stub was created to help diagnose stub/AST binding problems.
      String creationTrace = "Stub creation thread: " + Thread.currentThread() + "\n" + DebugUtil.currentStackTrace();
      putUserData(StubBasedPsiElementBase.CREATION_TRACE, creationTrace);
    }
  }

  @Override
  public String getName() {
    return myName;
  }

  @Override
  public String getQualifiedName() {
    return myQualifiedName;
  }

  @Override
  public String getBaseClassReferenceText() {
    return myBaseRefText;
  }

  @Override
  public boolean isDeprecated() {
    return BitUtil.isSet(myFlags, DEPRECATED);
  }

  // Deprecated via @Deprecated annotation, as opposed to a javadoc @deprecated tag.
  @Override
  public boolean hasDeprecatedAnnotation() {
    return BitUtil.isSet(myFlags, DEPRECATED_ANNOTATION);
  }

  @Override
  public boolean isInterface() {
    return BitUtil.isSet(myFlags, INTERFACE);
  }

  @Override
  public boolean isEnum() {
    return BitUtil.isSet(myFlags, ENUM);
  }

  @Override
  public boolean isRecord() {
    return BitUtil.isSet(myFlags, RECORD);
  }

  @Override
  public boolean isEnumConstantInitializer() {
    return isEnumConstInitializer(myFlags);
  }

  // Static form for callers that only have the packed flags, not a stub instance.
  public static boolean isEnumConstInitializer(final short flags) {
    return BitUtil.isSet(flags, ENUM_CONSTANT_INITIALIZER);
  }

  @Override
  public boolean isAnonymous() {
    return isAnonymous(myFlags);
  }

  // Static form for callers that only have the packed flags, not a stub instance.
  public static boolean isAnonymous(final short flags) {
    return BitUtil.isSet(flags, ANONYMOUS);
  }

  // NOTE(review): the annotation-type property is stored under the mask named ANON_TYPE;
  // despite the name, that bit means "annotation type", not "anonymous".
  @Override
  public boolean isAnnotationType() {
    return BitUtil.isSet(myFlags, ANON_TYPE);
  }

  @Override
  public boolean hasDocComment() {
    return BitUtil.isSet(myFlags, HAS_DOC_COMMENT);
  }

  @Override
  public String getSourceFileName() {
    return mySourceFileName;
  }

  // The source file name is the only mutable piece of state; it is set after construction.
  public void setSourceFileName(String sourceFileName) {
    mySourceFileName = sourceFileName;
  }

  @Override
  public boolean isAnonymousInQualifiedNew() {
    return BitUtil.isSet(myFlags, IN_QUALIFIED_NEW);
  }

  /** Raw packed flags, suitable for serialization or for the static flag queries above. */
  public short getFlags() {
    return myFlags;
  }

  /**
   * Legacy overload kept for callers predating the record flag; delegates with
   * {@code isRecord = false}.
   */
  public static short packFlags(boolean isDeprecated,
                                boolean isInterface,
                                boolean isEnum,
                                boolean isEnumConstantInitializer,
                                boolean isAnonymous,
                                boolean isAnnotationType,
                                boolean isInQualifiedNew,
                                boolean hasDeprecatedAnnotation,
                                boolean anonymousInner,
                                boolean localClassInner,
                                boolean hasDocComment) {
    return packFlags(isDeprecated,
                     isInterface,
                     isEnum,
                     isEnumConstantInitializer,
                     isAnonymous,
                     isAnnotationType,
                     isInQualifiedNew,
                     hasDeprecatedAnnotation,
                     anonymousInner,
                     localClassInner,
                     hasDocComment,
                     false);
  }

  /** Packs all boolean class properties into the short consumed by the constructor. */
  public static short packFlags(boolean isDeprecated,
                                boolean isInterface,
                                boolean isEnum,
                                boolean isEnumConstantInitializer,
                                boolean isAnonymous,
                                boolean isAnnotationType,
                                boolean isInQualifiedNew,
                                boolean hasDeprecatedAnnotation,
                                boolean anonymousInner,
                                boolean localClassInner,
                                boolean hasDocComment,
                                boolean isRecord) {
    short flags = 0;
    if (isDeprecated) flags |= DEPRECATED;
    if (isInterface) flags |= INTERFACE;
    if (isEnum) flags |= ENUM;
    if (isEnumConstantInitializer) flags |= ENUM_CONSTANT_INITIALIZER;
    if (isAnonymous) flags |= ANONYMOUS;
    if (isAnnotationType) flags |= ANON_TYPE;
    if (isInQualifiedNew) flags |= IN_QUALIFIED_NEW;
    if (hasDeprecatedAnnotation) flags |= DEPRECATED_ANNOTATION;
    if (anonymousInner) flags |= ANONYMOUS_INNER;
    if (localClassInner) flags |= LOCAL_CLASS_INNER;
    if (hasDocComment) flags |= HAS_DOC_COMMENT;
    if (isRecord) flags |= RECORD;
    return flags;
  }

  public boolean isAnonymousInner() {
    return BitUtil.isSet(myFlags, ANONYMOUS_INNER);
  }

  public boolean isLocalClassInner() {
    return BitUtil.isSet(myFlags, LOCAL_CLASS_INNER);
  }

  // Debug representation: lists the set flags, names, and base reference.
  @Override
  @SuppressWarnings("SpellCheckingInspection")
  public String toString() {
    StringBuilder builder = new StringBuilder();
    builder.append("PsiClassStub[");
    if (isInterface()) {
      builder.append("interface ");
    }
    if (isAnonymous()) {
      builder.append("anonymous ");
    }
    if (isEnum()) {
      builder.append("enum ");
    }
    if (isRecord()) {
      builder.append("record ");
    }
    if (isAnnotationType()) {
      builder.append("annotation ");
    }
    if (isEnumConstantInitializer()) {
      builder.append("enumInit ");
    }
    if (isDeprecated()) {
      builder.append("deprecated ");
    }
    if (hasDeprecatedAnnotation()) {
      builder.append("deprecatedA ");
    }
    builder.append("name=").append(getName()).append(" fqn=").append(getQualifiedName());
    if (getBaseClassReferenceText() != null) {
      builder.append(" baseref=").append(getBaseClassReferenceText());
    }
    if (isAnonymousInQualifiedNew()) {
      builder.append(" inqualifnew");
    }
    if (isAnonymousInner()) {
      builder.append(" jvmAnonymousInner");
    }
    if (isLocalClassInner()) {
      builder.append(" jvmLocalClassInner");
    }
    builder.append("]");
    return builder.toString();
  }
}
| apache-2.0 |
tillrohrmann/flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/delegation/BlinkPlannerFactory.java | 2727 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.delegation;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.catalog.CatalogManager;
import org.apache.flink.table.catalog.FunctionCatalog;
import org.apache.flink.table.delegation.Executor;
import org.apache.flink.table.delegation.Planner;
import org.apache.flink.table.delegation.PlannerFactory;
import org.apache.flink.table.descriptors.DescriptorProperties;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Factory to construct a {@link BatchPlanner} or {@link StreamPlanner}.
 *
 * <p>The choice is driven by the {@link EnvironmentSettings#STREAMING_MODE} property:
 * {@code "true"} (the default when absent) yields a streaming planner, anything else a
 * batch planner.
 */
@Internal
public final class BlinkPlannerFactory implements PlannerFactory {

    @Override
    public Planner create(
            Map<String, String> properties,
            Executor executor,
            TableConfig tableConfig,
            FunctionCatalog functionCatalog,
            CatalogManager catalogManager) {
        // parseBoolean avoids the needless Boolean boxing of Boolean.valueOf while
        // keeping identical semantics ("true" case-insensitively -> true, else false).
        if (Boolean.parseBoolean(
                properties.getOrDefault(EnvironmentSettings.STREAMING_MODE, "true"))) {
            return new StreamPlanner(executor, tableConfig, functionCatalog, catalogManager);
        } else {
            return new BatchPlanner(executor, tableConfig, functionCatalog, catalogManager);
        }
    }

    /** Optional context: advertises this factory's class name for discovery. */
    @Override
    public Map<String, String> optionalContext() {
        Map<String, String> map = new HashMap<>();
        map.put(EnvironmentSettings.CLASS_NAME, this.getClass().getCanonicalName());
        return map;
    }

    /** No required context properties — an empty descriptor map. */
    @Override
    public Map<String, String> requiredContext() {
        DescriptorProperties properties = new DescriptorProperties();
        return properties.asMap();
    }

    @Override
    public List<String> supportedProperties() {
        return Arrays.asList(EnvironmentSettings.STREAMING_MODE, EnvironmentSettings.CLASS_NAME);
    }
}
| apache-2.0 |
gigaroby/hops | hadoop-tools/distributedLoadSimulator/src/main/java/org/apache/hadoop/distributedloadsimulator/sls/scheduler/FifoSchedulerMetrics.java | 1984 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.distributedloadsimulator.sls.scheduler;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo
.FifoScheduler;
import com.codahale.metrics.Gauge;
/**
 * Scheduler metrics for the FIFO scheduler: registers per-queue capacity gauges
 * on the shared metrics registry.
 */
public class FifoSchedulerMetrics extends SchedulerMetrics {

  public FifoSchedulerMetrics() {
    super();
  }

  @Override
  public void trackQueue(String queueName) {
    trackedQueues.add(queueName);
    FifoScheduler fifo = (FifoScheduler) scheduler;
    // for FifoScheduler, only DEFAULT_QUEUE
    // here the three parameters doesn't affect results
    final QueueInfo queue = fifo.getQueueInfo(queueName, false, false);
    // track currentCapacity, maximumCapacity (always 1.0f)
    metrics.register("variable.queue." + queueName + ".currentcapacity",
        new Gauge<Float>() {
          @Override
          public Float getValue() {
            return queue.getCurrentCapacity();
          }
        }
    );
    // Fixed: this gauge was previously registered under the malformed name
    // "variable.queue.<name>." and duplicated getCurrentCapacity(). Per the
    // comment above it is meant to expose the queue's maximum capacity.
    metrics.register("variable.queue." + queueName + ".maximumcapacity",
        new Gauge<Float>() {
          @Override
          public Float getValue() {
            return queue.getMaximumCapacity();
          }
        }
    );
  }
}
| apache-2.0 |
ZhenyaM/veraPDF-pdfbox | tools/src/main/java/org/apache/pdfbox/tools/Version.java | 2063 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.tools;
/**
* A simple command line utility to get the version of PDFBox.
*
* @author Ben Litchfield
*/
/**
 * A simple command line utility to get the version of PDFBox.
 *
 * @author Ben Litchfield
 */
class Version
{
    private Version()
    {
        // utility class — never instantiated
    }

    /**
     * Get the version of PDFBox or unknown if it is not known.
     *
     * @return The version of pdfbox that is being used.
     */
    public static String getVersion()
    {
        final String detected = org.apache.pdfbox.util.Version.getVersion();
        return detected != null ? detected : "unknown";
    }

    /**
     * This will print out the version of PDF to System.out.
     *
     * @param args Command line arguments.
     */
    public static void main(String[] args)
    {
        // suppress the Dock icon on OS X
        System.setProperty("apple.awt.UIElement", "true");
        if (args.length != 0)
        {
            usage();
            return;
        }
        System.out.println( "Version:" + getVersion() );
    }

    /**
     * This will print out a message telling how to use this example.
     */
    private static void usage()
    {
        System.err.println( "usage: " + Version.class.getName() );
    }
}
| apache-2.0 |
android-ia/platform_tools_idea | plugins/hg4idea/src/org/zmlx/hg4idea/ui/HgRepositorySelectorComponent.java | 2354 | // Copyright 2008-2010 Victor Iacoban
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under
// the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions and
// limitations under the License.
package org.zmlx.hg4idea.ui;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.IdeBorderFactory;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.event.ActionListener;
import java.util.Collection;
/**
 * UI component with a combo box for picking one Mercurial repository root out of
 * several; the panel hides itself when there is only a single root.
 */
public class HgRepositorySelectorComponent {
  // NOTE(review): fields are presumably bound from a GUI designer form — keep raw types.
  private JComboBox repositorySelector;
  private JPanel mainPanel;

  /** Populates the selector with the given roots; hides the panel when only one root exists. */
  public void setRoots(Collection<VirtualFile> roots) {
    DefaultComboBoxModel model = new DefaultComboBoxModel();
    for (VirtualFile repo : roots) {
      model.addElement(new RepositoryDisplay(repo));
    }
    repositorySelector.setModel(model);
    mainPanel.setVisible(roots.size() > 1);
  }

  /** Pre-selects the given repository; no-op when {@code repository} is null. */
  public void setSelectedRoot(@Nullable VirtualFile repository) {
    if (repository != null) {
      repositorySelector.setSelectedItem(new RepositoryDisplay(repository));
    }
  }

  public void addActionListener(ActionListener actionListener) {
    repositorySelector.addActionListener(actionListener);
  }

  public void setTitle(String title) {
    mainPanel.setBorder(IdeBorderFactory.createTitledBorder(title, true));
  }

  /** @return the currently selected repository root. */
  public VirtualFile getRepository() {
    return ((RepositoryDisplay) repositorySelector.getSelectedItem()).repo;
  }

  /**
   * Display wrapper delegating toString/equals/hashCode to the wrapped root.
   * Made {@code static}: it never touches the enclosing component, and a non-static
   * inner class would otherwise pin the whole component via its hidden outer reference.
   */
  private static class RepositoryDisplay {
    @NotNull private final VirtualFile repo;

    public RepositoryDisplay(@NotNull VirtualFile repo) {
      this.repo = repo;
    }

    @Override
    public String toString() {
      return repo.getPresentableUrl();
    }

    @Override
    public boolean equals(Object obj) {
      return obj instanceof RepositoryDisplay && this.repo.equals(((RepositoryDisplay)obj).repo);
    }

    @Override
    public int hashCode() {
      return repo.hashCode();
    }
  }
}
| apache-2.0 |
leafclick/intellij-community | platform/structuralsearch/source/com/intellij/structuralsearch/DocumentBasedReplaceHandler.java | 2415 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.structuralsearch;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.structuralsearch.plugin.replace.ReplaceOptions;
import com.intellij.structuralsearch.plugin.replace.ReplacementInfo;
import java.util.HashMap;
import java.util.Map;
/**
* @author Eugene.Kudelevsky
*/
/**
 * Structural replace handler that applies replacements through the {@link Document}
 * rather than through PSI tree manipulation.
 * <p>
 * {@link #prepare} records, per {@link ReplacementInfo}, a greedy {@link RangeMarker}
 * covering all matched elements; {@link #replace} then substitutes the marker's current
 * range with the replacement text and commits the document. Range markers keep the
 * target ranges valid even as earlier replacements shift document offsets.
 *
 * @author Eugene.Kudelevsky
 */
public class DocumentBasedReplaceHandler extends StructuralReplaceHandler {
  private final Project myProject;
  // One marker per replacement, filled by prepare() and consumed by replace().
  private final Map<ReplacementInfo, RangeMarker> myRangeMarkers = new HashMap<>();

  public DocumentBasedReplaceHandler(Project project) {
    myProject = project;
  }

  @Override
  public void replace(ReplacementInfo info, ReplaceOptions options) {
    final RangeMarker rangeMarker = myRangeMarkers.get(info);
    final Document document = rangeMarker.getDocument();
    document.replaceString(rangeMarker.getStartOffset(), rangeMarker.getEndOffset(), info.getReplacement());
    // Commit so PSI is re-synchronized with the modified document text.
    PsiDocumentManager.getInstance(myProject).commitDocument(document);
  }

  @Override
  public void prepare(ReplacementInfo info) {
    final PsiElement firstElement = StructuralSearchUtil.getPresentableElement(info.getMatch(0));
    if (firstElement == null) return;
    final Document document = PsiDocumentManager.getInstance(myProject).getDocument(firstElement.getContainingFile());
    assert document != null;
    final TextRange range = firstElement.getTextRange();
    int startOffset = range.getStartOffset();
    int endOffset = range.getEndOffset();
    // Widen the range until it covers every matched element of this replacement.
    final int count = info.getMatchesCount();
    for (int i = 1; i < count; i++) {
      final PsiElement match = info.getMatch(i);
      if (match == null) {
        continue;
      }
      final TextRange range1 = match.getTextRange();
      startOffset = Math.min(startOffset, range1.getStartOffset());
      endOffset = Math.max(endOffset, range1.getEndOffset());
    }
    // Greedy on both sides so edits made exactly at the boundaries stay inside the marker.
    final RangeMarker rangeMarker = document.createRangeMarker(startOffset, endOffset);
    rangeMarker.setGreedyToLeft(true);
    rangeMarker.setGreedyToRight(true);
    myRangeMarkers.put(info, rangeMarker);
  }
}
| apache-2.0 |
kuujo/onos | utils/misc/src/test/java/org/onlab/packet/BasePacketTest.java | 1128 | /*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.packet;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
public class BasePacketTest {
@Test
public void testClone() {
Ethernet p1 = new Ethernet();
p1.sourceMACAddress = MacAddress.ONOS;
p1.destinationMACAddress = MacAddress.ZERO;
p1.payload = new Data("xyzzy".getBytes());
BasePacket copy1 = (BasePacket) p1.clone();
assertThat(p1, equalTo(copy1));
}
} | apache-2.0 |
minagri-rwanda/DHIS2-Agriculture | dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/i18n/I18nLocaleService.java | 2638 | package org.hisp.dhis.i18n;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.hisp.dhis.i18n.locale.I18nLocale;
/**
 * Service for managing {@link I18nLocale} instances: lookups by id/uid/{@link Locale},
 * persistence, deletion, counting and paged listing.
 */
public interface I18nLocaleService
{
    /**
     * Returns available languages in a mapping between code and name.
     */
    Map<String, String> getAvailableLanguages();

    /**
     * Returns available countries in a mapping between code and name.
     */
    Map<String, String> getAvailableCountries();

    /**
     * Adds a locale for the given language and country codes.
     *
     * @return whether the locale was added — NOTE(review): confirm the exact
     *         failure conditions (e.g. duplicate locale) against the implementation.
     */
    boolean addI18nLocale( String language, String country );

    /** Persists the given locale. */
    void saveI18nLocale( I18nLocale locale );

    /** Returns the locale with the given database id, if any. */
    I18nLocale getI18nLocale( int id );

    /** Returns the locale with the given UID, if any. */
    I18nLocale getI18nLocaleByUid( String uid );

    /** Returns the stored locale matching the given {@link Locale}, if any. */
    I18nLocale getI18nLocale( Locale locale );

    /** Deletes the given locale. */
    void deleteI18nLocale( I18nLocale locale );

    /** Returns the total number of stored locales. */
    int getI18nLocaleCount();

    /** Returns the number of stored locales matching the given name. */
    int getI18nLocaleCountByName( String name );

    /** Returns a page of locales, starting at {@code first}, at most {@code max} items. */
    List<I18nLocale> getI18nLocalesBetween( int first, int max );

    /** Returns a page of locales whose name matches {@code name} (like-style match). */
    List<I18nLocale> getI18nLocalesBetweenLikeName( String name, int first, int max );

    /** Returns all stored locales as plain {@link Locale} objects. */
    List<Locale> getAllLocales();
}
| bsd-3-clause |
AlexejK/lombok-intellij-plugin | src/test/java/de/plushnikov/intellij/plugin/thirdparty/LombokUtilsAllGetterTest.java | 2868 | package de.plushnikov.intellij.plugin.thirdparty;
import de.plushnikov.intellij.plugin.processor.field.AccessorsInfo;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
public class LombokUtilsAllGetterTest {
private static final AccessorsInfo DEFAULT_ACCESSORS = AccessorsInfo.build(false, false, false);
private final List<String> lombokResult = new ArrayList<String>();
private final List<String> result = new ArrayList<String>();
private void makeResults(String fieldName, boolean isBoolean, AccessorsInfo accessorsInfo) {
lombokResult.clear();
result.clear();
lombokResult.addAll(LombokHandlerUtil.toAllGetterNames(accessorsInfo, fieldName, isBoolean));
result.addAll(LombokUtils.toAllGetterNames(accessorsInfo, fieldName, isBoolean));
assertThat(result, is(lombokResult));
}
@Test
public void testToAllGetterNames_NonBoolean() throws Exception {
makeResults("myField", false, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("getMyField")));
}
@Test
public void testToAllGetterNames_NonBoolean_Uppercase() throws Exception {
makeResults("myField", false, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("getMyField")));
}
@Test
public void testToAllGetterNames_NonBoolean_Uppercase_Multiple() throws Exception {
makeResults("MYField", false, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("getMYField")));
}
@Test
public void testToAllGetterNames_Boolean() throws Exception {
makeResults("myField", true, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("getMyField", "isMyField")));
}
@Test
public void testToAllGetterNames_Boolean_Uppercase() throws Exception {
makeResults("MyField", true, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("getMyField", "isMyField")));
}
@Test
public void testToAllGetterNames_Boolean_is_Lowercase() throws Exception {
makeResults("ismyField", true, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("isIsmyField", "getIsmyField")));
}
@Test
public void testToAllGetterNames_Boolean_is_Uppercase() throws Exception {
makeResults("isMyField", true, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("isIsMyField", "getIsMyField", "getMyField", "isMyField")));
}
@Test
public void testToAllGetterNames_Boolean_IS() throws Exception {
makeResults("ISmyField", true, DEFAULT_ACCESSORS);
assertThat(result, is(Arrays.asList("getISmyField", "isISmyField")));
}
@Test
public void testToAllGetterNames_NonBoolean_Fluent() throws Exception {
makeResults("myField", false, AccessorsInfo.build(true, false, false));
assertThat(result, is(Arrays.asList("myField")));
}
} | bsd-3-clause |
minagri-rwanda/DHIS2-Agriculture | dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/program/ProgramIndicatorDeletionHandler.java | 2980 | package org.hisp.dhis.program;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import org.hisp.dhis.system.deletion.DeletionHandler;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Deletion handler that removes a program's indicators when the program itself
 * is deleted.
 *
 * @author Chau Thu Tran
 */
public class ProgramIndicatorDeletionHandler
    extends DeletionHandler
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    @Autowired
    private ProgramIndicatorService programIndicatorService;

    // -------------------------------------------------------------------------
    // Implementation methods
    // -------------------------------------------------------------------------

    @Override
    public String getClassName()
    {
        return ProgramIndicator.class.getSimpleName();
    }

    @Override
    public void deleteProgram( Program program )
    {
        // Copy first: deleting an indicator may modify the program's own collection,
        // which would otherwise break iteration mid-loop.
        Collection<ProgramIndicator> indicators = new HashSet<>( program.getProgramIndicators() );

        // Enhanced for-loop replaces the manual Iterator boilerplate; behavior is identical.
        for ( ProgramIndicator indicator : indicators )
        {
            programIndicatorService.deleteProgramIndicator( indicator );
        }
    }
}
| bsd-3-clause |