repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
iCrany/ExtendLoveJ | Extend_loveJ(test)/src/com/icrany/gravatar/Rating.java | 701 | package com.icrany.gravatar;
/**
* gravatar 头像的等级分类
* @author <a href="http://www.icrany.com">iCrany</a>
* 2014年9月3日 下午10:51:04
*/
public enum Rating {

    /** Suitable for display on all websites with any audience type. */
    G("g"),
    /** May contain rude gestures, provocatively dressed individuals, the lesser swear words, or mild violence. */
    PG("pg"),
    /** May contain such things as harsh profanity, intense violence, nudity, or hard drug use. */
    R("r"),
    /** May contain hardcore sexual imagery or extremely disturbing violence. */
    X("x");

    /** Lowercase rating code passed to Gravatar (the "r"/"rating" request parameter). */
    private final String rating;

    /**
     * @param rating the lowercase Gravatar rating code for this constant
     */
    private Rating(String rating) {
        // Bug fix: the original constructor ignored its argument, leaving the
        // field null so getRating() always returned null for every constant.
        this.rating = rating;
    }

    /**
     * Returns the lowercase rating code ("g", "pg", "r" or "x").
     *
     * @return the rating code, never {@code null}
     */
    public String getRating() {
        return this.rating;
    }
}
| apache-2.0 |
org-metaeffekt/metaeffekt-core | libraries/ae-common-kernel/src/test/java/org/metaeffekt/core/common/kernel/annotation/mock/AnnotatedClassInner.java | 877 | /*
* Copyright 2009-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.metaeffekt.core.common.kernel.annotation.mock;
/**
* Annotated Class with inner class. Used for Public Annotation tests.
*
* @author Jochen Kohler
*/
@MyPublic
public class AnnotatedClassInner {

    // Non-static member class deliberately left without its own annotation;
    // the annotation tests use it to check how @MyPublic on the enclosing
    // class applies to (or is looked up through) a nested type.
    public class AnnotatedInner {
    }
}
| apache-2.0 |
davidzchen/bazel | src/test/java/com/google/devtools/build/lib/query2/cquery/BuildOutputFormatterCallbackTest.java | 10006 | // Copyright 2019 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.query2.cquery;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST;
import static com.google.devtools.build.lib.packages.BuildType.OUTPUT;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.util.MockRule;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.query2.PostAnalysisQueryEnvironment;
import com.google.devtools.build.lib.query2.engine.QueryEnvironment.Setting;
import com.google.devtools.build.lib.query2.engine.QueryExpression;
import com.google.devtools.build.lib.query2.engine.QueryParser;
import com.google.devtools.build.lib.query2.query.aspectresolvers.AspectResolver.Mode;
import com.google.devtools.build.lib.util.FileTypeSet;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
/** Tests cquery's BUILD output format. */
public class BuildOutputFormatterCallbackTest extends ConfiguredTargetQueryTest {

  // Formatter options handed to BuildOutputFormatterCallback; initialized in setUp.
  private CqueryOptions options;
  // Reporter wired to collect events into the list below (available for inspection).
  private Reporter reporter;
  private final List<Event> events = new ArrayList<>();

  // Minimal mock rule used by every test: a label-list "deps" attribute
  // (accepting any file type) plus an "out" output attribute.
  private static MockRule.State simpleRule() {
    return MockRule.define(
        "my_rule",
        (builder, env) ->
            builder
                .add(attr("deps", LABEL_LIST).allowedFileTypes(FileTypeSet.ANY_FILE))
                .add(attr("out", OUTPUT)));
  }

  @Before
  public final void setUpCqueryOptions() throws Exception {
    this.options = new CqueryOptions();
    // TODO(bazel-team): reduce the confusion about these two seemingly similar settings.
    // options.aspectDeps impacts how proto and similar output formatters output aspect results.
    // Setting.INCLUDE_ASPECTS impacts whether or not aspect dependencies are included when
    // following target deps. See CommonQueryOptions for further flag details.
    options.aspectDeps = Mode.OFF;
    helper.setQuerySettings(Setting.INCLUDE_ASPECTS);
    this.reporter = new Reporter(new EventBus(), events::add);
    helper.useRuleClassProvider(
        setRuleClassProviders(BuildOutputFormatterCallbackTest::simpleRule).build());
  }

  // Parses and evaluates the given cquery expression through a fresh
  // BuildOutputFormatterCallback, returning the formatted BUILD output split
  // into one string per line.
  private List<String> getOutput(String queryExpression) throws Exception {
    QueryExpression expression = QueryParser.parse(queryExpression, getDefaultFunctions());
    Set<String> targetPatternSet = new LinkedHashSet<>();
    expression.collectTargetPatterns(targetPatternSet);
    helper.setQuerySettings(Setting.NO_IMPLICIT_DEPS);
    PostAnalysisQueryEnvironment<ConfiguredTarget> env =
        ((ConfiguredTargetQueryHelper) helper).getPostAnalysisQueryEnvironment(targetPatternSet);
    // The callback writes the BUILD-format text to this stream; capture it for the caller.
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    BuildOutputFormatterCallback callback =
        new BuildOutputFormatterCallback(
            reporter,
            options,
            new PrintStream(output),
            getHelper().getSkyframeExecutor(),
            env.getAccessor());
    env.evaluateQuery(expression, callback);
    return Arrays.asList(output.toString().split(System.lineSeparator()));
  }

  // A select() in an attribute must be printed already resolved to the branch
  // chosen by the active configuration (--test_arg here).
  @Test
  public void selectInAttribute() throws Exception {
    writeFile(
        "test/BUILD",
        "my_rule(",
        " name = 'my_rule',",
        " deps = select({",
        " ':garfield': ['lasagna.java', 'naps.java'],",
        " '//conditions:default': ['mondays.java']",
        " })",
        ")",
        "config_setting(",
        " name = 'garfield',",
        " values = {'test_arg': 'cat'}",
        ")");
    // Matching config_setting branch (:garfield) is selected.
    getHelper().useConfiguration("--test_arg=cat");
    assertThat(getOutput("//test:my_rule"))
        .containsExactly(
            "# /workspace/test/BUILD:1:8",
            "my_rule(",
            " name = \"my_rule\",",
            " deps = [\"//test:lasagna.java\", \"//test:naps.java\"],",
            ")",
            "# Rule my_rule instantiated at (most recent call last):",
            "# /workspace/test/BUILD:1:8 in <toplevel>")
        .inOrder();
    // No branch matches, so the //conditions:default branch is printed.
    getHelper().useConfiguration("--test_arg=hound");
    assertThat(getOutput("//test:my_rule"))
        .containsExactly(
            "# /workspace/test/BUILD:1:8",
            "my_rule(",
            " name = \"my_rule\",",
            " deps = [\"//test:mondays.java\"],",
            ")",
            "# Rule my_rule instantiated at (most recent call last):",
            "# /workspace/test/BUILD:1:8 in <toplevel>")
        .inOrder();
  }

  // Querying an alias prints the alias rule itself with its resolved 'actual' label.
  @Test
  public void alias() throws Exception {
    writeFile(
        "test/BUILD",
        "my_rule(",
        " name = 'my_rule',",
        " deps = select({",
        " ':garfield': ['lasagna.java', 'naps.java'],",
        " '//conditions:default': ['mondays.java']",
        " })",
        ")",
        "config_setting(",
        " name = 'garfield',",
        " values = {'test_arg': 'cat'}",
        ")",
        "alias(",
        " name = 'my_alias',",
        " actual = ':my_rule'",
        ")",
        "# Rule my_alias instantiated at (most recent call last):",
        "# /workspace/test/BUILD:12:6 in <toplevel>");
    assertThat(getOutput("//test:my_alias"))
        .containsExactly(
            "# /workspace/test/BUILD:12:6",
            "alias(",
            " name = \"my_alias\",",
            " actual = \"//test:my_rule\",",
            ")",
            "# Rule my_alias instantiated at (most recent call last):",
            "# /workspace/test/BUILD:12:6 in <toplevel>")
        .inOrder();
  }

  // An alias whose 'actual' is itself a select() must print the configuration-
  // resolved target, flipping with --test_arg like any other select.
  @Test
  public void aliasWithSelect() throws Exception {
    writeFile(
        "test/BUILD",
        "my_rule(",
        " name = 'my_first_rule',",
        " deps = ['penne.java'],",
        ")",
        "my_rule(",
        " name = 'my_second_rule',",
        " deps = ['linguini.java'],",
        ")",
        "config_setting(",
        " name = 'garfield',",
        " values = {'test_arg': 'cat'}",
        ")",
        "alias(",
        " name = 'my_alias',",
        " actual = select({",
        " ':garfield': ':my_first_rule',",
        " '//conditions:default': ':my_second_rule'",
        " })",
        ")");
    getHelper().useConfiguration("--test_arg=cat");
    assertThat(getOutput("//test:my_alias"))
        .containsExactly(
            "# /workspace/test/BUILD:13:6",
            "alias(",
            " name = \"my_alias\",",
            " actual = \"//test:my_first_rule\",",
            ")",
            "# Rule my_alias instantiated at (most recent call last):",
            "# /workspace/test/BUILD:13:6 in <toplevel>")
        .inOrder();
    getHelper().useConfiguration("--test_arg=hound");
    assertThat(getOutput("//test:my_alias"))
        .containsExactly(
            "# /workspace/test/BUILD:13:6",
            "alias(",
            " name = \"my_alias\",",
            " actual = \"//test:my_second_rule\",",
            ")",
            "# Rule my_alias instantiated at (most recent call last):",
            "# /workspace/test/BUILD:13:6 in <toplevel>")
        .inOrder();
  }

  // A plain source file target produces no BUILD output (just an empty line).
  @Test
  public void sourceFile() throws Exception {
    writeFile(
        "test/BUILD",
        "my_rule(",
        " name = 'my_rule',",
        " deps = select({",
        " ':garfield': ['lasagna.java', 'naps.java'],",
        " '//conditions:default': ['mondays.java']",
        " })",
        ")",
        "config_setting(",
        " name = 'garfield',",
        " values = {'test_arg': 'cat'}",
        ")");
    assertThat(getOutput("//test:lasagna.java")).containsExactly("");
  }

  // Querying an output file prints its generating rule, with the select()
  // in that rule resolved against the active configuration.
  @Test
  public void outputFile() throws Exception {
    writeFile(
        "test/BUILD",
        "my_rule(",
        " name = 'my_rule',",
        " deps = select({",
        " ':garfield': ['lasagna.java', 'naps.java'],",
        " '//conditions:default': ['mondays.java']",
        " }),",
        " out = 'output.txt'",
        ")",
        "config_setting(",
        " name = 'garfield',",
        " values = {'test_arg': 'cat'}",
        ")");
    getHelper().useConfiguration("--test_arg=cat");
    assertThat(getOutput("//test:output.txt"))
        .containsExactly(
            "# /workspace/test/BUILD:1:8",
            "my_rule(",
            " name = \"my_rule\",",
            " deps = [\"//test:lasagna.java\", \"//test:naps.java\"],",
            " out = \"//test:output.txt\",",
            ")",
            "# Rule my_rule instantiated at (most recent call last):",
            "# /workspace/test/BUILD:1:8 in <toplevel>")
        .inOrder();
    getHelper().useConfiguration("--test_arg=hound");
    assertThat(getOutput("//test:output.txt"))
        .containsExactly(
            "# /workspace/test/BUILD:1:8",
            "my_rule(",
            " name = \"my_rule\",",
            " deps = [\"//test:mondays.java\"],",
            " out = \"//test:output.txt\",",
            ")",
            "# Rule my_rule instantiated at (most recent call last):",
            "# /workspace/test/BUILD:1:8 in <toplevel>")
        .inOrder();
  }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-gamelift/src/main/java/com/amazonaws/services/gamelift/model/UpdateFleetCapacityRequest.java | 8662 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.gamelift.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Represents the input for a request action.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/gamelift-2015-10-01/UpdateFleetCapacity" target="_top">AWS API
* Documentation</a>
*/
// NOTE(review): this class is code-generator output (see @Generated below);
// keep hand edits to comments only — logic changes belong in the generator.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateFleetCapacityRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * Unique identifier for a fleet to update capacity for.
     * </p>
     */
    private String fleetId;
    /**
     * <p>
     * Number of EC2 instances you want this fleet to host.
     * </p>
     */
    private Integer desiredInstances;
    /**
     * <p>
     * Minimum value allowed for the fleet's instance count. Default if not set is 0.
     * </p>
     */
    private Integer minSize;
    /**
     * <p>
     * Maximum value allowed for the fleet's instance count. Default if not set is 1.
     * </p>
     */
    private Integer maxSize;

    /**
     * <p>
     * Unique identifier for a fleet to update capacity for.
     * </p>
     *
     * @param fleetId
     *        Unique identifier for a fleet to update capacity for.
     */
    public void setFleetId(String fleetId) {
        this.fleetId = fleetId;
    }

    /**
     * <p>
     * Unique identifier for a fleet to update capacity for.
     * </p>
     *
     * @return Unique identifier for a fleet to update capacity for.
     */
    public String getFleetId() {
        return this.fleetId;
    }

    /**
     * <p>
     * Unique identifier for a fleet to update capacity for.
     * </p>
     *
     * @param fleetId
     *        Unique identifier for a fleet to update capacity for.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateFleetCapacityRequest withFleetId(String fleetId) {
        setFleetId(fleetId);
        return this;
    }

    /**
     * <p>
     * Number of EC2 instances you want this fleet to host.
     * </p>
     *
     * @param desiredInstances
     *        Number of EC2 instances you want this fleet to host.
     */
    public void setDesiredInstances(Integer desiredInstances) {
        this.desiredInstances = desiredInstances;
    }

    /**
     * <p>
     * Number of EC2 instances you want this fleet to host.
     * </p>
     *
     * @return Number of EC2 instances you want this fleet to host.
     */
    public Integer getDesiredInstances() {
        return this.desiredInstances;
    }

    /**
     * <p>
     * Number of EC2 instances you want this fleet to host.
     * </p>
     *
     * @param desiredInstances
     *        Number of EC2 instances you want this fleet to host.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateFleetCapacityRequest withDesiredInstances(Integer desiredInstances) {
        setDesiredInstances(desiredInstances);
        return this;
    }

    /**
     * <p>
     * Minimum value allowed for the fleet's instance count. Default if not set is 0.
     * </p>
     *
     * @param minSize
     *        Minimum value allowed for the fleet's instance count. Default if not set is 0.
     */
    public void setMinSize(Integer minSize) {
        this.minSize = minSize;
    }

    /**
     * <p>
     * Minimum value allowed for the fleet's instance count. Default if not set is 0.
     * </p>
     *
     * @return Minimum value allowed for the fleet's instance count. Default if not set is 0.
     */
    public Integer getMinSize() {
        return this.minSize;
    }

    /**
     * <p>
     * Minimum value allowed for the fleet's instance count. Default if not set is 0.
     * </p>
     *
     * @param minSize
     *        Minimum value allowed for the fleet's instance count. Default if not set is 0.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateFleetCapacityRequest withMinSize(Integer minSize) {
        setMinSize(minSize);
        return this;
    }

    /**
     * <p>
     * Maximum value allowed for the fleet's instance count. Default if not set is 1.
     * </p>
     *
     * @param maxSize
     *        Maximum value allowed for the fleet's instance count. Default if not set is 1.
     */
    public void setMaxSize(Integer maxSize) {
        this.maxSize = maxSize;
    }

    /**
     * <p>
     * Maximum value allowed for the fleet's instance count. Default if not set is 1.
     * </p>
     *
     * @return Maximum value allowed for the fleet's instance count. Default if not set is 1.
     */
    public Integer getMaxSize() {
        return this.maxSize;
    }

    /**
     * <p>
     * Maximum value allowed for the fleet's instance count. Default if not set is 1.
     * </p>
     *
     * @param maxSize
     *        Maximum value allowed for the fleet's instance count. Default if not set is 1.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateFleetCapacityRequest withMaxSize(Integer maxSize) {
        setMaxSize(maxSize);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only fields that are set are included in the output.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getFleetId() != null)
            sb.append("FleetId: ").append(getFleetId()).append(",");
        if (getDesiredInstances() != null)
            sb.append("DesiredInstances: ").append(getDesiredInstances()).append(",");
        if (getMinSize() != null)
            sb.append("MinSize: ").append(getMinSize()).append(",");
        if (getMaxSize() != null)
            sb.append("MaxSize: ").append(getMaxSize());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof UpdateFleetCapacityRequest == false)
            return false;
        UpdateFleetCapacityRequest other = (UpdateFleetCapacityRequest) obj;
        // For each field, the XOR check rejects the case where exactly one side
        // has the field set; equals() is only consulted when both are non-null.
        if (other.getFleetId() == null ^ this.getFleetId() == null)
            return false;
        if (other.getFleetId() != null && other.getFleetId().equals(this.getFleetId()) == false)
            return false;
        if (other.getDesiredInstances() == null ^ this.getDesiredInstances() == null)
            return false;
        if (other.getDesiredInstances() != null && other.getDesiredInstances().equals(this.getDesiredInstances()) == false)
            return false;
        if (other.getMinSize() == null ^ this.getMinSize() == null)
            return false;
        if (other.getMinSize() != null && other.getMinSize().equals(this.getMinSize()) == false)
            return false;
        if (other.getMaxSize() == null ^ this.getMaxSize() == null)
            return false;
        if (other.getMaxSize() != null && other.getMaxSize().equals(this.getMaxSize()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation over all fields; unset fields hash as 0,
        // keeping hashCode consistent with the null handling in equals above.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getFleetId() == null) ? 0 : getFleetId().hashCode());
        hashCode = prime * hashCode + ((getDesiredInstances() == null) ? 0 : getDesiredInstances().hashCode());
        hashCode = prime * hashCode + ((getMinSize() == null) ? 0 : getMinSize().hashCode());
        hashCode = prime * hashCode + ((getMaxSize() == null) ? 0 : getMaxSize().hashCode());
        return hashCode;
    }

    @Override
    public UpdateFleetCapacityRequest clone() {
        // Shallow copy via Object.clone() is sufficient: all fields are immutable
        // (String/Integer).
        return (UpdateFleetCapacityRequest) super.clone();
    }
}
| apache-2.0 |
mikewalch/accumulo | proxy/src/main/java/org/apache/accumulo/proxy/thrift/CompactionStrategyConfig.java | 19029 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.10.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.accumulo.proxy.thrift;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.10.0)")
public class CompactionStrategyConfig implements org.apache.thrift.TBase<CompactionStrategyConfig, CompactionStrategyConfig._Fields>, java.io.Serializable, Cloneable, Comparable<CompactionStrategyConfig> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CompactionStrategyConfig");
private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField OPTIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("options", org.apache.thrift.protocol.TType.MAP, (short)2);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new CompactionStrategyConfigStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new CompactionStrategyConfigTupleSchemeFactory();
public java.lang.String className; // required
public java.util.Map<java.lang.String,java.lang.String> options; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
CLASS_NAME((short)1, "className"),
OPTIONS((short)2, "options");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // CLASS_NAME
return CLASS_NAME;
case 2: // OPTIONS
return OPTIONS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("className", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.OPTIONS, new org.apache.thrift.meta_data.FieldMetaData("options", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(CompactionStrategyConfig.class, metaDataMap);
}
public CompactionStrategyConfig() {
}
public CompactionStrategyConfig(
java.lang.String className,
java.util.Map<java.lang.String,java.lang.String> options)
{
this();
this.className = className;
this.options = options;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public CompactionStrategyConfig(CompactionStrategyConfig other) {
if (other.isSetClassName()) {
this.className = other.className;
}
if (other.isSetOptions()) {
java.util.Map<java.lang.String,java.lang.String> __this__options = new java.util.HashMap<java.lang.String,java.lang.String>(other.options);
this.options = __this__options;
}
}
public CompactionStrategyConfig deepCopy() {
return new CompactionStrategyConfig(this);
}
@Override
public void clear() {
this.className = null;
this.options = null;
}
public java.lang.String getClassName() {
return this.className;
}
public CompactionStrategyConfig setClassName(java.lang.String className) {
this.className = className;
return this;
}
public void unsetClassName() {
this.className = null;
}
/** Returns true if field className is set (has been assigned a value) and false otherwise */
public boolean isSetClassName() {
return this.className != null;
}
public void setClassNameIsSet(boolean value) {
if (!value) {
this.className = null;
}
}
public int getOptionsSize() {
return (this.options == null) ? 0 : this.options.size();
}
public void putToOptions(java.lang.String key, java.lang.String val) {
if (this.options == null) {
this.options = new java.util.HashMap<java.lang.String,java.lang.String>();
}
this.options.put(key, val);
}
public java.util.Map<java.lang.String,java.lang.String> getOptions() {
return this.options;
}
public CompactionStrategyConfig setOptions(java.util.Map<java.lang.String,java.lang.String> options) {
this.options = options;
return this;
}
public void unsetOptions() {
this.options = null;
}
/** Returns true if field options is set (has been assigned a value) and false otherwise */
public boolean isSetOptions() {
return this.options != null;
}
public void setOptionsIsSet(boolean value) {
if (!value) {
this.options = null;
}
}
public void setFieldValue(_Fields field, java.lang.Object value) {
switch (field) {
case CLASS_NAME:
if (value == null) {
unsetClassName();
} else {
setClassName((java.lang.String)value);
}
break;
case OPTIONS:
if (value == null) {
unsetOptions();
} else {
setOptions((java.util.Map<java.lang.String,java.lang.String>)value);
}
break;
}
}
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case CLASS_NAME:
return getClassName();
case OPTIONS:
return getOptions();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case CLASS_NAME:
return isSetClassName();
case OPTIONS:
return isSetOptions();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof CompactionStrategyConfig)
return this.equals((CompactionStrategyConfig)that);
return false;
}
public boolean equals(CompactionStrategyConfig that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_className = true && this.isSetClassName();
boolean that_present_className = true && that.isSetClassName();
if (this_present_className || that_present_className) {
if (!(this_present_className && that_present_className))
return false;
if (!this.className.equals(that.className))
return false;
}
boolean this_present_options = true && this.isSetOptions();
boolean that_present_options = true && that.isSetOptions();
if (this_present_options || that_present_options) {
if (!(this_present_options && that_present_options))
return false;
if (!this.options.equals(that.options))
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + ((isSetClassName()) ? 131071 : 524287);
if (isSetClassName())
hashCode = hashCode * 8191 + className.hashCode();
hashCode = hashCode * 8191 + ((isSetOptions()) ? 131071 : 524287);
if (isSetOptions())
hashCode = hashCode * 8191 + options.hashCode();
return hashCode;
}
@Override
public int compareTo(CompactionStrategyConfig other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.valueOf(isSetClassName()).compareTo(other.isSetClassName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetClassName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.className, other.className);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(isSetOptions()).compareTo(other.isSetOptions());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetOptions()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.options, other.options);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("CompactionStrategyConfig(");
boolean first = true;
sb.append("className:");
if (this.className == null) {
sb.append("null");
} else {
sb.append(this.className);
}
first = false;
if (!first) sb.append(", ");
sb.append("options:");
if (this.options == null) {
sb.append("null");
} else {
sb.append(this.options);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class CompactionStrategyConfigStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public CompactionStrategyConfigStandardScheme getScheme() {
return new CompactionStrategyConfigStandardScheme();
}
}
private static class CompactionStrategyConfigStandardScheme extends org.apache.thrift.scheme.StandardScheme<CompactionStrategyConfig> {
public void read(org.apache.thrift.protocol.TProtocol iprot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // CLASS_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.className = iprot.readString();
struct.setClassNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // OPTIONS
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
org.apache.thrift.protocol.TMap _map154 = iprot.readMapBegin();
struct.options = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map154.size);
java.lang.String _key155;
java.lang.String _val156;
for (int _i157 = 0; _i157 < _map154.size; ++_i157)
{
_key155 = iprot.readString();
_val156 = iprot.readString();
struct.options.put(_key155, _val156);
}
iprot.readMapEnd();
}
struct.setOptionsIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.className != null) {
oprot.writeFieldBegin(CLASS_NAME_FIELD_DESC);
oprot.writeString(struct.className);
oprot.writeFieldEnd();
}
if (struct.options != null) {
oprot.writeFieldBegin(OPTIONS_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.options.size()));
for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter158 : struct.options.entrySet())
{
oprot.writeString(_iter158.getKey());
oprot.writeString(_iter158.getValue());
}
oprot.writeMapEnd();
}
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
/** Factory registered with the Thrift runtime to obtain the tuple-scheme codec. */
private static class CompactionStrategyConfigTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
  public CompactionStrategyConfigTupleScheme getScheme() {
    return new CompactionStrategyConfigTupleScheme();
  }
}
/**
 * Tuple-scheme codec: a compact encoding that writes a leading bitset of the
 * fields that are present, followed by the raw field values in declared order
 * (no per-field type/id tags, unlike the standard scheme). Reader and writer
 * must therefore agree exactly on field order and count.
 */
private static class CompactionStrategyConfigTupleScheme extends org.apache.thrift.scheme.TupleScheme<CompactionStrategyConfig> {

  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    // Bit i of the bitset records whether optional field i is present.
    java.util.BitSet optionals = new java.util.BitSet();
    if (struct.isSetClassName()) {
      optionals.set(0);
    }
    if (struct.isSetOptions()) {
      optionals.set(1);
    }
    oprot.writeBitSet(optionals, 2);
    if (struct.isSetClassName()) {
      oprot.writeString(struct.className);
    }
    if (struct.isSetOptions()) {
      {
        // Maps are encoded as a size followed by alternating key/value strings.
        oprot.writeI32(struct.options.size());
        for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter159 : struct.options.entrySet())
        {
          oprot.writeString(_iter159.getKey());
          oprot.writeString(_iter159.getValue());
        }
      }
    }
  }

  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    // Mirror of write(): the bitset tells us which optional fields follow.
    java.util.BitSet incoming = iprot.readBitSet(2);
    if (incoming.get(0)) {
      struct.className = iprot.readString();
      struct.setClassNameIsSet(true);
    }
    if (incoming.get(1)) {
      {
        // Entry count is read first so the map can be pre-sized (2*size keeps the load factor low).
        org.apache.thrift.protocol.TMap _map160 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
        struct.options = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map160.size);
        java.lang.String _key161;
        java.lang.String _val162;
        for (int _i163 = 0; _i163 < _map160.size; ++_i163)
        {
          _key161 = iprot.readString();
          _val162 = iprot.readString();
          struct.options.put(_key161, _val162);
        }
      }
      struct.setOptionsIsSet(true);
    }
  }
}
// Selects the standard or tuple codec based on the scheme the protocol declares.
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
  return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
private static void unusedMethod() {}
}
| apache-2.0 |
ifnul/ums-backend | is-lnu-converter/src/test/java/org/lnu/is/converter/mark/scale/type/MarkScaleTypeConverterTest.java | 1612 | package org.lnu.is.converter.mark.scale.type;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import org.lnu.is.converter.mark.scale.type.MarkScaleTypeConverter;
import org.lnu.is.domain.mark.scale.type.MarkScaleType;
import org.lnu.is.resource.mark.scale.type.MarkScaleTypeResource;
/**
 * Unit tests for {@link MarkScaleTypeConverter}: verifies that a single entity
 * and a list of entities are mapped onto equal resource representations.
 */
public class MarkScaleTypeConverterTest {

	private MarkScaleTypeConverter converter = new MarkScaleTypeConverter();

	@Test
	public void testConvert() throws Exception {
		// Given: an entity with a name and an abbreviated name
		String name = "MarkScaleN";
		String abbrName = "AN";

		MarkScaleType entity = new MarkScaleType();
		entity.setName(name);
		entity.setAbbrName(abbrName);

		MarkScaleTypeResource expectedResource = new MarkScaleTypeResource();
		expectedResource.setName(name);
		expectedResource.setAbbrName(abbrName);

		// When
		MarkScaleTypeResource actualResource = converter.convert(entity);

		// Then
		assertEquals(expectedResource, actualResource);
	}

	@Test
	public void testConvertAll() throws Exception {
		// Given: a one-element list of entities with id, name and abbreviation set
		Long id = 1l;
		String abbrName = "abbr Name";
		String name = "n a m e";

		MarkScaleType entity = new MarkScaleType();
		entity.setId(id);
		entity.setAbbrName(abbrName);
		entity.setName(name);
		List<MarkScaleType> entities = Arrays.asList(entity);

		MarkScaleTypeResource expectedResource = new MarkScaleTypeResource();
		expectedResource.setId(id);
		expectedResource.setAbbrName(abbrName);
		expectedResource.setName(name);
		List<MarkScaleTypeResource> expectedResources = Arrays.asList(expectedResource);

		// When
		List<MarkScaleTypeResource> actualResources = converter.convertAll(entities);

		// Then
		assertEquals(expectedResources, actualResources);
	}
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-ssm/src/main/java/com/amazonaws/services/simplesystemsmanagement/model/RegisterTargetWithMaintenanceWindowResult.java | 4195 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simplesystemsmanagement.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/RegisterTargetWithMaintenanceWindow"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RegisterTargetWithMaintenanceWindowResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
        Cloneable {

    /**
     * <p>
     * The ID of the target definition in this maintenance window.
     * </p>
     */
    private String windowTargetId;

    /**
     * <p>
     * The ID of the target definition in this maintenance window.
     * </p>
     *
     * @param windowTargetId
     *        The ID of the target definition in this maintenance window.
     */
    public void setWindowTargetId(String windowTargetId) {
        this.windowTargetId = windowTargetId;
    }

    /**
     * <p>
     * The ID of the target definition in this maintenance window.
     * </p>
     *
     * @return The ID of the target definition in this maintenance window.
     */
    public String getWindowTargetId() {
        return this.windowTargetId;
    }

    /**
     * <p>
     * The ID of the target definition in this maintenance window.
     * </p>
     *
     * @param windowTargetId
     *        The ID of the target definition in this maintenance window.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RegisterTargetWithMaintenanceWindowResult withWindowTargetId(String windowTargetId) {
        // Delegates to the setter so the wither and the setter share one code path.
        setWindowTargetId(windowTargetId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getWindowTargetId() != null)
            sb.append("WindowTargetId: ").append(getWindowTargetId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof RegisterTargetWithMaintenanceWindowResult == false)
            return false;
        RegisterTargetWithMaintenanceWindowResult other = (RegisterTargetWithMaintenanceWindowResult) obj;
        // "a ^ b" is true when exactly one side is null, i.e. the fields differ.
        if (other.getWindowTargetId() == null ^ this.getWindowTargetId() == null)
            return false;
        if (other.getWindowTargetId() != null && other.getWindowTargetId().equals(this.getWindowTargetId()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getWindowTargetId() == null) ? 0 : getWindowTargetId().hashCode());
        return hashCode;
    }

    @Override
    public RegisterTargetWithMaintenanceWindowResult clone() {
        try {
            return (RegisterTargetWithMaintenanceWindowResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable; translate to unchecked.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-glue/src/main/java/com/amazonaws/services/glue/model/TransformParameters.java | 8899 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* The algorithm-specific parameters that are associated with the machine learning transform.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/TransformParameters" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TransformParameters implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The type of machine learning transform.
     * </p>
     * <p>
     * For information about the types of machine learning transforms, see <a
     * href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     * Learning Transforms</a>.
     * </p>
     */
    private String transformType;
    /**
     * <p>
     * The parameters for the find matches algorithm.
     * </p>
     */
    private FindMatchesParameters findMatchesParameters;

    /**
     * <p>
     * The type of machine learning transform.
     * </p>
     * <p>
     * For information about the types of machine learning transforms, see <a
     * href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     * Learning Transforms</a>.
     * </p>
     *
     * @param transformType
     *        The type of machine learning transform.</p>
     *        <p>
     *        For information about the types of machine learning transforms, see <a
     *        href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     *        Learning Transforms</a>.
     * @see TransformType
     */
    public void setTransformType(String transformType) {
        this.transformType = transformType;
    }

    /**
     * <p>
     * The type of machine learning transform.
     * </p>
     * <p>
     * For information about the types of machine learning transforms, see <a
     * href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     * Learning Transforms</a>.
     * </p>
     *
     * @return The type of machine learning transform.</p>
     *         <p>
     *         For information about the types of machine learning transforms, see <a
     *         href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     *         Learning Transforms</a>.
     * @see TransformType
     */
    public String getTransformType() {
        return this.transformType;
    }

    /**
     * <p>
     * The type of machine learning transform.
     * </p>
     * <p>
     * For information about the types of machine learning transforms, see <a
     * href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     * Learning Transforms</a>.
     * </p>
     *
     * @param transformType
     *        The type of machine learning transform.</p>
     *        <p>
     *        For information about the types of machine learning transforms, see <a
     *        href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     *        Learning Transforms</a>.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransformType
     */
    public TransformParameters withTransformType(String transformType) {
        setTransformType(transformType);
        return this;
    }

    /**
     * <p>
     * The type of machine learning transform.
     * </p>
     * <p>
     * For information about the types of machine learning transforms, see <a
     * href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     * Learning Transforms</a>.
     * </p>
     *
     * @param transformType
     *        The type of machine learning transform.</p>
     *        <p>
     *        For information about the types of machine learning transforms, see <a
     *        href="http://docs.aws.amazon.com/glue/latest/dg/add-job-machine-learning-transform.html">Creating Machine
     *        Learning Transforms</a>.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransformType
     */
    public TransformParameters withTransformType(TransformType transformType) {
        // NOTE(review): dereferences the enum directly, so passing null throws NPE
        // here rather than storing null — matches the SDK convention for enum withers.
        this.transformType = transformType.toString();
        return this;
    }

    /**
     * <p>
     * The parameters for the find matches algorithm.
     * </p>
     *
     * @param findMatchesParameters
     *        The parameters for the find matches algorithm.
     */
    public void setFindMatchesParameters(FindMatchesParameters findMatchesParameters) {
        this.findMatchesParameters = findMatchesParameters;
    }

    /**
     * <p>
     * The parameters for the find matches algorithm.
     * </p>
     *
     * @return The parameters for the find matches algorithm.
     */
    public FindMatchesParameters getFindMatchesParameters() {
        return this.findMatchesParameters;
    }

    /**
     * <p>
     * The parameters for the find matches algorithm.
     * </p>
     *
     * @param findMatchesParameters
     *        The parameters for the find matches algorithm.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransformParameters withFindMatchesParameters(FindMatchesParameters findMatchesParameters) {
        setFindMatchesParameters(findMatchesParameters);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getTransformType() != null)
            sb.append("TransformType: ").append(getTransformType()).append(",");
        if (getFindMatchesParameters() != null)
            sb.append("FindMatchesParameters: ").append(getFindMatchesParameters());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof TransformParameters == false)
            return false;
        TransformParameters other = (TransformParameters) obj;
        // "a ^ b" is true when exactly one side is null, i.e. the fields differ.
        if (other.getTransformType() == null ^ this.getTransformType() == null)
            return false;
        if (other.getTransformType() != null && other.getTransformType().equals(this.getTransformType()) == false)
            return false;
        if (other.getFindMatchesParameters() == null ^ this.getFindMatchesParameters() == null)
            return false;
        if (other.getFindMatchesParameters() != null && other.getFindMatchesParameters().equals(this.getFindMatchesParameters()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getTransformType() == null) ? 0 : getTransformType().hashCode());
        hashCode = prime * hashCode + ((getFindMatchesParameters() == null) ? 0 : getFindMatchesParameters().hashCode());
        return hashCode;
    }

    @Override
    public TransformParameters clone() {
        try {
            return (TransformParameters) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable; translate to unchecked.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Delegates wire serialization to the generated marshaller singleton.
        com.amazonaws.services.glue.model.transform.TransformParametersMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| apache-2.0 |
richharms/IntroToLucene | Examples/09/src/java/Example.java | 2541 | import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
/**
 * Demonstrates Lucene tokenization: runs the same input string through a
 * {@link StandardTokenizer} and a {@link WhitespaceTokenizer}, printing every
 * token together with its type and character offsets.
 */
public class Example {

	// Based on: http://lucidbox.com/?p=26

	public static void main(String args[])
			throws Exception {
		final String input = "This is a test. How about that?! Huh?";

		System.out.println("input = \"" + input + "\"");
		System.out.println();

		TokenizerExamples(input);
	}

	/** Runs each tokenizer demo in turn against the given input. */
	public static void TokenizerExamples(String input)
			throws IOException {
		StandardTokenizerExample(input);
		WhitespaceLowerCaseAnalyzerExample(input);
	}

	/** Tokenizes with Lucene's grammar-based StandardTokenizer. */
	public static void StandardTokenizerExample(String input)
			throws IOException {
		try (Tokenizer tokenizer = new StandardTokenizer()) {
			ExecuteTokenizerExample(input, tokenizer);
		}
	}

	/** Tokenizes with the simple whitespace-splitting tokenizer. */
	public static void WhitespaceLowerCaseAnalyzerExample(String input)
			throws IOException {
		try (Tokenizer tokenizer = new WhitespaceTokenizer()) {
			ExecuteTokenizerExample(input, tokenizer);
		}
	}

	/**
	 * Drives the TokenStream workflow (setReader, reset, incrementToken, end),
	 * printing each token's type, offsets and text, and returns the token texts
	 * in stream order.
	 */
	public static List<String> ExecuteTokenizerExample(String input, Tokenizer tokenizer)
			throws IOException {
		System.out.println("Tokenizer = " + tokenizer.getClass().getSimpleName());

		tokenizer.setReader(new StringReader(input));

		// Attribute views are live: they reflect the current token after each
		// successful incrementToken() call.
		CharTermAttribute termAttr = tokenizer.getAttribute(CharTermAttribute.class);
		TypeAttribute typeAttr = tokenizer.getAttribute(TypeAttribute.class);
		OffsetAttribute offsetAttr = tokenizer.getAttribute(OffsetAttribute.class);

		List<String> tokens = new ArrayList<>();
		tokenizer.reset();
		while (tokenizer.incrementToken()) {
			String term = termAttr.toString();
			tokens.add(term);
			System.out.println("  type = " + typeAttr.type()
					+ ", startOffset = " + offsetAttr.startOffset()
					+ ", endOffset = " + offsetAttr.endOffset()
					+ ": \"" + term + "\"");
		}
		tokenizer.end();

		System.out.println();
		return tokens;
	}
}
| apache-2.0 |
l-ra/openeet-java | openeet-lite/src/main/java/openeet/lite/SSLSocketFactoryTLS11.java | 1937 | package openeet.lite;
import java.io.IOException;
import java.net.InetAddress;
import java.net.Socket;
import java.net.UnknownHostException;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
/**
 * An {@link SSLSocketFactory} decorator that restricts every created
 * {@link SSLSocket} to a fixed set of enabled protocol versions (e.g.
 * {@code "TLSv1.1"}). All socket creation is delegated to the wrapped factory;
 * the protocol list is applied just before each socket is handed back.
 */
public class SSLSocketFactoryTLS11 extends SSLSocketFactory {

	private final SSLSocketFactory delegate;
	private final String[] enableProtocols;

	/**
	 * @param delegate        factory that actually creates the sockets
	 * @param enableProtocols protocol names passed to
	 *                        {@link SSLSocket#setEnabledProtocols(String[])} on every created socket
	 */
	public SSLSocketFactoryTLS11(SSLSocketFactory delegate, String[] enableProtocols) {
		this.delegate = delegate;
		this.enableProtocols = enableProtocols;
	}

	/**
	 * Creates an unconnected socket. This override was missing: the inherited
	 * {@code javax.net.SocketFactory.createSocket()} does not delegate (it throws
	 * a SocketException by default), so callers that create unconnected sockets
	 * would fail or bypass the protocol restriction entirely.
	 */
	@Override
	public Socket createSocket() throws IOException {
		return enableTLSOnSocket(delegate.createSocket());
	}

	@Override
	public Socket createSocket(Socket socket, String host, int port, boolean autoClose) throws IOException {
		return enableTLSOnSocket(delegate.createSocket(socket, host, port, autoClose));
	}

	@Override
	public String[] getDefaultCipherSuites() {
		return delegate.getDefaultCipherSuites();
	}

	@Override
	public String[] getSupportedCipherSuites() {
		return delegate.getSupportedCipherSuites();
	}

	@Override
	public Socket createSocket(String host, int port) throws IOException, UnknownHostException {
		return enableTLSOnSocket(delegate.createSocket(host, port));
	}

	@Override
	public Socket createSocket(InetAddress host, int port) throws IOException {
		return enableTLSOnSocket(delegate.createSocket(host, port));
	}

	@Override
	public Socket createSocket(String host, int port, InetAddress localHost, int localPort)
			throws IOException, UnknownHostException {
		return enableTLSOnSocket(delegate.createSocket(host, port, localHost, localPort));
	}

	@Override
	public Socket createSocket(InetAddress address, int port, InetAddress localAddress, int localPort) throws IOException {
		return enableTLSOnSocket(delegate.createSocket(address, port, localAddress, localPort));
	}

	/** Applies the configured protocol list when the socket is an SSL socket. */
	private Socket enableTLSOnSocket(Socket socket) {
		if (socket != null && (socket instanceof SSLSocket)) {
			((SSLSocket) socket).setEnabledProtocols(enableProtocols);
		}
		return socket;
	}
}
| apache-2.0 |
euprogramador/nimbleway | nimbleway-client/src/main/java/br/com/aexo/nimbleway/client/subprotocols/json/encoder/RegisteredMessageJsonEncoder.java | 1062 | package br.com.aexo.nimbleway.client.subprotocols.json.encoder;
import java.io.IOException;
import java.io.StringWriter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import br.com.aexo.nimbleway.client.messages.RegisteredMessage;
import br.com.aexo.nimbleway.client.messages.ClientMessage;
import br.com.aexo.nimbleway.client.subprotocols.json.JsonEncoderMessage;
/**
 * Encodes a {@link RegisteredMessage} as a JSON array for the wire:
 * {@code [65, requestId, registrationId]} (65 presumably being the REGISTERED
 * message type code of the protocol — confirm against the subprotocol spec).
 */
public class RegisteredMessageJsonEncoder implements JsonEncoderMessage<RegisteredMessage> {

	// ObjectMapper is thread-safe once configured and expensive to construct, so
	// a single shared instance is reused instead of allocating one per encode().
	private static final ObjectMapper MAPPER = new ObjectMapper();

	@Override
	public Object encode(RegisteredMessage msg) {
		try {
			ArrayNode node = MAPPER.createArrayNode();
			node.add(65); // message type code
			node.add(msg.getRequestId());
			node.add(msg.getRegistrationId());
			return MAPPER.writeValueAsString(node);
		} catch (IOException e) {
			// Serialization of a plain array node should never fail; surface as unchecked.
			throw new RuntimeException(e);
		}
	}

	@Override
	public boolean isEncodeOf(ClientMessage type) {
		return type instanceof RegisteredMessage;
	}
}
| apache-2.0 |
wxiaoqi/ace-cache | src/main/java/com/ace/cache/config/RedisConfig.java | 3421 | package com.ace.cache.config;
import com.ace.cache.utils.PropertiesLoaderUtils;
import org.apache.commons.lang3.StringUtils;
import javax.annotation.PostConstruct;
/**
 * Redis connection settings loaded from {@code application.properties}.
 * The whole feature is driven by the presence of {@code redis.host}: when that
 * key is blank or absent, {@link #init()} leaves every field unset.
 */
public class RedisConfig {

    // Connection-pool limits (max active/idle/wait), populated from properties.
    private RedisPoolConfig pool = new RedisPoolConfig();
    private String host;
    private String password;
    private String timeout;
    private String database;
    private String port;
    private String enable;
    private String sysName;
    private String userKey;
    private Long refreshTimeout;

    /**
     * Reads all {@code redis.*} properties once the bean has been constructed.
     * {@code redis.cache.refreshTimeout} is optional and defaults to 0.
     */
    @PostConstruct
    public void init() {
        PropertiesLoaderUtils prop = new PropertiesLoaderUtils("application.properties");
        if (!StringUtils.isBlank(prop.getProperty("redis.host"))) {
            host = prop.getProperty("redis.host");
            pool.setMaxActive(prop.getProperty("redis.pool.maxActive"));
            pool.setMaxIdle(prop.getProperty("redis.pool.maxIdle"));
            pool.setMaxWait(prop.getProperty("redis.pool.maxWait"));
            password = prop.getProperty("redis.password");
            timeout = prop.getProperty("redis.timeout");
            database = prop.getProperty("redis.database");
            port = prop.getProperty("redis.port");
            sysName = prop.getProperty("redis.sysName");
            enable = prop.getProperty("redis.enable");
            // Optional cache refresh timeout; 0 means "no refresh".
            String refreshTimeoutStr = prop.getProperty("redis.cache.refreshTimeout");
            if (StringUtils.isNotBlank(refreshTimeoutStr)) {
                refreshTimeout = Long.parseLong(refreshTimeoutStr.trim());
            } else {
                refreshTimeout = 0L;
            }
            userKey = prop.getProperty("redis.userkey");
        }
    }

    /**
     * Prefixes {@code key} with the configured system name ({@code "<sysName>:"})
     * unless the key already carries that prefix, so all cache entries of this
     * application share one namespace in Redis.
     *
     * @param key raw cache key, must not be null
     * @return the namespaced key
     */
    public String addSys(String key) {
        String sys = this.getSysName();
        if (key.startsWith(sys)) {
            return key;
        }
        return sys + ":" + key;
    }

    public RedisPoolConfig getPool() {
        return pool;
    }

    public void setPool(RedisPoolConfig pool) {
        this.pool = pool;
    }

    public void setRefreshTimeout(Long refreshTimeout) {
        this.refreshTimeout = refreshTimeout;
    }

    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getTimeout() {
        return timeout;
    }

    public void setTimeout(String timeout) {
        this.timeout = timeout;
    }

    public String getDatabase() {
        return database;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public String getSysName() {
        return sysName;
    }

    public void setSysName(String sysName) {
        this.sysName = sysName;
    }

    public String getEnable() {
        return enable;
    }

    public void setEnable(String enable) {
        this.enable = enable;
    }

    public String getPort() {
        return port;
    }

    public void setPort(String port) {
        this.port = port;
    }

    public String getUserKey() {
        return userKey;
    }

    public void setUserKey(String userKey) {
        this.userKey = userKey;
    }

    public Long getRefreshTimeout() {
        return this.refreshTimeout;
    }
}
| apache-2.0 |
DIA-NZ/webcurator | wct-core/src/main/java/org/webcurator/ui/profiles/renderers/ComplexTypeRenderer.java | 2645 | /*
* Copyright 2006 The National Library of New Zealand
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.webcurator.ui.profiles.renderers;
import java.io.IOException;
import java.util.List;
import javax.servlet.jsp.JspWriter;
import javax.servlet.jsp.PageContext;
import org.webcurator.core.profiles.ComplexProfileElement;
import org.webcurator.core.profiles.ProfileElement;
import org.webcurator.core.profiles.RendererManager;
/**
* Renders a complex element.
*
* @see org.webcurator.ui.profiles.renderers.Renderer
* @author bbeaumont
*
*/
public class ComplexTypeRenderer extends Renderer {

	/* (non-Javadoc)
	 * @see org.webcurator.ui.profiles.renderers.Renderer#render(org.webcurator.core.profiles.ProfileElement, javax.servlet.jsp.PageContext, org.webcurator.ui.profiles.renderers.RendererFilter)
	 */
	public void render(ProfileElement element, PageContext context, RendererFilter filter) throws IOException {
		// Get the writer.
		JspWriter out = context.getOut();

		ComplexProfileElement complexElement = (ComplexProfileElement) element;

		// Heading with Max/Min links that toggle the visibility of the
		// "sub_<absoluteName>" div via the page's maximise/minimise JavaScript.
		out.print("<div class=\"profileMainHeading\">");
		out.print("<a href=\"javascript:maximise('sub_"+ element.getAbsoluteName() +"')\">Max</a> ");
		out.print("<a href=\"javascript:minimise('sub_"+ element.getAbsoluteName() +"')\">Min</a> ");
		out.print(element.getName() + "</div>");

		out.println("<div id=\"sub_"+element.getAbsoluteName()+"\" class=\"profileSublevel\">");

		// Render all the simple elements as a two-column name/value table.
		List<ProfileElement> simpleChildren = complexElement.getSimpleChildren();
		if(simpleChildren.size() > 0) {
			out.println("<table>");
			for(ProfileElement p: simpleChildren) {
				out.print("<tr><td>");
				out.print(p.getName());
				out.print("</td><td>");
				// Each simple child is rendered by whatever renderer is registered for it.
				RendererManager.getRenderer(p).render(p, context);
				out.println("</td></tr>");
			}
			out.println("</table>");
		}

		// Recursively render nested complex children that pass the filter.
		for(ProfileElement p: complexElement.getComplexChildren()) {
			if( filter.accepts(p)) {
				RendererManager.getRenderer(p).render(p, context, filter);
			}
		}

		out.println("</div>");
	}
}
| apache-2.0 |
arpitgautam/AsyncFBClient | src/main/java/org/async/fbclient/beans/user/Hometown.java | 1017 |
package org.async.fbclient.beans.user;
import javax.annotation.Generated;
import com.google.gson.annotations.Expose;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
@Generated("com.googlecode.jsonschema2pojo")
public class Hometown {

    // Hometown object id as delivered in the JSON payload (presumably the
    // Facebook Graph API object id — confirm against the API response).
    @Expose
    private String id;

    // Human-readable hometown name from the JSON payload.
    @Expose
    private String name;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Reflection-based dump of all fields (commons-lang). */
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    /** Reflection-based hash over all fields; consistent with {@link #equals}. */
    @Override
    public int hashCode() {
        return HashCodeBuilder.reflectionHashCode(this);
    }

    /** Reflection-based field-by-field equality (commons-lang). */
    @Override
    public boolean equals(Object other) {
        return EqualsBuilder.reflectionEquals(this, other);
    }

}
| apache-2.0 |
rjptegelaar/liquid-relay-sonic-interceptor | src/main/java/com/pte/liquid/relay/sonic/converter/SonicInterceptorMessageConverterImpl.java | 2517 | //Copyright 2015 Paul Tegelaar
//
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
package com.pte.liquid.relay.sonic.converter;
import java.util.Date;
import java.util.Iterator;
import javax.jms.Destination;
import com.pte.liquid.relay.Converter;
import com.pte.liquid.relay.exception.RelayException;
import com.pte.liquid.relay.model.Message;
import com.sonicsw.xq.XQMessage;
import com.sonicsw.xq.XQMessageException;
import com.sonicsw.xq.XQPart;
/**
 * Converts a Sonic {@link XQMessage} into a relay {@link Message}, copying all
 * message parts and headers and stamping the message with a snapshot time.
 */
public class SonicInterceptorMessageConverterImpl implements Converter<XQMessage> {

	/**
	 * Converts the given message, wrapping any {@link XQMessageException} in a
	 * {@link RelayException} so callers only deal with the relay API.
	 */
	public Message convert(XQMessage xqMsg) throws RelayException {
		try {
			return convertXQMessage(xqMsg);
		} catch (XQMessageException e) {
			throw new RelayException(e);
		}
	}

	private Message convertXQMessage(XQMessage xqMsg) throws XQMessageException {
		Message newMsg = new Message();

		// Take the snapshot time once, up front: previously it was re-assigned on
		// every part iteration (so it was never set for messages without parts)
		// and new Date() was called twice, letting the Date and the millis value
		// disagree by a tick.
		Date snapshotTime = new Date();
		newMsg.setSnapshotTime(snapshotTime);
		newMsg.setSnapshotTimeMillis(snapshotTime.getTime());

		// Copy every message part; parts without a content id get a generated label.
		for (int i = 0; i < xqMsg.getPartCount(); i++) {
			XQPart xqPart = xqMsg.getPart(i);
			String label = xqPart.getContentId();
			String content = xqPart.getContent().toString();
			if (label == null || "".equals(label)) {
				label = "PART_" + i;
			}
			newMsg.createPart(label, content);
		}

		// Copy headers. JMS Destination values are converted with toString();
		// everything else is read back via getStringHeader.
		Iterator<String> xqMsgHeaders = xqMsg.getHeaderNames();
		if (xqMsgHeaders != null) {
			while (xqMsgHeaders.hasNext()) {
				String xqMsgHeader = xqMsgHeaders.next();
				if (xqMsgHeader != null) {
					Object headerValue = xqMsg.getHeaderValue(xqMsgHeader);
					if (headerValue instanceof Destination) {
						newMsg.setHeader(xqMsgHeader, headerValue.toString());
					} else {
						newMsg.setHeader(xqMsgHeader, xqMsg.getStringHeader(xqMsgHeader));
					}
				}
			}
		}
		return newMsg;
	}
}
| apache-2.0 |
ruspl-afed/dbeaver | plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/editors/sql/syntax/rules/SQLDelimiterSetRule.java | 3437 | /*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.editors.sql.syntax.rules;
import org.eclipse.jface.text.rules.ICharacterScanner;
import org.eclipse.jface.text.rules.IRule;
import org.eclipse.jface.text.rules.IToken;
import org.eclipse.jface.text.rules.Token;
import org.jkiss.dbeaver.ui.editors.sql.syntax.SQLRuleManager;
import org.jkiss.dbeaver.ui.editors.sql.syntax.tokens.SQLSetDelimiterToken;
/**
* Delimiter redefien rule
*/
public class SQLDelimiterSetRule implements IRule {

    // Keyword that introduces the redefinition, e.g. "DELIMITER" (matched case-insensitively).
    private final String setDelimiterWord;
    // Token returned when the whole "set delimiter" statement is matched.
    private final SQLSetDelimiterToken setDelimiterToken;
    // Rule whose active delimiter gets replaced when the statement is evaluated.
    private final SQLDelimiterRule delimiterRule;

    public SQLDelimiterSetRule(String setDelimiterWord, SQLSetDelimiterToken setDelimiterToken, SQLDelimiterRule delimiterRule) {
        this.setDelimiterWord = setDelimiterWord;
        this.setDelimiterToken = setDelimiterToken;
        this.delimiterRule = delimiterRule;
    }

    @Override
    public IToken evaluate(ICharacterScanner scanner) {
        // Must be in the beginning of line
        {
            // Peek at the previous character without consuming input: unread then read
            // leaves the scanner position unchanged.
            scanner.unread();
            int prevChar = scanner.read();
            if (prevChar != ICharacterScanner.EOF && prevChar != '\r' && prevChar != '\n') {
                return Token.UNDEFINED;
            }
        }
        // Match the keyword case-insensitively, character by character.
        for (int i = 0; i < setDelimiterWord.length(); i++) {
            char c = setDelimiterWord.charAt(i);
            final int nextChar = scanner.read();
            if (Character.toUpperCase(nextChar) != c) {
                // Doesn't match
                // Push back everything consumed so far (i matched chars + the mismatch).
                for (int k = 0; k <= i; k++) {
                    scanner.unread();
                }
                return Token.UNDEFINED;
            }
        }
        StringBuilder delimBuffer = new StringBuilder();
        int next = scanner.read();
        if (next == ICharacterScanner.EOF || next == '\n' || next == '\r') {
            // Empty delimiter
            scanner.unread();
        } else {
            if (!Character.isWhitespace(next)) {
                // Keyword is not followed by whitespace: not our statement. Push back
                // the keyword plus the character just read.
                for (int k = 0; k < setDelimiterWord.length() + 1; k++) {
                    scanner.unread();
                }
                return Token.UNDEFINED;
            }
            // Get everything till the end of line
            for (; ; ) {
                next = scanner.read();
                if (next == ICharacterScanner.EOF || next == '\n' || next == '\r') {
                    break;
                }
                delimBuffer.append((char) next);
            }
            // Leave the line terminator for the next rule to consume.
            scanner.unread();
        }
        // Only actually change the delimiter when the scanner is evaluating (not
        // merely colorizing): the rule manager exposes that mode.
        if (scanner instanceof SQLRuleManager && ((SQLRuleManager) scanner).isEvalMode()) {
            final String newDelimiter = delimBuffer.toString().trim();
            delimiterRule.changeDelimiter(newDelimiter);
        }
        return setDelimiterToken;
    }
}
| apache-2.0 |
mmaro/giraph | giraph-core/src/main/java/org/apache/giraph/types/IntToIntWritableWrapper.java | 1124 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.types;
import org.apache.hadoop.io.IntWritable;
/**
 * Wraps plain {@link Integer} values into Hadoop {@link IntWritable} instances.
 */
public class IntToIntWritableWrapper
    implements WritableWrapper<IntWritable, Integer> {
  @Override
  public IntWritable wrap(Integer javaValue) {
    IntWritable writable = new IntWritable();
    writable.set(javaValue);
    return writable;
  }
}
| apache-2.0 |
orsjb/HappyBrackets | IntelliJ Plugin/src/net/happybrackets/intellij_plugin/templates/project/HappyBracketsGeneratorPeer.java | 3020 | /*
* Copyright 2014 Hippo B.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.happybrackets.intellij_plugin.templates.project;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JTextField;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import com.intellij.ide.util.projectWizard.SettingsStep;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.platform.WebProjectGenerator;
/**
 * Wizard "peer" supplying the settings UI for the HappyBrackets project generator
 * and transferring the entered values into a {@link SettingsData} instance.
 *
 * @version "$Id$"
 */
public class HappyBracketsGeneratorPeer implements WebProjectGenerator.GeneratorPeer<SettingsData> {

    /** Backing settings object; refreshed from the UI on every {@link #getSettings()} call. */
    SettingsData data = new SettingsData();

    /** Root panel of the form (bound by the IntelliJ GUI designer). */
    private JPanel myMainPanel;
    /** Vendor name input field (bound by the IntelliJ GUI designer). */
    private JTextField vendorName;

    public HappyBracketsGeneratorPeer() {
    }

    @NotNull
    @Override
    public JComponent getComponent() {
        return myMainPanel;
    }

    @Override
    public void buildUI(@NotNull final SettingsStep settingsStep) {
        // Intentionally empty: no additional fields are contributed to the settings step.
    }

    /**
     * Copies the current UI state into the shared {@link SettingsData} and returns it.
     */
    @NotNull
    @Override
    public SettingsData getSettings() {
        getData(data);
        return data;
    }

    @Nullable
    @Override
    public ValidationInfo validate() {
        // Returning null means the current input is considered valid.
        return null;
    }

    @Override
    public boolean isBackgroundJobRunning() {
        return false;
    }

    @Override
    public void addSettingsStateListener(@NotNull final WebProjectGenerator.SettingsStateListener settingsStateListener) {
        // No dynamic validation state changes to report, so listeners are ignored.
    }

    private void createUIComponents() {
        // Hook for the IntelliJ GUI designer; all components come from the .form file.
    }

    /** Copies the model values into the UI fields. */
    public void setData(SettingsData data) {
        vendorName.setText(data.getVendor());
    }

    /** Copies the UI field values into the model. */
    public void getData(SettingsData data) {
        data.setVendor(vendorName.getText());
    }

    /**
     * @return true when the vendor entered in the UI differs from the stored value
     * (null-safe; replaces the previous hand-rolled null comparison).
     */
    public boolean isModified(SettingsData data) {
        return !java.util.Objects.equals(vendorName.getText(), data.getVendor());
    }
}
| apache-2.0 |
WangGanxin/DoingDaily | app/src/main/java/com/ganxin/doingdaily/framework/BaseListFragment.java | 3158 | package com.ganxin.doingdaily.framework;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.view.ViewGroup;
import com.ganxin.doingdaily.R;
import com.ganxin.doingdaily.common.widgets.pullrecycler.BaseListAdapter;
import com.ganxin.doingdaily.common.widgets.pullrecycler.BaseViewHolder;
import com.ganxin.doingdaily.common.widgets.pullrecycler.ItemDecoration.DividerItemDecoration;
import com.ganxin.doingdaily.common.widgets.pullrecycler.PullRecycler;
import com.ganxin.doingdaily.common.widgets.pullrecycler.layoutmanager.ILayoutManager;
import com.ganxin.doingdaily.common.widgets.pullrecycler.layoutmanager.MyLinearLayoutManager;
import java.util.ArrayList;
import butterknife.BindView;
/**
 * Description : Base fragment for list screens backed by a {@link PullRecycler}
 * (RecyclerView wrapper with pull-to-refresh and load-more support). Subclasses
 * supply the data via {@link #setUpData()} and the item views via
 * {@link #getViewHolder(ViewGroup, int)}. <br/>
 * author : WangGanxin <br/>
 * date : 2016/10/9 <br/>
 * email : ganxinvip@163.com <br/>
 * @param <V> view interface bound to the presenter
 * @param <T> presenter type
 * @param <D> type of the items shown in the list
 */
public abstract class BaseListFragment<V extends BaseView,T extends BasePresenter<V>,D> extends BaseFragment<V,T> implements PullRecycler.OnRecyclerRefreshListener {

    // Pull-to-refresh recycler bound via ButterKnife.
    @BindView(R.id.pullRecycler)
    protected PullRecycler pullRecycler;

    // Adapter bridging mDataList to the recycler; created in setUpAdapter().
    protected BaseListAdapter adapter;

    // Backing list of items; subclasses populate it in setUpData() / refresh callbacks.
    protected ArrayList<D> mDataList=new ArrayList<>();

    @Override
    public int setContentLayout() {
        return R.layout.fragment_base_list;
    }

    /** Hook for subclasses: load/prepare the initial data. Called from initPresenter(). */
    protected abstract void setUpData();

    @Override
    public void setUpView(View view) {
        // Wire adapter + recycler: both refresh directions enabled by default.
        setUpAdapter();
        pullRecycler.setOnRefreshListener(this);
        pullRecycler.enablePullToRefresh(true);
        pullRecycler.enableLoadMore(true);
        pullRecycler.setLayoutManager(getLayoutManager());
        pullRecycler.addItemDecoration(getItemDecoration());
        pullRecycler.setAdapter(adapter);
    }

    @Override
    protected void initPresenter() {
        super.initPresenter();
        setUpData();
    }

    /** Override to supply a custom adapter; the default is the inner ListAdapter. */
    protected void setUpAdapter() {
        adapter = new ListAdapter();
    }

    /** Override to change the layout; defaults to a vertical linear layout. */
    protected ILayoutManager getLayoutManager() {
        return new MyLinearLayoutManager(getContext());
    }

    /** Override to change item separators; defaults to the standard list divider. */
    protected RecyclerView.ItemDecoration getItemDecoration() {
        return new DividerItemDecoration(getContext(), R.drawable.list_divider);
    }

    /** Default adapter delegating item creation and typing back to the fragment. */
    public class ListAdapter extends BaseListAdapter {

        @Override
        protected BaseViewHolder onCreateNormalViewHolder(ViewGroup parent, int viewType) {
            return getViewHolder(parent, viewType);
        }

        @Override
        protected int getDataCount() {
            // Guard against subclasses reassigning mDataList to null.
            return mDataList != null ? mDataList.size() : 0;
        }

        @Override
        protected int getDataViewType(int position) {
            return getItemType(position);
        }

        @Override
        public boolean isSectionHeader(int position) {
            return BaseListFragment.this.isSectionHeader(position);
        }
    }

    /** Override to mark section-header rows; default: no headers. */
    protected boolean isSectionHeader(int position) {
        return false;
    }

    /** Override for multiple view types; default: single type 0. */
    protected int getItemType(int position) {
        return 0;
    }

    /** Subclasses create the view holder for the given view type. */
    protected abstract BaseViewHolder getViewHolder(ViewGroup parent, int viewType);
}
| apache-2.0 |
xfournet/intellij-community | python/src/com/jetbrains/python/documentation/PythonDocumentationMap.java | 7119 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.documentation;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.psi.PsiNamedElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.QualifiedName;
import com.jetbrains.python.psi.PyClass;
import com.jetbrains.python.psi.PyFunction;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Application-level persistent mapping from Python module-name prefixes to external
 * documentation URL patterns (stored in {@code other.xml}).
 * <p>
 * Patterns may contain macros such as {@code {module.name}}, {@code {class.name}},
 * {@code {function.name}}, {@code {element.name}}, {@code {element.qname}} and
 * {@code {python.version}}, each optionally suffixed with {@code .lower},
 * {@code .slashes} (dots to slashes) or {@code .dashes} (underscores to dashes).
 *
 * @author yole
 */
@State(name = "PythonDocumentationMap", storages = @Storage("other.xml"))
public class PythonDocumentationMap implements PersistentStateComponent<PythonDocumentationMap.State> {
  public static PythonDocumentationMap getInstance() {
    return ServiceManager.getService(PythonDocumentationMap.class);
  }

  /** A single prefix -&gt; URL-pattern association. Mutable bean for XML serialization. */
  public static class Entry {
    private String myPrefix;
    private String myUrlPattern;

    /** No-arg constructor required for serialization. */
    public Entry() {
    }

    public Entry(String prefix, String urlPattern) {
      myPrefix = prefix;
      myUrlPattern = urlPattern;
    }

    public String getPrefix() {
      return myPrefix;
    }

    public String getUrlPattern() {
      return myUrlPattern;
    }

    public void setPrefix(String prefix) {
      myPrefix = prefix;
    }

    public void setUrlPattern(String urlPattern) {
      myUrlPattern = urlPattern;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;

      Entry entry = (Entry)o;

      if (!myPrefix.equals(entry.myPrefix)) return false;
      if (!myUrlPattern.equals(entry.myUrlPattern)) return false;

      return true;
    }

    @Override
    public int hashCode() {
      int result = myPrefix.hashCode();
      result = 31 * result + myUrlPattern.hashCode();
      return result;
    }
  }

  /**
   * Serializable state: the list of configured entries, pre-populated with URL
   * patterns for well-known libraries.
   */
  public static class State {
    private List<Entry> myEntries = new ArrayList<>();

    public State() {
      addEntry("PyQt4", "http://www.riverbankcomputing.co.uk/static/Docs/PyQt4/html/{class.name.lower}.html#{function.name}");
      addEntry("PySide", "http://pyside.github.io/docs/pyside/{module.name.slashes}/{class.name}.html#{module.name}.{element.qname}");
      addEntry("gtk", "http://library.gnome.org/devel/pygtk/stable/class-gtk{class.name.lower}.html#method-gtk{class.name.lower}--{function.name.dashes}");
      addEntry("wx", "http://www.wxpython.org/docs/api/{module.name}.{class.name}-class.html#{function.name}");
      addEntry("numpy", "http://docs.scipy.org/doc/numpy/reference/{}generated/{module.name}.{element.name}.html");
      addEntry("scipy", "http://docs.scipy.org/doc/scipy/reference/{}generated/{module.name}.{element.name}.html");
      addEntry("kivy", "http://kivy.org/docs/api-{module.name}.html");
    }

    public List<Entry> getEntries() {
      return myEntries;
    }

    public void setEntries(List<Entry> entries) {
      myEntries = entries;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;

      State state = (State)o;
      // Order-insensitive: states with the same entries in a different order are equal.
      return Sets.newHashSet(myEntries).equals(Sets.newHashSet(state.getEntries()));
    }

    @Override
    public int hashCode() {
      // Must be order-insensitive to stay consistent with equals() above; the previous
      // implementation hashed the ordered list, violating the equals/hashCode contract.
      return myEntries != null ? Sets.newHashSet(myEntries).hashCode() : 0;
    }

    private void addEntry(String qName, String pattern) {
      myEntries.add(new Entry(qName, pattern));
    }
  }

  private State myState = new State();

  @Override
  public State getState() {
    return myState;
  }

  @Override
  public void loadState(@NotNull State state) {
    myState = state;
  }

  /** @return an immutable snapshot of the configured entries. */
  public List<Entry> getEntries() {
    return ImmutableList.copyOf(myState.getEntries());
  }

  public void setEntries(List<Entry> entries) {
    myState.setEntries(entries);
  }

  /**
   * Builds the documentation URL for an element of the given module; the first entry
   * whose prefix matches {@code moduleQName} wins.
   *
   * @return the expanded URL, the pattern's root when a required macro has no value,
   *         or null when no configured prefix matches
   */
  @Nullable
  public String urlFor(QualifiedName moduleQName, @Nullable PsiNamedElement element, String pyVersion) {
    for (Entry entry : myState.myEntries) {
      if (moduleQName.matchesPrefix(QualifiedName.fromDottedString(entry.myPrefix))) {
        return transformPattern(entry.myUrlPattern, moduleQName, element, pyVersion);
      }
    }
    return null;
  }

  /**
   * @return the static part of the matching pattern (everything before the first
   *         macro), or null when no configured prefix matches
   */
  @Nullable
  public String rootUrlFor(QualifiedName moduleQName) {
    for (Entry entry : myState.myEntries) {
      if (moduleQName.matchesPrefix(QualifiedName.fromDottedString(entry.myPrefix))) {
        return rootForPattern(entry.myUrlPattern);
      }
    }
    return null;
  }

  private static String rootForPattern(String urlPattern) {
    int pos = urlPattern.indexOf('{');
    return pos >= 0 ? urlPattern.substring(0, pos) : urlPattern;
  }

  /**
   * Expands the macros of {@code urlPattern} for the given module/element. Falls back
   * to the pattern root when a macro used by the pattern has no value (e.g. a
   * {class.name} pattern applied to a top-level function).
   */
  @Nullable
  private static String transformPattern(@NotNull String urlPattern, QualifiedName moduleQName, @Nullable PsiNamedElement element,
                                         String pyVersion) {
    Map<String, String> macros = new HashMap<>();
    macros.put("element.name", element == null ? null : element.getName());
    PyClass pyClass = element == null ? null : PsiTreeUtil.getParentOfType(element, PyClass.class, false);
    macros.put("class.name", pyClass == null ? null : pyClass.getName());
    if (element != null) {
      // Qualified name: module[.class].element
      StringBuilder qName = new StringBuilder(moduleQName.toString()).append(".");
      if (element instanceof PyFunction && ((PyFunction)element).getContainingClass() != null) {
        qName.append(((PyFunction)element).getContainingClass().getName()).append(".");
      }
      qName.append(element.getName());
      macros.put("element.qname", qName.toString());
    }
    else {
      macros.put("element.qname", "");
    }
    macros.put("function.name", element instanceof PyFunction ? element.getName() : "");
    macros.put("module.name", moduleQName.toString());
    macros.put("python.version", pyVersion);
    final String pattern = transformPattern(urlPattern, macros);
    if (pattern == null) {
      // A macro used by the pattern had no value — degrade to the pattern root.
      return rootForPattern(urlPattern);
    }
    return pattern;
  }

  /**
   * Substitutes all macro occurrences; returns null when the pattern references a
   * macro whose value is null.
   */
  @Nullable
  private static String transformPattern(@NotNull String urlPattern, Map<String, String> macroValues) {
    for (Map.Entry<String, String> entry : macroValues.entrySet()) {
      if (entry.getValue() == null) {
        if (urlPattern.contains("{" + entry.getKey())) {
          return null;
        }
        continue;
      }
      urlPattern = urlPattern
        .replace("{" + entry.getKey() + "}", entry.getValue())
        .replace("{" + entry.getKey() + ".lower}", entry.getValue().toLowerCase())
        .replace("{" + entry.getKey() + ".slashes}", entry.getValue().replace(".", "/"))
        .replace("{" + entry.getKey() + ".dashes}", entry.getValue().replace("_", "-"));
    }
    // "{}" is used in default patterns as an optional-segment placeholder; drop it.
    return urlPattern.replace("{}", "");
  }
}
| apache-2.0 |
MegafonWebLab/histone-java2 | core/src/test/java/ru/histone/v2/ParserTest.java | 1119 | /*
* Copyright (c) 2016 MegaFon
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.histone.v2;
import org.junit.jupiter.api.DynamicTest;
import org.junit.jupiter.api.TestFactory;
import ru.histone.v2.acceptance.CasePack;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.stream.Stream;
/**
 * Acceptance tests for the Histone parser: runs every case from the "parser"
 * case pack (see {@link CasePack}) as a dynamic JUnit 5 test.
 *
 * @author Alexey Nevinsky
 */
@CasePack("parser")
public class ParserTest extends HistoneTest {
    // Appears to exist solely so the @TestFactory annotation is declared on this
    // subclass; case loading itself is delegated entirely to the base class.
    @Override
    @TestFactory
    public Stream<DynamicTest> loadCases(String param) throws IOException, URISyntaxException {
        return super.loadCases(param);
    }
}
| apache-2.0 |
arquillian/arquillian-extension-portal | warp/jsf-portlet/src/main/java/org/jboss/arquillian/portal/warp/jsf/WarpPortletJSFExtension.java | 1279 | package org.jboss.arquillian.portal.warp.jsf;
import org.jboss.arquillian.container.test.spi.RemoteLoadableExtension;
import org.jboss.arquillian.core.spi.LoadableExtension;
import org.jboss.arquillian.warp.spi.WarpDeploymentEnrichmentExtension;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.jboss.shrinkwrap.api.spec.WebArchive;
/**
 * Arquillian extension that contributes JSF-specific Warp enrichment to portlet
 * test deployments.
 *
 * @author <a href="http://community.jboss.org/people/kenfinni">Ken Finnigan</a>
 */
public class WarpPortletJSFExtension implements LoadableExtension, WarpDeploymentEnrichmentExtension {

    @Override
    public void register(ExtensionBuilder builder) {
        builder.service(WarpDeploymentEnrichmentExtension.class, getClass());
    }

    @Override
    public void enrichWebArchive(WebArchive webArchive) {
        // Nothing to add to the web archive itself; enrichment happens via the
        // library returned below.
    }

    @Override
    public JavaArchive getEnrichmentLibrary() {
        JavaArchive library = ShrinkWrap.create(JavaArchive.class, "arquillian-portal-warp-jsf.jar");
        library.addAsManifestResource("META-INF/portal-extensions/faces-config.xml", "faces-config.xml");
        library.addPackage("org.jboss.arquillian.portal.warp.jsf");
        library.addAsServiceProvider(RemoteLoadableExtension.class, WarpPortletJSFRemoteExtension.class);
        return library;
    }
}
| apache-2.0 |
actframework/actframework | legacy-testapp/src/test/java/testapp/endpoint/binding/collection/BigDecimalArrayActionParameterBindingTest.java | 727 | package testapp.endpoint.binding.collection;
import org.osgl.util.C;
import java.math.BigDecimal;
import java.util.List;
/**
 * Binding tests for BigDecimal collection/array action parameters.
 */
public class BigDecimalArrayActionParameterBindingTest extends SimpleTypeArrayActionParameterBindingTestBase<BigDecimal> {

    @Override
    protected String listPath() {
        return "b_dec_list";
    }

    @Override
    protected String setPath() {
        return "b_dec_set";
    }

    @Override
    protected String wrapperArrayPath() {
        return "b_dec_wa";
    }

    @Override
    protected List<BigDecimal> nonEmptyList() {
        return C.list(
                decimalOf(-1.03),
                decimalOf(0.0),
                decimalOf(-1.03),
                decimalOf(123421421.32342));
    }

    /** Shorthand for building BigDecimal fixtures from double literals. */
    private static BigDecimal decimalOf(double value) {
        return BigDecimal.valueOf(value);
    }
}
| apache-2.0 |
quarkusio/quarkus | extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionConfiguration.java | 2048 | package io.quarkus.narayana.jta.runtime;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * This annotation can be used to configure a different transaction timeout than the default one for a method or a class.
 * <p>
 * When defined on a method, it needs to be used on the entry method of the transaction.
 * <p>
 * If defined on a class, it is equivalent to defining it on all the methods of the class marked with {@code @Transactional}.
 * The configuration defined on a method takes precedence over the configuration defined on a class.
 */
@Inherited
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(value = RetentionPolicy.RUNTIME)
public @interface TransactionConfiguration {

    /**
     * Sentinel value used to specify that no transaction timeout is configured.
     */
    int UNSET_TIMEOUT = -1;

    /**
     * The transaction timeout in seconds.
     * Defaults to {@link #UNSET_TIMEOUT}: no timeout configured.
     *
     * @return The transaction timeout in seconds.
     */
    int timeout() default UNSET_TIMEOUT;

    /**
     * Sentinel default for {@link #timeoutFromConfigProperty()} meaning that no
     * configuration property has been specified.
     */
    String UNSET_TIMEOUT_CONFIG_PROPERTY = "<<unset>>";

    /**
     * The configuration property to use in order to determine the value of the timeout in seconds.
     * If the property exists, it must be an integer value representing the transaction timeout in seconds.
     *
     * An example configuration in {@code application.properties} could be: {@code my-transaction.timeout=5}.
     *
     * If both {@code timeoutFromConfigProperty} and {@code timeout} are set, then Quarkus will attempt to resolve
     * {@code timeoutFromConfigProperty} and if a value for it has been provided, the timeout is set to that value.
     * If no value has been provided at runtime for the property, then the value of {@code timeout} will be used
     * as the fallback.
     */
    String timeoutFromConfigProperty() default UNSET_TIMEOUT_CONFIG_PROPERTY;
}
| apache-2.0 |
pyranja/asio | server/src/test/java/at/ac/univie/isc/asio/engine/sql/ValuePresenterTest.java | 2995 | /*
* #%L
* asio server
* %%
* Copyright (C) 2013 - 2015 Research Group Scientific Computing, University of Vienna
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package at.ac.univie.isc.asio.engine.sql;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link ValuePresenter}: values must be routed to the representation
 * registered for their type, null values must use the Void representation, unmatched
 * types fall back to the default, and illegal states (no representation, null result)
 * must fail loudly.
 */
public class ValuePresenterTest {
  // Mocked representation that returns "test" for any input.
  private Representation representation;
  private ValuePresenter subject;

  @Before
  public void setUp() throws Exception {
    representation = Mockito.mock(Representation.class);
    when(representation.apply(any())).thenReturn("test");
  }

  @Test
  public void should_use_registered_function_for_value() throws Exception {
    subject = ValuePresenter.withDefault(ValuePresenter.FAIL).register(representation, Long.class).build();
    subject.format(1L, Long.class);
    // The Long-registered representation must receive the value.
    verify(representation).apply(1L);
  }

  @Test
  public void should_yield_registered_functions_return_value() throws Exception {
    subject = ValuePresenter.withDefault(ValuePresenter.FAIL).register(representation, Long.class).build();
    final String actual = subject.format(1L, Long.class);
    assertThat(actual, is("test"));
  }

  @Test
  public void should_use_void_representation_for_null_value() throws Exception {
    // Null values are dispatched to the representation registered for Void.
    subject = ValuePresenter.withDefault(ValuePresenter.FAIL).register(representation, Void.class).build();
    subject.format(null, Object.class);
    verify(representation).apply(null);
  }

  @Test
  public void fall_back_to_default_representation() throws Exception {
    subject = ValuePresenter.withDefault(representation).build();
    final String formatted = subject.format("fallback", Object.class);
    assertThat(formatted, is("test"));
  }

  @Test(expected = ValuePresenter.NoRepresentationFound.class)
  public void fail_if_no_representation_found() throws Exception {
    subject = ValuePresenter.withDefault(ValuePresenter.FAIL).build();
    subject.format(new Object(), Object.class);
  }

  @Test(expected = AssertionError.class)
  public void fail_if_representation_yields_null() throws Exception {
    // A representation returning null violates the presenter's invariant.
    when(representation.apply(any())).thenReturn(null);
    subject = ValuePresenter.withDefault(ValuePresenter.FAIL).register(representation, Object.class).build();
    subject.format(new Object(), Object.class);
  }
}
| apache-2.0 |
CMPUT301W14T14/Team14RecipeFinder | AppTests/src/ca/ualberta/cs/app/testPart4/model/UserProfileModelTest.java | 3914 | /**
*
*/
package ca.ualberta.cs.app.testPart4.model;
import model.UserProfile;
import activity.AllTopicPageActivity;
import android.graphics.Bitmap;
import android.test.ActivityInstrumentationTestCase2;
/**
 * JUnit test cases for the UserProfile model.
 * <p>
 * These are JUnit3-style instrumentation tests: the framework invokes
 * setUp()/tearDown() around every test method automatically, so the tests must not
 * call tearDown() themselves (the previous manual calls made teardown run twice).
 *
 * @author Yilu Su
 */
public class UserProfileModelTest extends ActivityInstrumentationTestCase2<AllTopicPageActivity> {

    /**
     * Constructor
     */
    public UserProfileModelTest() {
        super(AllTopicPageActivity.class);
    }

    /**
     * Test whether the user name of a profile can be retrieved and edited. <br>
     * Methods tested: getUserName and setUserName.
     * @throws Exception
     */
    public void testGetAndSetUserName() throws Exception {
        UserProfile profile = new UserProfile("userName", "biography", "twitter", "facebook", null);
        assertEquals("userName", profile.getUserName());
        profile.setUserName("new userName");
        assertEquals("new userName", profile.getUserName());
    }

    /**
     * Test whether the biography of a profile can be retrieved and edited. <br>
     * Methods tested: getBiography and setBiography.
     * @throws Exception
     */
    public void testGetAndSetBiography() throws Exception {
        UserProfile profile = new UserProfile("userName", "biography", "twitter", "facebook", null);
        assertEquals("biography", profile.getBiography());
        profile.setBiography("new biography");
        assertEquals("new biography", profile.getBiography());
    }

    /**
     * Test whether the twitter of a profile can be retrieved and edited. <br>
     * Methods tested: getTwitter and setTwitter.
     * @throws Exception
     */
    public void testGetAndSetTwitter() throws Exception {
        UserProfile profile = new UserProfile("userName", "biography", "twitter", "facebook", null);
        assertEquals("twitter", profile.getTwitter());
        profile.setTwitter("new twitter");
        assertEquals("new twitter", profile.getTwitter());
    }

    /**
     * Test whether the facebook of a profile can be retrieved and edited. <br>
     * Methods tested: getFacebook and setFacebook.
     * @throws Exception
     */
    public void testGetAndSetFacebook() throws Exception {
        UserProfile profile = new UserProfile("userName", "biography", "twitter", "facebook", null);
        assertEquals("facebook", profile.getFacebook());
        profile.setFacebook("new facebook");
        assertEquals("new facebook", profile.getFacebook());
    }

    /**
     * Test whether the photo of a profile can be retrieved and edited. <br>
     * The profile is created without a photo, then a bitmap is assigned and
     * read back. Methods tested: getPhoto and setPhoto.
     * @throws Exception
     */
    public void testGetAndSetPhoto() throws Exception {
        UserProfile profile = new UserProfile("userName", "biography", "twitter", "facebook", null);
        assertEquals(null, profile.getPhoto());
        Bitmap pic = Bitmap.createBitmap(10, 10, Bitmap.Config.ARGB_8888);
        profile.setPhoto(pic);
        assertEquals(pic, profile.getPhoto());
    }
}
| apache-2.0 |
j-coll/java-common-libs | commons-lib/src/main/java/org/opencb/commons/utils/FileUtils.java | 5085 | package org.opencb.commons.utils;
import java.io.*;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
/**
 * Utility helpers for validating filesystem paths and opening readers/writers that
 * transparently handle GZIP-compressed files (detected by a ".gz" file name suffix).
 */
public class FileUtils {

    /** Suffix that marks a file as GZIP-compressed. */
    private static final String GZIP_EXTENSION = ".gz";

    /**
     * Checks that the given path exists and is readable.
     *
     * @param path path to validate
     * @throws IOException if the path is null, missing or not readable
     */
    public static void checkPath(Path path) throws IOException {
        checkPath(path, false);
    }

    /**
     * Checks that the given path exists and is readable, and optionally writable.
     *
     * @param path path to validate
     * @param writable when true, the path must also be writable
     * @throws IOException if any of the checks fails
     */
    public static void checkPath(Path path, boolean writable) throws IOException {
        if (path == null) {
            throw new IOException("Path is null");
        }
        if (!Files.exists(path)) {
            throw new IOException("Path '" + path.toAbsolutePath() + "' does not exist");
        }
        if (!Files.isReadable(path)) {
            throw new IOException("Path '" + path.toAbsolutePath() + "' cannot be read");
        }
        if (writable && !Files.isWritable(path)) {
            throw new IOException("Path '" + path.toAbsolutePath() + "' cannot be written");
        }
    }

    /**
     * Checks that the given path is a readable regular file.
     *
     * @param path path to validate
     * @throws IOException if the path is invalid or a directory
     */
    public static void checkFile(Path path) throws IOException {
        checkFile(path, false);
    }

    /**
     * Checks that the given path is a readable (and optionally writable) regular file.
     *
     * @param path path to validate
     * @param writable when true, the file must also be writable
     * @throws IOException if the path is invalid or a directory
     */
    public static void checkFile(Path path, boolean writable) throws IOException {
        checkPath(path, writable);
        if (Files.isDirectory(path)) {
            throw new IOException("Path '" + path.toAbsolutePath() + "' must be a file");
        }
    }

    /**
     * Checks that the given path is a readable directory.
     *
     * @param path path to validate
     * @throws IOException if the path is invalid or not a directory
     */
    public static void checkDirectory(Path path) throws IOException {
        checkDirectory(path, false);
    }

    /**
     * Checks that the given path is a readable (and optionally writable) directory.
     *
     * @param path path to validate
     * @param writable when true, the directory must also be writable
     * @throws IOException if the path is invalid or not a directory
     */
    public static void checkDirectory(Path path, boolean writable) throws IOException {
        checkPath(path, writable);
        if (!Files.isDirectory(path)) {
            throw new IOException("Path '" + path.toAbsolutePath() + "' must be a directory");
        }
    }

    /** @return true when the file name ends with ".gz" (single shared detection rule). */
    private static boolean isGzipped(Path path) {
        return path.toFile().getName().endsWith(GZIP_EXTENSION);
    }

    /**
     * Opens a {@link BufferedReader} over the file using the platform default charset,
     * transparently decompressing GZIP files.
     *
     * @param path file to read
     * @return BufferedReader object
     * @throws IOException if the path is not a readable file
     */
    public static BufferedReader newBufferedReader(Path path) throws IOException {
        return newBufferedReader(path, Charset.defaultCharset());
    }

    /**
     * Opens a {@link BufferedReader} over the file using the given charset,
     * transparently decompressing GZIP files.
     *
     * @param path file to read
     * @param charset charset used to decode the file contents
     * @return BufferedReader object
     * @throws IOException if the path is not a readable file
     */
    public static BufferedReader newBufferedReader(Path path, Charset charset) throws IOException {
        return new BufferedReader(new InputStreamReader(newInputStream(path), charset));
    }

    /**
     * Opens an {@link InputStream} over the file, transparently decompressing GZIP
     * files.
     *
     * @param path the path to the file to open
     * @param options options specifying how the file is opened
     * @return a new input stream
     * @throws IOException if an I/O error occurs or the path is not a readable file
     */
    public static InputStream newInputStream(Path path, OpenOption... options) throws IOException {
        checkFile(path);
        InputStream inputStream = Files.newInputStream(path, options);
        if (isGzipped(path)) {
            inputStream = new GZIPInputStream(inputStream);
        }
        return inputStream;
    }

    /**
     * Opens a {@link BufferedWriter} over the file using the platform default charset,
     * transparently GZIP-compressing files with a ".gz" suffix.
     *
     * @param path file to write
     * @return BufferedWriter object
     * @throws IOException if the parent directory is missing or not writable
     */
    public static BufferedWriter newBufferedWriter(Path path) throws IOException {
        // Delegating keeps both overloads consistent; the platform default charset is
        // also what the previous GZIP branch used implicitly.
        return newBufferedWriter(path, Charset.defaultCharset());
    }

    /**
     * Opens a {@link BufferedWriter} over the file using the given charset,
     * transparently GZIP-compressing files with a ".gz" suffix.
     *
     * @param path file to write
     * @param charset charset used to encode the file contents
     * @return BufferedWriter object
     * @throws IOException if the parent directory is missing or not writable
     */
    public static BufferedWriter newBufferedWriter(Path path, Charset charset) throws IOException {
        checkDirectory(path.getParent());
        BufferedWriter bufferedWriter;
        if (isGzipped(path)) {
            bufferedWriter = new BufferedWriter(
                    new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(path.toFile())), charset));
        } else {
            bufferedWriter = Files.newBufferedWriter(path, charset);
        }
        return bufferedWriter;
    }
}
| apache-2.0 |
djsilenceboy/LearnTest | Java_Test/AppMavenWsSample1/Backup/CodesSet_CXF24_2/generated-sources/src/org/csapi/wsdl/parlayx/singtel/ndp/ext/wappush/v1_0/_interface/ExtWapPush.java | 1878 | package org.csapi.wsdl.parlayx.djs.sample.ext.wappush.v1_0._interface;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;
/**
 * This class was generated by Apache CXF 2.4.4
 * 2011-11-10T10:21:50.647+08:00
 * Generated source version: 2.4.4
 *
 * NOTE: generated code — do not edit by hand; regenerate from the WSDL instead.
 */
@WebService(targetNamespace = "http://www.csapi.org/wsdl/parlayx/djs/sample/ext/wappush/v1_0/interface", name = "ExtWapPush")
@XmlSeeAlso({org.csapi.schema.parlayx.common.v2_1.ObjectFactory.class, org.csapi.schema.parlayx.djs.sample.ext.wappush.v1_0.local.ObjectFactory.class, org.csapi.schema.parlayx.djs.sample.ext.wappush.v1_0.ObjectFactory.class})
public interface ExtWapPush {

    /**
     * Sends a WAP push message described by the request payload.
     *
     * @param wapPushRequest request data for the push
     * @return response data returned by the service
     * @throws org.csapi.wsdl.parlayx.common.v2_0.faults.PolicyException declared WSDL policy fault
     * @throws org.csapi.wsdl.parlayx.common.v2_0.faults.ServiceException declared WSDL service fault
     */
    @WebResult(name = "wapPushResponse", targetNamespace = "http://www.csapi.org/schema/parlayx/djs/sample/ext/wappush/v1_0/local")
    @RequestWrapper(localName = "sendWapPush", targetNamespace = "http://www.csapi.org/schema/parlayx/djs/sample/ext/wappush/v1_0/local", className = "org.csapi.schema.parlayx.djs.sample.ext.wappush.v1_0.local.SendWapPush")
    @WebMethod
    @ResponseWrapper(localName = "sendWapPushResponse", targetNamespace = "http://www.csapi.org/schema/parlayx/djs/sample/ext/wappush/v1_0/local", className = "org.csapi.schema.parlayx.djs.sample.ext.wappush.v1_0.local.SendWapPushResponse")
    public org.csapi.schema.parlayx.djs.sample.ext.wappush.v1_0.WapPushRespData sendWapPush(
        @WebParam(name = "wapPushRequest", targetNamespace = "http://www.csapi.org/schema/parlayx/djs/sample/ext/wappush/v1_0/local")
        org.csapi.schema.parlayx.djs.sample.ext.wappush.v1_0.WapPushReqData wapPushRequest
    ) throws org.csapi.wsdl.parlayx.common.v2_0.faults.PolicyException, org.csapi.wsdl.parlayx.common.v2_0.faults.ServiceException;
}
| apache-2.0 |
srirammails/emf-fragments | de.hub.emffrag.testmodels/gen-src/de/hub/emffrag/testmodels/reflective/testmodel/impl/TestModelPackageImpl.java | 10568 | /**
* Copyright 2012 Markus Scheidgen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.hub.emffrag.testmodels.reflective.testmodel.impl;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.impl.EPackageImpl;
import de.hub.emffrag.testmodels.reflective.testmodel.Contents;
import de.hub.emffrag.testmodels.reflective.testmodel.TestModelFactory;
import de.hub.emffrag.testmodels.reflective.testmodel.TestModelPackage;
import de.hub.emffrag.testmodels.reflective.testmodel.TestObject;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Package</b>.
 *
 * EMF-generated package implementation for the reflective test model: it lazily
 * creates, wires, and freezes the meta-objects (EClasses, EAttributes,
 * EReferences) for Container, Contents and TestObject. Members marked
 * {@code @generated} are overwritten on regeneration and must not be hand-edited;
 * user text belongs between the begin-user-doc/end-user-doc markers.
 * <!-- end-user-doc -->
 * @generated
 */
public class TestModelPackageImpl extends EPackageImpl implements TestModelPackage {
    /**
     * <!-- begin-user-doc -->
     * Meta-object for the Container class; populated by {@link #createPackageContents()}.
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass containerEClass = null;

    /**
     * <!-- begin-user-doc -->
     * Meta-object for the Contents class; populated by {@link #createPackageContents()}.
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass contentsEClass = null;

    /**
     * <!-- begin-user-doc -->
     * Meta-object for the TestObject class; populated by {@link #createPackageContents()}.
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass testObjectEClass = null;

    /**
     * Creates an instance of the model <b>Package</b>, registered with
     * {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package
     * package URI value.
     * <p>Note: the correct way to create the package is via the static
     * factory method {@link #init init()}, which also performs
     * initialization of the package, or returns the registered package,
     * if one already exists.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see org.eclipse.emf.ecore.EPackage.Registry
     * @see de.hub.emffrag.testmodels.reflective.testmodel.TestModelPackage#eNS_URI
     * @see #init()
     * @generated
     */
    private TestModelPackageImpl() {
        super(eNS_URI, TestModelFactory.eINSTANCE);
    }

    /**
     * <!-- begin-user-doc -->
     * Guard ensuring {@link #init()} builds and registers the package only once.
     * <!-- end-user-doc -->
     * @generated
     */
    private static boolean isInited = false;

    /**
     * Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
     *
     * <p>This method is used to initialize {@link TestModelPackage#eINSTANCE} when that field is accessed.
     * Clients should not invoke it directly. Instead, they should simply access that field to obtain the package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #eNS_URI
     * @see #createPackageContents()
     * @see #initializePackageContents()
     * @generated
     */
    public static TestModelPackage init() {
        if (isInited) return (TestModelPackage)EPackage.Registry.INSTANCE.getEPackage(TestModelPackage.eNS_URI);

        // Obtain or create and register package
        TestModelPackageImpl theTestModelPackage = (TestModelPackageImpl)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof TestModelPackageImpl ? EPackage.Registry.INSTANCE.get(eNS_URI) : new TestModelPackageImpl());

        isInited = true;

        // Create package meta-data objects
        theTestModelPackage.createPackageContents();

        // Initialize created meta-data
        theTestModelPackage.initializePackageContents();

        // Mark meta-data to indicate it can't be changed
        theTestModelPackage.freeze();

        // Update the registry and return the package
        EPackage.Registry.INSTANCE.put(TestModelPackage.eNS_URI, theTestModelPackage);
        return theTestModelPackage;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getContainer() {
        return containerEClass;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getContainer_Contents() {
        return (EReference)containerEClass.getEStructuralFeatures().get(0);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getContainer_FragmentedContents() {
        return (EReference)containerEClass.getEStructuralFeatures().get(1);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getContents() {
        return contentsEClass;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getContents_Value() {
        return (EAttribute)contentsEClass.getEStructuralFeatures().get(0);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getTestObject() {
        return testObjectEClass;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getTestObject_Name() {
        return (EAttribute)testObjectEClass.getEStructuralFeatures().get(0);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getTestObject_RegularContents() {
        return (EReference)testObjectEClass.getEStructuralFeatures().get(1);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getTestObject_FragmentedContents() {
        return (EReference)testObjectEClass.getEStructuralFeatures().get(2);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getTestObject_CrossReferences() {
        return (EReference)testObjectEClass.getEStructuralFeatures().get(3);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public TestModelFactory getTestModelFactory() {
        return (TestModelFactory)getEFactoryInstance();
    }

    /**
     * <!-- begin-user-doc -->
     * Guard ensuring the meta-model objects are created only once.
     * <!-- end-user-doc -->
     * @generated
     */
    private boolean isCreated = false;

    /**
     * Creates the meta-model objects for the package.  This method is
     * guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * Builds the bare EClass/EAttribute/EReference shells; their details are
     * filled in later by {@link #initializePackageContents()}.
     * <!-- end-user-doc -->
     * @generated
     */
    public void createPackageContents() {
        if (isCreated) return;
        isCreated = true;

        // Create classes and their features
        containerEClass = createEClass(CONTAINER);
        createEReference(containerEClass, CONTAINER__CONTENTS);
        createEReference(containerEClass, CONTAINER__FRAGMENTED_CONTENTS);

        contentsEClass = createEClass(CONTENTS);
        createEAttribute(contentsEClass, CONTENTS__VALUE);

        testObjectEClass = createEClass(TEST_OBJECT);
        createEAttribute(testObjectEClass, TEST_OBJECT__NAME);
        createEReference(testObjectEClass, TEST_OBJECT__REGULAR_CONTENTS);
        createEReference(testObjectEClass, TEST_OBJECT__FRAGMENTED_CONTENTS);
        createEReference(testObjectEClass, TEST_OBJECT__CROSS_REFERENCES);
    }

    /**
     * <!-- begin-user-doc -->
     * Guard ensuring the meta-model is initialized only once.
     * <!-- end-user-doc -->
     * @generated
     */
    private boolean isInitialized = false;

    /**
     * Complete the initialization of the package and its meta-model.  This
     * method is guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * Sets supertypes, feature multiplicities/flags, and the package URI, then
     * attaches the "de.hub.emfhbase" annotations.
     * <!-- end-user-doc -->
     * @generated
     */
    public void initializePackageContents() {
        if (isInitialized) return;
        isInitialized = true;

        // Initialize package
        setName(eNAME);
        setNsPrefix(eNS_PREFIX);
        setNsURI(eNS_URI);

        // Create type parameters

        // Set bounds for type parameters

        // Add supertypes to classes
        contentsEClass.getESuperTypes().add(this.getContainer());

        // Initialize classes and features; add operations and parameters
        initEClass(containerEClass, de.hub.emffrag.testmodels.reflective.testmodel.Container.class, "Container", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEReference(getContainer_Contents(), this.getContents(), null, "contents", null, 0, -1, de.hub.emffrag.testmodels.reflective.testmodel.Container.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEReference(getContainer_FragmentedContents(), this.getContents(), null, "fragmentedContents", null, 0, -1, de.hub.emffrag.testmodels.reflective.testmodel.Container.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        initEClass(contentsEClass, Contents.class, "Contents", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEAttribute(getContents_Value(), ecorePackage.getEString(), "value", null, 0, 1, Contents.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        initEClass(testObjectEClass, TestObject.class, "TestObject", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEAttribute(getTestObject_Name(), ecorePackage.getEString(), "name", null, 0, 1, TestObject.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEReference(getTestObject_RegularContents(), this.getTestObject(), null, "regularContents", null, 0, -1, TestObject.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEReference(getTestObject_FragmentedContents(), this.getTestObject(), null, "fragmentedContents", null, 0, -1, TestObject.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEReference(getTestObject_CrossReferences(), this.getTestObject(), null, "crossReferences", null, 0, -1, TestObject.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_COMPOSITE, IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        // Create resource
        createResource(eNS_URI);

        // Create annotations
        // de.hub.emfhbase
        createDeAnnotations();
    }

    /**
     * Initializes the annotations for <b>de.hub.emfhbase</b>.
     * <!-- begin-user-doc -->
     * Marks the two "fragmentedContents" references with Fragmentation=true so
     * the fragmentation runtime treats them as fragmenting containments.
     * <!-- end-user-doc -->
     * @generated
     */
    protected void createDeAnnotations() {
        String source = "de.hub.emfhbase";
        addAnnotation
          (getContainer_FragmentedContents(),
           source,
           new String[] {
             "Fragmentation", "true"
           });
        addAnnotation
          (getTestObject_FragmentedContents(),
           source,
           new String[] {
             "Fragmentation", "true"
           });
    }

} //TestModelPackageImpl
| apache-2.0 |
satyajitdey02/blue-bits-parent | blue-bits-url-shorten-webapp/src/main/java/org/bluebits/us/filters/ShortenResourceFilter.java | 1645 | package org.bluebits.us.filters;
import com.sun.jersey.spi.container.*;
import org.apache.commons.lang.StringUtils;
import org.apache.jcs.access.exception.CacheException;
import org.bluebits.us.utils.CacheUtil;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.logging.Level;
import java.util.logging.Logger;

import javax.ws.rs.core.Response;
/**
 * Jersey resource filter for the URL-shorten endpoint. Requests pass through
 * untouched; successful (HTTP 200) responses are post-processed: the
 * base62Code -> longUrl mapping is cached (if not already present) and the
 * response entity is rewritten so only {@code shortUrl} is returned to clients.
 *
 * Created by satyajit on 5/28/15.
 */
public class ShortenResourceFilter implements ResourceFilter {

    private static final Logger LOGGER = Logger.getLogger(ShortenResourceFilter.class.getName());

    @Override
    public ContainerRequestFilter getRequestFilter() {
        // Requests are not modified; only responses are post-processed.
        return new ContainerRequestFilter() {
            @Override
            public ContainerRequest filter(ContainerRequest request) {
                return request;
            }
        };
    }

    @Override
    public ContainerResponseFilter getResponseFilter() {
        return new ContainerResponseFilter() {
            @Override
            public ContainerResponse filter(ContainerRequest request, ContainerResponse response) {
                if (response.getStatus() == 200) {
                    try {
                        // NOTE(review): assumes the resource produced a JSON String entity — confirm upstream contract.
                        JSONObject entity = new JSONObject((String) response.getEntity());
                        String base62Code = entity.getString("base62Code");
                        String longUrl = entity.getString("longUrl");
                        if (StringUtils.isBlank(CacheUtil.lookup(base62Code))) {
                            CacheUtil.cache(base62Code, longUrl);
                        }
                        response.setResponse(Response.ok(new JSONObject().put("shortUrl",
                                entity.getString("shortUrl")).toString()).type("application/json").build());
                    } catch (JSONException je) {
                        // Fix: log instead of printStackTrace(); the original response falls through unchanged.
                        LOGGER.log(Level.WARNING, "Failed to parse or rewrite shorten response entity", je);
                    } catch (CacheException ce) {
                        LOGGER.log(Level.WARNING, "Failed to cache short URL mapping", ce);
                    }
                }
                return response;
            }
        };
    }
}
| apache-2.0 |
mcwarman/interlok | logging/src/main/java/com/adaptris/logging/jmx/log4j/package-info.java | 82 | /**
* Log4j 2 specific implementation.
*/
package com.adaptris.logging.jmx.log4j; | apache-2.0 |
drlebedev/nd4j | nd4j-buffer/src/main/java/org/nd4j/linalg/api/buffer/factory/DefaultDataBufferFactory.java | 9669 | /*
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.api.buffer.factory;
import io.netty.buffer.Unpooled;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.buffer.DoubleBuffer;
import org.nd4j.linalg.api.buffer.FloatBuffer;
import org.nd4j.linalg.api.buffer.IntBuffer;
import org.nd4j.linalg.util.ArrayUtil;
import java.nio.ByteBuffer;
/**
 * Normal (default) data buffer creation: produces {@link DoubleBuffer},
 * {@link FloatBuffer} and {@link IntBuffer} instances from primitive arrays,
 * NIO buffers, or other {@link DataBuffer}s, converting element types via
 * {@link ArrayUtil} where needed.
 *
 * @author Adam Gibson
 */
public class DefaultDataBufferFactory implements DataBufferFactory {

    // Lazily resolved from the "alloc" system property on first use (see allocationMode()).
    protected DataBuffer.AllocationMode allocationMode;

    @Override
    public void setAllocationMode(DataBuffer.AllocationMode allocationMode) {
        this.allocationMode = allocationMode;
    }

    @Override
    public DataBuffer.AllocationMode allocationMode() {
        if (allocationMode == null) {
            // Fix: System.getProperty returns null when "alloc" is unset; the
            // previous code called equals() on that result and threw an NPE.
            String otherAlloc = System.getProperty("alloc");
            if ("heap".equals(otherAlloc))
                setAllocationMode(DataBuffer.AllocationMode.HEAP);
            else if ("direct".equals(otherAlloc))
                setAllocationMode(DataBuffer.AllocationMode.DIRECT);
            else if ("javacpp".equals(otherAlloc))
                setAllocationMode(DataBuffer.AllocationMode.JAVACPP);
            // NOTE(review): an unset/unrecognized value still yields a null return,
            // matching the original behavior — confirm whether a default is wanted.
        }
        return allocationMode;
    }

    /**
     * Creates a view over {@code underlyingBuffer} of {@code length} elements
     * starting at {@code offset}. Returns null for unsupported data types.
     */
    @Override
    public DataBuffer create(DataBuffer underlyingBuffer, long offset, long length) {
        if (underlyingBuffer.dataType() == DataBuffer.Type.DOUBLE) {
            return new DoubleBuffer(underlyingBuffer, length, offset);
        }
        else if (underlyingBuffer.dataType() == DataBuffer.Type.FLOAT) {
            return new FloatBuffer(underlyingBuffer, length, offset);
        }
        else if (underlyingBuffer.dataType() == DataBuffer.Type.INT) {
            return new IntBuffer(underlyingBuffer, length, offset);
        }
        return null;
    }

    // ---- offset-based creation from NIO buffers ----

    @Override
    public DataBuffer createInt(int offset, ByteBuffer buffer, int length) {
        return new IntBuffer(buffer, length, offset);
    }

    @Override
    public DataBuffer createFloat(int offset, ByteBuffer buffer, int length) {
        return new FloatBuffer(buffer, length, offset);
    }

    @Override
    public DataBuffer createDouble(int offset, ByteBuffer buffer, int length) {
        return new DoubleBuffer(buffer, length, offset);
    }

    // ---- offset-based creation of uninitialized buffers (element size in bytes) ----

    @Override
    public DataBuffer createDouble(int offset, int length) {
        return new DoubleBuffer(length, 8, offset);
    }

    @Override
    public DataBuffer createFloat(int offset, int length) {
        return new FloatBuffer(length, 4, offset);
    }

    @Override
    public DataBuffer createInt(int offset, int length) {
        return new IntBuffer(length, 4, offset);
    }

    // ---- offset-based creation from arrays (copying by default) ----

    @Override
    public DataBuffer createDouble(int offset, int[] data) {
        return createDouble(offset, data, true);
    }

    @Override
    public DataBuffer createFloat(int offset, int[] data) {
        FloatBuffer ret = new FloatBuffer(ArrayUtil.toFloats(data), true, offset);
        return ret;
    }

    @Override
    public DataBuffer createInt(int offset, int[] data) {
        return new IntBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createDouble(int offset, double[] data) {
        return new DoubleBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createDouble(int offset, byte[] data, int length) {
        // NOTE(review): length is ignored here — the whole byte[] is converted; confirm intent.
        return createDouble(offset, ArrayUtil.toDoubleArray(data), true);
    }

    @Override
    public DataBuffer createFloat(int offset, byte[] data, int length) {
        // NOTE(review): length is ignored here — the whole byte[] is converted; confirm intent.
        return createFloat(offset, ArrayUtil.toFloatArray(data), true);
    }

    @Override
    public DataBuffer createFloat(int offset, double[] data) {
        return new FloatBuffer(ArrayUtil.toFloats(data), true, offset);
    }

    @Override
    public DataBuffer createInt(int offset, double[] data) {
        return new IntBuffer(ArrayUtil.toInts(data), true, offset);
    }

    @Override
    public DataBuffer createDouble(int offset, float[] data) {
        return new DoubleBuffer(ArrayUtil.toDoubles(data), true, offset);
    }

    @Override
    public DataBuffer createFloat(int offset, float[] data) {
        return new FloatBuffer(data, true, offset);
    }

    @Override
    public DataBuffer createInt(int offset, float[] data) {
        return new IntBuffer(ArrayUtil.toInts(data), true, offset);
    }

    // ---- offset-based creation from arrays with explicit copy flag ----

    @Override
    public DataBuffer createDouble(int offset, int[] data, boolean copy) {
        // Fix: honor the caller's copy flag (it was previously hard-coded to true).
        return new DoubleBuffer(ArrayUtil.toDoubles(data), copy, offset);
    }

    @Override
    public DataBuffer createFloat(int offset, int[] data, boolean copy) {
        return new FloatBuffer(ArrayUtil.toFloats(data), copy, offset);
    }

    @Override
    public DataBuffer createInt(int offset, int[] data, boolean copy) {
        return new IntBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createDouble(int offset, double[] data, boolean copy) {
        return new DoubleBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createFloat(int offset, double[] data, boolean copy) {
        return new FloatBuffer(ArrayUtil.toFloats(data), copy, offset);
    }

    @Override
    public DataBuffer createInt(int offset, double[] data, boolean copy) {
        return new IntBuffer(ArrayUtil.toInts(data), copy, offset);
    }

    @Override
    public DataBuffer createDouble(int offset, float[] data, boolean copy) {
        return new DoubleBuffer(ArrayUtil.toDoubles(data), copy, offset);
    }

    @Override
    public DataBuffer createFloat(int offset, float[] data, boolean copy) {
        return new FloatBuffer(data, copy, offset);
    }

    @Override
    public DataBuffer createInt(int offset, float[] data, boolean copy) {
        return new IntBuffer(ArrayUtil.toInts(data), copy, offset);
    }

    // ---- creation from NIO buffers ----

    @Override
    public DataBuffer createInt(ByteBuffer buffer, int length) {
        return new IntBuffer(buffer, length);
    }

    @Override
    public DataBuffer createFloat(ByteBuffer buffer, int length) {
        return new FloatBuffer(buffer, length);
    }

    @Override
    public DataBuffer createDouble(ByteBuffer buffer, int length) {
        return new DoubleBuffer(buffer, length);
    }

    // ---- creation of uninitialized buffers ----

    @Override
    public DataBuffer createDouble(long length) {
        return new DoubleBuffer(length);
    }

    @Override
    public DataBuffer createFloat(long length) {
        return new FloatBuffer(length);
    }

    @Override
    public DataBuffer createInt(long length) {
        return new IntBuffer(length);
    }

    // ---- creation from arrays (copying by default) ----

    @Override
    public DataBuffer createDouble(int[] data) {
        return createDouble(data, true);
    }

    @Override
    public DataBuffer createFloat(int[] data) {
        return createFloat(data, true);
    }

    @Override
    public DataBuffer createInt(int[] data) {
        return createInt(data, true);
    }

    @Override
    public DataBuffer createDouble(double[] data) {
        return createDouble(data, true);
    }

    @Override
    public DataBuffer createDouble(byte[] data, int length) {
        return new DoubleBuffer(Unpooled.wrappedBuffer(data), length);
    }

    @Override
    public DataBuffer createFloat(byte[] data, int length) {
        return new FloatBuffer(Unpooled.wrappedBuffer(data), length);
    }

    @Override
    public DataBuffer createFloat(double[] data) {
        return createFloat(data, true);
    }

    @Override
    public DataBuffer createInt(double[] data) {
        return createInt(data, true);
    }

    @Override
    public DataBuffer createDouble(float[] data) {
        return createDouble(data, true);
    }

    @Override
    public DataBuffer createFloat(float[] data) {
        return createFloat(data, true);
    }

    @Override
    public DataBuffer createInt(float[] data) {
        return createInt(data, true);
    }

    // ---- creation from arrays with explicit copy flag ----

    @Override
    public DataBuffer createDouble(int[] data, boolean copy) {
        return new DoubleBuffer(ArrayUtil.toDoubles(data), copy);
    }

    @Override
    public DataBuffer createFloat(int[] data, boolean copy) {
        return new FloatBuffer(ArrayUtil.toFloats(data), copy);
    }

    @Override
    public DataBuffer createInt(int[] data, boolean copy) {
        return new IntBuffer(data, copy);
    }

    @Override
    public DataBuffer createDouble(double[] data, boolean copy) {
        return new DoubleBuffer(data, copy);
    }

    @Override
    public DataBuffer createFloat(double[] data, boolean copy) {
        return new FloatBuffer(ArrayUtil.toFloats(data), copy);
    }

    @Override
    public DataBuffer createInt(double[] data, boolean copy) {
        return new IntBuffer(ArrayUtil.toInts(data), copy);
    }

    @Override
    public DataBuffer createDouble(float[] data, boolean copy) {
        return new DoubleBuffer(data, copy);
    }

    @Override
    public DataBuffer createFloat(float[] data, boolean copy) {
        return new FloatBuffer(data, copy);
    }

    @Override
    public DataBuffer createInt(float[] data, boolean copy) {
        return new IntBuffer(ArrayUtil.toInts(data), copy);
    }
}
| apache-2.0 |
OmarHP/ud851-Excercises | Lesson09-ToDo-List/T09.03-Exercise-UriMatcher/app/src/main/java/com/example/android/todolist/data/TaskContentProvider.java | 3580 | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.todolist.data;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.Context;
import android.content.UriMatcher;
import android.database.Cursor;
import android.net.Uri;
import android.support.annotation.NonNull;
/**
 * {@link ContentProvider} over the to-do task database. The CRUD operations
 * are still unimplemented stubs; only the URI matcher and the database helper
 * wiring are in place.
 */
public class TaskContentProvider extends ContentProvider {

    // Match codes: directories use round hundreds (100, 200, ...),
    // single items within them use 101, 102, ...
    public static final int TASKS = 100;
    public static final int TASK_WITH_ID = 101;

    // Shared matcher, built once for the whole provider.
    // (Field name typo retained: it is public and callers may reference it.)
    public static UriMatcher sUriMatecher = buildUriMatcher();

    /**
     * Builds the matcher mapping content URIs onto the integer codes above.
     */
    public static UriMatcher buildUriMatcher() {
        final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
        // The whole tasks directory.
        matcher.addURI(TaskContract.AUTHORITY, TaskContract.PATH_TASKS, TASKS);
        // A single task addressed by its numeric id.
        matcher.addURI(TaskContract.AUTHORITY, TaskContract.PATH_TASKS + "/#", TASK_WITH_ID);
        return matcher;
    }

    // SQLite helper; created once in onCreate() and reused afterwards.
    private TaskDbHelper mTaskDbHelper;

    /**
     * Initializes the underlying data source — here, the SQLite DbHelper.
     */
    @Override
    public boolean onCreate() {
        mTaskDbHelper = new TaskDbHelper(getContext());
        return true;
    }

    @Override
    public Uri insert(@NonNull Uri uri, ContentValues values) {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public Cursor query(@NonNull Uri uri, String[] projection, String selection,
                        String[] selectionArgs, String sortOrder) {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public int delete(@NonNull Uri uri, String selection, String[] selectionArgs) {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public int update(@NonNull Uri uri, ContentValues values, String selection,
                      String[] selectionArgs) {
        throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public String getType(@NonNull Uri uri) {
        throw new UnsupportedOperationException("Not yet implemented");
    }
}
| apache-2.0 |
prestodb/presto | presto-main/src/main/java/com/facebook/presto/sql/planner/PlanFragmenter.java | 70871 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.Session;
import com.facebook.presto.SystemSessionProperties;
import com.facebook.presto.common.predicate.TupleDomain;
import com.facebook.presto.cost.StatsAndCosts;
import com.facebook.presto.execution.QueryManagerConfig;
import com.facebook.presto.execution.QueryManagerConfig.ExchangeMaterializationStrategy;
import com.facebook.presto.execution.scheduler.BucketNodeMap;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.NewTableLayout;
import com.facebook.presto.metadata.PartitioningMetadata;
import com.facebook.presto.metadata.TableLayout.TablePartitioning;
import com.facebook.presto.metadata.TableLayoutResult;
import com.facebook.presto.metadata.TableMetadata;
import com.facebook.presto.operator.StageExecutionDescriptor;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorId;
import com.facebook.presto.spi.ConnectorNewTableLayout;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.PrestoWarning;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.SourceLocation;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.spi.WarningCollector;
import com.facebook.presto.spi.connector.ConnectorPartitionHandle;
import com.facebook.presto.spi.plan.AggregationNode;
import com.facebook.presto.spi.plan.Assignments;
import com.facebook.presto.spi.plan.MarkDistinctNode;
import com.facebook.presto.spi.plan.PlanNode;
import com.facebook.presto.spi.plan.PlanNodeId;
import com.facebook.presto.spi.plan.PlanNodeIdAllocator;
import com.facebook.presto.spi.plan.ProjectNode;
import com.facebook.presto.spi.plan.ProjectNode.Locality;
import com.facebook.presto.spi.plan.TableScanNode;
import com.facebook.presto.spi.plan.ValuesNode;
import com.facebook.presto.spi.relation.ConstantExpression;
import com.facebook.presto.spi.relation.RowExpression;
import com.facebook.presto.spi.relation.VariableReferenceExpression;
import com.facebook.presto.spi.statistics.TableStatisticsMetadata;
import com.facebook.presto.sql.analyzer.FeaturesConfig;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.InternalPlanVisitor;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.MetadataDeleteNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SimplePlanRewriter;
import com.facebook.presto.sql.planner.plan.StatisticAggregations;
import com.facebook.presto.sql.planner.plan.StatisticsWriterNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableWriterMergeNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode.CreateName;
import com.facebook.presto.sql.planner.plan.TableWriterNode.InsertReference;
import com.facebook.presto.sql.planner.plan.TableWriterNode.RefreshMaterializedViewReference;
import com.facebook.presto.sql.planner.plan.TableWriterNode.WriterTarget;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.planner.sanity.PlanChecker;
import com.google.common.base.VerifyException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.stream.Collectors;
import static com.facebook.presto.SystemSessionProperties.getExchangeMaterializationStrategy;
import static com.facebook.presto.SystemSessionProperties.getQueryMaxStageCount;
import static com.facebook.presto.SystemSessionProperties.getTaskPartitionedWriterCount;
import static com.facebook.presto.SystemSessionProperties.isForceSingleNodeOutput;
import static com.facebook.presto.SystemSessionProperties.isGroupedExecutionEnabled;
import static com.facebook.presto.SystemSessionProperties.isRecoverableGroupedExecutionEnabled;
import static com.facebook.presto.SystemSessionProperties.isTableWriterMergeOperatorEnabled;
import static com.facebook.presto.common.type.BigintType.BIGINT;
import static com.facebook.presto.common.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.spi.StandardErrorCode.QUERY_HAS_TOO_MANY_STAGES;
import static com.facebook.presto.spi.StandardWarningCode.TOO_MANY_STAGES;
import static com.facebook.presto.spi.connector.ConnectorCapabilities.SUPPORTS_PAGE_SINK_COMMIT;
import static com.facebook.presto.spi.connector.ConnectorCapabilities.SUPPORTS_REWINDABLE_SPLIT_SOURCE;
import static com.facebook.presto.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED;
import static com.facebook.presto.sql.planner.SchedulingOrderVisitor.scheduleOrder;
import static com.facebook.presto.sql.planner.StatisticsAggregationPlanner.TableStatisticAggregation;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.COORDINATOR_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.isCompatibleSystemPartitioning;
import static com.facebook.presto.sql.planner.VariablesExtractor.extractOutputVariables;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Scope.LOCAL;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Scope.REMOTE_MATERIALIZED;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Scope.REMOTE_STREAMING;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.REPARTITION;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.REPLICATE;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.ensureSourceOrderingGatheringExchange;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.gatheringExchange;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.partitionedExchange;
import static com.facebook.presto.sql.planner.planPrinter.PlanPrinter.jsonFragmentPlan;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static com.google.common.collect.Iterables.concat;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.Streams.stream;
import static com.google.common.graph.Traverser.forTree;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.function.Function.identity;
/**
* Splits a logical plan into fragments that can be shipped and executed on distributed nodes
*/
public class PlanFragmenter
{
// Fragment id reserved for the root (output) fragment of every fragmented plan.
public static final int ROOT_FRAGMENT_ID = 0;
// Guidance appended to "too many stages" errors and warnings, suggesting how to reduce the stage count.
public static final String TOO_MANY_STAGES_MESSAGE = "If the query contains multiple DISTINCTs, please set the 'use_mark_distinct' session property to false. " +
        "If the query contains multiple CTEs that are referenced more than once, please create temporary table(s) for one or more of the CTEs.";
private final Metadata metadata;
private final NodePartitioningManager nodePartitioningManager;
private final QueryManagerConfig config;
private final SqlParser sqlParser;
// Used to validate fragments of distributed plans (forceSingleNode == false in createSubPlans).
private final PlanChecker distributedPlanChecker;
// Used to validate fragments when the whole plan is forced onto a single node.
private final PlanChecker singleNodePlanChecker;

/**
 * Creates the fragmenter. All dependencies are required.
 *
 * @throws NullPointerException if any argument is null
 */
@Inject
public PlanFragmenter(Metadata metadata, NodePartitioningManager nodePartitioningManager, QueryManagerConfig queryManagerConfig, SqlParser sqlParser, FeaturesConfig featuresConfig)
{
    this.metadata = requireNonNull(metadata, "metadata is null");
    this.nodePartitioningManager = requireNonNull(nodePartitioningManager, "nodePartitioningManager is null");
    this.config = requireNonNull(queryManagerConfig, "queryManagerConfig is null");
    this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
    // NOTE(review): the boolean presumably toggles single-node validation mode in PlanChecker — confirm there.
    this.distributedPlanChecker = new PlanChecker(requireNonNull(featuresConfig, "featuresConfig is null"), false);
    this.singleNodePlanChecker = new PlanChecker(requireNonNull(featuresConfig, "featuresConfig is null"), true);
}
/**
 * Convenience overload that allocates plan variables on top of the ones already
 * present in {@code plan} and delegates to the full overload.
 */
public SubPlan createSubPlans(Session session, Plan plan, boolean forceSingleNode, PlanNodeIdAllocator idAllocator, WarningCollector warningCollector)
{
    return createSubPlans(
            session,
            plan,
            forceSingleNode,
            idAllocator,
            new PlanVariableAllocator(plan.getTypes().allVariables()),
            warningCollector);
}
/**
 * Fragments the logical plan into a tree of {@link SubPlan}s rooted at the output fragment,
 * then post-processes the tree: reassigns partitioning handles, analyzes grouped execution
 * (unless single-node), and sanity-checks the resulting stage count.
 *
 * @param forceSingleNode when true, the whole plan is kept on a single node and
 *        grouped-execution analysis is skipped
 */
public SubPlan createSubPlans(Session session, Plan plan, boolean forceSingleNode, PlanNodeIdAllocator idAllocator, PlanVariableAllocator variableAllocator, WarningCollector warningCollector)
{
    Fragmenter fragmenter = new Fragmenter(
            session,
            metadata,
            plan.getStatsAndCosts(),
            forceSingleNode ? singleNodePlanChecker : distributedPlanChecker,
            warningCollector,
            sqlParser,
            idAllocator,
            variableAllocator,
            getTableWriterNodeIds(plan.getRoot()));
    // The root fragment gathers the query output (SINGLE distribution, no partitioning arguments).
    FragmentProperties properties = new FragmentProperties(new PartitioningScheme(
            Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()),
            plan.getRoot().getOutputVariables()));
    if (forceSingleNode || isForceSingleNodeOutput(session)) {
        properties = properties.setSingleNodeDistribution();
    }
    PlanNode root = SimplePlanRewriter.rewriteWith(fragmenter, plan.getRoot(), properties);
    SubPlan subPlan = fragmenter.buildRootFragment(root, properties);
    subPlan = reassignPartitioningHandleIfNecessary(session, subPlan);
    if (!forceSingleNode) {
        // grouped execution is not supported for SINGLE_DISTRIBUTION
        subPlan = analyzeGroupedExecution(session, subPlan, false);
    }
    checkState(!isForceSingleNodeOutput(session) || subPlan.getFragment().getPartitioning().isSingleNode(), "Root of PlanFragment is not single node");
    // TODO: Remove query_max_stage_count session property and use queryManagerConfig.getMaxStageCount() here
    sanityCheckFragmentedPlan(
            subPlan,
            warningCollector,
            getExchangeMaterializationStrategy(session),
            getQueryMaxStageCount(session),
            config.getStageCountWarningThreshold());
    return subPlan;
}
/**
 * Validates the fragmented plan: structural sanity of the fragment tree, a hard
 * limit on the number of stages (error), and a soft limit (warning) that is
 * skipped when exchange materialization bounds stage concurrency anyway.
 */
private void sanityCheckFragmentedPlan(
        SubPlan subPlan,
        WarningCollector warningCollector,
        ExchangeMaterializationStrategy exchangeMaterializationStrategy,
        int maxStageCount,
        int stageCountSoftLimit)
{
    subPlan.sanityCheck();

    int stageCount = subPlan.getAllFragments().size();
    if (stageCount > maxStageCount) {
        String message = format(
                "Number of stages in the query (%s) exceeds the allowed maximum (%s). " + TOO_MANY_STAGES_MESSAGE,
                stageCount,
                maxStageCount);
        throw new PrestoException(QUERY_HAS_TOO_MANY_STAGES, message);
    }

    // When exchange materialization is enabled, only a limited number of stages will be executed concurrently
    // (controlled by session property max_concurrent_materializations), so the soft-limit warning is skipped.
    if (exchangeMaterializationStrategy != ExchangeMaterializationStrategy.ALL && stageCount > stageCountSoftLimit) {
        warningCollector.add(new PrestoWarning(TOO_MANY_STAGES, format(
                "Number of stages in the query (%s) exceeds the soft limit (%s). " + TOO_MANY_STAGES_MESSAGE,
                stageCount,
                stageCountSoftLimit)));
    }
}
/*
* In theory, recoverable grouped execution should be decided at query section level (i.e. a connected component of stages connected by remote exchanges).
* This is because supporting mixed recoverable execution and non-recoverable execution within a query section adds unnecessary complications but provides little benefit,
* because a single task failure is still likely to fail the non-recoverable stage.
* However, since the concept of "query section" is not introduced until execution time as of now, it needs significant hacks to decide at fragmenting time.
* TODO: We should introduce "query section" and make recoverability analysis done at query section level.
*/
/**
 * Recursively decides, fragment by fragment, whether grouped execution (and possibly
 * recoverable grouped execution) should be enabled, rewriting each fragment's stage
 * execution descriptor accordingly.
 *
 * @param parentContainsTableFinish whether the parent fragment's root is an
 *        Output over TableFinish; one of the preconditions for recoverability
 */
private SubPlan analyzeGroupedExecution(Session session, SubPlan subPlan, boolean parentContainsTableFinish)
{
    PlanFragment fragment = subPlan.getFragment();
    GroupedExecutionProperties properties = fragment.getRoot().accept(new GroupedExecutionTagger(session, metadata, nodePartitioningManager), null);
    if (properties.isSubTreeUseful()) {
        // Prefer dynamic bucket-to-node assignment only when every remote source is a broadcast (REPLICATE) exchange.
        boolean preferDynamic = fragment.getRemoteSourceNodes().stream().allMatch(node -> node.getExchangeType() == REPLICATE);
        BucketNodeMap bucketNodeMap = nodePartitioningManager.getBucketNodeMap(session, fragment.getPartitioning(), preferDynamic);
        if (bucketNodeMap.isDynamic()) {
            /*
             * We currently only support recoverable grouped execution if the following statements hold true:
             * - Current session enables recoverable grouped execution and table writer merge operator
             * - Parent sub plan contains TableFinishNode
             * - Current sub plan's root is TableWriterMergeNode or TableWriterNode
             * - Input connectors supports split source rewind
             * - Output connectors supports partition commit
             * - Bucket node map uses dynamic scheduling
             * - One table writer per task
             */
            boolean recoverable = isRecoverableGroupedExecutionEnabled(session) &&
                    isTableWriterMergeOperatorEnabled(session) &&
                    parentContainsTableFinish &&
                    (fragment.getRoot() instanceof TableWriterMergeNode || fragment.getRoot() instanceof TableWriterNode) &&
                    properties.isRecoveryEligible();
            if (recoverable) {
                fragment = fragment.withRecoverableGroupedExecution(properties.getCapableTableScanNodes(), properties.getTotalLifespans());
            }
            else {
                fragment = fragment.withDynamicLifespanScheduleGroupedExecution(properties.getCapableTableScanNodes(), properties.getTotalLifespans());
            }
        }
        else {
            // Static bucket-to-node mapping: lifespans are scheduled on fixed nodes.
            fragment = fragment.withFixedLifespanScheduleGroupedExecution(properties.getCapableTableScanNodes(), properties.getTotalLifespans());
        }
    }
    // Recurse into children, telling them whether this (rewritten) fragment commits a table write.
    ImmutableList.Builder<SubPlan> result = ImmutableList.builder();
    boolean containsTableFinishNode = containsTableFinishNode(fragment);
    for (SubPlan child : subPlan.getChildren()) {
        result.add(analyzeGroupedExecution(session, child, containsTableFinishNode));
    }
    return new SubPlan(fragment, result.build());
}
/**
 * Returns true only for the classic write topology: an Output node whose single
 * source is a TableFinish node.
 */
private static boolean containsTableFinishNode(PlanFragment planFragment)
{
    PlanNode fragmentRoot = planFragment.getRoot();
    if (!(fragmentRoot instanceof OutputNode)) {
        return false;
    }
    return getOnlyElement(fragmentRoot.getSources()) instanceof TableFinishNode;
}
/**
 * Entry point for partitioning-handle reassignment: seeds the recursion with the
 * root fragment's own partitioning.
 */
private SubPlan reassignPartitioningHandleIfNecessary(Session session, SubPlan subPlan)
{
    PartitioningHandle rootPartitioning = subPlan.getFragment().getPartitioning();
    return reassignPartitioningHandleIfNecessaryHelper(session, subPlan, rootPartitioning);
}
/**
 * Recursively rewrites each fragment so that its sources use a partitioning handle
 * consistent with the fragment's own partitioning, and — for connector-provided
 * output partitionings — substitutes the handle supplied by the consuming fragment.
 *
 * @param newOutputPartitioningHandle the partitioning handle of the consuming
 *        (parent) fragment, applied to this fragment's output scheme when applicable
 */
private SubPlan reassignPartitioningHandleIfNecessaryHelper(Session session, SubPlan subPlan, PartitioningHandle newOutputPartitioningHandle)
{
    PlanFragment fragment = subPlan.getFragment();
    PlanNode newRoot = fragment.getRoot();
    // If the fragment's partitioning is SINGLE or COORDINATOR_ONLY, leave the sources as is (this is for single-node execution)
    if (!fragment.getPartitioning().isSingleNode()) {
        PartitioningHandleReassigner partitioningHandleReassigner = new PartitioningHandleReassigner(fragment.getPartitioning(), metadata, session);
        newRoot = SimplePlanRewriter.rewriteWith(partitioningHandleReassigner, newRoot);
    }
    PartitioningScheme outputPartitioningScheme = fragment.getPartitioningScheme();
    Partitioning newOutputPartitioning = outputPartitioningScheme.getPartitioning();
    if (outputPartitioningScheme.getPartitioning().getHandle().getConnectorId().isPresent()) {
        // Do not replace the handle if the source's output handle is a system one, e.g. broadcast.
        newOutputPartitioning = newOutputPartitioning.withAlternativePartitioningHandle(newOutputPartitioningHandle);
    }
    // Rebuild the fragment with the (possibly) rewritten root and output partitioning; everything else is carried over.
    PlanFragment newFragment = new PlanFragment(
            fragment.getId(),
            newRoot,
            fragment.getVariables(),
            fragment.getPartitioning(),
            fragment.getTableScanSchedulingOrder(),
            new PartitioningScheme(
                    newOutputPartitioning,
                    outputPartitioningScheme.getOutputLayout(),
                    outputPartitioningScheme.getHashColumn(),
                    outputPartitioningScheme.isReplicateNullsAndAny(),
                    outputPartitioningScheme.getBucketToPartition()),
            fragment.getStageExecutionDescriptor(),
            fragment.isOutputTableWriterFragment(),
            fragment.getStatsAndCosts(),
            fragment.getJsonRepresentation());
    // Children see this fragment's partitioning as their consumer's handle.
    ImmutableList.Builder<SubPlan> childrenBuilder = ImmutableList.builder();
    for (SubPlan child : subPlan.getChildren()) {
        childrenBuilder.add(reassignPartitioningHandleIfNecessaryHelper(session, child, fragment.getPartitioning()));
    }
    return new SubPlan(newFragment, childrenBuilder.build());
}
/**
 * Collects the ids of all TableWriterNodes reachable from {@code plan},
 * walking the plan tree depth-first.
 */
private static Set<PlanNodeId> getTableWriterNodeIds(PlanNode plan)
{
    ImmutableSet.Builder<PlanNodeId> writerIds = ImmutableSet.builder();
    for (PlanNode node : forTree(PlanNode::getSources).depthFirstPreOrder(plan)) {
        if (node instanceof TableWriterNode) {
            writerIds.add(node.getId());
        }
    }
    return writerIds.build();
}
/**
 * Plan rewriter that cuts the logical plan into fragments at remote exchange
 * boundaries. Each visit method records distribution requirements in the
 * {@link FragmentProperties} of the fragment currently being built; remote
 * exchanges spawn child fragments, and materialized exchanges are replaced by
 * a temporary-table write (child) plus a scan of that table (current fragment).
 */
private static class Fragmenter
        extends SimplePlanRewriter<FragmentProperties>
{
    private final Session session;
    private final Metadata metadata;
    private final PlanNodeIdAllocator idAllocator;
    private final PlanVariableAllocator variableAllocator;
    private final StatsAndCosts statsAndCosts;
    private final PlanChecker planChecker;
    private final WarningCollector warningCollector;
    private final SqlParser sqlParser;
    // Ids of TableWriterNodes that write the query's output table (as opposed to temporary tables).
    private final Set<PlanNodeId> outputTableWriterNodeIds;
    // The root fragment id is reserved, so generated ids start immediately after it.
    private int nextFragmentId = ROOT_FRAGMENT_ID + 1;
    private final StatisticsAggregationPlanner statisticsAggregationPlanner;

    public Fragmenter(
            Session session,
            Metadata metadata,
            StatsAndCosts statsAndCosts,
            PlanChecker planChecker,
            WarningCollector warningCollector,
            SqlParser sqlParser,
            PlanNodeIdAllocator idAllocator,
            PlanVariableAllocator variableAllocator,
            Set<PlanNodeId> outputTableWriterNodeIds)
    {
        this.session = requireNonNull(session, "session is null");
        this.metadata = requireNonNull(metadata, "metadata is null");
        this.statsAndCosts = requireNonNull(statsAndCosts, "statsAndCosts is null");
        this.planChecker = requireNonNull(planChecker, "planChecker is null");
        this.warningCollector = requireNonNull(warningCollector, "warningCollector is null");
        this.sqlParser = requireNonNull(sqlParser, "sqlParser is null");
        this.idAllocator = requireNonNull(idAllocator, "idAllocator is null");
        this.variableAllocator = requireNonNull(variableAllocator, "variableAllocator is null");
        this.outputTableWriterNodeIds = ImmutableSet.copyOf(requireNonNull(outputTableWriterNodeIds, "outputTableWriterNodeIds is null"));
        this.statisticsAggregationPlanner = new StatisticsAggregationPlanner(variableAllocator, metadata);
    }

    // Builds the fragment for the already-rewritten plan root using the reserved root fragment id.
    public SubPlan buildRootFragment(PlanNode root, FragmentProperties properties)
    {
        return buildFragment(root, properties, new PlanFragmentId(ROOT_FRAGMENT_ID));
    }

    private PlanFragmentId nextFragmentId()
    {
        return new PlanFragmentId(nextFragmentId++);
    }

    /**
     * Materializes a PlanFragment from a rewritten subtree plus the distribution
     * requirements collected while rewriting it, and wraps it together with its
     * child sub plans.
     */
    private SubPlan buildFragment(PlanNode root, FragmentProperties properties, PlanFragmentId fragmentId)
    {
        List<PlanNodeId> schedulingOrder = scheduleOrder(root);
        checkArgument(
                properties.getPartitionedSources().equals(ImmutableSet.copyOf(schedulingOrder)),
                "Expected scheduling order (%s) to contain an entry for all partitioned sources (%s)",
                schedulingOrder,
                properties.getPartitionedSources());
        Set<VariableReferenceExpression> fragmentVariableTypes = extractOutputVariables(root);
        planChecker.validatePlanFragment(root, session, metadata, sqlParser, TypeProvider.fromVariables(fragmentVariableTypes), warningCollector);
        Set<PlanNodeId> tableWriterNodeIds = getTableWriterNodeIds(root);
        // A fragment counts as an output-table-writer fragment if it contains any writer of the query's output table.
        boolean outputTableWriterFragment = tableWriterNodeIds.stream().anyMatch(outputTableWriterNodeIds::contains);
        if (outputTableWriterFragment) {
            // Output-table writers and temporary-table writers must not be mixed in one fragment.
            verify(
                    outputTableWriterNodeIds.containsAll(tableWriterNodeIds),
                    "outputTableWriterNodeIds %s must include either all or none of tableWriterNodeIds %s",
                    outputTableWriterNodeIds,
                    tableWriterNodeIds);
        }
        PlanFragment fragment = new PlanFragment(
                fragmentId,
                root,
                fragmentVariableTypes,
                properties.getPartitioningHandle(),
                schedulingOrder,
                properties.getPartitioningScheme(),
                StageExecutionDescriptor.ungroupedExecution(),
                outputTableWriterFragment,
                statsAndCosts.getForSubplan(root),
                Optional.of(jsonFragmentPlan(root, fragmentVariableTypes, metadata.getFunctionAndTypeManager(), session)));
        return new SubPlan(fragment, properties.getChildren());
    }

    @Override
    public PlanNode visitOutput(OutputNode node, RewriteContext<FragmentProperties> context)
    {
        // The query output may be forced onto a single node by the session.
        if (isForceSingleNodeOutput(session)) {
            context.get().setSingleNodeDistribution();
        }
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitExplainAnalyze(ExplainAnalyzeNode node, RewriteContext<FragmentProperties> context)
    {
        // EXPLAIN ANALYZE output is produced on the coordinator.
        context.get().setCoordinatorOnlyDistribution();
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitStatisticsWriterNode(StatisticsWriterNode node, RewriteContext<FragmentProperties> context)
    {
        // Statistics writes are coordinator-only.
        context.get().setCoordinatorOnlyDistribution();
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitTableFinish(TableFinishNode node, RewriteContext<FragmentProperties> context)
    {
        // Table commit runs on the coordinator.
        context.get().setCoordinatorOnlyDistribution();
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitMetadataDelete(MetadataDeleteNode node, RewriteContext<FragmentProperties> context)
    {
        // Metadata-only deletes are coordinator-only.
        context.get().setCoordinatorOnlyDistribution();
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitTableScan(TableScanNode node, RewriteContext<FragmentProperties> context)
    {
        // Use the connector-provided table partitioning when available; otherwise plain source distribution.
        PartitioningHandle partitioning = metadata.getLayout(session, node.getTable())
                .getTablePartitioning()
                .map(TablePartitioning::getPartitioningHandle)
                .orElse(SOURCE_DISTRIBUTION);
        context.get().addSourceDistribution(node.getId(), partitioning, metadata, session);
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitTableWriter(TableWriterNode node, RewriteContext<FragmentProperties> context)
    {
        // A writer may impose either the target table's partitioning or a preferred shuffle partitioning.
        if (node.getTablePartitioningScheme().isPresent()) {
            context.get().setDistribution(node.getTablePartitioningScheme().get().getPartitioning().getHandle(), metadata, session);
        }
        if (node.getPreferredShufflePartitioningScheme().isPresent()) {
            context.get().setDistribution(node.getPreferredShufflePartitioningScheme().get().getPartitioning().getHandle(), metadata, session);
        }
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitValues(ValuesNode node, RewriteContext<FragmentProperties> context)
    {
        // Inline literal values are produced on a single node.
        context.get().setSingleNodeDistribution();
        return context.defaultRewrite(node, context.get());
    }

    @Override
    public PlanNode visitExchange(ExchangeNode exchange, RewriteContext<FragmentProperties> context)
    {
        switch (exchange.getScope()) {
            case LOCAL:
                // Local exchanges do not cross fragment boundaries.
                return context.defaultRewrite(exchange, context.get());
            case REMOTE_STREAMING:
                return createRemoteStreamingExchange(exchange, context);
            case REMOTE_MATERIALIZED:
                return createRemoteMaterializedExchange(exchange, context);
            default:
                throw new IllegalArgumentException("Unexpected exchange scope: " + exchange.getScope());
        }
    }

    /**
     * Cuts the plan at a remote streaming exchange: each exchange source becomes
     * a child fragment and the exchange itself is replaced by a RemoteSourceNode.
     */
    private PlanNode createRemoteStreamingExchange(ExchangeNode exchange, RewriteContext<FragmentProperties> context)
    {
        checkArgument(exchange.getScope() == REMOTE_STREAMING, "Unexpected exchange scope: %s", exchange.getScope());
        PartitioningScheme partitioningScheme = exchange.getPartitioningScheme();
        // GATHER pins the consuming fragment to one node; REPARTITION imposes the exchange's partitioning.
        if (exchange.getType() == ExchangeNode.Type.GATHER) {
            context.get().setSingleNodeDistribution();
        }
        else if (exchange.getType() == ExchangeNode.Type.REPARTITION) {
            context.get().setDistribution(partitioningScheme.getPartitioning().getHandle(), metadata, session);
        }
        ImmutableList.Builder<SubPlan> builder = ImmutableList.builder();
        for (int sourceIndex = 0; sourceIndex < exchange.getSources().size(); sourceIndex++) {
            // Each child fragment's output layout is the exchange scheme translated to that source's inputs.
            FragmentProperties childProperties = new FragmentProperties(partitioningScheme.translateOutputLayout(exchange.getInputs().get(sourceIndex)));
            builder.add(buildSubPlan(exchange.getSources().get(sourceIndex), childProperties, context));
        }
        List<SubPlan> children = builder.build();
        context.get().addChildren(children);
        List<PlanFragmentId> childrenIds = children.stream()
                .map(SubPlan::getFragment)
                .map(PlanFragment::getId)
                .collect(toImmutableList());
        return new RemoteSourceNode(exchange.getSourceLocation(), exchange.getId(), childrenIds, exchange.getOutputVariables(), exchange.isEnsureSourceOrdering(), exchange.getOrderingScheme(), exchange.getType());
    }

    /**
     * Replaces a materialized remote exchange with a write to a temporary table
     * (a child sub plan rooted at a TableFinishNode) followed by a scan of that
     * temporary table in the current fragment.
     */
    private PlanNode createRemoteMaterializedExchange(ExchangeNode exchange, RewriteContext<FragmentProperties> context)
    {
        checkArgument(exchange.getType() == REPARTITION, "Unexpected exchange type: %s", exchange.getType());
        checkArgument(exchange.getScope() == REMOTE_MATERIALIZED, "Unexpected exchange scope: %s", exchange.getScope());
        PartitioningScheme partitioningScheme = exchange.getPartitioningScheme();
        PartitioningHandle partitioningHandle = partitioningScheme.getPartitioning().getHandle();
        ConnectorId connectorId = partitioningHandle.getConnectorId()
                .orElseThrow(() -> new PrestoException(
                        NOT_SUPPORTED,
                        "The \"partitioning_provider_catalog\" session property must be set to enable the exchanges materialization. " +
                                "The catalog must support providing a custom partitioning and storing temporary tables."));
        Partitioning partitioning = partitioningScheme.getPartitioning();
        // Constant partitioning arguments are turned into dedicated variables/columns of the temporary table.
        PartitioningVariableAssignments partitioningVariableAssignments = assignPartitioningVariables(partitioning);
        Map<VariableReferenceExpression, ColumnMetadata> variableToColumnMap = assignTemporaryTableColumnNames(exchange.getOutputVariables(), partitioningVariableAssignments.getConstants().keySet());
        List<VariableReferenceExpression> partitioningVariables = partitioningVariableAssignments.getVariables();
        List<String> partitionColumns = partitioningVariables.stream()
                .map(variable -> variableToColumnMap.get(variable).getName())
                .collect(toImmutableList());
        PartitioningMetadata partitioningMetadata = new PartitioningMetadata(partitioningHandle, partitionColumns);
        TableHandle temporaryTableHandle;
        try {
            temporaryTableHandle = metadata.createTemporaryTable(
                    session,
                    connectorId.getCatalogName(),
                    ImmutableList.copyOf(variableToColumnMap.values()),
                    Optional.of(partitioningMetadata));
        }
        catch (PrestoException e) {
            // Surface a friendlier error when the target catalog cannot host temporary tables.
            if (e.getErrorCode().equals(NOT_SUPPORTED.toErrorCode())) {
                throw new PrestoException(
                        NOT_SUPPORTED,
                        format("Temporary table cannot be created in catalog \"%s\": %s", connectorId.getCatalogName(), e.getMessage()),
                        e);
            }
            throw e;
        }
        TableScanNode scan = createTemporaryTableScan(
                exchange.getSourceLocation(),
                temporaryTableHandle,
                exchange.getOutputVariables(),
                variableToColumnMap,
                partitioningMetadata);
        checkArgument(
                !exchange.getPartitioningScheme().isReplicateNullsAndAny(),
                "materialized remote exchange is not supported when replicateNullsAndAny is needed");
        TableFinishNode write = createTemporaryTableWrite(
                scan.getSourceLocation(),
                temporaryTableHandle,
                variableToColumnMap,
                exchange.getOutputVariables(),
                exchange.getInputs(),
                exchange.getSources(),
                partitioningVariableAssignments.getConstants(),
                partitioningMetadata);
        // The write runs as its own sub plan whose result (row count) is gathered on the coordinator.
        FragmentProperties writeProperties = new FragmentProperties(new PartitioningScheme(
                Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()),
                write.getOutputVariables()));
        writeProperties.setCoordinatorOnlyDistribution();
        List<SubPlan> children = ImmutableList.of(buildSubPlan(write, writeProperties, context));
        context.get().addChildren(children);
        // Continue rewriting with the temporary-table scan standing in for the exchange.
        return visitTableScan(scan, context);
    }

    /**
     * Maps each partitioning argument to a variable: variable arguments are used
     * as-is; constant arguments get a fresh "constant_partition" variable recorded
     * in the returned constants map.
     */
    private PartitioningVariableAssignments assignPartitioningVariables(Partitioning partitioning)
    {
        ImmutableList.Builder<VariableReferenceExpression> variables = ImmutableList.builder();
        ImmutableMap.Builder<VariableReferenceExpression, RowExpression> constants = ImmutableMap.builder();
        for (RowExpression argument : partitioning.getArguments()) {
            checkArgument(argument instanceof ConstantExpression || argument instanceof VariableReferenceExpression, format("Expect argument to be ConstantExpression or VariableReferenceExpression, get %s (%s)", argument.getClass(), argument));
            VariableReferenceExpression variable;
            if (argument instanceof ConstantExpression) {
                variable = variableAllocator.newVariable(argument.getSourceLocation(), "constant_partition", argument.getType());
                constants.put(variable, argument);
            }
            else {
                variable = (VariableReferenceExpression) argument;
            }
            variables.add(variable);
        }
        return new PartitioningVariableAssignments(variables.build(), constants.build());
    }

    /**
     * Assigns a positionally unique column name of the form "_c&lt;index&gt;_&lt;variable name&gt;"
     * to every output and constant-partitioning variable of the temporary table.
     */
    private Map<VariableReferenceExpression, ColumnMetadata> assignTemporaryTableColumnNames(Collection<VariableReferenceExpression> outputVariables, Collection<VariableReferenceExpression> constantPartitioningVariables)
    {
        ImmutableMap.Builder<VariableReferenceExpression, ColumnMetadata> result = ImmutableMap.builder();
        int column = 0;
        for (VariableReferenceExpression outputVariable : concat(outputVariables, constantPartitioningVariables)) {
            String columnName = format("_c%d_%s", column, outputVariable.getName());
            result.put(outputVariable, new ColumnMetadata(columnName, outputVariable.getType()));
            column++;
        }
        return result.build();
    }

    /**
     * Builds a TableScanNode over the freshly created temporary table, verifying
     * that the connector-provided layout enforces no extra constraints, exposes
     * all columns, and matches the requested partitioning.
     */
    private TableScanNode createTemporaryTableScan(
            Optional<SourceLocation> sourceLocation,
            TableHandle tableHandle,
            List<VariableReferenceExpression> outputVariables,
            Map<VariableReferenceExpression, ColumnMetadata> variableToColumnMap,
            PartitioningMetadata expectedPartitioningMetadata)
    {
        Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, tableHandle);
        Map<VariableReferenceExpression, ColumnMetadata> outputColumns = outputVariables.stream()
                .collect(toImmutableMap(identity(), variableToColumnMap::get));
        Set<ColumnHandle> outputColumnHandles = outputColumns.values().stream()
                .map(ColumnMetadata::getName)
                .map(columnHandles::get)
                .collect(toImmutableSet());
        TableLayoutResult selectedLayout = metadata.getLayout(session, tableHandle, Constraint.alwaysTrue(), Optional.of(outputColumnHandles));
        verify(selectedLayout.getUnenforcedConstraint().equals(TupleDomain.all()), "temporary table layout shouldn't enforce any constraints");
        verify(!selectedLayout.getLayout().getColumns().isPresent(), "temporary table layout must provide all the columns");
        TablePartitioning expectedPartitioning = new TablePartitioning(
                expectedPartitioningMetadata.getPartitioningHandle(),
                expectedPartitioningMetadata.getPartitionColumns().stream()
                        .map(columnHandles::get)
                        .collect(toImmutableList()));
        verify(selectedLayout.getLayout().getTablePartitioning().equals(Optional.of(expectedPartitioning)), "invalid temporary table partitioning");
        Map<VariableReferenceExpression, ColumnHandle> assignments = outputVariables.stream()
                .collect(toImmutableMap(identity(), variable -> columnHandles.get(outputColumns.get(variable).getName())));
        return new TableScanNode(
                sourceLocation,
                idAllocator.getNextId(),
                selectedLayout.getLayout().getNewTableHandle(),
                outputVariables,
                assignments,
                TupleDomain.all(),
                TupleDomain.all());
    }

    /**
     * Builds the write side of a materialized exchange: projects constant partition
     * values onto each source, repartitions into the temporary table's partitioning,
     * writes through TableWriter (optionally merged via TableWriterMerge), and
     * commits through TableFinish. Statistics collection for temporary tables is
     * controlled by a session property and is disabled by default.
     */
    private TableFinishNode createTemporaryTableWrite(
            Optional<SourceLocation> sourceLocation, TableHandle tableHandle,
            Map<VariableReferenceExpression, ColumnMetadata> variableToColumnMap,
            List<VariableReferenceExpression> outputs,
            List<List<VariableReferenceExpression>> inputs,
            List<PlanNode> sources,
            Map<VariableReferenceExpression, RowExpression> constantExpressions,
            PartitioningMetadata partitioningMetadata)
    {
        if (!constantExpressions.isEmpty()) {
            List<VariableReferenceExpression> constantVariables = ImmutableList.copyOf(constantExpressions.keySet());
            // update outputs
            outputs = ImmutableList.<VariableReferenceExpression>builder()
                    .addAll(outputs)
                    .addAll(constantVariables)
                    .build();
            // update inputs
            inputs = inputs.stream()
                    .map(input -> ImmutableList.<VariableReferenceExpression>builder()
                            .addAll(input)
                            .addAll(constantVariables)
                            .build())
                    .collect(toImmutableList());
            // update sources: project the constant values onto every source's output
            sources = sources.stream()
                    .map(source -> {
                        Assignments.Builder assignments = Assignments.builder();
                        source.getOutputVariables().forEach(variable -> assignments.put(variable, new VariableReferenceExpression(variable.getSourceLocation(), variable.getName(), variable.getType())));
                        constantVariables.forEach(variable -> assignments.put(variable, constantExpressions.get(variable)));
                        return new ProjectNode(source.getSourceLocation(), idAllocator.getNextId(), source, assignments.build(), Locality.LOCAL);
                    })
                    .collect(toImmutableList());
        }
        NewTableLayout insertLayout = metadata.getInsertLayout(session, tableHandle)
                // TODO: support insert into non partitioned table
                .orElseThrow(() -> new IllegalArgumentException("insertLayout for the temporary table must be present"));
        PartitioningHandle partitioningHandle = partitioningMetadata.getPartitioningHandle();
        List<String> partitionColumns = partitioningMetadata.getPartitionColumns();
        ConnectorNewTableLayout expectedNewTableLayout = new ConnectorNewTableLayout(partitioningHandle.getConnectorHandle(), partitionColumns);
        verify(insertLayout.getLayout().equals(expectedNewTableLayout), "unexpected new table layout");
        Map<String, VariableReferenceExpression> columnNameToVariable = variableToColumnMap.entrySet().stream()
                .collect(toImmutableMap(entry -> entry.getValue().getName(), Map.Entry::getKey));
        List<VariableReferenceExpression> partitioningVariables = partitionColumns.stream()
                .map(columnNameToVariable::get)
                .collect(toImmutableList());
        List<String> outputColumnNames = outputs.stream()
                .map(variableToColumnMap::get)
                .map(ColumnMetadata::getName)
                .collect(toImmutableList());
        Set<VariableReferenceExpression> outputNotNullColumnVariables = outputs.stream()
                .filter(variable -> variableToColumnMap.get(variable) != null && !(variableToColumnMap.get(variable).isNullable()))
                .collect(Collectors.toSet());
        SchemaTableName schemaTableName = metadata.getTableMetadata(session, tableHandle).getTable();
        InsertReference insertReference = new InsertReference(tableHandle, schemaTableName);
        PartitioningScheme partitioningScheme = new PartitioningScheme(
                Partitioning.create(partitioningHandle, partitioningVariables),
                outputs,
                Optional.empty(),
                false,
                Optional.empty());
        // Remote repartition into the temporary table's partitioning.
        ExchangeNode writerRemoteSource = new ExchangeNode(
                sourceLocation,
                idAllocator.getNextId(),
                REPARTITION,
                REMOTE_STREAMING,
                partitioningScheme,
                sources,
                inputs,
                false,
                Optional.empty());
        ExchangeNode writerSource;
        if (getTaskPartitionedWriterCount(session) == 1) {
            // A single writer per task: a local gather is sufficient.
            writerSource = gatheringExchange(
                    idAllocator.getNextId(),
                    LOCAL,
                    writerRemoteSource);
        }
        else {
            writerSource = partitionedExchange(
                    idAllocator.getNextId(),
                    LOCAL,
                    writerRemoteSource,
                    partitioningScheme);
        }
        String catalogName = tableHandle.getConnectorId().getCatalogName();
        TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle);
        TableStatisticsMetadata statisticsMetadata = metadata.getStatisticsCollectionMetadataForWrite(session, catalogName, tableMetadata.getMetadata());
        TableStatisticAggregation statisticsResult = statisticsAggregationPlanner.createStatisticsAggregation(statisticsMetadata, columnNameToVariable, false);
        StatisticAggregations.Parts aggregations = statisticsResult.getAggregations().splitIntoPartialAndFinal(variableAllocator, metadata.getFunctionAndTypeManager());
        PlanNode tableWriterMerge;
        // Disabled by default. Enable when the column statistics are essential for future runtime adaptive plan optimizations
        boolean enableStatsCollectionForTemporaryTable = SystemSessionProperties.isEnableStatsCollectionForTemporaryTable(session);
        if (isTableWriterMergeOperatorEnabled(session)) {
            StatisticAggregations.Parts localAggregations = aggregations.getPartialAggregation().splitIntoPartialAndIntermediate(variableAllocator, metadata.getFunctionAndTypeManager());
            tableWriterMerge = new TableWriterMergeNode(
                    sourceLocation,
                    idAllocator.getNextId(),
                    gatheringExchange(
                            idAllocator.getNextId(),
                            LOCAL,
                            new TableWriterNode(
                                    sourceLocation,
                                    idAllocator.getNextId(),
                                    writerSource,
                                    Optional.of(insertReference),
                                    variableAllocator.newVariable("partialrows", BIGINT),
                                    variableAllocator.newVariable("partialfragments", VARBINARY),
                                    variableAllocator.newVariable("partialtablecommitcontext", VARBINARY),
                                    outputs,
                                    outputColumnNames,
                                    outputNotNullColumnVariables,
                                    Optional.of(partitioningScheme),
                                    Optional.empty(),
                                    enableStatsCollectionForTemporaryTable ? Optional.of(localAggregations.getPartialAggregation()) : Optional.empty())),
                    variableAllocator.newVariable("intermediaterows", BIGINT),
                    variableAllocator.newVariable("intermediatefragments", VARBINARY),
                    variableAllocator.newVariable("intermediatetablecommitcontext", VARBINARY),
                    enableStatsCollectionForTemporaryTable ? Optional.of(localAggregations.getIntermediateAggregation()) : Optional.empty());
        }
        else {
            tableWriterMerge = new TableWriterNode(
                    sourceLocation,
                    idAllocator.getNextId(),
                    writerSource,
                    Optional.of(insertReference),
                    variableAllocator.newVariable("partialrows", BIGINT),
                    variableAllocator.newVariable("partialfragments", VARBINARY),
                    variableAllocator.newVariable("partialtablecommitcontext", VARBINARY),
                    outputs,
                    outputColumnNames,
                    outputNotNullColumnVariables,
                    Optional.of(partitioningScheme),
                    Optional.empty(),
                    enableStatsCollectionForTemporaryTable ? Optional.of(aggregations.getPartialAggregation()) : Optional.empty());
        }
        return new TableFinishNode(
                sourceLocation,
                idAllocator.getNextId(),
                ensureSourceOrderingGatheringExchange(
                        idAllocator.getNextId(),
                        REMOTE_STREAMING,
                        tableWriterMerge),
                Optional.of(insertReference),
                variableAllocator.newVariable("rows", BIGINT),
                enableStatsCollectionForTemporaryTable ? Optional.of(aggregations.getFinalAggregation()) : Optional.empty(),
                enableStatsCollectionForTemporaryTable ? Optional.of(statisticsResult.getDescriptor()) : Optional.empty());
    }

    // Reserves the fragment id before recursing, then rewrites the subtree and builds its fragment.
    private SubPlan buildSubPlan(PlanNode node, FragmentProperties properties, RewriteContext<FragmentProperties> context)
    {
        PlanFragmentId planFragmentId = nextFragmentId();
        PlanNode child = context.rewrite(node, properties);
        return buildFragment(child, properties, planFragmentId);
    }
}
/**
 * Mutable state accumulated for the fragment currently being built: its child
 * sub plans, its output partitioning scheme, the execution distribution imposed
 * by nodes visited so far, and the ids of its partitioned sources.
 */
private static class FragmentProperties
{
    // Child sub plans feeding this fragment through remote exchanges.
    private final List<SubPlan> children = new ArrayList<>();
    // How this fragment's output is partitioned for the consuming fragment.
    private final PartitioningScheme partitioningScheme;
    // Execution distribution of the fragment; empty until some node imposes a requirement.
    private Optional<PartitioningHandle> partitioningHandle = Optional.empty();
    // Ids of the leaf (partitioned-source) nodes of this fragment.
    private final Set<PlanNodeId> partitionedSources = new HashSet<>();

    public FragmentProperties(PartitioningScheme partitioningScheme)
    {
        this.partitioningScheme = partitioningScheme;
    }

    public List<SubPlan> getChildren()
    {
        // Exposes the internal mutable list (callers may append to it).
        return children;
    }
/**
 * Requires this fragment to execute on a single node. Idempotent when a
 * single-node distribution is already set; otherwise fails if any other
 * distribution was previously imposed.
 *
 * @return this, for chaining
 */
public FragmentProperties setSingleNodeDistribution()
{
    if (partitioningHandle.isPresent() && partitioningHandle.get().isSingleNode()) {
        // already single node distribution
        return this;
    }
    checkState(!partitioningHandle.isPresent(),
            "Cannot overwrite partitioning with %s (currently set to %s)",
            SINGLE_DISTRIBUTION,
            partitioningHandle);
    partitioningHandle = Optional.of(SINGLE_DISTRIBUTION);
    return this;
}
public FragmentProperties setDistribution(PartitioningHandle distribution, Metadata metadata, Session session)
{
if (!partitioningHandle.isPresent()) {
partitioningHandle = Optional.of(distribution);
return this;
}
PartitioningHandle currentPartitioning = this.partitioningHandle.get();
if (isCompatibleSystemPartitioning(currentPartitioning, distribution)) {
return this;
}
if (currentPartitioning.equals(SOURCE_DISTRIBUTION)) {
this.partitioningHandle = Optional.of(distribution);
return this;
}
// If already system SINGLE or COORDINATOR_ONLY, leave it as is (this is for single-node execution)
if (currentPartitioning.isSingleNode()) {
return this;
}
if (currentPartitioning.equals(distribution)) {
return this;
}
Optional<PartitioningHandle> commonPartitioning = metadata.getCommonPartitioning(session, currentPartitioning, distribution);
if (commonPartitioning.isPresent()) {
partitioningHandle = commonPartitioning;
return this;
}
if (metadata.isRefinedPartitioningOver(session, distribution, currentPartitioning)) {
return this;
}
throw new IllegalStateException(format(
"Cannot set distribution to %s. Already set to %s",
distribution,
this.partitioningHandle));
}
public FragmentProperties setCoordinatorOnlyDistribution()
{
if (partitioningHandle.isPresent() && partitioningHandle.get().isCoordinatorOnly()) {
// already single node distribution
return this;
}
// only system SINGLE can be upgraded to COORDINATOR_ONLY
checkState(!partitioningHandle.isPresent() || partitioningHandle.get().equals(SINGLE_DISTRIBUTION),
"Cannot overwrite partitioning with %s (currently set to %s)",
COORDINATOR_DISTRIBUTION,
partitioningHandle);
partitioningHandle = Optional.of(COORDINATOR_DISTRIBUTION);
return this;
}
public FragmentProperties addSourceDistribution(PlanNodeId source, PartitioningHandle distribution, Metadata metadata, Session session)
{
requireNonNull(source, "source is null");
requireNonNull(distribution, "distribution is null");
partitionedSources.add(source);
return setDistribution(distribution, metadata, session);
}
public FragmentProperties addChildren(List<SubPlan> children)
{
this.children.addAll(children);
return this;
}
public PartitioningScheme getPartitioningScheme()
{
return partitioningScheme;
}
public PartitioningHandle getPartitioningHandle()
{
return partitioningHandle.get();
}
public Set<PlanNodeId> getPartitionedSources()
{
return partitionedSources;
}
}
    /**
     * Plan visitor that decides, bottom-up, whether each part of a fragment is
     * capable of grouped execution, whether grouped execution would actually be
     * useful there, and whether the subtree is eligible for recoverable grouped
     * execution. See {@link GroupedExecutionProperties} for the exact meaning
     * of the returned flags.
     */
    private static class GroupedExecutionTagger
            extends InternalPlanVisitor<GroupedExecutionProperties, Void>
    {
        private final Session session;
        private final Metadata metadata;
        private final NodePartitioningManager nodePartitioningManager;
        // Snapshot of the session flag; when false, every node is tagged "not capable".
        private final boolean groupedExecutionEnabled;

        public GroupedExecutionTagger(Session session, Metadata metadata, NodePartitioningManager nodePartitioningManager)
        {
            this.session = requireNonNull(session, "session is null");
            this.metadata = requireNonNull(metadata, "metadata is null");
            this.nodePartitioningManager = requireNonNull(nodePartitioningManager, "nodePartitioningManager is null");
            this.groupedExecutionEnabled = isGroupedExecutionEnabled(session);
        }

        @Override
        public GroupedExecutionProperties visitPlan(PlanNode node, Void context)
        {
            // Default behavior: leaves (other than table scans, which have their
            // own visit method) are not capable; interior nodes combine children.
            if (node.getSources().isEmpty()) {
                return GroupedExecutionProperties.notCapable();
            }
            return processChildren(node);
        }

        @Override
        public GroupedExecutionProperties visitJoin(JoinNode node, Void context)
        {
            GroupedExecutionProperties left = node.getLeft().accept(this, null);
            GroupedExecutionProperties right = node.getRight().accept(this, null);
            if (!node.getDistributionType().isPresent() || !groupedExecutionEnabled) {
                // This is possible when the optimizers is invoked with `forceSingleNode` set to true.
                return GroupedExecutionProperties.notCapable();
            }
            if ((node.getType() == JoinNode.Type.RIGHT || node.getType() == JoinNode.Type.FULL) && !right.currentNodeCapable) {
                // For a plan like this, if the fragment participates in grouped execution,
                // the LookupOuterOperator corresponding to the RJoin will not work execute properly.
                //
                // * The operator has to execute as not-grouped because it can only look at the "used" flags in
                //   join build after all probe has finished.
                // * The operator has to execute as grouped the subsequent LJoin expects that incoming
                //   operators are grouped. Otherwise, the LJoin won't be able to throw out the build side
                //   for each group as soon as the group completes.
                //
                //      LJoin
                //     /     \
                //   RJoin   Scan
                //  /     \
                // Scan   Remote
                //
                // TODO:
                // The RJoin can still execute as grouped if there is no subsequent operator that depends
                // on the RJoin being executed in a grouped manner. However, this is not currently implemented.
                // Support for this scenario is already implemented in the execution side.
                return GroupedExecutionProperties.notCapable();
            }
            switch (node.getDistributionType().get()) {
                case REPLICATED:
                    // Broadcast join maintains partitioning for the left side.
                    // Right side of a broadcast is not capable of grouped execution because it always comes from a remote exchange.
                    checkState(!right.currentNodeCapable);
                    return left;
                case PARTITIONED:
                    if (left.currentNodeCapable && right.currentNodeCapable) {
                        // Both sides must agree on the lifespan count for per-group join execution.
                        checkState(left.totalLifespans == right.totalLifespans, format("Mismatched number of lifespans on left(%s) and right(%s) side of join", left.totalLifespans, right.totalLifespans));
                        return new GroupedExecutionProperties(
                                true,
                                true,
                                ImmutableList.<PlanNodeId>builder()
                                        .addAll(left.capableTableScanNodes)
                                        .addAll(right.capableTableScanNodes)
                                        .build(),
                                left.totalLifespans,
                                left.recoveryEligible && right.recoveryEligible);
                    }
                    // right.subTreeUseful && !left.currentNodeCapable:
                    //   It's not particularly helpful to do grouped execution on the right side
                    //   because the benefit is likely cancelled out due to required buffering for hash build.
                    //   In theory, it could still be helpful (e.g. when the underlying aggregation's intermediate group state maybe larger than aggregation output).
                    //   However, this is not currently implemented. JoinBridgeManager need to support such a lifecycle.
                    // !right.currentNodeCapable:
                    //   The build/right side needs to buffer fully for this JOIN, but the probe/left side will still stream through.
                    //   As a result, there is no reason to change currentNodeCapable or subTreeUseful to false.
                    //
                    return left;
                default:
                    throw new UnsupportedOperationException("Unknown distribution type: " + node.getDistributionType());
            }
        }

        @Override
        public GroupedExecutionProperties visitAggregation(AggregationNode node, Void context)
        {
            GroupedExecutionProperties properties = node.getSource().accept(this, null);
            if (groupedExecutionEnabled && properties.isCurrentNodeCapable()) {
                switch (node.getStep()) {
                    case SINGLE:
                    case FINAL:
                        // A SINGLE/FINAL aggregation marks the subtree as useful for grouped execution.
                        return new GroupedExecutionProperties(true, true, properties.capableTableScanNodes, properties.totalLifespans, properties.recoveryEligible);
                    case PARTIAL:
                    case INTERMEDIATE:
                        // Partial/intermediate steps just propagate the child's properties.
                        return properties;
                }
            }
            return GroupedExecutionProperties.notCapable();
        }

        @Override
        public GroupedExecutionProperties visitWindow(WindowNode node, Void context)
        {
            return processWindowFunction(node);
        }

        @Override
        public GroupedExecutionProperties visitRowNumber(RowNumberNode node, Void context)
        {
            return processWindowFunction(node);
        }

        @Override
        public GroupedExecutionProperties visitTopNRowNumber(TopNRowNumberNode node, Void context)
        {
            return processWindowFunction(node);
        }

        // Shared logic for all window-function-style nodes: capable children make
        // the subtree both capable and useful.
        private GroupedExecutionProperties processWindowFunction(PlanNode node)
        {
            GroupedExecutionProperties properties = getOnlyElement(node.getSources()).accept(this, null);
            if (groupedExecutionEnabled && properties.isCurrentNodeCapable()) {
                return new GroupedExecutionProperties(true, true, properties.capableTableScanNodes, properties.totalLifespans, properties.recoveryEligible);
            }
            return GroupedExecutionProperties.notCapable();
        }

        @Override
        public GroupedExecutionProperties visitMarkDistinct(MarkDistinctNode node, Void context)
        {
            GroupedExecutionProperties properties = getOnlyElement(node.getSources()).accept(this, null);
            if (groupedExecutionEnabled && properties.isCurrentNodeCapable()) {
                return new GroupedExecutionProperties(true, true, properties.capableTableScanNodes, properties.totalLifespans, properties.recoveryEligible);
            }
            return GroupedExecutionProperties.notCapable();
        }

        @Override
        public GroupedExecutionProperties visitTableWriter(TableWriterNode node, Void context)
        {
            GroupedExecutionProperties properties = node.getSource().accept(this, null);
            boolean recoveryEligible = properties.isRecoveryEligible();
            WriterTarget target = node.getTarget().orElseThrow(() -> new VerifyException("target is absent"));
            if (target instanceof CreateName || target instanceof InsertReference || target instanceof RefreshMaterializedViewReference) {
                // A write stays recovery-eligible only if the connector supports page sink commit.
                recoveryEligible &= metadata.getConnectorCapabilities(session, target.getConnectorId()).contains(SUPPORTS_PAGE_SINK_COMMIT);
            }
            else {
                recoveryEligible = false;
            }
            return new GroupedExecutionProperties(
                    properties.isCurrentNodeCapable(),
                    properties.isSubTreeUseful(),
                    properties.getCapableTableScanNodes(),
                    properties.getTotalLifespans(),
                    recoveryEligible);
        }

        @Override
        public GroupedExecutionProperties visitTableScan(TableScanNode node, Void context)
        {
            // A scan is capable only when the table is partitioned AND the connector
            // can enumerate addressable partition handles (not just NOT_PARTITIONED).
            Optional<TablePartitioning> tablePartitioning = metadata.getLayout(session, node.getTable()).getTablePartitioning();
            if (!tablePartitioning.isPresent()) {
                return GroupedExecutionProperties.notCapable();
            }
            List<ConnectorPartitionHandle> partitionHandles = nodePartitioningManager.listPartitionHandles(session, tablePartitioning.get().getPartitioningHandle());
            if (ImmutableList.of(NOT_PARTITIONED).equals(partitionHandles)) {
                return GroupedExecutionProperties.notCapable();
            }
            else {
                return new GroupedExecutionProperties(
                        true,
                        false,
                        ImmutableList.of(node.getId()),
                        partitionHandles.size(),
                        metadata.getConnectorCapabilities(session, node.getTable().getConnectorId()).contains(SUPPORTS_REWINDABLE_SPLIT_SOURCE));
            }
        }

        private GroupedExecutionProperties processChildren(PlanNode node)
        {
            // Each fragment has a partitioning handle, which is derived from leaf nodes in the fragment.
            // Leaf nodes with different partitioning handle are not allowed to share a single fragment
            // (except for special cases as detailed in addSourceDistribution).
            // As a result, it is not necessary to check the compatibility between node.getSources because
            // they are guaranteed to be compatible.
            // * If any child is "not capable", return "not capable"
            // * When all children are capable ("capable and useful" or "capable but not useful")
            //   * Usefulness:
            //     * if any child is "useful", this node is "useful"
            //     * if no children is "useful", this node is "not useful"
            //   * Recovery Eligibility:
            //     * if all children is "recovery eligible", this node is "recovery eligible"
            //     * if any child is "not recovery eligible", this node is "not recovery eligible"
            boolean anyUseful = false;
            OptionalInt totalLifespans = OptionalInt.empty();
            boolean allRecoveryEligible = true;
            ImmutableList.Builder<PlanNodeId> capableTableScanNodes = ImmutableList.builder();
            for (PlanNode source : node.getSources()) {
                GroupedExecutionProperties properties = source.accept(this, null);
                if (!properties.isCurrentNodeCapable()) {
                    return GroupedExecutionProperties.notCapable();
                }
                anyUseful |= properties.isSubTreeUseful();
                allRecoveryEligible &= properties.isRecoveryEligible();
                if (!totalLifespans.isPresent()) {
                    totalLifespans = OptionalInt.of(properties.totalLifespans);
                }
                else {
                    checkState(totalLifespans.getAsInt() == properties.totalLifespans, format("Mismatched number of lifespans among children nodes. Expected: %s, actual: %s", totalLifespans.getAsInt(), properties.totalLifespans));
                }
                capableTableScanNodes.addAll(properties.capableTableScanNodes);
            }
            return new GroupedExecutionProperties(true, anyUseful, capableTableScanNodes.build(), totalLifespans.getAsInt(), allRecoveryEligible);
        }
    }
private static class GroupedExecutionProperties
{
// currentNodeCapable:
// Whether grouped execution is possible with the current node.
// For example, a table scan is capable iff it supports addressable split discovery.
// subTreeUseful:
// Whether grouped execution is beneficial in the current node, or any node below it.
// For example, a JOIN can benefit from grouped execution because build can be flushed early, reducing peak memory requirement.
//
// In the current implementation, subTreeUseful implies currentNodeCapable.
// In theory, this doesn't have to be the case. Take an example where a GROUP BY feeds into the build side of a JOIN.
// Even if JOIN cannot take advantage of grouped execution, it could still be beneficial to execute the GROUP BY with grouped execution
// (e.g. when the underlying aggregation's intermediate group state may be larger than aggregation output).
private final boolean currentNodeCapable;
private final boolean subTreeUseful;
private final List<PlanNodeId> capableTableScanNodes;
private final int totalLifespans;
private final boolean recoveryEligible;
public GroupedExecutionProperties(boolean currentNodeCapable, boolean subTreeUseful, List<PlanNodeId> capableTableScanNodes, int totalLifespans, boolean recoveryEligible)
{
this.currentNodeCapable = currentNodeCapable;
this.subTreeUseful = subTreeUseful;
this.capableTableScanNodes = ImmutableList.copyOf(requireNonNull(capableTableScanNodes, "capableTableScanNodes is null"));
this.totalLifespans = totalLifespans;
this.recoveryEligible = recoveryEligible;
// Verify that `subTreeUseful` implies `currentNodeCapable`
checkArgument(!subTreeUseful || currentNodeCapable);
// Verify that `recoveryEligible` implies `currentNodeCapable`
checkArgument(!recoveryEligible || currentNodeCapable);
checkArgument(currentNodeCapable == !capableTableScanNodes.isEmpty());
}
public static GroupedExecutionProperties notCapable()
{
return new GroupedExecutionProperties(false, false, ImmutableList.of(), 1, false);
}
public boolean isCurrentNodeCapable()
{
return currentNodeCapable;
}
public boolean isSubTreeUseful()
{
return subTreeUseful;
}
public List<PlanNodeId> getCapableTableScanNodes()
{
return capableTableScanNodes;
}
public int getTotalLifespans()
{
return totalLifespans;
}
public boolean isRecoveryEligible()
{
return recoveryEligible;
}
}
private static final class PartitioningHandleReassigner
extends SimplePlanRewriter<Void>
{
private final PartitioningHandle fragmentPartitioningHandle;
private final Metadata metadata;
private final Session session;
public PartitioningHandleReassigner(PartitioningHandle fragmentPartitioningHandle, Metadata metadata, Session session)
{
this.fragmentPartitioningHandle = fragmentPartitioningHandle;
this.metadata = metadata;
this.session = session;
}
@Override
public PlanNode visitTableScan(TableScanNode node, RewriteContext<Void> context)
{
PartitioningHandle partitioning = metadata.getLayout(session, node.getTable())
.getTablePartitioning()
.map(TablePartitioning::getPartitioningHandle)
.orElse(SOURCE_DISTRIBUTION);
if (partitioning.equals(fragmentPartitioningHandle)) {
// do nothing if the current scan node's partitioning matches the fragment's
return node;
}
TableHandle newTableHandle = metadata.getAlternativeTableHandle(session, node.getTable(), fragmentPartitioningHandle);
return new TableScanNode(
node.getSourceLocation(),
node.getId(),
newTableHandle,
node.getOutputVariables(),
node.getAssignments(),
node.getCurrentConstraint(),
node.getEnforcedConstraint());
}
}
private static class PartitioningVariableAssignments
{
private final List<VariableReferenceExpression> variables;
private final Map<VariableReferenceExpression, RowExpression> constants;
private PartitioningVariableAssignments(List<VariableReferenceExpression> variables, Map<VariableReferenceExpression, RowExpression> constants)
{
this.variables = ImmutableList.copyOf(requireNonNull(variables, "variables is null"));
this.constants = ImmutableMap.copyOf(requireNonNull(constants, "constants is null"));
checkArgument(
ImmutableSet.copyOf(variables).containsAll(constants.keySet()),
"partitioningVariables list must contain all partitioning variables including constants");
}
public List<VariableReferenceExpression> getVariables()
{
return variables;
}
public Map<VariableReferenceExpression, RowExpression> getConstants()
{
return constants;
}
}
}
| apache-2.0 |
Comcast/flume2storm | core/src/main/java/com/comcast/viper/flume2storm/connection/sender/EventSenderFactory.java | 1751 | /**
* Copyright 2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.viper.flume2storm.connection.sender;
import org.apache.commons.configuration.Configuration;
import com.comcast.viper.flume2storm.F2SConfigurationException;
import com.comcast.viper.flume2storm.connection.parameters.ConnectionParameters;
/**
 * Interface to build an Event Sender. It follows the abstract factory design
 * pattern. Implementations of this factory must have a no-argument constructor.
 *
 * @param <CP>
 *          The Connection Parameters class
 */
public interface EventSenderFactory<CP extends ConnectionParameters> {
  /**
   * Creates a new {@link EventSender} configured from the connection
   * parameters provided.
   *
   * @param connectionParams
   *          Connection parameters to use to configure the {@link EventSender}
   * @param config
   *          Additional configuration for the creation of the event sender
   * @return The newly created {@link EventSender}
   * @throws F2SConfigurationException
   *           If the configuration specified is invalid
   */
  EventSender<CP> create(CP connectionParams, Configuration config) throws F2SConfigurationException;
}
| apache-2.0 |
jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/ActionType.java | 1746 |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;
/**
 * <p>Java class for ActionType.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 * <p>
 * <pre>
 * <simpleType name="ActionType">
 * <restriction base="{http://www.w3.org/2001/XMLSchema}string">
 * <enumeration value="MigrationV1"/>
 * <enumeration value="VmPowerV1"/>
 * <enumeration value="HostPowerV1"/>
 * <enumeration value="HostMaintenanceV1"/>
 * <enumeration value="StorageMigrationV1"/>
 * <enumeration value="StoragePlacementV1"/>
 * </restriction>
 * </simpleType>
 * </pre>
 *
 */
@XmlType(name = "ActionType")
@XmlEnum
public enum ActionType {

    // NOTE(review): JAXB-generated enum. The @XmlEnumValue strings are the
    // on-the-wire schema representations and must not be edited by hand.
    @XmlEnumValue("MigrationV1")
    MIGRATION_V_1("MigrationV1"),
    @XmlEnumValue("VmPowerV1")
    VM_POWER_V_1("VmPowerV1"),
    @XmlEnumValue("HostPowerV1")
    HOST_POWER_V_1("HostPowerV1"),
    @XmlEnumValue("HostMaintenanceV1")
    HOST_MAINTENANCE_V_1("HostMaintenanceV1"),
    @XmlEnumValue("StorageMigrationV1")
    STORAGE_MIGRATION_V_1("StorageMigrationV1"),
    @XmlEnumValue("StoragePlacementV1")
    STORAGE_PLACEMENT_V_1("StoragePlacementV1");

    // The XML schema string for this constant (same text as @XmlEnumValue).
    private final String value;

    ActionType(String v) {
        value = v;
    }

    /** Returns the XML schema string representation of this constant. */
    public String value() {
        return value;
    }

    /**
     * Looks up the constant whose XML value equals {@code v}.
     *
     * @throws IllegalArgumentException if {@code v} matches no constant
     *         (the exception message is the unmatched value itself)
     */
    public static ActionType fromValue(String v) {
        for (ActionType c: ActionType.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        throw new IllegalArgumentException(v);
    }
}
| apache-2.0 |
echalkpad/t4f-data | sql/dao/src/main/java/io/datalayer/sql/dao/finder/FinderArgumentTypeFactory.java | 1452 | /****************************************************************
* Licensed to the AOS Community (AOS) under one or more *
* contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The AOS licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package io.datalayer.sql.dao.finder;
import org.hibernate.type.Type;
/**
 * Used to locate any specific Hibernate type mappings that might be necessary
 * for a DAO finder implementation.
 */
public interface FinderArgumentTypeFactory {
    /**
     * Returns the Hibernate {@link Type} to use when binding the given finder
     * argument. NOTE(review): implementations appear to be allowed to return
     * {@code null} when no special mapping applies — confirm with callers.
     *
     * @param arg a runtime argument value passed to a finder method
     * @return the Hibernate type mapping for {@code arg}
     */
    Type getArgumentType(Object arg);
}
| apache-2.0 |
wenhao/tdd-workshop | 03-fizz-buzz-whizz/src/test/java/com/github/wenhao/fizz/buzz/whizz/handler/FizzBuzzWhizzHandlerTest.java | 2949 | /*
* Copyright © 2019, Wen Hao <wenhao@126.com>.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.github.wenhao.fizz.buzz.whizz.handler;
import static com.github.wenhao.fizz.buzz.whizz.domain.Constants.BUZZ;
import static com.github.wenhao.fizz.buzz.whizz.domain.Constants.FIZZ;
import static com.github.wenhao.fizz.buzz.whizz.domain.Constants.FIZZ_BUZZ_WHIZZ;
import static com.github.wenhao.fizz.buzz.whizz.domain.Constants.WHIZZ;
import com.github.wenhao.fizz.buzz.whizz.domain.Words;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class FizzBuzzWhizzHandlerTest {

    private FizzBuzzWhizzHandler handler;
    private Words words;

    @BeforeEach
    public void setUp() throws Exception {
        // The three "special" numbers reported by the Words fixture.
        words = mock(Words.class);
        when(words.getFirst()).thenReturn(3);
        when(words.getSecond()).thenReturn(5);
        when(words.getThird()).thenReturn(7);
        // Chain of responsibility: fizz-buzz-whizz -> fizz -> buzz -> whizz.
        handler = new FizzBuzzWhizzHandler(new FizzHandler(new BuzzHandler(new WhizzHandler(null))));
    }

    @Test
    public void should_be_able_to_return_fizz_buzz_whizz_when_student_count_is_multiple_of_3_and_5_and_7() throws Exception {
        // 105 = 3 * 5 * 7, so the first handler in the chain matches.
        assertThat(handler.handle(105, words)).isEqualTo(FIZZ_BUZZ_WHIZZ);
    }

    @Test
    public void should_next_handler_process() throws Exception {
        // Each single multiple falls through to the matching downstream handler.
        assertThat(handler.handle(3, words)).isEqualTo(FIZZ);
        assertThat(handler.handle(5, words)).isEqualTo(BUZZ);
        assertThat(handler.handle(7, words)).isEqualTo(WHIZZ);
    }
} | apache-2.0 |
timrdf/csv2rdf4lod | src/edu/rpi/tw/data/csv/valuehandlers/YearValueHandler.java | 2111 | package edu.rpi.tw.data.csv.valuehandlers;
import java.util.HashMap;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import edu.rpi.tw.data.csv.impl.CSVRecordTemplateFiller;
/**
 * Value handler that emits cell values typed as xsd:gYear
 * (http://www.w3.org/TR/xmlschema-2/#gYear), with an optional codebook for
 * non-numeric codes.
 *
 * @author <a href="http://www.icrany.com">iCrany</a>? (see repository history)
 */
public class YearValueHandler extends DefaultValueHandler {

    // Maps raw cell values that are not years (e.g. "N/A" codes) to the RDF
    // value that should be asserted instead of a typed gYear literal.
    // Never null: an absent codebook is represented by an empty map.
    protected HashMap<String,Value> codebook = new HashMap<String,Value>();

    /** Creates a handler with an empty codebook. */
    public YearValueHandler() {
        this(null);
    }

    /**
     * Creates a handler with the given codebook.
     *
     * @param codebook mapping from raw cell values to replacement RDF values;
     *                 may be null, in which case an empty codebook is used.
     */
    public YearValueHandler(HashMap<String,Value> codebook) {
        super();
        if( codebook != null ) this.codebook = codebook;
    }

    /** The datatype URI asserted for parsed values: xsd:gYear. */
    @Override
    public URI getRange() {
        return ValueFactoryImpl.getInstance().createURI("http://www.w3.org/TR/xmlschema-2/#gYear");
    }

    /**
     * Asserts {@code subjectR predicate value} into {@code conn}: codebook
     * entries are asserted verbatim, numeric values as gYear-typed literals,
     * and unparseable values as plain literals (after recording the failure).
     */
    @Override
    public void handleValue(Resource subjectR, URI predicate, String predicateLocalName, String value,
                            RepositoryConnection conn, String resourceURIbase, CSVRecordTemplateFiller rec,
                            RepositoryConnection conn2) {
        try {
            Value nonIntegerCode = this.codebook.get(value);
            if( nonIntegerCode != null ) {
                // Known non-numeric code: assert its mapped value. Reuse the value
                // already fetched instead of performing a second codebook lookup.
                conn.add(subjectR, predicate, nonIntegerCode);
            }else {
                // tweak() presumably strips grouping characters such as commas and
                // spaces before parsing (see IntegerMultiplierValueHandler) — confirm.
                int intVal = Integer.parseInt(IntegerMultiplierValueHandler.tweak(value));
                conn.add(subjectR, predicate, vf.createLiteral(""+intVal,getRange()));
            }
        } catch (RepositoryException e) {
            e.printStackTrace();
        } catch (NumberFormatException e) {
            // Not a parseable year: record the failure and fall back to an
            // untyped (plain) literal so the raw value is not lost.
            this.failedOnValue(value, subjectR, predicate);
            try {
                conn.add(subjectR, predicate, vf.createLiteral(value));
            } catch (RepositoryException e1) {
                e1.printStackTrace();
            }
        }
    }
} | apache-2.0 |
DariusX/camel | core/camel-support/src/main/java/org/apache/camel/component/ResourceEndpoint.java | 7635 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.camel.Component;
import org.apache.camel.api.management.ManagedAttribute;
import org.apache.camel.api.management.ManagedOperation;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.api.management.mbean.ManagedResourceEndpointMBean;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.ProcessorEndpoint;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.util.IOHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A useful base class for endpoints which depend on a resource
* such as things like Velocity or XQuery based components.
*/
@ManagedResource(description = "Managed ResourceEndpoint")
public abstract class ResourceEndpoint extends ProcessorEndpoint implements ManagedResourceEndpointMBean {
protected final Logger log = LoggerFactory.getLogger(getClass());
private volatile byte[] buffer;
@UriPath(description = "Path to the resource."
+ " You can prefix with: classpath, file, http, ref, or bean."
+ " classpath, file and http loads the resource using these protocols (classpath is default)."
+ " ref will lookup the resource in the registry."
+ " bean will call a method on a bean to be used as the resource."
+ " For bean you can specify the method name after dot, eg bean:myBean.myMethod.")
@Metadata(required = true)
private String resourceUri;
@UriParam(defaultValue = "false", description = "Sets whether to use resource content cache or not")
private boolean contentCache;
@UriParam(defaultValue = "false", description = "Sets whether the context map should allow access to all details."
+ " By default only the message body and headers can be accessed."
+ " This option can be enabled for full access to the current Exchange and CamelContext."
+ " Doing so impose a potential security risk as this opens access to the full power of CamelContext API.")
private boolean allowContextMapAll;
public ResourceEndpoint() {
}
public ResourceEndpoint(String endpointUri, Component component, String resourceUri) {
super(endpointUri, component);
this.resourceUri = resourceUri;
}
/**
* Gets the resource as an input stream considering the cache flag as well.
* <p/>
* If cache is enabled then the resource content is cached in an internal buffer and this content is
* returned to avoid loading the resource over and over again.
*
* @return the input stream
* @throws IOException is thrown if error loading the content of the resource to the local cache buffer
*/
public InputStream getResourceAsInputStream() throws IOException {
// try to get the resource input stream
if (isContentCache()) {
synchronized (this) {
if (buffer == null) {
log.debug("Reading resource: {} into the content cache", resourceUri);
try (InputStream is = getResourceAsInputStreamWithoutCache()) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
IOHelper.copy(IOHelper.buffered(is), bos);
buffer = bos.toByteArray();
}
}
}
log.debug("Using resource: {} from the content cache", resourceUri);
return new ByteArrayInputStream(buffer);
}
return getResourceAsInputStreamWithoutCache();
}
protected InputStream getResourceAsInputStreamWithoutCache() throws IOException {
return loadResource(resourceUri);
}
/**
* Loads the given resource.
*
* @param uri uri of the resource.
* @return the loaded resource
* @throws IOException is thrown if resource is not found or cannot be loaded
*/
protected InputStream loadResource(String uri) throws IOException {
return ResourceHelper.resolveMandatoryResourceAsInputStream(getCamelContext(), uri);
}
@Override
@ManagedAttribute(description = "Whether the resource is cached")
public boolean isContentCache() {
return contentCache;
}
@Override
@ManagedOperation(description = "Clears the cached resource, forcing to re-load the resource on next request")
public void clearContentCache() {
log.debug("Clearing resource: {} from the content cache", resourceUri);
buffer = null;
}
public boolean isContentCacheCleared() {
return buffer == null;
}
@ManagedAttribute(description = "Whether the context map is limited to only include the message body and headers")
public boolean isAllowContextMapAll() {
return allowContextMapAll;
}
/**
* Sets whether the context map should allow access to all details.
* By default only the message body and headers can be accessed.
* This option can be enabled for full access to the current Exchange and CamelContext.
* Doing so impose a potential security risk as this opens access to the full power of CamelContext API.
*/
public void setAllowContextMapAll(boolean allowContextMapAll) {
this.allowContextMapAll = allowContextMapAll;
}
@Override
@ManagedAttribute(description = "Camel context ID")
public String getCamelId() {
return getCamelContext().getName();
}
@Override
@ManagedAttribute(description = "Camel ManagementName")
public String getCamelManagementName() {
return getCamelContext().getManagementName();
}
@Override
@ManagedAttribute(description = "Endpoint service state")
public String getState() {
return getStatus().name();
}
/**
* Sets whether to use resource content cache or not.
*/
@Override
public void setContentCache(boolean contentCache) {
this.contentCache = contentCache;
}
/** Returns the configured resource URI (see {@link #setResourceUri(String)} for the supported schemes). */
public String getResourceUri() {
    return resourceUri;
}
/**
 * Path to the resource.
 * <p/>
 * You can prefix with: classpath, file, http, ref, or bean.
 * classpath, file and http loads the resource using these protocols (classpath is default).
 * ref will lookup the resource in the registry.
 * bean will call a method on a bean to be used as the resource.
 * For bean you can specify the method name after dot, eg bean:myBean.myMethod
 *
 * @param resourceUri the resource path
 */
public void setResourceUri(String resourceUri) {
    // The value is only stored here; resolution/validation happens when loadResource(String) is invoked.
    this.resourceUri = resourceUri;
}
}
| apache-2.0 |
greghogan/flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/FileBufferReaderITCase.java | 7500 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.partition;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.NettyShuffleEnvironmentOptions;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.io.network.api.reader.RecordReader;
import org.apache.flink.runtime.io.network.api.writer.RecordWriter;
import org.apache.flink.runtime.io.network.api.writer.RecordWriterBuilder;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.ScheduleMode;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup;
import org.apache.flink.runtime.minicluster.MiniCluster;
import org.apache.flink.runtime.minicluster.MiniClusterConfiguration;
import org.apache.flink.runtime.net.SSLUtilsTest;
import org.apache.flink.testutils.serialization.types.ByteArrayType;
import org.apache.flink.util.TestLogger;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandlerContext;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelPromise;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
* Tests the bug reported in FLINK-131O0.
*
* <p>The implementation of {@link org.apache.flink.runtime.io.network.partition.BoundedData.Reader#nextBuffer()}
* for {@link BoundedBlockingSubpartitionType#FILE} assumes that there is always an available buffer, otherwise
* an IOException is thrown and it always assumes that pool of two buffers is enough (before using the 3rd buffer,
* first one was expected to be recycled already). But in the case of pending flush operation (when the socket channel
* is not writable while netty thread is calling {@link ChannelHandlerContext#writeAndFlush(Object, ChannelPromise)}),
* the first fetched buffer from {@link org.apache.flink.runtime.io.network.partition.FileChannelBoundedData} has not
* been recycled while fetching the second buffer to trigger next read ahead, which breaks the above assumption.
*/
@RunWith(Parameterized.class)
public class FileBufferReaderITCase extends TestLogger {
    // Job topology: 8 producer tasks, all-to-all connected to 8 consumer tasks.
    private static final int parallelism = 8;
    private static final int numRecords = 100_000;
    private static final int bufferSize = 4096;
    private static final int headerSize = 8;
    // Payload sized so that one serialized record (payload + header) fills a network buffer exactly.
    private static final int recordSize = bufferSize - headerSize;
    // Shared record payload emitted by every source subtask.
    private static final byte[] dataSource = new byte[recordSize];

    @Parameterized.Parameters(name = "SSL Enabled = {0}")
    public static List<Boolean> paras() {
        // Run the whole test once with SSL on and once with SSL off.
        return Arrays.asList(true, false);
    }

    @Parameterized.Parameter
    public boolean sslEnabled;

    @BeforeClass
    public static void setup() {
        // Fill the shared payload with zeros (explicit for clarity; Java arrays are zero-initialized anyway).
        for (int i = 0; i < dataSource.length; i++) {
            dataSource[i] = 0;
        }
    }

    @Test
    public void testSequentialReading() throws Exception {
        // setup
        final Configuration configuration;
        if (sslEnabled) {
            configuration = SSLUtilsTest.createInternalSslConfigWithKeyAndTrustStores("JDK");
        } else {
            configuration = new Configuration();
        }
        configuration.setString(RestOptions.BIND_PORT, "0");
        // Force the file-based bounded blocking subpartition implementation under test (see class javadoc).
        configuration.setString(NettyShuffleEnvironmentOptions.NETWORK_BLOCKING_SHUFFLE_TYPE, "file");
        configuration.set(TaskManagerOptions.TOTAL_FLINK_MEMORY, MemorySize.parse("1g"));
        // Match the network memory segment size to the record/buffer size used by this test.
        configuration.set(TaskManagerOptions.MEMORY_SEGMENT_SIZE, MemorySize.parse(bufferSize + "b"));
        // One slot per TaskManager so source and sink subtasks cannot be co-located in the same slot.
        final MiniClusterConfiguration miniClusterConfiguration = new MiniClusterConfiguration.Builder()
            .setConfiguration(configuration)
            .setNumTaskManagers(parallelism)
            .setNumSlotsPerTaskManager(1)
            .build();
        try (final MiniCluster miniCluster = new MiniCluster(miniClusterConfiguration)) {
            miniCluster.start();
            final JobGraph jobGraph = createJobGraph();
            // the job needs to complete without throwing an exception
            miniCluster.executeJobBlocking(jobGraph);
        }
    }

    private static JobGraph createJobGraph() {
        // Separate slot sharing groups keep source and sink in different slots; combined with
        // one slot per TaskManager this forces the exchange through the network shuffle stack.
        final SlotSharingGroup group1 = new SlotSharingGroup();
        final SlotSharingGroup group2 = new SlotSharingGroup();
        final JobVertex source = new JobVertex("source");
        source.setInvokableClass(TestSourceInvokable.class);
        source.setParallelism(parallelism);
        source.setSlotSharingGroup(group1);
        final JobVertex sink = new JobVertex("sink");
        sink.setInvokableClass(TestSinkInvokable.class);
        sink.setParallelism(parallelism);
        sink.setSlotSharingGroup(group2);
        // BLOCKING result partitions are fully produced before being consumed, which is what
        // triggers the file-backed bounded blocking subpartition code path under test.
        sink.connectNewDataSetAsInput(source, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);
        final JobGraph jobGraph = new JobGraph(source, sink);
        jobGraph.setScheduleMode(ScheduleMode.LAZY_FROM_SOURCES);
        return jobGraph;
    }

    /**
     * Basic source {@link AbstractInvokable} which sends the elements to the
     * {@link TestSinkInvokable}.
     */
    public static final class TestSourceInvokable extends AbstractInvokable {
        /**
         * Create an Invokable task and set its environment.
         *
         * @param environment The environment assigned to this invokable.
         */
        public TestSourceInvokable(Environment environment) {
            super(environment);
        }

        @Override
        public void invoke() throws Exception {
            final RecordWriter<ByteArrayType> writer = new RecordWriterBuilder<ByteArrayType>().build(getEnvironment().getWriter(0));
            final ByteArrayType bytes = new ByteArrayType(dataSource);
            int counter = 0;
            while (counter++ < numRecords) {
                writer.emit(bytes);
                // Flush after every emit so each record is pushed out immediately.
                writer.flushAll();
            }
        }
    }

    /**
     * Basic sink {@link AbstractInvokable} which verifies the sent elements
     * from the {@link TestSourceInvokable}.
     */
    public static final class TestSinkInvokable extends AbstractInvokable {
        // Number of records consumed so far; compared against numRecords at the end.
        private int numReceived = 0;

        /**
         * Create an Invokable task and set its environment.
         *
         * @param environment The environment assigned to this invokable.
         */
        public TestSinkInvokable(Environment environment) {
            super(environment);
        }

        @Override
        public void invoke() throws Exception {
            final RecordReader<ByteArrayType> reader = new RecordReader<>(
                getEnvironment().getInputGate(0),
                ByteArrayType.class,
                getEnvironment().getTaskManagerInfo().getTmpDirectories());
            // Drain the input gate completely, counting the received records.
            while (reader.hasNext()) {
                reader.next();
                numReceived++;
            }
            // Fails if the number of consumed records does not match the expected count.
            assertThat(numReceived, is(numRecords));
        }
    }
}
| apache-2.0 |
leapframework/framework | base/lang/src/main/java/leap/lang/xml/package-info.java | 649 | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * XML processing utilities for the {@code leap.lang} library.
 */
package leap.lang.xml; | apache-2.0 |
vladmihalcea/high-performance-java-persistence | core/src/test/java/com/vladmihalcea/book/hpjp/jdbc/transaction/locking/advisory/PostgreSQLSessionAdvisoryLocksTest.java | 1256 | package com.vladmihalcea.book.hpjp.jdbc.transaction.locking.advisory;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import com.vladmihalcea.book.hpjp.jdbc.transaction.locking.advisory.AbstractPostgreSQLAdvisoryLocksTest;
/**
* @author Vlad Mihalcea
*/
public class PostgreSQLSessionAdvisoryLocksTest extends AbstractPostgreSQLAdvisoryLocksTest {
@Override
protected int acquireLock(Connection connection, int logIndex, int workerId) {
LOGGER.info( "Worker {} writes to log {}", workerId, logIndex );
try(PreparedStatement statement =
connection.prepareStatement("select pg_advisory_lock(?)")) {
statement.setInt( 1, logIndex );
statement.executeQuery();
}
catch (SQLException e) {
LOGGER.error( "Worker {} failed with this message: {}", workerId, e.getMessage() );
}
return logIndex;
}
@Override
protected void releaseLock(Connection connection, int logIndex, int workerId) {
try(PreparedStatement statement =
connection.prepareStatement("select pg_advisory_unlock(?)")) {
statement.setInt( 1, logIndex );
statement.executeQuery();
}
catch (SQLException e) {
LOGGER.error( "Worker {} failed with this message: {}", workerId, e.getMessage() );
}
}
}
| apache-2.0 |
google/fest | third_party/fest-swing/src/test/java/org/fest/swing/hierarchy/JInternalFrameDesktopPaneQuery_desktopPaneOf_Test.java | 2737 | /*
* Created on Aug 26, 2008
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright @2008-2013 the original author or authors.
*/
package org.fest.swing.hierarchy;
import static org.fest.assertions.Assertions.assertThat;
import static org.fest.swing.edt.GuiActionRunner.execute;
import javax.swing.JDesktopPane;
import javax.swing.JInternalFrame;
import org.fest.swing.annotation.RunsInEDT;
import org.fest.swing.edt.GuiQuery;
import org.fest.swing.test.core.SequentialEDTSafeTestCase;
import org.fest.swing.test.swing.TestMdiWindow;
import org.junit.Test;
/**
* Tests for {@link JInternalFrameDesktopPaneQuery#desktopPaneOf(JInternalFrame)}.
*
* @author Alex Ruiz
* @author Yvonne Wang
*/
/**
 * Tests for {@link JInternalFrameDesktopPaneQuery#desktopPaneOf(JInternalFrame)}.
 * Each lookup runs on the Event Dispatch Thread via a {@link GuiQuery}.
 *
 * @author Alex Ruiz
 * @author Yvonne Wang
 */
public class JInternalFrameDesktopPaneQuery_desktopPaneOf_Test extends SequentialEDTSafeTestCase {
  private TestMdiWindow window;
  private JInternalFrame internalFrame;

  @Override
  protected final void onSetUp() {
    window = TestMdiWindow.createAndShowNewWindow(getClass());
    internalFrame = window.internalFrame();
  }

  @Override
  protected final void onTearDown() {
    window.destroy();
  }

  @Test
  public void should_return_null_if_JDesktopIcon_in_JInternalFrame_is_null() {
    // Null out the desktop icon and perform the lookup in a single EDT round trip.
    JDesktopPane found = execute(new GuiQuery<JDesktopPane>() {
      @Override
      protected JDesktopPane executeInEDT() {
        internalFrame.setDesktopIcon(null);
        return JInternalFrameDesktopPaneQuery.desktopPaneOf(internalFrame);
      }
    });
    assertThat(found).isNull();
  }

  @Test
  public void should_return_JDesktopPane_from_JDesktopIcon() {
    // The lookup should resolve the same desktop pane that hosts the internal frame.
    JDesktopPane found = execute(new GuiQuery<JDesktopPane>() {
      @Override
      protected JDesktopPane executeInEDT() {
        return JInternalFrameDesktopPaneQuery.desktopPaneOf(internalFrame);
      }
    });
    assertThat(found).isSameAs(window.desktop());
  }
}
| apache-2.0 |
julianhyde/hydromatic-resource | src/test/java/net/hydromatic/resource/test/ResourceTest.java | 23437 | /*
* Licensed to Julian Hyde under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Julian Hyde
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.hydromatic.resource.test;
import org.junit.Test;
import java.lang.reflect.Method;
import java.util.EnumSet;
import java.util.Locale;
import java.util.Properties;
import net.hydromatic.resource.Resources;
import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static net.hydromatic.resource.Resources.*;
/**
* Tests for the {@link Resources} framework.
*/
public class ResourceTest {
final FooResource fooResource =
Resources.create("net.hydromatic.resource.test.ResourceTest",
FooResource.class);
@Test public void testSimple() {
assertThat(fooResource.helloWorld().str(), equalTo("hello, world!"));
assertThat(fooResource.differentMessageInPropertiesFile().str(),
equalTo("message in properties file"));
assertThat(fooResource.onlyInClass().str(),
equalTo("only in class"));
assertThat(fooResource.onlyInPropertiesFile().str(),
equalTo("message in properties file"));
}
@Test public void testProperty() {
assertThat(fooResource.helloWorld().getProperties().size(), equalTo(0));
assertThat(fooResource.withProperty(0).str(), equalTo("with properties 0"));
assertThat(fooResource.withProperty(0).getProperties().size(), equalTo(1));
assertThat(fooResource.withProperty(0).getProperties().get("prop"),
equalTo("my value"));
assertThat(fooResource.withProperty(1000).getProperties().get("prop"),
equalTo("my value"));
}
@SuppressWarnings("ThrowableResultOfMethodCallIgnored")
@Test public void testException() {
assertThat(fooResource.illArg("xyz").ex().getMessage(),
equalTo("bad arg xyz"));
assertThat(fooResource.illArg("xyz").ex().getCause(), nullValue());
final Throwable npe = new NullPointerException();
assertThat(fooResource.illArg("").ex(npe).getCause(), equalTo(npe));
}
@SuppressWarnings("ThrowableResultOfMethodCallIgnored")
@Test public void testSuperChainException() {
assertThat(fooResource.exceptionSuperChain().ex().getMessage(),
equalTo("super chain exception"));
assertThat(fooResource.exceptionSuperChain().ex().getClass().getName(),
equalTo(IllegalStateException.class.getName()));
}
/** Tests that get validation error if bundle does not contain resource. */
@Test public void testValidateBundleHasResource() {
try {
fooResource.onlyInClass().validate(
EnumSet.of(Validation.BUNDLE_HAS_RESOURCE));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
startsWith(
"key 'OnlyInClass' not found for resource 'onlyInClass' in "
+ "bundle 'java.util.PropertyResourceBundle@"));
}
}
@Test public void testValidateAtLeastOne() {
// succeeds - has several resources
Resources.validate(fooResource, EnumSet.of(Validation.AT_LEAST_ONE));
// fails validation - has no resources
try {
Resources.validate("foo", EnumSet.of(Validation.AT_LEAST_ONE));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
equalTo("resource object foo contains no resources"));
}
}
@Test public void testValidateMessageSpecified() {
try {
Resources.validate(fooResource, EnumSet.of(Validation.MESSAGE_SPECIFIED));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
equalTo("resource 'onlyInPropertiesFile' must specify BaseMessage"));
}
}
@Test public void testValidateMessageMatchDifferentMessageInPropertiesFile() {
try {
fooResource.differentMessageInPropertiesFile().validate(
EnumSet.of(Validation.MESSAGE_MATCH));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
equalTo(
"message for resource 'differentMessageInPropertiesFile' is different between class and resource file"));
}
}
@Test public void testValidateOddQuotes() {
try {
fooResource.oddQuotes().validate(EnumSet.of(Validation.EVEN_QUOTES));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
equalTo("resource 'oddQuotes' should have even number of quotes"));
}
}
@Test public void testValidateCreateException() {
try {
fooResource.myException().validate(
EnumSet.of(Validation.CREATE_EXCEPTION));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
equalTo("error instantiating exception for resource 'myException'"));
assertThat(e.getCause().getMessage(),
equalTo(
"java.lang.NoSuchMethodException: net.hydromatic.resource.test.ResourceTest$MyException.<init>(java.lang.String, java.lang.Throwable)"));
}
}
@Test public void testValidateCauselessFail() {
try {
fooResource.causelessFail().validate(
EnumSet.of(Validation.CREATE_EXCEPTION));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
equalTo("error instantiating exception for resource "
+ "'causelessFail'"));
assertThat(e.getCause().getMessage(),
equalTo(
"Cause is required, message = can't be used causeless"));
}
}
@Test public void testValidateExceptionWithCause() {
fooResource.exceptionWithCause().validate(
EnumSet.of(Validation.CREATE_EXCEPTION));
}
@Test public void testValidateMatchArguments() {
try {
Resources.validate(fooResource, EnumSet.of(Validation.ARGUMENT_MATCH));
fail("should have thrown");
} catch (AssertionError e) {
assertThat(e.getMessage(),
equalTo(
"type mismatch in method 'mismatchedArguments' between message format elements [class java.lang.String, int] and method parameters [class java.lang.String, int, class java.lang.String]"));
}
}
@Test public void testDeduceExceptionType() throws NoSuchMethodException {
// Parse "ExInst<MyException>" --> "MyException"
final Method method = FooResource.class.getMethod("myException");
assertThat(
Resources.ExInstWithCause.getExceptionClass(
method.getGenericReturnType()),
equalTo((Class) MyException.class));
}
@Test public void testDeduceExceptionType2() throws NoSuchMethodException {
// Parse "MyExInst<NumberFormatException>" --> "NumberFormatException"
final Method method =
FooResource.class.getMethod("customParameterizedExceptionClass");
assertThat(
Resources.ExInstWithCause.getExceptionClass(
method.getGenericReturnType()),
equalTo((Class) NumberFormatException.class));
}
@Test public void testDeduceExceptionType3() throws NoSuchMethodException {
// Parse "MyExInstImpl extends MyExInst<IllegalStateException>"
// --> "IllegalStateException"
final Method method = FooResource.class.getMethod("exceptionSuperChain");
assertThat(
Resources.ExInstWithCause.getExceptionClass(
method.getGenericReturnType()),
equalTo((Class) IllegalStateException.class));
}
@Test public void testIntPropEmpty() {
final FooResource r = Resources.create(FooResource.class);
final IntProp p = r.intPropNoDefault();
try {
final int actual = p.get();
fail("expected error, got " + actual);
} catch (RuntimeException e) {
assertThat(e.getMessage(),
is("Property IntPropNoDefault has no default value"));
}
assertThat(p.get(1), is(1));
assertThat(p.isSet(), is(false));
}
@Test public void testIntProp() {
final Properties properties = new Properties();
final FooResource r = Resources.create(properties, FooResource.class);
final IntProp p = r.intPropNoDefault();
final IntProp p5 = r.intPropDefaultFive();
assertThat(p.hasDefault(), is(false));
assertThat(p5.hasDefault(), is(true));
try {
final int actual = p.defaultValue();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property IntPropNoDefault has no default value"));
}
assertThat(p5.defaultValue(), is(-50));
try {
final int actual = p.get();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property IntPropNoDefault is not set and has no default value"));
}
assertThat(p.get(1), is(1));
assertThat(p.isSet(), is(false));
assertThat(p5.get(), is(-50));
assertThat(p5.get(1), is(1));
assertThat(p5.isSet(), is(false));
properties.setProperty("OtherProperty", "111");
assertThat(p.isSet(), is(false));
assertThat(p5.isSet(), is(false));
properties.setProperty("IntPropNoDefault", "3 ");
try {
final int actual = p.get();
fail("expected error, got " + actual);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"3 \""));
}
try {
final int actual = p.get(1);
fail("expected error, got " + actual);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"3 \""));
}
assertThat(p.isSet(), is(true));
properties.setProperty("IntPropNoDefault", "3");
assertThat(p.get(), is(3));
assertThat(p.get(1), is(3));
assertThat(p.isSet(), is(true));
properties.setProperty("IntPropDefaultFive", "-50");
assertThat(p5.get(), is(-50));
assertThat(p5.get(1), is(-50));
assertThat(p5.isSet(), is(true));
}
@Test public void testDoubleProp() {
final Properties properties = new Properties();
final FooResource r = Resources.create(properties, FooResource.class);
final DoubleProp p = r.doublePropNoDefault();
final DoubleProp pHalf = r.doublePropDefaultHalf();
assertThat(p.hasDefault(), is(false));
assertThat(pHalf.hasDefault(), is(true));
try {
final double actual = p.defaultValue();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property DoublePropNoDefault has no default value"));
}
assertThat(pHalf.defaultValue(), is(0.5d));
try {
final double actual = p.get();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property DoublePropNoDefault is not set and has no default "
+ "value"));
}
assertThat(p.get(1d), is(1d));
assertThat(p.isSet(), is(false));
assertThat(pHalf.get(), is(0.5d));
assertThat(pHalf.get(1), is(1d));
assertThat(pHalf.isSet(), is(false));
properties.setProperty("OtherProperty", "111");
assertThat(p.isSet(), is(false));
assertThat(pHalf.isSet(), is(false));
// Trailing spaces are OK for parsing doubles
properties.setProperty("DoublePropNoDefault", "3 ");
assertThat(p.get(), is(3d));
assertThat(p.get(1), is(3d));
assertThat(p.isSet(), is(true));
properties.setProperty("DoublePropNoDefault", "3z");
try {
final double actual = p.get();
fail("expected error, got " + actual);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"3z\""));
}
try {
final double actual = p.get(1);
fail("expected error, got " + actual);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"3z\""));
}
assertThat(p.isSet(), is(true));
properties.setProperty("DoublePropNoDefault", "-3.25");
assertThat(p.get(), is(-3.25d));
assertThat(p.get(1), is(-3.25d));
assertThat(p.isSet(), is(true));
properties.setProperty("DoublePropDefaultHalf", "-8.50");
assertThat(pHalf.get(), is(-8.5d));
assertThat(pHalf.get(1), is(-8.5d));
assertThat(pHalf.isSet(), is(true));
}
@Test public void testBooleanProp() {
final Properties properties = new Properties();
final FooResource r = Resources.create(properties, FooResource.class);
final BooleanProp p = r.booleanPropNoDefault();
final BooleanProp pTrue = r.booleanPropDefaultTrue();
final BooleanProp pBad = r.booleanPropBadDefault();
assertThat(p.hasDefault(), is(false));
assertThat(pTrue.hasDefault(), is(true));
assertThat(pBad.hasDefault(), is(true));
try {
final boolean actual = p.defaultValue();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property BooleanPropNoDefault has no default value"));
}
assertThat(pTrue.defaultValue(), is(true));
assertThat(pBad.defaultValue(), is(false));
try {
final boolean actual = p.get();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property BooleanPropNoDefault is not set and has no default "
+ "value"));
}
assertThat(p.get(true), is(true));
assertThat(p.get(false), is(false));
assertThat(p.isSet(), is(false));
assertThat(pTrue.get(), is(true));
assertThat(pTrue.get(false), is(false));
assertThat(pTrue.get(true), is(true));
assertThat(pTrue.isSet(), is(false));
assertThat(pBad.get(), is(false));
assertThat(pBad.get(true), is(true));
assertThat(pBad.get(false), is(false));
assertThat(pBad.isSet(), is(false));
properties.setProperty("OtherProperty", "111");
assertThat(p.isSet(), is(false));
assertThat(pTrue.isSet(), is(false));
assertThat(pBad.isSet(), is(false));
// Boolean properties are lenient in parsing.
// Everything that is not "true" or "TRUE" is false.
// Never throws.
properties.setProperty("BooleanPropNoDefault", "3 ");
assertThat(pBad.get(), is(false));
assertThat(pBad.get(true), is(true));
assertThat(p.isSet(), is(true));
properties.setProperty("BooleanPropNoDefault", "false");
assertThat(p.get(), is(false));
assertThat(p.get(false), is(false));
assertThat(p.get(true), is(false));
assertThat(p.isSet(), is(true));
properties.setProperty("BooleanPropDefaultTrue", "false");
assertThat(pTrue.get(), is(false));
assertThat(pTrue.get(true), is(false));
assertThat(pTrue.get(false), is(false));
assertThat(pTrue.isSet(), is(true));
properties.setProperty("BooleanPropDefaultTrue", "true");
assertThat(pTrue.get(), is(true));
assertThat(pTrue.get(true), is(true));
assertThat(pTrue.get(false), is(true));
assertThat(pTrue.isSet(), is(true));
properties.setProperty("BooleanPropBadDefault", "false");
assertThat(pBad.get(), is(false));
assertThat(pBad.get(true), is(false));
assertThat(pBad.get(false), is(false));
assertThat(pBad.isSet(), is(true));
}
@Test public void testStringProp() {
final Properties properties = new Properties();
final FooResource r = Resources.create(properties, FooResource.class);
final StringProp p = r.stringPropNoDefault();
final StringProp p5 = r.stringPropDefaultXyz();
assertThat(p.hasDefault(), is(false));
assertThat(p5.hasDefault(), is(true));
try {
final String actual = p.defaultValue();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property StringPropNoDefault has no default value"));
}
assertThat(p5.defaultValue(), is("xyz"));
try {
final String actual = p.get();
fail("expected error, got " + actual);
} catch (NoDefaultValueException e) {
assertThat(e.getMessage(),
is("Property StringPropNoDefault is not set and has no default "
+ "value"));
}
assertThat(p.get(""), is(""));
assertThat(p.get("a b"), is("a b"));
assertThat(p.isSet(), is(false));
assertThat(p5.get(), is("xyz"));
assertThat(p5.get("a b"), is("a b"));
assertThat(p5.isSet(), is(false));
properties.setProperty("OtherProperty", "111");
assertThat(p.isSet(), is(false));
assertThat(p5.isSet(), is(false));
properties.setProperty("StringPropNoDefault", "3 ");
assertThat(p.get(), is("3 "));
assertThat(p.isSet(), is(true));
properties.setProperty("StringPropNoDefault", "3 ");
assertThat(p.get(), is("3 "));
assertThat(p.get("1"), is("3 "));
assertThat(p.isSet(), is(true));
properties.setProperty("StringPropDefaultXyz", "-50");
assertThat(p5.get(), is("-50"));
assertThat(p5.get("1"), is("-50"));
assertThat(p5.isSet(), is(true));
}
@Test public void testPropPath() {
final Properties properties = new Properties();
final FooResource r = Resources.create(properties, FooResource.class);
final IntProp p = r.intPropPathDefault();
assertThat(p.hasDefault(), is(true));
assertThat(p.defaultValue(), is(56));
assertThat(p.get(), is(56));
assertThat(p.get(1), is(1));
assertThat(p.isSet(), is(false));
properties.setProperty("OtherProperty", "111");
assertThat(p.isSet(), is(false));
// Setting its method name has no effect
properties.setProperty("IntPropPathDefault", "3");
assertThat(p.get(), is(56));
assertThat(p.get(1), is(1));
assertThat(p.isSet(), is(false));
properties.setProperty("com.example.my.int.property", "3");
assertThat(p.get(), is(3));
assertThat(p.get(1), is(3));
assertThat(p.isSet(), is(true));
properties.setProperty("com.example.my.int.property", "3 ");
try {
final int actual = p.get();
fail("expected error, got " + actual);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"3 \""));
}
try {
final int actual = p.get(1);
fail("expected error, got " + actual);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"3 \""));
}
assertThat(p.isSet(), is(true));
}
@Test public void testBadDefaultProp() {
final Properties properties = new Properties();
final BadIntResource r =
Resources.create(properties, BadIntResource.class);
try {
final IntProp p = r.intPropBadDefault();
fail("expected error, got " + p);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"a3\""));
}
final BadDoubleResource r2 =
Resources.create(properties, BadDoubleResource.class);
try {
final DoubleProp p = r2.doublePropBadDefault();
fail("expected error, got " + p);
} catch (NumberFormatException e) {
assertThat(e.getMessage(),
is("For input string: \"1.5xx\""));
}
}
// TODO: check that each resource in the bundle is used by precisely
// one method
/** Exception that cannot be thrown by {@link ExInst} because it does not have
* a (String, Throwable) constructor, nor does it have a (String)
* constructor. */
public static class MyException extends RuntimeException {
public MyException() {
super();
}
}
/** Abstract class used to test identification of exception classes via
* superclass chains */
public abstract static class MyExInst<W extends Exception> extends ExInst<W> {
public MyExInst(String base, Locale locale, Method method, Object... args) {
super(base, locale, method, args);
}
}
public static class MyConcreteExInst<W extends Exception> extends ExInst<W> {
public MyConcreteExInst(String base, Locale locale, Method method,
Object... args) {
super(base, locale, method, args);
}
}
/** Subtype of ExInst, however exception type is not directly
* passed to ExInst. The test must still detect the correct class. */
public static class MyExInstImpl extends MyExInst<IllegalStateException> {
public MyExInstImpl(String base, Locale locale, Method method,
Object... args) {
super(base, locale, method, args);
}
}
/** Exception that always requires cause
*/
public static class MyExceptionRequiresCause extends RuntimeException {
public MyExceptionRequiresCause(String message, Throwable cause) {
super(message, cause);
if (cause == null) {
throw new IllegalArgumentException("Cause is required, "
+ "message = " + message);
}
}
}
/** A resource object to be tested. Has one of each flaw, so the validator
 * can be checked against every failure mode. The method names describe the
 * scenario; the annotations supply the in-class message/default that is
 * compared against the companion properties file. */
public interface FooResource {
  // Baseline: well-formed message, no flaw.
  @BaseMessage("hello, world!")
  Inst helloWorld();

  // Flaw: the properties file carries a different message than the class.
  @BaseMessage("message in class")
  Inst differentMessageInPropertiesFile();

  // Flaw: message declared here but missing from the properties file.
  @BaseMessage("only in class")
  Inst onlyInClass();

  // Flaw: message exists only in the properties file (no @BaseMessage).
  Inst onlyInPropertiesFile();

  // Message with an extra @Property attached; {0,number} is a
  // MessageFormat placeholder for the int argument.
  @BaseMessage("with properties {0,number}")
  @Property(name = "prop", value = "my value")
  Inst withProperty(int x);

  // Exception-producing message; IllegalArgumentException has the required
  // (String) constructor, so this one is valid.
  @BaseMessage("bad arg {0}")
  ExInst<IllegalArgumentException> illArg(String s);

  // Flaw: return type is String, but resource methods should return Inst.
  @BaseMessage("should return inst")
  String shouldReturnInst();

  // Flaw: MyException lacks the constructors ExInst needs to throw it.
  // ('' is the MessageFormat escape for a single quote.)
  @BaseMessage("exception isn''t throwable")
  ExInst<MyException> myException();

  // Flaw: an unescaped single quote in a MessageFormat pattern.
  @BaseMessage("Can't use odd quotes")
  Inst oddQuotes();

  // Flaw: MyExceptionRequiresCause demands a cause, but plain ExInst
  // cannot supply one.
  @BaseMessage("can''t be used causeless")
  ExInst<MyExceptionRequiresCause> causelessFail();

  // Valid counterpart: ExInstWithCause does pass a cause through.
  @BaseMessage("should work since cause is provided")
  ExInstWithCause<MyExceptionRequiresCause> exceptionWithCause();

  // Flaw: placeholders {0} and {1,number,#} don't line up with the three
  // declared parameters.
  @BaseMessage("argument {0} does not match {1,number,#}")
  Inst mismatchedArguments(String s, int i, String s2);

  // Valid: a custom parameterized ExInst subtype must be recognized.
  @BaseMessage("custom parameterized exception class")
  MyConcreteExInst<NumberFormatException> customParameterizedExceptionClass();

  // Valid: exception type bound on a superclass, not directly on ExInst.
  @BaseMessage("super chain exception")
  MyExInstImpl exceptionSuperChain();

  // --- Typed properties: each type with no default, a default, etc. ---

  IntProp intPropNoDefault();

  // NOTE(review): the name says "Five" but the default is -50 — presumably
  // the value was changed without renaming the method; confirm intent.
  @Default("-50")
  IntProp intPropDefaultFive();

  // Default plus an explicit resource path overriding the derived key.
  @Default("56")
  @Resource("com.example.my.int.property")
  IntProp intPropPathDefault();

  StringProp stringPropNoDefault();

  @Default("xyz")
  StringProp stringPropDefaultXyz();

  BooleanProp booleanPropNoDefault();

  @Default("true")
  BooleanProp booleanPropDefaultTrue();

  // Flaw: "null" does not parse as a boolean.
  @Default("null")
  BooleanProp booleanPropBadDefault();

  DoubleProp doublePropNoDefault();

  @Default("0.5")
  DoubleProp doublePropDefaultHalf();
}
/** Resource with an int property whose default cannot parse; reading it is
 * presumably expected to fail with NumberFormatException (mirrors the
 * doublePropBadDefault test above) — confirm against the validator. */
interface BadIntResource {
  @Default("a3")
  IntProp intPropBadDefault();
}
/** Resource with a double property whose default cannot parse; the test at
 * the top of this file expects NumberFormatException ("For input string:
 * \"1.5xx\"") when it is read. */
interface BadDoubleResource {
  @Default("1.5xx")
  DoubleProp doublePropBadDefault();
}
}
// End ResourceTest.java
| apache-2.0 |
Intel-bigdata/OAP | oap-cache/oap/src/main/java/org/apache/parquet/hadoop/utils/Collections3.java | 1480 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.parquet.hadoop.utils;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Jdk has Collections, Guava has Collections2,
 * We use Collections3.
 */
public final class Collections3 {

  /**
   * Wraps each value of {@code map} in an immutable one-element {@link Set}
   * and returns the whole thing as an unmodifiable map.
   *
   * @param map source map; it is read but never modified
   * @param <K> key type
   * @param <V> value type
   * @return an unmodifiable map from each key of {@code map} to an immutable
   *         singleton set containing that key's value
   */
  public static <K, V> Map<K, Set<V>> toSetMultiMap(Map<K, V> map) {
    Map<K, Set<V>> setMultiMap = new HashMap<>();
    for (Map.Entry<K, V> entry : map.entrySet()) {
      // Collections.singleton is already immutable, so there is no need to
      // build a mutable HashSet and then wrap it in unmodifiableSet.
      setMultiMap.put(entry.getKey(), Collections.singleton(entry.getValue()));
    }
    return Collections.unmodifiableMap(setMultiMap);
  }
}
| apache-2.0 |
pedroartsimao/easyguide | android/app/src/main/java/com/easyguide/data/entity/mapper/UserMapper.java | 1188 | package com.easyguide.data.entity.mapper;
import com.easyguide.data.entity.User;
import com.google.android.gms.auth.api.signin.GoogleSignInAccount;
import com.google.firebase.auth.FirebaseUser;
/**
 * Maps third-party sign-in account objects onto the app's {@code User} entity.
 * All optional fields (photo URL, id token) are copied only when present.
 */
public class UserMapper {

    /**
     * Builds a {@code User} from a signed-in Firebase account.
     * Copies uid, display name and email; the photo URL is copied only if set.
     */
    public static User transform(FirebaseUser firebaseUser) {
        User mapped = new User();
        mapped.setUid(firebaseUser.getUid());
        mapped.setName(firebaseUser.getDisplayName());
        mapped.setEmail(firebaseUser.getEmail());
        // Held as Object to avoid naming the Uri type here; toString() yields
        // the same string representation either way.
        Object photo = firebaseUser.getPhotoUrl();
        if (photo != null) {
            mapped.setPhotoUrl(photo.toString());
        }
        return mapped;
    }

    /**
     * Builds a {@code User} from a Google Sign-In account.
     * Copies id, display name and email; photo URL and id token are copied
     * only if present.
     */
    public static User transform(GoogleSignInAccount googleSignInAccount) {
        User mapped = new User();
        mapped.setUid(googleSignInAccount.getId());
        mapped.setName(googleSignInAccount.getDisplayName());
        mapped.setEmail(googleSignInAccount.getEmail());
        Object photo = googleSignInAccount.getPhotoUrl();
        if (photo != null) {
            mapped.setPhotoUrl(photo.toString());
        }
        String token = googleSignInAccount.getIdToken();
        if (token != null) {
            mapped.setIdToken(token);
        }
        return mapped;
    }
}
| apache-2.0 |
pdxrunner/geode | geode-assembly/src/upgradeTest/java/org/apache/geode/session/tests/TomcatSessionBackwardsCompatibilityTestBase.java | 8560 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.session.tests;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.apache.geode.internal.AvailablePortHelper;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
import org.apache.geode.test.dunit.standalone.VersionManager;
import org.apache.geode.test.junit.categories.BackwardCompatibilityTest;
import org.apache.geode.test.junit.rules.GfshCommandRule;
import org.apache.geode.test.junit.runners.CategoryWithParameterizedRunnerFactory;
/**
* This test iterates through the versions of Geode and executes session client compatibility with
* the current version of Geode.
*/
@Category({BackwardCompatibilityTest.class})
@RunWith(Parameterized.class)
@Parameterized.UseParametersRunnerFactory(CategoryWithParameterizedRunnerFactory.class)
public abstract class TomcatSessionBackwardsCompatibilityTestBase {

  /**
   * Old Geode versions to run the compatibility tests against.
   *
   * @return every version known to {@link VersionManager} except the current one,
   *         filtered to those new enough to carry the session modules
   * @throws RuntimeException if no older version is available on this machine
   */
  @Parameterized.Parameters
  public static Collection<String> data() {
    List<String> result = VersionManager.getInstance().getVersionsWithoutCurrent();
    // NOTE(review): assumes version ids are plain integers such as "120" —
    // confirm against VersionManager if the encoding ever changes.
    result.removeIf(s -> Integer.parseInt(s) < 120);
    if (result.size() < 1) {
      throw new RuntimeException("No older versions of Geode were found to test against");
    }
    return result;
  }

  // Runs the gfsh commands used to start/stop the locator and server.
  @Rule
  public transient GfshCommandRule gfsh = new GfshCommandRule();

  // Working directories for the locator and server live under this folder.
  @Rule
  public TemporaryFolder tempFolder = new TemporaryFolder();

  // Used in setup() to derive a unique name for the container configuration.
  @Rule
  public transient TestName testName = new TestName();

  // HTTP client used to read/write session attributes against the containers.
  protected transient Client client;
  // Starts/stops the Tomcat containers under test.
  protected transient ContainerManager manager;

  // Install directory of the old Geode version being tested against.
  protected File oldBuild;
  // The session modules shipped with that old version.
  protected File oldModules;

  // The four combinations under test: Tomcat {7.0.79, 8} x {old, current} modules.
  protected TomcatInstall tomcat7079AndOldModules;
  protected TomcatInstall tomcat7079AndCurrentModules;
  protected TomcatInstall tomcat8AndOldModules;
  protected TomcatInstall tomcat8AndCurrentModules;

  // Port the locator listens on; all installs are pointed at it in setup().
  protected int locatorPort;
  // Classpaths (lib/* and bin/*) built from the current-modules installs.
  protected String classPathTomcat7079;
  protected String classPathTomcat8;
  // gfsh working directories, remembered so stop() can shut the members down.
  protected String serverDir;
  protected String locatorDir;

  /**
   * Resolves the install location of the given old Geode version and remembers
   * its build and module directories.
   *
   * @param version a version id produced by {@link #data()}
   */
  protected TomcatSessionBackwardsCompatibilityTestBase(String version) {
    VersionManager versionManager = VersionManager.getInstance();
    String installLocation = versionManager.getInstall(version);
    oldBuild = new File(installLocation);
    oldModules = new File(installLocation + "/tools/Modules/");
  }

  /**
   * Starts a cache server via gfsh on an ephemeral port ("0"), joined to the
   * locator at localhost[locatorPort], remembering its directory for stop().
   */
  protected void startServer(String name, String classPath, int locatorPort) throws Exception {
    serverDir = tempFolder.newFolder("server").getPath();
    CommandStringBuilder command = new CommandStringBuilder(CliStrings.START_SERVER);
    command.addOption(CliStrings.START_SERVER__NAME, name);
    command.addOption(CliStrings.START_SERVER__SERVER_PORT, "0");
    command.addOption(CliStrings.START_SERVER__CLASSPATH, classPath);
    command.addOption(CliStrings.START_SERVER__LOCATORS, "localhost[" + locatorPort + "]");
    command.addOption(CliStrings.START_SERVER__DIR, serverDir);
    gfsh.executeAndAssertThat(command.toString()).statusIsSuccess();
  }

  /**
   * Starts a locator via gfsh on the given port, remembering its directory
   * for stop().
   */
  protected void startLocator(String name, String classPath, int port) throws Exception {
    locatorDir = tempFolder.newFolder("locator").getPath();
    CommandStringBuilder locStarter = new CommandStringBuilder(CliStrings.START_LOCATOR);
    locStarter.addOption(CliStrings.START_LOCATOR__MEMBER_NAME, name);
    locStarter.addOption(CliStrings.START_LOCATOR__CLASSPATH, classPath);
    locStarter.addOption(CliStrings.START_LOCATOR__PORT, Integer.toString(port));
    locStarter.addOption(CliStrings.START_LOCATOR__DIR, locatorDir);
    gfsh.executeAndAssertThat(locStarter.toString()).statusIsSuccess();
  }

  /**
   * Builds the four Tomcat installs (7.0.79 and 8, each with old and current
   * modules), picks a locator port and points every install at it, and
   * prepares the HTTP client and container manager used by the tests.
   */
  @Before
  public void setup() throws Exception {
    // Installs that pair each Tomcat version with the OLD version's modules
    // take the extra module-path/lib arguments; the "current" installs do not.
    tomcat7079AndOldModules = new TomcatInstall(TomcatInstall.TomcatVersion.TOMCAT7,
        ContainerInstall.ConnectionType.CLIENT_SERVER,
        ContainerInstall.DEFAULT_INSTALL_DIR + "Tomcat7079AndOldModules",
        oldModules.getAbsolutePath(), oldBuild.getAbsolutePath() + "/lib");
    tomcat7079AndCurrentModules = new TomcatInstall(TomcatInstall.TomcatVersion.TOMCAT7,
        ContainerInstall.ConnectionType.CLIENT_SERVER,
        ContainerInstall.DEFAULT_INSTALL_DIR + "Tomcat7079AndCurrentModules");
    tomcat8AndOldModules = new TomcatInstall(TomcatInstall.TomcatVersion.TOMCAT8,
        ContainerInstall.ConnectionType.CLIENT_SERVER,
        ContainerInstall.DEFAULT_INSTALL_DIR + "Tomcat8AndOldModules", oldModules.getAbsolutePath(),
        oldBuild.getAbsolutePath() + "/lib");
    tomcat8AndCurrentModules = new TomcatInstall(TomcatInstall.TomcatVersion.TOMCAT8,
        ContainerInstall.ConnectionType.CLIENT_SERVER,
        ContainerInstall.DEFAULT_INSTALL_DIR + "Tomcat8AndCurrentModules");

    // Server/locator classpaths always come from the CURRENT-modules installs.
    classPathTomcat7079 = tomcat7079AndCurrentModules.getHome() + "/lib/*" + File.pathSeparator
        + tomcat7079AndCurrentModules.getHome() + "/bin/*";
    classPathTomcat8 = tomcat8AndCurrentModules.getHome() + "/lib/*" + File.pathSeparator
        + tomcat8AndCurrentModules.getHome() + "/bin/*";

    // Get available port for the locator
    locatorPort = AvailablePortHelper.getRandomAvailableTCPPort();

    tomcat7079AndOldModules.setDefaultLocator("localhost", locatorPort);
    tomcat7079AndCurrentModules.setDefaultLocator("localhost", locatorPort);
    tomcat8AndOldModules.setDefaultLocator("localhost", locatorPort);
    tomcat8AndCurrentModules.setDefaultLocator("localhost", locatorPort);

    client = new Client();
    manager = new ContainerManager();
    // Due to parameterization of the test name, the URI would be malformed. Instead, it strips off
    // the [] symbols
    manager.setTestName(testName.getMethodName().replace("[", "").replace("]", ""));
  }

  /**
   * Starts a locator and one server, both using the supplied Tomcat classpath.
   */
  protected void startClusterWithTomcat(String tomcatClassPath) throws Exception {
    startLocator("loc", tomcatClassPath, locatorPort);
    startServer("server", tomcatClassPath, locatorPort);
  }

  /**
   * Stops all containers that were previously started and cleans up their
   * configurations, then stops the server and locator through gfsh.
   */
  @After
  public void stop() throws Exception {
    manager.stopAllActiveContainers();
    manager.cleanUp();

    CommandStringBuilder command = new CommandStringBuilder(CliStrings.STOP_SERVER);
    command.addOption(CliStrings.STOP_SERVER__DIR, serverDir);
    gfsh.executeAndAssertThat(command.toString()).statusIsSuccess();

    CommandStringBuilder locStop = new CommandStringBuilder(CliStrings.STOP_LOCATOR);
    locStop.addOption(CliStrings.STOP_LOCATOR__DIR, locatorDir);
    gfsh.executeAndAssertThat(locStop.toString()).statusIsSuccess();
  }

  /**
   * Writes a session attribute through the first container, then verifies that
   * every container sees the same session cookie and the same value.
   */
  protected void doPutAndGetSessionOnAllClients() throws IOException, URISyntaxException {
    // This has to happen at the start of every test
    manager.startAllInactiveContainers();

    String key = "value_testSessionPersists";
    String value = "Foo";

    client.setPort(Integer.parseInt(manager.getContainerPort(0)));
    Client.Response resp = client.set(key, value);
    String cookie = resp.getSessionCookie();

    for (int i = 0; i < manager.numContainers(); i++) {
      System.out.println("Checking get for container:" + i);
      client.setPort(Integer.parseInt(manager.getContainerPort(i)));
      resp = client.get(key);

      assertEquals("Sessions are not replicating properly", cookie, resp.getSessionCookie());
      assertEquals("Session data is not replicating properly", value, resp.getResponse());
    }
  }
}
| apache-2.0 |
AlexXZQ/CoolWeather | app/src/main/java/com/example/coolweather/ChooseAreaFragment.java | 9648 | package com.example.coolweather;
import android.app.ProgressDialog;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.provider.ContactsContract;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.fragment.app.Fragment;
import com.example.coolweather.db.City;
import com.example.coolweather.db.County;
import com.example.coolweather.db.Province;
import com.example.coolweather.util.HttpUtil;
import com.example.coolweather.util.Utility;
import org.jetbrains.annotations.NotNull;
import org.litepal.crud.DataSupport;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
/**
 * Fragment that lets the user drill down province -> city -> county and then
 * hands the chosen county's weather id to {@link WeatherActivity}.
 *
 * Data is read from the local database first; on a miss it is fetched from
 * guolin.tech and cached via {@link Utility}.
 */
public class ChooseAreaFragment extends Fragment {

    public static final int LEVEL_PROVINCE = 0;
    public static final int LEVEL_CITY = 1;
    public static final int LEVEL_COUNTY = 2;

    private ProgressDialog progressDialog;
    private TextView titleText;
    private Button backButton;
    private ListView listView;
    private ArrayAdapter<String> adapter;
    // Backing data for the list; mutated in place, then adapter is notified.
    private List<String> dataList = new ArrayList<>();

    // Provinces shown at LEVEL_PROVINCE.
    private List<Province> provinceList;
    // Cities of the selected province.
    private List<City> cityList;
    // Counties of the selected city.
    private List<County> countyList;
    // Province the user tapped.
    private Province selectedProvince;
    // City the user tapped.
    private City selectedCity;
    // Which of the three levels is currently displayed.
    private int currentLevel;

    @RequiresApi(api = Build.VERSION_CODES.KITKAT)
    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.choose_area, container, false);
        titleText = (TextView) view.findViewById(R.id.title_text);
        backButton = (Button) view.findViewById(R.id.back_button);
        listView = (ListView) view.findViewById(R.id.list_view);
        adapter = new ArrayAdapter<>(Objects.requireNonNull(getContext()), android.R.layout.simple_list_item_1, dataList);
        listView.setAdapter(adapter);
        return view;
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int position, long id) {
                if (currentLevel == LEVEL_PROVINCE) {
                    selectedProvince = provinceList.get(position);
                    queryCities();
                } else if (currentLevel == LEVEL_CITY) {
                    selectedCity = cityList.get(position);
                    queryCounties();
                } else if (currentLevel == LEVEL_COUNTY) {
                    String weatherId = countyList.get(position).getWeatherId();
                    if (getActivity() instanceof MainActivity) {
                        // Launched from the initial chooser: open the weather
                        // screen and finish the chooser.
                        Intent intent = new Intent(getActivity(), WeatherActivity.class);
                        intent.putExtra("weather_id", weatherId);
                        startActivity(intent);
                        getActivity().finish();
                    } else if (getActivity() instanceof WeatherActivity) {
                        // Hosted in WeatherActivity's drawer: refresh in place.
                        WeatherActivity activity = (WeatherActivity) getActivity();
                        activity.drawerLayout.closeDrawers();
                        activity.swipeRefreshLayout.setRefreshing(true);
                        activity.requestWeather(weatherId);
                    }
                }
            }
        });
        backButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Navigate one level up; at LEVEL_PROVINCE the button is hidden.
                if (currentLevel == LEVEL_COUNTY) {
                    queryCities();
                } else if (currentLevel == LEVEL_CITY) {
                    queryProvinces();
                }
            }
        });
        queryProvinces();
    }

    /**
     * Lists all provinces, preferring the local database and falling back to
     * the server on a cache miss.
     */
    private void queryProvinces() {
        titleText.setText("中国");
        backButton.setVisibility(View.GONE);
        provinceList = DataSupport.findAll(Province.class);
        if (provinceList.size() > 0) {
            dataList.clear();
            for (Province province : provinceList) {
                dataList.add(province.getProvinceName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            currentLevel = LEVEL_PROVINCE;
        } else {
            String address = "http://guolin.tech/api/china";
            queryFromServer(address, "province");
        }
    }

    /**
     * Lists the cities of the selected province, preferring the local database.
     */
    private void queryCities() {
        titleText.setText(selectedProvince.getProvinceName());
        backButton.setVisibility(View.VISIBLE);
        cityList = DataSupport.where("provinceid = ?", String.valueOf(selectedProvince.getId())).find(City.class);
        if (cityList.size() > 0) {
            dataList.clear();
            for (City city : cityList) {
                dataList.add(city.getCityName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            currentLevel = LEVEL_CITY;
        } else {
            int provinceCode = selectedProvince.getProvinceCode();
            String address = "http://guolin.tech/api/china/" + provinceCode;
            queryFromServer(address, "city");
        }
    }

    /**
     * Lists the counties of the selected city, preferring the local database.
     */
    private void queryCounties() {
        titleText.setText(selectedCity.getCityName());
        backButton.setVisibility(View.VISIBLE);
        countyList = DataSupport.where("cityid = ?", String.valueOf(selectedCity.getId())).find(County.class);
        if (countyList.size() > 0) {
            dataList.clear();
            for (County county : countyList) {
                dataList.add(county.getCountyName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            currentLevel = LEVEL_COUNTY;
        } else {
            int provinceCode = selectedProvince.getProvinceCode();
            int cityCode = selectedCity.getCityCode();
            String address = "http://guolin.tech/api/china/" + provinceCode + "/" + cityCode;
            queryFromServer(address, "county");
        }
    }

    /**
     * Fetches province/city/county data from the server, stores it via
     * {@link Utility}, then re-queries the corresponding level on the UI thread.
     *
     * @param address URL to fetch
     * @param type    one of "province", "city" or "county"
     */
    private void queryFromServer(String address, final String type) {
        showProgressDialog();
        HttpUtil.sendOkHttpRequest(address, new Callback() {
            @Override
            public void onFailure(@NotNull Call call, @NotNull IOException e) {
                // BUG FIX: this callback runs on a background thread possibly
                // long after the fragment is detached; getActivity() would then
                // be null and runOnUiThread would crash with an NPE.
                final android.app.Activity host = getActivity();
                if (host == null) {
                    return;
                }
                host.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        closeProgressDialog();
                        // Use the captured activity as context: getContext() may
                        // already be null if the fragment detached meanwhile.
                        Toast.makeText(host, "加载失败", Toast.LENGTH_SHORT).show();
                    }
                });
            }

            @Override
            public void onResponse(@NotNull Call call, @NotNull Response response) throws IOException {
                String responseText = response.body().string();
                boolean result = false;
                if ("province".equals(type)) {
                    result = Utility.handleProvinceResponse(responseText);
                } else if ("city".equals(type)) {
                    result = Utility.handleCityResponse(responseText, selectedProvince.getId());
                } else if ("county".equals(type)) {
                    result = Utility.handleCountyResponse(responseText, selectedCity.getId());
                }
                // Same detachment guard as in onFailure.
                final android.app.Activity host = getActivity();
                if (host == null) {
                    return;
                }
                if (result) {
                    host.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            closeProgressDialog();
                            if ("province".equals(type)) {
                                queryProvinces();
                            } else if ("city".equals(type)) {
                                queryCities();
                            } else if ("county".equals(type)) {
                                queryCounties();
                            }
                        }
                    });
                } else {
                    // BUG FIX: previously a failed parse left the progress
                    // dialog spinning forever with no feedback. Dismiss it and
                    // show the same failure toast as onFailure.
                    host.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            closeProgressDialog();
                            Toast.makeText(host, "加载失败", Toast.LENGTH_SHORT).show();
                        }
                    });
                }
            }
        });
    }

    /**
     * Shows the (lazily created) indeterminate progress dialog.
     */
    private void showProgressDialog() {
        if (progressDialog == null) {
            progressDialog = new ProgressDialog(getActivity());
            progressDialog.setMessage("正在加载.");
            progressDialog.setCanceledOnTouchOutside(false);
        }
        progressDialog.show();
    }

    /**
     * Dismisses the progress dialog if it exists.
     */
    private void closeProgressDialog() {
        if (progressDialog != null) {
            progressDialog.dismiss();
        }
    }
}
| apache-2.0 |
lvweiwolf/poi-3.16 | src/scratchpad/src/org/apache/poi/hslf/usermodel/HSLFTextRun.java | 13324 | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.hslf.usermodel;
import java.awt.Color;
import java.util.List;
import org.apache.poi.hslf.exceptions.HSLFException;
import org.apache.poi.hslf.model.textproperties.BitMaskTextProp;
import org.apache.poi.hslf.model.textproperties.CharFlagsTextProp;
import org.apache.poi.hslf.model.textproperties.TextProp;
import org.apache.poi.hslf.model.textproperties.TextPropCollection;
import org.apache.poi.hslf.model.textproperties.TextPropCollection.TextPropType;
import org.apache.poi.sl.draw.DrawPaint;
import org.apache.poi.sl.usermodel.MasterSheet;
import org.apache.poi.sl.usermodel.PaintStyle;
import org.apache.poi.sl.usermodel.PaintStyle.SolidPaint;
import org.apache.poi.sl.usermodel.Placeholder;
import org.apache.poi.sl.usermodel.TextParagraph;
import org.apache.poi.sl.usermodel.TextRun;
import org.apache.poi.sl.usermodel.TextShape;
import org.apache.poi.util.Internal;
import org.apache.poi.util.POILogFactory;
import org.apache.poi.util.POILogger;
/**
* Represents a run of text, all with the same style
*
*/
public final class HSLFTextRun implements TextRun {
protected POILogger logger = POILogFactory.getLogger(this.getClass());
/** The TextRun we belong to */
private HSLFTextParagraph parentParagraph;
private String _runText = "";
private String _fontFamily;
private HSLFHyperlink link;
/**
* Our paragraph and character style.
* Note - we may share these styles with other RichTextRuns
*/
private TextPropCollection characterStyle = new TextPropCollection(1, TextPropType.character);
private TextPropCollection masterStyle;
/**
* Create a new wrapper around a rich text string
* @param parentParagraph the parent paragraph
*/
public HSLFTextRun(HSLFTextParagraph parentParagraph) {
this.parentParagraph = parentParagraph;
}
public TextPropCollection getCharacterStyle() {
return characterStyle;
}
public void setCharacterStyle(TextPropCollection characterStyle) {
this.characterStyle.copy(characterStyle);
this.characterStyle.updateTextSize(_runText.length());
}
/**
* Setting a master style reference
*
* @param characterStyle the master style reference
*
* @since POI 3.14-Beta1
*/
@Internal
/* package */ void setMasterStyleReference(TextPropCollection masterStyle) {
this.masterStyle = masterStyle;
}
/**
* Supply the SlideShow we belong to
*/
public void updateSheet() {
if (_fontFamily != null) {
setFontFamily(_fontFamily);
_fontFamily = null;
}
}
/**
* Get the length of the text
*/
public int getLength() {
return _runText.length();
}
/**
* Fetch the text, in raw storage form
*/
@Override
public String getRawText() {
return _runText;
}
/**
* Change the text
*/
@Override
public void setText(String text) {
if (text == null) {
throw new HSLFException("text must not be null");
}
String newText = HSLFTextParagraph.toInternalString(text);
if (!newText.equals(_runText)) {
_runText = newText;
if (HSLFSlideShow.getLoadSavePhase() == HSLFSlideShow.LoadSavePhase.LOADED) {
parentParagraph.setDirty();
}
}
}
// --------------- Internal helpers on rich text properties -------
/**
* Fetch the value of the given flag in the CharFlagsTextProp.
* Returns false if the CharFlagsTextProp isn't present, since the
* text property won't be set if there's no CharFlagsTextProp.
*/
private boolean isCharFlagsTextPropVal(int index) {
return getFlag(index);
}
protected boolean getFlag(int index) {
if (characterStyle == null) {
return false;
}
BitMaskTextProp prop = (BitMaskTextProp)characterStyle.findByName(CharFlagsTextProp.NAME);
if (prop == null || !prop.getSubPropMatches()[index]) {
int txtype = parentParagraph.getRunType();
HSLFSheet sheet = parentParagraph.getSheet();
if (sheet != null) {
HSLFMasterSheet master = sheet.getMasterSheet();
if (master != null){
prop = (BitMaskTextProp)master.getStyleAttribute(txtype, parentParagraph.getIndentLevel(), CharFlagsTextProp.NAME, true);
}
} else {
logger.log(POILogger.WARN, "MasterSheet is not available");
}
}
return prop == null ? false : prop.getSubValue(index);
}
/**
* Set the value of the given flag in the CharFlagsTextProp, adding
* it if required.
*/
private void setCharFlagsTextPropVal(int index, boolean value) {
// TODO: check if paragraph/chars can be handled the same ...
if (getFlag(index) != value) {
setFlag(index, value);
parentParagraph.setDirty();
}
}
/**
* Sets the value of the given Paragraph TextProp, add if required
* @param propName The name of the Paragraph TextProp
* @param val The value to set for the TextProp
*/
public void setCharTextPropVal(String propName, Integer val) {
getTextParagraph().setPropVal(characterStyle, masterStyle, propName, val);
getTextParagraph().setDirty();
}
// --------------- Friendly getters / setters on rich text properties -------
@Override
public boolean isBold() {
return isCharFlagsTextPropVal(CharFlagsTextProp.BOLD_IDX);
}
@Override
public void setBold(boolean bold) {
setCharFlagsTextPropVal(CharFlagsTextProp.BOLD_IDX, bold);
}
@Override
public boolean isItalic() {
return isCharFlagsTextPropVal(CharFlagsTextProp.ITALIC_IDX);
}
@Override
public void setItalic(boolean italic) {
setCharFlagsTextPropVal(CharFlagsTextProp.ITALIC_IDX, italic);
}
@Override
public boolean isUnderlined() {
return isCharFlagsTextPropVal(CharFlagsTextProp.UNDERLINE_IDX);
}
@Override
public void setUnderlined(boolean underlined) {
setCharFlagsTextPropVal(CharFlagsTextProp.UNDERLINE_IDX, underlined);
}
/**
* Does the text have a shadow?
*/
public boolean isShadowed() {
return isCharFlagsTextPropVal(CharFlagsTextProp.SHADOW_IDX);
}
/**
* Does the text have a shadow?
*/
public void setShadowed(boolean flag) {
setCharFlagsTextPropVal(CharFlagsTextProp.SHADOW_IDX, flag);
}
/**
* Is this text embossed?
*/
public boolean isEmbossed() {
return isCharFlagsTextPropVal(CharFlagsTextProp.RELIEF_IDX);
}
/**
* Is this text embossed?
*/
public void setEmbossed(boolean flag) {
setCharFlagsTextPropVal(CharFlagsTextProp.RELIEF_IDX, flag);
}
@Override
public boolean isStrikethrough() {
return isCharFlagsTextPropVal(CharFlagsTextProp.STRIKETHROUGH_IDX);
}
@Override
public void setStrikethrough(boolean flag) {
setCharFlagsTextPropVal(CharFlagsTextProp.STRIKETHROUGH_IDX, flag);
}
/**
* Gets the subscript/superscript option
*
* @return the percentage of the font size. If the value is positive, it is superscript, otherwise it is subscript
*/
public int getSuperscript() {
TextProp tp = getTextParagraph().getPropVal(characterStyle, masterStyle, "superscript");
return tp == null ? 0 : tp.getValue();
}
/**
* Sets the subscript/superscript option
*
* @param val the percentage of the font size. If the value is positive, it is superscript, otherwise it is subscript
*/
public void setSuperscript(int val) {
setCharTextPropVal("superscript", val);
}
@Override
public Double getFontSize() {
TextProp tp = getTextParagraph().getPropVal(characterStyle, masterStyle, "font.size");
return tp == null ? null : (double)tp.getValue();
}
@Override
public void setFontSize(Double fontSize) {
Integer iFontSize = (fontSize == null) ? null : fontSize.intValue();
setCharTextPropVal("font.size", iFontSize);
}
/**
* Gets the font index
*/
public int getFontIndex() {
TextProp tp = getTextParagraph().getPropVal(characterStyle, masterStyle, "font.index");
return tp == null ? -1 : tp.getValue();
}
/**
* Sets the font index
*/
public void setFontIndex(int idx) {
setCharTextPropVal("font.index", idx);
}
@Override
public void setFontFamily(String fontFamily) {
HSLFSheet sheet = parentParagraph.getSheet();
@SuppressWarnings("resource")
HSLFSlideShow slideShow = (sheet == null) ? null : sheet.getSlideShow();
if (sheet == null || slideShow == null) {
//we can't set font since slideshow is not assigned yet
_fontFamily = fontFamily;
return;
}
// Get the index for this font (adding if needed)
Integer fontIdx = (fontFamily == null) ? null : slideShow.getFontCollection().addFont(fontFamily);
setCharTextPropVal("font.index", fontIdx);
}
@Override
public String getFontFamily() {
HSLFSheet sheet = parentParagraph.getSheet();
@SuppressWarnings("resource")
HSLFSlideShow slideShow = (sheet == null) ? null : sheet.getSlideShow();
if (sheet == null || slideShow == null) {
return _fontFamily;
}
TextProp tp = getTextParagraph().getPropVal(characterStyle, masterStyle, "font.index,asian.font.index,ansi.font.index,symbol.font.index");
if (tp == null) { return null; }
return slideShow.getFontCollection().getFontWithId(tp.getValue());
}
/**
* @return font color as PaintStyle
*/
@Override
public SolidPaint getFontColor() {
TextProp tp = getTextParagraph().getPropVal(characterStyle, masterStyle, "font.color");
if (tp == null) {
return null;
}
Color color = HSLFTextParagraph.getColorFromColorIndexStruct(tp.getValue(), parentParagraph.getSheet());
SolidPaint ps = DrawPaint.createSolidPaint(color);
return ps;
}
/**
* Sets color of the text, as a int bgr.
* (PowerPoint stores as BlueGreenRed, not the more
* usual RedGreenBlue)
* @see java.awt.Color
*/
public void setFontColor(int bgr) {
setCharTextPropVal("font.color", bgr);
}
@Override
public void setFontColor(Color color) {
setFontColor(DrawPaint.createSolidPaint(color));
}
@Override
public void setFontColor(PaintStyle color) {
if (!(color instanceof SolidPaint)) {
throw new IllegalArgumentException("HSLF only supports solid paint");
}
// In PowerPont RGB bytes are swapped, as BGR
SolidPaint sp = (SolidPaint)color;
Color c = DrawPaint.applyColorTransform(sp.getSolidColor());
int rgb = new Color(c.getBlue(), c.getGreen(), c.getRed(), 254).getRGB();
setFontColor(rgb);
}
protected void setFlag(int index, boolean value) {
BitMaskTextProp prop = (BitMaskTextProp)characterStyle.addWithName(CharFlagsTextProp.NAME);
prop.setSubValue(value, index);
}
public HSLFTextParagraph getTextParagraph() {
return parentParagraph;
}
@Override
public TextCap getTextCap() {
return TextCap.NONE;
}
@Override
public boolean isSubscript() {
return getSuperscript() < 0;
}
@Override
public boolean isSuperscript() {
return getSuperscript() > 0;
}
@Override
public byte getPitchAndFamily() {
return 0;
}
/**
* Sets the hyperlink - used when parsing the document
*
* @param link the hyperlink
*/
protected void setHyperlink(HSLFHyperlink link) {
this.link = link;
}
@Override
public HSLFHyperlink getHyperlink() {
return link;
}
@Override
public HSLFHyperlink createHyperlink() {
if (link == null) {
link = HSLFHyperlink.createHyperlink(this);
parentParagraph.setDirty();
}
return link;
}
@Override
public FieldType getFieldType() {
HSLFTextShape ts = getTextParagraph().getParentShape();
Placeholder ph = ts.getPlaceholder();
if (ph != null) {
switch (ph) {
case SLIDE_NUMBER:
return FieldType.SLIDE_NUMBER;
case DATETIME:
return FieldType.DATE_TIME;
default:
break;
}
}
if (ts.getSheet() instanceof MasterSheet) {
TextShape<?,? extends TextParagraph<?,?,? extends TextRun>> ms = ts.getMetroShape();
if (ms == null || ms.getTextParagraphs().isEmpty()) {
return null;
}
List<? extends TextRun> trList = ms.getTextParagraphs().get(0).getTextRuns();
if (trList.isEmpty()) {
return null;
}
return trList.get(0).getFieldType();
}
return null;
}
}
| apache-2.0 |
mozsoy/SecureLayerAndroid | app/src/main/java/com/google/android/gms/drive/sample/quickstart/MyWebSocket.java | 5751 | package com.google.android.gms.drive.sample.quickstart;
import android.util.Log;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.drafts.Draft;
import org.java_websocket.handshake.ServerHandshake;
import org.json.JSONException;
import org.json.JSONObject;
import java.math.BigInteger;
import java.net.URI;
import java.nio.channels.NotYetConnectedException;
import java.util.Iterator;
import java.util.Map;
/**
 * WebSocket client that logs in (or signs up) against the secure server and
 * then performs a Diffie-Hellman exchange through {@link MyKeyStore} to
 * establish a session key.
 *
 * Created by JAY on 10/22/15.
 */
public class MyWebSocket extends WebSocketClient {
    // true once the server verified the credentials and the DH reply was sent
    private boolean connected;
    // singleton, reused only while the target URI stays the same
    private static MyWebSocket instance = null;
    private static URI uri;
    private String reply; // server's reply
    private boolean receivedReply; // whether the server has replied
    private boolean sent; // message sent by calling send method
    private MyKeyStore myKeyStore;
    private String id;
    private String hp;
    private boolean sign_up;

    /**
     * Returns the shared instance, creating a fresh one when none exists yet
     * or when the requested URI differs from the one currently in use.
     * NOTE(review): not thread-safe; presumably only called from one thread.
     */
    public static MyWebSocket getInstance(URI _uri,
                                          Draft _draft,
                                          Map<String, String> headers,
                                          int timeout,
                                          MyKeyStore _myKeyStore) {
        if (uri != null && _uri.compareTo(uri) == 0) {
            if (instance == null) {
                instance = new MyWebSocket(_uri, _draft, headers, timeout, _myKeyStore);
            }
        } else {
            instance = new MyWebSocket(_uri, _draft, headers, timeout, _myKeyStore);
        }
        return instance;
    }

    private MyWebSocket(URI _uri,
                        Draft _draft,
                        Map<String, String> headers,
                        int timeout,
                        MyKeyStore _myKeyStore) {
        super(_uri, _draft, headers, timeout);
        uri = _uri;
        connected = false;
        receivedReply = false;
        sent = false;
        myKeyStore = _myKeyStore;
        id = "";
        hp = "";
        sign_up = false;
    }

    /**
     * login or signup with secure server
     *
     * @param _id user name
     * @param _hp hash of password
     * @param _sign_up signup with the credentials
     */
    public void connect(String _id, String _hp, boolean _sign_up) {
        id = _id;
        hp = _hp;
        sign_up = _sign_up;
        sent = false;
        super.connect();
    }

    // marks 'sent' so the next incoming message is treated as a direct reply
    @Override
    public void send(String text) throws NotYetConnectedException {
        super.send(text);
        sent = true;
    }

    @Override
    public void send(byte[] data) throws NotYetConnectedException {
        super.send(data);
        sent = true;
    }

    /**
     * On open, sends the stored credentials as either a sign_up or a verify
     * command. Uses super.send() deliberately so the 'sent' flag stays false
     * and the server's answer is handled by the handshake logic in onMessage.
     */
    @Override
    public void onOpen(ServerHandshake handshakedata) {
        try {
//            connected = true;
            Log.e("Websocket", "Opened");
            Log.e("Connection established", ":::::::::::::::::::::::::::::::::::::::::::::");
            JSONObject json = new JSONObject();
            json.put(Constants.id, id);
            json.put(Constants.hp, hp);
            if (sign_up) {
                super.send(new JSONObject().put(Constants.sign_up, json).toString());
            } else {
                super.send(new JSONObject().put(Constants.verify, json).toString());
            }
        } catch (Exception e) {
            Log.e("onOpen", e.getMessage());
        }
    }

    /**
     * Handles incoming messages: replies to explicit send() calls are stashed
     * for getReply(); otherwise the message is interpreted as a handshake
     * command (verify -> store server key, run DH, send encrypted DH value).
     */
    @Override
    public void onMessage(String message) {
        Log.e("Server Reply", message);
        if (sent) {
            receivedReply = true;
            reply = message;
            return;
        }
        JSONObject json = null;
        try {
            json = new JSONObject(message);
        } catch (Exception e) {
            Log.e("on message", e.getMessage());
        }
        if (json != null) {
            Iterator<String> keys = json.keys();
            try {
                while (keys.hasNext()) {
                    String command = keys.next();
                    if (command.equals(Constants.verify)) { // verified -> save server's pubkey | establish session key
                        JSONObject data = json.getJSONObject(Constants.verify);
                        myKeyStore.setServerKey(data.getString(Constants.publickey));
                        BigInteger dhA = myKeyStore
                                .getdhA((BigInteger) data.get(Constants.g), (BigInteger) data.get(Constants.p));
                        myKeyStore
                                .generateSessionKey((BigInteger) data.get(Constants.dh), (BigInteger) data.get(Constants.p));
                        String dh = myKeyStore.encryptRSA(dhA).toString();
                        super.send(new JSONObject().put(Constants.dh, dh).toString());
                        connected = true;
                    } else if (command.equals(Constants.error)) {
                        Log.e("Error", "from server");
                    }
                }
            } catch (JSONException e) {
                Log.e("not verified", e.getMessage());
                connected = false;
            }
        }
    }

    @Override
    public void onClose(int code, String reason, boolean remote) {
        Log.e("Closing", ":::::::::::::::::::::::::::::::::::::::::::::");
        Log.e("Reason", reason);
        connected = false;
    }

    @Override
    public void onError(Exception ex) {
        Log.i("Websocket", "Error " + ex.getMessage());
    }

    /**
     * Returns the last reply received for an explicit send() and clears the
     * pending-reply state (one-shot read).
     */
    public String getReply() {
        receivedReply = false;
        String re = reply;
        reply = null;
        return re;
    }

    public boolean isConnected() {
        return connected;
    }

    // only meaningful after send(); otherwise resets and reports false
    public boolean receivedReply() {
        return sent ? receivedReply : (receivedReply = false);
    }
}
| apache-2.0 |
lufei1344/cloud-oa | src/main/java/com/cloudoa/framework/flow/dao/BpmInstanceDao.java | 285 | package com.cloudoa.framework.flow.dao;
import org.springframework.stereotype.Component;
import com.cloudoa.framework.flow.entity.BpmInstance;
import com.cloudoa.framework.orm.hibernate.HibernateDao;
/**
 * Hibernate DAO for {@link BpmInstance} entities keyed by {@link Long} ids.
 * All generic CRUD operations are inherited from {@link HibernateDao}; this
 * class only binds the entity/id type parameters and registers the Spring bean.
 */
@Component
public class BpmInstanceDao extends HibernateDao<BpmInstance,Long> {
}
| apache-2.0 |
uustory/U8Server | src/main/java/com/u8/server/sdk/chuyou/ChuYouSDK.java | 2939 | package com.u8.server.sdk.chuyou;
import com.u8.server.data.UChannel;
import com.u8.server.data.UOrder;
import com.u8.server.data.UUser;
import com.u8.server.log.Log;
import com.u8.server.sdk.*;
import com.u8.server.utils.EncryptUtils;
import net.sf.json.JSONObject;
import java.util.HashMap;
import java.util.Map;
/**
 * Wuhan ChuYou 07073 channel SDK integration.
 * Created by xiaohei on 15/12/22.
 */
public class ChuYouSDK implements ISDKScript{

    /**
     * Verifies a user token against the channel's auth server.
     * The request is signed with md5("pid=..&token=..&username=.." + appKey)
     * and posted asynchronously; the callback receives the verified uid and
     * username on success ("state" == 1), or a failure description otherwise.
     */
    @Override
    public void verify(final UChannel channel, String extension, final ISDKVerifyListener callback) {
        try{
            JSONObject json = JSONObject.fromObject(extension);
            final String username = json.getString("username");
            String token = json.getString("token");

            // signature base string: key-sorted pairs followed by the app key
            StringBuilder sb = new StringBuilder();
            sb.append("pid=").append(channel.getCpID()).append("&")
                    .append("token=").append(token).append("&")
                    .append("username=").append(username)
                    .append(channel.getCpAppKey());

            String sign = EncryptUtils.md5(sb.toString());

            Map<String,String> params = new HashMap<String, String>();
            params.put("username", username);
            params.put("token", token);
            params.put("pid", channel.getCpID());
            params.put("sign", sign);

            String url = channel.getChannelAuthUrl();

            UHttpAgent.getInstance().post(url, params, new UHttpFutureCallback() {
                @Override
                public void completed(String result) {
                    try {
                        Log.e("The auth result is " + result);
                        JSONObject json = JSONObject.fromObject(result);
                        int code = json.getInt("state");
                        if(code == 1){
                            JSONObject rt = json.getJSONObject("data");
                            callback.onSuccess(new SDKVerifyResult(true, rt.getString("uid"), rt.getString("username"), ""));
                            return;
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    // reached on non-success state or on a parse failure
                    callback.onFailed(channel.getMaster().getSdkName() + " verify failed. the post result is " + result);
                }

                @Override
                public void failed(String e) {
                    callback.onFailed(channel.getMaster().getSdkName() + " verify failed. " + e);
                }
            });

        }catch (Exception e){
            e.printStackTrace();
            callback.onFailed(channel.getMaster().getSdkName() + " verify execute failed. the exception is "+e.getMessage());
        }
    }

    /**
     * This channel does not require a server-generated order id; succeed with
     * an empty one so the generic flow can proceed.
     */
    @Override
    public void onGetOrderID(UUser user, UOrder order, ISDKOrderListener callback) {
        if(callback != null){
            callback.onSuccess("");
        }
    }
}
| apache-2.0 |
sangupta/am | src/main/java/com/sangupta/am/servlet/MockHttpServletResponse.java | 7178 | /**
* am: Assert-Mocks for unit-testing Java servlet API code
* Copyright (c) 2016, Sandeep Gupta
*
* https://sangupta.com/projects/am
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sangupta.am.servlet;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.Set;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;

import com.sangupta.am.servlet.support.MockUrlEncoder;
import com.sangupta.jerry.constants.HttpHeaderName;
import com.sangupta.jerry.constants.HttpStatusCode;
import com.sangupta.jerry.ds.SimpleMultiMap;
/**
 * Implementation of the {@link HttpServletResponse} for unit-testing that keeps
 * all parameters within memory and provides useful accessor methods to modify
 * the values.
 * 
 * Meant to be used only for unit-testing.
 * 
 * @author sangupta
 * 
 * @since 1.0.0
 */
public class MockHttpServletResponse extends MockServletResponse implements HttpServletResponse {

    /**
     * HTTP date pattern (RFC 7231, section 7.1.1.1). HTTP dates always use
     * English day/month names, so formatting/parsing must be pinned to
     * {@link Locale#US} regardless of the JVM default locale.
     */
    private static final String HTTP_DATE_PATTERN = "EEE, dd MMM yyyy HH:mm:ss zzz";

    /**
     * {@link Cookie}s associated with this response
     */
    public final List<Cookie> cookies = new ArrayList<>();

    /**
     * As per RFC 7230 header names are case-insensitive. Exercise caution when you
     * add and read headers directly.
     */
    public final SimpleMultiMap<String, String> headers = new SimpleMultiMap<>();

    public int status;

    public String statusMessage;

    public MockUrlEncoder urlEncoder = new MockUrlEncoder();

    /**
     * Creates a fresh formatter for HTTP dates. {@link SimpleDateFormat} is not
     * thread-safe, hence a new instance per call instead of a shared constant.
     */
    private static SimpleDateFormat httpDateFormat() {
        return new SimpleDateFormat(HTTP_DATE_PATTERN, Locale.US);
    }

    public int getStatus() {
        return status;
    }

    public String getStatusMessage() {
        return statusMessage;
    }

    public void setUrlEncoder(MockUrlEncoder urlEncoder) {
        this.urlEncoder = urlEncoder;
    }

    /**
     * @return the number of cookies added to this response so far
     */
    public int getNumCookies() {
        return this.cookies.size();
    }

    /**
     * Returns the first cookie with the given name, or {@code null} if absent.
     */
    public Cookie getCookie(String name) {
        for (Cookie cookie : this.cookies) {
            if (cookie.getName().equals(name)) {
                return cookie;
            }
        }
        return null;
    }

    /**
     * Indicates whether a cookie with the given name has been added.
     */
    public boolean hasCookie(String name) {
        Cookie cookie = this.getCookie(name);
        if (cookie == null) {
            return false;
        }
        return true;
    }

    /**
     * Returns the first value of the given header (case-insensitive),
     * or {@code null} if the header is absent.
     */
    public String getHeader(String name) {
        return this.headers.getOne(name.toLowerCase());
    }

    /**
     * Returns all values of the given header; an empty list if absent.
     */
    public List<String> getHeaderValues(String name) {
        List<String> result = this.headers.getValues(name.toLowerCase());
        if(result != null) {
            return result;
        }
        return new ArrayList<>();
    }

    /**
     * Returns the first value of the given header parsed as an int.
     *
     * @throws NumberFormatException if the header is absent or non-numeric
     */
    public int getIntHeader(String name) {
        String value = this.getHeader(name);
        return Integer.parseInt(value);
    }

    /**
     * Get a header as a date value returning <code>long</code>. If multiple values
     * are present for the header, the first one is picked.
     * 
     * @param name the name of the header
     * 
     * @return the <code>long</code> value for the header if present, else
     *         <code>-1</code> if absent.
     * 
     * @throws IllegalArgumentException if the header value cannot be parsed as
     *         an HTTP date
     */
    public long getDateHeader(String name) {
        String value = this.getHeader(name);
        if (value == null) {
            // honor the documented contract instead of NPE-ing on parse(null)
            return -1;
        }
        try {
            return httpDateFormat().parse(value).getTime();
        } catch (ParseException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Returns all values of the given header as an enumeration, or
     * {@code null} if the header is absent.
     */
    public Enumeration<String> getHeaders(String name) {
        name = name.toLowerCase();
        if (!this.headers.containsKey(name)) {
            return null;
        }
        return Collections.enumeration(this.headers.getValues(name));
    }

    public Enumeration<String> getHeaderNames() {
        return Collections.enumeration(this.headers.keySet());
    }

    public Set<String> getHeaderNamesAsSet() {
        return this.headers.keySet();
    }

    /**
     * Indicates if the response has headers.
     * 
     * @return <code>true</code> if headers are present, <code>false</code>
     *         otherwise
     */
    public boolean hasHeaders() {
        return !this.headers.isEmpty();
    }

    // Overridden methods follow

    @Override
    public void addCookie(Cookie cookie) {
        if (cookie == null) {
            return;
        }
        this.cookies.add(cookie);
    }

    @Override
    public boolean containsHeader(String name) {
        return this.headers.containsKey(name.toLowerCase());
    }

    @Override
    public String encodeURL(String url) {
        return this.urlEncoder.encodeURL(url);
    }

    @Override
    public String encodeRedirectURL(String url) {
        return this.urlEncoder.encodeRedirectURL(url);
    }

    @Override
    public String encodeUrl(String url) {
        return this.urlEncoder.encodeUrl(url);
    }

    @Override
    public String encodeRedirectUrl(String url) {
        return this.urlEncoder.encodeRedirectUrl(url);
    }

    @Override
    public void sendError(int sc, String msg) {
        this.status = sc;
        this.statusMessage = msg;
    }

    @Override
    public void sendError(int sc) {
        this.setStatus(sc);
    }

    @Override
    public void sendRedirect(String location) {
        this.status = HttpStatusCode.TEMPORARY_REDIRECT;
        this.headers.put(HttpHeaderName.LOCATION, location);
    }

    @Override
    public void setDateHeader(String name, long date) {
        name = name.toLowerCase();
        this.headers.remove(name);
        this.addDateHeader(name, date);
    }

    @Override
    public void addDateHeader(String name, long date) {
        name = name.toLowerCase();
        this.headers.put(name, httpDateFormat().format(new Date(date)));
    }

    @Override
    public void setHeader(String name, String value) {
        name = name.toLowerCase();
        this.headers.remove(name);
        this.headers.put(name, value);
    }

    @Override
    public void addHeader(String name, String value) {
        name = name.toLowerCase();
        this.headers.put(name, value);
    }

    @Override
    public void setIntHeader(String name, int value) {
        name = name.toLowerCase();
        this.headers.remove(name);
        this.headers.put(name, String.valueOf(value));
    }

    @Override
    public void addIntHeader(String name, int value) {
        name = name.toLowerCase();
        this.headers.put(name, String.valueOf(value));
    }

    @Override
    public void setStatus(int sc) {
        this.status = sc;
    }

    @Override
    public void setStatus(int sc, String sm) {
        this.status = sc;
        this.statusMessage = sm;
    }
}
| apache-2.0 |
george-zhang-work/dove | Dove/reader/src/main/java/com/dove/reader/ui/DrawerFragment.java | 14357 | package com.dove.reader.ui;
import android.app.ActionBar;
import android.app.Activity;
import android.app.Fragment;
import android.app.LoaderManager;
import android.content.Loader;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.database.Cursor;
import android.database.DataSetObserver;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.CursorAdapter;
import android.widget.ListView;
import android.widget.Toast;
import com.dove.common.content.ObjectCursor;
import com.dove.common.content.ObjectCursorLoader;
import com.dove.common.log.LogTag;
import com.dove.reader.R;
import com.dove.reader.provider.ReaderContract;
import java.util.ArrayList;
import java.util.List;
/**
 * Fragment used for managing interactions for and presentation of a navigation drawer.
 * See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
 * design guidelines</a> for a complete explanation of the behaviors implemented here.
 */
public class DrawerFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> {
    private static final String TAG = "DrawerFragment";
    private static final String LOG_TAG = LogTag.getLogTag();
    /**
     * Remember the position of the selected item.
     */
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";
    /**
     * Per the design guidelines, you should show the drawer on launch until the user manually
     * expands it. This shared preference tracks this.
     */
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";
    /**
     * A pointer to this fragment's attached activity.
     */
    private ReaderActivity mActivity;

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public static interface NavigationDrawerCallbacks {

        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);
    }

    /**
     * A pointer to the current callbacks instance (the Activity).
     */
    private NavigationDrawerCallbacks mDrawerCallbacks;
    /**
     * Helper component that ties the action bar to the navigation drawer.
     */
    private ActionBarDrawerToggle mDrawerToggle;
    /**
     * Observer to be notified when the selected account changed.
     */
    private DataSetObserver mAccountObserver;
    /**
     * Observer to be notified when the drawer is operated.
     */
    private DataSetObserver mDrawerObserver;
    private DrawerLayout mDrawerLayout;
    private ListView mListView;
    private DrawerAdapter mDrawerAdapter;
    // the view in the activity layout that hosts this fragment
    private View mFragmentContainerView;
    private int mCurrentSelectedPosition = 0;
    private boolean mFromSavedInstanceState;
    private boolean mUserLearnedDrawer;

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // the hosting activity must be a ReaderActivity implementing the callbacks
        if (!(activity instanceof ReaderActivity)) {
            throw new ClassCastException("DrawerFragment expects only a ReaderActivity " +
                    "to create it. Cannot proceed.");
        }
        mActivity = (ReaderActivity) activity;
        if (!(activity instanceof NavigationDrawerCallbacks)) {
            throw new ClassCastException("ReaderActivity must implement NavigationDrawerCallbacks.");
        }
        mDrawerCallbacks = (NavigationDrawerCallbacks) activity;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Read in the flag indicating whether or not the user has demonstrated awareness of the
        // drawer. See PREF_USER_LEARNED_DRAWER for details.
        // final SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        // mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);
        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
            mFromSavedInstanceState = true;
        }

        // Select either the default item (0) or the last selected item.
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        final View v = inflater.inflate(R.layout.fragment_navigation_drawer, container, false);
        mListView = (ListView) v.findViewById(R.id.list);
        mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                selectItem(position);
            }
        });
        mDrawerAdapter = new DrawerAdapter();
        mListView.setAdapter(mDrawerAdapter);
        mListView.setItemChecked(mCurrentSelectedPosition, true);
        return v;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
        mAccountObserver = new DataSetObserver() {
            @Override
            public void onChanged() {
                super.onChanged();
            }
        };
        mActivity.getAccountObservable().registerObserver(mAccountObserver);
        mDrawerObserver = new DataSetObserver() {
            @Override
            public void onChanged() {
            }
        };
        mActivity.getDrawerObservable().registerObserver(mDrawerObserver);
        if (mActivity.isFinishing()) {
            // Activity is finished, just bail.
            return;
        }
    }

    @Override
    public void onDestroyView() {
        // Remember to unregister the observers.
        if (mAccountObserver != null) {
            mActivity.getAccountObservable().unregisterObserver(mAccountObserver);
            mAccountObserver = null;
        }
        if (mDrawerObserver != null) {
            mActivity.getDrawerObservable().unregisterObserver(mDrawerObserver);
            mDrawerObserver = null;
        }
        super.onDestroyView();
    }

    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;

        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);

        // ActionBarDrawerToggle ties together the the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(), /* host Activity */
                mDrawerLayout, /* DrawerLayout object */
                R.drawable.ic_drawer, /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open, /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                if (!isAdded()) {
                    return;
                }

                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }

                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    SharedPreferences sp = PreferenceManager
                            .getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }

                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }
        };

        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
            mDrawerLayout.openDrawer(mFragmentContainerView);
        }

        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });

        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    // Checks the item in the list, closes the drawer and notifies the activity.
    private void selectItem(int position) {
        mCurrentSelectedPosition = position;
        if (mListView != null) {
            mListView.setItemChecked(position, true);
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mDrawerCallbacks != null) {
            mDrawerCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mDrawerCallbacks = null;
        mActivity = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        if (item.getItemId() == R.id.action_example) {
            Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return getActivity().getActionBar();
    }

    /**
     * List adapter backing the drawer's ListView with {@code DrawerItem}s.
     */
    static class DrawerAdapter extends BaseAdapter {
        CursorAdapter c;
        private List<DrawerItem> mItems;

        public DrawerAdapter() {
            mItems = new ArrayList<DrawerItem>();
        }

        @Override
        public int getCount() {
            return mItems.size();
        }

        @Override
        public Object getItem(int position) {
            return mItems.get(position);
        }

        @Override
        public boolean hasStableIds() {
            return true;
        }

        @Override
        public int getItemViewType(int position) {
            return mItems.get(position).getDrawerItemType();
        }

        @Override
        public int getViewTypeCount() {
            return ReaderContract.DrawerItemTypes.COUNT;
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            // delegate view creation/binding to the item itself
            final DrawerItem item = (DrawerItem) getItem(position);
            return item.getView(position, convertView, parent);
        }

        private void setCursor(ObjectCursor<Content> cursor) {
            cursor.getModel();
        }
    }

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        return new ObjectCursorLoader<Content>(mActivity, Content.FACTORY);
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
        if (mDrawerAdapter != null) {
            final ObjectCursor<Content> cursor = (ObjectCursor<Content>) data;
            mDrawerAdapter.setCursor(cursor);
        }
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        if (mDrawerAdapter != null) {
            mDrawerAdapter.setCursor(null);
        }
    }
}
| apache-2.0 |
java110/MicroCommunity | service-front/src/main/java/com/java110/front/smo/fee/impl/ListPayFeeSMOImpl.java | 2844 | package com.java110.front.smo.fee.impl;
import com.alibaba.fastjson.JSONObject;
import com.java110.core.component.AbstractComponentSMO;
import com.java110.core.context.IPageData;
import com.java110.entity.component.ComponentValidateResult;
import com.java110.front.smo.fee.IListPayFeeSMO;
import com.java110.utils.constant.PrivilegeCodeConstant;
import com.java110.utils.constant.ServiceConstant;
import com.java110.utils.exception.SMOException;
import com.java110.utils.util.Assert;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
/**
 * Front-end service implementation that queries pay-fee lists from the
 * back-end API, routing to a different endpoint depending on the fee
 * payment object type.
 */
@Service("listPayFeeSMOImpl")
public class ListPayFeeSMOImpl extends AbstractComponentSMO implements IListPayFeeSMO {

    @Autowired
    private RestTemplate restTemplate;

    @Override
    public ResponseEntity<String> list(IPageData pd) throws SMOException {
        return businessProcess(pd);
    }

    /**
     * Validates paging info, the presence of {@code communityId} and the
     * caller's LIST_PAY_FEE privilege before the business call is made.
     */
    @Override
    protected void validate(IPageData pd, JSONObject paramIn) {
        super.validatePageInfo(pd);
        Assert.hasKeyAndValue(paramIn, "communityId", "未包含小区信息");

        super.checkUserHasPrivilege(pd, restTemplate, PrivilegeCodeConstant.LIST_PAY_FEE);
    }

    @Override
    protected ResponseEntity<String> doBusinessProcess(IPageData pd, JSONObject paramIn) {
        ComponentValidateResult result = super.validateStoreStaffCommunityRelationship(pd, restTemplate);
        // Map paramMap = BeanConvertUtil.beanCovertMap(result);
        // paramIn.putAll(paramMap);
        int page = paramIn.getInteger("page");
        int row = paramIn.getInteger("row");
        paramIn.put("storeId", result.getStoreId());
        // convert the 1-based page number into a row offset for the API
        paramIn.put("page", (page - 1) * row);
        paramIn.put("row", row);
        String apiUrl = "";
        // route by pay object type: "3333" (or absent) -> property fees,
        // "6666" -> parking space fees, anything else -> generic fee list
        if (!paramIn.containsKey("payObjType") || "3333".equals(paramIn.getString("payObjType"))) {
            apiUrl = ServiceConstant.SERVICE_API_URL + "/api/api.getPropertyPayFee" + mapToUrlParam(paramIn);
        } else if ("6666".equals(paramIn.getString("payObjType"))) {
            apiUrl = ServiceConstant.SERVICE_API_URL + "/api/api.getParkingSpacePayFee" + mapToUrlParam(paramIn);
        } else {
            apiUrl = ServiceConstant.SERVICE_API_URL + "/api/api.getListPayFee" + mapToUrlParam(paramIn);
        }
        ResponseEntity<String> responseEntity = this.callCenterService(restTemplate, pd, "",
                apiUrl,
                HttpMethod.GET);

        return responseEntity;
    }

    public RestTemplate getRestTemplate() {
        return restTemplate;
    }

    public void setRestTemplate(RestTemplate restTemplate) {
        this.restTemplate = restTemplate;
    }
}
| apache-2.0 |
jagheterfredrik/java-idp | src/main/java/edu/internet2/middleware/shibboleth/idp/profile/saml2/SLOProfileHandler.java | 48335 | /*
* Copyright 2009 NIIF Institute.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* under the License.
*/
package edu.internet2.middleware.shibboleth.idp.profile.saml2;
import edu.internet2.middleware.shibboleth.common.profile.ProfileException;
import edu.internet2.middleware.shibboleth.common.profile.provider.BaseSAMLProfileRequestContext;
import edu.internet2.middleware.shibboleth.common.relyingparty.RelyingPartyConfiguration;
import edu.internet2.middleware.shibboleth.common.relyingparty.provider.saml2.LogoutRequestConfiguration;
import edu.internet2.middleware.shibboleth.common.session.SessionManager;
import edu.internet2.middleware.shibboleth.common.util.HttpHelper;
import edu.internet2.middleware.shibboleth.idp.session.Session;
import edu.internet2.middleware.shibboleth.idp.slo.HTTPClientInTransportAdapter;
import edu.internet2.middleware.shibboleth.idp.slo.HTTPClientOutTransportAdapter;
import edu.internet2.middleware.shibboleth.idp.slo.SingleLogoutContext;
import edu.internet2.middleware.shibboleth.idp.slo.SingleLogoutContext.LogoutInformation;
import edu.internet2.middleware.shibboleth.idp.slo.SingleLogoutContextStorageHelper;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.security.GeneralSecurityException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.httpclient.ConnectionPoolTimeoutException;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpConnection;
import org.apache.commons.httpclient.HttpException;
import org.apache.commons.httpclient.HttpState;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.URI;
import org.apache.commons.httpclient.URIException;
import org.apache.commons.httpclient.contrib.ssl.EasySSLProtocolSocketFactory;
import org.apache.commons.httpclient.methods.EntityEnclosingMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.params.HttpConnectionParams;
import org.apache.commons.httpclient.protocol.SecureProtocolSocketFactory;
import org.joda.time.DateTime;
import org.opensaml.common.SAMLObjectBuilder;
import org.opensaml.common.SAMLVersion;
import org.opensaml.common.binding.BasicEndpointSelector;
import org.opensaml.common.binding.BasicSAMLMessageContext;
import org.opensaml.common.binding.decoding.SAMLMessageDecoder;
import org.opensaml.common.binding.encoding.SAMLMessageEncoder;
import org.opensaml.common.xml.SAMLConstants;
import org.opensaml.saml2.binding.decoding.HTTPSOAP11Decoder;
import org.opensaml.saml2.binding.encoding.HTTPSOAP11Encoder;
import org.opensaml.saml2.core.Issuer;
import org.opensaml.saml2.core.LogoutRequest;
import org.opensaml.saml2.core.LogoutResponse;
import org.opensaml.saml2.core.NameID;
import org.opensaml.saml2.core.Status;
import org.opensaml.saml2.core.StatusCode;
import org.opensaml.saml2.core.impl.NameIDImpl;
import org.opensaml.saml2.metadata.AttributeConsumingService;
import org.opensaml.saml2.metadata.Endpoint;
import org.opensaml.saml2.metadata.EntityDescriptor;
import org.opensaml.saml2.metadata.Organization;
import org.opensaml.saml2.metadata.OrganizationDisplayName;
import org.opensaml.saml2.metadata.RoleDescriptor;
import org.opensaml.saml2.metadata.SPSSODescriptor;
import org.opensaml.saml2.metadata.ServiceName;
import org.opensaml.saml2.metadata.SingleLogoutService;
import org.opensaml.saml2.metadata.provider.MetadataProvider;
import org.opensaml.saml2.metadata.provider.MetadataProviderException;
import org.opensaml.ws.message.decoder.MessageDecodingException;
import org.opensaml.ws.message.encoder.MessageEncodingException;
import org.opensaml.ws.soap.client.http.HttpClientBuilder;
import org.opensaml.ws.transport.http.HTTPInTransport;
import org.opensaml.ws.transport.http.HTTPOutTransport;
import org.opensaml.ws.transport.http.HttpServletRequestAdapter;
import org.opensaml.ws.transport.http.HttpServletResponseAdapter;
import org.opensaml.xml.security.SecurityException;
import org.opensaml.xml.security.credential.Credential;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * SAML2 Single Logout (SLO) profile handler.
 *
 * Processes inbound LogoutRequests and LogoutResponses and issues logout
 * requests to every session participant, over the front channel
 * (HTTP-Redirect/POST) or the back channel (SOAP 1.1), tracking per-service
 * results in a {@code SingleLogoutContext}.
 */
public class SLOProfileHandler extends AbstractSAML2ProfileHandler {

    private static final Logger log =
            LoggerFactory.getLogger(SLOProfileHandler.class);
    /** Servlet request attribute marking an IdP-initiated logout. */
    public static final String IDP_INITIATED_LOGOUT_ATTR =
            "IDP_INITIATED_LOGOUT";
    /** Servlet request attribute telling the SLO servlet to skip the logout confirmation page. */
    public static final String SKIP_LOGOUT_QUESTION_ATTR =
            "SKIP_LOGOUT_QUESTION";
    // Cached OpenSAML builders for the message/endpoint types this handler emits.
    private final SAMLObjectBuilder<SingleLogoutService> sloServiceBuilder;
    private final SAMLObjectBuilder<LogoutResponse> responseBuilder;
    private final SAMLObjectBuilder<NameID> nameIDBuilder;
    private final SAMLObjectBuilder<LogoutRequest> requestBuilder;
    private final SAMLObjectBuilder<Issuer> issuerBuilder;
public SLOProfileHandler() {
super();
sloServiceBuilder = (SAMLObjectBuilder<SingleLogoutService>) getBuilderFactory().getBuilder(
SingleLogoutService.DEFAULT_ELEMENT_NAME);
responseBuilder =
(SAMLObjectBuilder<LogoutResponse>) getBuilderFactory().getBuilder(LogoutResponse.DEFAULT_ELEMENT_NAME);
nameIDBuilder =
(SAMLObjectBuilder<NameID>) getBuilderFactory().getBuilder(NameID.DEFAULT_ELEMENT_NAME);
requestBuilder =
(SAMLObjectBuilder<LogoutRequest>) getBuilderFactory().getBuilder(LogoutRequest.DEFAULT_ELEMENT_NAME);
issuerBuilder =
(SAMLObjectBuilder<Issuer>) getBuilderFactory().getBuilder(Issuer.DEFAULT_ELEMENT_NAME);
}
@Override
protected void populateSAMLMessageInformation(BaseSAMLProfileRequestContext requestContext)
throws ProfileException {
if (requestContext.getInboundSAMLMessage() instanceof LogoutRequest) {
LogoutRequest request =
(LogoutRequest) requestContext.getInboundSAMLMessage();
if (request != null) {
request.getSessionIndexes(); //TODO session indexes?
requestContext.setPeerEntityId(request.getIssuer().getValue());
requestContext.setInboundSAMLMessageId(request.getID());
if (request.getNameID() != null) {
requestContext.setSubjectNameIdentifier(request.getNameID());
} else if (request.getEncryptedID() != null) {
requestContext.setSubjectNameIdentifier(request.getEncryptedID());
} else {
throw new ProfileException("Incoming Logout Request did not contain SAML2 NameID.");
}
}
}
}
/** {@inheritDoc} */
@Override
protected void populateRelyingPartyInformation(BaseSAMLProfileRequestContext requestContext)
throws ProfileException {
super.populateRelyingPartyInformation(requestContext);
EntityDescriptor relyingPartyMetadata =
requestContext.getPeerEntityMetadata();
if (relyingPartyMetadata != null) {
requestContext.setPeerEntityRole(SPSSODescriptor.DEFAULT_ELEMENT_NAME);
requestContext.setPeerEntityRoleMetadata(relyingPartyMetadata.getSPSSODescriptor(SAMLConstants.SAML20P_NS));
}
}
@Override
protected Endpoint selectEndpoint(BaseSAMLProfileRequestContext requestContext)
throws ProfileException {
Endpoint endpoint = null;
if (getInboundBinding().equals(SAMLConstants.SAML2_SOAP11_BINDING_URI)) {
endpoint = sloServiceBuilder.buildObject();
endpoint.setBinding(SAMLConstants.SAML2_SOAP11_BINDING_URI);
} else {
BasicEndpointSelector endpointSelector = new BasicEndpointSelector();
endpointSelector.setEndpointType(SingleLogoutService.DEFAULT_ELEMENT_NAME);
endpointSelector.setMetadataProvider(getMetadataProvider());
endpointSelector.setEntityMetadata(requestContext.getPeerEntityMetadata());
endpointSelector.setEntityRoleMetadata(requestContext.getPeerEntityRoleMetadata());
endpointSelector.setSamlRequest(requestContext.getInboundSAMLMessage());
endpointSelector.getSupportedIssuerBindings().addAll(getSupportedOutboundBindings());
endpoint = endpointSelector.selectEndpoint();
}
return endpoint;
}
    /** @return the profile identifier of the SAML2 logout request profile */
    @Override
    public String getProfileId() {
        return LogoutRequestConfiguration.PROFILE_ID;
    }
    /**
     * Front-channel SLO entry point: dispatches the servlet request to the
     * appropriate phase of the logout conversation.
     *
     * Dispatch order by request parameter:
     *  - "SAMLResponse" — process a LogoutResponse returned by an SP;
     *  - "finish"       — close the conversation by answering the original
     *                     LogoutRequest (only when an SP started the logout);
     *  - "action"       — issue a front-channel LogoutRequest to the SP named
     *                     by the "entityID" parameter (set by SLOServlet);
     *  - otherwise      — treat the message as a new inbound LogoutRequest.
     *
     * @param inTransport inbound HTTP transport (servlet request adapter)
     * @param outTransport outbound HTTP transport
     * @throws ProfileException on any processing failure
     */
    public void processRequest(HTTPInTransport inTransport, HTTPOutTransport outTransport)
            throws ProfileException {
        HttpServletRequest servletRequest =
                ((HttpServletRequestAdapter) inTransport).getWrappedRequest();
        // logout state persisted across front-channel round trips
        SingleLogoutContext sloContext =
                SingleLogoutContextStorageHelper.getSingleLogoutContext(servletRequest);
        //TODO RelayState is lost?!
        //TODO catch profileexception and respond with saml error.
        if (servletRequest.getParameter("SAMLResponse") != null) {
            log.debug("Processing incoming SAML LogoutResponse");
            processLogoutResponse(sloContext, inTransport, outTransport);
        } else if (servletRequest.getParameter("finish") != null) { //Front-channel case only
            //TODO this is just a hack
            // a requester entity ID is only present for SP-initiated logout
            if (sloContext.getRequesterEntityID() != null) {
                InitialLogoutRequestContext initialRequest =
                        buildRequestContext(sloContext, inTransport, outTransport);
                respondToInitialRequest(sloContext, initialRequest);
            }
        } else if (servletRequest.getParameter("action") != null) { //Front-channel case only, called by SLOServlet?action
            LogoutInformation nextActive = null;
            //try to retrieve the sp from request parameter
            String spEntityID = servletRequest.getParameter("entityID");
            if (spEntityID != null) {
                spEntityID = spEntityID.trim();
                nextActive = sloContext.getServiceInformation().get(spEntityID);
            }
            if (nextActive == null) {
                throw new ProfileException("Requested SP could not be found");
            }
            if (!nextActive.isLoggedIn()) {
                throw new ProfileException("Already attempted to log out this service");
            }
            initiateFrontChannelLogout(sloContext, nextActive, outTransport);
        } else {
            processLogoutRequest(inTransport, outTransport);
        }
    }
    /**
     * Tries to decode an inbound SAML2 LogoutResponse and records the
     * per-service logout outcome in the logout context.
     *
     * @param sloContext logout state for the ongoing conversation
     * @param inTransport inbound transport carrying the response
     * @param outTransport outbound transport (not used by this method)
     * @return true when a LogoutResponse was decoded and processed; false when
     *         the inbound message is not a LogoutResponse (e.g. it is a LogoutRequest)
     * @throws ProfileException on decode/validation failure, an unknown issuer,
     *         or a response that does not correlate with the issued request
     */
    protected boolean processLogoutResponse(SingleLogoutContext sloContext,
            HTTPInTransport inTransport, HTTPOutTransport outTransport)
            throws ProfileException {
        LogoutRequestContext requestCtx = new LogoutRequestContext();
        requestCtx.setInboundMessageTransport(inTransport);
        SAMLMessageDecoder decoder =
                getMessageDecoders().get(getInboundBinding());
        LogoutResponse logoutResponse;
        try {
            decoder.decode(requestCtx);
            logoutResponse = requestCtx.getInboundSAMLMessage();
        } catch (MessageDecodingException ex) {
            log.warn("Cannot decode LogoutResponse", ex);
            throw new ProfileException(ex);
        } catch (SecurityException ex) {
            log.warn("Exception while validating LogoutResponse", ex);
            throw new ProfileException(ex);
        } catch (ClassCastException ex) {
            log.debug("Cannot decode LogoutResponse", ex);
            //this is the case when inbound message is LogoutRequest, so return silently
            return false;
        }
        String inResponseTo = logoutResponse.getInResponseTo();
        String spEntityID = requestCtx.getInboundMessageIssuer();
        log.debug("Received response from '{}' to request '{}'", spEntityID, inResponseTo);
        LogoutInformation serviceLogoutInfo =
                sloContext.getServiceInformation().get(spEntityID);
        if (serviceLogoutInfo == null) {
            throw new ProfileException("LogoutResponse issuer is unknown");
        }
        // the response must correlate with the LogoutRequest previously sent to this SP
        if (!serviceLogoutInfo.getLogoutRequestId().equals(inResponseTo)) {
            serviceLogoutInfo.setLogoutFailed();
            throw new ProfileException("LogoutResponse InResponseTo does not match the LogoutRequest ID");
        }
        log.info("Logout status is '{}'", logoutResponse.getStatus().getStatusCode().getValue().toString());
        if (logoutResponse.getStatus().getStatusCode().getValue().equals(StatusCode.SUCCESS_URI)) {
            serviceLogoutInfo.setLogoutSucceeded();
        } else {
            serviceLogoutInfo.setLogoutFailed();
        }
        return true;
    }
    /**
     * Handles a new logout request, either IdP-initiated (signalled through the
     * {@link #IDP_INITIATED_LOGOUT_ATTR} servlet attribute) or SP-initiated
     * (an inbound SAML2 LogoutRequest to decode).
     *
     * Resolves the principal's IdP session (for SP-initiated logout falling
     * back to a NameID-indexed session lookup), builds the logout context,
     * destroys the session, and then either runs back-channel (SOAP) logout and
     * responds immediately, or forwards to the SLO servlet for the
     * front-channel flow.
     *
     * @param inTransport inbound HTTP transport
     * @param outTransport outbound HTTP transport
     * @throws ProfileException when the session cannot be found, the requester
     *         is not a session participant, or forwarding to the servlet fails
     */
    protected void processLogoutRequest(HTTPInTransport inTransport, HTTPOutTransport outTransport)
            throws ProfileException {
        HttpServletRequest servletRequest =
                ((HttpServletRequestAdapter) inTransport).getWrappedRequest();
        Session idpSession = getUserSession(inTransport);
        boolean idpInitiatedLogout =
                servletRequest.getAttribute(IDP_INITIATED_LOGOUT_ATTR) != null;
        InitialLogoutRequestContext initialRequest = null;
        if (idpInitiatedLogout) {
            //idp initiated logout
            log.info("Starting the IdP-initiated logout process");
            initialRequest = createInitialLogoutRequestContext();
            initialRequest.setInboundMessageTransport(inTransport);
            servletRequest.setAttribute(SKIP_LOGOUT_QUESTION_ATTR, true);
        } else {
            //sp initiated logout
            initialRequest = new InitialLogoutRequestContext();
            log.info("Processing incoming LogoutRequest");
            decodeRequest(initialRequest, inTransport, outTransport);
            checkSamlVersion(initialRequest);
            //if session is null, try to find nameid-bound one
            if (idpSession == null) {
                NameID nameID =
                        initialRequest.getInboundSAMLMessage().getNameID();
                SessionManager<Session> sessionManager = getSessionManager();
                String nameIDIndex = sessionManager.getIndexFromNameID(nameID);
                log.info("Session not found in request, trying to resolve session from NameID '{}'",
                        nameIDIndex);
                idpSession = sessionManager.getSession(nameIDIndex);
            }
        }
        if (idpSession == null) {
            log.warn("Cannot find IdP Session");
            initialRequest.setFailureStatus(buildStatus(StatusCode.RESPONDER_URI, StatusCode.UNKNOWN_PRINCIPAL_URI, null));
            throw new ProfileException("Cannot find IdP Session for principal");
        }
        // an SP may only request logout for a session it participates in
        if (!idpInitiatedLogout
                && !idpSession.getServicesInformation().keySet().
                contains(initialRequest.getInboundMessageIssuer())) {
            String msg = "Requesting entity is not session participant";
            log.warn(msg);
            initialRequest.setFailureStatus(buildStatus(StatusCode.REQUESTER_URI, StatusCode.REQUEST_DENIED_URI, msg));
            throw new ProfileException(msg);
        }
        SingleLogoutContext sloContext =
                buildSingleLogoutContext(initialRequest, idpSession);
        destroySession(sloContext);
        if (getInboundBinding().equals(SAMLConstants.SAML2_SOAP11_BINDING_URI)) {
            // SOAP inbound: everything happens on the back channel, respond at once
            log.info("Issuing Backchannel logout requests");
            initiateBackChannelLogout(sloContext);
            respondToInitialRequest(sloContext, initialRequest);
        } else {
            //skip logout question if the requesting sp is the only session participant
            if (!idpInitiatedLogout && sloContext.getServiceInformation().size() == 1) {
                servletRequest.setAttribute(SKIP_LOGOUT_QUESTION_ATTR, true);
            }
            HttpServletResponse servletResponse =
                    ((HttpServletResponseAdapter) outTransport).getWrappedResponse();
            SingleLogoutContextStorageHelper.bindSingleLogoutContext(sloContext, servletRequest);
            populateServiceDisplayNames(sloContext);
            try {
                servletRequest.getRequestDispatcher("/SLOServlet").forward(servletRequest, servletResponse);
            } catch (ServletException ex) {
                String msg = "Cannot forward request to SLO Servlet";
                log.error(msg, ex);
                initialRequest.setFailureStatus(buildStatus(StatusCode.RESPONDER_URI, null, msg));
                throw new ProfileException(ex);
            } catch (IOException ex) {
                String msg = "Cannot forward request to SLO Servlet";
                log.error(msg, ex);
                initialRequest.setFailureStatus(buildStatus(StatusCode.RESPONDER_URI, null, msg));
                throw new ProfileException(ex);
            }
        }
    }
/**
* Issue back-channel logout requests to all session participants.
*
* @param idpSession
* @return
* @throws ProfileException
*/
public SingleLogoutContext administrativeLogout(Session idpSession) throws ProfileException {
log.info("Administratively logging out user '{}'", idpSession.getPrincipalName());
InitialLogoutRequestContext initialRequest = createInitialLogoutRequestContext();
SingleLogoutContext sloContext = SingleLogoutContext.createInstance(null, initialRequest, idpSession);
try {
initiateBackChannelLogout(sloContext);
} catch (ProfileException e) {
log.error("Exception was caught while administratively logging out user '{}'",
idpSession.getPrincipalName(), e);
}
destroySession(sloContext);
return sloContext;
}
/**
* Creates SAML2 LogoutRequest and corresponding context.
*
* @param sloContext
* @param serviceLogoutInfo
* @param endpoint
* @return
*/
private LogoutRequestContext createLogoutRequestContext(
SingleLogoutContext sloContext,
LogoutInformation serviceLogoutInfo,
Endpoint endpoint) {
String spEntityID = serviceLogoutInfo.getEntityID();
log.debug("Trying SP: {}", spEntityID);
LogoutRequest request = buildLogoutRequest(sloContext);
serviceLogoutInfo.setLogoutRequestId(request.getID());
NameID nameId = buildNameID(serviceLogoutInfo);
if (nameId == null) {
log.info("NameID is null, cannot crete logout request context");
return null;
}
request.setNameID(nameId);
request.setDestination(endpoint.getLocation());
LogoutRequestContext requestCtx = new LogoutRequestContext();
requestCtx.setCommunicationProfileId(getProfileId());
requestCtx.setSecurityPolicyResolver(getSecurityPolicyResolver());
requestCtx.setOutboundMessageIssuer(sloContext.getResponderEntityID());
requestCtx.setInboundMessageIssuer(spEntityID);
requestCtx.setPeerEntityEndpoint(endpoint);
requestCtx.setPeerEntityRole(SPSSODescriptor.DEFAULT_ELEMENT_NAME);
//TODO get credential configured for relying party
Credential signingCredential =
getRelyingPartyConfigurationManager().
getDefaultRelyingPartyConfiguration().getDefaultSigningCredential();
requestCtx.setOutboundSAMLMessageSigningCredential(signingCredential);
requestCtx.setOutboundSAMLMessage(request);
return requestCtx;
}
/**
* Destroy idp session.
*
* @param sloContext
*/
private void destroySession(SingleLogoutContext sloContext) {
log.info("Invalidating session '{}'.", sloContext.getIdpSessionID());
getSessionManager().destroySession(sloContext.getIdpSessionID());
}
/**
* Issues back channel logout request to every session participant.
*
* @param sloContext
* @throws ProfileException
*/
private void initiateBackChannelLogout(SingleLogoutContext sloContext) throws ProfileException {
for (LogoutInformation serviceLogoutInfo : sloContext.getServiceInformation().values()) {
if (serviceLogoutInfo.isLoggedIn()) {
try {
initiateBackChannelLogout(sloContext, serviceLogoutInfo);
} catch (ProfileException ex) {
log.warn("Caught exception while trying to issue LogoutRequest to '{}'",
serviceLogoutInfo.getEntityID(), ex);
serviceLogoutInfo.setLogoutFailed();
}
}
}
}
    /**
     * Issues a back-channel (SOAP 1.1) logout request to one session
     * participant and records the outcome on {@code serviceLogoutInfo}:
     * unsupported when no SOAP endpoint exists, succeeded/failed according to
     * the SP's LogoutResponse status, failed on any transport error.
     *
     * @param sloContext single logout state
     * @param serviceLogoutInfo per-service logout state, updated in place
     * @throws ProfileException on message encoding/decoding failures raised by callees
     */
    private void initiateBackChannelLogout(SingleLogoutContext sloContext, LogoutInformation serviceLogoutInfo)
            throws ProfileException {
        if (!serviceLogoutInfo.isLoggedIn()) {
            log.info("Logout status for entity is '{}', not attempting logout", serviceLogoutInfo.getLogoutStatus().toString());
            return;
        }
        String spEntityID = serviceLogoutInfo.getEntityID();
        Endpoint endpoint =
                getEndpointForBinding(spEntityID, SAMLConstants.SAML2_SOAP11_BINDING_URI);
        if (endpoint == null) {
            log.info("No SAML2 LogoutRequest SOAP endpoint found for entity '{}'", spEntityID);
            serviceLogoutInfo.setLogoutUnsupported();
            return;
        }
        serviceLogoutInfo.setLogoutAttempted();
        LogoutRequestContext requestCtx =
                createLogoutRequestContext(sloContext, serviceLogoutInfo, endpoint);
        if (requestCtx == null) {
            log.info("Cannot create LogoutRequest Context for entity '{}'", spEntityID);
            serviceLogoutInfo.setLogoutFailed();
            return;
        }
        HttpConnection httpConn = null;
        try {
            //prepare http message exchange for soap
            log.debug("Preparing HTTP transport for SOAP request");
            httpConn = createHttpConnection(serviceLogoutInfo, endpoint);
            if (httpConn == null) {
                log.warn("Unable to acquire usable http connection from the pool");
                serviceLogoutInfo.setLogoutFailed();
                return;
            }
            log.debug("Opening HTTP connection to '{}'", endpoint.getLocation());
            httpConn.open();
            if (!httpConn.isOpen()) {
                log.warn("HTTP connection could not be opened");
                serviceLogoutInfo.setLogoutFailed();
                return;
            }
            log.debug("Preparing transports and encoders/decoders");
            prepareSOAPTransport(requestCtx, httpConn, endpoint);
            SAMLMessageEncoder encoder = new HTTPSOAP11Encoder();
            SAMLMessageDecoder decoder =
                    new HTTPSOAP11Decoder(getParserPool());
            //encode and sign saml request
            encoder.encode(requestCtx);
            //TODO: audit log is still missing
            log.info("Issuing back-channel logout request to SP '{}'", spEntityID);
            //execute SOAP/HTTP call
            log.debug("Executing HTTP POST");
            if (!requestCtx.execute(httpConn)) {
                log.warn("Logout execution failed on SP '{}', HTTP status is '{}'",
                        spEntityID, requestCtx.getHttpStatus());
                serviceLogoutInfo.setLogoutFailed();
                return;
            }
            //decode saml response
            decoder.decode(requestCtx);
            LogoutResponse spResponse = requestCtx.getInboundSAMLMessage();
            StatusCode statusCode = spResponse.getStatus().getStatusCode();
            if (statusCode.getValue().equals(StatusCode.SUCCESS_URI)) {
                log.info("Logout was successful on SP '{}'.", spEntityID);
                serviceLogoutInfo.setLogoutSucceeded();
            } else {
                log.warn("Logout failed on SP '{}', logout status code is '{}'.", spEntityID, statusCode.getValue());
                StatusCode secondaryCode = statusCode.getStatusCode();
                if (secondaryCode != null) {
                    log.warn("Additional status code: '{}'", secondaryCode.getValue());
                }
                serviceLogoutInfo.setLogoutFailed();
            }
        } catch (SocketTimeoutException e) { //socket connect or read timeout
            log.info("Socket timeout while sending SOAP request to SP '{}'",
                    serviceLogoutInfo.getEntityID());
            serviceLogoutInfo.setLogoutFailed();
        } catch (IOException e) { //other networking error
            log.info("IOException caught while sending SOAP request", e);
            serviceLogoutInfo.setLogoutFailed();
        } catch (Throwable t) { //unexpected
            log.error("Unexpected exception caught while sending SAML Logout request", t);
            serviceLogoutInfo.setLogoutFailed();
        } finally { // always release the pooled connection, even on failure
            requestCtx.releaseConnection();
            if (httpConn != null && httpConn.isOpen()) {
                log.debug("Closing HTTP connection");
                try {
                    httpConn.close();
                } catch (Throwable t) {
                    log.warn("Caught exception while closing HTTP Connection", t);
                }
            }
        }
    }
private InitialLogoutRequestContext createInitialLogoutRequestContext() {
InitialLogoutRequestContext initialRequest = new InitialLogoutRequestContext();
RelyingPartyConfiguration defaultRPC =
getRelyingPartyConfigurationManager().getDefaultRelyingPartyConfiguration();
initialRequest.setLocalEntityId(defaultRPC.getProviderId());
initialRequest.setProfileConfiguration(
(LogoutRequestConfiguration) defaultRPC.getProfileConfiguration(getProfileId()));
return initialRequest;
}
/**
* Reads SAML2 SingleLogoutService endpoint of the entity or
* null if no metadata or endpoint found.
*
* @param spEntityID
* @param bindingURI which binding to use
* @return
*/
private Endpoint getEndpointForBinding(String spEntityID, String bindingURI) {
RoleDescriptor spMetadata = null;
try {
//retrieve metadata
spMetadata =
getMetadataProvider().getRole(spEntityID, SPSSODescriptor.DEFAULT_ELEMENT_NAME, SAMLConstants.SAML20P_NS);
if (spMetadata == null) {
log.warn("SP Metadata is null");
return null;
}
} catch (MetadataProviderException ex) {
log.info("Cannot get SAML2 metadata for SP '{}'.", spEntityID);
return null;
}
//find endpoint for SingleLogoutService
BasicEndpointSelector es = new BasicEndpointSelector();
es.setEndpointType(SingleLogoutService.DEFAULT_ELEMENT_NAME);
es.setMetadataProvider(getMetadataProvider());
es.getSupportedIssuerBindings().add(bindingURI);
es.setEntityRoleMetadata(spMetadata);
Endpoint endpoint = es.selectEndpoint();
if (endpoint == null) {
log.info("Cannot get SAML2 SingleLogoutService endpoint for SP '{}' and binding '{}'.", spEntityID, bindingURI);
return null;
}
return endpoint;
}
/**
* Builds NameID for the principal and the SP.
*
* TODO support encrypted nameid?
*
* @param serviceLogoutInfo
* @return
*/
private NameID buildNameID(LogoutInformation serviceLogoutInfo) {
if (serviceLogoutInfo.getNameIdentifier() == null) {
return null;
}
NameID nameId = nameIDBuilder.buildObject();
nameId.setFormat(serviceLogoutInfo.getNameIdentifierFormat());
nameId.setValue(serviceLogoutInfo.getNameIdentifier());
nameId.setNameQualifier(serviceLogoutInfo.getNameQualifier());
nameId.setSPNameQualifier(serviceLogoutInfo.getSPNameQualifier());
return nameId;
}
/**
* Build SAML request for issuing LogoutRequest.
*
* @param sloContext
* @param spEntityID
* @return
*/
private LogoutRequest buildLogoutRequest(SingleLogoutContext sloContext) {
LogoutRequest request = requestBuilder.buildObject();
//build saml request
DateTime issueInstant = new DateTime();
request.setIssueInstant(issueInstant);
request.setID(getIdGenerator().generateIdentifier());
request.setVersion(SAMLVersion.VERSION_20);
Issuer issuer = issuerBuilder.buildObject();
issuer.setValue(sloContext.getResponderEntityID());
request.setIssuer(issuer);
return request;
}
/**
* Populate service display names from metadata.
* This method must be called once.
*
* @param sloContext
*/
private void populateServiceDisplayNames(SingleLogoutContext sloContext) {
MetadataProvider mdProvider = getMetadataProvider();
for (LogoutInformation serviceInfo : sloContext.getServiceInformation().values()) {
EntityDescriptor spMetadata;
String spEntityID = serviceInfo.getEntityID();
try {
spMetadata = mdProvider.getEntityDescriptor(spEntityID);
} catch (MetadataProviderException ex) {
log.warn("Can not get metadata for relying party '{}'", spEntityID);
continue;
}
Map<String, String> serviceNames = extractServiceNames(spMetadata);
if (serviceNames != null && serviceNames.size() > 0) {
serviceInfo.setDisplayName(serviceNames);
} else {
Map<String, String> organizationDNames = extractOrganizationDisplayNames(spMetadata);
if (organizationDNames != null && organizationDNames.size() > 0) {
serviceInfo.setDisplayName(organizationDNames);
}
}
}
}
/**
* Extracts ServiceName information from SP Entity Descriptor.
*
* @param spMetadata
* @return
*/
private Map<String, String> extractServiceNames(EntityDescriptor spMetadata) {
String spEntityID = spMetadata.getEntityID();
SPSSODescriptor spDescr = spMetadata.getSPSSODescriptor(SAMLConstants.SAML20P_NS);
if (spDescr == null) {
log.debug("No SAML SPSSODescriptor found for relying party '{}'", spEntityID);
return null;
}
AttributeConsumingService attrCs = spDescr.getDefaultAttributeConsumingService();
if (attrCs == null) {
List<AttributeConsumingService> attrCSList = spDescr.getAttributeConsumingServices();
if (attrCSList != null && !attrCSList.isEmpty()) {
attrCs = attrCSList.get(0);
}
}
if (attrCs == null) {
log.debug("No AttributeConsumingService found for relying party '{}'", spEntityID);
return null;
}
List<ServiceName> sNameList = attrCs.getNames();
if (sNameList == null) {
log.debug("No ServiceName found for relying party '{}'", spEntityID);
return null;
}
Map<String, String> serviceNames =
new HashMap<String, String>(sNameList.size());
for (ServiceName sName : sNameList) {
serviceNames.put(sName.getName().getLanguage(), sName.getName().getLocalString());
}
return serviceNames;
}
/**
* Extracts OrganizationDisplayName information from SP Entity Descriptor.
*
* @param spMetadata
* @return
*/
private Map<String, String> extractOrganizationDisplayNames(EntityDescriptor spMetadata) {
String spEntityID = spMetadata.getEntityID();
Organization spOrg = spMetadata.getOrganization();
if (spOrg == null) {
log.debug("Organization is not set for relying party '{}'", spEntityID);
return null;
}
List<OrganizationDisplayName> dNameList =
spOrg.getDisplayNames();
if (dNameList == null) {
log.debug("DisplayName is unset for relying party '{}'", spEntityID);
return null;
}
Map<String, String> oDNames = new HashMap<String, String>(dNameList.size());
for (OrganizationDisplayName dName : dNameList) {
oDNames.put(dName.getName().getLanguage(), dName.getName().getLocalString());
}
return oDNames;
}
    /**
     * Acquires a pooled HTTP connection to the SP's SOAP endpoint, configured
     * with the relying party's back-channel timeouts (pool wait, connect,
     * response).
     *
     * NOTE(review): EasySSLProtocolSocketFactory typically accepts any server
     * certificate — confirm this relaxed TLS validation is intended for
     * back-channel logout.
     *
     * @param serviceLogoutInfo per-service logout state (used to look up profile configuration)
     * @param endpoint destination endpoint whose location determines the host
     * @return a configured connection, or null when the pool wait times out
     * @throws URIException if the endpoint location is not a valid URI
     * @throws GeneralSecurityException on socket factory initialization failure
     * @throws IOException on other I/O failures
     */
    private HttpConnection createHttpConnection(
            LogoutInformation serviceLogoutInfo, Endpoint endpoint)
            throws URIException, GeneralSecurityException, IOException {
        HttpClientBuilder httpClientBuilder =
                new HttpClientBuilder();
        httpClientBuilder.setContentCharSet("UTF-8");
        SecureProtocolSocketFactory sf = new EasySSLProtocolSocketFactory();
        httpClientBuilder.setHttpsProtocolSocketFactory(sf);
        //build http connection
        HttpClient httpClient = httpClientBuilder.buildClient();
        HostConfiguration hostConfig = new HostConfiguration();
        URI location = new URI(endpoint.getLocation());
        hostConfig.setHost(location);
        LogoutRequestConfiguration config = (LogoutRequestConfiguration) getProfileConfiguration(
                serviceLogoutInfo.getEntityID(), getProfileId());
        if (log.isDebugEnabled()) {
            log.debug("Creating new HTTP connection with the following timeouts:");
            log.debug("Maximum waiting time for the connection pool is {}",
                    config.getBackChannelConnectionPoolTimeout());
            log.debug("Timeout for connection establishment is {}",
                    config.getBackChannelConnectionTimeout());
            log.debug("Timeout for soap response is {}",
                    config.getBackChannelResponseTimeout());
        }
        HttpConnection httpConn = null;
        try {
            httpConn = httpClient.getHttpConnectionManager().
                    getConnectionWithTimeout(hostConfig, config.getBackChannelConnectionPoolTimeout());
        } catch (ConnectionPoolTimeoutException e) {
            // no free connection within the configured pool timeout
            return null;
        }
        HttpConnectionParams params = new HttpConnectionParams();
        params.setConnectionTimeout(config.getBackChannelConnectionTimeout());
        params.setSoTimeout(config.getBackChannelResponseTimeout());
        httpConn.setParams(params);
        return httpConn;
    }
/**
* Adapts SOAP/HTTP client transport to SAML transports.
* @param requestCtx
* @param httpConn
* @param endpoint
*/
private void prepareSOAPTransport(LogoutRequestContext requestCtx,
HttpConnection httpConn, Endpoint endpoint) {
EntityEnclosingMethod method =
new PostMethod(endpoint.getLocation());
requestCtx.setPostMethod(method);
HTTPOutTransport soapOutTransport =
new HTTPClientOutTransportAdapter(httpConn, method);
HTTPInTransport soapInTransport =
new HTTPClientInTransportAdapter(httpConn, method);
requestCtx.setOutboundMessageTransport(soapOutTransport);
requestCtx.setInboundMessageTransport(soapInTransport);
}
    /**
     * Issues a front-channel logout request to one session participant,
     * preferring HTTP-Redirect, then HTTP-POST; when neither is available,
     * falls back to a back-channel SOAP request, and marks the service
     * unsupported when no endpoint exists at all.
     *
     * @param sloContext single logout state
     * @param serviceLogoutInfo per-service logout state, updated in place
     * @param outTransport outbound HTTP transport used to encode the request
     * @throws ProfileException on failures raised while building the request
     */
    private void initiateFrontChannelLogout(
            SingleLogoutContext sloContext,
            LogoutInformation serviceLogoutInfo,
            HTTPOutTransport outTransport)
            throws ProfileException {
        if (!serviceLogoutInfo.isLoggedIn()) {
            log.info("Logout status for entity is '{}', not attempting logout", serviceLogoutInfo.getLogoutStatus().toString());
            return;
        }
        String spEntityID = serviceLogoutInfo.getEntityID();
        //prefer HTTP-Redirect binding
        Endpoint endpoint =
                getEndpointForBinding(spEntityID, SAMLConstants.SAML2_REDIRECT_BINDING_URI);
        if (endpoint == null) {
            //fallback to HTTP-POST when no HTTP-Redirect is set
            endpoint =
                    getEndpointForBinding(spEntityID, SAMLConstants.SAML2_POST_BINDING_URI);
        }
        if (endpoint == null) {
            log.info("No SAML2 LogoutRequest front-channel endpoint found for entity '{}'", spEntityID);
            endpoint =
                    getEndpointForBinding(spEntityID, SAMLConstants.SAML2_SOAP11_BINDING_URI);
            if (endpoint != null) {
                //fallback to SOAP1.1 when no HTTP-POST is set
                initiateBackChannelLogout(sloContext, serviceLogoutInfo);
            } else {
                //no supported endpoints found
                serviceLogoutInfo.setLogoutUnsupported();
            }
            return;
        }
        SAMLMessageEncoder encoder =
                getMessageEncoders().get(endpoint.getBinding());
        if (encoder == null) {
            log.warn("No message encoder found for binding '{}'", endpoint.getBinding());
            serviceLogoutInfo.setLogoutUnsupported();
            return;
        }
        serviceLogoutInfo.setLogoutAttempted();
        LogoutRequestContext requestCtx =
                createLogoutRequestContext(sloContext, serviceLogoutInfo, endpoint);
        if (requestCtx == null) {
            log.info("Cannot create LogoutRequest Context for entity '{}'", spEntityID);
            serviceLogoutInfo.setLogoutFailed();
            return;
        }
        requestCtx.setOutboundMessageTransport(outTransport);
        try {
            // encodes and sends the request through the user agent
            encoder.encode(requestCtx);
        } catch (MessageEncodingException ex) {
            log.warn("Cannot encode LogoutRequest", ex);
            serviceLogoutInfo.setLogoutFailed();
            return;
        }
    }
/**
* Respond to LogoutRequest.
*
* @param sloContext
* @param initialRequest
* @throws ProfileException
*/
protected void respondToInitialRequest(SingleLogoutContext sloContext, InitialLogoutRequestContext initialRequest)
throws ProfileException {
boolean success = true;
for (SingleLogoutContext.LogoutInformation info : sloContext.getServiceInformation().values()) {
if (!info.getLogoutStatus().equals(SingleLogoutContext.LogoutStatus.LOGOUT_SUCCEEDED)) {
success = false;
}
}
Status status;
if (success) {
log.info("Status of Single Log-out: success");
status = buildStatus(StatusCode.SUCCESS_URI, null, null);
} else {
log.info("Status of Single Log-out: partial");
status =
buildStatus(StatusCode.SUCCESS_URI, StatusCode.PARTIAL_LOGOUT_URI, null);
}
LogoutResponse samlResponse =
buildLogoutResponse(initialRequest, status);
populateRelyingPartyInformation(initialRequest);
Endpoint endpoint = selectEndpoint(initialRequest);
initialRequest.setPeerEntityEndpoint(endpoint);
initialRequest.setOutboundSAMLMessage(samlResponse);
initialRequest.setOutboundSAMLMessageId(samlResponse.getID());
initialRequest.setOutboundSAMLMessageIssueInstant(samlResponse.getIssueInstant());
Credential signingCredential =
initialRequest.getProfileConfiguration().getSigningCredential();
if (signingCredential == null) {
initialRequest.getRelyingPartyConfiguration().getDefaultSigningCredential();
}
initialRequest.setOutboundSAMLMessageSigningCredential(signingCredential);
log.debug("Sending response to the original LogoutRequest");
encodeResponse(initialRequest);
writeAuditLogEntry(initialRequest);
}
/**
* Builds new single log-out context for session store between logout events.
*
* @param initialRequest
* @param idpSession
* @return
*/
private SingleLogoutContext buildSingleLogoutContext(InitialLogoutRequestContext initialRequest, Session idpSession) {
HttpServletRequest servletRequest =
((HttpServletRequestAdapter) initialRequest.getInboundMessageTransport()).getWrappedRequest();
return SingleLogoutContext.createInstance(
HttpHelper.getRequestUriWithoutContext(servletRequest),
initialRequest,
idpSession);
}
/**
* Builds request context from information available after logout events.
*
* @param sloContext
* @return
*/
protected InitialLogoutRequestContext buildRequestContext(SingleLogoutContext sloContext,
HTTPInTransport in, HTTPOutTransport out) throws ProfileException {
InitialLogoutRequestContext initialRequest = new InitialLogoutRequestContext();
initialRequest.setCommunicationProfileId(getProfileId());
initialRequest.setMessageDecoder(getMessageDecoders().get(getInboundBinding()));
initialRequest.setInboundMessageTransport(in);
initialRequest.setInboundSAMLProtocol(SAMLConstants.SAML20P_NS);
initialRequest.setOutboundMessageTransport(out);
initialRequest.setOutboundSAMLProtocol(SAMLConstants.SAML20P_NS);
initialRequest.setMetadataProvider(getMetadataProvider());
initialRequest.setInboundSAMLMessageId(sloContext.getRequestSAMLMessageID());
initialRequest.setInboundMessageIssuer(sloContext.getRequesterEntityID());
initialRequest.setLocalEntityId(sloContext.getResponderEntityID());
initialRequest.setPeerEntityId(sloContext.getRequesterEntityID());
initialRequest.setSecurityPolicyResolver(getSecurityPolicyResolver());
initialRequest.setProfileConfiguration(
(LogoutRequestConfiguration) getProfileConfiguration(
sloContext.getRequesterEntityID(), getProfileId()));
initialRequest.setRelyingPartyConfiguration(
getRelyingPartyConfiguration(sloContext.getRequesterEntityID()));
return initialRequest;
}
/**
* Builds Logout Response.
*
* @param initialRequest
* @return
* @throws edu.internet2.middleware.shibboleth.common.profile.ProfileException
*/
protected LogoutResponse buildLogoutResponse(
BaseSAML2ProfileRequestContext<?, ?, ?> initialRequest,
Status status)
throws ProfileException {
DateTime issueInstant = new DateTime();
LogoutResponse logoutResponse = responseBuilder.buildObject();
logoutResponse.setIssueInstant(issueInstant);
populateStatusResponse(initialRequest, logoutResponse);
logoutResponse.setStatus(status);
return logoutResponse;
}
    /**
     * Decodes an incoming logout request and populates the supplied request
     * context with the resultant information.
     *
     * <p>Note: {@code populateRequestContext} runs in a {@code finally} block,
     * so partial context information is preserved even when decoding fails.
     *
     * @param initialRequest request context to which decoded information should be added
     * @param inTransport inbound message transport
     * @param outTransport outbound message transport
     *
     * @throws ProfileException thrown if there is a problem decoding the request
     */
    protected void decodeRequest(InitialLogoutRequestContext initialRequest,
            HTTPInTransport inTransport, HTTPOutTransport outTransport)
            throws ProfileException {
        log.debug("Decoding message with decoder binding '{}'", getInboundBinding());
        // Prime the context with transport/protocol/security configuration
        // before handing it to the decoder.
        initialRequest.setCommunicationProfileId(getProfileId());
        MetadataProvider metadataProvider = getMetadataProvider();
        initialRequest.setMetadataProvider(metadataProvider);
        initialRequest.setInboundMessageTransport(inTransport);
        initialRequest.setInboundSAMLProtocol(SAMLConstants.SAML20P_NS);
        initialRequest.setSecurityPolicyResolver(getSecurityPolicyResolver());
        initialRequest.setPeerEntityRole(SPSSODescriptor.DEFAULT_ELEMENT_NAME);
        initialRequest.setOutboundMessageTransport(outTransport);
        initialRequest.setOutboundSAMLProtocol(SAMLConstants.SAML20P_NS);
        try {
            SAMLMessageDecoder decoder =
                    getInboundMessageDecoder(null);
            initialRequest.setMessageDecoder(decoder);
            decoder.decode(initialRequest);
            log.debug("Decoded request from relying party '{}'", initialRequest.getInboundMessage());
            //TODO
            /*if (!(initialRequest.getInboundSAMLMessage() instanceof LogoutRequest)) {
                log.warn("Incoming message was not a LogoutRequest, it was a {}", initialRequest.getInboundSAMLMessage().getClass().getName());
                initialRequest.setFailureStatus(buildStatus(StatusCode.REQUESTER_URI, null,
                        "Invalid SAML LogoutRequest message."));
                throw new ProfileException("Invalid SAML LogoutRequest message.");
            }*/
        } catch (MessageDecodingException e) {
            // NOTE(review): unlike the SecurityException branch below, the
            // cause 'e' is not attached to the rethrown ProfileException here
            // (only logged) — confirm whether that asymmetry is intentional.
            String msg = "Error decoding logout request message";
            log.warn(msg, e);
            initialRequest.setFailureStatus(buildStatus(StatusCode.RESPONDER_URI, null, msg));
            throw new ProfileException(msg);
        } catch (SecurityException e) {
            String msg = "Message did not meet security requirements";
            log.warn(msg, e);
            initialRequest.setFailureStatus(buildStatus(StatusCode.RESPONDER_URI, StatusCode.REQUEST_DENIED_URI, msg));
            throw new ProfileException(msg, e);
        } finally {
            // Set as much information as can be retrieved from the decoded message
            populateRequestContext(initialRequest);
        }
    }
    /**
     * Request context for the initial stage of a single logout exchange,
     * typing the message pair as LogoutRequest/LogoutResponse with the
     * relying party's LogoutRequestConfiguration. No members are added.
     */
    public class InitialLogoutRequestContext
            extends BaseSAML2ProfileRequestContext<LogoutRequest, LogoutResponse, LogoutRequestConfiguration> {
    }
public class LogoutRequestContext
extends BasicSAMLMessageContext<LogoutResponse, LogoutRequest, NameIDImpl> {
EntityEnclosingMethod postMethod;
EntityEnclosingMethod getPostMethod() {
return postMethod;
}
void setPostMethod(EntityEnclosingMethod postMethod) {
this.postMethod = postMethod;
}
boolean execute(HttpConnection conn) throws HttpException,
IOException {
return postMethod.execute(new HttpState(), conn) == HttpStatus.SC_OK;
}
String getHttpStatus() {
return postMethod.getStatusCode() + " " + postMethod.getStatusText();
}
void releaseConnection() {
if (postMethod != null) {
postMethod.releaseConnection();
}
}
}
}
| apache-2.0 |
Jianfu-She/algorithms | algorithms-leetcode/0230 Kth Smallest Element in a BST/Solution.java | 833 | /*
Given a binary search tree, write a function kthSmallest to find the kth smallest element in it.
Note:
You may assume k is always valid, 1 ? k ? BST's total elements.
*/
/**
* Definition for a binary tree node.
* public class TreeNode {
* int val;
* TreeNode left;
* TreeNode right;
* TreeNode(int x) { val = x; }
* }
*/
public class Solution {
public int kthSmallest(TreeNode root, int k) {
int count = getNodeNum(root.left);
if (k <= count)
return kthSmallest(root.left, k);
else if (k > count + 1)
return kthSmallest(root.right, k - count - 1);
else
return root.val;
}
private int getNodeNum(TreeNode root) {
if (root == null)
return 0;
return 1 + getNodeNum(root.left) + getNodeNum(root.right);
}
} | apache-2.0 |
AdeptInternet/prtg-java | prtg-sshd/src/main/java/org/adeptnet/prtg/sshd/BaseCommand.java | 4818 | /*
* Copyright 2014 Francois Steyn - Adept Internet (PTY) LTD (francois.s@adept.co.za).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.adeptnet.prtg.sshd;
import org.adeptnet.prtg.config.ConfigInterface;
import org.adeptnet.prtg.config.SensorProcess;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.JAXBException;
import org.apache.sshd.server.Command;
import org.apache.sshd.server.Environment;
import org.apache.sshd.server.ExitCallback;
/**
*
* @author Francois Steyn - Adept Internet (PTY) LTD (francois.s@adept.co.za)
*/
public abstract class BaseCommand implements Command {

    private static final Logger LOG = Logger.getLogger(BaseCommand.class.getName());

    /** Line terminator used for all output sent to the SSH client. */
    private final String CRLF = "\r\n";

    private final SshdConfigInterface configInterface;

    private InputStream in;
    private OutputStream out;
    private OutputStream err;
    private ExitCallback callback;
    private Environment environment;

    public BaseCommand(SshdConfigInterface configInterface) {
        this.configInterface = configInterface;
    }

    public ConfigInterface getConfigInterface() {
        return configInterface;
    }

    /**
     * Writes a single CRLF-terminated line to the client's stdout and
     * flushes immediately.
     *
     * @param line text to send, without line terminator
     * @throws IOException if the transport fails
     */
    public void printLine(final String line) throws IOException {
        out.write(String.format("%s%s", line, CRLF).getBytes());
        out.flush();
    }

    /**
     * Writes a single CRLF-terminated line to the client's stderr and
     * flushes immediately.
     *
     * @param line text to send, without line terminator
     * @throws IOException if the transport fails
     */
    public void printLineError(final String line) throws IOException {
        err.write(String.format("%s%s", line, CRLF).getBytes());
        err.flush();
    }

    /** Closes all streams and signals the given exit value to the server. */
    public void doExit(final int exitValue) {
        doClose();
        if (callback != null) {
            callback.onExit(exitValue);
        }
    }

    /** Closes the in/out/err streams, ignoring individual close failures. */
    public void doClose() {
        doClose(in, out, err);
    }

    public void doClose(Closeable... closeables) {
        for (Closeable c : closeables) {
            try {
                if (c != null) {
                    c.close();
                }
            } catch (IOException e) {
                // Ignore
            }
        }
    }

    /** Closes all streams and signals exit value plus message to the server. */
    public void doExit(final int exitValue, final String exitMessage) {
        doClose();
        if (callback != null) {
            callback.onExit(exitValue, exitMessage);
        }
    }

    /**
     * Handles one PRTG sensor request line.
     *
     * <p>If the line starts with the configured PRTG path prefix, the named
     * sensor is executed and its XML output written to stdout; a JAXB failure
     * is reported to stderr as a PRTG error document instead. Lines without
     * the prefix are not consumed.
     *
     * @param line the raw command line received from the client
     * @return true if the line was a PRTG request (handled, successfully or
     *         not), false if it should be processed elsewhere
     * @throws IOException if writing the response fails
     */
    protected boolean handlePRTG(final String line) throws IOException {
        if (!line.startsWith(configInterface.getPrtgPathPrefix())) {
            return false;
        }
        try {
            // Fix: strip only the LEADING prefix. String.replace() removed
            // every occurrence of the prefix, mangling sensor names that
            // happened to contain it again.
            final String sensorName =
                    line.substring(configInterface.getPrtgPathPrefix().length()).trim();
            printLine(new SensorProcess(configInterface)
                    .withSensorName(sensorName)
                    .run());
        } catch (JAXBException ex) {
            final String error = String.format("(%s) %s", ex.getClass().getName(), ex.getMessage());
            final StringBuilder sb = new StringBuilder();
            sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>");
            sb.append("<prtg xmlns=\"urn:ietf:params:xml:ns:prtg-1.0\">");
            sb.append("<error>1</error>");
            sb.append("<text>");
            // Fix: escape the message so an exception text containing
            // '&', '<' or '>' cannot produce malformed XML.
            sb.append(xmlEscape(error));
            sb.append("</text>");
            sb.append("</prtg>");
            printLineError(sb.toString());
            LOG.log(Level.SEVERE, error, ex);
        }
        return true;
    }

    /** Minimal XML text-node escaping (&amp;, &lt;, &gt;). */
    private static String xmlEscape(final String text) {
        return text.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
    }

    public InputStream getInputStream() {
        return in;
    }

    public OutputStream getOutputStream() {
        return out;
    }

    public OutputStream getErrorStream() {
        return err;
    }

    public Environment getEnvironment() {
        return environment;
    }

    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }

    @Override
    public void setInputStream(InputStream in) {
        this.in = in;
    }

    @Override
    public void setOutputStream(OutputStream out) {
        this.out = out;
    }

    @Override
    public void setErrorStream(OutputStream err) {
        this.err = err;
    }

    @Override
    public void setExitCallback(ExitCallback callback) {
        this.callback = callback;
    }
}
| apache-2.0 |
multiscripter/job4j | junior/pack1_trainee/p4_lambda/ch2_stream_api/src/main/java/ru/job4j/list2map/UserConvert.java | 797 | package ru.job4j.list2map;
import java.util.HashMap;
import java.util.List;
/**
* Класс UserConvert реализует функционал конвертации List в HashMap.
*
* @author Gureyev Ilya (mailto:ill-jah@yandex.ru)
* @version 2018-11-29
* @since 2017-05-11
*/
class UserConvert {
/**
* Конвертирует список пользователей в отображение и возвращает его.
* @param list список пользователей.
* @return отображение с пользователями.
*/
public HashMap<Integer, User> process(List<User> list) {
HashMap<Integer, User> hm = new HashMap<>();
list.forEach(x -> hm.put(x.getId(), x));
return hm;
}
} | apache-2.0 |
miyakawataku/piggybank-ltsv | src/org/apache/pig/data/DataType.java | 62565 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.data;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.io.WritableComparable;
import org.apache.pig.PigException;
import org.apache.pig.ResourceSchema;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.builtin.ToDate;
import org.apache.pig.classification.InterfaceAudience;
import org.apache.pig.classification.InterfaceStability;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.SchemaMergeException;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
/**
* A class of static final values used to encode data type and a number of
* static helper functions for manipulating data objects. The data type
* values could be
* done as an enumeration, but it is done as byte codes instead to save
* creating objects.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class DataType {
// IMPORTANT! This list can be used to record values of data on disk,
// so do not change the values. You may strand user data.
// IMPORTANT! Order matters here, as compare() below uses the order to
// order unlike datatypes. Don't change this ordering.
// Spaced unevenly to leave room for new entries without changing
// values or creating order issues.
public static final byte UNKNOWN = 0;
public static final byte NULL = 1;
public static final byte BOOLEAN = 5;
public static final byte BYTE = 6; // internal use only
public static final byte INTEGER = 10;
public static final byte LONG = 15;
public static final byte FLOAT = 20;
public static final byte DOUBLE = 25;
public static final byte DATETIME = 30;
public static final byte BYTEARRAY = 50;
public static final byte CHARARRAY = 55;
public static final byte BIGINTEGER = 65;
public static final byte BIGDECIMAL = 70;
/**
* Internal use only.
*/
public static final byte BIGCHARARRAY = 60; //internal use only; for storing/loading chararray bigger than 64K characters in BinStorage
public static final byte MAP = 100;
public static final byte TUPLE = 110;
public static final byte BAG = 120;
/**
* Internal use only; used to store WriteableComparable objects
* for creating ordered index in MergeJoin. Expecting a object that
* implements Writable interface and has default constructor
*/
public static final byte GENERIC_WRITABLECOMPARABLE = 123;
/**
* Internal use only.
*/
public static final byte INTERNALMAP = 127; // internal use only; for maps that are object->object. Used by FindQuantiles.
public static final byte ERROR = -1;
/**
* Determine the datatype of an object.
* @param o Object to test.
* @return byte code of the type, or ERROR if we don't know.
*/
public static byte findType(Object o) {
if (o == null) {
return NULL;
}
// Try to put the most common first
if (o instanceof DataByteArray) {
return BYTEARRAY;
} else if (o instanceof String) {
return CHARARRAY;
} else if (o instanceof Tuple) {
return TUPLE;
} else if (o instanceof DataBag) {
return BAG;
} else if (o instanceof Integer) {
return INTEGER;
} else if (o instanceof Long) {
return LONG;
} else if (o instanceof InternalMap) {
return INTERNALMAP;
} else if (o instanceof Map) {
return MAP;
} else if (o instanceof Float) {
return FLOAT;
} else if (o instanceof Double) {
return DOUBLE;
} else if (o instanceof Boolean) {
return BOOLEAN;
} else if (o instanceof DateTime) {
return DATETIME;
} else if (o instanceof Byte) {
return BYTE;
} else if (o instanceof BigInteger) {
return BIGINTEGER;
} else if (o instanceof BigDecimal) {
return BIGDECIMAL;
} else if (o instanceof WritableComparable) {
return GENERIC_WRITABLECOMPARABLE;
} else {return ERROR;}
}
/**
* Given a Type object determine the data type it represents. This isn't
* cheap, as it uses reflection, so use sparingly.
* @param t Type to examine
* @return byte code of the type, or ERROR if we don't know.
*/
public static byte findType(Type t) {
if (t == null) {
return NULL;
}
// Try to put the most common first
if (t == DataByteArray.class) {
return BYTEARRAY;
} else if (t == String.class) {
return CHARARRAY;
} else if (t == Integer.class) {
return INTEGER;
} else if (t == Long.class) {
return LONG;
} else if (t == Float.class) {
return FLOAT;
} else if (t == Double.class) {
return DOUBLE;
} else if (t == Boolean.class) {
return BOOLEAN;
} else if (t == Byte.class) {
return BYTE;
} else if (t == BigInteger.class) {
return BIGINTEGER;
} else if (t == BigDecimal.class) {
return BIGDECIMAL;
} else if (t == DateTime.class) {
return DATETIME;
} else if (t == InternalMap.class) {
return INTERNALMAP;
} else {
// Might be a tuple or a bag, need to check the interfaces it
// implements
if (t instanceof Class) {
return extractTypeFromClass(t);
}else if (t instanceof ParameterizedType){
ParameterizedType impl=(ParameterizedType)t;
Class c=(Class)impl.getRawType();
return extractTypeFromClass(c);
}
return ERROR;
}
}
private static byte extractTypeFromClass(Type t) {
Class c = (Class)t;
Class[] ioeInterfaces = c.getInterfaces();
Class[] interfaces = null;
if(c.isInterface()){
interfaces = new Class[ioeInterfaces.length+1];
interfaces[0] = c;
for (int i = 1; i < interfaces.length; i++) {
interfaces[i] = ioeInterfaces[i-1];
}
} else {
interfaces = ioeInterfaces;
}
boolean matchedWritableComparable = false;
for (int i = 0; i < interfaces.length; i++) {
if (interfaces[i].getName().equals("org.apache.pig.data.Tuple")) {
return TUPLE;
} else if (interfaces[i].getName().equals("org.apache.pig.data.DataBag")) {
return BAG;
} else if (interfaces[i].getName().equals("java.util.Map")) {
return MAP;
} else if (interfaces[i].getName().equals("org.apache.hadoop.io.WritableComparable")) {
// use GENERIC_WRITABLECOMPARABLE type only as last resort
matchedWritableComparable = true;
}
}
if(matchedWritableComparable) {
return GENERIC_WRITABLECOMPARABLE;
}
return ERROR;
}
/**
* Return the number of types Pig knows about.
* @return number of types
*/
public static int numTypes(){
byte[] types = genAllTypes();
return types.length;
}
/**
* Get an array of all type values.
* @return byte array with an entry for each type.
*/
public static byte[] genAllTypes(){
byte[] types = { DataType.BAG, DataType.BIGCHARARRAY, DataType.BOOLEAN, DataType.BYTE, DataType.BYTEARRAY,
DataType.CHARARRAY, DataType.DOUBLE, DataType.FLOAT, DataType.DATETIME,
DataType.GENERIC_WRITABLECOMPARABLE,
DataType.INTEGER, DataType.INTERNALMAP,
DataType.LONG, DataType.MAP, DataType.TUPLE, DataType.BIGINTEGER, DataType.BIGDECIMAL};
return types;
}
private static String[] genAllTypeNames(){
String[] names = { "BAG", "BIGCHARARRAY", "BOOLEAN", "BYTE", "BYTEARRAY",
"CHARARRAY", "DOUBLE", "FLOAT", "DATETIME",
"GENERIC_WRITABLECOMPARABLE",
"INTEGER",
"INTERNALMAP",
"LONG",
"MAP",
"TUPLE",
"BIGINTEGER",
"BIGDECIMAL"
};
return names;
}
/**
* Get a map of type values to type names.
* @return map
*/
public static Map<Byte, String> genTypeToNameMap(){
byte[] types = genAllTypes();
String[] names = genAllTypeNames();
Map<Byte,String> ret = new HashMap<Byte, String>();
for(int i=0;i<types.length;i++){
ret.put(types[i], names[i]);
}
return ret;
}
/**
* Get a map of type names to type values.
* @return map
*/
public static Map<String, Byte> genNameToTypeMap(){
byte[] types = genAllTypes();
String[] names = genAllTypeNames();
Map<String, Byte> ret = new HashMap<String, Byte>();
for(int i=0;i<types.length;i++){
ret.put(names[i], types[i]);
}
return ret;
}
    /**
     * Get the lower-case type name of an object (null maps to "NULL").
     * Delegates to {@link #findType(Object)} then {@link #findTypeName(byte)}.
     * @param o Object to test.
     * @return type name, as a String.
     */
    public static String findTypeName(Object o) {
        return findTypeName(findType(o));
    }
/**
* Get the type name from the type byte code
* @param dt Type byte code
* @return type name, as a String.
*/
public static String findTypeName(byte dt) {
switch (dt) {
case NULL: return "NULL";
case BOOLEAN: return "boolean";
case BYTE: return "byte";
case INTEGER: return "int";
case BIGINTEGER: return "biginteger";
case BIGDECIMAL: return "bigdecimal";
case LONG: return "long";
case FLOAT: return "float";
case DOUBLE: return "double";
case DATETIME: return "datetime";
case BYTEARRAY: return "bytearray";
case BIGCHARARRAY: return "bigchararray";
case CHARARRAY: return "chararray";
case MAP: return "map";
case INTERNALMAP: return "internalmap";
case TUPLE: return "tuple";
case BAG: return "bag";
case GENERIC_WRITABLECOMPARABLE: return "generic_writablecomparable";
default: return "Unknown";
}
}
/**
* Determine whether the this data type is complex.
* @param dataType Data type code to test.
* @return true if dataType is bag, tuple, or map.
*/
public static boolean isComplex(byte dataType) {
return ((dataType == BAG) || (dataType == TUPLE) ||
(dataType == MAP) || (dataType == INTERNALMAP));
}
    /**
     * Determine whether the object is complex or atomic.
     * Delegates to {@link #findType(Object)}; a null input yields false.
     * @param o Object to determine type of.
     * @return true if o is a bag, tuple, or map (including internal maps).
     */
    public static boolean isComplex(Object o) {
        return isComplex(findType(o));
    }
/**
* Determine whether the this data type is atomic.
* @param dataType Data type code to test.
* @return true if dataType is bytearray, bigchararray, chararray, integer, long,
* float, or boolean.
*/
public static boolean isAtomic(byte dataType) {
return ((dataType == BYTEARRAY) ||
(dataType == CHARARRAY) ||
(dataType == BIGCHARARRAY) ||
(dataType == INTEGER) ||
(dataType == BIGINTEGER) ||
(dataType == BIGDECIMAL) ||
(dataType == LONG) ||
(dataType == FLOAT) ||
(dataType == DOUBLE) ||
(dataType == BOOLEAN) ||
(dataType == BYTE) ||
(dataType == DATETIME) ||
(dataType == GENERIC_WRITABLECOMPARABLE));
}
    /**
     * Determine whether the object is atomic.
     * Delegates to {@link #findType(Object)}; a null input yields false.
     * @param o Object to determine type of.
     * @return true if o's type is one of the scalar types accepted by
     *         {@link #isAtomic(byte)}.
     */
    public static boolean isAtomic(Object o) {
        return isAtomic(findType(o));
    }
    /**
     * Determine whether this object can have a schema.
     * Delegates to {@link #findType(Object)} and {@link #isSchemaType(byte)}.
     * @param o Object to determine if it has a schema
     * @return true if the type can have a valid schema (bag, tuple or map)
     */
    public static boolean isSchemaType(Object o) {
        return isSchemaType(findType(o));
    }
/**
* Determine whether the this data type can have a schema.
* @param dataType dataType to determine if it has a schema
* @return true if the type can have a valid schema (i.e., bag or tuple)
*/
public static boolean isSchemaType(byte dataType) {
return ((dataType == BAG) || (dataType == TUPLE) || dataType == MAP);
}
    /**
* Compare two objects to each other. This function is necessary
* because there's no super class that implements compareTo. This
* function provides an (arbitrary) ordering of objects of different
* types as follows: NULL < BOOLEAN < BYTE < INTEGER < LONG <
* FLOAT < DOUBLE < DATETIME < BYTEARRAY < STRING < MAP <
* TUPLE < BAG. No other functions should implement this cross
* object logic. They should call this function for it instead.
* @param o1 First object
* @param o2 Second object
* @return -1 if o1 is less, 0 if they are equal, 1 if o2 is less.
*/
public static int compare(Object o1, Object o2) {
byte dt1 = findType(o1);
byte dt2 = findType(o2);
return compare(o1, o2, dt1, dt2);
}
/**
* Same as {@link #compare(Object, Object)}, but does not use reflection to determine the type
* of passed in objects, relying instead on the caller to provide the appropriate values, as
* determined by {@link DataType#findType(Object)}.
*
* Use this version in cases where multiple objects of the same type have to be repeatedly compared.
* @param o1 first object
* @param o2 second object
* @param dt1 type, as byte value, of o1
* @param dt2 type, as byte value, of o2
* @return -1 if o1 is < o2, 0 if they are equal, 1 if o1 > o2
*/
@SuppressWarnings("unchecked")
public static int compare(Object o1, Object o2, byte dt1, byte dt2) {
if (dt1 == dt2) {
switch (dt1) {
case NULL:
return 0;
case BOOLEAN:
return ((Boolean)o1).compareTo((Boolean)o2);
case BYTE:
return ((Byte)o1).compareTo((Byte)o2);
case INTEGER:
return ((Integer)o1).compareTo((Integer)o2);
case LONG:
return ((Long)o1).compareTo((Long)o2);
case FLOAT:
return ((Float)o1).compareTo((Float)o2);
case DOUBLE:
return ((Double)o1).compareTo((Double)o2);
case DATETIME:
return ((DateTime)o1).compareTo((DateTime)o2);
case BYTEARRAY:
return ((DataByteArray)o1).compareTo(o2);
case CHARARRAY:
return ((String)o1).compareTo((String)o2);
case BIGINTEGER:
return ((BigInteger)o1).compareTo((BigInteger)o2);
case BIGDECIMAL:
return ((BigDecimal)o1).compareTo((BigDecimal)o2);
case MAP: {
Map<String, Object> m1 = (Map<String, Object>)o1;
Map<String, Object> m2 = (Map<String, Object>)o2;
int sz1 = m1.size();
int sz2 = m2.size();
if (sz1 < sz2) {
return -1;
} else if (sz1 > sz2) {
return 1;
} else {
// This is bad, but we have to sort the keys of the maps in order
// to be commutative.
TreeMap<String, Object> tm1 = new TreeMap<String, Object>(m1);
TreeMap<String, Object> tm2 = new TreeMap<String, Object>(m2);
Iterator<Map.Entry<String, Object> > i1 =
tm1.entrySet().iterator();
Iterator<Map.Entry<String, Object> > i2 =
tm2.entrySet().iterator();
while (i1.hasNext()) {
Map.Entry<String, Object> entry1 = i1.next();
Map.Entry<String, Object> entry2 = i2.next();
int c = entry1.getKey().compareTo(entry2.getKey());
if (c != 0) {
return c;
} else {
c = compare(entry1.getValue(), entry2.getValue());
if (c != 0) {
return c;
}
}
}
return 0;
}
}
case GENERIC_WRITABLECOMPARABLE:
return ((Comparable)o1).compareTo(o2);
case INTERNALMAP:
return -1; // Don't think anyway will want to do this.
case TUPLE:
return ((Tuple)o1).compareTo(o2);
case BAG:
return ((DataBag)o1).compareTo(o2);
default:
throw new RuntimeException("Unkown type " + dt1 +
" in compare");
}
} else if (dt1 < dt2) {
return -1;
} else {
return 1;
}
}
    /**
     * Serialize an object to bytes after discovering its type via
     * {@link #findType(Object)}. See {@link #toBytes(Object, byte)}.
     * @param o object to serialize.
     * @return serialized bytes (null for a null input).
     * @throws ExecException if the type cannot be converted to bytes.
     */
    public static byte[] toBytes(Object o) throws ExecException {
        return toBytes(o, findType(o));
    }
    /**
     * Serialize an object of the given type to bytes. Numbers, booleans,
     * datetimes, tuples, bags and maps are rendered via their String form;
     * a bytearray's backing bytes are returned directly; NULL yields null.
     *
     * NOTE(review): getBytes() here uses the platform default charset —
     * confirm whether UTF-8 should be forced for cross-platform stability.
     *
     * @param o object to serialize
     * @param type type byte code of o, as from {@link #findType(Object)}
     * @return serialized bytes
     * @throws ExecException if the type cannot be converted to a ByteArray
     */
    @SuppressWarnings("unchecked")
    public static byte[] toBytes(Object o, byte type) throws ExecException {
        switch (type) {
        case BOOLEAN:
            //return ((Boolean) o).booleanValue() ? new byte[] {1} : new byte[] {0};
            return ((Boolean) o).toString().getBytes();
        case BYTE:
            return new byte[] {((Byte) o)};
        case BIGINTEGER:
        case BIGDECIMAL:
        case INTEGER:
        case DOUBLE:
        case FLOAT:
        case LONG:
            // All numeric types share a single decimal-text rendering.
            return ((Number) o).toString().getBytes();
        case DATETIME:
            return ((DateTime) o).toString().getBytes();
        case CHARARRAY:
            return ((String) o).getBytes();
        case MAP:
            return mapToString((Map<String, Object>) o).getBytes();
        case TUPLE:
            return ((Tuple) o).toString().getBytes();
        case BYTEARRAY:
            // Raw backing bytes, no textual rendering.
            return ((DataByteArray) o).get();
        case BAG:
            return ((DataBag) o).toString().getBytes();
        case NULL:
            return null;
        default:
            int errCode = 1071;
            String msg = "Cannot convert a " + findTypeName(o) +
                " to a ByteArray";
            throw new ExecException(msg, errCode, PigException.INPUT);
        }
    }
/**
* Force a data object to a Boolean, if possible. Any numeric type can be
* forced to a Boolean, as well as CharArray, ByteArray. Complex types
* cannot be forced to a Boolean. This isn't particularly efficient, so if
* you already <b>know</b> that the object you have is a Boolean you should
* just cast it.
*
* @param o
* object to cast
* @param type
* of the object you are casting
* @return The object as a Boolean.
* @throws ExecException
* if the type can't be forced to a Boolean.
*/
public static Boolean toBoolean(Object o, byte type) throws ExecException {
try {
switch (type) {
case NULL:
return null;
case BOOLEAN:
return (Boolean) o;
case BYTE:
return Boolean.valueOf(((Byte) o).byteValue() != 0);
case INTEGER:
return Boolean.valueOf(((Integer) o).intValue() != 0);
case LONG:
return Boolean.valueOf(((Long) o).longValue() != 0L);
case BIGINTEGER:
return Boolean.valueOf(!BigInteger.ZERO.equals(((BigInteger) o)));
case BIGDECIMAL:
return Boolean.valueOf(!BigDecimal.ZERO.equals(((BigDecimal) o)));
case FLOAT:
return Boolean.valueOf(((Float) o).floatValue() != 0.0F);
case DOUBLE:
return Boolean.valueOf(((Double) o).doubleValue() != 0.0D);
case BYTEARRAY:
String str = ((DataByteArray) o).toString();
if (str.equalsIgnoreCase("true")) {
return Boolean.TRUE;
} else if (str.equalsIgnoreCase("false")) {
return Boolean.FALSE;
} else {
return null;
}
case CHARARRAY:
if (((String) o).equalsIgnoreCase("true")) {
return Boolean.TRUE;
} else if (((String) o).equalsIgnoreCase("false")) {
return Boolean.FALSE;
} else {
return null;
}
case DATETIME:
case MAP:
case INTERNALMAP:
case TUPLE:
case BAG:
case UNKNOWN:
default:
int errCode = 1071;
String msg = "Cannot convert a " + findTypeName(o) + " to a Boolean";
throw new ExecException(msg, errCode, PigException.INPUT);
}
} catch (ClassCastException cce) {
throw cce;
} catch (ExecException ee) {
throw ee;
} catch (NumberFormatException nfe) {
int errCode = 1074;
String msg = "Problem with formatting. Could not convert " + o + " to Float.";
throw new ExecException(msg, errCode, PigException.INPUT, nfe);
} catch (Exception e) {
int errCode = 2054;
String msg = "Internal error. Could not convert " + o + " to Float.";
throw new ExecException(msg, errCode, PigException.BUG);
}
}
    /**
     * Force a data object to a Boolean, determining its type first via
     * {@link #findType(Object)}. See {@link #toBoolean(Object, byte)}.
     * @param o object to cast.
     * @return The object as a Boolean (null for null input or unparseable
     *         chararray/bytearray values).
     * @throws ExecException if the type can't be forced to a Boolean.
     */
    public static Boolean toBoolean(Object o) throws ExecException {
        return toBoolean(o, findType(o));
    }
/**
* Force a data object to an Integer, if possible. Any numeric type
* can be forced to an Integer (though precision may be lost), as well
* as CharArray, ByteArray, or Boolean. Complex types cannot be
* forced to an Integer. This isn't particularly efficient, so if you
* already <b>know</b> that the object you have is an Integer you
* should just cast it.
* @param o object to cast
* @param type of the object you are casting
* @return The object as an Integer.
* @throws ExecException if the type can't be forced to an Integer.
*/
public static Integer toInteger(Object o,byte type) throws ExecException {
try {
switch (type) {
case BOOLEAN:
if (((Boolean)o) == true) {
return Integer.valueOf(1);
} else {
return Integer.valueOf(0);
}
case BYTE:
return Integer.valueOf(((Byte)o).intValue());
case INTEGER:
return (Integer)o;
case LONG:
return Integer.valueOf(((Long)o).intValue());
case FLOAT:
return Integer.valueOf(((Float)o).intValue());
case DOUBLE:
return Integer.valueOf(((Double)o).intValue());
case BYTEARRAY:
return Integer.valueOf(((DataByteArray)o).toString());
case CHARARRAY:
return Integer.valueOf((String)o);
case BIGINTEGER:
return Integer.valueOf(((BigInteger)o).intValue());
case BIGDECIMAL:
return Integer.valueOf(((BigDecimal)o).intValue());
case NULL:
return null;
case DATETIME:
return Integer.valueOf(Long.valueOf(((DateTime)o).getMillis()).intValue());
case MAP:
case INTERNALMAP:
case TUPLE:
case BAG:
case UNKNOWN:
default:
int errCode = 1071;
String msg = "Cannot convert a " + findTypeName(o) +
" to an Integer";
throw new ExecException(msg, errCode, PigException.INPUT);
}
} catch (ClassCastException cce) {
throw cce;
} catch (ExecException ee) {
throw ee;
} catch (NumberFormatException nfe) {
int errCode = 1074;
String msg = "Problem with formatting. Could not convert " + o + " to Integer.";
throw new ExecException(msg, errCode, PigException.INPUT, nfe);
} catch (Exception e) {
int errCode = 2054;
String msg = "Internal error. Could not convert " + o + " to Integer.";
throw new ExecException(msg, errCode, PigException.BUG);
}
}
    /**
     * Force a data object to an Integer, if possible. Any numeric type
     * can be forced to an Integer (though precision may be lost), as well
     * as CharArray, ByteArray, or Boolean. Complex types cannot be
     * forced to an Integer. This isn't particularly efficient, so if you
     * already <b>know</b> that the object you have is an Integer you
     * should just cast it. Unlike {@link #toInteger(Object, byte)} this
     * method will first determine the type of o and then do the cast.
     * Use {@link #toInteger(Object, byte)} if you already know the type.
     * @param o object to cast
     * @return The object as an Integer.
     * @throws ExecException if the type can't be forced to an Integer.
     */
    public static Integer toInteger(Object o) throws ExecException {
        return toInteger(o, findType(o));
    }
/**
* Force a data object to a Long, if possible. Any numeric type
* can be forced to a Long (though precision may be lost), as well
* as CharArray, ByteArray, or Boolean. Complex types cannot be
* forced to a Long. This isn't particularly efficient, so if you
* already <b>know</b> that the object you have is a Long you
* should just cast it.
* @param o object to cast
* @param type of the object you are casting
* @return The object as a Long.
* @throws ExecException if the type can't be forced to a Long.
*/
public static Long toLong(Object o,byte type) throws ExecException {
try {
switch (type) {
case BOOLEAN:
if (((Boolean)o) == true) {
return Long.valueOf(1);
} else {
return Long.valueOf(0);
}
case BYTE:
return Long.valueOf(((Byte)o).longValue());
case INTEGER:
return Long.valueOf(((Integer)o).longValue());
case LONG:
return (Long)o;
case FLOAT:
return Long.valueOf(((Float)o).longValue());
case DOUBLE:
return Long.valueOf(((Double)o).longValue());
case BYTEARRAY:
return Long.valueOf(((DataByteArray)o).toString());
case CHARARRAY:
return Long.valueOf((String)o);
case BIGINTEGER:
return Long.valueOf(((BigInteger)o).longValue());
case BIGDECIMAL:
return Long.valueOf(((BigDecimal)o).longValue());
case NULL:
return null;
case DATETIME:
return Long.valueOf(((DateTime)o).getMillis());
case MAP:
case INTERNALMAP:
case TUPLE:
case BAG:
case UNKNOWN:
default:
int errCode = 1071;
String msg = "Cannot convert a " + findTypeName(o) +
" to a Long";
throw new ExecException(msg, errCode, PigException.INPUT);
}
} catch (ClassCastException cce) {
throw cce;
} catch (ExecException ee) {
throw ee;
} catch (NumberFormatException nfe) {
int errCode = 1074;
String msg = "Problem with formatting. Could not convert " + o + " to Long.";
throw new ExecException(msg, errCode, PigException.INPUT, nfe);
} catch (Exception e) {
int errCode = 2054;
String msg = "Internal error. Could not convert " + o + " to Long.";
throw new ExecException(msg, errCode, PigException.BUG);
}
}
    /**
     * Force a data object to a Long, if possible. Any numeric type
     * can be forced to a Long (though precision may be lost), as well
     * as CharArray, ByteArray, or Boolean. Complex types cannot be
     * forced to an Long. This isn't particularly efficient, so if you
     * already <b>know</b> that the object you have is a Long you
     * should just cast it. Unlike {@link #toLong(Object, byte)} this
     * method will first determine the type of o and then do the cast.
     * Use {@link #toLong(Object, byte)} if you already know the type.
     * @param o object to cast
     * @return The object as a Long.
     * @throws ExecException if the type can't be forced to an Long.
     */
    public static Long toLong(Object o) throws ExecException {
        return toLong(o, findType(o));
    }
/**
 * Force a data object to a Float, if possible. Any numeric type
 * can be forced to a Float (though precision may be lost), as well
 * as CharArray, ByteArray. Complex types cannot be
 * forced to a Float. This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a Float you
 * should just cast it.
 * @param o object to cast
 * @param type of the object you are casting
 * @return The object as a Float, or null for a NULL input.
 * @throws ExecException if the type can't be forced to a Float.
 */
public static Float toFloat(Object o,byte type) throws ExecException {
    try {
        switch (type) {
        case BOOLEAN:
            return (Boolean) o ? Float.valueOf(1.0F) : Float.valueOf(0.0F);
        case INTEGER:
            // Float.valueOf(float) replaces the deprecated boxing constructor
            // new Float(float); behavior is identical, and it matches the
            // valueOf style already used by the other cases in this method.
            return Float.valueOf(((Integer)o).floatValue());
        case LONG:
            return Float.valueOf(((Long)o).floatValue());
        case FLOAT:
            return (Float)o;
        case DOUBLE:
            return Float.valueOf(((Double)o).floatValue());
        case DATETIME:
            // A DateTime is converted via its epoch-millisecond value.
            return Float.valueOf((float) ((DateTime)o).getMillis());
        case BYTEARRAY:
            return Float.valueOf(((DataByteArray)o).toString());
        case CHARARRAY:
            return Float.valueOf((String)o);
        case BIGINTEGER:
            return Float.valueOf(((BigInteger)o).floatValue());
        case BIGDECIMAL:
            return Float.valueOf(((BigDecimal)o).floatValue());
        case NULL:
            return null;
        case BYTE:
        case MAP:
        case INTERNALMAP:
        case TUPLE:
        case BAG:
        case UNKNOWN:
        default:
            int errCode = 1071;
            String msg = "Cannot convert a " + findTypeName(o) +
            " to a Float";
            throw new ExecException(msg, errCode, PigException.INPUT);
        }
    } catch (ClassCastException cce) {
        // Wrong runtime type for the declared DataType: propagate unchanged.
        throw cce;
    } catch (ExecException ee) {
        throw ee;
    } catch (NumberFormatException nfe) {
        int errCode = 1074;
        String msg = "Problem with formatting. Could not convert " + o + " to Float.";
        throw new ExecException(msg, errCode, PigException.INPUT, nfe);
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to Float.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * Force a data object to a Float, if possible. Any numeric type
 * can be forced to a Float (though precision may be lost), as well
 * as CharArray, ByteArray, or Boolean. Complex types cannot be
 * forced to a Float. This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a Float you
 * should just cast it. Unlike {@link #toFloat(Object, byte)} this
 * method will first determine the type of o and then do the cast.
 * Use {@link #toFloat(Object, byte)} if you already know the type.
 * @param o object to cast
 * @return The object as a Float.
 * @throws ExecException if the type can't be forced to a Float.
 */
public static Float toFloat(Object o) throws ExecException {
    // Resolve the runtime type, then reuse the typed conversion.
    byte objectType = findType(o);
    return toFloat(o, objectType);
}
/**
 * Force a data object to a Double, if possible. Any numeric type
 * can be forced to a Double, as well
 * as CharArray, ByteArray. Complex types cannot be
 * forced to a Double. This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a Double you
 * should just cast it.
 * @param o object to cast
 * @param type of the object you are casting
 * @return The object as a Double, or null for a NULL input.
 * @throws ExecException if the type can't be forced to a Double.
 */
public static Double toDouble(Object o,byte type) throws ExecException {
    try {
        switch (type) {
        case BOOLEAN:
            return (Boolean) o ? Double.valueOf(1.0D) : Double.valueOf(0.0D);
        case INTEGER:
            // Double.valueOf(double) replaces the deprecated boxing
            // constructor new Double(double); behavior is identical and it
            // matches the valueOf style of the other cases in this method.
            return Double.valueOf(((Integer)o).doubleValue());
        case LONG:
            return Double.valueOf(((Long)o).doubleValue());
        case FLOAT:
            return Double.valueOf(((Float)o).doubleValue());
        case DOUBLE:
            return (Double)o;
        case DATETIME:
            // A DateTime is converted via its epoch-millisecond value.
            return Double.valueOf((double) ((DateTime)o).getMillis());
        case BYTEARRAY:
            return Double.valueOf(((DataByteArray)o).toString());
        case CHARARRAY:
            return Double.valueOf((String)o);
        case BIGINTEGER:
            return Double.valueOf(((BigInteger)o).doubleValue());
        case BIGDECIMAL:
            return Double.valueOf(((BigDecimal)o).doubleValue());
        case NULL:
            return null;
        case BYTE:
        case MAP:
        case INTERNALMAP:
        case TUPLE:
        case BAG:
        case UNKNOWN:
        default:
            int errCode = 1071;
            String msg = "Cannot convert a " + findTypeName(o) +
            " to a Double";
            throw new ExecException(msg, errCode, PigException.INPUT);
        }
    } catch (ClassCastException cce) {
        // Wrong runtime type for the declared DataType: propagate unchanged.
        throw cce;
    } catch (ExecException ee) {
        throw ee;
    } catch (NumberFormatException nfe) {
        int errCode = 1074;
        String msg = "Problem with formatting. Could not convert " + o + " to Double.";
        throw new ExecException(msg, errCode, PigException.INPUT, nfe);
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to Double.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * Force a data object to a DateTime, if possible. CharArray and ByteArray
 * values are parsed as date strings (optionally carrying a time zone), and
 * numeric values are interpreted as epoch milliseconds. Complex types
 * cannot be forced to a DateTime. This isn't particularly efficient, so if
 * you already <b>know</b> that the object you have is a DateTime you should
 * just cast it.
 *
 * @param o
 *            object to cast
 * @param type
 *            of the object you are casting
 * @return The object as a DateTime, or null for a NULL input.
 * @throws ExecException
 *             if the type can't be forced to a DateTime.
 */
public static DateTime toDateTime(Object o, byte type) throws ExecException {
    try {
        switch (type) {
        case NULL:
            return null;
        case BYTEARRAY:
            return new DateTime(((DataByteArray) o).toString());
        case CHARARRAY:
            // the string can contain just date part or date part plus time part
            DateTimeZone dtz = ToDate.extractDateTimeZone((String) o);
            if (dtz == null) {
                return new DateTime((String) o);
            } else {
                return new DateTime((String) o, dtz);
            }
        case INTEGER:
            return new DateTime(((Integer) o).longValue());
        case LONG:
            return new DateTime(((Long) o).longValue());
        case FLOAT:
            return new DateTime(((Float) o).longValue());
        case DOUBLE:
            return new DateTime(((Double) o).longValue());
        case BIGINTEGER:
            return new DateTime(((BigInteger) o).longValue());
        case BIGDECIMAL:
            return new DateTime(((BigDecimal) o).longValue());
        case DATETIME:
            return (DateTime) o;
        case BOOLEAN:
        case BYTE:
        case MAP:
        case INTERNALMAP:
        case TUPLE:
        case BAG:
        case UNKNOWN:
        default:
            int errCode = 1071;
            // Fixed message: it previously said "Boolean" (copy-paste error).
            String msg = "Cannot convert a " + findTypeName(o) + " to a DateTime";
            throw new ExecException(msg, errCode, PigException.INPUT);
        }
    } catch (ClassCastException cce) {
        throw cce;
    } catch (ExecException ee) {
        throw ee;
    } catch (NumberFormatException nfe) {
        int errCode = 1074;
        // Fixed message: it previously said "Float" (copy-paste error).
        String msg = "Problem with formatting. Could not convert " + o + " to DateTime.";
        throw new ExecException(msg, errCode, PigException.INPUT, nfe);
    } catch (Exception e) {
        int errCode = 2054;
        // Fixed message: it previously said "Float" (copy-paste error).
        String msg = "Internal error. Could not convert " + o + " to DateTime.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * Force a data object to a DateTime, first determining the object's type
 * via {@link #findType(Object)} and then delegating to
 * {@link #toDateTime(Object, byte)}.
 * @param o object to cast
 * @return The object as a DateTime.
 * @throws ExecException if the type can't be forced to a DateTime.
 */
public static DateTime toDateTime(Object o) throws ExecException {
    byte objectType = findType(o);
    return toDateTime(o, objectType);
}
/**
 * Force a data object to a Double, if possible. Any numeric type
 * can be forced to a Double, as well
 * as CharArray, ByteArray, or Boolean. Complex types cannot be
 * forced to a Double. This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a Double you
 * should just cast it. Unlike {@link #toDouble(Object, byte)} this
 * method will first determine the type of o and then do the cast.
 * Use {@link #toDouble(Object, byte)} if you already know the type.
 * @param o object to cast
 * @return The object as a Double.
 * @throws ExecException if the type can't be forced to a Double.
 */
public static Double toDouble(Object o) throws ExecException {
    // Resolve the runtime type, then reuse the typed conversion.
    byte objectType = findType(o);
    return toDouble(o, objectType);
}
/**
 * Force a data object to a BigInteger, first determining the object's type
 * via {@link #findType(Object)} and then delegating to
 * {@link #toBigInteger(Object, byte)}.
 * @param o object to cast
 * @return The object as a BigInteger.
 * @throws ExecException if the type can't be forced to a BigInteger.
 */
public static BigInteger toBigInteger(Object o) throws ExecException {
    byte objectType = findType(o);
    return toBigInteger(o, objectType);
}
/**
 * Force a data object to a BigInteger, if possible. Any numeric type can be
 * forced to a BigInteger (fractional parts are truncated), as well as
 * CharArray, ByteArray, or Boolean. Complex types cannot be forced to a
 * BigInteger.
 * @param o object to cast
 * @param type DataType code of the object being cast
 * @return The object as a BigInteger, or null for a NULL input.
 * @throws ExecException if the type can't be forced to a BigInteger.
 */
public static BigInteger toBigInteger(Object o,byte type) throws ExecException {
    try {
        switch (type) {
        case BOOLEAN:
            // true maps to 1, false to 0.
            return (Boolean) o ? BigInteger.ONE : BigInteger.ZERO;
        case INTEGER:
            return BigInteger.valueOf(((Integer)o).longValue());
        case LONG:
            return BigInteger.valueOf(((Long)o).longValue());
        case FLOAT:
            // longValue() truncates the fractional part.
            return BigInteger.valueOf(((Float)o).longValue());
        case DOUBLE:
            return BigInteger.valueOf(((Double)o).longValue());
        case BYTEARRAY:
            // Parsed from the string form; may throw NumberFormatException.
            return new BigInteger(((DataByteArray)o).toString());
        case CHARARRAY:
            return new BigInteger((String)o);
        case BIGINTEGER:
            return (BigInteger)o;
        case BIGDECIMAL:
            return ((BigDecimal)o).toBigInteger();
        case DATETIME:
            // A DateTime is converted via its epoch-millisecond value.
            return BigInteger.valueOf(((DateTime)o).getMillis());
        case NULL:
            return null;
        case BYTE:
        case MAP:
        case INTERNALMAP:
        case TUPLE:
        case BAG:
        case UNKNOWN:
        default:
            int errCode = 1071;
            String msg = "Cannot convert a " + findTypeName(o) +
            " to a BigInteger.";
            throw new ExecException(msg, errCode, PigException.INPUT);
        }
    } catch (ClassCastException cce) {
        // Wrong runtime type for the declared DataType: propagate unchanged.
        throw cce;
    } catch (ExecException ee) {
        throw ee;
    } catch (NumberFormatException nfe) {
        int errCode = 1074;
        String msg = "Problem with formatting. Could not convert " + o + " to BigInteger.";
        throw new ExecException(msg, errCode, PigException.INPUT, nfe);
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to BigInteger.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * Force a data object to a BigDecimal, first determining the object's type
 * via {@link #findType(Object)} and then delegating to
 * {@link #toBigDecimal(Object, byte)}.
 * @param o object to cast
 * @return The object as a BigDecimal.
 * @throws ExecException if the type can't be forced to a BigDecimal.
 */
public static BigDecimal toBigDecimal(Object o) throws ExecException {
    byte objectType = findType(o);
    return toBigDecimal(o, objectType);
}
/**
 * Force a data object to a BigDecimal, if possible. Any numeric type can be
 * forced to a BigDecimal, as well as CharArray, ByteArray, or Boolean.
 * Complex types cannot be forced to a BigDecimal.
 * @param o object to cast
 * @param type DataType code of the object being cast
 * @return The object as a BigDecimal, or null for a NULL input.
 * @throws ExecException if the type can't be forced to a BigDecimal.
 */
public static BigDecimal toBigDecimal(Object o,byte type) throws ExecException {
    try {
        switch (type) {
        case BOOLEAN:
            // true maps to 1, false to 0.
            return (Boolean) o ? BigDecimal.ONE : BigDecimal.ZERO;
        case INTEGER:
            return BigDecimal.valueOf(((Integer)o).longValue());
        case LONG:
            return BigDecimal.valueOf(((Long)o).longValue());
        case FLOAT:
            // Widened to double before conversion; inherits the float's
            // binary representation rather than its decimal rendering.
            return BigDecimal.valueOf(((Float)o).doubleValue());
        case DOUBLE:
            return BigDecimal.valueOf(((Double)o).doubleValue());
        case BYTEARRAY:
            // Parsed from the string form; may throw NumberFormatException.
            return new BigDecimal(((DataByteArray)o).toString());
        case CHARARRAY:
            return new BigDecimal((String)o);
        case BIGINTEGER:
            return new BigDecimal((BigInteger)o);
        case BIGDECIMAL:
            return (BigDecimal)o;
        case DATETIME:
            // A DateTime is converted via its epoch-millisecond value.
            return BigDecimal.valueOf(((DateTime)o).getMillis());
        case NULL:
            return null;
        case BYTE:
        case MAP:
        case INTERNALMAP:
        case TUPLE:
        case BAG:
        case UNKNOWN:
        default:
            int errCode = 1071;
            String msg = "Cannot convert a " + findTypeName(o) +
            " to a BigDecimal.";
            throw new ExecException(msg, errCode, PigException.INPUT);
        }
    } catch (ClassCastException cce) {
        // Wrong runtime type for the declared DataType: propagate unchanged.
        throw cce;
    } catch (ExecException ee) {
        throw ee;
    } catch (NumberFormatException nfe) {
        int errCode = 1074;
        String msg = "Problem with formatting. Could not convert " + o + " to BigDecimal.";
        throw new ExecException(msg, errCode, PigException.INPUT, nfe);
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to BigDecimal.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * Force a data object to a String, if possible. Any simple (atomic) type
 * can be forced to a String including ByteArray. Complex types cannot be
 * forced to a String. This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a String you
 * should just cast it.
 * @param o object to cast
 * @param type of the object you are casting
 * @return The object as a String, or null for a NULL input.
 * @throws ExecException if the type can't be forced to a String.
 */
public static String toString(Object o,byte type) throws ExecException {
    try {
        switch (type) {
        case INTEGER:
            return ((Integer)o).toString();
        case LONG:
            return ((Long)o).toString();
        case FLOAT:
            return ((Float)o).toString();
        case DOUBLE:
            return ((Double)o).toString();
        case DATETIME:
            return ((DateTime)o).toString();
        case BYTEARRAY:
            return ((DataByteArray)o).toString();
        case CHARARRAY:
            // Already a String; no conversion needed.
            return ((String)o);
        case BIGINTEGER:
            return ((BigInteger)o).toString();
        case BIGDECIMAL:
            return ((BigDecimal)o).toString();
        case NULL:
            return null;
        case BOOLEAN:
            return ((Boolean)o).toString();
        case BYTE:
            return ((Byte)o).toString();
        case MAP:
        case INTERNALMAP:
        case TUPLE:
        case BAG:
        case UNKNOWN:
        default:
            // Complex and unknown types are rejected.
            int errCode = 1071;
            String msg = "Cannot convert a " + findTypeName(o) +
            " to a String";
            throw new ExecException(msg, errCode, PigException.INPUT);
        }
    } catch (ClassCastException cce) {
        // Wrong runtime type for the declared DataType: propagate unchanged.
        throw cce;
    } catch (ExecException ee) {
        throw ee;
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to String.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * Force a data object to a String, if possible. Any simple (atomic) type
 * can be forced to a String including ByteArray. Complex types cannot be
 * forced to a String. This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a String you
 * should just cast it. Unlike {@link #toString(Object, byte)} this
 * method will first determine the type of o and then do the cast.
 * Use {@link #toString(Object, byte)} if you already know the type.
 * @param o object to cast
 * @return The object as a String.
 * @throws ExecException if the type can't be forced to a String.
 */
public static String toString(Object o) throws ExecException {
    // Resolve the runtime type, then reuse the typed conversion.
    byte objectType = findType(o);
    return toString(o, objectType);
}
/**
 * If this object is a map, return it as a map.
 * This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a Map you
 * should just cast it.
 * @param o object to cast
 * @return The object as a Map, or null if o is null.
 * @throws ExecException if the object is not a Map.
 */
@SuppressWarnings("unchecked")
public static Map<String, Object> toMap(Object o) throws ExecException {
    if (o == null) {
        return null;
    }
    // InternalMap instances are deliberately rejected even though they
    // implement Map.
    if (!(o instanceof Map) || o instanceof InternalMap) {
        int errCode = 1071;
        String msg = "Cannot convert a " + findTypeName(o) +
        " to a Map";
        throw new ExecException(msg, errCode, PigException.INPUT);
    }
    try {
        return (Map<String, Object>) o;
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to Map.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * If this object is a tuple, return it as a tuple.
 * This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a Tuple you
 * should just cast it.
 * @param o object to cast
 * @return The object as a Tuple, or null if o is null.
 * @throws ExecException if the object is not a Tuple.
 */
public static Tuple toTuple(Object o) throws ExecException {
    if (o == null) {
        return null;
    }
    if (!(o instanceof Tuple)) {
        int errCode = 1071;
        String msg = "Cannot convert a " + findTypeName(o) +
        " to a Tuple";
        throw new ExecException(msg, errCode, PigException.INPUT);
    }
    try {
        return (Tuple) o;
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to Tuple.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * If this object is a bag, return it as a bag.
 * This isn't particularly efficient, so if you
 * already <b>know</b> that the object you have is a bag you
 * should just cast it.
 * @param o object to cast
 * @return The object as a DataBag, or null if o is null.
 * @throws ExecException if the object is not a DataBag.
 */
public static DataBag toBag(Object o) throws ExecException {
    if (o == null) {
        return null;
    }
    if (!(o instanceof DataBag)) {
        int errCode = 1071;
        String msg = "Cannot convert a " + findTypeName(o) +
        " to a DataBag";
        throw new ExecException(msg, errCode, PigException.INPUT);
    }
    try {
        return (DataBag) o;
    } catch (Exception e) {
        int errCode = 2054;
        String msg = "Internal error. Could not convert " + o + " to Bag.";
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
/**
 * Purely for debugging: prints the class name of each field in the tuple
 * followed by the tuple's own string form to stdout.
 * @param t tuple to dump
 * @param label prefix printed before the field listing
 */
public static void spillTupleContents(Tuple t, String label) {
    System.out.print("Tuple " + label + " ");
    Iterator<Object> i = t.getAll().iterator();
    for (int j = 0; i.hasNext(); j++) {
        Object field = i.next();
        // Guard against null fields: calling getClass() on a null element
        // threw NullPointerException and aborted the debug dump.
        String className = (field == null) ? "null" : field.getClass().getName();
        System.out.print(j + ":" + className + " ");
    }
    System.out.println(t.toString());
}
/**
 * Determine if this type is a numeric type.
 * @param t type (as byte value) to test
 * @return true if this is a numeric type, false otherwise
 */
public static boolean isNumberType(byte t) {
    switch (t) {
    // All fixed- and arbitrary-precision numeric types count.
    case INTEGER:
    case LONG:
    case FLOAT:
    case DOUBLE:
    case BIGINTEGER:
    case BIGDECIMAL:
        return true;
    default:
        return false;
    }
}
/**
 * Determine if this is a type that can work can be done on.
 * @param t type (as a byte value) to test
 * @return false if the type is unknown, null, or error; true otherwise.
 */
public static boolean isUsableType(byte t) {
    // Everything except the three sentinel types is usable.
    return t != UNKNOWN && t != NULL && t != ERROR;
}
/**
 * Test if one type can cast to the other.
 * @param castType data type of the cast type
 * @param inputType data type of the input
 * @return true or false
 */
public static boolean castable(byte castType, byte inputType) {
    // Casting involving unknown/null/error types is never legal.
    if (!isUsableType(castType) || !isUsableType(inputType)) {
        return false;
    }
    // An identity cast always succeeds.
    if (castType == inputType) {
        return true;
    }
    // Any numeric type can be cast to any other numeric type.
    if (isNumberType(castType) && isNumberType(inputType)) {
        return true;
    }
    // A bytearray can be cast to anything.
    if (inputType == DataType.BYTEARRAY) {
        return true;
    }
    // Numeric <-> chararray casts are allowed in both directions;
    // everything else is rejected.
    return (isNumberType(inputType) && castType == DataType.CHARARRAY)
            || (isNumberType(castType) && inputType == DataType.CHARARRAY);
}
/**
 * Merge types if possible. Merging types means finding a type that one
 * or both types can be upcast to.
 * @param type1
 * @param type2
 * @return the merged type, or DataType.ERROR if not successful
 */
public static byte mergeType(byte type1, byte type2) {
    // Unknown/null/error types can never be merged.
    if (!isUsableType(type1) || !isUsableType(type2)) {
        return DataType.ERROR;
    }
    // Identical types merge to themselves.
    if (type1 == type2) {
        return type1;
    }
    // Two numeric types merge to the wider one (type codes are ordered
    // by width for the numeric types).
    if (isNumberType(type1) && isNumberType(type2)) {
        return (type1 > type2) ? type1 : type2;
    }
    // A bytearray merges to whatever the other type is.
    if (type1 == DataType.BYTEARRAY) {
        return type2;
    }
    if (type2 == DataType.BYTEARRAY) {
        return type1;
    }
    // No common upcast exists.
    return DataType.ERROR;
}
/**
 * Given a map, turn it into a String of the form
 * [key1#value1,key2#value2,...]; a null value renders as an empty string
 * after the '#'.
 * @param m map
 * @return string representation of the map
 */
public static String mapToString(Map<String, Object> m) {
    StringBuilder buf = new StringBuilder("[");
    String separator = "";
    for (Map.Entry<String, Object> entry : m.entrySet()) {
        buf.append(separator);
        separator = ",";
        buf.append(entry.getKey()).append("#");
        Object value = entry.getValue();
        if (value != null) {
            buf.append(value.toString());
        }
    }
    return buf.append("]").toString();
}
/**
 * Test whether two byte arrays (Java byte arrays not Pig byte arrays) are
 * equal.
 * @param lhs byte array 1
 * @param rhs byte array 2
 * @return true if both are null or the two are the same length and have
 * the same bytes.
 */
public static boolean equalByteArrays(byte[] lhs, byte[] rhs) {
    // Arrays.equals implements exactly this contract: both null -> true,
    // exactly one null -> false, length mismatch -> false, otherwise an
    // element-wise comparison. The hand-rolled loop it replaces did the
    // same thing.
    return Arrays.equals(lhs, rhs);
}
/**
 * Utility method that determines the schema from the passed in dataType.
 * If the dataType is Bag or Tuple, then we need to determine the schemas inside this dataType;
 * for this we iterate through the fields inside this field. This method works both for raw objects
 * and ResourceSchema.ResourceFieldSchema field descriptions; the specific behavior is determined by the klass
 * parameter.
 * @param dataType DataType.CHARARRAY, DataType.TUPLE, and so on
 * @param fieldIter iterator over the fields if this is a tuple or a bag
 * @param fieldNum number of fields inside the field if a tuple
 * @param klass should be Object or ResourceSchema.ResourceFieldSchema
 * @return the field schema for the given type; NULL maps to BYTEARRAY,
 *         atomic types map to a schema-less field, TUPLE/BAG recurse into
 *         their members.
 * @throws ExecException
 * @throws FrontendException
 * @throws SchemaMergeException
 */
@SuppressWarnings("deprecation")
private static Schema.FieldSchema determineFieldSchema(byte dataType, Iterator fieldIter,
long fieldNum, Class klass ) throws ExecException, FrontendException, SchemaMergeException {
    switch (dataType) {
    case NULL:
        // A null value carries no type information; treat it as bytearray.
        return new Schema.FieldSchema(null, BYTEARRAY);
    case BOOLEAN:
    case INTEGER:
    case LONG:
    case FLOAT:
    case DOUBLE:
    case BIGINTEGER:
    case BIGDECIMAL:
    case DATETIME:
    case BYTEARRAY:
    case CHARARRAY:
    case MAP:
        // Atomic types (and maps) have no inner schema.
        return new Schema.FieldSchema(null, dataType);
    case TUPLE: {
        Schema schema = null;
        if(fieldNum != 0) {
            schema = new Schema();
            // Recurse into each member field of the tuple.
            // NOTE(review): klass is a raw Class, so klass.cast(...) is typed
            // Object and this statically dispatches to
            // determineFieldSchema(Object) for both element kinds — verify
            // findType handles ResourceFieldSchema instances as intended.
            for(int i = 0; i < fieldNum; ++i) {
                schema.add(determineFieldSchema(klass.cast(fieldIter.next())));
            }
        }
        return new Schema.FieldSchema(null, schema, TUPLE);
    }
    case BAG: {
        Schema schema = null;
        Schema bagSchema = null;
        if(fieldNum != 0) {
            // Collect the inner-tuple schema of every element in the bag.
            ArrayList<Schema> schemas = new ArrayList<Schema>();
            while (fieldIter.hasNext() ) {
                schemas.add(determineFieldSchema(klass.cast(fieldIter.next())).schema);
            }
            schema = schemas.get(0);
            if(null == schema) {
                // First element had no schema: fall back to a bag of
                // schema-less tuples.
                Schema.FieldSchema tupleFs = new Schema.FieldSchema(null, null, TUPLE);
                bagSchema = new Schema(tupleFs);
                bagSchema.setTwoLevelAccessRequired(true);
                return new Schema.FieldSchema(null, bagSchema, BAG);
            }
            int schemaSize = schema.size();
            for(int i = 1; i < schemas.size(); ++i) {
                Schema currSchema = schemas.get(i);
                // Any element whose schema is missing or of a different arity
                // makes the bag's element schema undeterminable.
                if((null == currSchema) || (currSchema.size() != schemaSize)) {
                    Schema.FieldSchema tupleFs = new Schema.FieldSchema(null, null, TUPLE);
                    bagSchema = new Schema(tupleFs);
                    bagSchema.setTwoLevelAccessRequired(true);
                    return new Schema.FieldSchema(null, bagSchema, BAG);
                }
                // Same-arity schemas are merged pairwise into a common schema.
                schema = Schema.mergeSchema(schema, currSchema, false, false, false);
            }
            Schema.FieldSchema tupleFs = new Schema.FieldSchema(null, schema, TUPLE);
            bagSchema = new Schema(tupleFs);
            // since this schema has tuple field schema which internally
            // has a list of field schemas for the actual items in the bag
            // an access to any field in the bag is a two level access
            bagSchema.setTwoLevelAccessRequired(true);
        }
        return new Schema.FieldSchema(null, bagSchema, BAG);
    }
    default: {
        int errCode = 1073;
        String msg = "Cannot determine field schema";
        throw new ExecException(msg, errCode, PigException.INPUT);
    }
    }
}
/***
 * Determine the field schema of an ResourceFieldSchema
 * @param rcFieldSchema the rcFieldSchema we want translated
 * @return the field schema corresponding to the object
 * @throws ExecException,FrontendException,SchemaMergeException
 */
public static Schema.FieldSchema determineFieldSchema(ResourceSchema.ResourceFieldSchema rcFieldSchema)
throws ExecException, FrontendException, SchemaMergeException {
    byte dt = rcFieldSchema.getType();
    Iterator<ResourceSchema.ResourceFieldSchema> fieldIter = null;
    long fieldNum = 0;
    // Only tuples and bags carry an inner schema whose fields must be walked.
    if (dt == TUPLE || dt == BAG) {
        ResourceSchema.ResourceFieldSchema[] innerFields = rcFieldSchema.getSchema().getFields();
        fieldIter = Arrays.asList(innerFields).iterator();
        fieldNum = innerFields.length;
    }
    return determineFieldSchema(dt, fieldIter, fieldNum, ResourceSchema.ResourceFieldSchema.class);
}
/***
 * Determine the field schema of an object
 * @param o the object whose field schema is to be determined
 * @return the field schema corresponding to the object
 * @throws ExecException,FrontendException,SchemaMergeException
 */
public static Schema.FieldSchema determineFieldSchema(Object o)
throws ExecException, FrontendException, SchemaMergeException {
    byte dt = findType(o);
    Iterator fieldIter = null;
    long fieldNum = 0;
    // Tuples and bags expose their members for the recursive walk.
    if (dt == TUPLE) {
        Tuple t = (Tuple) o;
        fieldIter = t.getAll().iterator();
        fieldNum = t.size();
    } else if (dt == BAG) {
        DataBag bag = (DataBag) o;
        fieldNum = bag.size();
        fieldIter = bag.iterator();
    }
    return determineFieldSchema(dt, fieldIter, fieldNum, Object.class);
}
}
| apache-2.0 |
paulstapleton/flowable-engine | modules/flowable-engine/src/main/java/org/flowable/engine/impl/persistence/entity/data/impl/MybatisActivityInstanceDataManager.java | 6066 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.persistence.entity.data.impl;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.flowable.common.engine.impl.db.DbSqlSession;
import org.flowable.common.engine.impl.db.SingleCachedEntityMatcher;
import org.flowable.common.engine.impl.persistence.cache.CachedEntityMatcher;
import org.flowable.engine.impl.ActivityInstanceQueryImpl;
import org.flowable.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.flowable.engine.impl.persistence.entity.ActivityInstanceEntity;
import org.flowable.engine.impl.persistence.entity.ActivityInstanceEntityImpl;
import org.flowable.engine.impl.persistence.entity.data.AbstractProcessDataManager;
import org.flowable.engine.impl.persistence.entity.data.ActivityInstanceDataManager;
import org.flowable.engine.impl.persistence.entity.data.impl.cachematcher.ActivityInstanceMatcher;
import org.flowable.engine.impl.persistence.entity.data.impl.cachematcher.UnfinishedActivityInstanceMatcher;
import org.flowable.engine.runtime.ActivityInstance;
/**
 * MyBatis-backed data manager for {@link ActivityInstanceEntity} persistence:
 * each query method delegates to a named MyBatis statement, combined with a
 * cache matcher so results also reflect entities held in the session cache.
 *
 * @author martin.grofcik
 */
public class MybatisActivityInstanceDataManager extends AbstractProcessDataManager<ActivityInstanceEntity> implements ActivityInstanceDataManager {

    // Matches cached entities that are not yet finished for a given
    // execution/activity pair.
    protected CachedEntityMatcher<ActivityInstanceEntity> unfinishedActivityInstanceMatcher = new UnfinishedActivityInstanceMatcher();
    // Matches cached entities by execution/activity pair regardless of state.
    protected CachedEntityMatcher<ActivityInstanceEntity> activityInstanceMatcher = new ActivityInstanceMatcher();
    // Matches cached entities belonging to a process instance.
    protected CachedEntityMatcher<ActivityInstanceEntity> activitiesByProcessInstanceIdMatcher = new ActivityByProcessInstanceIdMatcher();
    // Matches the single cached entity associated with a task id.
    protected SingleCachedEntityMatcher<ActivityInstanceEntity> activityInstanceByTaskIdMatcher = (entity, param) -> param.equals(entity.getTaskId());

    public MybatisActivityInstanceDataManager(ProcessEngineConfigurationImpl processEngineConfiguration) {
        super(processEngineConfiguration);
    }

    @Override
    public Class<? extends ActivityInstanceEntity> getManagedEntityClass() {
        return ActivityInstanceEntityImpl.class;
    }

    @Override
    public ActivityInstanceEntity create() {
        return new ActivityInstanceEntityImpl();
    }

    @Override
    public List<ActivityInstanceEntity> findUnfinishedActivityInstancesByExecutionAndActivityId(final String executionId, final String activityId) {
        Map<String, Object> params = new HashMap<>();
        params.put("executionId", executionId);
        params.put("activityId", activityId);
        return getList("selectUnfinishedActivityInstanceExecutionIdAndActivityId", params, unfinishedActivityInstanceMatcher, true);
    }

    @Override
    public List<ActivityInstanceEntity> findActivityInstancesByExecutionIdAndActivityId(final String executionId, final String activityId) {
        Map<String, Object> params = new HashMap<>();
        params.put("executionId", executionId);
        params.put("activityId", activityId);
        return getList("selectActivityInstanceExecutionIdAndActivityId", params, activityInstanceMatcher, true);
    }

    @Override
    public List<ActivityInstanceEntity> findActivityInstancesByProcessInstanceId(String processInstanceId, boolean includeDeleted) {
        List<ActivityInstanceEntity> activityInstances = getList(getDbSqlSession(), "selectActivityInstancesByProcessInstanceId", processInstanceId,
            activitiesByProcessInstanceIdMatcher, true, includeDeleted);

        // Order by start time, then by transaction order; transaction order may
        // be null, so nulls sort first within equal start times.
        Collections.sort(activityInstances, Comparator.comparing(ActivityInstanceEntity::getStartTime)
            .thenComparing(Comparator.nullsFirst(Comparator.comparing(ActivityInstanceEntity::getTransactionOrder))));
        return activityInstances;
    }

    @Override
    public ActivityInstanceEntity findActivityInstanceByTaskId(String taskId) {
        return getEntity("selectActivityInstanceByTaskId", taskId, activityInstanceByTaskIdMatcher, true);
    }

    @Override
    public void deleteActivityInstancesByProcessInstanceId(String processInstanceId) {
        DbSqlSession dbSqlSession = getDbSqlSession();

        // Drop matching entities from the session cache first.
        deleteCachedEntities(dbSqlSession, activitiesByProcessInstanceIdMatcher, processInstanceId);

        // Only issue the bulk DELETE when the execution was not inserted in
        // this same session (in that case nothing has reached the database).
        if (!isEntityInserted(dbSqlSession, "execution", processInstanceId)) {
            dbSqlSession.delete("deleteActivityInstancesByProcessInstanceId", processInstanceId, ActivityInstanceEntityImpl.class);
        }
    }

    @Override
    public long findActivityInstanceCountByQueryCriteria(ActivityInstanceQueryImpl activityInstanceQuery) {
        return (Long) getDbSqlSession().selectOne("selectActivityInstanceCountByQueryCriteria", activityInstanceQuery);
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<ActivityInstance> findActivityInstancesByQueryCriteria(ActivityInstanceQueryImpl activityInstanceQuery) {
        return getDbSqlSession().selectList("selectActivityInstancesByQueryCriteria", activityInstanceQuery);
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<ActivityInstance> findActivityInstancesByNativeQuery(Map<String, Object> parameterMap) {
        return getDbSqlSession().selectListWithRawParameter("selectActivityInstanceByNativeQuery", parameterMap);
    }

    @Override
    public long findActivityInstanceCountByNativeQuery(Map<String, Object> parameterMap) {
        return (Long) getDbSqlSession().selectOne("selectActivityInstanceCountByNativeQuery", parameterMap);
    }

}
| apache-2.0 |
trasa/aws-sdk-java | aws-java-sdk-sts/src/main/java/com/amazonaws/services/securitytoken/AWSSecurityTokenServiceClient.java | 55486 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.securitytoken;
import org.w3c.dom.*;
import java.net.*;
import java.util.*;
import java.util.Map.Entry;
import org.apache.commons.logging.*;
import com.amazonaws.*;
import com.amazonaws.auth.*;
import com.amazonaws.handlers.*;
import com.amazonaws.http.*;
import com.amazonaws.internal.*;
import com.amazonaws.metrics.*;
import com.amazonaws.regions.*;
import com.amazonaws.transform.*;
import com.amazonaws.util.*;
import com.amazonaws.util.AWSRequestMetrics.Field;
import com.amazonaws.services.securitytoken.model.*;
import com.amazonaws.services.securitytoken.model.transform.*;
/**
* Client for accessing AWS STS. All service calls made using this client are
* blocking, and will not return until the service call completes.
* <p>
* <fullname>AWS Security Token Service</fullname>
* <p>
* The AWS Security Token Service (STS) is a web service that enables you to
* request temporary, limited-privilege credentials for AWS Identity and Access
* Management (IAM) users or for users that you authenticate (federated users).
* This guide provides descriptions of the STS API. For more detailed
* information about using this service, go to <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html"
* >Temporary Security Credentials</a>.
* </p>
* <note> As an alternative to using the API, you can use one of the AWS SDKs,
* which consist of libraries and sample code for various programming languages
* and platforms (Java, Ruby, .NET, iOS, Android, etc.). The SDKs provide a
* convenient way to create programmatic access to STS. For example, the SDKs
* take care of cryptographically signing requests, managing errors, and
* retrying requests automatically. For information about the AWS SDKs,
* including how to download and install them, see the <a
* href="http://aws.amazon.com/tools/">Tools for Amazon Web Services page</a>.
* </note>
* <p>
* For information about setting up signatures and authorization through the
* API, go to <a href=
* "http://docs.aws.amazon.com/general/latest/gr/signing_aws_api_requests.html"
* target="_blank">Signing AWS API Requests</a> in the <i>AWS General
* Reference</i>. For general information about the Query API, go to <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/IAM_UsingQueryAPI.html"
* target="_blank">Making Query Requests</a> in <i>Using IAM</i>. For
* information about using security tokens with other AWS products, go to <a
* href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_aws-services-that-work-with-iam.html"
* >AWS Services That Work with IAM</a> in the <i>Using IAM</i>.
* </p>
* <p>
* If you're new to AWS and need additional technical information about a
* specific AWS product, you can find the product's technical documentation at
* <a href="http://aws.amazon.com/documentation/"
* target="_blank">http://aws.amazon.com/documentation/</a>.
* </p>
* <p>
* <b>Endpoints</b>
* </p>
* <p>
* The AWS Security Token Service (STS) has a default endpoint of
* https://sts.amazonaws.com that maps to the US East (N. Virginia) region.
* Additional regions are available, but must first be activated in the AWS
* Management Console before you can use a different region's endpoint. For more
* information about activating a region for STS see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html"
* >Activating STS in a New Region</a> in the <i>Using IAM</i>.
* </p>
* <p>
* For information about STS endpoints, see <a
* href="http://docs.aws.amazon.com/general/latest/gr/rande.html#sts_region"
* >Regions and Endpoints</a> in the <i>AWS General Reference</i>.
* </p>
* <p>
* <b>Recording API requests</b>
* </p>
* <p>
* STS supports AWS CloudTrail, which is a service that records AWS calls for
* your AWS account and delivers log files to an Amazon S3 bucket. By using
* information collected by CloudTrail, you can determine what requests were
* successfully made to STS, who made the request, when it was made, and so on.
* To learn more about CloudTrail, including how to turn it on and find your log
* files, see the <a href=
* "http://docs.aws.amazon.com/awscloudtrail/latest/userguide/what_is_cloud_trail_top_level.html"
* >AWS CloudTrail User Guide</a>.
* </p>
*/
public class AWSSecurityTokenServiceClient extends AmazonWebServiceClient
implements AWSSecurityTokenService {
    /** Provider for AWS credentials. Set once in a constructor. */
    private AWSCredentialsProvider awsCredentialsProvider;
    // Logger is keyed to the service interface rather than this client class.
    private static final Log log = LogFactory
            .getLog(AWSSecurityTokenService.class);
    /** Default signing name for the service. */
    private static final String DEFAULT_SIGNING_NAME = "sts";
    /** The region metadata service name for computing region endpoints. */
    private static final String DEFAULT_ENDPOINT_PREFIX = "sts";
    /**
     * List of exception unmarshallers for all AWS STS exceptions.
     * Populated in {@link #init()}; the generic StandardErrorUnmarshaller
     * is registered last, after all typed unmarshallers.
     */
    protected final List<Unmarshaller<AmazonServiceException, Node>> exceptionUnmarshallers = new ArrayList<Unmarshaller<AmazonServiceException, Node>>();
/**
* Constructs a new client to invoke service methods on AWS STS. A
* credentials provider chain will be used that searches for credentials in
* this order:
* <ul>
* <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
* <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
* <li>Instance profile credentials delivered through the Amazon EC2
* metadata service</li>
* </ul>
*
* <p>
* All service calls made using this new client object are blocking, and
* will not return until the service call completes.
*
* @see DefaultAWSCredentialsProviderChain
*/
    public AWSSecurityTokenServiceClient() {
        // Delegate to the (provider, configuration) constructor using the
        // default credentials provider chain and the SDK's default config.
        this(new DefaultAWSCredentialsProviderChain(),
                com.amazonaws.PredefinedClientConfigurations.defaultConfig());
    }
/**
* Constructs a new client to invoke service methods on AWS STS. A
* credentials provider chain will be used that searches for credentials in
* this order:
* <ul>
* <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
* <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
* <li>Instance profile credentials delivered through the Amazon EC2
* metadata service</li>
* </ul>
*
* <p>
* All service calls made using this new client object are blocking, and
* will not return until the service call completes.
*
* @param clientConfiguration
* The client configuration options controlling how this client
* connects to AWS STS (ex: proxy settings, retry counts, etc.).
*
* @see DefaultAWSCredentialsProviderChain
*/
    public AWSSecurityTokenServiceClient(ClientConfiguration clientConfiguration) {
        // Default credentials provider chain, caller-supplied configuration.
        this(new DefaultAWSCredentialsProviderChain(), clientConfiguration);
    }
/**
* Constructs a new client to invoke service methods on AWS STS using the
* specified AWS account credentials.
*
* <p>
* All service calls made using this new client object are blocking, and
* will not return until the service call completes.
*
* @param awsCredentials
* The AWS credentials (access key ID and secret key) to use when
* authenticating with AWS services.
*/
    public AWSSecurityTokenServiceClient(AWSCredentials awsCredentials) {
        // Fixed credentials with the SDK's default client configuration.
        this(awsCredentials, com.amazonaws.PredefinedClientConfigurations
                .defaultConfig());
    }
/**
* Constructs a new client to invoke service methods on AWS STS using the
* specified AWS account credentials and client configuration options.
*
* <p>
* All service calls made using this new client object are blocking, and
* will not return until the service call completes.
*
* @param awsCredentials
* The AWS credentials (access key ID and secret key) to use when
* authenticating with AWS services.
* @param clientConfiguration
* The client configuration options controlling how this client
* connects to AWS STS (ex: proxy settings, retry counts, etc.).
*/
    public AWSSecurityTokenServiceClient(AWSCredentials awsCredentials,
            ClientConfiguration clientConfiguration) {
        super(clientConfiguration);
        // Wrap the fixed credentials in a static provider: it always hands
        // back the same credentials object.
        this.awsCredentialsProvider = new StaticCredentialsProvider(
                awsCredentials);
        init();
    }
/**
* Constructs a new client to invoke service methods on AWS STS using the
* specified AWS account credentials provider.
*
* <p>
* All service calls made using this new client object are blocking, and
* will not return until the service call completes.
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
*/
    public AWSSecurityTokenServiceClient(
            AWSCredentialsProvider awsCredentialsProvider) {
        // Caller-supplied provider with the SDK's default configuration.
        this(awsCredentialsProvider,
                com.amazonaws.PredefinedClientConfigurations.defaultConfig());
    }
/**
* Constructs a new client to invoke service methods on AWS STS using the
* specified AWS account credentials provider and client configuration
* options.
*
* <p>
* All service calls made using this new client object are blocking, and
* will not return until the service call completes.
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
* @param clientConfiguration
* The client configuration options controlling how this client
* connects to AWS STS (ex: proxy settings, retry counts, etc.).
*/
    public AWSSecurityTokenServiceClient(
            AWSCredentialsProvider awsCredentialsProvider,
            ClientConfiguration clientConfiguration) {
        // Null request metric collector: the base client's default applies.
        this(awsCredentialsProvider, clientConfiguration, null);
    }
/**
* Constructs a new client to invoke service methods on AWS STS using the
* specified AWS account credentials provider, client configuration options,
* and request metric collector.
*
* <p>
* All service calls made using this new client object are blocking, and
* will not return until the service call completes.
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
* @param clientConfiguration
* The client configuration options controlling how this client
* connects to AWS STS (ex: proxy settings, retry counts, etc.).
* @param requestMetricCollector
* optional request metric collector
*/
    public AWSSecurityTokenServiceClient(
            AWSCredentialsProvider awsCredentialsProvider,
            ClientConfiguration clientConfiguration,
            RequestMetricCollector requestMetricCollector) {
        // The base class keeps the configuration and metric collector; this
        // class only needs to hold on to the credentials provider.
        super(clientConfiguration, requestMetricCollector);
        this.awsCredentialsProvider = awsCredentialsProvider;
        init();
    }
    /**
     * One-time client setup: registers error unmarshallers, sets the default
     * endpoint/signing names, and loads classpath-declared request handlers.
     */
    private void init() {
        // Typed unmarshallers for each modeled STS error, matched before the
        // generic fallback below.
        exceptionUnmarshallers
                .add(new PackedPolicyTooLargeExceptionUnmarshaller());
        exceptionUnmarshallers.add(new ExpiredTokenExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new IDPCommunicationErrorExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new MalformedPolicyDocumentExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new InvalidIdentityTokenExceptionUnmarshaller());
        exceptionUnmarshallers
                .add(new InvalidAuthorizationMessageExceptionUnmarshaller());
        exceptionUnmarshallers.add(new IDPRejectedClaimExceptionUnmarshaller());
        // Generic fallback is registered last, after all typed unmarshallers.
        exceptionUnmarshallers.add(new StandardErrorUnmarshaller());
        // calling this.setEndPoint(...) will also modify the signer accordingly
        this.setEndpoint("sts.amazonaws.com");
        setServiceNameIntern(DEFAULT_SIGNING_NAME);
        setEndpointPrefix(DEFAULT_ENDPOINT_PREFIX);
        // Load request handlers declared on the classpath for this service.
        HandlerChainFactory chainFactory = new HandlerChainFactory();
        requestHandler2s
                .addAll(chainFactory
                        .newRequestHandlerChain("/com/amazonaws/services/securitytoken/request.handlers"));
        requestHandler2s
                .addAll(chainFactory
                        .newRequestHandler2Chain("/com/amazonaws/services/securitytoken/request.handler2s"));
    }
/**
* <p>
* Returns a set of temporary security credentials (consisting of an access
* key ID, a secret access key, and a security token) that you can use to
* access AWS resources that you might not normally have access to.
* Typically, you use <code>AssumeRole</code> for cross-account access or
* federation.
* </p>
* <p>
* <b>Important:</b> You cannot call <code>AssumeRole</code> by using AWS
* account credentials; access will be denied. You must use IAM user
* credentials or temporary security credentials to call
* <code>AssumeRole</code>.
* </p>
* <p>
* For cross-account access, imagine that you own multiple accounts and need
* to access resources in each account. You could create long-term
* credentials in each account to access those resources. However, managing
* all those credentials and remembering which one can access which account
* can be time consuming. Instead, you can create one set of long-term
* credentials in one account and then use temporary security credentials to
* access all the other accounts by assuming roles in those accounts. For
* more information about roles, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/roles-toplevel.html">IAM
* Roles (Delegation and Federation)</a> in the <i>Using IAM</i>.
* </p>
* <p>
* For federation, you can, for example, grant single sign-on access to the
* AWS Management Console. If you already have an identity and
* authentication system in your corporate network, you don't have to
* recreate user identities in AWS in order to grant those user identities
* access to AWS. Instead, after a user has been authenticated, you call
* <code>AssumeRole</code> (and specify the role with the appropriate
* permissions) to get temporary security credentials for that user. With
* those temporary security credentials, you construct a sign-in URL that
* users can use to access the console. For more information, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html#sts-introduction"
* >Common Scenarios for Temporary Credentials</a> in the <i>Using IAM</i>.
* </p>
* <p>
* The temporary security credentials are valid for the duration that you
* specified when calling <code>AssumeRole</code>, which can be from 900
* seconds (15 minutes) to 3600 seconds (1 hour). The default is 1 hour.
* </p>
* <p>
* Optionally, you can pass an IAM access policy to this operation. If you
* choose not to pass a policy, the temporary security credentials that are
* returned by the operation have the permissions that are defined in the
* access policy of the role that is being assumed. If you pass a policy to
* this operation, the temporary security credentials that are returned by
* the operation have the permissions that are allowed by both the access
* policy of the role that is being assumed, <i><b>and</b></i> the policy
* that you pass. This gives you a way to further restrict the permissions
* for the resulting temporary security credentials. You cannot use the
* passed policy to grant permissions that are in excess of those allowed by
* the access policy of the role that is being assumed. For more
* information, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html"
* >Permissions for AssumeRole, AssumeRoleWithSAML, and
* AssumeRoleWithWebIdentity</a> in the <i>Using IAM</i>.
* </p>
* <p>
* To assume a role, your AWS account must be trusted by the role. The trust
* relationship is defined in the role's trust policy when the role is
* created. You must also have a policy that allows you to call
* <code>sts:AssumeRole</code>.
* </p>
* <p>
* <b>Using MFA with AssumeRole</b>
* </p>
* <p>
* You can optionally include multi-factor authentication (MFA) information
* when you call <code>AssumeRole</code>. This is useful for cross-account
* scenarios in which you want to make sure that the user who is assuming
* the role has been authenticated using an AWS MFA device. In that
* scenario, the trust policy of the role being assumed includes a condition
* that tests for MFA authentication; if the caller does not include valid
* MFA information, the request to assume the role is denied. The condition
* in a trust policy that tests for MFA authentication might look like the
* following example.
* </p>
* <p>
* <code>"Condition": {"Bool": {"aws:MultiFactorAuthPresent": true}}</code>
* </p>
* <p>
* For more information, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/MFAProtectedAPI.html"
* >Configuring MFA-Protected API Access</a> in the <i>Using IAM</i> guide.
* </p>
* <p>
* To use MFA with <code>AssumeRole</code>, you pass values for the
* <code>SerialNumber</code> and <code>TokenCode</code> parameters. The
* <code>SerialNumber</code> value identifies the user's hardware or virtual
* MFA device. The <code>TokenCode</code> is the time-based one-time
     * password (TOTP) that the MFA device produces.
* </p>
     * <p>
     * Request members: {@code RoleArn}, {@code RoleSessionName}, {@code Policy},
     * {@code DurationSeconds}, and {@code ExternalId}.
     * </p>
*
* @param assumeRoleRequest
* @return Result of the AssumeRole operation returned by the service.
* @throws MalformedPolicyDocumentException
* The request was rejected because the policy document was
* malformed. The error message describes the specific error.
* @throws PackedPolicyTooLargeException
* The request was rejected because the policy document was too
* large. The error message describes how big the policy document
* is, in packed form, as a percentage of what the API allows.
* @sample AWSSecurityTokenService.AssumeRole
*/
@Override
public AssumeRoleResult assumeRole(AssumeRoleRequest assumeRoleRequest) {
ExecutionContext executionContext = createExecutionContext(assumeRoleRequest);
AWSRequestMetrics awsRequestMetrics = executionContext
.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<AssumeRoleRequest> request = null;
Response<AssumeRoleResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new AssumeRoleRequestMarshaller().marshall(super
.beforeMarshalling(assumeRoleRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
StaxResponseHandler<AssumeRoleResult> responseHandler = new StaxResponseHandler<AssumeRoleResult>(
new AssumeRoleResultStaxUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a set of temporary security credentials for users who have been
* authenticated via a SAML authentication response. This operation provides
* a mechanism for tying an enterprise identity store or directory to
* role-based AWS access without user-specific credentials or configuration.
* </p>
* <p>
* The temporary security credentials returned by this operation consist of
* an access key ID, a secret access key, and a security token. Applications
* can use these temporary security credentials to sign calls to AWS
* services. The credentials are valid for the duration that you specified
* when calling <code>AssumeRoleWithSAML</code>, which can be up to 3600
* seconds (1 hour) or until the time specified in the SAML authentication
* response's <code>SessionNotOnOrAfter</code> value, whichever is shorter.
* </p>
* <note>The maximum duration for a session is 1 hour, and the minimum
* duration is 15 minutes, even if values outside this range are specified.
* </note>
* <p>
* Optionally, you can pass an IAM access policy to this operation. If you
* choose not to pass a policy, the temporary security credentials that are
* returned by the operation have the permissions that are defined in the
* access policy of the role that is being assumed. If you pass a policy to
* this operation, the temporary security credentials that are returned by
* the operation have the permissions that are allowed by both the access
* policy of the role that is being assumed, <i><b>and</b></i> the policy
* that you pass. This gives you a way to further restrict the permissions
* for the resulting temporary security credentials. You cannot use the
* passed policy to grant permissions that are in excess of those allowed by
* the access policy of the role that is being assumed. For more
* information, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html"
* >Permissions for AssumeRole, AssumeRoleWithSAML, and
* AssumeRoleWithWebIdentity</a> in the <i>Using IAM</i>.
* </p>
* <p>
* Before your application can call <code>AssumeRoleWithSAML</code>, you
* must configure your SAML identity provider (IdP) to issue the claims
* required by AWS. Additionally, you must use AWS Identity and Access
* Management (IAM) to create a SAML provider entity in your AWS account
* that represents your identity provider, and create an IAM role that
* specifies this SAML provider in its trust policy.
* </p>
* <p>
* Calling <code>AssumeRoleWithSAML</code> does not require the use of AWS
* security credentials. The identity of the caller is validated by using
* keys in the metadata document that is uploaded for the SAML provider
* entity for your identity provider.
* </p>
* <p>
* For more information, see the following resources:
* </p>
* <ul>
* <li><a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_saml.html"
* >About SAML 2.0-based Federation</a> in the <i>Using IAM</i>.</li>
* <li><a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml.html"
* >Creating SAML Identity Providers</a> in the <i>Using IAM</i>.</li>
* <li><a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_saml_relying-party.html"
* >Configuring a Relying Party and Claims</a> in the <i>Using IAM</i>.</li>
* <li><a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-idp_saml.html"
* >Creating a Role for SAML 2.0 Federation</a> in the <i>Using IAM</i>.</li>
* </ul>
     * <p>
     * Request members: {@code RoleArn}, {@code SAMLAssertion}, {@code Policy},
     * and {@code DurationSeconds}.
     * </p>
*
* @param assumeRoleWithSAMLRequest
* @return Result of the AssumeRoleWithSAML operation returned by the
* service.
* @throws MalformedPolicyDocumentException
* The request was rejected because the policy document was
* malformed. The error message describes the specific error.
* @throws PackedPolicyTooLargeException
* The request was rejected because the policy document was too
* large. The error message describes how big the policy document
* is, in packed form, as a percentage of what the API allows.
* @throws IDPRejectedClaimException
* The identity provider (IdP) reported that authentication failed.
* This might be because the claim is invalid.</p>
* <p>
* If this error is returned for the
* <code>AssumeRoleWithWebIdentity</code> operation, it can also
* mean that the claim has expired or has been explicitly revoked.
* @throws InvalidIdentityTokenException
* The web identity token that was passed could not be validated by
* AWS. Get a new identity token from the identity provider and then
* retry the request.
* @throws ExpiredTokenException
* The web identity token that was passed is expired or is not
* valid. Get a new identity token from the identity provider and
* then retry the request.
* @sample AWSSecurityTokenService.AssumeRoleWithSAML
*/
@Override
public AssumeRoleWithSAMLResult assumeRoleWithSAML(
AssumeRoleWithSAMLRequest assumeRoleWithSAMLRequest) {
ExecutionContext executionContext = createExecutionContext(assumeRoleWithSAMLRequest);
AWSRequestMetrics awsRequestMetrics = executionContext
.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<AssumeRoleWithSAMLRequest> request = null;
Response<AssumeRoleWithSAMLResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new AssumeRoleWithSAMLRequestMarshaller()
.marshall(super
.beforeMarshalling(assumeRoleWithSAMLRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
StaxResponseHandler<AssumeRoleWithSAMLResult> responseHandler = new StaxResponseHandler<AssumeRoleWithSAMLResult>(
new AssumeRoleWithSAMLResultStaxUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a set of temporary security credentials for users who have been
* authenticated in a mobile or web application with a web identity
* provider, such as Amazon Cognito, Login with Amazon, Facebook, Google, or
* any OpenID Connect-compatible identity provider.
* </p>
* <note>
* <p>
* For mobile applications, we recommend that you use Amazon Cognito. You
* can use Amazon Cognito with the <a
* href="http://aws.amazon.com/sdkforios/">AWS SDK for iOS</a> and the <a
* href="http://aws.amazon.com/sdkforandroid/">AWS SDK for Android</a> to
* uniquely identify a user and supply the user with a consistent identity
* throughout the lifetime of an application.
* </p>
* <p>
* To learn more about Amazon Cognito, see <a href=
* "http://docs.aws.amazon.com/mobile/sdkforandroid/developerguide/cognito-auth.html#d0e840"
* >Amazon Cognito Overview</a> in the <i>AWS SDK for Android Developer
* Guide</i> guide and <a href=
* "http://docs.aws.amazon.com/mobile/sdkforios/developerguide/cognito-auth.html#d0e664"
* >Amazon Cognito Overview</a> in the <i>AWS SDK for iOS Developer
* Guide</i>.
* </p>
* </note>
* <p>
* Calling <code>AssumeRoleWithWebIdentity</code> does not require the use
* of AWS security credentials. Therefore, you can distribute an application
* (for example, on mobile devices) that requests temporary security
* credentials without including long-term AWS credentials in the
* application, and without deploying server-based proxy services that use
* long-term AWS credentials. Instead, the identity of the caller is
* validated by using a token from the web identity provider.
* </p>
* <p>
* The temporary security credentials returned by this API consist of an
* access key ID, a secret access key, and a security token. Applications
* can use these temporary security credentials to sign calls to AWS service
* APIs. The credentials are valid for the duration that you specified when
* calling <code>AssumeRoleWithWebIdentity</code>, which can be from 900
* seconds (15 minutes) to 3600 seconds (1 hour). By default, the temporary
* security credentials are valid for 1 hour.
* </p>
* <p>
* Optionally, you can pass an IAM access policy to this operation. If you
* choose not to pass a policy, the temporary security credentials that are
* returned by the operation have the permissions that are defined in the
* access policy of the role that is being assumed. If you pass a policy to
* this operation, the temporary security credentials that are returned by
* the operation have the permissions that are allowed by both the access
* policy of the role that is being assumed, <i><b>and</b></i> the policy
* that you pass. This gives you a way to further restrict the permissions
* for the resulting temporary security credentials. You cannot use the
* passed policy to grant permissions that are in excess of those allowed by
* the access policy of the role that is being assumed. For more
* information, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_assumerole.html"
* >Permissions for AssumeRole, AssumeRoleWithSAML, and
* AssumeRoleWithWebIdentity</a> in the <i>Using IAM</i>.
* </p>
* <p>
* Before your application can call <code>AssumeRoleWithWebIdentity</code>,
* you must have an identity token from a supported identity provider and
* create a role that the application can assume. The role that your
* application assumes must trust the identity provider that is associated
* with the identity token. In other words, the identity provider must be
* specified in the role's trust policy.
* </p>
* <p>
* For more information about how to use web identity federation and the
* <code>AssumeRoleWithWebIdentity</code> API, see the following resources:
* </p>
* <ul>
* <li><a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_oidc_manual"
* >Using Web Identity Federation APIs for Mobile Apps</a> and <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_assumerolewithwebidentity"
* >Federation Through a Web-based Identity Provider</a>.</li>
* <li><a href=
* "https://web-identity-federation-playground.s3.amazonaws.com/index.html">
* Web Identity Federation Playground</a>. This interactive website lets you
* walk through the process of authenticating via Login with Amazon,
* Facebook, or Google, getting temporary security credentials, and then
* using those credentials to make a request to AWS.</li>
* <li><a href="http://aws.amazon.com/sdkforios/">AWS SDK for iOS</a> and <a
* href="http://aws.amazon.com/sdkforandroid/">AWS SDK for Android</a>.
* These toolkits contain sample apps that show how to invoke the identity
* providers, and then how to use the information from these providers to
* get and use temporary security credentials.</li>
* <li><a href="http://aws.amazon.com/articles/4617974389850313">Web
* Identity Federation with Mobile Applications</a>. This article discusses
* web identity federation and shows an example of how to use web identity
* federation to get access to content in Amazon S3.</li>
* </ul>
*
* @param assumeRoleWithWebIdentityRequest
* @return Result of the AssumeRoleWithWebIdentity operation returned by the
* service.
* @throws MalformedPolicyDocumentException
* The request was rejected because the policy document was
* malformed. The error message describes the specific error.
* @throws PackedPolicyTooLargeException
* The request was rejected because the policy document was too
* large. The error message describes how big the policy document
* is, in packed form, as a percentage of what the API allows.
* @throws IDPRejectedClaimException
* The identity provider (IdP) reported that authentication failed.
* This might be because the claim is invalid.</p>
* <p>
* If this error is returned for the
* <code>AssumeRoleWithWebIdentity</code> operation, it can also
* mean that the claim has expired or has been explicitly revoked.
* @throws IDPCommunicationErrorException
* The request could not be fulfilled because the non-AWS identity
* provider (IDP) that was asked to verify the incoming identity
* token could not be reached. This is often a transient error
* caused by network conditions. Retry the request a limited number
* of times so that you don't exceed the request rate. If the error
* persists, the non-AWS identity provider might be down or not
* responding.
* @throws InvalidIdentityTokenException
* The web identity token that was passed could not be validated by
* AWS. Get a new identity token from the identity provider and then
* retry the request.
* @throws ExpiredTokenException
* The web identity token that was passed is expired or is not
* valid. Get a new identity token from the identity provider and
* then retry the request.
* @sample AWSSecurityTokenService.AssumeRoleWithWebIdentity
*/
@Override
public AssumeRoleWithWebIdentityResult assumeRoleWithWebIdentity(
AssumeRoleWithWebIdentityRequest assumeRoleWithWebIdentityRequest) {
ExecutionContext executionContext = createExecutionContext(assumeRoleWithWebIdentityRequest);
AWSRequestMetrics awsRequestMetrics = executionContext
.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<AssumeRoleWithWebIdentityRequest> request = null;
Response<AssumeRoleWithWebIdentityResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new AssumeRoleWithWebIdentityRequestMarshaller()
.marshall(super
.beforeMarshalling(assumeRoleWithWebIdentityRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
StaxResponseHandler<AssumeRoleWithWebIdentityResult> responseHandler = new StaxResponseHandler<AssumeRoleWithWebIdentityResult>(
new AssumeRoleWithWebIdentityResultStaxUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Decodes additional information about the authorization status of a
* request from an encoded message returned in response to an AWS request.
* </p>
* <p>
* For example, if a user is not authorized to perform an action that he or
* she has requested, the request returns a
* <code>Client.UnauthorizedOperation</code> response (an HTTP 403
* response). Some AWS actions additionally return an encoded message that
* can provide details about this authorization failure.
* </p>
* <note> Only certain AWS actions return an encoded authorization message.
* The documentation for an individual action indicates whether that action
* returns an encoded message in addition to returning an HTTP code. </note>
* <p>
* The message is encoded because the details of the authorization status
* can constitute privileged information that the user who requested the
* action should not see. To decode an authorization status message, a user
* must be granted permissions via an IAM policy to request the
* <code>DecodeAuthorizationMessage</code> (
* <code>sts:DecodeAuthorizationMessage</code>) action.
* </p>
* <p>
* The decoded message includes the following type of information:
* </p>
* <ul>
* <li>Whether the request was denied due to an explicit deny or due to the
* absence of an explicit allow. For more information, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_evaluation-logic.html#policy-eval-denyallow"
* >Determining Whether a Request is Allowed or Denied</a> in the <i>Using
* IAM</i>.</li>
* <li>The principal who made the request.</li>
* <li>The requested action.</li>
* <li>The requested resource.</li>
* <li>The values of condition keys in the context of the user's request.</li>
* </ul>
*
* @param decodeAuthorizationMessageRequest
* @return Result of the DecodeAuthorizationMessage operation returned by
* the service.
* @throws InvalidAuthorizationMessageException
* The error returned if the message passed to
* <code>DecodeAuthorizationMessage</code> was invalid. This can
* happen if the token contains invalid characters, such as
* linebreaks.
* @sample AWSSecurityTokenService.DecodeAuthorizationMessage
*/
@Override
public DecodeAuthorizationMessageResult decodeAuthorizationMessage(
DecodeAuthorizationMessageRequest decodeAuthorizationMessageRequest) {
ExecutionContext executionContext = createExecutionContext(decodeAuthorizationMessageRequest);
AWSRequestMetrics awsRequestMetrics = executionContext
.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<DecodeAuthorizationMessageRequest> request = null;
Response<DecodeAuthorizationMessageResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new DecodeAuthorizationMessageRequestMarshaller()
.marshall(super
.beforeMarshalling(decodeAuthorizationMessageRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
StaxResponseHandler<DecodeAuthorizationMessageResult> responseHandler = new StaxResponseHandler<DecodeAuthorizationMessageResult>(
new DecodeAuthorizationMessageResultStaxUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a set of temporary security credentials (consisting of an access
* key ID, a secret access key, and a security token) for a federated user.
* A typical use is in a proxy application that gets temporary security
* credentials on behalf of distributed applications inside a corporate
* network. Because you must call the <code>GetFederationToken</code> action
* using the long-term security credentials of an IAM user, this call is
* appropriate in contexts where those credentials can be safely stored,
* usually in a server-based application.
* </p>
* <note>
* <p>
* If you are creating a mobile-based or browser-based app that can
* authenticate users using a web identity provider like Login with Amazon,
* Facebook, Google, or an OpenID Connect-compatible identity provider, we
* recommend that you use <a href="http://aws.amazon.com/cognito/">Amazon
* Cognito</a> or <code>AssumeRoleWithWebIdentity</code>. For more
* information, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_assumerolewithwebidentity"
* >Federation Through a Web-based Identity Provider</a>.
* </p>
* </note>
* <p>
* The <code>GetFederationToken</code> action must be called by using the
* long-term AWS security credentials of an IAM user. You can also call
* <code>GetFederationToken</code> using the security credentials of an AWS
* account (root), but this is not recommended. Instead, we recommend that
* you create an IAM user for the purpose of the proxy application and then
* attach a policy to the IAM user that limits federated users to only the
* actions and resources they need access to. For more information, see <a
* href
* ="http://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html"
* >IAM Best Practices</a> in the <i>Using IAM</i>.
* </p>
* <p>
* The temporary security credentials that are obtained by using the
* long-term credentials of an IAM user are valid for the specified
* duration, between 900 seconds (15 minutes) and 129600 seconds (36 hours).
* Temporary credentials that are obtained by using AWS account (root)
* credentials have a maximum duration of 3600 seconds (1 hour)
* </p>
* <p>
* <b>Permissions</b>
* </p>
* <p>
* The permissions for the temporary security credentials returned by
* <code>GetFederationToken</code> are determined by a combination of the
* following:
* </p>
* <ul>
* <li>The policy or policies that are attached to the IAM user whose
* credentials are used to call <code>GetFederationToken</code>.</li>
* <li>The policy that is passed as a parameter in the call.</li>
* </ul>
* <p>
* The passed policy is attached to the temporary security credentials that
* result from the <code>GetFederationToken</code> API call--that is, to the
* <i>federated user</i>. When the federated user makes an AWS request, AWS
* evaluates the policy attached to the federated user in combination with
* the policy or policies attached to the IAM user whose credentials were
* used to call <code>GetFederationToken</code>. AWS allows the federated
* user's request only when both the federated user <i><b>and</b></i> the
* IAM user are explicitly allowed to perform the requested action. The
* passed policy cannot grant more permissions than those that are defined
* in the IAM user policy.
* </p>
* <p>
* A typical use case is that the permissions of the IAM user whose
* credentials are used to call <code>GetFederationToken</code> are designed
* to allow access to all the actions and resources that any federated user
* will need. Then, for individual users, you pass a policy to the operation
* that scopes down the permissions to a level that's appropriate to that
* individual user, using a policy that allows only a subset of permissions
* that are granted to the IAM user.
* </p>
* <p>
* If you do not pass a policy, the resulting temporary security credentials
* have no effective permissions. The only exception is when the temporary
* security credentials are used to access a resource that has a
* resource-based policy that specifically allows the federated user to
* access the resource.
* </p>
* <p>
* For more information about how permissions work, see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_control-access_getfederationtoken.html"
* >Permissions for GetFederationToken</a>. For information about using
* <code>GetFederationToken</code> to create temporary security credentials,
* see <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_getfederationtoken"
     * >GetFederationToken&mdash;Federation Through a Custom Identity Broker</a>.
* </p>
*
* @param getFederationTokenRequest
* @return Result of the GetFederationToken operation returned by the
* service.
* @throws MalformedPolicyDocumentException
* The request was rejected because the policy document was
* malformed. The error message describes the specific error.
* @throws PackedPolicyTooLargeException
* The request was rejected because the policy document was too
* large. The error message describes how big the policy document
* is, in packed form, as a percentage of what the API allows.
* @sample AWSSecurityTokenService.GetFederationToken
*/
@Override
public GetFederationTokenResult getFederationToken(
GetFederationTokenRequest getFederationTokenRequest) {
ExecutionContext executionContext = createExecutionContext(getFederationTokenRequest);
AWSRequestMetrics awsRequestMetrics = executionContext
.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<GetFederationTokenRequest> request = null;
Response<GetFederationTokenResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new GetFederationTokenRequestMarshaller()
.marshall(super
.beforeMarshalling(getFederationTokenRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
StaxResponseHandler<GetFederationTokenResult> responseHandler = new StaxResponseHandler<GetFederationTokenResult>(
new GetFederationTokenResultStaxUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
/**
* <p>
* Returns a set of temporary credentials for an AWS account or IAM user.
* The credentials consist of an access key ID, a secret access key, and a
* security token. Typically, you use <code>GetSessionToken</code> if you
* want to use MFA to protect programmatic calls to specific AWS APIs like
* Amazon EC2 <code>StopInstances</code>. MFA-enabled IAM users would need
* to call <code>GetSessionToken</code> and submit an MFA code that is
* associated with their MFA device. Using the temporary security
* credentials that are returned from the call, IAM users can then make
* programmatic calls to APIs that require MFA authentication. If you do not
* supply a correct MFA code, then the API returns an access denied error.
* </p>
* <p>
* The <code>GetSessionToken</code> action must be called by using the
* long-term AWS security credentials of the AWS account or an IAM user.
* Credentials that are created by IAM users are valid for the duration that
* you specify, between 900 seconds (15 minutes) and 129600 seconds (36
* hours); credentials that are created by using account credentials have a
* maximum duration of 3600 seconds (1 hour).
* </p>
* <note>
* <p>
* We recommend that you do not call <code>GetSessionToken</code> with root
* account credentials. Instead, follow our <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#create-iam-users"
* >best practices</a> by creating one or more IAM users, giving them the
* necessary permissions, and using IAM users for everyday interaction with
* AWS.
* </p>
* </note>
* <p>
* The permissions associated with the temporary security credentials
* returned by <code>GetSessionToken</code> are based on the permissions
* associated with account or IAM user whose credentials are used to call
* the action. If <code>GetSessionToken</code> is called using root account
* credentials, the temporary credentials have root account permissions.
* Similarly, if <code>GetSessionToken</code> is called using the
* credentials of an IAM user, the temporary credentials have the same
* permissions as the IAM user.
* </p>
* <p>
* For more information about using <code>GetSessionToken</code> to create
* temporary credentials, go to <a href=
* "http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_request.html#api_getsessiontoken"
* >Temporary Credentials for Users in Untrusted Environments</a> in the
* <i>Using IAM</i>.
* </p>
*
* @param getSessionTokenRequest
* @return Result of the GetSessionToken operation returned by the service.
* @sample AWSSecurityTokenService.GetSessionToken
*/
@Override
public GetSessionTokenResult getSessionToken(
GetSessionTokenRequest getSessionTokenRequest) {
ExecutionContext executionContext = createExecutionContext(getSessionTokenRequest);
AWSRequestMetrics awsRequestMetrics = executionContext
.getAwsRequestMetrics();
awsRequestMetrics.startEvent(Field.ClientExecuteTime);
Request<GetSessionTokenRequest> request = null;
Response<GetSessionTokenResult> response = null;
try {
awsRequestMetrics.startEvent(Field.RequestMarshallTime);
try {
request = new GetSessionTokenRequestMarshaller().marshall(super
.beforeMarshalling(getSessionTokenRequest));
// Binds the request metrics to the current request.
request.setAWSRequestMetrics(awsRequestMetrics);
} finally {
awsRequestMetrics.endEvent(Field.RequestMarshallTime);
}
StaxResponseHandler<GetSessionTokenResult> responseHandler = new StaxResponseHandler<GetSessionTokenResult>(
new GetSessionTokenResultStaxUnmarshaller());
response = invoke(request, responseHandler, executionContext);
return response.getAwsResponse();
} finally {
endClientExecution(awsRequestMetrics, request, response);
}
}
@Override
public GetSessionTokenResult getSessionToken() {
return getSessionToken(new GetSessionTokenRequest());
}
/**
* Returns additional metadata for a previously executed successful,
* request, typically used for debugging issues where a service isn't acting
* as expected. This data isn't considered part of the result data returned
* by an operation, so it's available through this separate, diagnostic
* interface.
* <p>
* Response metadata is only cached for a limited period of time, so if you
* need to access this extra diagnostic information for an executed request,
* you should use this method to retrieve it as soon as possible after
* executing the request.
*
* @param request
* The originally executed request
*
* @return The response metadata for the specified request, or null if none
* is available.
*/
public ResponseMetadata getCachedResponseMetadata(
AmazonWebServiceRequest request) {
return client.getResponseMetadataForRequest(request);
}
private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(
Request<Y> request,
HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
ExecutionContext executionContext) {
request.setEndpoint(endpoint);
request.setTimeOffset(timeOffset);
AWSRequestMetrics awsRequestMetrics = executionContext
.getAwsRequestMetrics();
AWSCredentials credentials;
awsRequestMetrics.startEvent(Field.CredentialsRequestTime);
try {
credentials = awsCredentialsProvider.getCredentials();
} finally {
awsRequestMetrics.endEvent(Field.CredentialsRequestTime);
}
AmazonWebServiceRequest originalRequest = request.getOriginalRequest();
if (originalRequest != null
&& originalRequest.getRequestCredentials() != null) {
credentials = originalRequest.getRequestCredentials();
}
executionContext.setCredentials(credentials);
DefaultErrorResponseHandler errorResponseHandler = new DefaultErrorResponseHandler(
exceptionUnmarshallers);
return client.execute(request, responseHandler, errorResponseHandler,
executionContext);
}
}
| apache-2.0 |
mingxin6/incu | inc2_tw2/src/com/mod/cashier/controller/CashierViewController.java | 1095 | package com.mod.cashier.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.Controller;
import com.mod.cashier.controller.MySQLAccessCashier;
import com.model.Cashier;
public class CashierViewController implements Controller {

    protected final Log logger = LogFactory.getLog(getClass());

    /**
     * Loads the cashier identified by the request parameter {@code uid} and
     * renders it with the "cashierView" view under the model key "cashier".
     * If the lookup fails, the error is logged and an empty Cashier bean is
     * rendered instead (preserving the original best-effort behavior).
     */
    public ModelAndView handleRequest(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        String uid = request.getParameter("uid");
        Cashier cashier = new Cashier();
        MySQLAccessCashier dao = new MySQLAccessCashier();
        try {
            cashier = dao.findCashierById(uid);
            cashier.setEntityStatusRS(request);
        } catch (Exception e) {
            // Use the class logger instead of printStackTrace() so the failure
            // shows up in the application log with context; fall through to an
            // empty bean as before.
            logger.error("Failed to load cashier with uid=" + uid, e);
        }
        return new ModelAndView("cashierView", "cashier", cashier);
    }
}
| apache-2.0 |
arnost-starosta/midpoint | gui/admin-gui/src/main/java/com/evolveum/midpoint/web/component/wizard/resource/component/capability/CapabilityScriptPanel.java | 4829 | /*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.component.wizard.resource.component.capability;
import com.evolveum.midpoint.gui.api.component.BasePanel;
import com.evolveum.midpoint.web.component.form.multivalue.MultiValueTextPanel;
import com.evolveum.midpoint.web.component.wizard.resource.dto.CapabilityDto;
import com.evolveum.midpoint.web.page.admin.configuration.component.EmptyOnChangeAjaxFormUpdatingBehavior;
import com.evolveum.midpoint.web.page.admin.resources.PageResourceWizard;
import com.evolveum.midpoint.web.util.InfoTooltipBehavior;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ProvisioningScriptHostType;
import com.evolveum.midpoint.xml.ns._public.resource.capabilities_3.ScriptCapabilityType;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import java.util.ArrayList;
import java.util.List;
/**
* @author shood
* */
public class CapabilityScriptPanel extends BasePanel<CapabilityDto<ScriptCapabilityType>> {

    private static final String ID_ENABLED = "enabled";
    private static final String ID_ON_CONNECTOR = "onConnectorValue";
    private static final String ID_ON_RESOURCE = "onResourceValue";
    private static final String ID_T_ENABLED = "enabledTooltip";
    private static final String ID_T_ON_CONNECTOR = "onConnectorTooltip";
    private static final String ID_T_ON_RESOURCE = "onResourceTooltip";

    public CapabilityScriptPanel(String componentId, IModel<CapabilityDto<ScriptCapabilityType>> model, WebMarkupContainer capabilitiesTable,
            PageResourceWizard parentPage){
        super(componentId, model);
        initLayout(capabilitiesTable, parentPage);
    }

    /**
     * Builds the panel: an "enabled" checkbox that refreshes the capabilities
     * table, two multi-value text panels for the script languages supported on
     * the connector and on the resource, and the three info tooltips.
     */
    protected void initLayout(final WebMarkupContainer capabilitiesTable, PageResourceWizard parentPage) {
        parentPage.addEditingEnabledBehavior(this);

        CheckBox enabled = new CheckBox(ID_ENABLED, new PropertyModel<>(getModel(), "capability.enabled"));
        enabled.add(new EmptyOnChangeAjaxFormUpdatingBehavior() {
            @Override
            protected void onUpdate(AjaxRequestTarget target) {
                target.add(capabilitiesTable);
            }
        });
        add(enabled);

        MultiValueTextPanel onConnector = new MultiValueTextPanel(ID_ON_CONNECTOR,
                prepareHostLanguageModel(ProvisioningScriptHostType.CONNECTOR), parentPage.getReadOnlyModel(), true);
        add(onConnector);

        // Pass the model directly: the previous Model.of(prepareOnResourceModel())
        // wrapped an IModel inside another Model, so the panel saw a model whose
        // value was a model rather than the language list itself.
        MultiValueTextPanel onResource = new MultiValueTextPanel(ID_ON_RESOURCE,
                prepareHostLanguageModel(ProvisioningScriptHostType.RESOURCE), parentPage.getReadOnlyModel(), true);
        add(onResource);

        Label enabledTooltip = new Label(ID_T_ENABLED);
        enabledTooltip.add(new InfoTooltipBehavior());
        add(enabledTooltip);

        Label onConnectorTooltip = new Label(ID_T_ON_CONNECTOR);
        onConnectorTooltip.add(new InfoTooltipBehavior());
        add(onConnectorTooltip);

        Label onResourceTooltip = new Label(ID_T_ON_RESOURCE);
        onResourceTooltip.add(new InfoTooltipBehavior());
        add(onResourceTooltip);
    }

    /**
     * Returns a model of the language list declared for the given host type
     * (connector or resource) in the script capability, or a model of an empty
     * list when no matching host entry exists. Replaces the two near-identical
     * prepareOnConnectorModel/prepareOnResourceModel helpers.
     */
    private IModel prepareHostLanguageModel(ProvisioningScriptHostType hostType) {
        CapabilityDto dto = (CapabilityDto) getModel().getObject();
        ScriptCapabilityType script = (ScriptCapabilityType) dto.getCapability();
        for (ScriptCapabilityType.Host host : script.getHost()) {
            if (hostType.equals(host.getType())) {
                return new PropertyModel<List<String>>(host, "language");
            }
        }
        List<String> emptyList = new ArrayList<>();
        return Model.of(emptyList);
    }
}
| apache-2.0 |
KITSABHIJIT/Ihalkhata-WebService | IhalkhataWebService/src/com/exp/cemk/AndroidNotification/GCMBroadcast.java | 4283 | package com.exp.cemk.AndroidNotification;
import java.util.ArrayList;
import java.util.List;
import com.exp.cemk.constants.GCMBroadCastConstants;
import com.google.android.gcm.server.Message;
import com.google.android.gcm.server.MulticastResult;
import com.google.android.gcm.server.Sender;
/**
* Servlet implementation class GCMBroadcast
*/
/**
 * Sends broadcast push messages to Android devices through Google Cloud
 * Messaging. Accessed as a singleton via {@link #getInstance()}.
 */
public class GCMBroadcast {

    private static GCMBroadcast _instance = new GCMBroadcast();

    public static GCMBroadcast getInstance() {
        return _instance;
    }

    /**
     * Sends {@code userMessage} to every registration id in
     * {@code androidTargets} as a single multicast GCM message.
     * <p>
     * The message uses a fixed collapse key, so a device that was offline
     * receives only the most recent pending message when it reconnects, and a
     * 30 second time-to-live. Transmission errors are logged and swallowed
     * (best-effort delivery).
     *
     * @param userMessage    payload placed under the "message" data key
     *                       (typically a JSON string)
     * @param androidTargets GCM registration ids of the target devices
     */
    public void sendBroadCastMessage(String userMessage,
            List<String> androidTargets) {
        String collapseKey = "CollapseKey";
        Sender sender = new Sender(GCMBroadCastConstants.SENDER_ID);
        Message message = new Message.Builder()
                .collapseKey(collapseKey).timeToLive(30).delayWhileIdle(true)
                .addData("message", userMessage).build();

        System.out.println("Broadcast Message: " + userMessage);
        System.out.println("SENDER_ID: " + GCMBroadCastConstants.SENDER_ID);
        for (String s : androidTargets)
            System.out.println("androidTargets: " + s);

        try {
            // Multicast send: the second argument is the list of registration
            // ids, the third the number of retries.
            MulticastResult result = sender.send(message, androidTargets, 1);
            if (result.getResults() != null) {
                // Fixed: the previous code logged the canonical-id count behind
                // a dead empty if-block and misspelled "Successful".
                System.out.println("Broadcast successful. Canonical ids: "
                        + result.getCanonicalIds() + ", failures: " + result.getFailure());
            } else {
                System.out.println("Broadcast failure: " + result.getFailure());
            }
        } catch (Exception e) {
            // Best-effort: a delivery failure must not propagate to the caller.
            e.printStackTrace();
        }
    }

    /**
     * Manual smoke test. NOTE(review): device registration tokens should not
     * be hardcoded in source; load them from configuration or a data store.
     */
    public static void main(String[] args) {
        List<String> androidTargets = new ArrayList<String>();
        androidTargets.add("APA91bHz41JYp3jHXLdBCw88DOnWQDk2yaaeyoe5Fthdr0IFd_51jbWd9GaVKo8f6K1rKNJm5a28qrJGG4TIh2U24SUVdzng3YgARuiVTOlbYqNpR2YoZXA4F2dBmLIpWME7g2nFENEs6pOxI7cb0KTBkap7clTbf1Hw7LaLDpQfAvf2yO1Urdg");
        String msg = "{\"msg\":\"Enjoy the new features by updating your Ihalkhata App from www.ihalkhata.com.\"}";
        GCMBroadcast.getInstance().sendBroadCastMessage(msg, androidTargets);
    }
}
| apache-2.0 |
alien4cloud/alien4cloud | alien4cloud-core/src/main/java/alien4cloud/orchestrators/services/OrchestratorStateService.java | 15535 | package alien4cloud.orchestrators.services;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.annotation.Resource;
import javax.inject.Inject;
import alien4cloud.events.DeploymentRecoveredEvent;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import org.alien4cloud.tosca.model.CSARDependency;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.stereotype.Component;
import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.dao.model.GetMultipleDataResult;
import alien4cloud.deployment.DeploymentService;
import alien4cloud.exception.AlreadyExistException;
import alien4cloud.model.common.Usage;
import alien4cloud.model.deployment.Deployment;
import alien4cloud.model.orchestrators.Orchestrator;
import alien4cloud.model.orchestrators.OrchestratorConfiguration;
import alien4cloud.model.orchestrators.OrchestratorState;
import alien4cloud.orchestrators.events.AfterOrchestratorEnabled;
import alien4cloud.orchestrators.events.BeforeOrchestratorDisabled;
import alien4cloud.orchestrators.locations.services.LocationService;
import alien4cloud.orchestrators.locations.services.PluginArchiveIndexer;
import alien4cloud.orchestrators.plugin.ILocationAutoConfigurer;
import alien4cloud.orchestrators.plugin.IOrchestratorPlugin;
import alien4cloud.orchestrators.plugin.IOrchestratorPluginFactory;
import alien4cloud.paas.IPaaSProviderConfiguration;
import alien4cloud.paas.OrchestratorPluginService;
import alien4cloud.paas.exception.PluginConfigurationException;
import alien4cloud.utils.MapUtil;
import lombok.extern.slf4j.Slf4j;
/**
* Service to manage state of an orchestrator
*/
@Slf4j
@Component
public class OrchestratorStateService {
@Resource(name = "alien-es-dao")
private IGenericSearchDAO alienDAO;
@Inject
private OrchestratorConfigurationService orchestratorConfigurationService;
@Inject
private OrchestratorPluginService orchestratorPluginService;
@Inject
private DeploymentService deploymentService;
@Inject
private OrchestratorService orchestratorService;
@Inject
private LocationService locationService;
@Inject
private PluginArchiveIndexer archiveIndexer;
@Inject
private ApplicationEventPublisher publisher;
// Lock per Orchestrator ID
private final Map<String, ReentrantLock> mapLock = Maps.newConcurrentMap();
/**
* Unload all orchestrators from JVM memory, it's typically to refresh/reload code
*/
public void unloadAllOrchestrators() {
List<Orchestrator> enabledOrchestratorList = orchestratorService.getAllEnabledOrchestrators();
if (enabledOrchestratorList != null && !enabledOrchestratorList.isEmpty()) {
log.info("Unloading orchestrators");
for (final Orchestrator orchestrator : enabledOrchestratorList) {
// un-register the orchestrator.
IOrchestratorPlugin orchestratorInstance = orchestratorPluginService.unregister(orchestrator.getId());
if (orchestratorInstance != null) {
IOrchestratorPluginFactory orchestratorFactory = orchestratorService.getPluginFactory(orchestrator);
orchestratorFactory.destroy(orchestratorInstance);
}
}
log.info("{} Orchestrators Unloaded", enabledOrchestratorList.size());
}
}
/**
* Initialize all orchestrator that have a non-disabled state.
*/
public ListenableFuture<?> initialize() {
return initialize(null);
}
/**
* Initialize all orchestrator that have a non-disabled state.
* Note: Each orchestrator initialization is down in it's own thread so it doesn't impact application startup or other orchestrator connection.
*
* @param callback the callback to be executed when initialize finish
*/
public ListenableFuture<?> initialize(FutureCallback callback) {
ListeningExecutorService executorService = MoreExecutors.listeningDecorator(Executors.newCachedThreadPool());
try {
List<ListenableFuture<?>> futures = new ArrayList<>();
// get all the orchestrator that are not disabled
final List<Orchestrator> enabledOrchestratorList = orchestratorService.getAllEnabledOrchestrators();
if (enabledOrchestratorList == null || enabledOrchestratorList.isEmpty()) {
return Futures.immediateFuture(null);
}
log.info("Initializing orchestrators");
for (final Orchestrator orchestrator : enabledOrchestratorList) {
// error in initialization and timeouts should not impact startup time of Alien 4 cloud and other PaaS Providers.
ListenableFuture<?> future = executorService.submit(new Runnable() {
@Override
public void run() {
try {
load(orchestrator);
} catch (AlreadyExistException e) {
log.info("Orchestrator was already loaded at initialization for {}.", orchestrator.getId());
} catch (Exception e) {
// we have to catch everything as we don't know what a plugin can do here and cannot interrupt startup.
// Any orchestrator that failed to load will be considered as DISABLED as the registration didn't occurred
log.error("Unexpected error in plugin", e);
orchestrator.setState(OrchestratorState.DISABLED);
alienDAO.save(orchestrator);
}
}
});
futures.add(future);
}
ListenableFuture<?> combinedFuture = Futures.allAsList(futures);
if (callback != null) {
Futures.addCallback(combinedFuture, callback);
}
Futures.addCallback(combinedFuture, new FutureCallback<Object>() {
@Override
public void onSuccess(Object result) {
log.info("{} Orchestrators loaded", enabledOrchestratorList.size());
}
@Override
public void onFailure(Throwable t) {
log.error("Unable to load orchestrators", t);
}
});
return combinedFuture;
} finally {
executorService.shutdown();
}
}
/**
* Enable an orchestrator.
*
* @param orchestrator The orchestrator to enable.
*/
public void enable(Orchestrator orchestrator) throws PluginConfigurationException {
Lock lock = lockFor(orchestrator);
try {
lock.lock();
if (orchestrator.getState().equals(OrchestratorState.DISABLED)) {
load(orchestrator);
} else {
log.debug("Request to enable ignored: orchestrator {} (id: {}) is already enabled", orchestrator.getName(), orchestrator.getId());
throw new AlreadyExistException("Orchestrator {} is already instanciated.");
}
} finally {
lock.unlock();
}
}
/**
* Load and connect the given orchestrator.
*
* @param orchestrator the orchestrator to load and connect.
*/
    private void load(Orchestrator orchestrator) throws PluginConfigurationException {
        log.info("Loading and connecting orchestrator {} (id: {})", orchestrator.getName(), orchestrator.getId());
        // check that the orchestrator is not already loaded.
        if (orchestratorPluginService.get(orchestrator.getId()) != null) {
            throw new AlreadyExistException("Plugin is already loaded.");
        }
        // switch the state to connecting; persisted so other nodes/readers see the transition
        orchestrator.setState(OrchestratorState.CONNECTING);
        alienDAO.save(orchestrator);
        // Set the configuration for the provider
        OrchestratorConfiguration orchestratorConfiguration = orchestratorConfigurationService.getConfigurationOrFail(orchestrator.getId());
        try {
            Object configuration = orchestratorConfigurationService.configurationAsValidObject(orchestrator.getId(),
                    orchestratorConfiguration.getConfiguration());
            // Inject identity into the configuration when the plugin supports it.
            if (configuration instanceof IPaaSProviderConfiguration) {
                ((IPaaSProviderConfiguration)configuration).setOrchestratorName(orchestrator.getName());
                ((IPaaSProviderConfiguration)configuration).setOrchestratorId(orchestrator.getId());
            }
            IOrchestratorPluginFactory orchestratorFactory = orchestratorService.getPluginFactory(orchestrator);
            IOrchestratorPlugin<Object> orchestratorInstance = orchestratorFactory.newInstance(configuration);
            // just kept for backward compatibility
            orchestratorInstance.setConfiguration(orchestrator.getId(), configuration);
            // index the archive in alien catalog
            archiveIndexer.indexOrchestratorArchives(orchestratorFactory, orchestratorInstance);
            try {
                // Build both id maps so we can hand the plugin its known deployments and later
                // reconcile its answer against our own records.
                Deployment[] deployments = deploymentService.getOrchestratorActiveDeployments(orchestrator.getId());
                Map<String, String> deploymentIdsMap = Maps.newHashMap();
                Map<String, Deployment> deploymentsMap = Maps.newHashMap();
                if (deployments != null) {
                    for (Deployment deployment : deployments) {
                        deploymentIdsMap.put(deployment.getOrchestratorDeploymentId(), deployment.getId());
                        deploymentsMap.put(deployment.getOrchestratorDeploymentId(), deployment);
                    }
                }
                // connect the orchestrator; it returns the deployments it actually still knows about
                Set<String> reallyActiveDeployments = orchestratorInstance.init(deploymentIdsMap);
                deploymentsMap.forEach((deploymentPaasId, deployment) -> {
                    if (!reallyActiveDeployments.contains(deploymentPaasId)) {
                        // this deployment is not known by the orchestrator, maybe it has been undeployed during downtime
                        log.info("Deployment {} ({}) no longer exists in orchestrator deployments, maybe undeployed during downtime, marking it as undeployed", deployment.getId(), deployment.getOrchestratorDeploymentId());
                        deploymentService.markUndeployed(deployment);
                    } else {
                        publisher.publishEvent(new DeploymentRecoveredEvent(this, deployment.getId()));
                    }
                });
            } catch(Exception e) {
                // Destroy contexts
                orchestratorFactory.destroy(orchestratorInstance);
                // Reset the connection state
                orchestrator.setState(OrchestratorState.DISABLED);
                alienDAO.save(orchestrator);
                // Propagate the exception
                throw e;
            }
            // register the orchestrator instance to be polled for updates
            orchestratorPluginService.register(orchestrator.getId(), orchestratorInstance);
            orchestrator.setState(OrchestratorState.CONNECTED);
            alienDAO.save(orchestrator);
            if (orchestratorInstance instanceof ILocationAutoConfigurer) {
                // trigger locations auto-configurations
                locationService.autoConfigure(orchestrator, (ILocationAutoConfigurer) orchestratorInstance);
            }
            indexLocationsArchives(orchestrator);
            publisher.publishEvent(new AfterOrchestratorEnabled(this, orchestrator));
        } catch (IOException e) {
            // TODO: change orchestrator state ?
            throw new PluginConfigurationException("Failed convert configuration json in object.", e);
        }
        // TODO move below in a thread to perform plugin loading and connection asynchronously
    }
private void indexLocationsArchives(Orchestrator orchestrator) {
locationService.getAll(orchestrator.getId()).forEach(location -> {
Set<CSARDependency> dependencies = archiveIndexer.indexLocationArchives(orchestrator, location);
location.getDependencies().addAll(dependencies);
alienDAO.save(location);
});
}
/**
* Disable an orchestrator.
*
* @param orchestrator The orchestrator to disable.
* @param force If true the orchestrator is disabled even if some deployments are currently running.
*/
    public List<Usage> disable(Orchestrator orchestrator, boolean force) {
        Lock lock = lockFor(orchestrator);
        try {
            lock.lock();
            if (!force) {
                // If there is at least one active deployment (endDate still null), refuse to disable.
                GetMultipleDataResult<Deployment> result = alienDAO.buildQuery(Deployment.class)
                        .setFilters(MapUtil.newHashMap(new String[]{"orchestratorId", "endDate"},
                                new String[][]{new String[]{orchestrator.getId()}, new String[]{null}}))
                        .prepareSearch().setFieldSort("_timestamp", "long", true).search(0, 1);
                // TODO place a lock to avoid deployments during the disabling of the orchestrator.
                if (result.getData().length > 0) {
                    // Non-null return signals the caller why disabling was refused.
                    List<Usage> usages = generateDeploymentUsages(result.getData());
                    return usages;
                }
            }
            publisher.publishEvent(new BeforeOrchestratorDisabled(this, orchestrator));
            try {
                // unregister the orchestrator.
                IOrchestratorPlugin orchestratorInstance = orchestratorPluginService.unregister(orchestrator.getId());
                if (orchestratorInstance != null) {
                    IOrchestratorPluginFactory orchestratorFactory = orchestratorService.getPluginFactory(orchestrator);
                    orchestratorFactory.destroy(orchestratorInstance);
                }
            } catch (Exception e) {
                // Best-effort teardown: the plugin may never have been fully created.
                log.info("Unable to destroy orchestrator, it may not be created yet", e);
            } finally {
                // Mark the orchestrator as disabled even if teardown failed.
                orchestrator.setState(OrchestratorState.DISABLED);
                alienDAO.save(orchestrator);
            }
        } finally {
            lock.unlock();
        }
        // null means the orchestrator was disabled (no blocking usages).
        return null;
    }
private List<Usage> generateDeploymentUsages(Deployment[] data) {
List<Usage> usages = Lists.newArrayList();
for (Deployment deployment : data) {
usages.add(new Usage(deployment.getSourceName(), deployment.getSourceType().getSourceType().getSimpleName(), deployment.getSourceId(), null));
}
return usages;
}
    /**
     * Returns the lock dedicated to the given orchestrator, creating it on first use,
     * so that enable/disable operations are serialized per orchestrator id.
     */
    private Lock lockFor(Orchestrator orchestrator) {
        return mapLock.computeIfAbsent(orchestrator.getId(), k -> new ReentrantLock());
    }
}
| apache-2.0 |
ataylor4/codeu_project_2017 | src/codeu/chat/client/Controller.java | 6269 | // Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package codeu.chat.client;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.Thread;
import codeu.chat.common.BasicController;
import codeu.chat.common.Conversation;
import codeu.chat.common.Message;
import codeu.chat.common.NetworkCode;
import codeu.chat.common.User;
import codeu.chat.common.Uuid;
import codeu.chat.common.Uuids;
import codeu.chat.util.Logger;
import codeu.chat.util.Serializers;
import codeu.chat.util.connections.Connection;
import codeu.chat.util.connections.ConnectionSource;
public class Controller implements BasicController {

  private final static Logger LOG = Logger.newLog(Controller.class);

  private final ConnectionSource source;

  public Controller(ConnectionSource source) {
    this.source = source;
  }

  /**
   * Asks the server to create a new message in the given conversation.
   *
   * @return the created message, or null if the server call failed.
   */
  @Override
  public Message newMessage(Uuid author, Uuid conversation, String body) {
    Message response = null;
    try (final Connection connection = source.connect()) {
      Serializers.INTEGER.write(connection.out(), NetworkCode.NEW_MESSAGE_REQUEST);
      Uuids.SERIALIZER.write(connection.out(), author);
      Uuids.SERIALIZER.write(connection.out(), conversation);
      Serializers.STRING.write(connection.out(), body);
      if (Serializers.INTEGER.read(connection.in()) == NetworkCode.NEW_MESSAGE_RESPONSE) {
        response = Serializers.nullable(Message.SERIALIZER).read(connection.in());
      } else {
        LOG.error("Response from server failed.");
      }
    } catch (Exception ex) {
      System.out.println("ERROR: Exception during call on server. Check log for details.");
      LOG.error(ex, "Exception during call on server.");
    }
    return response;
  }

  /** Asks the server to remove the given message from the given conversation. */
  @Override
  public void removeMessage(Message message, Uuid conversation) {
    try (final Connection connection = source.connect()) {
      Serializers.INTEGER.write(connection.out(), NetworkCode.REMOVE_MESSAGE_REQUEST);
      Message.SERIALIZER.write(connection.out(), message);
      Uuids.SERIALIZER.write(connection.out(), conversation);
      if (Serializers.INTEGER.read(connection.in()) == NetworkCode.REMOVE_MESSAGE_RESPONSE) {
        LOG.info("removeMessage: Response completed.");
      } else {
        LOG.error("Response from server failed.");
      }
    } catch (Exception ex) {
      System.out.println("ERROR: Exception during call on server. Check log for details.");
      LOG.error(ex, "Exception during call on server.");
    }
  }

  /**
   * Asks the server to create a new user.
   *
   * @return the created user, or null if the server call failed.
   */
  @Override
  public User newUser(String name, String security) {
    User response = null;
    try (final Connection connection = source.connect()) {
      Serializers.INTEGER.write(connection.out(), NetworkCode.NEW_USER_REQUEST);
      Serializers.STRING.write(connection.out(), name);
      Serializers.STRING.write(connection.out(), security);//TODO: added
      LOG.info("newUser: Request completed.");
      if (Serializers.INTEGER.read(connection.in()) == NetworkCode.NEW_USER_RESPONSE) {
        response = Serializers.nullable(User.SERIALIZER).read(connection.in());
        LOG.info("newUser: Response completed.");
      } else {
        LOG.error("Response from server failed.");
      }
    } catch (Exception ex) {
      System.out.println("ERROR: Exception during call on server. Check log for details.");
      LOG.error(ex, "Exception during call on server.");
    }
    return response;
  }

  /** Asks the server to remove the given user. */
  @Override
  public void removeUser(User user) {
    try (final Connection connection = source.connect()) {
      Serializers.INTEGER.write(connection.out(), NetworkCode.REMOVE_USER_REQUEST);
      User.SERIALIZER.write(connection.out(), user);
      // Progress is reported through the logger only, consistent with the other
      // request methods (a leftover System.out debug print was removed here).
      LOG.info("removeUser: Request completed.");
      if (Serializers.INTEGER.read(connection.in()) == NetworkCode.REMOVE_USER_RESPONSE) {
        LOG.info("removeUser: Response completed.");
      } else {
        LOG.error("Response from server failed.");
      }
    } catch (Exception ex) {
      System.out.println("ERROR: Exception during call on server. Check log for details.");
      LOG.error(ex, "Exception during call on server.");
    }
  }

  /**
   * Asks the server to create a new conversation owned by the given user.
   *
   * @return the created conversation, or null if the server call failed.
   */
  @Override
  public Conversation newConversation(String title, Uuid owner) {
    Conversation response = null;
    try (final Connection connection = source.connect()) {
      Serializers.INTEGER.write(connection.out(), NetworkCode.NEW_CONVERSATION_REQUEST);
      Serializers.STRING.write(connection.out(), title);
      Uuids.SERIALIZER.write(connection.out(), owner);
      if (Serializers.INTEGER.read(connection.in()) == NetworkCode.NEW_CONVERSATION_RESPONSE) {
        response = Serializers.nullable(Conversation.SERIALIZER).read(connection.in());
      } else {
        LOG.error("Response from server failed.");
      }
    } catch (Exception ex) {
      System.out.println("ERROR: Exception during call on server. Check log for details.");
      LOG.error(ex, "Exception during call on server.");
    }
    return response;
  }

  /** Asks the server to remove the given conversation. */
  @Override
  public void removeConversation(Conversation conversation) {
    try (final Connection connection = source.connect()) {
      Serializers.INTEGER.write(connection.out(), NetworkCode.REMOVE_CONVERSATION_REQUEST);
      Conversation.SERIALIZER.write(connection.out(), conversation);
      if (Serializers.INTEGER.read(connection.in()) == NetworkCode.REMOVE_CONVERSATION_RESPONSE) {
        LOG.info("removeConversation: Response completed.");
      } else {
        LOG.error("Response from server failed.");
      }
    } catch (Exception ex) {
      System.out.println("ERROR: Exception during call on server. Check log for details.");
      LOG.error(ex, "Exception during call on server.");
    }
  }
}
| apache-2.0 |
android-art-intel/Nougat | art-extension/opttests/src/OptimizationTests/Devirtualization/InvokeVirtualShort/Main.java | 973 | /*
* Copyright (C) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package OptimizationTests.Devirtualization.InvokeVirtualShort;
class Main {
    /**
     * Exercises devirtualization of an invoke-virtual returning short: the call goes
     * through a base-class reference whose runtime type is CondVirtExt.
     */
    public static short runTest() {
        CondVirtBase instance = new CondVirtExt();
        return instance.getThingies();
    }

    public static void main(String[] args) {
        System.out.println("Result " + runTest());
    }
}
| apache-2.0 |
nicolaferraro/camel | components/camel-saga/src/generated/java/org/apache/camel/component/saga/SagaEndpointUriFactory.java | 1784 | /* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.saga;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.camel.spi.EndpointUriFactory;
/**
* Generated by camel build tools - do NOT edit this file!
*/
public class SagaEndpointUriFactory extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {

    // Endpoint URI syntax after the scheme: a single required "action" path parameter.
    private static final String BASE = ":action";

    private static final Set<String> PROPERTY_NAMES;
    private static final Set<String> SECRET_PROPERTY_NAMES;
    static {
        // All configurable options of the saga endpoint; exposed as an immutable set.
        Set<String> props = new HashSet<>(3);
        props.add("lazyStartProducer");
        props.add("synchronous");
        props.add("action");
        PROPERTY_NAMES = Collections.unmodifiableSet(props);
        // The saga component declares no secret (credential-like) options.
        SECRET_PROPERTY_NAMES = Collections.emptySet();
    }

    @Override
    public boolean isEnabled(String scheme) {
        return "saga".equals(scheme);
    }

    // Assembles "saga:<action>?<query>" from the property map, consuming path
    // parameters first and appending the remainder as query parameters.
    @Override
    public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
        String syntax = scheme + BASE;
        String uri = syntax;

        Map<String, Object> copy = new HashMap<>(properties);

        uri = buildPathParameter(syntax, uri, "action", null, true, copy);
        uri = buildQueryParameters(uri, copy, encode);
        return uri;
    }

    @Override
    public Set<String> propertyNames() {
        return PROPERTY_NAMES;
    }

    @Override
    public Set<String> secretPropertyNames() {
        return SECRET_PROPERTY_NAMES;
    }

    @Override
    public boolean isLenientProperties() {
        return false;
    }
}
| apache-2.0 |
cucina/opencucina | core/src/main/java/org/cucina/core/marshal/JacksonMarshaller.java | 3843 | package org.cucina.core.marshal;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.util.Assert;
import java.util.Collection;
import java.util.Map;
/**
* Marshaller implementation using Jackson JSON object mapper
*
* @author $Author: $
* @version $Revision: $
*/
public class JacksonMarshaller
    implements Marshaller, InitializingBean {
    private static final Logger LOG = LoggerFactory.getLogger(JacksonMarshaller.class);
    private Collection<JsonSerializer<?>> serialisers;
    @SuppressWarnings("rawtypes")
    private Map<Class, JsonDeserializer> deserialisers;
    // Safe to share between threads once afterPropertiesSet() has finished configuring it.
    private ObjectMapper mapper = new ObjectMapper();

    /**
     * Creates a new JacksonMarshaller object.
     *
     * @param serialisers custom serializers to register with the mapper (may be null/empty).
     * @param deserialisers custom deserializers keyed by target type (may be null/empty).
     */
    public JacksonMarshaller(Collection<JsonSerializer<?>> serialisers,
        @SuppressWarnings("rawtypes")
        Map<Class, JsonDeserializer> deserialisers) {
        super();
        this.serialisers = serialisers;
        this.deserialisers = deserialisers;
    }

    /**
     * Registers all supplied custom (de)serializers with the mapper in a single Jackson
     * module. Invoked once by the Spring container after property injection; the mapper
     * must not be reconfigured afterwards.
     *
     * @throws Exception propagated from module registration, not expected in practice.
     */
    @SuppressWarnings({"rawtypes", "unchecked"})
    @Override
    public void afterPropertiesSet()
        throws Exception {
        SimpleModule module = new SimpleModule("user",
                new Version(1, 0, 0, "User serializer/deserializer", null, null));

        if (serialisers != null) {
            for (JsonSerializer<?> serialiser : serialisers) {
                module.addSerializer(serialiser);
            }
        }

        if (deserialisers != null) {
            for (Map.Entry<Class, JsonDeserializer> entry : deserialisers.entrySet()) {
                module.addDeserializer(entry.getKey(), entry.getValue());
            }
        }

        mapper.registerModule(module);
    }

    /**
     * Marshalls the object graph to its JSON representation.
     *
     * @param graph the object graph to serialize; must not be null.
     * @return the JSON string.
     * @throws RuntimeException wrapping any Jackson failure.
     */
    @Override
    public String marshall(Object graph) {
        Assert.notNull(graph, "Should provide graph as argument");

        String json;
        try {
            json = mapper.writeValueAsString(graph);
        } catch (Exception e) {
            LOG.warn("Marshalling failed for [" + graph + "]", e);
            throw new RuntimeException(e);
        }
        return json;
    }

    /**
     * Unmarshalls the JSON source into an instance of the target class.
     *
     * @param <T> type to convert to.
     * @param source the JSON string; must not be null.
     * @param targetClass the class to instantiate; must not be null.
     * @return the unmarshalled instance.
     * @throws RuntimeException wrapping any Jackson failure.
     */
    @Override
    public <T> T unmarshall(String source, Class<T> targetClass) {
        Assert.notNull(source, "Should provide source as argument");
        Assert.notNull(targetClass, "Should provide targetClass as argument");

        T result;
        try {
            result = mapper.readValue(source, targetClass);
        } catch (Exception e) {
            LOG.warn("Unmarshalling failed for [" + source + "]", e);
            throw new RuntimeException(e);
        }
        return result;
    }

    /**
     * Unmarshalls the JSON source using a Jackson type reference, which allows
     * deserializing into generic types (e.g. {@code List<Foo>}).
     *
     * @param <T> type to convert to.
     * @param source the JSON string; must not be null.
     * @param valueTypeRef describes the full generic target type; must not be null.
     * @return the unmarshalled instance.
     * @throws RuntimeException wrapping any Jackson failure.
     */
    @Override
    public <T> T unmarshall(String source, TypeReference<T> valueTypeRef) {
        Assert.notNull(source, "source is required!");
        Assert.notNull(valueTypeRef, "valueTypeRef is required!");

        T result;
        try {
            result = mapper.readValue(source, valueTypeRef);
        } catch (Exception e) {
            LOG.warn("Unmarshalling failed for [" + source + "]", e);
            throw new RuntimeException(e);
        }
        return result;
    }
}
| apache-2.0 |
ITMAOO/scenic | scenic-wechatserver/src/main/java/com/scenic/wechat/server/message/request/PhotoItem.java | 466 | package com.scenic.wechat.server.message.request;
import lombok.Data;
import java.io.Serializable;
/**
* Author: jonny
* Time: 2017-08-27 21:07.
*/
@Data
public class PhotoItem implements Serializable {
    /**
     * MD5 hash of the picture; developers may use it to verify the received image.
     */
    private String PicMd5Sum;

    // NOTE(review): @Data already generates an equivalent getter/setter pair for this
    // field; the explicit methods below replace the Lombok-generated ones and could
    // likely be removed. Field naming also deviates from camelCase convention.
    public String getPicMd5Sum() {
        return PicMd5Sum;
    }
    public void setPicMd5Sum(String picMd5Sum) {
        PicMd5Sum = picMd5Sum;
    }
}
| apache-2.0 |
goutham106/GmArchMvvm | lifecycle/src/main/java/com/gm/lifecycle/delegate/ILifecycle.java | 1341 | /*
* Copyright (c) 2017 Gowtham Parimelazhagan.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gm.lifecycle.delegate;
import com.gm.lifecycle.di.component.LifecycleComponent;
import com.gm.lifecycle.di.module.LifecycleModule;
/**
* Author : Gowtham
* Email : goutham.gm11@gmail.com
* Github : https://github.com/goutham106
* Created on : 9/18/17.
* <p>
* Application inherits the interface and can have the methods provided by LifecycleComponent.
*/
public interface ILifecycle {
    /**
     * Returns the global {@link LifecycleComponent}.
     *
     * @return the application-wide LifecycleComponent.
     */
    LifecycleComponent getLifecycleComponent();

    /**
     * Returns the global {@link LifecycleModule}.
     *
     * @return the application-wide LifecycleModule.
     */
    LifecycleModule getLifecycleModule();
}
| apache-2.0 |
idemura/owl_lang | old/compiler/src/main/owl/compiler/AstNode.java | 366 | package owl.compiler;
/**
 * Base class for all AST nodes: carries the node's source position (line and
 * character offset) and requires subclasses to implement visitor dispatch.
 */
abstract class AstNode {
    private int line;
    private int charPosition;

    /** Records where this node appears in the source text. */
    void setPosition(int line, int charPosition) {
        this.line = line;
        this.charPosition = charPosition;
    }

    int getLine() {
        return line;
    }

    int getCharPosition() {
        return charPosition;
    }

    /** Dispatches to the visitor method corresponding to this node's concrete type. */
    abstract <T> T accept(AstVisitor<T> visitor);
}
| apache-2.0 |
consulo/consulo-sql | src/com/dci/intellij/dbn/generator/AliasBundle.java | 1533 | /*
* Copyright 2012-2014 Dan Cioca
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dci.intellij.dbn.generator;
import com.dci.intellij.dbn.common.util.NamingUtil;
import com.dci.intellij.dbn.object.common.DBObject;
import java.util.HashMap;
import java.util.Map;
public class AliasBundle {
    private Map<DBObject, String> aliases = new HashMap<DBObject, String>();

    /**
     * Returns the alias for the given object, creating and caching a new alias on
     * first request. Aliases handed out by this bundle are unique among themselves.
     *
     * @param object the database object to alias.
     * @return the (possibly newly created) alias for the object.
     */
    public String getAlias(DBObject object) {
        String alias = aliases.get(object);
        if (alias == null) {
            alias = NamingUtil.createAliasName(object.getName());
            alias = getNextAvailable(alias);
            aliases.put(object, alias);
        }
        return alias;
    }

    /**
     * Increments the numeric suffix of the candidate alias until it no longer clashes
     * with any alias already in use.
     *
     * @param alias the candidate alias.
     * @return a collision-free alias.
     */
    private String getNextAvailable(String alias) {
        // Re-check the full set after every rename: the previous single-pass scan could
        // return a renamed alias that collided with a value it had already passed
        // (e.g. existing values ["a1", "a"] for candidate "a" would yield "a1").
        while (aliases.containsValue(alias)) {
            alias = NamingUtil.getNextNumberedName(alias, false);
        }
        return alias;
    }
}
| apache-2.0 |
raidentrance/design-patterns | src/main/java/com/raidentrance/builder/package-info.java | 80 | /**
*
*/
/**
* @author raidentrance
*
*/
package com.raidentrance.builder; | apache-2.0 |
angcyo/RLibrary | uiview/src/main/java/com/angcyo/uiview/github/item/touch/helper/OnDragVHListener.java | 360 | package com.angcyo.uiview.github.item.touch.helper;
/**
 * Listener notified when a ViewHolder is selected for dragging and when the
 * drag is released.
 * Created by YoKeyword on 15/12/29.
 */
public interface OnDragVHListener {
    /**
     * Invoked when the item is selected (a drag or swipe gesture starts).
     */
    void onItemSelected();

    /**
     * Invoked when the drag or swipe gesture on the item ends.
     */
    void onItemFinish();
}
| apache-2.0 |
miguel250/WIFITimeZone | android_app/app/src/main/java/com/mpz/wifitimezone/app/MainActivity.java | 2736 | package com.mpz.wifitimezone.app;
import android.app.Activity;
import android.net.wifi.WifiInfo;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ProgressBar;
import android.util.Log;
import android.content.Context;
import android.net.wifi.WifiManager;
import android.text.format.Formatter;
import uk.co.senab.actionbarpulltorefresh.library.ActionBarPullToRefresh;
import uk.co.senab.actionbarpulltorefresh.library.listeners.OnRefreshListener;
import uk.co.senab.actionbarpulltorefresh.library.PullToRefreshLayout;
public class MainActivity extends Activity implements OnRefreshListener {

    private ProgressBar mProgress;
    private PullToRefreshLayout mPullToRefreshLayout;
    private WifiManager wifiManager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mProgress = (ProgressBar) findViewById(R.id.progressBar);
        mPullToRefreshLayout = (PullToRefreshLayout) findViewById(R.id.ptr_layout);
        ActionBarPullToRefresh.from(this)
                // Mark All Children as pullable
                .allChildrenArePullable()
                // Set the OnRefreshListener
                .listener(this)
                // Finally commit the setup to our PullToRefreshLayout
                .setup(mPullToRefreshLayout);

        // Reads the device's Wi-Fi IP address off the main thread.
        final Thread wifiInfoThread = new Thread() {
            @Override
            public void run() {
                // Views must only be touched from the UI thread; the original code called
                // setVisibility() directly from this worker thread, which can crash with
                // CalledFromWrongThreadException.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mProgress.setVisibility(View.INVISIBLE);
                    }
                });
                wifiManager = (WifiManager) getSystemService(Context.WIFI_SERVICE);
                WifiInfo wifiInfo = wifiManager.getConnectionInfo();
                int ip = wifiInfo.getIpAddress();
                String ipString = Formatter.formatIpAddress(ip);
                Log.i("onCreate", "Thread " + ipString );
            }
        };
        wifiInfoThread.start();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    /** Pull-to-refresh callback: shows the spinner and immediately completes the refresh. */
    public void onRefreshStarted(View view) {
        mProgress.setVisibility(View.VISIBLE);
        mPullToRefreshLayout.setRefreshComplete();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        return super.onOptionsItemSelected(item);
    }
}
| apache-2.0 |
saasxx-open/framework | saasxx-framework/src/test/java/com/saasxx/framework/testcase/data/copier/CopierTest.java | 941 | package com.saasxx.framework.testcase.data.copier;
import java.io.IOException;
import java.util.Date;
import org.joda.time.DateTime;
import org.junit.Test;
import com.saasxx.framework.Lang;
import com.saasxx.framework.data.Beans;
public class CopierTest {
    // Copies pojo1 onto pojo2 ten million times and prints the elapsed time; primarily a
    // manual micro-benchmark for the Beans copier rather than a behavioral assertion.
    @Test
    public void testCopy() throws IOException {
        // Source bean with populated name, password and a fixed date.
        final Pojo1 pojo1 = new Pojo1();
        pojo1.setName("tester");
        pojo1.setPassword("123456");
        pojo1.setDate(DateTime.parse("2010-03-02").toDate());
        // Target bean pre-populated so overwriting behavior is visible in the output.
        final Pojo2 pojo2 = new Pojo2();
        pojo2.setDate(new Date());
        pojo2.setInterger(333);
        pojo2.setBool(true);
        System.out.println(pojo2);
        System.out.println(Lang.timing(new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < 10000000; i++) {
                    // Commented lines below are optional copier toggles kept for manual experiments.
                    Beans.from(pojo1)
                    // .excludePrimitiveZero()
                    // .excludePrimitiveFalse()
                    // .excludes("password")
                            .caseInsensitive().to(pojo2);
                }
            }
        }));
        System.out.println(pojo2);
    }
}
| apache-2.0 |
openfurther/further-open-core | security/security-api/src/main/java/edu/utah/further/security/api/services/AuditService.java | 2311 | /**
* Copyright (C) [2013] [The FURTHeR Project]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.utah.further.security.api.services;
import edu.utah.further.fqe.ds.api.domain.ExportContext;
import edu.utah.further.fqe.ds.api.domain.QueryContext;
/**
* An interface providing read and write operations on auditable events.
* <p>
* -----------------------------------------------------------------------------------<br>
* (c) 2008-2010 FURTHeR Project, Health Sciences IT, University of Utah<br>
* Contact: {@code <further@utah.edu>}<br>
* Biomedical Informatics, 26 South 2000 East<br>
* Room 5775 HSEB, Salt Lake City, UT 84112<br>
* Day Phone: 1-801-581-4080<br>
* -----------------------------------------------------------------------------------
*
* @author Shan He {@code <shan.he@utah.edu>}
* @version May 4, 2011
*/
public interface AuditService
{
    // =======================================METHODS===========================================

    /**
     * Saves the logical (parent) query context into the persistent audit store.
     *
     * @param queryContext the logical query context to log.
     */
    void logQuery(QueryContext queryContext);

    /**
     * Saves the query result for a physical (child) query context into the persistent
     * audit store. The query result at this stage is the aggregated number counts.
     *
     * @param queryContext the physical query context whose result is logged.
     */
    void logQueryResult(QueryContext queryContext);

    /**
     * Logs a requested data export - only logs the details of the export and not the
     * query or the results. Refer to the query log event to get more details about the
     * query. The main purpose is reporting how many exports have occurred. TODO: if
     * exported data is ever PHI, we must get more strict with what is logged on export.
     *
     * @param context the export context describing the requested export.
     */
    void logExportRequest(ExportContext context);
}
| apache-2.0 |
shybovycha/buck | src/com/facebook/buck/jvm/java/JavacToJarStepFactory.java | 8045 | /*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static com.facebook.buck.jvm.java.AbstractJavacOptions.SpoolMode;
import com.facebook.buck.io.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.AddsToRuleKey;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.nio.file.Path;
import java.util.Optional;
public class JavacToJarStepFactory extends CompileToJarStepFactory implements AddsToRuleKey {
private static final Logger LOG = Logger.get(JavacToJarStepFactory.class);
@AddToRuleKey private final Javac javac;
@AddToRuleKey private JavacOptions javacOptions;
@AddToRuleKey private final ExtraClasspathFromContextFunction extraClasspathFromContextFunction;
  /**
   * @param javac the compiler implementation used to produce class files.
   * @param javacOptions base compilation options; the bootclasspath may be extended
   *     per-build from the build context.
   * @param extraClasspathFromContextFunction supplies additional bootclasspath entries
   *     derived from the build context.
   */
  public JavacToJarStepFactory(
      Javac javac,
      JavacOptions javacOptions,
      ExtraClasspathFromContextFunction extraClasspathFromContextFunction) {
    this.javac = javac;
    this.javacOptions = javacOptions;
    this.extraClasspathFromContextFunction = extraClasspathFromContextFunction;
  }
  @Override
  public void createCompileStep(
      BuildContext context,
      BuildTarget invokingRule,
      SourcePathResolver resolver,
      ProjectFilesystem filesystem,
      CompilerParameters parameters,
      /* output params */
      ImmutableList.Builder<Step> steps,
      BuildableContext buildableContext) {
    // Resolve the effective options for this build, including any extra bootclasspath
    // entries contributed by the build context.
    final JavacOptions buildTimeOptions =
        javacOptions.withBootclasspathFromContext(extraClasspathFromContextFunction, context);

    boolean generatingCode = !javacOptions.getAnnotationProcessingParams().isEmpty();
    if (generatingCode) {
      // Javac requires that the root directory for generated sources already exist.
      addAnnotationGenFolderStep(
          parameters.getGeneratedCodeDirectory(), filesystem, steps, buildableContext, context);
    }

    // Track which classes are used only when dep-file-based rebuilds are enabled.
    final ClassUsageFileWriter usedClassesFileWriter =
        parameters.shouldTrackClassUsage()
            ? new DefaultClassUsageFileWriter(parameters.getDepFilePath())
            : NoOpClassUsageFileWriter.instance();

    steps.add(
        new JavacStep(
            usedClassesFileWriter,
            javac,
            buildTimeOptions,
            invokingRule,
            resolver,
            filesystem,
            new ClasspathChecker(),
            parameters,
            Optional.empty(),
            parameters.shouldGenerateAbiJar() ? parameters.getAbiJarPath() : null));
  }
@Override
protected Optional<String> getBootClasspath(BuildContext context) {
JavacOptions buildTimeOptions =
javacOptions.withBootclasspathFromContext(extraClasspathFromContextFunction, context);
return buildTimeOptions.getBootclasspath();
}
@Override
public Tool getCompiler() {
return javac;
}
@Override
public Iterable<BuildRule> getExtraDeps(SourcePathRuleFinder ruleFinder) {
// If any dep of an annotation processor changes, we need to recompile, so we add those as
// extra deps
return Iterables.concat(
super.getExtraDeps(ruleFinder),
ruleFinder.filterBuildRuleInputs(javacOptions.getAnnotationProcessingParams().getInputs()));
}
@Override
public void createCompileToJarStepImpl(
BuildContext context,
BuildTarget invokingRule,
SourcePathResolver resolver,
SourcePathRuleFinder ruleFinder,
ProjectFilesystem filesystem,
CompilerParameters compilerParameters,
ImmutableList<String> postprocessClassesCommands,
JarParameters jarParameters,
/* output params */
ImmutableList.Builder<Step> steps,
BuildableContext buildableContext) {
Preconditions.checkArgument(
jarParameters.getEntriesToJar().contains(compilerParameters.getOutputDirectory()));
String spoolMode = javacOptions.getSpoolMode().name();
// In order to use direct spooling to the Jar:
// (1) It must be enabled through a .buckconfig.
// (2) The target must have 0 postprocessing steps.
// (3) Tha compile API must be JSR 199.
boolean isSpoolingToJarEnabled =
compilerParameters.getAbiGenerationMode().isSourceAbi()
|| (postprocessClassesCommands.isEmpty()
&& javacOptions.getSpoolMode() == AbstractJavacOptions.SpoolMode.DIRECT_TO_JAR
&& javac instanceof Jsr199Javac);
LOG.info(
"Target: %s SpoolMode: %s Expected SpoolMode: %s Postprocessing steps: %s",
invokingRule.getBaseName(),
(isSpoolingToJarEnabled) ? (SpoolMode.DIRECT_TO_JAR) : (SpoolMode.INTERMEDIATE_TO_DISK),
spoolMode,
postprocessClassesCommands.toString());
if (isSpoolingToJarEnabled) {
final JavacOptions buildTimeOptions =
javacOptions.withBootclasspathFromContext(extraClasspathFromContextFunction, context);
boolean generatingCode = !buildTimeOptions.getAnnotationProcessingParams().isEmpty();
if (generatingCode) {
// Javac requires that the root directory for generated sources already exists.
addAnnotationGenFolderStep(
compilerParameters.getGeneratedCodeDirectory(),
filesystem,
steps,
buildableContext,
context);
}
final ClassUsageFileWriter usedClassesFileWriter =
compilerParameters.shouldTrackClassUsage()
? new DefaultClassUsageFileWriter(compilerParameters.getDepFilePath())
: NoOpClassUsageFileWriter.instance();
steps.add(
new JavacStep(
usedClassesFileWriter,
javac,
buildTimeOptions,
invokingRule,
resolver,
filesystem,
new ClasspathChecker(),
compilerParameters,
Optional.of(jarParameters),
compilerParameters.shouldGenerateAbiJar()
? compilerParameters.getAbiJarPath()
: null));
} else {
super.createCompileToJarStepImpl(
context,
invokingRule,
resolver,
ruleFinder,
filesystem,
compilerParameters,
postprocessClassesCommands,
jarParameters,
steps,
buildableContext);
}
}
private static void addAnnotationGenFolderStep(
Path annotationGenFolder,
ProjectFilesystem filesystem,
ImmutableList.Builder<Step> steps,
BuildableContext buildableContext,
BuildContext buildContext) {
steps.addAll(
MakeCleanDirectoryStep.of(
BuildCellRelativePath.fromCellRelativePath(
buildContext.getBuildCellRootPath(), filesystem, annotationGenFolder)));
buildableContext.recordArtifact(annotationGenFolder);
}
@VisibleForTesting
public JavacOptions getJavacOptions() {
return javacOptions;
}
}
| apache-2.0 |
DanGrew/Nuts | nuts/test/uk/dangrew/nuts/goal/proportion/ProportionFunctionsTest.java | 2988 | package uk.dangrew.nuts.goal.proportion;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import org.junit.Before;
import org.junit.Test;
import org.mockito.MockitoAnnotations;
import uk.dangrew.kode.launch.TestApplication;
import uk.dangrew.nuts.goal.proportion.ProportionFunctions.CalorieProportionFunction;
import uk.dangrew.nuts.goal.proportion.ProportionFunctions.PercentageOfCaloriesProportionFunction;
import uk.dangrew.nuts.goal.proportion.ProportionFunctions.PercentageOfWeightProportionFunction;
import uk.dangrew.nuts.goal.proportion.ProportionFunctions.WeightProportionFunction;
/**
 * Unit tests for the progress-calculation strategies in {@code ProportionFunctions}.
 * Each strategy reads a different subset of the five arguments to
 * {@code calculateProgress(...)} — argument meanings are inferred from the cases below
 * and should be confirmed against {@code ProportionFunctions} itself.
 */
public class ProportionFunctionsTest {

   @Before public void initialiseSystemUnderTest() {
      TestApplication.startPlatform();
      MockitoAnnotations.initMocks( this );
   }//End Method

   // Uses args 4 (weight) and 5 (target weight): progress = weight / target * 100.
   @Test public void shouldCalculateWeightProgress() {
      WeightProportionFunction sut = new WeightProportionFunction();
      assertThat( sut.calculateProgress( 0, 0, 0, 50, 100 ), is( 50.0 ) );
      assertThat( sut.calculateProgress( 0, 0, 0, 25, 100 ), is( 25.0 ) );
      assertThat( sut.calculateProgress( 0, 0, 0, 100, 100 ), is( 100.0 ) );
      // Over-target inputs are not clamped to 100%.
      assertThat( sut.calculateProgress( 0, 0, 0, 120, 100 ), is( 120.0 ) );
      assertThat( sut.calculateProgress( 0, 0, 0, 18, 20 ), is( 90.0 ) );
   }//End Method

   // Uses args 3 (calories) and 5 (target): progress = calories / target * 100.
   @Test public void shouldCalculateCalorieProgress() {
      CalorieProportionFunction sut = new CalorieProportionFunction();
      assertThat( sut.calculateProgress( 0, 0, 50, 0, 100 ), is( 50.0 ) );
      assertThat( sut.calculateProgress( 0, 0, 25, 0,100 ), is( 25.0 ) );
      assertThat( sut.calculateProgress( 0, 0, 100, 0,100 ), is( 100.0 ) );
      assertThat( sut.calculateProgress( 0, 0, 120, 0,100 ), is( 120.0 ) );
      assertThat( sut.calculateProgress( 0, 0, 18, 0,20 ), is( 90.0 ) );
   }//End Method

   // Uses args 2 (total weight), 4 (weight) and 5 (target %):
   // progress = (weight / total) / (target / 100) * 100; total of 0 yields 0.
   @Test public void shouldCalculatePercentageOfWeightProgress() {
      PercentageOfWeightProportionFunction sut = new PercentageOfWeightProportionFunction();
      assertThat( sut.calculateProgress( 0, 0, 0, 0, 20 ), is( 0.0 ) );
      assertThat( sut.calculateProgress( 0, 100, 0, 0, 20 ), is( 0.0 ) );
      assertThat( sut.calculateProgress( 0, 50, 0, 10, 20 ), is( 100.0 ) );
      assertThat( sut.calculateProgress( 0, 50, 0, 15, 20 ), is( 150.0 ) );
      assertThat( sut.calculateProgress( 0, 200, 0, 5, 25 ), is( 10.0 ) );
   }//End Method

   // Same as above, but uses args 1 (total calories) and 3 (calories).
   @Test public void shouldCalculatePercentageOfCaloriesProgress() {
      PercentageOfCaloriesProportionFunction sut = new PercentageOfCaloriesProportionFunction();
      assertThat( sut.calculateProgress( 0, 0, 0, 0, 20 ), is( 0.0 ) );
      assertThat( sut.calculateProgress( 100, 0, 0, 0, 20 ), is( 0.0 ) );
      assertThat( sut.calculateProgress( 50, 0, 10, 0, 20 ), is( 100.0 ) );
      assertThat( sut.calculateProgress( 50, 0, 15, 0, 20 ), is( 150.0 ) );
      assertThat( sut.calculateProgress( 200, 0, 5, 0, 25 ), is( 10.0 ) );
   }//End Method

}//End Class
| apache-2.0 |
medjed/JsonPathCompiler | src/main/java/io/github/medjed/jsonpathcompiler/expressions/path/PathCompiler.java | 24370 | package io.github.medjed.jsonpathcompiler.expressions.path;
import io.github.medjed.jsonpathcompiler.InvalidPathException;
import io.github.medjed.jsonpathcompiler.expressions.CharacterIndex;
import io.github.medjed.jsonpathcompiler.expressions.Utils;
import io.github.medjed.jsonpathcompiler.Predicate;
import io.github.medjed.jsonpathcompiler.expressions.Path;
import io.github.medjed.jsonpathcompiler.expressions.function.ParamType;
import io.github.medjed.jsonpathcompiler.expressions.filter.FilterCompiler;
import io.github.medjed.jsonpathcompiler.expressions.function.Parameter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import static java.lang.Character.isDigit;
import static java.util.Arrays.asList;
/**
 * Compiles a JsonPath expression string (e.g. {@code $.store.book[?(@.price < 10)]}) into a
 * {@link Path} of chained path tokens. The compiler is a hand-written recursive-descent
 * tokenizer over a {@link CharacterIndex}.
 */
public class PathCompiler {

    private static final char DOC_CONTEXT = '$';
    private static final char EVAL_CONTEXT = '@';

    private static final char OPEN_SQUARE_BRACKET = '[';
    private static final char CLOSE_SQUARE_BRACKET = ']';
    private static final char OPEN_PARENTHESIS = '(';
    private static final char CLOSE_PARENTHESIS = ')';
    private static final char OPEN_BRACE = '{';
    private static final char CLOSE_BRACE = '}';

    private static final char WILDCARD = '*';
    private static final char PERIOD = '.';
    private static final char SPACE = ' ';
    private static final char TAB = '\t';
    private static final char CR = '\r';
    // Fixed: was '\r', which made isWhitespace() never recognize a real line feed.
    private static final char LF = '\n';
    private static final char BEGIN_FILTER = '?';
    private static final char COMMA = ',';
    private static final char SPLIT = ':';
    private static final char MINUS = '-';
    private static final char SINGLE_QUOTE = '\'';
    private static final char DOUBLE_QUOTE = '"';

    private static final String DOT_NOTATION_ROOT_PATH = new String(new char[]{DOC_CONTEXT, PERIOD});
    private static final String BRACKET_NOTATION_ROOT_PATH = new String(new char[]{DOC_CONTEXT, OPEN_SQUARE_BRACKET});

    private final LinkedList<Predicate> filterStack;
    private final CharacterIndex path;

    /** Cheap heuristic: does the string start with "$." or "$["? */
    public static boolean isProbablyJsonPath(String path)
    {
        return path.startsWith(DOT_NOTATION_ROOT_PATH) || path.startsWith(BRACKET_NOTATION_ROOT_PATH);
    }

    private PathCompiler(String path, LinkedList<Predicate> filterStack) {
        this.filterStack = filterStack;
        this.path = new CharacterIndex(path);
    }

    private Path compile() {
        RootPathToken root = readContextToken();
        return new CompiledPath(root, root.getPathFragment().equals("$"));
    }

    /**
     * Compiles {@code path} into a {@link Path}, normalizing missing root markers to "$."
     * and binding inline filter placeholders ("[?]") to the supplied {@code filters} in order.
     *
     * @throws InvalidPathException if the expression cannot be parsed
     */
    public static Path compile(String path, final Predicate... filters) {
        try {
            path = path.trim();

            // Paths without an explicit context are treated as relative to the document root.
            if(!(path.charAt(0) == DOC_CONTEXT) && !(path.charAt(0) == EVAL_CONTEXT)){
                path = "$." + path;
            }
            if(path.endsWith(".")){
                fail("Path must not end with a '.' or '..'");
            }
            LinkedList<Predicate> filterStack = new LinkedList<Predicate>(asList(filters));
            return new PathCompiler(path.trim(), filterStack).compile();
        } catch (Exception e) {
            // Normalize every parse failure to InvalidPathException, preserving the cause.
            InvalidPathException ipe;
            if (e instanceof InvalidPathException) {
                ipe = (InvalidPathException) e;
            } else {
                ipe = new InvalidPathException(e);
            }
            throw ipe;
        }
    }

    /** Skips whitespace at the current position. */
    private void readWhitespace() {
        while (path.inBounds()) {
            char c = path.currentChar();
            if (!isWhitespace(c)) {
                break;
            }
            path.incrementPosition(1);
        }
    }

    private boolean isPathContext(char c) {
        return (c == DOC_CONTEXT || c == EVAL_CONTEXT);
    }

    //[$ | @]
    private RootPathToken readContextToken() {

        readWhitespace();

        if (!isPathContext(path.currentChar())) {
            throw new InvalidPathException("Path must start with '$' or '@'");
        }

        RootPathToken pathToken = PathTokenFactory.createRootPathToken(path.currentChar());
        PathTokenAppender appender = pathToken.getPathTokenAppender();

        if (path.currentIsTail()) {
            return pathToken;
        }

        path.incrementPosition(1);

        if(path.currentChar() != PERIOD && path.currentChar() != OPEN_SQUARE_BRACKET){
            fail("Illegal character at position " + path.position() + " expected '.' or '[");
        }

        readNextToken(appender);

        return pathToken;
    }

    //
    // Dispatches on the current character to the appropriate token reader.
    //
    private boolean readNextToken(PathTokenAppender appender) {

        char c = path.currentChar();

        switch (c) {
            case OPEN_SQUARE_BRACKET:
                return readBracketPropertyToken(appender) ||
                        readArrayToken(appender) ||
                        readWildCardToken(appender) ||
                        readFilterToken(appender) ||
                        readPlaceholderToken(appender) ||
                        fail("Could not parse token starting at position " + path.position() + ". Expected ?, ', 0-9, * ");
            case PERIOD:
                return readDotToken(appender) ||
                        fail("Could not parse token starting at position " + path.position());
            case WILDCARD:
                return readWildCardToken(appender) ||
                        fail("Could not parse token starting at position " + path.position());
            default:
                return readPropertyOrFunctionToken(appender) ||
                        fail("Could not parse token starting at position " + path.position());
        }
    }

    //
    // . and ..
    //
    private boolean readDotToken(PathTokenAppender appender) {
        if (path.currentCharIs(PERIOD) && path.nextCharIs(PERIOD)) {
            // ".." is the deep-scan operator.
            appender.appendPathToken(PathTokenFactory.crateScanToken());
            path.incrementPosition(2);
        } else if (!path.hasMoreCharacters()) {
            throw new InvalidPathException("Path must not end with a '.");
        } else {
            path.incrementPosition(1);
        }
        if(path.currentCharIs(PERIOD)){
            throw new InvalidPathException("Character '.' on position " + path.position() + " is not valid.");
        }
        return readNextToken(appender);
    }

    //
    // fooBar or fooBar()
    //
    private boolean readPropertyOrFunctionToken(PathTokenAppender appender) {
        if (path.currentCharIs(OPEN_SQUARE_BRACKET) || path.currentCharIs(WILDCARD) || path.currentCharIs(PERIOD) || path.currentCharIs(SPACE)) {
            return false;
        }
        int startPosition = path.position();
        int readPosition = startPosition;
        int endPosition = 0;

        boolean isFunction = false;

        // Scan forward until the property name is terminated by '.', '[' or '('.
        while (path.inBounds(readPosition)) {
            char c = path.charAt(readPosition);
            if (c == SPACE) {
                throw new InvalidPathException("Use bracket notion ['my prop'] if your property contains blank characters. position: " + path.position());
            }
            else if (c == PERIOD || c == OPEN_SQUARE_BRACKET) {
                endPosition = readPosition;
                break;
            }
            else if (c == OPEN_PARENTHESIS) {
                isFunction = true;
                endPosition = readPosition++;
                break;
            }
            readPosition++;
        }
        if (endPosition == 0) {
            endPosition = path.length();
        }

        List<Parameter> functionParameters = null;
        if (isFunction) {
            if (path.inBounds(readPosition+1)) {
                // read the next token to determine if we have a simple no-args function call
                char c = path.charAt(readPosition + 1);
                if (c != CLOSE_PARENTHESIS) {
                    path.setPosition(endPosition+1);
                    // parse the arguments of the function - arguments that are inner queries or JSON document(s)
                    String functionName = path.subSequence(startPosition, endPosition).toString();
                    functionParameters = parseFunctionParameters(functionName);
                } else {
                    path.setPosition(readPosition + 1);
                }
            }
            else {
                path.setPosition(readPosition);
            }
        }
        else {
            path.setPosition(endPosition);
        }

        String property = path.subSequence(startPosition, endPosition).toString();

        if(isFunction){
            appender.appendPathToken(PathTokenFactory.createFunctionPathToken(property, functionParameters));
        } else {
            appender.appendPathToken(PathTokenFactory.createSinglePropertyPathToken(property, true));
        }

        return path.currentIsTail() || readNextToken(appender);
    }

    /**
     * Parse the parameters of a function call, either the caller has supplied JSON data, or the caller has supplied
     * another path expression which must be evaluated and in turn invoked against the root document. In this tokenizer
     * we're only concerned with parsing the path thus the output of this function is a list of parameters with the Path
     * set if the parameter is an expression. If the parameter is a JSON document then the value of the cachedValue is
     * set on the object.
     *
     * Sequence for parsing out the parameters:
     *
     * This code has its own tokenizer - it does some rudimentary level of lexing in that it can distinguish between JSON block parameters
     * and sub-JSON blocks - it effectively regex's out the parameters into string blocks that can then be passed along to the appropriate parser.
     * Since sub-jsonpath expressions can themselves contain other function calls this routine needs to be sensitive to token counting to
     * determine the boundaries. Since the Path parser isn't aware of JSON processing this uber routine is needed.
     *
     * Parameters are separated by COMMAs ','
     *
     * <pre>
     * doc = {"numbers": [1,2,3,4,5,6,7,8,9,10]}
     *
     * $.sum({10}, $.numbers.avg())
     * </pre>
     *
     * The above is a valid function call, we're first summing 10 + avg of 1...10 (5.5) so the total should be 15.5
     *
     * @return
     *      An ordered list of parameters that are to processed via the function.  Typically functions either process
     *      an array of values and/or can consume parameters in addition to the values provided from the consumption of
     *      an array.
     */
    private List<Parameter> parseFunctionParameters(String funcName) {
        ParamType type = null;

        // Parenthesis starts at 1 since we're marking the start of a function call, the close paren will denote the
        // last parameter boundary
        int groupParen = 1;
        int groupBracket = 0;
        int groupBrace = 0;
        int groupQuote = 0;
        boolean endOfStream = false;
        char priorChar = 0;
        List<Parameter> parameters = new ArrayList<Parameter>();
        StringBuilder parameter = new StringBuilder();
        while (path.inBounds() && !endOfStream) {
            char c = path.currentChar();
            path.incrementPosition(1);

            // we're at the start of the stream, and don't know what type of parameter we have
            if (type == null) {
                if (isWhitespace(c)) {
                    continue;
                }

                if (c == OPEN_BRACE || isDigit(c) || DOUBLE_QUOTE == c) {
                    type = ParamType.JSON;
                }
                else if (isPathContext(c)) {
                    type = ParamType.PATH; // read until we reach a terminating comma and we've reset grouping to zero
                }
            }

            switch (c) {
                case DOUBLE_QUOTE:
                    // NOTE(review): the inner groupQuote == 0 check is unreachable under the
                    // outer groupQuote > 0 guard; kept as-is to preserve behavior.
                    if (priorChar != '\\' && groupQuote > 0) {
                        if (groupQuote == 0) {
                            throw new InvalidPathException("Unexpected quote '\"' at character position: " + path.position());
                        }
                        groupQuote--;
                    }
                    else {
                        groupQuote++;
                    }
                    break;
                case OPEN_PARENTHESIS:
                    groupParen++;
                    break;
                case OPEN_BRACE:
                    groupBrace++;
                    break;
                case OPEN_SQUARE_BRACKET:
                    groupBracket++;
                    break;

                case CLOSE_BRACE:
                    if (0 == groupBrace) {
                        throw new InvalidPathException("Unexpected close brace '}' at character position: " + path.position());
                    }
                    groupBrace--;
                    break;
                case CLOSE_SQUARE_BRACKET:
                    if (0 == groupBracket) {
                        throw new InvalidPathException("Unexpected close bracket ']' at character position: " + path.position());
                    }
                    groupBracket--;
                    break;

                // In either the close paren case where we have zero paren groups left, capture the parameter, or where
                // we've encountered a COMMA do the same
                case CLOSE_PARENTHESIS:
                    groupParen--;
                    if (0 != groupParen) {
                        parameter.append(c);
                    }
                    // intentional fall-through: a closing paren may also terminate a parameter
                case COMMA:
                    // In this state we've reach the end of a function parameter and we can pass along the parameter string
                    // to the parser
                    if ((0 == groupQuote && 0 == groupBrace && 0 == groupBracket
                            && ((0 == groupParen && CLOSE_PARENTHESIS == c) || 1 == groupParen))) {
                        endOfStream = (0 == groupParen);

                        if (null != type) {
                            Parameter param = null;
                            switch (type) {
                                case JSON:
                                    // parse the json and set the value
                                    param = new Parameter(parameter.toString());
                                    break;
                                case PATH:
                                    LinkedList<Predicate> predicates = new LinkedList<Predicate>();
                                    PathCompiler compiler = new PathCompiler(parameter.toString(), predicates);
                                    param = new Parameter(compiler.compile());
                                    break;
                            }
                            if (null != param) {
                                parameters.add(param);
                            }
                            parameter.delete(0, parameter.length());
                            type = null;
                        }
                    }
                    break;
            }

            if (type != null && !(c == COMMA && 0 == groupBrace && 0 == groupBracket && 1 == groupParen)) {
                parameter.append(c);
            }
            priorChar = c;
        }
        if (0 != groupBrace || 0 != groupParen || 0 != groupBracket) {
            throw new InvalidPathException("Arguments to function: '" + funcName + "' are not closed properly.");
        }
        return parameters;
    }

    private boolean isWhitespace(char c) {
        return (c == SPACE || c == TAB || c == LF || c == CR);
    }

    //
    // [?], [?,?, ..]
    //
    private boolean readPlaceholderToken(PathTokenAppender appender) {

        if (!path.currentCharIs(OPEN_SQUARE_BRACKET)) {
            return false;
        }
        int questionmarkIndex = path.indexOfNextSignificantChar(BEGIN_FILTER);
        if (questionmarkIndex == -1) {
            return false;
        }
        char nextSignificantChar = path.nextSignificantChar(questionmarkIndex);
        if (nextSignificantChar != CLOSE_SQUARE_BRACKET && nextSignificantChar != COMMA) {
            return false;
        }

        int expressionBeginIndex = path.position() + 1;
        int expressionEndIndex = path.nextIndexOf(expressionBeginIndex, CLOSE_SQUARE_BRACKET);

        if (expressionEndIndex == -1) {
            return false;
        }

        String expression = path.subSequence(expressionBeginIndex, expressionEndIndex).toString();

        String[] tokens = expression.split(",");

        // Each '?' placeholder consumes one externally-supplied Predicate, in order.
        if (filterStack.size() < tokens.length) {
            throw new InvalidPathException("Not enough predicates supplied for filter [" + expression + "] at position " + path.position());
        }

        Collection<Predicate> predicates = new ArrayList<Predicate>();
        for (String token : tokens) {
            token = token != null ? token.trim() : token;
            if (!"?".equals(token == null ? "" : token)) {
                throw new InvalidPathException("Expected '?' but found " + token);
            }
            predicates.add(filterStack.pop());
        }

        appender.appendPathToken(PathTokenFactory.createPredicatePathToken(predicates));

        path.setPosition(expressionEndIndex + 1);

        return path.currentIsTail() || readNextToken(appender);
    }

    //
    // [?(...)]
    //
    private boolean readFilterToken(PathTokenAppender appender) {
        if (!path.currentCharIs(OPEN_SQUARE_BRACKET) && !path.nextSignificantCharIs(BEGIN_FILTER)) {
            return false;
        }

        int openStatementBracketIndex = path.position();
        int questionMarkIndex = path.indexOfNextSignificantChar(BEGIN_FILTER);
        if (questionMarkIndex == -1) {
            return false;
        }
        int openBracketIndex = path.indexOfNextSignificantChar(questionMarkIndex, OPEN_PARENTHESIS);
        if (openBracketIndex == -1) {
            return false;
        }
        int closeBracketIndex = path.indexOfClosingBracket(openBracketIndex, true, true);
        if (closeBracketIndex == -1) {
            return false;
        }
        if (!path.nextSignificantCharIs(closeBracketIndex, CLOSE_SQUARE_BRACKET)) {
            return false;
        }
        int closeStatementBracketIndex = path.indexOfNextSignificantChar(closeBracketIndex, CLOSE_SQUARE_BRACKET);

        // The whole "[?(...)]" span is handed to the filter compiler.
        String criteria = path.subSequence(openStatementBracketIndex, closeStatementBracketIndex + 1).toString();

        Predicate predicate = FilterCompiler.compile(criteria);
        appender.appendPathToken(PathTokenFactory.createPredicatePathToken(predicate));

        path.setPosition(closeStatementBracketIndex + 1);

        return path.currentIsTail() || readNextToken(appender);
    }

    //
    // [*]
    // *
    //
    private boolean readWildCardToken(PathTokenAppender appender) {

        boolean inBracket = path.currentCharIs(OPEN_SQUARE_BRACKET);

        if (inBracket && !path.nextSignificantCharIs(WILDCARD)) {
            return false;
        }
        if (!path.currentCharIs(WILDCARD) && path.isOutOfBounds(path.position() + 1)) {
            return false;
        }
        if (inBracket) {
            int wildCardIndex = path.indexOfNextSignificantChar(WILDCARD);
            if (!path.nextSignificantCharIs(wildCardIndex, CLOSE_SQUARE_BRACKET)) {
                // Parenthesized so the message reports wildCardIndex + 1 instead of
                // concatenating "1" onto the index (string-concat precedence bug).
                throw new InvalidPathException("Expected wildcard token to end with ']' on position " + (wildCardIndex + 1));
            }
            int bracketCloseIndex = path.indexOfNextSignificantChar(wildCardIndex, CLOSE_SQUARE_BRACKET);
            path.setPosition(bracketCloseIndex + 1);
        } else {
            path.incrementPosition(1);
        }

        appender.appendPathToken(PathTokenFactory.createWildCardPathToken());

        return path.currentIsTail() || readNextToken(appender);
    }

    //
    // [1], [1,2, n], [1:], [1:2], [:2]
    //
    private boolean readArrayToken(PathTokenAppender appender) {

        if (!path.currentCharIs(OPEN_SQUARE_BRACKET)) {
            return false;
        }
        char nextSignificantChar = path.nextSignificantChar();
        if (!isDigit(nextSignificantChar) && nextSignificantChar != MINUS && nextSignificantChar != SPLIT) {
            return false;
        }

        int expressionBeginIndex = path.position() + 1;
        int expressionEndIndex = path.nextIndexOf(expressionBeginIndex, CLOSE_SQUARE_BRACKET);

        if (expressionEndIndex == -1) {
            return false;
        }

        String expression = path.subSequence(expressionBeginIndex, expressionEndIndex).toString().trim();

        if ("*".equals(expression)) {
            return false;
        }

        //check valid chars
        for (int i = 0; i < expression.length(); i++) {
            char c = expression.charAt(i);
            if (!isDigit(c) && c != COMMA && c != MINUS && c != SPLIT && c != SPACE) {
                return false;
            }
        }

        // ':' means a slice ([1:2]); otherwise a list of indexes ([1] or [1,2]).
        boolean isSliceOperation = expression.contains(":");

        if (isSliceOperation) {
            ArraySliceOperation arraySliceOperation = ArraySliceOperation.parse(expression);
            appender.appendPathToken(PathTokenFactory.createSliceArrayPathToken(arraySliceOperation));
        } else {
            ArrayIndexOperation arrayIndexOperation = ArrayIndexOperation.parse(expression);
            appender.appendPathToken(PathTokenFactory.createIndexArrayPathToken(arrayIndexOperation));
        }

        path.setPosition(expressionEndIndex + 1);

        return path.currentIsTail() || readNextToken(appender);
    }

    //
    // ['foo']
    //
    private boolean readBracketPropertyToken(PathTokenAppender appender) {
        if (!path.currentCharIs(OPEN_SQUARE_BRACKET)) {
            return false;
        }
        char potentialStringDelimiter = path.nextSignificantChar();
        if (potentialStringDelimiter != SINGLE_QUOTE && potentialStringDelimiter != DOUBLE_QUOTE) {
            return false;
        }

        List<String> properties = new ArrayList<String>();

        int startPosition = path.position() + 1;
        int readPosition = startPosition;
        int endPosition = 0;
        boolean inProperty = false;
        boolean inEscape = false;
        boolean lastSignificantWasComma = false;

        // Walk the bracket expression, collecting each quoted property name.
        while (path.inBounds(readPosition)) {
            char c = path.charAt(readPosition);

            if(inEscape){
                inEscape = false;
            } else if('\\' == c){
                inEscape = true;
            } else if (c == CLOSE_SQUARE_BRACKET && !inProperty) {
                if (lastSignificantWasComma){
                    fail("Found empty property at index "+readPosition);
                }
                break;
            } else if (c == potentialStringDelimiter) {
                if (inProperty && !inEscape) {
                    char nextSignificantChar = path.nextSignificantChar(readPosition);
                    if (nextSignificantChar != CLOSE_SQUARE_BRACKET && nextSignificantChar != COMMA) {
                        fail("Property must be separated by comma or Property must be terminated close square bracket at index "+readPosition);
                    }
                    endPosition = readPosition;
                    String prop = path.subSequence(startPosition, endPosition).toString();
                    properties.add(Utils.unescape(prop));
                    inProperty = false;
                } else {
                    startPosition = readPosition + 1;
                    inProperty = true;
                    lastSignificantWasComma = false;
                }
            } else if (c == COMMA){
                if (lastSignificantWasComma){
                    fail("Found empty property at index "+readPosition);
                }
                lastSignificantWasComma = true;
            }
            readPosition++;
        }

        int endBracketIndex = path.indexOfNextSignificantChar(endPosition, CLOSE_SQUARE_BRACKET) + 1;

        path.setPosition(endBracketIndex);

        appender.appendPathToken(PathTokenFactory.createPropertyPathToken(properties, true));

        return path.currentIsTail() || readNextToken(appender);
    }

    /** Always throws {@link InvalidPathException}; declared to return boolean so it can sit in || chains. */
    public static boolean fail(String message) {
        throw new InvalidPathException(message);
    }
}
| apache-2.0 |
rambird/rambird-new | src/main/java/com/rambird/repository/VetRepository.java | 1437 | /*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rambird.repository;
import java.util.Collection;
import org.springframework.dao.DataAccessException;
import com.rambird.model.Vet;
/**
* Repository class for <code>Vet</code> domain objects All method names are compliant with Spring Data naming
* conventions so this interface can easily be extended for Spring Data See here: http://static.springsource.org/spring-data/jpa/docs/current/reference/html/jpa.repositories.html#jpa.query-methods.query-creation
*
* @author Ken Krebs
* @author Juergen Hoeller
* @author Sam Brannen
* @author Michael Isvy
*/
public interface VetRepository {

    /**
     * Retrieve all <code>Vet</code>s from the data store.
     *
     * @return a <code>Collection</code> of <code>Vet</code>s, never <code>null</code>
     * @throws DataAccessException in case of data access problems
     */
    Collection<Vet> findAll() throws DataAccessException;

}
| apache-2.0 |
migesok/jaxb-java-time-adapters | threeten-jaxb-extra/src/test/java/io/github/threetenjaxb/extra/WeeksXmlAdapterTest.java | 610 | package io.github.threetenjaxb.extra;
import org.threeten.extra.Weeks;
import java.util.HashMap;
import java.util.Map;
/**
 * Round-trip test for {@link WeeksXmlAdapter}: maps ISO-8601 week strings
 * (e.g. "P12W") to their {@link Weeks} values and lets the base class verify
 * marshalling in both directions.
 */
class WeeksXmlAdapterTest extends AbstractXmlAdapterTest<String, Weeks, WeeksXmlAdapter> {

    private static final Map<String, Weeks> STRING_WEEKS_MAP = createFixtures();

    /** Builds the fixture map of string representations to expected values. */
    private static Map<String, Weeks> createFixtures() {
        Map<String, Weeks> fixtures = new HashMap<>();
        fixtures.put("P12W", Weeks.of(12));
        fixtures.put("P1W", Weeks.ONE);
        fixtures.put("P0W", Weeks.ZERO);
        fixtures.put("P-12W", Weeks.of(-12));
        return fixtures;
    }

    WeeksXmlAdapterTest() {
        super(new WeeksXmlAdapter(), STRING_WEEKS_MAP);
    }
}
| apache-2.0 |
reines/dropwizard-debpkg-maven-plugin | src/main/java/com/jamierf/dropwizard/debpkg/util/LogConsole.java | 551 | package com.jamierf.dropwizard.debpkg.util;
import org.apache.maven.plugin.logging.Log;
import org.vafer.jdeb.Console;
/**
 * jdeb {@link Console} implementation that forwards all console output to a
 * Maven plugin {@link Log}.
 */
public class LogConsole implements Console {

    private final Log log;

    /**
     * @param log the Maven log to delegate to; must not be {@code null}
     * @throws NullPointerException if {@code log} is null (fail fast instead of
     *         deferring the NPE to the first logging call)
     */
    public LogConsole(final Log log) {
        if (log == null) {
            throw new NullPointerException("log");
        }
        this.log = log;
    }

    @Override
    public void debug(final String message) {
        log.debug(message);
    }

    @Override
    public void info(final String message) {
        log.info(message);
    }

    @Override
    public void warn(final String message) {
        log.warn(message);
    }
}
| apache-2.0 |
yzuzhang/zhang | src/main/java/com/feicent/zhang/util/base/MoreValidate.java | 2875 | package com.feicent.zhang.util.base;
/**
* 参数校验统一使用Apache Common Lange Validate, 补充一些缺少的.
*
* 为什么不用Guava的Preconditions? 无他,
*
* 一是少打几个字而已, 二是Validate的方法多,比如noNullElements()判断多个元素都不为空
*
* @see com.google.common.math.MathPreconditions
*
* @author calvin
*/
/**
 * Supplementary argument-validation helpers in the spirit of Apache Commons Lang
 * {@code Validate}, covering numeric checks it lacks (positive / non-negative).
 *
 * <p>Each method returns its argument on success so checks can be inlined into
 * assignments, and throws {@link IllegalArgumentException} with the offending
 * role name and value otherwise.
 *
 * @see com.google.common.math.MathPreconditions
 */
public class MoreValidate {

    /** Static utility holder; not instantiable. */
    private MoreValidate() {
    }

    /**
     * Returns {@code x} if it is positive ({@code > 0}), otherwise throws.
     */
    public static int positive(String role, int x) {
        if (x <= 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is positive ({@code > 0}), otherwise throws.
     */
    public static Integer positive(String role, Integer x) {
        if (x.intValue() <= 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is positive ({@code > 0}), otherwise throws.
     */
    public static long positive(String role, long x) {
        if (x <= 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is positive ({@code > 0}), otherwise throws.
     */
    public static Long positive(String role, Long x) {
        if (x.longValue() <= 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is positive ({@code > 0}), otherwise throws.
     * NaN is rejected as well (hence the negated comparison).
     */
    public static double positive(String role, double x) {
        if (!(x > 0)) { // not x <= 0, to work with NaN.
            // Fixed: message previously claimed ">= 0" although the check requires > 0.
            throw new IllegalArgumentException(role + " (" + x + ") must be > 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is non-negative ({@code >= 0}), otherwise throws.
     */
    public static int nonNegative(String role, int x) {
        if (x < 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is non-negative ({@code >= 0}), otherwise throws.
     */
    public static Integer nonNegative(String role, Integer x) {
        if (x.intValue() < 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is non-negative ({@code >= 0}), otherwise throws.
     */
    public static long nonNegative(String role, long x) {
        if (x < 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is non-negative ({@code >= 0}), otherwise throws.
     */
    public static Long nonNegative(String role, Long x) {
        if (x.longValue() < 0) {
            throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
        }
        return x;
    }

    /**
     * Returns {@code x} if it is non-negative ({@code >= 0}), otherwise throws.
     * NaN is rejected as well (hence the negated comparison).
     */
    public static double nonNegative(String role, double x) {
        if (!(x >= 0)) { // not x < 0, to work with NaN.
            throw new IllegalArgumentException(role + " (" + x + ") must be >= 0");
        }
        return x;
    }
}
| apache-2.0 |
klinovp/pronto | src/uk/ac/manchester/cs/pronto/alg/HittingSetObject.java | 610 | /**
*
*/
package uk.ac.manchester.cs.pronto.alg;
import java.util.Set;
/**
* @author Pavel Klinov
*
* pklinov@cs.man.ac.uk, pklinov@clarkparsia.com
*/
/**
 * A node-like element in a hitting-set computation, exposing the conflict sets it
 * participates in and the minimal hitting sets computed for it. Exact traversal
 * semantics of left/right subsets depend on the implementing algorithm — see the
 * concrete implementations.
 */
public interface HittingSetObject<T> {

    /** Conflict sets reachable on the "left" side with respect to {@code element} — TODO confirm semantics against implementations. */
    public Set<Set<T>> leftSubset(T element);

    /** Conflict sets reachable on the "right" side with respect to {@code element} — TODO confirm semantics against implementations. */
    public Set<Set<T>> rightSubset(T element);

    /** The element this object represents. */
    public T getElement();

    public Set<Set<T>> getConflictSets();

    public void setConflictSets(Set<Set<T>> conflictSets);

    /** Minimal hitting sets most recently computed/stored for this object. */
    public Set<Set<T>> getMinimalHittingSets();

    public void setMinimalHittingSets(Set<Set<T>> hittingSets);

    // Dirty flag: presumably marks cached hitting sets as stale — verify against callers.
    public void setChanged(boolean changed);

    public boolean isChanged();
}
| apache-2.0 |
lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation2/Class_477.java | 145 | package fr.javatronic.blog.massive.annotation2;
import fr.javatronic.blog.processor.Annotation_002;
/** Synthetic annotated class; part of a large generated set used to exercise annotation processing at scale. */
@Annotation_002
public class Class_477 {
}
| apache-2.0 |
ulbricht/mds | src/test/java/org/datacite/mds/test/TestUtils.java | 8869 | package org.datacite.mds.test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.HashSet;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.io.FileUtils;
import org.datacite.mds.domain.Allocator;
import org.datacite.mds.domain.AllocatorOrDatacentre;
import org.datacite.mds.domain.Datacentre;
import org.datacite.mds.domain.Dataset;
import org.datacite.mds.domain.Media;
import org.datacite.mds.domain.Metadata;
import org.datacite.mds.domain.Prefix;
import org.datacite.mds.util.Utils;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
public abstract class TestUtils {
public static final String DEFAULT_ALLOCATOR_SYMBOL = "AL";
public static final String DEFAULT_DATACENTRE_SYMBOL = "AL.DC";
/**
* call a constructor of a given class even if it's private.
*
* @param cls
*/
public static void callConstructor(final Class<?> cls) {
final Constructor<?> c = cls.getDeclaredConstructors()[0];
c.setAccessible(true);
try {
final Object n = c.newInstance((Object[]) null);
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
public static void setUsernamePassword(String username, String password) {
SecurityContextHolder.getContext().setAuthentication(
new UsernamePasswordAuthenticationToken(username, password));
}
public static String getCurrentUsername() {
return SecurityContextHolder.getContext().getAuthentication().getName();
}
public static void logout() {
SecurityContextHolder.getContext().setAuthentication(null);
}
public static void login(AllocatorOrDatacentre user) {
if (user == null) {
logout();
} else {
setUsernamePassword(user.getSymbol(), user.getPassword());
}
}
public static void persist(Object... objects) {
for (Object object : objects) {
if (object instanceof AllocatorOrDatacentre)
((AllocatorOrDatacentre) object).persist();
else
throw new IllegalArgumentException("unknown type");
}
}
public static Allocator createAllocator(String symbol) {
Allocator allocator = new Allocator();
allocator.setSymbol(symbol);
allocator.setPassword("12345678");
allocator.setContactEmail("dummy@example.com");
allocator.setContactName("example contact");
allocator.setDoiQuotaAllowed(-1);
allocator.setDoiQuotaUsed(0);
allocator.setIsActive(true);
allocator.setName("example name");
allocator.setRoleName("ROLE_ALLOCATOR");
return allocator;
}
public static Allocator createAdmin(String symbol) {
Allocator admin = createAllocator(symbol);
admin.setRoleName("ROLE_ADMIN");
return admin;
}
public static Allocator createDev(String symbol) {
Allocator dev = createAllocator(symbol);
dev.setRoleName("ROLE_DEV");
return dev;
}
public static Datacentre createDatacentre(String symbol, Allocator allocator) {
Datacentre datacentre = new Datacentre();
datacentre.setSymbol(symbol);
datacentre.setAllocator(allocator);
datacentre.setContactEmail("dummy@example.com");
datacentre.setContactName("example contact");
datacentre.setDoiQuotaAllowed(-1);
datacentre.setDoiQuotaUsed(0);
datacentre.setDomains("example.com");
datacentre.setIsActive(true);
datacentre.setName("example name");
datacentre.setRoleName("ROLE_DATACENTRE");
return datacentre;
}
public static Datacentre createDefaultDatacentre(String... prefixes) {
Allocator allocator = TestUtils.createAllocator(DEFAULT_ALLOCATOR_SYMBOL);
allocator.setPrefixes(TestUtils.createPrefixes(prefixes));
allocator.persist();
Datacentre datacentre = TestUtils.createDatacentre(DEFAULT_DATACENTRE_SYMBOL, allocator);
datacentre.setPrefixes(allocator.getPrefixes());
datacentre.persist();
return datacentre;
}
public static Dataset createDataset(String doi, Datacentre datacentre) {
Dataset dataset = new Dataset();
dataset.setDoi(doi);
dataset.setDatacentre(datacentre);
return dataset;
}
public static Dataset createDefaultDataset(String doi) {
Datacentre datacentre = createDefaultDatacentre(Utils.getDoiPrefix(doi));
Dataset dataset = createDataset(doi, datacentre);
dataset.persist();
return dataset;
}
public static Prefix createPrefix(String prefix) {
Prefix prefixObj = new Prefix();
prefixObj.setPrefix(prefix);
return prefixObj;
}
public static Set<Prefix> createPrefixes(String... prefixes) {
Set<Prefix> prefixSet = new HashSet<Prefix>();
if (prefixes != null) {
for (String prefix : prefixes) {
prefixSet.add(createPrefix(prefix));
}
}
return prefixSet;
}
public static Metadata createMetadata(byte[] xml, Dataset dataset) {
Metadata metadata = new Metadata();
metadata.setDataset(dataset);
metadata.setXml(setDoiOfMetadata(xml, dataset.getDoi()));
return metadata;
}
public static Media createMedia(String mediaType, String url, Dataset dataset) {
Media media = new Media();
media.setDataset(dataset);
media.setMediaType(mediaType);
media.setUrl(url);
return media;
}
public static Media createMedia(String mediaType, Dataset dataset) {
return createMedia(mediaType, "http://example.com", dataset);
}
public static byte[] getTestMetadata() {
return getTestMetadata21();
}
// schema 2.0 is no longer accepted by production MDS.
@Deprecated
public static byte[] getTestMetadata20() {
return getTestMetadata("datacite-metadata-sample-v2.0.xml");
}
public static byte[] getTestMetadata21() {
return getTestMetadata("datacite-metadata-sample-v2.1.xml");
}
public static byte[] getTestMetadata30() {
return getTestMetadata("datacite-metadata-sample-v3.0.xml");
}
public static byte[] getTestMetadataDif() {
return getTestMetadata("dif-metadata-sample.xml");
}
public static byte[] getTestMetadataIso() {
return getTestMetadata("iso-metadata-sample.xml");
}
public static byte[] getTestMetadata31() {
return getTestMetadata("datacite-metadata-sample-v3.1.xml");
}
public static byte[] getTestMetadata(String filename) {
Resource resource = new ClassPathResource(filename);
try {
return FileUtils.readFileToByteArray(resource.getFile());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public static byte[] setDoiOfMetadata(byte[] xml, String doi) {
try {
Document doc = bytesToDocument(xml);
Node identifier = doc.getElementsByTagName("identifier").item(0);
Node identifierContent = identifier.getFirstChild();
identifierContent.setNodeValue(doi);
return documentToBytes(doc);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static Document bytesToDocument(byte[] bytes) throws Exception {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setNamespaceAware(true);
DocumentBuilder builder = factory.newDocumentBuilder();
ByteArrayInputStream input = new ByteArrayInputStream(bytes);
Document doc = builder.parse(input);
return doc;
}
public static byte[] documentToBytes(Document doc) throws Exception {
Transformer transformer = TransformerFactory.newInstance().newTransformer();
DOMSource source = new DOMSource(doc);
ByteArrayOutputStream output = new ByteArrayOutputStream();
StreamResult result = new StreamResult(output);
transformer.transform(source, result);
byte[] bytes = output.toByteArray();
return bytes;
}
}
| apache-2.0 |
dizitart/nitrite-database | nitrite-datagate/src/main/java/org/dizitart/no2/datagate/controllers/SyncController.java | 10512 | /*
*
* Copyright 2017-2018 Nitrite author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.dizitart.no2.datagate.controllers;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import lombok.extern.slf4j.Slf4j;
import org.dizitart.no2.Document;
import org.dizitart.no2.sync.data.*;
import org.dizitart.no2.sync.TimeSpan;
import org.dizitart.no2.datagate.models.*;
import org.dizitart.no2.datagate.services.DataGateService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Controller for sync endpoints.
*
* @since 1.0
* @author Anindya Chatterjee.
*/
@Slf4j
@RestController
@RequestMapping(path = "api/v1/collection/{collection}")
public class SyncController {

    @Autowired
    private DataGateService dataGateService;

    /**
     * Returns the change feed of the remote collection since the sequence
     * number given in {@code feedOptions}.
     */
    @RequestMapping(path = "/changedSince",
            method = RequestMethod.POST,
            produces = MediaType.APPLICATION_JSON_VALUE,
            consumes = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "This operation gets the list of changes from remote collection.")
    @ApiResponses(value = {
            @ApiResponse(code = 400, message = "Invalid request", response = DataGateError.class),
            @ApiResponse(code = 401, message = "Request is not authorized", response = DataGateError.class),
            @ApiResponse(code = 500, message = "Error processing request", response = DataGateError.class),
    })
    public ChangeFeed changedSince(
            @PathVariable
            @ApiParam("Name of the remote collection.")
                    String collection,
            @RequestBody
            @ApiParam("Options for change feed")
                    FeedOptions feedOptions) {
        log.debug("Validating changedSince request for " + collection);
        dataGateService.validateRequest(collection);
        ChangeFeed feed = dataGateService.changedSince(collection, feedOptions);
        log.debug(collection + " changed since " + feedOptions.getFromSequence() + " : " + feed);
        return feed;
    }

    /**
     * Merges the given change feed into the remote collection and reports
     * whether the merge succeeded.
     */
    @RequestMapping(path = "/change",
            method = RequestMethod.POST,
            produces = MediaType.APPLICATION_JSON_VALUE,
            consumes = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "This operation merges the changes into a remote collection.")
    @ApiResponses(value = {
            @ApiResponse(code = 400, message = "Invalid request", response = DataGateError.class),
            @ApiResponse(code = 401, message = "Request is not authorized", response = DataGateError.class),
            @ApiResponse(code = 500, message = "Error processing request", response = DataGateError.class),
    })
    public ChangeResponse change(
            @PathVariable
            @ApiParam("Name of the remote collection.")
                    String collection,
            @RequestBody
            @ApiParam("The list of changes to be merged.")
                    ChangeFeed changeFeed) {
        log.debug("Validating change request for " + collection);
        dataGateService.validateRequest(collection);
        boolean result = dataGateService.change(collection, changeFeed);
        log.debug(collection + " changed with " + changeFeed);
        return new ChangeResponse(result);
    }

    /**
     * Fetches a page of documents from the remote collection using
     * offset/limit pagination.
     */
    @RequestMapping(path = "/fetch/offset/{offset}/limit/{limit}",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "This operation fetches all documents from remote replica server in pages.")
    @ApiResponses(value = {
            @ApiResponse(code = 400, message = "Invalid request", response = DataGateError.class),
            @ApiResponse(code = 401, message = "Request is not authorized", response = DataGateError.class),
            @ApiResponse(code = 500, message = "Error processing request", response = DataGateError.class),
    })
    public FetchResponse fetch(
            @PathVariable
            @ApiParam("Name of the remote collection.")
                    String collection,
            @PathVariable(name = "offset")
                    int offset,
            @PathVariable(name = "limit")
                    int limit) {
        log.debug("Validating fetch request for " + collection);
        dataGateService.validateRequest(collection);
        List<Document> documents = dataGateService.fetch(collection, offset, limit);
        log.debug("Fetch request for " + collection + " returned "
                + documents.size() + " records.");
        return new FetchResponse(documents);
    }

    /** Returns the number of documents in the remote collection. */
    @RequestMapping(path = "/size",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "This operation gets the size of the remote collection.")
    @ApiResponses(value = {
            @ApiResponse(code = 400, message = "Invalid request", response = DataGateError.class),
            @ApiResponse(code = 401, message = "Request is not authorized", response = DataGateError.class),
            @ApiResponse(code = 500, message = "Error processing request", response = DataGateError.class),
    })
    public SizeResponse size(
            @PathVariable
            @ApiParam("Name of the remote collection.")
                    String collection) {
        log.debug("Validating size request for " + collection);
        dataGateService.validateRequest(collection);
        long size = dataGateService.size(collection);
        log.debug("Size of " + collection + " is " + size);
        return new SizeResponse(size);
    }

    /** Removes all documents from the remote collection. */
    @RequestMapping(path = "/clear",
            method = RequestMethod.DELETE,
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "This operation clears the remote collection.")
    @ApiResponses(value = {
            @ApiResponse(code = 400, message = "Invalid request", response = DataGateError.class),
            @ApiResponse(code = 401, message = "Request is not authorized", response = DataGateError.class),
            @ApiResponse(code = 500, message = "Error processing request", response = DataGateError.class),
    })
    public void clear(
            @PathVariable
            @ApiParam("Name of the remote collection.")
                    String collection) {
        log.debug("Validating clear request for " + collection);
        dataGateService.validateRequest(collection);
        dataGateService.clear(collection);
        log.debug(collection + " is cleared.");
    }

    /**
     * Attempts to acquire the synchronization lock on the remote collection
     * for the given issuer; the lock auto-expires after {@code delay} ms.
     */
    @RequestMapping(path = "/tryLock/issuer/{issuer}/delay/{delay}",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "This operation tries to acquire a synchronization lock on the remote collection." +
            "Before start of replication, a sync lock must be acquired on remote collection. If the acquisition " +
            "is unsuccessful, replication will not occur and it will be retried in next iteration.",
            response = TryLockResponse.class)
    @ApiResponses(value = {
            @ApiResponse(code = 400, message = "Invalid request", response = DataGateError.class),
            @ApiResponse(code = 401, message = "Request is not authorized", response = DataGateError.class),
            @ApiResponse(code = 500, message = "Error processing request", response = DataGateError.class),
    })
    public TryLockResponse tryLock(
            @PathVariable
            @ApiParam("Name of the remote collection.")
                    String collection,
            @PathVariable
            @ApiParam("The originator of the request.")
                    String issuer,
            @RequestHeader(value = UserAgent.USER_AGENT, required = false)
            @ApiParam("The user agent of the request.")
                    String userAgent,
            @PathVariable
            @ApiParam("The expiry delay. If the expiryDelay is expired, then a new lock " +
                    "will be acquired overwriting previous lock information.")
                    long delay) {
        log.debug("Validating tryLock request for " + collection + " by " + issuer);
        dataGateService.validateRequest(collection);
        boolean result = dataGateService.tryLock(collection, issuer, userAgent, TimeSpan.timeSpan(delay, TimeUnit.MILLISECONDS));
        if (result) {
            log.debug("Lock acquired for " + collection + " by " + issuer);
        } else {
            log.debug(issuer + " failed to acquire lock for " + collection);
        }
        return new TryLockResponse(result);
    }

    /**
     * Releases the synchronization lock held by the given issuer on the
     * remote collection.
     */
    @RequestMapping(path = "/releaseLock/issuer/{issuer}",
            method = RequestMethod.GET,
            produces = MediaType.APPLICATION_JSON_VALUE)
    // Fixed: previous description was copy-pasted from a connectivity check
    // and did not describe this endpoint.
    @ApiOperation(value = "This operation releases the synchronization lock on the remote collection.")
    @ApiResponses(value = {
            @ApiResponse(code = 400, message = "Invalid request", response = DataGateError.class),
            @ApiResponse(code = 401, message = "Request is not authorized", response = DataGateError.class),
            @ApiResponse(code = 500, message = "Error processing request", response = DataGateError.class),
    })
    public void releaseLock(
            @PathVariable
            @ApiParam("Name of the remote collection.")
                    String collection,
            @PathVariable
            @ApiParam("The originator of the request.")
                    String issuer) {
        log.debug("Validating releaseLock request for " + collection + " by " + issuer);
        dataGateService.validateRequest(collection);
        dataGateService.releaseLock(collection, issuer);
        log.debug("Lock released for " + collection + " by " + issuer);
    }
}
| apache-2.0 |
indashnet/InDashNet.Open.UN2000 | android/libcore/crypto/src/main/java/org/conscrypt/JSSEProvider.java | 4365 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.conscrypt;
import java.security.Provider;
/**
* JSSE Provider implementation.
*
* This implementation is based on TLS v 1.0 and SSL v3 protocol specifications.
*
* <ul>
* <li><a href="http://www.ietf.org/rfc/rfc2246.txt">TLS v 1.0 Protocol
* specification</a></li>
* <li><a href="http://wp.netscape.com/eng/ssl3">SSL v3 Protocol
* specification</a></li>
* </ul>
*
* Provider implementation supports the following cipher suites:
* TLS_NULL_WITH_NULL_NULL
* TLS_RSA_WITH_NULL_MD5
* TLS_RSA_WITH_NULL_SHA
* TLS_RSA_EXPORT_WITH_RC4_40_MD5
* TLS_RSA_WITH_RC4_128_MD5
* TLS_RSA_WITH_RC4_128_SHA
* TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5
* TLS_RSA_WITH_IDEA_CBC_SHA
* TLS_RSA_EXPORT_WITH_DES40_CBC_SHA
* TLS_RSA_WITH_DES_CBC_SHA
* TLS_RSA_WITH_3DES_EDE_CBC_SHA
* TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA
* TLS_DH_DSS_WITH_DES_CBC_SHA
* TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA
* TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA
* TLS_DH_RSA_WITH_DES_CBC_SHA
* TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA
* TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA
* TLS_DHE_DSS_WITH_DES_CBC_SHA
* TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA
* TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA
* TLS_DHE_RSA_WITH_DES_CBC_SHA
* TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA
* TLS_DH_anon_EXPORT_WITH_RC4_40_MD5
* TLS_DH_anon_WITH_RC4_128_MD5
* TLS_DH_anon_EXPORT_WITH_DES40_CBC_SHA
* TLS_DH_anon_WITH_DES_CBC_SHA
* TLS_DH_anon_WITH_3DES_EDE_CBC_SHA
*
* The real set of available cipher suites depends on set of available
* crypto algorithms. These algorithms must be provided by some crypto
* provider.
*
* The following cipher algorithms are used by different cipher suites:
* IDEA/CBC/NoPadding
* RC2/CBC/NoPadding
* RC4
* DES/CBC/NoPadding
* DES/CBC/NoPadding
* DESede/CBC/NoPadding
*
* Also the current JSSE provider implementation uses the following
* crypto algorithms:
*
* Algorithms that MUST be provided by crypto provider:
* Mac HmacMD5
* Mac HmacSHA1
* MessageDigest MD5
* MessageDigest SHA-1
* CertificateFactory X509
*
* The cipher suites with RSA key exchange may also require:
* Cipher RSA
* KeyPairGenerator RSA
* KeyFactory RSA
*
* The cipher suites with DH key exchange may also require:
* Signature NONEwithDSA
* KeyPairGenerator DiffieHellman or DH
* KeyFactory DiffieHellman or DH
* KeyAgreement DiffieHellman or DH
* KeyPairGenerator DiffieHellman or DH
*
* Trust manager implementation requires:
* CertPathValidator PKIX
* CertificateFactory X509
*
*/
public final class JSSEProvider extends Provider {

    private static final long serialVersionUID = 3075686092260669675L;

    /**
     * Registers the JSSE services offered by this provider: one
     * {@code SSLContext} implementation under each supported protocol name,
     * PKIX key/trust manager factories (with the historical "X509" alias),
     * and the Android CA-store backed {@code KeyStore}.
     */
    public JSSEProvider() {
        super("HarmonyJSSE", 1.0, "Harmony JSSE Provider");

        // Every supported protocol name maps onto the same SSLContext implementation.
        String sslContextImpl = SSLContextImpl.class.getName();
        for (String protocol : new String[] { "SSL", "SSLv3", "TLS", "TLSv1" }) {
            put("SSLContext." + protocol, sslContextImpl);
        }

        // "PKIX" is the canonical algorithm name; "X509" is kept as an alias.
        put("KeyManagerFactory.PKIX", KeyManagerFactoryImpl.class.getName());
        put("Alg.Alias.KeyManagerFactory.X509", "PKIX");
        put("TrustManagerFactory.PKIX", TrustManagerFactoryImpl.class.getName());
        put("Alg.Alias.TrustManagerFactory.X509", "PKIX");

        put("KeyStore.AndroidCAStore", TrustedCertificateKeyStoreSpi.class.getName());
    }
}
| apache-2.0 |
eemirtekin/Sakai-10.6-TR | rights/rights-api/api/src/java/org/sakaiproject/rights/api/RightsService.java | 2285 | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/rights/tags/sakai-10.6/rights-api/api/src/java/org/sakaiproject/rights/api/RightsService.java $
* $Id: RightsService.java 105079 2012-02-24 23:08:11Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.rights.api;
import org.sakaiproject.exception.IdUnusedException;
public interface RightsService
{
	/**
	 * Creates a new rights assignment for the given entity reference.
	 *
	 * @param entityRef reference of the entity the rights apply to
	 * @return the newly created rights assignment
	 */
	public RightsAssignment addRightsAssignment(String entityRef);

	/**
	 * Creates a new site-level rights policy for the given context (site id).
	 *
	 * @param context the site context
	 * @return the newly created site rights policy
	 */
	public SiteRightsPolicy addSiteRightsPolicy(String context);

	/**
	 * Creates a new user-level rights policy for a user within a context.
	 *
	 * @param context the site context
	 * @param userId id of the user the policy applies to
	 * @return the newly created user rights policy
	 */
	public UserRightsPolicy addUserRightsPolicy(String context, String userId);

	/**
	 * Retrieves the rights assignment for the given entity reference.
	 *
	 * @param entityRef reference of the entity
	 * @return the rights assignment for the entity
	 * @throws IdUnusedException if no rights assignment exists for the reference
	 */
	public RightsAssignment getRightsAssignment(String entityRef) throws IdUnusedException;

	/**
	 * Retrieves the site-level rights policy for the given context.
	 *
	 * @param context the site context
	 * @return the site rights policy for the context
	 */
	public SiteRightsPolicy getSiteRightsPolicy(String context);

	/**
	 * Retrieves the user-level rights policy for a user within a context.
	 *
	 * @param context the site context
	 * @param userId id of the user
	 * @return the user rights policy for the user in the context
	 */
	public UserRightsPolicy getUserRightsPolicy(String context, String userId);

	/**
	 * Persists the given rights assignment.
	 *
	 * @param rights the rights assignment to save
	 */
	public void save(RightsAssignment rights);

	/**
	 * Persists the given rights policy.
	 *
	 * @param policy the rights policy to save
	 */
	public void save(RightsPolicy policy);

	/**
	 * Associates the given rights assignment with the entity reference.
	 *
	 * @param entityRef reference of the entity
	 * @param rights the rights assignment to associate
	 */
	public void setRightsAssignment(String entityRef, RightsAssignment rights);

} // interface RightsService
| apache-2.0 |
lklong/fuckproject | src/com/zhigu/controllers/test/AreaController.java | 1722 | package com.zhigu.controllers.test;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.zhigu.common.constant.Code;
import com.zhigu.model.Area;
import com.zhigu.model.dto.MsgBean;
import com.zhigu.service.common.AreaService;
@Controller
@RequestMapping("/area")
public class AreaController {

	@Autowired
	private AreaService areaService;

	/**
	 * Area selection: returns the child areas of the given parent area, used
	 * to populate cascading region selectors.
	 *
	 * @param parentId id of the parent area
	 * @param msgBean response wrapper to populate
	 * @return msgBean carrying Code.SUCCESS and the list of child areas
	 */
	@RequestMapping("/{parentId}")
	@ResponseBody
	public MsgBean changeArea(@PathVariable String parentId, MsgBean msgBean) {
		msgBean.setCode(Code.SUCCESS);
		msgBean.setData(areaService.selectByParentId(parentId));
		return msgBean;
	}

	/**
	 * Area echo-back: returns the option lists needed to re-display a
	 * previously chosen province/city/district selection. The result maps
	 * each selected id to the list of options at the next level (root
	 * provinces keyed by provinceId's level, etc.).
	 *
	 * @param provinceId id of the selected province
	 * @param cityId id of the selected city
	 * @param districtId id of the selected district
	 * @param msgBean response wrapper to populate
	 * @return msgBean carrying Code.SUCCESS and a map of id to option lists
	 */
	@RequestMapping("/default")
	@ResponseBody
	public MsgBean getDefaultArea(String provinceId, String cityId, String districtId, MsgBean msgBean) {
		msgBean.setCode(Code.SUCCESS);
		// "0" is the root parent id, so this yields the full province list.
		List<Area> provinces = areaService.selectByParentId("0");
		List<Area> cities = areaService.selectByParentId(provinceId);
		List<Area> districts = areaService.selectByParentId(cityId);
		Map<String, List<Area>> areaMap = new HashMap<String, List<Area>>();
		areaMap.put(provinceId, provinces);
		areaMap.put(cityId, cities);
		areaMap.put(districtId, districts);
		msgBean.setData(areaMap);
		return msgBean;
	}
}
| apache-2.0 |
siara-cc/csv_ml | src/cc/siara/csv_ml/InputSource.java | 1988 | /*
* Copyright (C) 2015 Siara Logics (cc)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Arundale R.
*
*/
package cc.siara.csv_ml;
import java.io.InputStream;
import java.io.Reader;
/**
* InputSource enables the Input to be given either as a character stream
* (java.io.Reader) or a byte stream (InputStream)
*
* @author Arundale R.
* @since 1.0
*/
public class InputSource {

    /** Marker value: the input is supplied as a byte stream. */
    public static final short IS_BYTE_STREAM = 0;
    /** Marker value: the input is supplied as a character stream. */
    public static final short IS_CHAR_STREAM = 1;

    // Exactly one of 'reader'/'is' is non-null; 'type' records which.
    short type;
    Reader reader;
    InputStream is;

    /**
     * Creates a source backed by a character stream.
     *
     * @param characterStream the java.io.Reader to read from
     */
    public InputSource(Reader characterStream) {
        type = IS_CHAR_STREAM;
        this.reader = characterStream;
    }

    /**
     * Creates a source backed by a byte stream.
     *
     * @param byteStream the java.io.InputStream to read from
     */
    public InputSource(InputStream byteStream) {
        type = IS_BYTE_STREAM;
        this.is = byteStream;
    }

    /**
     * Indicates which kind of stream backs this source.
     *
     * @return {@link #IS_CHAR_STREAM} or {@link #IS_BYTE_STREAM}
     */
    public short getType() {
        return type;
    }

    /**
     * The character stream, when this source wraps one.
     *
     * @return the Reader, or null for a byte-stream source
     */
    public Reader getReader() {
        return reader;
    }

    /**
     * The byte stream, when this source wraps one.
     *
     * @return the InputStream, or null for a character-stream source
     */
    public InputStream getInputStream() {
        return is;
    }
}
| apache-2.0 |
TheProjecter/serene | src/serene/validation/schema/simplified/SExceptNameClass.java | 2080 | /*
Copyright 2010 Radu Cernuta
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package serene.validation.schema.simplified;
import org.xml.sax.SAXException;
import serene.bind.util.DocumentIndexedData;
public class SExceptNameClass extends SNameClass{
    /** The name class excluded by this "except"; may be null (excludes nothing). */
    SNameClass child;

    public SExceptNameClass(SNameClass child,
                            int recordIndex,
                            DocumentIndexedData documentIndexedData){
        super(recordIndex, documentIndexedData);
        asParent(child);
    }

    /** Adopts the child name class, wiring its parent pointer to this node. */
    protected void asParent(SNameClass child){
        this.child = child;
        if(child != null){
            child.setParent(this, 0);
        }
    }

    /**
     * Matches every name EXCEPT those matched by the child; with no child,
     * everything matches.
     */
    public boolean matches(String namespace, String name){
        return child == null || !child.matches(namespace, name);
    }

    public SNameClass getChild(){
        return child;
    }

    public void accept(SimplifiedComponentVisitor v){
        v.visit(this);
    }

    public void accept(RestrictingVisitor v) throws SAXException{
        v.visit(this);
    }

    /** Two except-name-classes are equal when their children are equal (or both null). */
    @Override
    public boolean equals(Object o){
        if(!(o instanceof SExceptNameClass)) return false; // also rejects null
        SExceptNameClass other = (SExceptNameClass) o;
        SNameClass otherChild = other.getChild();
        if(child == null) return otherChild == null;
        return child.equals(otherChild);
    }

    /**
     * Added: equals() was overridden without hashCode(), breaking the
     * Object contract (equal objects must share a hash code).
     */
    @Override
    public int hashCode(){
        return child == null ? 0 : child.hashCode();
    }

    @Override
    public String toString(){
        String s = "SExceptNameClass ";
        if(child != null) s += child.toString();
        return s;
    }
} | apache-2.0 |
adbrucker/SecureBPMN | designer/src/org.activiti.designer.model.edit/src/org/eclipse/bpmn2/provider/GlobalManualTaskItemProvider.java | 3673 | /**
* <copyright>
*
* Copyright (c) 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Reiner Hille-Doering (SAP AG) - initial API and implementation and/or initial documentation
*
* </copyright>
*/
package org.eclipse.bpmn2.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.bpmn2.GlobalManualTask;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
/**
* This is the item provider adapter for a {@link org.eclipse.bpmn2.GlobalManualTask} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// NOTE(review): EMF-generated item provider (@generated tags below). Regenerate
// from the model rather than editing method bodies by hand.
public class GlobalManualTaskItemProvider extends GlobalTaskItemProvider
		implements IEditingDomainItemProvider, IStructuredItemContentProvider,
		ITreeItemContentProvider, IItemLabelProvider, IItemPropertySource {
	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public GlobalManualTaskItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}

	/**
	 * This returns the property descriptors for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		// Lazily built once; the superclass call populates itemPropertyDescriptors.
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);
		}
		return itemPropertyDescriptors;
	}

	/**
	 * This returns GlobalManualTask.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object getImage(Object object) {
		return overlayImage(object,
				getResourceLocator().getImage("full/obj16/GlobalManualTask"));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected boolean shouldComposeCreationImage() {
		return true;
	}

	/**
	 * This returns the label text for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getText(Object object) {
		// Falls back to the bare type name when the task has no name set.
		String label = ((GlobalManualTask) object).getName();
		return label == null || label.length() == 0 ? getString("_UI_GlobalManualTask_type")
				: getString("_UI_GlobalManualTask_type") + " " + label;
	}

	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void notifyChanged(Notification notification) {
		updateChildren(notification);
		super.notifyChanged(notification);
	}

	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void collectNewChildDescriptors(
			Collection<Object> newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);
	}
}
| apache-2.0 |
debop/debop4k | debop4k-core/src/test/java/debop4k/core/java8/model/Order.java | 966 | /*
* Copyright (c) 2016. KESTI co, ltd
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package debop4k.core.java8.model;
import java.util.List;
/**
* @author sunghyouk.bae@gmail.com
*/
public abstract class Order {
  // Albums contained in this order.
  // NOTE(review): the constructor stores the caller's list reference directly,
  // so external mutations remain visible here — confirm this is intended.
  protected final List<Album> albums;

  public Order(List<Album> albums) {
    this.albums = albums;
  }

  /** Total running time across the order's albums. */
  public abstract long countRunningTime();

  /** Total number of musicians across the order's albums. */
  public abstract long countMusicians();

  /** Total number of tracks across the order's albums. */
  public abstract long countTracks();
}
| apache-2.0 |
Crysty-Yui/smali2java | src/main/java/com/litecoding/smali2java/parser/smali/Rule_codeRegisterRet64.java | 2724 | /* -----------------------------------------------------------------------------
* Rule_codeRegisterRet64.java
* -----------------------------------------------------------------------------
*
* Producer : com.parse2.aparse.Parser 2.3
* Produced : Fri Apr 12 10:40:21 MUT 2013
*
* -----------------------------------------------------------------------------
*/
package com.litecoding.smali2java.parser.smali;
import java.util.ArrayList;
import com.litecoding.smali2java.builder.Visitor;
import com.litecoding.smali2java.parser.ParserContext;
import com.litecoding.smali2java.parser.Rule;
/**
 * Generated rule class for {@code codeRegisterRet64}: an ordered choice that
 * matches either a {@code codeRegisterP64} or a {@code codeRegisterV64}.
 *
 * NOTE: this file is produced by the aparse parser generator (see file header);
 * hand edits here will be lost on regeneration.
 */
final public class Rule_codeRegisterRet64 extends Rule
{
  // Instances are created only by parse(); spelling is the matched input text
  // and rules are the matched sub-rules.
  private Rule_codeRegisterRet64(String spelling, ArrayList<Rule> rules)
  {
    super(spelling, rules);
  }

  // Visitor dispatch hook for tree walkers.
  public Object accept(Visitor visitor)
  {
    return visitor.visit(this);
  }

  /**
   * Attempts to parse this rule at the current position of {@code context}.
   * On success, returns a new {@code Rule_codeRegisterRet64} wrapping the
   * matched text and sub-rules; on failure, restores {@code context.index}
   * to its starting value and returns {@code null}.
   */
  public static Rule_codeRegisterRet64 parse(ParserContext context)
  {
    context.push("codeRegisterRet64");

    boolean parsed = true;
    int s0 = context.index;                       // overall start, for backtracking and spelling
    ArrayList<Rule> e0 = new ArrayList<Rule>();   // accumulated sub-rules
    Rule rule;

    // Ordered choice: try each alternative in turn until one parses.
    parsed = false;
    if (!parsed)
    {
      // Alternative 1: exactly one codeRegisterP64.
      {
        ArrayList<Rule> e1 = new ArrayList<Rule>();
        int s1 = context.index;                   // alternative start, for backtracking
        parsed = true;
        if (parsed)
        {
          boolean f1 = true;
          int c1 = 0;
          // Loop bound of 1 with the f1 flag means "exactly one occurrence".
          for (int i1 = 0; i1 < 1 && f1; i1++)
          {
            rule = Rule_codeRegisterP64.parse(context);
            if ((f1 = rule != null))
            {
              e1.add(rule);
              c1++;
            }
          }
          parsed = c1 == 1;
        }
        if (parsed)
          e0.addAll(e1);
        else
          context.index = s1;                     // backtrack on failure
      }
    }
    if (!parsed)
    {
      // Alternative 2: exactly one codeRegisterV64.
      {
        ArrayList<Rule> e1 = new ArrayList<Rule>();
        int s1 = context.index;                   // alternative start, for backtracking
        parsed = true;
        if (parsed)
        {
          boolean f1 = true;
          int c1 = 0;
          // Loop bound of 1 with the f1 flag means "exactly one occurrence".
          for (int i1 = 0; i1 < 1 && f1; i1++)
          {
            rule = Rule_codeRegisterV64.parse(context);
            if ((f1 = rule != null))
            {
              e1.add(rule);
              c1++;
            }
          }
          parsed = c1 == 1;
        }
        if (parsed)
          e0.addAll(e1);
        else
          context.index = s1;                     // backtrack on failure
      }
    }

    // Wrap the matched span on success; otherwise rewind to the overall start.
    rule = null;
    if (parsed)
      rule = new Rule_codeRegisterRet64(context.text.substring(s0, context.index), e0);
    else
      context.index = s0;

    context.pop("codeRegisterRet64", parsed);

    return (Rule_codeRegisterRet64)rule;
  }
}
/* -----------------------------------------------------------------------------
* eof
* -----------------------------------------------------------------------------
*/
| apache-2.0 |
potto007/druid-avro | server/src/main/java/io/druid/server/log/FileRequestLogger.java | 3956 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.server.log;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.base.Throwables;
import io.druid.java.util.common.concurrent.ScheduledExecutors;
import io.druid.java.util.common.guava.CloseQuietly;
import io.druid.java.util.common.lifecycle.LifecycleStart;
import io.druid.java.util.common.lifecycle.LifecycleStop;
import io.druid.server.RequestLogLine;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.MutableDateTime;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.concurrent.Callable;
import java.util.concurrent.ScheduledExecutorService;
/**
*/
/**
 * {@link RequestLogger} that appends one line per request to a file under
 * {@code baseDir}, rotating to a new file at each midnight (default time zone).
 * File names follow the pattern {@code yyyy-MM-dd.log}.
 *
 * Thread-safe: all access to the current day and writer is guarded by {@code lock}.
 */
public class FileRequestLogger implements RequestLogger
{
  // Joda-Time pattern for a day's log file name, e.g. "2016-01-31.log".
  private static final String FILE_NAME_PATTERN = "yyyy-MM-dd'.log'";

  private final ObjectMapper objectMapper;
  private final ScheduledExecutorService exec;
  private final File baseDir;

  private final Object lock = new Object();

  // Start of the day the currently-open file covers; guarded by lock.
  private DateTime currentDay;
  // Appending UTF-8 writer on the current day's file; guarded by lock.
  private OutputStreamWriter fileWriter;

  public FileRequestLogger(ObjectMapper objectMapper, ScheduledExecutorService exec, File baseDir)
  {
    this.exec = exec;
    this.objectMapper = objectMapper;
    this.baseDir = baseDir;
  }

  @LifecycleStart
  public void start()
  {
    try {
      baseDir.mkdirs();

      // Truncate "now" to the start of the current day.
      MutableDateTime mutableDateTime = new DateTime().toMutableDateTime();
      mutableDateTime.setMillisOfDay(0);
      synchronized (lock) {
        currentDay = mutableDateTime.toDateTime();
        fileWriter = createFileWriter();
      }
      long nextDay = currentDay.plusDays(1).getMillis();
      Duration initialDelay = new Duration(nextDay - new DateTime().getMillis());

      // Rotate to a fresh file once per day, starting at the next midnight.
      ScheduledExecutors.scheduleWithFixedDelay(
          exec,
          initialDelay,
          Duration.standardDays(1),
          new Callable<ScheduledExecutors.Signal>()
          {
            @Override
            public ScheduledExecutors.Signal call()
            {
              try {
                synchronized (lock) {
                  currentDay = currentDay.plusDays(1);
                  CloseQuietly.close(fileWriter);
                  // BUG FIX: this previously opened new File(baseDir, currentDay.toString()),
                  // naming the rotated file with a full ISO timestamp instead of the
                  // "yyyy-MM-dd.log" pattern used by start() — so files created by rotation
                  // were never found or appended to after a restart. Both paths now share
                  // createFileWriter(), which uses FILE_NAME_PATTERN.
                  fileWriter = createFileWriter();
                }
              }
              catch (Exception e) {
                throw Throwables.propagate(e);
              }
              return ScheduledExecutors.Signal.REPEAT;
            }
          }
      );
    }
    catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }

  /**
   * Opens an appending UTF-8 writer on the file covering {@link #currentDay}.
   * Must be called while holding {@code lock}.
   */
  private OutputStreamWriter createFileWriter() throws IOException
  {
    return new OutputStreamWriter(
        new FileOutputStream(new File(baseDir, currentDay.toString(FILE_NAME_PATTERN)), true),
        Charsets.UTF_8
    );
  }

  @LifecycleStop
  public void stop()
  {
    synchronized (lock) {
      CloseQuietly.close(fileWriter);
    }
  }

  @Override
  public void log(RequestLogLine requestLogLine) throws IOException
  {
    synchronized (lock) {
      fileWriter.write(
          String.format("%s%n", requestLogLine.getLine(objectMapper))
      );
      fileWriter.flush();
    }
  }
}
| apache-2.0 |