repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
dagnir/aws-sdk-java
aws-java-sdk-logs/src/main/java/com/amazonaws/services/logs/model/transform/DescribeLogStreamsResultJsonUnmarshaller.java
3146
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.logs.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.logs.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;

import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * DescribeLogStreamsResult JSON Unmarshaller.
 *
 * Generated code: walks the JSON token stream and populates a
 * {@link DescribeLogStreamsResult} from the fields found one level below the
 * current parse depth.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeLogStreamsResultJsonUnmarshaller implements Unmarshaller<DescribeLogStreamsResult, JsonUnmarshallerContext> {

    /**
     * Unmarshalls a DescribeLogStreamsResult from the context's JSON stream.
     *
     * @param context the streaming unmarshaller context positioned at (or just
     *                before) the result object
     * @return the populated result; empty if the current value is JSON null
     * @throws Exception propagated from the underlying parser/unmarshallers
     */
    public DescribeLogStreamsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        DescribeLogStreamsResult describeLogStreamsResult = new DescribeLogStreamsResult();

        // Remember where we started so we know when the enclosing object ends.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this result live exactly one level below the start depth.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null value means "no result payload" — return the empty result.
        if (token == VALUE_NULL) {
            return describeLogStreamsResult;
        }

        while (true) {
            // End of stream.
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("logStreams", targetDepth)) {
                    context.nextToken();
                    describeLogStreamsResult.setLogStreams(new ListUnmarshaller<LogStream>(LogStreamJsonUnmarshaller.getInstance()).unmarshall(context));
                }
                if (context.testExpression("nextToken", targetDepth)) {
                    context.nextToken();
                    describeLogStreamsResult.setNextToken(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out to (or above) the depth we
                // started at, i.e. the result object itself has ended.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return describeLogStreamsResult;
    }

    // Lazily-created singleton; generated SDK code is used single-threaded per
    // request, so no synchronization is applied here.
    private static DescribeLogStreamsResultJsonUnmarshaller instance;

    public static DescribeLogStreamsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DescribeLogStreamsResultJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
Will1229/LearnSpring
spring-framework-0.9.1/src/com/interface21/web/servlet/view/xslt/FormatHelper.java
4044
/*
 * Generic framework code included with
 * <a href="http://www.amazon.com/exec/obidos/tg/detail/-/1861007841/">Expert One-On-One J2EE Design and Development</a>
 * by Rod Johnson (Wrox, 2002).
 * This code is free to use and modify. However, please
 * acknowledge the source and include the above URL in each
 * class using or derived from this code.
 * Please contact <a href="mailto:rod.johnson@interface21.com">rod.johnson@interface21.com</a>
 * for commercial support.
 */

package com.interface21.web.servlet.view.xslt;

import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

import com.interface21.core.NestedRuntimeException;

/**
 * Xalan extension functions to provide date and currency formatting
 * beyond the capabilities of XSLT 1.0 or 1.1.
 *
 * <p>Note that all extension functions are static.
 * These extension functions must be declared to use this class.
 *
 * <p>Based on an example by Taylor Cowan.
 *
 * @author Rod Johnson
 */
public class FormatHelper {

	/**
	 * Creates a formatted-date node with the given ISO language and country strings.
	 *
	 * @param time millisecond timestamp to format
	 * @param language ISO language code
	 * @param country ISO country code
	 */
	public static Node dateTimeElement(long time, String language, String country) {
		Locale l = new Locale(language, country);
		return dateTimeElement(time, l);
	}

	/**
	 * Creates a formatted-date node with the default locale.
	 */
	public static Node dateTimeElement(long time) {
		return dateTimeElement(time, Locale.getDefault());
	}

	/**
	 * Create an XML element to represent this system time in the given locale.
	 * Enables XSLT stylesheets to display content, without needing to do the work
	 * of internationalization. The returned "formatted-date" element carries one
	 * child element per date field (month, day-of-week, year, day-of-month,
	 * hours, minutes, am-pm).
	 *
	 * @throws XsltFormattingException wrapping any failure while building the DOM
	 */
	public static Node dateTimeElement(long time, Locale locale) {
		try {
			Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
			Element dateNode = doc.createElement("formatted-date");
			// Construct the locale-aware formatter directly rather than casting
			// the result of DateFormat.getDateTimeInstance(..) to SimpleDateFormat:
			// that cast is not guaranteed to succeed in every locale and threw
			// ClassCastException where the factory returned another subclass.
			// The pattern is re-applied for each field below.
			SimpleDateFormat df = new SimpleDateFormat("MMMM", locale);
			Date d = new Date(time);
			addChild(dateNode, "month", df.format(d));
			df.applyPattern("EEEE");
			addChild(dateNode, "day-of-week", df.format(d));
			df.applyPattern("yyyy");
			addChild(dateNode, "year", df.format(d));
			df.applyPattern("dd");
			addChild(dateNode, "day-of-month", df.format(d));
			df.applyPattern("h");
			addChild(dateNode, "hours", df.format(d));
			df.applyPattern("mm");
			addChild(dateNode, "minutes", df.format(d));
			df.applyPattern("a");
			addChild(dateNode, "am-pm", df.format(d));
			return dateNode;
		}
		catch (Exception ex) {
			throw new XsltFormattingException("Failed to create XML date element", ex);
		}
	}

	/**
	 * Format a currency amount in a given locale.
	 */
	public static String currency(double amount, Locale locale) {
		NumberFormat nf = NumberFormat.getCurrencyInstance(locale);
		return nf.format(amount);
	}

	/**
	 * Format a currency amount in the locale given by ISO language/country codes.
	 * Falls back to the default locale if either code is null.
	 */
	public static String currency(double amount, String language, String country) {
		Locale locale = null;
		if (language == null || country == null) {
			locale = Locale.getDefault();
		}
		else {
			locale = new Locale(language, country);
		}
		return currency(amount, locale);
	}

	/**
	 * Utility method for adding a named child element containing a text node.
	 */
	private static void addChild(Node parent, String name, String text) {
		Element child = parent.getOwnerDocument().createElement(name);
		child.appendChild(parent.getOwnerDocument().createTextNode(text));
		parent.appendChild(child);
	}

	/**
	 * Runtime exception wrapping any formatting failure, so XSLT callers need
	 * not declare checked exceptions.
	 */
	public static class XsltFormattingException extends NestedRuntimeException {

		public XsltFormattingException(String msg, Throwable ex) {
			super(msg, ex);
		}
	}

}
apache-2.0
nugraviton/chao-open
chao-core/src/main/java/com/nugraviton/chao/job/DefaultProcessInputReader.java
1692
package com.nugraviton.chao.job; import java.io.BufferedReader; import java.util.UUID; public class DefaultProcessInputReader implements ProcessInputReader{ private final String jobName; private final UUID sessionId; private final BufferedReader reader; public DefaultProcessInputReader(String jobName, UUID sessionId, BufferedReader reader) { this.jobName = jobName; this.sessionId = sessionId; this.reader = reader; } /* (non-Javadoc) * @see com.nugraviton.chao.app.ProcessInputReader#getJobId() */ @Override public String getJobName() { return jobName; } /* (non-Javadoc) * @see com.nugraviton.chao.app.ProcessInputReader#getJobHandleId() */ @Override public UUID getSessionId() { return sessionId; } /* (non-Javadoc) * @see com.nugraviton.chao.app.ProcessInputReader#getReader() */ @Override public BufferedReader getReader() { return reader; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((sessionId == null) ? 0 : sessionId.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; DefaultProcessInputReader other = (DefaultProcessInputReader) obj; if (sessionId == null) { if (other.sessionId != null) return false; } else if (!sessionId.equals(other.sessionId)) return false; return true; } @Override public String toString() { return "DefaultProcessInputReader [jobName=" + jobName + ", sessionId=" + sessionId + "]"; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-memorydb/src/main/java/com/amazonaws/services/memorydb/model/ShardNotFoundException.java
1157
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.memorydb.model; import javax.annotation.Generated; /** * <p/> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ShardNotFoundException extends com.amazonaws.services.memorydb.model.AmazonMemoryDBException { private static final long serialVersionUID = 1L; /** * Constructs a new ShardNotFoundException with the specified error message. * * @param message * Describes the error encountered. */ public ShardNotFoundException(String message) { super(message); } }
apache-2.0
webuml/webuml-projectmanager
src/main/java/com/webuml/projectmanager/domain/metamodel/helper/OwnedAttributeHolder.java
289
package com.webuml.projectmanager.domain.metamodel.helper; import com.webuml.projectmanager.domain.primitives.PropertyId; import java.util.Set; public interface OwnedAttributeHolder { Set<PropertyId> getOwnedAttribute(); void setOwnedAttribute(Set<PropertyId> ownedAttributes); }
apache-2.0
pacozaa/BoofCV
main/feature/test/boofcv/alg/feature/detect/line/gridline/TestGridLineModelFitter.java
2361
/*
 * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.alg.feature.detect.line.gridline;

import georegression.metric.UtilAngle;
import georegression.struct.line.LinePolar2D_F32;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.*;

/**
 * Unit tests for GridLineModelFitter: fitting a polar line to a set of edgels
 * with an angular-compatibility tolerance (0.1 radians in these tests).
 *
 * @author Peter Abeles
 */
public class TestGridLineModelFitter {

	/**
	 * If only two points are passed in, they should fail if their orientations
	 * are more than the specified tolerance apart
	 */
	@Test
	public void checkFailInCompatible() {
		GridLineModelFitter alg = new GridLineModelFitter(0.1f);

		// angle test should use half-circle and this should pass:
		// PI/2 and -PI/2 are the same direction modulo a half turn
		List<Edgel> l = new ArrayList<Edgel>();
		l.add( new Edgel(0,0,(float)Math.PI/2f));
		l.add( new Edgel(1,0,(float)-Math.PI/2f));

		LinePolar2D_F32 model = new LinePolar2D_F32();
		assertTrue(alg.generate(l, model));

		// this one should fail: orientations differ by 0.5 rad > 0.1 tolerance
		l.clear();
		l.add( new Edgel(0,0,(float)Math.PI/2f));
		l.add( new Edgel(1,0,(float)Math.PI/2f-0.5f));
		assertFalse(alg.generate(l, model));
	}

	/**
	 * Fits a line to two (then three) edgels at x = 1 and checks the recovered
	 * polar parameters: distance 1 from the origin, angle 0 (half-circle metric).
	 */
	@Test
	public void checkFit() {
		GridLineModelFitter alg = new GridLineModelFitter(0.1f);

		// angle test should use half-circle and this should pass
		List<Edgel> l = new ArrayList<Edgel>();
		l.add( new Edgel(1,0,0f));
		l.add( new Edgel(1,2,(float)Math.PI));

		LinePolar2D_F32 model = new LinePolar2D_F32();
		assertTrue(alg.generate(l, model));
		assertEquals(1,model.distance,1e-4f);
		assertTrue(UtilAngle.distHalf(0, model.angle) < 1e-4f);

		// three points
		l.add( new Edgel(1,3,(float)-Math.PI/2f));
		assertTrue(alg.generate(l, model));
		assertEquals(1,model.distance,1e-4f);
		assertTrue(UtilAngle.distHalf(0, model.angle) < 1e-4f);
	}
}
apache-2.0
wayshall/onetwo
core/modules/common/src/main/java/org/onetwo/common/profiling/TimerOutputer.java
372
package org.onetwo.common.profiling; import java.util.Date; import org.onetwo.common.date.DateUtils; public class TimerOutputer implements TimeLogger { @Override public void log(Class<?> logSource, String msg, Object...args) { System.out.println("["+DateUtils.formatDateTime(new Date())+"]: "+logSource.getClass().getSimpleName()+" - "+msg); } }
apache-2.0
oleg-cherednik/hackerrank
Compete/Project Euler/#0000 - #0099/#002 - Even Fibonacci numbers/Solution.java
968
import java.util.Scanner; import java.util.Set; import java.util.TreeSet; /** * @author Oleg Cherednik * @since 19.07.2018 */ public class Solution { private static final Set<Long> EVEN_FIBONACCI = new TreeSet<>(); private static long f0 = 0; private static long f1 = 1; static long findEvenFibonacciSum(long n) { while (n > f1) { long num = f0 + f1; if (num % 2 == 0) EVEN_FIBONACCI.add(num); f0 = f1; f1 = num; } long sum = 0; for (long num : EVEN_FIBONACCI) { if (num > n) break; sum += num; } return sum; } public static void main(String[] args) { Scanner in = new Scanner(System.in); int t = in.nextInt(); for (int a0 = 0; a0 < t; a0++) { long n = in.nextLong(); System.out.println(findEvenFibonacciSum(n)); } } }
apache-2.0
cipicip/android
saq.forked/app/src/main/java/com/saq/android/data/CategoryReference.java
938
package com.saq.android.data; import java.util.ArrayList; import org.w3c.dom.Element; import org.w3c.dom.NodeList; public class CategoryReference extends ReferenceWithParent { private static final String SUB_CATEGORY_TAG = "sousCategorie"; public CategoryReference(ReferenceWithParent p) { super(p); } public void inflateFromXml(Element element) throws Exception { super.inflateFromXml(element); NodeList nodes = element.getChildNodes(); for (int i = 0; i < nodes.getLength(); i++) { if (nodes.item(i).getNodeName().equals(SUB_CATEGORY_TAG)) { if (this.subReferences == null) { this.subReferences = new ArrayList(); } CategoryReference cr = new CategoryReference(this); cr.inflateFromXml((Element) nodes.item(i)); this.subReferences.add(cr); } } } }
apache-2.0
zpao/buck
src/com/facebook/buck/features/python/PythonUtil.java
19663
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.features.python; import com.facebook.buck.core.cell.CellPathResolver; import com.facebook.buck.core.exceptions.HumanReadableException; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.rules.ActionGraphBuilder; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.BuildRuleParams; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.util.graph.AbstractBreadthFirstTraversal; import com.facebook.buck.cxx.CxxGenruleDescription; import com.facebook.buck.cxx.Omnibus; import com.facebook.buck.cxx.OmnibusLibraries; import com.facebook.buck.cxx.OmnibusRoot; import com.facebook.buck.cxx.OmnibusRoots; import com.facebook.buck.cxx.config.CxxBuckConfig; import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkStrategy; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkTarget; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkTargetMode; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkable; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkableGroup; import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkables; import com.facebook.buck.features.python.toolchain.PythonPlatform; import com.facebook.buck.io.file.MorePaths; import com.facebook.buck.io.filesystem.ProjectFilesystem; 
import com.facebook.buck.io.pathformat.PathFormatter; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.coercer.PatternMatchedCollection; import com.facebook.buck.rules.coercer.SourceSortedSet; import com.facebook.buck.rules.coercer.VersionMatchedCollection; import com.facebook.buck.rules.macros.AbsoluteOutputMacroExpander; import com.facebook.buck.rules.macros.LocationMacroExpander; import com.facebook.buck.rules.macros.Macro; import com.facebook.buck.rules.macros.MacroExpander; import com.facebook.buck.util.MoreMaps; import com.facebook.buck.util.stream.RichStream; import com.facebook.buck.versions.Version; import com.google.common.base.CaseFormat; import com.google.common.base.Preconditions; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.function.BiConsumer; public class PythonUtil { static final String SOURCE_EXT = "py"; static final String NATIVE_EXTENSION_EXT = "so"; static final String INIT_PY = "__init__.py"; static final ImmutableList<MacroExpander<? 
extends Macro, ?>> MACRO_EXPANDERS = ImmutableList.of(new LocationMacroExpander(), new AbsoluteOutputMacroExpander()); private PythonUtil() {} public static boolean isModuleExt(String ext) { return ext.equals(NATIVE_EXTENSION_EXT) || ext.equals(SOURCE_EXT); } public static ImmutableList<BuildTarget> getDeps( PythonPlatform pythonPlatform, CxxPlatform cxxPlatform, ImmutableSortedSet<BuildTarget> deps, PatternMatchedCollection<ImmutableSortedSet<BuildTarget>> platformDeps) { return RichStream.from(deps) .concat( platformDeps.getMatchingValues(pythonPlatform.getFlavor().toString()).stream() .flatMap(Collection::stream)) .concat( platformDeps.getMatchingValues(cxxPlatform.getFlavor().toString()).stream() .flatMap(Collection::stream)) .toImmutableList(); } public static void forEachModule( BuildTarget target, ActionGraphBuilder graphBuilder, PythonPlatform pythonPlatform, CxxPlatform cxxPlatform, String parameter, Path baseModule, SourceSortedSet items, PatternMatchedCollection<SourceSortedSet> platformItems, Optional<VersionMatchedCollection<SourceSortedSet>> versionItems, Optional<ImmutableMap<BuildTarget, Version>> versions, BiConsumer<Path, SourcePath> consumer) { forEachModuleParam( target, graphBuilder, cxxPlatform, parameter, baseModule, ImmutableList.of(items), consumer); forEachModuleParam( target, graphBuilder, cxxPlatform, "platform" + CaseFormat.LOWER_HYPHEN.to(CaseFormat.UPPER_CAMEL, parameter), baseModule, Iterables.concat( platformItems.getMatchingValues(pythonPlatform.getFlavor().toString()), platformItems.getMatchingValues(cxxPlatform.getFlavor().toString())), consumer); forEachModuleParam( target, graphBuilder, cxxPlatform, "versioned" + CaseFormat.LOWER_HYPHEN.to(CaseFormat.UPPER_CAMEL, parameter), baseModule, versions.isPresent() && versionItems.isPresent() ? 
versionItems.get().getMatchingValues(versions.get()) : ImmutableList.of(), consumer); } public static void forEachSrc( BuildTarget target, ActionGraphBuilder graphBuilder, PythonPlatform pythonPlatform, CxxPlatform cxxPlatform, Optional<ImmutableMap<BuildTarget, Version>> versions, PythonLibraryDescription.CoreArg args, BiConsumer<Path, SourcePath> consumer) { forEachModule( target, graphBuilder, pythonPlatform, cxxPlatform, "srcs", PythonUtil.getBasePath(target, args.getBaseModule()), args.getSrcs(), args.getPlatformSrcs(), args.getVersionedSrcs(), versions, consumer); } public static ImmutableSortedMap<Path, SourcePath> parseSources( BuildTarget target, ActionGraphBuilder graphBuilder, PythonPlatform pythonPlatform, CxxPlatform cxxPlatform, Optional<ImmutableMap<BuildTarget, Version>> versions, PythonLibraryDescription.CoreArg args) { ImmutableSortedMap.Builder<Path, SourcePath> builder = ImmutableSortedMap.naturalOrder(); forEachSrc( target, graphBuilder, pythonPlatform, cxxPlatform, versions, args, (name, src) -> { if (MorePaths.getFileExtension(name).equals(SOURCE_EXT)) { builder.put(name, src); } }); return builder.build(); } public static ImmutableSortedMap<Path, SourcePath> parseModules( BuildTarget target, ActionGraphBuilder graphBuilder, PythonPlatform pythonPlatform, CxxPlatform cxxPlatform, Optional<ImmutableMap<BuildTarget, Version>> versions, PythonLibraryDescription.CoreArg args) { ImmutableSortedMap.Builder<Path, SourcePath> builder = ImmutableSortedMap.naturalOrder(); forEachSrc(target, graphBuilder, pythonPlatform, cxxPlatform, versions, args, builder::put); return builder.build(); } public static ImmutableSortedMap<Path, SourcePath> parseResources( BuildTarget target, ActionGraphBuilder graphBuilder, PythonPlatform pythonPlatform, CxxPlatform cxxPlatform, Optional<ImmutableMap<BuildTarget, Version>> versions, PythonLibraryDescription.CoreArg args) { ImmutableSortedMap.Builder<Path, SourcePath> builder = ImmutableSortedMap.naturalOrder(); 
forEachModule( target, graphBuilder, pythonPlatform, cxxPlatform, "resources", PythonUtil.getBasePath(target, args.getBaseModule()), args.getResources(), args.getPlatformResources(), args.getVersionedResources(), versions, builder::put); return builder.build(); } static void forEachModuleParam( BuildTarget target, ActionGraphBuilder actionGraphBuilder, CxxPlatform cxxPlatform, String parameter, Path baseModule, Iterable<SourceSortedSet> inputs, BiConsumer<Path, SourcePath> consumer) { for (SourceSortedSet input : inputs) { ImmutableMap<String, SourcePath> namesAndSourcePaths; if (input.getUnnamedSources().isPresent()) { namesAndSourcePaths = actionGraphBuilder .getSourcePathResolver() .getSourcePathNames(target, parameter, input.getUnnamedSources().get()); } else { namesAndSourcePaths = input.getNamedSources().get(); } for (ImmutableMap.Entry<String, SourcePath> entry : namesAndSourcePaths.entrySet()) { consumer.accept( baseModule.resolve(entry.getKey()), CxxGenruleDescription.fixupSourcePath( actionGraphBuilder, cxxPlatform, entry.getValue())); } } } /** Convert a path to a module to it's module name as referenced in import statements. */ static String toModuleName(BuildTarget target, String name) { int ext = name.lastIndexOf('.'); if (ext == -1) { throw new HumanReadableException("%s: missing extension for module path: %s", target, name); } return toModuleName(name); } /** Convert a path to a module to it's module name as referenced in import statements. 
*/ static String toModuleName(String name) { int ext = name.lastIndexOf('.'); Preconditions.checkState(ext != -1); name = name.substring(0, ext); return PathFormatter.pathWithUnixSeparators(name).replace('/', '.'); } static PythonPackageComponents getAllComponents( CellPathResolver cellPathResolver, BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, ActionGraphBuilder graphBuilder, PythonPackagable binary, PythonPlatform pythonPlatform, CxxBuckConfig cxxBuckConfig, CxxPlatform cxxPlatform, ImmutableList<? extends Arg> extraLdflags, NativeLinkStrategy nativeLinkStrategy, ImmutableSet<BuildTarget> preloadDeps, boolean compile) { PythonPackageComponents.Builder allComponents = new PythonPackageComponents.Builder(); Map<BuildTarget, CxxPythonExtension> extensions = new LinkedHashMap<>(); Map<BuildTarget, NativeLinkable> nativeLinkableRoots = new LinkedHashMap<>(); OmnibusRoots.Builder omnibusRoots = OmnibusRoots.builder(preloadDeps, graphBuilder); // Walk all our transitive deps to build our complete package that we'll // turn into an executable. 
new AbstractBreadthFirstTraversal<Object>( Iterables.concat(ImmutableList.of(binary), graphBuilder.getAllRules(preloadDeps))) { private final ImmutableList<BuildRule> empty = ImmutableList.of(); @Override public Iterable<?> visit(Object node) { Iterable<?> deps = empty; if (node instanceof CxxPythonExtension) { CxxPythonExtension extension = (CxxPythonExtension) node; NativeLinkTarget target = extension.getNativeLinkTarget(pythonPlatform, cxxPlatform, graphBuilder); extensions.put(target.getBuildTarget(), extension); omnibusRoots.addIncludedRoot(target); List<BuildRule> cxxpydeps = new ArrayList<>(); for (BuildRule dep : extension.getPythonPackageDeps(pythonPlatform, cxxPlatform, graphBuilder)) { if (dep instanceof PythonPackagable) { cxxpydeps.add(dep); } } deps = cxxpydeps; } else if (node instanceof PythonPackagable) { PythonPackagable packagable = (PythonPackagable) node; packagable .getPythonModules(pythonPlatform, cxxPlatform, graphBuilder) .ifPresent(modules -> allComponents.putModules(packagable.getBuildTarget(), modules)); if (compile) { packagable .getPythonBytecode(pythonPlatform, cxxPlatform, graphBuilder) .ifPresent( bytecode -> allComponents.putModules(packagable.getBuildTarget(), bytecode)); } packagable .getPythonResources(pythonPlatform, cxxPlatform, graphBuilder) .ifPresent( resources -> allComponents.putResources(packagable.getBuildTarget(), resources)); allComponents.addZipSafe(packagable.isPythonZipSafe()); Iterable<BuildRule> packagableDeps = packagable.getPythonPackageDeps(pythonPlatform, cxxPlatform, graphBuilder); if (nativeLinkStrategy == NativeLinkStrategy.MERGED && packagable.doesPythonPackageDisallowOmnibus( pythonPlatform, cxxPlatform, graphBuilder)) { for (BuildRule dep : packagableDeps) { if (dep instanceof NativeLinkableGroup) { NativeLinkable linkable = ((NativeLinkableGroup) dep).getNativeLinkable(cxxPlatform, graphBuilder); nativeLinkableRoots.put(linkable.getBuildTarget(), linkable); omnibusRoots.addExcludedRoot(linkable); } } } 
deps = packagableDeps; } else if (node instanceof NativeLinkableGroup) { NativeLinkable linkable = ((NativeLinkableGroup) node).getNativeLinkable(cxxPlatform, graphBuilder); nativeLinkableRoots.put(linkable.getBuildTarget(), linkable); omnibusRoots.addPotentialRoot(linkable); } return deps; } }.start(); // For the merged strategy, build up the lists of included native linkable roots, and the // excluded native linkable roots. if (nativeLinkStrategy == NativeLinkStrategy.MERGED) { OmnibusRoots roots = omnibusRoots.build(); OmnibusLibraries libraries = Omnibus.getSharedLibraries( buildTarget, projectFilesystem, params, cellPathResolver, graphBuilder, cxxBuckConfig, cxxPlatform, extraLdflags, roots.getIncludedRoots().values(), roots.getExcludedRoots().values()); // Add all the roots from the omnibus link. If it's an extension, add it as a module. // Otherwise, add it as a native library. for (Map.Entry<BuildTarget, OmnibusRoot> root : libraries.getRoots().entrySet()) { CxxPythonExtension extension = extensions.get(root.getKey()); if (extension != null) { allComponents.putModules( root.getKey(), PythonMappedComponents.of( ImmutableSortedMap.of(extension.getModule(), root.getValue().getPath()))); } else { NativeLinkTarget target = Preconditions.checkNotNull( roots.getIncludedRoots().get(root.getKey()), "%s: linked unexpected omnibus root: %s", buildTarget, root.getKey()); NativeLinkTargetMode mode = target.getNativeLinkTargetMode(); String soname = Preconditions.checkNotNull( mode.getLibraryName().orElse(null), "%s: omnibus library for %s was built without soname", buildTarget, root.getKey()); allComponents.putNativeLibraries( root.getKey(), PythonMappedComponents.of( ImmutableSortedMap.of(Paths.get(soname), root.getValue().getPath()))); } } // Add all remaining libraries as native libraries. 
if (!libraries.getLibraries().isEmpty()) { libraries.getLibraries().stream() .forEach( lib -> allComponents.putNativeLibraries( buildTarget, PythonMappedComponents.of( ImmutableSortedMap.of(Paths.get(lib.getSoname()), lib.getPath())))); } } else { // For regular linking, add all extensions via the package components interface. Map<BuildTarget, NativeLinkable> extensionNativeDeps = new LinkedHashMap<>(); for (Map.Entry<BuildTarget, CxxPythonExtension> entry : extensions.entrySet()) { entry .getValue() .getPythonModules(pythonPlatform, cxxPlatform, graphBuilder) .ifPresent( modules -> allComponents.putModules(entry.getValue().getBuildTarget(), modules)); entry .getValue() .getPythonResources(pythonPlatform, cxxPlatform, graphBuilder) .ifPresent( resources -> allComponents.putResources(entry.getValue().getBuildTarget(), resources)); allComponents.addZipSafe(entry.getValue().isPythonZipSafe()); extensionNativeDeps.putAll( Maps.uniqueIndex( entry .getValue() .getNativeLinkTarget(pythonPlatform, cxxPlatform, graphBuilder) .getNativeLinkTargetDeps(graphBuilder), NativeLinkable::getBuildTarget)); } // Add all the native libraries. ImmutableList<? extends NativeLinkable> nativeLinkables = NativeLinkables.getTransitiveNativeLinkables( graphBuilder, Iterables.concat(nativeLinkableRoots.values(), extensionNativeDeps.values())); for (NativeLinkable nativeLinkable : nativeLinkables) { NativeLinkableGroup.Linkage linkage = nativeLinkable.getPreferredLinkage(); if (nativeLinkableRoots.containsKey(nativeLinkable.getBuildTarget()) || linkage != NativeLinkableGroup.Linkage.STATIC) { allComponents.putNativeLibraries( nativeLinkable.getBuildTarget(), PythonMappedComponents.of( ImmutableSortedMap.copyOf( MoreMaps.transformKeys( nativeLinkable.getSharedLibraries(graphBuilder), Paths::get)))); } } } return allComponents.build(); } public static Path getBasePath(BuildTarget target, Optional<String> override) { return override.isPresent() ? 
Paths.get(override.get().replace('.', '/')) : target.getCellRelativeBasePath().getPath().toPathDefaultFileSystem(); } static ImmutableSet<String> getPreloadNames( ActionGraphBuilder graphBuilder, CxxPlatform cxxPlatform, Iterable<BuildTarget> preloadDeps) { ImmutableSet.Builder<String> builder = ImmutableSet.builder(); for (NativeLinkableGroup nativeLinkableGroup : FluentIterable.from(preloadDeps) .transform(graphBuilder::getRule) .filter(NativeLinkableGroup.class)) { builder.addAll( nativeLinkableGroup .getNativeLinkable(cxxPlatform, graphBuilder) .getSharedLibraries(graphBuilder) .keySet()); } return builder.build(); } }
apache-2.0
UniTime/unitime
JavaSource/org/unitime/timetable/reports/PdfLegacyReport.java
10138
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/
package org.unitime.timetable.reports;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;

import org.unitime.timetable.reports.AbstractReport.Line;
import org.unitime.timetable.util.Constants;
import org.unitime.timetable.util.PdfFont;

import com.lowagie.text.Document;
import com.lowagie.text.DocumentException;
import com.lowagie.text.PageSize;
import com.lowagie.text.Paragraph;
import com.lowagie.text.pdf.PdfWriter;

/**
 * Fixed-width, line-oriented report writer that renders either plain text or a
 * PDF (letter / ledger landscape) made of monospaced paragraphs.
 *
 * <p>Output is accumulated page by page in {@link #iBuffer}; {@link #printFooter()}
 * flushes one full page either to the {@link PrintWriter} (text mode) or as a
 * fixed-font {@link Paragraph} into the iText {@link Document} (PDF modes).
 *
 * @author Tomas Muller
 */
public class PdfLegacyReport implements ReportWriter {
    /** Plain letter-landscape PDF, 50 lines per page. */
    public static final int sModeNormal = 0;
    /** Ledger-landscape PDF, 116 lines per page. */
    public static final int sModeLedger = 1;
    /** Plain-text output, pages separated by a form feed. */
    public static final int sModeText = 2;

    protected int iNrChars = 133;          // characters per line
    protected int iNrLines = 50;           // lines per page (mode dependent)
    private OutputStream iOut = null;
    private Document iDoc = null;          // PDF document (null in text mode)
    private StringBuilder iBuffer = new StringBuilder(); // current page; single-threaded use, so StringBuilder suffices
    private PrintWriter iPrint = null;     // text writer (null in PDF modes)
    private String iTitle, iTitle2, iSubject;
    private String iSession;
    private int iPageNo = 0;
    private int iLineNo = 0;               // lines emitted on the current page
    private String iPageId = null;         // right-aligned footer id
    private String iCont = null;           // "(x Continued)" marker for page breaks
    private String iHeader[] = null;       // per-page text header lines
    private String iFooter = null;
    private Line iHeaderLine[] = null;     // per-page structured header lines
    private int iMode = sModeNormal;
    private Listener iListener;            // notified after each page header is printed
    private boolean iEmpty = true;         // true until the first footer is flushed

    /**
     * Creates a report and, when {@code file} is not null, opens it immediately.
     *
     * @param mode one of {@link #sModeNormal}, {@link #sModeLedger}, {@link #sModeText}
     */
    public PdfLegacyReport(int mode, File file, String title, String title2, String subject, String session) throws IOException, DocumentException {
        iTitle = title;
        iTitle2 = title2;
        iSubject = subject;
        iSession = session;
        iMode = mode;
        if (file != null) open(new FileOutputStream(file));
    }

    /**
     * Creates a report and, when {@code out} is not null, opens it immediately.
     *
     * @param mode one of {@link #sModeNormal}, {@link #sModeLedger}, {@link #sModeText}
     */
    public PdfLegacyReport(int mode, OutputStream out, String title, String title2, String subject, String session) throws IOException, DocumentException {
        iTitle = title;
        iTitle2 = title2;
        iSubject = subject;
        iSession = session;
        iMode = mode;
        if (out != null) open(out);
    }

    /** Opens the given file in the given mode. */
    public void open(File file, int mode) throws DocumentException, IOException {
        iMode = mode;
        open(new FileOutputStream(file));
    }

    /** Opens the given stream in the given mode. */
    public void open(OutputStream out, int mode) throws DocumentException, IOException {
        iMode = mode;
        open(out);
    }

    /**
     * Opens the output in the current mode: a {@link PrintWriter} for text,
     * otherwise an iText document sized for letter or ledger landscape.
     */
    public void open(OutputStream out) throws DocumentException, IOException {
        iOut = out;
        if (iMode == sModeText) {
            iPrint = new PrintWriter(iOut);
        } else {
            iNrLines = (iMode == sModeLedger ? 116 : 50);
            iDoc = new Document(iMode == sModeLedger ? PageSize.LEDGER.rotate() : PageSize.LETTER.rotate());
            PdfWriter.getInstance(iDoc, iOut);
            iDoc.addTitle(iTitle);
            iDoc.addAuthor("UniTime "+Constants.getVersion()+", www.unitime.org");
            iDoc.addSubject(iSubject);
            iDoc.addCreator("UniTime "+Constants.getVersion()+", www.unitime.org");
            iDoc.open();
        }
        iEmpty = true;
        iPageNo = 0;
        iLineNo = 0;
    }

    @Override
    public void setPageName(String pageName) { iPageId = pageName; }

    @Override
    public void setCont(String cont) { iCont = cont; }

    /** Sets the plain-text header lines repeated on every page. */
    public void setHeader(String[] header) { iHeader = header; }

    @Override
    public void setHeader(Line... line) { iHeaderLine = line; }

    @Override
    public Line[] getHeader() { return iHeaderLine; }

    @Override
    public void setFooter(String footer) { iFooter = footer; }

    /** Appends one line of text to the current page buffer. */
    protected void out(String text) throws DocumentException {
        if (iBuffer.length() > 0) iBuffer.append("\n");
        iBuffer.append(text);
    }

    /** @return {@code ch} repeated {@code cnt} times */
    protected static String rep(char ch, int cnt) {
        // Arrays.fill avoids the O(n^2) cost of repeated String concatenation.
        char[] buffer = new char[cnt];
        Arrays.fill(buffer, ch);
        return new String(buffer);
    }

    /** Emits a full-width line of the given character (e.g. a '=' separator). */
    protected void outln(char ch) throws DocumentException {
        out(rep(ch, iNrChars));
    }

    /** Left-pads (or truncates) {@code s} to exactly {@code len} characters using {@code ch}. */
    public String lpad(String s, char ch, int len) {
        if (s == null) s = "";
        if (s.length() > len) return s.substring(0, len);
        StringBuilder sb = new StringBuilder();
        for (int i = s.length(); i < len; i++) sb.append(ch);
        return sb.append(s).toString();
    }

    /** Left-pads (or truncates) {@code s} to exactly {@code len} characters using spaces. */
    public String lpad(String s, int len) {
        if (s == null) s = "";
        if (s.length() > len) return s.substring(0, len);
        return lpad(s, ' ', len);
    }

    /** Right-pads (or truncates) {@code s} to exactly {@code len} characters using {@code ch}. */
    protected String rpad(String s, char ch, int len) {
        if (s == null) s = "";
        if (s.length() > len) return s.substring(0, len);
        StringBuilder sb = new StringBuilder(s);
        while (sb.length() < len) sb.append(ch);
        return sb.toString();
    }

    /** Right-pads (or truncates) {@code s} to exactly {@code len} characters using spaces. */
    public String rpad(String s, int len) {
        if (s == null) s = "";
        if (s.length() > len) return s.substring(0, len);
        return rpad(s, ' ', len);
    }

    /**
     * Centers (or truncates) {@code s} to exactly {@code len} characters by padding
     * alternately on the right (when the current length is even) and on the left.
     */
    protected String mpad(String s, char ch, int len) {
        if (s == null) s = "";
        if (s.length() > len) return s.substring(0, len);
        StringBuilder sb = new StringBuilder(s);
        while (sb.length() < len) {
            if (sb.length() % 2 == 0) sb.append(ch); else sb.insert(0, ch);
        }
        return sb.toString();
    }

    /** Centers (or truncates) {@code s} to exactly {@code len} characters using spaces. */
    public String mpad(String s, int len) {
        return mpad(s, ' ', len);
    }

    /** Joins {@code s1} and {@code s2} with enough {@code ch} between them to reach {@code len}. */
    protected String mpad(String s1, String s2, char ch, int len) {
        StringBuilder middle = new StringBuilder();
        while (s1.length() + middle.length() + s2.length() < len) middle.append(ch);
        return s1 + middle + s2;
    }

    /** Overlays {@code s} on top of {@code line} starting at column {@code idx}. */
    protected String render(String line, String s, int idx) {
        String a = (line.length() <= idx ? rpad(line, ' ', idx) : line.substring(0, idx));
        String b = (line.length() <= idx + s.length() ? "" : line.substring(idx + s.length()));
        return a + s + b;
    }

    /** Overlays {@code s} centered on {@code line}. */
    protected String renderMiddle(String line, String s) {
        return render(line, s, (iNrChars - s.length()) / 2);
    }

    /** Overlays {@code s} right-aligned on {@code line}. */
    protected String renderEnd(String line, String s) {
        return render(line, s, iNrChars - s.length());
    }

    /** Prints the page header, starting a fresh page banner. */
    public void printHeader() throws DocumentException {
        printHeader(true);
    }

    /**
     * Prints the page header: title banner + date/session line (only when
     * {@code newPage}), then any configured header lines, and notifies the listener.
     */
    public void printHeader(boolean newPage) throws DocumentException {
        if (newPage) {
            out(renderEnd(renderMiddle("UniTime "+Constants.getVersion(), iTitle), iTitle2));
            out(mpad(new SimpleDateFormat("EEE MMM dd, yyyy").format(new Date()), iSession, ' ', iNrChars));
            outln('=');
            iLineNo = 0;
            if (iCont != null && iCont.length() > 0)
                println("("+iCont+" Continued)");
        }
        if (iHeader != null)
            for (int i = 0; i < iHeader.length; i++)
                println(iHeader[i]);
        if (iHeaderLine != null)
            for (int i = 0; i < iHeaderLine.length; i++)
                printLine(iHeaderLine[i]);
        if (iListener != null) iListener.headerPrinted();
    }

    /** Flushes the buffered page (with its footer line) to the text writer or the PDF. */
    protected void printFooter() throws DocumentException {
        iEmpty = false;
        out("");
        out(renderEnd(renderMiddle((iFooter == null ? "" : iFooter), "Page "+(iPageNo+1)), (iPageId == null || iPageId.length() == 0 ? "" : iPageId)+" "));
        if (iPrint != null) {
            iPrint.print(iBuffer);
        } else {
            //FIXME: For some reason when a line starts with space, the line is shifted by one space in the resulting PDF (when using iText 5.0.2)
            Paragraph p = new Paragraph(iBuffer.toString().replace("\n ", "\n  "), PdfFont.getFixedFont());
            p.setLeading(9.5f); //was 13.5f
            iDoc.add(p);
        }
        iBuffer = new StringBuilder();
        iPageNo++;
    }

    /** Pads the current page with blank lines and flushes it; call once at the end. */
    public void lastPage() throws DocumentException {
        while (iLineNo < iNrLines) {
            out("");
            iLineNo++;
        }
        printFooter();
    }

    /** Finishes the current page and starts a new one (form feed in text mode). */
    @Override
    public void newPage() throws DocumentException {
        while (iLineNo < iNrLines) {
            out("");
            iLineNo++;
        }
        printFooter();
        if (iPrint != null) {
            iPrint.print("\f\n");
        } else {
            iDoc.newPage();
        }
        printHeader();
    }

    /** @return number of lines already printed on the current page */
    public int getLineNumber() { return iLineNo; }

    /** @return page capacity in lines for the current mode */
    public int getNrLinesPerPage() { return iNrLines; }

    /** Prints one line, automatically breaking to a new page when the page is full. */
    public void println(String text) throws DocumentException {
        out(text);
        iLineNo++;
        if (iLineNo >= iNrLines) newPage();
    }

    /** @return true until the first page has been flushed */
    public boolean isEmpty() { return iEmpty; }

    /** Emits a "Nothing to report." page when empty, then closes the writer/document/stream. */
    public void close() throws IOException, DocumentException {
        if (isEmpty()) {
            println("Nothing to report.");
            lastPage();
        }
        if (iPrint != null) {
            iPrint.flush();
            iPrint.close();
        } else {
            iDoc.close();
            iOut.close();
        }
    }

    /** @return fixed line width in characters */
    public int getNrCharsPerLine() { return iNrChars; }

    @Override
    public void printLine(Line line) throws DocumentException {
        println(line.render());
    }

    @Override
    public void printSeparator(Line line) throws DocumentException {
        if (line != null) {
            printLine(line);
        } else {
            // No explicit separator given: repeat the last configured header line.
            if (iHeader != null && iHeader.length > 0)
                println(iHeader[iHeader.length - 1]);
            if (iHeaderLine != null && iHeaderLine.length > 0)
                printLine(iHeaderLine[iHeaderLine.length - 1]);
        }
    }

    @Override
    public int getSeparatorNrLines() { return 1; }

    @Override
    public void setListener(Listener listener) { iListener = listener; }

    @Override
    public boolean isSkipRepeating() { return true; }
}
apache-2.0
Esri/arcgis-runtime-samples-android
java/feature-layer-geodatabase/src/main/java/com/esri/arcgisruntime/sample/featurelayergeodatabase/MainActivity.java
3928
/* Copyright 2016 Esri
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.esri.arcgisruntime.sample.featurelayergeodatabase;

import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.esri.arcgisruntime.ArcGISRuntimeEnvironment;
import com.esri.arcgisruntime.data.Geodatabase;
import com.esri.arcgisruntime.data.GeodatabaseFeatureTable;
import com.esri.arcgisruntime.layers.FeatureLayer;
import com.esri.arcgisruntime.loadable.LoadStatus;
import com.esri.arcgisruntime.mapping.ArcGISMap;
import com.esri.arcgisruntime.mapping.BasemapStyle;
import com.esri.arcgisruntime.mapping.Viewpoint;
import com.esri.arcgisruntime.mapping.view.MapView;

/**
 * Sample activity that displays a feature layer loaded from a local (mobile)
 * geodatabase file on top of a streets basemap.
 */
public class MainActivity extends AppCompatActivity {

  private static final String TAG = MainActivity.class.getSimpleName();

  private MapView mMapView;
  // objects that implement Loadable must be class fields to prevent being garbage collected before loading
  private Geodatabase mGeodatabase;

  /**
   * Creates the map view, loads the local geodatabase asynchronously, and once
   * loaded adds its "Trailheads" feature table to the map as a feature layer.
   */
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // authentication with an API key or named user is required to access basemaps and other
    // location services
    ArcGISRuntimeEnvironment.setApiKey(BuildConfig.API_KEY);

    // create map and add to map view
    ArcGISMap map = new ArcGISMap(BasemapStyle.ARCGIS_STREETS);
    mMapView = findViewById(R.id.mapView);
    mMapView.setMap(map);

    // create path to local geodatabase
    // NOTE(review): plain string concatenation — assumes R.string.config_geodb_name
    // starts with a path separator (e.g. "/xyz.geodatabase"); confirm the resource value.
    String path = getExternalFilesDir(null) + getString(R.string.config_geodb_name);

    // create a new geodatabase from local path
    mGeodatabase = new Geodatabase(path);
    // load the geodatabase
    mGeodatabase.loadAsync();

    // create feature layer from geodatabase and add to the map
    mGeodatabase.addDoneLoadingListener(() -> {
      if (mGeodatabase.getLoadStatus() == LoadStatus.LOADED) {
        // access the geodatabase's feature table Trailheads
        GeodatabaseFeatureTable geodatabaseFeatureTable = mGeodatabase.getGeodatabaseFeatureTable("Trailheads");
        geodatabaseFeatureTable.loadAsync();
        // create a layer from the geodatabase feature table and add to map
        final FeatureLayer featureLayer = new FeatureLayer(geodatabaseFeatureTable);
        featureLayer.addDoneLoadingListener(() -> {
          if (featureLayer.getLoadStatus() == LoadStatus.LOADED) {
            // set viewpoint to the feature layer's extent
            mMapView.setViewpointAsync(new Viewpoint(featureLayer.getFullExtent()));
          } else {
            Toast.makeText(MainActivity.this, "Feature Layer failed to load!", Toast.LENGTH_LONG).show();
            Log.e(TAG, "Feature Layer failed to load!");
          }
        });
        // add feature layer to the map
        mMapView.getMap().getOperationalLayers().add(featureLayer);
      } else {
        Toast.makeText(MainActivity.this, "Geodatabase failed to load!", Toast.LENGTH_LONG).show();
        Log.e(TAG, "Geodatabase failed to load!");
      }
    });
  }

  // Standard MapView lifecycle forwarding: pause/resume/dispose with the activity.
  @Override
  protected void onPause() {
    super.onPause();
    mMapView.pause();
  }

  @Override
  protected void onResume() {
    super.onResume();
    mMapView.resume();
  }

  @Override
  protected void onDestroy() {
    super.onDestroy();
    mMapView.dispose();
  }
}
apache-2.0
wimsymons/sling
testing/org.apache.sling.testing.paxexam/src/main/java/org/apache/sling/testing/paxexam/SlingVersionResolver.java
14723
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.testing.paxexam;

import java.util.HashMap;
import java.util.Map;

import org.ops4j.pax.exam.options.MavenUrlReference.VersionResolver;

/**
 * Pax Exam {@link VersionResolver} backed by a static map of
 * {@code groupId:artifactId -> version} entries for the Sling launchpad.
 *
 * NOTE: This file is generated from Sling's Launchpad Karaf Features
 */
public class SlingVersionResolver implements VersionResolver {

    private final Map<String, String> versions = new HashMap<>();

    public SlingVersionResolver() {
        versions.put("org.apache.felix:org.apache.felix.configadmin", "1.8.8");
        versions.put("org.apache.felix:org.apache.felix.eventadmin", "1.4.6");
        versions.put("org.apache.felix:org.apache.felix.http.jetty", "3.2.0");
        versions.put("org.apache.felix:org.apache.felix.http.servlet-api", "1.1.2");
        versions.put("org.apache.felix:org.apache.felix.http.whiteboard", "3.0.0");
        versions.put("org.apache.aries:org.apache.aries.util", "1.1.1");
        versions.put("org.apache.aries.jmx:org.apache.aries.jmx.api", "1.1.5");
        versions.put("org.apache.aries.jmx:org.apache.aries.jmx.core", "1.1.6");
        versions.put("org.apache.aries.jmx:org.apache.aries.jmx.whiteboard", "1.1.5");
        versions.put("org.apache.felix:org.apache.felix.inventory", "1.0.4");
        versions.put("org.apache.felix:org.apache.felix.metatype", "1.1.2");
        versions.put("org.apache.felix:org.apache.felix.scr", "2.0.2");
        versions.put("org.apache.felix:org.apache.felix.webconsole", "4.2.16");
        versions.put("com.composum.sling.core:composum-sling-core-commons", "1.5.3");
        versions.put("com.composum.sling.core:composum-sling-core-console", "1.5.3");
        versions.put("com.composum.sling.core:composum-sling-core-jslibs", "1.5.3");
        versions.put("com.google.guava:guava", "15.0");
        versions.put("com.sun.mail:javax.mail", "1.5.5");
        versions.put("commons-codec:commons-codec", "1.10");
        versions.put("commons-collections:commons-collections", "3.2.2");
        versions.put("commons-fileupload:commons-fileupload", "1.3.2");
        versions.put("commons-io:commons-io", "2.5");
        versions.put("commons-lang:commons-lang", "2.6");
        versions.put("io.dropwizard.metrics:metrics-core", "3.1.2");
        versions.put("io.wcm.osgi.wrapper:io.wcm.osgi.wrapper.rxjava", "1.0.14-0000");
        versions.put("javax.jcr:jcr", "2.0");
        versions.put("javax.mail:javax.mail-api", "1.5.5");
        versions.put("org.apache.commons:commons-email", "1.4");
        versions.put("org.apache.commons:commons-lang3", "3.4");
        versions.put("org.apache.commons:commons-math", "2.2");
        // duplicate entry for org.apache.felix:org.apache.felix.inventory (same value 1.0.4)
        // removed here; it is already registered above with the other felix bundles.
        versions.put("org.apache.felix:org.apache.felix.jaas", "0.0.4");
        versions.put("org.apache.felix:org.apache.felix.prefs", "1.0.6");
        versions.put("org.apache.felix:org.apache.felix.webconsole.plugins.memoryusage", "1.0.6");
        versions.put("org.apache.felix:org.apache.felix.webconsole.plugins.packageadmin", "1.0.2");
        versions.put("org.apache.geronimo.bundles:commons-httpclient", "3.1_2");
        versions.put("org.apache.geronimo.bundles:json", "20090211_1");
        versions.put("org.apache.geronimo.specs:geronimo-atinject_1.0_spec", "1.0");
        versions.put("org.apache.geronimo.specs:geronimo-el_2.2_spec", "1.0.4");
        versions.put("org.apache.geronimo.specs:geronimo-interceptor_1.1_spec", "1.0");
        versions.put("org.apache.geronimo.specs:geronimo-jcdi_1.0_spec", "1.0");
        versions.put("org.apache.geronimo.specs:geronimo-jta_1.1_spec", "1.1.1");
        versions.put("org.apache.geronimo.specs:geronimo-servlet_3.0_spec", "1.0");
        versions.put("org.apache.httpcomponents:httpclient-osgi", "4.5.2");
        versions.put("org.apache.httpcomponents:httpcore-osgi", "4.4.5");
        versions.put("org.apache.jackrabbit:jackrabbit-api", "2.12.1");
        versions.put("org.apache.jackrabbit:jackrabbit-data", "2.12.1");
        versions.put("org.apache.jackrabbit:jackrabbit-jcr-commons", "2.12.1");
        versions.put("org.apache.jackrabbit:jackrabbit-jcr-rmi", "2.12.1");
        versions.put("org.apache.jackrabbit:jackrabbit-spi", "2.12.1");
        versions.put("org.apache.jackrabbit:jackrabbit-spi-commons", "2.12.1");
        versions.put("org.apache.jackrabbit:jackrabbit-webdav", "2.12.1");
        versions.put("org.apache.jackrabbit:oak-blob", "1.5.3");
        versions.put("org.apache.jackrabbit:oak-commons", "1.5.3");
        versions.put("org.apache.jackrabbit:oak-core", "1.5.3");
        versions.put("org.apache.jackrabbit:oak-jcr", "1.5.3");
        versions.put("org.apache.jackrabbit:oak-lucene", "1.5.3");
        versions.put("org.apache.jackrabbit:oak-segment", "1.5.3");
        versions.put("org.apache.jackrabbit.vault:org.apache.jackrabbit.vault", "3.1.26");
        versions.put("org.apache.servicemix.bundles:org.apache.servicemix.bundles.rhino", "1.7.7.1_1");
        versions.put("org.apache.servicemix.bundles:org.apache.servicemix.bundles.urlrewritefilter", "4.0.4_1");
        versions.put("org.apache.sling:org.apache.sling.adapter", "2.1.6");
        versions.put("org.apache.sling:org.apache.sling.api", "2.11.1-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.auth.core", "1.3.14");
        versions.put("org.apache.sling:org.apache.sling.auth.form", "1.0.8");
        versions.put("org.apache.sling:org.apache.sling.auth.openid", "1.0.4");
        versions.put("org.apache.sling:org.apache.sling.auth.selector", "1.0.6");
        versions.put("org.apache.sling:org.apache.sling.bundleresource.impl", "2.2.0");
        versions.put("org.apache.sling:org.apache.sling.commons.classloader", "1.3.2");
        versions.put("org.apache.sling:org.apache.sling.commons.compiler", "2.3.0");
        versions.put("org.apache.sling:org.apache.sling.commons.fsclassloader", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.commons.html", "1.0.0");
        versions.put("org.apache.sling:org.apache.sling.commons.json", "2.0.18");
        versions.put("org.apache.sling:org.apache.sling.commons.messaging", "0.0.1-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.commons.messaging.mail", "0.0.1-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.commons.metrics", "1.0.0");
        versions.put("org.apache.sling:org.apache.sling.commons.mime", "2.1.8");
        versions.put("org.apache.sling:org.apache.sling.commons.osgi", "2.4.0");
        versions.put("org.apache.sling:org.apache.sling.commons.scheduler", "2.4.14");
        versions.put("org.apache.sling:org.apache.sling.commons.threads", "3.2.6");
        versions.put("org.apache.sling:org.apache.sling.discovery.api", "1.0.4");
        versions.put("org.apache.sling:org.apache.sling.discovery.base", "1.1.3-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.discovery.commons", "1.0.12");
        versions.put("org.apache.sling:org.apache.sling.discovery.impl", "1.2.7-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.discovery.oak", "1.2.7-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.discovery.standalone", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.discovery.support", "1.0.0");
        versions.put("org.apache.sling:org.apache.sling.distribution.api", "0.3.0");
        versions.put("org.apache.sling:org.apache.sling.distribution.core", "0.1.18");
        versions.put("org.apache.sling:org.apache.sling.engine", "2.4.6");
        versions.put("org.apache.sling:org.apache.sling.event", "4.0.2");
        versions.put("org.apache.sling:org.apache.sling.event.dea", "1.0.4");
        versions.put("org.apache.sling:org.apache.sling.extensions.explorer", "1.0.4");
        versions.put("org.apache.sling:org.apache.sling.extensions.threaddump", "0.2.2");
        versions.put("org.apache.sling:org.apache.sling.featureflags", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.fsresource", "1.1.4");
        versions.put("org.apache.sling:org.apache.sling.hc.core", "1.2.5-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.hc.jmx", "1.0.6");
        versions.put("org.apache.sling:org.apache.sling.hc.support", "1.0.4");
        versions.put("org.apache.sling:org.apache.sling.hc.webconsole", "1.1.2");
        versions.put("org.apache.sling:org.apache.sling.i18n", "2.4.6");
        versions.put("org.apache.sling:org.apache.sling.installer.console", "1.0.0");
        versions.put("org.apache.sling:org.apache.sling.installer.core", "3.6.8");
        versions.put("org.apache.sling:org.apache.sling.installer.factory.configuration", "1.1.2");
        versions.put("org.apache.sling:org.apache.sling.installer.provider.file", "1.1.0");
        versions.put("org.apache.sling:org.apache.sling.installer.provider.jcr", "3.1.18");
        versions.put("org.apache.sling:org.apache.sling.jcr.api", "2.3.1-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.jcr.base", "2.3.3-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.jcr.classloader", "3.2.2");
        versions.put("org.apache.sling:org.apache.sling.jcr.compiler", "2.1.0");
        versions.put("org.apache.sling:org.apache.sling.jcr.contentloader", "2.1.10");
        versions.put("org.apache.sling:org.apache.sling.jcr.davex", "1.3.4");
        versions.put("org.apache.sling:org.apache.sling.jcr.jackrabbit.accessmanager", "2.1.2");
        versions.put("org.apache.sling:org.apache.sling.jcr.jackrabbit.usermanager", "2.2.4");
        versions.put("org.apache.sling:org.apache.sling.jcr.oak.server", "1.0.1-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.jcr.registration", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.jcr.resource", "2.8.0");
        versions.put("org.apache.sling:org.apache.sling.jcr.webconsole", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.jcr.webdav", "2.3.4");
        versions.put("org.apache.sling:org.apache.sling.jmx.provider", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.launchpad.content", "2.0.8");
        versions.put("org.apache.sling:org.apache.sling.models.api", "1.2.2");
        versions.put("org.apache.sling:org.apache.sling.models.impl", "1.2.8");
        versions.put("org.apache.sling:org.apache.sling.nosql.couchbase-client", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.nosql.couchbase-resourceprovider", "1.1.0");
        versions.put("org.apache.sling:org.apache.sling.nosql.generic", "1.1.0");
        versions.put("org.apache.sling:org.apache.sling.nosql.mongodb-resourceprovider", "1.1.0");
        versions.put("org.apache.sling:org.apache.sling.query", "3.0.0");
        versions.put("org.apache.sling:org.apache.sling.resource.inventory", "1.0.4");
        versions.put("org.apache.sling:org.apache.sling.resourcemerger", "1.3.0");
        versions.put("org.apache.sling:org.apache.sling.resourceresolver", "1.4.12");
        versions.put("org.apache.sling:org.apache.sling.rewriter", "1.1.2");
        versions.put("org.apache.sling:org.apache.sling.scripting.api", "2.1.8");
        versions.put("org.apache.sling:org.apache.sling.scripting.core", "2.0.37-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.scripting.freemarker", "1.0.0-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.scripting.groovy", "1.0.2");
        versions.put("org.apache.sling:org.apache.sling.scripting.java", "2.0.14");
        versions.put("org.apache.sling:org.apache.sling.scripting.javascript", "2.0.30");
        versions.put("org.apache.sling:org.apache.sling.scripting.jsp", "2.1.8");
        versions.put("org.apache.sling:org.apache.sling.scripting.jsp.taglib", "2.2.6");
        versions.put("org.apache.sling:org.apache.sling.scripting.sightly", "1.0.18");
        versions.put("org.apache.sling:org.apache.sling.scripting.sightly.js.provider", "1.0.10");
        versions.put("org.apache.sling:org.apache.sling.scripting.thymeleaf", "0.1.7-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.security", "1.0.18");
        versions.put("org.apache.sling:org.apache.sling.serviceusermapper", "1.2.2");
        versions.put("org.apache.sling:org.apache.sling.servlets.get", "2.1.14");
        versions.put("org.apache.sling:org.apache.sling.servlets.post", "2.3.12");
        versions.put("org.apache.sling:org.apache.sling.servlets.resolver", "2.4.3-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.settings", "1.3.8");
        versions.put("org.apache.sling:org.apache.sling.urlrewriter", "0.0.2");
        versions.put("org.apache.sling:org.apache.sling.validation.api", "1.0.0-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.validation.core", "1.0.0-SNAPSHOT");
        versions.put("org.apache.sling:org.apache.sling.xss", "1.0.8");
        versions.put("org.apache.sling.samples:org.apache.sling.samples.fling", "0.0.1-SNAPSHOT");
        versions.put("org.apache.tika:tika-bundle", "1.13");
        versions.put("org.apache.tika:tika-core", "1.13");
        versions.put("org.codehaus.groovy:groovy", "2.4.7");
        versions.put("org.codehaus.groovy:groovy-json", "2.4.7");
        versions.put("org.codehaus.groovy:groovy-templates", "2.4.7");
        versions.put("org.javassist:javassist", "3.20.0-GA");
        versions.put("org.mongodb:mongo-java-driver", "2.14.2");
    }

    /** Builds the {@code "groupId:artifactId"} key used by the version map. */
    private String key(final String groupId, final String artifactId) {
        return String.format("%s:%s", groupId, artifactId);
    }

    /**
     * Overrides (or adds) the version mapped to the given Maven coordinates.
     *
     * @return the previously mapped version, or {@code null} if none was set
     */
    public String setVersion(final String groupId, final String artifactId, final String version) {
        return versions.put(key(groupId, artifactId), version);
    }

    /** @return the mapped version for the given coordinates, or {@code null} if unknown */
    @Override
    public String getVersion(final String groupId, final String artifactId) {
        return versions.get(key(groupId, artifactId));
    }
}
apache-2.0
beanvalidation/beanvalidation-tck
tests/src/main/java/org/hibernate/beanvalidation/tck/tests/xmlconfiguration/versioning/Version30InValidationXmlTest.java
1627
/** * Jakarta Bean Validation TCK * * License: Apache License, Version 2.0 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>. */ package org.hibernate.beanvalidation.tck.tests.xmlconfiguration.versioning; import static org.testng.Assert.assertEquals; import jakarta.validation.Configuration; import org.hibernate.beanvalidation.tck.beanvalidation.Sections; import org.hibernate.beanvalidation.tck.tests.AbstractTCKTest; import org.hibernate.beanvalidation.tck.util.TestUtil; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.shrinkwrap.api.spec.WebArchive; import org.jboss.test.audit.annotations.SpecAssertion; import org.jboss.test.audit.annotations.SpecVersion; import org.testng.annotations.Test; /** * @author Guillaume Smet */ @SpecVersion(spec = "beanvalidation", version = "3.0.0") public class Version30InValidationXmlTest extends AbstractTCKTest { @Deployment public static WebArchive createTestArchive() { return webArchiveBuilder() .withTestClass( Version30InValidationXmlTest.class ) .withClass( DummyClockProvider.class ) .withValidationXml( "validation-Version30InValidationXmlTest.xml" ) .build(); } @Test @SpecAssertion(section = Sections.XML_CONFIG_XSD, id = "a") public void testValidationXmlVersion20() { Configuration<?> config = TestUtil.getConfigurationUnderTest(); assertEquals( config.getBootstrapConfiguration().getClockProviderClassName(), "org.hibernate.beanvalidation.tck.tests.xmlconfiguration.versioning.DummyClockProvider", "Wrong clock provider class name." ); } }
apache-2.0
sekigor/hawkular-agent
hawkular-wildfly-agent/src/main/java/org/hawkular/agent/monitor/inventory/MeasurementType.java
1408
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.hawkular.agent.monitor.inventory;

/**
 * A common superclass for {@link AvailType} and {@link MetricType}.
 *
 * @author <a href="https://github.com/ppalaga">Peter Palaga</a>
 * @param <L> the type of the protocol specific location typically a subclass of {@link NodeLocation}
 */
public class MeasurementType<L> extends AttributeLocationProvider<L> {

    // Collection interval; immutable, set once at construction.
    private final Interval interval;

    /**
     * Creates a new measurement type.
     *
     * @param id unique identifier (passed through to the superclass)
     * @param name human-readable name (passed through to the superclass)
     * @param location protocol-specific attribute location (passed through to the superclass)
     * @param interval how often instances of this type should be measured
     */
    public MeasurementType(ID id, Name name, AttributeLocation<L> location, Interval interval) {
        super(id, name, location);
        this.interval = interval;
    }

    /**
     * @return how often should instances of this type be measured
     */
    public Interval getInterval() {
        return interval;
    }

}
apache-2.0
phisolani/floodlight
lib/gen-java/org/sdnplatform/sync/thrift/FullSyncRequestMessage.java
11585
/** * Autogenerated by Thrift Compiler (0.9.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.sdnplatform.sync.thrift; import org.apache.thrift.scheme.IScheme; import org.apache.thrift.scheme.SchemeFactory; import org.apache.thrift.scheme.StandardScheme; import org.apache.thrift.scheme.TupleScheme; import org.apache.thrift.protocol.TTupleProtocol; import java.util.Map; import java.util.HashMap; import java.util.EnumMap; import java.util.EnumSet; import java.util.Collections; @SuppressWarnings("all") public class FullSyncRequestMessage implements org.apache.thrift.TBase<FullSyncRequestMessage, FullSyncRequestMessage._Fields>, java.io.Serializable, Cloneable { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("FullSyncRequestMessage"); private static final org.apache.thrift.protocol.TField HEADER_FIELD_DESC = new org.apache.thrift.protocol.TField("header", org.apache.thrift.protocol.TType.STRUCT, (short)1); private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>(); static { schemes.put(StandardScheme.class, new FullSyncRequestMessageStandardSchemeFactory()); schemes.put(TupleScheme.class, new FullSyncRequestMessageTupleSchemeFactory()); } public AsyncMessageHeader header; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { HEADER((short)1, "header"); private static final Map<String, _Fields> byName = new HashMap<String, _Fields>(); static { for (_Fields field : EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // HEADER return HEADER; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ public static _Fields findByName(String name) { return byName.get(name); } private final short _thriftId; private final String _fieldName; _Fields(short thriftId, String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } @Override public short getThriftFieldId() { return _thriftId; } @Override public String getFieldName() { return _fieldName; } } // isset id assignments public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.HEADER, new org.apache.thrift.meta_data.FieldMetaData("header", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, AsyncMessageHeader.class))); metaDataMap = Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(FullSyncRequestMessage.class, metaDataMap); } public FullSyncRequestMessage() { } public FullSyncRequestMessage( AsyncMessageHeader header) { this(); this.header = header; } /** * Performs a deep copy on <i>other</i>. 
*/ public FullSyncRequestMessage(FullSyncRequestMessage other) { if (other.isSetHeader()) { this.header = new AsyncMessageHeader(other.header); } } @Override public FullSyncRequestMessage deepCopy() { return new FullSyncRequestMessage(this); } @Override public void clear() { this.header = null; } public AsyncMessageHeader getHeader() { return this.header; } public FullSyncRequestMessage setHeader(AsyncMessageHeader header) { this.header = header; return this; } public void unsetHeader() { this.header = null; } /** Returns true if field header is set (has been assigned a value) and false otherwise */ public boolean isSetHeader() { return this.header != null; } public void setHeaderIsSet(boolean value) { if (!value) { this.header = null; } } @Override public void setFieldValue(_Fields field, Object value) { switch (field) { case HEADER: if (value == null) { unsetHeader(); } else { setHeader((AsyncMessageHeader)value); } break; } } @Override public Object getFieldValue(_Fields field) { switch (field) { case HEADER: return getHeader(); } throw new IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ @Override public boolean isSet(_Fields field) { if (field == null) { throw new IllegalArgumentException(); } switch (field) { case HEADER: return isSetHeader(); } throw new IllegalStateException(); } @Override public boolean equals(Object that) { if (that == null) return false; if (that instanceof FullSyncRequestMessage) return this.equals((FullSyncRequestMessage)that); return false; } public boolean equals(FullSyncRequestMessage that) { if (that == null) return false; boolean this_present_header = true && this.isSetHeader(); boolean that_present_header = true && that.isSetHeader(); if (this_present_header || that_present_header) { if (!(this_present_header && that_present_header)) return false; if (!this.header.equals(that.header)) return false; } return true; } @Override public int 
hashCode() { return 0; } @Override public int compareTo(FullSyncRequestMessage other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; FullSyncRequestMessage typedOther = other; lastComparison = Boolean.valueOf(isSetHeader()).compareTo(typedOther.isSetHeader()); if (lastComparison != 0) { return lastComparison; } if (isSetHeader()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.header, typedOther.header); if (lastComparison != 0) { return lastComparison; } } return 0; } @Override public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } @Override public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { schemes.get(iprot.getScheme()).getScheme().read(iprot, this); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { schemes.get(oprot.getScheme()).getScheme().write(oprot, this); } @Override public String toString() { StringBuilder sb = new StringBuilder("FullSyncRequestMessage("); boolean first = true; sb.append("header:"); if (this.header == null) { sb.append("null"); } else { sb.append(this.header); } first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (header == null) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'header' was not present! 
Struct: " + toString()); } // check for sub-struct validity if (header != null) { header.validate(); } } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException { try { read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class FullSyncRequestMessageStandardSchemeFactory implements SchemeFactory { @Override public FullSyncRequestMessageStandardScheme getScheme() { return new FullSyncRequestMessageStandardScheme(); } } private static class FullSyncRequestMessageStandardScheme extends StandardScheme<FullSyncRequestMessage> { @Override public void read(org.apache.thrift.protocol.TProtocol iprot, FullSyncRequestMessage struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // HEADER if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) { struct.header = new AsyncMessageHeader(); struct.header.read(iprot); struct.setHeaderIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); // check for required fields of primitive type, which can't be checked in the validate method struct.validate(); } @Override public void write(org.apache.thrift.protocol.TProtocol oprot, FullSyncRequestMessage 
struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.header != null) { oprot.writeFieldBegin(HEADER_FIELD_DESC); struct.header.write(oprot); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class FullSyncRequestMessageTupleSchemeFactory implements SchemeFactory { @Override public FullSyncRequestMessageTupleScheme getScheme() { return new FullSyncRequestMessageTupleScheme(); } } private static class FullSyncRequestMessageTupleScheme extends TupleScheme<FullSyncRequestMessage> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, FullSyncRequestMessage struct) throws org.apache.thrift.TException { TTupleProtocol oprot = (TTupleProtocol) prot; struct.header.write(oprot); } @Override public void read(org.apache.thrift.protocol.TProtocol prot, FullSyncRequestMessage struct) throws org.apache.thrift.TException { TTupleProtocol iprot = (TTupleProtocol) prot; struct.header = new AsyncMessageHeader(); struct.header.read(iprot); struct.setHeaderIsSet(true); } } }
apache-2.0
wangchuyi11/loocweather
app/src/test/java/com/loocweather/app/ExampleUnitTest.java
312
package com.loocweather.app; import org.junit.Test; import static org.junit.Assert.*; /** * To work on unit tests, switch the Test Artifact in the Build Variants view. */ public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }
apache-2.0
smile59/credit_mut
src/com/credit/actions/party/RegisterBetAction.java
1732
package com.credit.actions.party; import com.credit.managers.EMF; import com.credit.managers.SessionManager; import com.opensymphony.xwork2.ActionContext; import com.opensymphony.xwork2.ActionSupport; import entities.ChoicesPartiesEntity; import entities.UsersEntity; import javax.persistence.EntityManager; import java.util.Map; /** * Created by Alexandre on 05/06/2016. */ public class RegisterBetAction extends ActionSupport { private int choiceId = -1; public String execute() { EntityManager em = EMF.createEntityManager(); UsersEntity user = SessionManager.getUser(em); if (user == null) { em.close(); return LOGIN; } try { ChoicesPartiesEntity choice = (ChoicesPartiesEntity)em.createQuery( "select c FROM ChoicesPartiesEntity c where id = :id" ).setParameter("id", choiceId).getSingleResult(); for(ChoicesPartiesEntity entity : user.getChoicesPartiesEntities()) { if (entity.getPartiesEntity() == choice.getPartiesEntity()) { em.close(); return ERROR; } } user.getChoicesPartiesEntities().add(choice); choice.getUsersEntities().add(user); em.getTransaction().begin(); em.persist(choice); em.flush(); em.getTransaction().commit(); } catch (Exception e) { em.close(); return ERROR; } em.close(); return SUCCESS; } public int getChoiceId() { return choiceId; } public void setChoiceId(int choiceId) { this.choiceId = choiceId; } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-acmpca/src/main/java/com/amazonaws/services/acmpca/model/transform/UpdateCertificateAuthorityResultJsonUnmarshaller.java
1742
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.acmpca.model.transform; import java.math.*; import javax.annotation.Generated; import com.amazonaws.services.acmpca.model.*; import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*; import com.amazonaws.transform.*; import static com.fasterxml.jackson.core.JsonToken.*; /** * UpdateCertificateAuthorityResult JSON Unmarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class UpdateCertificateAuthorityResultJsonUnmarshaller implements Unmarshaller<UpdateCertificateAuthorityResult, JsonUnmarshallerContext> { public UpdateCertificateAuthorityResult unmarshall(JsonUnmarshallerContext context) throws Exception { UpdateCertificateAuthorityResult updateCertificateAuthorityResult = new UpdateCertificateAuthorityResult(); return updateCertificateAuthorityResult; } private static UpdateCertificateAuthorityResultJsonUnmarshaller instance; public static UpdateCertificateAuthorityResultJsonUnmarshaller getInstance() { if (instance == null) instance = new UpdateCertificateAuthorityResultJsonUnmarshaller(); return instance; } }
apache-2.0
Johnny850807/Food-Sharing
Food-Sharing/FoodSharing/app/src/main/java/com/example/androidwork/foodsharing/Activities/Fragments/Lockers/LockersFragmentPagerAdapter.java
657
package com.example.androidwork.foodsharing.Activities.Fragments.Lockers; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; public class LockersFragmentPagerAdapter extends FragmentPagerAdapter { public static final int LOCKER_PAGE_AMOUNT = 3; public LockersFragmentPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int position) { return LockersFragment.getFragmentWithLockerAndPageNumber(position); } @Override public int getCount() { return LOCKER_PAGE_AMOUNT; } }
apache-2.0
core-lib/jestful
jestful-gson/src/main/java/org/qfox/jestful/gson/GsonRequestSerializer.java
2464
package org.qfox.jestful.gson; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import org.qfox.jestful.commons.IOKit; import org.qfox.jestful.core.*; import org.qfox.jestful.core.formatting.RequestSerializer; import org.qfox.jestful.core.io.MultipartOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.List; public class GsonRequestSerializer implements RequestSerializer { private final String contentType = "application/json"; private volatile Gson gson = new Gson(); public String getContentType() { return contentType; } public void setSerializationDateFormat(DateFormat dateFormat) { if (dateFormat instanceof SimpleDateFormat) gson = new GsonBuilder().setDateFormat(((SimpleDateFormat) dateFormat).toPattern()).create(); } public boolean supports(Action action) { List<Parameter> bodies = action.getParameters().all(Position.BODY); return bodies.size() == 0 || bodies.size() == 1 && supports(bodies.get(0)); } public boolean supports(Parameter parameter) { return true; } public void serialize(Action action, String charset, OutputStream out) throws IOException { List<Parameter> parameters = action.getParameters().all(Position.BODY); for (Parameter parameter : parameters) { OutputStreamWriter osw = null; try { action.getRequest().setContentType(contentType + ";charset=" + charset); osw = new OutputStreamWriter(out, charset); gson.toJson(parameter.getValue(), osw); break; } finally { IOKit.close(osw); } } } public void serialize(Action action, Parameter parameter, String charset, MultipartOutputStream out) throws IOException { OutputStreamWriter osw = null; try { Disposition disposition = new Disposition("form-data", parameter.getName()); MediaType type = MediaType.valueOf(contentType + ";charset=" + charset); Multihead multihead = new Multihead(disposition, type); out.setNextMultihead(multihead); osw = new 
OutputStreamWriter(out, charset); gson.toJson(parameter.getValue(), osw); } finally { IOKit.close(osw); } } }
apache-2.0
pcierpiatka/testing_presentation
src/test/edu/the/way/of/testing/infrastructure/FlightTestBuilder.java
2646
package edu.the.way.of.testing.infrastructure; import edu.the.way.of.testing.Flight; import edu.the.way.of.testing.SeatClass; import java.time.LocalDate; import java.time.format.DateTimeFormatter; /** * User: pcierpiatka */ public class FlightTestBuilder { private Flight flight; private DateTimeFormatter dateFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd"); private FlightTestBuilder(String flightCode) { flight = new Flight(flightCode); } public static FlightTestBuilder flight(String flightCode) { return new FlightTestBuilder(flightCode); } public Flight build() { return flight; } public FlightTestBuilder withSeats(int seatCount) { return withSeatsInPrice(seatCount, 0); } public FlightTestBuilder withSeatsInPrice(int seatsCount, double price) { return withSeats(SeatClass.ECONOMIC, seatsCount, price); } public FlightTestBuilder withBookedSeatsInPrice(int seatsCount, double price) { int paddedSeatsCount = flight.getSeatsCount() + seatsCount; String seatNumber; for (int seatIndex = flight.getSeatsCount(); seatIndex < paddedSeatsCount; ++seatIndex) { seatNumber = "seat" + seatIndex; flight.addSeat(SeatClass.ECONOMIC, seatNumber, price); flight.bookSeat(seatNumber); } return this; } public FlightTestBuilder withSeatInPrice(String seatNumber, double price) { flight.addSeat(SeatClass.ECONOMIC, seatNumber, price); return this; } public FlightTestBuilder withSeat(String seatNumber) { flight.addSeat(SeatClass.ECONOMIC, seatNumber, 0); return this; } public FlightTestBuilder withBookedSeat(String seatNumber) { flight.addSeat(SeatClass.ECONOMIC, seatNumber, 0); flight.bookSeat(seatNumber); return this; } public FlightTestBuilder withSeats(SeatClass seatClass, int seatsCount, double price) { int paddedSeatsCount = flight.getSeatsCount() + seatsCount; for (int seatIndex = flight.getSeatsCount(); seatIndex < paddedSeatsCount; ++seatIndex) { flight.addSeat(seatClass, "seat" + seatIndex, price); } return this; } public FlightTestBuilder from(String from) { flight.setOrigin(from); 
return this; } public FlightTestBuilder to(String destination) { flight.setDestination(destination); return this; } public FlightTestBuilder on(String dateOfFlight) { flight.setFlightDate(LocalDate.parse(dateOfFlight, dateFormat)); return this; } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-directory/src/main/java/com/amazonaws/services/directory/model/transform/TrustMarshaller.java
5187
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.directory.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.directory.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * TrustMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class TrustMarshaller { private static final MarshallingInfo<String> DIRECTORYID_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("DirectoryId").build(); private static final MarshallingInfo<String> TRUSTID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("TrustId").build(); private static final MarshallingInfo<String> REMOTEDOMAINNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("RemoteDomainName").build(); private static final MarshallingInfo<String> TRUSTTYPE_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("TrustType").build(); private static final MarshallingInfo<String> TRUSTDIRECTION_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("TrustDirection").build(); private static 
final MarshallingInfo<String> TRUSTSTATE_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("TrustState").build(); private static final MarshallingInfo<java.util.Date> CREATEDDATETIME_BINDING = MarshallingInfo.builder(MarshallingType.DATE) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("CreatedDateTime").timestampFormat("unixTimestamp").build(); private static final MarshallingInfo<java.util.Date> LASTUPDATEDDATETIME_BINDING = MarshallingInfo.builder(MarshallingType.DATE) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("LastUpdatedDateTime").timestampFormat("unixTimestamp").build(); private static final MarshallingInfo<java.util.Date> STATELASTUPDATEDDATETIME_BINDING = MarshallingInfo.builder(MarshallingType.DATE) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("StateLastUpdatedDateTime").timestampFormat("unixTimestamp").build(); private static final MarshallingInfo<String> TRUSTSTATEREASON_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("TrustStateReason").build(); private static final MarshallingInfo<String> SELECTIVEAUTH_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("SelectiveAuth").build(); private static final TrustMarshaller instance = new TrustMarshaller(); public static TrustMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. 
*/ public void marshall(Trust trust, ProtocolMarshaller protocolMarshaller) { if (trust == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(trust.getDirectoryId(), DIRECTORYID_BINDING); protocolMarshaller.marshall(trust.getTrustId(), TRUSTID_BINDING); protocolMarshaller.marshall(trust.getRemoteDomainName(), REMOTEDOMAINNAME_BINDING); protocolMarshaller.marshall(trust.getTrustType(), TRUSTTYPE_BINDING); protocolMarshaller.marshall(trust.getTrustDirection(), TRUSTDIRECTION_BINDING); protocolMarshaller.marshall(trust.getTrustState(), TRUSTSTATE_BINDING); protocolMarshaller.marshall(trust.getCreatedDateTime(), CREATEDDATETIME_BINDING); protocolMarshaller.marshall(trust.getLastUpdatedDateTime(), LASTUPDATEDDATETIME_BINDING); protocolMarshaller.marshall(trust.getStateLastUpdatedDateTime(), STATELASTUPDATEDDATETIME_BINDING); protocolMarshaller.marshall(trust.getTrustStateReason(), TRUSTSTATEREASON_BINDING); protocolMarshaller.marshall(trust.getSelectiveAuth(), SELECTIVEAUTH_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
anton46/picasso
picasso/src/main/java/com/squareup/picasso/Downloader.java
4750
/* * Copyright (C) 2013 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.squareup.picasso; import android.graphics.Bitmap; import android.net.Uri; import java.io.IOException; import java.io.InputStream; /** A mechanism to load images from external resources such as a disk cache and/or the internet. */ public interface Downloader { /** * Download the specified image {@code url} from the internet. * * @param uri Remote image URL. * @param localCacheOnly If {@code true} the URL should only be loaded if available in a local * disk cache. * @return {@link Response} containing either a {@link Bitmap} representation of the request or an * {@link InputStream} for the image data. {@code null} can be returned to indicate a problem * loading the bitmap. * @throws IOException if the requested URL cannot successfully be loaded. */ Response load(Uri uri, boolean localCacheOnly) throws IOException; /** * Allows to perform a clean up for this {@link Downloader} including closing the disk cache and * other resources. */ void shutdown(); /** Thrown for non-2XX responses. */ class ResponseException extends IOException { public ResponseException(String message) { super(message); } } /** Response stream or bitmap and info. */ class Response { final InputStream stream; final Bitmap bitmap; final boolean cached; final long contentLength; /** * Response image and info. * * @param bitmap Image. 
* @param loadedFromCache {@code true} if the source of the image is from a local disk cache. * @deprecated Use {@link Response#Response(android.graphics.Bitmap, boolean, long)} instead. */ @Deprecated @SuppressWarnings("UnusedDeclaration") public Response(Bitmap bitmap, boolean loadedFromCache) { this(bitmap, loadedFromCache, -1); } /** * Response stream and info. * * @param stream Image data stream. * @param loadedFromCache {@code true} if the source of the stream is from a local disk cache. * @deprecated Use {@link Response#Response(java.io.InputStream, boolean, long)} instead. */ @Deprecated @SuppressWarnings("UnusedDeclaration") public Response(InputStream stream, boolean loadedFromCache) { this(stream, loadedFromCache, -1); } /** * Response image and info. * * @param bitmap Image. * @param loadedFromCache {@code true} if the source of the image is from a local disk cache. * @param contentLength The content length of the response, typically derived by the * {@code Content-Length} HTTP header. */ public Response(Bitmap bitmap, boolean loadedFromCache, long contentLength) { if (bitmap == null) { throw new IllegalArgumentException("Bitmap may not be null."); } this.stream = null; this.bitmap = bitmap; this.cached = loadedFromCache; this.contentLength = contentLength; } /** * Response stream and info. * * @param stream Image data stream. * @param loadedFromCache {@code true} if the source of the stream is from a local disk cache. * @param contentLength The content length of the response, typically derived by the * {@code Content-Length} HTTP header. */ public Response(InputStream stream, boolean loadedFromCache, long contentLength) { if (stream == null) { throw new IllegalArgumentException("Stream may not be null."); } this.stream = stream; this.bitmap = null; this.cached = loadedFromCache; this.contentLength = contentLength; } /** * Input stream containing image data. * <p> * If this returns {@code null}, image data will be available via {@link #getBitmap()}. 
*/ public InputStream getInputStream() { return stream; } /** * Bitmap representing the image. * <p> * If this returns {@code null}, image data will be available via {@link #getInputStream()}. */ public Bitmap getBitmap() { return bitmap; } /** Content length of the response. */ public long getContentLength() { return contentLength; } } }
apache-2.0
grokcoder/leetcode
src/main/java/leetcode/medium/BestTimetoBuyandSellStock.java
841
package leetcode.medium; /** * Created by wangxiaoyi on 15/11/18. */ public class BestTimetoBuyandSellStock { public int maxProfit(int[] prices) { //int max = Integer.MIN_VALUE; int max = 0; int len = prices.length; if(len < 2) return 0; else { // prices[0] = 0; int currMax = 0; int currMin = prices[0];// 记录一个到当前最便宜的股票价格 for(int i = 1; i < len; ++ i){ currMax = Math.max(currMax, prices[i] - currMin); currMin = Math.min(currMin, prices[i]); max = Math.max(currMax, max); } } return max; } public static void main(String []args){ System.out.print(new BestTimetoBuyandSellStock().maxProfit(new int[]{1, 2, 4})); } }
apache-2.0
mta452/Tehreer-Android
tehreer-android/src/main/java/com/mta/tehreer/layout/TextAlignment.java
1175
/* * Copyright (C) 2018 Muhammad Tayyab Akram * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mta.tehreer.layout; /** * Specifies the horizontal text alignment. */ public enum TextAlignment { /** * Aligns text to the left side of the line. */ LEFT, /** * Aligns text to the right side of the line. */ RIGHT, /** * Aligns text to the center of the line. */ CENTER, /** * Aligns text to the left side of the line if its paragraph level is even. */ INTRINSIC, /** * Aligns text to the right side of the line if its paragraph level is even. */ EXTRINSIC, }
apache-2.0
LevelFourAB/vibe
vibe-api/src/test/java/se/l4/vibe/sampling/SamplerTest.java
1245
package se.l4.vibe.sampling; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.MatcherAssert.assertThat; import java.util.concurrent.ThreadLocalRandom; import org.junit.Test; public class SamplerTest { @Test public void testAddingListenerStartsSampling() { TimeSampler<Double> randomSampler = TimeSampler.forProbe( () -> ThreadLocalRandom.current().nextDouble() ).build(); assertThat(randomSampler, is(notNullValue())); randomSampler.addListener(sample -> {}); assertThat(randomSampler.getLastSample(), is(notNullValue())); } @Test public void testSamplingReturnsValue() { TimeSampler<Integer> randomSampler = TimeSampler.forProbe(() -> 10) .build(); assertThat(randomSampler, is(notNullValue())); randomSampler.addListener(sample -> {}); assertThat(randomSampler.getLastSample().getValue(), is(10)); } @Test public void testSamplingWithApplyWorks() { TimeSampler<Integer> randomSampler = TimeSampler.forProbe(() -> 10) .apply(i -> i * 10) .build(); assertThat(randomSampler, is(notNullValue())); randomSampler.addListener(sample -> {}); assertThat(randomSampler.getLastSample().getValue(), is(100)); } }
apache-2.0
oliverwehrens/quant
src/test/java/com/maxheapsize/quant/infrastructure/AbstractConcordionTestNgTest.java
491
package com.maxheapsize.quant.infrastructure; import org.concordion.api.ResultSummary; import org.concordion.internal.ConcordionBuilder; import org.testng.annotations.Test; public abstract class AbstractConcordionTestNgTest { @Test(groups = {"concordion"}) public void processSpecification() throws Throwable { ResultSummary resultSummary = new ConcordionBuilder().build().process(this); resultSummary.print(System.out, this); resultSummary.assertIsSatisfied(this); } }
apache-2.0
chao-sun-kaazing/gateway
management/src/test/java/org/kaazing/gateway/management/test/util/TestLoginModule.java
1892
/** * Copyright 2007-2015, Kaazing Corporation. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaazing.gateway.management.test.util; import java.security.Principal; import java.util.Map; import javax.security.auth.Subject; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.login.LoginException; import javax.security.auth.spi.LoginModule; /** * Login module that always logs in * */ public class TestLoginModule implements LoginModule { private static final Principal ROLE_PRINCIPAL = new RolePrincipal(); private Subject subject; @Override public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState, Map<String, ?> options) { this.subject = subject; } @Override public boolean login() throws LoginException { return true; } @Override public boolean commit() throws LoginException { subject.getPrincipals().add(ROLE_PRINCIPAL); return true; } @Override public boolean abort() throws LoginException { return true; } @Override public boolean logout() throws LoginException { return true; } static class RolePrincipal implements Principal { @Override public String getName() { return "AUTHORIZED"; } } }
apache-2.0
vam-google/google-cloud-java
google-cloud-clients/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/HttpJsonAutoscalerStub.java
18998
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1.stub; import static com.google.cloud.compute.v1.AutoscalerClient.AggregatedListAutoscalersPagedResponse; import static com.google.cloud.compute.v1.AutoscalerClient.ListAutoscalersPagedResponse; import com.google.api.client.http.HttpMethods; import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMessageHttpRequestFormatter; import com.google.api.gax.httpjson.ApiMessageHttpResponseParser; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.api.pathtemplate.PathTemplate; import com.google.cloud.compute.v1.AggregatedListAutoscalersHttpRequest; import com.google.cloud.compute.v1.Autoscaler; import com.google.cloud.compute.v1.AutoscalerAggregatedList; import com.google.cloud.compute.v1.AutoscalerList; import com.google.cloud.compute.v1.DeleteAutoscalerHttpRequest; import com.google.cloud.compute.v1.GetAutoscalerHttpRequest; import com.google.cloud.compute.v1.InsertAutoscalerHttpRequest; import com.google.cloud.compute.v1.ListAutoscalersHttpRequest; import 
com.google.cloud.compute.v1.Operation; import com.google.cloud.compute.v1.PatchAutoscalerHttpRequest; import com.google.cloud.compute.v1.ProjectName; import com.google.cloud.compute.v1.ProjectZoneAutoscalerName; import com.google.cloud.compute.v1.ProjectZoneName; import com.google.cloud.compute.v1.UpdateAutoscalerHttpRequest; import com.google.common.collect.Sets; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS /** * HTTP stub implementation for compute. * * <p>This class is for advanced usage and reflects the underlying API directly. */ @Generated("by gapic-generator") @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public class HttpJsonAutoscalerStub extends AutoscalerStub { @InternalApi public static final ApiMethodDescriptor< AggregatedListAutoscalersHttpRequest, AutoscalerAggregatedList> aggregatedListAutoscalersMethodDescriptor = ApiMethodDescriptor .<AggregatedListAutoscalersHttpRequest, AutoscalerAggregatedList>newBuilder() .setFullMethodName("compute.autoscalers.aggregatedList") .setHttpMethod(HttpMethods.GET) .setRequestFormatter( ApiMessageHttpRequestFormatter.<AggregatedListAutoscalersHttpRequest>newBuilder() .setPathTemplate(PathTemplate.create("{project}/aggregated/autoscalers")) .setQueryParams( Sets.<String>newHashSet("filter", "maxResults", "orderBy", "pageToken")) .setResourceNameFactory(ProjectName.newFactory()) .setResourceNameField("project") .build()) .setResponseParser( ApiMessageHttpResponseParser.<AutoscalerAggregatedList>newBuilder() .setResponseInstance(AutoscalerAggregatedList.getDefaultInstance()) .build()) .build(); @InternalApi public static final ApiMethodDescriptor<DeleteAutoscalerHttpRequest, Operation> deleteAutoscalerMethodDescriptor = ApiMethodDescriptor.<DeleteAutoscalerHttpRequest, Operation>newBuilder() .setFullMethodName("compute.autoscalers.delete") .setHttpMethod(HttpMethods.DELETE) 
.setRequestFormatter( ApiMessageHttpRequestFormatter.<DeleteAutoscalerHttpRequest>newBuilder() .setPathTemplate( PathTemplate.create("{project}/zones/{zone}/autoscalers/{autoscaler}")) .setQueryParams(Sets.<String>newHashSet("requestId")) .setResourceNameFactory(ProjectZoneAutoscalerName.newFactory()) .setResourceNameField("autoscaler") .build()) .setResponseParser( ApiMessageHttpResponseParser.<Operation>newBuilder() .setResponseInstance(Operation.getDefaultInstance()) .build()) .build(); @InternalApi public static final ApiMethodDescriptor<GetAutoscalerHttpRequest, Autoscaler> getAutoscalerMethodDescriptor = ApiMethodDescriptor.<GetAutoscalerHttpRequest, Autoscaler>newBuilder() .setFullMethodName("compute.autoscalers.get") .setHttpMethod(HttpMethods.GET) .setRequestFormatter( ApiMessageHttpRequestFormatter.<GetAutoscalerHttpRequest>newBuilder() .setPathTemplate( PathTemplate.create("{project}/zones/{zone}/autoscalers/{autoscaler}")) .setQueryParams(Sets.<String>newHashSet()) .setResourceNameFactory(ProjectZoneAutoscalerName.newFactory()) .setResourceNameField("autoscaler") .build()) .setResponseParser( ApiMessageHttpResponseParser.<Autoscaler>newBuilder() .setResponseInstance(Autoscaler.getDefaultInstance()) .build()) .build(); @InternalApi public static final ApiMethodDescriptor<InsertAutoscalerHttpRequest, Operation> insertAutoscalerMethodDescriptor = ApiMethodDescriptor.<InsertAutoscalerHttpRequest, Operation>newBuilder() .setFullMethodName("compute.autoscalers.insert") .setHttpMethod(HttpMethods.POST) .setRequestFormatter( ApiMessageHttpRequestFormatter.<InsertAutoscalerHttpRequest>newBuilder() .setPathTemplate(PathTemplate.create("{project}/zones/{zone}/autoscalers")) .setQueryParams(Sets.<String>newHashSet("requestId")) .setResourceNameFactory(ProjectZoneName.newFactory()) .setResourceNameField("zone") .build()) .setResponseParser( ApiMessageHttpResponseParser.<Operation>newBuilder() .setResponseInstance(Operation.getDefaultInstance()) .build()) .build(); 
@InternalApi public static final ApiMethodDescriptor<ListAutoscalersHttpRequest, AutoscalerList> listAutoscalersMethodDescriptor = ApiMethodDescriptor.<ListAutoscalersHttpRequest, AutoscalerList>newBuilder() .setFullMethodName("compute.autoscalers.list") .setHttpMethod(HttpMethods.GET) .setRequestFormatter( ApiMessageHttpRequestFormatter.<ListAutoscalersHttpRequest>newBuilder() .setPathTemplate(PathTemplate.create("{project}/zones/{zone}/autoscalers")) .setQueryParams( Sets.<String>newHashSet("filter", "maxResults", "orderBy", "pageToken")) .setResourceNameFactory(ProjectZoneName.newFactory()) .setResourceNameField("zone") .build()) .setResponseParser( ApiMessageHttpResponseParser.<AutoscalerList>newBuilder() .setResponseInstance(AutoscalerList.getDefaultInstance()) .build()) .build(); @InternalApi public static final ApiMethodDescriptor<PatchAutoscalerHttpRequest, Operation> patchAutoscalerMethodDescriptor = ApiMethodDescriptor.<PatchAutoscalerHttpRequest, Operation>newBuilder() .setFullMethodName("compute.autoscalers.patch") .setHttpMethod(HttpMethods.PATCH) .setRequestFormatter( ApiMessageHttpRequestFormatter.<PatchAutoscalerHttpRequest>newBuilder() .setPathTemplate(PathTemplate.create("{project}/zones/{zone}/autoscalers")) .setQueryParams(Sets.<String>newHashSet("autoscaler", "requestId")) .setResourceNameFactory(ProjectZoneName.newFactory()) .setResourceNameField("zone") .build()) .setResponseParser( ApiMessageHttpResponseParser.<Operation>newBuilder() .setResponseInstance(Operation.getDefaultInstance()) .build()) .build(); @InternalApi public static final ApiMethodDescriptor<UpdateAutoscalerHttpRequest, Operation> updateAutoscalerMethodDescriptor = ApiMethodDescriptor.<UpdateAutoscalerHttpRequest, Operation>newBuilder() .setFullMethodName("compute.autoscalers.update") .setHttpMethod(HttpMethods.PUT) .setRequestFormatter( ApiMessageHttpRequestFormatter.<UpdateAutoscalerHttpRequest>newBuilder() 
.setPathTemplate(PathTemplate.create("{project}/zones/{zone}/autoscalers")) .setQueryParams(Sets.<String>newHashSet("autoscaler", "requestId")) .setResourceNameFactory(ProjectZoneName.newFactory()) .setResourceNameField("zone") .build()) .setResponseParser( ApiMessageHttpResponseParser.<Operation>newBuilder() .setResponseInstance(Operation.getDefaultInstance()) .build()) .build(); private final BackgroundResource backgroundResources; private final UnaryCallable<AggregatedListAutoscalersHttpRequest, AutoscalerAggregatedList> aggregatedListAutoscalersCallable; private final UnaryCallable< AggregatedListAutoscalersHttpRequest, AggregatedListAutoscalersPagedResponse> aggregatedListAutoscalersPagedCallable; private final UnaryCallable<DeleteAutoscalerHttpRequest, Operation> deleteAutoscalerCallable; private final UnaryCallable<GetAutoscalerHttpRequest, Autoscaler> getAutoscalerCallable; private final UnaryCallable<InsertAutoscalerHttpRequest, Operation> insertAutoscalerCallable; private final UnaryCallable<ListAutoscalersHttpRequest, AutoscalerList> listAutoscalersCallable; private final UnaryCallable<ListAutoscalersHttpRequest, ListAutoscalersPagedResponse> listAutoscalersPagedCallable; private final UnaryCallable<PatchAutoscalerHttpRequest, Operation> patchAutoscalerCallable; private final UnaryCallable<UpdateAutoscalerHttpRequest, Operation> updateAutoscalerCallable; private final HttpJsonStubCallableFactory callableFactory; public static final HttpJsonAutoscalerStub create(AutoscalerStubSettings settings) throws IOException { return new HttpJsonAutoscalerStub(settings, ClientContext.create(settings)); } public static final HttpJsonAutoscalerStub create(ClientContext clientContext) throws IOException { return new HttpJsonAutoscalerStub(AutoscalerStubSettings.newBuilder().build(), clientContext); } public static final HttpJsonAutoscalerStub create( ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { return new 
HttpJsonAutoscalerStub( AutoscalerStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of HttpJsonAutoscalerStub, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected HttpJsonAutoscalerStub(AutoscalerStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new HttpJsonAutoscalerCallableFactory()); } /** * Constructs an instance of HttpJsonAutoscalerStub, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected HttpJsonAutoscalerStub( AutoscalerStubSettings settings, ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; HttpJsonCallSettings<AggregatedListAutoscalersHttpRequest, AutoscalerAggregatedList> aggregatedListAutoscalersTransportSettings = HttpJsonCallSettings .<AggregatedListAutoscalersHttpRequest, AutoscalerAggregatedList>newBuilder() .setMethodDescriptor(aggregatedListAutoscalersMethodDescriptor) .build(); HttpJsonCallSettings<DeleteAutoscalerHttpRequest, Operation> deleteAutoscalerTransportSettings = HttpJsonCallSettings.<DeleteAutoscalerHttpRequest, Operation>newBuilder() .setMethodDescriptor(deleteAutoscalerMethodDescriptor) .build(); HttpJsonCallSettings<GetAutoscalerHttpRequest, Autoscaler> getAutoscalerTransportSettings = HttpJsonCallSettings.<GetAutoscalerHttpRequest, Autoscaler>newBuilder() .setMethodDescriptor(getAutoscalerMethodDescriptor) .build(); HttpJsonCallSettings<InsertAutoscalerHttpRequest, Operation> insertAutoscalerTransportSettings = HttpJsonCallSettings.<InsertAutoscalerHttpRequest, Operation>newBuilder() .setMethodDescriptor(insertAutoscalerMethodDescriptor) .build(); HttpJsonCallSettings<ListAutoscalersHttpRequest, AutoscalerList> 
listAutoscalersTransportSettings = HttpJsonCallSettings.<ListAutoscalersHttpRequest, AutoscalerList>newBuilder() .setMethodDescriptor(listAutoscalersMethodDescriptor) .build(); HttpJsonCallSettings<PatchAutoscalerHttpRequest, Operation> patchAutoscalerTransportSettings = HttpJsonCallSettings.<PatchAutoscalerHttpRequest, Operation>newBuilder() .setMethodDescriptor(patchAutoscalerMethodDescriptor) .build(); HttpJsonCallSettings<UpdateAutoscalerHttpRequest, Operation> updateAutoscalerTransportSettings = HttpJsonCallSettings.<UpdateAutoscalerHttpRequest, Operation>newBuilder() .setMethodDescriptor(updateAutoscalerMethodDescriptor) .build(); this.aggregatedListAutoscalersCallable = callableFactory.createUnaryCallable( aggregatedListAutoscalersTransportSettings, settings.aggregatedListAutoscalersSettings(), clientContext); this.aggregatedListAutoscalersPagedCallable = callableFactory.createPagedCallable( aggregatedListAutoscalersTransportSettings, settings.aggregatedListAutoscalersSettings(), clientContext); this.deleteAutoscalerCallable = callableFactory.createUnaryCallable( deleteAutoscalerTransportSettings, settings.deleteAutoscalerSettings(), clientContext); this.getAutoscalerCallable = callableFactory.createUnaryCallable( getAutoscalerTransportSettings, settings.getAutoscalerSettings(), clientContext); this.insertAutoscalerCallable = callableFactory.createUnaryCallable( insertAutoscalerTransportSettings, settings.insertAutoscalerSettings(), clientContext); this.listAutoscalersCallable = callableFactory.createUnaryCallable( listAutoscalersTransportSettings, settings.listAutoscalersSettings(), clientContext); this.listAutoscalersPagedCallable = callableFactory.createPagedCallable( listAutoscalersTransportSettings, settings.listAutoscalersSettings(), clientContext); this.patchAutoscalerCallable = callableFactory.createUnaryCallable( patchAutoscalerTransportSettings, settings.patchAutoscalerSettings(), clientContext); this.updateAutoscalerCallable = 
callableFactory.createUnaryCallable( updateAutoscalerTransportSettings, settings.updateAutoscalerSettings(), clientContext); backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } @BetaApi public UnaryCallable<AggregatedListAutoscalersHttpRequest, AggregatedListAutoscalersPagedResponse> aggregatedListAutoscalersPagedCallable() { return aggregatedListAutoscalersPagedCallable; } @BetaApi public UnaryCallable<AggregatedListAutoscalersHttpRequest, AutoscalerAggregatedList> aggregatedListAutoscalersCallable() { return aggregatedListAutoscalersCallable; } @BetaApi public UnaryCallable<DeleteAutoscalerHttpRequest, Operation> deleteAutoscalerCallable() { return deleteAutoscalerCallable; } @BetaApi public UnaryCallable<GetAutoscalerHttpRequest, Autoscaler> getAutoscalerCallable() { return getAutoscalerCallable; } @BetaApi public UnaryCallable<InsertAutoscalerHttpRequest, Operation> insertAutoscalerCallable() { return insertAutoscalerCallable; } @BetaApi public UnaryCallable<ListAutoscalersHttpRequest, ListAutoscalersPagedResponse> listAutoscalersPagedCallable() { return listAutoscalersPagedCallable; } @BetaApi public UnaryCallable<ListAutoscalersHttpRequest, AutoscalerList> listAutoscalersCallable() { return listAutoscalersCallable; } @BetaApi public UnaryCallable<PatchAutoscalerHttpRequest, Operation> patchAutoscalerCallable() { return patchAutoscalerCallable; } @BetaApi public UnaryCallable<UpdateAutoscalerHttpRequest, Operation> updateAutoscalerCallable() { return updateAutoscalerCallable; } @Override public final void close() { shutdown(); } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws 
InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
apache-2.0
tectronics/hyracks
hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/LSMInvertedIndexInsertTest.java
1072
/*
 * Copyright 2009-2013 by The Regents of the University of California
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * you may obtain a copy of the License from
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hyracks.storage.am.lsm.invertedindex;

import org.apache.hyracks.storage.am.lsm.invertedindex.common.AbstractInvertedIndexLoadTest;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;

/**
 * Runs the shared inverted-index load-test scenario defined by
 * {@link AbstractInvertedIndexLoadTest} against the LSM variant of the
 * inverted index ({@link InvertedIndexType#LSM}).
 *
 * All test logic lives in the superclass; this subclass only selects the
 * index type under test via the constructor arguments.
 */
public class LSMInvertedIndexInsertTest extends AbstractInvertedIndexLoadTest {

    public LSMInvertedIndexInsertTest() {
        // NOTE(review): the boolean flag presumably selects insert mode
        // (vs. bulk load) in AbstractInvertedIndexLoadTest — confirm against
        // the superclass, which is not visible from this file.
        super(InvertedIndexType.LSM, false);
    }
}
apache-2.0
chyzh1689/pay
pay-product/src/main/java/com/qh/common/domain/Tree.java
2572
package com.qh.common.domain;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import com.alibaba.fastjson.JSON;

/**
 * Generic tree node used to feed front-end tree widgets (e.g. EasyUI tree),
 * serialized to JSON via {@link #toString()}.
 *
 * @param <T> payload type carried by child nodes
 * @author kangxu2 2017-1-7
 */
public class Tree<T> {

    /** Node id. */
    private String id;

    /** Display text of the node. */
    private String text;

    /** Node state, e.g. open / closed. */
    private Map<String, Object> state;

    /** Whether the node is selected. */
    private boolean checked = false;

    /** Arbitrary extra attributes of the node. */
    private Map<String, Object> attributes;

    /** Child nodes of this node. */
    private List<Tree<T>> children = new ArrayList<Tree<T>>();

    /** Parent node id. */
    private String parentId;

    /** Whether this node has a parent. */
    private boolean hasParent = false;

    /** Whether this node has children. */
    private boolean hasChildren = false;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    public Map<String, Object> getState() {
        return state;
    }

    public void setState(Map<String, Object> state) {
        this.state = state;
    }

    public boolean isChecked() {
        return checked;
    }

    public void setChecked(boolean checked) {
        this.checked = checked;
    }

    public Map<String, Object> getAttributes() {
        return attributes;
    }

    public void setAttributes(Map<String, Object> attributes) {
        this.attributes = attributes;
    }

    public List<Tree<T>> getChildren() {
        return children;
    }

    public void setChildren(List<Tree<T>> children) {
        this.children = children;
    }

    public boolean isHasParent() {
        return hasParent;
    }

    public void setHasParent(boolean isParent) {
        this.hasParent = isParent;
    }

    public boolean isHasChildren() {
        return hasChildren;
    }

    /**
     * Proper JavaBean setter for the {@code hasChildren} property. Previously
     * this property could only be set through the confusingly named
     * {@link #setChildren(boolean)} overload, which clashed with
     * {@link #setChildren(List)} and broke bean-introspection of the
     * {@code hasChildren} property.
     */
    public void setHasChildren(boolean hasChildren) {
        this.hasChildren = hasChildren;
    }

    /**
     * Misnamed setter for {@code hasChildren}, kept for backward
     * compatibility with existing callers.
     *
     * @deprecated use {@link #setHasChildren(boolean)} instead.
     */
    @Deprecated
    public void setChildren(boolean isChildren) {
        this.hasChildren = isChildren;
    }

    public String getParentId() {
        return parentId;
    }

    public void setParentId(String parentId) {
        this.parentId = parentId;
    }

    /** Full constructor initializing every field. */
    public Tree(String id, String text, Map<String, Object> state, boolean checked, Map<String, Object> attributes,
            List<Tree<T>> children, boolean isParent, boolean isChildren, String parentID) {
        super();
        this.id = id;
        this.text = text;
        this.state = state;
        this.checked = checked;
        this.attributes = attributes;
        this.children = children;
        this.hasParent = isParent;
        this.hasChildren = isChildren;
        this.parentId = parentID;
    }

    /** No-arg constructor for frameworks and manual population. */
    public Tree() {
        super();
    }

    /** Serializes the whole subtree to JSON (fastjson). */
    @Override
    public String toString() {
        return JSON.toJSONString(this);
    }
}
apache-2.0
juweiping/ocms
src/org/openuap/cms/survey/manager/impl/QuestionManagerImpl.java
12810
/** * $Id: QuestionManagerImpl.java 3951 2010-11-02 10:13:17Z orangeforjava $ */ package org.openuap.cms.survey.manager.impl; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.openuap.base.util.QueryInfo; import org.openuap.base.util.context.PageBuilder; import org.openuap.cms.survey.dao.QuestionDao; import org.openuap.cms.survey.dao.QuestionItemDao; import org.openuap.cms.survey.dao.VoterDao; import org.openuap.cms.survey.manager.QuestionManager; import org.openuap.cms.survey.model.Answer; import org.openuap.cms.survey.model.AnswerItem; import org.openuap.cms.survey.model.Question; import org.openuap.cms.survey.model.QuestionGroup; import org.openuap.cms.survey.model.QuestionItem; import org.openuap.cms.survey.model.QuestionPage; import org.openuap.cms.survey.model.SurveyAnswer; import org.openuap.cms.survey.model.Voter; /** * <p> * Title:VoteManagerImpl * </p> * * <p> * Description: * </p> * * <p> * Copyright: Copyright (c) 2006 * </p> * * <p> * Company: http://www.openuap.org * </p> * * @author Weiping Ju * @version 1.0 */ public class QuestionManagerImpl implements QuestionManager { private QuestionDao questionDao; private QuestionItemDao questionItemDao; private VoterDao voterDao; public QuestionManagerImpl() { } public List getQuestions(QueryInfo qi, PageBuilder pb) { return questionDao.getQuestions(qi, pb); } public Question getQuestionById(Long id) { return questionDao.getQuestionById(id); } public List getAllQuestions() { return questionDao.getAllQuestions(); } public List searchQuestions(String key, QueryInfo qi, PageBuilder pb) { return questionDao.searchQuestions(key, qi, pb); } public Long addQuestion(Question question) { return questionDao.addQuestion(question); } public void saveQuestion(Question question) { questionDao.saveQuestion(question); } public void deleteQuestion(Question question) { questionDao.deleteQuestion(question); } public int getQuestionCount() { return 
questionDao.getQuestionCount(); } public int getQuestionCount(Integer questionStatus) { return questionDao.getQuestionCount(questionStatus); } public List getQuestionList(String questionsId) { return questionDao.getQuestionList(questionsId); } public List getQuestionList(Integer questionStatus, QueryInfo qi, PageBuilder pb) { return questionDao.getQuestionList(questionStatus, qi, pb); } public List getQuestionItems(Long questionId) { return questionItemDao.getQuestionItems(questionId); } public QuestionItem getQuestionItemById(Long itemId) { return questionItemDao.getQuestionItemById(itemId); } public Long addQuestionItem(QuestionItem questionItem) { return questionItemDao.addQuestionItem(questionItem); } public void saveQuestionItem(QuestionItem questionItem) { questionItemDao.saveQuestionItem(questionItem); } public void deleteQuestionItem(QuestionItem questionItem) { questionItemDao.deleteQuestionItem(questionItem); } public void deleteQuestionItems(Long questionId) { questionItemDao.deleteQuestionItems(questionId); } public void deleteQuestionItemById(Long questionId) { questionItemDao.deleteQuestionItemById(questionId); } public int getQuestionItemsTotalCount(Long questionId) { return questionItemDao.getQuestionItemsTotalCount(questionId); } public Long addVoter(Voter voter) { return voterDao.addVoter(voter); } public Voter getVoterByName(String name) { return voterDao.getVoterByName(name); } public void saveVoter(Voter voter) { voterDao.saveVoter(voter); } public void deleteVoter(Voter voter) { voterDao.deleteVoter(voter); } public Voter searchVoter(String ipAddress, Long groupId) { return voterDao.searchVoter(ipAddress, groupId); } public void setVoterDao(VoterDao voterDao) { this.voterDao = voterDao; } public void setQuestionItemDao(QuestionItemDao questionItemDao) { this.questionItemDao = questionItemDao; } public void setQuestionDao(QuestionDao questionDao) { this.questionDao = questionDao; } public void updateQuestionItems(String voteItemsId) { 
questionItemDao.updateQuestionItems(voteItemsId); } public void updateQuestionItems2(String questionItemsId) { questionItemDao.updateQuestionItems2(questionItemsId); } public List getQuestionList(Long surveyId, QueryInfo qi, PageBuilder pb) { return questionDao.getQuestionList(surveyId, qi, pb); } public List getQuestionList(String hql, String hql_count, QueryInfo qi, PageBuilder pb) { return questionDao.getQuestionList(hql, hql_count, qi, pb); } public int getItemsCount(Long questionId) { return questionItemDao.getItemsCount(questionId); } public void executeHql(String hql, Object args[]) { questionDao.executeHql(hql, args); } public int getQuestionCountBySurvey(Long surveyId) { return questionDao.getQuestionCountBySurvey(surveyId); } public List getQuestionListByGroup(Long surveyId, Long groupId) { return questionDao.getQuestionListByGroup(surveyId, groupId); } public QuestionPage getQuestionPage(Long surveyId, Long pageId) { List questions = getQuestionListByPage(surveyId, pageId); String qids = ""; if (questions != null && questions.size() > 0) { Question fq = (Question) questions.get(0); QuestionPage questionPage = new QuestionPage(); questionPage.setPageId(pageId); // qids += fq.getQuestionId() + ","; // List groups = new ArrayList(); // Long groupId = fq.getGroupId(); int start = 0; int end = 0; for (int i = 0; i < questions.size(); i++) { Question q = (Question) questions.get(i); qids += q.getQuestionId() + ","; if (q.getGroupId().equals(groupId)) { end++; } else { if (end - start > 1) { // 一道题以上是1个组 QuestionGroup tmpGroup = new QuestionGroup(); List qs = new ArrayList(); Question q_first = (Question) questions.get(start); for (int j = start; j < end; j++) { Question tmp_q = (Question) questions.get(j); qs.add(tmp_q); } tmpGroup.setGroupTitle(q_first.getGroupTitle()); tmpGroup.setGroupType(new Integer(1)); tmpGroup.setGroupId(q_first.getGroupId()); tmpGroup.setQuestions(qs); groups.add(tmpGroup); } else { // 一道题是1个组 Question q_prev = (Question) 
questions.get(start); QuestionGroup tmpGroup = new QuestionGroup(); tmpGroup.setGroupTitle(q_prev.getQuestionTitle()); tmpGroup.setGroupType(new Integer(0)); tmpGroup.setGroupId(q_prev.getGroupId()); List qs = new ArrayList(); qs.add(q_prev); tmpGroup.setQuestions(qs); groups.add(tmpGroup); } start = end; end++; groupId = q.getGroupId(); } } // Question eq = (Question) questions.get(questions.size() - 1); if (end - start > 1) { // 一道题以上是1个组 QuestionGroup tmpGroup = new QuestionGroup(); List qs = new ArrayList(); Question q_first = (Question) questions.get(start); for (int j = start; j < end; j++) { Question tmp_q = (Question) questions.get(j); qs.add(tmp_q); } tmpGroup.setGroupTitle(q_first.getGroupTitle()); tmpGroup.setGroupType(new Integer(1)); tmpGroup.setGroupId(q_first.getGroupId()); tmpGroup.setQuestions(qs); groups.add(tmpGroup); } else { // 一道题是1个组 Question q_prev = (Question) questions.get(start); QuestionGroup tmpGroup = new QuestionGroup(); tmpGroup.setGroupTitle(q_prev.getQuestionTitle()); tmpGroup.setGroupType(new Integer(0)); tmpGroup.setGroupId(q_prev.getGroupId()); List qs = new ArrayList(); qs.add(q_prev); tmpGroup.setQuestions(qs); groups.add(tmpGroup); } // questionPage.setGroups(groups); qids = qids.substring(0, qids.length() - 1); questionPage.setQuestionIds(qids); return questionPage; } return null; } public List getQuestionListByPage(Long surveyId, Long pageId) { return questionDao.getQuestionListByPage(surveyId, pageId); } public QuestionGroup getQuestionGroup(Long surveyId, Long groupId) { List questions = getQuestionListByGroup(surveyId, groupId); String qids = ""; if (questions != null && questions.size() > 0) { Question fq = (Question) questions.get(0); QuestionGroup group = new QuestionGroup(); group.setGroupId(fq.getGroupId()); if (questions.size() > 1) { group.setGroupType(new Integer(1)); group.setGroupTitle(fq.getGroupTitle()); } else { group.setGroupType(new Integer(0)); group.setGroupTitle(fq.getQuestionTitle()); } qids = 
fq.getQuestionId() + ","; for (int i = 1; i < questions.size(); i++) { Question q = (Question) questions.get(i); qids += q.getQuestionId() + ","; } qids = qids.substring(0, qids.length() - 1); group.setQuestionIds(qids); group.setQuestions(questions); return group; } return null; } public List getPages(Long id) { return questionDao.getPages(id); } public Voter getVoterById(Long voterId) { return voterDao.getVoterById(voterId); } public void addAnswer(Answer Answer) { voterDao.addAnswer(Answer); } public void saveAnswer(Answer Answer) { voterDao.saveAnswer(Answer); } public void deleteAnswer(Answer Answer) { voterDao.deleteAnswer(Answer); } public void deleteAnswerByVoter(Long voterId) { voterDao.deleteAnswerByVoter(voterId); } public void deleteAnswerBySurvey(Long surveyId) { voterDao.deleteAnswerBySurvey(surveyId); } public List getAnswer(Long surveyId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswer(surveyId, qi, pb); } public List getAnswerByVoter(Long voterId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswerByVoter(voterId, qi, pb); } public List getAnswerByQuestion(Long questionId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswerByQuestion(questionId, qi, pb); } public Answer getAnswerById(Long surveyId, Long voterId, Long questionId) { return voterDao.getAnswerById(surveyId, voterId, questionId); } public void addAnswerItem(AnswerItem AnswerItem) { voterDao.addAnswerItem(AnswerItem); } public void saveAnswerItem(AnswerItem AnswerItem) { voterDao.saveAnswerItem(AnswerItem); } public void deleteAnswerItem(Answer AnswerItem) { voterDao.deleteAnswerItem(AnswerItem); } public void deleteAnswerItemByVoter(Long voterId) { voterDao.deleteAnswerByVoter(voterId); } public void deleteAnswerItemBySurvey(Long surveyId) { voterDao.deleteAnswerBySurvey(surveyId); } public List getAnswerItemBySurvey(Long surveyId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswerItemBySurvey(surveyId, qi, pb); } public List getAnswerItemByVoter(Long 
voterId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswerByVoter(voterId, qi, pb); } public List getAnswerItemByItem(Long itemId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswerItemByItem(itemId, qi, pb); } public AnswerItem getAnswerItemById(Long itemId, Long voterId, Long surveyId) { return voterDao.getAnswerItemById(itemId, voterId, surveyId); } public List getVoters(Long surveyId, Long surveyRecordId, QueryInfo qi, PageBuilder pb) { return voterDao.getVoters(surveyId, surveyRecordId, qi, pb); } public SurveyAnswer getSurveyAnswer(Long voterId) { Voter voter = voterDao.getVoterById(voterId); if (voter != null) { SurveyAnswer surveyAnswer = new SurveyAnswer(); List answers = voterDao.getAnswerByVoter(voterId, null, null); Map answerMap = new HashMap(); Map answerItemMap = new HashMap(); if (answers != null) { answerMap = new HashMap(); for (int i = 0; i < answers.size(); i++) { Answer answer = (Answer) answers.get(i); answerMap.put(answer.getQuestionId(), answer); } } List answerItems = voterDao.getAnswerItemByVoter(voterId, null, null); if (answerItems != null) { for (int i = 0; i < answerItems.size(); i++) { AnswerItem answerItem = (AnswerItem) answerItems.get(i); answerItemMap.put(answerItem.getQuestionItemId(), answerItem); } } // surveyAnswer.setAnswerDate(voter.getVoterPollDate()); surveyAnswer.setSurveyId(voter.getVoterSurveyId()); surveyAnswer.setVoter(voter); surveyAnswer.setQuestionAnswers(answerMap); surveyAnswer.setQuestionItemAnswers(answerItemMap); return surveyAnswer; } return null; } /** * */ public List getAnswer(Long surveyId, Long surveyRecordId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswer(surveyId, surveyRecordId, qi, pb); } /** * */ public List getAnswerByQuestion(Long questionId, Long surveyRecordId, QueryInfo qi, PageBuilder pb) { return voterDao.getAnswerByQuestion(questionId, surveyRecordId, qi, pb); } }
apache-2.0
trak17/Trak
src/main/java/pl/com/turski/service/shipment/ShipmentStatusServiceImpl.java
2100
package pl.com.turski.service.shipment; import com.google.appengine.repackaged.com.google.common.collect.Lists; import org.hibernate.validator.constraints.NotBlank; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import pl.com.turski.exception.BusinessErrorCode; import pl.com.turski.exception.BusinessException; import pl.com.turski.exception.TechnicalException; import pl.com.turski.model.domain.shipment.ShipmentStatus; import pl.com.turski.repository.shipment.ShipmentStatusRepository; import javax.validation.constraints.NotNull; import java.util.List; /** * User: Adam */ @Service @Transactional public class ShipmentStatusServiceImpl implements ShipmentStatusService { private static final Logger LOG = LoggerFactory.getLogger(ShipmentStatusServiceImpl.class); @Autowired private ShipmentStatusRepository shipmentStatusRepository; @Override public void create(@NotBlank String name, String description) throws TechnicalException, BusinessException { LOG.info("Creating ShipmentStatus[name={}, description={}]", name, description); ShipmentStatus shipmentStatus = new ShipmentStatus(name, description); shipmentStatusRepository.save(shipmentStatus); } @Override public ShipmentStatus get(@NotNull Long id) throws TechnicalException, BusinessException { LOG.info("Getting ShipmentStatus[id={}]", id); ShipmentStatus shipmentStatus = shipmentStatusRepository.findOne(id); if (shipmentStatus == null) { throw new BusinessException(String.format("ShipmentStatus[id=%d] not found)", id), BusinessErrorCode.SHIPMENT_STATUS_NOT_FOUND); } return shipmentStatus; } @Override public List<ShipmentStatus> getAll() throws TechnicalException, BusinessException { LOG.info("Getting all shipment statuses"); return Lists.newArrayList(shipmentStatusRepository.findAll()); } }
apache-2.0
icecp/icecp
icecp-node/src/test/java/com/intel/icecp/core/mock/MockChannelsTest.java
929
package com.intel.icecp.core.mock;

import com.intel.icecp.core.Channel;
import com.intel.icecp.core.metadata.Persistence;
import com.intel.icecp.node.messages.UndefinedMessage;
import org.junit.Test;

import java.net.URI;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertTrue;

/**
 * Verifies that a channel opened through {@link MockChannels} delivers a
 * published {@link UndefinedMessage} to a subscriber.
 */
public class MockChannelsTest {

    @Test
    public void mockChannelsHandleUndefinedMessageTest() throws Exception {
        // Open a channel for undefined messages through the mock factory.
        MockChannels factory = new MockChannels();
        URI uri = new URI("foo:/bar");
        Channel<UndefinedMessage> channel = factory.openChannel(uri, UndefinedMessage.class, Persistence.DEFAULT);

        // The latch counts down once the subscriber observes a message.
        CountDownLatch received = new CountDownLatch(1);
        channel.subscribe(message -> received.countDown());

        channel.publish(new UndefinedMessage());

        boolean delivered = received.await(1000, TimeUnit.MILLISECONDS);
        assertTrue("message not received before timeout", delivered);
    }
}
apache-2.0
Madhuka/incubator-zeppelin
zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java
54280
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.socket; import com.google.common.base.Strings; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.reflect.TypeToken; import org.apache.commons.lang.StringUtils; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.display.AngularObject; import org.apache.zeppelin.display.AngularObjectRegistry; import org.apache.zeppelin.display.AngularObjectRegistryListener; import org.apache.zeppelin.helium.ApplicationEventListener; import org.apache.zeppelin.helium.HeliumPackage; import org.apache.zeppelin.interpreter.InterpreterGroup; import org.apache.zeppelin.interpreter.InterpreterOutput; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.notebook.*; import org.apache.zeppelin.notebook.repo.NotebookRepo; import 
org.apache.zeppelin.notebook.repo.NotebookRepo.Revision; import org.apache.zeppelin.notebook.socket.Message; import org.apache.zeppelin.notebook.socket.Message.OP; import org.apache.zeppelin.scheduler.Job; import org.apache.zeppelin.scheduler.Job.Status; import org.apache.zeppelin.server.ZeppelinServer; import org.apache.zeppelin.ticket.TicketContainer; import org.apache.zeppelin.types.InterpreterSettingsList; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.utils.InterpreterBindingUtils; import org.apache.zeppelin.utils.SecurityUtils; import org.eclipse.jetty.websocket.servlet.WebSocketServlet; import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory; import org.quartz.SchedulerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.net.URISyntaxException; import java.net.UnknownHostException; import java.util.*; import java.util.concurrent.ConcurrentLinkedQueue; /** * Zeppelin websocket service. 
*/
public class NotebookServer extends WebSocketServlet implements
    NotebookSocketListener, JobListenerFactory, AngularObjectRegistryListener,
    RemoteInterpreterProcessListener, ApplicationEventListener {

  /**
   * Job manager service type.
   */
  protected enum JOB_MANAGER_SERVICE {
    JOB_MANAGER_PAGE("JOB_MANAGER_PAGE");
    // Key string exchanged with the web client to identify this service.
    private String serviceTypeKey;
    JOB_MANAGER_SERVICE(String serviceType) {
      this.serviceTypeKey = serviceType;
    }
    String getKey() {
      return this.serviceTypeKey;
    }
  }

  private static final Logger LOG = LoggerFactory.getLogger(NotebookServer.class);

  // (De)serializer for websocket messages; date format must stay in sync with the web client.
  Gson gson = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create();

  // noteId -> sockets currently subscribed to that note.
  // Every access to this map is guarded by synchronized (noteSocketMap).
  final Map<String, List<NotebookSocket>> noteSocketMap = new HashMap<>();

  // All currently open websocket connections, whether subscribed to a note or not.
  final Queue<NotebookSocket> connectedSockets = new ConcurrentLinkedQueue<>();

  // Shortcut to the process-wide Notebook instance.
  private Notebook notebook() {
    return ZeppelinServer.notebook;
  }

  @Override
  public void configure(WebSocketServletFactory factory) {
    factory.setCreator(new NotebookWebSocketCreator(this));
  }

  /**
   * Validates the Origin header of an incoming websocket handshake.
   * Any exception during validation is logged and treated as an invalid origin.
   */
  public boolean checkOrigin(HttpServletRequest request, String origin) {
    try {
      return SecurityUtils.isValidOrigin(origin, ZeppelinConfiguration.create());
    } catch (UnknownHostException e) {
      LOG.error(e.toString(), e);
    } catch (URISyntaxException e) {
      LOG.error(e.toString(), e);
    }
    return false;
  }

  public NotebookSocket doWebSocketConnect(HttpServletRequest req, String protocol) {
    return new NotebookSocket(req, protocol, this);
  }

  @Override
  public void onOpen(NotebookSocket conn) {
    LOG.info("New connection from {} : {}", conn.getRequest().getRemoteAddr(),
        conn.getRequest().getRemotePort());
    connectedSockets.add(conn);
  }

  /**
   * Entry point for every message received from a client. Verifies the ticket
   * issued for the principal, optionally rejects anonymous access, builds the
   * user-and-roles set, then dispatches on the message op. All exceptions are
   * caught and logged so a single bad message cannot break the listener.
   */
  @Override
  public void onMessage(NotebookSocket conn, String msg) {
    Notebook notebook = notebook();
    try {
      Message messagereceived = deserializeMessage(msg);
      LOG.debug("RECEIVE << " + messagereceived.op);
      LOG.debug("RECEIVE PRINCIPAL << " + messagereceived.principal);
      LOG.debug("RECEIVE TICKET << " + messagereceived.ticket);
      LOG.debug("RECEIVE ROLES << " + messagereceived.roles);
      if (LOG.isTraceEnabled()) {
        LOG.trace("RECEIVE MSG = " + messagereceived);
      }
      // Silently drop the message when the ticket does not match the one
      // issued for this principal.
      String ticket = TicketContainer.instance.getTicket(messagereceived.principal);
      if (ticket != null && !ticket.equals(messagereceived.ticket)){
        /* not to pollute logs, log instead of exception */
        if (StringUtils.isEmpty(messagereceived.ticket)) {
          LOG.debug("{} message: invalid ticket {} != {}", messagereceived.op,
              messagereceived.ticket, ticket);
        } else {
          LOG.warn("{} message: invalid ticket {} != {}", messagereceived.op,
              messagereceived.ticket, ticket);
        }
        return;
      }
      ZeppelinConfiguration conf = ZeppelinConfiguration.create();
      boolean allowAnonymous = conf.
          getBoolean(ZeppelinConfiguration.ConfVars.ZEPPELIN_ANONYMOUS_ALLOWED);
      if (!allowAnonymous && messagereceived.principal.equals("anonymous")) {
        throw new Exception("Anonymous access not allowed ");
      }
      // The principal itself is always part of the user/role set; additional
      // roles arrive as a JSON-encoded set of strings.
      HashSet<String> userAndRoles = new HashSet<String>();
      userAndRoles.add(messagereceived.principal);
      if (!messagereceived.roles.equals("")) {
        HashSet<String> roles = gson.fromJson(messagereceived.roles,
            new TypeToken<HashSet<String>>(){}.getType());
        if (roles != null) {
          userAndRoles.addAll(roles);
        }
      }
      AuthenticationInfo subject = new AuthenticationInfo(messagereceived.principal);
      /** Let's be elegant here */
      switch (messagereceived.op) {
          case LIST_NOTES:
            unicastNoteList(conn, subject);
            break;
          case RELOAD_NOTES_FROM_REPO:
            broadcastReloadedNoteList(subject);
            break;
          case GET_HOME_NOTE:
            sendHomeNote(conn, userAndRoles, notebook, messagereceived);
            break;
          case GET_NOTE:
            sendNote(conn, userAndRoles, notebook, messagereceived);
            break;
          case NEW_NOTE:
            createNote(conn, userAndRoles, notebook, messagereceived);
            break;
          case DEL_NOTE:
            removeNote(conn, userAndRoles, notebook, messagereceived);
            break;
          case CLONE_NOTE:
            cloneNote(conn, userAndRoles, notebook, messagereceived);
            break;
          case IMPORT_NOTE:
            importNote(conn, userAndRoles, notebook, messagereceived);
            break;
          case COMMIT_PARAGRAPH:
            updateParagraph(conn, userAndRoles, notebook, messagereceived);
            break;
          case RUN_PARAGRAPH:
            runParagraph(conn, userAndRoles, notebook, messagereceived);
            break;
          case CANCEL_PARAGRAPH:
            cancelParagraph(conn, userAndRoles, notebook, messagereceived);
            break;
          case MOVE_PARAGRAPH:
            moveParagraph(conn, userAndRoles, notebook, messagereceived);
            break;
          case INSERT_PARAGRAPH:
            insertParagraph(conn, userAndRoles, notebook, messagereceived);
            break;
          case PARAGRAPH_REMOVE:
            removeParagraph(conn, userAndRoles, notebook, messagereceived);
            break;
          case PARAGRAPH_CLEAR_OUTPUT:
            clearParagraphOutput(conn, userAndRoles, notebook, messagereceived);
            break;
          case NOTE_UPDATE:
            updateNote(conn, userAndRoles, notebook, messagereceived);
            break;
          case COMPLETION:
            completion(conn, userAndRoles, notebook, messagereceived);
            break;
          case PING:
            break; //do nothing
          case ANGULAR_OBJECT_UPDATED:
            angularObjectUpdated(conn, userAndRoles, notebook, messagereceived);
            break;
          case ANGULAR_OBJECT_CLIENT_BIND:
            angularObjectClientBind(conn, userAndRoles, notebook, messagereceived);
            break;
          case ANGULAR_OBJECT_CLIENT_UNBIND:
            angularObjectClientUnbind(conn, userAndRoles, notebook, messagereceived);
            break;
          case LIST_CONFIGURATIONS:
            sendAllConfigurations(conn, userAndRoles, notebook);
            break;
          case CHECKPOINT_NOTEBOOK:
            checkpointNotebook(conn, notebook, messagereceived);
            break;
          case LIST_REVISION_HISTORY:
            listRevisionHistory(conn, notebook, messagereceived);
            break;
          case NOTE_REVISION:
            getNoteRevision(conn, notebook, messagereceived);
            break;
          case LIST_NOTEBOOK_JOBS:
            unicastNotebookJobInfo(conn, messagereceived);
            break;
          case LIST_UPDATE_NOTEBOOK_JOBS:
            unicastUpdateNotebookJobInfo(conn, messagereceived);
            break;
          case GET_INTERPRETER_BINDINGS:
            getInterpreterBindings(conn, messagereceived);
            break;
          case SAVE_INTERPRETER_BINDINGS:
            saveInterpreterBindings(conn, messagereceived);
            break;
          default:
            break;
      }
    } catch (Exception e) {
      LOG.error("Can't handle message", e);
    }
  }

  @Override
  public void onClose(NotebookSocket conn, int code, String reason) {
    LOG.info("Closed connection to {} : {}. ({}) {}", conn.getRequest()
        .getRemoteAddr(), conn.getRequest().getRemotePort(), code, reason);
    removeConnectionFromAllNote(conn);
    connectedSockets.remove(conn);
  }

  protected Message deserializeMessage(String msg) {
    return gson.fromJson(msg, Message.class);
  }

  protected String serializeMessage(Message m) {
    return gson.toJson(m);
  }

  // Subscribes a socket to a note; a socket may watch at most one note at a time.
  private void addConnectionToNote(String noteId, NotebookSocket socket) {
    synchronized (noteSocketMap) {
      removeConnectionFromAllNote(socket); // make sure a socket relates only a
                                           // single note.
      List<NotebookSocket> socketList = noteSocketMap.get(noteId);
      if (socketList == null) {
        socketList = new LinkedList<>();
        noteSocketMap.put(noteId, socketList);
      }
      if (!socketList.contains(socket)) {
        socketList.add(socket);
      }
    }
  }

  private void removeConnectionFromNote(String noteId, NotebookSocket socket) {
    synchronized (noteSocketMap) {
      List<NotebookSocket> socketList = noteSocketMap.get(noteId);
      if (socketList != null) {
        socketList.remove(socket);
      }
    }
  }

  // Drops the subscriber list for a removed note.
  // NOTE(review): the removed list is assigned but never used.
  private void removeNote(String noteId) {
    synchronized (noteSocketMap) {
      List<NotebookSocket> socketList = noteSocketMap.remove(noteId);
    }
  }

  private void removeConnectionFromAllNote(NotebookSocket socket) {
    synchronized (noteSocketMap) {
      Set<String> keys = noteSocketMap.keySet();
      for (String noteId : keys) {
        removeConnectionFromNote(noteId, socket);
      }
    }
  }

  // Returns the id of the note this socket is subscribed to, or null when none.
  private String getOpenNoteId(NotebookSocket socket) {
    String id = null;
    synchronized (noteSocketMap) {
      Set<String> keys = noteSocketMap.keySet();
      for (String noteId : keys) {
        List<NotebookSocket> sockets = noteSocketMap.get(noteId);
        if (sockets.contains(socket)) {
          id = noteId;
        }
      }
    }
    return id;
  }

  // Broadcasts a message to every note bound to the given interpreter group.
  private void broadcastToNoteBindedInterpreter(String interpreterGroupId,
      Message m) {
    Notebook notebook = notebook();
    List<Note> notes = notebook.getAllNotes();
    for (Note note : notes) {
      List<String> ids = notebook.getInterpreterFactory().getInterpreters(note.getId());
      for (String id : ids) {
        if (id.equals(interpreterGroupId)) {
          broadcast(note.id(), m);
        }
      }
    }
  }
/**
 * Sends a message to every socket currently subscribed to the given note.
 * Per-socket send failures are logged and do not stop the broadcast.
 */
private void broadcast(String noteId, Message m) {
  synchronized (noteSocketMap) {
    List<NotebookSocket> socketLists = noteSocketMap.get(noteId);
    if (socketLists == null || socketLists.size() == 0) {
      return;
    }
    LOG.debug("SEND >> " + m.op);
    for (NotebookSocket conn : socketLists) {
      try {
        conn.send(serializeMessage(m));
      } catch (IOException e) {
        LOG.error("socket error", e);
      }
    }
  }
}

/**
 * Same as {@link #broadcast} but skips the given socket (usually the sender).
 */
private void broadcastExcept(String noteId, Message m, NotebookSocket exclude) {
  synchronized (noteSocketMap) {
    List<NotebookSocket> socketLists = noteSocketMap.get(noteId);
    if (socketLists == null || socketLists.size() == 0) {
      return;
    }
    LOG.debug("SEND >> " + m.op);
    for (NotebookSocket conn : socketLists) {
      if (exclude.equals(conn)) {
        continue;
      }
      try {
        conn.send(serializeMessage(m));
      } catch (IOException e) {
        LOG.error("socket error", e);
      }
    }
  }
}

// Sends a message to every open connection, subscribed to a note or not.
private void broadcastAll(Message m) {
  for (NotebookSocket conn : connectedSockets) {
    try {
      conn.send(serializeMessage(m));
    } catch (IOException e) {
      LOG.error("socket error", e);
    }
  }
}

// Sends a message to a single connection.
private void unicast(Message m, NotebookSocket conn) {
  try {
    conn.send(serializeMessage(m));
  } catch (IOException e) {
    LOG.error("socket error", e);
  }
}

/**
 * Sends the full job list (for the job-manager page) to the requesting socket.
 */
public void unicastNotebookJobInfo(NotebookSocket conn, Message fromMessage) throws IOException {
  AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
  List<Map<String, Object>> notebookJobs = notebook().getJobListforNotebook(false, 0, subject);
  Map<String, Object> response = new HashMap<>();
  response.put("lastResponseUnixTime", System.currentTimeMillis());
  response.put("jobs", notebookJobs);
  conn.send(serializeMessage(new Message(OP.LIST_NOTEBOOK_JOBS)
      .put("notebookJobs", response)));
}

/**
 * Sends only jobs updated after the client-supplied unix timestamp.
 */
public void unicastUpdateNotebookJobInfo(NotebookSocket conn, Message fromMessage)
    throws IOException {
  // JSON numbers deserialize as double; convert back to a unix millis long.
  double lastUpdateUnixTimeRaw = (double) fromMessage.get("lastUpdateUnixTime");
  long lastUpdateUnixTime = new Double(lastUpdateUnixTimeRaw).longValue();
  List<Map<String, Object>> notebookJobs;
  AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
  notebookJobs = notebook().getJobListforNotebook(false, lastUpdateUnixTime, subject);
  Map<String, Object> response = new HashMap<>();
  response.put("lastResponseUnixTime", System.currentTimeMillis());
  response.put("jobs", notebookJobs);
  conn.send(serializeMessage(new Message(OP.LIST_UPDATE_NOTEBOOK_JOBS)
      .put("notebookRunningJobs", response)));
}

/**
 * Binds the client-selected interpreter settings to a note and broadcasts
 * the resulting bindings. Errors are logged, not propagated.
 */
public void saveInterpreterBindings(NotebookSocket conn, Message fromMessage) {
  String noteId = (String) fromMessage.data.get("noteID");
  try {
    List<String> settingIdList = gson.fromJson(String.valueOf(
        fromMessage.data.get("selectedSettingIds")),
        new TypeToken<ArrayList<String>>() {
        }.getType());
    notebook().bindInterpretersToNote(noteId, settingIdList);
    broadcastInterpreterBindings(noteId,
        InterpreterBindingUtils.getInterpreterBindings(notebook(), noteId));
  } catch (Exception e) {
    LOG.error("Error while saving interpreter bindings", e);
  }
}

/** Sends the current interpreter bindings of a note to the requesting socket. */
public void getInterpreterBindings(NotebookSocket conn, Message fromMessage)
    throws IOException {
  String noteID = (String) fromMessage.data.get("noteID");
  List<InterpreterSettingsList> settingList =
      InterpreterBindingUtils.getInterpreterBindings(notebook(), noteID);
  conn.send(serializeMessage(new Message(OP.INTERPRETER_BINDINGS)
      .put("interpreterBindings", settingList)));
}

/**
 * Builds the id/name listing sent to clients, optionally reloading all notes
 * from the repository first. The configured home-screen note can be hidden.
 */
public List<Map<String, String>> generateNotebooksInfo(boolean needsReload,
    AuthenticationInfo subject) {
  Notebook notebook = notebook();
  ZeppelinConfiguration conf = notebook.getConf();
  String homescreenNotebookId = conf.getString(ConfVars.ZEPPELIN_NOTEBOOK_HOMESCREEN);
  boolean hideHomeScreenNotebookFromList = conf
      .getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE);
  if (needsReload) {
    try {
      notebook.reloadAllNotes(subject);
    } catch (IOException e) {
      LOG.error("Fail to reload notes from repository", e);
    }
  }
  List<Note> notes = notebook.getAllNotes();
  List<Map<String, String>> notesInfo = new LinkedList<>();
  for (Note note : notes) {
    Map<String, String> info = new HashMap<>();
    if (hideHomeScreenNotebookFromList && note.id().equals(homescreenNotebookId)) {
      continue;
    }
    info.put("id", note.id());
    info.put("name", note.getName());
    notesInfo.add(info);
  }
  return notesInfo;
}

// Pushes the full note content to all its subscribers.
public void broadcastNote(Note note) {
  broadcast(note.id(), new Message(OP.NOTE).put("note", note));
}

public void broadcastInterpreterBindings(String noteId, List settingList) {
  broadcast(noteId, new Message(OP.INTERPRETER_BINDINGS)
      .put("interpreterBindings", settingList));
}

// Sends the (cached) note list to every connection.
public void broadcastNoteList(AuthenticationInfo subject) {
  List<Map<String, String>> notesInfo = generateNotebooksInfo(false, subject);
  broadcastAll(new Message(OP.NOTES_INFO).put("notes", notesInfo));
}

// Sends the (cached) note list to a single connection.
public void unicastNoteList(NotebookSocket conn, AuthenticationInfo subject) {
  List<Map<String, String>> notesInfo = generateNotebooksInfo(false, subject);
  unicast(new Message(OP.NOTES_INFO).put("notes", notesInfo), conn);
}

// Reloads notes from the repository, then broadcasts the fresh list.
public void broadcastReloadedNoteList(AuthenticationInfo subject) {
  List<Map<String, String>> notesInfo = generateNotebooksInfo(true, subject);
  broadcastAll(new Message(OP.NOTES_INFO).put("notes", notesInfo));
}

/**
 * Logs a denied operation and notifies the client which users/roles are allowed.
 */
void permissionError(NotebookSocket conn, String op, String userName,
    Set<String> userAndRoles, Set<String> allowed) throws IOException {
  LOG.info("Cannot {}. Connection readers {}. Allowed readers {}",
      op, userAndRoles, allowed);
  conn.send(serializeMessage(new Message(OP.AUTH_INFO).put("info",
      "Insufficient privileges to " + op + " notebook.\n\n"
          + "Allowed users or roles: " + allowed.toString() + "\n\n"
          + "But the user " + userName + " belongs to: " + userAndRoles.toString())));
}

/**
 * Sends a note to the client after a read-permission check, subscribes the
 * socket to it, and pushes the note's Angular objects. Sends a null note
 * when the id is unknown.
 */
private void sendNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  LOG.info("New operation from {} : {} : {} : {} : {}",
      conn.getRequest().getRemoteAddr(), conn.getRequest().getRemotePort(),
      fromMessage.principal, fromMessage.op, fromMessage.get("id"));
  String noteId = (String) fromMessage.get("id");
  if (noteId == null) {
    return;
  }
  Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (note != null) {
    if (!notebookAuthorization.isReader(noteId, userAndRoles)) {
      permissionError(conn, "read", fromMessage.principal, userAndRoles,
          notebookAuthorization.getReaders(noteId));
      return;
    }
    addConnectionToNote(note.id(), conn);
    conn.send(serializeMessage(new Message(OP.NOTE).put("note", note)));
    sendAllAngularObjects(note, conn);
  } else {
    conn.send(serializeMessage(new Message(OP.NOTE).put("note", null)));
  }
}

/**
 * Sends the configured home-screen note (if any) to the client, with the
 * same permission check and subscription behavior as {@link #sendNote}.
 */
private void sendHomeNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  String noteId = notebook.getConf().getString(ConfVars.ZEPPELIN_NOTEBOOK_HOMESCREEN);
  Note note = null;
  if (noteId != null) {
    note = notebook.getNote(noteId);
  }
  if (note != null) {
    NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
    if (!notebookAuthorization.isReader(noteId, userAndRoles)) {
      permissionError(conn, "read", fromMessage.principal, userAndRoles,
          notebookAuthorization.getReaders(noteId));
      return;
    }
    addConnectionToNote(note.id(), conn);
    conn.send(serializeMessage(new Message(OP.NOTE).put("note", note)));
    sendAllAngularObjects(note, conn);
  } else {
    removeConnectionFromAllNote(conn);
    conn.send(serializeMessage(new Message(OP.NOTE).put("note", null)));
  }
}

/**
 * Renames/reconfigures a note (writer permission required), persists it,
 * refreshes its cron trigger when the cron setting changed, and broadcasts
 * the updated note and note list.
 */
private void updateNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws SchedulerException, IOException {
  String noteId = (String) fromMessage.get("id");
  String name = (String) fromMessage.get("name");
  Map<String, Object> config = (Map<String, Object>) fromMessage
      .get("config");
  if (noteId == null) {
    return;
  }
  if (config == null) {
    return;
  }
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "update", fromMessage.principal, userAndRoles,
        notebookAuthorization.getWriters(noteId));
    return;
  }
  Note note = notebook.getNote(noteId);
  if (note != null) {
    // Compare cron settings before overwriting the note's config.
    boolean cronUpdated = isCronUpdated(config, note.getConfig());
    note.setName(name);
    note.setConfig(config);
    if (cronUpdated) {
      notebook.refreshCron(note.id());
    }
    AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
    note.persist(subject);
    broadcastNote(note);
    broadcastNoteList(subject);
  }
}

/**
 * Returns true when the "cron" entry differs between the two config maps.
 * NOTE(review): when both crons are non-null and equal this returns true
 * (first branch), which forces a cron refresh even on no-op updates — confirm
 * whether that is intentional.
 */
private boolean isCronUpdated(Map<String, Object> configA,
    Map<String, Object> configB) {
  boolean cronUpdated = false;
  if (configA.get("cron") != null && configB.get("cron") != null
      && configA.get("cron").equals(configB.get("cron"))) {
    cronUpdated = true;
  } else if (configA.get("cron") == null && configB.get("cron") == null) {
    cronUpdated = false;
  } else if (configA.get("cron") != null || configB.get("cron") != null) {
    cronUpdated = true;
  }
  return cronUpdated;
}

/**
 * Creates a new (optionally named) note with one empty paragraph, persists
 * it, subscribes the creator's socket, and broadcasts the new note list.
 */
private void createNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message message) throws IOException {
  AuthenticationInfo subject = new AuthenticationInfo(message.principal);
  Note note = notebook.createNote(subject);
  note.addParagraph(); // it's an empty note. so add one paragraph
  if (message != null) {
    String noteName = (String) message.get("name");
    if (noteName == null || noteName.isEmpty()){
      noteName = "Note " + note.getId();
    }
    note.setName(noteName);
  }
  note.persist(subject);
  addConnectionToNote(note.id(), (NotebookSocket) conn);
  conn.send(serializeMessage(new Message(OP.NEW_NOTE).put("note", note)));
  broadcastNoteList(subject);
}

/**
 * Deletes a note (owner permission required), drops its subscriber list,
 * and broadcasts the updated note list.
 */
private void removeNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  String noteId = (String) fromMessage.get("id");
  if (noteId == null) {
    return;
  }
  Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isOwner(noteId, userAndRoles)) {
    permissionError(conn, "remove", fromMessage.principal, userAndRoles,
        notebookAuthorization.getOwners(noteId));
    return;
  }
  AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
  notebook.removeNote(noteId, subject);
  removeNote(noteId);
  broadcastNoteList(subject);
}

/**
 * Commits a paragraph edit (text, title, params, config) to the open note
 * (writer permission required), persists, and broadcasts the paragraph.
 */
private void updateParagraph(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  Map<String, Object> params = (Map<String, Object>) fromMessage
      .get("params");
  Map<String, Object> config = (Map<String, Object>) fromMessage
      .get("config");
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", fromMessage.principal, userAndRoles,
        notebookAuthorization.getWriters(noteId));
    return;
  }
  Paragraph p = note.getParagraph(paragraphId);
  p.settings.setParams(params);
  p.setConfig(config);
  p.setTitle((String) fromMessage.get("title"));
  p.setText((String) fromMessage.get("paragraph"));
  note.persist(subject);
  broadcast(note.id(), new Message(OP.PARAGRAPH).put("paragraph", p));
}

/**
 * Clones the currently open note under a new name, subscribes the socket to
 * the clone, and broadcasts the updated note list.
 */
private void cloneNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage)
    throws IOException, CloneNotSupportedException {
  String noteId = getOpenNoteId(conn);
  String name = (String) fromMessage.get("name");
  Note newNote = notebook.cloneNote(noteId, name,
      new AuthenticationInfo(fromMessage.principal));
  AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
  addConnectionToNote(newNote.id(), (NotebookSocket) conn);
  conn.send(serializeMessage(new Message(OP.NEW_NOTE).put("note", newNote)));
  broadcastNoteList(subject);
}

/**
 * Imports a note from the JSON payload carried in the message; returns the
 * created note, or null when the message is null.
 */
protected Note importNote(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  Note note = null;
  if (fromMessage != null) {
    String noteName = (String) ((Map) fromMessage.get("notebook")).get("name");
    String noteJson = gson.toJson(fromMessage.get("notebook"));
    AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
    note = notebook.importNote(noteJson, noteName, subject);
    note.persist(subject);
    broadcastNote(note);
    broadcastNoteList(subject);
  }
  return note;
}

/**
 * Removes a paragraph from the open note (writer permission required),
 * persists, and broadcasts. The last remaining paragraph is never removed.
 * NOTE(review): unlike updateParagraph, the persisted subject comes from
 * SecurityUtils.getPrincipal() rather than fromMessage.principal — confirm
 * this asymmetry is intended.
 */
private void removeParagraph(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  final String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", fromMessage.principal, userAndRoles,
        notebookAuthorization.getWriters(noteId));
    return;
  }
  /** We don't want to remove the last paragraph */
  if (!note.isLastParagraph(paragraphId)) {
    note.removeParagraph(paragraphId);
    note.persist(subject);
    broadcastNote(note);
  }
}

/**
 * Clears a paragraph's output in the open note (writer permission required)
 * and broadcasts the note. Not persisted here.
 */
private void clearParagraphOutput(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  final String paragraphId = (String) fromMessage.get("id");
  if (paragraphId == null) {
    return;
  }
  String noteId = getOpenNoteId(conn);
  final Note note = notebook.getNote(noteId);
  NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
  if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
    permissionError(conn, "write", fromMessage.principal, userAndRoles,
        notebookAuthorization.getWriters(noteId));
    return;
  }
  note.clearParagraphOutput(paragraphId);
  broadcastNote(note);
}

/**
 * Computes code-completion candidates for a paragraph buffer/cursor and
 * sends them back. Sends an empty reply when no paragraph id is given.
 */
private void completion(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws IOException {
  String paragraphId = (String) fromMessage.get("id");
  String buffer = (String) fromMessage.get("buf");
  // Cursor position arrives as a JSON number (double).
  int cursor = (int) Double.parseDouble(fromMessage.get("cursor").toString());
  Message resp = new Message(OP.COMPLETION_LIST).put("id", paragraphId);
  if (paragraphId == null) {
    conn.send(serializeMessage(resp));
    return;
  }
  final Note note = notebook.getNote(getOpenNoteId(conn));
  List<InterpreterCompletion> candidates = note.completion(paragraphId, buffer, cursor);
  resp.put("completions", candidates);
  conn.send(serializeMessage(resp));
}

/**
 * When an angular object is updated from the client. Looks the object up in
 * paragraph, then note, then global scope of the bound interpreter's
 * registry, applies the new value, and re-broadcasts to other sessions.
 *
 * @param conn the web socket.
 * @param notebook the notebook.
 * @param fromMessage the message.
 */
private void angularObjectUpdated(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) {
  String noteId = (String) fromMessage.get("noteId");
  String paragraphId = (String) fromMessage.get("paragraphId");
  String interpreterGroupId = (String) fromMessage.get("interpreterGroupId");
  String varName = (String) fromMessage.get("name");
  Object varValue = fromMessage.get("value");
  AngularObject ao = null;
  boolean global = false;
  // propagate change to (Remote) AngularObjectRegistry
  Note note = notebook.getNote(noteId);
  if (note != null) {
    List<InterpreterSetting> settings = notebook.getInterpreterFactory()
        .getInterpreterSettings(note.getId());
    for (InterpreterSetting setting : settings) {
      if (setting.getInterpreterGroup(note.id()) == null) {
        continue;
      }
      if (interpreterGroupId.equals(setting.getInterpreterGroup(note.id()).getId())) {
        AngularObjectRegistry angularObjectRegistry = setting
            .getInterpreterGroup(note.id()).getAngularObjectRegistry();
        // first trying to get local registry
        ao = angularObjectRegistry.get(varName, noteId, paragraphId);
        if (ao == null) {
          // then try notebook scope registry
          ao = angularObjectRegistry.get(varName, noteId, null);
          if (ao == null) {
            // then try global scope registry
            ao = angularObjectRegistry.get(varName, null, null);
            if (ao == null) {
              LOG.warn("Object {} is not binded", varName);
            } else {
              // path from client -> server
              ao.set(varValue, false);
              global = true;
            }
          } else {
            // path from client -> server
            ao.set(varValue, false);
            global = false;
          }
        } else {
          ao.set(varValue, false);
          global = false;
        }
        break;
      }
    }
  }
  if (global) { // broadcast change to all web session that uses related
    // interpreter.
    for (Note n : notebook.getAllNotes()) {
      // NOTE(review): this looks up settings with note.getId() (the originating
      // note) instead of n.getId() — looks like a bug; confirm before changing.
      List<InterpreterSetting> settings = notebook.getInterpreterFactory()
          .getInterpreterSettings(note.getId());
      for (InterpreterSetting setting : settings) {
        if (setting.getInterpreterGroup(n.id()) == null) {
          continue;
        }
        if (interpreterGroupId.equals(setting.getInterpreterGroup(n.id()).getId())) {
          AngularObjectRegistry angularObjectRegistry = setting
              .getInterpreterGroup(n.id()).getAngularObjectRegistry();
          this.broadcastExcept(
              n.id(),
              new Message(OP.ANGULAR_OBJECT_UPDATE).put("angularObject", ao)
                  .put("interpreterGroupId", interpreterGroupId)
                  .put("noteId", n.id())
                  .put("paragraphId", ao.getParagraphId()),
              conn);
        }
      }
    }
  } else { // broadcast to all web session for the note
    this.broadcastExcept(
        note.id(),
        new Message(OP.ANGULAR_OBJECT_UPDATE).put("angularObject", ao)
            .put("interpreterGroupId", interpreterGroupId)
            .put("noteId", note.id())
            .put("paragraphId", ao.getParagraphId()),
        conn);
  }
}

/**
 * Push the given Angular variable to the target
 * interpreter angular registry given a noteId
 * and a paragraph id.
 * @param conn
 * @param notebook
 * @param fromMessage
 * @throws Exception
 */
protected void angularObjectClientBind(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws Exception {
  String noteId = fromMessage.getType("noteId");
  String varName = fromMessage.getType("name");
  Object varValue = fromMessage.get("value");
  String paragraphId = fromMessage.getType("paragraphId");
  Note note = notebook.getNote(noteId);
  if (paragraphId == null) {
    throw new IllegalArgumentException("target paragraph not specified for "
        + "angular value bind");
  }
  if (note != null) {
    final InterpreterGroup interpreterGroup =
        findInterpreterGroupForParagraph(note, paragraphId);
    final AngularObjectRegistry registry = interpreterGroup.getAngularObjectRegistry();
    if (registry instanceof RemoteAngularObjectRegistry) {
      // Remote interpreter: notify the remote process as well.
      RemoteAngularObjectRegistry remoteRegistry = (RemoteAngularObjectRegistry) registry;
      pushAngularObjectToRemoteRegistry(noteId, paragraphId, varName, varValue,
          remoteRegistry, interpreterGroup.getId(), conn);
    } else {
      pushAngularObjectToLocalRepo(noteId, paragraphId, varName, varValue,
          registry, interpreterGroup.getId(), conn);
    }
  }
}

/**
 * Remove the given Angular variable to the target
 * interpreter(s) angular registry given a noteId
 * and an optional list of paragraph id(s).
 * @param conn
 * @param notebook
 * @param fromMessage
 * @throws Exception
 */
protected void angularObjectClientUnbind(NotebookSocket conn, HashSet<String> userAndRoles,
    Notebook notebook, Message fromMessage) throws Exception{
  String noteId = fromMessage.getType("noteId");
  String varName = fromMessage.getType("name");
  String paragraphId = fromMessage.getType("paragraphId");
  Note note = notebook.getNote(noteId);
  if (paragraphId == null) {
    throw new IllegalArgumentException("target paragraph not specified for "
        + "angular value unBind");
  }
  if (note != null) {
    final InterpreterGroup interpreterGroup =
        findInterpreterGroupForParagraph(note, paragraphId);
    final AngularObjectRegistry registry = interpreterGroup.getAngularObjectRegistry();
    if (registry instanceof RemoteAngularObjectRegistry) {
      RemoteAngularObjectRegistry remoteRegistry = (RemoteAngularObjectRegistry) registry;
      removeAngularFromRemoteRegistry(noteId, paragraphId, varName,
          remoteRegistry, interpreterGroup.getId(), conn);
    } else {
      removeAngularObjectFromLocalRepo(noteId, paragraphId, varName, registry,
          interpreterGroup.getId(), conn);
    }
  }
}

// Resolves the interpreter group bound to a paragraph; throws when the
// paragraph id is unknown.
private InterpreterGroup findInterpreterGroupForParagraph(Note note, String paragraphId)
    throws Exception {
  final Paragraph paragraph = note.getParagraph(paragraphId);
  if (paragraph == null) {
    throw new IllegalArgumentException("Unknown paragraph with id : " + paragraphId);
  }
  return paragraph.getCurrentRepl().getInterpreterGroup();
}

// Adds/updates the variable in the remote registry (notifying the remote
// interpreter process) and broadcasts the update to other sessions.
private void pushAngularObjectToRemoteRegistry(String noteId, String paragraphId,
    String varName, Object varValue, RemoteAngularObjectRegistry remoteRegistry,
    String interpreterGroupId, NotebookSocket conn) {
  final AngularObject ao = remoteRegistry.addAndNotifyRemoteProcess(varName, varValue,
      noteId, paragraphId);
  this.broadcastExcept(
      noteId,
      new Message(OP.ANGULAR_OBJECT_UPDATE).put("angularObject", ao)
          .put("interpreterGroupId", interpreterGroupId)
          .put("noteId", noteId)
          .put("paragraphId", paragraphId),
      conn);
}

// Removes the variable from the remote registry (notifying the remote
// interpreter process) and broadcasts the removal to other sessions.
private void removeAngularFromRemoteRegistry(String noteId, String paragraphId,
    String varName, RemoteAngularObjectRegistry remoteRegistry,
    String interpreterGroupId, NotebookSocket conn) {
  final AngularObject ao = remoteRegistry.removeAndNotifyRemoteProcess(varName, noteId,
      paragraphId);
  this.broadcastExcept(
      noteId,
      new Message(OP.ANGULAR_OBJECT_REMOVE).put("angularObject", ao)
          .put("interpreterGroupId", interpreterGroupId)
          .put("noteId", noteId)
          .put("paragraphId", paragraphId),
      conn);
}

// Adds/updates the variable in a local registry and broadcasts the update.
private void pushAngularObjectToLocalRepo(String noteId, String paragraphId,
    String varName, Object varValue, AngularObjectRegistry registry,
    String interpreterGroupId, NotebookSocket conn) {
  AngularObject angularObject = registry.get(varName, noteId, paragraphId);
  if (angularObject == null) {
    angularObject = registry.add(varName, varValue, noteId, paragraphId);
  } else {
    angularObject.set(varValue, true);
  }
  this.broadcastExcept(
      noteId,
      new Message(OP.ANGULAR_OBJECT_UPDATE).put("angularObject", angularObject)
          .put("interpreterGroupId", interpreterGroupId)
          .put("noteId", noteId)
          .put("paragraphId", paragraphId),
      conn);
}

// Removes the variable from a local registry; broadcasts only when something
// was actually removed.
private void removeAngularObjectFromLocalRepo(String noteId, String paragraphId,
    String varName, AngularObjectRegistry registry, String interpreterGroupId,
    NotebookSocket conn) {
  final AngularObject removed = registry.remove(varName, noteId, paragraphId);
  if (removed != null) {
    this.broadcastExcept(
        noteId,
        new Message(OP.ANGULAR_OBJECT_REMOVE).put("angularObject", removed)
            .put("interpreterGroupId", interpreterGroupId)
            .put("noteId", noteId)
            .put("paragraphId", paragraphId),
        conn);
  }
}
  /**
   * Moves a paragraph to a new index within the currently open note.
   * Requires write permission; persists the note and re-broadcasts it on success.
   */
  private void moveParagraph(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
      Message fromMessage) throws IOException {
    final String paragraphId = (String) fromMessage.get("id");
    if (paragraphId == null) {
      return;
    }
    // Index arrives as a JSON number (deserialized as Double), hence the double parse.
    final int newIndex = (int) Double.parseDouble(fromMessage.get("index")
        .toString());
    String noteId = getOpenNoteId(conn);
    final Note note = notebook.getNote(noteId);
    NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
      permissionError(conn, "write", fromMessage.principal, userAndRoles,
          notebookAuthorization.getWriters(noteId));
      return;
    }
    note.moveParagraph(paragraphId, newIndex);
    note.persist(subject);
    broadcastNote(note);
  }

  /**
   * Inserts a new empty paragraph at the given index in the currently open note.
   * Requires write permission; persists the note and re-broadcasts it.
   */
  private void insertParagraph(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
      Message fromMessage) throws IOException {
    final int index = (int) Double.parseDouble(fromMessage.get("index")
        .toString());
    String noteId = getOpenNoteId(conn);
    final Note note = notebook.getNote(noteId);
    NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
      permissionError(conn, "write", fromMessage.principal, userAndRoles,
          notebookAuthorization.getWriters(noteId));
      return;
    }
    note.insertParagraph(index);
    note.persist(subject);
    broadcastNote(note);
  }

  /**
   * Aborts a running paragraph job. Requires write permission.
   */
  private void cancelParagraph(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
      Message fromMessage) throws IOException {
    final String paragraphId = (String) fromMessage.get("id");
    if (paragraphId == null) {
      return;
    }
    String noteId = getOpenNoteId(conn);
    final Note note = notebook.getNote(noteId);
    NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
    if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
      permissionError(conn, "write", fromMessage.principal, userAndRoles,
          notebookAuthorization.getWriters(noteId));
      return;
    }
    // NOTE(review): getParagraph may return null for an unknown id; p.abort() would then NPE — confirm.
    Paragraph p = note.getParagraph(paragraphId);
    p.abort();
  }

  /**
   * Updates a paragraph's text/title/params/config from the client message, persists the
   * note, and runs the paragraph. On failure the error is pushed back to the paragraph
   * and broadcast to all watchers of the note.
   */
  private void runParagraph(NotebookSocket conn, HashSet<String> userAndRoles, Notebook notebook,
      Message fromMessage) throws IOException {
    final String paragraphId = (String) fromMessage.get("id");
    if (paragraphId == null) {
      return;
    }
    String noteId = getOpenNoteId(conn);
    final Note note = notebook.getNote(noteId);
    NotebookAuthorization notebookAuthorization = notebook.getNotebookAuthorization();
    if (!notebookAuthorization.isWriter(noteId, userAndRoles)) {
      permissionError(conn, "write", fromMessage.principal, userAndRoles,
          notebookAuthorization.getWriters(noteId));
      return;
    }
    Paragraph p = note.getParagraph(paragraphId);
    String text = (String) fromMessage.get("paragraph");
    p.setText(text);
    p.setTitle((String) fromMessage.get("title"));
    // Anonymous principals get an empty AuthenticationInfo; authenticated ones carry their ticket.
    if (!fromMessage.principal.equals("anonymous")) {
      AuthenticationInfo authenticationInfo = new AuthenticationInfo(fromMessage.principal,
          fromMessage.ticket);
      p.setAuthenticationInfo(authenticationInfo);
    } else {
      p.setAuthenticationInfo(new AuthenticationInfo());
    }
    Map<String, Object> params = (Map<String, Object>) fromMessage
        .get("params");
    p.settings.setParams(params);
    Map<String, Object> config = (Map<String, Object>) fromMessage
        .get("config");
    p.setConfig(config);
    // if it's the last paragraph, let's add a new one
    boolean isTheLastParagraph = note.getLastParagraph().getId()
        .equals(p.getId());
    note.setLastReplName(paragraphId);
    // Only append a fresh trailing paragraph when the last one actually has content
    // beyond the bare interpreter directive.
    if (!(text.equals(note.getLastInterpreterName() + " ") || Strings.isNullOrEmpty(text))
        && isTheLastParagraph) {
      note.addParagraph();
    }
    AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
    note.persist(subject);
    try {
      note.run(paragraphId);
    } catch (Exception ex) {
      LOG.error("Exception from run", ex);
      // NOTE(review): p was already dereferenced above, so this null check is dead code.
      if (p != null) {
        p.setReturn(
            new InterpreterResult(InterpreterResult.Code.ERROR, ex.getMessage()), ex);
        p.setStatus(Status.ERROR);
        broadcast(note.id(), new Message(OP.PARAGRAPH).put("paragraph", p));
      }
    }
  }

  /**
   * Sends the server configuration to the client, filtering out any key containing
   * "password" and the Azure notebook-storage connection string.
   */
  private void sendAllConfigurations(NotebookSocket conn, HashSet<String> userAndRoles,
      Notebook notebook) throws IOException {
    ZeppelinConfiguration conf = notebook.getConf();
    Map<String, String> configurations = conf.dumpConfigurations(conf,
        new ZeppelinConfiguration.ConfigurationKeyPredicate() {
          @Override
          public boolean apply(String key) {
            return !key.contains("password")
                && !key.equals(ZeppelinConfiguration
                    .ConfVars
                    .ZEPPELIN_NOTEBOOK_AZURE_CONNECTION_STRING
                    .getVarName());
          }
        });
    conn.send(serializeMessage(new Message(OP.CONFIGURATIONS_INFO)
        .put("configurations", configurations)));
  }

  /**
   * Creates a revision checkpoint of a note and, if one was created, replies with the
   * updated revision history.
   */
  private void checkpointNotebook(NotebookSocket conn, Notebook notebook, Message fromMessage)
      throws IOException {
    String noteId = (String) fromMessage.get("noteId");
    String commitMessage = (String) fromMessage.get("commitMessage");
    AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
    Revision revision = notebook.checkpointNote(noteId, commitMessage, subject);
    if (revision != null) {
      List<NotebookRepo.Revision> revisions = notebook.listRevisionHistory(noteId, subject);
      conn.send(serializeMessage(new Message(OP.LIST_REVISION_HISTORY)
          .put("revisionList", revisions)));
    }
  }

  /** Replies to the requesting client with the full revision history of a note. */
  private void listRevisionHistory(NotebookSocket conn, Notebook notebook, Message fromMessage)
      throws IOException {
    String noteId = (String) fromMessage.get("noteId");
    AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
    List<NotebookRepo.Revision> revisions = notebook.listRevisionHistory(noteId, subject);
    conn.send(serializeMessage(new Message(OP.LIST_REVISION_HISTORY)
        .put("revisionList", revisions)));
  }

  /** Fetches a specific stored revision of a note and sends it back to the client. */
  private void getNoteRevision(NotebookSocket conn, Notebook notebook, Message fromMessage)
      throws IOException {
    String noteId = (String) fromMessage.get("noteId");
    Revision revision = (Revision) fromMessage.get("revision");
    AuthenticationInfo subject = new AuthenticationInfo(fromMessage.principal);
    Note revisionNote = notebook.getNoteRevision(noteId, revision, subject);
    conn.send(serializeMessage(new Message(OP.NOTE_REVISION)
        .put("noteId", noteId)
        .put("revisionId", revision)
        .put("data", revisionNote)));
  }

  /**
   * This callback is for the paragraph that runs on ZeppelinServer
   * @param noteId
   * @param paragraphId
   * @param output output to append
   */
  @Override
  public void onOutputAppend(String noteId, String paragraphId, String output) {
    Message msg = new Message(OP.PARAGRAPH_APPEND_OUTPUT)
        .put("noteId", noteId)
        .put("paragraphId", paragraphId)
        .put("data", output);
    broadcast(noteId, msg);
  }

  /**
   * This callback is for the paragraph that runs on ZeppelinServer
   * @param noteId
   * @param paragraphId
   * @param output output to update (replace)
   */
  @Override
  public void onOutputUpdated(String noteId, String paragraphId, String output) {
    Message msg = new Message(OP.PARAGRAPH_UPDATE_OUTPUT)
        .put("noteId", noteId)
        .put("paragraphId", paragraphId)
        .put("data", output);
    broadcast(noteId, msg);
  }

  /**
   * When application append output
   * @param noteId
   * @param paragraphId
   * @param appId
   * @param output
   */
  @Override
  public void onOutputAppend(String noteId, String paragraphId, String appId, String output) {
    Message msg = new Message(OP.APP_APPEND_OUTPUT)
        .put("noteId", noteId)
        .put("paragraphId", paragraphId)
        .put("appId", appId)
        .put("data", output);
    broadcast(noteId, msg);
  }

  /**
   * When application update output
   * @param noteId
   * @param paragraphId
   * @param appId
   * @param output
   */
  @Override
  public void onOutputUpdated(String noteId, String paragraphId, String appId, String output) {
    Message msg = new Message(OP.APP_UPDATE_OUTPUT)
        .put("noteId", noteId)
        .put("paragraphId", paragraphId)
        .put("appId", appId)
        .put("data", output);
    broadcast(noteId, msg);
  }

  /** Broadcasts that a Helium application package was loaded for a paragraph. */
  @Override
  public void onLoad(String noteId, String paragraphId, String appId, HeliumPackage pkg) {
    Message msg = new Message(OP.APP_LOAD)
        .put("noteId", noteId)
        .put("paragraphId", paragraphId)
        .put("appId", appId)
        .put("pkg", pkg);
    broadcast(noteId, msg);
  }

  /** Broadcasts a Helium application status change to watchers of the note. */
  @Override
  public void onStatusChange(String noteId, String paragraphId, String appId, String status) {
    Message msg = new Message(OP.APP_STATUS_CHANGE)
        .put("noteId", noteId)
        .put("paragraphId", paragraphId)
        .put("appId", appId)
        .put("status", status);
    broadcast(noteId, msg);
  }

  /**
   * Paragraph job listener that forwards job progress, status changes, and remote
   * interpreter output to the websocket clients watching the note.
   */
  public static class ParagraphListenerImpl implements ParagraphJobListener {
    private NotebookServer notebookServer;
    private Note note;

    public ParagraphListenerImpl(NotebookServer notebookServer, Note note) {
      this.notebookServer = notebookServer;
      this.note = note;
    }

    /** Pushes job progress (percentage) to all watchers of the note. */
    @Override
    public void onProgressUpdate(Job job, int progress) {
      notebookServer.broadcast(
          note.id(),
          new Message(OP.PROGRESS).put("id", job.getId()).put("progress",
              job.progress()));
    }

    @Override
    public void beforeStatusChange(Job job, Status before, Status after) {
    }

    /**
     * Logs failures, persists the note once the job terminates, and always
     * re-broadcasts the note so clients see the new status.
     */
    @Override
    public void afterStatusChange(Job job, Status before, Status after) {
      if (after == Status.ERROR) {
        if (job.getException() != null) {
          LOG.error("Error", job.getException());
        }
      }
      if (job.isTerminated()) {
        LOG.info("Job {} is finished", job.getId());
        try {
          //TODO(khalid): may change interface for JobListener and pass subject from interpreter
          note.persist(null);
        } catch (IOException e) {
          LOG.error(e.toString(), e);
        }
      }
      notebookServer.broadcastNote(note);
    }

    /**
     * This callback is for praragraph that runs on RemoteInterpreterProcess
     * @param paragraph
     * @param out
     * @param output
     */
    @Override
    public void onOutputAppend(Paragraph paragraph, InterpreterOutput out, String output) {
      Message msg = new Message(OP.PARAGRAPH_APPEND_OUTPUT)
          .put("noteId", paragraph.getNote().getId())
          .put("paragraphId", paragraph.getId())
          .put("data", output);
      notebookServer.broadcast(paragraph.getNote().getId(), msg);
    }

    /**
     * This callback is for paragraph that runs on RemoteInterpreterProcess
     * @param paragraph
     * @param out
     * @param output
     */
    @Override
    public void onOutputUpdate(Paragraph paragraph, InterpreterOutput out, String output) {
      Message msg = new Message(OP.PARAGRAPH_UPDATE_OUTPUT)
          .put("noteId", paragraph.getNote().getId())
          .put("paragraphId", paragraph.getId())
          .put("data", output);
      notebookServer.broadcast(paragraph.getNote().getId(), msg);
    }
  }

  @Override
  public ParagraphJobListener getParagraphJobListener(Note note) {
    return new ParagraphListenerImpl(this, note);
  }

  /**
   * Replays every Angular object known to the note's interpreter settings to a newly
   * connected client so its registry view is in sync.
   */
  private void sendAllAngularObjects(Note note, NotebookSocket conn) throws IOException {
    List<InterpreterSetting> settings =
        notebook().getInterpreterFactory().getInterpreterSettings(note.getId());
    if (settings == null || settings.size() == 0) {
      return;
    }
    for (InterpreterSetting intpSetting : settings) {
      AngularObjectRegistry registry = intpSetting.getInterpreterGroup(note.id())
          .getAngularObjectRegistry();
      List<AngularObject> objects = registry.getAllWithGlobal(note.id());
      for (AngularObject object : objects) {
        conn.send(serializeMessage(new Message(OP.ANGULAR_OBJECT_UPDATE)
            .put("angularObject", object)
            .put("interpreterGroupId",
                intpSetting.getInterpreterGroup(note.id()).getId())
            .put("noteId", note.id())
            .put("paragraphId", object.getParagraphId())
        ));
      }
    }
  }

  /** An added Angular object is broadcast exactly like an update. */
  @Override
  public void onAdd(String interpreterGroupId, AngularObject object) {
    onUpdate(interpreterGroupId, object);
  }

  /**
   * Broadcasts an Angular object update to every note it applies to (a null noteId on
   * the object means it is global to all notes).
   */
  @Override
  public void onUpdate(String interpreterGroupId, AngularObject object) {
    Notebook notebook = notebook();
    if (notebook == null) {
      return;
    }
    List<Note> notes = notebook.getAllNotes();
    for (Note note : notes) {
      if (object.getNoteId() != null && !note.id().equals(object.getNoteId())) {
        continue;
      }
      List<InterpreterSetting> intpSettings = notebook.getInterpreterFactory()
          .getInterpreterSettings(note.getId());
      if (intpSettings.isEmpty()) {
        continue;
      }
      broadcast(
          note.id(),
          new Message(OP.ANGULAR_OBJECT_UPDATE)
              .put("angularObject", object)
              .put("interpreterGroupId", interpreterGroupId)
              .put("noteId", note.id())
              .put("paragraphId", object.getParagraphId()));
    }
  }

  /**
   * Broadcasts removal of an Angular object to every note bound to the interpreter
   * group it was removed from.
   */
  @Override
  public void onRemove(String interpreterGroupId, String name, String noteId,
      String paragraphId) {
    Notebook notebook = notebook();
    List<Note> notes = notebook.getAllNotes();
    for (Note note : notes) {
      if (noteId != null && !note.id().equals(noteId)) {
        continue;
      }
      List<String> ids = notebook.getInterpreterFactory().getInterpreters(note.getId());
      for (String id : ids) {
        if (id.equals(interpreterGroupId)) {
          broadcast(
              note.id(),
              new Message(OP.ANGULAR_OBJECT_REMOVE).put("name", name).put(
                  "noteId", noteId).put("paragraphId", paragraphId));
        }
      }
    }
  }
}
apache-2.0
glorycloud/GloryMail
CloudyMail/src/mobi/cloudymail/mailclient/AlarmReceiver.java
2155
package mobi.cloudymail.mailclient; import mobi.cloudymail.mailclient.net.ServerAgent; import mobi.cloudymail.util.MyApp; import mobi.cloudymail.util.Utils; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.net.ConnectivityManager; import android.os.Bundle; import android.os.PowerManager; import android.os.PowerManager.WakeLock; public class AlarmReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { Utils.log("Alarm onReceive"); Bundle bundle= intent.getExtras(); if(bundle == null) { Utils.log("bundle is null"); return; } int pollInterval = Integer.parseInt(bundle.getString("minutes")); if (MyApp.userSetting.getPushFrequency() <= 0) { return; } // if(ReceiveMailService.wakeLock != null) // { // ReceiveMailService.wakeLock.acquire(5000);//acquire for 5s // Log.d(Utils.LOGTAG, "acquire lock for 5s"); // } PowerManager pm = (PowerManager) MyApp.instance().getSystemService(Context.POWER_SERVICE); // wakeLock = pm.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, "CloudyMailLock"); WakeLock wakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "CloudyMailLock"); if(ServerAgent.hasNetworkConnection()) { ConnectivityManager cm = (ConnectivityManager) MyApp.instance() .getSystemService(Context.CONNECTIVITY_SERVICE); Utils.log("Net_connection OK:"+cm.getActiveNetworkInfo().getTypeName()); wakeLock.acquire(500);//wait 0.5s only, to give time for ReceiveMailService to create it's own lock } else { Utils.log("No Net_connection"); wakeLock.acquire(5000);//wait 5 seconds, give enough time for system to restore network connection //if a network connection is restored, CONNECTIVE_CHANGE event will happen, and //push service will be restarted then return; } ReceiveMailService.SyncRequest r = new ReceiveMailService.SyncRequest(); r.setAccountsToSync(AccountManager.getAccounts()); ReceiveMailService.addToSyncQueue(r); } }
apache-2.0
j0rg3n/skattejakt
app/src/main/java/com/Android2/model/MapNode.java
954
package com.Android2.model; import android.os.Parcel; import android.os.Parcelable; import com.Android2.controller.IMapNodeController; import com.google.android.gms.maps.model.LatLng; /** * Created by cirkus on 24.07.2017. */ public class MapNode implements Parcelable { public LatLng location = null; public IMapNodeController mapNodeController = null; public MapNode() { } protected MapNode(Parcel in) { location = in.readParcelable(null); } @Override public void writeToParcel(Parcel parcel, int i) { parcel.writeParcelable(location, 0); } @Override public int describeContents() { return 0; } public static final Creator<MapNode> CREATOR = new Creator<MapNode>() { @Override public MapNode createFromParcel(Parcel in) { return new MapNode(in); } @Override public MapNode[] newArray(int size) { return new MapNode[size]; } }; }
apache-2.0
tobias47n9e/apps-android-commons
app/src/main/java/fr/free/nrw/commons/utils/LengthUtils.java
2208
package fr.free.nrw.commons.utils;

import fr.free.nrw.commons.location.LatLng;

import java.text.NumberFormat;

/**
 * Static helpers for computing and formatting great-circle (haversine)
 * distances between geographic coordinates.
 */
public class LengthUtils {

    /** Mean Earth radius in meters, used to convert central angles to distances. */
    private static final double EARTH_RADIUS_METERS = 6371009.0D;

    private LengthUtils() {
        // Utility class: prevent instantiation.
    }

    /** Returns a formatted distance string between two points.
     * @param point1 LatLng type point1
     * @param point2 LatLng type point2
     * @return string distance such as "250m" or "1.2km", or null if either point is null
     */
    public static String formatDistanceBetween(LatLng point1, LatLng point2) {
        if (point1 == null || point2 == null) {
            return null;
        }

        NumberFormat numberFormat = NumberFormat.getNumberInstance();
        double distance = Math.round(computeDistanceBetween(point1, point2));

        // Adjust to KM if M goes over 1000 (see javadoc of method for note
        // on only supporting metric)
        if (distance >= 1000) {
            numberFormat.setMaximumFractionDigits(1);
            return numberFormat.format(distance / 1000) + "km";
        }
        return numberFormat.format(distance) + "m";
    }

    /**
     * Computes the distance between two points.
     * @param from one of the two end points
     * @param to one of the two end points
     * @return distance between the points in meter
     */
    public static double computeDistanceBetween(LatLng from, LatLng to) {
        return computeAngleBetween(from, to) * EARTH_RADIUS_METERS;
    }

    /** Central angle (radians) between two points on the sphere. */
    private static double computeAngleBetween(LatLng from, LatLng to) {
        return distanceRadians(Math.toRadians(from.getLatitude()),
                Math.toRadians(from.getLongitude()),
                Math.toRadians(to.getLatitude()),
                Math.toRadians(to.getLongitude()));
    }

    /** Haversine angular distance between two lat/lng pairs given in radians. */
    private static double distanceRadians(double lat1, double lng1, double lat2, double lng2) {
        return arcHav(havDistance(lat1, lat2, lng1 - lng2));
    }

    /** Inverse haversine: angle whose haversine is x. */
    private static double arcHav(double x) {
        return 2.0D * Math.asin(Math.sqrt(x));
    }

    /** Haversine of the angular distance for latitudes lat1, lat2 and longitude delta. */
    private static double havDistance(double lat1, double lat2, double longitude) {
        return hav(lat1 - lat2) + hav(longitude) * Math.cos(lat1) * Math.cos(lat2);
    }

    /** Haversine function: hav(x) = sin^2(x/2). */
    private static double hav(double x) {
        double sinHalf = Math.sin(x * 0.5D);
        return sinHalf * sinHalf;
    }
}
apache-2.0
milg0/onvif-java-lib
src/org/onvif/ver10/schema/MediaCapabilitiesExtension.java
3816
//
// This file was generated with the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, v2.2.5-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated: 2014.02.04 at 12:22:03 PM CET
//

package org.onvif.ver10.schema;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import org.w3c.dom.Element;

/**
 * <p>
 * Java class for the MediaCapabilitiesExtension complex type.
 *
 * <p>
 * The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="MediaCapabilitiesExtension">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="ProfileCapabilities" type="{http://www.onvif.org/ver10/schema}ProfileCapabilities"/>
 *         &lt;any processContents='lax' namespace='http://www.onvif.org/ver10/schema' maxOccurs="unbounded" minOccurs="0"/>
 *       &lt;/sequence>
 *       &lt;anyAttribute processContents='lax'/>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "MediaCapabilitiesExtension", propOrder = { "profileCapabilities", "any" })
public class MediaCapabilitiesExtension {

    @XmlElement(name = "ProfileCapabilities", required = true)
    protected ProfileCapabilities profileCapabilities;
    @XmlAnyElement(lax = true)
    protected List<java.lang.Object> any;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Gets the value of the profileCapabilities property.
     *
     * @return possible object is {@link ProfileCapabilities }
     *
     */
    public ProfileCapabilities getProfileCapabilities() {
        return profileCapabilities;
    }

    /**
     * Sets the value of the profileCapabilities property.
     *
     * @param value
     *            allowed object is {@link ProfileCapabilities }
     *
     */
    public void setProfileCapabilities(ProfileCapabilities value) {
        this.profileCapabilities = value;
    }

    /**
     * Gets the value of the any property.
     *
     * <p>
     * This accessor method returns a reference to the live list, not a snapshot. Therefore any modification you make to the returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the any property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     *
     * <pre>
     * getAny().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list {@link Element } {@link java.lang.Object }
     *
     *
     */
    public List<java.lang.Object> getAny() {
        if (any == null) {
            any = new ArrayList<java.lang.Object>();
        }
        return this.any;
    }

    /**
     * Gets a map that contains attributes that aren't bound to any typed property on this class.
     *
     * <p>
     * the map is keyed by the name of the attribute and the value is the string value of the attribute.
     *
     * the map returned by this method is live, and you can add new attribute by updating the map directly. Because of this design, there's no setter.
     *
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
apache-2.0
haikuowuya/android_system_code
src/com/sun/imageio/plugins/gif/GIFStreamMetadata.java
11058
/*
 * Copyright (c) 2000, 2005, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */

package com.sun.imageio.plugins.gif;

import javax.imageio.ImageTypeSpecifier;
import javax.imageio.metadata.IIOInvalidTreeException;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.metadata.IIOMetadataNode;
import javax.imageio.metadata.IIOMetadataFormat;
import javax.imageio.metadata.IIOMetadataFormatImpl;
import org.w3c.dom.Node;

// TODO - document elimination of globalColorTableFlag

/**
 * Read-only stream metadata for the GIF plug-in: the Logical Screen Descriptor
 * fields and optional Global Color Table, exposed both in the native
 * "javax_imageio_gif_stream_1.0" format and in the standard metadata format.
 */
public class GIFStreamMetadata extends GIFMetadata {

    // package scope
    static final String nativeMetadataFormatName =
        "javax_imageio_gif_stream_1.0";

    public static final String[] versionStrings = { "87a", "89a" };

    public String version; // 87a or 89a

    public int logicalScreenWidth;
    public int logicalScreenHeight;
    public int colorResolution; // 1 to 8
    public int pixelAspectRatio;

    public int backgroundColorIndex; // Valid if globalColorTable != null
    public boolean sortFlag; // Valid if globalColorTable != null

    public static final String[] colorTableSizes = {
        "2", "4", "8", "16", "32", "64", "128", "256"
    };

    // Set global color table flag in header to 0 if null, 1 otherwise
    public byte[] globalColorTable = null;

    protected GIFStreamMetadata(boolean standardMetadataFormatSupported,
                                String nativeMetadataFormatName,
                                String nativeMetadataFormatClassName,
                                String[] extraMetadataFormatNames,
                                String[] extraMetadataFormatClassNames)
    {
        super(standardMetadataFormatSupported,
              nativeMetadataFormatName,
              nativeMetadataFormatClassName,
              extraMetadataFormatNames,
              extraMetadataFormatClassNames);
    }

    public GIFStreamMetadata() {
        this(true,
             nativeMetadataFormatName,
             "com.sun.imageio.plugins.gif.GIFStreamMetadataFormat",
             null, null);
    }

    // This metadata is read-only: all mutating entry points below throw.
    public boolean isReadOnly() {
        return true;
    }

    /** Dispatches to the native or standard tree builder based on the format name. */
    public Node getAsTree(String formatName) {
        if (formatName.equals(nativeMetadataFormatName)) {
            return getNativeTree();
        } else if (formatName.equals
                   (IIOMetadataFormatImpl.standardMetadataFormatName)) {
            return getStandardTree();
        } else {
            throw new IllegalArgumentException("Not a recognized format!");
        }
    }

    /** Builds the native-format tree: Version, LogicalScreenDescriptor, GlobalColorTable. */
    private Node getNativeTree() {
        IIOMetadataNode node; // scratch node
        IIOMetadataNode root =
            new IIOMetadataNode(nativeMetadataFormatName);

        node = new IIOMetadataNode("Version");
        node.setAttribute("value", version);
        root.appendChild(node);

        // Image descriptor
        node = new IIOMetadataNode("LogicalScreenDescriptor");
        /* NB: At the moment we use empty strings to support undefined
         * integer values in tree representation.
         * We need to add better support for undefined/default values later.
         */
        node.setAttribute("logicalScreenWidth",
                          logicalScreenWidth == UNDEFINED_INTEGER_VALUE ?
                          "" : Integer.toString(logicalScreenWidth));
        node.setAttribute("logicalScreenHeight",
                          logicalScreenHeight == UNDEFINED_INTEGER_VALUE ?
                          "" : Integer.toString(logicalScreenHeight));
        // Stored value plus one
        node.setAttribute("colorResolution",
                          colorResolution == UNDEFINED_INTEGER_VALUE ?
                          "" : Integer.toString(colorResolution));
        node.setAttribute("pixelAspectRatio",
                          Integer.toString(pixelAspectRatio));
        root.appendChild(node);

        if (globalColorTable != null) {
            node = new IIOMetadataNode("GlobalColorTable");
            int numEntries = globalColorTable.length/3;
            node.setAttribute("sizeOfGlobalColorTable",
                              Integer.toString(numEntries));
            node.setAttribute("backgroundColorIndex",
                              Integer.toString(backgroundColorIndex));
            node.setAttribute("sortFlag",
                              sortFlag ? "TRUE" : "FALSE");

            // One ColorTableEntry child per RGB triple in the packed byte array.
            for (int i = 0; i < numEntries; i++) {
                IIOMetadataNode entry =
                    new IIOMetadataNode("ColorTableEntry");
                entry.setAttribute("index", Integer.toString(i));
                int r = globalColorTable[3*i] & 0xff;
                int g = globalColorTable[3*i + 1] & 0xff;
                int b = globalColorTable[3*i + 2] & 0xff;
                entry.setAttribute("red", Integer.toString(r));
                entry.setAttribute("green", Integer.toString(g));
                entry.setAttribute("blue", Integer.toString(b));
                node.appendChild(entry);
            }
            root.appendChild(node);
        }

        return root;
    }

    /** Standard-format Chroma node: RGB color space, optional Palette/BackgroundIndex. */
    public IIOMetadataNode getStandardChromaNode() {
        IIOMetadataNode chroma_node = new IIOMetadataNode("Chroma");
        IIOMetadataNode node = null; // scratch node

        node = new IIOMetadataNode("ColorSpaceType");
        node.setAttribute("name", "RGB");
        chroma_node.appendChild(node);

        node = new IIOMetadataNode("BlackIsZero");
        node.setAttribute("value", "TRUE");
        chroma_node.appendChild(node);

        // NumChannels not in stream
        // Gamma not in format

        if (globalColorTable != null) {
            node = new IIOMetadataNode("Palette");
            int numEntries = globalColorTable.length/3;
            for (int i = 0; i < numEntries; i++) {
                IIOMetadataNode entry =
                    new IIOMetadataNode("PaletteEntry");
                entry.setAttribute("index", Integer.toString(i));
                entry.setAttribute("red",
                                   Integer.toString(globalColorTable[3*i] & 0xff));
                entry.setAttribute("green",
                                   Integer.toString(globalColorTable[3*i + 1] & 0xff));
                entry.setAttribute("blue",
                                   Integer.toString(globalColorTable[3*i + 2] & 0xff));
                node.appendChild(entry);
            }
            chroma_node.appendChild(node);

            // backgroundColorIndex is valid iff there is a color table
            node = new IIOMetadataNode("BackgroundIndex");
            node.setAttribute("value", Integer.toString(backgroundColorIndex));
            chroma_node.appendChild(node);
        }

        return chroma_node;
    }

    /** Standard-format Compression node: GIF is always lossless LZW. */
    public IIOMetadataNode getStandardCompressionNode() {
        IIOMetadataNode compression_node = new IIOMetadataNode("Compression");
        IIOMetadataNode node = null; // scratch node

        node = new IIOMetadataNode("CompressionTypeName");
        node.setAttribute("value", "lzw");
        compression_node.appendChild(node);

        node = new IIOMetadataNode("Lossless");
        node.setAttribute("value", "TRUE");
        compression_node.appendChild(node);

        // NumProgressiveScans not in stream
        // BitRate not in format

        return compression_node;
    }

    /** Standard-format Data node: indexed samples at colorResolution bits. */
    public IIOMetadataNode getStandardDataNode() {
        IIOMetadataNode data_node = new IIOMetadataNode("Data");
        IIOMetadataNode node = null; // scratch node

        // PlanarConfiguration

        node = new IIOMetadataNode("SampleFormat");
        node.setAttribute("value", "Index");
        data_node.appendChild(node);

        node = new IIOMetadataNode("BitsPerSample");
        node.setAttribute("value",
                          colorResolution == UNDEFINED_INTEGER_VALUE ?
                          "" : Integer.toString(colorResolution));
        data_node.appendChild(node);

        // SignificantBitsPerSample
        // SampleMSB

        return data_node;
    }

    /** Standard-format Dimension node: aspect ratio, orientation, screen size. */
    public IIOMetadataNode getStandardDimensionNode() {
        IIOMetadataNode dimension_node = new IIOMetadataNode("Dimension");
        IIOMetadataNode node = null; // scratch node

        node = new IIOMetadataNode("PixelAspectRatio");
        float aspectRatio = 1.0F;
        if (pixelAspectRatio != 0) {
            // GIF spec encoding: actual ratio = (stored value + 15) / 64
            aspectRatio = (pixelAspectRatio + 15)/64.0F;
        }
        node.setAttribute("value", Float.toString(aspectRatio));
        dimension_node.appendChild(node);

        node = new IIOMetadataNode("ImageOrientation");
        node.setAttribute("value", "Normal");
        dimension_node.appendChild(node);

        // HorizontalPixelSize not in format
        // VerticalPixelSize not in format
        // HorizontalPhysicalPixelSpacing not in format
        // VerticalPhysicalPixelSpacing not in format
        // HorizontalPosition not in format
        // VerticalPosition not in format
        // HorizontalPixelOffset not in stream
        // VerticalPixelOffset not in stream

        node = new IIOMetadataNode("HorizontalScreenSize");
        node.setAttribute("value",
                          logicalScreenWidth == UNDEFINED_INTEGER_VALUE ?
                          "" : Integer.toString(logicalScreenWidth));
        dimension_node.appendChild(node);

        node = new IIOMetadataNode("VerticalScreenSize");
        node.setAttribute("value",
                          logicalScreenHeight == UNDEFINED_INTEGER_VALUE ?
                          "" : Integer.toString(logicalScreenHeight));
        dimension_node.appendChild(node);

        return dimension_node;
    }

    /** Standard-format Document node: only the GIF format version is available. */
    public IIOMetadataNode getStandardDocumentNode() {
        IIOMetadataNode document_node = new IIOMetadataNode("Document");
        IIOMetadataNode node = null; // scratch node

        node = new IIOMetadataNode("FormatVersion");
        node.setAttribute("value", version);
        document_node.appendChild(node);

        // SubimageInterpretation not in format
        // ImageCreationTime not in format
        // ImageModificationTime not in format

        return document_node;
    }

    public IIOMetadataNode getStandardTextNode() {
        // Not in stream
        return null;
    }

    public IIOMetadataNode getStandardTransparencyNode() {
        // Not in stream
        return null;
    }

    public void setFromTree(String formatName, Node root)
        throws IIOInvalidTreeException
    {
        throw new IllegalStateException("Metadata is read-only!");
    }

    protected void mergeNativeTree(Node root)
        throws IIOInvalidTreeException
    {
        throw new IllegalStateException("Metadata is read-only!");
    }

    protected void mergeStandardTree(Node root)
        throws IIOInvalidTreeException
    {
        throw new IllegalStateException("Metadata is read-only!");
    }

    public void reset() {
        throw new IllegalStateException("Metadata is read-only!");
    }
}
apache-2.0
wildfly-clustering/wildfly-clustering-tomcat
common/src/test/java/org/wildfly/clustering/tomcat/AbstractSmokeITCase.java
5750
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2020, Red Hat, Inc., and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.wildfly.clustering.tomcat;

import java.net.URI;
import java.net.URL;
import java.util.Arrays;

import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.infinispan.protostream.SerializationContextInitializer;
import org.infinispan.server.test.core.ServerRunMode;
import org.infinispan.server.test.core.TestSystemPropertyNames;
import org.infinispan.server.test.junit4.InfinispanServerRuleBuilder;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.rules.TestRule;
import org.wildfly.clustering.marshalling.Externalizer;
import org.wildfly.clustering.tomcat.servlet.MutableIntegerExternalizer;
import org.wildfly.clustering.tomcat.servlet.ServletHandler;
import org.wildfly.clustering.tomcat.servlet.TestSerializationContextInitializer;

/**
 * Base class for session-replication smoke tests: boots a single forked Infinispan
 * server (class rule), builds the test web archive, and verifies that a counter
 * session survives failover between two Tomcat instances.
 *
 * @author Paul Ferraro
 */
public abstract class AbstractSmokeITCase {
    // Infinispan server location/profile are injected by the build via system properties.
    static final String INFINISPAN_SERVER_HOME = System.getProperty("infinispan.server.home");
    static final String INFINISPAN_SERVER_PROFILE = System.getProperty("infinispan.server.profile");
    static final String INFINISPAN_DRIVER_USERNAME = "testsuite-driver-user";
    static final String INFINISPAN_DRIVER_PASSWORD = "testsuite-driver-password";

    // Single forked Infinispan server shared by all tests in the class.
    @ClassRule
    public static final TestRule SERVERS = InfinispanServerRuleBuilder.config(INFINISPAN_SERVER_PROFILE)
            .property(TestSystemPropertyNames.INFINISPAN_TEST_SERVER_DIR, INFINISPAN_SERVER_HOME)
            .property("infinispan.client.rest.auth_username", INFINISPAN_DRIVER_USERNAME)
            .property("infinispan.client.rest.auth_password", INFINISPAN_DRIVER_PASSWORD)
            .runMode(ServerRunMode.FORKED)
            .numServers(1)
            .build();

    /**
     * Builds the deployable web archive for a smoke test: the servlet handler packages
     * plus the externalizer/serialization-context service-provider registrations.
     */
    public static Archive<?> deployment(Class<? extends AbstractSmokeITCase> testClass, Class<? extends ServletHandler<?, ?>> servletClass) {
        return ShrinkWrap.create(WebArchive.class, testClass.getSimpleName() + ".war")
                .addPackage(ServletHandler.class.getPackage())
                .addPackage(servletClass.getPackage())
                .addAsServiceProvider(Externalizer.class, MutableIntegerExternalizer.class)
                .addAsServiceProvider(SerializationContextInitializer.class.getName(), TestSerializationContextInitializer.class.getName() + "Impl")
                ;
    }

    /**
     * Alternates GET requests between the two base URLs, asserting that the session id
     * stays stable and the counter value increments monotonically across failovers;
     * then deletes the session on node 1 and verifies node 2 no longer sees it.
     */
    protected void test(URL baseURL1, URL baseURL2) throws Exception {
        URI uri1 = ServletHandler.createURI(baseURL1);
        URI uri2 = ServletHandler.createURI(baseURL2);
        try (CloseableHttpClient client = HttpClients.createDefault()) {
            String sessionId = null;
            int value = 0;
            for (int i = 0; i < 4; i++) {
                for (URI uri : Arrays.asList(uri1, uri2)) {
                    for (int j = 0; j < 4; j++) {
                        try (CloseableHttpResponse response = client.execute(new HttpGet(uri))) {
                            Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
                            // Counter must keep incrementing regardless of which node serves the request.
                            Assert.assertEquals(String.valueOf(value++), response.getFirstHeader(ServletHandler.VALUE).getValue());
                            String requestSessionId = response.getFirstHeader(ServletHandler.SESSION_ID).getValue();
                            if (sessionId == null) {
                                sessionId = requestSessionId;
                            } else {
                                // Session id must be preserved across nodes.
                                Assert.assertEquals(sessionId, requestSessionId);
                            }
                        }
                    }
                    // Grace time between failover requests
                    Thread.sleep(500);
                }
            }
            try (CloseableHttpResponse response = client.execute(new HttpDelete(uri1))) {
                Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
                Assert.assertEquals(sessionId, response.getFirstHeader(ServletHandler.SESSION_ID).getValue());
            }
            try (CloseableHttpResponse response = client.execute(new HttpHead(uri2))) {
                Assert.assertEquals(HttpStatus.SC_OK, response.getStatusLine().getStatusCode());
                // After invalidation on node 1, node 2 must not report a session id.
                Assert.assertFalse(response.containsHeader(ServletHandler.SESSION_ID));
            }
        }
    }
}
apache-2.0
sdnwiselab/onos
core/store/primitives/src/main/java/org/onosproject/store/primitives/resources/impl/AtomixAtomicCounterMapCommands.java
8989
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.primitives.resources.impl; import io.atomix.catalyst.buffer.BufferInput; import io.atomix.catalyst.buffer.BufferOutput; import io.atomix.catalyst.serializer.CatalystSerializable; import io.atomix.catalyst.serializer.SerializableTypeResolver; import io.atomix.catalyst.serializer.Serializer; import io.atomix.catalyst.serializer.SerializerRegistry; import io.atomix.copycat.Command; import io.atomix.copycat.Query; /** * Atomic counter map commands. 
*/ public final class AtomixAtomicCounterMapCommands { private AtomixAtomicCounterMapCommands() { } public abstract static class AtomicCounterMapCommand<V> implements Command<V>, CatalystSerializable { @Override public CompactionMode compaction() { return CompactionMode.SNAPSHOT; } @Override public void writeObject(BufferOutput<?> buffer, Serializer serializer) { } @Override public void readObject(BufferInput<?> buffer, Serializer serializer) { } } public abstract static class AtomicCounterMapQuery<V> implements Query<V>, CatalystSerializable { @Override public void writeObject(BufferOutput<?> buffer, Serializer serializer) { } @Override public void readObject(BufferInput<?> buffer, Serializer serializer) { } } public abstract static class KeyCommand<V> extends AtomicCounterMapCommand<V> { private String key; public KeyCommand() { } public KeyCommand(String key) { this.key = key; } public String key() { return key; } @Override public void writeObject(BufferOutput<?> buffer, Serializer serializer) { buffer.writeString(key); } @Override public void readObject(BufferInput<?> buffer, Serializer serializer) { key = buffer.readString(); } } public abstract static class KeyQuery<V> extends AtomicCounterMapQuery<V> { private String key; public KeyQuery() { } public KeyQuery(String key) { this.key = key; } public String key() { return key; } @Override public void writeObject(BufferOutput<?> buffer, Serializer serializer) { buffer.writeString(key); } @Override public void readObject(BufferInput<?> buffer, Serializer serializer) { key = buffer.readString(); } } public static class KeyValueCommand<V> extends KeyCommand<V> { private long value; public KeyValueCommand() { } public KeyValueCommand(String key, long value) { super(key); this.value = value; } public long value() { return value; } @Override public void writeObject(BufferOutput<?> buffer, Serializer serializer) { super.writeObject(buffer, serializer); buffer.writeLong(value); } @Override public void 
readObject(BufferInput<?> buffer, Serializer serializer) { super.readObject(buffer, serializer); value = buffer.readLong(); } } public static class Get extends KeyQuery<Long> { public Get() { } public Get(String key) { super(key); } } public static class Put extends KeyValueCommand<Long> { public Put() { } public Put(String key, long value) { super(key, value); } } public static class PutIfAbsent extends KeyValueCommand<Long> { public PutIfAbsent() { } public PutIfAbsent(String key, long value) { super(key, value); } } public static class Replace extends KeyCommand<Boolean> { private long replace; private long value; public Replace() { } public Replace(String key, long replace, long value) { super(key); this.replace = replace; this.value = value; } public long replace() { return replace; } public long value() { return value; } @Override public void writeObject(BufferOutput<?> buffer, Serializer serializer) { super.writeObject(buffer, serializer); buffer.writeLong(replace); buffer.writeLong(value); } @Override public void readObject(BufferInput<?> buffer, Serializer serializer) { super.readObject(buffer, serializer); replace = buffer.readLong(); value = buffer.readLong(); } } public static class Remove extends KeyCommand<Long> { public Remove() { } public Remove(String key) { super(key); } } public static class RemoveValue extends KeyValueCommand<Boolean> { public RemoveValue() { } public RemoveValue(String key, long value) { super(key, value); } } public static class IncrementAndGet extends KeyCommand<Long> { public IncrementAndGet() { } public IncrementAndGet(String key) { super(key); } } public static class DecrementAndGet extends KeyCommand<Long> { public DecrementAndGet(String key) { super(key); } public DecrementAndGet() { } } public static class GetAndIncrement extends KeyCommand<Long> { public GetAndIncrement() { } public GetAndIncrement(String key) { super(key); } } public static class GetAndDecrement extends KeyCommand<Long> { public GetAndDecrement() { } 
public GetAndDecrement(String key) { super(key); } } public abstract static class DeltaCommand extends KeyCommand<Long> { private long delta; public DeltaCommand() { } public DeltaCommand(String key, long delta) { super(key); this.delta = delta; } public long delta() { return delta; } @Override public void writeObject(BufferOutput<?> buffer, Serializer serializer) { super.writeObject(buffer, serializer); buffer.writeLong(delta); } @Override public void readObject(BufferInput<?> buffer, Serializer serializer) { super.readObject(buffer, serializer); delta = buffer.readLong(); } } public static class AddAndGet extends DeltaCommand { public AddAndGet() { } public AddAndGet(String key, long delta) { super(key, delta); } } public static class GetAndAdd extends DeltaCommand { public GetAndAdd() { } public GetAndAdd(String key, long delta) { super(key, delta); } } public static class Size extends AtomicCounterMapQuery<Integer> { } public static class IsEmpty extends AtomicCounterMapQuery<Boolean> { } public static class Clear extends AtomicCounterMapCommand<Void> { } /** * Counter map command type resolver. */ public static class TypeResolver implements SerializableTypeResolver { @Override public void resolve(SerializerRegistry registry) { registry.register(Get.class, -790); registry.register(Put.class, -791); registry.register(PutIfAbsent.class, -792); registry.register(Replace.class, -793); registry.register(Remove.class, -794); registry.register(RemoveValue.class, -795); registry.register(IncrementAndGet.class, -796); registry.register(DecrementAndGet.class, -797); registry.register(GetAndIncrement.class, -798); registry.register(GetAndDecrement.class, -799); registry.register(AddAndGet.class, -800); registry.register(GetAndAdd.class, -801); registry.register(Size.class, -801); registry.register(IsEmpty.class, -801); registry.register(Clear.class, -801); } } }
apache-2.0
CamelCookbook/camel-cookbook-examples
06-splitting-aggregating/src/test/java/org/camelcookbook/splitjoin/splitaggregate/SplitAggregateExceptionHandlingSpringTest.java
2164
/*
 * Copyright (C) Scott Cranton, Jakub Korab, and Christian Posta
 * https://github.com/CamelCookbook
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camelcookbook.splitjoin.splitaggregate;

import java.util.Arrays;
import java.util.Collections;
import java.util.Set;

import org.apache.camel.Exchange;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.spring.CamelSpringTestSupport;
import org.junit.Test;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

/**
 * Demonstrates the handling of an exception during splitting and aggregation.
 */
public class SplitAggregateExceptionHandlingSpringTest extends CamelSpringTestSupport {

    @Override
    protected AbstractApplicationContext createApplicationContext() {
        // Route under test is defined in the Spring XML context.
        return new ClassPathXmlApplicationContext("/META-INF/spring/splitAggregateExceptionHandling-context.xml");
    }

    @Test
    public void testHandlesException() throws Exception {
        final String[] fragments = {"one", "two", "three"};

        MockEndpoint out = getMockEndpoint("mock:out");
        out.expectedMessageCount(1);

        template.sendBody("direct:in", fragments);

        assertMockEndpointsSatisfied();

        // A single aggregated exchange must arrive, carrying one response per
        // split fragment — including the one whose processing failed.
        Exchange aggregated = out.getReceivedExchanges().get(0);
        @SuppressWarnings("unchecked")
        Set<String> responses = Collections.checkedSet(aggregated.getIn().getBody(Set.class), String.class);
        assertTrue(responses.containsAll(Arrays.asList("Processed: one", "Failed: two", "Processed: three")));
    }
}
apache-2.0
officerteam/zhongshuyunwei
app/src/main/java/com/zhongshu/maintenanceexpert/bean/CameraOrderBean.java
3523
package com.zhongshu.maintenanceexpert.bean;

import java.io.Serializable;
import java.util.List;

/**
 * Response envelope for the camera-order listing API.
 * Mirrors a JSON payload of the shape:
 * {"status": "...", "code": 200, "data": [{...order fields...}]}
 *
 * Created by a on 2017/4/25.
 */
public class CameraOrderBean implements Serializable {

    // Human-readable status message returned by the server.
    private String status;
    // Numeric response code (200 on success).
    private int code;
    // List of order records in this response.
    private List<DataBean> data;

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public int getCode() {
        return code;
    }

    public void setCode(int code) {
        this.code = code;
    }

    public List<DataBean> getData() {
        return data;
    }

    public void setData(List<DataBean> data) {
        this.data = data;
    }

    /**
     * One order record. All fields arrive as strings from the JSON payload;
     * field names intentionally match the JSON keys (snake_case) for mapping.
     */
    public static class DataBean implements Serializable {

        private String order_id;     // order identifier
        private String uid;          // ordering user's id
        private String title;        // order title
        private String details;      // problem description
        private String address;      // service address
        private String img;          // image URL, may be null
        private String type;         // order type code
        private String price;        // price as decimal string, e.g. "0.00"
        private String contact;      // contact person name
        private String mobile;       // contact phone number
        private String create_time;  // creation time as a unix-epoch string

        public String getOrder_id() {
            return order_id;
        }

        public void setOrder_id(String order_id) {
            this.order_id = order_id;
        }

        public String getUid() {
            return uid;
        }

        public void setUid(String uid) {
            this.uid = uid;
        }

        public String getTitle() {
            return title;
        }

        public void setTitle(String title) {
            this.title = title;
        }

        public String getDetails() {
            return details;
        }

        public void setDetails(String details) {
            this.details = details;
        }

        public String getAddress() {
            return address;
        }

        public void setAddress(String address) {
            this.address = address;
        }

        public String getImg() {
            return img;
        }

        public void setImg(String img) {
            this.img = img;
        }

        public String getType() {
            return type;
        }

        public void setType(String type) {
            this.type = type;
        }

        public String getPrice() {
            return price;
        }

        public void setPrice(String price) {
            this.price = price;
        }

        public String getContact() {
            return contact;
        }

        public void setContact(String contact) {
            this.contact = contact;
        }

        public String getMobile() {
            return mobile;
        }

        public void setMobile(String mobile) {
            this.mobile = mobile;
        }

        public String getCreate_time() {
            return create_time;
        }

        public void setCreate_time(String create_time) {
            this.create_time = create_time;
        }
    }
}
apache-2.0
jefperito/nfe
src/main/java/com/fincatto/documentofiscal/mdfe3/webservices/recepcao/MDFeRecepcaoStub.java
93203
/* * MDFeRecepcaoStub.java <p> This file was auto-generated from WSDL by the Apache Axis2 version: 1.6.2 Built on : Apr 17, 2012 (05:33:49 IST) */ package com.fincatto.documentofiscal.mdfe3.webservices.recepcao; import javax.xml.namespace.QName; /* * MDFeRecepcaoStub java implementation */ import org.apache.axiom.om.OMAttribute; import org.apache.axis2.client.Stub; import org.apache.axis2.databinding.utils.Constants; public class MDFeRecepcaoStub extends org.apache.axis2.client.Stub { protected org.apache.axis2.description.AxisOperation[] _operations; // hashmaps to keep the fault mapping @SuppressWarnings("rawtypes") private final java.util.HashMap faultExceptionNameMap = new java.util.HashMap(); @SuppressWarnings("rawtypes") private final java.util.HashMap faultExceptionClassNameMap = new java.util.HashMap(); @SuppressWarnings("rawtypes") private final java.util.HashMap faultMessageMap = new java.util.HashMap(); private static int counter = 0; private static synchronized java.lang.String getUniqueSuffix() { // reset the counter if it is greater than 99999 if (MDFeRecepcaoStub.counter > 99999) { MDFeRecepcaoStub.counter = 0; } MDFeRecepcaoStub.counter = MDFeRecepcaoStub.counter + 1; return java.lang.Long.toString(java.lang.System.currentTimeMillis()) + "_" + MDFeRecepcaoStub.counter; } private void populateAxisService() { // creating the Service with a unique name this._service = new org.apache.axis2.description.AxisService("MDFeRecepcao" + MDFeRecepcaoStub.getUniqueSuffix()); this.addAnonymousOperations(); // creating the operations org.apache.axis2.description.AxisOperation __operation; this._operations = new org.apache.axis2.description.AxisOperation[1]; __operation = new org.apache.axis2.description.OutInAxisOperation(); __operation.setName(new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLote")); this._service.addOperation(__operation); this._operations[0] = __operation; } // populates the faults private void 
populateFaults() { } /** * Constructor that takes in a configContext */ public MDFeRecepcaoStub(final org.apache.axis2.context.ConfigurationContext configurationContext, final java.lang.String targetEndpoint) throws org.apache.axis2.AxisFault { this(configurationContext, targetEndpoint, false); } /** * Constructor that takes in a configContext and useseperate listner */ public MDFeRecepcaoStub(final org.apache.axis2.context.ConfigurationContext configurationContext, final java.lang.String targetEndpoint, final boolean useSeparateListener) throws org.apache.axis2.AxisFault { // To populate AxisService this.populateAxisService(); this.populateFaults(); this._serviceClient = new org.apache.axis2.client.ServiceClient(configurationContext, this._service); this._serviceClient.getOptions().setTo(new org.apache.axis2.addressing.EndpointReference(targetEndpoint)); this._serviceClient.getOptions().setUseSeparateListener(useSeparateListener); // Set the soap version this._serviceClient.getOptions().setSoapVersionURI(org.apache.axiom.soap.SOAP12Constants.SOAP_ENVELOPE_NAMESPACE_URI); } /** * Default Constructor */ public MDFeRecepcaoStub(final org.apache.axis2.context.ConfigurationContext configurationContext) throws org.apache.axis2.AxisFault { this(configurationContext, "https://mdfe.sefaz.rs.gov.br/ws/MDFerecepcao/MDFeRecepcao.asmx"); } /** * Default Constructor */ public MDFeRecepcaoStub() throws org.apache.axis2.AxisFault { this("https://mdfe.sefaz.rs.gov.br/ws/MDFerecepcao/MDFeRecepcao.asmx"); } /** * Constructor taking the target endpoint */ public MDFeRecepcaoStub(final java.lang.String targetEndpoint) throws org.apache.axis2.AxisFault { this(null, targetEndpoint); } /** * Auto generated method signature * @param mdfeDadosMsg0 * @param mdfeCabecMsg1 */ @SuppressWarnings({ "rawtypes", "unchecked" }) public com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult mdfeRecepcaoLote(final 
com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg mdfeDadosMsg0, final com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE mdfeCabecMsg1) throws java.rmi.RemoteException { org.apache.axis2.context.MessageContext _messageContext = null; try { final org.apache.axis2.client.OperationClient _operationClient = this._serviceClient.createClient(this._operations[0].getName()); _operationClient.getOptions().setAction("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao/mdfeRecepcaoLote"); _operationClient.getOptions().setExceptionToBeThrownOnSOAPFault(true); this.addPropertyToOperationClient(_operationClient, org.apache.axis2.description.WSDL2Constants.ATTR_WHTTP_QUERY_PARAMETER_SEPARATOR, "&"); // create a message context _messageContext = new org.apache.axis2.context.MessageContext(); // create SOAP envelope with that payload org.apache.axiom.soap.SOAPEnvelope env; env = this.toEnvelope(Stub.getFactory(_operationClient.getOptions().getSoapVersionURI()), mdfeDadosMsg0, this.optimizeContent(new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLote")), new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLote")); env.build(); // add the children only if the parameter is not null if (mdfeCabecMsg1 != null) { final org.apache.axiom.om.OMElement omElementmdfeCabecMsg1 = this.toOM(mdfeCabecMsg1, this.optimizeContent(new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLote"))); this.addHeader(omElementmdfeCabecMsg1, env); } // adding SOAP soap_headers this._serviceClient.addHeadersToEnvelope(env); // set the message context with that soap envelope _messageContext.setEnvelope(env); // add the message contxt to the operation client _operationClient.addMessageContext(_messageContext); // execute the operation client _operationClient.execute(true); final 
org.apache.axis2.context.MessageContext _returnMessageContext = _operationClient.getMessageContext(org.apache.axis2.wsdl.WSDLConstants.MESSAGE_LABEL_IN_VALUE); final org.apache.axiom.soap.SOAPEnvelope _returnEnv = _returnMessageContext.getEnvelope(); final java.lang.Object object = this.fromOM(_returnEnv.getBody().getFirstElement(), com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult.class, this.getEnvelopeNamespaces(_returnEnv)); return (com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult) object; } catch (final org.apache.axis2.AxisFault f) { final org.apache.axiom.om.OMElement faultElt = f.getDetail(); if (faultElt != null) { if (this.faultExceptionNameMap.containsKey(new org.apache.axis2.client.FaultMapKey(faultElt.getQName(), "mdfeRecepcaoLote"))) { // make the fault by reflection try { final java.lang.String exceptionClassName = (java.lang.String) this.faultExceptionClassNameMap.get(new org.apache.axis2.client.FaultMapKey(faultElt.getQName(), "mdfeRecepcaoLote")); final java.lang.Class exceptionClass = java.lang.Class.forName(exceptionClassName); final java.lang.reflect.Constructor constructor = exceptionClass.getConstructor(String.class); final java.lang.Exception ex = (java.lang.Exception) constructor.newInstance(f.getMessage()); // message class final java.lang.String messageClassName = (java.lang.String) this.faultMessageMap.get(new org.apache.axis2.client.FaultMapKey(faultElt.getQName(), "mdfeRecepcaoLote")); final java.lang.Class messageClass = java.lang.Class.forName(messageClassName); final java.lang.Object messageObject = this.fromOM(faultElt, messageClass, null); final java.lang.reflect.Method m = exceptionClass.getMethod("setFaultMessage", messageClass); m.invoke(ex, messageObject); throw new java.rmi.RemoteException(ex.getMessage(), ex); } catch (ClassCastException | InstantiationException | IllegalAccessException | java.lang.reflect.InvocationTargetException | 
NoSuchMethodException | ClassNotFoundException e) { // we cannot intantiate the class - throw the original Axis fault throw f; } } else { throw f; } } else { throw f; } } finally { if (_messageContext.getTransportOut() != null) { _messageContext.getTransportOut().getSender().cleanup(_messageContext); } } } /** * Auto generated method signature for Asynchronous Invocations * @param mdfeDadosMsg0 * @param mdfeCabecMsg1 */ public void startmdfeRecepcaoLote(final com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg mdfeDadosMsg0, final com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE mdfeCabecMsg1, final com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoCallbackHandler callback) throws java.rmi.RemoteException { final org.apache.axis2.client.OperationClient _operationClient = this._serviceClient.createClient(this._operations[0].getName()); _operationClient.getOptions().setAction("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao/mdfeRecepcaoLote"); _operationClient.getOptions().setExceptionToBeThrownOnSOAPFault(true); this.addPropertyToOperationClient(_operationClient, org.apache.axis2.description.WSDL2Constants.ATTR_WHTTP_QUERY_PARAMETER_SEPARATOR, "&"); // create SOAP envelope with that payload org.apache.axiom.soap.SOAPEnvelope env; final org.apache.axis2.context.MessageContext _messageContext = new org.apache.axis2.context.MessageContext(); // Style is Doc. 
env = this.toEnvelope(Stub.getFactory(_operationClient.getOptions().getSoapVersionURI()), mdfeDadosMsg0, this.optimizeContent(new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLote")), new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLote")); // add the soap_headers only if they are not null if (mdfeCabecMsg1 != null) { final org.apache.axiom.om.OMElement omElementmdfeCabecMsg1 = this.toOM(mdfeCabecMsg1, this.optimizeContent(new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLote"))); this.addHeader(omElementmdfeCabecMsg1, env); } // adding SOAP soap_headers this._serviceClient.addHeadersToEnvelope(env); // create message context with that soap envelope _messageContext.setEnvelope(env); // add the message context to the operation client _operationClient.addMessageContext(_messageContext); _operationClient.setCallback(new org.apache.axis2.client.async.AxisCallback() { @Override public void onMessage(final org.apache.axis2.context.MessageContext resultContext) { try { final org.apache.axiom.soap.SOAPEnvelope resultEnv = resultContext.getEnvelope(); final java.lang.Object object = MDFeRecepcaoStub.this.fromOM(resultEnv.getBody().getFirstElement(), com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult.class, MDFeRecepcaoStub.this.getEnvelopeNamespaces(resultEnv)); callback.receiveResultmdfeRecepcaoLote((com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult) object); } catch (final org.apache.axis2.AxisFault e) { callback.receiveErrormdfeRecepcaoLote(e); } } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public void onError(final java.lang.Exception error) { if (error instanceof org.apache.axis2.AxisFault) { final org.apache.axis2.AxisFault f = (org.apache.axis2.AxisFault) error; final org.apache.axiom.om.OMElement faultElt = 
f.getDetail(); if (faultElt != null) { if (MDFeRecepcaoStub.this.faultExceptionNameMap.containsKey(new org.apache.axis2.client.FaultMapKey(faultElt.getQName(), "mdfeRecepcaoLote"))) { // make the fault by reflection try { final java.lang.String exceptionClassName = (java.lang.String) MDFeRecepcaoStub.this.faultExceptionClassNameMap.get(new org.apache.axis2.client.FaultMapKey(faultElt.getQName(), "mdfeRecepcaoLote")); final java.lang.Class exceptionClass = java.lang.Class.forName(exceptionClassName); final java.lang.reflect.Constructor constructor = exceptionClass.getConstructor(String.class); final java.lang.Exception ex = (java.lang.Exception) constructor.newInstance(f.getMessage()); // message class final java.lang.String messageClassName = (java.lang.String) MDFeRecepcaoStub.this.faultMessageMap.get(new org.apache.axis2.client.FaultMapKey(faultElt.getQName(), "mdfeRecepcaoLote")); final java.lang.Class messageClass = java.lang.Class.forName(messageClassName); final java.lang.Object messageObject = MDFeRecepcaoStub.this.fromOM(faultElt, messageClass, null); final java.lang.reflect.Method m = exceptionClass.getMethod("setFaultMessage", messageClass); m.invoke(ex, messageObject); callback.receiveErrormdfeRecepcaoLote(new java.rmi.RemoteException(ex.getMessage(), ex)); } catch (ClassCastException | org.apache.axis2.AxisFault | InstantiationException | IllegalAccessException | java.lang.reflect.InvocationTargetException | NoSuchMethodException | ClassNotFoundException e) { // we cannot intantiate the class - throw the original Axis fault callback.receiveErrormdfeRecepcaoLote(f); } } else { callback.receiveErrormdfeRecepcaoLote(f); } } else { callback.receiveErrormdfeRecepcaoLote(f); } } else { callback.receiveErrormdfeRecepcaoLote(error); } } @Override public void onFault(final org.apache.axis2.context.MessageContext faultContext) { final org.apache.axis2.AxisFault fault = org.apache.axis2.util.Utils.getInboundFaultFromMessageContext(faultContext); 
this.onError(fault); } @Override public void onComplete() { try { _messageContext.getTransportOut().getSender().cleanup(_messageContext); } catch (final org.apache.axis2.AxisFault axisFault) { callback.receiveErrormdfeRecepcaoLote(axisFault); } } }); org.apache.axis2.util.CallbackReceiver _callbackReceiver; if (this._operations[0].getMessageReceiver() == null && _operationClient.getOptions().isUseSeparateListener()) { _callbackReceiver = new org.apache.axis2.util.CallbackReceiver(); this._operations[0].setMessageReceiver(_callbackReceiver); } // execute the operation client _operationClient.execute(false); } /** * A utility method that copies the namepaces from the SOAPEnvelope */ @SuppressWarnings({ "rawtypes", "unchecked" }) private java.util.Map getEnvelopeNamespaces(final org.apache.axiom.soap.SOAPEnvelope env) { final java.util.Map returnMap = new java.util.HashMap(); final java.util.Iterator namespaceIterator = env.getAllDeclaredNamespaces(); while (namespaceIterator.hasNext()) { final org.apache.axiom.om.OMNamespace ns = (org.apache.axiom.om.OMNamespace) namespaceIterator.next(); returnMap.put(ns.getPrefix(), ns.getNamespaceURI()); } return returnMap; } private final javax.xml.namespace.QName[] opNameArray = null; private boolean optimizeContent(final javax.xml.namespace.QName opName) { if (this.opNameArray == null) { return false; } for (final QName anOpNameArray : this.opNameArray) { if (opName.equals(anOpNameArray)) { return true; } } return false; } // https://mdfe.sefaz.rs.gov.br/ws/MDFerecepcao/MDFeRecepcao.asmx public static class ExtensionMapper { public static java.lang.Object getTypeObject(final java.lang.String namespaceURI, final java.lang.String typeName, final javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception { if ("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao".equals(namespaceURI) && "mdfeCabecMsg".equals(typeName)) { return MdfeCabecMsg.Factory.parse(reader); } throw new 
org.apache.axis2.databinding.ADBException("Unsupported type " + namespaceURI + " " + typeName); } } @SuppressWarnings("serial") public static class MdfeCabecMsg implements org.apache.axis2.databinding.ADBBean { /* * This type was generated from the piece of schema that had name = mdfeCabecMsg Namespace URI = http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao Namespace Prefix = ns2 */ /** * field for CUF */ protected java.lang.String localCUF; /* * This tracker boolean wil be used to detect whether the user called the set method for this attribute. It will be used to determine whether to include this field in the serialized XML */ protected boolean localCUFTracker = false; public boolean isCUFSpecified() { return this.localCUFTracker; } /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getCUF() { return this.localCUF; } /** * Auto generated setter method * @param param CUF */ public void setCUF(final java.lang.String param) { this.localCUFTracker = param != null; this.localCUF = param; } /** * field for VersaoDados */ protected java.lang.String localVersaoDados; /* * This tracker boolean wil be used to detect whether the user called the set method for this attribute. It will be used to determine whether to include this field in the serialized XML */ protected boolean localVersaoDadosTracker = false; public boolean isVersaoDadosSpecified() { return this.localVersaoDadosTracker; } /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getVersaoDados() { return this.localVersaoDados; } /** * Auto generated setter method * @param param VersaoDados */ public void setVersaoDados(final java.lang.String param) { this.localVersaoDadosTracker = param != null; this.localVersaoDados = param; } /** * field for ExtraAttributes This was an Attribute! This was an Array! 
*/
// ---------------------------------------------------------------------------
// NOTE(review): Axis2/ADB auto-generated databinding code (wsdl2java stubs for
// the MDFeRecepcao service). Comments were added for readability only;
// regenerating the stubs from the WSDL will discard them.
// ---------------------------------------------------------------------------

// Extra (xs:anyAttribute) attributes captured from the parsed element.
protected org.apache.axiom.om.OMAttribute[] localExtraAttributes;

/**
 * Auto generated getter method
 * @return org.apache.axiom.om.OMAttribute[]
 */
public org.apache.axiom.om.OMAttribute[] getExtraAttributes() {
    return this.localExtraAttributes;
}

/**
 * validate the array for ExtraAttributes.
 * The schema allows exactly one extra attribute: both "more than one" and
 * "fewer than one" are rejected. NOTE(review): throws a bare, message-less
 * RuntimeException (generated code).
 */
protected void validateExtraAttributes(final org.apache.axiom.om.OMAttribute[] param) {
    if ((param != null) && (param.length > 1)) {
        throw new java.lang.RuntimeException();
    }
    if ((param != null) && (param.length < 1)) {
        throw new java.lang.RuntimeException();
    }
}

/**
 * Auto generated setter method
 * @param param ExtraAttributes
 */
public void setExtraAttributes(final org.apache.axiom.om.OMAttribute[] param) {
    this.validateExtraAttributes(param);
    this.localExtraAttributes = param;
}

/**
 * Auto generated add method for the array for convenience
 * @param param org.apache.axiom.om.OMAttribute
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void addExtraAttributes(final org.apache.axiom.om.OMAttribute param) {
    if (this.localExtraAttributes == null) {
        this.localExtraAttributes = new org.apache.axiom.om.OMAttribute[] {};
    }
    // Copy-on-add: convert the array to a list, append, and convert back.
    final java.util.List list = org.apache.axis2.databinding.utils.ConverterUtil.toList(this.localExtraAttributes);
    list.add(param);
    this.localExtraAttributes = (org.apache.axiom.om.OMAttribute[]) list.toArray(new org.apache.axiom.om.OMAttribute[0]);
}

/**
 * Wraps this bean in a lazily-serialized OMElement backed by an ADBDataSource.
 * @param parentQName
 * @param factory
 * @return org.apache.axiom.om.OMElement
 */
@Override
public org.apache.axiom.om.OMElement getOMElement(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) {
    final org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this, parentQName);
    return factory.createOMElement(dataSource, parentQName);
}

// Convenience overload: serialize without an xsi:type attribute.
@Override
public void serialize(final javax.xml.namespace.QName parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    this.serialize(parentQName, xmlWriter, false);
}

// Full serializer: writes the start tag, an optional xsi:type attribute, any
// extra attributes, then the cUF / versaoDados children (when their tracker
// flags are set).
@SuppressWarnings("deprecation")
@Override
public void serialize(final javax.xml.namespace.QName parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter, final boolean serializeType) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix;
    java.lang.String namespace;
    prefix = parentQName.getPrefix();
    namespace = parentQName.getNamespaceURI();
    this.writeStartElement(prefix, namespace, parentQName.getLocalPart(), xmlWriter);
    if (serializeType) {
        final java.lang.String namespacePrefix = this.registerPrefix(xmlWriter, "http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao");
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
            this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", namespacePrefix + ":mdfeCabecMsg", xmlWriter);
        } else {
            this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", "mdfeCabecMsg", xmlWriter);
        }
    }
    if (this.localExtraAttributes != null) {
        for (final OMAttribute localExtraAttribute : this.localExtraAttributes) {
            this.writeAttribute(localExtraAttribute.getNamespace().getName(), localExtraAttribute.getLocalName(), localExtraAttribute.getAttributeValue(), xmlWriter);
        }
    }
    if (this.localCUFTracker) {
        namespace = "http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao";
        this.writeStartElement(null, namespace, "cUF", xmlWriter);
        if (this.localCUF == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("cUF cannot be null!!");
        } else {
            xmlWriter.writeCharacters(this.localCUF);
        }
        xmlWriter.writeEndElement();
    }
    if (this.localVersaoDadosTracker) {
        namespace = "http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao";
        this.writeStartElement(null, namespace, "versaoDados", xmlWriter);
        if (this.localVersaoDados == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("versaoDados cannot be null!!");
        } else {
            xmlWriter.writeCharacters(this.localVersaoDados);
        }
        xmlWriter.writeEndElement();
    }
    xmlWriter.writeEndElement();
}

// Returns the fixed "ns2" prefix for the service namespace, otherwise a
// generated unique prefix.
private static java.lang.String generatePrefix(final java.lang.String namespace) {
    if (namespace.equals("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao")) {
        return "ns2";
    }
    return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}

/**
 * Utility method to write an element start tag.
 */
private void writeStartElement(java.lang.String prefix, final java.lang.String namespace, final java.lang.String localPart, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
    if (writerPrefix != null) {
        // Namespace already bound on the writer: reuse the existing prefix.
        xmlWriter.writeStartElement(namespace, localPart);
    } else {
        if (namespace.length() == 0) {
            prefix = "";
        } else if (prefix == null) {
            prefix = MdfeCabecMsg.generatePrefix(namespace);
        }
        xmlWriter.writeStartElement(prefix, localPart, namespace);
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
}

/**
 * Util method to write an attribute with the ns prefix
 */
private void writeAttribute(final java.lang.String prefix, final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (xmlWriter.getPrefix(namespace) == null) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace, attName, attValue);
}

/**
 * Util method to write an attribute without the ns prefix
 */
private void writeAttribute(final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attValue);
    } else {
        this.registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }
}

/**
 * Util method to write a QName-valued attribute (value rendered as
 * "prefix:localPart" when a prefix is bound).
 */
@SuppressWarnings("unused")
private void writeQNameAttribute(final java.lang.String namespace, final java.lang.String attName, final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String attributeNamespace = qname.getNamespaceURI();
    java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
    if (attributePrefix == null) {
        attributePrefix = this.registerPrefix(xmlWriter, attributeNamespace);
    }
    java.lang.String attributeValue;
    if (attributePrefix.trim().length() > 0) {
        attributeValue = attributePrefix + ":" + qname.getLocalPart();
    } else {
        attributeValue = qname.getLocalPart();
    }
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        this.registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}

/**
 * method to handle Qnames
 */
@SuppressWarnings("unused")
private void writeQName(final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI != null) {
        java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
        if (prefix == null) {
            prefix = MdfeCabecMsg.generatePrefix(namespaceURI);
            xmlWriter.writeNamespace(prefix, namespaceURI);
            xmlWriter.setPrefix(prefix, namespaceURI);
        }
        if (prefix.trim().length() > 0) {
            xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        } else {
            // i.e this is the default namespace
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    } else {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    }
}

// Writes a space-separated list of QNames as character data, registering
// namespace prefixes first (namespaces cannot be written after characters).
@SuppressWarnings("unused")
private void writeQNames(final javax.xml.namespace.QName[] qnames, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the charactor data
        final StringBuilder stringToWrite = new StringBuilder();
        java.lang.String namespaceURI;
        java.lang.String prefix;
        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    prefix = MdfeCabecMsg.generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix, namespaceURI);
                }
                if (prefix.trim().length() > 0) {
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}

/**
 * Register a namespace prefix: reuse a bound prefix if one exists, otherwise
 * generate one that does not collide with any prefix already in scope.
 */
private java.lang.String registerPrefix(final javax.xml.stream.XMLStreamWriter xmlWriter, final java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix == null) {
        prefix = MdfeCabecMsg.generatePrefix(namespace);
        final javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();
        while (true) {
            final java.lang.String uri = nsContext.getNamespaceURI(prefix);
            if (uri == null || uri.length() == 0) {
                break;
            }
            prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    return prefix;
}

/**
 * databinding method to get an XML representation of this object
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public javax.xml.stream.XMLStreamReader getPullParser(final javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException {
    final java.util.ArrayList elementList = new java.util.ArrayList();
    final java.util.ArrayList attribList = new java.util.ArrayList();
    if (this.localCUFTracker) {
        elementList.add(new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "cUF"));
        if (this.localCUF != null) {
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(this.localCUF));
        } else {
            throw new org.apache.axis2.databinding.ADBException("cUF cannot be null!!");
        }
    }
    if (this.localVersaoDadosTracker) {
        elementList.add(new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "versaoDados"));
        if (this.localVersaoDados != null) {
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(this.localVersaoDados));
        } else {
            throw new org.apache.axis2.databinding.ADBException("versaoDados cannot be null!!");
        }
    }
    // NOTE(review): unlike serialize() above, localExtraAttributes is iterated
    // here without a null check — a bean with no extra attributes would throw
    // a NullPointerException on this path. Generated code; verify before use.
    for (final OMAttribute localExtraAttribute : this.localExtraAttributes) {
        attribList.add(Constants.OM_ATTRIBUTE_KEY);
        attribList.add(localExtraAttribute);
    }
    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}

/**
 * Factory class that keeps the parse method
 */
public static class Factory {

    /**
     * static method to create the object.
     * Precondition: If this object is an element, the current or next start
     * element starts this object and any intervening reader events are
     * ignorable. If this object is not an element, it is a complex type and
     * the reader is at the event just after the outer start element.
     * Postcondition: If this object is an element, the reader is positioned
     * at its end element. If this object is a complex type, the reader is
     * positioned at the end element of its outer element.
     */
    @SuppressWarnings({ "unused", "rawtypes" })
    public static MdfeCabecMsg parse(final javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
        final MdfeCabecMsg object = new MdfeCabecMsg();
        final int event;
        java.lang.String nillableValue;
        final java.lang.String prefix = "";
        final java.lang.String namespaceuri = "";
        try {
            // Skip to the first start/end element event.
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            // Honor an xsi:type override by delegating to the mapped subtype.
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                final java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type");
                if (fullTypeName != null) {
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.contains(":")) {
                        nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix == null ? "" : nsPrefix;
                    final java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                    if (!"mdfeCabecMsg".equals(type)) {
                        // find namespace for the prefix
                        final java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (MdfeCabecMsg) ExtensionMapper.getTypeObject(nsUri, type, reader);
                    }
                }
            }
            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            final java.util.Vector handledAttributes = new java.util.Vector();
            // now run through all any or extra attributes
            // which were not reflected until now
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                if (!handledAttributes.contains(reader.getAttributeLocalName(i))) {
                    // this is an anyAttribute and we create
                    // an OMAttribute for this
                    final org.apache.axiom.om.OMFactory factory = org.apache.axiom.om.OMAbstractFactory.getOMFactory();
                    final org.apache.axiom.om.OMAttribute attr = factory.createOMAttribute(reader.getAttributeLocalName(i), factory.createOMNamespace(reader.getAttributeNamespace(i), reader.getAttributePrefix(i)), reader.getAttributeValue(i));
                    // and add it to the extra attributes
                    object.addExtraAttributes(attr);
                }
            }
            reader.next();
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            // Optional cUF child element (must not be xsi:nil).
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "cUF").equals(reader.getName())) {
                nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "nil");
                if ("true".equals(nillableValue) || "1".equals(nillableValue)) {
                    throw new org.apache.axis2.databinding.ADBException("The element: " + "cUF" + " cannot be null");
                }
                final java.lang.String content = reader.getElementText();
                object.setCUF(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            // Optional versaoDados child element (must not be xsi:nil).
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "versaoDados").equals(reader.getName())) {
                nillableValue = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "nil");
                if ("true".equals(nillableValue) || "1".equals(nillableValue)) {
                    throw new org.apache.axis2.databinding.ADBException("The element: " + "versaoDados" + " cannot be null");
                }
                final java.lang.String content = reader.getElementText();
                object.setVersaoDados(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            } // End of if for expected property start element
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            if (reader.isStartElement()) {
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
            }
        } catch (final javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }
        return object;
    }
}// end of factory class
}

/**
 * Wrapper bean for the raw "mdfeRecepcaoLoteResult" response element; the
 * payload is kept as an opaque OMElement.
 */
@SuppressWarnings("serial")
public static class MdfeRecepcaoLoteResult implements org.apache.axis2.databinding.ADBBean {

    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeRecepcaoLoteResult", "ns2");

    /**
     * field for ExtraElement
     */
    protected org.apache.axiom.om.OMElement localExtraElement;

    /**
     * Auto generated getter method
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getExtraElement() {
        return this.localExtraElement;
    }

    /**
     * Auto generated setter method
     * @param param ExtraElement
     */
    public void setExtraElement(final org.apache.axiom.om.OMElement param) {
        this.localExtraElement = param;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement (always uses MY_QNAME).
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    @Override
    public org.apache.axiom.om.OMElement getOMElement(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) {
        final org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this, MdfeRecepcaoLoteResult.MY_QNAME);
        return factory.createOMElement(dataSource, MdfeRecepcaoLoteResult.MY_QNAME);
    }

    // Convenience overload: serialize without an xsi:type attribute.
    @Override
    public void serialize(final javax.xml.namespace.QName parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        this.serialize(parentQName,
xmlWriter, false);
}

// Full serializer: writes the wrapper element, an optional xsi:type, then the
// mandatory raw payload element. (Axis2/ADB generated code.)
@Override
public void serialize(final javax.xml.namespace.QName parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter, final boolean serializeType) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix;
    java.lang.String namespace;
    prefix = parentQName.getPrefix();
    namespace = parentQName.getNamespaceURI();
    this.writeStartElement(prefix, namespace, parentQName.getLocalPart(), xmlWriter);
    if (serializeType) {
        final java.lang.String namespacePrefix = this.registerPrefix(xmlWriter, "http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao");
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
            this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", namespacePrefix + ":mdfeRecepcaoLoteResult", xmlWriter);
        } else {
            this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", "mdfeRecepcaoLoteResult", xmlWriter);
        }
    }
    if (this.localExtraElement != null) {
        this.localExtraElement.serialize(xmlWriter);
    } else {
        throw new org.apache.axis2.databinding.ADBException("extraElement cannot be null!!");
    }
    xmlWriter.writeEndElement();
}

// Returns the fixed "ns2" prefix for the service namespace, otherwise a
// generated unique prefix.
private static java.lang.String generatePrefix(final java.lang.String namespace) {
    if (namespace.equals("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao")) {
        return "ns2";
    }
    return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}

/**
 * Utility method to write an element start tag.
 */
private void writeStartElement(java.lang.String prefix, final java.lang.String namespace, final java.lang.String localPart, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
    if (writerPrefix != null) {
        // Namespace already bound on the writer: reuse the existing prefix.
        xmlWriter.writeStartElement(namespace, localPart);
    } else {
        if (namespace.length() == 0) {
            prefix = "";
        } else if (prefix == null) {
            prefix = MdfeRecepcaoLoteResult.generatePrefix(namespace);
        }
        xmlWriter.writeStartElement(prefix, localPart, namespace);
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
}

/**
 * Util method to write an attribute with the ns prefix
 */
private void writeAttribute(final java.lang.String prefix, final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (xmlWriter.getPrefix(namespace) == null) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace, attName, attValue);
}

/**
 * Util method to write an attribute without the ns prefix
 */
@SuppressWarnings("unused")
private void writeAttribute(final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attValue);
    } else {
        this.registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }
}

/**
 * Util method to write a QName-valued attribute (value rendered as
 * "prefix:localPart" when a prefix is bound).
 */
@SuppressWarnings("unused")
private void writeQNameAttribute(final java.lang.String namespace, final java.lang.String attName, final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String attributeNamespace = qname.getNamespaceURI();
    java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
    if (attributePrefix == null) {
        attributePrefix = this.registerPrefix(xmlWriter, attributeNamespace);
    }
    java.lang.String attributeValue;
    if (attributePrefix.trim().length() > 0) {
        attributeValue = attributePrefix + ":" + qname.getLocalPart();
    } else {
        attributeValue = qname.getLocalPart();
    }
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        this.registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}

/**
 * method to handle Qnames
 */
@SuppressWarnings("unused")
private void writeQName(final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI != null) {
        java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
        if (prefix == null) {
            prefix = MdfeRecepcaoLoteResult.generatePrefix(namespaceURI);
            xmlWriter.writeNamespace(prefix, namespaceURI);
            xmlWriter.setPrefix(prefix, namespaceURI);
        }
        if (prefix.trim().length() > 0) {
            xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        } else {
            // i.e this is the default namespace
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    } else {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    }
}

// Writes a space-separated list of QNames as character data, registering
// namespace prefixes first (namespaces cannot be written after characters).
@SuppressWarnings("unused")
private void writeQNames(final javax.xml.namespace.QName[] qnames, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the charactor data
        final StringBuilder stringToWrite = new StringBuilder();
        java.lang.String namespaceURI;
        java.lang.String prefix;
        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    prefix = MdfeRecepcaoLoteResult.generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix, namespaceURI);
                }
                if (prefix.trim().length() > 0) {
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}

/**
 * Register a namespace prefix: reuse a bound prefix if one exists, otherwise
 * generate one that does not collide with any prefix already in scope.
 */
private java.lang.String registerPrefix(final javax.xml.stream.XMLStreamWriter xmlWriter, final java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix == null) {
        prefix = MdfeRecepcaoLoteResult.generatePrefix(namespace);
        final javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();
        while (true) {
            final java.lang.String uri = nsContext.getNamespaceURI(prefix);
            if (uri == null || uri.length() == 0) {
                break;
            }
            prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    return prefix;
}

/**
 * databinding method to get an XML representation of this object
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public javax.xml.stream.XMLStreamReader getPullParser(final javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException {
    final java.util.ArrayList elementList = new java.util.ArrayList();
    final java.util.ArrayList attribList = new java.util.ArrayList();
    if (this.localExtraElement != null) {
        elementList.add(org.apache.axis2.databinding.utils.Constants.OM_ELEMENT_KEY);
        elementList.add(this.localExtraElement);
    } else {
        throw new org.apache.axis2.databinding.ADBException("extraElement cannot be null!!");
    }
    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}

/**
 * Factory class that keeps the parse method
 */
public static class Factory {

    /**
     * static method to create the object.
     * Precondition: If this object is an element, the current or next start
     * element starts this object and any intervening reader events are
     * ignorable. If this object is not an element, it is a complex type and
     * the reader is at the event just after the outer start element.
     * Postcondition: If this object is an element, the reader is positioned
     * at its end element. If this object is a complex type, the reader is
     * positioned at the end element of its outer element.
     */
    @SuppressWarnings({ "unused", "rawtypes" })
    public static MdfeRecepcaoLoteResult parse(final javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
        final MdfeRecepcaoLoteResult object = new MdfeRecepcaoLoteResult();
        final int event;
        final java.lang.String nillableValue = null;
        final java.lang.String prefix = "";
        final java.lang.String namespaceuri = "";
        try {
            // Skip to the first start/end element event.
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            // Honor an xsi:type override by delegating to the mapped subtype.
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                final java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type");
                if (fullTypeName != null) {
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.contains(":")) {
                        nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix == null ? "" : nsPrefix;
                    final java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                    if (!"mdfeRecepcaoLoteResult".equals(type)) {
                        // find namespace for the prefix
                        final java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (MdfeRecepcaoLoteResult) ExtensionMapper.getTypeObject(nsUri, type, reader);
                    }
                }
            }
            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            final java.util.Vector handledAttributes = new java.util.Vector();
            reader.next();
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            if (reader.isStartElement()) {
                // use the QName from the parser as the name for the builder
                final javax.xml.namespace.QName startQname1 = reader.getName();
                // We need to wrap the reader so that it produces a fake START_DOCUMENT event
                // this is needed by the builder classes
                final org.apache.axis2.databinding.utils.NamedStaxOMBuilder builder1 = new org.apache.axis2.databinding.utils.NamedStaxOMBuilder(new org.apache.axis2.util.StreamWrapper(reader), startQname1);
                object.setExtraElement(builder1.getOMElement());
                reader.next();
            } // End of if for expected property start element
            else {
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
            }
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            if (reader.isStartElement()) {
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName());
            }
        } catch (final javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }
        return object;
    }
}// end of factory class
}

/**
 * Wrapper bean for the top-level "mdfeCabecMsg" element; delegates all
 * serialization to the contained MdfeCabecMsg instance.
 */
@SuppressWarnings("serial")
public static class MdfeCabecMsgE implements org.apache.axis2.databinding.ADBBean {

    public static final javax.xml.namespace.QName MY_QNAME = new
javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeCabecMsg", "ns2");

/**
 * field for MdfeCabecMsg
 */
protected MdfeCabecMsg localMdfeCabecMsg;

/**
 * Auto generated getter method
 * @return MdfeCabecMsg
 */
public MdfeCabecMsg getMdfeCabecMsg() {
    return this.localMdfeCabecMsg;
}

/**
 * Auto generated setter method
 * @param param MdfeCabecMsg
 */
public void setMdfeCabecMsg(final MdfeCabecMsg param) {
    this.localMdfeCabecMsg = param;
}

/**
 * Wraps this bean in a lazily-serialized OMElement (always uses MY_QNAME).
 * @param parentQName
 * @param factory
 * @return org.apache.axiom.om.OMElement
 */
@Override
public org.apache.axiom.om.OMElement getOMElement(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) {
    final org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this, MdfeCabecMsgE.MY_QNAME);
    return factory.createOMElement(dataSource, MdfeCabecMsgE.MY_QNAME);
}

// Convenience overload: serialize without an xsi:type attribute.
@Override
public void serialize(final javax.xml.namespace.QName parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    this.serialize(parentQName, xmlWriter, false);
}

// Delegates serialization entirely to the wrapped MdfeCabecMsg bean.
@Override
public void serialize(final javax.xml.namespace.QName parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter, final boolean serializeType) throws javax.xml.stream.XMLStreamException {
    // We can safely assume an element has only one type associated with it
    if (this.localMdfeCabecMsg == null) {
        throw new org.apache.axis2.databinding.ADBException("mdfeCabecMsg cannot be null!");
    }
    this.localMdfeCabecMsg.serialize(MdfeCabecMsgE.MY_QNAME, xmlWriter);
}

// Returns the fixed "ns2" prefix for the service namespace, otherwise a
// generated unique prefix.
private static java.lang.String generatePrefix(final java.lang.String namespace) {
    if (namespace.equals("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao")) {
        return "ns2";
    }
    return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}

/**
 * Utility method to write an element start tag.
 */
@SuppressWarnings("unused")
private void writeStartElement(java.lang.String prefix, final java.lang.String namespace, final java.lang.String localPart, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
    if (writerPrefix != null) {
        // Namespace already bound on the writer: reuse the existing prefix.
        xmlWriter.writeStartElement(namespace, localPart);
    } else {
        if (namespace.length() == 0) {
            prefix = "";
        } else if (prefix == null) {
            prefix = MdfeCabecMsgE.generatePrefix(namespace);
        }
        xmlWriter.writeStartElement(prefix, localPart, namespace);
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
}

/**
 * Util method to write an attribute with the ns prefix
 */
@SuppressWarnings("unused")
private void writeAttribute(final java.lang.String prefix, final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (xmlWriter.getPrefix(namespace) == null) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace, attName, attValue);
}

/**
 * Util method to write an attribute without the ns prefix
 */
@SuppressWarnings("unused")
private void writeAttribute(final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attValue);
    } else {
        this.registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }
}

/**
 * Util method to write a QName-valued attribute (value rendered as
 * "prefix:localPart" when a prefix is bound).
 */
@SuppressWarnings("unused")
private void writeQNameAttribute(final java.lang.String namespace, final java.lang.String attName, final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String attributeNamespace = qname.getNamespaceURI();
    java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
    if (attributePrefix == null) {
        attributePrefix = this.registerPrefix(xmlWriter, attributeNamespace);
    }
    java.lang.String attributeValue;
    if (attributePrefix.trim().length() > 0) {
        attributeValue = attributePrefix + ":" + qname.getLocalPart();
    } else {
        attributeValue = qname.getLocalPart();
    }
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        this.registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}

/**
 * method to handle Qnames
 */
@SuppressWarnings("unused")
private void writeQName(final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    final java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI != null) {
        java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
        if (prefix == null) {
            prefix = MdfeCabecMsgE.generatePrefix(namespaceURI);
            xmlWriter.writeNamespace(prefix, namespaceURI);
            xmlWriter.setPrefix(prefix, namespaceURI);
        }
        if (prefix.trim().length() > 0) {
            xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        } else {
            // i.e this is the default namespace
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    } else {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    }
}

// Writes a space-separated list of QNames as character data, registering
// namespace prefixes first (namespaces cannot be written after characters).
@SuppressWarnings("unused")
private void writeQNames(final javax.xml.namespace.QName[] qnames, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the charactor data
        final StringBuilder stringToWrite = new StringBuilder();
        java.lang.String namespaceURI;
        java.lang.String prefix;
        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    prefix = MdfeCabecMsgE.generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix, namespaceURI);
                }
                if (prefix.trim().length() > 0) {
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}

/**
 * Register a namespace prefix: reuse a bound prefix if one exists, otherwise
 * generate one that does not collide with any prefix already in scope.
 */
private java.lang.String registerPrefix(final javax.xml.stream.XMLStreamWriter xmlWriter, final java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix == null) {
        prefix = MdfeCabecMsgE.generatePrefix(namespace);
        final javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext();
        while (true) {
            final java.lang.String uri = nsContext.getNamespaceURI(prefix);
            if (uri == null || uri.length() == 0) {
                break;
            }
            prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    return prefix;
}

/**
 * databinding method to get an XML representation of this object.
 * NOTE(review): unlike serialize(), this does not null-check
 * localMdfeCabecMsg — a null payload surfaces as a NullPointerException
 * rather than an ADBException. Generated code; verify before relying on it.
 */
@Override
public javax.xml.stream.XMLStreamReader getPullParser(final javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException {
    // We can safely assume an element has only one type associated with it
    return this.localMdfeCabecMsg.getPullParser(MdfeCabecMsgE.MY_QNAME);
}

/**
 * Factory class that keeps the parse method
 */
public static class Factory {

    /**
     * static method to create the object.
     * Precondition: If this object is an element, the current or next start
     * element starts this object and any intervening reader events are
     * ignorable. If this object is not an element, it is a complex type and
     * the reader is at the event just after the outer start element.
     * Postcondition: If this object is an element, the reader is positioned
     * at its end element. If this object is a complex type, the reader is
     * positioned at the end element of its outer element.
     */
    @SuppressWarnings({ "unused", "rawtypes" })
    public static MdfeCabecMsgE parse(final javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
        final MdfeCabecMsgE object = new MdfeCabecMsgE();
        final int event;
        final java.lang.String nillableValue = null;
        final java.lang.String prefix = "";
        final java.lang.String namespaceuri = "";
        try {
            // Skip to the first start/end element event.
            while (!reader.isStartElement() && !reader.isEndElement()) {
                reader.next();
            }
            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
final java.util.Vector handledAttributes = new java.util.Vector(); while (!reader.isEndElement()) { if (reader.isStartElement()) { if (reader.isStartElement() && new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeCabecMsg").equals(reader.getName())) { object.setMdfeCabecMsg(MdfeCabecMsg.Factory.parse(reader)); } // End of if for expected property start element else { // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName()); } } else { reader.next(); } } // end of while loop } catch (final javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }// end of factory class } @SuppressWarnings("serial") public static class MdfeDadosMsg implements org.apache.axis2.databinding.ADBBean { public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao", "mdfeDadosMsg", "ns2"); /** * field for ExtraElement */ protected org.apache.axiom.om.OMElement localExtraElement; /** * Auto generated getter method * @return org.apache.axiom.om.OMElement */ public org.apache.axiom.om.OMElement getExtraElement() { return this.localExtraElement; } /** * Auto generated setter method * @param param ExtraElement */ public void setExtraElement(final org.apache.axiom.om.OMElement param) { this.localExtraElement = param; } /** * @param parentQName * @param factory * @return org.apache.axiom.om.OMElement */ @Override public org.apache.axiom.om.OMElement getOMElement(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) { final org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this, MdfeDadosMsg.MY_QNAME); return factory.createOMElement(dataSource, MdfeDadosMsg.MY_QNAME); } @Override public void serialize(final javax.xml.namespace.QName 
parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { this.serialize(parentQName, xmlWriter, false); } @Override public void serialize(final javax.xml.namespace.QName parentQName, final javax.xml.stream.XMLStreamWriter xmlWriter, final boolean serializeType) throws javax.xml.stream.XMLStreamException { java.lang.String prefix; java.lang.String namespace; prefix = parentQName.getPrefix(); namespace = parentQName.getNamespaceURI(); this.writeStartElement(prefix, namespace, parentQName.getLocalPart(), xmlWriter); if (serializeType) { final java.lang.String namespacePrefix = this.registerPrefix(xmlWriter, "http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao"); if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) { this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", namespacePrefix + ":mdfeDadosMsg", xmlWriter); } else { this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", "mdfeDadosMsg", xmlWriter); } } if (this.localExtraElement != null) { this.localExtraElement.serialize(xmlWriter); } else { throw new org.apache.axis2.databinding.ADBException("extraElement cannot be null!!"); } xmlWriter.writeEndElement(); } private static java.lang.String generatePrefix(final java.lang.String namespace) { if (namespace.equals("http://www.portalfiscal.inf.br/mdfe/wsdl/MDFeRecepcao")) { return "ns2"; } return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } /** * Utility method to write an element start tag. 
*/ private void writeStartElement(java.lang.String prefix, final java.lang.String namespace, final java.lang.String localPart, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { final java.lang.String writerPrefix = xmlWriter.getPrefix(namespace); if (writerPrefix != null) { xmlWriter.writeStartElement(namespace, localPart); } else { if (namespace.length() == 0) { prefix = ""; } else if (prefix == null) { prefix = MdfeDadosMsg.generatePrefix(namespace); } xmlWriter.writeStartElement(prefix, localPart, namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } } /** * Util method to write an attribute with the ns prefix */ private void writeAttribute(final java.lang.String prefix, final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { if (xmlWriter.getPrefix(namespace) == null) { xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } xmlWriter.writeAttribute(namespace, attName, attValue); } /** * Util method to write an attribute without the ns prefix */ @SuppressWarnings("unused") private void writeAttribute(final java.lang.String namespace, final java.lang.String attName, final java.lang.String attValue, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attValue); } else { this.registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attValue); } } /** * Util method to write an attribute without the ns prefix */ @SuppressWarnings("unused") private void writeQNameAttribute(final java.lang.String namespace, final java.lang.String attName, final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { final 
java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = this.registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { this.registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } } /** * method to handle Qnames */ @SuppressWarnings("unused") private void writeQName(final javax.xml.namespace.QName qname, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { final java.lang.String namespaceURI = qname.getNamespaceURI(); if (namespaceURI != null) { java.lang.String prefix = xmlWriter.getPrefix(namespaceURI); if (prefix == null) { prefix = MdfeDadosMsg.generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix, namespaceURI); } if (prefix.trim().length() > 0) { xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } else { // i.e this is the default namespace xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } else { xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } @SuppressWarnings("unused") private void writeQNames(final javax.xml.namespace.QName[] qnames, final javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { if (qnames != null) { // we have to store this data until last moment since it is not possible to write any // namespace data after writing the charactor data final StringBuilder stringToWrite = new StringBuilder(); java.lang.String 
namespaceURI; java.lang.String prefix; for (int i = 0; i < qnames.length; i++) { if (i > 0) { stringToWrite.append(" "); } namespaceURI = qnames[i].getNamespaceURI(); if (namespaceURI != null) { prefix = xmlWriter.getPrefix(namespaceURI); if ((prefix == null) || (prefix.length() == 0)) { prefix = MdfeDadosMsg.generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix, namespaceURI); } if (prefix.trim().length() > 0) { stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } xmlWriter.writeCharacters(stringToWrite.toString()); } } /** * Register a namespace prefix */ private java.lang.String registerPrefix(final javax.xml.stream.XMLStreamWriter xmlWriter, final java.lang.String namespace) throws javax.xml.stream.XMLStreamException { java.lang.String prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = MdfeDadosMsg.generatePrefix(namespace); final javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext(); while (true) { final java.lang.String uri = nsContext.getNamespaceURI(prefix); if (uri == null || uri.length() == 0) { break; } prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } return prefix; } /** * databinding method to get an XML representation of this object */ @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public javax.xml.stream.XMLStreamReader getPullParser(final javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException { final java.util.ArrayList elementList = new java.util.ArrayList(); final java.util.ArrayList attribList = new 
java.util.ArrayList(); if (this.localExtraElement != null) { elementList.add(org.apache.axis2.databinding.utils.Constants.OM_ELEMENT_KEY); elementList.add(this.localExtraElement); } else { throw new org.apache.axis2.databinding.ADBException("extraElement cannot be null!!"); } return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray()); } /** * Factory class that keeps the parse method */ public static class Factory { /** * static method to create the object Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable If this object is not an element, it is a complex type and the reader is at the event just after the outer start element Postcondition: If this object is an element, the reader is positioned at its end element If this object is a complex type, the reader is positioned at the end element of its outer element */ @SuppressWarnings({ "unused", "rawtypes" }) public static MdfeDadosMsg parse(final javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception { final MdfeDadosMsg object = new MdfeDadosMsg(); final int event; final java.lang.String nillableValue = null; final java.lang.String prefix = ""; final java.lang.String namespaceuri = ""; try { while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) { final java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type"); if (fullTypeName != null) { java.lang.String nsPrefix = null; if (fullTypeName.contains(":")) { nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":")); } nsPrefix = nsPrefix == null ? 
"" : nsPrefix; final java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1); if (!"mdfeDadosMsg".equals(type)) { // find namespace for the prefix final java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix); return (MdfeDadosMsg) ExtensionMapper.getTypeObject(nsUri, type, reader); } } } // Note all attributes that were handled. Used to differ normal attributes // from anyAttributes. final java.util.Vector handledAttributes = new java.util.Vector(); reader.next(); while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement()) { // use the QName from the parser as the name for the builder final javax.xml.namespace.QName startQname1 = reader.getName(); // We need to wrap the reader so that it produces a fake START_DOCUMENT event // this is needed by the builder classes final org.apache.axis2.databinding.utils.NamedStaxOMBuilder builder1 = new org.apache.axis2.databinding.utils.NamedStaxOMBuilder(new org.apache.axis2.util.StreamWrapper(reader), startQname1); object.setExtraElement(builder1.getOMElement()); reader.next(); } // End of if for expected property start element else { // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName()); } while (!reader.isStartElement() && !reader.isEndElement()) { reader.next(); } if (reader.isStartElement()) { // A start element we are not expecting indicates a trailing invalid property throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getName()); } } catch (final javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }// end of factory class } // private org.apache.axiom.om.OMElement toOM(com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg param, boolean optimizeContent) throws org.apache.axis2.AxisFault { // try { // 
return param.getOMElement(com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg.MY_QNAME, org.apache.axiom.om.OMAbstractFactory.getOMFactory()); // } catch (org.apache.axis2.databinding.ADBException e) { // throw org.apache.axis2.AxisFault.makeFault(e); // } // } // // private org.apache.axiom.om.OMElement toOM(com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult param, boolean optimizeContent) throws org.apache.axis2.AxisFault { // try { // return param.getOMElement(com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult.MY_QNAME, org.apache.axiom.om.OMAbstractFactory.getOMFactory()); // } catch (org.apache.axis2.databinding.ADBException e) { // throw org.apache.axis2.AxisFault.makeFault(e); // } // } private org.apache.axiom.om.OMElement toOM(final com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE param, final boolean optimizeContent) { // try { return param.getOMElement(com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE.MY_QNAME, org.apache.axiom.om.OMAbstractFactory.getOMFactory()); // } catch (org.apache.axis2.databinding.ADBException e) { // throw org.apache.axis2.AxisFault.makeFault(e); // } } private org.apache.axiom.soap.SOAPEnvelope toEnvelope(final org.apache.axiom.soap.SOAPFactory factory, final com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg param, final boolean optimizeContent, final javax.xml.namespace.QName methodQName) { // try { final org.apache.axiom.soap.SOAPEnvelope emptyEnvelope = factory.getDefaultEnvelope(); emptyEnvelope.getBody().addChild(param.getOMElement(com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg.MY_QNAME, factory)); return emptyEnvelope; // } catch (org.apache.axis2.databinding.ADBException e) { // throw org.apache.axis2.AxisFault.makeFault(e); // } } /* methods to 
provide back word compatibility */ /** * get the default envelope */ @SuppressWarnings("unused") private org.apache.axiom.soap.SOAPEnvelope toEnvelope(final org.apache.axiom.soap.SOAPFactory factory) { return factory.getDefaultEnvelope(); } @SuppressWarnings("rawtypes") private java.lang.Object fromOM(final org.apache.axiom.om.OMElement param, final java.lang.Class type, final java.util.Map extraNamespaces) throws org.apache.axis2.AxisFault { try { if (com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg.class.equals(type)) { return com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeDadosMsg.Factory.parse(param.getXMLStreamReaderWithoutCaching()); } if (com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult.class.equals(type)) { return com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeRecepcaoLoteResult.Factory.parse(param.getXMLStreamReaderWithoutCaching()); } if (com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE.class.equals(type)) { return com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE.Factory.parse(param.getXMLStreamReaderWithoutCaching()); } if (com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE.class.equals(type)) { return com.fincatto.documentofiscal.mdfe3.webservices.recepcao.MDFeRecepcaoStub.MdfeCabecMsgE.Factory.parse(param.getXMLStreamReaderWithoutCaching()); } } catch (final java.lang.Exception e) { throw org.apache.axis2.AxisFault.makeFault(e); } return null; } }
apache-2.0
atulsm/Test_Projects
src/TestIfElseIf.java
272
/**
 * Tiny demo of an if / else-if / else chain: with both flags set, only the
 * first branch fires, so "a" is printed.
 */
public class TestIfElseIf {

    /**
     * Entry point; prints exactly one of "a", "b" or "c".
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        final boolean a = true;
        final boolean b = true;

        // Equivalent to the classic if / else-if / else ladder: the first
        // true condition wins, so 'a' shadows 'b' here.
        final String label = a ? "a" : (b ? "b" : "c");
        System.out.println(label);
    }
}
apache-2.0
scholzj/barnabas
systemtest/src/main/java/io/strimzi/systemtest/resources/kubernetes/ClusterRoleBindingResource.java
2773
/* * Copyright Strimzi authors. * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html). */ package io.strimzi.systemtest.resources.kubernetes; import io.fabric8.kubernetes.api.model.rbac.ClusterRoleBinding; import io.fabric8.kubernetes.api.model.rbac.ClusterRoleBindingBuilder; import io.strimzi.systemtest.Constants; import io.strimzi.systemtest.resources.ResourceManager; import io.strimzi.systemtest.resources.ResourceType; import io.strimzi.test.TestUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.junit.jupiter.api.extension.ExtensionContext; public class ClusterRoleBindingResource implements ResourceType<ClusterRoleBinding> { private static final Logger LOGGER = LogManager.getLogger(ClusterRoleBindingResource.class); @Override public String getKind() { return Constants.CLUSTER_ROLE_BINDING; } @Override public ClusterRoleBinding get(String namespace, String name) { return ResourceManager.kubeClient().namespace(namespace).getClusterRoleBinding(name); } @Override public void create(ClusterRoleBinding resource) { ResourceManager.kubeClient().namespace(resource.getMetadata().getNamespace()).createOrReplaceClusterRoleBinding(resource); } @Override public void delete(ClusterRoleBinding resource) { ResourceManager.kubeClient().namespace(resource.getMetadata().getNamespace()).deleteClusterRoleBinding(resource); } @Override public boolean waitForReadiness(ClusterRoleBinding resource) { return resource != null; } public static ClusterRoleBinding clusterRoleBinding(ExtensionContext extensionContext, String yamlPath, String namespace) { LOGGER.info("Creating ClusterRoleBinding in test case {} from {} in namespace {}", extensionContext.getDisplayName(), yamlPath, namespace); ClusterRoleBinding clusterRoleBinding = getClusterRoleBindingFromYaml(yamlPath); clusterRoleBinding = new ClusterRoleBindingBuilder(clusterRoleBinding) .editFirstSubject() .withNamespace(namespace) 
.endSubject().build(); ResourceManager.getInstance().createResource(extensionContext, clusterRoleBinding); return clusterRoleBinding; } public static ClusterRoleBinding clusterRoleBinding(ExtensionContext extensionContext, ClusterRoleBinding clusterRoleBinding) { ResourceManager.getInstance().createResource(extensionContext, clusterRoleBinding); return clusterRoleBinding; } private static ClusterRoleBinding getClusterRoleBindingFromYaml(String yamlPath) { return TestUtils.configFromYaml(yamlPath, ClusterRoleBinding.class); } }
apache-2.0
BlueSenseNetworks/Android
ProximitySenseSDK/lib/src/main/java/com/bluesensenetworks/proximitysense/model/RangingListener.java
211
package com.bluesensenetworks.proximitysense.model;

import com.bluesensenetworks.proximitysense.model.actions.ActionBase;

/**
 * Callback interface for receiving {@link ActionBase} instances.
 *
 * NOTE(review): the name suggests these actions are delivered during beacon
 * ranging, but that behavior is established by the caller, not visible here —
 * confirm against the SDK's ranging pipeline before relying on it.
 */
public interface RangingListener {
	/**
	 * Invoked with an action that was received.
	 *
	 * @param action the received action; nullability not established here
	 */
	void didReceiveAction(ActionBase action);
}
apache-2.0
lpatino10/android-sdk
library/src/main/java/com/ibm/watson/developer_cloud/android/library/audio/StreamPlayer.java
3143
/*
 * Copyright 2017 IBM Corp. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package com.ibm.watson.developer_cloud.android.library.audio;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.util.Log;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

/**
 * Exposes the ability to play raw audio data from an InputStream.
 */
public final class StreamPlayer {

  private final String TAG = "StreamPlayer";

  private AudioTrack audioTrack;
  private int sampleRate;

  /**
   * Drains the whole stream into a byte array using a fixed 10 KiB buffer.
   *
   * @param is source stream (not closed here)
   * @return all bytes read from the stream
   * @throws IOException if reading fails
   */
  private static byte[] convertStreamToByteArray(InputStream is) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] buff = new byte[10240];
    int i;
    while ((i = is.read(buff, 0, buff.length)) > 0) {
      baos.write(buff, 0, i);
    }
    return baos.toByteArray();
  }

  /**
   * Reads a little-endian 32-bit integer from {@code data} at {@code offset}.
   */
  private static int readInt(final byte[] data, final int offset) {
    return (data[offset] & 0xff) | ((data[offset + 1] & 0xff) << 8) | ((data[offset + 2] & 0xff) << 16)
        | (data[offset + 3] << 24); // no 0xff on the last one to keep the sign
  }

  /**
   * Play the given InputStream. The stream is fully read into memory, the
   * sample rate is taken from byte offset 24 (the RIFF/WAV header position),
   * the first 92 bytes (header + metadata) are skipped, and the remainder is
   * written to a freshly initialized {@link AudioTrack}. The stream is always
   * closed before returning.
   *
   * @param stream the stream
   */
  public void playStream(InputStream stream) {
    try {
      byte[] data = convertStreamToByteArray(stream);
      if (data.length > 28) {
        // Sample rate field of a RIFF/WAV header lives at byte offset 24.
        sampleRate = readInt(data, 24);
      }
      int headSize = 44, metaDataSize = 48;
      int destPos = headSize + metaDataSize;
      // FIX: clamp at zero — the original subtraction went negative for
      // streams shorter than header+metadata and threw
      // NegativeArraySizeException from the array allocation below.
      int rawLength = Math.max(data.length - destPos, 0);
      byte[] d = new byte[rawLength];
      System.arraycopy(data, destPos, d, 0, rawLength);
      initPlayer();
      audioTrack.write(d, 0, d.length);
      if (audioTrack != null && audioTrack.getState() != AudioTrack.STATE_UNINITIALIZED) {
        audioTrack.release();
      }
    } catch (IOException e2) {
      // FIX: getMessage() may be null, which makes Log.e throw an NPE of its
      // own; String.valueOf keeps the log call safe.
      Log.e(TAG, String.valueOf(e2.getMessage()));
    } finally {
      // FIX: the original closed the stream only on the success path, leaking
      // the descriptor whenever reading or playback setup failed.
      try {
        stream.close();
      } catch (IOException ignored) {
        // best-effort close; nothing useful left to report
      }
    }
  }

  /**
   * Interrupt the audioStream.
   */
  public void interrupt() {
    if (audioTrack != null) {
      audioTrack.flush();
      audioTrack.stop();
      audioTrack.release();
    }
  }

  /**
   * Initialize AudioTrack by getting buffersize for the current sample rate,
   * then start playback in streaming mode.
   */
  private void initPlayer() {
    synchronized (this) {
      int bs = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
      audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
          AudioFormat.ENCODING_PCM_16BIT, bs, AudioTrack.MODE_STREAM);
      if (audioTrack != null) {
        audioTrack.play();
      }
    }
  }
}
apache-2.0
Ile2/struts2-showcase-demo
src/plugins/embeddedjsp/src/main/java/org/apache/struts2/el/lang/FunctionMapperImpl.java
6042
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.struts2.el.lang;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

import javax.el.FunctionMapper;

import org.apache.struts2.el.util.ReflectionUtil;

/**
 * {@link FunctionMapper} backed by a {@code "prefix:localName" -> Function}
 * map that is created lazily on the first {@link #addFunction}.
 *
 * @author Jacob Hookom [jacob@hookom.net]
 * @version $Change: 181177 $$DateTime: 2001/06/26 08:45:09 $$Author: rjung $
 */
public class FunctionMapperImpl extends FunctionMapper implements Externalizable {

    private static final long serialVersionUID = 1L;

    // Registered functions keyed by "prefix:localName"; null until first add.
    protected Map<String, Function> functions = null;

    /**
     * Resolves a previously registered function.
     *
     * @param prefix namespace prefix of the function
     * @param localName local name of the function
     * @return the mapped {@link Method}, or {@code null} when no function was
     *         registered under {@code prefix:localName}
     *
     * @see javax.el.FunctionMapper#resolveFunction(java.lang.String,
     *      java.lang.String)
     */
    public Method resolveFunction(String prefix, String localName) {
        if (this.functions != null) {
            Function f = this.functions.get(prefix + ":" + localName);
            // FIX: the original dereferenced f unconditionally and threw a
            // NullPointerException for unknown functions; the FunctionMapper
            // contract is to return null when the function is not found.
            if (f != null) {
                return f.getMethod();
            }
        }
        return null;
    }

    /**
     * Registers {@code m} under the key {@code prefix:localName}.
     *
     * @param prefix namespace prefix (may be null, handled by Function)
     * @param localName local name; must not be null
     * @param m method to map; must not be null
     */
    public void addFunction(String prefix, String localName, Method m) {
        if (this.functions == null) {
            this.functions = new HashMap<String, Function>();
        }
        Function f = new Function(prefix, localName, m);
        synchronized (this) {
            this.functions.put(prefix + ":" + localName, f);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see java.io.Externalizable#writeExternal(java.io.ObjectOutput)
     */
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(this.functions);
    }

    /*
     * (non-Javadoc)
     *
     * @see java.io.Externalizable#readExternal(java.io.ObjectInput)
     */
    @SuppressWarnings("unchecked") // serialized form is always Map<String, Function>
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        this.functions = (Map<String, Function>) in.readObject();
    }

    /**
     * Serializable descriptor of a mapped function: the method itself is
     * transient and lazily re-resolved from the stored owner class name,
     * method name and parameter type names after deserialization.
     */
    public static class Function implements Externalizable {

        protected transient Method m;
        protected String owner;
        protected String name;
        protected String[] types;
        protected String prefix;
        protected String localName;

        /**
         * @param prefix namespace prefix; may be null
         * @param localName local name; must not be null
         * @param m backing method; must not be null
         */
        public Function(String prefix, String localName, Method m) {
            if (localName == null) {
                throw new NullPointerException("LocalName cannot be null");
            }
            if (m == null) {
                throw new NullPointerException("Method cannot be null");
            }
            this.prefix = prefix;
            this.localName = localName;
            this.m = m;
        }

        public Function() {
            // for serialization
        }

        /*
         * (non-Javadoc)
         *
         * @see java.io.Externalizable#writeExternal(java.io.ObjectOutput)
         */
        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeUTF((this.prefix != null) ? this.prefix : "");
            out.writeUTF(this.localName);
            // make sure m isn't null
            getMethod();
            out.writeUTF((this.owner != null) ? this.owner : this.m.getDeclaringClass().getName());
            out.writeUTF((this.name != null) ? this.name : this.m.getName());
            out.writeObject((this.types != null) ? this.types : ReflectionUtil.toTypeNameArray(this.m.getParameterTypes()));
        }

        /*
         * (non-Javadoc)
         *
         * @see java.io.Externalizable#readExternal(java.io.ObjectInput)
         */
        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            this.prefix = in.readUTF();
            if ("".equals(this.prefix)) this.prefix = null;
            this.localName = in.readUTF();
            this.owner = in.readUTF();
            this.name = in.readUTF();
            this.types = (String[]) in.readObject();
        }

        /**
         * Returns the backing method, re-resolving it reflectively from the
         * serialized owner/name/types when the transient field is null.
         */
        public Method getMethod() {
            if (this.m == null) {
                try {
                    Class t = ReflectionUtil.forName(this.owner);
                    Class[] p = ReflectionUtil.toTypeArray(this.types);
                    this.m = t.getMethod(this.name, p);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
            return this.m;
        }

        /**
         * True when this function is registered under the given prefix and
         * local name; a null stored prefix matches any prefix.
         */
        public boolean matches(String prefix, String localName) {
            if (this.prefix != null) {
                if (prefix == null) return false;
                if (!this.prefix.equals(prefix)) return false;
            }
            return this.localName.equals(localName);
        }

        /* (non-Javadoc)
         * @see java.lang.Object#equals(java.lang.Object)
         */
        public boolean equals(Object obj) {
            if (obj instanceof Function) {
                // FIX: the original compared hashCode() values, which makes
                // distinct functions compare equal on a hash collision.
                // Compare the identifying fields instead; hashCode() below
                // remains consistent with this definition.
                Function other = (Function) obj;
                boolean samePrefix = (this.prefix == null) ? (other.prefix == null) : this.prefix.equals(other.prefix);
                return samePrefix && this.localName.equals(other.localName);
            }
            return false;
        }

        /* (non-Javadoc)
         * @see java.lang.Object#hashCode()
         */
        public int hashCode() {
            return (this.prefix + this.localName).hashCode();
        }
    }
}
apache-2.0
gerritjvv/cryptoplayground
hmac/java/hmac/src/test/java/org/funsec/hmac/TOTPTest.java
4078
package org.funsec.hmac;

import org.junit.Test;

import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.util.Arrays;
import java.util.TimeZone;

import static org.junit.Assert.assertEquals;

/**
 * Verifies TOTP generation against the RFC 6238 Appendix B test vectors for
 * HMAC-SHA-1, HMAC-SHA-256 and HMAC-SHA-512 (8-digit codes, 30-second step).
 */
public class TOTPTest {

    /** 20-byte ASCII seed used by the SHA-1 vectors (RFC 6238 Appendix B). */
    private static final byte[] SEED = "12345678901234567890".getBytes();

    /** 32-byte ASCII seed used by the SHA-256 vectors. */
    private static final byte[] SEED32 = "12345678901234567890123456789012".getBytes();

    /** 64-byte ASCII seed used by the SHA-512 vectors. */
    private static final byte[] SEED64 = "1234567890123456789012345678901234567890123456789012345678901234".getBytes();

    /**
     * RFC 6238 Appendix B table. Each row is
     * {epoch seconds, UTC time string, value of T (hex), expected TOTP, HMAC mode, seed}.
     * The hex T column is documentation only; the UTC column is cross-checked
     * against the epoch-seconds column in {@link #testTOTPRFCValues()}.
     */
    private static final Object[][] TOTP_TEST_VALUES = new Object[][] {
            /*
               +-------------+--------------+------------------+----------+--------+
               |  Time (sec) |   UTC Time   | Value of T (hex) |   TOTP   |  Mode  |
               +-------------+--------------+------------------+----------+--------+
             */
            {59, "1970-01-01 00:00:59", "0000000000000001", 94287082, HOTP.SHA_1, SEED},
            {59, "1970-01-01 00:00:59", "0000000000000001", 46119246, HOTP.SHA_256, SEED32},
            {59, "1970-01-01 00:00:59", "0000000000000001", 90693936, HOTP.SHA_512, SEED64},
            {1111111109, "2005-03-18 01:58:29", "00000000023523EC", 7081804, HOTP.SHA_1, SEED},
            {1111111109, "2005-03-18 01:58:29", "00000000023523EC", 68084774, HOTP.SHA_256, SEED32},
            {1111111109, "2005-03-18 01:58:29", "00000000023523EC", 25091201, HOTP.SHA_512, SEED64},
            {1111111111, "2005-03-18 01:58:31", "00000000023523ED", 14050471, HOTP.SHA_1, SEED},
            {1111111111, "2005-03-18 01:58:31", "00000000023523ED", 67062674, HOTP.SHA_256, SEED32},
            {1111111111, "2005-03-18 01:58:31", "00000000023523ED", 99943326, HOTP.SHA_512, SEED64},
            {1234567890, "2009-02-13 23:31:30", "000000000273EF07", 89005924, HOTP.SHA_1, SEED},
            {1234567890, "2009-02-13 23:31:30", "000000000273EF07", 91819424, HOTP.SHA_256, SEED32},
            {1234567890, "2009-02-13 23:31:30", "000000000273EF07", 93441116, HOTP.SHA_512, SEED64},
            {2000000000, "2033-05-18 03:33:20", "0000000003F940AA", 69279037, HOTP.SHA_1, SEED},
            {2000000000, "2033-05-18 03:33:20", "0000000003F940AA", 90698825, HOTP.SHA_256, SEED32},
            {2000000000, "2033-05-18 03:33:20", "0000000003F940AA", 38618901, HOTP.SHA_512, SEED64},
            {20000000000L, "2603-10-11 11:33:20", "0000000027BC86AA", 65353130, HOTP.SHA_1, SEED},
            {20000000000L, "2603-10-11 11:33:20", "0000000027BC86AA", 77737706, HOTP.SHA_256, SEED32},
            {20000000000L, "2603-10-11 11:33:20", "0000000027BC86AA", 47863826, HOTP.SHA_512, SEED64},
    };

    /**
     * Runs every vector in {@link #TOTP_TEST_VALUES}.
     * Fix: the UTC-time column (and the previously dead helper
     * {@link #getSecondsFrom(String)}) are now used to sanity-check the table
     * itself before the TOTP value is asserted.
     */
    @Test
    public void testTOTPRFCValues() throws InvalidKeyException, NoSuchAlgorithmException {
        for (Object[] vals : TOTP_TEST_VALUES) {
            long seconds = ((Number) vals[0]).longValue();
            // The human-readable UTC column must agree with the epoch-seconds column.
            assertEquals((String) vals[1], seconds, getSecondsFrom((String) vals[1]));
            assertTOTP(seconds, (int) vals[3], (String) vals[4], (byte[]) vals[5]);
        }
    }

    /**
     * Asserts that an 8-digit, 30-second-step TOTP computed at the given time
     * with the given algorithm and seed equals the expected value.
     *
     * @param seconds epoch seconds at which the OTP is evaluated
     * @param totp expected 8-digit OTP
     * @param shaAlgo HMAC algorithm name (one of the HOTP.SHA_* constants)
     * @param seed shared secret
     */
    public void assertTOTP(long seconds, int totp, String shaAlgo, byte[] seed) throws NoSuchAlgorithmException, InvalidKeyException {
        HOTP hotp = HOTP.newTOTPInstance(shaAlgo, 8, () -> seconds, 30);
        int given = hotp.calcOtp(seed);
        System.out.println("Matching: " + seconds + " " + totp + " == " + given + " mode " + shaAlgo);
        assertEquals(totp, given);
    }

    /**
     * Parses a "yyyy-MM-dd HH:mm:ss" timestamp (interpreted as UTC) into
     * epoch seconds.
     *
     * @throws RuntimeException wrapping the ParseException on malformed input
     */
    private static long getSecondsFrom(String utcString) {
        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
        try {
            return dateFormat.parse(utcString).toInstant().getEpochSecond();
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }
    }
}
apache-2.0
pawanpal01/biodata
biodata-models/src/main/java/org/opencb/biodata/models/variant/avro/ProteinFeature.java
12009
/**
 * Autogenerated by Avro
 *
 * DO NOT EDIT DIRECTLY
 */
package org.opencb.biodata.models.variant.avro;

// NOTE(review): Avro-generated SpecificRecord -- regenerate from the schema
// rather than editing by hand. All comments below are annotations only.
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class ProteinFeature extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {

  // Parsed record schema; the field order here fixes the indices used by get()/put():
  // 0=id (nullable), 1=start (int), 2=end (int), 3=type (string), 4=description (nullable).
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ProteinFeature\",\"namespace\":\"org.opencb.biodata.models.variant.avro\",\"fields\":[{\"name\":\"id\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]},{\"name\":\"start\",\"type\":\"int\"},{\"name\":\"end\",\"type\":\"int\"},{\"name\":\"type\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"description\",\"type\":[\"null\",{\"type\":\"string\",\"avro.java.string\":\"String\"}]}]}");
  public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }

  private java.lang.String id;           // nullable per schema union ["null","string"]
  private int start;
  private int end;
  private java.lang.String type;         // required string
  private java.lang.String description;  // nullable per schema union ["null","string"]

  /**
   * Default constructor.  Note that this does not initialize fields
   * to their default values from the schema.  If that is desired then
   * one should use <code>newBuilder()</code>.
   */
  public ProteinFeature() {}

  /**
   * All-args constructor.
   */
  public ProteinFeature(java.lang.String id, java.lang.Integer start, java.lang.Integer end, java.lang.String type, java.lang.String description) {
    this.id = id;
    this.start = start;
    this.end = end;
    this.type = type;
    this.description = description;
  }

  public org.apache.avro.Schema getSchema() { return SCHEMA$; }

  // Used by DatumWriter. Applications should not call.
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0: return id;
    case 1: return start;
    case 2: return end;
    case 3: return type;
    case 4: return description;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  // Used by DatumReader. Applications should not call.
  @SuppressWarnings(value="unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
    case 0: id = (java.lang.String)value$; break;
    case 1: start = (java.lang.Integer)value$; break;
    case 2: end = (java.lang.Integer)value$; break;
    case 3: type = (java.lang.String)value$; break;
    case 4: description = (java.lang.String)value$; break;
    default: throw new org.apache.avro.AvroRuntimeException("Bad index");
    }
  }

  /**
   * Gets the value of the 'id' field.
   */
  public java.lang.String getId() {
    return id;
  }

  /**
   * Sets the value of the 'id' field.
   * @param value the value to set.
   */
  public void setId(java.lang.String value) {
    this.id = value;
  }

  /**
   * Gets the value of the 'start' field.
   */
  public java.lang.Integer getStart() {
    return start;
  }

  /**
   * Sets the value of the 'start' field.
   * @param value the value to set.
   */
  public void setStart(java.lang.Integer value) {
    this.start = value;
  }

  /**
   * Gets the value of the 'end' field.
   */
  public java.lang.Integer getEnd() {
    return end;
  }

  /**
   * Sets the value of the 'end' field.
   * @param value the value to set.
   */
  public void setEnd(java.lang.Integer value) {
    this.end = value;
  }

  /**
   * Gets the value of the 'type' field.
   */
  public java.lang.String getType() {
    return type;
  }

  /**
   * Sets the value of the 'type' field.
   * @param value the value to set.
   */
  public void setType(java.lang.String value) {
    this.type = value;
  }

  /**
   * Gets the value of the 'description' field.
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * Sets the value of the 'description' field.
   * @param value the value to set.
   */
  public void setDescription(java.lang.String value) {
    this.description = value;
  }

  /** Creates a new ProteinFeature RecordBuilder */
  public static org.opencb.biodata.models.variant.avro.ProteinFeature.Builder newBuilder() {
    return new org.opencb.biodata.models.variant.avro.ProteinFeature.Builder();
  }

  /** Creates a new ProteinFeature RecordBuilder by copying an existing Builder */
  public static org.opencb.biodata.models.variant.avro.ProteinFeature.Builder newBuilder(org.opencb.biodata.models.variant.avro.ProteinFeature.Builder other) {
    return new org.opencb.biodata.models.variant.avro.ProteinFeature.Builder(other);
  }

  /** Creates a new ProteinFeature RecordBuilder by copying an existing ProteinFeature instance */
  public static org.opencb.biodata.models.variant.avro.ProteinFeature.Builder newBuilder(org.opencb.biodata.models.variant.avro.ProteinFeature other) {
    return new org.opencb.biodata.models.variant.avro.ProteinFeature.Builder(other);
  }

  /**
   * RecordBuilder for ProteinFeature instances.
   */
  public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<ProteinFeature>
    implements org.apache.avro.data.RecordBuilder<ProteinFeature> {

    private java.lang.String id;
    private int start;
    private int end;
    private java.lang.String type;
    private java.lang.String description;

    /** Creates a new Builder */
    private Builder() {
      super(org.opencb.biodata.models.variant.avro.ProteinFeature.SCHEMA$);
    }

    /** Creates a Builder by copying an existing Builder */
    private Builder(org.opencb.biodata.models.variant.avro.ProteinFeature.Builder other) {
      super(other);
      // Only fields explicitly set on the source builder are copied.
      if (isValidValue(fields()[0], other.id)) {
        this.id = data().deepCopy(fields()[0].schema(), other.id);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.start)) {
        this.start = data().deepCopy(fields()[1].schema(), other.start);
        fieldSetFlags()[1] = true;
      }
      if (isValidValue(fields()[2], other.end)) {
        this.end = data().deepCopy(fields()[2].schema(), other.end);
        fieldSetFlags()[2] = true;
      }
      if (isValidValue(fields()[3], other.type)) {
        this.type = data().deepCopy(fields()[3].schema(), other.type);
        fieldSetFlags()[3] = true;
      }
      if (isValidValue(fields()[4], other.description)) {
        this.description = data().deepCopy(fields()[4].schema(), other.description);
        fieldSetFlags()[4] = true;
      }
    }

    /** Creates a Builder by copying an existing ProteinFeature instance */
    private Builder(org.opencb.biodata.models.variant.avro.ProteinFeature other) {
      super(org.opencb.biodata.models.variant.avro.ProteinFeature.SCHEMA$);
      if (isValidValue(fields()[0], other.id)) {
        this.id = data().deepCopy(fields()[0].schema(), other.id);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.start)) {
        this.start = data().deepCopy(fields()[1].schema(), other.start);
        fieldSetFlags()[1] = true;
      }
      if (isValidValue(fields()[2], other.end)) {
        this.end = data().deepCopy(fields()[2].schema(), other.end);
        fieldSetFlags()[2] = true;
      }
      if (isValidValue(fields()[3], other.type)) {
        this.type = data().deepCopy(fields()[3].schema(), other.type);
        fieldSetFlags()[3] = true;
      }
      if (isValidValue(fields()[4], other.description)) {
        this.description = data().deepCopy(fields()[4].schema(), other.description);
        fieldSetFlags()[4] = true;
      }
    }

    /** Gets the value of the 'id' field */
    public java.lang.String getId() {
      return id;
    }

    /** Sets the value of the 'id' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder setId(java.lang.String value) {
      validate(fields()[0], value);
      this.id = value;
      fieldSetFlags()[0] = true;
      return this;
    }

    /** Checks whether the 'id' field has been set */
    public boolean hasId() {
      return fieldSetFlags()[0];
    }

    /** Clears the value of the 'id' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder clearId() {
      id = null;
      fieldSetFlags()[0] = false;
      return this;
    }

    /** Gets the value of the 'start' field */
    public java.lang.Integer getStart() {
      return start;
    }

    /** Sets the value of the 'start' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder setStart(int value) {
      validate(fields()[1], value);
      this.start = value;
      fieldSetFlags()[1] = true;
      return this;
    }

    /** Checks whether the 'start' field has been set */
    public boolean hasStart() {
      return fieldSetFlags()[1];
    }

    /** Clears the value of the 'start' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder clearStart() {
      // primitive field: only the set-flag is cleared, the int value is left as-is
      fieldSetFlags()[1] = false;
      return this;
    }

    /** Gets the value of the 'end' field */
    public java.lang.Integer getEnd() {
      return end;
    }

    /** Sets the value of the 'end' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder setEnd(int value) {
      validate(fields()[2], value);
      this.end = value;
      fieldSetFlags()[2] = true;
      return this;
    }

    /** Checks whether the 'end' field has been set */
    public boolean hasEnd() {
      return fieldSetFlags()[2];
    }

    /** Clears the value of the 'end' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder clearEnd() {
      // primitive field: only the set-flag is cleared, the int value is left as-is
      fieldSetFlags()[2] = false;
      return this;
    }

    /** Gets the value of the 'type' field */
    public java.lang.String getType() {
      return type;
    }

    /** Sets the value of the 'type' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder setType(java.lang.String value) {
      validate(fields()[3], value);
      this.type = value;
      fieldSetFlags()[3] = true;
      return this;
    }

    /** Checks whether the 'type' field has been set */
    public boolean hasType() {
      return fieldSetFlags()[3];
    }

    /** Clears the value of the 'type' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder clearType() {
      type = null;
      fieldSetFlags()[3] = false;
      return this;
    }

    /** Gets the value of the 'description' field */
    public java.lang.String getDescription() {
      return description;
    }

    /** Sets the value of the 'description' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder setDescription(java.lang.String value) {
      validate(fields()[4], value);
      this.description = value;
      fieldSetFlags()[4] = true;
      return this;
    }

    /** Checks whether the 'description' field has been set */
    public boolean hasDescription() {
      return fieldSetFlags()[4];
    }

    /** Clears the value of the 'description' field */
    public org.opencb.biodata.models.variant.avro.ProteinFeature.Builder clearDescription() {
      description = null;
      fieldSetFlags()[4] = false;
      return this;
    }

    @Override
    public ProteinFeature build() {
      try {
        ProteinFeature record = new ProteinFeature();
        // Unset fields fall back to their schema defaults (if any).
        record.id = fieldSetFlags()[0] ? this.id : (java.lang.String) defaultValue(fields()[0]);
        record.start = fieldSetFlags()[1] ? this.start : (java.lang.Integer) defaultValue(fields()[1]);
        record.end = fieldSetFlags()[2] ? this.end : (java.lang.Integer) defaultValue(fields()[2]);
        record.type = fieldSetFlags()[3] ? this.type : (java.lang.String) defaultValue(fields()[3]);
        record.description = fieldSetFlags()[4] ? this.description : (java.lang.String) defaultValue(fields()[4]);
        return record;
      } catch (Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
}
apache-2.0
maichler/sbt-jupiter-interface
src/library/src/test/java/net/aichler/jupiter/internal/filter/GlobFilterTest.java
2253
/*
 * jupiter-interface
 *
 * Copyright (c) 2017, Michael Aichler.
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.aichler.jupiter.internal.filter;

import net.aichler.jupiter.internal.event.Dispatcher;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.platform.engine.UniqueId;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

import java.util.HashSet;

import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

/**
 * Unit tests for {@code GlobFilter}.
 *
 * @author Michael Aichler
 */
@RunWith(MockitoJUnitRunner.class)
public class GlobFilterTest {

    @Mock
    Dispatcher dispatcher;

    @Test
    public void shouldMatchWildcardPattern() {

        GlobFilter globFilter = newGlobFilter("basic.*");

        // Class name under the matching package prefix.
        String candidate = "basic.FooTest";
        assertThat(candidate, globFilter.findMatchingPattern(candidate).isPresent(), is(true));

        // Fully qualified test method under the matching package prefix.
        candidate = "basic.FooTest#someTest()";
        assertThat(candidate, globFilter.findMatchingPattern(candidate).isPresent(), is(true));

        // Different package must not match.
        candidate = "failure.AssumptionsTest";
        assertThat(candidate, globFilter.findMatchingPattern(candidate).isPresent(), is(false));
    }

    @Test
    @Ignore
    public void shouldSkipEngineWhenConvertingUniqueIds() {

        GlobFilter globFilter = newGlobFilter("");
        globFilter.toTestName(UniqueId.parse(""));
    }

    /**
     * Creates a new glob filter from the specified patterns.
     *
     * @param patterns The test filter patterns.
     * @return A new glob filter.
     */
    GlobFilter newGlobFilter(String... patterns) {

        HashSet<String> patternSet = new HashSet<>(asList(patterns));
        return new GlobFilter(patternSet, dispatcher);
    }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-iot/src/main/java/com/amazonaws/services/iot/model/ListThingGroupsRequest.java
10222
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iot.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

// NOTE(review): code-generated AWS SDK request model -- regenerate from the
// service model rather than editing by hand.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListThingGroupsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The token to retrieve the next set of results.
     * </p>
     */
    private String nextToken;
    /**
     * <p>
     * The maximum number of results to return at one time.
     * </p>
     */
    private Integer maxResults;
    /**
     * <p>
     * A filter that limits the results to those with the specified parent group.
     * </p>
     */
    private String parentGroup;
    /**
     * <p>
     * A filter that limits the results to those with the specified name prefix.
     * </p>
     */
    private String namePrefixFilter;
    /**
     * <p>
     * If true, return child groups as well.
     * </p>
     */
    private Boolean recursive;

    /**
     * <p>
     * The token to retrieve the next set of results.
     * </p>
     *
     * @param nextToken
     *        The token to retrieve the next set of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * The token to retrieve the next set of results.
     * </p>
     *
     * @return The token to retrieve the next set of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * The token to retrieve the next set of results.
     * </p>
     *
     * @param nextToken
     *        The token to retrieve the next set of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListThingGroupsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * <p>
     * The maximum number of results to return at one time.
     * </p>
     *
     * @param maxResults
     *        The maximum number of results to return at one time.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * <p>
     * The maximum number of results to return at one time.
     * </p>
     *
     * @return The maximum number of results to return at one time.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * <p>
     * The maximum number of results to return at one time.
     * </p>
     *
     * @param maxResults
     *        The maximum number of results to return at one time.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListThingGroupsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * <p>
     * A filter that limits the results to those with the specified parent group.
     * </p>
     *
     * @param parentGroup
     *        A filter that limits the results to those with the specified parent group.
     */
    public void setParentGroup(String parentGroup) {
        this.parentGroup = parentGroup;
    }

    /**
     * <p>
     * A filter that limits the results to those with the specified parent group.
     * </p>
     *
     * @return A filter that limits the results to those with the specified parent group.
     */
    public String getParentGroup() {
        return this.parentGroup;
    }

    /**
     * <p>
     * A filter that limits the results to those with the specified parent group.
     * </p>
     *
     * @param parentGroup
     *        A filter that limits the results to those with the specified parent group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListThingGroupsRequest withParentGroup(String parentGroup) {
        setParentGroup(parentGroup);
        return this;
    }

    /**
     * <p>
     * A filter that limits the results to those with the specified name prefix.
     * </p>
     *
     * @param namePrefixFilter
     *        A filter that limits the results to those with the specified name prefix.
     */
    public void setNamePrefixFilter(String namePrefixFilter) {
        this.namePrefixFilter = namePrefixFilter;
    }

    /**
     * <p>
     * A filter that limits the results to those with the specified name prefix.
     * </p>
     *
     * @return A filter that limits the results to those with the specified name prefix.
     */
    public String getNamePrefixFilter() {
        return this.namePrefixFilter;
    }

    /**
     * <p>
     * A filter that limits the results to those with the specified name prefix.
     * </p>
     *
     * @param namePrefixFilter
     *        A filter that limits the results to those with the specified name prefix.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListThingGroupsRequest withNamePrefixFilter(String namePrefixFilter) {
        setNamePrefixFilter(namePrefixFilter);
        return this;
    }

    /**
     * <p>
     * If true, return child groups as well.
     * </p>
     *
     * @param recursive
     *        If true, return child groups as well.
     */
    public void setRecursive(Boolean recursive) {
        this.recursive = recursive;
    }

    /**
     * <p>
     * If true, return child groups as well.
     * </p>
     *
     * @return If true, return child groups as well.
     */
    public Boolean getRecursive() {
        return this.recursive;
    }

    /**
     * <p>
     * If true, return child groups as well.
     * </p>
     *
     * @param recursive
     *        If true, return child groups as well.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListThingGroupsRequest withRecursive(Boolean recursive) {
        setRecursive(recursive);
        return this;
    }

    /**
     * <p>
     * If true, return child groups as well.
     * </p>
     *
     * @return If true, return child groups as well.
     */
    public Boolean isRecursive() {
        return this.recursive;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken()).append(",");
        if (getMaxResults() != null)
            sb.append("MaxResults: ").append(getMaxResults()).append(",");
        if (getParentGroup() != null)
            sb.append("ParentGroup: ").append(getParentGroup()).append(",");
        if (getNamePrefixFilter() != null)
            sb.append("NamePrefixFilter: ").append(getNamePrefixFilter()).append(",");
        if (getRecursive() != null)
            sb.append("Recursive: ").append(getRecursive());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ListThingGroupsRequest == false)
            return false;
        ListThingGroupsRequest other = (ListThingGroupsRequest) obj;
        // For each field: XOR detects "exactly one side null"; otherwise compare values.
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        if (other.getMaxResults() == null ^ this.getMaxResults() == null)
            return false;
        if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
            return false;
        if (other.getParentGroup() == null ^ this.getParentGroup() == null)
            return false;
        if (other.getParentGroup() != null && other.getParentGroup().equals(this.getParentGroup()) == false)
            return false;
        if (other.getNamePrefixFilter() == null ^ this.getNamePrefixFilter() == null)
            return false;
        if (other.getNamePrefixFilter() != null && other.getNamePrefixFilter().equals(this.getNamePrefixFilter()) == false)
            return false;
        if (other.getRecursive() == null ^ this.getRecursive() == null)
            return false;
        if (other.getRecursive() != null && other.getRecursive().equals(this.getRecursive()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-prime accumulation over the same fields compared in equals().
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
        hashCode = prime * hashCode + ((getParentGroup() == null) ? 0 : getParentGroup().hashCode());
        hashCode = prime * hashCode + ((getNamePrefixFilter() == null) ? 0 : getNamePrefixFilter().hashCode());
        hashCode = prime * hashCode + ((getRecursive() == null) ? 0 : getRecursive().hashCode());
        return hashCode;
    }

    @Override
    public ListThingGroupsRequest clone() {
        // Shallow copy via Object.clone(); all fields are immutable types.
        return (ListThingGroupsRequest) super.clone();
    }

}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-codedeploy/src/main/java/com/amazonaws/services/codedeploy/model/transform/CreateDeploymentConfigRequestMarshaller.java
3230
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.codedeploy.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.codedeploy.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * CreateDeploymentConfigRequestMarshaller
 *
 * NOTE(review): code-generated marshaller -- regenerate from the service
 * model rather than editing by hand. Maps each request field to its JSON
 * payload location and name.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class CreateDeploymentConfigRequestMarshaller {

    // One binding per request field: payload location + JSON member name.
    private static final MarshallingInfo<String> DEPLOYMENTCONFIGNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("deploymentConfigName").build();
    private static final MarshallingInfo<StructuredPojo> MINIMUMHEALTHYHOSTS_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("minimumHealthyHosts").build();
    private static final MarshallingInfo<StructuredPojo> TRAFFICROUTINGCONFIG_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("trafficRoutingConfig").build();
    private static final MarshallingInfo<String> COMPUTEPLATFORM_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("computePlatform").build();

    // Stateless, so a single shared instance suffices.
    private static final CreateDeploymentConfigRequestMarshaller instance = new CreateDeploymentConfigRequestMarshaller();

    public static CreateDeploymentConfigRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(CreateDeploymentConfigRequest createDeploymentConfigRequest, ProtocolMarshaller protocolMarshaller) {

        if (createDeploymentConfigRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(createDeploymentConfigRequest.getDeploymentConfigName(), DEPLOYMENTCONFIGNAME_BINDING);
            protocolMarshaller.marshall(createDeploymentConfigRequest.getMinimumHealthyHosts(), MINIMUMHEALTHYHOSTS_BINDING);
            protocolMarshaller.marshall(createDeploymentConfigRequest.getTrafficRoutingConfig(), TRAFFICROUTINGCONFIG_BINDING);
            protocolMarshaller.marshall(createDeploymentConfigRequest.getComputePlatform(), COMPUTEPLATFORM_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure (NPE, serialization error, ...) as a client-side SDK exception.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
Cloudyle/hapi-fhir
hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoValueSetDstu3.java
15946
package ca.uhn.fhir.jpa.dao.dstu3; /* * #%L * HAPI FHIR JPA Server * %% * Copyright (C) 2014 - 2016 University Health Network * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.annotation.PostConstruct; import org.apache.commons.codec.binary.StringUtils; import org.hl7.fhir.dstu3.hapi.validation.DefaultProfileValidationSupport; import org.hl7.fhir.dstu3.hapi.validation.HapiWorkerContext; import org.hl7.fhir.dstu3.hapi.validation.ValidationSupportChain; import org.hl7.fhir.dstu3.terminologies.ValueSetExpander; import org.hl7.fhir.dstu3.terminologies.ValueSetExpander.ValueSetExpansionOutcome; import org.hl7.fhir.dstu3.model.CodeableConcept; import org.hl7.fhir.dstu3.model.Coding; import org.hl7.fhir.dstu3.model.IdType; import org.hl7.fhir.dstu3.model.ValueSet; import org.hl7.fhir.dstu3.model.ValueSet.ConceptDefinitionComponent; import org.hl7.fhir.dstu3.model.ValueSet.ConceptReferenceComponent; import org.hl7.fhir.dstu3.model.ValueSet.ConceptSetComponent; import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionComponent; import org.hl7.fhir.dstu3.model.ValueSet.ValueSetExpansionContainsComponent; import org.hl7.fhir.instance.model.api.IIdType; import 
org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;

import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.LookupCodeResult;
import ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.server.IBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

/**
 * DSTU3 ValueSet DAO that backs the terminology operations $expand,
 * $validate-code and $lookup. Expansion sources come either from the
 * built-in (default profile) value sets or from ValueSet resources
 * stored in the JPA database.
 */
public class FhirResourceDaoValueSetDstu3 extends FhirResourceDaoDstu3<ValueSet> implements IFhirResourceDaoValueSet<ValueSet, Coding, CodeableConcept> {

    @Autowired
    private IJpaValidationSupportDstu3 myJpaValidationSupport;

    // Chain of default-profile support plus JPA-stored resources; built in postConstruct().
    private ValidationSupportChain myValidationSupport;
    private DefaultProfileValidationSupport myDefaultProfileValidationSupport;

    /**
     * Wires the validation-support chain after dependency injection has run.
     * Order matters: built-in definitions are consulted before JPA-stored ones.
     */
    @Override
    @PostConstruct
    public void postConstruct() {
        super.postConstruct();
        myDefaultProfileValidationSupport = new DefaultProfileValidationSupport();
        myValidationSupport = new ValidationSupportChain(myDefaultProfileValidationSupport, myJpaValidationSupport);
    }

    /**
     * Expands the ValueSet with the given resource ID, optionally keeping only
     * expansion entries whose display text contains {@code theFilter}
     * (case-insensitive substring match).
     *
     * @param theId     ID of the ValueSet to expand
     * @param theFilter display-text filter, or blank/null for no filtering
     * @return a new ValueSet carrying only the (possibly filtered) expansion
     */
    @Override
    public ValueSet expand(IIdType theId, String theFilter) {
        HapiWorkerContext workerContext = new HapiWorkerContext(getContext(), myValidationSupport);
        ValueSet source = workerContext.fetchResource(ValueSet.class, theId.getValue());
        ValueSetExpansionOutcome outcome = workerContext.expand(source);
        ValueSetExpansionComponent expansion = outcome.getValueset().getExpansion();
        if (isNotBlank(theFilter)) {
            // Iterator.remove is required here because we delete while iterating.
            for (Iterator<ValueSetExpansionContainsComponent> containsIter = expansion.getContains().iterator(); containsIter.hasNext();) {
                ValueSetExpansionContainsComponent nextContains = containsIter.next();
                // NOTE(review): getDisplay() can be null on a contains entry with no
                // display element, which would NPE here — confirm inputs always carry one.
                if (!nextContains.getDisplay().toLowerCase().contains(theFilter.toLowerCase())) {
                    containsIter.remove();
                }
            }
        }
        ValueSet retVal = new ValueSet();
        retVal.setExpansion(expansion);
        return retVal;
    }

    /**
     * Expands the ValueSet identified by canonical URI. Built-in (default
     * profile) value sets are preferred; otherwise the URI is resolved by a
     * search on ValueSet.url against the database.
     *
     * @param theUri    canonical URI of the ValueSet; must not be blank
     * @param theFilter display-text filter passed through to the expansion
     * @throws InvalidRequestException if the URI is blank or unknown
     */
    @Override
    public ValueSet expandByIdentifier(String theUri, String theFilter) {
        if (isBlank(theUri)) {
            throw new InvalidRequestException("URI must not be blank or missing");
        }
        ValueSet source;

        org.hl7.fhir.instance.model.ValueSet defaultValueSet = myDefaultProfileValidationSupport.fetchResource(getContext(), org.hl7.fhir.instance.model.ValueSet.class, theUri);
        if (defaultValueSet != null) {
            // Built-in value sets are DSTU2 model objects; round-trip through the
            // JSON parser to convert them into the DSTU3 structure used here.
            source = getContext().newJsonParser().parseResource(ValueSet.class, getContext().newJsonParser().encodeResourceToString(defaultValueSet));
        } else {
            IBundleProvider ids = search(ValueSet.SP_URL, new UriParam(theUri));
            if (ids.size() == 0) {
                throw new InvalidRequestException("Unknown ValueSet URI: " + theUri);
            }
            // If multiple resources share the URL, the first match wins.
            source = (ValueSet) ids.getResources(0, 1).get(0);
        }

        return expand(source, theFilter);
    }

    /**
     * Expands an in-memory ValueSet by flattening its compose.include concepts
     * and its inline codeSystem concept tree into an expansion, applying the
     * optional case-insensitive display/code filter.
     *
     * @param source    ValueSet to expand (not modified)
     * @param theFilter substring filter on display or code, or blank for none
     * @return a new ValueSet whose expansion holds the matching concepts
     */
    @Override
    public ValueSet expand(ValueSet source, String theFilter) {
        ValueSet retVal = new ValueSet();
        retVal.setDate(new Date());

        /*
         * Add composed concepts
         */
        for (ConceptSetComponent nextInclude : source.getCompose().getInclude()) {
            for (ConceptReferenceComponent next : nextInclude.getConcept()) {
                if (isBlank(theFilter)) {
                    addCompose(retVal, nextInclude.getSystem(), next.getCode(), next.getDisplay());
                } else {
                    String filter = theFilter.toLowerCase();
                    if (next.getDisplay().toLowerCase().contains(filter) || next.getCode().toLowerCase().contains(filter)) {
                        addCompose(retVal, nextInclude.getSystem(), next.getCode(), next.getDisplay());
                    }
                }
            }
        }

        /*
         * Add defined concepts (the ValueSet's inline code system, walked recursively)
         */
        for (ConceptDefinitionComponent next : source.getCodeSystem().getConcept()) {
            addCompose(theFilter, retVal, source, next);
        }

        return retVal;
    }

    /**
     * Recursively adds {@code theConcept} and its child concepts from the
     * source's inline code system to the expansion, honoring the filter.
     * Children are always visited, even when the parent itself is filtered out.
     */
    private void addCompose(String theFilter, ValueSet theValueSetToPopulate, ValueSet theSourceValueSet, ConceptDefinitionComponent theConcept) {
        if (isBlank(theFilter)) {
            addCompose(theValueSetToPopulate, theSourceValueSet.getCodeSystem().getSystem(), theConcept.getCode(), theConcept.getDisplay());
        } else {
            String filter = theFilter.toLowerCase();
            if (theConcept.getDisplay().toLowerCase().contains(filter) || theConcept.getCode().toLowerCase().contains(filter)) {
                addCompose(theValueSetToPopulate, theSourceValueSet.getCodeSystem().getSystem(), theConcept.getCode(), theConcept.getDisplay());
            }
        }
        for (ConceptDefinitionComponent nextChild : theConcept.getConcept()) {
            addCompose(theFilter, theValueSetToPopulate, theSourceValueSet, nextChild);
        }
    }

    /**
     * Appends one (system, code, display) triple to the expansion of
     * {@code retVal}; blank codes are silently skipped.
     */
    private void addCompose(ValueSet retVal, String theSystem, String theCode, String theDisplay) {
        if (isBlank(theCode)) {
            return;
        }
        ValueSetExpansionContainsComponent contains = retVal.getExpansion().addContains();
        contains.setSystem(theSystem);
        contains.setCode(theCode);
        contains.setDisplay(theDisplay);
    }

    /**
     * Implements $validate-code: checks that exactly one of code / coding /
     * codeableConcept is supplied, resolves the candidate ValueSet(s) (by ID,
     * by identifier, or by searching for the code itself), expands each and
     * looks for a match. A supplied display must also match the expansion's
     * display for the code, otherwise a failed result is returned.
     *
     * @return a successful result on the first matching ValueSet, a
     *         display-mismatch failure, or a "Code not found" failure
     * @throws InvalidRequestException on missing or ambiguous input
     */
    @Override
    public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCode(IPrimitiveType<String> theValueSetIdentifier, IIdType theId, IPrimitiveType<String> theCode,
            IPrimitiveType<String> theSystem, IPrimitiveType<String> theDisplay, Coding theCoding, CodeableConcept theCodeableConcept) {
        List<IIdType> valueSetIds;

        boolean haveCodeableConcept = theCodeableConcept != null && theCodeableConcept.getCoding().size() > 0;
        boolean haveCoding = theCoding != null && theCoding.isEmpty() == false;
        boolean haveCode = theCode != null && theCode.isEmpty() == false;

        if (!haveCodeableConcept && !haveCoding && !haveCode) {
            throw new InvalidRequestException("No code, coding, or codeableConcept provided to validate");
        }
        // Exactly one of the three input styles may be used.
        if (!multiXor(haveCodeableConcept, haveCoding, haveCode)) {
            throw new InvalidRequestException("$validate-code can only validate (system AND code) OR (coding) OR (codeableConcept)");
        }

        boolean haveIdentifierParam = theValueSetIdentifier != null && theValueSetIdentifier.isEmpty() == false;
        if (theId != null) {
            valueSetIds = Collections.singletonList(theId);
        } else if (haveIdentifierParam) {
            Set<Long> ids = searchForIds(ValueSet.SP_IDENTIFIER, new TokenParam(null, theValueSetIdentifier.getValue()));
            valueSetIds = new ArrayList<IIdType>();
            for (Long next : ids) {
                valueSetIds.add(new IdType("ValueSet", next));
            }
        } else {
            // No ValueSet was named, so locate every ValueSet containing the code.
            if (theCode == null || theCode.isEmpty()) {
                throw new InvalidRequestException("Either ValueSet ID or ValueSet identifier or system and code must be provided. Unable to validate.");
            }
            String code = theCode.getValue();
            String system = toStringOrNull(theSystem);
            valueSetIds = findValueSetIdsContainingSystemAndCode(code, system);
        }

        for (IIdType nextId : valueSetIds) {
            ValueSet expansion = expand(nextId, null);
            List<ValueSetExpansionContainsComponent> contains = expansion.getExpansion().getContains();
            ValidateCodeResult result = validateCodeIsInContains(contains, toStringOrNull(theSystem), toStringOrNull(theCode), theCoding, theCodeableConcept);
            if (result != null) {
                // A display was supplied — verify it against the expansion's display.
                if (theDisplay != null && isNotBlank(theDisplay.getValue()) && isNotBlank(result.getDisplay())) {
                    if (!theDisplay.getValue().equals(result.getDisplay())) {
                        return new ValidateCodeResult(false, "Display for code does not match", result.getDisplay());
                    }
                }
                return result;
            }
        }

        return new ValidateCodeResult(false, "Code not found", null);
    }

    /**
     * Searches the database for ValueSet resources whose indexed codes contain
     * the given (system, code) token and returns their resource IDs.
     */
    private List<IIdType> findValueSetIdsContainingSystemAndCode(String theCode, String theSystem) {
        // if (theSystem != null && theSystem.startsWith("http://hl7.org/fhir/ValueSet")) {
        // return Collections.singletonList((IIdType) new IdType(theSystem));
        // }
        List<IIdType> valueSetIds;
        Set<Long> ids = searchForIds(ValueSet.SP_CODE, new TokenParam(theSystem, theCode));
        valueSetIds = new ArrayList<IIdType>();
        for (Long next : ids) {
            valueSetIds.add(new IdType("ValueSet", next));
        }
        return valueSetIds;
    }

    /** Returns true if exactly one of the given flags is true. */
    private static boolean multiXor(boolean... theValues) {
        int count = 0;
        for (int i = 0; i < theValues.length; i++) {
            if (theValues[i]) {
                count++;
            }
        }
        return count == 1;
    }

    /** Null-safe extraction of a primitive's string value. */
    private String toStringOrNull(IPrimitiveType<String> thePrimitive) {
        return thePrimitive != null ? thePrimitive.getValue() : null;
    }

    /**
     * Depth-first search of an expansion's contains tree for a match against
     * whichever of code / coding / codeableConcept was supplied. A bare code
     * matches any system when {@code theSystem} is blank; coding and
     * codeableConcept matches require both system and code to be equal.
     *
     * @return a successful result for the first match, or null if none found
     */
    private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult validateCodeIsInContains(List<ValueSetExpansionContainsComponent> contains, String theSystem, String theCode,
            Coding theCoding, CodeableConcept theCodeableConcept) {
        for (ValueSetExpansionContainsComponent nextCode : contains) {
            // Recurse into nested contains entries first.
            ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.ValidateCodeResult result = validateCodeIsInContains(nextCode.getContains(), theSystem, theCode, theCoding, theCodeableConcept);
            if (result != null) {
                return result;
            }
            String system = nextCode.getSystem();
            String code = nextCode.getCode();
            if (isNotBlank(theCode)) {
                if (theCode.equals(code) && (isBlank(theSystem) || theSystem.equals(system))) {
                    return new ValidateCodeResult(true, "Validation succeeded", nextCode.getDisplay());
                }
            } else if (theCoding != null) {
                if (StringUtils.equals(system, theCoding.getSystem()) && StringUtils.equals(code, theCoding.getCode())) {
                    return new ValidateCodeResult(true, "Validation succeeded", nextCode.getDisplay());
                }
            } else {
                // codeableConcept path: any one of its codings may match.
                for (Coding next : theCodeableConcept.getCoding()) {
                    if (StringUtils.equals(system, next.getSystem()) && StringUtils.equals(code, next.getCode())) {
                        return new ValidateCodeResult(true, "Validation succeeded", nextCode.getDisplay());
                    }
                }
            }
        }
        return null;
    }

    /**
     * Implements $lookup: resolves a (system, code) pair — given either
     * directly or inside a Coding — against built-in code systems first, then
     * against ValueSets stored in the database.
     *
     * @return a result with found=true and display details on success, or
     *         found=false when the code cannot be located
     * @throws InvalidRequestException on missing or ambiguous input
     */
    @Override
    public ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.LookupCodeResult lookupCode(IPrimitiveType<String> theCode, IPrimitiveType<String> theSystem, Coding theCoding) {
        boolean haveCoding = theCoding != null && isNotBlank(theCoding.getSystem()) && isNotBlank(theCoding.getCode());
        boolean haveCode = theCode != null && theCode.isEmpty() == false;
        boolean haveSystem = theSystem != null && theSystem.isEmpty() == false;

        if (!haveCoding && !(haveSystem && haveCode)) {
            throw new InvalidRequestException("No code, coding, or codeableConcept provided to validate");
        }
        // The coding input and the system+code pair are mutually exclusive, and
        // system/code must be supplied together.
        if (!multiXor(haveCoding, (haveSystem && haveCode)) || (haveSystem != haveCode)) {
            throw new InvalidRequestException("$lookup can only validate (system AND code) OR (coding.system AND coding.code)");
        }

        String code;
        String system;
        if (haveCoding) {
            code = theCoding.getCode();
            system = theCoding.getSystem();
        } else {
            code = theCode.getValue();
            system = theSystem.getValue();
        }

        // CodeValidationResult validateOutcome = myJpaValidationSupport.validateCode(getContext(), system, code, null);
        //
        // LookupCodeResult result = new LookupCodeResult();
        // result.setSearchedForCode(code);
        // result.setSearchedForSystem(system);
        // result.setFound(false);
        // if (validateOutcome.isOk()) {
        // result.setFound(true);
        // result.setCodeIsAbstract(validateOutcome.asConceptDefinition().getAbstract());
        // result.setCodeDisplay(validateOutcome.asConceptDefinition().getDisplay());
        // }
        // return result;

        if (myValidationSupport.isCodeSystemSupported(getContext(), system)) {
            // Built-in code system: expand a throwaway ValueSet including just
            // this code and look for it in the resulting expansion.
            HapiWorkerContext ctx = new HapiWorkerContext(getContext(), myValidationSupport);
            ValueSetExpander expander = ctx.getExpander();
            ValueSet source = new ValueSet();
            source.getCompose().addInclude().setSystem(system).addConcept().setCode(code);
            ValueSetExpansionOutcome expansion;
            try {
                expansion = expander.expand(source);
            } catch (Exception e) {
                throw new InternalErrorException(e);
            }
            List<ValueSetExpansionContainsComponent> contains = expansion.getValueset().getExpansion().getContains();
            ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.LookupCodeResult result = lookup(contains, system, code);
            if (result != null) {
                return result;
            }
        } else {
            /*
             * If it's not a built-in code system, use ones from the database
             */
            List<IIdType> valueSetIds = findValueSetIdsContainingSystemAndCode(code, system);
            for (IIdType nextId : valueSetIds) {
                ValueSet expansion = read(nextId);
                for (ConceptDefinitionComponent next : expansion.getCodeSystem().getConcept()) {
                    if (code.equals(next.getCode())) {
                        ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.LookupCodeResult retVal = new LookupCodeResult();
                        retVal.setSearchedForCode(code);
                        retVal.setSearchedForSystem(system);
                        retVal.setFound(true);
                        if (next.getAbstractElement().getValue() != null) {
                            retVal.setCodeIsAbstract(next.getAbstractElement().booleanValue());
                        }
                        retVal.setCodeDisplay(next.getDisplay());
                        retVal.setCodeSystemDisplayName("Unknown"); // TODO: implement
                        return retVal;
                    }
                }
            }
        }

        // Not found in any built-in or stored source.
        LookupCodeResult retVal = new LookupCodeResult();
        retVal.setFound(false);
        retVal.setSearchedForCode(code);
        retVal.setSearchedForSystem(system);
        return retVal;
    }

    /**
     * Flat (non-recursive) scan of an expansion's top-level contains entries
     * for an exact (system, code) match; returns null when no entry matches.
     * NOTE(review): nested contains entries are not visited here, unlike
     * validateCodeIsInContains — confirm that is intentional.
     */
    private ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.LookupCodeResult lookup(List<ValueSetExpansionContainsComponent> theContains, String theSystem, String theCode) {
        for (ValueSetExpansionContainsComponent nextCode : theContains) {
            String system = nextCode.getSystem();
            String code = nextCode.getCode();
            if (theSystem.equals(system) && theCode.equals(code)) {
                ca.uhn.fhir.jpa.dao.IFhirResourceDaoValueSet.LookupCodeResult retVal = new LookupCodeResult();
                retVal.setSearchedForCode(code);
                retVal.setSearchedForSystem(system);
                retVal.setFound(true);
                if (nextCode.getAbstractElement().getValue() != null) {
                    retVal.setCodeIsAbstract(nextCode.getAbstractElement().booleanValue());
                }
                retVal.setCodeDisplay(nextCode.getDisplay());
                retVal.setCodeSystemVersion(nextCode.getVersion());
                retVal.setCodeSystemDisplayName("Unknown"); // TODO: implement
                return retVal;
            }
        }
        return null;
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/model/transform/DeleteRealtimeLogConfigRequestMarshaller.java
3291
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.cloudfront.model.transform; import static com.amazonaws.util.StringUtils.UTF8; import java.io.StringWriter; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.cloudfront.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.StringInputStream; import com.amazonaws.util.XMLWriter; /** * DeleteRealtimeLogConfigRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DeleteRealtimeLogConfigRequestMarshaller implements Marshaller<Request<DeleteRealtimeLogConfigRequest>, DeleteRealtimeLogConfigRequest> { public Request<DeleteRealtimeLogConfigRequest> marshall(DeleteRealtimeLogConfigRequest deleteRealtimeLogConfigRequest) { if (deleteRealtimeLogConfigRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } Request<DeleteRealtimeLogConfigRequest> request = new DefaultRequest<DeleteRealtimeLogConfigRequest>(deleteRealtimeLogConfigRequest, "AmazonCloudFront"); request.setHttpMethod(HttpMethodName.POST); String uriResourcePath = "/2020-05-31/delete-realtime-log-config/"; request.setResourcePath(uriResourcePath); try { StringWriter stringWriter = new StringWriter(); XMLWriter xmlWriter = new XMLWriter(stringWriter, 
"http://cloudfront.amazonaws.com/doc/2020-05-31/"); xmlWriter.startElement("DeleteRealtimeLogConfigRequest"); if (deleteRealtimeLogConfigRequest != null) { if (deleteRealtimeLogConfigRequest.getName() != null) { xmlWriter.startElement("Name").value(deleteRealtimeLogConfigRequest.getName()).endElement(); } if (deleteRealtimeLogConfigRequest.getARN() != null) { xmlWriter.startElement("ARN").value(deleteRealtimeLogConfigRequest.getARN()).endElement(); } } xmlWriter.endElement(); request.setContent(new StringInputStream(stringWriter.getBuffer().toString())); request.addHeader("Content-Length", Integer.toString(stringWriter.getBuffer().toString().getBytes(UTF8).length)); if (!request.getHeaders().containsKey("Content-Type")) { request.addHeader("Content-Type", "application/xml"); } } catch (Throwable t) { throw new SdkClientException("Unable to marshall request to XML: " + t.getMessage(), t); } return request; } }
apache-2.0
lehmann/BrainSimulator
app/src/main/java/com/github/neuralnetworks/calculation/neuronfunctions/AparapiTanh.java
1375
package com.github.neuralnetworks.calculation.neuronfunctions; import com.github.neuralnetworks.architecture.Connections; import com.github.neuralnetworks.architecture.Layer; import com.github.neuralnetworks.calculation.ConnectionCalculator; import com.github.neuralnetworks.calculation.memory.ValuesProvider; import java.util.List; /** * Tanh activation function */ public class AparapiTanh extends ConnectionCalculatorFullyConnected { private static final long serialVersionUID = 5869298546838843306L; @Override protected ConnectionCalculator createInputFunction(List<Connections> inputConnections, ValuesProvider valuesProvider, Layer targetLayer) { return new AparapiTanhFunction(inputConnections, valuesProvider, targetLayer); } public static class AparapiTanhFunction extends AparapiWeightedSum { private static final long serialVersionUID = -3409078521599849086L; public AparapiTanhFunction(List<Connections> inputConnections, ValuesProvider valuesProvider, Layer targetLayer) { super(inputConnections, valuesProvider, targetLayer); } @Override protected void after() { int end = outputStartPosition + getGlobalId() * outputRowStep + miniBatchSize * outputColumnStep; for (int i = outputStartPosition + getGlobalId() * outputRowStep; i < end; i += outputColumnStep) { output[i] = tan(output[i]); } } } }
apache-2.0
liuhea/Clever
app/src/main/java/com/clever/presenter/impl/SplashPresenterImpl.java
754
package com.clever.presenter.impl; import com.clever.presenter.SplashPresenter; import com.clever.ui.interf.SplashView; /** * @author liuhea * @date 2016-12-29</p> */ public class SplashPresenterImpl implements SplashPresenter { private SplashView mSplashView; public SplashPresenterImpl(SplashView splashView) { mSplashView = splashView; } @Override public void isFirstLogin() { // if (EMClient.getInstance().isConnected() && EMClient.getInstance().isLoggedInBefore()) { // // 已登录,暂时替代sp . mSplashView.onFirstLogin(false); // } else { // // 未登录 . // mSplashView.onFirstLogin(true); // } } }
apache-2.0
yahoo/athenz
servers/zts/src/main/java/com/yahoo/athenz/zts/cert/X509CertRequest.java
24840
/* * Copyright 2017 Yahoo Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.yahoo.athenz.zts.cert; import java.net.URI; import java.net.URISyntaxException; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import com.yahoo.athenz.common.server.dns.HostnameResolver; import com.yahoo.athenz.zts.CertType; import com.yahoo.athenz.zts.ZTSConsts; import com.yahoo.athenz.zts.cache.DataCache; import org.bouncycastle.pkcs.PKCS10CertificationRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.yahoo.athenz.auth.util.Crypto; import com.yahoo.athenz.auth.util.CryptoException; public class X509CertRequest { private static final Logger LOGGER = LoggerFactory.getLogger(X509CertRequest.class); private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+"); protected PKCS10CertificationRequest certReq; protected String instanceId = null; protected String uriHostname = null; protected String spiffeUri = null; protected String normCsrPublicKey = null; protected String cn; protected List<String> dnsNames; protected List<String> providerDnsNames; protected List<String> ipAddresses; protected List<String> uris; public X509CertRequest(String csr) throws CryptoException { certReq = Crypto.getPKCS10CertRequest(csr); if (certReq == null) { throw new CryptoException("Invalid csr provided"); } // extract the dns names but we 
can't process them now // since we need to know what the provider and domain // allowed dns suffix values dnsNames = Crypto.extractX509CSRDnsNames(certReq); providerDnsNames = new ArrayList<>(); ipAddresses = Crypto.extractX509CSRIPAddresses(certReq); // extract the common name for the request try { cn = Crypto.extractX509CSRCommonName(certReq); } catch (Exception ex) { throw new CryptoException("Unable to extract CN from CSR:" + ex.getMessage()); } // extract our URI values uris = Crypto.extractX509CSRURIs(certReq); // process to make sure we only have a single spiffe uri // present in our request if (!extractSpiffeURI()) { throw new CryptoException("Invalid SPIFFE URI present in CSR"); } // extract and set uriHostname, if present uriHostname = X509CertUtils.extractItemFromURI(uris, ZTSConsts.ZTS_CERT_HOSTNAME_URI); // extract instanceId // first check to see if we have the instance id is provided // in the athenz uri field instanceId = X509CertUtils.extractRequestInstanceIdFromURI(uris); // if we have no instance id from the URI, then we're going // to fetch it from the dns list if (instanceId == null) { instanceId = X509CertUtils.extractRequestInstanceIdFromDnsNames(dnsNames); } } public PKCS10CertificationRequest getCertReq() { return certReq; } public void setCertReq(PKCS10CertificationRequest certReq) { this.certReq = certReq; } /** * Verifies that the CSR contains dnsName entries that have * one of the following provided dns suffixes. 
* @param domainName name of the domain * @param serviceName name of the service * @param provider name of the provider for dns/hostname suffix checks * @param athenzSysDomainCache system domain cache object for suffix lists * @param serviceDnsSuffix dns suffix registered for the service * @param instanceHostname instance hostname * @param instanceHostCnames list of instance host cnames * @param hostnameResolver resolver to verify hostname is correct * @return true if all dnsNames in the CSR end with given suffixes */ public boolean validateDnsNames(final String domainName, final String serviceName, final String provider, final DataCache athenzSysDomainCache, final String serviceDnsSuffix, final String instanceHostname, final List<String> instanceHostCnames, HostnameResolver hostnameResolver) { // if the CSR has no dns names then we have nothing to check if (dnsNames.isEmpty()) { return true; } // if we're given an instance host and cname fields then we're going to validate // to make sure it's correct for the given request. Any invalid host/cname field // value will cause the request to be rejected if (!validateInstanceHostname(provider, athenzSysDomainCache, instanceHostname, hostnameResolver)) { LOGGER.error("Unable to validate Instance hostname: {}", instanceHostname); return false; } if (!validateInstanceCnames(provider, athenzSysDomainCache, domainName + "." + serviceName, instanceHostname, instanceHostCnames, hostnameResolver)) { return false; } // make sure our provider dns list is empty providerDnsNames.clear(); // verify that our dns name suffixes match before returning success // if we have a match with our provider dns suffix then we're going // to keep track of those entries in a separate list so we can // send them to the provider for verification (provider does not // have knowledge about the additional service dns domain entries // so it doesn't need to get those). 
We also support the case of // wildcard based on the service name in the format of: // *.<service>.<domain-with-dashes>.<provider-dns-suffix> // so we'll generate and pass the prefix to the function to verify // and automatically skip those from sending to the provider final String wildCardPrefix = "*." + serviceName + "." + domainName.replace('.', '-') + "."; final String serviceDnsSuffixCheck = (serviceDnsSuffix != null) ? "." + serviceDnsSuffix : null; final List<String> providerDnsSuffixList = athenzSysDomainCache.getProviderDnsSuffixList(provider); for (String dnsName : dnsNames) { if (!dnsSuffixCheck(dnsName, providerDnsSuffixList, serviceDnsSuffixCheck, wildCardPrefix, instanceHostname, instanceHostCnames)) { return false; } } return true; } private boolean validateInstanceHostname(final String provider, final DataCache athenzSysDomainCache, final String instanceHostname, HostnameResolver hostnameResolver) { // if we have no hostname configured then there is nothing to do if (instanceHostname == null || instanceHostname.isEmpty()) { return true; } // validate the provider is authorized to request hostnames with // the given prefix if (!isHostnameAllowed(provider, athenzSysDomainCache, instanceHostname)) { return false; } // final check comes from the hostname resolver return hostnameResolver == null ? 
true : hostnameResolver.isValidHostname(instanceHostname); } /** * validateUriHostname ensures that the instanceHostname passed in request matches the hostname in SanURI * @param instanceHostname hostname set in the input request * @return true or false */ boolean validateUriHostname(final String instanceHostname) { // If there is no hostname in SanURI, there is nothing to validate against input hostname if (uriHostname == null || uriHostname.isEmpty()) { return true; } return uriHostname.equals(instanceHostname); } boolean isHostnameAllowed(final String provider, final DataCache athenzSysDomainCache, final String instanceHostname) { // validate the provider is authorized to request hostnames with // the given prefix final List<String> providerHostnameAllowedSuffixList = athenzSysDomainCache.getProviderHostnameAllowedSuffixList(provider); final List<String> providerHostnameDeniedSuffixList = athenzSysDomainCache.getProviderHostnameDeniedSuffixList(provider); // make sure the hostname does not end with one of the denied // suffix values if (providerHostnameDeniedSuffixList != null) { for (String dnsSuffixCheck : providerHostnameDeniedSuffixList) { if (instanceHostname.endsWith(dnsSuffixCheck)) { LOGGER.error("isHostnameAllowed - denied hostname dns suffix {}/{}", instanceHostname, dnsSuffixCheck); return false; } } } // make sure the hostname ends with one of the allowed // suffix values boolean allowedHostName = false; if (providerHostnameAllowedSuffixList != null) { for (String dnsSuffixCheck : providerHostnameAllowedSuffixList) { if (instanceHostname.endsWith(dnsSuffixCheck)) { allowedHostName = true; break; } } } if (!allowedHostName) { LOGGER.error("isHostnameAllowed - not allowed hostname dns name {} in suffix list: {}", instanceHostname, providerHostnameAllowedSuffixList != null ? 
String.join(",", providerHostnameAllowedSuffixList) : ""); return false; } return true; } boolean validateInstanceCnames(final String provider, final DataCache athenzSysDomainCache, final String serviceFqn, final String instanceHostname, List<String> instanceHostCnames, HostnameResolver hostnameResolver) { // if we have no cname list provided then nothing to check if (instanceHostCnames == null || instanceHostCnames.isEmpty()) { return true; } // with a valid cname, we must have an instance hostname provided if (instanceHostname == null || instanceHostname.isEmpty()) { LOGGER.error("Instance Host CNAME list provided without Hostname"); return false; } // verify that all the cnames are valid hostnames and the provider // is authorized to request them for (String cname : instanceHostCnames) { if (!isHostnameAllowed(provider, athenzSysDomainCache, cname)) { return false; } } // we must also have a resolver present and configured if (hostnameResolver != null) { if (!hostnameResolver.isValidHostCnameList(serviceFqn, instanceHostname, instanceHostCnames, CertType.X509)) { LOGGER.error("{} does not have all hosts in {} as configured CNAMEs", instanceHostname, String.join(",", instanceHostCnames)); return false; } return true; } LOGGER.error("Instance host name CNAME list provided without a valid hostname resolver"); return false; } boolean dnsSuffixCheck(final String dnsName, final List<String> providerDnsSuffixList, final String serviceDnsSuffixCheck, final String wildCardPrefix, final String instanceHostname, final List<String> instanceHostCnames) { if (providerDnsSuffixList != null) { for (String dnsSuffixCheck : providerDnsSuffixList) { if (dnsName.endsWith(dnsSuffixCheck)) { // if this entry happens to be a cname for a configured // instance hostname then we're not going to add // the entry to the list for the provider to be approved if (instanceHostCnames != null && instanceHostCnames.contains(dnsName)) { return true; } // if the hostname has the wildcard prefix 
based on the // service identity, we're going to skip sending that to // the provider for verification. we allow components // between the service prefix name and the provider // suffix (in case the provider needs to include possibly // region/colo specific component if (dnsName.startsWith(wildCardPrefix)) { return true; } // add the name to the list to be verified providerDnsNames.add(dnsName); return true; } } } // if this is authorized by Athenz configuration then there is no need // to check with the provider if (serviceDnsSuffixCheck != null && dnsName.endsWith(serviceDnsSuffixCheck)) { return true; } // check if this is the requested hostname in which case we need the // provider to validate it if (instanceHostname != null && dnsName.equalsIgnoreCase(instanceHostname)) { providerDnsNames.add(dnsName); return true; } // finally check if this is one of the requested cnames in which case // there is no need for the provider to validate since Athenz // has already done so with the hostname resolver if (instanceHostCnames != null && instanceHostCnames.contains(dnsName)) { return true; } LOGGER.error("dnsSuffixCheck - dnsName {} does not end with provider {} / service {} suffix or hostname {}", dnsName, providerDnsSuffixList != null ? String.join(",", providerDnsSuffixList) : "", serviceDnsSuffixCheck, instanceHostname); return false; } /** * Compare dns Names specified in this CSR and given X509 Certificate * to make sure they match. 
* @param cert X509 Certificate to compare against * @return true if both CSR and X509 Cert contain identical dns names */ public boolean validateDnsNames(X509Certificate cert) { List<String> certDnsNames = Crypto.extractX509CertDnsNames(cert); if (certDnsNames.size() != dnsNames.size()) { LOGGER.error("compareDnsNames - Mismatch of dnsNames in certificate ({}: {}) and CSR ({}: {})", certDnsNames.size(), String.join(", ", certDnsNames), dnsNames.size(), String.join(", ", dnsNames)); return false; } for (String dnsName : dnsNames) { if (!certDnsNames.contains(dnsName)) { LOGGER.error("compareDnsNames - Unknown dnsName in csr {}, csr-set ({}), certificate-set ({})", dnsName, String.join(", ", dnsNames), String.join(", ", certDnsNames)); return false; } } return true; } /** * Compare instance id specified in this CSR and given X509 Certificate * to make sure they match. * @param reqInstanceId instance id specified in the request uri * @param cert X509 Certificate to compare against * @return true if both CSR and X509 Cert contain identical instance id */ public boolean validateInstanceId(final String reqInstanceId, X509Certificate cert) { // if specified, we must make sure it matches to the given value if (!instanceId.equals(reqInstanceId)) { LOGGER.error("Instanceid mismatch csr: {}, uri: {}", instanceId, reqInstanceId); return false; } final String certInstanceId = X509CertUtils.extractRequestInstanceId(cert); if (!instanceId.equals(certInstanceId)) { LOGGER.error("Instanceid mismatch csr: {}, cert: {}", instanceId, certInstanceId); return false; } return true; } public boolean validateCommonName(String reqCommonName) { if (!reqCommonName.equalsIgnoreCase(cn)) { LOGGER.error("compareCommonName - cn mismatch: {} vs. 
{}", reqCommonName, cn); return false; } return true; } public boolean validateSubjectOField(Set<String> validValues) { if (validValues == null || validValues.isEmpty()) { return true; } try { final String value = Crypto.extractX509CSRSubjectOField(certReq); if (value == null) { return true; } boolean res = validValues.contains(value); if (!res) { LOGGER.error("Failed to validate Subject O Field {}", value); } return res; } catch (CryptoException ex) { LOGGER.error("Unable to extract Subject O Field: {}", ex.getMessage()); return false; } } public boolean validateSubjectOUField(final String provider, final String certSubjectOU, Set<String> validValues) { try { final String value = Crypto.extractX509CSRSubjectOUField(certReq); if (value == null) { return true; } // we have three values that we want to possible match against // a) provider callback specified value // b) provider name // c) configured set of valid ou names if (value.equalsIgnoreCase(certSubjectOU)) { return true; } else if (value.equalsIgnoreCase(provider)) { return true; } else if (validValues != null && !validValues.isEmpty() && validValues.contains(value)) { return true; } else { LOGGER.error("Failed to validate Subject OU Field {}", value); } return false; } catch (CryptoException ex) { LOGGER.error("Unable to extract Subject OU Field: {}", ex.getMessage()); return false; } } boolean extractCsrPublicKey() { // if we have already extracted our public key // and normalized, then there is nothing to do if (normCsrPublicKey != null) { return true; } // otherwise process this request final String csrPublicKey = Crypto.extractX509CSRPublicKey(certReq); if (csrPublicKey == null) { LOGGER.error("comparePublicKeys: unable to get public key"); return false; } // we are going to remove all whitespace, new lines // in order to compare the pem encoded keys Matcher matcher = WHITESPACE_PATTERN.matcher(csrPublicKey); normCsrPublicKey = matcher.replaceAll(""); return true; } public boolean 
validatePublicKeys(final String publicKey) { if (publicKey == null) { LOGGER.error("comparePublicKeys: No public key provided for validation"); return false; } // we are going to remove all whitespace, new lines // in order to compare the pem encoded keys if (!extractCsrPublicKey()) { LOGGER.error("comparePublicKeys: Unable to extract CSR public key"); return false; } if (!compareCsrPublicKey(publicKey)) { LOGGER.error("comparePublicKeys: Public key mismatch"); return false; } return true; } public boolean validatePublicKeys(X509Certificate cert) { // we are going to remove all whitespace, new lines // in order to compare the pem encoded keys if (!extractCsrPublicKey()) { LOGGER.error("comparePublicKeys: Unable to extract CSR public key"); return false; } String certPublicKey = Crypto.extractX509CertPublicKey(cert); if (certPublicKey == null) { LOGGER.error("unable to extract certificate public key"); return false; } if (!compareCsrPublicKey(certPublicKey)) { LOGGER.error("comparePublicKeys: Public key mismatch"); return false; } return true; } boolean compareCsrPublicKey(final String publicKey) { Matcher matcher = WHITESPACE_PATTERN.matcher(publicKey); final String normPublicKey = matcher.replaceAll(""); return normPublicKey.equals(normCsrPublicKey); } public boolean validateIPAddress(final String ip) { // if we have no IP addresses in the request, then we're good if (ipAddresses.isEmpty()) { return true; } // if we have more than 1 IP address in the request then // we're going to reject it as we can't validate if those // multiple addresses are from the same host. 
In this // scenario a provider model must be used which supports // multiple IPs in a request if (ipAddresses.size() != 1) { LOGGER.error("Cert request contains multiple IP: {} addresses", ipAddresses.size()); return false; } return ipAddresses.get(0).equals(ip); } boolean extractSpiffeURI() { // first extract the URI list from the request if (uris == null || uris.isEmpty()) { return true; } // we must only have a single spiffe uri in the list String spUri = null; for (String uri : uris) { if (!uri.toLowerCase().startsWith(ZTSConsts.ZTS_CERT_SPIFFE_URI)) { continue; } if (spUri != null) { LOGGER.error("Multiple SPIFFE URIs in the CSR: {}/{}", uri, spUri); return false; } spUri = uri; } spiffeUri = spUri; return true; } boolean validateSpiffeURI(final String domain, final String name, final String value) { // the expected default format is // spiffe://[<provider-cluster>/ns/]<athenz-domain>/sa/<athenz-service> // spiffe://[<provider-cluster>/ns/]<athenz-domain>/ra/<athenz-role> // // so we'll be validating that our request has: // spiffe://<provider-cluster>/ns/<domain>/<name>/<value> or // spiffe://<domain>/<name>/<value> or if (spiffeUri == null) { return true; } URI uri; try { uri = new URI(spiffeUri); } catch (URISyntaxException ex) { LOGGER.error("validateSpiffeURI: Unable to parse {}: {}", spiffeUri, ex.getMessage()); return false; } final String uriPath = uri.getPath(); final String uriHost = uri.getHost(); if (uriPath == null || uriPath.isEmpty() || uriHost == null || uriHost.isEmpty()) { LOGGER.error("validateSpiffeURI: invalid uri {}", spiffeUri); return false; } // let's check to see if our path starts with our // namespace ns field and thus which format we're using boolean uriVerified = false; if (uriPath.startsWith("/ns/")) { final String path = "/ns/" + domain + "/" + name + "/" + value; uriVerified = uriPath.equalsIgnoreCase(path); } else { final String path = "/" + name + "/" + value; uriVerified = uriHost.equalsIgnoreCase(domain) && 
uriPath.equalsIgnoreCase(path); } if (!uriVerified) { LOGGER.error("validateSpiffeURI: invalid uri path/host: {}", spiffeUri); } return uriVerified; } public void setNormCsrPublicKey(String normCsrPublicKey) { this.normCsrPublicKey = normCsrPublicKey; } public String getCommonName() { return cn; } public String getInstanceId() { return instanceId; } public String getUriHostname() { return uriHostname; } public List<String> getDnsNames() { return dnsNames; } public List<String> getProviderDnsNames() { return providerDnsNames; } public List<String> getUris() { return uris; } public List<String> getIpAddresses() { return ipAddresses; } }
apache-2.0
clonetwin26/buck
src/com/facebook/buck/apple/project_generator/WorkspaceAndProjectGenerator.java
40488
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.apple.project_generator; import com.facebook.buck.apple.AppleBuildRules; import com.facebook.buck.apple.AppleBuildRules.RecursiveDependenciesMode; import com.facebook.buck.apple.AppleBundleDescription; import com.facebook.buck.apple.AppleBundleDescriptionArg; import com.facebook.buck.apple.AppleConfig; import com.facebook.buck.apple.AppleDependenciesCache; import com.facebook.buck.apple.AppleTestDescriptionArg; import com.facebook.buck.apple.XcodeWorkspaceConfigDescription; import com.facebook.buck.apple.XcodeWorkspaceConfigDescriptionArg; import com.facebook.buck.apple.project_generator.ProjectGenerator.Option; import com.facebook.buck.apple.xcode.XCScheme; import com.facebook.buck.apple.xcode.xcodeproj.PBXTarget; import com.facebook.buck.cxx.toolchain.CxxBuckConfig; import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.event.ConsoleEvent; import com.facebook.buck.graph.TopologicalSort; import com.facebook.buck.halide.HalideBuckConfig; import com.facebook.buck.log.Logger; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.model.UnflavoredBuildTarget; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.Cell; import com.facebook.buck.rules.HasTests; import com.facebook.buck.rules.TargetGraph; import 
com.facebook.buck.rules.TargetNode; import com.facebook.buck.rules.keys.config.RuleKeyConfiguration; import com.facebook.buck.swift.SwiftBuckConfig; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.ObjectMappers; import com.facebook.buck.util.Optionals; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.function.Function; import java.util.stream.Stream; public class WorkspaceAndProjectGenerator { private static final Logger LOG = Logger.get(WorkspaceAndProjectGenerator.class); private final Cell rootCell; private final TargetGraph projectGraph; private final AppleDependenciesCache dependenciesCache; private final ProjectGenerationStateCache projGenerationStateCache; private final XcodeWorkspaceConfigDescriptionArg workspaceArguments; private final BuildTarget workspaceBuildTarget; private final FocusedModuleTargetMatcher focusModules; private final 
ImmutableSet<ProjectGenerator.Option> projectGeneratorOptions; private final boolean combinedProject; private final boolean parallelizeBuild; private final CxxPlatform defaultCxxPlatform; private final ImmutableSet<String> appleCxxFlavors; private Optional<ProjectGenerator> combinedProjectGenerator; private final Map<String, SchemeGenerator> schemeGenerators = new HashMap<>(); private final String buildFileName; private final Function<TargetNode<?, ?>, BuildRuleResolver> buildRuleResolverForNode; private final BuckEventBus buckEventBus; private final RuleKeyConfiguration ruleKeyConfiguration; private final ImmutableSet.Builder<BuildTarget> requiredBuildTargetsBuilder = ImmutableSet.builder(); private final ImmutableSortedSet.Builder<Path> xcconfigPathsBuilder = ImmutableSortedSet.naturalOrder(); private final ImmutableList.Builder<CopyInXcode> filesToCopyInXcodeBuilder = ImmutableList.builder(); private final HalideBuckConfig halideBuckConfig; private final CxxBuckConfig cxxBuckConfig; private final AppleConfig appleConfig; private final SwiftBuckConfig swiftBuckConfig; public WorkspaceAndProjectGenerator( Cell cell, TargetGraph projectGraph, XcodeWorkspaceConfigDescriptionArg workspaceArguments, BuildTarget workspaceBuildTarget, Set<Option> projectGeneratorOptions, boolean combinedProject, FocusedModuleTargetMatcher focusModules, boolean parallelizeBuild, CxxPlatform defaultCxxPlatform, ImmutableSet<String> appleCxxFlavors, String buildFileName, Function<TargetNode<?, ?>, BuildRuleResolver> buildRuleResolverForNode, BuckEventBus buckEventBus, RuleKeyConfiguration ruleKeyConfiguration, HalideBuckConfig halideBuckConfig, CxxBuckConfig cxxBuckConfig, AppleConfig appleConfig, SwiftBuckConfig swiftBuckConfig) { this.rootCell = cell; this.projectGraph = projectGraph; this.dependenciesCache = new AppleDependenciesCache(projectGraph); this.ruleKeyConfiguration = ruleKeyConfiguration; this.projGenerationStateCache = new ProjectGenerationStateCache(); 
this.workspaceArguments = workspaceArguments; this.workspaceBuildTarget = workspaceBuildTarget; this.projectGeneratorOptions = ImmutableSet.copyOf(projectGeneratorOptions); this.combinedProject = combinedProject; this.parallelizeBuild = parallelizeBuild; this.defaultCxxPlatform = defaultCxxPlatform; this.appleCxxFlavors = appleCxxFlavors; this.buildFileName = buildFileName; this.buildRuleResolverForNode = buildRuleResolverForNode; this.buckEventBus = buckEventBus; this.swiftBuckConfig = swiftBuckConfig; this.combinedProjectGenerator = Optional.empty(); this.halideBuckConfig = halideBuckConfig; this.cxxBuckConfig = cxxBuckConfig; this.appleConfig = appleConfig; this.focusModules = focusModules.map( inputs -> // Update the focused modules list (if present) to contain srcTarget (if present). workspaceArguments .getSrcTarget() .map( srcTarget -> ImmutableSet.<UnflavoredBuildTarget>builder() .addAll(inputs) .add(srcTarget.getUnflavoredBuildTarget()) .build()) .orElse(inputs)); } @VisibleForTesting Optional<ProjectGenerator> getCombinedProjectGenerator() { return combinedProjectGenerator; } @VisibleForTesting Map<String, SchemeGenerator> getSchemeGenerators() { return schemeGenerators; } public ImmutableSet<BuildTarget> getRequiredBuildTargets() { return requiredBuildTargetsBuilder.build(); } private ImmutableSet<Path> getXcconfigPaths() { return xcconfigPathsBuilder.build(); } private ImmutableList<CopyInXcode> getFilesToCopyInXcode() { return filesToCopyInXcodeBuilder.build(); } public Path generateWorkspaceAndDependentProjects( Map<Path, ProjectGenerator> projectGenerators, ListeningExecutorService listeningExecutorService) throws IOException, InterruptedException { LOG.debug("Generating workspace for target %s", workspaceBuildTarget); String workspaceName = XcodeWorkspaceConfigDescription.getWorkspaceNameFromArg(workspaceArguments); Path outputDirectory; if (combinedProject) { workspaceName += "-Combined"; outputDirectory = 
BuildTargets.getGenPath(rootCell.getFilesystem(), workspaceBuildTarget, "%s") .getParent() .resolve(workspaceName + ".xcodeproj"); } else { outputDirectory = workspaceBuildTarget.getBasePath(); } WorkspaceGenerator workspaceGenerator = new WorkspaceGenerator( rootCell.getFilesystem(), combinedProject ? "project" : workspaceName, outputDirectory); ImmutableMap.Builder<String, XcodeWorkspaceConfigDescriptionArg> schemeConfigsBuilder = ImmutableMap.builder(); ImmutableSetMultimap.Builder<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNodeBuilder = ImmutableSetMultimap.builder(); ImmutableSetMultimap.Builder<String, TargetNode<?, ?>> buildForTestNodesBuilder = ImmutableSetMultimap.builder(); ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescriptionArg, ?>> testsBuilder = ImmutableSetMultimap.builder(); buildWorkspaceSchemes( projectGraph, projectGeneratorOptions.contains(ProjectGenerator.Option.INCLUDE_TESTS), projectGeneratorOptions.contains(ProjectGenerator.Option.INCLUDE_DEPENDENCIES_TESTS), workspaceName, workspaceArguments, schemeConfigsBuilder, schemeNameToSrcTargetNodeBuilder, buildForTestNodesBuilder, testsBuilder); ImmutableMap<String, XcodeWorkspaceConfigDescriptionArg> schemeConfigs = schemeConfigsBuilder.build(); ImmutableSetMultimap<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNode = schemeNameToSrcTargetNodeBuilder.build(); ImmutableSetMultimap<String, TargetNode<?, ?>> buildForTestNodes = buildForTestNodesBuilder.build(); ImmutableSetMultimap<String, TargetNode<AppleTestDescriptionArg, ?>> tests = testsBuilder.build(); ImmutableSet<BuildTarget> targetsInRequiredProjects = Stream.concat( schemeNameToSrcTargetNode.values().stream().flatMap(Optionals::toStream), buildForTestNodes.values().stream()) .map(TargetNode::getBuildTarget) .collect(ImmutableSet.toImmutableSet()); ImmutableMap.Builder<BuildTarget, PBXTarget> buildTargetToPbxTargetMapBuilder = ImmutableMap.builder(); ImmutableMap.Builder<PBXTarget, Path> 
targetToProjectPathMapBuilder = ImmutableMap.builder(); generateProjects( projectGenerators, listeningExecutorService, workspaceName, outputDirectory, workspaceGenerator, targetsInRequiredProjects, buildTargetToPbxTargetMapBuilder, targetToProjectPathMapBuilder); writeWorkspaceMetaData(outputDirectory, workspaceName); if (projectGeneratorOptions.contains( ProjectGenerator.Option.GENERATE_HEADERS_SYMLINK_TREES_ONLY)) { return workspaceGenerator.getWorkspaceDir(); } else { ImmutableMap<BuildTarget, PBXTarget> buildTargetToTarget = buildTargetToPbxTargetMapBuilder.build(); writeWorkspaceSchemes( workspaceName, outputDirectory, schemeConfigs, schemeNameToSrcTargetNode, buildForTestNodes, tests, targetToProjectPathMapBuilder.build(), buildTargetToTarget); return workspaceGenerator.writeWorkspace(); } } private void writeWorkspaceMetaData(Path outputDirectory, String workspaceName) throws IOException { Path path = combinedProject ? outputDirectory : outputDirectory.resolve(workspaceName + ".xcworkspace"); rootCell.getFilesystem().mkdirs(path); ImmutableList<String> requiredTargetsStrings = getRequiredBuildTargets() .stream() .map(Object::toString) .collect(ImmutableList.toImmutableList()); ImmutableMap<String, Object> data = ImmutableMap.of( "required-targets", requiredTargetsStrings, "xcconfig-paths", getXcconfigPaths(), "copy-in-xcode", getFilesToCopyInXcode()); String jsonString = ObjectMappers.WRITER.writeValueAsString(data); rootCell .getFilesystem() .writeContentsToPath(jsonString, path.resolve("buck-project.meta.json")); } private void generateProjects( Map<Path, ProjectGenerator> projectGenerators, ListeningExecutorService listeningExecutorService, String workspaceName, Path outputDirectory, WorkspaceGenerator workspaceGenerator, ImmutableSet<BuildTarget> targetsInRequiredProjects, ImmutableMap.Builder<BuildTarget, PBXTarget> buildTargetToPbxTargetMapBuilder, ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder) throws IOException, 
InterruptedException { if (combinedProject) { generateCombinedProject( workspaceName, outputDirectory, workspaceGenerator, targetsInRequiredProjects, buildTargetToPbxTargetMapBuilder, targetToProjectPathMapBuilder); } else { generateProject( projectGenerators, listeningExecutorService, workspaceGenerator, targetsInRequiredProjects, buildTargetToPbxTargetMapBuilder, targetToProjectPathMapBuilder); } } private void generateProject( Map<Path, ProjectGenerator> projectGenerators, ListeningExecutorService listeningExecutorService, WorkspaceGenerator workspaceGenerator, ImmutableSet<BuildTarget> targetsInRequiredProjects, ImmutableMap.Builder<BuildTarget, PBXTarget> buildTargetToPbxTargetMapBuilder, ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder) throws IOException, InterruptedException { ImmutableMultimap.Builder<Cell, BuildTarget> projectCellToBuildTargetsBuilder = ImmutableMultimap.builder(); for (TargetNode<?, ?> targetNode : projectGraph.getNodes()) { BuildTarget buildTarget = targetNode.getBuildTarget(); projectCellToBuildTargetsBuilder.put(rootCell.getCell(buildTarget), buildTarget); } ImmutableMultimap<Cell, BuildTarget> projectCellToBuildTargets = projectCellToBuildTargetsBuilder.build(); List<ListenableFuture<GenerationResult>> projectGeneratorFutures = new ArrayList<>(); for (Cell projectCell : projectCellToBuildTargets.keySet()) { ImmutableMultimap.Builder<Path, BuildTarget> projectDirectoryToBuildTargetsBuilder = ImmutableMultimap.builder(); ImmutableSet<BuildTarget> cellRules = ImmutableSet.copyOf(projectCellToBuildTargets.get(projectCell)); for (BuildTarget buildTarget : cellRules) { projectDirectoryToBuildTargetsBuilder.put(buildTarget.getBasePath(), buildTarget); } ImmutableMultimap<Path, BuildTarget> projectDirectoryToBuildTargets = projectDirectoryToBuildTargetsBuilder.build(); Path relativeTargetCell = rootCell.getRoot().relativize(projectCell.getRoot()); for (Path projectDirectory : projectDirectoryToBuildTargets.keySet()) { 
ImmutableSet<BuildTarget> rules = filterRulesForProjectDirectory( projectGraph, ImmutableSet.copyOf(projectDirectoryToBuildTargets.get(projectDirectory))); if (Sets.intersection(targetsInRequiredProjects, rules).isEmpty()) { continue; } boolean isMainProject = workspaceArguments.getSrcTarget().isPresent() && rules.contains(workspaceArguments.getSrcTarget().get()); projectGeneratorFutures.add( listeningExecutorService.submit( () -> { GenerationResult result = generateProjectForDirectory( projectGenerators, projectCell, projectDirectory, rules, isMainProject, targetsInRequiredProjects); // convert the projectPath to relative to the target cell here result = GenerationResult.of( relativeTargetCell.resolve(result.getProjectPath()), result.isProjectGenerated(), result.getRequiredBuildTargets(), result.getXcconfigPaths(), result.getFilesToCopyInXcode(), result.getBuildTargetToGeneratedTargetMap()); return result; })); } } List<GenerationResult> generationResults; try { generationResults = Futures.allAsList(projectGeneratorFutures).get(); } catch (ExecutionException e) { Throwables.throwIfInstanceOf(e.getCause(), IOException.class); Throwables.throwIfUnchecked(e.getCause()); throw new IllegalStateException("Unexpected exception: ", e); } for (GenerationResult result : generationResults) { if (!result.isProjectGenerated()) { continue; } workspaceGenerator.addFilePath(result.getProjectPath()); processGenerationResult( buildTargetToPbxTargetMapBuilder, targetToProjectPathMapBuilder, result); } } private void processGenerationResult( ImmutableMap.Builder<BuildTarget, PBXTarget> buildTargetToPbxTargetMapBuilder, ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder, GenerationResult result) { requiredBuildTargetsBuilder.addAll(result.getRequiredBuildTargets()); ImmutableSortedSet<Path> relativeXcconfigPaths = result .getXcconfigPaths() .stream() .map((Path p) -> rootCell.getFilesystem().relativize(p)) 
.collect(ImmutableSortedSet.toImmutableSortedSet(Ordering.natural())); xcconfigPathsBuilder.addAll(relativeXcconfigPaths); filesToCopyInXcodeBuilder.addAll(result.getFilesToCopyInXcode()); buildTargetToPbxTargetMapBuilder.putAll(result.getBuildTargetToGeneratedTargetMap()); for (PBXTarget target : result.getBuildTargetToGeneratedTargetMap().values()) { targetToProjectPathMapBuilder.put(target, result.getProjectPath()); } } private GenerationResult generateProjectForDirectory( Map<Path, ProjectGenerator> projectGenerators, Cell projectCell, Path projectDirectory, ImmutableSet<BuildTarget> rules, boolean isMainProject, ImmutableSet<BuildTarget> targetsInRequiredProjects) throws IOException { boolean shouldGenerateProjects = false; ProjectGenerator generator; synchronized (projectGenerators) { generator = projectGenerators.get(projectDirectory); if (generator != null) { LOG.debug("Already generated project for target %s, skipping", projectDirectory); } else { LOG.debug("Generating project for directory %s with targets %s", projectDirectory, rules); String projectName; if (projectDirectory.getFileName().toString().equals("")) { // If we're generating a project in the root directory, use a generic name. projectName = "Project"; } else { // Otherwise, name the project the same thing as the directory we're in. 
projectName = projectDirectory.getFileName().toString(); } generator = new ProjectGenerator( projectGraph, dependenciesCache, projGenerationStateCache, rules, projectCell, projectDirectory, projectName, buildFileName, projectGeneratorOptions, ruleKeyConfiguration, isMainProject, workspaceArguments.getSrcTarget(), targetsInRequiredProjects, focusModules, defaultCxxPlatform, appleCxxFlavors, buildRuleResolverForNode, buckEventBus, halideBuckConfig, cxxBuckConfig, appleConfig, swiftBuckConfig); projectGenerators.put(projectDirectory, generator); shouldGenerateProjects = true; } } ImmutableSet<BuildTarget> requiredBuildTargets = ImmutableSet.of(); ImmutableMap<BuildTarget, PBXTarget> buildTargetToGeneratedTargetMap = ImmutableMap.of(); if (shouldGenerateProjects) { generator.createXcodeProjects(); } if (generator.isProjectGenerated()) { requiredBuildTargets = generator.getRequiredBuildTargets(); buildTargetToGeneratedTargetMap = generator.getBuildTargetToGeneratedTargetMap(); } return GenerationResult.of( generator.getProjectPath(), generator.isProjectGenerated(), requiredBuildTargets, generator.getXcconfigPaths(), generator.getFilesToCopyInXcode(), buildTargetToGeneratedTargetMap); } private void generateCombinedProject( String workspaceName, Path outputDirectory, WorkspaceGenerator workspaceGenerator, ImmutableSet<BuildTarget> targetsInRequiredProjects, ImmutableMap.Builder<BuildTarget, PBXTarget> buildTargetToPbxTargetMapBuilder, ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder) throws IOException { LOG.debug("Generating a combined project"); ProjectGenerator generator = new ProjectGenerator( projectGraph, dependenciesCache, projGenerationStateCache, targetsInRequiredProjects, rootCell, outputDirectory.getParent(), workspaceName, buildFileName, projectGeneratorOptions, ruleKeyConfiguration, true, workspaceArguments.getSrcTarget(), targetsInRequiredProjects, focusModules, defaultCxxPlatform, appleCxxFlavors, buildRuleResolverForNode, buckEventBus, 
halideBuckConfig, cxxBuckConfig, appleConfig, swiftBuckConfig); combinedProjectGenerator = Optional.of(generator); generator.createXcodeProjects(); GenerationResult result = GenerationResult.of( generator.getProjectPath(), generator.isProjectGenerated(), generator.getRequiredBuildTargets(), generator.getXcconfigPaths(), generator.getFilesToCopyInXcode(), generator.getBuildTargetToGeneratedTargetMap()); workspaceGenerator.addFilePath(result.getProjectPath(), Optional.empty()); processGenerationResult( buildTargetToPbxTargetMapBuilder, targetToProjectPathMapBuilder, result); } private void buildWorkspaceSchemes( TargetGraph projectGraph, boolean includeProjectTests, boolean includeDependenciesTests, String workspaceName, XcodeWorkspaceConfigDescriptionArg workspaceArguments, ImmutableMap.Builder<String, XcodeWorkspaceConfigDescriptionArg> schemeConfigsBuilder, ImmutableSetMultimap.Builder<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNodeBuilder, ImmutableSetMultimap.Builder<String, TargetNode<?, ?>> buildForTestNodesBuilder, ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescriptionArg, ?>> testsBuilder) { ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescriptionArg, ?>> extraTestNodesBuilder = ImmutableSetMultimap.builder(); addWorkspaceScheme( projectGraph, dependenciesCache, workspaceName, workspaceArguments, schemeConfigsBuilder, schemeNameToSrcTargetNodeBuilder, extraTestNodesBuilder); addExtraWorkspaceSchemes( projectGraph, dependenciesCache, workspaceArguments.getExtraSchemes(), schemeConfigsBuilder, schemeNameToSrcTargetNodeBuilder, extraTestNodesBuilder); ImmutableSetMultimap<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNode = schemeNameToSrcTargetNodeBuilder.build(); ImmutableSetMultimap<String, TargetNode<AppleTestDescriptionArg, ?>> extraTestNodes = extraTestNodesBuilder.build(); buildWorkspaceSchemeTests( workspaceArguments.getSrcTarget(), projectGraph, includeProjectTests, includeDependenciesTests, 
schemeNameToSrcTargetNode, extraTestNodes, testsBuilder, buildForTestNodesBuilder); } private static void addWorkspaceScheme( TargetGraph projectGraph, AppleDependenciesCache dependenciesCache, String schemeName, XcodeWorkspaceConfigDescriptionArg schemeArguments, ImmutableMap.Builder<String, XcodeWorkspaceConfigDescriptionArg> schemeConfigsBuilder, ImmutableSetMultimap.Builder<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNodeBuilder, ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescriptionArg, ?>> extraTestNodesBuilder) { LOG.debug("Adding scheme %s", schemeName); schemeConfigsBuilder.put(schemeName, schemeArguments); if (schemeArguments.getSrcTarget().isPresent()) { schemeNameToSrcTargetNodeBuilder.putAll( schemeName, Iterables.transform( AppleBuildRules.getSchemeBuildableTargetNodes( projectGraph, Optional.of(dependenciesCache), projectGraph.get(schemeArguments.getSrcTarget().get())), Optional::of)); } else { schemeNameToSrcTargetNodeBuilder.put( XcodeWorkspaceConfigDescription.getWorkspaceNameFromArg(schemeArguments), Optional.empty()); } for (BuildTarget extraTarget : schemeArguments.getExtraTargets()) { schemeNameToSrcTargetNodeBuilder.putAll( schemeName, Iterables.transform( AppleBuildRules.getSchemeBuildableTargetNodes( projectGraph, Optional.of(dependenciesCache), Preconditions.checkNotNull(projectGraph.get(extraTarget))), Optional::of)); } extraTestNodesBuilder.putAll( schemeName, getExtraTestTargetNodes(projectGraph, schemeArguments.getExtraTests())); } private static void addExtraWorkspaceSchemes( TargetGraph projectGraph, AppleDependenciesCache dependenciesCache, ImmutableSortedMap<String, BuildTarget> extraSchemes, ImmutableMap.Builder<String, XcodeWorkspaceConfigDescriptionArg> schemeConfigsBuilder, ImmutableSetMultimap.Builder<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNodeBuilder, ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescriptionArg, ?>> extraTestNodesBuilder) { for (Map.Entry<String, 
BuildTarget> extraSchemeEntry : extraSchemes.entrySet()) { BuildTarget extraSchemeTarget = extraSchemeEntry.getValue(); Optional<TargetNode<?, ?>> extraSchemeNode = projectGraph.getOptional(extraSchemeTarget); if (!extraSchemeNode.isPresent() || !(extraSchemeNode.get().getDescription() instanceof XcodeWorkspaceConfigDescription)) { throw new HumanReadableException( "Extra scheme target '%s' should be of type 'xcode_workspace_config'", extraSchemeTarget); } XcodeWorkspaceConfigDescriptionArg extraSchemeArg = (XcodeWorkspaceConfigDescriptionArg) extraSchemeNode.get().getConstructorArg(); String schemeName = extraSchemeEntry.getKey(); addWorkspaceScheme( projectGraph, dependenciesCache, schemeName, extraSchemeArg, schemeConfigsBuilder, schemeNameToSrcTargetNodeBuilder, extraTestNodesBuilder); } } private static ImmutableSet<BuildTarget> filterRulesForProjectDirectory( TargetGraph projectGraph, ImmutableSet<BuildTarget> projectBuildTargets) { // ProjectGenerator implicitly generates targets for all apple_binary rules which // are referred to by apple_bundle rules' 'binary' field. // // We used to support an explicit xcode_project_config() which // listed all dependencies explicitly, but now that we synthesize // one, we need to ensure we continue to only pass apple_binary // targets which do not belong to apple_bundle rules. ImmutableSet.Builder<BuildTarget> binaryTargetsInsideBundlesBuilder = ImmutableSet.builder(); for (TargetNode<?, ?> projectTargetNode : projectGraph.getAll(projectBuildTargets)) { if (projectTargetNode.getDescription() instanceof AppleBundleDescription) { AppleBundleDescriptionArg appleBundleDescriptionArg = (AppleBundleDescriptionArg) projectTargetNode.getConstructorArg(); // We don't support apple_bundle rules referring to apple_binary rules // outside their current directory. 
Preconditions.checkState( appleBundleDescriptionArg .getBinary() .getBasePath() .equals(projectTargetNode.getBuildTarget().getBasePath()), "apple_bundle target %s contains reference to binary %s outside base path %s", projectTargetNode.getBuildTarget(), appleBundleDescriptionArg.getBinary(), projectTargetNode.getBuildTarget().getBasePath()); binaryTargetsInsideBundlesBuilder.add(appleBundleDescriptionArg.getBinary()); } } ImmutableSet<BuildTarget> binaryTargetsInsideBundles = binaryTargetsInsideBundlesBuilder.build(); // Remove all apple_binary targets which are inside bundles from // the rest of the build targets in the project. return ImmutableSet.copyOf(Sets.difference(projectBuildTargets, binaryTargetsInsideBundles)); } /** * Find tests to run. * * @param targetGraph input target graph * @param includeProjectTests whether to include tests of nodes in the project * @param orderedTargetNodes target nodes for which to fetch tests for * @param extraTestBundleTargets extra tests to include * @return test targets that should be run. 
 */
  private ImmutableSet<TargetNode<AppleTestDescriptionArg, ?>> getOrderedTestNodes(
      Optional<BuildTarget> mainTarget,
      TargetGraph targetGraph,
      boolean includeProjectTests,
      boolean includeDependenciesTests,
      ImmutableSet<TargetNode<?, ?>> orderedTargetNodes,
      ImmutableSet<TargetNode<AppleTestDescriptionArg, ?>> extraTestBundleTargets) {
    LOG.debug("Getting ordered test target nodes for %s", orderedTargetNodes);
    ImmutableSet.Builder<TargetNode<AppleTestDescriptionArg, ?>> testsBuilder =
        ImmutableSet.builder();
    if (includeProjectTests) {
      // Resolve the main target (if any) so we can match it against project nodes below.
      Optional<TargetNode<?, ?>> mainTargetNode = Optional.empty();
      if (mainTarget.isPresent()) {
        mainTargetNode = targetGraph.getOptional(mainTarget.get());
      }
      for (TargetNode<?, ?> node : orderedTargetNodes) {
        // Collect tests either for every node (includeDependenciesTests) or only for the
        // main target's node.
        if (includeDependenciesTests
            || (mainTargetNode.isPresent() && node.equals(mainTargetNode.get()))) {
          if (!(node.getConstructorArg() instanceof HasTests)) {
            continue;
          }
          // Only tests that pass the focus-module filter are considered.
          ImmutableList<BuildTarget> focusedTests =
              ((HasTests) node.getConstructorArg())
                  .getTests()
                  .stream()
                  .filter(t -> focusModules.isFocusedOn(t))
                  .collect(ImmutableList.toImmutableList());
          // Show a warning if the target is not focused but the tests are.
          if (focusedTests.size() > 0 && !focusModules.isFocusedOn(node.getBuildTarget())) {
            buckEventBus.post(
                ConsoleEvent.warning(
                    "Skipping tests of %s since it's not focused", node.getBuildTarget()));
            continue;
          }
          for (BuildTarget explicitTestTarget : focusedTests) {
            Optional<TargetNode<?, ?>> explicitTestNode =
                targetGraph.getOptional(explicitTestTarget);
            if (explicitTestNode.isPresent()) {
              // Only apple_test nodes are included; anything else is logged and dropped.
              Optional<TargetNode<AppleTestDescriptionArg, ?>> castedNode =
                  explicitTestNode.get().castArg(AppleTestDescriptionArg.class);
              if (castedNode.isPresent()) {
                testsBuilder.add(castedNode.get());
              } else {
                LOG.debug(
                    "Test target specified in '%s' is not a apple_test;"
                        + " not including in project: '%s'",
                    node.getBuildTarget(), explicitTestTarget);
              }
            } else {
              // A declared test that is missing from the target graph is a hard error.
              throw new HumanReadableException(
                  "Test target specified in '%s' is not in the target graph: '%s'",
                  node.getBuildTarget(), explicitTestTarget);
            }
          }
        }
      }
    }
    // Extra test bundles are always appended, regardless of focus/project settings.
    for (TargetNode<AppleTestDescriptionArg, ?> extraTestTarget : extraTestBundleTargets) {
      testsBuilder.add(extraTestTarget);
    }
    return testsBuilder.build();
  }

  /**
   * Find transitive dependencies of inputs for building.
   *
   * @param projectGraph {@link TargetGraph} containing nodes
   * @param dependenciesCache cache used when walking recursive dependencies
   * @param nodes Nodes to fetch dependencies for.
   * @param excludes Nodes to exclude from dependencies list.
   * @return targets and their dependencies that should be built.
   */
  private static ImmutableSet<TargetNode<?, ?>> getTransitiveDepsAndInputs(
      TargetGraph projectGraph,
      AppleDependenciesCache dependenciesCache,
      Iterable<? extends TargetNode<?, ?>> nodes,
      ImmutableSet<TargetNode<?, ?>> excludes) {
    return FluentIterable.from(nodes)
        .transformAndConcat(
            input ->
                AppleBuildRules.getRecursiveTargetNodeDependenciesOfTypes(
                    projectGraph,
                    Optional.of(dependenciesCache),
                    RecursiveDependenciesMode.BUILDING,
                    input,
                    Optional.empty()))
        // Include the input nodes themselves, then drop excluded and non-Xcode nodes.
        .append(nodes)
        .filter(
            input ->
                !excludes.contains(input)
                    && AppleBuildRules.isXcodeTargetDescription(input.getDescription()))
        .toSet();
  }

  /**
   * Resolves extra test build targets to apple_test nodes.
   *
   * @throws HumanReadableException if any target is not an apple_test.
   */
  private static ImmutableSet<TargetNode<AppleTestDescriptionArg, ?>> getExtraTestTargetNodes(
      TargetGraph graph, Iterable<BuildTarget> targets) {
    ImmutableSet.Builder<TargetNode<AppleTestDescriptionArg, ?>> builder = ImmutableSet.builder();
    for (TargetNode<?, ?> node : graph.getAll(targets)) {
      Optional<TargetNode<AppleTestDescriptionArg, ?>> castedNode =
          node.castArg(AppleTestDescriptionArg.class);
      if (castedNode.isPresent()) {
        builder.add(castedNode.get());
      } else {
        throw new HumanReadableException(
            "Extra test target is not a test: '%s'", node.getBuildTarget());
      }
    }
    return builder.build();
  }

  /**
   * For every scheme, selects its test nodes and the nodes that must be built for those tests,
   * populating the two supplied multimap builders.
   */
  private void buildWorkspaceSchemeTests(
      Optional<BuildTarget> mainTarget,
      TargetGraph projectGraph,
      boolean includeProjectTests,
      boolean includeDependenciesTests,
      ImmutableSetMultimap<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNode,
      ImmutableSetMultimap<String, TargetNode<AppleTestDescriptionArg, ?>> extraTestNodes,
      ImmutableSetMultimap.Builder<String, TargetNode<AppleTestDescriptionArg, ?>>
          selectedTestsBuilder,
      ImmutableSetMultimap.Builder<String, TargetNode<?, ?>> buildForTestNodesBuilder) {
    for (String schemeName : schemeNameToSrcTargetNode.keySet()) {
      ImmutableSet<TargetNode<?, ?>> targetNodes =
          schemeNameToSrcTargetNode
              .get(schemeName)
              .stream()
              .flatMap(Optionals::toStream)
              .collect(ImmutableSet.toImmutableSet());
      ImmutableSet<TargetNode<AppleTestDescriptionArg, ?>> testNodes =
          getOrderedTestNodes(
              mainTarget,
              projectGraph,
              includeProjectTests,
              includeDependenciesTests,
              targetNodes,
              extraTestNodes.get(schemeName));
      selectedTestsBuilder.putAll(schemeName, testNodes);
      // Build-for-test nodes are the tests' transitive deps (minus the scheme's own targets),
      // kept in topological order of the whole project graph.
      buildForTestNodesBuilder.putAll(
          schemeName,
          Iterables.filter(
              TopologicalSort.sort(projectGraph),
              getTransitiveDepsAndInputs(projectGraph, dependenciesCache, testNodes, targetNodes)
                  ::contains));
    }
  }

  /**
   * Writes one Xcode scheme per scheme config, skipping configs whose source target is not
   * focused. Generated {@link SchemeGenerator}s are retained in {@code schemeGenerators}.
   */
  private void writeWorkspaceSchemes(
      String workspaceName,
      Path outputDirectory,
      ImmutableMap<String, XcodeWorkspaceConfigDescriptionArg> schemeConfigs,
      ImmutableSetMultimap<String, Optional<TargetNode<?, ?>>> schemeNameToSrcTargetNode,
      ImmutableSetMultimap<String, TargetNode<?, ?>> buildForTestNodes,
      ImmutableSetMultimap<String, TargetNode<AppleTestDescriptionArg, ?>> ungroupedTests,
      ImmutableMap<PBXTarget, Path> targetToProjectPathMap,
      ImmutableMap<BuildTarget, PBXTarget> buildTargetToPBXTarget)
      throws IOException {
    for (Map.Entry<String, XcodeWorkspaceConfigDescriptionArg> schemeConfigEntry :
        schemeConfigs.entrySet()) {
      String schemeName = schemeConfigEntry.getKey();
      XcodeWorkspaceConfigDescriptionArg schemeConfigArg = schemeConfigEntry.getValue();
      if (schemeConfigArg.getSrcTarget().isPresent()
          && !focusModules.isFocusedOn(schemeConfigArg.getSrcTarget().get())) {
        continue;
      }
      // Map scheme targets to their PBX targets; nodes without a PBX mapping are dropped.
      ImmutableSet<PBXTarget> orderedBuildTargets =
          schemeNameToSrcTargetNode
              .get(schemeName)
              .stream()
              .distinct()
              .flatMap(Optionals::toStream)
              .map(TargetNode::getBuildTarget)
              .map(buildTargetToPBXTarget::get)
              .filter(Objects::nonNull)
              .collect(ImmutableSet.toImmutableSet());
      ImmutableSet<PBXTarget> orderedBuildTestTargets =
          buildForTestNodes
              .get(schemeName)
              .stream()
              .map(TargetNode::getBuildTarget)
              .map(buildTargetToPBXTarget::get)
              .filter(Objects::nonNull)
              .collect(ImmutableSet.toImmutableSet());
      ImmutableSet<PBXTarget> orderedRunTestTargets =
          ungroupedTests
              .get(schemeName)
              .stream()
              .map(TargetNode::getBuildTarget)
              .map(buildTargetToPBXTarget::get)
              .filter(Objects::nonNull)
              .collect(ImmutableSet.toImmutableSet());
      Optional<String> runnablePath = schemeConfigArg.getExplicitRunnablePath();
      Optional<String> remoteRunnablePath;
      if (schemeConfigArg.getIsRemoteRunnable().orElse(false)) {
        // XXX TODO(beng): Figure out the actual name of the binary to launch
        remoteRunnablePath = Optional.of("/" + workspaceName);
      } else {
        remoteRunnablePath = Optional.empty();
      }
      SchemeGenerator schemeGenerator =
          new SchemeGenerator(
              rootCell.getFilesystem(),
              schemeConfigArg.getSrcTarget().map(buildTargetToPBXTarget::get),
              orderedBuildTargets,
              orderedBuildTestTargets,
              orderedRunTestTargets,
              schemeName,
              // Combined projects write the scheme next to the project; otherwise inside
              // the workspace bundle.
              combinedProject
                  ? outputDirectory
                  : outputDirectory.resolve(workspaceName + ".xcworkspace"),
              parallelizeBuild,
              runnablePath,
              remoteRunnablePath,
              XcodeWorkspaceConfigDescription.getActionConfigNamesFromArg(workspaceArguments),
              targetToProjectPathMap,
              schemeConfigArg.getEnvironmentVariables(),
              schemeConfigArg.getLaunchStyle().orElse(XCScheme.LaunchAction.LaunchStyle.AUTO));
      schemeGenerator.writeScheme();
      schemeGenerators.put(schemeName, schemeGenerator);
    }
  }
}
apache-2.0
blademainer/common_utils
common_helper/src/main/java/com/xiongyingqi/captcha/utils/encoder/EncoderHelper.java
1317
/*
 * Copyright (c) 2009 Piotr Piastucki
 *
 * This file is part of Patchca CAPTCHA library.
 *
 * Patchca is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Patchca is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with Patchca. If not, see <http://www.gnu.org/licenses/>.
 */
package com.xiongyingqi.captcha.utils.encoder;

import com.xiongyingqi.captcha.service.Captcha;
import com.xiongyingqi.captcha.service.CaptchaService;

import javax.imageio.ImageIO;
import java.io.IOException;
import java.io.OutputStream;

public class EncoderHelper {

    /**
     * Obtains a fresh CAPTCHA from the service, encodes its image into the
     * given stream, and returns the challenge text the image represents.
     *
     * @param service source of generated CAPTCHAs
     * @param format  image format name understood by {@link ImageIO} (e.g. "png")
     * @param os      destination stream for the encoded image; not closed here
     * @return the challenge string of the CAPTCHA that was written
     * @throws IOException if writing the image fails
     */
    public static String getChallangeAndWriteImage(CaptchaService service, String format,
            OutputStream os) throws IOException {
        final Captcha generated = service.getCaptcha();
        ImageIO.write(generated.getImage(), format, os);
        return generated.getChallenge();
    }

}
apache-2.0
iBase4J/iBase4J
iBase4J-SYS-Facade/src/main/java/org/ibase4j/service/SysDeptService.java
259
package org.ibase4j.service;

import org.ibase4j.model.SysDept;

import top.ibase4j.core.base.BaseService;

/**
 * Service interface for {@link SysDept} entities. Declares no operations of
 * its own; all generic CRUD behavior is inherited from {@link BaseService}.
 *
 * @author ShenHuaJie
 * @since 2018-04-24 10:59:30 AM
 */
public interface SysDeptService extends BaseService<SysDept> {
}
apache-2.0
kingargyle/turmeric-bot
camel-core/src/test/java/org/apache/camel/impl/DefaultCamelContextAutoStartupTest.java
3088
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.impl; import org.apache.camel.TestSupport; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; /** * @version $Revision$ */ public class DefaultCamelContextAutoStartupTest extends TestSupport { public void testAutoStartupFalse() throws Exception { DefaultCamelContext camel = new DefaultCamelContext(new SimpleRegistry()); camel.disableJMX(); camel.setAutoStartup(false); camel.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start").routeId("foo").to("mock:result"); } }); camel.start(); assertEquals(true, camel.isStarted()); assertEquals(1, camel.getRoutes().size()); assertEquals(true, camel.getRouteStatus("foo").isStopped()); // now start the routes camel.startRoute("foo"); assertEquals(true, camel.getRouteStatus("foo").isStarted()); // and now its started we can test that it works by sending in a message to the route MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class); mock.expectedMessageCount(1); camel.createProducerTemplate().sendBody("direct:start", "Hello World"); mock.assertIsSatisfied(); camel.stop(); } public void testAutoStartupTrue() throws Exception 
{ DefaultCamelContext camel = new DefaultCamelContext(new SimpleRegistry()); camel.disableJMX(); camel.setAutoStartup(true); camel.addRoutes(new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start").routeId("foo").to("mock:result"); } }); camel.start(); assertEquals(true, camel.isStarted()); assertEquals(1, camel.getRoutes().size()); assertEquals(true, camel.getRouteStatus("foo").isStarted()); MockEndpoint mock = camel.getEndpoint("mock:result", MockEndpoint.class); mock.expectedMessageCount(1); camel.createProducerTemplate().sendBody("direct:start", "Hello World"); mock.assertIsSatisfied(); camel.stop(); } }
apache-2.0
google/android-kerberos-authenticator
src/main/javatests/com/google/android/apps/work/kerberosauthenticator/BaseAuthenticatorActivityTest.java
5567
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.work.kerberosauthenticator; import static com.google.common.truth.Truth.assertThat; import static org.robolectric.Shadows.shadowOf; import android.content.BroadcastReceiver; import android.content.ContextWrapper; import android.content.Intent; import android.content.IntentFilter; import android.content.RestrictionsManager; import android.os.Bundle; import androidx.localbroadcastmanager.content.LocalBroadcastManager; import androidx.test.core.app.ApplicationProvider; import com.google.android.apps.work.kerberosauthenticator.internal.KerberosAccountDetails; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.Robolectric; import org.robolectric.RobolectricTestRunner; import org.robolectric.android.controller.ActivityController; import org.robolectric.annotation.Config; /** * Tests {@link BaseAuthenticatorActivity}. 
*/ @RunWith(RobolectricTestRunner.class) @Config(sdk = 26) public class BaseAuthenticatorActivityTest { private RestrictionsManager restrictionsManager; private Bundle restrictionsBundle; private ContextWrapper context; @Before public void setUp() { context = ApplicationProvider.getApplicationContext(); restrictionsManager = (RestrictionsManager) context.getSystemService( context.getSystemServiceName(RestrictionsManager.class)); restrictionsBundle = TestHelper.makeRestrictionsBundle(); } @Test public void testGetConfigs_allValues() { shadowOf(restrictionsManager).setApplicationRestrictions(restrictionsBundle); ActivityController<BaseAuthenticatorActivity> controller = Robolectric.buildActivity( BaseAuthenticatorActivity.class).create().start(); BaseAuthenticatorActivity baseAuthenticatorActivity = controller.get(); AccountConfiguration accConfigs = baseAuthenticatorActivity.accountConfiguration; KerberosAccountDetails accountDetails = accConfigs.getAccountDetails(); assertThat(accountDetails).isNotNull(); assertThat(accountDetails.getUsername()).isEqualTo(TestHelper.TEST_USERNAME); assertThat(accountDetails.getPassword()).isEqualTo(TestHelper.TEST_PASSWORD); assertThat(accountDetails.getActiveDirectoryDomain()).isEqualTo(TestHelper.TEST_AD_DOMAIN); assertThat(accountDetails.getAdDomainController()).isEqualTo(TestHelper.TEST_AD_CONTROLLER); } @Test public void testGetConfigs_missingValues() { restrictionsBundle.remove(AccountConfiguration.USERNAME_KEY); shadowOf(restrictionsManager).setApplicationRestrictions(restrictionsBundle); ActivityController<BaseAuthenticatorActivity> controller = Robolectric.buildActivity( BaseAuthenticatorActivity.class).create().start(); BaseAuthenticatorActivity baseAuthenticatorActivity = controller.get(); AccountConfiguration accConfigs = baseAuthenticatorActivity.accountConfiguration; assertThat(accConfigs.getAccountDetails()).isNull(); } @Test public void testGetConfigs_noValuesSet() { // This tests the default behaviour initialised 
by the app's restrictions xml config. //restrictionsBundle.clear(); restrictionsBundle.clear(); shadowOf(restrictionsManager).setApplicationRestrictions(restrictionsBundle); ActivityController<BaseAuthenticatorActivity> controller = Robolectric.buildActivity( BaseAuthenticatorActivity.class).create().start(); BaseAuthenticatorActivity baseAuthenticatorActivity = controller.get(); AccountConfiguration accConfigs = baseAuthenticatorActivity.accountConfiguration; assertThat(accConfigs.getAccountDetails()).isNull(); } @Test public void testBroadcastConfigs() { // Set original restrictions shadowOf(restrictionsManager).setApplicationRestrictions(restrictionsBundle); ActivityController<BaseAuthenticatorActivity> controller = Robolectric.buildActivity( BaseAuthenticatorActivity.class).create().start(); BaseAuthenticatorActivity baseAuthenticatorActivity = controller.get(); AccountConfiguration accConfigs = baseAuthenticatorActivity.accountConfiguration; // Change one restriction assertThat(accConfigs.getAccountDetails().getUsername()).isEqualTo(TestHelper.TEST_USERNAME); restrictionsBundle.putString(AccountConfiguration.USERNAME_KEY, TestHelper.TEST_USERNAME + "1"); // Broadcast the restriction change BroadcastReceiver receiver = accConfigs.getReceiver(); LocalBroadcastManager.getInstance(context).registerReceiver(receiver, new IntentFilter( Intent.ACTION_APPLICATION_RESTRICTIONS_CHANGED)); LocalBroadcastManager.getInstance(context).sendBroadcast(new Intent( Intent.ACTION_APPLICATION_RESTRICTIONS_CHANGED)); // Check the username restriction is updated. assertThat(accConfigs.getAccountDetails().getUsername()) .isEqualTo(TestHelper.TEST_USERNAME + "1"); // Unregister receiver. LocalBroadcastManager.getInstance(context).unregisterReceiver(receiver); } }
apache-2.0
tectronics/pitestrunner
pitest/src/test/java/org/pitest/mutationtest/engine/gregor/MethodInfoTest.java
6688
/* * Copyright 2011 Henry Coles * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package org.pitest.mutationtest.engine.gregor; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import org.junit.Test; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; public class MethodInfoTest { private static final int NON_STATIC_MODIFIER = 0; private static final int NON_SYNTHETIC_MODIFIER = 0; private static final String VOID_RETURN = "()V"; private static final String STRING_RETURN = "()Ljava/lang/String;"; private static final int STATIC_MODIFIER = Opcodes.ACC_STATIC; private static final String NO_PARAMETERS = "()V"; private static final String ONE_PARAMETER = "(Ljava/lang/String;)V"; private static final int SYNTHETIC_MODIFIER = Opcodes.ACC_SYNTHETIC; private static final ClassInfo ENUMERATION_CLASS = new ClassInfo(0, 0, "", "", "java/lang/Enum", new String[0]); private final MethodInfo methodInfo = new MethodInfo(); @Test public void isVoidShouldReturnTrueWhenMethodIsVoid() { final MethodInfo testee = this.methodInfo.withMethodDescriptor(VOID_RETURN); assertThat(testee.isVoid(), is(true)); } @Test public void isVoidShouldReturnFalseWhenMethodIsNotVoid() { final MethodInfo testee = this.methodInfo .withMethodDescriptor(STRING_RETURN); assertThat(testee.isVoid(), is(false)); } @Test public void isStaticShouldReturnTrueWhenMethodIsStatic() { final MethodInfo testee = this.methodInfo.withAccess(STATIC_MODIFIER); assertThat(testee.isStatic(), 
is(true)); } @Test public void isStaticShouldReturnFalseWhenMethodIsNotStatic() { final MethodInfo testee = this.methodInfo.withAccess(NON_STATIC_MODIFIER); assertThat(testee.isStatic(), is(false)); } @Test public void takesNoParametersShouldReturnTrueWhenMethodTakesNoParameters() { final MethodInfo testee = this.methodInfo .withMethodDescriptor(NO_PARAMETERS); assertThat(testee.takesNoParameters(), is(true)); } @Test public void takesNoParametersShouldReturnFalseWhenMethodTakesOneParameter() { final MethodInfo testee = this.methodInfo .withMethodDescriptor(ONE_PARAMETER); assertThat(testee.takesNoParameters(), is(false)); } @Test public void isConstructorShouldReturnTrueWhenMethodIsConstructor() { final MethodInfo testee = this.methodInfo.withMethodName("<init>"); assertThat(testee.isConstructor(), is(true)); } @Test public void isConstructorShouldReturnTrueWhenMethodIsRegularMethod() { final MethodInfo testee = this.methodInfo.withMethodName("toString"); assertThat(testee.isConstructor(), is(false)); } @Test public void isSyntheticShouldReturnTrueWhenSyntheticAccessFlagSet() { final MethodInfo testee = this.methodInfo.withAccess(SYNTHETIC_MODIFIER); assertThat(testee.isSynthetic(), is(true)); } @Test public void isSyntheticShouldReturnFalseWhenNoSyntheticAccessFlagSet() { final MethodInfo testee = this.methodInfo .withAccess(NON_SYNTHETIC_MODIFIER); assertThat(testee.isSynthetic(), is(false)); } @Test public void isStaticInitializerShouldReturnTrueWhenMethodIsStaticInitializer() { final MethodInfo testee = this.methodInfo.withMethodName("<clinit>"); assertThat(testee.isStaticInitializer(), is(true)); } @Test public void getReturnTypeReturnsCorrectReturnType() { final MethodInfo testee = this.methodInfo .withMethodDescriptor(STRING_RETURN); assertThat(testee.getReturnType(), is(Type.getType(String.class))); } @Test public void getDescriptionReturnsQualifiedMethodName() { final String EXAMPLE_CLASS_NAME = "org.pitest.Example"; final ClassInfo EXAMPLE_CLASS_INFO = 
new ClassInfo(0, 0, EXAMPLE_CLASS_NAME, "", "", new String[0]); final String EXAMPLE_METHOD_NAME = "myMethod"; final String QULIFIED_METHOD_NAME = EXAMPLE_CLASS_NAME + "::" + EXAMPLE_METHOD_NAME; final MethodInfo testee = this.methodInfo.withOwner(EXAMPLE_CLASS_INFO) .withMethodName(EXAMPLE_METHOD_NAME); assertThat(testee.getDescription(), is(QULIFIED_METHOD_NAME)); } @Test public void isGeneratedEnumMethodReturnsTrueIfMethodIsEnumValuesMethod() { final MethodInfo testee = this.methodInfo.withOwner(ENUMERATION_CLASS) .withAccess(STATIC_MODIFIER).withMethodName("values") .withMethodDescriptor(NO_PARAMETERS); assertThat(testee.isGeneratedEnumMethod(), is(true)); } @Test public void isGeneratedEnumMethodReturnsTrueIfMethodIsEnumValueOfMethod() { final MethodInfo testee = this.methodInfo.withOwner(ENUMERATION_CLASS) .withAccess(STATIC_MODIFIER).withMethodName("valueOf") .withMethodDescriptor("(Ljava/lang/String;)Lorg/pitest/MyEnum;"); assertThat(testee.isGeneratedEnumMethod(), is(true)); } @Test public void isGeneratedEnumMethodReturnsTrueIfMethodIsStaticInitializerInEnum() { final MethodInfo testee = this.methodInfo.withOwner(ENUMERATION_CLASS) .withAccess(STATIC_MODIFIER).withMethodName("<clinit>"); assertThat(testee.isGeneratedEnumMethod(), is(true)); } @Test public void isGeneratedEnumMethodReturnsFalseForRegularEnumMethod() { final MethodInfo testee = this.methodInfo.withOwner(ENUMERATION_CLASS) .withMethodName("getOwner"); assertThat(testee.isGeneratedEnumMethod(), is(false)); } @Test public void isGeneratedEnumMethodReturnsFalseForNonEnumClasses() { final ClassInfo EXAMPLE_CLASS_INFO = new ClassInfo(0, 0, "org/pitest/Example", "", "java/lang/Object", new String[0]); final MethodInfo testee = this.methodInfo.withOwner(EXAMPLE_CLASS_INFO) .withAccess(STATIC_MODIFIER).withMethodName("values") .withMethodDescriptor(NO_PARAMETERS); assertThat(testee.isGeneratedEnumMethod(), is(false)); } }
apache-2.0
akarnokd/RxJavaFlow
src/main/java/rx/internal/operators/OnSubscribeUsing.java
4699
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.internal.operators;

import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;

import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Subscriber;
import rx.Subscription;
import rx.exceptions.CompositeException;
import rx.functions.Action0;
import rx.functions.Action1;
import rx.functions.Supplier;
import rx.functions.Function;

/**
 * Constructs an observable sequence that depends on a resource object.
 *
 * On each subscription a resource is created via {@code resourceFactory}, an
 * Observable is derived from it via {@code observableFactory}, and the
 * resource is disposed via {@code dispose} exactly once — either eagerly on
 * termination (when {@code disposeEagerly} is true) or on unsubscription.
 */
public final class OnSubscribeUsing<T, Resource> implements OnSubscribe<T> {

    // Creates the per-subscription resource.
    private final Supplier<Resource> resourceFactory;
    // Derives the source Observable from the created resource.
    private final Function<? super Resource, ? extends Observable<? extends T>> observableFactory;
    // Releases the resource; guaranteed to be invoked at most once.
    private final Action1<? super Resource> dispose;
    // If true, dispose on terminal events rather than waiting for unsubscription.
    private final boolean disposeEagerly;

    public OnSubscribeUsing(Supplier<Resource> resourceFactory,
            Function<? super Resource, ? extends Observable<? extends T>> observableFactory,
            Action1<? super Resource> dispose, boolean disposeEagerly) {
        this.resourceFactory = resourceFactory;
        this.observableFactory = observableFactory;
        this.dispose = dispose;
        this.disposeEagerly = disposeEagerly;
    }

    /**
     * Creates the resource and subscribes the child; any Throwable raised
     * along the way is routed to {@code subscriber.onError}, combined with a
     * disposal error (as a CompositeException) when eager disposal also fails.
     */
    @Override
    public void call(Subscriber<? super T> subscriber) {
        try {
            // create the resource
            final Resource resource = resourceFactory.call();
            // create an action/subscription that disposes only once
            final DisposeAction<Resource> disposeOnceOnly = new DisposeAction<Resource>(dispose,
                    resource);
            // dispose on unsubscription
            subscriber.add(disposeOnceOnly);
            // create the observable
            final Observable<? extends T> source = observableFactory
            // create the observable
                    .call(resource);
            final Observable<? extends T> observable;
            // supplement with on termination disposal if requested
            if (disposeEagerly)
                observable = source
                // dispose on completion or error
                        .doOnTerminate(disposeOnceOnly);
            else
                observable = source;
            try {
                // start
                observable.unsafeSubscribe(subscriber);
            } catch (Throwable e) {
                // Subscription itself failed: attempt eager disposal first so the
                // error report can include both failures.
                Throwable disposeError = disposeEagerlyIfRequested(disposeOnceOnly);
                if (disposeError != null)
                    subscriber.onError(new CompositeException(Arrays.asList(e, disposeError)));
                else
                    // propagate error
                    subscriber.onError(e);
            }
        } catch (Throwable e) {
            // then propagate error
            subscriber.onError(e);
        }
    }

    // Runs the dispose action now if eager disposal was requested; returns the
    // Throwable it raised (if any) instead of throwing, so callers can aggregate it.
    private Throwable disposeEagerlyIfRequested(final Action0 disposeOnceOnly) {
        if (disposeEagerly)
            try {
                disposeOnceOnly.call();
                return null;
            } catch (Throwable e) {
                return e;
            }
        else
            return null;
    }

    /**
     * One-shot dispose action: the inherited AtomicBoolean flag guarantees the
     * dispose callback runs at most once even when invoked from both the
     * termination hook and unsubscription. References are nulled after use to
     * free the resource for GC.
     */
    private static final class DisposeAction<Resource> extends AtomicBoolean implements Action0,
            Subscription {
        private static final long serialVersionUID = 4262875056400218316L;

        private Action1<? super Resource> dispose;
        private Resource resource;

        private DisposeAction(Action1<? super Resource> dispose, Resource resource) {
            this.dispose = dispose;
            this.resource = resource;
            lazySet(false); // StoreStore barrier
        }

        @Override
        public void call() {
            // compareAndSet wins only once; losers are no-ops.
            if (compareAndSet(false, true)) {
                try {
                    dispose.call(resource);
                } finally {
                    resource = null;
                    dispose = null;
                }
            }
        }

        @Override
        public boolean isUnsubscribed() {
            return get();
        }

        @Override
        public void unsubscribe() {
            call();
        }
    }
}
apache-2.0
DavidBorges/BooK_Tea_Rebuild
Book_Tea/app/src/main/java/br/edu/ifpe/tads/pdm/book_tea/BookReadActivity.java
322
package br.edu.ifpe.tads.pdm.book_tea;

import android.os.Bundle;
import android.app.Activity;

/**
 * Activity backing the {@code activity_book_read} layout. No behavior beyond
 * inflating the layout is defined here; all content comes from the XML.
 */
public class BookReadActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Inflate the static reading-screen layout.
        setContentView(R.layout.activity_book_read);
    }

}
apache-2.0
vam-google/google-cloud-java
google-api-grpc/proto-google-cloud-vision-v1p4beta1/src/main/java/com/google/cloud/vision/v1p4beta1/AsyncBatchAnnotateFilesResponseOrBuilder.java
2123
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/vision/v1p4beta1/image_annotator.proto package com.google.cloud.vision.v1p4beta1; public interface AsyncBatchAnnotateFilesResponseOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesResponse) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse responses = 1;</code> */ java.util.List<com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse> getResponsesList(); /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse responses = 1;</code> */ com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse getResponses(int index); /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse responses = 1;</code> */ int getResponsesCount(); /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse responses = 1;</code> */ java.util.List<? extends com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponseOrBuilder> getResponsesOrBuilderList(); /** * * * <pre> * The list of file annotation responses, one for each request in * AsyncBatchAnnotateFilesRequest. * </pre> * * <code>repeated .google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse responses = 1;</code> */ com.google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponseOrBuilder getResponsesOrBuilder( int index); }
apache-2.0
jenkinsci/aws-codecommit-trigger-plugin
src/test/java/com/ribose/jenkins/plugin/awscodecommittrigger/it/feature/AbstractFreestyleParamsTestProject.java
991
package com.ribose.jenkins.plugin.awscodecommittrigger.it.feature; import com.ribose.jenkins.plugin.awscodecommittrigger.it.AbstractFreestyleTestProject; import com.ribose.jenkins.plugin.awscodecommittrigger.it.fixture.ProjectFixture; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @RunWith(Parameterized.class) public abstract class AbstractFreestyleParamsTestProject extends AbstractFreestyleTestProject { private static Logger log = LoggerFactory.getLogger(AbstractFreestyleParamsTestProject.class); @Parameterized.Parameter public String name; @Parameterized.Parameter(1) public ProjectFixture fixture; @Test public void shouldPassIt() throws Exception { log.info("Running test fixture: {}", this.fixture.getName()); this.mockAwsSqs.send(this.fixture.getSendBranches()); this.submitAndAssertFixture(this.fixture); } }
apache-2.0
enjoy0924/sample-java
src/com/allere/sample/thrift/service/impl/HelloWorldImpl.java
549
package com.allere.sample.thrift.service.impl; /** * 这个类用来实现自动生成消息类里面的Iface接口,属于 * 服务器端的消息处理实现 * */ import org.apache.thrift.TException; import com.allere.sample.thrift.service.HelloWorldService.Iface; public class HelloWorldImpl implements Iface { public HelloWorldImpl() { } @Override public String sayHello(String username) throws TException { // TODO Auto-generated method stub return "Hi," + username + " welcome to plum land"; } }
apache-2.0
nagyist/marketcetera
trunk/modules/marketdata/yahoo/src/main/java/org/marketcetera/marketdata/yahoo/YahooField.java
5271
package org.marketcetera.marketdata.yahoo;

import java.util.HashMap;
import java.util.Map;

import org.marketcetera.util.misc.ClassVersion;

/* $License$ */

/**
 * Represents a field in a Yahoo market data request.
 *
 * <p>Each value carries the one- or two-character code used on the wire by the
 * Yahoo market data protocol; {@link #getFieldFor(String)} translates a code
 * back to its field.</p>
 *
 * @author <a href="mailto:colin@marketcetera.com">Colin DuPlantis</a>
 * @version $Id: YahooField.java 16154 2012-07-14 16:34:05Z colin $
 * @since 2.1.4
 */
@ClassVersion("$Id: YahooField.java 16154 2012-07-14 16:34:05Z colin $")
enum YahooField
{
    ASK("a"), //$NON-NLS-1$
    AVERAGE_DAILY_VOLUME("a2"), //$NON-NLS-1$
    ASK_SIZE("a5"), //$NON-NLS-1$
    BID("b"), //$NON-NLS-1$
    REAL_TIME_ASK("b2"), //$NON-NLS-1$
    REAL_TIME_BID("b3"), //$NON-NLS-1$
    BOOK_VALUE("b4"), //$NON-NLS-1$
    BID_SIZE("b6"), //$NON-NLS-1$
    CHANGE_AND_PERCENT_CHANGE("c"), //$NON-NLS-1$
    CHANGE("c1"), //$NON-NLS-1$
    COMMISSION("c3"), //$NON-NLS-1$
    REAL_TIME_CHANGE("c6"), //$NON-NLS-1$
    REAL_TIME_AFTER_HOURS_CHANGE("c8"), //$NON-NLS-1$
    DIVIDEND_PER_SHARE("d"), //$NON-NLS-1$
    LAST_TRADE_DATE("d1"), //$NON-NLS-1$
    TRADE_DATE("d2"), //$NON-NLS-1$
    EARNINGS_PER_SHARE("e"), //$NON-NLS-1$
    ERROR_INDICATION("e1"), //$NON-NLS-1$
    EPS_ESTIMATE_CURRENT_YEAR("e7"), //$NON-NLS-1$
    EPS_ESTIMATE_NEXT_YEAR("e8"), //$NON-NLS-1$
    EPS_ESTIMATE_NEXT_QUARTER("e9"), //$NON-NLS-1$
    FLOAT_SHARES("f6"), //$NON-NLS-1$
    DAY_LOW("g"), //$NON-NLS-1$
    DAY_HIGH("h"), //$NON-NLS-1$
    YEAR_LOW("j"), //$NON-NLS-1$
    YEAR_HIGH("k"), //$NON-NLS-1$
    HOLDINGS_GAIN_PERCENT("g1"), //$NON-NLS-1$
    ANNUALIZED_GAIN("g3"), //$NON-NLS-1$
    HOLDINGS_GAIN("g4"), //$NON-NLS-1$
    REAL_TIME_HOLDINGS_GAIN_PERCENT("g5"), //$NON-NLS-1$
    REAL_TIME_HOLDINGS_GAIN("g6"), //$NON-NLS-1$
    MORE_INFO("i"), //$NON-NLS-1$
    REAL_TIME_ORDER_BOOK("i5"), //$NON-NLS-1$
    MARKET_CAPITALIZATION("j1"), //$NON-NLS-1$
    REAL_TIME_MARKET_CAPITALIZATION("j3"), //$NON-NLS-1$
    EBITDA("j4"), //$NON-NLS-1$
    CHANGE_FROM_YEAR_LOW("j5"), //$NON-NLS-1$
    PERCENT_CHANGE_FROM_YEAR_LOW("j6"), //$NON-NLS-1$
    REAL_TIME_LAST_TRADE_WITH_TIME("k1"), //$NON-NLS-1$
    REAL_TIME_CHANGE_PERCENT("k2"), //$NON-NLS-1$
    LAST_TRADE_SIZE("k3"), //$NON-NLS-1$
    CHANGE_FROM_YEAR_HIGH("k4"), //$NON-NLS-1$
    PERCENT_CHANGE_FROM_YEAR_HIGH("k5"), //$NON-NLS-1$
    LAST_TRADE_WITH_TIME("l"), //$NON-NLS-1$
    LAST_TRADE_PRICE_ONLY("l1"), //$NON-NLS-1$
    HIGH_LIMIT("l2"), //$NON-NLS-1$
    LOW_LIMIT("l3"), //$NON-NLS-1$
    DAY_RANGE("m"), //$NON-NLS-1$
    REAL_TIME_DAY_RANGE("m2"), //$NON-NLS-1$
    MOVING_AVERAGE_50_DAY("m3"), //$NON-NLS-1$
    MOVING_AVERAGE_200_DAY("m4"), //$NON-NLS-1$
    CHANGE_FROM_MOVING_AVERAGE_200_DAY("m5"), //$NON-NLS-1$
    PERCENT_CHANGE_FROM_MOVING_AVERAGE_200_DAY("m6"), //$NON-NLS-1$
    CHANGE_FROM_MOVING_AVERAGE_50_DAY("m7"), //$NON-NLS-1$
    PERCENT_CHANGE_FROM_MOVING_AVERAGE_50_DAY("m8"), //$NON-NLS-1$
    NAME("n"), //$NON-NLS-1$
    NOTES("n4"), //$NON-NLS-1$
    OPEN("o"), //$NON-NLS-1$
    PREVIOUS_CLOSE("p"), //$NON-NLS-1$
    PRICE_PAID("p1"), //$NON-NLS-1$
    PERCENT_CHANGE("p2"), //$NON-NLS-1$
    PRICE_OVER_SALES("p5"), //$NON-NLS-1$
    PRICE_OVER_BOOK("p6"), //$NON-NLS-1$
    EXPECTED_DIVIDEND_DATE("q"), //$NON-NLS-1$
    PE_RATIO("r"), //$NON-NLS-1$
    DIVIDEND_PAY_DATE("r1"), //$NON-NLS-1$
    REAL_TIME_PE_RATION("r2"), //$NON-NLS-1$
    PEG_RATION("r5"), //$NON-NLS-1$
    PRICE_OVER_EPS_ESTIMATE_CURRENT_YEAR("r6"), //$NON-NLS-1$
    PRICE_OVER_EPS_ESTIMATE_NEXT_YEAR("r7"), //$NON-NLS-1$
    SYMBOL("s"), //$NON-NLS-1$
    SHARES_OWNED("s1"), //$NON-NLS-1$
    SHORT_RATIO("s7"), //$NON-NLS-1$
    LAST_TRADE_TIME("t1"), //$NON-NLS-1$
    TRADE_LINKS("t6"), //$NON-NLS-1$
    TICKER_TREND("t7"), //$NON-NLS-1$
    TARGET_PRICE_1_YEAR("t8"), //$NON-NLS-1$
    VOLUME("v"), //$NON-NLS-1$
    HOLDINGS_VALUE("v1"), //$NON-NLS-1$
    REAL_TIME_HOLDINGS_VALUE("v7"), //$NON-NLS-1$
    RANGE_52_WEEK("w"), //$NON-NLS-1$
    DAY_VALUE_CHANGE("w1"), //$NON-NLS-1$
    REAL_TIME_DAY_VALUE_CHANGE("w4"), //$NON-NLS-1$
    STOCK_EXCHANGE("x"), //$NON-NLS-1$
    DIVIDEND_YIELD("y"); //$NON-NLS-1$
    /**
     * Gets the field for the given literal value.
     *
     * @param inCode a <code>String</code> value
     * @return a <code>YahooField</code> value or <code>null</code> if no field corresponds to the given literal
     */
    public static YahooField getFieldFor(String inCode)
    {
        // the lookup table is fully populated during class initialization
        // (see the static initializer below), so no synchronization is needed
        return fields.get(inCode);
    }
    /**
     * Get the code value.
     *
     * @return a <code>String</code> value
     */
    public String getCode()
    {
        return code;
    }
    /**
     * Create a new YahooField instance.
     *
     * @param inCode a <code>String</code> value
     */
    private YahooField(String inCode)
    {
        code = inCode;
    }
    /**
     * literal value associated with the field
     */
    private final String code;
    /**
     * fields by literal value, populated eagerly at class-initialization time
     */
    private static final Map<String,YahooField> fields = new HashMap<String,YahooField>();
    static {
        // a static initializer in an enum runs after all constants have been
        // constructed, and class initialization makes this publication thread-safe;
        // this replaces the previous lazy, per-call synchronized population
        for(YahooField field : values()) {
            fields.put(field.getCode(),
                       field);
        }
    }
}
apache-2.0
caskdata/cdap
cdap-explore-client/src/main/java/co/cask/cdap/explore/utils/ExploreTableNaming.java
1818
/*
 * Copyright © 2015 Cask Data, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package co.cask.cdap.explore.utils;

import co.cask.cdap.proto.Id;
import co.cask.cdap.proto.id.DatasetId;
import co.cask.cdap.proto.id.StreamId;

import java.util.Locale;

/**
 * Specifies how to name tables for Explore.
 */
public final class ExploreTableNaming {

  /** Returns the Explore table name for the given stream id. */
  public String getTableName(Id.Stream streamId) {
    return String.format("stream_%s", cleanTableName(streamId.getId()));
  }

  /** Returns the Explore table name for the given stream id. */
  public String getTableName(StreamId streamId) {
    return String.format("stream_%s", cleanTableName(streamId.getStream()));
  }

  /** Returns the Explore table name for the given dataset id. */
  public String getTableName(DatasetId datasetID) {
    return String.format("dataset_%s", cleanTableName(datasetID.getDataset()));
  }

  /** Returns the Explore table name for the given dataset id. */
  public String getTableName(Id.DatasetInstance datasetID) {
    return String.format("dataset_%s", cleanTableName(datasetID.getId()));
  }

  /** Returns the Explore table name for the given stream view id. */
  public String getTableName(Id.Stream.View viewId) {
    return String.format("stream_%s_%s", cleanTableName(viewId.getStreamId()), cleanTableName(viewId.getId()));
  }

  /**
   * Sanitizes a CDAP entity name into a valid Hive table-name fragment.
   *
   * @param name the raw name, e.g. {@code cdap.user.my_table}
   * @return the name with {@code .} and {@code -} replaced by {@code _}, lower-cased
   */
  public String cleanTableName(String name) {
    // Instance names look like cdap.user.my_table. Hive table names cannot
    // contain '.' or '-', so replace both with '_'. (The characters are
    // literal, so String.replace is used instead of regex replaceAll.)
    // Locale.ROOT keeps the lower-casing locale-independent (e.g. Turkish 'I').
    return name.replace('.', '_').replace('-', '_').toLowerCase(Locale.ROOT);
  }
}
apache-2.0
tommyettinger/doughyo
src/main/java/vigna/fastutil/BidirectionalIterator.java
1685
package vigna.fastutil;

/*
 * Copyright (C) 2002-2015 Sebastiano Vigna
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.Iterator;
import java.util.ListIterator;

/**
 * An {@link Iterator} that can also move backwards.
 *
 * <P>Conceptually, this is a {@link ListIterator} stripped of its
 * index-related operations ({@link ListIterator#previousIndex()} and
 * {@link ListIterator#nextIndex()}). It suits maps and other containers that
 * can easily iterate in both directions but have no notion of position.
 *
 * <P>Note that iterators returned by <code>fastutil</code> classes are more
 * specific, and support skipping; this interface serves the purpose of
 * organising in a cleaner way the relationships between various iterators.
 *
 * @see Iterator
 * @see ListIterator
 */
public interface BidirectionalIterator<K> extends Iterator<K> {

	/**
	 * Returns the element preceding the current position and moves the
	 * iterator back one step.
	 *
	 * @return the previous element from the collection.
	 * @see ListIterator#previous()
	 */
	K previous();

	/**
	 * Tells whether a call to {@link #previous()} would succeed.
	 *
	 * @return whether there is a previous element.
	 * @see ListIterator#hasPrevious()
	 */
	boolean hasPrevious();
}
apache-2.0
xiaowen118/test
src/main/java/com/chen/controllers/ChenBeanResolver.java
1026
/** * @author 54chen(陈臻) [chenzhen@xiaomi.com czhttp@gmail.com] * @since 2012-5-19 下午02:27:24 */ package com.chen.controllers; import com.chen.model.Chen; import net.paoding.rose.web.Invocation; import net.paoding.rose.web.paramresolver.ParamMetaData; import net.paoding.rose.web.paramresolver.ParamResolver; public class ChenBeanResolver implements ParamResolver { @Override public Object resolve(Invocation inv, ParamMetaData metaData) throws Exception { for (String paramName : metaData.getParamNames()) { if (paramName != null) { Chen chen = new Chen(); String value1 = inv.getParameter("chen1"); String value2 = inv.getParameter("chen2"); chen.setChen1(value1); chen.setChen2(value2); return chen; } } return null; } @Override public boolean supports(ParamMetaData metaData) { return Chen.class == metaData.getParamType(); } }
apache-2.0
trasa/aws-sdk-java
aws-java-sdk-emr/src/main/java/com/amazonaws/services/elasticmapreduce/model/JobFlowInstancesConfig.java
45300
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.elasticmapreduce.model; import java.io.Serializable; /** * <p> * A description of the Amazon EC2 instance running the job flow. A valid * JobFlowInstancesConfig must contain at least InstanceGroups, which is the * recommended configuration. However, a valid alternative is to have * MasterInstanceType, SlaveInstanceType, and InstanceCount (all three must be * present). * </p> */ public class JobFlowInstancesConfig implements Serializable, Cloneable { /** * <p> * The EC2 instance type of the master node. * </p> */ private String masterInstanceType; /** * <p> * The EC2 instance type of the slave nodes. * </p> */ private String slaveInstanceType; /** * <p> * The number of Amazon EC2 instances used to execute the job flow. * </p> */ private Integer instanceCount; /** * <p> * Configuration for the job flow's instance groups. * </p> */ private com.amazonaws.internal.SdkInternalList<InstanceGroupConfig> instanceGroups; /** * <p> * The name of the Amazon EC2 key pair that can be used to ssh to the master * node as the user called "hadoop." * </p> */ private String ec2KeyName; /** * <p> * The Availability Zone the job flow will run in. * </p> */ private PlacementType placement; /** * <p> * Specifies whether the job flow should be kept alive after completing all * steps. 
* </p> */ private Boolean keepJobFlowAliveWhenNoSteps; /** * <p> * Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, or in the * event of a job flow error. * </p> */ private Boolean terminationProtected; /** * <p> * The Hadoop version for the job flow. Valid inputs are "0.18" * (deprecated), "0.20" (deprecated), "0.20.205" (deprecated), "1.0.3", * "2.2.0", or "2.4.0". If you do not set this value, the default of 0.18 is * used, unless the AmiVersion parameter is set in the RunJobFlow call, in * which case the default version of Hadoop for that AMI version is used. * </p> */ private String hadoopVersion; /** * <p> * To launch the job flow in Amazon Virtual Private Cloud (Amazon VPC), set * this parameter to the identifier of the Amazon VPC subnet where you want * the job flow to launch. If you do not specify this value, the job flow is * launched in the normal Amazon Web Services cloud, outside of an Amazon * VPC. * </p> * <p> * Amazon VPC currently does not support cluster compute quadruple extra * large (cc1.4xlarge) instances. Thus you cannot specify the cc1.4xlarge * instance type for nodes of a job flow launched in a Amazon VPC. * </p> */ private String ec2SubnetId; /** * <p> * The identifier of the Amazon EC2 security group for the master node. * </p> */ private String emrManagedMasterSecurityGroup; /** * <p> * The identifier of the Amazon EC2 security group for the slave nodes. * </p> */ private String emrManagedSlaveSecurityGroup; /** * <p> * The identifier of the Amazon EC2 security group for the Amazon EMR * service to access clusters in VPC private subnets. * </p> */ private String serviceAccessSecurityGroup; /** * <p> * A list of additional Amazon EC2 security group IDs for the master node. * </p> */ private com.amazonaws.internal.SdkInternalList<String> additionalMasterSecurityGroups; /** * <p> * A list of additional Amazon EC2 security group IDs for the slave nodes. 
* </p> */ private com.amazonaws.internal.SdkInternalList<String> additionalSlaveSecurityGroups; /** * <p> * The EC2 instance type of the master node. * </p> * * @param masterInstanceType * The EC2 instance type of the master node. */ public void setMasterInstanceType(String masterInstanceType) { this.masterInstanceType = masterInstanceType; } /** * <p> * The EC2 instance type of the master node. * </p> * * @return The EC2 instance type of the master node. */ public String getMasterInstanceType() { return this.masterInstanceType; } /** * <p> * The EC2 instance type of the master node. * </p> * * @param masterInstanceType * The EC2 instance type of the master node. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withMasterInstanceType( String masterInstanceType) { setMasterInstanceType(masterInstanceType); return this; } /** * <p> * The EC2 instance type of the slave nodes. * </p> * * @param slaveInstanceType * The EC2 instance type of the slave nodes. */ public void setSlaveInstanceType(String slaveInstanceType) { this.slaveInstanceType = slaveInstanceType; } /** * <p> * The EC2 instance type of the slave nodes. * </p> * * @return The EC2 instance type of the slave nodes. */ public String getSlaveInstanceType() { return this.slaveInstanceType; } /** * <p> * The EC2 instance type of the slave nodes. * </p> * * @param slaveInstanceType * The EC2 instance type of the slave nodes. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withSlaveInstanceType(String slaveInstanceType) { setSlaveInstanceType(slaveInstanceType); return this; } /** * <p> * The number of Amazon EC2 instances used to execute the job flow. * </p> * * @param instanceCount * The number of Amazon EC2 instances used to execute the job flow. 
*/ public void setInstanceCount(Integer instanceCount) { this.instanceCount = instanceCount; } /** * <p> * The number of Amazon EC2 instances used to execute the job flow. * </p> * * @return The number of Amazon EC2 instances used to execute the job flow. */ public Integer getInstanceCount() { return this.instanceCount; } /** * <p> * The number of Amazon EC2 instances used to execute the job flow. * </p> * * @param instanceCount * The number of Amazon EC2 instances used to execute the job flow. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withInstanceCount(Integer instanceCount) { setInstanceCount(instanceCount); return this; } /** * <p> * Configuration for the job flow's instance groups. * </p> * * @return Configuration for the job flow's instance groups. */ public java.util.List<InstanceGroupConfig> getInstanceGroups() { if (instanceGroups == null) { instanceGroups = new com.amazonaws.internal.SdkInternalList<InstanceGroupConfig>(); } return instanceGroups; } /** * <p> * Configuration for the job flow's instance groups. * </p> * * @param instanceGroups * Configuration for the job flow's instance groups. */ public void setInstanceGroups( java.util.Collection<InstanceGroupConfig> instanceGroups) { if (instanceGroups == null) { this.instanceGroups = null; return; } this.instanceGroups = new com.amazonaws.internal.SdkInternalList<InstanceGroupConfig>( instanceGroups); } /** * <p> * Configuration for the job flow's instance groups. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setInstanceGroups(java.util.Collection)} or * {@link #withInstanceGroups(java.util.Collection)} if you want to override * the existing values. * </p> * * @param instanceGroups * Configuration for the job flow's instance groups. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public JobFlowInstancesConfig withInstanceGroups( InstanceGroupConfig... instanceGroups) { if (this.instanceGroups == null) { setInstanceGroups(new com.amazonaws.internal.SdkInternalList<InstanceGroupConfig>( instanceGroups.length)); } for (InstanceGroupConfig ele : instanceGroups) { this.instanceGroups.add(ele); } return this; } /** * <p> * Configuration for the job flow's instance groups. * </p> * * @param instanceGroups * Configuration for the job flow's instance groups. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withInstanceGroups( java.util.Collection<InstanceGroupConfig> instanceGroups) { setInstanceGroups(instanceGroups); return this; } /** * <p> * The name of the Amazon EC2 key pair that can be used to ssh to the master * node as the user called "hadoop." * </p> * * @param ec2KeyName * The name of the Amazon EC2 key pair that can be used to ssh to the * master node as the user called "hadoop." */ public void setEc2KeyName(String ec2KeyName) { this.ec2KeyName = ec2KeyName; } /** * <p> * The name of the Amazon EC2 key pair that can be used to ssh to the master * node as the user called "hadoop." * </p> * * @return The name of the Amazon EC2 key pair that can be used to ssh to * the master node as the user called "hadoop." */ public String getEc2KeyName() { return this.ec2KeyName; } /** * <p> * The name of the Amazon EC2 key pair that can be used to ssh to the master * node as the user called "hadoop." * </p> * * @param ec2KeyName * The name of the Amazon EC2 key pair that can be used to ssh to the * master node as the user called "hadoop." * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withEc2KeyName(String ec2KeyName) { setEc2KeyName(ec2KeyName); return this; } /** * <p> * The Availability Zone the job flow will run in. * </p> * * @param placement * The Availability Zone the job flow will run in. 
*/ public void setPlacement(PlacementType placement) { this.placement = placement; } /** * <p> * The Availability Zone the job flow will run in. * </p> * * @return The Availability Zone the job flow will run in. */ public PlacementType getPlacement() { return this.placement; } /** * <p> * The Availability Zone the job flow will run in. * </p> * * @param placement * The Availability Zone the job flow will run in. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withPlacement(PlacementType placement) { setPlacement(placement); return this; } /** * <p> * Specifies whether the job flow should be kept alive after completing all * steps. * </p> * * @param keepJobFlowAliveWhenNoSteps * Specifies whether the job flow should be kept alive after * completing all steps. */ public void setKeepJobFlowAliveWhenNoSteps( Boolean keepJobFlowAliveWhenNoSteps) { this.keepJobFlowAliveWhenNoSteps = keepJobFlowAliveWhenNoSteps; } /** * <p> * Specifies whether the job flow should be kept alive after completing all * steps. * </p> * * @return Specifies whether the job flow should be kept alive after * completing all steps. */ public Boolean getKeepJobFlowAliveWhenNoSteps() { return this.keepJobFlowAliveWhenNoSteps; } /** * <p> * Specifies whether the job flow should be kept alive after completing all * steps. * </p> * * @param keepJobFlowAliveWhenNoSteps * Specifies whether the job flow should be kept alive after * completing all steps. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withKeepJobFlowAliveWhenNoSteps( Boolean keepJobFlowAliveWhenNoSteps) { setKeepJobFlowAliveWhenNoSteps(keepJobFlowAliveWhenNoSteps); return this; } /** * <p> * Specifies whether the job flow should be kept alive after completing all * steps. * </p> * * @return Specifies whether the job flow should be kept alive after * completing all steps. 
*/ public Boolean isKeepJobFlowAliveWhenNoSteps() { return this.keepJobFlowAliveWhenNoSteps; } /** * <p> * Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, or in the * event of a job flow error. * </p> * * @param terminationProtected * Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, or * in the event of a job flow error. */ public void setTerminationProtected(Boolean terminationProtected) { this.terminationProtected = terminationProtected; } /** * <p> * Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, or in the * event of a job flow error. * </p> * * @return Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, * or in the event of a job flow error. */ public Boolean getTerminationProtected() { return this.terminationProtected; } /** * <p> * Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, or in the * event of a job flow error. * </p> * * @param terminationProtected * Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, or * in the event of a job flow error. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withTerminationProtected( Boolean terminationProtected) { setTerminationProtected(terminationProtected); return this; } /** * <p> * Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, or in the * event of a job flow error. 
* </p> * * @return Specifies whether to lock the job flow to prevent the Amazon EC2 * instances from being terminated by API call, user intervention, * or in the event of a job flow error. */ public Boolean isTerminationProtected() { return this.terminationProtected; } /** * <p> * The Hadoop version for the job flow. Valid inputs are "0.18" * (deprecated), "0.20" (deprecated), "0.20.205" (deprecated), "1.0.3", * "2.2.0", or "2.4.0". If you do not set this value, the default of 0.18 is * used, unless the AmiVersion parameter is set in the RunJobFlow call, in * which case the default version of Hadoop for that AMI version is used. * </p> * * @param hadoopVersion * The Hadoop version for the job flow. Valid inputs are "0.18" * (deprecated), "0.20" (deprecated), "0.20.205" (deprecated), * "1.0.3", "2.2.0", or "2.4.0". If you do not set this value, the * default of 0.18 is used, unless the AmiVersion parameter is set in * the RunJobFlow call, in which case the default version of Hadoop * for that AMI version is used. */ public void setHadoopVersion(String hadoopVersion) { this.hadoopVersion = hadoopVersion; } /** * <p> * The Hadoop version for the job flow. Valid inputs are "0.18" * (deprecated), "0.20" (deprecated), "0.20.205" (deprecated), "1.0.3", * "2.2.0", or "2.4.0". If you do not set this value, the default of 0.18 is * used, unless the AmiVersion parameter is set in the RunJobFlow call, in * which case the default version of Hadoop for that AMI version is used. * </p> * * @return The Hadoop version for the job flow. Valid inputs are "0.18" * (deprecated), "0.20" (deprecated), "0.20.205" (deprecated), * "1.0.3", "2.2.0", or "2.4.0". If you do not set this value, the * default of 0.18 is used, unless the AmiVersion parameter is set * in the RunJobFlow call, in which case the default version of * Hadoop for that AMI version is used. */ public String getHadoopVersion() { return this.hadoopVersion; } /** * <p> * The Hadoop version for the job flow. 
Valid inputs are "0.18" * (deprecated), "0.20" (deprecated), "0.20.205" (deprecated), "1.0.3", * "2.2.0", or "2.4.0". If you do not set this value, the default of 0.18 is * used, unless the AmiVersion parameter is set in the RunJobFlow call, in * which case the default version of Hadoop for that AMI version is used. * </p> * * @param hadoopVersion * The Hadoop version for the job flow. Valid inputs are "0.18" * (deprecated), "0.20" (deprecated), "0.20.205" (deprecated), * "1.0.3", "2.2.0", or "2.4.0". If you do not set this value, the * default of 0.18 is used, unless the AmiVersion parameter is set in * the RunJobFlow call, in which case the default version of Hadoop * for that AMI version is used. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withHadoopVersion(String hadoopVersion) { setHadoopVersion(hadoopVersion); return this; } /** * <p> * To launch the job flow in Amazon Virtual Private Cloud (Amazon VPC), set * this parameter to the identifier of the Amazon VPC subnet where you want * the job flow to launch. If you do not specify this value, the job flow is * launched in the normal Amazon Web Services cloud, outside of an Amazon * VPC. * </p> * <p> * Amazon VPC currently does not support cluster compute quadruple extra * large (cc1.4xlarge) instances. Thus you cannot specify the cc1.4xlarge * instance type for nodes of a job flow launched in a Amazon VPC. * </p> * * @param ec2SubnetId * To launch the job flow in Amazon Virtual Private Cloud (Amazon * VPC), set this parameter to the identifier of the Amazon VPC * subnet where you want the job flow to launch. If you do not * specify this value, the job flow is launched in the normal Amazon * Web Services cloud, outside of an Amazon VPC. </p> * <p> * Amazon VPC currently does not support cluster compute quadruple * extra large (cc1.4xlarge) instances. 
Thus you cannot specify the * cc1.4xlarge instance type for nodes of a job flow launched in a * Amazon VPC. */ public void setEc2SubnetId(String ec2SubnetId) { this.ec2SubnetId = ec2SubnetId; } /** * <p> * To launch the job flow in Amazon Virtual Private Cloud (Amazon VPC), set * this parameter to the identifier of the Amazon VPC subnet where you want * the job flow to launch. If you do not specify this value, the job flow is * launched in the normal Amazon Web Services cloud, outside of an Amazon * VPC. * </p> * <p> * Amazon VPC currently does not support cluster compute quadruple extra * large (cc1.4xlarge) instances. Thus you cannot specify the cc1.4xlarge * instance type for nodes of a job flow launched in a Amazon VPC. * </p> * * @return To launch the job flow in Amazon Virtual Private Cloud (Amazon * VPC), set this parameter to the identifier of the Amazon VPC * subnet where you want the job flow to launch. If you do not * specify this value, the job flow is launched in the normal Amazon * Web Services cloud, outside of an Amazon VPC. </p> * <p> * Amazon VPC currently does not support cluster compute quadruple * extra large (cc1.4xlarge) instances. Thus you cannot specify the * cc1.4xlarge instance type for nodes of a job flow launched in a * Amazon VPC. */ public String getEc2SubnetId() { return this.ec2SubnetId; } /** * <p> * To launch the job flow in Amazon Virtual Private Cloud (Amazon VPC), set * this parameter to the identifier of the Amazon VPC subnet where you want * the job flow to launch. If you do not specify this value, the job flow is * launched in the normal Amazon Web Services cloud, outside of an Amazon * VPC. * </p> * <p> * Amazon VPC currently does not support cluster compute quadruple extra * large (cc1.4xlarge) instances. Thus you cannot specify the cc1.4xlarge * instance type for nodes of a job flow launched in a Amazon VPC. 
* </p> * * @param ec2SubnetId * To launch the job flow in Amazon Virtual Private Cloud (Amazon * VPC), set this parameter to the identifier of the Amazon VPC * subnet where you want the job flow to launch. If you do not * specify this value, the job flow is launched in the normal Amazon * Web Services cloud, outside of an Amazon VPC. </p> * <p> * Amazon VPC currently does not support cluster compute quadruple * extra large (cc1.4xlarge) instances. Thus you cannot specify the * cc1.4xlarge instance type for nodes of a job flow launched in a * Amazon VPC. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withEc2SubnetId(String ec2SubnetId) { setEc2SubnetId(ec2SubnetId); return this; } /** * <p> * The identifier of the Amazon EC2 security group for the master node. * </p> * * @param emrManagedMasterSecurityGroup * The identifier of the Amazon EC2 security group for the master * node. */ public void setEmrManagedMasterSecurityGroup( String emrManagedMasterSecurityGroup) { this.emrManagedMasterSecurityGroup = emrManagedMasterSecurityGroup; } /** * <p> * The identifier of the Amazon EC2 security group for the master node. * </p> * * @return The identifier of the Amazon EC2 security group for the master * node. */ public String getEmrManagedMasterSecurityGroup() { return this.emrManagedMasterSecurityGroup; } /** * <p> * The identifier of the Amazon EC2 security group for the master node. * </p> * * @param emrManagedMasterSecurityGroup * The identifier of the Amazon EC2 security group for the master * node. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withEmrManagedMasterSecurityGroup( String emrManagedMasterSecurityGroup) { setEmrManagedMasterSecurityGroup(emrManagedMasterSecurityGroup); return this; } /** * <p> * The identifier of the Amazon EC2 security group for the slave nodes. 
* </p> * * @param emrManagedSlaveSecurityGroup * The identifier of the Amazon EC2 security group for the slave * nodes. */ public void setEmrManagedSlaveSecurityGroup( String emrManagedSlaveSecurityGroup) { this.emrManagedSlaveSecurityGroup = emrManagedSlaveSecurityGroup; } /** * <p> * The identifier of the Amazon EC2 security group for the slave nodes. * </p> * * @return The identifier of the Amazon EC2 security group for the slave * nodes. */ public String getEmrManagedSlaveSecurityGroup() { return this.emrManagedSlaveSecurityGroup; } /** * <p> * The identifier of the Amazon EC2 security group for the slave nodes. * </p> * * @param emrManagedSlaveSecurityGroup * The identifier of the Amazon EC2 security group for the slave * nodes. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withEmrManagedSlaveSecurityGroup( String emrManagedSlaveSecurityGroup) { setEmrManagedSlaveSecurityGroup(emrManagedSlaveSecurityGroup); return this; } /** * <p> * The identifier of the Amazon EC2 security group for the Amazon EMR * service to access clusters in VPC private subnets. * </p> * * @param serviceAccessSecurityGroup * The identifier of the Amazon EC2 security group for the Amazon EMR * service to access clusters in VPC private subnets. */ public void setServiceAccessSecurityGroup(String serviceAccessSecurityGroup) { this.serviceAccessSecurityGroup = serviceAccessSecurityGroup; } /** * <p> * The identifier of the Amazon EC2 security group for the Amazon EMR * service to access clusters in VPC private subnets. * </p> * * @return The identifier of the Amazon EC2 security group for the Amazon * EMR service to access clusters in VPC private subnets. */ public String getServiceAccessSecurityGroup() { return this.serviceAccessSecurityGroup; } /** * <p> * The identifier of the Amazon EC2 security group for the Amazon EMR * service to access clusters in VPC private subnets. 
* </p> * * @param serviceAccessSecurityGroup * The identifier of the Amazon EC2 security group for the Amazon EMR * service to access clusters in VPC private subnets. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withServiceAccessSecurityGroup( String serviceAccessSecurityGroup) { setServiceAccessSecurityGroup(serviceAccessSecurityGroup); return this; } /** * <p> * A list of additional Amazon EC2 security group IDs for the master node. * </p> * * @return A list of additional Amazon EC2 security group IDs for the master * node. */ public java.util.List<String> getAdditionalMasterSecurityGroups() { if (additionalMasterSecurityGroups == null) { additionalMasterSecurityGroups = new com.amazonaws.internal.SdkInternalList<String>(); } return additionalMasterSecurityGroups; } /** * <p> * A list of additional Amazon EC2 security group IDs for the master node. * </p> * * @param additionalMasterSecurityGroups * A list of additional Amazon EC2 security group IDs for the master * node. */ public void setAdditionalMasterSecurityGroups( java.util.Collection<String> additionalMasterSecurityGroups) { if (additionalMasterSecurityGroups == null) { this.additionalMasterSecurityGroups = null; return; } this.additionalMasterSecurityGroups = new com.amazonaws.internal.SdkInternalList<String>( additionalMasterSecurityGroups); } /** * <p> * A list of additional Amazon EC2 security group IDs for the master node. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use * {@link #setAdditionalMasterSecurityGroups(java.util.Collection)} or * {@link #withAdditionalMasterSecurityGroups(java.util.Collection)} if you * want to override the existing values. * </p> * * @param additionalMasterSecurityGroups * A list of additional Amazon EC2 security group IDs for the master * node. * @return Returns a reference to this object so that method calls can be * chained together. 
*/ public JobFlowInstancesConfig withAdditionalMasterSecurityGroups( String... additionalMasterSecurityGroups) { if (this.additionalMasterSecurityGroups == null) { setAdditionalMasterSecurityGroups(new com.amazonaws.internal.SdkInternalList<String>( additionalMasterSecurityGroups.length)); } for (String ele : additionalMasterSecurityGroups) { this.additionalMasterSecurityGroups.add(ele); } return this; } /** * <p> * A list of additional Amazon EC2 security group IDs for the master node. * </p> * * @param additionalMasterSecurityGroups * A list of additional Amazon EC2 security group IDs for the master * node. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withAdditionalMasterSecurityGroups( java.util.Collection<String> additionalMasterSecurityGroups) { setAdditionalMasterSecurityGroups(additionalMasterSecurityGroups); return this; } /** * <p> * A list of additional Amazon EC2 security group IDs for the slave nodes. * </p> * * @return A list of additional Amazon EC2 security group IDs for the slave * nodes. */ public java.util.List<String> getAdditionalSlaveSecurityGroups() { if (additionalSlaveSecurityGroups == null) { additionalSlaveSecurityGroups = new com.amazonaws.internal.SdkInternalList<String>(); } return additionalSlaveSecurityGroups; } /** * <p> * A list of additional Amazon EC2 security group IDs for the slave nodes. * </p> * * @param additionalSlaveSecurityGroups * A list of additional Amazon EC2 security group IDs for the slave * nodes. */ public void setAdditionalSlaveSecurityGroups( java.util.Collection<String> additionalSlaveSecurityGroups) { if (additionalSlaveSecurityGroups == null) { this.additionalSlaveSecurityGroups = null; return; } this.additionalSlaveSecurityGroups = new com.amazonaws.internal.SdkInternalList<String>( additionalSlaveSecurityGroups); } /** * <p> * A list of additional Amazon EC2 security group IDs for the slave nodes. 
* </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setAdditionalSlaveSecurityGroups(java.util.Collection)} * or {@link #withAdditionalSlaveSecurityGroups(java.util.Collection)} if * you want to override the existing values. * </p> * * @param additionalSlaveSecurityGroups * A list of additional Amazon EC2 security group IDs for the slave * nodes. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withAdditionalSlaveSecurityGroups( String... additionalSlaveSecurityGroups) { if (this.additionalSlaveSecurityGroups == null) { setAdditionalSlaveSecurityGroups(new com.amazonaws.internal.SdkInternalList<String>( additionalSlaveSecurityGroups.length)); } for (String ele : additionalSlaveSecurityGroups) { this.additionalSlaveSecurityGroups.add(ele); } return this; } /** * <p> * A list of additional Amazon EC2 security group IDs for the slave nodes. * </p> * * @param additionalSlaveSecurityGroups * A list of additional Amazon EC2 security group IDs for the slave * nodes. * @return Returns a reference to this object so that method calls can be * chained together. */ public JobFlowInstancesConfig withAdditionalSlaveSecurityGroups( java.util.Collection<String> additionalSlaveSecurityGroups) { setAdditionalSlaveSecurityGroups(additionalSlaveSecurityGroups); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getMasterInstanceType() != null) sb.append("MasterInstanceType: " + getMasterInstanceType() + ","); if (getSlaveInstanceType() != null) sb.append("SlaveInstanceType: " + getSlaveInstanceType() + ","); if (getInstanceCount() != null) sb.append("InstanceCount: " + getInstanceCount() + ","); if (getInstanceGroups() != null) sb.append("InstanceGroups: " + getInstanceGroups() + ","); if (getEc2KeyName() != null) sb.append("Ec2KeyName: " + getEc2KeyName() + ","); if (getPlacement() != null) sb.append("Placement: " + getPlacement() + ","); if (getKeepJobFlowAliveWhenNoSteps() != null) sb.append("KeepJobFlowAliveWhenNoSteps: " + getKeepJobFlowAliveWhenNoSteps() + ","); if (getTerminationProtected() != null) sb.append("TerminationProtected: " + getTerminationProtected() + ","); if (getHadoopVersion() != null) sb.append("HadoopVersion: " + getHadoopVersion() + ","); if (getEc2SubnetId() != null) sb.append("Ec2SubnetId: " + getEc2SubnetId() + ","); if (getEmrManagedMasterSecurityGroup() != null) sb.append("EmrManagedMasterSecurityGroup: " + getEmrManagedMasterSecurityGroup() + ","); if (getEmrManagedSlaveSecurityGroup() != null) sb.append("EmrManagedSlaveSecurityGroup: " + getEmrManagedSlaveSecurityGroup() + ","); if (getServiceAccessSecurityGroup() != null) sb.append("ServiceAccessSecurityGroup: " + getServiceAccessSecurityGroup() + ","); if (getAdditionalMasterSecurityGroups() != null) sb.append("AdditionalMasterSecurityGroups: " + getAdditionalMasterSecurityGroups() + ","); if (getAdditionalSlaveSecurityGroups() != null) sb.append("AdditionalSlaveSecurityGroups: " + getAdditionalSlaveSecurityGroups()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof JobFlowInstancesConfig == false) return false; JobFlowInstancesConfig 
other = (JobFlowInstancesConfig) obj; if (other.getMasterInstanceType() == null ^ this.getMasterInstanceType() == null) return false; if (other.getMasterInstanceType() != null && other.getMasterInstanceType().equals( this.getMasterInstanceType()) == false) return false; if (other.getSlaveInstanceType() == null ^ this.getSlaveInstanceType() == null) return false; if (other.getSlaveInstanceType() != null && other.getSlaveInstanceType().equals( this.getSlaveInstanceType()) == false) return false; if (other.getInstanceCount() == null ^ this.getInstanceCount() == null) return false; if (other.getInstanceCount() != null && other.getInstanceCount().equals(this.getInstanceCount()) == false) return false; if (other.getInstanceGroups() == null ^ this.getInstanceGroups() == null) return false; if (other.getInstanceGroups() != null && other.getInstanceGroups().equals(this.getInstanceGroups()) == false) return false; if (other.getEc2KeyName() == null ^ this.getEc2KeyName() == null) return false; if (other.getEc2KeyName() != null && other.getEc2KeyName().equals(this.getEc2KeyName()) == false) return false; if (other.getPlacement() == null ^ this.getPlacement() == null) return false; if (other.getPlacement() != null && other.getPlacement().equals(this.getPlacement()) == false) return false; if (other.getKeepJobFlowAliveWhenNoSteps() == null ^ this.getKeepJobFlowAliveWhenNoSteps() == null) return false; if (other.getKeepJobFlowAliveWhenNoSteps() != null && other.getKeepJobFlowAliveWhenNoSteps().equals( this.getKeepJobFlowAliveWhenNoSteps()) == false) return false; if (other.getTerminationProtected() == null ^ this.getTerminationProtected() == null) return false; if (other.getTerminationProtected() != null && other.getTerminationProtected().equals( this.getTerminationProtected()) == false) return false; if (other.getHadoopVersion() == null ^ this.getHadoopVersion() == null) return false; if (other.getHadoopVersion() != null && 
other.getHadoopVersion().equals(this.getHadoopVersion()) == false) return false; if (other.getEc2SubnetId() == null ^ this.getEc2SubnetId() == null) return false; if (other.getEc2SubnetId() != null && other.getEc2SubnetId().equals(this.getEc2SubnetId()) == false) return false; if (other.getEmrManagedMasterSecurityGroup() == null ^ this.getEmrManagedMasterSecurityGroup() == null) return false; if (other.getEmrManagedMasterSecurityGroup() != null && other.getEmrManagedMasterSecurityGroup().equals( this.getEmrManagedMasterSecurityGroup()) == false) return false; if (other.getEmrManagedSlaveSecurityGroup() == null ^ this.getEmrManagedSlaveSecurityGroup() == null) return false; if (other.getEmrManagedSlaveSecurityGroup() != null && other.getEmrManagedSlaveSecurityGroup().equals( this.getEmrManagedSlaveSecurityGroup()) == false) return false; if (other.getServiceAccessSecurityGroup() == null ^ this.getServiceAccessSecurityGroup() == null) return false; if (other.getServiceAccessSecurityGroup() != null && other.getServiceAccessSecurityGroup().equals( this.getServiceAccessSecurityGroup()) == false) return false; if (other.getAdditionalMasterSecurityGroups() == null ^ this.getAdditionalMasterSecurityGroups() == null) return false; if (other.getAdditionalMasterSecurityGroups() != null && other.getAdditionalMasterSecurityGroups().equals( this.getAdditionalMasterSecurityGroups()) == false) return false; if (other.getAdditionalSlaveSecurityGroups() == null ^ this.getAdditionalSlaveSecurityGroups() == null) return false; if (other.getAdditionalSlaveSecurityGroups() != null && other.getAdditionalSlaveSecurityGroups().equals( this.getAdditionalSlaveSecurityGroups()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getMasterInstanceType() == null) ? 0 : getMasterInstanceType().hashCode()); hashCode = prime * hashCode + ((getSlaveInstanceType() == null) ? 
0 : getSlaveInstanceType().hashCode()); hashCode = prime * hashCode + ((getInstanceCount() == null) ? 0 : getInstanceCount() .hashCode()); hashCode = prime * hashCode + ((getInstanceGroups() == null) ? 0 : getInstanceGroups() .hashCode()); hashCode = prime * hashCode + ((getEc2KeyName() == null) ? 0 : getEc2KeyName().hashCode()); hashCode = prime * hashCode + ((getPlacement() == null) ? 0 : getPlacement().hashCode()); hashCode = prime * hashCode + ((getKeepJobFlowAliveWhenNoSteps() == null) ? 0 : getKeepJobFlowAliveWhenNoSteps().hashCode()); hashCode = prime * hashCode + ((getTerminationProtected() == null) ? 0 : getTerminationProtected().hashCode()); hashCode = prime * hashCode + ((getHadoopVersion() == null) ? 0 : getHadoopVersion() .hashCode()); hashCode = prime * hashCode + ((getEc2SubnetId() == null) ? 0 : getEc2SubnetId().hashCode()); hashCode = prime * hashCode + ((getEmrManagedMasterSecurityGroup() == null) ? 0 : getEmrManagedMasterSecurityGroup().hashCode()); hashCode = prime * hashCode + ((getEmrManagedSlaveSecurityGroup() == null) ? 0 : getEmrManagedSlaveSecurityGroup().hashCode()); hashCode = prime * hashCode + ((getServiceAccessSecurityGroup() == null) ? 0 : getServiceAccessSecurityGroup().hashCode()); hashCode = prime * hashCode + ((getAdditionalMasterSecurityGroups() == null) ? 0 : getAdditionalMasterSecurityGroups().hashCode()); hashCode = prime * hashCode + ((getAdditionalSlaveSecurityGroups() == null) ? 0 : getAdditionalSlaveSecurityGroups().hashCode()); return hashCode; } @Override public JobFlowInstancesConfig clone() { try { return (JobFlowInstancesConfig) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
apache-2.0
nezihyigitbasi/presto
presto-main/src/test/java/com/facebook/presto/sql/planner/iterative/rule/TestPickTableLayout.java
10572
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.sql.planner.iterative.rule;

import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorId;
import com.facebook.presto.spi.TableHandle;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.iterative.Rule;
import com.facebook.presto.sql.planner.iterative.rule.test.BaseRuleTest;
import com.facebook.presto.testing.TestingTransactionHandle;
import com.facebook.presto.tpch.TpchColumnHandle;
import com.facebook.presto.tpch.TpchTableHandle;
import com.facebook.presto.tpch.TpchTableLayoutHandle;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import java.util.Map;
import java.util.Optional;

import static com.facebook.presto.spi.predicate.Domain.singleValue;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.constrainedTableScanWithTableLayout;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.filter;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.values;
import static com.facebook.presto.sql.planner.iterative.rule.test.PlanBuilder.expression;
import static io.airlift.slice.Slices.utf8Slice;

/**
 * Tests for the {@code PickTableLayout} iterative rule set, which assigns a
 * connector table layout to table scans (optionally pushing down a filter
 * predicate into the layout's tuple-domain constraint).
 *
 * Scans are built against the TPCH connector's {@code nation} and
 * {@code orders} tables; the fixture handles are created once in
 * {@link #setUpBeforeClass()}.
 */
public class TestPickTableLayout
        extends BaseRuleTest
{
    // Rule set under test; created once per class against the tester's metadata.
    private PickTableLayout pickTableLayout;
    // Handles that already carry a (unconstrained) table layout.
    private TableHandle nationTableHandle;
    private TableHandle ordersTableHandle;
    private ConnectorId connectorId;

    @BeforeClass
    public void setUpBeforeClass()
    {
        pickTableLayout = new PickTableLayout(tester().getMetadata(), new SqlParser());

        connectorId = tester().getCurrentConnectorId();

        // Scale factor 1.0 for both TPCH tables; each handle is wrapped with an
        // unconstrained (TupleDomain.all()) layout so it counts as "already planned".
        TpchTableHandle nationTpchTableHandle = new TpchTableHandle("nation", 1.0);
        TpchTableHandle orderTpchTableHandle = new TpchTableHandle("orders", 1.0);
        nationTableHandle = new TableHandle(
                connectorId,
                nationTpchTableHandle,
                TestingTransactionHandle.create(),
                Optional.of(new TpchTableLayoutHandle(nationTpchTableHandle, TupleDomain.all())));
        ordersTableHandle = new TableHandle(
                connectorId,
                orderTpchTableHandle,
                TestingTransactionHandle.create(),
                Optional.of(new TpchTableLayoutHandle(orderTpchTableHandle, TupleDomain.all())));
    }

    // None of the rules should match a plan that contains no table scan.
    @Test
    public void doesNotFireIfNoTableScan()
    {
        for (Rule<?> rule : pickTableLayout.rules()) {
            tester().assertThat(rule)
                    .on(p -> p.values(p.variable("a", BIGINT)))
                    .doesNotFire();
        }
    }

    // A scan that already has a layout must be left alone by the no-predicate rule.
    @Test
    public void doesNotFireIfTableScanHasTableLayout()
    {
        tester().assertThat(pickTableLayout.pickTableLayoutWithoutPredicate())
                .on(p -> p.tableScan(
                        nationTableHandle,
                        ImmutableList.of(p.variable("nationkey", BIGINT)),
                        ImmutableMap.of(p.variable("nationkey", BIGINT), new TpchColumnHandle("nationkey", BIGINT))))
                .doesNotFire();
    }

    // Predicate 'orderstatus = G' matches no layout, so the scan collapses to empty values.
    @Test
    public void eliminateTableScanWhenNoLayoutExist()
    {
        tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
                .on(p -> p.filter(expression("orderstatus = 'G'"),
                        p.tableScan(
                                ordersTableHandle,
                                ImmutableList.of(p.variable("orderstatus", createVarcharType(1))),
                                ImmutableMap.of(p.variable("orderstatus", createVarcharType(1)), new TpchColumnHandle("orderstatus", createVarcharType(1))))))
                .matches(values("A"));
    }

    // A scan whose current/enforced constraints are already TupleDomain.none()
    // cannot produce rows; the filtered scan is replaced with empty values.
    @Test
    public void replaceWithExistsWhenNoLayoutExist()
    {
        ColumnHandle columnHandle = new TpchColumnHandle("nationkey", BIGINT);
        tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
                .on(p -> p.filter(expression("nationkey = BIGINT '44'"),
                        p.tableScan(
                                nationTableHandle,
                                ImmutableList.of(p.variable("nationkey", BIGINT)),
                                ImmutableMap.of(p.variable("nationkey", BIGINT), columnHandle),
                                TupleDomain.none(),
                                TupleDomain.none())))
                .matches(values("A"));
    }

    // Nothing pushable in the predicate and the constraint is unchanged -> no rewrite.
    @Test
    public void doesNotFireIfRuleNotChangePlan()
    {
        tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
                .on(p -> p.filter(expression("nationkey % 17 =  BIGINT '44' AND nationkey % 15 =  BIGINT '43'"),
                        p.tableScan(
                                nationTableHandle,
                                ImmutableList.of(p.variable("nationkey", BIGINT)),
                                ImmutableMap.of(p.variable("nationkey", BIGINT), new TpchColumnHandle("nationkey", BIGINT)),
                                TupleDomain.all(),
                                TupleDomain.all())))
                .doesNotFire();
    }

    // A layout-less scan (Optional.empty()) gets a layout assigned with no constraint.
    @Test
    public void ruleAddedTableLayoutToTableScan()
    {
        tester().assertThat(pickTableLayout.pickTableLayoutWithoutPredicate())
                .on(p -> p.tableScan(
                        new TableHandle(
                                connectorId,
                                new TpchTableHandle("nation", 1.0),
                                TestingTransactionHandle.create(),
                                Optional.empty()),
                        ImmutableList.of(p.variable("nationkey", BIGINT)),
                        ImmutableMap.of(p.variable("nationkey", BIGINT), new TpchColumnHandle("nationkey", BIGINT))))
                .matches(
                        constrainedTableScanWithTableLayout("nation", ImmutableMap.of(), ImmutableMap.of("nationkey", "nationkey")));
    }

    // The filter's equality predicate is pushed into the layout constraint.
    @Test
    public void ruleAddedTableLayoutToFilterTableScan()
    {
        Map<String, Domain> filterConstraint = ImmutableMap.<String, Domain>builder()
                .put("orderstatus", singleValue(createVarcharType(1), utf8Slice("F")))
                .build();
        tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
                .on(p -> p.filter(expression("orderstatus = CAST ('F' AS VARCHAR(1))"),
                        p.tableScan(
                                ordersTableHandle,
                                ImmutableList.of(p.variable("orderstatus", createVarcharType(1))),
                                ImmutableMap.of(p.variable("orderstatus", createVarcharType(1)), new TpchColumnHandle("orderstatus", createVarcharType(1))))))
                .matches(
                        constrainedTableScanWithTableLayout("orders", filterConstraint, ImmutableMap.of("orderstatus", "orderstatus")));
    }

    // Even when the scan's existing constraint is empty, the predicate rule
    // replaces it with a layout constrained by the filter predicate.
    @Test
    public void ruleAddedNewTableLayoutIfTableScanHasEmptyConstraint()
    {
        tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
                .on(p -> p.filter(expression("orderstatus = 'F'"),
                        p.tableScan(
                                ordersTableHandle,
                                ImmutableList.of(p.variable("orderstatus", createVarcharType(1))),
                                ImmutableMap.of(p.variable("orderstatus", createVarcharType(1)), new TpchColumnHandle("orderstatus", createVarcharType(1))))))
                .matches(
                        constrainedTableScanWithTableLayout(
                                "orders",
                                ImmutableMap.of("orderstatus", singleValue(createVarcharType(1), utf8Slice("F"))),
                                ImmutableMap.of("orderstatus", "orderstatus")));
    }

    // Fully pushdownable predicate: the filter disappears into the layout constraint.
    @Test
    public void ruleWithPushdownableToTableLayoutPredicate()
    {
        Type orderStatusType = createVarcharType(1);
        tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
                .on(p -> p.filter(expression("orderstatus = 'O'"),
                        p.tableScan(
                                ordersTableHandle,
                                ImmutableList.of(p.variable("orderstatus", orderStatusType)),
                                ImmutableMap.of(p.variable("orderstatus", orderStatusType), new TpchColumnHandle("orderstatus", orderStatusType)))))
                .matches(constrainedTableScanWithTableLayout(
                        "orders",
                        ImmutableMap.of("orderstatus", singleValue(orderStatusType, utf8Slice("O"))),
                        ImmutableMap.of("orderstatus", "orderstatus")));
    }

    // Non-deterministic conjunct (rand() = 0) must stay in a filter above the
    // scan; only the deterministic part is pushed into the layout constraint.
    @Test
    public void nonDeterministicPredicate()
    {
        Type orderStatusType = createVarcharType(1);
        tester().assertThat(pickTableLayout.pickTableLayoutForPredicate())
                .on(p -> p.filter(expression("orderstatus = 'O' AND rand() = 0"),
                        p.tableScan(
                                ordersTableHandle,
                                ImmutableList.of(p.variable("orderstatus", orderStatusType)),
                                ImmutableMap.of(p.variable("orderstatus", orderStatusType), new TpchColumnHandle("orderstatus", orderStatusType)))))
                .matches(
                        filter("rand() = 0",
                                constrainedTableScanWithTableLayout(
                                        "orders",
                                        ImmutableMap.of("orderstatus", singleValue(orderStatusType, utf8Slice("O"))),
                                        ImmutableMap.of("orderstatus", "orderstatus"))));
    }
}
apache-2.0
andrewlxia/practice
3Sum.java
834
/**
 * LeetCode 15 "3Sum": return all unique triplets (a, b, c) from {@code num}
 * such that a + b + c == 0.
 *
 * Improvements over the HashSet-based original:
 * - duplicates are skipped in place with the standard sorted two-pointer
 *   technique, so no auxiliary Set is needed (O(1) extra space besides the
 *   result, and a deterministic, sorted output order);
 * - the raw-typed {@code new ArrayList(set)} (unchecked warning) is gone;
 * - an explicit guard handles null/short input ({@code num.length - 2} would
 *   otherwise be negative, which the loop bound tolerated only by accident).
 */
public class Solution {
    /**
     * @param num candidate values; may be null or shorter than 3 (yields an empty list)
     * @return every distinct triplet summing to zero, each triplet in ascending
     *         order, triplets ordered by their sorted position
     */
    public List<List<Integer>> threeSum(int[] num) {
        List<List<Integer>> result = new ArrayList<>();
        if (num == null || num.length < 3) {
            return result;
        }
        Arrays.sort(num);
        for (int i = 0; i < num.length - 2; i++) {
            // Skip duplicate anchor values so each triplet is emitted once.
            if (i > 0 && num[i] == num[i - 1]) {
                continue;
            }
            int j = i + 1;
            int k = num.length - 1;
            while (j < k) {
                int sum = num[i] + num[j] + num[k];
                if (sum == 0) {
                    result.add(Arrays.asList(num[i], num[j], num[k]));
                    j++;
                    k--;
                    // Advance past duplicates of the just-used middle/last values.
                    while (j < k && num[j] == num[j - 1]) {
                        j++;
                    }
                    while (j < k && num[k] == num[k + 1]) {
                        k--;
                    }
                }
                else if (sum < 0) {
                    j++; // total too small: need a larger middle value
                }
                else {
                    k--; // total too large: need a smaller last value
                }
            }
        }
        return result;
    }
}
apache-2.0
Johnnei/JavaTorrent
bittorrent/src/test/java/org/johnnei/javatorrent/bittorrent/tracker/TorrentInfoTest.java
2213
package org.johnnei.javatorrent.bittorrent.tracker;

import java.time.Clock;
import java.time.Duration;

import org.junit.jupiter.api.Test;

import org.johnnei.javatorrent.test.DummyEntity;
import org.johnnei.javatorrent.test.TestClock;
import org.johnnei.javatorrent.torrent.Torrent;

import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * Tests {@link TorrentInfo}
 */
public class TorrentInfoTest {

	/**
	 * The announce-style {@code setInfo(seeders, leechers)} overload must
	 * record the peer counts AND reset the time-since-last-announce: the
	 * injected clock is advanced 3 seconds before the call, yet afterwards the
	 * reported duration is zero.
	 */
	@Test
	public void testSetInfoAnnounce() {
		Torrent torrent = DummyEntity.createUniqueTorrent();

		// Fixed base clock plus a clock 3s ahead; TestClock lets us swap between them.
		Clock baseClock = Clock.fixed(Clock.systemDefaultZone().instant(), Clock.systemDefaultZone().getZone());
		Clock offsetClock = Clock.offset(baseClock, Duration.ofSeconds(3));
		TestClock testClock = new TestClock(baseClock);

		TorrentInfo cut = new TorrentInfo(torrent, testClock);

		testClock.setClock(offsetClock);

		cut.setInfo(15, 42);

		assertEquals(15, cut.getSeeders(), "Seeder count should have been 15");
		assertEquals(42, cut.getLeechers(), "Leechers count should have been 42");
		assertEquals(Duration.ZERO, cut.getTimeSinceLastAnnounce(), "Duration since last announce should have been zero");
	}

	/**
	 * The scrape-style {@code setInfo(seeders, leechers, downloadCount)}
	 * overload must expose the download count; before any scrape the count is
	 * unknown and rendered as "N/A".
	 */
	@Test
	public void testSetInfoScrape() {
		Torrent torrent = DummyEntity.createUniqueTorrent();

		TorrentInfo cut = new TorrentInfo(torrent, Clock.systemDefaultZone());

		assertEquals("N/A", cut.getDownloadCount(), "Download count should have been 0 causing N/A");

		cut.setInfo(15, 42, 10);

		assertEquals(15, cut.getSeeders(), "Seeder count should have been 15");
		assertEquals(42, cut.getLeechers(), "Leechers count should have been 42");
		assertEquals("10", cut.getDownloadCount(), "Download count should have been 10");
	}

	// Constructor wiring and initial state: torrent is stored as-is and the
	// initial tracker event is STARTED until explicitly changed.
	@Test
	public void testGettersAndSetters() {
		Torrent torrent = DummyEntity.createUniqueTorrent();

		TorrentInfo cut = new TorrentInfo(torrent, Clock.systemDefaultZone());

		assertEquals(torrent, cut.getTorrent(), "Torrent should be the same as the given one in the constructor");
		assertEquals(TrackerEvent.EVENT_STARTED, cut.getEvent(), "Initial tracker event should be STARTED");

		cut.setEvent(TrackerEvent.EVENT_COMPLETED);

		assertEquals(TrackerEvent.EVENT_COMPLETED, cut.getEvent(), "Event should have changed by set.");
	}
}
apache-2.0
pluto-build/pluto
test/build/pluto/test/build/cycle/fixpoint/FileUtils.java
842
package build.pluto.test.build.cycle.fixpoint;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.sugarj.common.FileCommands;

/**
 * File-based helpers for the fixpoint-cycle build tests: resolving a list of
 * relative paths from a file, and round-tripping a single integer through a
 * file.
 */
public class FileUtils {

	/**
	 * Reads {@code file} line by line, treating each line as a path relative
	 * to {@code workingDir}, and returns the resolved files in order.
	 */
	public static List<File> readPathsFromFile(File file, File workingDir) throws IOException{
		List<String> relativePaths = FileCommands.readFileLines(file);
		List<File> resolvedPaths = new ArrayList<>(relativePaths.size());
		for (String relativePath : relativePaths) {
			resolvedPaths.add(new File(workingDir, relativePath));
		}
		return resolvedPaths;
	}

	/** Parses the entire (whitespace-trimmed) content of {@code file} as a decimal int. */
	public static int readIntFromFile(File file) throws IOException {
		String content = FileCommands.readFileAsString(file);
		return Integer.parseInt(content.trim());
	}

	/** Overwrites {@code file} with the decimal representation of {@code num}. */
	public static void writeIntToFile(int num, File file) throws IOException {
		FileCommands.writeToFile(file, Integer.toString(num));
	}
}
apache-2.0
danish1994/House_Of_Change
src/com/osahub/thehouse/controller/ChangePasswordController.java
561
package com.osahub.thehouse.controller;

import java.io.IOException;

import javax.servlet.*;
import javax.servlet.http.*;

import static com.osahub.thehouse.dao.UserDetailsDao.modifyPassword;

/**
 * Servlet that updates a user's stored password from a POSTed form and then
 * sends the browser back through the login check.
 */
@SuppressWarnings("serial")
public class ChangePasswordController extends HttpServlet {

	public void doPost(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException {
		// Form fields: the user's identifier and the replacement password.
		String userId = req.getParameter("uID");
		String newPassword = req.getParameter("pass");

		// Persist the password change for this user.
		modifyPassword(userId, newPassword);

		// Re-run the login flow so the new credentials take effect.
		res.sendRedirect("/login-check");
	}
}
apache-2.0
MarcinSzyszka/MobileSecondHand
AndroidStudio_Android/MobileSeconndHand/app/src/test/java/marcin_szyszka/mobileseconndhand/ExampleUnitTest.java
325
package marcin_szyszka.mobileseconndhand;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * To work on unit tests, switch the Test Artifact in the Build Variants view.
 */
public class ExampleUnitTest {
    // Android Studio's generated placeholder test; verifies only that the
    // local-JVM unit test harness runs.
    @Test
    public void addition_isCorrect() throws Exception {
        assertEquals(4, 2 + 2);
    }
}
apache-2.0
lehmann/BrainSimulator
app/src/main/java/com/github/neuralnetworks/calculation/ConnectionCalculator.java
1165
package com.github.neuralnetworks.calculation;

import com.github.neuralnetworks.architecture.Connections;
import com.github.neuralnetworks.architecture.Layer;
import com.github.neuralnetworks.calculation.memory.ValuesProvider;

import java.io.Serializable;
import java.util.List;

/**
 * Implementations of this interface calculate the values flowing over a set of
 * connections between layers.
 *
 * <p>Important: the results of the calculations are represented as tensors
 * ({@code Tensor}), because implementations are expected to calculate many
 * input samples at once. Each column of the result matrix represents a single
 * input; for example, if the network is trained to classify MNIST images, each
 * column of the input matrix represents one MNIST image.
 *
 * <p>Serializable so that trained networks (which hold calculators) can be
 * persisted.
 */
public interface ConnectionCalculator extends Serializable {

    /**
     * Calculates the given connections and stores the result for the target layer.
     *
     * @param connections    the list of connections to calculate
     * @param valuesProvider the provider of input/output value tensors for the connections
     * @param targetLayer    the target layer, with which the "output" values are associated
     */
    public void calculate(List<Connections> connections, ValuesProvider valuesProvider, Layer targetLayer);
}
apache-2.0
christophd/citrus
catalog/citrus-endpoint-catalog/src/main/java/com/consol/citrus/dsl/endpoint/websocket/WebSocketEndpointCatalog.java
1829
/*
 * Copyright 2020 the original author or authors.
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.consol.citrus.dsl.endpoint.websocket;

import com.consol.citrus.websocket.client.WebSocketClientBuilder;
import com.consol.citrus.websocket.endpoint.builder.WebSocketEndpoints;
import com.consol.citrus.websocket.server.WebSocketServerBuilder;

/**
 * Entry point of the WebSocket endpoint DSL: {@code websocket().client()} or
 * {@code websocket().server()} hand out the matching endpoint builders.
 *
 * @author Christoph Deppisch
 */
public class WebSocketEndpointCatalog {

    /**
     * Private constructor to prevent direct instantiation; use {@link #websocket()}.
     */
    private WebSocketEndpointCatalog() {
        // prevent direct instantiation
    }

    /**
     * Static entrance method returning a new catalog instance.
     * @return a fresh {@code WebSocketEndpointCatalog}
     */
    public static WebSocketEndpointCatalog websocket() {
        return new WebSocketEndpointCatalog();
    }

    /**
     * Gets the client builder.
     * @return builder for a WebSocket client endpoint
     */
    public WebSocketClientBuilder client() {
        return WebSocketEndpoints.websocket().client();
    }

    /**
     * Gets the server builder.
     * @return builder for a WebSocket server endpoint
     */
    public WebSocketServerBuilder server() {
        return WebSocketEndpoints.websocket().server();
    }
}
apache-2.0
box/mojito
webapp/src/test/java/com/box/l10n/mojito/aws/s3/AmazonS3ConfigurationTest.java
1281
package com.box.l10n.mojito.aws.s3;

import com.amazonaws.services.s3.AmazonS3;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

/**
 * Spring integration test for {@link AmazonS3Configuration}: boots only the
 * S3-related configuration classes and, when an {@link AmazonS3} client is
 * actually configured, allows a manual smoke test against a real bucket.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(classes = {AmazonS3ConfigurationTest.class, AmazonS3ConfigurationProperties.class, AmazonS3Configuration.class})
@EnableConfigurationProperties
public class AmazonS3ConfigurationTest {

    static Logger logger = LoggerFactory.getLogger(AmazonS3ConfigurationTest.class);

    // required = false: the bean only exists when S3 credentials/properties are
    // configured; its absence is handled by the Assume in before().
    @Autowired(required = false)
    AmazonS3 amazonS3;

    @Autowired
    AmazonS3ConfigurationProperties amazonS3ConfigurationProperties;

    @Before
    public void before() {
        // Skip (rather than fail) every test when no S3 client is configured.
        Assume.assumeNotNull(amazonS3);
    }

    // Manual smoke test: writes a non-ASCII string to verify client + encoding.
    // Replace the "change-me" bucket name before running.
    @Ignore("Placeholder to test amazon client")
    @Test
    public void testPutString() {
        amazonS3.putObject("change-me", "testkey", "काहीतरी");
    }
}
apache-2.0
Top-Q/kpi-editor
src/test/java/il/co/topq/kpi/client/Issues.java
825
package il.co.topq.kpi.client;

import java.io.IOException;

import il.co.topq.kpi.view.DataTable;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;

/**
 * REST client resource for the issues aggregation endpoint
 * ({@code aggs/issues/<type>/<daysToDate>}).
 */
public class Issues extends AbstractResource {

    public Issues(OkHttpClient client, String baseUrl) {
        super(client, baseUrl);
    }

    /**
     * Fetches the issues aggregation table.
     *
     * @param type issue type path segment
     * @param daysToDate number of days back from today to aggregate
     * @return the deserialized {@link DataTable}
     * @throws IOException on connection failure or if the response cannot be read/parsed
     */
    public DataTable get(String type, int daysToDate) throws IOException {
        // @formatter:off
        Request request = new Request.Builder()
                .url(baseUrl + "aggs/issues/" + type + "/" + daysToDate)
                .get()
                .addHeader("content-type", "application/json")
                .addHeader("cache-control", "no-cache")
                .build();
        // @formatter:on
        // try-with-resources: OkHttp's Response is Closeable; the original code
        // leaked the response body's connection on every call.
        try (Response response = client.newCall(request).execute()) {
            final String responseBody = response.body().string();
            return mapper.readValue(responseBody, DataTable.class);
        }
    }
}
apache-2.0
75py/XUtilities
src/com/nagopy/android/xposed/utilities/setting/ModNotificationExpandedClockSettings.java
4205
/*
 * Copyright (C) 2013 75py
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.nagopy.android.xposed.utilities.setting;

import android.content.res.XModuleResources;
import android.graphics.Color;
import android.graphics.Typeface;

import com.nagopy.android.xposed.annotation.XIntDefaultValue;
import com.nagopy.android.xposed.annotation.XSettings;
import com.nagopy.android.xposed.annotation.XSettingsHint;
import com.nagopy.android.xposed.annotation.XStringDefaultValue;
import com.nagopy.android.xposed.utilities.util.Const;

/**
 * Settings holder for the "expanded notification clock" Xposed module.
 * Fields are populated from preferences via the {@code XSettings} annotation
 * machinery; {@code @XSettingsHint(ignore = true)} marks runtime-only fields
 * that are not backed by a preference.
 */
@XSettings(modulePackageName = Const.PACKAGE_NAME)
class ModNotificationExpandedClockSettings {

    // Runtime-only handle to the module's resources; not a stored preference.
    @XSettingsHint(ignore = true)
    public XModuleResources moduleResources;

    /** Whether the module is enabled */
    public Boolean masterModNotificationExpandedClockEnable;

    /** Time text size (percent) */
    @XIntDefaultValue(100)
    public int notificationExpandedClockTimeTextSize;

    /** Time text color */
    @XIntDefaultValue(Color.WHITE)
    public int notificationExpandedClockTimeTextColor;

    /** Time format pattern */
    @XStringDefaultValue("HH:mm:ss")
    public String notificationExpandedClockTimeFormat;

    /** Whether to force English locale for the time text */
    public Boolean notificationExpandedClockTimeForceEnglish;

    // ----------------------------------------------
    // ----------------------------------------------
    // ----------------------------------------------
    /** Font family selector for the time text ("Kbn" = category/kind) */
    @XStringDefaultValue("DEFAULT")
    public String notificationExpandedClockTimeTypefaceKbn;

    // Explicit typeface name, used when the family selector requires one.
    public String notificationExpandedClockTimeTypefaceName;

    /** Typeface style for the time text (e.g. {@link Typeface#NORMAL}) */
    @XIntDefaultValue(Typeface.NORMAL)
    public Integer notificationExpandedClockTimeTypefaceStyle;

    // ----------------------------------------------
    // ----------------------------------------------
    // ----------------------------------------------
    /** Default time text size (captured at runtime, not a preference) */
    @XSettingsHint(ignore = true)
    public float defaultTimeTextSize;

    /** Default time text color (captured at runtime) */
    @XSettingsHint(ignore = true)
    public int defaultTimeTextColor;

    /** Default time typeface (captured at runtime) */
    @XSettingsHint(ignore = true)
    public Typeface defaultTimeTypeface;

    /** Date text size (percent) */
    @XIntDefaultValue(100)
    public int notificationExpandedClockDateTextSize;

    /** Date text color */
    @XIntDefaultValue(Color.WHITE)
    public int notificationExpandedClockDateTextColor;

    /** Date format pattern */
    @XStringDefaultValue("yyyy/MM/dd(E)")
    public String notificationExpandedClockDateFormat;

    /** Whether to force English locale for the date text */
    public Boolean notificationExpandedClockDateForceEnglish;

    // ----------------------------------------------
    // ----------------------------------------------
    // ----------------------------------------------
    /** Font family selector for the date text */
    @XStringDefaultValue("DEFAULT")
    public String notificationExpandedClockDateTypefaceKbn;

    // Explicit typeface name for the date text.
    public String notificationExpandedClockDateTypefaceName;

    /** Typeface style for the date text */
    @XIntDefaultValue(Typeface.NORMAL)
    public Integer notificationExpandedClockDateTypefaceStyle;

    // ----------------------------------------------
    // ----------------------------------------------
    // ----------------------------------------------
    /** Default date text size (captured at runtime) */
    @XSettingsHint(ignore = true)
    public float defaultDateTextSize;

    /** Default date text color (captured at runtime) */
    @XSettingsHint(ignore = true)
    public int defaultDateTextColor;

    /** Default date typeface (captured at runtime) */
    @XSettingsHint(ignore = true)
    public Typeface defaultDateTypeface;
}
apache-2.0
terma/fast-select
src/main/java/com/github/terma/fastselect/MultiShortRequest.java
1937
/*
Copyright 2015-2017 Artem Stasiuk

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.terma.fastselect;

import com.github.terma.fastselect.data.MultiShortData;

import java.util.Arrays;
import java.util.Map;

/**
 * SQL analog is {code}where COLUMN_X in (?, ?...){code}
 * <p>
 * For <code>short[]</code> data type. Storage implementation is {@link MultiShortData}
 */
@SuppressWarnings("WeakerAccess")
public class MultiShortRequest extends ColumnRequest {

    private final short[] values;

    private MultiShortData data;
    private short[] realData;

    /**
     * @param name   column name
     * @param values accepted values; matched when the row's multi-value column
     *               contains at least one of them
     */
    public MultiShortRequest(final String name, final short... values) {
        super(name);
        // Defensive copy, then sort: checkValue relies on Arrays.binarySearch,
        // whose result is undefined on an unsorted array. The original code
        // silently returned wrong results when callers passed unsorted values.
        this.values = values.clone();
        Arrays.sort(this.values);
    }

    /**
     * @param position row position in the column data
     * @return true if any element of the row's value list is in {@link #values}
     */
    @Override
    public boolean checkValue(final int position) {
        int dataStartPosition = data.getDataStart(position);
        int dataEndPosition = data.getDataEnd(position);
        for (int i = dataStartPosition; i < dataEndPosition; i++) {
            if (Arrays.binarySearch(values, realData[i]) >= 0) return true;
        }
        return false;
    }

    /** Caches direct references to the column storage for fast access in checkValue. */
    @Override
    public void prepare(Map<String, FastSelect.Column> columnByNames) {
        super.prepare(columnByNames);

        data = ((MultiShortData) column.data);
        realData = data.data.data;
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + " {name: '" + name + "', values: " + Arrays.toString(values) + "}";
    }
}
apache-2.0
dimagi/javarosa
javarosa/j2me/media/src/org/javarosa/media/audio/service/J2MEAudioCaptureService.java
10141
/*
 * Copyright (C) 2009 JavaRosa
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.javarosa.media.audio.service;

import org.javarosa.core.services.UnavailableServiceException;
import org.javarosa.j2me.services.AudioCaptureService;
import org.javarosa.j2me.services.DataCaptureService;
import org.javarosa.j2me.services.FileService;
import org.javarosa.j2me.services.exception.AudioException;
import org.javarosa.j2me.services.exception.FileException;
import org.javarosa.utilities.file.J2MEFileService;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import javax.microedition.media.Manager;
import javax.microedition.media.MediaException;
import javax.microedition.media.Player;
import javax.microedition.media.control.RecordControl;

/**
 * An audio capture service that utilizes J2ME's robust Media API.
 *
 * Lifecycle (tracked in {@code serviceState}): IDLE -> startRecord ->
 * CAPTURE_STARTED -> stopRecord -> CAPTURE_STOPPED -> startPlayback ->
 * PLAYBACK_STARTED -> stopPlayback -> PLAYBACK_STOPPED; closeStreams moves to
 * CLOSED. Recordings are written as files named "Audio&lt;counter&gt;.wav"
 * under a "JRSounds" directory on the default file-system root.
 *
 * NOTE(review): not thread-safe — all state is unsynchronized instance state;
 * presumably called from a single UI/control thread. Confirm against callers.
 *
 * @author Ndubisi Onuora
 */

public class J2MEAudioCaptureService implements AudioCaptureService
{
    public static final String serviceName = DataCaptureService.AUDIO;
    // Current lifecycle state; one of the AudioCaptureService state constants.
    private int serviceState;

    // Player/control pair used while capturing audio.
    private Player recordP;
    private RecordControl recordControl;
    // Stream the recording is written to (file output stream).
    private OutputStream audioDataStream;

    // Player and input stream used during playback of the last recording.
    private Player playP;
    private InputStream recordedInputStream;

    // File abstraction used for all directory/stream operations.
    private FileService fileService;
    // Full URL of the current recording file, or null when none.
    private String recordFileName;
    private String defaultFileName;
    //private String recordDirectory;

    private boolean recordingDeleted;
    private boolean recordingCreated;
    private boolean recordingDirectoryCreated;
    // File extension of the recording format (includes the dot).
    private String audioFormat;
    // Number of recordings made; used to generate unique default file names.
    private int counter;

    /**
     * Creates the service and resolves the file service it depends on.
     *
     * @throws UnavailableServiceException if no file service is available
     */
    public J2MEAudioCaptureService() throws UnavailableServiceException
    {
        try
        {
            fileService = getFileService();
        }
        catch(UnavailableServiceException ue)
        {
            ue.printStackTrace();
            throw new UnavailableServiceException("File service is unavailable. Unable to start " + serviceName);
        }
        serviceState = AudioCaptureService.IDLE;
        recordingDeleted = false;
        recordingCreated = false;
        recordingDirectoryCreated = false;
        counter = 0;
        audioFormat = ".wav"; //Default audio format is WAV
    }

    /** @return the data-capture service type identifier (audio). */
    public String getType()
    {
        return serviceName;
    }

    //@Override
    /** @return the stream the current/last recording was written to (may be null). */
    public OutputStream getAudio()
    {
        return audioDataStream;
    }

    /** @return the current lifecycle state constant. */
    public int getState()
    {
        return serviceState;
    }

    //@Override
    /**
     * Starts capturing audio from the device microphone into a new file.
     * On success the state becomes CAPTURE_STARTED and {@code counter} is
     * incremented (note: counter increments even when an IOException was
     * swallowed above — see catch blocks).
     *
     * @throws AudioException if the media layer fails or no output stream
     *         could be opened
     */
    public void startRecord() throws AudioException
    {
        try
        {
            recordP = Manager.createPlayer("capture://audio");
            recordP.realize();
            recordControl = (RecordControl)recordP.getControl("RecordControl");
            try
            {
                recordFileName = null; //Reset file name to prevent concatenation of previous recordFileName twice
                createFileName(recordFileName);
                System.err.println("Recorded Filename=" + recordFileName);
                audioDataStream = fileService.getFileOutputStream(recordFileName);
            }
            catch(FileException fe)
            {
                // Swallowed here; the null check below turns it into an AudioException.
                audioDataStream = null;
                System.err.println("Error obtaining audio output stream.");
                fe.printStackTrace();
            }
            if(audioDataStream == null)
            {
                throw new AudioException("Could not record audio due to null audio output stream!");
            }
            recordControl.setRecordStream(audioDataStream);
            recordControl.startRecord();
            recordingCreated = true;
            recordingDeleted = false;
            recordP.start();
            /*
             * If the method does not die before here,
             * then the capture has officially started.
             */
            serviceState = AudioCaptureService.CAPTURE_STARTED;
        }
        catch(MediaException me)
        {
            throw new AudioException(me.getMessage());
        }
        catch(IOException ioe)
        {
            // NOTE(review): IOException is only logged; state is left unchanged.
            System.err.println(ioe.getMessage());
        }
        ++counter;
    }

    //@Override
    /**
     * Commits the recording to its stream and stops the capture player.
     *
     * @throws AudioException if the media layer fails to stop
     */
    public void stopRecord() throws AudioException
    {
        try
        {
            recordControl.commit();
            recordP.stop();
            serviceState = AudioCaptureService.CAPTURE_STOPPED;
        }
        catch(MediaException me)
        {
            throw new AudioException(me.getMessage());
        }
        catch(IOException ioe)
        {
            System.err.println(ioe.getMessage());
        }
    }

    //@Override
    /**
     * Plays back the last recording from its file.
     *
     * @throws AudioException if there is no recording, it was deleted, or the
     *         media layer fails
     */
    public void startPlayback() throws AudioException
    {
        try
        {
            try
            {
                // Reopens both streams on the recording file; audioDataStream is
                // also reopened so the null check below can detect file problems.
                audioDataStream = fileService.getFileOutputStream(recordFileName);
                recordedInputStream = fileService.getFileDataStream(recordFileName);
            }
            catch(FileException fe)
            {
                audioDataStream = null;
                recordedInputStream = null;
                System.err.println("An error occurred while obtaining the file data stream.");
                fe.printStackTrace();
            }
            if(audioDataStream == null || recordingDeleted)
            {
                throw new AudioException("No audio data recorded!");
            }
            playP = Manager.createPlayer(recordedInputStream, "audio/x-wav");
            playP.prefetch();
            playP.start();
            serviceState = AudioCaptureService.PLAYBACK_STARTED;
        }
        catch(MediaException me)
        {
            throw new AudioException(me.getMessage());
        }
        catch(IOException ioe)
        {
            System.err.println(ioe.getMessage());
        }
    }

    //@Override
    /**
     * Stops playback; no-op unless a recording exists and playback is active.
     *
     * @throws AudioException if the media layer fails to stop
     */
    public void stopPlayback() throws AudioException
    {
        if( recordingCreated && (serviceState == AudioCaptureService.PLAYBACK_STARTED) )
        {
            try
            {
                playP.stop();
                serviceState = AudioCaptureService.PLAYBACK_STOPPED;
            }
            catch(MediaException me)
            {
                throw new AudioException(me.getMessage());
            }
        }
    }

    /** @return full file URL of the current recording, or null if none. */
    public String getAudioPath()
    {
        return recordFileName;
    }

    /**
     * Marks the current capture as saved under the given name.
     *
     * @param fileName target name; null selects the generated default name
     * @throws FileException if the file name/directory cannot be created
     */
    public void saveRecording(String fileName) throws FileException
    {
        /*
         * If saveRecording() is not called before a subsequent recording, previous recording will be erased.
         */
        if(!recordingCreated)
        {
            createFileName(fileName);
            recordingCreated = true;
        }
    }

    /**
     * Deletes the current recording file and resets the record control.
     *
     * @throws FileException if the file cannot be deleted
     */
    public void removeRecording() throws FileException
    {
        if(recordingCreated)
        {
            try
            {
                audioDataStream.flush();
                closeRecordingStream();
                //closePlaybackStream();
                System.err.println("Recorded Filename=" + recordFileName);
                fileService.deleteFile(recordFileName);
                recordFileName = null;
                //fileService.deleteDirectory(recordDirectory);
                recordControl.reset();
                recordingCreated = false;
                recordingDeleted = true;
                --counter;
            }
            catch(IOException ie)
            {
                System.err.println("Error resetting record control!");
                System.err.println(ie.getMessage());
                ie.printStackTrace();
            }
            catch(FileException fe)
            {
                System.err.println(fe.getMessage());
                fe.printStackTrace();
                throw new FileException("Error removing recorded audio!");
            }
        }
    }

    //Retrieve a reference to the first available service
    // NOTE: the //#if ... //#endif lines below are Antenna/J2ME preprocessor
    // directives, not plain comments — they select the build-time behavior.
    private FileService getFileService() throws UnavailableServiceException
    {
        //#if app.usefileconnections
        //# return new J2MEFileService();
        //#else
        throw new UnavailableServiceException("Unavailable service: " + J2MEFileService.serviceName);
        //#endif
    }

    /**
     * Builds the full recording file URL under the "JRSounds" directory,
     * creating the directory on first use, and stores it in recordFileName.
     *
     * @param fileName base name; null selects "Audio&lt;counter&gt;&lt;format&gt;"
     * @throws FileException if the directory cannot be created
     */
    private void createFileName(String fileName) throws FileException
    {
        String rootName = fileService.getDefaultRoot();
        String restorepath = "file:///" + rootName + "JRSounds";
        String fullName;

        defaultFileName = "Audio" + counter + audioFormat;

        if(!recordingDirectoryCreated)
        {
            //recordDirectory = restorepath;
            fileService.createDirectory(restorepath);
            recordingDirectoryCreated = true;
        }

        if(fileName == null)
        {
            fullName = restorepath + "/" + defaultFileName;
        }
        else
        {
            if(!fileName.endsWith(audioFormat))
                fileName += audioFormat;
            fullName = restorepath + "/" + fileName;
        }
        recordFileName = fullName;
    }

    // Closes the recording output stream, but only once capture has stopped.
    private void closeRecordingStream() throws IOException
    {
        if(audioDataStream != null && serviceState == CAPTURE_STOPPED)
        {
            audioDataStream.close();
        }
    }

    // Closes the playback input stream, but only once playback has stopped.
    private void closePlaybackStream() throws IOException
    {
        if(recordedInputStream != null && serviceState == PLAYBACK_STOPPED)
        {
            recordedInputStream.close();
        }
    }

    //Closes all types of streams that are used
    /**
     * Releases both players and any open streams; state becomes CLOSED.
     *
     * @throws IOException if closing a stream fails
     */
    public void closeStreams() throws IOException
    {
        if(recordP != null)
            recordP.close();
        if(playP != null)
            playP.close();
        closeRecordingStream();
        closePlaybackStream();
        serviceState = AudioCaptureService.CLOSED;
    }
}
apache-2.0
pistolove/sourcecode4junit
Source4Tomcat/src/org/apache/tomcat/util/bcel/classfile/AnnotationDefault.java
3166
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.tomcat.util.bcel.classfile;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.tomcat.util.bcel.Constants;

/**
 * represents the default value of a annotation for a method info
 *
 * @version $Id: AnnotationDefault 1 2005-02-13 03:15:08Z dbrosius $
 * @author <A HREF="mailto:dbrosius@qis.net">D. Brosius</A>
 * @since 5.3
 */
public class AnnotationDefault extends Attribute {

    private static final long serialVersionUID = 6715933396664171543L;

    // The annotation's default value as parsed from the class file.
    ElementValue default_value;

    /**
     * Reads an AnnotationDefault attribute from the class file stream.
     *
     * @param name_index
     *            Index pointing to the name <em>Code</em>
     * @param length
     *            Content length in bytes
     * @param file
     *            Input stream
     * @param constant_pool
     *            Array of constants
     * @throws IOException if the element value cannot be read
     */
    public AnnotationDefault(int name_index, int length, DataInputStream file,
            ConstantPool constant_pool) throws IOException {
        this(name_index, length, (ElementValue) null, constant_pool);
        default_value = ElementValue.readElementValue(file, constant_pool);
    }

    /**
     * @param name_index
     *            Index pointing to the name <em>Code</em>
     * @param length
     *            Content length in bytes
     * @param defaultValue
     *            the annotation's default value
     * @param constant_pool
     *            Array of constants
     */
    public AnnotationDefault(int name_index, int length, ElementValue defaultValue,
            ConstantPool constant_pool) {
        super(Constants.ATTR_ANNOTATION_DEFAULT, name_index, length, constant_pool);
        setDefaultValue(defaultValue);
    }

    /**
     * @param defaultValue
     *            the default value of this methodinfo's annotation
     */
    public final void setDefaultValue(ElementValue defaultValue) {
        default_value = defaultValue;
    }

    /**
     * Copying is not supported for this attribute.
     *
     * @throws UnsupportedOperationException always; idiomatic replacement for
     *         the original raw {@code RuntimeException} (still a
     *         RuntimeException subtype, so existing catch blocks keep working)
     */
    @Override
    public Attribute copy(ConstantPool _constant_pool) {
        throw new UnsupportedOperationException("Not implemented yet!");
    }

    /**
     * Writes the attribute header followed by the serialized default value.
     *
     * @param dos output stream to write to
     * @throws IOException if writing fails
     */
    @Override
    public final void dump(DataOutputStream dos) throws IOException {
        super.dump(dos);
        default_value.dump(dos);
    }
}
apache-2.0
torakiki/sejda-injector
src/test/java/org/pdfsam/injector/TransitiveDependencyTest.java
1375
/* * This file is part of the PDF Split And Merge source code * Copyright 2020 by Sober Lemur S.a.s di Vacondio Andrea (info@pdfsam.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.pdfsam.injector; import jakarta.inject.Inject; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertNotNull; public class TransitiveDependencyTest { @Test public void transitive() { Injector injector = Injector.start(); A a = injector.instance(A.class); assertNotNull(a.b.c); } public static class A { private final B b; @Inject public A(B b) { this.b = b; } } public static class B { private final C c; @Inject public B(C c) { this.c = c; } } public static class C { } }
apache-2.0
ernestp/consulo
platform/projectModel-api/src/com/intellij/openapi/roots/types/SourcesOrderRootType.java
1138
/*
 * Copyright 2013-2014 must-be.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.roots.types;

import com.intellij.openapi.roots.OrderRootTypeWithConvert;
import org.consulo.lombok.annotations.LazyInstance;
import org.jetbrains.annotations.NotNull;

/**
 * Order root type for library/SDK source roots. Registered under the id
 * "sources", persisted as "SOURCES", converted from the legacy "sourcePath"
 * element.
 *
 * @author VISTALL
 * @since 17.08.14
 */
public class SourcesOrderRootType extends OrderRootTypeWithConvert {
  /**
   * Looks up the registered singleton instance of this root type.
   * {@code @LazyInstance} (Lombok extension) caches the result after the
   * first call.
   */
  @NotNull
  @LazyInstance
  public static SourcesOrderRootType getInstance() {
    return getOrderRootType(SourcesOrderRootType.class);
  }

  public SourcesOrderRootType() {
    // name, persistent id, legacy serialization element name
    super("sources", "SOURCES", "sourcePath");
  }
}
apache-2.0
araqne/logdb
araqne-logstorage/src/main/java/org/araqne/logstorage/UnsupportedLogFileTypeException.java
232
package org.araqne.logstorage; public class UnsupportedLogFileTypeException extends RuntimeException { public UnsupportedLogFileTypeException(String type) { super(type); } private static final long serialVersionUID = 1L; }
apache-2.0
jasonsun916/coolweather
app/src/androidTest/java/com/sunsheng/coolweather/app/ApplicationTest.java
359
package com.sunsheng.coolweather.app;

import android.app.Application;
import android.test.ApplicationTestCase;

/**
 * Default Android instrumentation test scaffold generated by the SDK tools;
 * exercises {@link Application} lifecycle via {@link ApplicationTestCase}.
 *
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        super(Application.class);
    }
}
apache-2.0
NotFound403/WePay
src/main/java/cn/felord/wepay/ali/sdk/api/request/AlipayMarketingCampaignCashDetailQueryRequest.java
4859
package cn.felord.wepay.ali.sdk.api.request;

import java.util.Map;

import cn.felord.wepay.ali.sdk.api.AlipayRequest;
import cn.felord.wepay.ali.sdk.api.internal.util.AlipayHashMap;
import cn.felord.wepay.ali.sdk.api.response.AlipayMarketingCampaignCashDetailQueryResponse;
import cn.felord.wepay.ali.sdk.api.AlipayObject;

/**
 * ALIPAY API: alipay.marketing.campaign.cash.detail.query request
 * (cash campaign detail query). Generated request wrapper: plain holder of
 * request parameters plus the metadata the gateway client needs
 * (method name, version, response class).
 *
 * @author auto create
 * @version $Id: $Id
 */
public class AlipayMarketingCampaignCashDetailQueryRequest implements AlipayRequest<AlipayMarketingCampaignCashDetailQueryResponse> {

    // User-defined extra text parameters, merged into getTextParams() output.
    private AlipayHashMap udfParams; // add user-defined text parameters
    private String apiVersion = "1.0";

    /**
     * Cash campaign detail query payload (JSON business content).
     */
    private String bizContent;

    /**
     * Sets the JSON business content of the request.
     *
     * @param bizContent a {@link java.lang.String} object.
     */
    public void setBizContent(String bizContent) {
        this.bizContent = bizContent;
    }

    /**
     * Gets the JSON business content of the request.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getBizContent() {
        return this.bizContent;
    }

    private String terminalType;
    private String terminalInfo;
    private String prodCode;
    private String notifyUrl;
    private String returnUrl;
    private boolean needEncrypt = false;
    private AlipayObject bizModel = null;

    /**
     * Gets the asynchronous notification URL.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getNotifyUrl() {
        return this.notifyUrl;
    }

    /** {@inheritDoc} */
    public void setNotifyUrl(String notifyUrl) {
        this.notifyUrl = notifyUrl;
    }

    /**
     * Gets the synchronous return URL.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getReturnUrl() {
        return this.returnUrl;
    }

    /** {@inheritDoc} */
    public void setReturnUrl(String returnUrl) {
        this.returnUrl = returnUrl;
    }

    /**
     * Gets the API version (defaults to "1.0").
     *
     * @return a {@link java.lang.String} object.
     */
    public String getApiVersion() {
        return this.apiVersion;
    }

    /** {@inheritDoc} */
    public void setApiVersion(String apiVersion) {
        this.apiVersion = apiVersion;
    }

    /** {@inheritDoc} */
    public void setTerminalType(String terminalType) {
        this.terminalType = terminalType;
    }

    /**
     * Gets the terminal type.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getTerminalType() {
        return this.terminalType;
    }

    /** {@inheritDoc} */
    public void setTerminalInfo(String terminalInfo) {
        this.terminalInfo = terminalInfo;
    }

    /**
     * Gets the terminal information.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getTerminalInfo() {
        return this.terminalInfo;
    }

    /** {@inheritDoc} */
    public void setProdCode(String prodCode) {
        this.prodCode = prodCode;
    }

    /**
     * Gets the product code.
     *
     * @return a {@link java.lang.String} object.
     */
    public String getProdCode() {
        return this.prodCode;
    }

    /**
     * Gets the gateway method name for this request.
     *
     * @return the constant method name "alipay.marketing.campaign.cash.detail.query"
     */
    public String getApiMethodName() {
        return "alipay.marketing.campaign.cash.detail.query";
    }

    /**
     * Builds the text parameter map sent to the gateway: biz_content plus any
     * user-defined parameters added via {@link #putOtherTextParam}.
     *
     * @return a {@link java.util.Map} object.
     */
    public Map<String, String> getTextParams() {
        AlipayHashMap txtParams = new AlipayHashMap();
        txtParams.put("biz_content", this.bizContent);
        if (udfParams != null) {
            txtParams.putAll(this.udfParams);
        }
        return txtParams;
    }

    /**
     * Adds a user-defined text parameter (lazily creates the backing map).
     *
     * @param key   a {@link java.lang.String} object.
     * @param value a {@link java.lang.String} object.
     */
    public void putOtherTextParam(String key, String value) {
        if (this.udfParams == null) {
            this.udfParams = new AlipayHashMap();
        }
        this.udfParams.put(key, value);
    }

    /**
     * Gets the response class used to deserialize the gateway reply.
     *
     * @return a {@link java.lang.Class} object.
     */
    public Class<AlipayMarketingCampaignCashDetailQueryResponse> getResponseClass() {
        return AlipayMarketingCampaignCashDetailQueryResponse.class;
    }

    /**
     * Whether the request body should be encrypted (defaults to false).
     *
     * @return a boolean.
     */
    public boolean isNeedEncrypt() {
        return this.needEncrypt;
    }

    /** {@inheritDoc} */
    public void setNeedEncrypt(boolean needEncrypt) {
        this.needEncrypt = needEncrypt;
    }

    /**
     * Gets the typed business model (alternative to raw bizContent).
     *
     * @return a {@link cn.felord.wepay.ali.sdk.api.AlipayObject} object.
     */
    public AlipayObject getBizModel() {
        return this.bizModel;
    }

    /** {@inheritDoc} */
    public void setBizModel(AlipayObject bizModel) {
        this.bizModel = bizModel;
    }
}
apache-2.0
tolo/JServer
src/java/com/teletalk/jserver/tcp/http/HttpMessage.java
17857
/* * Copyright 2007 the project originators. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.teletalk.jserver.tcp.http; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.StringReader; import java.net.SocketException; import java.net.URLDecoder; import java.net.URLEncoder; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.Iterator; import java.util.Locale; import java.util.StringTokenizer; import java.util.TimeZone; import com.teletalk.jserver.JServerUtilities; /** * Abstract base class for representation of HTTP messages. This class handles reading * of a HTTP message from a stream and mapping of header key/values. * <i>Note:</i> All header keys are case insensitive. * * @author Tobias Löfstrand * * @since The beginning * * @see com.teletalk.jserver.tcp.http.HttpRequest * @see com.teletalk.jserver.tcp.http.HttpResponse */ public abstract class HttpMessage implements HttpConstants { // Dictionary for header data private HashMap headerData = new HashMap(); // Dictionary for mapping lowercase keys to actual keys private HashMap headerKeys = new HashMap(); // Flag indicating if the client closed the connection private boolean connectionClosed = false; /** * Initializes this HTTP message (start line and headers) with data from an input stream. 
This method calls * {@link #parseMessageStartLine(String)} to parse the message start line (request or status line). * * @param messageReader the input stream to read the message from. */ protected void readMessage(final InputStream messageReader) throws IOException { byte byteBuffer[] = new byte[2048]; //Temporary byte buffer for the message int requestEndIndex = 0; int i = 0; // Attempt to read first byte to be able to detect if the client closes the connection try { byteBuffer[i] = (byte) messageReader.read(); i++; } catch (SocketException se) { this.connectionClosed = true; return; } // Read message (until CRLF CRLF) for (;((byteBuffer[i] = (byte) messageReader.read()) >= 0); i++) { if (i >= (byteBuffer.length - 1)) //Allocate more space for message { byte[] newByteBuffer = new byte[byteBuffer.length * 2]; System.arraycopy(byteBuffer, 0, newByteBuffer, 0, byteBuffer.length); byteBuffer = newByteBuffer; } if (i > 3) { if ((byteBuffer[i - 3] == '\r') && (byteBuffer[i - 2] == '\n') && (byteBuffer[i - 1] == '\r') && (byteBuffer[i] == '\n')) { requestEndIndex = i - 4; break; } } } if (byteBuffer[0] < 0) { this.connectionClosed = true; } else if (requestEndIndex > 0) { StringTokenizer st; String requestString = new String(byteBuffer, 0, requestEndIndex + 1, DEFAULT_CHARACTER_ENCODING).trim(); byteBuffer = null; BufferedReader requestStringReader = new BufferedReader(new StringReader(requestString)); String startLine = requestStringReader.readLine(); if(startLine != null) { // Parse message start line (request or status line) this.parseMessageStartLine(startLine); // Parse request headers boolean endOfHeaders = false; String line = null; String currentHeaderKey = null; String currentHeaderValue = null; String lineFirstChar = ""; boolean lineEmpty; // Read first line line = requestStringReader.readLine(); while(!endOfHeaders) { if( line == null ) { endOfHeaders = true; lineEmpty = true; } else { lineEmpty = (line.trim().length() == 0); lineFirstChar = 
String.valueOf(line.charAt(0)); } // Parse new header if( !lineEmpty && (currentHeaderKey == null) ) { st = new StringTokenizer(line, HttpConstants.HEADER_KEY_VALUE_SEPARATOR, false); currentHeaderKey = st.nextToken(); currentHeaderValue = st.nextToken().trim(); while (st.hasMoreTokens()) { currentHeaderValue = currentHeaderValue + HttpConstants.HEADER_KEY_VALUE_SEPARATOR + st.nextToken().trim(); } } // Check if multi line header value else if( !lineEmpty && (lineFirstChar.equals(HttpConstants.SP) || lineFirstChar.equals(HttpConstants.HT)) ) { currentHeaderValue = currentHeaderValue + lineFirstChar + line.trim(); } // Add current header else if( currentHeaderKey != null ) { // Add current header... this.addHeader(currentHeaderKey.trim(), currentHeaderValue); // ...reset current key... currentHeaderKey = null; // ...and jump to beginning of loop, since line already contains an unparsed header continue; } // Read new line line = requestStringReader.readLine(); } } } } /** * Called when reading a HTTP message to parse the start line (request or status line). This method is provided * for subclasses that wish to extract information from the start line.<br> * <br> * This implementation is empty. * * @param startLine the HTTP message start line. */ protected void parseMessageStartLine(String startLine) throws IOException { } /** * Checks if the connection was closed by the client before the request could be read. * * @return <code>true</code> if the connection was closed by the client before the request could be read, otherwise <code>false</code>. * * @since 1.13 Build 600 */ public boolean isClosed() { return this.connectionClosed; } /** * Send the headers in the packet on an outputstream. 
* * @param os The outputstream on which to write the data * * @throws java.io.IOException If communication errors should occur */ protected void sendHeaders(OutputStream os) throws java.io.IOException { String formattedHeaders; formattedHeaders = this.formatHeaders() + "\r\n"; os.write(formattedHeaders.getBytes(DEFAULT_CHARACTER_ENCODING)); } /** * Gets the HashMap used for storage of the headers. No cloning will be performed on the * returned HashMap. * * @since 1.3.2 Build 699. */ public HashMap getHeaderData() { return this.headerData; } /** * Sets the HashMap used for storage of the headers. * * @since 1.3.2 Build 699. */ public void setHeaderData(final HashMap headerData) { this.headerData = headerData; } /** * Set a header key to a specified value. If the key is already * defined in this packet, the value is replaced. * * @param key The key to set the value for * @param value The value to set */ public void setHeader(String key, String value) { String realKeyName; ArrayList vect = new ArrayList(); key = HttpMessage.decode(key); vect.add(HttpMessage.decode(value)); if ((realKeyName = this.getRealKeyName(key.toLowerCase())) == null) { // The key does not exist in the hashtable realKeyName = key; this.headerKeys.put(key.toLowerCase(), realKeyName); } this.headerData.put(realKeyName, vect); } /** * Add another value to a header key. If the key is already * defined in this packet, the value is appended after the * existing value(s). * * @param key The key to add the value for. * @param value The value to set. */ public void addHeader(String key, String value) { ArrayList vect; String realKeyName; //Is this really nessecary? Remove for now....or.... 
key = HttpMessage.decode(key); value = HttpMessage.decode(value); if ((realKeyName = this.getRealKeyName(key.toLowerCase())) == null) { // The key does not exist in the hashtable vect = new ArrayList(); realKeyName = key; this.headerKeys.put(key.toLowerCase(), realKeyName); } else { vect = (ArrayList) this.headerData.get(realKeyName); } vect.add(value); this.headerData.put(realKeyName, vect); } /** * Get all header values for a specified key. If the header key * is undefined an array of zero length is returned. * * @param key The key to get the value(s) for * * @return The values for that key */ public String[] getHeader(final String key) { String realKeyName; ArrayList headerValues; if (this.hasHeader(key)) { realKeyName = getRealKeyName(key); headerValues = (ArrayList) this.headerData.get(realKeyName); if (headerValues != null) { return (String[]) headerValues.toArray(new String[headerValues.size()]); } } return new String[0]; } /** * Get the first header value for a specified key. If the header * key is undefined <code>null</code> is returned. * * @param key The key to get the value(s) for. * * @return The value for that key or <code>null</code> if the key has not been defined. */ public String getHeaderSingleValue(String key) { String[] allValues; allValues = this.getHeader(key); if (allValues.length > 0) { return allValues[0]; } else { return null; } } /** * Remove a key from the header. * * @param key The key to remove. */ public void removeHeader(String key) { String realKeyName; if (this.hasHeader(key)) { realKeyName = this.getRealKeyName(key); this.headerKeys.remove(key.toLowerCase()); this.headerData.remove(realKeyName); } } /** * Check if a key is defined in the header. * * @param key The key to check for presence. * * @return <code>true</code> if the key is defined, otherwise <code>false</code>. */ public boolean hasHeader(String key) { return (this.getRealKeyName(key) != null); } /** * Formats a header (possibly with several values). 
* * @since 1.3.2 Build 699 */ public String formatHeader(final String key, final String[] values) { StringBuffer sb = new StringBuffer(); if( (key != null) && (key.length() > 0) ) { if( (values != null) && (values.length > 0) ) { for(int i=0; i<values.length; i++) { sb.append(key); sb.append(": "); sb.append(values[i]); sb.append(HttpConstants.CRLF); } } else { sb.append(key); sb.append(":"); sb.append(HttpConstants.CRLF); } } return sb.toString(); } /** * Returns the headers in this HttpMessage as a string formatted according to the HTTP specification * (in a format appropriate for returning in a HTTP request or response). * * @return the headers in this HttpMessage as a string formatted according to the HTTP specification. * * @since 1.2 */ public String formatHeaders() { String key; StringBuffer sb = new StringBuffer(); ArrayList values; Iterator allKeys; allKeys = this.headerData.keySet().iterator(); while (allKeys.hasNext()) // Header keys { key = (String) allKeys.next(); if( (key != null) && (key.length() > 0) ) { values = (ArrayList) this.headerData.get(key); sb.append( this.formatHeader(key, (String[])values.toArray(new String[0])) ); } } return sb.toString(); } /** * Represent this set of headers as a string. * * @return The textual representation of this header */ public String toString() { return formatHeaders(); } /** * Get the key name used for storing data under the key in this packet. * * @param key The case insensitive key name. * * @return The case sensitive key name under which the values are stored in this packet. 
*/ private String getRealKeyName(String key) { String realKeyName = null; String lowerCaseKey = key.toLowerCase(); if (this.headerKeys.containsKey(lowerCaseKey)) { realKeyName = (String) this.headerKeys.get(lowerCaseKey); } return realKeyName; } /** * Converts from a MIME format called "x-www-form-urlencoded" to a String.<BR><BR> * To convert to a String, each character is examined in turn:<BR><BR> * <li> The ASCII characters 'a' through 'z', 'A' through 'Z', and '0' through '9' remain the same. <BR> * <li> The plus sign '+'is converted into a space character ' '.<BR> * <li> The remaining characters are represented by 3-character strings which begin with the percent sign, "%xy", * where xy is the two-digit hexadecimal representation of the lower 8-bits of the character. * * @param s the string to decode. * * @return a decoded string. */ public static String decode(String s) { if (s == null) return s; try { return URLDecoder.decode(s); } catch (Exception e) { JServerUtilities.logError("HttpPacket", "Error while decoding", e); return s; } } /** * Converts a String into a MIME format called "x-www-form-urlencoded" format.<BR><BR> * To convert a String, each character is examined in turn: <BR> * <li> The ASCII characters 'a' through 'z', 'A' through 'Z', and '0' through '9' remain the same. <BR> * <li> The space character ' ' is converted into a plus sign '+'. * <li> All other characters are converted into the 3-character string "%xy", where xy is the two-digit hexadecimal representation of the lower 8-bits of the character. * * @param s the string to encode. * * @return an encoded string. */ public static String encode(String s) { if (s == null) return s; return URLEncoder.encode(s); } /** * Shortcut for setting the header variable "Content-Type". * This variable can also be set using method <code>setHeader(key, value)</code> inherited by HttpMessage. * * @param contentType The MIME content type of this response body. 
*/ public void setContentType(final String contentType) { this.setHeader(CONTENT_TYPE_HEADER_KEY, contentType); } /** * Gets the size in bytes of the body of this message. * * @return the size in bytes of the body of this message. * * @since 1.2 */ public long getContentLength() { String sLength = this.getHeaderSingleValue(CONTENT_LENGTH_HEADER_KEY); if (sLength != null) { try { return Long.parseLong(sLength); } catch (NumberFormatException nfe) { } } return 0; } /** * Creates a string containing a HTTP-date/time stamp (rfc1123) for the current date and time. * * @return a string containing a HTTP (rfc1123) formatted date/time. * * @since 1.13 */ public static String createHTTPDateString() { return createHTTPDateString(new Date()); } /** * Creates a string containing a HTTP-date/time stamp (rfc1123) for the specified <code>Date</code> object. * * @param date the date and time to create a date/time string for. * * @return a string containing a HTTP (rfc1123) formatted date/time. * * @since 1.13 */ public static String createHTTPDateString(final Date date) { SimpleDateFormat dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss 'GMT'", Locale.US); GregorianCalendar cal = new GregorianCalendar(TimeZone.getTimeZone("GMT")); cal.setTime(date); dateFormat.setCalendar(cal); return dateFormat.format(cal.getTime()); } }
apache-2.0
dokeeffe/auxremote
app/src/main/java/com/bobs/mount/GuideRate.java
127
package com.bobs.mount; /** * Created by dokeeffe on 26/12/16. */ public enum GuideRate { SIDEREAL, LUNAR, SOLAR, OFF }
apache-2.0
srijeyanthan/hops
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ContainerPreemptEvent.java
2020
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler; import io.hops.ha.common.TransactionState; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.event.AbstractEventTransaction; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; /** * Simple event class used to communicate containers unreservations, preemption, * killing */ public class ContainerPreemptEvent extends AbstractEventTransaction<ContainerPreemptEventType> { private final ApplicationAttemptId aid; private final RMContainer container; public ContainerPreemptEvent(ApplicationAttemptId aid, RMContainer container, ContainerPreemptEventType type, TransactionState transactionState) { super(type, transactionState); this.aid = aid; this.container = container; } public RMContainer getContainer() { return this.container; } public ApplicationAttemptId getAppId() { return aid; } @Override public String toString() { StringBuilder sb = new StringBuilder(super.toString()); sb.append(" ").append(getAppId()); sb.append(" ").append(getContainer().getContainerId()); return sb.toString(); } }
apache-2.0
mgormley/pacaya
src/main/java/edu/jhu/pacaya/sch/tasks/SumPaths.java
10231
package edu.jhu.pacaya.sch.tasks; import static edu.jhu.pacaya.sch.util.Indexed.enumerate; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.function.DoubleConsumer; import org.apache.commons.math3.linear.RealVector; import edu.jhu.pacaya.sch.Schedule; import edu.jhu.pacaya.sch.SchedulingTask; import edu.jhu.pacaya.sch.graph.DiEdge; import edu.jhu.pacaya.sch.graph.IntDiGraph; import edu.jhu.pacaya.sch.graph.WeightedIntDiGraph; import edu.jhu.pacaya.sch.util.DefaultDict; import edu.jhu.pacaya.sch.util.Indexed; import edu.jhu.pacaya.sch.util.ScheduleUtils; import edu.jhu.pacaya.sch.util.dist.TruncatedNormal; import edu.jhu.prim.bimap.IntObjectBimap; import edu.jhu.prim.tuple.Pair; public class SumPaths implements SchedulingTask { private RealVector startWeight; private RealVector endWeight; private WeightedIntDiGraph wg; private IntDiGraph edgeGraph; private IntObjectBimap<DiEdge> edgesToNodes; private double lambda; private double sigma; private Double goldWeight; // private int haltAction; public SumPaths(WeightedIntDiGraph wg, RealVector s, RealVector t, double lambda, double sigma) { this.lambda = lambda; this.sigma = sigma; startWeight = s; endWeight = t; this.wg = wg; Pair<IntDiGraph, IntObjectBimap<DiEdge>> p = wg.edgeGraph(false); edgeGraph = p.get1(); // haltAction = edgeGraph.max() + 1; // TODO: hook the halt action in // edgeGraph.addNode(haltAction); this.edgesToNodes = p.get2(); this.goldWeight = WeightedIntDiGraph.sumWalks(wg.toMatrix(), s, t); } /* * public SumPaths(WeightedIntDiGraph g, Set<Integer> s, Set<Integer> t, * boolean flip) { this(g, asVec(g, s, flip), asVec(g, t, flip)); } * * public SumPaths(WeightedIntDiGraph g) { this(g, null, null); } */ /** * returns a real vector with 1's on for each i in s if flip is true then * the vector produced puts a 1.0 on all but those provided */ /* * private static RealVector asVec(WeightedIntDiGraph g, Set<Integer> s, * boolean flip) { if (s == null) { return 
null; } else { int n = g.max(); * RealVector v = new OpenMapRealVector(n, flip ? 1.0 : 0.0); for (int i : * g.getNodes()) { v.setEntry(i, flip ? 0.0 : 1.0); } return v; } } */ @Override public IntDiGraph getGraph() { // the nodes in the new graph are the edges of the old // the edges return wg; } public static class RecordingDoubleConsumer implements DoubleConsumer { private List<Double> record; public RecordingDoubleConsumer() { record = new LinkedList<Double>(); } @Override public void accept(double value) { record.add(value); } public List<Double> getRecord() { return record; } } /** * A consumer that computes an expected reward of a sequence of * observed values; the expectation is take with respect to the actual haltTime * which is a TruncatedNormal random variable with the mean of the underlying Normal * being the anticipated haltTime and the standard deviation of the underlying Normal * being a parameter. The reward given a halt time is the negative absolute difference of the * current result from gold minus lambda times the number of time steps taken. Reward * changes only at dicrete points in time; specifically, for 0 <= t < 1, the result is the initialization, for 1 <= t < 2, the * state is the result after one action, etc. 
*/ private class EvaluatingDoubleConsumer implements DoubleConsumer { // the max probability halt time private int haltTime; // reward = -diff - lambda timestep private double lambda; // the target sum private double gold; // the total accumulated reward private double reward; // the amount of probabilty mass accounted for for the halt time private double usedProbMass; // the standard deviation of the underlying normal distribution over // actual stop time (the actual distribution that we model is a truncated version) private double sigma; // the most recent reward observed private double lastAccuracy; // the current timestep (starts at 0) private int i; public EvaluatingDoubleConsumer(double gold, double lambda, int haltTime, double sigma) { this.sigma = sigma; this.haltTime = haltTime; this.lambda = lambda; this.gold = gold; usedProbMass = 0.0; reward = 0; lastAccuracy = Double.NEGATIVE_INFINITY; i = 0; } public double probRemaining() { return 1.0 - usedProbMass; } /** * accept the value that will be available from the current time step (starting at 0) * and the next time step */ @Override public void accept(double value) { lastAccuracy = -Math.abs(value - gold); double currentReward = lastAccuracy - lambda * i; // have the mode be halfway between the action right after the halt time and the following double pHalt = TruncatedNormal.probabilityTruncZero(i, i+1, haltTime + 0.5, sigma); pHalt = Math.min(probRemaining(), pHalt); usedProbMass += pHalt; reward += pHalt * currentReward; i++; } public double getScore() { // we assume that the lastReward will be constant for the rest of time // we can calculate the mean halt time and penalize by that times lambda double remainingMeanHalt = TruncatedNormal.meanTruncLower(haltTime + 0.5, sigma, i); return reward + probRemaining() * (lastAccuracy - lambda * remainingMeanHalt); } } /** * Computes the approximate sum of paths through the graph where the weight * of each path is the product of edge weights along the path; * * 
If consumer c is not null, it will be given the intermediate estimates as * they are available */ public static double approxSumPaths(WeightedIntDiGraph g, RealVector startWeights, RealVector endWeights, Iterator<DiEdge> seq, DoubleConsumer c) { // we keep track of the total weight of discovered paths ending along // each edge and the total weight // of all paths ending at each node (including the empty path); on each // time step, we // at each step, we pick an edge (s, t), update the sum at s, and extend // each of those (including // the empty path starting there) with the edge (s, t) DefaultDict<DiEdge, Double> prefixWeightsEndingAt = new DefaultDict<DiEdge, Double>(Void -> 0.0); // we'll maintain node sums and overall sum with subtraction rather than // re-adding (it's an approximation anyway!) RealVector currentSums = startWeights.copy(); double currentTotal = currentSums.dotProduct(endWeights); if (c != null) { c.accept(currentTotal); } for (DiEdge e : ScheduleUtils.iterable(seq)) { int s = e.get1(); int t = e.get2(); // compute the new sums double oldTargetSum = currentSums.getEntry(t); double oldEdgeSum = prefixWeightsEndingAt.get(e); // new edge sum is the source sum times the edge weight double newEdgeSum = currentSums.getEntry(s) * g.getWeight(e); // new target sum is the old target sum plus the difference between // the new and old edge sums double newTargetSum = oldTargetSum + (newEdgeSum - oldEdgeSum); // the new total is the old total plus the difference in new and // target double newTotal = currentTotal + (newTargetSum - oldTargetSum) * endWeights.getEntry(t); // store the new sums prefixWeightsEndingAt.put(e, newEdgeSum); currentSums.setEntry(t, newTargetSum); currentTotal = newTotal; // and report the new total to the consumer if (c != null) { c.accept(currentTotal); } } return currentTotal; } /* // TODO: probably better to just have halt be explicit as an int rather than an action private Pair<Iterator<DiEdge>, Integer> 
filterOutStopTime(Schedule s) { int haltTime = -1; List<DiEdge> nonStopActions = new LinkedList<>(); for (Indexed<Integer> a : enumerate(s)) { if (a.get() == haltAction) { if (haltTime > -1) { throw new IllegalStateException("cannot have more than one halt action in schedule"); } haltTime = a.index(); } else { nonStopActions.add(edgesToNodes.lookupObject(a.get())); } } if (haltTime == -1) { haltTime = nonStopActions.size(); } return new Pair<>(nonStopActions.iterator(), haltTime); } */ private Iterator<DiEdge> edges(Schedule s) { Iterator<Integer> ixIter = s.iterator(); return new Iterator<DiEdge>() { @Override public DiEdge next() { return edgesToNodes.lookupObject(ixIter.next()); } @Override public boolean hasNext() { return ixIter.hasNext(); } }; } @Override public double score(Schedule s) { // compute the expected reward EvaluatingDoubleConsumer eval = new EvaluatingDoubleConsumer(goldWeight, lambda, s.getHaltTime(), sigma); approxSumPaths(wg, startWeight, endWeight, edges(s), eval); return eval.getScore(); } // public int haltAction() { // return haltAction; // } public double getGold() { return goldWeight; } }
apache-2.0