repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
prince89arora/logback-kafka-appender
src/main/java/lb/kafka/logback/appender/KafkaAppender.java
7447
package lb.kafka.logback.appender; import ch.qos.logback.core.UnsynchronizedAppenderBase; import lb.kafka.commons.ModuleAware; import lb.kafka.encoder.KafkaMessageEncoder; import lb.kafka.encoder.PatternBasedMessageEncoder; import lb.kafka.producer.DeliveryType; import lb.kafka.producer.FailedCallBack; import lb.kafka.producer.ProductionFactory; import lb.kafka.producer.transport.Transporter; import org.apache.kafka.common.serialization.ByteArraySerializer; import java.util.HashMap; import java.util.Map; import static org.apache.kafka.clients.producer.ProducerConfig.ACKS_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.BATCH_SIZE_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.BOOTSTRAP_SERVERS_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.BUFFER_MEMORY_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; /** * Logback Appender to send log messages to Apache Kafka brokers. * * <ul> * <li><b>topic </b> Topic name for kafka brokers</li> * <li><b>deliveryType </b> Type of deliver to send payload to kafka brokers</li> * <li><b>brokers </b> Comma separated string for for list of broker servers</li> * <li><b>encoder </b> {@link KafkaMessageEncoder} encoder to encode log event message</li> * </ul> * * @author prince.arora */ public class KafkaAppender<E> extends UnsynchronizedAppenderBase<E> { /** * kafka topic */ private String topic; /** * delivery type to transportation. */ private String deliveryType; /** * kafka broker servers. */ private String brokers; /** * Acknowledgments the producer. */ private String acks = "1"; /** * Memory to be used for buffer records. */ private long bufferMemory = 33554432; /** * Size for batch records. 
*/ private int batchSize = 16384; private KafkaMessageEncoder<E> encoder; private Transporter transporter; public String getTopic() { return topic; } public void setTopic(String topic) { this.topic = topic; } public String getDeliveryType() { return deliveryType; } public void setDeliveryType(String deliveryType) { this.deliveryType = deliveryType; } public String getBrokers() { return brokers; } public void setBrokers(String brokers) { this.brokers = brokers; } public void setBrokers(String[] brokers) { if (brokers != null) { for(String node : brokers) { if (this.brokers == null || this.brokers.equals("")) { this.brokers = node; } else { this.brokers = "," + node; } } } } public String getAcks() { return acks; } public void setAcks(String acks) { this.acks = acks; } public long getBufferMemory() { return bufferMemory; } public void setBufferMemory(long bufferMemory) { this.bufferMemory = bufferMemory; } public int getBatchSize() { return batchSize; } public void setBatchSize(int batchSize) { this.batchSize = batchSize; } public KafkaMessageEncoder<E> getEncoder() { return encoder; } public void setEncoder(KafkaMessageEncoder<E> encoder) { this.encoder = encoder; } public void start() { /** * Check if basic configurations are preset * Initialize all the configuration and store in {@link ModuleAware} */ boolean status = this.initialize(); if (status) { ModuleAware.CONTEXT.setProducerConfig(this.prepareConfiguration()); ProductionFactory.build(); this.transporter = ProductionFactory.transporter(); this.started = true; super.start(); } } public void stop() { super.stop(); /** * Destroy productionFactory */ if (this.transporter != null) { try { ProductionFactory.destroy(); } catch (Exception ex) { this.addWarn("Unable to shutdown kafka production", ex); } } } /** * The Logging event and the payload will be provided to the transporter * and will be forwarded to kafka brokers. 
* * @param event */ @Override protected void append(E event) { final byte[] payload = this.encoder.doEncode(event); this.transporter.transport(payload, new FailedCallBack()); } /** * Understanding delivery type configure in appender configuration and * preparing {@link DeliveryType} * * @return */ private DeliveryType getDeliveryOption() { //Normal delivery type will be used in case of no delivery type selected DeliveryType deliveryOption = DeliveryType.NORMAL; for (DeliveryType deliveryType : DeliveryType.values()) { if (deliveryType.toString().equals(this.deliveryType)) { deliveryOption = deliveryType; } } return deliveryOption; } /** * Chack basic configuration requirments for appender to start and update * {@link ModuleAware} * * @return */ private boolean initialize() { boolean status = true; if (this.topic == null || this.topic.equals("")) { addError("Unable to find any topic. Topic is required to connect and deliver packages to brokers."); status = false; } if (this.brokers == null || this.brokers.equals("")) { addError("Unable to find any broker. Specify broker(s) host and port to connect."); status = false; } ModuleAware.CONTEXT.setTopic(this.topic); ModuleAware.CONTEXT.setBrokers(brokers); ModuleAware.CONTEXT.setDeliveryType(this.getDeliveryOption()); ModuleAware.CONTEXT.setAcks(this.acks); ModuleAware.CONTEXT.setBufferMemory(this.bufferMemory); ModuleAware.CONTEXT.setBatchSize(this.batchSize); /** * default encoder if nothing provided in configuration. 
*/ if (this.encoder == null) { this.encoder = new PatternBasedMessageEncoder<E>(); this.encoder.start(); } return status; } /** * Prepare final configuration map for producer to initiate kafka producer instance * Configurations will be picked up from {@link ModuleAware} * * @return */ private static Map<String, Object> prepareConfiguration() { Map<String, Object> properties = new HashMap<>(); properties.put(BOOTSTRAP_SERVERS_CONFIG, ModuleAware.CONTEXT.getBrokers()); properties.put(KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); properties.put(VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName()); properties.put(BATCH_SIZE_CONFIG, ModuleAware.CONTEXT.getBatchSize()); properties.put(ACKS_CONFIG, ModuleAware.CONTEXT.getAcks()); properties.put(BUFFER_MEMORY_CONFIG, ModuleAware.CONTEXT.getBufferMemory()); return properties; } }
apache-2.0
googleapis/java-containeranalysis
proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/provenance/ArtifactOrBuilder.java
4272
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/devtools/containeranalysis/v1beta1/provenance/provenance.proto

package io.grafeas.v1beta1.provenance;

// Read-only accessor interface for the Artifact message; regenerate from the
// .proto rather than editing by hand.
public interface ArtifactOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:grafeas.v1beta1.provenance.Artifact)
    com.google.protobuf.MessageOrBuilder {

  /**
   * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a
   * container.
   *
   * <code>string checksum = 1;</code>
   *
   * @return The checksum.
   */
  java.lang.String getChecksum();

  /**
   * Hash or checksum value of a binary, or Docker Registry 2.0 digest of a
   * container.
   *
   * <code>string checksum = 1;</code>
   *
   * @return The bytes for checksum.
   */
  com.google.protobuf.ByteString getChecksumBytes();

  /**
   * Artifact ID, if any; for container images, this will be a URL by digest
   * like `gcr.io/projectID/imagename&#64;sha256:123456`.
   *
   * <code>string id = 2;</code>
   *
   * @return The id.
   */
  java.lang.String getId();

  /**
   * Artifact ID, if any; for container images, this will be a URL by digest
   * like `gcr.io/projectID/imagename&#64;sha256:123456`.
   *
   * <code>string id = 2;</code>
   *
   * @return The bytes for id.
   */
  com.google.protobuf.ByteString getIdBytes();

  /**
   * Related artifact names. This may be the path to a binary or jar file, or in
   * the case of a container build, the name used to push the container image to
   * Google Container Registry, as presented to `docker push`. Note that a
   * single Artifact ID can have multiple names, for example if two tags are
   * applied to one image.
   *
   * <code>repeated string names = 3;</code>
   *
   * @return A list containing the names.
   */
  java.util.List<java.lang.String> getNamesList();

  /**
   * Related artifact names (see {@code names} field description above).
   *
   * <code>repeated string names = 3;</code>
   *
   * @return The count of names.
   */
  int getNamesCount();

  /**
   * Related artifact names (see {@code names} field description above).
   *
   * <code>repeated string names = 3;</code>
   *
   * @param index The index of the element to return.
   * @return The names at the given index.
   */
  java.lang.String getNames(int index);

  /**
   * Related artifact names (see {@code names} field description above).
   *
   * <code>repeated string names = 3;</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the names at the given index.
   */
  com.google.protobuf.ByteString getNamesBytes(int index);
}
apache-2.0
sdw2330976/Research-jetty-9.2.5
jetty-annotations/src/test/java/org/eclipse/jetty/annotations/TestAnnotationInheritance.java
9220
//
//  ========================================================================
//  Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.annotations;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

import javax.naming.Context;
import javax.naming.InitialContext;

import org.eclipse.jetty.annotations.AnnotationParser.AbstractHandler;
import org.eclipse.jetty.annotations.AnnotationParser.ClassInfo;
import org.eclipse.jetty.annotations.AnnotationParser.FieldInfo;
import org.eclipse.jetty.annotations.AnnotationParser.MethodInfo;
import org.eclipse.jetty.util.ConcurrentHashSet;
import org.junit.After;
import org.junit.Test;

/**
 * Tests that {@link AnnotationParser} discovers {@code @Sample} annotations on
 * classes, methods and fields across an inheritance hierarchy (the sibling
 * fixture classes ClassA/ClassB/InterfaceD live in this test package).
 */
public class TestAnnotationInheritance
{
    List<String> classNames = new ArrayList<String>();

    // Collects the fully-qualified names of everything annotated with
    // org.eclipse.jetty.annotations.Sample; all other annotations are ignored.
    class SampleHandler extends AbstractHandler
    {
        public final List<String> annotatedClassNames = new ArrayList<String>();
        public final List<String> annotatedMethods = new ArrayList<String>();
        public final List<String> annotatedFields = new ArrayList<String>();

        public void handle(ClassInfo info, String annotation)
        {
            if (annotation == null || !"org.eclipse.jetty.annotations.Sample".equals(annotation))
                return;
            annotatedClassNames.add(info.getClassName());
        }

        public void handle(FieldInfo info, String annotation)
        {
            if (annotation == null || !"org.eclipse.jetty.annotations.Sample".equals(annotation))
                return;
            annotatedFields.add(info.getClassInfo().getClassName()+"."+info.getFieldName());
        }

        public void handle(MethodInfo info, String annotation)
        {
            if (annotation == null || !"org.eclipse.jetty.annotations.Sample".equals(annotation))
                return;
            annotatedMethods.add(info.getClassInfo().getClassName()+"."+info.getMethodName());
        }
    }

    // Resets the shared class-name list and tears down the java:comp/env JNDI
    // subcontext some fixture annotations bind into.
    // NOTE(review): destroySubcontext will throw if a test never bound
    // java:comp/env — appears to rely on the fixtures always binding it.
    @After
    public void destroy() throws Exception
    {
        classNames.clear();
        InitialContext ic = new InitialContext();
        Context comp = (Context)ic.lookup("java:comp");
        comp.destroySubcontext("env");
    }

    // Parsing by class NAME: both classes' annotations (including inherited
    // members) must be reported.
    @Test
    public void testParseClassNames() throws Exception
    {
        classNames.add(ClassA.class.getName());
        classNames.add(ClassB.class.getName());

        SampleHandler handler = new SampleHandler();
        AnnotationParser parser = new AnnotationParser();
        parser.parse(Collections.singleton(handler), classNames, new ClassNameResolver ()
        {
            public boolean isExcluded(String name)
            {
                return false;
            }

            public boolean shouldOverride(String name)
            {
                return false;
            }
        });

        //check we got 2 class annotations
        assertEquals(2, handler.annotatedClassNames.size());

        //check we got all annotated methods on each class
        assertEquals (7, handler.annotatedMethods.size());
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.a"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.b"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.c"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.d"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.l"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassB.a"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassB.c"));

        //check we got all annotated fields on each class
        assertEquals(1, handler.annotatedFields.size());
        assertEquals("org.eclipse.jetty.annotations.ClassA.m", handler.annotatedFields.get(0));
    }

    // Parsing by Class OBJECT with visitSuperClasses=true must yield the same
    // results as parsing both names explicitly.
    @Test
    public void testParseClass() throws Exception
    {
        SampleHandler handler = new SampleHandler();
        AnnotationParser parser = new AnnotationParser();
        parser.parse(Collections.singleton(handler), ClassB.class, new ClassNameResolver ()
        {
            public boolean isExcluded(String name)
            {
                return false;
            }

            public boolean shouldOverride(String name)
            {
                return false;
            }
        }, true);

        //check we got 2 class annotations
        assertEquals(2, handler.annotatedClassNames.size());

        //check we got all annotated methods on each class
        assertEquals (7, handler.annotatedMethods.size());
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.a"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.b"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.c"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.d"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassA.l"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassB.a"));
        assertTrue (handler.annotatedMethods.contains("org.eclipse.jetty.annotations.ClassB.c"));

        //check we got all annotated fields on each class
        assertEquals(1, handler.annotatedFields.size());
        assertEquals("org.eclipse.jetty.annotations.ClassA.m", handler.annotatedFields.get(0));
    }

    // An excluding resolver must suppress all results; re-parsing without
    // exclusion must then find the class annotation.
    @Test
    public void testExclusions() throws Exception
    {
        AnnotationParser parser = new AnnotationParser();
        SampleHandler handler = new SampleHandler();
        parser.parse(Collections.singleton(handler), ClassA.class.getName(), new ClassNameResolver()
        {
            public boolean isExcluded(String name)
            {
                return true;
            }

            public boolean shouldOverride(String name)
            {
                return false;
            }
        });
        assertEquals (0, handler.annotatedClassNames.size());
        assertEquals (0, handler.annotatedFields.size());
        assertEquals (0, handler.annotatedMethods.size());

        handler.annotatedClassNames.clear();
        handler.annotatedFields.clear();
        handler.annotatedMethods.clear();

        parser.parse (Collections.singleton(handler), ClassA.class.getName(), new ClassNameResolver()
        {
            public boolean isExcluded(String name)
            {
                return false;
            }

            public boolean shouldOverride(String name)
            {
                return false;
            }
        });
        assertEquals (1, handler.annotatedClassNames.size());
    }

    // ClassInheritanceHandler builds a superclass/interface -> implementors
    // map; only types with at least one subtype/implementor appear as keys.
    @Test
    public void testTypeInheritanceHandling() throws Exception
    {
        ConcurrentHashMap<String, ConcurrentHashSet<String>> map = new ConcurrentHashMap<String, ConcurrentHashSet<String>>();

        AnnotationParser parser = new AnnotationParser();
        ClassInheritanceHandler handler = new ClassInheritanceHandler(map);

        class Foo implements InterfaceD
        {
        }

        classNames.clear();
        classNames.add(ClassA.class.getName());
        classNames.add(ClassB.class.getName());
        classNames.add(InterfaceD.class.getName());
        classNames.add(Foo.class.getName());

        parser.parse(Collections.singleton(handler), classNames, null);

        assertNotNull(map);
        assertFalse(map.isEmpty());
        assertEquals(2, map.size());
        assertTrue (map.keySet().contains("org.eclipse.jetty.annotations.ClassA"));
        assertTrue (map.keySet().contains("org.eclipse.jetty.annotations.InterfaceD"));
        ConcurrentHashSet<String> classes = map.get("org.eclipse.jetty.annotations.ClassA");
        assertEquals(1, classes.size());
        assertEquals ("org.eclipse.jetty.annotations.ClassB", classes.iterator().next());

        classes = map.get("org.eclipse.jetty.annotations.InterfaceD");
        assertEquals(2, classes.size());
        assertTrue(classes.contains("org.eclipse.jetty.annotations.ClassB"));
        assertTrue(classes.contains(Foo.class.getName()));
    }
}
apache-2.0
davidsoergel/ml
src/main/java/edu/berkeley/compbio/ml/cluster/ClusterableIteratorFactory.java
1264
/*
 * Copyright (c) 2006-2013 David Soergel <dev@davidsoergel.com>
 * Licensed under the Apache License, Version 2.0
 * http://www.apache.org/licenses/LICENSE-2.0
 */

package edu.berkeley.compbio.ml.cluster;

import java.util.Collection;
import java.util.Iterator;

/**
 * A Factory for new Iterators based on a Collection. Each provided Iterator is a new, independent object, iterating in
 * whatever order the underlying Collection provides (which may or may not be defined).
 *
 * Note: this factory is itself an infinite {@link Iterator} — {@link #hasNext()} is always true and each
 * {@link #next()} call produces a fresh iterator over the same collection.
 *
 * @author <a href="mailto:dev@davidsoergel.com">David Soergel</a>
 * @version $Id: CollectionIteratorFactory.java 313 2009-02-23 03:36:52Z soergel $
 */
public class ClusterableIteratorFactory<T extends Clusterable<T>> implements Iterator<ClusterableIterator<T>> {

    protected final Collection<T> underlyingCollection;

    /**
     * @param underlyingCollection the collection each produced iterator traverses; not copied, so later mutations
     *                             of the collection are visible to iterators created afterwards
     */
    @SuppressWarnings("unchecked")
    // Safe: the collection is only read through the Collection<T> view (PECS producer).
    public ClusterableIteratorFactory(final Collection<? extends T> underlyingCollection) {
        this.underlyingCollection = (Collection<T>) underlyingCollection;
    }

    /** Always true: a new iterator can always be produced. */
    public boolean hasNext() {
        return true;
    }

    /** @return a new, independent iterator over the underlying collection */
    public ClusterableIterator<T> next() {
        return new CollectionClusterableIterator(underlyingCollection);
    }

    /**
     * Unsupported.
     *
     * @throws UnsupportedOperationException always
     */
    public void remove() {
        // FIX: the Iterator contract specifies UnsupportedOperationException for an
        // unsupported remove(); the commons-lang NotImplementedException broke that
        // contract (and the import is no longer needed).
        throw new UnsupportedOperationException("remove is not supported");
    }
}
apache-2.0
s4/core
src/main/java/io/s4/dispatcher/transformer/Transformer.java
775
/* * Copyright (c) 2010 Yahoo! Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the * License. See accompanying LICENSE file. */ package io.s4.dispatcher.transformer; public interface Transformer { public Object transform(Object event); }
apache-2.0
dushmis/closure-compiler
test/com/google/javascript/jscomp/CheckProvidesTest.java
4093
/*
 * Copyright 2008 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import static com.google.javascript.jscomp.CheckProvides.MISSING_PROVIDE_WARNING;

// NOTE(review): this import is redundant — CheckLevel is in the same package.
import com.google.javascript.jscomp.CheckLevel;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;

/**
 * Tests for {@link CheckProvides}: each declared constructor / ES6 class must
 * be matched by a goog.provide of its name, otherwise a warning is expected.
 */
public final class CheckProvidesTest extends CompilerTestCase {
  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    return new CheckProvides(compiler, CheckLevel.WARNING);
  }

  // Code with no constructors should produce no warnings at all.
  public void testIrrelevant() {
    testSame("var str = 'g4';");
  }

  public void testHarmlessProcedural() {
    testSame("goog.provide('X'); /** @constructor */ function X(){};");
  }

  public void testHarmless() {
    String js = "goog.provide('X'); /** @constructor */ X = function(){};";
    testSame(js);
  }

  public void testHarmlessEs6Class() {
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    testSame("goog.provide('X'); var X = class {};");
    testSame("goog.provide('X'); class X {};");
    testSame("goog.provide('foo.bar.X'); foo.bar.X = class {};");
  }

  // ES6 class declarations without a provide must warn, whatever declaration
  // form is used (class statement, var-assigned class, namespaced class).
  public void testMissingProvideEs6Class() {
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    String js = "class X {};";
    String warning = "missing goog.provide('X')";
    test(js, js, null, MISSING_PROVIDE_WARNING, warning);

    js = "var X = class {};";
    test(js, js, null, MISSING_PROVIDE_WARNING, warning);

    js = "foo.bar.X = class {};";
    warning = "missing goog.provide('foo.bar.X')";
    test(js, js, null, MISSING_PROVIDE_WARNING, warning);
  }

  // An inner constructor hanging off a provided one needs no extra provide.
  public void testNoProvideInnerClass() {
    testSame(
        "goog.provide('X');\n" +
        "/** @constructor */ function X(){};" +
        "/** @constructor */ X.Y = function(){};");
  }

  public void testMissingGoogProvide(){
    String[] js = new String[]{"/** @constructor */ X = function(){};"};
    String warning = "missing goog.provide('X')";
    test(js, js, null, MISSING_PROVIDE_WARNING, warning);
  }

  public void testMissingGoogProvideWithNamespace(){
    String[] js = new String[]{"goog = {}; " +
                               "/** @constructor */ goog.X = function(){};"};
    String warning = "missing goog.provide('goog.X')";
    test(js, js, null, MISSING_PROVIDE_WARNING, warning);
  }

  // Constructors declared inside a goog.scope ($jscomp.scope.*) are exempt.
  public void testMissingGoogProvideWithinGoogScope(){
    String[] js = new String[]{
        "/** @constructor */ $jscomp.scope.bar = function() {};"};
    test(js, js);
  }

  // A provide in a DIFFERENT file does not cover a constructor in this one.
  public void testGoogProvideInWrongFileShouldCreateWarning(){
    String bad = "/** @constructor */ X = function(){};";
    String good = "goog.provide('X'); goog.provide('Y');" +
                  "/** @constructor */ X = function(){};" +
                  "/** @constructor */ Y = function(){};";
    String[] js = new String[] {good, bad};
    String warning = "missing goog.provide('X')";
    test(js, js, null, MISSING_PROVIDE_WARNING, warning);
  }

  public void testGoogProvideMissingConstructorIsOkForNow(){
    // TODO(user) to prevent orphan goog.provide calls, the pass would have to
    // account for enums, static functions and constants
    testSame(new String[]{"goog.provide('Y'); X = function(){};"});
  }

  // Trailing-underscore (private-by-convention) constructors are exempt.
  public void testIgnorePrivateConstructor() {
    String js = "/** @constructor*/ X_ = function(){};";
    testSame(js);
  }

  // @private-annotated constructors are exempt regardless of annotation order.
  public void testIgnorePrivatelyAnnotatedConstructor() {
    testSame("/** @private\n@constructor */ X = function(){};");
    testSame("/** @constructor\n@private */ X = function(){};");
  }
}
apache-2.0
blackberry/JDE-Samples
com/rim/samples/device/location/mapactiondemo/MapActionDemo.java
2061
/** * MapActionDemo.java * * Copyright © 1998-2011 Research In Motion Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Note: For the sake of simplicity, this sample application may not leverage * resource bundles and resource strings. However, it is STRONGLY recommended * that application developers make use of the localization features available * within the BlackBerry development platform to ensure a seamless application * experience across a variety of languages and geographies. For more information * on localizing your application, please refer to the BlackBerry Java Development * Environment Development Guide associated with this release. */ package com.rim.samples.device.mapactiondemo; import net.rim.device.api.ui.UiApplication; /** * This application demonstrates an implementation of the MapAction class that * provides customized map behaviour to the end user. */ public class MapActionDemo extends UiApplication { /** * Entry point for the application * * @param args * Command line parameters (not used) */ public static final void main(final String[] args) { // Create a new instance of the application and make the currently // running thread the application's event dispatch thread. new MapActionDemo().enterEventDispatcher(); } /** * Creates a new MapActionDemo object */ public MapActionDemo() { pushScreen(new MapActionDemoScreen()); } }
apache-2.0
vipshop/Saturn
saturn-console-api/src/main/java/com/vip/saturn/job/console/domain/SaturnJunkData.java
1800
/**
 * Copyright 2016 vip.com.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 * </p>
 **/
package com.vip.saturn.job.console.domain;

import java.io.Serializable;

import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;

/**
 * Value object describing a piece of junk data in the registry: the ZK path it
 * lives at, the namespace/cluster it belongs to, a human-readable description
 * and the kind of cleanup it needs.
 *
 * @author yangjuanying
 */
public class SaturnJunkData implements Serializable {

    private static final long serialVersionUID = -3244143761577185890L;

    // ZooKeeper node path of the junk data.
    private String path;

    // Namespace (domain) the data belongs to.
    private String namespace;

    // Human-readable explanation of why the data is junk.
    private String description;

    // Kind of junk / cleanup action.
    private String type;

    // Address of the ZooKeeper ensemble holding the data.
    private String zkAddr;

    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }

    public String getNamespace() {
        return namespace;
    }

    public void setNamespace(String namespace) {
        this.namespace = namespace;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getZkAddr() {
        return zkAddr;
    }

    public void setZkAddr(String zkAddr) {
        this.zkAddr = zkAddr;
    }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);
    }
}
apache-2.0
goinstant/dagger
compiler/src/it/producers-functional-tests/src/main/java/test/DependentProducerModule.java
1234
/* * Copyright (C) 2015 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package test; import com.google.common.base.Ascii; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import dagger2.producers.ProducerModule; import dagger2.producers.Produces; import java.util.List; @ProducerModule final class DependentProducerModule { @Produces ListenableFuture<List<String>> greetings(Integer numGreetings, String greeting) { List<String> greetings = ImmutableList.of( String.valueOf(numGreetings), greeting, Ascii.toUpperCase(greeting)); return Futures.immediateFuture(greetings); } }
apache-2.0
chengmuxin/Note
src/com/chengmuxin/note/dialog/OrderDialog.java
2893
package com.chengmuxin.note.dialog; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.view.Window; import android.widget.Button; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.RadioButton; import com.chengmuxin.note.R; public class OrderDialog extends Activity implements OnClickListener, OnCheckedChangeListener { private SharedPreferences pref; private SharedPreferences.Editor editor; private RadioButton bymodify, bycreate, bytitle, bylocal; private Button cancel; private long lastClick; public static void actionActivity(Context context) { Intent intent = new Intent(context, OrderDialog.class); context.startActivity(intent); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_NO_TITLE); setContentView(R.layout.dialog_order); init(); } private void init() { pref = getSharedPreferences("NotePara", 0); lastClick = System.currentTimeMillis(); bymodify = (RadioButton) findViewById(R.id.dialog_order_bymodify); bycreate = (RadioButton) findViewById(R.id.dialog_order_bycreate); bytitle = (RadioButton) findViewById(R.id.dialog_order_bytitle); bylocal = (RadioButton) findViewById(R.id.dialog_order_bylocal); bymodify.setOnCheckedChangeListener(this); bycreate.setOnCheckedChangeListener(this); bytitle.setOnCheckedChangeListener(this); bylocal.setOnCheckedChangeListener(this); String str = pref.getString("order", ""); if ("modify".equals(str)) { bymodify.setChecked(true); } else if ("create".equals(str)) { bycreate.setChecked(true); } else if ("title".equals(str)) { bytitle.setChecked(true); } else if ("local".equals(str)) { bylocal.setChecked(true); } cancel = (Button) findViewById(R.id.dialog_order_cancel); 
cancel.setOnClickListener(this); } @Override public void onClick(View v) { switch (v.getId()) { case R.id.dialog_order_cancel: this.finish(); break; default: break; } } @Override public void onCheckedChanged(CompoundButton b, boolean isChecked) { if (isChecked) { editor = pref.edit(); switch (b.getId()) { case R.id.dialog_order_bymodify: editor.putString("order", "modify"); break; case R.id.dialog_order_bycreate: editor.putString("order", "create"); break; case R.id.dialog_order_bytitle: editor.putString("order", "title"); break; case R.id.dialog_order_bylocal: editor.putString("order", "local"); break; default: break; } editor.commit(); if (System.currentTimeMillis() - lastClick > 100) { OrderDialog.this.finish(); } } } }
apache-2.0
bkhezry/ExtraWebView
app/src/test/java/com/github/bkhezry/demo/ExampleUnitTest.java
401
package com.github.bkhezry.demo; import org.junit.Test; import static org.junit.Assert.*; /** * Example local unit test, which will execute on the development machine (host). * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ public class ExampleUnitTest { @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }
apache-2.0
EqualInformation/dashboard-demo-1
src/main/java/com/vaadin/addon/charts/model/Labels.java
7065
package com.vaadin.addon.charts.model; /* * #%L * Vaadin Charts * %% * Copyright (C) 2012 - 2015 Vaadin Ltd * %% * This program is available under Commercial Vaadin Add-On License 3.0 * (CVALv3). * * See the file licensing.txt distributed with this software for more * information about licensing. * * You should have received a copy of the CVALv3 along with this program. * If not, see <https://vaadin.com/license/cval-3>. * #L% */ import com.vaadin.addon.charts.model.style.Style; /** */ public class Labels extends AbstractConfigurationObject { private static final long serialVersionUID = 1L; private HorizontalAlign align; private Number[] autoRotation; private Number autoRotationLimit; private Number distance; private Boolean enabled; private String format; private String _fn_formatter; private Number padding; private Number staggerLines; private Number step; private Style style; private Boolean useHTML; private Number x; private Number y; private Number zIndex; private Items[] items; private String rotation; public Labels() { } /** * @see #setAlign(HorizontalAlign) */ public HorizontalAlign getAlign() { return align; } /** * What part of the string the given position is anchored to. Can be one of * <code>"left"</code>, <code>"center"</code> or <code>"right"</code>. * <p> * Defaults to: right */ public void setAlign(HorizontalAlign align) { this.align = align; } /** * @see #setAutoRotation(Number[]) */ public Number[] getAutoRotation() { return autoRotation; } /** * For horizontal axes, the allowed degrees of label rotation to prevent * overlapping labels. If there is enough space, labels are not rotated. As * the chart gets narrower, it will start rotating the labels -45 degrees, * then remove every second label and try again with rotations 0 and -45 * etc. Set it to <code>false</code> to disable rotation, which will cause * the labels to word-wrap if possible. 
* <p> * Defaults to: [-45] */ public void setAutoRotation(Number[] autoRotation) { this.autoRotation = autoRotation; } /** * @see #setAutoRotationLimit(Number) */ public Number getAutoRotationLimit() { return autoRotationLimit; } /** * When each category width is more than this many pixels, we don't apply * auto rotation. Instead, we lay out the axis label with word wrap. A lower * limit makes sense when the label contains multiple short words that don't * extend the available horizontal space for each label. * <p> * Defaults to: 80 */ public void setAutoRotationLimit(Number autoRotationLimit) { this.autoRotationLimit = autoRotationLimit; } /** * @see #setDistance(Number) */ public Number getDistance() { return distance; } /** * Angular gauges and solid gauges only. The label's pixel distance from the * perimeter of the plot area. * <p> * Defaults to: 15 */ public void setDistance(Number distance) { this.distance = distance; } public Labels(Boolean enabled) { this.enabled = enabled; } /** * @see #setEnabled(Boolean) */ public Boolean getEnabled() { return enabled; } /** * Enable or disable the axis labels. * <p> * Defaults to: true */ public void setEnabled(Boolean enabled) { this.enabled = enabled; } /** * @see #setFormat(String) */ public String getFormat() { return format; } /** * A <a href= * "http://www.highcharts.com/docs/chart-concepts/labels-and-string-formatting" * >format string</a> for the axis label. * <p> * Defaults to: {value} */ public void setFormat(String format) { this.format = format; } public String getFormatter() { return _fn_formatter; } public void setFormatter(String _fn_formatter) { this._fn_formatter = _fn_formatter; } /** * @see #setPadding(Number) */ public Number getPadding() { return padding; } /** * The pixel padding for axis labels, to ensure white space between them. 
* <p> * Defaults to: 5 */ public void setPadding(Number padding) { this.padding = padding; } /** * @see #setStaggerLines(Number) */ public Number getStaggerLines() { return staggerLines; } /** * Horizontal axes only. The number of lines to spread the labels over to * make room or tighter labels. . */ public void setStaggerLines(Number staggerLines) { this.staggerLines = staggerLines; } /** * @see #setStep(Number) */ public Number getStep() { return step; } /** * <p> * To show only every <em>n</em>'th label on the axis, set the step to * <em>n</em>. Setting the step to 2 shows every other label. * </p> * * <p> * By default, the step is calculated automatically to avoid overlap. To * prevent this, set it to 1. This usually only happens on a category axis, * and is often a sign that you have chosen the wrong axis type. Read more * at <a href="http://www.highcharts.com/docs/chart-concepts/axes">Axis * docs</a> => What axis should I use? * </p> */ public void setStep(Number step) { this.step = step; } /** * @see #setStyle(Style) */ public Style getStyle() { return style; } /** * CSS styles for the label. Use <code>whiteSpace: 'nowrap'</code> to * prevent wrapping of category labels. Use * <code>textOverflow: 'none'</code> to prevent ellipsis (dots). * <p> * Defaults to: {"color":"#6D869F","fontWeight":"bold"} */ public void setStyle(Style style) { this.style = style; } /** * @see #setUseHTML(Boolean) */ public Boolean getUseHTML() { return useHTML; } /** * Whether to <a href= * "http://www.highcharts.com/docs/chart-concepts/labels-and-string-formatting#html" * >use HTML</a> to render the labels. * <p> * Defaults to: false */ public void setUseHTML(Boolean useHTML) { this.useHTML = useHTML; } /** * @see #setX(Number) */ public Number getX() { return x; } /** * The x position offset of the label relative to the tick position on the * axis. Defaults to -15 for left axis, 15 for right axis. 
*/ public void setX(Number x) { this.x = x; } /** * @see #setY(Number) */ public Number getY() { return y; } /** * The y position offset of the label relative to the tick position on the * axis. * <p> * Defaults to: 3 */ public void setY(Number y) { this.y = y; } /** * @see #setZIndex(Number) */ public Number getZIndex() { return zIndex; } /** * The Z index for the axis labels. * <p> * Defaults to: 7 */ public void setZIndex(Number zIndex) { this.zIndex = zIndex; } /** * @see #setItems(Items[]) */ public Items[] getItems() { return items; } /** * A HTML label that can be positioned anywhere in the chart area. */ public void setItems(Items[] items) { this.items = items; } public String getRotation() { return rotation; } public void setRotation(String rotation) { this.rotation = rotation; } public void setRotation(Number rotation) { this.rotation = rotation + ""; } public void setRotationPerpendicular() { this.rotation = "auto"; } }
apache-2.0
mikkokar/styx
components/proxy/src/main/java/com/hotels/styx/metrics/reporting/graphite/IoRetry.java
3147
/* Copyright (C) 2013-2018 Expedia Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.hotels.styx.metrics.reporting.graphite; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.UncheckedIOException; import java.util.function.Consumer; import static java.lang.String.format; /** * Generic retry class for tasks that perform I/O. */ final class IoRetry { private static final Logger LOGGER = LoggerFactory.getLogger(GraphiteReporter.class); /** * Executes the provided {@code task} retrying up to {@code times} times if an {@code IOException} is thrown. * <p> * This method executes a {@code task} which can throw an {@code IOException}. If the exception is thrown, * the {@code onError} block will be executed and the {@code task} retried up to {@code times} times. If the * limit of retries is reached, the method will return an {@code UncheckedIOException} encapsulating the {@code IOException}. * * @param times positive integer defining the number of times the {@code task} will be retried. * @param task block that can throw an {@code IOException} * @param errorHandler block to be executed whan an {@code IOException} occurs. * @throws UncheckedIOException wrapper around the original {@code IOException} thrown by {@code task} * when the limit of retries is reached. * @throws IllegalArgumentException if {@code times} is less than 1. 
*/ public static void tryTimes(int times, IOAction task, Consumer<IOException> errorHandler) throws UncheckedIOException, IllegalArgumentException { if (times < 1) { throw new IllegalArgumentException("The number of retries should be a positive integer. It was " + times); } int retries = 0; while (true) { try { task.run(); return; } catch (IOException e) { onError(errorHandler, e); if (++retries == times) { throw new UncheckedIOException( format("Operation failed after %d retries: %s", times, e.getMessage()), e); } } } } private static void onError(Consumer<IOException> consumer, IOException failure) { try { consumer.accept(failure); } catch (Exception e) { LOGGER.warn("OnError block for I/O operation failed: " + e.getLocalizedMessage(), e); } } private IoRetry() { } }
apache-2.0
gavin2lee/incubator
mnisqm/mnisqm-test/src/main/java/com/lachesis/mnisqm/test/module/db/DbUnitUtil.java
971
package com.lachesis.mnisqm.test.module.db; import java.io.File; import java.io.FileOutputStream; import javax.sql.DataSource; import org.dbunit.database.DatabaseConfig; import org.dbunit.database.DatabaseDataSourceConnection; import org.dbunit.database.IDatabaseConnection; import org.dbunit.dataset.xml.FlatDtdDataSet; import com.lachesis.mnisqm.test.service.DataSourceFactoryBean; /** * * @author Paul Xu. * @since 1.0.0 * */ public class DbUnitUtil { public static void main(String[] args) throws Exception { DataSource dataSource = new DataSourceFactoryBean().getDataSource(); File file = new File("src/main/resources/mnisqm.dtd"); IDatabaseConnection connection = new DatabaseDataSourceConnection(dataSource); connection.getConfig().setProperty(DatabaseConfig.FEATURE_QUALIFIED_TABLE_NAMES, true); // write DTD file FlatDtdDataSet.write(connection.createDataSet(), new FileOutputStream(file)); } }
apache-2.0
woefzela/woefzela
Woefzela/src/org/meraka/nchlt/woefzela/LoadFieldworkerProfile.java
4608
/* * Copyright (c) 2011 CSIR, Meraka, South Africa * * Contributors: * - The Department of Arts and Culture, The Government of South Africa. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Developer: Nic de Vries * */ package org.meraka.nchlt.woefzela; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; import android.content.Context; import android.net.Uri; import android.os.Environment; import android.util.Log; public class LoadFieldworkerProfile { private static final String TAG = "LoadFieldworkerProfile"; private static boolean PLAIN_TEXT = true; boolean mExternalStorageAvailable = false; boolean mExternalStorageWriteable = false; CharSequence xmlHeader;//CharSequence maintains formatting of resources. Always? 
//Info from file private String fileMimeType = null; private String fileContentType = null; private String name = null; private String surname = null; private String idNumber = null; private String mobile = null; private String emailAddr = null; private String profileKey = null; public LoadFieldworkerProfile(Context context, Uri fName) { String localeString = java.util.Locale.getDefault().getDisplayName(); Log.i(TAG,"localeString" + localeString); //Check if SDcard is ready to write to String state = Environment.getExternalStorageState(); if (Environment.MEDIA_MOUNTED.equals(state)) { // We can read and write the media mExternalStorageAvailable = mExternalStorageWriteable = true; Log.i(TAG,"SDCARD: Yay, we can read and write to it!"); } else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) { // We can only read the media mExternalStorageAvailable = true; mExternalStorageWriteable = false; Log.i(TAG,"SDCARD: Nope, we can only read it."); } else { //Something else is wrong. It may be one of many other states, but all we need //to know is we can neither read nor write mExternalStorageAvailable = mExternalStorageWriteable = false; Log.i(TAG,"SDCARD: Hmmm...we can neither read nor write to it!"); } //Write file if (mExternalStorageWriteable != false) { Log.i(TAG,"Trying to write xml file..."); try { File root = Environment.getExternalStorageDirectory(); Log.i(TAG,"root = " + root.toString()); if (root.canRead()) { Log.i(TAG,"filename to read is: " + fName); File fid = new File(fName.getEncodedPath()); Log.i(TAG,"fid.<something>() = " + fid.getPath()); //not want: fid.getAbsolutePath(); getCanonicalPath() FileReader fRead = new FileReader(fid); BufferedReader in = new BufferedReader(fRead); fileMimeType = in.readLine(); fileContentType = in.readLine(); name = in.readLine(); surname = in.readLine(); idNumber = in.readLine(); mobile = in.readLine(); emailAddr = in.readLine(); profileKey = in.readLine(); Log.i(TAG,">> Information read from file <<"); 
Log.i(TAG,"fileMimeType: " + fileMimeType); Log.i(TAG,"fileContentType: " + fileContentType); Log.i(TAG,"name: " + name); Log.i(TAG,"surname: " + surname); Log.i(TAG,"idNumber: " + idNumber); Log.i(TAG,"mobile: " + mobile); Log.i(TAG,"emailAddr: " + emailAddr); Log.i(TAG,"profileKey: " + profileKey); in.close(); } else { Log.e(TAG, "root.canRead is false. Why?"); } } catch (IOException e) { Log.e(TAG, "Could not read file " + e.getMessage()); } } else { Log.e(TAG, "Sorry, but the SDcard is not ready/writable."); } } public String getName() { return name; } public String getSurname() { return surname; } public String getIdNumber() { return idNumber; } public String getProfileKey() { return profileKey; } public String getMobile() { return mobile; } public String getEmailAddr() { return emailAddr; } }
apache-2.0
MIUNPsychology/PUPIL
servlet-src/src/pupil/command/cmd_registerinput.java
6004
package pupil.command; import pupil.*; import pupil.sql.*; import pupil.core.*; import pupil.util.*; import java.io.*; import java.sql.*; import java.util.*; import javax.servlet.*; import javax.servlet.http.*; import javax.xml.parsers.*; import javax.xml.transform.*; import javax.xml.transform.dom.*; import javax.xml.transform.stream.StreamResult; import org.w3c.dom.*; import org.xml.sax.*; import org.apache.commons.io.*; public class cmd_registerinput extends Command { //private static org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger(cmd_registerinput.class); private static LogWrapper log = new LogWrapper(); public cmd_registerinput(DBManager dbm, StaticInfoBlob info) { super(dbm,info); } public void process(RequestInfoBlob rib,XMLInfoBlob xib,XMLCommand cmd) throws IOException, ServletException, SAXException, ParserConfigurationException, TransformerConfigurationException, TransformerException, DOMException { log.trace("Enter cmd_registerinput()"); String project = cmd.getParameter("project"); String login = (String)rib.session().getAttribute("login"); log.debug("project is: " + project); log.debug("login is: " + login); String projectid = ""; String studentid = ""; PreparedStatement ps; ResultSet r; try { ps = db.getPreparedStatement("SELECT * FROM project WHERE name = ?"); ps.setString(1,project); r = ps.executeQuery(); r.first(); projectid = r.getString("project_id"); r.close(); ps.close(); } catch(SQLException e) { log.error(e.getMessage(),e); xib.setErrorResponse("SQLException: " + e.getMessage()); return; } log.debug("project_id is: " + projectid); try { ps = db.getPreparedStatement("SELECT * FROM student WHERE login = ?"); ps.setString(1,login); r = ps.executeQuery(); r.first(); studentid = r.getString("student_id"); r.close(); ps.close(); } catch(SQLException e) { log.error(e.getMessage(),e); xib.setErrorResponse("SQLException: " + e.getMessage()); return; } log.debug("student_id is: " + studentid); try { ps = 
db.getPreparedStatement("INSERT INTO testcase(project_id,student_id) VALUES(?,?)"); ps.setString(1,projectid); ps.setString(2,studentid); db.executeAsUpdate(ps,true); } catch(SQLException e) { log.error(e.getMessage(),e); xib.setErrorResponse("SQLException: " + e.getMessage()); } log.debug("inserted new test case"); String testcaseid = ""; try { ps = db.getPreparedStatement("SELECT max(testcase_id) as testcaseid FROM testcase WHERE project_id = ?"); ps.setString(1,projectid); r = ps.executeQuery(); r.first(); testcaseid = r.getString("testcaseid"); r.close(); ps.close(); } catch(SQLException e) { log.error(e.getMessage(),e); xib.setErrorResponse("SQLException: " + e.getMessage()); return; } log.debug("testcaseid is: " + testcaseid); Element data = cmd.getData(); log.debug("data is: " + data); NodeList nl = data.getElementsByTagName("scene"); Element scene; String displayno,start,end,delta,keychar,correct,name,sceneid; HashMap<String,String> sceneids = new HashMap<String,String>(); try { ps = db.getPreparedStatement("SELECT scene_id FROM scene WHERE project_id = ? 
and description = ?"); ps.setString(1,projectid); for(int i = 0; i < nl.getLength(); i++) { scene = (Element)nl.item(i); log.debug("scene: " + scene + " (" + i + ")"); displayno = scene.getAttribute("displayno"); start = scene.getAttribute("start"); end = scene.getAttribute("end"); delta = scene.getAttribute("delta"); keychar = scene.getAttribute("keychar"); correct = scene.getAttribute("correct"); name = scene.getAttribute("name"); log.debug(" -- displayno = " + displayno); log.debug(" -- start = " + start); log.debug(" -- end = " + end); log.debug(" -- delta = " + delta); log.debug(" -- keychar = " + keychar); log.debug(" -- correct = " + correct); log.debug(" -- name = " + name); ps.setString(2,name); r = ps.executeQuery(); r.first(); sceneid = r.getString("scene_id"); r.close(); log.debug(" ++ sceneid = " + sceneid); sceneids.put(name,sceneid); } ps.close(); } catch(SQLException e) { log.error(e.getMessage(),e); xib.setErrorResponse("SQLException: " + e.getMessage()); return; } log.debug("managed to populate scene ids"); try { ps = db.getPreparedStatement("INSERT INTO input(testcase_id,scene_id,time_start,time_end,time_delta,actual_input,correct_input) VALUES(?,?,?,?,?,?,?)"); ps.setString(1,testcaseid); int n; for(int i = 0; i < nl.getLength(); i++) { n = 2; scene = (Element)nl.item(i); displayno = scene.getAttribute("displayno"); start = scene.getAttribute("start"); end = scene.getAttribute("end"); delta = scene.getAttribute("delta"); keychar = scene.getAttribute("keychar"); correct = scene.getAttribute("correct"); name = scene.getAttribute("name"); sceneid = sceneids.get(name); ps.setString(n++,sceneid); ps.setString(n++,start); ps.setString(n++,end); ps.setString(n++,delta); ps.setString(n++,keychar); ps.setString(n++,correct); ps.executeUpdate(); } ps.close(); } catch(SQLException e) { log.error(e.getMessage(),e); xib.setErrorResponse("SQLException: " + e.getMessage()); return; } log.debug("managed to store input"); log.trace("Leave cmd_registerinput()"); 
} }
apache-2.0
dspjlj/wsp
src/com/jlj/service/imp/WhdgoldeneggServiceImp.java
2628
package com.jlj.service.imp; import java.util.List; import javax.annotation.Resource; import org.springframework.stereotype.Component; import com.jlj.dao.IWhdgoldeneggDao; import com.jlj.model.Whdgoldenegg; import com.jlj.service.IWhdgoldeneggService; @Component("whdgoldeneggService") public class WhdgoldeneggServiceImp implements IWhdgoldeneggService { private IWhdgoldeneggDao whdgoldeneggDao; public IWhdgoldeneggDao getWhdgoldeneggDao() { return whdgoldeneggDao; } @Resource public void setWhdgoldeneggDao(IWhdgoldeneggDao whdgoldeneggDao) { this.whdgoldeneggDao = whdgoldeneggDao; } public void add(Whdgoldenegg whdgoldenegg) throws Exception { whdgoldeneggDao.save(whdgoldenegg); } public void delete(Whdgoldenegg whdgoldenegg) { whdgoldeneggDao.delete(whdgoldenegg); } public void deleteById(int id) { whdgoldeneggDao.deleteById(id); } public void update(Whdgoldenegg whdgoldenegg) { whdgoldeneggDao.update(whdgoldenegg); } public List<Whdgoldenegg> getWhdgoldeneggs() { return whdgoldeneggDao.getWhdgoldeneggs(); } public Whdgoldenegg loadById(int id) { return whdgoldeneggDao.loadById(id); } public int getPageCount(int totalCount,int size) { return totalCount%size==0?totalCount/size:(totalCount/size+1); } public int getTotalCount(int con, String convalue, int status, String publicaccount) { String queryString = "select count(*) from Whdgoldenegg mo where mo.publicaccount=? "; Object[] p = null; if(con!=0&&convalue!=null&&!convalue.equals("")){ //条件1 if(con==1){ queryString += " and mo.name like ? "; } p = new Object[]{publicaccount,'%'+convalue+'%'}; }else{ p = new Object[]{publicaccount}; } return whdgoldeneggDao.getUniqueResult(queryString,p); } public List<Whdgoldenegg> queryList(int con, String convalue, int status, String publicaccount, int page, int size) { String queryString = "from Whdgoldenegg mo where mo.publicaccount=? "; Object[] p = null; if(con!=0&&convalue!=null&&!convalue.equals("")){ //条件1 if(con==1){ queryString += " and mo.name like ? 
"; } p = new Object[]{publicaccount,'%'+convalue+'%'}; }else{ p = new Object[]{publicaccount}; } queryString += " order by mo.orderid asc "; return whdgoldeneggDao.pageList(queryString,p,page,size); } public List<Whdgoldenegg> getFrontWhdgoldeneggsByPublicAccount(String paccount) { String queryString = "from Whdgoldenegg mo where mo.publicaccount = ? and mo.ison = 1 order by mo.orderid asc "; Object[] p= new Object[]{paccount}; return whdgoldeneggDao.getObjectsByCondition(queryString, p); } }
apache-2.0
aljoscha/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/StreamMultipleInputProcessorFactory.java
16948
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.runtime.io; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.configuration.Configuration; import org.apache.flink.core.memory.ManagedMemoryUseCase; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.SimpleCounter; import org.apache.flink.runtime.io.disk.iomanager.IOManager; import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable; import org.apache.flink.runtime.memory.MemoryManager; import org.apache.flink.runtime.metrics.groups.TaskIOMetricGroup; import org.apache.flink.streaming.api.graph.StreamConfig; import org.apache.flink.streaming.api.operators.Input; import org.apache.flink.streaming.api.operators.InputSelectable; import org.apache.flink.streaming.api.operators.MultipleInputStreamOperator; import org.apache.flink.streaming.api.operators.Output; import org.apache.flink.streaming.api.operators.sort.MultiInputSortingDataInput; import org.apache.flink.streaming.api.operators.sort.MultiInputSortingDataInput.SelectableSortingInputs; import 
org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.metrics.WatermarkGauge; import org.apache.flink.streaming.runtime.streamrecord.LatencyMarker; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.runtime.streamstatus.StatusWatermarkValve; import org.apache.flink.streaming.runtime.streamstatus.StreamStatus; import org.apache.flink.streaming.runtime.streamstatus.StreamStatusMaintainer; import org.apache.flink.streaming.runtime.tasks.OperatorChain; import org.apache.flink.streaming.runtime.tasks.SourceOperatorStreamTask; import java.util.Arrays; import java.util.List; import java.util.stream.IntStream; import static org.apache.flink.streaming.api.graph.StreamConfig.requiresSorting; import static org.apache.flink.util.Preconditions.checkNotNull; import static org.apache.flink.util.Preconditions.checkState; /** A factory for {@link StreamMultipleInputProcessor}. */ @Internal public class StreamMultipleInputProcessorFactory { @SuppressWarnings({"unchecked", "rawtypes"}) public static StreamMultipleInputProcessor create( AbstractInvokable ownerTask, CheckpointedInputGate[] checkpointedInputGates, StreamConfig.InputConfig[] configuredInputs, IOManager ioManager, MemoryManager memoryManager, TaskIOMetricGroup ioMetricGroup, Counter mainOperatorRecordsIn, StreamStatusMaintainer streamStatusMaintainer, MultipleInputStreamOperator<?> mainOperator, WatermarkGauge[] inputWatermarkGauges, StreamConfig streamConfig, Configuration taskManagerConfig, Configuration jobConfig, ExecutionConfig executionConfig, ClassLoader userClassloader, OperatorChain<?, ?> operatorChain) { checkNotNull(operatorChain); List<Input> operatorInputs = mainOperator.getInputs(); int inputsCount = operatorInputs.size(); StreamOneInputProcessor<?>[] inputProcessors = new StreamOneInputProcessor[inputsCount]; Counter networkRecordsIn = new SimpleCounter(); ioMetricGroup.reuseRecordsInputCounter(networkRecordsIn); 
MultiStreamStreamStatusTracker streamStatusTracker = new MultiStreamStreamStatusTracker(inputsCount); checkState( configuredInputs.length == inputsCount, "Number of configured inputs in StreamConfig [%s] doesn't match the main operator's number of inputs [%s]", configuredInputs.length, inputsCount); StreamTaskInput[] inputs = new StreamTaskInput[inputsCount]; for (int i = 0; i < inputsCount; i++) { StreamConfig.InputConfig configuredInput = configuredInputs[i]; if (configuredInput instanceof StreamConfig.NetworkInputConfig) { StreamConfig.NetworkInputConfig networkInput = (StreamConfig.NetworkInputConfig) configuredInput; inputs[i] = new StreamTaskNetworkInput<>( checkpointedInputGates[networkInput.getInputGateIndex()], networkInput.getTypeSerializer(), ioManager, new StatusWatermarkValve( checkpointedInputGates[networkInput.getInputGateIndex()] .getNumberOfInputChannels()), i); } else if (configuredInput instanceof StreamConfig.SourceInputConfig) { StreamConfig.SourceInputConfig sourceInput = (StreamConfig.SourceInputConfig) configuredInput; inputs[i] = operatorChain.getSourceTaskInput(sourceInput); } else { throw new UnsupportedOperationException("Unknown input type: " + configuredInput); } } InputSelectable inputSelectable = mainOperator instanceof InputSelectable ? 
(InputSelectable) mainOperator : null; StreamConfig.InputConfig[] inputConfigs = streamConfig.getInputs(userClassloader); boolean anyRequiresSorting = Arrays.stream(inputConfigs).anyMatch(StreamConfig::requiresSorting); if (anyRequiresSorting) { if (inputSelectable != null) { throw new IllegalStateException( "The InputSelectable interface is not supported with sorting inputs"); } StreamTaskInput[] sortingInputs = IntStream.range(0, inputsCount) .filter(idx -> requiresSorting(inputConfigs[idx])) .mapToObj(idx -> inputs[idx]) .toArray(StreamTaskInput[]::new); KeySelector[] sortingInputKeySelectors = IntStream.range(0, inputsCount) .filter(idx -> requiresSorting(inputConfigs[idx])) .mapToObj(idx -> streamConfig.getStatePartitioner(idx, userClassloader)) .toArray(KeySelector[]::new); TypeSerializer[] sortingInputKeySerializers = IntStream.range(0, inputsCount) .filter(idx -> requiresSorting(inputConfigs[idx])) .mapToObj(idx -> streamConfig.getTypeSerializerIn(idx, userClassloader)) .toArray(TypeSerializer[]::new); StreamTaskInput[] passThroughInputs = IntStream.range(0, inputsCount) .filter(idx -> !requiresSorting(inputConfigs[idx])) .mapToObj(idx -> inputs[idx]) .toArray(StreamTaskInput[]::new); SelectableSortingInputs selectableSortingInputs = MultiInputSortingDataInput.wrapInputs( ownerTask, sortingInputs, sortingInputKeySelectors, sortingInputKeySerializers, streamConfig.getStateKeySerializer(userClassloader), passThroughInputs, memoryManager, ioManager, executionConfig.isObjectReuseEnabled(), streamConfig.getManagedMemoryFractionOperatorUseCaseOfSlot( ManagedMemoryUseCase.OPERATOR, taskManagerConfig, userClassloader), jobConfig); StreamTaskInput<?>[] sortedInputs = selectableSortingInputs.getSortedInputs(); StreamTaskInput<?>[] passedThroughInputs = selectableSortingInputs.getPassThroughInputs(); int sortedIndex = 0; int passThroughIndex = 0; for (int i = 0; i < inputs.length; i++) { if (requiresSorting(inputConfigs[i])) { inputs[i] = sortedInputs[sortedIndex]; 
sortedIndex++; } else { inputs[i] = passedThroughInputs[passThroughIndex]; passThroughIndex++; } } inputSelectable = selectableSortingInputs.getInputSelectable(); } for (int i = 0; i < inputsCount; i++) { StreamConfig.InputConfig configuredInput = configuredInputs[i]; if (configuredInput instanceof StreamConfig.NetworkInputConfig) { StreamTaskNetworkOutput dataOutput = new StreamTaskNetworkOutput<>( operatorInputs.get(i), streamStatusMaintainer, inputWatermarkGauges[i], streamStatusTracker, i, mainOperatorRecordsIn, networkRecordsIn); inputProcessors[i] = new StreamOneInputProcessor(inputs[i], dataOutput, operatorChain); } else if (configuredInput instanceof StreamConfig.SourceInputConfig) { StreamConfig.SourceInputConfig sourceInput = (StreamConfig.SourceInputConfig) configuredInput; Output<StreamRecord<?>> chainedSourceOutput = operatorChain.getChainedSourceOutput(sourceInput); inputProcessors[i] = new StreamOneInputProcessor( inputs[i], new StreamTaskSourceOutput( chainedSourceOutput, streamStatusMaintainer, inputWatermarkGauges[i], streamStatusTracker, i), operatorChain); } else { throw new UnsupportedOperationException("Unknown input type: " + configuredInput); } } return new StreamMultipleInputProcessor( new MultipleInputSelectionHandler(inputSelectable, inputsCount), inputProcessors); } /** * Stream status tracker for the inputs. We need to keep track for determining when to forward * stream status changes downstream. 
*/
private static class MultiStreamStreamStatusTracker {

    // Latest StreamStatus reported by each input, indexed by the input's index.
    private final StreamStatus[] streamStatuses;

    private MultiStreamStreamStatusTracker(int numberOfInputs) {
        this.streamStatuses = new StreamStatus[numberOfInputs];
        // Every input starts out ACTIVE until it explicitly reports IDLE.
        Arrays.fill(streamStatuses, StreamStatus.ACTIVE);
    }

    public void setStreamStatus(int index, StreamStatus streamStatus) {
        streamStatuses[index] = streamStatus;
    }

    public StreamStatus getStreamStatus(int index) {
        return streamStatuses[index];
    }

    /** Returns true only when no tracked input is still ACTIVE. */
    public boolean allStreamStatusesAreIdle() {
        for (StreamStatus streamStatus : streamStatuses) {
            if (streamStatus.isActive()) {
                return false;
            }
        }
        return true;
    }
}

/**
 * The network data output implementation used for processing stream elements from {@link
 * StreamTaskNetworkInput} in two input selective processor.
 *
 * <p>Forwards records/watermarks to the wrapped {@link Input} and folds per-input stream-status
 * changes into the task-wide status via the shared tracker.
 */
private static class StreamTaskNetworkOutput<T> extends AbstractDataOutput<T> {

    private final Input<T> input;

    private final WatermarkGauge inputWatermarkGauge;

    /** The input index to indicate how to process elements by two input operator. */
    private final int inputIndex;

    // Shared across all inputs of this task; used to decide when the whole task goes idle.
    private final MultiStreamStreamStatusTracker streamStatusTracker;

    private final Counter mainOperatorRecordsIn;

    private final Counter networkRecordsIn;

    private StreamTaskNetworkOutput(
            Input<T> input,
            StreamStatusMaintainer streamStatusMaintainer,
            WatermarkGauge inputWatermarkGauge,
            MultiStreamStreamStatusTracker streamStatusTracker,
            int inputIndex,
            Counter mainOperatorRecordsIn,
            Counter networkRecordsIn) {
        super(streamStatusMaintainer);

        this.input = checkNotNull(input);
        this.inputWatermarkGauge = checkNotNull(inputWatermarkGauge);
        this.streamStatusTracker = streamStatusTracker;
        this.inputIndex = inputIndex;
        this.mainOperatorRecordsIn = mainOperatorRecordsIn;
        this.networkRecordsIn = networkRecordsIn;
    }

    @Override
    public void emitRecord(StreamRecord<T> record) throws Exception {
        // Key context must be set before the element is processed.
        input.setKeyContextElement(record);
        input.processElement(record);
        mainOperatorRecordsIn.inc();
        networkRecordsIn.inc();
    }

    @Override
    public void emitWatermark(Watermark watermark) throws Exception {
        inputWatermarkGauge.setCurrentWatermark(watermark.getTimestamp());
        input.processWatermark(watermark);
    }

    @Override
    public void emitStreamStatus(StreamStatus streamStatus) {
        streamStatusTracker.setStreamStatus(inputIndex, streamStatus);

        // check if we need to toggle the task's stream status
        if (!streamStatus.equals(streamStatusMaintainer.getStreamStatus())) {
            if (streamStatus.isActive()) {
                // we're no longer idle if at least one input has become active
                streamStatusMaintainer.toggleStreamStatus(StreamStatus.ACTIVE);
            } else if (streamStatusTracker.allStreamStatusesAreIdle()) {
                // only go idle once *every* input is idle
                streamStatusMaintainer.toggleStreamStatus(StreamStatus.IDLE);
            }
        }
    }

    @Override
    public void emitLatencyMarker(LatencyMarker latencyMarker) throws Exception {
        input.processLatencyMarker(latencyMarker);
    }
}

/**
 * Output for a chained source input. Delegates record/watermark emission to the parent class and
 * applies the same tracker-based idle/active aggregation as {@link StreamTaskNetworkOutput}.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private static class StreamTaskSourceOutput
        extends SourceOperatorStreamTask.AsyncDataOutputToOutput {

    private final int inputIndex;

    private final MultiStreamStreamStatusTracker streamStatusTracker;

    public StreamTaskSourceOutput(
            Output<StreamRecord<?>> chainedSourceOutput,
            StreamStatusMaintainer streamStatusMaintainer,
            WatermarkGauge inputWatermarkGauge,
            MultiStreamStreamStatusTracker streamStatusTracker,
            int inputIndex) {
        super(chainedSourceOutput, streamStatusMaintainer, inputWatermarkGauge);
        this.streamStatusTracker = streamStatusTracker;
        this.inputIndex = inputIndex;
    }

    @Override
    public void emitStreamStatus(StreamStatus streamStatus) {
        streamStatusTracker.setStreamStatus(inputIndex, streamStatus);

        // check if we need to toggle the task's stream status
        // NOTE(review): duplicated verbatim from StreamTaskNetworkOutput.emitStreamStatus;
        // keep the two in sync if either changes.
        if (!streamStatus.equals(streamStatusMaintainer.getStreamStatus())) {
            if (streamStatus.isActive()) {
                // we're no longer idle if at least one input has become active
                streamStatusMaintainer.toggleStreamStatus(StreamStatus.ACTIVE);
            } else if (streamStatusTracker.allStreamStatusesAreIdle()) {
                streamStatusMaintainer.toggleStreamStatus(StreamStatus.IDLE);
            }
        }
    }
}
}
apache-2.0
googleapis/java-bigtable
proto-google-cloud-bigtable-v2/src/main/java/com/google/bigtable/v2/ReadModifyWriteRowRequestOrBuilder.java
5117
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/bigtable/v2/bigtable.proto package com.google.bigtable.v2; public interface ReadModifyWriteRowRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:google.bigtable.v2.ReadModifyWriteRowRequest) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Required. The unique name of the table to which the read/modify/write rules should be * applied. * Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/tables/&lt;table&gt;`. * </pre> * * <code> * string table_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The tableName. */ java.lang.String getTableName(); /** * * * <pre> * Required. The unique name of the table to which the read/modify/write rules should be * applied. * Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/tables/&lt;table&gt;`. * </pre> * * <code> * string table_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for tableName. */ com.google.protobuf.ByteString getTableNameBytes(); /** * * * <pre> * This value specifies routing for replication. If not specified, the * "default" application profile will be used. * </pre> * * <code>string app_profile_id = 4;</code> * * @return The appProfileId. 
*/ java.lang.String getAppProfileId(); /** * * * <pre> * This value specifies routing for replication. If not specified, the * "default" application profile will be used. * </pre> * * <code>string app_profile_id = 4;</code> * * @return The bytes for appProfileId. */ com.google.protobuf.ByteString getAppProfileIdBytes(); /** * * * <pre> * Required. The key of the row to which the read/modify/write rules should be applied. * </pre> * * <code>bytes row_key = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The rowKey. */ com.google.protobuf.ByteString getRowKey(); /** * * * <pre> * Required. Rules specifying how the specified row's contents are to be transformed * into writes. Entries are applied in order, meaning that earlier rules will * affect the results of later ones. * </pre> * * <code> * repeated .google.bigtable.v2.ReadModifyWriteRule rules = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ java.util.List<com.google.bigtable.v2.ReadModifyWriteRule> getRulesList(); /** * * * <pre> * Required. Rules specifying how the specified row's contents are to be transformed * into writes. Entries are applied in order, meaning that earlier rules will * affect the results of later ones. * </pre> * * <code> * repeated .google.bigtable.v2.ReadModifyWriteRule rules = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ com.google.bigtable.v2.ReadModifyWriteRule getRules(int index); /** * * * <pre> * Required. Rules specifying how the specified row's contents are to be transformed * into writes. Entries are applied in order, meaning that earlier rules will * affect the results of later ones. * </pre> * * <code> * repeated .google.bigtable.v2.ReadModifyWriteRule rules = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ int getRulesCount(); /** * * * <pre> * Required. Rules specifying how the specified row's contents are to be transformed * into writes. 
Entries are applied in order, meaning that earlier rules will * affect the results of later ones. * </pre> * * <code> * repeated .google.bigtable.v2.ReadModifyWriteRule rules = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ java.util.List<? extends com.google.bigtable.v2.ReadModifyWriteRuleOrBuilder> getRulesOrBuilderList(); /** * * * <pre> * Required. Rules specifying how the specified row's contents are to be transformed * into writes. Entries are applied in order, meaning that earlier rules will * affect the results of later ones. * </pre> * * <code> * repeated .google.bigtable.v2.ReadModifyWriteRule rules = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ com.google.bigtable.v2.ReadModifyWriteRuleOrBuilder getRulesOrBuilder(int index); }
apache-2.0
friedhardware/druid
server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java
12879
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.metadata;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.LinearShardSpec;
import io.druid.timeline.partition.NoneShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.tweak.HandleCallback;

import java.io.IOException;
import java.util.Set;

/**
 * Tests for {@link IndexerSQLMetadataStorageCoordinator} against an embedded Derby database.
 *
 * <p>Fixture layout: {@code defaultSegment} and {@code defaultSegment2} are two partitions of the
 * same datasource/version over 2015-01-01/2015-01-02; {@code defaultSegment3} covers a disjoint
 * interval (2015-01-03/2015-01-04). The "Used"/"UnUsed" test names refer to the segment table's
 * {@code used} flag; "Overlap"/"Underlap"/"OutOfBounds" describe how the queried interval relates
 * to the stored segments' intervals.
 */
public class IndexerSQLMetadataStorageCoordinatorTest
{
  // Fresh Derby instance per test method.
  @Rule
  public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule =
      new TestDerbyConnector.DerbyConnectorRule();

  private final ObjectMapper mapper = new DefaultObjectMapper();

  private final DataSegment defaultSegment = new DataSegment(
      "dataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "version",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new LinearShardSpec(0),
      9,
      100
  );

  // Second partition (shard 1) of the same datasource/interval/version as defaultSegment.
  private final DataSegment defaultSegment2 = new DataSegment(
      "dataSource",
      Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
      "version",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new LinearShardSpec(1),
      9,
      100
  );

  // Segment over a disjoint interval, used by the multi-interval tests.
  private final DataSegment defaultSegment3 = new DataSegment(
      "dataSource",
      Interval.parse("2015-01-03T00Z/2015-01-04T00Z"),
      "version",
      ImmutableMap.<String, Object>of(),
      ImmutableList.of("dim1"),
      ImmutableList.of("m1"),
      new NoneShardSpec(),
      9,
      100
  );

  private final Set<DataSegment> segments = ImmutableSet.of(defaultSegment, defaultSegment2);
  IndexerSQLMetadataStorageCoordinator coordinator;
  private TestDerbyConnector derbyConnector;

  @Before
  public void setUp()
  {
    derbyConnector = derbyConnectorRule.getConnector();
    // LinearShardSpec must be registered so segment payloads round-trip through JSON.
    mapper.registerSubtypes(LinearShardSpec.class);
    derbyConnector.createTaskTables();
    derbyConnector.createSegmentTable();
    coordinator = new IndexerSQLMetadataStorageCoordinator(
        mapper,
        derbyConnectorRule.metadataTablesConfigSupplier().get(),
        derbyConnector
    );
  }

  /**
   * Marks both fixture segments as unused directly in the segment table, asserting each
   * UPDATE touched exactly one row.
   */
  private void unUseSegment()
  {
    for (final DataSegment segment : segments) {
      Assert.assertEquals(
          1,
          (int) derbyConnector.getDBI().<Integer>withHandle(
              new HandleCallback<Integer>()
              {
                @Override
                public Integer withHandle(Handle handle) throws Exception
                {
                  return handle.createStatement(
                      String.format(
                          "UPDATE %s SET used = false WHERE id = :id",
                          derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable()
                      )
                  ).bind("id", segment.getIdentifier()).execute();
                }
              }
          )
      );
    }
  }

  // Announcing segments persists their JSON payload under the segment identifier.
  @Test
  public void testSimpleAnnounce() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Assert.assertArrayEquals(
        mapper.writeValueAsString(defaultSegment).getBytes("UTF-8"),
        derbyConnector.lookup(
            derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
            "id",
            "payload",
            defaultSegment.getIdentifier()
        )
    );
  }

  @Test
  public void testSimpleUsedList() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval()
            )
        )
    );
  }

  // Querying multiple intervals returns exactly the segments overlapping each interval.
  @Test
  public void testMultiIntervalUsedList() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    coordinator.announceHistoricalSegments(ImmutableSet.of(defaultSegment3));

    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForIntervals(
                defaultSegment.getDataSource(),
                ImmutableList.of(defaultSegment.getInterval())
            )
        )
    );
    Assert.assertEquals(
        ImmutableSet.of(defaultSegment3),
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForIntervals(
                defaultSegment.getDataSource(),
                ImmutableList.of(defaultSegment3.getInterval())
            )
        )
    );
    Assert.assertEquals(
        ImmutableSet.of(defaultSegment, defaultSegment2, defaultSegment3),
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForIntervals(
                defaultSegment.getDataSource(),
                ImmutableList.of(defaultSegment.getInterval(), defaultSegment3.getInterval())
            )
        )
    );
    // Verify a segment is not duplicated in the result when two of the queried intervals
    // overlap that same segment's interval.
    Assert.assertEquals(
        ImmutableList.of(defaultSegment3),
        coordinator.getUsedSegmentsForIntervals(
            defaultSegment.getDataSource(),
            ImmutableList.of(Interval.parse("2015-01-03T00Z/2015-01-03T05Z"), Interval.parse("2015-01-03T09Z/2015-01-04T00Z"))
        )
    );
  }

  @Test
  public void testSimpleUnUsedList() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUnusedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval()
            )
        )
    );
  }

  // A query interval that barely crosses the segments' start still matches them.
  @Test
  public void testUsedOverlapLow() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Set<DataSegment> actualSegments = ImmutableSet.copyOf(
        coordinator.getUsedSegmentsForInterval(
            defaultSegment.getDataSource(),
            Interval.parse("2014-12-31T23:59:59.999Z/2015-01-01T00:00:00.001Z") // end is exclusive
        )
    );
    Assert.assertEquals(
        segments,
        actualSegments
    );
  }

  @Test
  public void testUsedOverlapHigh() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForInterval(
                defaultSegment.getDataSource(),
                Interval.parse("2015-1-1T23:59:59.999Z/2015-02-01T00Z")
            )
        )
    );
  }

  // An interval ending exactly at the segments' start (end-exclusive) matches nothing.
  @Test
  public void testUsedOutOfBoundsLow() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Assert.assertTrue(
        coordinator.getUsedSegmentsForInterval(
            defaultSegment.getDataSource(),
            new Interval(defaultSegment.getInterval().getStart().minus(1), defaultSegment.getInterval().getStart())
        ).isEmpty()
    );
  }

  @Test
  public void testUsedOutOfBoundsHigh() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Assert.assertTrue(
        coordinator.getUsedSegmentsForInterval(
            defaultSegment.getDataSource(),
            new Interval(defaultSegment.getInterval().getEnd(), defaultSegment.getInterval().getEnd().plusDays(10))
        ).isEmpty()
    );
  }

  @Test
  public void testUsedWithinBoundsEnd() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().minusMillis(1))
            )
        )
    );
  }

  @Test
  public void testUsedOverlapEnd() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUsedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plusMillis(1))
            )
        )
    );
  }

  // NOTE: unlike the "used" queries, the unused-segment queries below only match segments
  // fully contained in the queried interval — partial overlap returns nothing.
  @Test
  public void testUnUsedOverlapLow() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertTrue(
        coordinator.getUnusedSegmentsForInterval(
            defaultSegment.getDataSource(),
            new Interval(
                defaultSegment.getInterval().getStart().minus(1),
                defaultSegment.getInterval().getStart().plus(1)
            )
        ).isEmpty()
    );
  }

  @Test
  public void testUnUsedUnderlapLow() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertTrue(
        coordinator.getUnusedSegmentsForInterval(
            defaultSegment.getDataSource(),
            new Interval(defaultSegment.getInterval().getStart().plus(1), defaultSegment.getInterval().getEnd())
        ).isEmpty()
    );
  }

  @Test
  public void testUnUsedUnderlapHigh() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertTrue(
        coordinator.getUnusedSegmentsForInterval(
            defaultSegment.getDataSource(),
            new Interval(defaultSegment.getInterval().getStart(), defaultSegment.getInterval().getEnd().minus(1))
        ).isEmpty()
    );
  }

  @Test
  public void testUnUsedOverlapHigh() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertTrue(
        coordinator.getUnusedSegmentsForInterval(
            defaultSegment.getDataSource(),
            defaultSegment.getInterval().withStart(defaultSegment.getInterval().getEnd().minus(1))
        ).isEmpty()
    );
  }

  @Test
  public void testUnUsedBigOverlap() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUnusedSegmentsForInterval(
                defaultSegment.getDataSource(),
                Interval.parse("2000/2999")
            )
        )
    );
  }

  @Test
  public void testUnUsedLowRange() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUnusedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval().withStart(defaultSegment.getInterval().getStart().minus(1))
            )
        )
    );
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUnusedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval().withStart(defaultSegment.getInterval().getStart().minusYears(1))
            )
        )
    );
  }

  @Test
  public void testUnUsedHighRange() throws IOException
  {
    coordinator.announceHistoricalSegments(segments);
    unUseSegment();
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUnusedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plus(1))
            )
        )
    );
    Assert.assertEquals(
        segments,
        ImmutableSet.copyOf(
            coordinator.getUnusedSegmentsForInterval(
                defaultSegment.getDataSource(),
                defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plusYears(1))
            )
        )
    );
  }
}
apache-2.0
onepf/OPFPush-Unity-Plugin
opfpush-unity/android/src/main/java/org/onepf/opfpush/unity/utils/MessagesDeliveryController.java
3701
package org.onepf.opfpush.unity.utils;

import android.content.Context;
import android.support.annotation.NonNull;

import com.unity3d.player.UnityPlayer;

import org.onepf.opfutils.OPFLog;
import org.onepf.opfutils.OPFPreferences;

import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

import static java.util.Locale.US;

/**
 * Delivers push messages to the Unity runtime and persists messages that could not be
 * delivered (because the Unity native bridge was not yet loaded) so they can be resent
 * later in their original order.
 *
 * <p>Persistence layout in the {@code opfpush-unity} preference file: a string set under
 * {@link #UNDELIVERED_MESSAGES_KEYS} holds one timestamp key per pending message, and each
 * key has two companion entries — {@code <key>-callback} (the Unity callback method) and
 * {@code <key>-body} (the message payload).
 *
 * @author Roman Savin
 * @since 20.07.2015
 */
public final class MessagesDeliveryController {

    private static final String OPF_UNITY_POSTFIX = "opfpush-unity";

    // Name of the Unity GameObject that receives the callbacks.
    private static final String EVENT_RECEIVER = "OPFPush";

    private static final String UNDELIVERED_MESSAGES_KEYS = "UNDELIVERED_MESSAGES_KEYS";

    private static final String UNDELIVERED_MESSAGE_CALLBACK_POSTFIX = "-callback";

    private static final String UNDELIVERED_MESSAGE_BODY_POSTFIX = "-body";

    /** Static utility class — not instantiable. */
    private MessagesDeliveryController() {
        throw new UnsupportedOperationException();
    }

    /**
     * Sends {@code message} to the Unity {@code OPFPush} receiver via {@code callbackMethod}.
     * If the Unity native library is not loaded yet ({@link UnsatisfiedLinkError}), the message
     * is stored in shared preferences for a later {@link #resendUndeliveredMessages()}.
     *
     * @param context        context used to open the {@code opfpush-unity} preference file
     * @param callbackMethod name of the Unity callback method to invoke
     * @param message        message payload to deliver
     */
    public static void sendUnityMessage(@NonNull final Context context,
                                        @NonNull final String callbackMethod,
                                        @NonNull final String message) {
        try {
            UnityPlayer.UnitySendMessage(EVENT_RECEIVER, callbackMethod, message);
        } catch (UnsatisfiedLinkError e) {
            OPFLog.w(String.format(US, "Message %s hasn't been delivered. It'll be resend later.", message));

            //Save undelivered message to shared preferences
            // NOTE(review): the key is the current millisecond timestamp, so two messages
            // arriving within the same millisecond would collide — TODO confirm whether
            // that can happen in practice and disambiguate the key if so.
            final String newUndeliveredMessageKey = String.valueOf(System.currentTimeMillis());
            final OPFPreferences preferences = new OPFPreferences(context, OPF_UNITY_POSTFIX);
            final Set<String> undeliveredMessagesKeys =
                    preferences.getStringSet(UNDELIVERED_MESSAGES_KEYS, new HashSet<String>());
            undeliveredMessagesKeys.add(newUndeliveredMessageKey);

            preferences.put(UNDELIVERED_MESSAGES_KEYS, undeliveredMessagesKeys);
            preferences.put(newUndeliveredMessageKey + UNDELIVERED_MESSAGE_CALLBACK_POSTFIX, callbackMethod);
            preferences.put(newUndeliveredMessageKey + UNDELIVERED_MESSAGE_BODY_POSTFIX, message);
        }
    }

    /**
     * Replays all buffered undelivered messages in chronological order (oldest first) and
     * removes them from the preference store. Called from the Unity side once the bridge is up.
     */
    @SuppressWarnings("unused")
    public static void resendUndeliveredMessages() {
        OPFLog.logMethod();
        final Context context = UnityPlayer.currentActivity;
        final OPFPreferences preferences = new OPFPreferences(context, OPF_UNITY_POSTFIX);

        // TreeSet ordered by numeric timestamp so messages are resent oldest-first.
        final Set<String> undeliveredMessagesKeys = new TreeSet<>(new Comparator<String>() {
            @Override
            public int compare(final String lhs, final String rhs) {
                //Keys are string representation of System.currentTimeMillis();
                // FIX: use Long.compare instead of the previous "(int) (left - right)",
                // which could overflow the int cast (and wrap the sign) for timestamps
                // more than ~24.8 days apart, breaking the comparator contract.
                return Long.compare(Long.parseLong(lhs), Long.parseLong(rhs));
            }
        });
        undeliveredMessagesKeys.addAll(preferences.getStringSet(UNDELIVERED_MESSAGES_KEYS, new HashSet<String>(0)));
        preferences.remove(UNDELIVERED_MESSAGES_KEYS);

        for (String key : undeliveredMessagesKeys) {
            final String callbackMethodKey = key + UNDELIVERED_MESSAGE_CALLBACK_POSTFIX;
            final String messageKey = key + UNDELIVERED_MESSAGE_BODY_POSTFIX;
            final String callbackMethod = preferences.getString(callbackMethodKey);
            final String message = preferences.getString(messageKey);

            preferences.remove(callbackMethodKey);
            preferences.remove(messageKey);

            // Only resend complete entries; a missing half is silently dropped (best-effort).
            if (callbackMethod != null && message != null) {
                sendUnityMessage(context, callbackMethod, message);
            }
        }
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-workdocs/src/main/java/com/amazonaws/services/workdocs/model/transform/DocumentMetadataJsonUnmarshaller.java
4597
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.workdocs.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.workdocs.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * DocumentMetadata JSON Unmarshaller
 *
 * <p>Generated code: walks the JSON token stream one level below the current depth and fills a
 * {@link DocumentMetadata} from the fields it recognizes, ignoring unknown fields.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DocumentMetadataJsonUnmarshaller implements Unmarshaller<DocumentMetadata, JsonUnmarshallerContext> {

    public DocumentMetadata unmarshall(JsonUnmarshallerContext context) throws Exception {
        DocumentMetadata documentMetadata = new DocumentMetadata();

        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this object live exactly one level below the object's own depth.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // JSON null for the whole object unmarshals to a null DocumentMetadata.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("Id", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("CreatorId", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setCreatorId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("ParentFolderId", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setParentFolderId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                // Timestamps are wire-encoded as Unix epoch values.
                if (context.testExpression("CreatedTimestamp", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setCreatedTimestamp(DateJsonUnmarshallerFactory.getInstance("unixTimestamp").unmarshall(context));
                }
                if (context.testExpression("ModifiedTimestamp", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setModifiedTimestamp(DateJsonUnmarshallerFactory.getInstance("unixTimestamp").unmarshall(context));
                }
                if (context.testExpression("LatestVersionMetadata", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setLatestVersionMetadata(DocumentVersionMetadataJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("ResourceState", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setResourceState(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Labels", targetDepth)) {
                    context.nextToken();
                    documentMetadata.setLabels(new ListUnmarshaller<String>(context.getUnmarshaller(String.class))

                    .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back out of the object we started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return documentMetadata;
    }

    private static DocumentMetadataJsonUnmarshaller instance;

    // Lazily initialized without synchronization (standard pattern in this generated SDK code);
    // a benign duplicate instance may be created under concurrent first use.
    public static DocumentMetadataJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DocumentMetadataJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
NovaOrdis/novaordis-utilities
src/test/java/io/novaordis/utilities/crawler/Node.java
2169
package io.novaordis.utilities.crawler;

import java.util.List;
import java.util.ArrayList;

/**
 * Test fixture {@link Frame} implementation: a tree node carrying an id, a depth and a mutable
 * list of child frames. Visiting it with {@link #use(Collector)} simply hands the node to the
 * collector and returns the collector.
 *
 * @author <a href="mailto:ovidiu@feodorov.com">Ovidiu Feodorov</a>
 *
 * Copyright 2008 Ovidiu Feodorov
 *
 * @version <tt>$Revision$</tt>
 *
 * $Id$
 */
class Node implements Frame {

    private final List<Frame> children;
    private int depth;
    private String id;

    /** Creates a node with the default id and depth 1. */
    Node() {
        this(1);
    }

    /** Creates a node with the default id and the given depth. */
    Node(int depth) {
        this("DEFAULT_ID", depth);
    }

    /** Creates a node with the given id and depth and an empty child list. */
    Node(String id, int depth) {
        this.id = id;
        this.depth = depth;
        this.children = new ArrayList<>();
    }

    // Frame implementation ------------------------------------------------------------------------

    /** Returns the live (mutable) child list. */
    public List<Frame> children() {
        return children;
    }

    /** Offers this node to the collector and hands the same collector back. */
    public Collector use(Collector in) throws Exception {
        in.collect(this);
        return in;
    }

    // Public ---------------------------------------------------------------------------------------

    /** Appends a child node. */
    void add(Node child) {
        children.add(child);
    }

    public int getDepth() {
        return depth;
    }

    public void setDepth(int depth) {
        this.depth = depth;
    }

    public String getID() {
        return id;
    }
}
apache-2.0
missioncommand/emp3-android-examples
example-cameraandwmsandwcs/src/main/java/mil/emp3/examples/wms_and_wcs/CustomActivity.java
15169
package mil.emp3.examples.wms_and_wcs; import android.app.Activity; import android.databinding.DataBindingUtil; import android.os.Bundle; import android.util.Log; import android.view.View; import android.widget.ArrayAdapter; import android.widget.EditText; import android.widget.Toast; import org.cmapi.primitives.IGeoAltitudeMode; import java.net.MalformedURLException; import java.util.ArrayList; import mil.emp3.api.GeoPackage; import mil.emp3.api.LineOfSight; import mil.emp3.api.LookAt; import mil.emp3.api.WCS; import mil.emp3.api.WMS; import mil.emp3.api.WMTS; import mil.emp3.api.enums.WMSVersionEnum; import mil.emp3.api.exceptions.EMP_Exception; import mil.emp3.api.interfaces.ICamera; import mil.emp3.api.interfaces.ILookAt; import mil.emp3.api.interfaces.IMap; import mil.emp3.api.utils.EmpGeoColor; import mil.emp3.api.utils.EmpGeoPosition; import mil.emp3.examples.maptestfragment.CameraUtility; import mil.emp3.examples.wms_and_wcs.databinding.ActivityCustomBinding; public class CustomActivity extends Activity { private WMS wmsService = null; private WCS wcsService = null; private ActivityCustomBinding dataBinding; private String url; private String layer; private IGeoAltitudeMode.AltitudeMode altitudeMode = IGeoAltitudeMode.AltitudeMode.ABSOLUTE; private final mil.emp3.api.Camera camera = new mil.emp3.api.Camera(); private final static String TAG = CustomActivity.class.getSimpleName(); private WMTS wmtsService = null; private WMTS oldWMTSService = null; private IMap map = null; private GeoPackage geoPackage = null; private LineOfSight los = null; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Log.i(TAG, "Setting custom activity"); dataBinding = DataBindingUtil.setContentView(this, R.layout.activity_custom); dataBinding.setCwms(this); ArrayAdapter<CharSequence> versionAdapter = ArrayAdapter.createFromResource(this, R.array.wms_versions, android.R.layout.simple_spinner_item); 
dataBinding.VersionText.setAdapter(versionAdapter); ArrayAdapter<CharSequence> altitudeModeAdapter = ArrayAdapter.createFromResource(this, R.array.altitude_mode, android.R.layout.simple_spinner_item); dataBinding.AltitudeMode.setAdapter(altitudeModeAdapter); /* Instantiate a camera and set the location and angle The altitude here is set initially to 1000 km */ camera.setName("Main Cam"); camera.setAltitudeMode(altitudeMode); camera.setAltitude(1e5); camera.setHeading(0.0); camera.setLatitude(40.0); camera.setLongitude(-100.0); camera.setRoll(0.0); camera.setTilt(0.0); map = dataBinding.map; try { map.addMapStateChangeEventListener(mapStateChangeEvent -> { Log.d(TAG, "mapStateChangeEvent " + mapStateChangeEvent.getNewState()); switch (mapStateChangeEvent.getNewState()) { case MAP_READY: try { map.setCamera(camera, false); } catch (EMP_Exception empe) { empe.printStackTrace(); } break; } }); } catch (EMP_Exception e) { Log.e(TAG, "addMapStateChangeEventListener", e); } try { map.addMapInteractionEventListener(mapUserInteractionEvent -> Log.d(TAG, "mapUserInteractionEvent " + mapUserInteractionEvent.getPoint().x)); } catch (EMP_Exception e) { Log.e(TAG, "addMapInteractionEventListener", e); } } // Cancel button exits the app public void onClickCancel(View view) { finish(); } // The Zoom- button zooms out 20% each time it is pressed // The altitude is limited to 100,000 km public void onClickZoomOut(View view) { ICamera camera = map.getCamera(); double initAltitude = camera.getAltitude(); if (initAltitude <= 1e8 / 1.2) { initAltitude *= 1.2; camera.setAltitude(initAltitude); camera.apply(false); Log.i(TAG, "camera altitude " + initAltitude + " latitude " + camera.getLatitude() + " longitude " + camera.getLongitude()); } else { Toast.makeText(CustomActivity.this, "Can't zoom out any more, altitude " + initAltitude, Toast.LENGTH_LONG).show(); } } // The Zoom+ button zooms 20% each time it is pressed // The altitude is limited to 1 km public void onClickZoomIn(View view) 
{ ICamera camera = map.getCamera(); double initAltitude = camera.getAltitude(); if (initAltitude >= 1.2) { initAltitude /= 1.2; camera.setAltitude(initAltitude); camera.apply(false); Log.i(TAG, "camera altitude " + initAltitude + " latitude " + camera.getLatitude() + " longitude " + camera.getLongitude()); } else { Toast.makeText(CustomActivity.this, "Can't zoom in any more, altitude " + initAltitude, Toast.LENGTH_LONG).show(); } } // Pan left turns the camera left 5 degrees // each time the button is pressed public void onClickPanLeft(View v) { try { ICamera camera = map.getCamera(); double dPan = camera.getHeading(); dPan -= 5.0; if (dPan < 0.0) { dPan += 360.0; } camera.setHeading(dPan); camera.apply(false); } catch (Exception e) { e.printStackTrace(); } } // Pan right turns the camera right 5 degrees // each time the button is pressed public void onClickPanRight(View v) { try { ICamera camera = map.getCamera(); double dPan = camera.getHeading(); dPan += 5.0; if (dPan >= 360.0) { dPan -= 360.0; } camera.setHeading(dPan); camera.apply(false); } catch (Exception e) { e.printStackTrace(); } } // Tilt up another 5 degrees, within limits public void onClickTiltUp(View v) { try { ICamera camera = map.getCamera(); double dTilt = camera.getTilt(); if (dTilt <= 85.0) { dTilt += 5; camera.setTilt(dTilt); camera.apply(false); } else Toast.makeText(CustomActivity.this, "Can't tilt any higher", Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); } } // Tilt down another 5 degrees, within limits public void onClickTiltDown(View v) { try { ICamera camera = map.getCamera(); double dTilt = camera.getTilt(); if (dTilt >= -85.0) { dTilt -= 5; camera.setTilt(dTilt); camera.apply(false); } else Toast.makeText(CustomActivity.this, "Can't tilt any lower", Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); } } public void onClickRollCCW(View v) { try { ICamera camera = map.getCamera(); double dRoll = camera.getRoll(); if (dRoll >= -175.0) { 
dRoll -= 5; camera.setTilt(dRoll); camera.apply(false); } else Toast.makeText(CustomActivity.this, "Can't tilt any lower", Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); } } public void onClickRollCW(View v) { try { ICamera camera = map.getCamera(); double dRoll = camera.getRoll(); if (dRoll <= 175.0) { dRoll += 5; camera.setTilt(dRoll); camera.apply(false); } else Toast.makeText(CustomActivity.this, "Can't tilt any lower", Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); } } public void onClickOK(View v) { try { url = dataBinding.UrlText.getText().toString(); String version = dataBinding.VersionText.getSelectedItem().toString(); WMSVersionEnum wmsVersion = WMSVersionEnum.valueOf(version); String altModeString = dataBinding.AltitudeMode.getSelectedItem().toString(); IGeoAltitudeMode.AltitudeMode altMode = null; switch (altModeString) { case "CLAMP TO GROUND": altMode = IGeoAltitudeMode.AltitudeMode.CLAMP_TO_GROUND; break; case "RELATIVE TO GROUND": altMode = IGeoAltitudeMode.AltitudeMode.RELATIVE_TO_GROUND; break; default: altMode = IGeoAltitudeMode.AltitudeMode.ABSOLUTE; break; } layer = dataBinding.LayerText.getText().toString(); ArrayList<String> layers = new ArrayList<>(); layers.add(layer); wmsService = new WMS(url + "wms", wmsVersion, "image/png", true, layers); String resolution = dataBinding.ResolutionText.getText().toString(); wmsService.setLayerResolution(Double.valueOf(resolution)); if (wmsService != null) { map.addMapService(wmsService); if (altMode != altitudeMode) { camera.setAltitudeMode(altMode); altitudeMode = altMode; camera.apply(false); } if(!(dataBinding.addWCS.isEnabled() || dataBinding.removeWCS.isEnabled())){ dataBinding.addWCS.setEnabled(true); } } else { Log.i(TAG, "Got null WMS service"); } } catch (Exception e) { e.printStackTrace(); } } public void onClickAddWCS(View v){ try { try { url = dataBinding.UrlText.getText().toString(); layer = dataBinding.LayerText.getText().toString(); 
wcsService = new WCS(url + "wcs", layer); } catch (MalformedURLException e) { e.printStackTrace(); } Log.i(TAG, wcsService.toString()); map.addMapService(wcsService); ILookAt calculatedLookAt = CameraUtility.setupLookAt(28, 87, 9000, 27.9878, 86.9250, 8848); map.setLookAt(calculatedLookAt, false); dataBinding.removeWCS.setEnabled(true); dataBinding.addWCS.setEnabled(false); } catch (EMP_Exception ex) { } } public void onClickRemoveWCS(View v){ try { map.removeMapService(wcsService); dataBinding.removeWCS.setEnabled(false); dataBinding.addWCS.setEnabled(true); } catch (EMP_Exception ex) { } } public void onClickAddWMTS(View v) { EditText urlText = (EditText) findViewById(R.id.UrlText); EditText layerName = (EditText) findViewById(R.id.LayerText); try { String url = urlText.getText().toString(); String layer = layerName.getText().toString(); ArrayList<String> layers = new ArrayList<>(); layers.add(layer); wmtsService = new WMTS( url, null, null, layers); map.addMapService(CustomActivity.this.wmtsService); ICamera camera = map.getCamera(); camera.setLatitude(64.27); camera.setLongitude(10.12); camera.setAltitude(225000); camera.apply(false); if (wmtsService != null) { if (wmtsService != oldWMTSService) { if (oldWMTSService != null) map.removeMapService(oldWMTSService); else Log.i(TAG, "No previous WMTS service"); map.addMapService(wmtsService); oldWMTSService = wmtsService; } else { Log.i(TAG, "Layer unchanged"); } } else { Log.i(TAG, "Got null WMTS service"); } } catch (Exception e) { e.printStackTrace(); } } public void onClickRemoveWMTS(View v) { try { map.removeMapService(oldWMTSService); oldWMTSService = null; } catch (Exception e) { e.printStackTrace(); } } public void onClickAddGeopackage(View v) { EditText geopackage = (EditText) findViewById(R.id.LayerText); try { geoPackage = new GeoPackage("File://" + geopackage.getText().toString()); CustomActivity.this.map.addMapService(geoPackage); ICamera camera = CustomActivity.this.map.getCamera(); // Place the camera 
directly over the GeoPackage image. camera.setLatitude(39.54795); camera.setLongitude(-76.16334); camera.setAltitude(2580); camera.apply(true); } catch (Exception e) { e.printStackTrace(); } } public void onClickRemoveGeopackage(View v){ try { CustomActivity.this.map.removeMapService(geoPackage); } catch (Exception e) { e.printStackTrace(); } } public void onClickAddLoS(View v) { try { map.addMapService(wcsService); Thread.sleep(1000); EmpGeoPosition position = new EmpGeoPosition(46.230, -122.190, 2500.0); EmpGeoColor visibleAttr = new EmpGeoColor(0.5d, 0, 25, 0); EmpGeoColor occludeAttr = new EmpGeoColor(0.8d, 25, 25, 25); double range = 10000.0d; los = new LineOfSight(position, range, visibleAttr, occludeAttr); map.addMapService(los); LookAt lookAt = new LookAt(46.230, -122.190, 500, IGeoAltitudeMode.AltitudeMode.ABSOLUTE); lookAt.setRange(1.5e4); /*range*/ lookAt.setHeading(45.0); /*heading*/ lookAt.setTilt(70.0); /*tilt*/ /*0 roll*/ ; map.setLookAt(lookAt, false); dataBinding.addLoS.setEnabled(false); dataBinding.removeLoS.setEnabled(true); } catch (Exception e) { e.printStackTrace(); } } public void onClickRemoveLoS(View V){ try { map.removeMapService(los); dataBinding.addLoS.setEnabled(true); dataBinding.removeLoS.setEnabled(false); } catch (Exception e) { e.printStackTrace(); } } }
apache-2.0
rtomyj/Animus
Diary/src/com/Settings/NotifTimePicker.java
632
package com.Settings; import android.content.Context; import android.preference.DialogPreference; import android.util.AttributeSet; import com.rtomyj.Diary.R; public class NotifTimePicker extends DialogPreference{ public NotifTimePicker(Context context, AttributeSet attrs) { super(context, attrs); setPersistent(false); setDialogLayoutResource(R.layout.time_picker); setPositiveButtonText(android.R.string.ok); setNegativeButtonText(android.R.string.cancel); setTitle("Change Time of Notification."); setKey("TimePicker"); setDialogIcon(null); } }
apache-2.0
Pyknic/SocialPhotoNetworkClient
src/main/java/com/speedment/examples/social/util/LayoutUtil.java
1785
/** * * Copyright (c) 2006-2015, Speedment, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); You may not * use this file except in compliance with the License. You may obtain a copy of * the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.speedment.examples.social.util; import javafx.scene.Parent; import javafx.scene.layout.Region; /** * * @author Emil Forslund */ public class LayoutUtil { public static void centerInParent(Region child) { final Parent parent = child.getParent(); if (parent instanceof Region) { final Region parentRegion = (Region) parent; child.layoutXProperty().bind( parentRegion.widthProperty().divide(2).subtract( child.widthProperty().divide(2) ) ); child.layoutYProperty().bind( parentRegion.heightProperty().divide(2).subtract( child.heightProperty().divide(2) ) ); } else { throw new UnsupportedOperationException("Parent " + parent + " is not a Region."); } } public static void fillParent(Region child) { final Parent parent = child.getParent(); if (parent instanceof Region) { final Region parentRegion = (Region) parent; child.prefWidthProperty().bind(parentRegion.widthProperty()); child.prefHeightProperty().bind(parentRegion.heightProperty()); } else { throw new UnsupportedOperationException("Parent " + parent + " is not a Region."); } } }
apache-2.0
Afrozaar/wp-api-v2-client-java
src/main/java/com/afrozaar/wordpress/wpapi/v2/model/Page.java
8644
package com.afrozaar.wordpress.wpapi.v2.model; import com.google.common.collect.ImmutableMap; import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import javax.annotation.Generated; import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.function.BiConsumer; @JsonInclude(JsonInclude.Include.NON_NULL) @Generated("org.jsonschema2pojo") @JsonPropertyOrder({ "_links", "author", "comment_status", "content", "date", "date_gmt", "excerpt", "featured_media", "guid", "id", "link", "menu_order", "modified", "modified_gmt", "parent", "password", "ping_status", "slug", "status", "template", "title", "type" }) public class Page { @JsonProperty("_links") private com.afrozaar.wordpress.wpapi.v2.model.Links Links; @JsonProperty("author") private Long author; @JsonProperty("comment_status") private String commentStatus; @JsonProperty("content") private Content content; @JsonProperty("date") private String date; @JsonProperty("date_gmt") private String dateGmt; @JsonProperty("excerpt") private Excerpt excerpt; @JsonProperty("featured_media") private Long featuredMedia; @JsonProperty("guid") private Guid guid; @JsonProperty("id") private Long id; @JsonProperty("link") private String link; @JsonProperty("menu_order") private Long menuOrder; @JsonProperty("modified") private String modified; @JsonProperty("modified_gmt") private String modifiedGmt; @JsonProperty("parent") private Long parent; @JsonProperty("password") private String password; @JsonProperty("ping_status") private String pingStatus; @JsonProperty("slug") private String slug; @JsonProperty("status") private String status; @JsonProperty("template") private String template; 
@JsonProperty("title") private Title title; @JsonProperty("type") private String type; @JsonIgnore private Map<String, Object> additionalProperties = new HashMap<String, Object>(); @JsonProperty("_links") public com.afrozaar.wordpress.wpapi.v2.model.Links getLinks() { return Links; } @JsonProperty("_links") public void setLinks(com.afrozaar.wordpress.wpapi.v2.model.Links Links) { this.Links = Links; } @JsonProperty("author") public Long getAuthor() { return author; } @JsonProperty("author") public void setAuthor(Long author) { this.author = author; } @JsonProperty("comment_status") public String getCommentStatus() { return commentStatus; } @JsonProperty("comment_status") public void setCommentStatus(String commentStatus) { this.commentStatus = commentStatus; } @JsonProperty("content") public Content getContent() { return content; } @JsonProperty("content") public void setContent(Content content) { this.content = content; } @JsonProperty("date") public String getDate() { return date; } @JsonProperty("date") public void setDate(String date) { this.date = date; } @JsonProperty("date_gmt") public String getDateGmt() { return dateGmt; } @JsonProperty("date_gmt") public void setDateGmt(String dateGmt) { this.dateGmt = dateGmt; } @JsonProperty("excerpt") public Excerpt getExcerpt() { return excerpt; } @JsonProperty("excerpt") public void setExcerpt(Excerpt excerpt) { this.excerpt = excerpt; } @JsonProperty("featured_media") public Long getFeaturedMedia() { return featuredMedia; } @JsonProperty("featured_media") public void setFeaturedMedia(Long featuredMedia) { this.featuredMedia = featuredMedia; } @JsonProperty("guid") public Guid getGuid() { return guid; } @JsonProperty("guid") public void setGuid(Guid guid) { this.guid = guid; } @JsonProperty("id") public Long getId() { return id; } @JsonProperty("id") public void setId(Long id) { this.id = id; } @JsonProperty("link") public String getLink() { return link; } @JsonProperty("link") public void setLink(String link) { 
this.link = link; } @JsonProperty("menu_order") public Long getMenuOrder() { return menuOrder; } @JsonProperty("menu_order") public void setMenuOrder(Long menuOrder) { this.menuOrder = menuOrder; } @JsonProperty("modified") public String getModified() { return modified; } @JsonProperty("modified") public void setModified(String modified) { this.modified = modified; } @JsonProperty("modified_gmt") public String getModifiedGmt() { return modifiedGmt; } @JsonProperty("modified_gmt") public void setModifiedGmt(String modifiedGmt) { this.modifiedGmt = modifiedGmt; } @JsonProperty("parent") public Long getParent() { return parent; } @JsonProperty("parent") public void setParent(Long parent) { this.parent = parent; } @JsonProperty("password") public String getPassword() { return password; } @JsonProperty("password") public void setPassword(String password) { this.password = password; } @JsonProperty("ping_status") public String getPingStatus() { return pingStatus; } @JsonProperty("ping_status") public void setPingStatus(String pingStatus) { this.pingStatus = pingStatus; } @JsonProperty("slug") public String getSlug() { return slug; } @JsonProperty("slug") public void setSlug(String slug) { this.slug = slug; } @JsonProperty("status") public String getStatus() { return status; } @JsonProperty("status") public void setStatus(String status) { this.status = status; } @JsonProperty("template") public String getTemplate() { return template; } @JsonProperty("template") public void setTemplate(String template) { this.template = template; } @JsonProperty("title") public Title getTitle() { return title; } @JsonProperty("title") public void setTitle(Title title) { this.title = title; } @JsonProperty("type") public String getType() { return type; } @JsonProperty("type") public void setType(String type) { this.type = type; } @JsonAnyGetter public Map<String, Object> getAdditionalProperties() { return this.additionalProperties; } @JsonAnySetter public void setAdditionalProperty(String 
name, Object value) { this.additionalProperties.put(name, value); } public Map<String, Object> asMap() { final ImmutableMap.Builder<String, Object> builder = new ImmutableMap.Builder<>(); BiConsumer<String, Object> c = (index, value) -> Optional.ofNullable(value).ifPresent(val -> builder.put(index, val)); c.accept("author", author); c.accept("comment_status", commentStatus); c.accept("content", Objects.nonNull(content) ? content.getRaw() : null); c.accept("date", date); c.accept("date_gmt", dateGmt); c.accept("excerpt", Objects.nonNull(excerpt) ? excerpt.getRaw() : null); c.accept("featured_media", featuredMedia); c.accept("guid", Objects.nonNull(guid) ? guid.getRaw() : null); c.accept("id", id); c.accept("link", link); c.accept("menu_order", menuOrder); c.accept("modified", modified); c.accept("modified_gmt", modifiedGmt); c.accept("parent", parent); c.accept("password", password); c.accept("ping_status", pingStatus); c.accept("slug", slug); c.accept("status", status); c.accept("template", template); c.accept("title", Objects.nonNull(title) ? title.getRaw() : null); c.accept("type", type); return builder.build(); } }
apache-2.0
sguilhen/wildfly-elytron
src/main/java/org/wildfly/security/mechanism/scram/ScramServerErrorCode.java
4278
/* * JBoss, Home of Professional Open Source. * Copyright 2015 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.security.mechanism.scram; import java.nio.charset.StandardCharsets; import java.util.EnumSet; import java.util.Locale; /** * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a> */ public enum ScramServerErrorCode { INVALID_ENCODING, EXTENSIONS_NOT_SUPPORTED, INVALID_PROOF, CHANNEL_BINDINGS_DONT_MATCH, SERVER_DOES_NOT_SUPPORT_CHANNEL_BINDING, CHANNEL_BINDING_NOT_SUPPORTED, UNSUPPORTED_CHANNEL_BINDING_TYPE, UNKNOWN_USER, INVALID_USERNAME_ENCODING, NO_RESOURCES, OTHER_ERROR, ; private final String text; private final byte[] messageBytes; ScramServerErrorCode() { text = name().replace('_', '-').toLowerCase(Locale.US); final int length = text.length(); byte[] msg = new byte[length + 2]; msg[0] = 'e'; msg[1] = '='; System.arraycopy(text.getBytes(StandardCharsets.UTF_8), 0, msg, 2, length); messageBytes = msg; } public String getText() { return text; } public byte[] getMessageBytes() { return messageBytes.clone(); } byte[] getRawMessageBytes() { return messageBytes; } public static ScramServerErrorCode fromErrorString(String value) { try { return valueOf(value.replace('-', '_').toUpperCase(Locale.US)); } catch (IllegalArgumentException ignored) { return OTHER_ERROR; } } private static final int fullSize = values().length; /** * Determine whether the given set is 
fully populated (or "full"), meaning it contains all possible values. * * @param set the set * * @return {@code true} if the set is full, {@code false} otherwise */ public static boolean isFull(final EnumSet<ScramServerErrorCode> set) { return set != null && set.size() == fullSize; } /** * Determine whether this instance is equal to one of the given instances. * * @param v1 the first instance * * @return {@code true} if one of the instances matches this one, {@code false} otherwise */ public boolean in(final ScramServerErrorCode v1) { return this == v1; } /** * Determine whether this instance is equal to one of the given instances. * * @param v1 the first instance * @param v2 the second instance * * @return {@code true} if one of the instances matches this one, {@code false} otherwise */ public boolean in(final ScramServerErrorCode v1, final ScramServerErrorCode v2) { return this == v1 || this == v2; } /** * Determine whether this instance is equal to one of the given instances. * * @param v1 the first instance * @param v2 the second instance * @param v3 the third instance * * @return {@code true} if one of the instances matches this one, {@code false} otherwise */ public boolean in(final ScramServerErrorCode v1, final ScramServerErrorCode v2, final ScramServerErrorCode v3) { return this == v1 || this == v2 || this == v3; } /** * Determine whether this instance is equal to one of the given instances. * * @param values the possible values * * @return {@code true} if one of the instances matches this one, {@code false} otherwise */ public boolean in(final ScramServerErrorCode... values) { if (values != null) for (ScramServerErrorCode value : values) { if (this == value) return true; } return false; } }
apache-2.0
joewoo999/selenium_webuitest
src/main/java/com/github/joseph/core/page/pageObject/commands/actions/RightClick.java
669
package com.github.joseph.core.page.pageObject.commands.actions; import com.github.joseph.core.driverContext.DriverContext; import com.github.joseph.core.page.pageObject.function.ActionFunction; import org.openqa.selenium.support.pagefactory.ElementLocator; public class RightClick implements ActionFunction { private ElementLocator locator; @Override public void accept(ElementLocator locator) { this.locator = locator; DriverContext.getActions().contextClick(locator.findElement()).perform(); } @Override public String toString() { return String.format("(ActionFunction)Right click element:%s.", locator); } }
apache-2.0
googleapis/java-dataproc
google-cloud-dataproc/src/test/java/com/google/cloud/dataproc/v1/MockAutoscalingPolicyService.java
1657
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dataproc.v1; import com.google.api.core.BetaApi; import com.google.api.gax.grpc.testing.MockGrpcService; import com.google.protobuf.AbstractMessage; import io.grpc.ServerServiceDefinition; import java.util.List; import javax.annotation.Generated; @BetaApi @Generated("by gapic-generator-java") public class MockAutoscalingPolicyService implements MockGrpcService { private final MockAutoscalingPolicyServiceImpl serviceImpl; public MockAutoscalingPolicyService() { serviceImpl = new MockAutoscalingPolicyServiceImpl(); } @Override public List<AbstractMessage> getRequests() { return serviceImpl.getRequests(); } @Override public void addResponse(AbstractMessage response) { serviceImpl.addResponse(response); } @Override public void addException(Exception exception) { serviceImpl.addException(exception); } @Override public ServerServiceDefinition getServiceDefinition() { return serviceImpl.bindService(); } @Override public void reset() { serviceImpl.reset(); } }
apache-2.0
charles-cooper/idylfin
src/com/opengamma/analytics/financial/equity/EquityInstrumentDefinition.java
608
/** * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.equity; import javax.time.calendar.ZonedDateTime; /** * * @param <T> Type of the EquityDerivative that the definition returns */ public interface EquityInstrumentDefinition<T extends Derivative> { T toDerivative(ZonedDateTime date, String... yieldCurveNames); <U, V> V accept(final EquityInstrumentDefinitionVisitor<U, V> visitor, final U data); <V> V accept(EquityInstrumentDefinitionVisitor<?, V> visitor); }
apache-2.0
Taller/sqlworkbench-plus
src/workbench/sql/wbcommands/WbListTriggers.java
2753
/* * WbListTriggers.java * * This file is part of SQL Workbench/J, http://www.sql-workbench.net * * Copyright 2002-2015, Thomas Kellerer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * To contact the author please send an email to: support@sql-workbench.net * */ package workbench.sql.wbcommands; import java.sql.SQLException; import workbench.console.ConsoleSettings; import workbench.console.RowDisplay; import workbench.resource.ResourceMgr; import workbench.db.TriggerReader; import workbench.db.TriggerReaderFactory; import workbench.storage.DataStore; import workbench.sql.SqlCommand; import workbench.sql.StatementRunnerResult; import workbench.util.ArgumentParser; import workbench.util.ArgumentType; /** * List all triggers defined for the current schema. * <br> * This is the same information as displayed in the DbExplorer's "Triggers" tab. 
* * @see workbench.db.TriggerReader#getTriggers(java.lang.String, java.lang.String) * @author Thomas Kellerer */ public class WbListTriggers extends SqlCommand { public static final String VERB = "WbListTriggers"; public WbListTriggers() { cmdLine = new ArgumentParser(); cmdLine.addArgument(CommonArgs.ARG_SCHEMA, ArgumentType.SchemaArgument); cmdLine.addArgument(CommonArgs.ARG_CATALOG, ArgumentType.CatalogArgument); } @Override public String getVerb() { return VERB; } @Override public StatementRunnerResult execute(String aSql) throws SQLException { StatementRunnerResult result = new StatementRunnerResult(); String options = getCommandLine(aSql); cmdLine.parse(options); ConsoleSettings.getInstance().setNextRowDisplay(RowDisplay.SingleLine); TriggerReader reader = TriggerReaderFactory.createReader(this.currentConnection); String schema = cmdLine.getValue(CommonArgs.ARG_SCHEMA, currentConnection.getCurrentSchema()); String catalog = cmdLine.getValue(CommonArgs.ARG_CATALOG, currentConnection.getMetadata().getCurrentCatalog()); DataStore ds = reader.getTriggers(catalog, schema); ds.setResultName(ResourceMgr.getString("TxtDbExplorerTriggers")); ds.setGeneratingSql(VERB + " " + options); result.addDataStore(ds); return result; } @Override public boolean isWbCommand() { return true; } }
apache-2.0
cristian-sulea/jatoo-image
src/main/java/jatoo/image/ImageCacheFile.java
4588
/* * Copyright (C) Cristian Sulea ( http://cristian.sulea.net ) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jatoo.image; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * A handy image file cache. * * @author <a href="http://cristian.sulea.net" rel="author">Cristian Sulea</a> * @version 1.1, March 13, 2018 */ public class ImageCacheFile extends ImageCache { /** The logger. */ private static final Log logger = LogFactory.getLog(ImageThumbnails.class); // private static final Pattern FILE_NAME_PATTERN = Pattern.compile("[^a-zA-Z0-9\\-]"); private static final ImageUtils.FORMAT FORMAT = ImageUtils.FORMAT.JPEG; private static final String EXTENSION = "." + ImageUtils.FORMAT.JPEG.name().toLowerCase(); /** The folder where cached image files are stored. */ private final File folder; /** * Constructs a new image cache in the provided folder. 
* * @param folder * the folder where files are stored */ public ImageCacheFile(final File folder) { // // if folder does not exists if (!folder.exists()) { // // creates the folder (and parent folders) if (!folder.mkdirs()) { throw new IllegalArgumentException(folder + " was not created"); } } // // if folder exists else { // // check if it's a folder if (!folder.isDirectory()) { throw new IllegalArgumentException(folder + " is not a folder"); } } // // done this.folder = folder; } @Override protected void addImpl(BufferedImage image, String key) { File file = createFileFromKey(key); try { ImageUtils.save(image, file, FORMAT); } catch (IOException e) { logger.error("failed to add the image: " + file, e); } } @Override protected BufferedImage getImpl(String key) { File file = createFileFromKey(key); BufferedImage image = null; // // if the file does not exists // there is no need to try a read if (file.exists()) { try { // // try to read the cached image image = ImageUtils.read(file); // // touch (will be used to know when this image was used last time) if (!file.setLastModified(System.currentTimeMillis())) { logger.info("set last-modified time on the cached image file " + file + " failed"); } // // return the cached image return image; } catch (IOException e) { logger.warn("failed to read the cached image file: " + file, e); } } // // null is an accepted value return null; } @Override protected boolean containsImpl(String key) { return createFileFromKey(key).exists(); } @Override protected void removeImpl(String key) { File file = createFileFromKey(key); if (!file.delete()) { throw new IllegalStateException(file + " cannot be deleted"); } } @Override protected void clearImpl() { File[] files = folder.listFiles(); if (files != null) { for (File file : files) { delete(file); } } } private File createFileFromKey(String key) { // return new File(folder, THUMBNAIL_FILE_NAME_PATTERN.matcher(key).replaceAll("_") + "." 
+ // ImageUtils.FORMAT.JPEG.name().toLowerCase()); return new File(folder, new File(key).getName() + "_" + key.hashCode() + EXTENSION); } private void delete(final File file) { if (file.isDirectory()) { File[] files = file.listFiles(); if (files != null) { for (File f : files) { delete(f); } } } if (!file.delete()) { throw new IllegalStateException(file + " cannot be deleted"); } } }
apache-2.0
bigshiliu/test
com.hutong.supersdk/src/main/java/com/hutong/supersdk/sdk/handler/iosyy/IOSYYPPSDK.java
5446
package com.hutong.supersdk.sdk.handler.iosyy;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.stereotype.Component;

import com.hutong.supersdk.common.model.JsonReqObj;
import com.hutong.supersdk.common.util.ParseJson;
import com.hutong.supersdk.sdk.IPayCallBackSDK;
import com.hutong.supersdk.sdk.IPaySuccessHandler;
import com.hutong.supersdk.sdk.IVerifyUserSDK;
import com.hutong.supersdk.sdk.modeltools.SDKVerifyRet;
import com.hutong.supersdk.sdk.modeltools.pp.PPData;
import com.hutong.supersdk.sdk.modeltools.pp.PPRes;
import com.hutong.supersdk.sdk.modeltools.pp.PPSDKInfo;
import com.hutong.supersdk.sdk.modeltools.pp.PPUserObj;
import com.hutong.supersdk.sdk.utils.Base64;
import com.hutong.supersdk.sdk.utils.MD5Util;
import com.hutong.supersdk.sdk.utils.pp.PPRSAEncrypt;

/**
 * Channel adapter for the PP Assistant (25PP) iOS SDK: verifies user session
 * tokens against the PP account service and validates payment callbacks by
 * recovering the RSA-protected "sign" payload and matching it against the
 * plain-text callback parameters.
 */
@Component("iOSYYPPSDK")
public class IOSYYPPSDK implements IVerifyUserSDK, IPayCallBackSDK {

    // identifier under which this channel handler is registered
    private static final String SDK_ID = "IOSYYPPTools";

    private final static Log logger = LogFactory.getLog(IOSYYPPSDK.class);

    @Override
    public String getSDKId() {
        return SDK_ID;
    }

    // channel configuration payloads deserialize into PPSDKInfo
    @Override
    public Class<?> getConfigClazz() {
        return PPSDKInfo.class;
    }

    /**
     * Handles the payment notification from the PP server.
     *
     * <p>The Base64 "sign" parameter is decoded and decrypted with the channel
     * public key; the JSON recovered from it must match every plain-text
     * callback parameter (app id, both order ids, amount, status) before the
     * order is credited via {@code callback.succeedPayment}. Returns the literal
     * body "success" or "fail" expected by the channel server.
     */
    @Override
    public Object payCallBack(IPaySuccessHandler callback, Map<String, String> paramMap,
            InputStream servletInputStream, String method, Object config) {
        String success = "success";
        String fail = "fail";
        try {
            PPSDKInfo configInfo = (PPSDKInfo) config;
            String platformOrderId = paramMap.get("order_id");
            String orderId = paramMap.get("billno");
            int status = Integer.parseInt(paramMap.get("status"));
            double amount = Double.parseDouble(paramMap.get("amount"));
            String appId = configInfo.getAppId();
            String pubKey = configInfo.getPubKey();
            String sign = paramMap.get("sign");
            // decode the Base64 sign and recover the signed JSON with the channel public key
            byte[] privateData = Base64.decode(sign);
            PPRSAEncrypt rsa = new PPRSAEncrypt();
            rsa.loadPublicKey(pubKey);
            String data = new String(rsa.decrypt(rsa.getPublicKey(), privateData));
            PPData dataObj = ParseJson.getJsonContentByStr(data, PPData.class);
            // every signed field must match the plain-text callback parameters
            // NOTE(review): amount is compared with == on double — exact binary match
            // required; consider a scaled integer/BigDecimal comparison. Confirm.
            if (dataObj == null
                    || !(dataObj.getApp_id().equals(appId)
                            && dataObj.getOrder_id().equals(platformOrderId)
                            && dataObj.getBillno().equals(orderId)
                            && dataObj.getAmount() == amount
                            && dataObj.getStatus() == status)) {
                logger.error("Check Order Failed. paraMap=" + paramMap.toString() + " decodeSign=" + data);
                return fail;
            }
            boolean iResult = callback.succeedPayment(orderId, platformOrderId, amount, "RMB", "",
                    ParseJson.encodeJson(paramMap));
            if (iResult)
                return success;
        } catch (Exception e) {
            logger.error("", e);
        }
        return fail;
    }

    /**
     * Verifies an SDK session token through PP's "account.verifySession" API
     * and, on success, fills the result with the channel account id and the
     * "creator" extra. Any failure (bad JSON, HTTP error, state code != 1)
     * yields a failed {@link SDKVerifyRet} with an error message.
     */
    @Override
    public SDKVerifyRet verifyUser(JsonReqObj input, Object config) {
        SDKVerifyRet ret = new SDKVerifyRet();
        try {
            // PP Assistant request object
            PPRes pp = new PPRes();
            // configuration parameters
            PPSDKInfo configInfo = (PPSDKInfo) config;
            String gameId = configInfo.getAppId();
            String appKey = configInfo.getAppKey();
            String token = input.getSDKAccessToken();
            String service = "account.verifySession";
            String encrypt = "MD5";
            // request id: current epoch seconds truncated to int
            Long time = System.currentTimeMillis() / 1000;
            pp.setId(time.intValue());
            pp.setService(service);
            pp.getData().put("sid", token);
            pp.getGame().put("gameId", Integer.parseInt(gameId));
            pp.setEncrypt(encrypt);
            // request signature: MD5 over "sid=<token><appKey>"
            String sign = MD5Util.MD5("sid=" + token + appKey);
            pp.setSign(sign);
            String json = ParseJson.encodeJson(pp);
            if (json == null)
                return ret.fail();
            String urlStr = configInfo.getRequestUrl();
            byte datas[] = json.getBytes();
            URL url = new URL(urlStr);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setDoOutput(true);
            conn.setRequestMethod("POST");
            conn.setRequestProperty("User_Agent", "25PP");
            conn.setRequestProperty("Connection", "close");
            // Content-Length is the binary length of the request body
            conn.setRequestProperty("Content-Length", String.valueOf(datas.length));
            conn.getOutputStream().write(datas);
            conn.getOutputStream().flush();
            conn.getOutputStream().close();
            InputStream ppServer = conn.getInputStream();
            String response = new String(readStream(ppServer));
            PPUserObj retObj = ParseJson.getJsonContentByStr(response, PPUserObj.class);
            // state.code == 1 marks a valid session
            if (retObj == null || 1 != Integer.parseInt(String.valueOf(retObj.getState().get("code")))) {
                ret.fail();
                ret.setErrorMsg(this.getSDKId() + " verifyUser Error !");
                return ret;
            }
            ret.setSdkUid(retObj.getData().get("accountId"));
            ret.setExtra("creator", retObj.getData().get("creator"));
            ret.setSdkAccessToken(token);
            return ret.success();
        } catch (Exception e) {
            logger.error("", e);
        }
        ret.setErrorMsg(this.getSDKId() + " verifyUser Error !");
        return ret.fail();
    }

    /**
     * Reads a stream fully into a byte array.
     *
     * @param inStream the stream to drain (not closed by this method)
     * @return the bytes read
     * @throws Exception on any I/O failure
     */
    private static byte[] readStream(InputStream inStream) throws Exception {
        ByteArrayOutputStream outSteam = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int len;
        while ((len = inStream.read(buffer)) != -1) {
            outSteam.write(buffer, 0, len);
        }
        outSteam.close();
        return outSteam.toByteArray();
    }
}
apache-2.0
krasa/StringManipulation
test/osmedile/intellij/stringmanip/escaping/normalize/NormalizationTypeTest.java
560
package osmedile.intellij.stringmanip.escaping.normalize;

import org.junit.Test;
import osmedile.intellij.stringmanip.escaping.DiacriticsToAsciiAction;

import static org.junit.Assert.assertEquals;

/**
 * Documents the difference between the two normalizers: both strip plain
 * accents, but only {@link DiacriticsToAsciiAction#toPlain} expands the
 * ligature, while {@code STRIP_ACCENTS} leaves it unchanged.
 */
public class NormalizationTypeTest {

	@Test
	public void normalize() {
		final String accented = "éclair";
		final String ligature = "Œ";

		// both normalizers turn the accented word into plain ASCII
		assertEquals("eclair", NormalizationType.STRIP_ACCENTS.normalize(accented));
		assertEquals("eclair", DiacriticsToAsciiAction.toPlain(accented));

		// the ASCII action expands the ligature; STRIP_ACCENTS keeps it as-is
		assertEquals("OE", DiacriticsToAsciiAction.toPlain(ligature));
		assertEquals(ligature, NormalizationType.STRIP_ACCENTS.normalize(ligature));
	}
}
apache-2.0
18380460383/eshare
app/src/main/java/com/kzmen/sczxjf/ui/fragment/personal/CMenuFragment.java
11471
package com.kzmen.sczxjf.ui.fragment.personal;

import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.Nullable;
import android.text.SpannableStringBuilder;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;

import com.bumptech.glide.Glide;
import com.kzmen.sczxjf.AppContext;
import com.kzmen.sczxjf.Constants;
import com.kzmen.sczxjf.R;
import com.kzmen.sczxjf.UIManager;
import com.kzmen.sczxjf.bean.kzbean.UserBean;
import com.kzmen.sczxjf.ui.activity.kzmessage.MainTabActivity;
import com.kzmen.sczxjf.ui.activity.kzmessage.PersonMessActivity;
import com.kzmen.sczxjf.ui.activity.menu.FriendOfmineAcitivty;
import com.kzmen.sczxjf.ui.activity.menu.MyAskActivity;
import com.kzmen.sczxjf.ui.activity.menu.MyCollectionAcitivity;
import com.kzmen.sczxjf.ui.activity.menu.MyIntegralActivity;
import com.kzmen.sczxjf.ui.activity.menu.MyPackageAcitivity;
import com.kzmen.sczxjf.ui.activity.menu.ShopOfIntegralActivity;
import com.kzmen.sczxjf.ui.activity.menu.SpecialPowerActivity;
import com.kzmen.sczxjf.ui.fragment.basic.SuperFragment;
import com.kzmen.sczxjf.util.TextViewUtil;
import com.kzmen.sczxjf.util.glide.GlideCircleTransform;
import com.kzmen.sczxjf.utils.AppUtils;
import com.kzmen.sczxjf.utils.BitmapUtils;

import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;

/**
 * Author: Yang Cao
 * Date: 2016/4/12
 * Description: side-menu (drawer) fragment for the personal-user side of the
 * app — shows the avatar, user name, points/balance summary, and routes menu
 * taps to the matching activities. Views are bound via ButterKnife.
 */
public class CMenuFragment extends SuperFragment {

    // ---- views injected by ButterKnife from R.layout.fragment_menu ----
    @InjectView(R.id.c_menu_user_head_iv)
    ImageView cMenuUserHeadIv;
    @InjectView(R.id.c_menu_user_name_tv)
    TextView cMenuUserNameTv;
    @InjectView(R.id.c_menu_attestation_mark)
    TextView cMenuAttestationMark;
    @InjectView(R.id.c_menu_user_landing_num_tv)
    TextView cMenuUserLandingNumTv;
    @InjectView(R.id.c_menu_integral)
    TextView cMenuIntegral;
    @InjectView(R.id.c_menu_balance)
    TextView cMenuBalance;
    @InjectView(R.id.c_menu_caifu)
    LinearLayout cMenuCaifu;
    @InjectView(R.id.c_menu_collect_onc)
    RelativeLayout cMenuCollect;
    @InjectView(R.id.c_menu_friend_onc)
    RelativeLayout cMenuFriend;
    @InjectView(R.id.c_menu_activity_onc)
    RelativeLayout cMenuActivity;
    @InjectView(R.id.c_menu_credits_exchange_onc)
    RelativeLayout cMenuCreditsExchange;
    @InjectView(R.id.c_menu_creative_collection_rl)
    RelativeLayout cMenuCreativeCollection;
    @InjectView(R.id.c_menu_setting_onc)
    LinearLayout cMenuSetting;
    @InjectView(R.id.c_menu_feedback_onc)
    LinearLayout cMenuFeedback;
    @InjectView(R.id.ll_package)
    LinearLayout ll_package;
    @InjectView(R.id.ll_jifen)
    LinearLayout ll_jifen;
    @InjectView(R.id.c_menu_attestation_mark_for_e)
    TextView cMenuAttestationMarkForE;
    @InjectView(R.id.c_menu_attestation_mark_for_m)
    TextView cMenuAttestationMarkForM;
    @InjectView(R.id.iv_close)
    ImageView iv_close;
    @InjectView(R.id.tv_jifen)
    TextView tvJifen;
    @InjectView(R.id.tv_package)
    TextView tvPackage;

    // guard flag for the (currently commented-out) balance request
    private boolean DOINGGETBALANCE;
    // optional callback fired after any menu item is handled
    private MenuBack menuBack;
    private View view;
    // receives Constants.FRAGMENT_MONEY broadcasts; registered in setRecriver()
    private BroadcastReceiver bannerReceiver;

    public void setMenuBack(MenuBack menuBack) {
        this.menuBack = menuBack;
    }

    /**
     * Inflates the menu layout, binds views, registers the money broadcast
     * receiver, and — when a user is logged in (non-empty uid) — fills the
     * header with the current user's data.
     */
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        view = setContentView(inflater, container, R.layout.fragment_menu);
        ButterKnife.inject(this, view);
        setRecriver();
        AppContext instance = AppContext.getInstance();
        if (!TextUtils.isEmpty(instance.getUserLogin().getUid())) {
            setUserInfo();
            //getBanner();
            setDatauser();
        }
        return view;
    }

    // registers bannerReceiver for FRAGMENT_MONEY broadcasts (unregistered in onDestroy)
    private void setRecriver() {
        bannerReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                //getBanner();
            }
        };
        IntentFilter filter = new IntentFilter();
        filter.addAction(Constants.FRAGMENT_MONEY);
        getContext().registerReceiver(bannerReceiver, filter);
    }

    /**
     * Single click dispatcher for every tappable menu row; after the row is
     * handled, notifies {@link MenuBack#startActivity()} if a callback is set
     * (e.g. so the host can close the drawer).
     */
    @OnClick({R.id.c_menu_user_head_iv, R.id.c_menu_collect_onc, /*R.id.c_menu_order_onc,*/
            R.id.c_menu_friend_onc, R.id.c_menu_activity_onc, R.id.c_menu_balance,
            R.id.c_menu_credits_exchange_onc, R.id.c_menu_setting_onc, R.id.c_menu_feedback_onc,
            R.id.c_menu_creative_collection_rl, R.id.iv_close, R.id.ll_package, R.id.ll_jifen})
    public void Listener(View view) {
        Intent intent = null;
        switch (view.getId()) {
            case R.id.c_menu_user_head_iv:
                //TODO avatar tapped -> open the personal profile screen
                /* if (!AppContext.getInstance().getPersonageOnLine()) { //TODO 登陆 Intent intent = new Intent(getContext(), LoginActivity.class); startActivityForResult(intent, 2); } else { UIManager.showPersonInfoActivity((Activity) getContext()); Log.i("info", "跳"); }*/
                intent = new Intent(getContext(), PersonMessActivity.class);
                startActivity(intent);
                break;
            case R.id.ll_jifen:
                // points row -> points detail screen
                intent = new Intent(getContext(), MyIntegralActivity.class);
                startActivity(intent);
                break;
            case R.id.ll_package:
                // wallet row -> package/balance screen
                intent = new Intent(getContext(), MyPackageAcitivity.class);
                startActivity(intent);
                break;
            case R.id.c_menu_collect_onc:
                //TODO favorites tapped
                Intent intent3 = new Intent(getContext(), MyCollectionAcitivity.class);
                getContext().startActivity(intent3);
                break;
            case R.id.c_menu_friend_onc:
                //TODO friends tapped
                getContext().startActivity(new Intent(getContext(), FriendOfmineAcitivty.class));
                break;
            case R.id.c_menu_activity_onc:
                //TODO activities tapped
                getContext().startActivity(new Intent(getContext(), SpecialPowerActivity.class));
                break;
            case R.id.c_menu_credits_exchange_onc:
                getContext().startActivity(new Intent(getContext(), ShopOfIntegralActivity.class));
                break;
            case R.id.c_menu_setting_onc:
                //TODO settings tapped
                UIManager.showSetActivity((Activity) getContext());
                break;
            case R.id.c_menu_feedback_onc:
                //TODO feedback tapped (integration currently disabled)
                // FeedbackAPI.openFeedbackActivity(getActivity());
                break;
            case R.id.c_menu_balance:
                //TODO forwarding-history tapped (screen currently disabled)
                /* Intent intent2 = new Intent(getContext(), RecordActivity.class); intent2.putExtra(RecordActivity.FLAG, 1); getContext().startActivity(intent2);*/
                break;
            case R.id.c_menu_creative_collection_rl:
                intent = new Intent(getContext(), MyAskActivity.class);
                // NOTE(review): this Intent is assigned to a fresh local in the original;
                // kept as written to stay byte-identical in behavior
                getContext().startActivity(intent);
                break;
            case R.id.iv_close:
                // close button just collapses the drawer in the host activity
                ((MainTabActivity) getActivity()).closeDraw();
                break;
        }
        if (menuBack != null) {
            menuBack.startActivity();
        }
    }

    /* private void getBanner() { if (!DOINGGETBALANCE) { DOINGGETBALANCE = true; EnWebUtil.getInstance().post(getActivity(), new String[]{"JiebianInfo", "findOneJiebianBalance"}, new RequestParams(), new EnWebUtil.AesListener2() { @Override public void onSuccess(String errorCode, String errorMsg, String data) { DOINGGETBALANCE = false; try { JSONObject jsonObject = new JSONObject(data); User_For_pe peUser = AppContext.getInstance().getPEUser(); peUser.setScore(jsonObject.getString("score")); peUser.setBalance(Double.valueOf(jsonObject.getString("balance"))); setDate(); } catch (Exception e) { e.printStackTrace(); } } @Override public void onFail(String result) { DOINGGETBALANCE = false; } }); } } */

    /** Resets the avatar to the round default placeholder, then refreshes the user rows. */
    public void setUserInfo() {
        cMenuUserHeadIv.setImageBitmap(BitmapUtils.toRoundBitmap(AppUtils.readBitMap(getContext(), R.drawable.image_def)));
        setDatauser();
    }

    // snapshot of the logged-in user, refreshed by setDatauser()
    private UserBean peUser;

    /**
     * Fills the header from the logged-in user: name (falls back to a masked
     * phone number, e.g. 138****1234), consecutive-login counter, avatar via
     * Glide, and the points/balance labels.
     */
    public void setDatauser() {
        //getBanner();
        peUser = AppContext.getInstance().getUserLogin();
        Log.e("tstmenu", peUser.toString());
        if (!TextUtils.isEmpty(peUser.getUsername())) {
            cMenuUserNameTv.setText(peUser.getUsername());
        } else if (!TextUtils.isEmpty(peUser.getPhone())) {
            // mask the middle four digits of an 11-digit phone number
            String userphone = peUser.getPhone();
            String s = userphone.substring(0, 3) + "****" + userphone.substring(7, 11);
            cMenuUserNameTv.setText(s);
        }
        // user-facing Chinese strings: "天" = days, "连续登陆:" = "consecutive logins:"
        SpannableStringBuilder colorText = TextViewUtil.getColorText(peUser.getHotnum() + "天", "#ff8307");
        SpannableStringBuilder str = new SpannableStringBuilder("连续登陆:");
        cMenuUserLandingNumTv.setText(str.append(colorText));
        setDate();
        Glide.with(getActivity()).load(peUser.getAvatar()).transform(new GlideCircleTransform(getActivity())).into(cMenuUserHeadIv);
        tvJifen.setText(peUser.getScore());
        tvPackage.setText(peUser.getBalance() + "");
    }

    // placeholder kept from the old balance flow; intentionally empty
    private void setDate() {
        // UserBean peUser1 = AppContext.getInstance().getUserLogin();
    }

    /** Delayed avatar update hook; the actual setImageBitmap call is currently disabled. */
    public void setHeadImage(final Bitmap bitmap) {
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                if (bitmap != null && cMenuUserHeadIv != null) {
                    // cMenuUserHeadIv.setImageBitmap(BitmapUtils.toRoundBitmap(bitmap));
                }
            }
        }, 3000);
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        // release ButterKnife view references to avoid leaking the view hierarchy
        ButterKnife.reset(this);
    }

    /** Notified after any menu row was handled (lets the host close the drawer). */
    public interface MenuBack {
        void startActivity();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        getContext().unregisterReceiver(bannerReceiver);
    }

    /** Refreshes the header after a successful login returned from the login screen. */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == 2 && resultCode == Activity.RESULT_OK && data.getIntExtra("loginstate", 0) == 1) {
            setUserInfo();
            //getBanner();
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    @Override
    protected void lazyLoad() {
    }
}
apache-2.0
vam-google/google-cloud-java
google-cloud-clients/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/SetServiceAccountInstanceHttpRequest.java
17769
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1; import com.google.api.core.BetaApi; import com.google.api.gax.httpjson.ApiMessage; import java.util.List; import java.util.Objects; import javax.annotation.Generated; import javax.annotation.Nullable; @Generated("by GAPIC") @BetaApi /** * Request object for method compute.instances.setServiceAccount. Sets the service account on the * instance. For more information, read Changing the service account and access scopes for an * instance. 
 *
 * <p>NOTE(review): this class is GAPIC-generated (see the annotation above) — regenerate
 * rather than hand-edit.
 */
public final class SetServiceAccountInstanceHttpRequest implements ApiMessage {
  private final String access_token;
  private final String callback;
  private final String fields;
  private final String instance;
  private final InstancesSetServiceAccountRequest instancesSetServiceAccountRequestResource;
  private final String key;
  private final String prettyPrint;
  private final String quotaUser;
  private final String requestId;
  private final String userIp;

  // all-null default instance; real instances are built through Builder
  private SetServiceAccountInstanceHttpRequest() {
    this.access_token = null;
    this.callback = null;
    this.fields = null;
    this.instance = null;
    this.instancesSetServiceAccountRequestResource = null;
    this.key = null;
    this.prettyPrint = null;
    this.quotaUser = null;
    this.requestId = null;
    this.userIp = null;
  }

  private SetServiceAccountInstanceHttpRequest(
      String access_token,
      String callback,
      String fields,
      String instance,
      InstancesSetServiceAccountRequest instancesSetServiceAccountRequestResource,
      String key,
      String prettyPrint,
      String quotaUser,
      String requestId,
      String userIp) {
    this.access_token = access_token;
    this.callback = callback;
    this.fields = fields;
    this.instance = instance;
    this.instancesSetServiceAccountRequestResource = instancesSetServiceAccountRequestResource;
    this.key = key;
    this.prettyPrint = prettyPrint;
    this.quotaUser = quotaUser;
    this.requestId = requestId;
    this.userIp = userIp;
  }

  // by-name field accessor (part of the ApiMessage contract); unknown names yield null
  @Override
  public Object getFieldValue(String fieldName) {
    if ("access_token".equals(fieldName)) {
      return access_token;
    }
    if ("callback".equals(fieldName)) {
      return callback;
    }
    if ("fields".equals(fieldName)) {
      return fields;
    }
    if ("instance".equals(fieldName)) {
      return instance;
    }
    if ("instancesSetServiceAccountRequestResource".equals(fieldName)) {
      return instancesSetServiceAccountRequestResource;
    }
    if ("key".equals(fieldName)) {
      return key;
    }
    if ("prettyPrint".equals(fieldName)) {
      return prettyPrint;
    }
    if ("quotaUser".equals(fieldName)) {
      return quotaUser;
    }
    if ("requestId".equals(fieldName)) {
      return requestId;
    }
    if ("userIp".equals(fieldName)) {
      return userIp;
    }
    return null;
  }

  @Nullable
  @Override
  public InstancesSetServiceAccountRequest getApiMessageRequestBody() {
    return instancesSetServiceAccountRequestResource;
  }

  @Nullable
  @Override
  /**
   * The fields that should be serialized (even if they have empty values). If the containing
   * message object has a non-null fieldmask, then all the fields in the field mask (and only those
   * fields in the field mask) will be serialized. If the containing object does not have a
   * fieldmask, then only non-empty fields will be serialized.
   */
  public List<String> getFieldMask() {
    return null;
  }

  /** OAuth 2.0 token for the current user. */
  public String getAccessToken() {
    return access_token;
  }

  /** Name of the JavaScript callback function that handles the response. */
  public String getCallback() {
    return callback;
  }

  /** Selector specifying a subset of fields to include in the response. */
  public String getFields() {
    return fields;
  }

  /**
   * Name of the instance resource to start. It must have the format
   * `{project}/zones/{zone}/instances/{instance}/setServiceAccount`. \`{instance}\` must start with
   * a letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`), &#42;
   * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent &#42; signs
   * (\`%\`). It must be between 3 and 255 characters in length, and it &#42; must not start with
   * \`"goog"\`.
   */
  public String getInstance() {
    return instance;
  }

  public InstancesSetServiceAccountRequest getInstancesSetServiceAccountRequestResource() {
    return instancesSetServiceAccountRequestResource;
  }

  /** API key. Required unless you provide an OAuth 2.0 token. */
  public String getKey() {
    return key;
  }

  /** Returns response with indentations and line breaks. */
  public String getPrettyPrint() {
    return prettyPrint;
  }

  /** Alternative to userIp. */
  public String getQuotaUser() {
    return quotaUser;
  }

  /**
   * An optional request ID to identify requests. Specify a unique request ID so that if you must
   * retry your request, the server will know to ignore the request if it has already been
   * completed.
   *
   * <p>For example, consider a situation where you make an initial request and the request times
   * out. If you make the request again with the same request ID, the server can check if original
   * operation with the same request ID was received, and if so, will ignore the second request.
   * This prevents clients from accidentally creating duplicate commitments.
   *
   * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
   * (00000000-0000-0000-0000-000000000000).
   */
  public String getRequestId() {
    return requestId;
  }

  /** IP address of the end user for whom the API call is being made. */
  public String getUserIp() {
    return userIp;
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(SetServiceAccountInstanceHttpRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  public static SetServiceAccountInstanceHttpRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // shared all-null sentinel returned by getDefaultInstance()
  private static final SetServiceAccountInstanceHttpRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new SetServiceAccountInstanceHttpRequest();
  }

  public static class Builder {
    private String access_token;
    private String callback;
    private String fields;
    private String instance;
    private InstancesSetServiceAccountRequest instancesSetServiceAccountRequestResource;
    private String key;
    private String prettyPrint;
    private String quotaUser;
    private String requestId;
    private String userIp;

    Builder() {}

    // copies only the non-null fields of other into this builder
    public Builder mergeFrom(SetServiceAccountInstanceHttpRequest other) {
      if (other == SetServiceAccountInstanceHttpRequest.getDefaultInstance()) return this;
      if (other.getAccessToken() != null) {
        this.access_token = other.access_token;
      }
      if (other.getCallback() != null) {
        this.callback = other.callback;
      }
      if (other.getFields() != null) {
        this.fields = other.fields;
      }
      if (other.getInstance() != null) {
        this.instance = other.instance;
      }
      if (other.getInstancesSetServiceAccountRequestResource() != null) {
        this.instancesSetServiceAccountRequestResource =
            other.instancesSetServiceAccountRequestResource;
      }
      if (other.getKey() != null) {
        this.key = other.key;
      }
      if (other.getPrettyPrint() != null) {
        this.prettyPrint = other.prettyPrint;
      }
      if (other.getQuotaUser() != null) {
        this.quotaUser = other.quotaUser;
      }
      if (other.getRequestId() != null) {
        this.requestId = other.requestId;
      }
      if (other.getUserIp() != null) {
        this.userIp = other.userIp;
      }
      return this;
    }

    Builder(SetServiceAccountInstanceHttpRequest source) {
      this.access_token = source.access_token;
      this.callback = source.callback;
      this.fields = source.fields;
      this.instance = source.instance;
      this.instancesSetServiceAccountRequestResource =
          source.instancesSetServiceAccountRequestResource;
      this.key = source.key;
      this.prettyPrint = source.prettyPrint;
      this.quotaUser = source.quotaUser;
      this.requestId = source.requestId;
      this.userIp = source.userIp;
    }

    /** OAuth 2.0 token for the current user. */
    public String getAccessToken() {
      return access_token;
    }

    /** OAuth 2.0 token for the current user. */
    public Builder setAccessToken(String access_token) {
      this.access_token = access_token;
      return this;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public String getCallback() {
      return callback;
    }

    /** Name of the JavaScript callback function that handles the response. */
    public Builder setCallback(String callback) {
      this.callback = callback;
      return this;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public String getFields() {
      return fields;
    }

    /** Selector specifying a subset of fields to include in the response. */
    public Builder setFields(String fields) {
      this.fields = fields;
      return this;
    }

    /**
     * Name of the instance resource to start. It must have the format
     * `{project}/zones/{zone}/instances/{instance}/setServiceAccount`. \`{instance}\` must start
     * with a letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`),
     * &#42; underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent &#42;
     * signs (\`%\`). It must be between 3 and 255 characters in length, and it &#42; must not start
     * with \`"goog"\`.
     */
    public String getInstance() {
      return instance;
    }

    /**
     * Name of the instance resource to start. It must have the format
     * `{project}/zones/{zone}/instances/{instance}/setServiceAccount`. \`{instance}\` must start
     * with a letter, and contain only letters (\`[A-Za-z]\`), numbers (\`[0-9]\`), dashes (\`-\`),
     * &#42; underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus (\`+\`) or percent &#42;
     * signs (\`%\`). It must be between 3 and 255 characters in length, and it &#42; must not start
     * with \`"goog"\`.
     */
    public Builder setInstance(String instance) {
      this.instance = instance;
      return this;
    }

    public InstancesSetServiceAccountRequest getInstancesSetServiceAccountRequestResource() {
      return instancesSetServiceAccountRequestResource;
    }

    public Builder setInstancesSetServiceAccountRequestResource(
        InstancesSetServiceAccountRequest instancesSetServiceAccountRequestResource) {
      this.instancesSetServiceAccountRequestResource = instancesSetServiceAccountRequestResource;
      return this;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public String getKey() {
      return key;
    }

    /** API key. Required unless you provide an OAuth 2.0 token. */
    public Builder setKey(String key) {
      this.key = key;
      return this;
    }

    /** Returns response with indentations and line breaks. */
    public String getPrettyPrint() {
      return prettyPrint;
    }

    /** Returns response with indentations and line breaks. */
    public Builder setPrettyPrint(String prettyPrint) {
      this.prettyPrint = prettyPrint;
      return this;
    }

    /** Alternative to userIp. */
    public String getQuotaUser() {
      return quotaUser;
    }

    /** Alternative to userIp. */
    public Builder setQuotaUser(String quotaUser) {
      this.quotaUser = quotaUser;
      return this;
    }

    /**
     * An optional request ID to identify requests. Specify a unique request ID so that if you must
     * retry your request, the server will know to ignore the request if it has already been
     * completed.
     *
     * <p>For example, consider a situation where you make an initial request and the request times
     * out. If you make the request again with the same request ID, the server can check if original
     * operation with the same request ID was received, and if so, will ignore the second request.
     * This prevents clients from accidentally creating duplicate commitments.
     *
     * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
     * (00000000-0000-0000-0000-000000000000).
     */
    public String getRequestId() {
      return requestId;
    }

    /**
     * An optional request ID to identify requests. Specify a unique request ID so that if you must
     * retry your request, the server will know to ignore the request if it has already been
     * completed.
     *
     * <p>For example, consider a situation where you make an initial request and the request times
     * out. If you make the request again with the same request ID, the server can check if original
     * operation with the same request ID was received, and if so, will ignore the second request.
     * This prevents clients from accidentally creating duplicate commitments.
     *
     * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
     * (00000000-0000-0000-0000-000000000000).
     */
    public Builder setRequestId(String requestId) {
      this.requestId = requestId;
      return this;
    }

    /** IP address of the end user for whom the API call is being made. */
    public String getUserIp() {
      return userIp;
    }

    /** IP address of the end user for whom the API call is being made. */
    public Builder setUserIp(String userIp) {
      this.userIp = userIp;
      return this;
    }

    // "instance" is the only required field; building without it fails fast
    public SetServiceAccountInstanceHttpRequest build() {
      String missing = "";
      if (instance == null) {
        missing += " instance";
      }
      if (!missing.isEmpty()) {
        throw new IllegalStateException("Missing required properties:" + missing);
      }
      return new SetServiceAccountInstanceHttpRequest(
          access_token,
          callback,
          fields,
          instance,
          instancesSetServiceAccountRequestResource,
          key,
          prettyPrint,
          quotaUser,
          requestId,
          userIp);
    }

    public Builder clone() {
      Builder newBuilder = new Builder();
      newBuilder.setAccessToken(this.access_token);
      newBuilder.setCallback(this.callback);
      newBuilder.setFields(this.fields);
      newBuilder.setInstance(this.instance);
      newBuilder.setInstancesSetServiceAccountRequestResource(
          this.instancesSetServiceAccountRequestResource);
      newBuilder.setKey(this.key);
      newBuilder.setPrettyPrint(this.prettyPrint);
      newBuilder.setQuotaUser(this.quotaUser);
      newBuilder.setRequestId(this.requestId);
      newBuilder.setUserIp(this.userIp);
      return newBuilder;
    }
  }

  @Override
  public String toString() {
    return "SetServiceAccountInstanceHttpRequest{"
        + "access_token="
        + access_token
        + ", "
        + "callback="
        + callback
        + ", "
        + "fields="
        + fields
        + ", "
        + "instance="
        + instance
        + ", "
        + "instancesSetServiceAccountRequestResource="
        + instancesSetServiceAccountRequestResource
        + ", "
        + "key="
        + key
        + ", "
        + "prettyPrint="
        + prettyPrint
        + ", "
        + "quotaUser="
        + quotaUser
        + ", "
        + "requestId="
        + requestId
        + ", "
        + "userIp="
        + userIp
        + "}";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof SetServiceAccountInstanceHttpRequest) {
      SetServiceAccountInstanceHttpRequest that = (SetServiceAccountInstanceHttpRequest) o;
      return Objects.equals(this.access_token, that.getAccessToken())
          && Objects.equals(this.callback, that.getCallback())
          && Objects.equals(this.fields, that.getFields())
          && Objects.equals(this.instance, that.getInstance())
          && Objects.equals(
              this.instancesSetServiceAccountRequestResource,
              that.getInstancesSetServiceAccountRequestResource())
          && Objects.equals(this.key, that.getKey())
          && Objects.equals(this.prettyPrint, that.getPrettyPrint())
          && Objects.equals(this.quotaUser, that.getQuotaUser())
          && Objects.equals(this.requestId, that.getRequestId())
          && Objects.equals(this.userIp, that.getUserIp());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        access_token,
        callback,
        fields,
        instance,
        instancesSetServiceAccountRequestResource,
        key,
        prettyPrint,
        quotaUser,
        requestId,
        userIp);
  }
}
apache-2.0
yuqirong/NewsPublish
src/com/cjlu/newspublish/services/impl/WeatherServiceImpl.java
3366
package com.cjlu.newspublish.services.impl;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.ParseException;
import java.util.List;

import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;

import org.dom4j.DocumentException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.cjlu.newspublish.daos.impl.WeatherDaoImpl;
import com.cjlu.newspublish.models.Weather;
import com.cjlu.newspublish.services.WeatherService;
import com.cjlu.newspublish.utils.DateUtils;
import com.cjlu.newspublish.utils.DomReadXmlUtils;

/**
 * Weather service implementation backed by the public SOAP weather endpoint
 * at webservice.webxml.com.cn. The raw XML response is streamed back to the
 * servlet client and simultaneously buffered so selected fields can be
 * extracted into the {@link Weather} model.
 */
@Service("weatherService")
public class WeatherServiceImpl extends BaseServiceImpl<Weather> implements WeatherService {

    @Autowired
    private WeatherDaoImpl weatherDao;

    /**
     * Fetches the weather for the county carried by {@code model}.
     *
     * @param model carries the province/city/county to query; returned
     *              (possibly replaced by a populated copy) to the caller
     * @param httpResponse servlet response; the raw SOAP XML is copied to
     *                     its output stream as a side effect
     * @return a new {@link Weather} populated from the SOAP response, or the
     *         original {@code model} unchanged if the remote call or the XML
     *         parse failed
     * @throws IOException    on network or stream failure
     * @throws ParseException if the date field cannot be parsed
     */
    @Override
    public Weather getWeather(Weather model, HttpServletResponse httpResponse)
            throws IOException, ParseException {
        List<String> strings = null;
        String area = model.getCounty();
        // Strip the trailing administrative suffix so the remote service
        // recognizes the bare area name. NOTE: the two literals below are
        // mis-encoded Chinese characters ("city"/"county" suffixes,
        // presumably GBK read as Latin-1); they are runtime data and are
        // preserved byte-for-byte from the original source.
        if (area.length() > 2) {
            if (area.endsWith("ÊÐ") || area.endsWith("ÏØ")) {
                area = area.substring(0, area.length() - 1);
            }
        }
        // SOAP 1.1 envelope for the getWeather operation (empty user id =
        // anonymous/free-tier access).
        String data = "<soap:Envelope xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xsd='http://www.w3.org/2001/XMLSchema' xmlns:soap='http://schemas.xmlsoap.org/soap/envelope/'>"
                + "<soap:Body><getWeather xmlns='http://WebXml.com.cn/'><theCityCode>"
                + area
                + "</theCityCode><theUserID>"
                + ""
                + "</theUserID> </getWeather></soap:Body></soap:Envelope>";
        String path = "http://webservice.webxml.com.cn/WebServices/WeatherWS.asmx";
        URL url = new URL(path);
        HttpURLConnection openConnection = (HttpURLConnection) url.openConnection();
        openConnection.setDoInput(true);
        openConnection.setDoOutput(true);
        openConnection.setRequestProperty("Content-Type",
                "text/xml;charset=utf-8");
        // FIX: the request stream was never closed in the original code;
        // close it in a finally block so the connection is not leaked.
        OutputStream outputStream = openConnection.getOutputStream();
        try {
            outputStream.write(data.getBytes("utf-8"));
            outputStream.flush();
        } finally {
            outputStream.close();
        }
        int responseCode = openConnection.getResponseCode();
        StringBuilder stringBuilder = new StringBuilder();
        if (responseCode == 200) {
            InputStream inputStream = openConnection.getInputStream();
            BufferedReader reader = new BufferedReader(new InputStreamReader(
                    inputStream, "utf-8"));
            ServletOutputStream os = httpResponse.getOutputStream();
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                    os, "utf-8"));
            try {
                // Tee the SOAP response: forward it to the servlet client
                // while buffering a copy for local XML extraction.
                char[] buffer = new char[1024];
                int length = 0;
                while ((length = reader.read(buffer)) > 0) {
                    writer.write(buffer, 0, length);
                    stringBuilder.append(buffer, 0, length);
                }
                writer.flush();
            } finally {
                // FIX: streams are now closed even if copying throws.
                writer.close();
                os.close();
                reader.close();
                inputStream.close();
            }
            try {
                strings = DomReadXmlUtils.readStringXml(stringBuilder
                        .toString());
            } catch (DocumentException e) {
                // Best-effort: keep the original log-and-continue behavior.
                e.printStackTrace();
            }
            // FIX: the original dereferenced "strings" unconditionally,
            // throwing a NullPointerException whenever the XML parse above
            // failed; now the unmodified model is returned instead.
            // Indices 3/7/8/12/13/17/18 follow the fixed field layout of the
            // getWeather response as flattened by DomReadXmlUtils —
            // presumably date, temperatures and conditions; confirm against
            // that utility if the remote schema changes.
            if (strings != null && strings.size() > 18) {
                model = new Weather(model.getProvince(), model.getCity(),
                        model.getCounty(), strings.get(7), strings.get(8),
                        strings.get(12), strings.get(13), strings.get(17),
                        strings.get(18), DateUtils.stringToDate(strings.get(3)));
            }
        }
        return model;
    }
}
apache-2.0
stapetro/mnk_javabasic
src/main/java/com/strategy/Strategy.java
92
package com.strategy;

/**
 * Strategy interface for pluggable sorting algorithms.
 *
 * <p>Implementations sort the supplied array of ints in place.
 */
public interface Strategy {

    /**
     * Sorts the given array in ascending order, in place.
     *
     * @param numbers the array to sort; modified directly
     */
    void sort(int[] numbers);
}
apache-2.0
JoelMarcey/buck
src/com/facebook/buck/cxx/TransitiveCxxPreprocessorInputCache.java
4094
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.cxx;

import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.util.concurrent.Parallelizer;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.util.concurrent.UncheckedExecutionException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;

/**
 * Memoizes, per {@link CxxPlatform}, the transitive C++ preprocessor input map of a single
 * {@link CxxPreprocessorDep}.
 */
public class TransitiveCxxPreprocessorInputCache {

  private final Cache<CxxPlatform, ImmutableSortedMap<BuildTarget, CxxPreprocessorInput>> cache =
      CacheBuilder.newBuilder().build();
  private final CxxPreprocessorDep preprocessorDep;

  public TransitiveCxxPreprocessorInputCache(CxxPreprocessorDep preprocessorDep) {
    this.preprocessorDep = preprocessorDep;
  }

  /**
   * Returns the memoized transitive preprocessor input map for {@code key}, computing and caching
   * it on first access.
   */
  public ImmutableMap<BuildTarget, CxxPreprocessorInput> getUnchecked(
      CxxPlatform key, ActionGraphBuilder graphBuilder) {
    try {
      return cache.get(
          key,
          () ->
              computeTransitiveCxxToPreprocessorInputMap(
                  key, preprocessorDep, true, graphBuilder, graphBuilder.getParallelizer()));
    } catch (ExecutionException e) {
      // The loader above throws only unchecked exceptions, so surface the original cause
      // without forcing callers to handle a checked ExecutionException.
      throw new UncheckedExecutionException(e.getCause());
    }
  }

  /**
   * Computes the transitive map directly, bypassing any instance cache; uses the graph builder's
   * own parallelizer.
   */
  public static ImmutableMap<BuildTarget, CxxPreprocessorInput>
      computeTransitiveCxxToPreprocessorInputMap(
          CxxPlatform key,
          CxxPreprocessorDep preprocessorDep,
          boolean includeDep,
          ActionGraphBuilder graphBuilder) {
    return computeTransitiveCxxToPreprocessorInputMap(
        key, preprocessorDep, includeDep, graphBuilder, graphBuilder.getParallelizer());
  }

  private static ImmutableSortedMap<BuildTarget, CxxPreprocessorInput>
      computeTransitiveCxxToPreprocessorInputMap(
          CxxPlatform key,
          CxxPreprocessorDep preprocessorDep,
          boolean includeDep,
          ActionGraphBuilder graphBuilder,
          Parallelizer parallelizer) {
    Map<BuildTarget, CxxPreprocessorInput> inputsByTarget = new HashMap<>();

    // Optionally seed the map with this dep's own (non-transitive) input.
    if (includeDep) {
      inputsByTarget.put(
          preprocessorDep.getBuildTarget(),
          preprocessorDep.getCxxPreprocessorInput(key, graphBuilder));
    }

    // Resolve each direct dep's transitive input (possibly in parallel) and merge them in.
    Collection<ImmutableMap<BuildTarget, CxxPreprocessorInput>> depInputMaps =
        parallelizer.maybeParallelizeTransform(
            ImmutableList.copyOf(preprocessorDep.getCxxPreprocessorDeps(key, graphBuilder)),
            dep -> dep.getTransitiveCxxPreprocessorInput(key, graphBuilder));
    for (ImmutableMap<BuildTarget, CxxPreprocessorInput> depInputMap : depInputMaps) {
      inputsByTarget.putAll(depInputMap);
    }

    // An ImmutableSortedMap is used deliberately:
    // 1. Memory efficiency: it is backed by two flat collections rather than one Entry object
    //    per mapping, as an ImmutableMap would be.
    // 2. It gives the result a definite, stable ordering.
    // 3. Callers mostly iterate rather than look up, so its binary-search lookup cost is fine.
    return ImmutableSortedMap.copyOf(inputsByTarget);
  }
}
apache-2.0
OpenEstate/OpenEstate-IO
OpenImmo/src/main/jaxb/org/openestate/io/openimmo/xml/Versteigerung.java
19363
package org.openestate.io.openimmo.xml; import java.io.Serializable; import java.math.BigDecimal; import java.util.Calendar; import javax.annotation.Generated; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import org.jvnet.jaxb2_commons.lang.CopyStrategy2; import org.jvnet.jaxb2_commons.lang.CopyTo2; import org.jvnet.jaxb2_commons.lang.Equals2; import org.jvnet.jaxb2_commons.lang.EqualsStrategy2; import org.jvnet.jaxb2_commons.lang.JAXBCopyStrategy; import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy; import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy; import org.jvnet.jaxb2_commons.lang.ToString2; import org.jvnet.jaxb2_commons.lang.ToStringStrategy2; import org.jvnet.jaxb2_commons.locator.ObjectLocator; import org.jvnet.jaxb2_commons.locator.util.LocatorUtils; /** * Java class for &lt;versteigerung&gt; element. 
* * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "zwangsversteigerung", "aktenzeichen", "zvtermin", "zusatztermin", "amtsgericht", "verkehrswert" }) @XmlRootElement(name = "versteigerung") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public class Versteigerung implements Serializable, Cloneable, CopyTo2, Equals2, ToString2 { @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") protected Boolean zwangsversteigerung; @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") protected String aktenzeichen; @XmlElement(type = String.class) @XmlJavaTypeAdapter(Adapter3 .class) @XmlSchemaType(name = "dateTime") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") protected Calendar zvtermin; @XmlElement(type = String.class) @XmlJavaTypeAdapter(Adapter3 .class) @XmlSchemaType(name = "dateTime") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") protected Calendar zusatztermin; @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") protected String amtsgericht; @XmlElement(type = String.class) @XmlJavaTypeAdapter(Adapter2 .class) @XmlSchemaType(name = "decimal") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") protected BigDecimal verkehrswert; /** * Gets the value of the zwangsversteigerung property. * * @return * possible object is * {@link Boolean } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public Boolean getZwangsversteigerung() { return zwangsversteigerung; } /** * Sets the value of the zwangsversteigerung property. 
* * @param value * allowed object is * {@link Boolean } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public void setZwangsversteigerung(Boolean value) { this.zwangsversteigerung = value; } /** * Gets the value of the aktenzeichen property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public String getAktenzeichen() { return aktenzeichen; } /** * Sets the value of the aktenzeichen property. * * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public void setAktenzeichen(String value) { this.aktenzeichen = value; } /** * Gets the value of the zvtermin property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public Calendar getZvtermin() { return zvtermin; } /** * Sets the value of the zvtermin property. * * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public void setZvtermin(Calendar value) { this.zvtermin = value; } /** * Gets the value of the zusatztermin property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public Calendar getZusatztermin() { return zusatztermin; } /** * Sets the value of the zusatztermin property. 
* * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public void setZusatztermin(Calendar value) { this.zusatztermin = value; } /** * Gets the value of the amtsgericht property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public String getAmtsgericht() { return amtsgericht; } /** * Sets the value of the amtsgericht property. * * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public void setAmtsgericht(String value) { this.amtsgericht = value; } /** * Gets the value of the verkehrswert property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public BigDecimal getVerkehrswert() { return verkehrswert; } /** * Sets the value of the verkehrswert property. 
* * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public void setVerkehrswert(BigDecimal value) { this.verkehrswert = value; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public String toString() { final ToStringStrategy2 strategy = JAXBToStringStrategy.INSTANCE2; final StringBuilder buffer = new StringBuilder(); append(null, buffer, strategy); return buffer.toString(); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) { strategy.appendStart(locator, this, buffer); appendFields(locator, buffer, strategy); strategy.appendEnd(locator, this, buffer); return buffer; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) { { Boolean theZwangsversteigerung; theZwangsversteigerung = this.getZwangsversteigerung(); strategy.appendField(locator, this, "zwangsversteigerung", buffer, theZwangsversteigerung, (this.zwangsversteigerung!= null)); } { String theAktenzeichen; theAktenzeichen = this.getAktenzeichen(); strategy.appendField(locator, this, "aktenzeichen", buffer, theAktenzeichen, (this.aktenzeichen!= null)); } { Calendar theZvtermin; theZvtermin = this.getZvtermin(); strategy.appendField(locator, this, "zvtermin", buffer, theZvtermin, (this.zvtermin!= null)); } { Calendar theZusatztermin; theZusatztermin = this.getZusatztermin(); strategy.appendField(locator, this, "zusatztermin", buffer, theZusatztermin, (this.zusatztermin!= null)); } { String theAmtsgericht; theAmtsgericht = this.getAmtsgericht(); strategy.appendField(locator, this, "amtsgericht", buffer, 
theAmtsgericht, (this.amtsgericht!= null)); } { BigDecimal theVerkehrswert; theVerkehrswert = this.getVerkehrswert(); strategy.appendField(locator, this, "verkehrswert", buffer, theVerkehrswert, (this.verkehrswert!= null)); } return buffer; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public Object clone() { return copyTo(createNewInstance()); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public Object copyTo(Object target) { final CopyStrategy2 strategy = JAXBCopyStrategy.INSTANCE2; return copyTo(null, target, strategy); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public Object copyTo(ObjectLocator locator, Object target, CopyStrategy2 strategy) { final Object draftCopy = ((target == null)?createNewInstance():target); if (draftCopy instanceof Versteigerung) { final Versteigerung copy = ((Versteigerung) draftCopy); { Boolean zwangsversteigerungShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.zwangsversteigerung!= null)); if (zwangsversteigerungShouldBeCopiedAndSet == Boolean.TRUE) { Boolean sourceZwangsversteigerung; sourceZwangsversteigerung = this.getZwangsversteigerung(); Boolean copyZwangsversteigerung = ((Boolean) strategy.copy(LocatorUtils.property(locator, "zwangsversteigerung", sourceZwangsversteigerung), sourceZwangsversteigerung, (this.zwangsversteigerung!= null))); copy.setZwangsversteigerung(copyZwangsversteigerung); } else { if (zwangsversteigerungShouldBeCopiedAndSet == Boolean.FALSE) { copy.zwangsversteigerung = null; } } } { Boolean aktenzeichenShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.aktenzeichen!= null)); if (aktenzeichenShouldBeCopiedAndSet == Boolean.TRUE) { String sourceAktenzeichen; sourceAktenzeichen = this.getAktenzeichen(); String copyAktenzeichen = ((String) 
strategy.copy(LocatorUtils.property(locator, "aktenzeichen", sourceAktenzeichen), sourceAktenzeichen, (this.aktenzeichen!= null))); copy.setAktenzeichen(copyAktenzeichen); } else { if (aktenzeichenShouldBeCopiedAndSet == Boolean.FALSE) { copy.aktenzeichen = null; } } } { Boolean zvterminShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.zvtermin!= null)); if (zvterminShouldBeCopiedAndSet == Boolean.TRUE) { Calendar sourceZvtermin; sourceZvtermin = this.getZvtermin(); Calendar copyZvtermin = ((Calendar) strategy.copy(LocatorUtils.property(locator, "zvtermin", sourceZvtermin), sourceZvtermin, (this.zvtermin!= null))); copy.setZvtermin(copyZvtermin); } else { if (zvterminShouldBeCopiedAndSet == Boolean.FALSE) { copy.zvtermin = null; } } } { Boolean zusatzterminShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.zusatztermin!= null)); if (zusatzterminShouldBeCopiedAndSet == Boolean.TRUE) { Calendar sourceZusatztermin; sourceZusatztermin = this.getZusatztermin(); Calendar copyZusatztermin = ((Calendar) strategy.copy(LocatorUtils.property(locator, "zusatztermin", sourceZusatztermin), sourceZusatztermin, (this.zusatztermin!= null))); copy.setZusatztermin(copyZusatztermin); } else { if (zusatzterminShouldBeCopiedAndSet == Boolean.FALSE) { copy.zusatztermin = null; } } } { Boolean amtsgerichtShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.amtsgericht!= null)); if (amtsgerichtShouldBeCopiedAndSet == Boolean.TRUE) { String sourceAmtsgericht; sourceAmtsgericht = this.getAmtsgericht(); String copyAmtsgericht = ((String) strategy.copy(LocatorUtils.property(locator, "amtsgericht", sourceAmtsgericht), sourceAmtsgericht, (this.amtsgericht!= null))); copy.setAmtsgericht(copyAmtsgericht); } else { if (amtsgerichtShouldBeCopiedAndSet == Boolean.FALSE) { copy.amtsgericht = null; } } } { Boolean verkehrswertShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.verkehrswert!= null)); if (verkehrswertShouldBeCopiedAndSet 
== Boolean.TRUE) { BigDecimal sourceVerkehrswert; sourceVerkehrswert = this.getVerkehrswert(); BigDecimal copyVerkehrswert = ((BigDecimal) strategy.copy(LocatorUtils.property(locator, "verkehrswert", sourceVerkehrswert), sourceVerkehrswert, (this.verkehrswert!= null))); copy.setVerkehrswert(copyVerkehrswert); } else { if (verkehrswertShouldBeCopiedAndSet == Boolean.FALSE) { copy.verkehrswert = null; } } } } return draftCopy; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public Object createNewInstance() { return new Versteigerung(); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy2 strategy) { if ((object == null)||(this.getClass()!= object.getClass())) { return false; } if (this == object) { return true; } final Versteigerung that = ((Versteigerung) object); { Boolean lhsZwangsversteigerung; lhsZwangsversteigerung = this.getZwangsversteigerung(); Boolean rhsZwangsversteigerung; rhsZwangsversteigerung = that.getZwangsversteigerung(); if (!strategy.equals(LocatorUtils.property(thisLocator, "zwangsversteigerung", lhsZwangsversteigerung), LocatorUtils.property(thatLocator, "zwangsversteigerung", rhsZwangsversteigerung), lhsZwangsversteigerung, rhsZwangsversteigerung, (this.zwangsversteigerung!= null), (that.zwangsversteigerung!= null))) { return false; } } { String lhsAktenzeichen; lhsAktenzeichen = this.getAktenzeichen(); String rhsAktenzeichen; rhsAktenzeichen = that.getAktenzeichen(); if (!strategy.equals(LocatorUtils.property(thisLocator, "aktenzeichen", lhsAktenzeichen), LocatorUtils.property(thatLocator, "aktenzeichen", rhsAktenzeichen), lhsAktenzeichen, rhsAktenzeichen, (this.aktenzeichen!= null), (that.aktenzeichen!= null))) { return false; } } { Calendar lhsZvtermin; lhsZvtermin = this.getZvtermin(); Calendar rhsZvtermin; 
rhsZvtermin = that.getZvtermin(); if (!strategy.equals(LocatorUtils.property(thisLocator, "zvtermin", lhsZvtermin), LocatorUtils.property(thatLocator, "zvtermin", rhsZvtermin), lhsZvtermin, rhsZvtermin, (this.zvtermin!= null), (that.zvtermin!= null))) { return false; } } { Calendar lhsZusatztermin; lhsZusatztermin = this.getZusatztermin(); Calendar rhsZusatztermin; rhsZusatztermin = that.getZusatztermin(); if (!strategy.equals(LocatorUtils.property(thisLocator, "zusatztermin", lhsZusatztermin), LocatorUtils.property(thatLocator, "zusatztermin", rhsZusatztermin), lhsZusatztermin, rhsZusatztermin, (this.zusatztermin!= null), (that.zusatztermin!= null))) { return false; } } { String lhsAmtsgericht; lhsAmtsgericht = this.getAmtsgericht(); String rhsAmtsgericht; rhsAmtsgericht = that.getAmtsgericht(); if (!strategy.equals(LocatorUtils.property(thisLocator, "amtsgericht", lhsAmtsgericht), LocatorUtils.property(thatLocator, "amtsgericht", rhsAmtsgericht), lhsAmtsgericht, rhsAmtsgericht, (this.amtsgericht!= null), (that.amtsgericht!= null))) { return false; } } { BigDecimal lhsVerkehrswert; lhsVerkehrswert = this.getVerkehrswert(); BigDecimal rhsVerkehrswert; rhsVerkehrswert = that.getVerkehrswert(); if (!strategy.equals(LocatorUtils.property(thisLocator, "verkehrswert", lhsVerkehrswert), LocatorUtils.property(thatLocator, "verkehrswert", rhsVerkehrswert), lhsVerkehrswert, rhsVerkehrswert, (this.verkehrswert!= null), (that.verkehrswert!= null))) { return false; } } return true; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T06:31:15+02:00", comments = "JAXB RI v2.3.0") public boolean equals(Object object) { final EqualsStrategy2 strategy = JAXBEqualsStrategy.INSTANCE2; return equals(null, null, object, strategy); } }
apache-2.0
atlasapi/atlas-deer
atlas-processing/src/test/java/org/atlasapi/messaging/EquivalenceGraphUpdateResolverTest.java
4215
package org.atlasapi.messaging;

import java.util.Map;

import org.atlasapi.content.ItemRef;
import org.atlasapi.entity.Id;
import org.atlasapi.equivalence.EquivalenceGraph;
import org.atlasapi.equivalence.EquivalenceGraphStore;
import org.atlasapi.equivalence.EquivalenceGraphUpdate;
import org.atlasapi.media.entity.Publisher;

import com.metabroadcast.common.collect.ImmutableOptionalMap;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Futures;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;

/**
 * Verifies that the resolver swaps the (possibly stale) graphs carried by an
 * update for the current versions held in the graph store, and silently drops
 * any graph the store can no longer resolve.
 */
@RunWith(MockitoJUnitRunner.class)
public class EquivalenceGraphUpdateResolverTest {

    @Mock private EquivalenceGraphStore graphStore;

    private EquivalenceGraphUpdateResolver graphUpdateResolver;

    // Each "stale" graph is the version carried by an incoming update; the
    // matching "current" graph is what the mocked store resolves it to.
    private EquivalenceGraph staleUpdated;
    private EquivalenceGraph currentUpdated;
    private EquivalenceGraph staleCreated;
    private EquivalenceGraph currentCreated;

    @Before
    public void setUp() throws Exception {
        graphUpdateResolver = EquivalenceGraphUpdateResolver.create(graphStore);

        staleUpdated = graphFor(0L);
        currentUpdated = graphFor(1L);
        staleCreated = graphFor(2L);
        currentCreated = graphFor(3L);
    }

    @Test
    public void resolveUpdatedGraph() throws Exception {
        setupMocks(ImmutableMap.of(staleUpdated.getId(), currentUpdated));

        ImmutableSet<EquivalenceGraph> resolved = graphUpdateResolver.resolve(
                EquivalenceGraphUpdate.builder(staleUpdated).build()
        );

        // The stale updated graph must be replaced by its current version.
        assertThat(resolved.size(), is(1));
        assertThat(resolved.contains(currentUpdated), is(true));
    }

    @Test
    public void resolveCreatedGraphs() throws Exception {
        setupMocks(ImmutableMap.of(
                staleUpdated.getId(), currentUpdated,
                staleCreated.getId(), currentCreated
        ));

        ImmutableSet<EquivalenceGraph> resolved = graphUpdateResolver.resolve(
                EquivalenceGraphUpdate.builder(staleUpdated)
                        .withCreated(ImmutableList.of(staleCreated))
                        .build()
        );

        // Both the updated and the created graph resolve to current versions.
        assertThat(resolved.size(), is(2));
        assertThat(resolved.contains(currentCreated), is(true));
    }

    @Test
    public void doNotReturnUpdatedGraphIfItDoesNotResolve() throws Exception {
        setupMocks(ImmutableMap.of(), staleUpdated.getId());

        ImmutableSet<EquivalenceGraph> resolved = graphUpdateResolver.resolve(
                EquivalenceGraphUpdate.builder(staleUpdated).build()
        );

        // A graph missing from the store is dropped rather than passed through stale.
        assertThat(resolved.size(), is(0));
    }

    @Test
    public void doNotReturnCreatedGraphIfItDoesNotResolve() throws Exception {
        setupMocks(
                ImmutableMap.of(staleUpdated.getId(), currentUpdated),
                staleCreated.getId()
        );

        ImmutableSet<EquivalenceGraph> resolved = graphUpdateResolver.resolve(
                EquivalenceGraphUpdate.builder(staleUpdated)
                        .withCreated(ImmutableList.of(staleCreated))
                        .build()
        );

        // Only the resolvable updated graph survives; the unresolvable created
        // graph is dropped and its stale version is not passed through.
        assertThat(resolved.size(), is(1));
        assertThat(resolved.contains(currentUpdated), is(true));
        assertThat(resolved.contains(staleCreated), is(false));
    }

    /**
     * Stubs the graph store so that resolving exactly {@code graphs.keySet()}
     * plus {@code missingIds} yields the given graphs and nothing for the
     * missing ids.
     */
    private void setupMocks(Map<Id, EquivalenceGraph> graphs, Id... missingIds) {
        when(graphStore.resolveIds(
                Sets.union(graphs.keySet(), ImmutableSet.copyOf(missingIds))
        ))
                .thenReturn(Futures.immediateFuture(
                        ImmutableOptionalMap.fromMap(graphs)
                ));
    }

    /** Builds a single-node graph around an item with the given id. */
    private EquivalenceGraph graphFor(long id) {
        return EquivalenceGraph.valueOf(getItem(id));
    }

    private ItemRef getItem(long id) {
        return new ItemRef(Id.valueOf(id), Publisher.METABROADCAST, "", DateTime.now());
    }
}
apache-2.0
whiteley/jetty8
jetty-rewrite/src/main/java/org/eclipse/jetty/rewrite/handler/MsieSslRule.java
3356
//
//  ========================================================================
//  Copyright (c) 1995-2013 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.rewrite.handler;

import java.io.IOException;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.eclipse.jetty.http.HttpHeaderValues;
import org.eclipse.jetty.http.HttpHeaders;
import org.eclipse.jetty.util.StringMap;

/**
 * MSIE (Microsoft Internet Explorer) SSL Rule.
 * Disable keep alive for SSL from IE5 (or earlier) and from IE6 on Windows
 * versions whose SSL keep-alive handling is known to be broken.
 */
public class MsieSslRule extends Rule
{
    // Version characters as they appear in the User-Agent string after "MSIE ".
    private static final int IEv5 = '5';
    private static final int IEv6 = '6';

    /** Windows OS tokens (text following "Windows ") for which IE6 SSL keep-alive is broken. */
    private static final StringMap __IE6_BadOS = new StringMap();

    // FIX: the original populated this static map from an *instance*
    // initializer ("{ ... }" with no "static"), so the entries were re-put on
    // every MsieSslRule construction — and would never be added at all if the
    // map were read before an instance existed. A static initializer runs
    // exactly once, at class load.
    static
    {
        __IE6_BadOS.put("NT 5.01", Boolean.TRUE);
        __IE6_BadOS.put("NT 5.0", Boolean.TRUE);
        __IE6_BadOS.put("NT 4.0", Boolean.TRUE);
        __IE6_BadOS.put("98", Boolean.TRUE);
        __IE6_BadOS.put("98; Win 9x 4.90", Boolean.TRUE);
        __IE6_BadOS.put("95", Boolean.TRUE);
        __IE6_BadOS.put("CE", Boolean.TRUE);
    }

    public MsieSslRule()
    {
        _handling = false;
        _terminating = false;
    }

    /**
     * Forces "Connection: close" on secure responses to affected IE versions.
     *
     * @param target the rewrite target
     * @param request the request; only the User-Agent header is consulted
     * @param response the response; may get its Connection header set to "close"
     * @return {@code target} if the rule applied (header was set), otherwise {@code null}
     * @throws IOException declared for the {@link Rule} contract; not thrown here
     */
    public String matchAndApply(String target, HttpServletRequest request, HttpServletResponse response) throws IOException
    {
        if (request.isSecure())
        {
            String user_agent = request.getHeader(HttpHeaders.USER_AGENT);

            if (user_agent != null)
            {
                int msie = user_agent.indexOf("MSIE");

                // Require at least one character after "MSIE " for the version digit.
                if (msie > 0 && user_agent.length() - msie > 5)
                {
                    // The single version character, e.g. '5' in "MSIE 5.5".
                    int ieVersion = user_agent.charAt(msie + 5);

                    if (ieVersion <= IEv5)
                    {
                        // IE5 and earlier: always close the connection.
                        response.setHeader(HttpHeaders.CONNECTION, HttpHeaderValues.CLOSE);
                        return target;
                    }

                    if (ieVersion == IEv6)
                    {
                        // IE6 is only broken on certain Windows versions; extract
                        // the OS token between "Windows " and the closing ')'.
                        int windows = user_agent.indexOf("Windows", msie + 5);
                        if (windows > 0)
                        {
                            int end = user_agent.indexOf(')', windows + 8);
                            // Unterminated token or a known-bad OS: close.
                            if (end < 0 || __IE6_BadOS.getEntry(user_agent, windows + 8, end - windows - 8) != null)
                            {
                                response.setHeader(HttpHeaders.CONNECTION, HttpHeaderValues.CLOSE);
                                return target;
                            }
                        }
                    }
                }
            }
        }
        return null;
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-devicefarm/src/main/java/com/amazonaws/services/devicefarm/model/transform/ListDeviceInstancesRequestProtocolMarshaller.java
2761
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.devicefarm.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;

import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.devicefarm.model.*;
import com.amazonaws.transform.Marshaller;

import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * ListDeviceInstancesRequest Marshaller
 *
 * <p>Marshals a {@code ListDeviceInstancesRequest} into an HTTP {@code Request}.
 * This class is generated by the AWS Java SDK code generator (see the
 * {@code @Generated} annotation below) — do not hand-edit; changes will be
 * lost on regeneration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListDeviceInstancesRequestProtocolMarshaller implements Marshaller<Request<ListDeviceInstancesRequest>, ListDeviceInstancesRequest> {

    // Wire-level binding for this operation: AWS JSON protocol, POST to "/",
    // with the operation selected via the X-Amz-Target style identifier
    // "DeviceFarm_20150623.ListDeviceInstances" rather than the URI.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("DeviceFarm_20150623.ListDeviceInstances").serviceName("AWSDeviceFarm").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public ListDeviceInstancesRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshals the given request into an HTTP request.
     *
     * @param listDeviceInstancesRequest the request to marshall; must not be null
     * @return the marshalled HTTP request
     * @throws SdkClientException if the argument is null or marshalling fails for any reason
     */
    public Request<ListDeviceInstancesRequest> marshall(ListDeviceInstancesRequest listDeviceInstancesRequest) {

        if (listDeviceInstancesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Marshalling is a three-step protocol: open the marshaller,
            // write the request's members, then finish to obtain the Request.
            final ProtocolRequestMarshaller<ListDeviceInstancesRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    listDeviceInstancesRequest);

            protocolMarshaller.startMarshalling();
            ListDeviceInstancesRequestMarshaller.getInstance().marshall(listDeviceInstancesRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Wrap any failure, preserving the original exception as the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
yy13003/Im001
Hello/src/com/example/hello/bean/XinListBean.java
70
package com.example.hello.bean; public class XinListBean { }
apache-2.0
alex-dorokhov/libgdx
gdx/src/com/badlogic/gdx/scenes/scene2d/Event.java
4740
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.scenes.scene2d; import com.badlogic.gdx.utils.Null; import com.badlogic.gdx.utils.Pool.Poolable; /** The base class for all events. * <p> * By default an event will "bubble" up through an actor's parent's handlers (see {@link #setBubbles(boolean)}). * <p> * An actor's capture listeners can {@link #stop()} an event to prevent child actors from seeing it. * <p> * An Event may be marked as "handled" which will end its propagation outside of the Stage (see {@link #handle()}). The default * {@link Actor#fire(Event)} will mark events handled if an {@link EventListener} returns true. * <p> * A cancelled event will be stopped and handled. Additionally, many actors will undo the side-effects of a canceled event. (See * {@link #cancel()}.) 
* * @see InputEvent * @see Actor#fire(Event) */ public class Event implements Poolable { private Stage stage; private Actor targetActor; private Actor listenerActor; private boolean capture; // true means event occurred during the capture phase private boolean bubbles = true; // true means propagate to target's parents private boolean handled; // true means the event was handled (the stage will eat the input) private boolean stopped; // true means event propagation was stopped private boolean cancelled; // true means propagation was stopped and any action that this event would cause should not happen /** Marks this event as handled. This does not affect event propagation inside scene2d, but causes the {@link Stage} event * methods to return true, which will eat the event so it is not passed on to the application under the stage. */ public void handle () { handled = true; } /** Marks this event cancelled. This {@link #handle() handles} the event and {@link #stop() stops} the event propagation. It * also cancels any default action that would have been taken by the code that fired the event. Eg, if the event is for a * checkbox being checked, cancelling the event could uncheck the checkbox. */ public void cancel () { cancelled = true; stopped = true; handled = true; } /** Marks this event has being stopped. This halts event propagation. Any other listeners on the {@link #getListenerActor() * listener actor} are notified, but after that no other listeners are notified. */ public void stop () { stopped = true; } public void reset () { stage = null; targetActor = null; listenerActor = null; capture = false; bubbles = true; handled = false; stopped = false; cancelled = false; } /** Returns the actor that the event originated from. */ public Actor getTarget () { return targetActor; } public void setTarget (Actor targetActor) { this.targetActor = targetActor; } /** Returns the actor that this listener is attached to. 
*/ public Actor getListenerActor () { return listenerActor; } public void setListenerActor (Actor listenerActor) { this.listenerActor = listenerActor; } public boolean getBubbles () { return bubbles; } /** If true, after the event is fired on the target actor, it will also be fired on each of the parent actors, all the way to * the root. */ public void setBubbles (boolean bubbles) { this.bubbles = bubbles; } /** {@link #handle()} */ public boolean isHandled () { return handled; } /** @see #stop() */ public boolean isStopped () { return stopped; } /** @see #cancel() */ public boolean isCancelled () { return cancelled; } public void setCapture (boolean capture) { this.capture = capture; } /** If true, the event was fired during the capture phase. * @see Actor#fire(Event) */ public boolean isCapture () { return capture; } public void setStage (Stage stage) { this.stage = stage; } /** The stage for the actor the event was fired on. */ public Stage getStage () { return stage; } }
apache-2.0
StQuote/VisEditor
Editor/src/com/kotcrab/vis/editor/module/project/assetsmanager/AssetDragAndDrop.java
7973
/* * Copyright 2014-2015 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.kotcrab.vis.editor.module.project.assetsmanager; import com.badlogic.gdx.assets.loaders.BitmapFontLoader.BitmapFontParameter; import com.badlogic.gdx.files.FileHandle; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.g2d.BitmapFont; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.InputEvent; import com.badlogic.gdx.scenes.scene2d.ui.Image; import com.badlogic.gdx.scenes.scene2d.ui.Label; import com.badlogic.gdx.scenes.scene2d.ui.Label.LabelStyle; import com.badlogic.gdx.scenes.scene2d.utils.DragAndDrop.Payload; import com.badlogic.gdx.scenes.scene2d.utils.DragAndDrop.Source; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.Disposable; import com.badlogic.gdx.utils.ObjectMap.Values; import com.kotcrab.vis.editor.extension.AssetType; import com.kotcrab.vis.editor.module.ModuleInjector; import com.kotcrab.vis.editor.module.project.*; import com.kotcrab.vis.editor.scheme.SpriterAssetData; import com.kotcrab.vis.editor.ui.tabbedpane.DragAndDropTarget; import com.kotcrab.vis.editor.util.FileUtils; import com.kotcrab.vis.editor.util.scene2d.VisDragAndDrop; import com.kotcrab.vis.editor.util.scene2d.VisDropSource; import com.kotcrab.vis.runtime.assets.*; import com.kotcrab.vis.ui.widget.VisLabel; /** * Assets manager drag and drop helper class. 
* @author Kotcrab */ public class AssetDragAndDrop implements Disposable { private FileAccessModule fileAccess; private TextureCacheModule textureCache; private FontCacheModule fontCache; private ParticleCacheModule particleCache; private SpriterDataIOModule spriterDataIO; private VisDragAndDrop dragAndDrop; private DragAndDropTarget dropTarget; public AssetDragAndDrop (ModuleInjector injector) { injector.injectModules(this); dragAndDrop = new VisDragAndDrop(injector); dragAndDrop.setKeepWithinStage(false); dragAndDrop.setDragTime(0); } public void setDropTarget (DragAndDropTarget dropTarget) { this.dropTarget = dropTarget; } public void rebuild (Array<Actor> mainActors, Array<Actor> miscActors, Values<TextureAtlasViewTab> atlasesViews) { if (dropTarget != null) { dragAndDrop.clear(); for (Actor actor : mainActors) { addSource((FileItem) actor); } for (Actor actor : miscActors) { addSource((FileItem) actor); } dragAndDrop.addTarget(dropTarget.getDropTarget()); } for (TextureAtlasViewTab view : atlasesViews) { Array<AtlasItem> items = view.getItems(); for (AtlasItem item : items) addAtlasSource(item); } } public void addSources (Array<AtlasItem> items) { for (AtlasItem item : items) addAtlasSource(item); } private void addAtlasSource (AtlasItem item) { dragAndDrop.addSource(new Source(item) { @Override public Payload dragStart (InputEvent event, float x, float y, int pointer) { return createTexturePayload(item.getRegion(), item.getAtlasAsset()); } }); } private void addSource (FileItem item) { if (item.isMainFile() == false) { dragAndDrop.addSource(new VisDropSource(dragAndDrop, item).defaultView("This file type is unsupported in this marked directory.")); return; } String relativePath = fileAccess.relativizeToAssetsFolder(item.getFile()); if (item.getType().equals(AssetType.TEXTURE)) { dragAndDrop.addSource(new Source(item) { @Override public Payload dragStart (InputEvent event, float x, float y, int pointer) { TextureRegionAsset asset = new 
TextureRegionAsset(fileAccess.relativizeToAssetsFolder(item.getFile())); return createTexturePayload(item.getRegion(), asset); } }); } if (item.getType().equals(AssetType.TTF_FONT)) { dragAndDrop.addSource(new Source(item) { @Override public Payload dragStart (InputEvent event, float x, float y, int pointer) { Payload payload = new Payload(); TtfFontAsset asset = new TtfFontAsset(fileAccess.relativizeToAssetsFolder(item.getFile()), FontCacheModule.DEFAULT_FONT_SIZE); payload.setObject(asset); BitmapFont font = fontCache.get(asset, 1); LabelStyle style = new LabelStyle(font, Color.WHITE); Label label = new VisLabel(FontCacheModule.DEFAULT_TEXT, style); payload.setDragActor(label); float invZoom = 1.0f / dropTarget.getCameraZoom(); label.setFontScale(invZoom); dragAndDrop.setDragActorPosition(-label.getWidth() * invZoom / 2, label.getHeight() / 2); return payload; } }); } if (item.getType().equals(AssetType.BMP_FONT_FILE) || item.getType().equals(AssetType.BMP_FONT_TEXTURE)) { dragAndDrop.addSource(new Source(item) { @Override public Payload dragStart (InputEvent event, float x, float y, int pointer) { Payload payload = new Payload(); FileHandle fontFile; if (item.getType().equals(AssetType.BMP_FONT_FILE)) fontFile = item.getFile(); else fontFile = FileUtils.sibling(item.getFile(), "fnt"); BmpFontAsset asset = new BmpFontAsset(fileAccess.relativizeToAssetsFolder(fontFile), new BitmapFontParameter()); payload.setObject(asset); LabelStyle style = new LabelStyle(fontCache.get(asset, 1), Color.WHITE); Label label = new VisLabel(FontCacheModule.DEFAULT_TEXT, style); payload.setDragActor(label); float invZoom = 1.0f / dropTarget.getCameraZoom(); label.setFontScale(invZoom); dragAndDrop.setDragActorPosition(-label.getWidth() * invZoom / 2, label.getHeight() / 2); return payload; } }); } if (item.getType().equals(AssetType.PARTICLE_EFFECT)) { dragAndDrop.addSource(new VisDropSource(dragAndDrop, item).defaultView("New Particle Effect \n (drop on scene to add)").setPayload(new 
ParticleAsset(relativePath))); } if (item.getType().equals(AssetType.MUSIC)) { dragAndDrop.addSource(new VisDropSource(dragAndDrop, item).defaultView("New Music \n (drop on scene to add)").setPayload(new MusicAsset(relativePath))); } if (item.getType().equals(AssetType.SOUND)) { dragAndDrop.addSource(new VisDropSource(dragAndDrop, item).defaultView("New Sound \n (drop on scene to add)").setPayload(new SoundAsset(relativePath))); } if (item.getType().equals(AssetType.SPRITER_SCML)) { FileHandle dataFile = item.getFile().parent().child(".vis").child("data.json"); if (dataFile.exists() == false) return; SpriterAssetData data = spriterDataIO.loadData(dataFile); dragAndDrop.addSource(new VisDropSource(dragAndDrop, item).defaultView("New Spriter Animation \n (drop on scene to add)").setPayload(new SpriterAsset(relativePath, data.imageScale))); } if (item.getType().equals(AssetType.UNKNOWN) == false && item.getSupport() != null) { dragAndDrop.addSource(item.getSupport().createDropSource(dragAndDrop, item)); } } private Payload createTexturePayload (TextureRegion region, TextureAssetDescriptor asset) { Payload payload = new Payload(); payload.setObject(asset); //image creation Image img = new Image(region); payload.setDragActor(img); float invZoom = 1.0f / dropTarget.getCameraZoom(); img.setScale(invZoom); dragAndDrop.setDragActorPosition(-img.getWidth() * invZoom / 2, img.getHeight() - img.getHeight() * invZoom / 2); return payload; } public void clear () { dragAndDrop.clear(); } @Override public void dispose () { dragAndDrop.dispose(); } }
apache-2.0
linkedin/pinot
pinot-core/src/main/java/org/apache/pinot/core/query/distinct/raw/BaseRawFloatSingleColumnDistinctExecutor.java
2372
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pinot.core.query.distinct.raw; import it.unimi.dsi.fastutil.floats.FloatIterator; import it.unimi.dsi.fastutil.floats.FloatOpenHashSet; import it.unimi.dsi.fastutil.floats.FloatSet; import java.util.ArrayList; import java.util.List; import org.apache.pinot.common.utils.DataSchema; import org.apache.pinot.common.utils.DataSchema.ColumnDataType; import org.apache.pinot.core.data.table.Record; import org.apache.pinot.core.query.distinct.DistinctTable; import org.apache.pinot.core.query.distinct.DistinctExecutor; import org.apache.pinot.core.query.request.context.ExpressionContext; /** * Base implementation of {@link DistinctExecutor} for single raw FLOAT column. 
*/ abstract class BaseRawFloatSingleColumnDistinctExecutor implements DistinctExecutor { final ExpressionContext _expression; final int _limit; final FloatSet _valueSet; BaseRawFloatSingleColumnDistinctExecutor(ExpressionContext expression, int limit) { _expression = expression; _limit = limit; _valueSet = new FloatOpenHashSet(Math.min(limit, MAX_INITIAL_CAPACITY)); } @Override public DistinctTable getResult() { DataSchema dataSchema = new DataSchema(new String[]{_expression.toString()}, new ColumnDataType[]{ColumnDataType.FLOAT}); List<Record> records = new ArrayList<>(_valueSet.size()); FloatIterator valueIterator = _valueSet.iterator(); while (valueIterator.hasNext()) { records.add(new Record(new Object[]{valueIterator.nextFloat()})); } return new DistinctTable(dataSchema, records); } }
apache-2.0
aharin/inproctester
inproctester-tests/src/main/java/com/thoughtworks/inproctester/testapp/TestServlet.java
2367
/* Copyright 2011 ThoughtWorks Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.inproctester.testapp; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; public class TestServlet extends HttpServlet { public static Contact contact = new Contact(); public static final String FLASH_MESSAGE_COOKIE_NAME = "FLASH_MESSAGE"; @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { Cookie flashMessageCookie = getCookie(req, FLASH_MESSAGE_COOKIE_NAME); if (flashMessageCookie != null) { req.setAttribute("message", flashMessageCookie.getValue()); Cookie cookie = new Cookie(FLASH_MESSAGE_COOKIE_NAME, ""); cookie.setMaxAge(0); resp.addCookie(cookie); } req.setAttribute("contact", contact); getServletContext().getRequestDispatcher("/test.ftl").forward(req, resp); } private Cookie getCookie(HttpServletRequest req, String cookieName) { Cookie[] cookies = req.getCookies(); if (cookies != null) { for (Cookie cookie : cookies) { if (cookie.getName().equals(cookieName)) { return cookie; } } } return null; } @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { contact.setName(req.getParameter("contactName")); resp.addCookie(new Cookie(FLASH_MESSAGE_COOKIE_NAME, 
"Success")); resp.sendRedirect(req.getContextPath() + "/contacts/1"); } }
apache-2.0
dreajay/jCodes
jCodes-nio/src/main/java/com/jcodes/nio/HelloWorldServer.java
3857
package com.jcodes.nio; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CharsetEncoder; import java.util.Iterator; public class HelloWorldServer { static int BLOCK = 1024; static String name = ""; protected Selector selector; protected ByteBuffer clientBuffer = ByteBuffer.allocate(BLOCK); protected CharsetDecoder decoder; static CharsetEncoder encoder = Charset.forName("GB2312").newEncoder(); public HelloWorldServer(int port) throws IOException { selector = this.getSelector(port); Charset charset = Charset.forName("GB2312"); decoder = charset.newDecoder(); } // 获取Selector protected Selector getSelector(int port) throws IOException { ServerSocketChannel server = ServerSocketChannel.open(); Selector sel = Selector.open(); server.socket().bind(new InetSocketAddress(port)); server.configureBlocking(false); server.register(sel, SelectionKey.OP_ACCEPT); return sel; } // 监听端口 public void listen() { try { for (;;) { selector.select(); Iterator iter = selector.selectedKeys().iterator(); while (iter.hasNext()) { SelectionKey key = (SelectionKey) iter.next(); iter.remove(); process(key); } } } catch (IOException e) { e.printStackTrace(); } } // 处理事件 protected void process(SelectionKey key) throws IOException { if (key.isAcceptable()) { // 接收请求 ServerSocketChannel server = (ServerSocketChannel) key.channel(); SocketChannel channel = server.accept(); //设置非阻塞模式 channel.configureBlocking(false); channel.register(selector, SelectionKey.OP_READ); } else if (key.isReadable()) { // 读信息 SocketChannel channel = (SocketChannel) key.channel(); int count = channel.read(clientBuffer); if (count > 0) { clientBuffer.flip(); CharBuffer charBuffer = 
decoder.decode(clientBuffer); name = charBuffer.toString(); // System.out.println(name); SelectionKey sKey = channel.register(selector, SelectionKey.OP_WRITE); sKey.attach(name); } else { channel.close(); } clientBuffer.clear(); } else if (key.isWritable()) { // 写事件 SocketChannel channel = (SocketChannel) key.channel(); String name = (String) key.attachment(); ByteBuffer block = encoder.encode(CharBuffer .wrap("Hello !" + name)); channel.write(block); //channel.close(); } } public static void main(String[] args) { int port = 8888; try { HelloWorldServer server = new HelloWorldServer(port); System.out.println("listening on " + port); server.listen(); } catch (IOException e) { e.printStackTrace(); } } }
apache-2.0
vivantech/kc_fixes
src/main/java/org/kuali/kra/bo/OrganizationCorrespondent.java
1828
/* * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.bo; import org.kuali.kra.irb.correspondence.CorrespondentType; public class OrganizationCorrespondent extends Correspondent { private static final long serialVersionUID = 1L; private String organizationId; private Organization organization; public OrganizationCorrespondent() { super(); } public String getOrganizationId() { return organizationId; } public void setOrganizationId(String organizationId) { this.organizationId = organizationId; } public Organization getOrganization() { return organization; } public void setOrganization(Organization organization) { this.organization = organization; } public KcPerson getPerson() { return getKcPersonService().getKcPersonByPersonId(getPersonId()); } public org.kuali.kra.irb.correspondence.CorrespondentType getCorrespondentType() { return (org.kuali.kra.irb.correspondence.CorrespondentType) correspondentType; } public void setCorrespondentType(org.kuali.kra.irb.correspondence.CorrespondentType correspondentType) { this.correspondentType = correspondentType; } }
apache-2.0
ewall/fhir-starter
src/main/java/org/ewall/app/HealthportDataProvider.java
1462
package org.ewall.app; import java.util.Collection; import java.util.List; import ca.uhn.fhir.model.api.Bundle; import ca.uhn.fhir.model.dstu.resource.MedicationPrescription; import ca.uhn.fhir.model.primitive.UriDt; import ca.uhn.fhir.rest.server.EncodingEnum; /** * Data Provider using Georgia Tech's HealthPort FHIR server * requires VPN connection to GT network */ public class HealthportDataProvider extends AbstractDataProvider { public HealthportDataProvider() { //super("https://taurus.i3l.gatech.edu:8443/HealthPort/fhir/", EncodingEnum.JSON, "Patient/3.568001602-01"); super("http://localhost:8080/HealthPort/fhir/", EncodingEnum.JSON, "Patient/3.568001602-01"); } public Collection<MedicationPrescription> getAllPrescriptionsForPatient(String id) { // surprise! HealthPort doesn't support PATIENT search parameter, need to use SUBJECT instead... so we'll do the search by URL String searchstr = serverBase + "MedicationPrescription?subject:Patient=" + id; UriDt searchurl = new UriDt(searchstr); Bundle response = client.search(searchurl); List<MedicationPrescription> prescriptions = response.getResources(MedicationPrescription.class); while (!response.getLinkNext().isEmpty()) { // load next page response = client.loadPage().next(response).execute(); prescriptions.addAll(response.getResources(MedicationPrescription.class)); } return prescriptions; } }
apache-2.0
brettwooldridge/buck
src/com/facebook/buck/apple/AppleBundleDescription.java
16601
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.apple; import com.facebook.buck.apple.toolchain.AppleCxxPlatform; import com.facebook.buck.apple.toolchain.AppleCxxPlatformsProvider; import com.facebook.buck.apple.toolchain.ApplePlatform; import com.facebook.buck.apple.toolchain.CodeSignIdentityStore; import com.facebook.buck.apple.toolchain.ProvisioningProfileStore; import com.facebook.buck.core.cell.CellPathResolver; import com.facebook.buck.core.description.MetadataProvidingDescription; import com.facebook.buck.core.description.arg.CommonDescriptionArg; import com.facebook.buck.core.description.arg.HasDeclaredDeps; import com.facebook.buck.core.description.arg.HasDefaultPlatform; import com.facebook.buck.core.description.arg.HasTests; import com.facebook.buck.core.description.arg.Hint; import com.facebook.buck.core.description.attr.ImplicitDepsInferringDescription; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.Flavor; import com.facebook.buck.core.model.FlavorDomain; import com.facebook.buck.core.model.Flavored; import com.facebook.buck.core.model.InternalFlavor; import com.facebook.buck.core.model.targetgraph.BuildRuleCreationContextWithTargetGraph; import com.facebook.buck.core.model.targetgraph.DescriptionWithTargetGraph; import com.facebook.buck.core.rules.ActionGraphBuilder; import com.facebook.buck.core.rules.BuildRuleParams; import 
com.facebook.buck.core.toolchain.ToolchainProvider; import com.facebook.buck.core.util.immutables.BuckStyleImmutable; import com.facebook.buck.cxx.CxxDescriptionEnhancer; import com.facebook.buck.cxx.FrameworkDependencies; import com.facebook.buck.cxx.toolchain.CxxBuckConfig; import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.cxx.toolchain.CxxPlatformsProvider; import com.facebook.buck.cxx.toolchain.LinkerMapMode; import com.facebook.buck.cxx.toolchain.StripStyle; import com.facebook.buck.rules.coercer.PatternMatchedCollection; import com.facebook.buck.swift.SwiftBuckConfig; import com.facebook.buck.versions.Version; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import java.util.Optional; import org.immutables.value.Value; public class AppleBundleDescription implements DescriptionWithTargetGraph<AppleBundleDescriptionArg>, Flavored, ImplicitDepsInferringDescription<AppleBundleDescription.AbstractAppleBundleDescriptionArg>, MetadataProvidingDescription<AppleBundleDescriptionArg> { public static final ImmutableSet<Flavor> SUPPORTED_LIBRARY_FLAVORS = ImmutableSet.of(CxxDescriptionEnhancer.STATIC_FLAVOR, CxxDescriptionEnhancer.SHARED_FLAVOR); public static final Flavor WATCH_OS_FLAVOR = InternalFlavor.of("watchos-armv7k"); public static final Flavor WATCH_OS_64_32_FLAVOR = InternalFlavor.of("watchos-arm64_32"); public static final Flavor WATCH_SIMULATOR_FLAVOR = InternalFlavor.of("watchsimulator-i386"); private static final Flavor WATCH = InternalFlavor.of("watch"); private final ToolchainProvider toolchainProvider; private final XCodeDescriptions xcodeDescriptions; private final AppleBinaryDescription appleBinaryDescription; private final AppleLibraryDescription appleLibraryDescription; private 
final AppleConfig appleConfig; private final CxxBuckConfig cxxBuckConfig; private final SwiftBuckConfig swiftBuckConfig; public AppleBundleDescription( ToolchainProvider toolchainProvider, XCodeDescriptions xcodeDescriptions, AppleBinaryDescription appleBinaryDescription, AppleLibraryDescription appleLibraryDescription, AppleConfig appleConfig, CxxBuckConfig cxxBuckConfig, SwiftBuckConfig swiftBuckConfig) { this.toolchainProvider = toolchainProvider; this.xcodeDescriptions = xcodeDescriptions; this.appleBinaryDescription = appleBinaryDescription; this.appleLibraryDescription = appleLibraryDescription; this.appleConfig = appleConfig; this.cxxBuckConfig = cxxBuckConfig; this.swiftBuckConfig = swiftBuckConfig; } @Override public Class<AppleBundleDescriptionArg> getConstructorArgType() { return AppleBundleDescriptionArg.class; } @Override public Optional<ImmutableSet<FlavorDomain<?>>> flavorDomains() { ImmutableSet.Builder<FlavorDomain<?>> builder = ImmutableSet.builder(); ImmutableSet<FlavorDomain<?>> localDomains = ImmutableSet.of(AppleDebugFormat.FLAVOR_DOMAIN, AppleDescriptions.INCLUDE_FRAMEWORKS); builder.addAll(localDomains); appleLibraryDescription.flavorDomains().ifPresent(domains -> builder.addAll(domains)); appleBinaryDescription.flavorDomains().ifPresent(domains -> builder.addAll(domains)); return Optional.of(builder.build()); } @Override public boolean hasFlavors(ImmutableSet<Flavor> flavors) { if (appleLibraryDescription.hasFlavors(flavors)) { return true; } ImmutableSet.Builder<Flavor> flavorBuilder = ImmutableSet.builder(); for (Flavor flavor : flavors) { if (AppleDebugFormat.FLAVOR_DOMAIN.getFlavors().contains(flavor)) { continue; } if (AppleDescriptions.INCLUDE_FRAMEWORKS.getFlavors().contains(flavor)) { continue; } flavorBuilder.add(flavor); } return appleBinaryDescription.hasFlavors(flavorBuilder.build()); } @Override public AppleBundle createBuildRule( BuildRuleCreationContextWithTargetGraph context, BuildTarget buildTarget, BuildRuleParams params, 
AppleBundleDescriptionArg args) { ActionGraphBuilder graphBuilder = context.getActionGraphBuilder(); AppleDebugFormat flavoredDebugFormat = AppleDebugFormat.FLAVOR_DOMAIN .getValue(buildTarget) .orElse(appleConfig.getDefaultDebugInfoFormatForBinaries()); if (!buildTarget.getFlavors().contains(flavoredDebugFormat.getFlavor())) { return (AppleBundle) graphBuilder.requireRule( buildTarget.withAppendedFlavors(flavoredDebugFormat.getFlavor())); } if (!AppleDescriptions.INCLUDE_FRAMEWORKS.getValue(buildTarget).isPresent()) { return (AppleBundle) graphBuilder.requireRule( buildTarget.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR)); } CxxPlatformsProvider cxxPlatformsProvider = getCxxPlatformsProvider(); return AppleDescriptions.createAppleBundle( xcodeDescriptions, cxxPlatformsProvider, getAppleCxxPlatformFlavorDomain(), context.getTargetGraph(), buildTarget, context.getProjectFilesystem(), params, graphBuilder, toolchainProvider.getByName( CodeSignIdentityStore.DEFAULT_NAME, CodeSignIdentityStore.class), toolchainProvider.getByName( ProvisioningProfileStore.DEFAULT_NAME, ProvisioningProfileStore.class), args.getBinary(), args.getPlatformBinary(), args.getExtension(), args.getProductName(), args.getInfoPlist(), args.getInfoPlistSubstitutions(), args.getDeps(), args.getTests(), flavoredDebugFormat, appleConfig.useDryRunCodeSigning(), appleConfig.cacheBundlesAndPackages(), appleConfig.shouldVerifyBundleResources(), appleConfig.assetCatalogValidation(), args.getAssetCatalogsCompilationOptions(), args.getCodesignFlags(), args.getCodesignIdentity(), args.getIbtoolModuleFlag(), args.getIbtoolFlags(), appleConfig.getCodesignTimeout(), swiftBuckConfig.getCopyStdlibToFrameworks(), cxxBuckConfig.shouldCacheStrip()); } /** * Propagate the bundle's platform, debug symbol and strip flavors to its dependents which are * other bundles (e.g. 
extensions) */
@Override
public void findDepsForTargetFromConstructorArgs(
    BuildTarget buildTarget,
    CellPathResolver cellRoots,
    AbstractAppleBundleDescriptionArg constructorArg,
    ImmutableCollection.Builder<BuildTarget> extraDepsBuilder,
    ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) {
  // Pin the target to the default C++ platform flavor when no platform flavor is present,
  // so all flavor-domain lookups below have a concrete platform to work with.
  CxxPlatformsProvider cxxPlatformsProvider = getCxxPlatformsProvider();
  if (!cxxPlatformsProvider.getCxxPlatforms().containsAnyOf(buildTarget.getFlavors())) {
    buildTarget =
        buildTarget.withAppendedFlavors(cxxPlatformsProvider.getDefaultCxxPlatform().getFlavor());
  }

  FlavorDomain<AppleCxxPlatform> appleCxxPlatformsFlavorDomain = getAppleCxxPlatformFlavorDomain();
  // For multiarch ("fat") binaries use the representative platform; otherwise derive the
  // platform from the build target's flavors.
  Optional<MultiarchFileInfo> fatBinaryInfo =
      MultiarchFileInfos.create(appleCxxPlatformsFlavorDomain, buildTarget);
  CxxPlatform cxxPlatform;
  if (fatBinaryInfo.isPresent()) {
    AppleCxxPlatform appleCxxPlatform = fatBinaryInfo.get().getRepresentativePlatform();
    cxxPlatform = appleCxxPlatform.getCxxPlatform();
  } else {
    cxxPlatform = ApplePlatforms.getCxxPlatformForBuildTarget(cxxPlatformsProvider, buildTarget);
  }

  // Translate the pseudo "watch" flavor into the concrete watch platform flavor(s) that
  // match this bundle's own platform: simulator -> watch simulator, device (iphoneos/watchos)
  // -> watch device flavors, anything else keeps its own platform flavor.
  String platformName = cxxPlatform.getFlavor().getName();
  Flavor[] actualWatchFlavors;
  if (ApplePlatform.isSimulator(platformName)) {
    actualWatchFlavors = new Flavor[] {WATCH_SIMULATOR_FLAVOR};
  } else if (platformName.startsWith(ApplePlatform.IPHONEOS.getName())
      || platformName.startsWith(ApplePlatform.WATCHOS.getName())) {
    actualWatchFlavors = new Flavor[] {WATCH_OS_FLAVOR, WATCH_OS_64_32_FLAVOR};
  } else {
    actualWatchFlavors = new Flavor[] {InternalFlavor.of(platformName)};
  }

  // Binary targets are resolved separately (see AbstractAppleBundleDescriptionArg.getBinaryTargets),
  // so they are excluded from the generic dep-flavoring below.
  ImmutableSortedSet<BuildTarget> binaryTargets = constructorArg.getBinaryTargets();
  FluentIterable<BuildTarget> depsExcludingBinary =
      FluentIterable.from(constructorArg.getDeps()).filter(dep -> !binaryTargets.contains(dep));

  // Propagate platform flavors. Need special handling for watch to map the pseudo-flavor
  // watch to the actual watch platform (simulator or device) so can't use
  // Flavors.propagateFlavorsInDomainIfNotPresent()
  {
    FluentIterable<BuildTarget> targetsWithPlatformFlavors =
        depsExcludingBinary.filter(
            Flavors.containsFlavors(cxxPlatformsProvider.getCxxPlatforms())::test);
    FluentIterable<BuildTarget> targetsWithoutPlatformFlavors =
        depsExcludingBinary.filter(
            Flavors.containsFlavors(cxxPlatformsProvider.getCxxPlatforms()).negate()::test);
    // Deps carrying the pseudo "watch" flavor get it swapped for the concrete flavor(s)
    // computed above.
    FluentIterable<BuildTarget> watchTargets =
        targetsWithoutPlatformFlavors
            .filter(Flavors.containsFlavor(WATCH)::test)
            .transform(
                input -> input.withoutFlavors(WATCH).withAppendedFlavors(actualWatchFlavors));
    targetsWithoutPlatformFlavors =
        targetsWithoutPlatformFlavors.filter(Flavors.containsFlavor(WATCH).negate()::test);

    // Gather all the deps now that we've added platform flavors to everything.
    depsExcludingBinary =
        targetsWithPlatformFlavors
            .append(watchTargets)
            .append(
                Flavors.propagateFlavorDomains(
                    buildTarget,
                    ImmutableSet.of(cxxPlatformsProvider.getCxxPlatforms()),
                    targetsWithoutPlatformFlavors));
  }

  // Propagate some flavors
  depsExcludingBinary =
      Flavors.propagateFlavorsInDomainIfNotPresent(
          StripStyle.FLAVOR_DOMAIN, buildTarget, depsExcludingBinary);
  depsExcludingBinary =
      Flavors.propagateFlavorsInDomainIfNotPresent(
          AppleDebugFormat.FLAVOR_DOMAIN, buildTarget, depsExcludingBinary);
  depsExcludingBinary =
      Flavors.propagateFlavorsInDomainIfNotPresent(
          LinkerMapMode.FLAVOR_DOMAIN, buildTarget, depsExcludingBinary);

  // The codesign tool provider may contribute its own parse-time deps.
  if (fatBinaryInfo.isPresent()) {
    depsExcludingBinary =
        depsExcludingBinary.append(
            fatBinaryInfo
                .get()
                .getRepresentativePlatform()
                .getCodesignProvider()
                .getParseTimeDeps());
  } else {
    depsExcludingBinary =
        depsExcludingBinary.append(
            appleCxxPlatformsFlavorDomain
                .getValue(buildTarget)
                .map(platform -> platform.getCodesignProvider().getParseTimeDeps())
                .orElse(ImmutableSet.of()));
  }

  extraDepsBuilder.addAll(depsExcludingBinary);
}

@Override
public <U> Optional<U> createMetadata(
    BuildTarget buildTarget,
    ActionGraphBuilder graphBuilder,
    CellPathResolver cellRoots,
    AppleBundleDescriptionArg args,
    Optional<ImmutableMap<BuildTarget, Version>> selectedVersions,
    Class<U> metadataClass) {
  if (metadataClass.isAssignableFrom(FrameworkDependencies.class)) {
    // Bundles should be opaque to framework dependencies.
    return Optional.empty();
  }
  // Otherwise delegate metadata creation to the platform-specific binary of this bundle.
  CxxPlatformsProvider cxxPlatformsProvider = getCxxPlatformsProvider();
  FlavorDomain<AppleCxxPlatform> appleCxxPlatforms = getAppleCxxPlatformFlavorDomain();
  AppleCxxPlatform appleCxxPlatform =
      ApplePlatforms.getAppleCxxPlatformForBuildTarget(
          cxxPlatformsProvider,
          appleCxxPlatforms,
          buildTarget,
          MultiarchFileInfos.create(appleCxxPlatforms, buildTarget));
  BuildTarget binaryTarget =
      AppleDescriptions.getTargetPlatformBinary(
          args.getBinary(), args.getPlatformBinary(), appleCxxPlatform.getFlavor());
  return graphBuilder.requireMetadata(binaryTarget, metadataClass);
}

/** Looks up the Apple C++ platform flavor domain from the toolchain provider. */
private FlavorDomain<AppleCxxPlatform> getAppleCxxPlatformFlavorDomain() {
  AppleCxxPlatformsProvider appleCxxPlatformsProvider =
      toolchainProvider.getByName(
          AppleCxxPlatformsProvider.DEFAULT_NAME, AppleCxxPlatformsProvider.class);
  return appleCxxPlatformsProvider.getAppleCxxPlatforms();
}

/** Looks up the C++ platforms provider from the toolchain provider. */
private CxxPlatformsProvider getCxxPlatformsProvider() {
  return toolchainProvider.getByName(
      CxxPlatformsProvider.DEFAULT_NAME, CxxPlatformsProvider.class);
}

/**
 * Constructor arg for apple_bundle rules; the concrete immutable implementation
 * (AppleBundleDescriptionArg) is generated from this interface by the Value.Immutable processor.
 */
@BuckStyleImmutable
@Value.Immutable
interface AbstractAppleBundleDescriptionArg
    extends CommonDescriptionArg,
        HasAppleBundleFields,
        HasAppleCodesignFields,
        HasDefaultPlatform,
        HasDeclaredDeps,
        HasTests {

  // binary should not be immediately added as a dependency, since in case there is platform
  // binary matching target platform exists, it will be used as an actual dependency.
  @Hint(isTargetGraphOnlyDep = true)
  Optional<BuildTarget> getBinary();

  // similar to binary attribute but provides a way to select a platform-specific binary
  @Hint(isTargetGraphOnlyDep = true)
  Optional<PatternMatchedCollection<BuildTarget>> getPlatformBinary();

  /**
   * Returns all binary targets of this bundle, which includes default and platform-specific ones.
   */
  default ImmutableSortedSet<BuildTarget> getBinaryTargets() {
    ImmutableSortedSet.Builder<BuildTarget> binaryTargetsBuilder =
        ImmutableSortedSet.naturalOrder();
    if (getBinary().isPresent()) {
      binaryTargetsBuilder.add(getBinary().get());
    }
    if (getPlatformBinary().isPresent()) {
      binaryTargetsBuilder.addAll(getPlatformBinary().get().getValues());
    }
    return binaryTargetsBuilder.build();
  }

  // ibtool take --module <PRODUCT_MODULE_NAME> arguments to override
  // customModule field set on its elements. (only when customModuleProvider="target")
  // Module (so far, it seems to only represent swift module) contains the
  // implementation of the declared element in nib file.
  Optional<Boolean> getIbtoolModuleFlag();

  Optional<ImmutableList<String>> getIbtoolFlags();

  @Override
  @Hint(isDep = false)
  @Value.NaturalOrder
  ImmutableSortedSet<BuildTarget> getDeps();
}
}
apache-2.0
WuSicheng54321/Thinking-in-Java-4th
ThinkingInJava09/src/Test15.java
118
abstract class A151{ void a151(){} void a1512(){} } class A152 extends A151{ } public class Test15 { }
apache-2.0
q1q1w1w1q/Twitter-Client
app/src/main/java/allenwang/twitterclient/UserDatailActivity.java
2896
package allenwang.twitterclient; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v7.app.AppCompatActivity; import android.widget.ImageView; import android.widget.RelativeLayout; import android.widget.TextView; import com.bumptech.glide.Glide; import com.twitter.sdk.android.core.TwitterSession; import com.twitter.sdk.android.core.models.User; import allenwang.twitterclient.http.MyTwitterApiCllient; import allenwang.twitterclient.viewpager.TimeLineFragment; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; /** * Created by allenwang on 2017/3/11. */ public class UserDatailActivity extends AppCompatActivity { long userId; private ImageView pictureImageView; private TextView taglineTextView; private TextView followerTextView; private TextView followingTextView; private RelativeLayout twitterFragmentContainer; @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_user_detail); pictureImageView = (ImageView) findViewById(R.id.iv_picture); taglineTextView = (TextView) findViewById(R.id.tv_tagline); followerTextView = (TextView) findViewById(R.id.tv_followers); followingTextView = (TextView) findViewById(R.id.tv_following); twitterFragmentContainer = (RelativeLayout) findViewById(R.id.tweet_frament_container); userId = getIntent().getLongExtra(Constant.KEY_USER_ID, TwitterSession.UNKNOWN_USER_ID); //Toast.makeText(this, String.valueOf(userId), Toast.LENGTH_SHORT).show(); getUserInfo(); getTwiiterFragment(); } private void getTwiiterFragment() { TimeLineFragment fragment = TimeLineFragment.newInstance(userId, TimeLineFragment.TWEET); getSupportFragmentManager() .beginTransaction() .replace(R.id.tweet_frament_container, fragment, "") .commit(); } private void getUserInfo(){ MyTwitterApiCllient client = new MyTwitterApiCllient(); Call call = client.getUserService().show(userId, null, false); call.enqueue(new 
Callback<User>() { @Override public void onResponse(Call<User> call, Response<User> response) { User user = response.body(); if (user == null) return; Glide.with(UserDatailActivity.this).load(user.profileImageUrl).into(pictureImageView); taglineTextView.setText(user.description); followerTextView.setText("#follower : " +user.followersCount); followingTextView.setText("#following : " +user.friendsCount); } @Override public void onFailure(Call<User> call, Throwable t) { } }); } }
apache-2.0
spring-projects/spring-data-examples
jpa/deferred/src/main/java/example/model/Customer995.java
624
package example.model; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; @Entity public class Customer995 { @Id @GeneratedValue(strategy = GenerationType.AUTO) private long id; private String firstName; private String lastName; protected Customer995() {} public Customer995(String firstName, String lastName) { this.firstName = firstName; this.lastName = lastName; } @Override public String toString() { return String.format("Customer995[id=%d, firstName='%s', lastName='%s']", id, firstName, lastName); } }
apache-2.0
gavin2lee/generic-support
auth-admin-api/src/test/java/com/generic/support/admin/annotation/RepositoryTestContext.java
729
package com.generic.support.admin.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.springframework.test.annotation.Rollback; import org.springframework.test.context.ContextConfiguration; import org.springframework.transaction.annotation.Transactional; @Documented @Inherited @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @ContextConfiguration(locations = { "classpath:spring/support-auth-manager-repository-test.xml" }) @Rollback(true) @Transactional public @interface RepositoryTestContext { }
apache-2.0
myc0058/PayPushServer
PayPushServer/src/com/myc0058/paypush/Controller/AndroidController.java
4323
package com.myc0058.paypush.Controller;

import java.util.List;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import com.google.android.gcm.server.MulticastResult;
import com.google.android.gcm.server.Result;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.myc0058.paypush.Android.AndroidBillingHelper;
import com.myc0058.paypush.Android.GCMSender;
import com.myc0058.paypush.Params.AndroPushParam;
import com.myc0058.paypush.Params.AndroPushsParam;
import com.myc0058.paypush.Params.AndroReceiptParam;
import com.myc0058.paypush.Response.AndroPushResponse;
import com.myc0058.paypush.Response.AndroPushsResponse;
import com.myc0058.paypush.Response.AndroReceiptResponse;
import com.myc0058.paypush.Strings.PayPushServletCommands;
import com.myc0058.paypush.settings.PayPushGlobalConsts;

/**
 * Spring Controller for Android.
 *
 * @author YoungChul Mo
 * @since 2016-04-24
 */
@Controller
public class AndroidController {

    /**
     * Shared Gson instance. Gson is thread-safe and intended to be reused; previously each
     * request built a fresh GsonBuilder/Gson, which is needless per-request allocation.
     */
    private static final Gson GSON = new GsonBuilder().serializeNulls().create();

    /**
     * Check android receipt validation.
     *
     * <p>NOTE(review): method name contains a typo ("Reciept"); kept as-is because the
     * method is public and the HTTP mapping is unaffected either way.
     *
     * @param data data json string.
     * @return Response json string.
     */
    @RequestMapping(value = PayPushServletCommands.ANDRO_RECEIPT)
    public @ResponseBody String androReciept(@RequestParam("data") String data) {
        AndroReceiptParam param = GSON.fromJson(data, AndroReceiptParam.class);

        AndroidBillingHelper helper = new AndroidBillingHelper(
                PayPushGlobalConsts.PACKAGE_NAME, PayPushGlobalConsts.RSA_PUBLIC_KEY);

        AndroReceiptResponse response = new AndroReceiptResponse();
        response.setValidationResult(helper.validation(param.getSignature(), param.getSignedData()));
        response.setOrderid(helper.getOutOrderId());
        return GSON.toJson(response, AndroReceiptResponse.class);
    }

    /**
     * Send a GCM Message.
     *
     * @param data data json string
     * @return Response json string
     */
    @RequestMapping(value = PayPushServletCommands.ANDRO_PUSH)
    public @ResponseBody String androSendPush(@RequestParam("data") String data) {
        AndroPushParam param = GSON.fromJson(data, AndroPushParam.class);

        GCMSender sender = new GCMSender();
        // A non-null message id means GCM accepted the message.
        Result result = sender.sendMessage(param.getRegistrationID(), param.getDataList());

        AndroPushResponse response = new AndroPushResponse();
        response.setSuccess(result.getMessageId() != null);
        response.setRegistrationID(param.getRegistrationID());
        response.setCanonicalID(result.getCanonicalRegistrationId());
        // NOTE(review): the error code is copied only when the push SUCCEEDED, which looks
        // inverted (error codes normally accompany failures) -- confirm intended before changing.
        if (response.isSuccess()) {
            //ErrorCodeName Constants : ex) Constants.ERROR_BLABLA
            response.setErrorCodeName(result.getErrorCodeName());
        }
        return GSON.toJson(response, AndroPushResponse.class);
    }

    /**
     * Send GCM messages.
     *
     * @param data data json string
     * @return Response json string
     */
    @RequestMapping(value = PayPushServletCommands.ANDRO_PUSHS)
    public @ResponseBody String androSendPushs(@RequestParam("data") String data) {
        AndroPushsParam param = GSON.fromJson(data, AndroPushsParam.class);

        GCMSender sender = new GCMSender();
        MulticastResult result = sender.sendMessages(
                param.getRegistrationIDs(), param.getDataList());

        AndroPushsResponse response = new AndroPushsResponse();
        response.setSuccessCount(result.getSuccess());
        response.setFailCount(result.getFailure());
        response.setCanonicalCount(result.getCanonicalIds());

        // Optionally echo a per-registration-id breakdown; results are index-aligned
        // with the request's registration id list.
        if (param.isMoreDetail()) {
            List<Result> pushResults = result.getResults();
            for (int i = 0; i < pushResults.size(); i++) {
                AndroPushResponse temp = new AndroPushResponse();
                Result pushResult = pushResults.get(i);
                temp.setSuccess(pushResult.getMessageId() != null);
                temp.setRegistrationID(param.getRegistrationIDs().get(i));
                temp.setCanonicalID(pushResult.getCanonicalRegistrationId());
                // NOTE(review): same success/error-code inversion as androSendPush above.
                if (temp.isSuccess()) {
                    //ErrorCodeName Constants : ex) Constants.ERROR_BLABLA
                    temp.setErrorCodeName(pushResult.getErrorCodeName());
                }
                response.getResults().add(temp);
            }
        }
        return GSON.toJson(response, AndroPushsResponse.class);
    }
}
apache-2.0
Celeral/Netlet
src/main/java/com/celeral/netlet/util/CircularBuffer.java
10376
/*
 * Copyright (c) 2013 DataTorrent, Inc. ALL Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.celeral.netlet.util;

import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static java.lang.Thread.sleep;

/**
 * Provides a premium implementation of circular buffer<p>
 * <br>
 * Backed by a power-of-two array indexed with monotonically increasing {@code head}
 * (write position) and {@code tail} (read position) counters masked by {@code buffermask}.
 * Blocking variants poll with {@code spinMillis} sleeps rather than using locks/conditions.
 * <p>
 * NOTE(review): head/tail are volatile but updated with non-atomic read-modify-write
 * ({@code head++}); this is presumably only safe with a single producer and single
 * consumer thread -- confirm intended usage before sharing across multiple producers.
 *
 * @param <T> type of the objects in this buffer.
 * @since 1.0.0
 */
public class CircularBuffer<T> implements UnsafeBlockingQueue<T>
{
  // Storage; length is a power of two so (index & buffermask) wraps cheaply.
  private final T[] buffer;
  // buffer.length - 1; used as the wrap-around mask.
  private final int buffermask;
  // Sleep granularity (ms) for the blocking put/take/offer/poll variants.
  private final int spinMillis;
  // Monotonic counters: next slot to read (tail) and next slot to write (head).
  protected volatile long tail;
  protected volatile long head;

  /**
   *
   * Constructing a circular buffer of 'n' integers<p>
   * <br>
   * The actual capacity is n rounded up to the next power of two.
   *
   * @param n size of the buffer to be constructed
   * @param spin time in milliseconds for which to wait before checking for expected value if it's missing
   * <br>
   */
  @SuppressWarnings("unchecked")
  public CircularBuffer(int n, int spin)
  {
    int i = 1;
    while (i < n) {
      i <<= 1;
    }

    buffer = (T[])new Object[i];
    buffermask = i - 1;

    spinMillis = spin;
  }

  // Shares the given backing array; used only by getWhitehole() to alias this buffer.
  private CircularBuffer(T[] buffer, int buffermask, int spinMillis)
  {
    this.buffer = buffer;
    this.buffermask = buffermask;
    this.spinMillis = spinMillis;
  }

  /**
   *
   * Constructing a circular buffer of 'n' integers<p>
   * <br>
   * Uses a default spin interval of 10 ms.
   *
   * @param n size of the buffer to be constructed
   * <br>
   */
  public CircularBuffer(int n)
  {
    this(n, 10);
  }

  // Adds e or throws if full. NOTE(review): the full check here is
  // head - tail <= buffermask (capacity = buffermask + 1), whereas put() and the timed
  // offer() use head - tail < buffermask, which never fills the last slot -- the two
  // conditions look inconsistent; confirm which is intended.
  @Override
  public boolean add(T e)
  {
    if (head - tail <= buffermask) {
      buffer[(int)(head & buffermask)] = e;
      head++;
      return true;
    }

    throw new IllegalStateException("Collection is full");
  }

  // Removes and returns the oldest element, nulling the slot to avoid retaining a reference.
  @Override
  public T remove()
  {
    if (head > tail) {
      final int pos = (int)(tail & buffermask);
      T t = buffer[pos];
      buffer[pos] = null;
      tail++;
      return t;
    }

    throw new IllegalStateException("Collection is empty");
  }

  // Returns the oldest element without consuming it, or null when empty.
  @Override
  public T peek()
  {
    if (head > tail) {
      return buffer[(int)(tail & buffermask)];
    }

    return null;
  }

  @Override
  public int size()
  {
    return (int)(head - tail);
  }

  /**
   *
   * Total design capacity of the buffer<p>
   * <br>
   *
   * @return Total return capacity of the buffer
   * <br>
   */
  public int capacity()
  {
    return buffermask + 1;
  }

  // Drains every currently visible element into the container; slots are NOT nulled here
  // (unlike remove/poll), so drained references linger in the array until overwritten.
  @Override
  public int drainTo(Collection<? super T> container)
  {
    int size = size();

    while (head > tail) {
      container.add(buffer[(int)(tail & buffermask)]);
      tail++;
    }

    return size;
  }

  @Override
  public String toString()
  {
    return "head=" + head + ", tail=" + tail + ", capacity=" + (buffermask + 1);
  }

  // Non-throwing add; returns false when full (same full-check as add()).
  @Override
  public boolean offer(T e)
  {
    if (head - tail <= buffermask) {
      buffer[(int)(head & buffermask)] = e;
      head++;
      return true;
    }

    return false;
  }

  // Blocking insert: spins with sleep(spinMillis) until a slot frees up.
  // NOTE(review): uses '<' where add()/offer() use '<=' -- see add().
  @Override
  @SuppressWarnings("SleepWhileInLoop")
  public void put(T e) throws InterruptedException
  {
    do {
      if (head - tail < buffermask) {
        buffer[(int)(head & buffermask)] = e;
        head++;
        return;
      }

      Thread.sleep(spinMillis);
    }
    while (true);
  }

  // Timed insert: retries in spinMillis steps until the timeout budget is spent.
  @Override
  @SuppressWarnings("SleepWhileInLoop")
  public boolean offer(T e, long timeout, TimeUnit unit) throws InterruptedException
  {
    long millis = unit.toMillis(timeout);
    do {
      if (head - tail < buffermask) {
        buffer[(int)(head & buffermask)] = e;
        head++;
        return true;
      }

      Thread.sleep(spinMillis);
    }
    while ((millis -= spinMillis) >= 0);

    return false;
  }

  // Blocking remove: spins until an element is available.
  @Override
  @SuppressWarnings("SleepWhileInLoop")
  public T take() throws InterruptedException
  {
    do {
      if (head > tail) {
        final int pos = (int)(tail & buffermask);
        T t = buffer[pos];
        buffer[pos] = null;
        tail++;
        return t;
      }

      Thread.sleep(spinMillis);
    }
    while (true);
  }

  // Timed remove: like take() but gives up (returns null) after the timeout budget.
  @Override
  @SuppressWarnings("SleepWhileInLoop")
  public T poll(long timeout, TimeUnit unit) throws InterruptedException
  {
    long millis = unit.toMillis(timeout);
    do {
      if (head > tail) {
        final int pos = (int)(tail & buffermask);
        T t = buffer[pos];
        buffer[pos] = null;
        tail++;
        return t;
      }

      Thread.sleep(spinMillis);
    }
    while ((millis -= spinMillis) >= 0);

    return null;
  }

  @Override
  public int remainingCapacity()
  {
    return buffermask + 1 - (int)(head - tail);
  }

  @Override
  public boolean remove(Object o)
  {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean contains(Object o)
  {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  // Bounded drain; unlike drainTo(Collection), this one nulls the drained slots.
  @Override
  public int drainTo(final Collection<? super T> collection, final int maxElements)
  {
    int i = -1;
    while (++i < maxElements && head > tail) {
      final int pos = (int)(tail & buffermask);
      collection.add(buffer[pos]);
      buffer[pos] = null;
      tail++;
    }

    return i;
  }

  // Non-throwing remove; returns null when empty.
  @Override
  public T poll()
  {
    if (head > tail) {
      final int pos = (int)(tail & buffermask);
      T t = buffer[pos];
      buffer[pos] = null;
      tail++;
      return t;
    }

    return null;
  }

  // Unchecked remove: caller must have already verified the buffer is non-empty.
  @Override
  public T pollUnsafe()
  {
    final int pos = (int)(tail & buffermask);
    T t = buffer[pos];
    buffer[pos] = null;
    tail++;
    return t;
  }

  @Override
  public T element()
  {
    if (head > tail) {
      return buffer[(int)(tail & buffermask)];
    }

    throw new IllegalStateException("Collection is empty");
  }

  @Override
  public boolean isEmpty()
  {
    return head == tail;
  }

  /**
   * Iterator over a snapshot of the head/tail positions taken at construction time.
   * Iteration does not consume elements from the buffer; remove() only nulls the slot
   * of the most recently returned element.
   */
  private class FrozenIterator implements Iterator<T>, Iterable<T>, Cloneable
  {
    private final long frozenHead;
    private final long frozenTail;
    private long tail;

    FrozenIterator()
    {
      this(CircularBuffer.this.head, CircularBuffer.this.tail);
    }

    FrozenIterator(long frozenHead, long frozenTail)
    {
      this.frozenHead = frozenHead;
      this.frozenTail = frozenTail;
      this.tail = frozenTail;
    }

    @Override
    public boolean hasNext()
    {
      return tail < frozenHead;
    }

    @Override
    public T next()
    {
      return buffer[(int)(tail++ & buffermask)];
    }

    @Override
    public void remove()
    {
      buffer[(int)((tail - 1) & buffermask)] = null;
    }

    // Each call yields a fresh iterator over the same frozen bounds.
    @Override
    public Iterator<T> iterator()
    {
      return new FrozenIterator(frozenHead, frozenTail);
    }
  }

  public Iterator<T> getFrozenIterator()
  {
    return new FrozenIterator();
  }

  public Iterable<T> getFrozenIterable()
  {
    return new FrozenIterator();
  }

  // Consuming iterator: next() removes elements from the live buffer (advances tail).
  @Override
  public Iterator<T> iterator()
  {
    return new Iterator<T>()
    {
      @Override
      public boolean hasNext()
      {
        return head > tail;
      }

      @Override
      public T next()
      {
        final int pos = (int)(tail & buffermask);
        T t = buffer[pos];
        buffer[pos] = null;
        tail++;
        return t;
      }

      // Element was already consumed by next(); nothing more to do.
      @Override
      public void remove()
      {
      }
    };
  }

  // Consuming conversion: drains the buffer into a new array.
  @Override
  public Object[] toArray()
  {
    final int count = (int)(head - tail);
    Object[] array = new Object[count];
    for (int i = 0; i < count; i++) {
      final int pos = (int)(tail & buffermask);
      array[i] = buffer[pos];
      buffer[pos] = null;
      tail++;
    }

    return array;
  }

  // Consuming conversion into the supplied array (replaced when too small).
  @Override
  @SuppressWarnings("unchecked")
  public <T> T[] toArray(T[] a)
  {
    int count = (int)(head - tail);
    if (a.length < count) {
      a = (T[])new Object[count];
    }

    for (int i = 0; i < count; i++) {
      final int pos = (int)(tail & buffermask);
      a[i] = (T)buffer[pos];
      buffer[pos] = null;
      tail++;
    }

    return a;
  }

  @Override
  public boolean containsAll(Collection<?> c)
  {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean addAll(Collection<? extends T> c)
  {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean removeAll(Collection<?> c)
  {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public boolean retainAll(Collection<?> c)
  {
    throw new UnsupportedOperationException("Not supported yet.");
  }

  @Override
  public void clear()
  {
    head = 0;
    tail = 0;
    Arrays.fill(buffer, null);
  }

  // Unchecked peek: caller must have already verified the buffer is non-empty.
  @Override
  public T peekUnsafe()
  {
    return buffer[(int)(tail & buffermask)];
  }

  /**
   * Returns a view sharing this buffer's backing array whose insert operations are
   * disabled: add/addAll throw with the given message, offer variants report failure,
   * put blocks forever, and remainingCapacity reports 0. Reads still work.
   */
  public CircularBuffer<T> getWhitehole(final String exceptionMessage)
  {
    CircularBuffer<T> cb = new CircularBuffer<T>(buffer, buffermask, spinMillis)
    {
      @Override
      public boolean add(T e)
      {
        throw new IllegalStateException(exceptionMessage);
      }

      @Override
      @SuppressWarnings("SleepWhileInLoop")
      public void put(T e) throws InterruptedException
      {
        while (true) {
          sleep(spinMillis);
        }
      }

      @Override
      public boolean offer(T e)
      {
        return false;
      }

      @Override
      public boolean offer(T e, long timeout, TimeUnit unit) throws InterruptedException
      {
        long millis = unit.toMillis(timeout);
        sleep(millis);
        return false;
      }

      @Override
      public int remainingCapacity()
      {
        return 0;
      }

      @Override
      public boolean addAll(Collection<? extends T> c)
      {
        throw new IllegalStateException(exceptionMessage);
      }
    };
    // Start the view at this buffer's current positions.
    cb.head = head;
    cb.tail = tail;
    return cb;
  }

  private static final Logger logger = LoggerFactory.getLogger(CircularBuffer.class);
}
apache-2.0
ebayopensource/turmeric-runtime
codegen/codegen-tools/src/test/resources/CalculatorServiceImpl/gen-src/org/ebayopensource/qaservices/calculatorservice/intf/CalculatorService.java
752
/** * CalculatorService.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: 1.6-wso2v2 Built on : Oct 25, 2010 (02:27:59 MST) */ package org.ebayopensource.qaservices.calculatorservice.intf; /** * CalculatorService java skeleton interface for the axisService */ public interface CalculatorService { /** * Auto generated method signature * * @param add */ public com.ebayopensource.test.soaframework.tools.codegen.AddResponse add ( com.ebayopensource.test.soaframework.tools.codegen.Add add ) ; }
apache-2.0
cuongnt1987/qwap
src/main/java/com/cuongnt/qwap/checker/MobileChecker.java
496
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.cuongnt.qwap.checker; import java.io.Serializable; /** * * @author richard */ public interface MobileChecker extends Serializable { public boolean isMobile(); public boolean isIos(); public boolean isAndroid(); public boolean isWindowPhone(); public int getOsCode(); }
apache-2.0
Atmosphere/atmosphere-samples
nettosphere-samples/chat/src/main/java/org/nettosphere/samples/chat/NettosphereChat.java
2002
/* * Copyright 2008-2022 Async-IO.org * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.nettosphere.samples.chat; import org.atmosphere.nettosphere.Config; import org.atmosphere.nettosphere.Nettosphere; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; /** * A bootstrap class that start Nettosphere and the Atmosphere Chat samples. */ public class NettosphereChat { private static final Logger logger = LoggerFactory.getLogger(Nettosphere.class); public static void main(String[] args) throws IOException { Config.Builder b = new Config.Builder(); b.resource(Chat.class) // For *-distrubution .resource("./webapps") // For mvn exec:java .resource("./src/main/resources") // For running inside an IDE .resource("./nettosphere-samples/chat/src/main/resources") .port(8080).host("0.0.0.0").build(); Nettosphere s = new Nettosphere.Builder().config(b.build()).build(); s.start(); String a = ""; logger.info("NettoSphere Chat Server started on port {}", 8080); logger.info("Type quit to stop the server"); BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); while (!(a.equals("quit"))) { a = br.readLine(); } System.exit(-1); } }
apache-2.0
dturanski/spring-cloud-data
spring-cloud-dataflow-rest-resource/src/main/java/org/springframework/cloud/dataflow/rest/resource/AbstractDefinitionAppStatusResource.java
1731
/* * Copyright 2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.dataflow.rest.resource; import java.util.HashMap; import java.util.Map; import org.springframework.hateoas.ResourceSupport; /** * Resource representing the status of an app in a definition. * * @author Glenn Renfro */ public abstract class AbstractDefinitionAppStatusResource extends ResourceSupport { private String appName; private String dsl; Map<String,String> appStatuses; protected AbstractDefinitionAppStatusResource() { } public AbstractDefinitionAppStatusResource(String appName, String dsl, Map<String, String> appStatuses) { this.dsl = dsl; this.appName = appName; this.appStatuses = new HashMap<>(); this.appStatuses.putAll(appStatuses); } public String getAppName() { return appName; } public void setAppName(String appName) { this.appName = appName; } public String getDsl() { return dsl; } public void setDsl(String dsl) { this.dsl = dsl; } public Map<String, String> getAppStatuses() { return appStatuses; } public void setAppStatuses(Map<String, String> appStatuses) { this.appStatuses = appStatuses; } }
apache-2.0
gyfora/flink
flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/failover/flip1/ExecutionFailureHandlerTest.java
6858
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.executiongraph.failover.flip1;

import org.apache.flink.runtime.execution.SuppressRestartsException;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.scheduler.strategy.ExecutionVertexID;
import org.apache.flink.util.IterableUtils;
import org.apache.flink.util.TestLogger;

import org.junit.Before;
import org.junit.Test;

import java.util.Collections;
import java.util.Set;
import java.util.stream.Collectors;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@link ExecutionFailureHandler}.
 */
public class ExecutionFailureHandlerTest extends TestLogger {

	// Fixed delay the stub backoff strategy reports; asserted against the handler's result.
	private static final long RESTART_DELAY_MS = 1234L;

	private FailoverTopology<?, ?> failoverTopology;

	private TestFailoverStrategy failoverStrategy;

	private TestRestartBackoffTimeStrategy backoffTimeStrategy;

	private ExecutionFailureHandler executionFailureHandler;

	// Builds a single-vertex topology and a handler wired to stub failover/backoff strategies
	// (restarts allowed by default, with RESTART_DELAY_MS delay).
	@Before
	public void setUp() {
		TestFailoverTopology.Builder topologyBuilder = new TestFailoverTopology.Builder();
		topologyBuilder.newVertex();
		failoverTopology = topologyBuilder.build();

		failoverStrategy = new TestFailoverStrategy();
		backoffTimeStrategy = new TestRestartBackoffTimeStrategy(true, RESTART_DELAY_MS);
		executionFailureHandler = new ExecutionFailureHandler(failoverTopology, failoverStrategy, backoffTimeStrategy);
	}

	/**
	 * Tests the case that task restarting is accepted.
	 * Expects: canRestart, the configured delay, the strategy's restart set, getError()
	 * rejected, and the restart counter incremented.
	 */
	@Test
	public void testNormalFailureHandling() {
		final Set<ExecutionVertexID> tasksToRestart = Collections.singleton(
			new ExecutionVertexID(new JobVertexID(), 0));
		failoverStrategy.setTasksToRestart(tasksToRestart);

		// trigger a task failure
		final FailureHandlingResult result = executionFailureHandler.getFailureHandlingResult(
			new ExecutionVertexID(new JobVertexID(), 0),
			new Exception("test failure"));

		// verify results
		assertTrue(result.canRestart());
		assertEquals(RESTART_DELAY_MS, result.getRestartDelayMS());
		assertEquals(tasksToRestart, result.getVerticesToRestart());
		try {
			result.getError();
			fail("Cannot get error when the restarting is accepted");
		} catch (IllegalStateException ex) {
			// expected
		}
		assertEquals(1, executionFailureHandler.getNumberOfRestarts());
	}

	/**
	 * Tests the case that task restarting is suppressed.
	 * Expects: !canRestart, a non-null recoverable error, restart-only accessors rejected,
	 * and the restart counter NOT incremented.
	 */
	@Test
	public void testRestartingSuppressedFailureHandlingResult() {
		// restart strategy suppresses restarting
		backoffTimeStrategy.setCanRestart(false);

		// trigger a task failure
		final FailureHandlingResult result = executionFailureHandler.getFailureHandlingResult(
			new ExecutionVertexID(new JobVertexID(), 0),
			new Exception("test failure"));

		// verify results
		assertFalse(result.canRestart());
		assertNotNull(result.getError());
		assertFalse(ExecutionFailureHandler.isUnrecoverableError(result.getError()));
		try {
			result.getVerticesToRestart();
			fail("get tasks to restart is not allowed when restarting is suppressed");
		} catch (IllegalStateException ex) {
			// expected
		}
		try {
			result.getRestartDelayMS();
			fail("get restart delay is not allowed when restarting is suppressed");
		} catch (IllegalStateException ex) {
			// expected
		}
		assertEquals(0, executionFailureHandler.getNumberOfRestarts());
	}

	/**
	 * Tests the case that the failure is non-recoverable type.
	 * A SuppressRestartsException (even nested as a cause) must block restarting
	 * regardless of the backoff strategy.
	 */
	@Test
	public void testNonRecoverableFailureHandlingResult() {
		// trigger an unrecoverable task failure
		final FailureHandlingResult result = executionFailureHandler.getFailureHandlingResult(
			new ExecutionVertexID(new JobVertexID(), 0),
			new Exception(new SuppressRestartsException(new Exception("test failure"))));

		// verify results
		assertFalse(result.canRestart());
		assertNotNull(result.getError());
		assertTrue(ExecutionFailureHandler.isUnrecoverableError(result.getError()));
		try {
			result.getVerticesToRestart();
			fail("get tasks to restart is not allowed when restarting is suppressed");
		} catch (IllegalStateException ex) {
			// expected
		}
		try {
			result.getRestartDelayMS();
			fail("get restart delay is not allowed when restarting is suppressed");
		} catch (IllegalStateException ex) {
			// expected
		}
		assertEquals(0, executionFailureHandler.getNumberOfRestarts());
	}

	/**
	 * Tests the check for unrecoverable error.
	 * Covers plain, direct, and cause-nested SuppressRestartsException.
	 */
	@Test
	public void testUnrecoverableErrorCheck() {
		// normal error
		assertFalse(ExecutionFailureHandler.isUnrecoverableError(new Exception()));

		// direct unrecoverable error
		assertTrue(ExecutionFailureHandler.isUnrecoverableError(new SuppressRestartsException(new Exception())));

		// nested unrecoverable error
		assertTrue(ExecutionFailureHandler.isUnrecoverableError(
			new Exception(new SuppressRestartsException(new Exception()))));
	}

	// A global failure must restart every vertex of the topology.
	@Test
	public void testGlobalFailureHandling() {
		final FailureHandlingResult result = executionFailureHandler.getGlobalFailureHandlingResult(
			new Exception("test failure"));

		assertEquals(
			IterableUtils.toStream(failoverTopology.getVertices())
				.map(FailoverVertex::getId)
				.collect(Collectors.toSet()),
			result.getVerticesToRestart());
	}

	// ------------------------------------------------------------------------
	//  utilities
	// ------------------------------------------------------------------------

	/**
	 * A FailoverStrategy implementation for tests. It always suggests restarting the given tasks to restart.
	 */
	private static class TestFailoverStrategy implements FailoverStrategy {

		private Set<ExecutionVertexID> tasksToRestart;

		public TestFailoverStrategy() {
		}

		// Sets the canned restart set returned for every failure.
		public void setTasksToRestart(final Set<ExecutionVertexID> tasksToRestart) {
			this.tasksToRestart = tasksToRestart;
		}

		@Override
		public Set<ExecutionVertexID> getTasksNeedingRestart(
				final ExecutionVertexID executionVertexId,
				final Throwable cause) {

			return tasksToRestart;
		}
	}
}
apache-2.0
ramonrabello/devfestnorte-app
android/src/main/java/br/com/devfest/norte/ui/debug/actions/ShowAllDriveFilesDebugAction.java
1125
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package br.com.devfest.norte.ui.debug.actions; import android.content.Context; import android.content.Intent; import br.com.devfest.norte.ui.debug.DebugAction; public class ShowAllDriveFilesDebugAction implements DebugAction { @Override public void run(Context context, final Callback callback) { context.startActivity(new Intent(context, ViewFilesInAppFolderActivity.class)); } @Override public String getLabel() { return "List all files in AppData folder"; } }
apache-2.0
WestCoastInformatics/UMLS-Terminology-Server
integration-test/src/test/java/com/wci/umls/server/test/rest/ProjectServiceRestNormalUseTest.java
4623
/*
 *    Copyright 2016 West Coast Informatics, LLC
 */
package com.wci.umls.server.test.rest;

import static org.junit.Assert.assertNull;

import java.util.Date;
import java.util.HashSet;
import java.util.Set;

import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import com.wci.umls.server.Project;
import com.wci.umls.server.helpers.ProjectList;
import com.wci.umls.server.jpa.ProjectJpa;

/**
 * Implementation of the "Project Service REST Normal Use" Test Cases.
 *
 * <p>Exercises the project CRUD endpoints end-to-end: add, retrieve, update,
 * list, and remove, verifying round-trip equality at each step.
 */
public class ProjectServiceRestNormalUseTest extends ProjectServiceRestTest {

  /** The admin auth token, refreshed before each test. */
  private static String authToken;

  /**
   * Create test fixtures per test.
   *
   * @throws Exception the exception
   */
  @Override
  @Before
  public void setup() throws Exception {
    // authenticate as admin; all calls below use this token
    authToken =
        securityService.authenticate(adminUser, adminPassword).getAuthToken();
  }

  /**
   * Test get, update, and remove project.
   *
   * @throws Exception the exception
   */
  @Test
  public void testAddUpdateRemoveProject() throws Exception {
    Logger.getLogger(getClass()).debug("TEST " + name.getMethodName());

    // Add a project (timestamped name avoids collisions across runs).
    // NOTE: an unused "values" set previously built here was removed.
    Logger.getLogger(getClass()).info(" Add project");
    ProjectJpa project = new ProjectJpa();
    project.setDescription("Sample");
    project.setName("Sample " + new Date().getTime());
    project.setTerminology("MTH");
    project.setWorkflowPath("DEFAULT");
    ProjectJpa project2 =
        (ProjectJpa) projectService.addProject(project, authToken);
    // TEST: retrieve the project and verify it is equal
    Assert.assertEquals(project, project2);

    // Update that newly added project
    Logger.getLogger(getClass()).info(" Update project");
    project2.setName("Sample 2 " + new Date().getTime());
    projectService.updateProject(project2, authToken);
    Project project3 = projectService.getProject(project2.getId(), authToken);
    // TEST: retrieve the project and verify it is equal
    Assert.assertEquals(project2, project3);

    // Remove the project
    Logger.getLogger(getClass()).info(" Remove project");
    projectService.removeProject(project2.getId(), authToken);

    // TEST: verify that it is removed (call should return null)
    project3 = projectService.getProject(project2.getId(), authToken);
    assertNull(project3);
  }

  /**
   * Test adding and finding projects via getProjects().
   *
   * @throws Exception the exception
   */
  @Test
  public void testAddProjects() throws Exception {
    Logger.getLogger(getClass()).debug("TEST " + name.getMethodName());

    // Add a project.
    // NOTE: an unused "values" set previously built here was removed.
    Logger.getLogger(getClass()).info(" Add project");
    ProjectJpa project = new ProjectJpa();
    project.setDescription("Sample");
    project.setName("Sample " + new Date().getTime());
    project.setTerminology("MTH");
    project.setWorkflowPath("DEFAULT");
    // copy before the add so the second project starts from the same state
    ProjectJpa project2 = new ProjectJpa(project);
    project = (ProjectJpa) projectService.addProject(project, authToken);

    // Add a second project
    Logger.getLogger(getClass()).info(" Add second project");
    project2.setName("Sample 2 " + new Date().getTime());
    project2.setDescription("Sample 2");
    project2.setTerminology("MTH");
    project2.setWorkflowPath("DEFAULT");
    project2 = (ProjectJpa) projectService.addProject(project2, authToken);

    // Get the projects; both new projects must be present
    Logger.getLogger(getClass()).info(" Get the projects");
    ProjectList projectList = projectService.findProjects(null, null, authToken);
    int projectCount = projectList.size();
    Assert.assertTrue(projectList.contains(project));
    Assert.assertTrue(projectList.contains(project2));

    // remove first project; count must drop by one
    Logger.getLogger(getClass()).info(" Remove first project");
    projectService.removeProject(project.getId(), authToken);
    projectList = projectService.findProjects(null, null, authToken);
    Assert.assertEquals(projectCount - 1, projectList.size());

    // remove second project; count must drop again
    Logger.getLogger(getClass()).info(" Remove second project");
    projectService.removeProject(project2.getId(), authToken);
    projectList = projectService.findProjects(null, null, authToken);
    Assert.assertEquals(projectCount - 2, projectList.size());
  }

  /**
   * Teardown.
   *
   * @throws Exception the exception
   */
  @Override
  @After
  public void teardown() throws Exception {
    // logout
    securityService.logout(authToken);
  }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-iot1clickprojects/src/main/java/com/amazonaws/services/iot1clickprojects/model/transform/InternalFailureExceptionUnmarshaller.java
3170
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.iot1clickprojects.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.iot1clickprojects.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * JSON unmarshaller for the service's InternalFailureException error shape.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InternalFailureExceptionUnmarshaller extends EnhancedJsonErrorUnmarshaller {

    /** Lazily created singleton (construction is idempotent, so the unsynchronized check is benign). */
    private static InternalFailureExceptionUnmarshaller instance;

    private InternalFailureExceptionUnmarshaller() {
        super(com.amazonaws.services.iot1clickprojects.model.InternalFailureException.class, "InternalFailureException");
    }

    /**
     * Reads an InternalFailureException from the JSON error payload.
     *
     * @return the populated exception, or {@code null} if the payload is a JSON null
     */
    @Override
    public com.amazonaws.services.iot1clickprojects.model.InternalFailureException unmarshallFromContext(JsonUnmarshallerContext context) throws Exception {
        com.amazonaws.services.iot1clickprojects.model.InternalFailureException result =
                new com.amazonaws.services.iot1clickprojects.model.InternalFailureException(null);

        // Depth bookkeeping: members of this object live one level below the
        // depth at which parsing starts.
        final int originalDepth = context.getCurrentDepth();
        final String currentParentElement = context.getCurrentParentElement();
        final int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null) {
            token = context.nextToken();
        }
        if (token == VALUE_NULL) {
            // a JSON null payload unmarshalls to null, not an empty exception
            return null;
        }

        while (token != null) {
            if (token == FIELD_NAME || token == START_OBJECT) {
                // only the "code" member at the expected depth carries data
                if (context.testExpression("code", targetDepth)) {
                    context.nextToken();
                    result.setCode(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // stop once we close back out to (or above) the starting depth
                // within the same parent element
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth) {
                        break;
                    }
                }
            }
            token = context.nextToken();
        }

        return result;
    }

    public static InternalFailureExceptionUnmarshaller getInstance() {
        if (instance == null) {
            instance = new InternalFailureExceptionUnmarshaller();
        }
        return instance;
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-inspector2/src/main/java/com/amazonaws/services/inspector2/model/transform/ListTagsForResourceRequestMarshaller.java
2066
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.inspector2.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.inspector2.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * ListTagsForResourceRequestMarshaller
 *
 * <p>Generated marshaller that copies the fields of a
 * {@link ListTagsForResourceRequest} into a {@link ProtocolMarshaller}.
 * Stateless, hence safe to share via the single static instance.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListTagsForResourceRequestMarshaller {

    // Binds the request's resourceArn to the "resourceArn" segment of the URI
    // path (MarshallLocation.PATH), not to the JSON body.
    private static final MarshallingInfo<String> RESOURCEARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PATH)
            .marshallLocationName("resourceArn").build();

    // Shared singleton; the class holds no mutable state.
    private static final ListTagsForResourceRequestMarshaller instance = new ListTagsForResourceRequestMarshaller();

    public static ListTagsForResourceRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if the request is null or marshalling fails;
     *         the original cause is preserved when wrapping.
     */
    public void marshall(ListTagsForResourceRequest listTagsForResourceRequest, ProtocolMarshaller protocolMarshaller) {

        if (listTagsForResourceRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(listTagsForResourceRequest.getResourceArn(), RESOURCEARN_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
apache-2.0
hbarnard/fcrepo-phaidra
fcrepo-common/target/generated-sources/cxf/org/fcrepo/server/types/gen/GetNextPID.java
2321
package org.fcrepo.server.types.gen;

import java.math.BigInteger;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for anonymous complex type.
 *
 * <p>JAXB-generated binding for the {@code getNextPID} request element.
 * Both members are declared required but nillable in the schema, so
 * {@code null} values are legal here.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="numPIDs" type="{http://www.w3.org/2001/XMLSchema}nonNegativeInteger"/>
 *         &lt;element name="pidNamespace" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "numPIDs",
    "pidNamespace"
})
@XmlRootElement(name = "getNextPID")
public class GetNextPID {

    // xsd:nonNegativeInteger maps to BigInteger; nillable per the schema
    @XmlElement(required = true, nillable = true)
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger numPIDs;
    // nillable per the schema
    @XmlElement(required = true, nillable = true)
    protected String pidNamespace;

    /**
     * Gets the value of the numPIDs property.
     *
     * @return
     *     possible object is
     *     {@link BigInteger }
     *
     */
    public BigInteger getNumPIDs() {
        return numPIDs;
    }

    /**
     * Sets the value of the numPIDs property.
     *
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *
     */
    public void setNumPIDs(BigInteger value) {
        this.numPIDs = value;
    }

    /**
     * Gets the value of the pidNamespace property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPidNamespace() {
        return pidNamespace;
    }

    /**
     * Sets the value of the pidNamespace property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPidNamespace(String value) {
        this.pidNamespace = value;
    }
}
apache-2.0
G4Studio/tobacco
G4Studio5/src/org/g4studio/core/orm/xibatis/sqlmap/engine/mapping/sql/simple/SimpleDynamicSql.java
2905
package org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.sql.simple;

import java.util.StringTokenizer;

import org.g4studio.core.orm.xibatis.common.beans.Probe;
import org.g4studio.core.orm.xibatis.common.beans.ProbeFactory;
import org.g4studio.core.orm.xibatis.sqlmap.client.SqlMapException;
import org.g4studio.core.orm.xibatis.sqlmap.engine.impl.SqlMapExecutorDelegate;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.parameter.ParameterMap;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.result.ResultMap;
import org.g4studio.core.orm.xibatis.sqlmap.engine.mapping.sql.Sql;
import org.g4studio.core.orm.xibatis.sqlmap.engine.scope.StatementScope;

/**
 * {@link Sql} implementation for "simple dynamic" statements: SQL text
 * containing <code>$property$</code> placeholders that are substituted with
 * values read from the parameter object at execution time. A doubled
 * delimiter (<code>$$</code>) produces a literal <code>$</code>.
 */
public class SimpleDynamicSql implements Sql {

	/** Reflection helper used to read placeholder values off the parameter object. */
	private static final Probe PROBE = ProbeFactory.getProbe();

	/** Delimiter that opens and closes a dynamic element. */
	private static final String ELEMENT_TOKEN = "$";

	/** Raw SQL text, possibly containing $...$ placeholders. */
	private String sqlStatement;

	/** Owning executor delegate; supplies the type-handler registry. */
	private SqlMapExecutorDelegate delegate;

	public SimpleDynamicSql(SqlMapExecutorDelegate delegate, String sqlStatement) {
		this.delegate = delegate;
		this.sqlStatement = sqlStatement;
	}

	/** Returns the statement with all dynamic elements substituted. */
	public String getSql(StatementScope statementScope, Object parameterObject) {
		return processDynamicElements(sqlStatement, parameterObject);
	}

	public ParameterMap getParameterMap(StatementScope statementScope, Object parameterObject) {
		return statementScope.getParameterMap();
	}

	public ResultMap getResultMap(StatementScope statementScope, Object parameterObject) {
		return statementScope.getResultMap();
	}

	public void cleanup(StatementScope statementScope) {
		// no per-statement state to release
	}

	/**
	 * Returns true if the given SQL contains at least one '$' delimiter and
	 * therefore needs dynamic-element processing.
	 */
	public static boolean isSimpleDynamicSql(String sql) {
		return sql != null && sql.indexOf(ELEMENT_TOKEN) > -1;
	}

	/**
	 * Replaces every <code>$property$</code> element in {@code sql} with the
	 * corresponding value from {@code parameterObject}; <code>$$</code> is an
	 * escaped literal '$'. Null values render as nothing.
	 *
	 * @throws SqlMapException if a dynamic element is opened but never closed
	 */
	private String processDynamicElements(String sql, Object parameterObject) {
		StringTokenizer parser = new StringTokenizer(sql, ELEMENT_TOKEN, true);
		// StringBuilder instead of StringBuffer: the buffer is method-local,
		// so no synchronization is needed.
		StringBuilder newSql = new StringBuilder();

		String token = null;
		String lastToken = null;
		while (parser.hasMoreTokens()) {
			token = parser.nextToken();
			if (ELEMENT_TOKEN.equals(lastToken)) {
				if (ELEMENT_TOKEN.equals(token)) {
					// "$$" escapes a literal '$'
					newSql.append(ELEMENT_TOKEN);
					token = null;
				} else {
					// token is the property name inside a $...$ element
					Object value = null;
					if (parameterObject != null) {
						if (delegate.getTypeHandlerFactory().hasTypeHandler(parameterObject.getClass())) {
							// simple/known type: substitute the parameter itself
							value = parameterObject;
						} else {
							// bean or map: read the named property
							value = PROBE.getObject(parameterObject, token);
						}
					}
					if (value != null) {
						newSql.append(String.valueOf(value));
					}
					// FIX: an element left open at the very end of the statement
					// previously escaped as NoSuchElementException from
					// StringTokenizer; report it as the intended SqlMapException.
					if (!parser.hasMoreTokens()) {
						throw new SqlMapException("Unterminated dynamic element in sql (" + sql + ").");
					}
					token = parser.nextToken();
					if (!ELEMENT_TOKEN.equals(token)) {
						throw new SqlMapException("Unterminated dynamic element in sql (" + sql + ").");
					}
					token = null;
				}
			} else {
				if (!ELEMENT_TOKEN.equals(token)) {
					// plain SQL text between elements
					newSql.append(token);
				}
			}
			lastToken = token;
		}
		return newSql.toString();
	}
}
apache-2.0
apepper/elasticsearch
core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
149897
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.google.common.base.Joiner; import com.google.common.collect.Iterables; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.BoostableQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder.Operator; import org.elasticsearch.index.query.MatchQueryBuilder.Type; import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder.Field; import 
org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.junit.Test; import java.io.IOException; import java.util.HashMap; import java.util.Map; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.boostingQuery; import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery; import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.missingQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.typeQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNotHighlighted; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.startsWith; public class HighlighterSearchIT extends ESIntegTestCase { @Test public void testHighlightingWithWildcardName() throws IOException { // test the kibana case with * as fieldname that will try highlight all fields including meta fields XContentBuilder mappings = jsonBuilder(); mappings.startObject(); mappings.startObject("type") .startObject("properties") .startObject("text") .field("type", "string") .field("analyzer", "keyword") .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .endObject() .endObject() .endObject(); mappings.endObject(); assertAcked(prepareCreate("test") .addMapping("type", mappings)); ensureYellow(); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("text", "text").endObject()) .get(); refresh(); String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); assertHighlight(search, 0, "text", 0, equalTo("<em>text</em>")); } @Test public void testPlainHighlighterWithLongUnanalyzedStringTerm() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); mappings.startObject("type") 
.startObject("properties") .startObject("long_text") .field("type", "string") .field("analyzer", "keyword") .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .field("ignore_above", 1) .endObject() .startObject("text") .field("type", "string") .field("analyzer", "keyword") .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .endObject() .endObject() .endObject(); mappings.endObject(); assertAcked(prepareCreate("test") .addMapping("type", mappings)); ensureYellow(); // crate a term that is larger than the allowed 32766, index it and then try highlight on it // the search request should still succeed StringBuilder builder = new StringBuilder(); for (int i = 0; i < 32767; i++) { builder.append('a'); } client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("long_text", builder.toString()).field("text", "text").endObject()) .get(); refresh(); String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); assertHighlight(search, 0, "text", 0, equalTo("<em>text</em>")); search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("long_text").highlighterType(highlighter)).get(); assertNoFailures(search); assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); search = client().prepareSearch().setQuery(prefixQuery("text", "te")).addHighlightedField(new Field("long_text").highlighterType(highlighter)).get(); assertNoFailures(search); assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); } @Test public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOException { XContentBuilder mappings = jsonBuilder(); mappings.startObject(); mappings.startObject("type") 
.startObject("_source") .field("enabled", false) .endObject() .startObject("properties") .startObject("unstored_field") .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .field("type", "string") .field("store", "no") .endObject() .startObject("text") .field("index_options", "offsets") .field("term_vector", "with_positions_offsets") .field("type", "string") .field("store", "yes") .endObject() .endObject() .endObject(); mappings.endObject(); assertAcked(prepareCreate("test") .addMapping("type", mappings)); ensureYellow(); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject()) .get(); refresh(); String highlighter = randomFrom(new String[]{"plain", "postings", "fvh"}); SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("*").highlighterType(highlighter)).get(); assertHighlight(search, 0, "text", 0, equalTo("<em>text</em>")); search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))).addHighlightedField(new Field("unstored_text")).get(); assertNoFailures(search); assertThat(search.getHits().getAt(0).getHighlightFields().size(), equalTo(0)); } @Test // see #3486 public void testHighTermFrequencyDoc() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", "name", "type=string,term_vector=with_positions_offsets,store=" + (randomBoolean() ? 
"yes" : "no"))); ensureYellow(); StringBuilder builder = new StringBuilder(); for (int i = 0; i < 6000; i++) { builder.append("abc").append(" "); } client().prepareIndex("test", "test", "1") .setSource("name", builder.toString()) .get(); refresh(); SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "abc"))).addHighlightedField("name").get(); assertHighlight(search, 0, "name", 0, startsWith("<em>abc</em> <em>abc</em> <em>abc</em> <em>abc</em>")); } @Test public void testNgramHighlightingWithBrokenPositions() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", jsonBuilder() .startObject() .startObject("test") .startObject("properties") .startObject("name") .startObject("fields") .startObject("autocomplete") .field("type", "string") .field("analyzer", "autocomplete") .field("search_analyzer", "search_autocomplete") .field("term_vector", "with_positions_offsets") .endObject() .startObject("name") .field("type", "string") .endObject() .endObject() .field("type", "multi_field") .endObject() .endObject() .endObject()) .setSettings(settingsBuilder() .put(indexSettings()) .put("analysis.tokenizer.autocomplete.max_gram", 20) .put("analysis.tokenizer.autocomplete.min_gram", 1) .put("analysis.tokenizer.autocomplete.token_chars", "letter,digit") .put("analysis.tokenizer.autocomplete.type", "nGram") .put("analysis.filter.wordDelimiter.type", "word_delimiter") .putArray("analysis.filter.wordDelimiter.type_table", "& => ALPHANUM", "| => ALPHANUM", "! => ALPHANUM", "? => ALPHANUM", ". 
=> ALPHANUM", "- => ALPHANUM", "# => ALPHANUM", "% => ALPHANUM", "+ => ALPHANUM", ", => ALPHANUM", "~ => ALPHANUM", ": => ALPHANUM", "/ => ALPHANUM", "^ => ALPHANUM", "$ => ALPHANUM", "@ => ALPHANUM", ") => ALPHANUM", "( => ALPHANUM", "] => ALPHANUM", "[ => ALPHANUM", "} => ALPHANUM", "{ => ALPHANUM") .put("analysis.filter.wordDelimiter.type.split_on_numerics", false) .put("analysis.filter.wordDelimiter.generate_word_parts", true) .put("analysis.filter.wordDelimiter.generate_number_parts", false) .put("analysis.filter.wordDelimiter.catenate_words", true) .put("analysis.filter.wordDelimiter.catenate_numbers", true) .put("analysis.filter.wordDelimiter.catenate_all", false) .put("analysis.analyzer.autocomplete.tokenizer", "autocomplete") .putArray("analysis.analyzer.autocomplete.filter", "lowercase", "wordDelimiter") .put("analysis.analyzer.search_autocomplete.tokenizer", "whitespace") .putArray("analysis.analyzer.search_autocomplete.filter", "lowercase", "wordDelimiter"))); ensureYellow(); client().prepareIndex("test", "test", "1") .setSource("name", "ARCOTEL Hotels Deutschland").get(); refresh(); SearchResponse search = client().prepareSearch("test").setTypes("test").setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR)).addHighlightedField("name.autocomplete").execute().actionGet(); assertHighlight(search, 0, "name.autocomplete", 0, equalTo("ARCO<em>TEL</em> Ho<em>tel</em>s <em>Deut</em>schland")); } @Test public void testMultiPhraseCutoff() throws IOException { /* * MultiPhraseQuery can literally kill an entire node if there are too many terms in the * query. 
We cut off and extract terms if there are more than 16 terms in the query */ assertAcked(prepareCreate("test") .addMapping("test", "body", "type=string,analyzer=custom_analyzer,search_analyzer=custom_analyzer,term_vector=with_positions_offsets") .setSettings( settingsBuilder().put(indexSettings()) .put("analysis.filter.wordDelimiter.type", "word_delimiter") .put("analysis.filter.wordDelimiter.type.split_on_numerics", false) .put("analysis.filter.wordDelimiter.generate_word_parts", true) .put("analysis.filter.wordDelimiter.generate_number_parts", true) .put("analysis.filter.wordDelimiter.catenate_words", true) .put("analysis.filter.wordDelimiter.catenate_numbers", true) .put("analysis.filter.wordDelimiter.catenate_all", false) .put("analysis.analyzer.custom_analyzer.tokenizer", "whitespace") .putArray("analysis.analyzer.custom_analyzer.filter", "lowercase", "wordDelimiter")) ); ensureGreen(); client().prepareIndex("test", "test", "1") .setSource("body", "Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature") .get(); refresh(); SearchResponse search = client().prepareSearch().setQuery(matchQuery("body", "Test: http://www.facebook.com ").type(Type.PHRASE)).addHighlightedField("body").execute().actionGet(); assertHighlight(search, 0, "body", 0, startsWith("<em>Test: http://www.facebook.com</em>")); search = client().prepareSearch().setQuery(matchQuery("body", "Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for highlighting feature Test: http://www.facebook.com http://elasticsearch.org http://xing.com http://cnn.com http://quora.com http://twitter.com this is a test for highlighting 
feature").type(Type.PHRASE)).addHighlightedField("body").execute().actionGet(); assertHighlight(search, 0, "body", 0, equalTo("<em>Test</em>: <em>http://www.facebook.com</em> <em>http://elasticsearch.org</em> <em>http://xing.com</em> <em>http://cnn.com</em> http://quora.com")); } @Test public void testNgramHighlightingPreLucene42() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", "name", "type=string,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets", "name2", "type=string,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer," + randomStoreField() + "term_vector=with_positions_offsets") .setSettings(settingsBuilder() .put(indexSettings()) .put("analysis.filter.my_ngram.max_gram", 20) .put("analysis.filter.my_ngram.version", "4.1") .put("analysis.filter.my_ngram.min_gram", 1) .put("analysis.filter.my_ngram.type", "ngram") .put("analysis.tokenizer.my_ngramt.max_gram", 20) .put("analysis.tokenizer.my_ngramt.version", "4.1") .put("analysis.tokenizer.my_ngramt.min_gram", 1) .put("analysis.tokenizer.my_ngramt.type", "ngram") .put("analysis.analyzer.name_index_analyzer.tokenizer", "my_ngramt") .put("analysis.analyzer.name2_index_analyzer.tokenizer", "whitespace") .putArray("analysis.analyzer.name2_index_analyzer.filter", "lowercase", "my_ngram") .put("analysis.analyzer.name_search_analyzer.tokenizer", "whitespace") .put("analysis.analyzer.name_search_analyzer.filter", "lowercase"))); ensureYellow(); client().prepareIndex("test", "test", "1") .setSource("name", "logicacmg ehemals avinci - the know how company", "name2", "logicacmg ehemals avinci - the know how company").get(); client().prepareIndex("test", "test", "2") .setSource("name", "avinci, unilog avinci, logicacmg, logica", "name2", "avinci, unilog avinci, logicacmg, logica").get(); refresh(); SearchResponse search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica 
m"))).addHighlightedField("name").get(); assertHighlight(search, 0, "name", 0, anyOf(equalTo("<em>logica</em>c<em>m</em>g ehe<em>m</em>als avinci - the know how co<em>m</em>pany"), equalTo("avinci, unilog avinci, <em>logica</em>c<em>m</em>g, <em>logica</em>"))); assertHighlight(search, 1, "name", 0, anyOf(equalTo("<em>logica</em>c<em>m</em>g ehe<em>m</em>als avinci - the know how co<em>m</em>pany"), equalTo("avinci, unilog avinci, <em>logica</em>c<em>m</em>g, <em>logica</em>"))); search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica ma"))).addHighlightedField("name").get(); assertHighlight(search, 0, "name", 0, anyOf(equalTo("<em>logica</em>cmg ehe<em>ma</em>ls avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); assertHighlight(search, 1, "name", 0, anyOf(equalTo("<em>logica</em>cmg ehe<em>ma</em>ls avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "logica"))).addHighlightedField("name").get(); assertHighlight(search, 0, "name", 0, anyOf(equalTo("<em>logica</em>cmg ehemals avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); assertHighlight(search, 0, "name", 0, anyOf(equalTo("<em>logica</em>cmg ehemals avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica m"))).addHighlightedField("name2").get(); assertHighlight(search, 0, "name2", 0, anyOf(equalTo("<em>logica</em>c<em>m</em>g ehe<em>m</em>als avinci - the know how co<em>m</em>pany"), equalTo("avinci, unilog avinci, <em>logica</em>c<em>m</em>g, <em>logica</em>"))); assertHighlight(search, 1, "name2", 0, anyOf(equalTo("<em>logica</em>c<em>m</em>g ehe<em>m</em>als avinci - the know how 
co<em>m</em>pany"), equalTo("avinci, unilog avinci, <em>logica</em>c<em>m</em>g, <em>logica</em>"))); search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica ma"))).addHighlightedField("name2").get(); assertHighlight(search, 0, "name2", 0, anyOf(equalTo("<em>logica</em>cmg ehe<em>ma</em>ls avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); assertHighlight(search, 1, "name2", 0, anyOf(equalTo("<em>logica</em>cmg ehe<em>ma</em>ls avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); search = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("name2", "logica"))).addHighlightedField("name2").get(); assertHighlight(search, 0, "name2", 0, anyOf(equalTo("<em>logica</em>cmg ehemals avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); assertHighlight(search, 1, "name2", 0, anyOf(equalTo("<em>logica</em>cmg ehemals avinci - the know how company"), equalTo("avinci, unilog avinci, <em>logica</em>cmg, <em>logica</em>"))); } @Test public void testNgramHighlighting() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", "name", "type=string,analyzer=name_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets", "name2", "type=string,analyzer=name2_index_analyzer,search_analyzer=name_search_analyzer,term_vector=with_positions_offsets") .setSettings(settingsBuilder() .put(indexSettings()) .put("analysis.filter.my_ngram.max_gram", 20) .put("analysis.filter.my_ngram.min_gram", 1) .put("analysis.filter.my_ngram.type", "ngram") .put("analysis.tokenizer.my_ngramt.max_gram", 20) .put("analysis.tokenizer.my_ngramt.min_gram", 1) .put("analysis.tokenizer.my_ngramt.token_chars", "letter,digit") .put("analysis.tokenizer.my_ngramt.type", "ngram") .put("analysis.analyzer.name_index_analyzer.tokenizer", "my_ngramt") 
.put("analysis.analyzer.name2_index_analyzer.tokenizer", "whitespace") .put("analysis.analyzer.name2_index_analyzer.filter", "my_ngram") .put("analysis.analyzer.name_search_analyzer.tokenizer", "whitespace"))); client().prepareIndex("test", "test", "1") .setSource("name", "logicacmg ehemals avinci - the know how company", "name2", "logicacmg ehemals avinci - the know how company").get(); refresh(); ensureGreen(); SearchResponse search = client().prepareSearch().setQuery(matchQuery("name", "logica m")).addHighlightedField("name").get(); assertHighlight(search, 0, "name", 0, equalTo("<em>logica</em>c<em>m</em>g ehe<em>m</em>als avinci - the know how co<em>m</em>pany")); search = client().prepareSearch().setQuery(matchQuery("name", "logica ma")).addHighlightedField("name").get(); assertHighlight(search, 0, "name", 0, equalTo("<em>logica</em>cmg ehe<em>ma</em>ls avinci - the know how company")); search = client().prepareSearch().setQuery(matchQuery("name", "logica")).addHighlightedField("name").get(); assertHighlight(search, 0, "name", 0, equalTo("<em>logica</em>cmg ehemals avinci - the know how company")); search = client().prepareSearch().setQuery(matchQuery("name2", "logica m")).addHighlightedField("name2").get(); assertHighlight(search, 0, "name2", 0, equalTo("<em>logicacmg</em> <em>ehemals</em> avinci - the know how <em>company</em>")); search = client().prepareSearch().setQuery(matchQuery("name2", "logica ma")).addHighlightedField("name2").get(); assertHighlight(search, 0, "name2", 0, equalTo("<em>logicacmg</em> <em>ehemals</em> avinci - the know how company")); search = client().prepareSearch().setQuery(matchQuery("name2", "logica")).addHighlightedField("name2").get(); assertHighlight(search, 0, "name2", 0, equalTo("<em>logicacmg</em> ehemals avinci - the know how company")); } @Test public void testEnsureNoNegativeOffsets() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "no_long_term", "type=string,term_vector=with_positions_offsets", 
"long_term", "type=string,term_vector=with_positions_offsets")); ensureYellow(); client().prepareIndex("test", "type1", "1") .setSource("no_long_term", "This is a test where foo is highlighed and should be highlighted", "long_term", "This is a test thisisaverylongwordandmakessurethisfails where foo is highlighed and should be highlighted") .get(); refresh(); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighed")) .addHighlightedField("long_term", 18, 1) .get(); assertHighlight(search, 0, "long_term", 0, 1, equalTo("<em>thisisaverylongwordandmakessurethisfails</em>")); search = client().prepareSearch() .setQuery(matchQuery("no_long_term", "test foo highlighed").type(Type.PHRASE).slop(3)) .addHighlightedField("no_long_term", 18, 1).setHighlighterPostTags("</b>").setHighlighterPreTags("<b>") .get(); assertNotHighlighted(search, 0, "no_long_term"); search = client().prepareSearch() .setQuery(matchQuery("no_long_term", "test foo highlighed").type(Type.PHRASE).slop(3)) .addHighlightedField("no_long_term", 30, 1).setHighlighterPostTags("</b>").setHighlighterPreTags("<b>") .get(); assertHighlight(search, 0, "no_long_term", 0, 1, equalTo("a <b>test</b> where <b>foo</b> is <b>highlighed</b> and")); } @Test public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title and don't use term vector, now lets see if it works... 
.startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").endObject() .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("term_vector", "no").endObject().endObject().endObject() .endObject().endObject().endObject())); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(XContentFactory.jsonBuilder().startObject() .field("title", "This is a test on the highlighting bug present in elasticsearch") .startArray("attachments").startObject().field("body", "attachment 1").endObject().startObject().field("body", "attachment 2").endObject().endArray() .endObject()); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) .addHighlightedField("title", -1, 0) .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch")); } search = client().prepareSearch() .setQuery(matchQuery("attachments.body", "attachment")) .addHighlightedField("attachments.body", -1, 0) .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "attachments.body", 0, equalTo("<em>attachment</em> 1")); assertHighlight(search, i, "attachments.body", 1, equalTo("<em>attachment</em> 2")); } } @Test public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title, now lets see if it works... 
.startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").endObject() .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").endObject().endObject().endObject() .endObject().endObject().endObject())); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(XContentFactory.jsonBuilder().startObject() .field("title", "This is a test on the highlighting bug present in elasticsearch") .startArray("attachments").startObject().field("body", "attachment 1").endObject().startObject().field("body", "attachment 2").endObject().endArray() .endObject()); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) .addHighlightedField("title", -1, 0) .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch")); } search = client().prepareSearch() .setQuery(matchQuery("attachments.body", "attachment")) .addHighlightedField("attachments.body", -1, 2) .execute().get(); for (int i = 0; i < 5; i++) { assertHighlight(search, i, "attachments.body", 0, equalTo("<em>attachment</em> 1")); assertHighlight(search, i, "attachments.body", 1, equalTo("<em>attachment</em> 2")); } } @Test public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") // we don't store title, now lets see if it works... 
.startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").endObject() .startObject("attachments").startObject("properties").startObject("body").field("type", "string").field("store", "no").field("index_options", "offsets").endObject().endObject().endObject() .endObject().endObject().endObject())); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(XContentFactory.jsonBuilder().startObject() .array("title", "This is a test on the highlighting bug present in elasticsearch. Hopefully it works.", "This is the second bug to perform highlighting on.") .startArray("attachments").startObject().field("body", "attachment for this test").endObject().startObject().field("body", "attachment 2").endObject().endArray() .endObject()); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) //asking for the whole field to be highlighted .addHighlightedField("title", -1, 0).get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch. 
Hopefully it works.")); assertHighlight(search, i, "title", 1, 2, equalTo("This is the second <em>bug</em> to perform highlighting on.")); } search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) //sentences will be generated out of each value .addHighlightedField("title").get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch.")); assertHighlight(search, i, "title", 1, 2, equalTo("This is the second <em>bug</em> to perform highlighting on.")); } search = client().prepareSearch() .setQuery(matchQuery("attachments.body", "attachment")) .addHighlightedField("attachments.body", -1, 2) .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "attachments.body", 0, equalTo("<em>attachment</em> for this test")); assertHighlight(search, i, "attachments.body", 1, 2, equalTo("<em>attachment</em> 2")); } } @Test public void testHighlightIssue1994() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string,store=no", "titleTV", "type=string,store=no,term_vector=with_positions_offsets")); ensureYellow(); indexRandom(false, client().prepareIndex("test", "type1", "1") .setSource("title", new String[]{"This is a test on the highlighting bug present in elasticsearch", "The bug is bugging us"}, "titleTV", new String[]{"This is a test on the highlighting bug present in elasticsearch", "The bug is bugging us"})); indexRandom(true, client().prepareIndex("test", "type1", "2") .setSource("titleTV", new String[]{"some text to highlight", "highlight other text"})); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) .addHighlightedField("title", -1, 2) .addHighlightedField("titleTV", -1, 2).setHighlighterRequireFieldMatch(false) .get(); assertHighlight(search, 0, "title", 0, equalTo("This is a test on the highlighting <em>bug</em> present 
in elasticsearch")); assertHighlight(search, 0, "title", 1, 2, equalTo("The <em>bug</em> is bugging us")); assertHighlight(search, 0, "titleTV", 0, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch")); assertHighlight(search, 0, "titleTV", 1, 2, equalTo("The <em>bug</em> is bugging us")); search = client().prepareSearch() .setQuery(matchQuery("titleTV", "highlight")) .addHighlightedField("titleTV", -1, 2) .get(); assertHighlight(search, 0, "titleTV", 0, equalTo("some text to <em>highlight</em>")); assertHighlight(search, 0, "titleTV", 1, 2, equalTo("<em>highlight</em> other text")); } @Test public void testGlobalHighlightingSettingsOverriddenAtFieldLevel() { createIndex("test"); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", new String[]{"this is a test", "this is the second test"}, "field2", new String[]{"this is another test", "yet another test"}).get(); refresh(); logger.info("--> highlighting and searching on field1 and field2 produces different tags"); SearchSourceBuilder source = searchSource() .query(termQuery("field1", "test")) .highlight(highlight().order("score").preTags("<global>").postTags("</global>").fragmentSize(1).numOfFragments(1) .field(new HighlightBuilder.Field("field1").numOfFragments(2)) .field(new HighlightBuilder.Field("field2").preTags("<field2>").postTags("</field2>").fragmentSize(50).requireFieldMatch(false))); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 2, equalTo(" <global>test</global>")); assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo(" <global>test</global>")); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("this is another <field2>test</field2>")); } @Test //https://github.com/elasticsearch/elasticsearch/issues/5175 public void testHighlightingOnWildcardFields() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", 
"field-postings", "type=string,index_options=offsets", "field-fvh", "type=string,term_vector=with_positions_offsets", "field-plain", "type=string")); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field-postings", "This is the first test sentence. Here is the second one.", "field-fvh", "This is the test with term_vectors", "field-plain", "This is the test for the plain highlighter").get(); refresh(); logger.info("--> highlighting and searching on field*"); SearchSourceBuilder source = searchSource() //postings hl doesn't support require_field_match, its field needs to be queried directly .query(termQuery("field-postings", "test")) .highlight(highlight().field("field*").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field-postings", 0, 1, equalTo("This is the first <xxx>test</xxx> sentence.")); assertHighlight(searchResponse, 0, "field-fvh", 0, 1, equalTo("This is the <xxx>test</xxx> with term_vectors")); assertHighlight(searchResponse, 0, "field-plain", 0, 1, equalTo("This is the <xxx>test</xxx> for the plain highlighter")); } @Test public void testForceSourceWithSourceDisabled() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1") .startObject("_source").field("enabled", false).endObject() .startObject("properties") .startObject("field1").field("type", "string").field("store", "yes").field("index_options", "offsets") .field("term_vector", "with_positions_offsets").endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", "The quick brown fox jumps over the lazy dog", "field2", "second field content").get(); refresh(); //works using stored field SearchResponse searchResponse = client().prepareSearch("test") .setQuery(termQuery("field1", "quick")) 
.addHighlightedField(new Field("field1").preTags("<xxx>").postTags("</xxx>")) .get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog")); assertFailures(client().prepareSearch("test") .setQuery(termQuery("field1", "quick")) .addHighlightedField(new Field("field1").preTags("<xxx>").postTags("</xxx>").highlighterType("plain").forceSource(true)), RestStatus.BAD_REQUEST, containsString("source is forced for fields [field1] but type [type1] has disabled _source")); assertFailures(client().prepareSearch("test") .setQuery(termQuery("field1", "quick")) .addHighlightedField(new Field("field1").preTags("<xxx>").postTags("</xxx>").highlighterType("fvh").forceSource(true)), RestStatus.BAD_REQUEST, containsString("source is forced for fields [field1] but type [type1] has disabled _source")); assertFailures(client().prepareSearch("test") .setQuery(termQuery("field1", "quick")) .addHighlightedField(new Field("field1").preTags("<xxx>").postTags("</xxx>").highlighterType("postings").forceSource(true)), RestStatus.BAD_REQUEST, containsString("source is forced for fields [field1] but type [type1] has disabled _source")); SearchSourceBuilder searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) .highlight(highlight().forceSource(true).field("field1")); assertFailures(client().prepareSearch("test").setSource(searchSource.buildAsBytes()), RestStatus.BAD_REQUEST, containsString("source is forced for fields [field1] but type [type1] has disabled _source")); searchSource = SearchSourceBuilder.searchSource().query(termQuery("field1", "quick")) .highlight(highlight().forceSource(true).field("field*")); assertFailures(client().prepareSearch("test").setSource(searchSource.buildAsBytes()), RestStatus.BAD_REQUEST, matches("source is forced for fields \\[field\\d, field\\d\\] but type \\[type1\\] has disabled _source")); } @Test public void testPlainHighlighter() throws Exception { 
createIndex("test"); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(termQuery("field1", "test")) .highlight(highlight().field("field1").order("score").preTags("<xxx>").postTags("</xxx>")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>")); logger.info("--> searching on _all, highlighting on field1"); source = searchSource() .query(termQuery("_all", "test")) .highlight(highlight().field("field1").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>")); logger.info("--> searching on _all, highlighting on field2"); source = searchSource() .query(termQuery("_all", "quick")) .highlight(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog")); logger.info("--> searching on _all, highlighting on field2"); source = searchSource() .query(prefixQuery("_all", "qui")) .highlight(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog")); logger.info("--> searching on _all with constant score, highlighting on field2"); 
source = searchSource() .query(constantScoreQuery(prefixQuery("_all", "qui"))) .highlight(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog")); logger.info("--> searching on _all with constant score, highlighting on field2"); source = searchSource() .query(boolQuery().should(constantScoreQuery(prefixQuery("_all", "qui")))) .highlight(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog")); } @Test public void testFastVectorHighlighter() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(termQuery("field1", "test")) .highlight(highlight().field("field1", 100, 0).order("score").preTags("<xxx>").postTags("</xxx>")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>")); logger.info("--> searching on _all, highlighting on field1"); source = searchSource() .query(termQuery("_all", "test")) .highlight(highlight().field("field1", 100, 0).order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); // LUCENE 
3.1 UPGRADE: Caused adding the space at the end... assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>")); logger.info("--> searching on _all, highlighting on field2"); source = searchSource() .query(termQuery("_all", "quick")) .highlight(highlight().field("field2", 100, 0).order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); // LUCENE 3.1 UPGRADE: Caused adding the space at the end... assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog")); logger.info("--> searching on _all, highlighting on field2"); source = searchSource() .query(prefixQuery("_all", "qui")) .highlight(highlight().field("field2", 100, 0).order("score").preTags("<xxx>").postTags("</xxx>").requireFieldMatch(false)); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); // LUCENE 3.1 UPGRADE: Caused adding the space at the end... assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy dog")); } /** * The FHV can spend a long time highlighting degenerate documents if phraseLimit is not set. 
*/ @Test(timeout=120000) public void testFVHManyMatches() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); // Index one megabyte of "t " over and over and over again client().prepareIndex("test", "type1") .setSource("field1", Joiner.on("").join(Iterables.limit(Iterables.cycle("t "), 1024*256))).get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(termQuery("field1", "t")) .highlight(highlight().highlighterType("fvh").field("field1", 20, 1).order("score").preTags("<xxx>").postTags("</xxx>")); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field1", 0, 1, containsString("<xxx>t</xxx>")); logger.info("--> done"); } @Test public void testMatchedFieldsFvhRequireFieldMatch() throws Exception { checkMatchedFieldsCase(true); } @Test public void testMatchedFieldsFvhNoRequireFieldMatch() throws Exception { checkMatchedFieldsCase(false); } private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("foo") .field("type", "multi_field") .startObject("fields") .startObject("foo") .field("type", "string") .field("termVector", "with_positions_offsets") .field("store", "yes") .field("analyzer", "english") .endObject() .startObject("plain") .field("type", "string") .field("termVector", "with_positions_offsets") .field("analyzer", "standard") .endObject() .endObject() .endObject() .startObject("bar") .field("type", "multi_field") .startObject("fields") .startObject("bar") .field("type", "string") .field("termVector", "with_positions_offsets") .field("store", "yes") .field("analyzer", "english") .endObject() .startObject("plain") .field("type", "string") .field("termVector", 
"with_positions_offsets") .field("analyzer", "standard") .endObject() .endObject() .endObject() .endObject())); ensureGreen(); index("test", "type1", "1", "foo", "running with scissors"); index("test", "type1", "2", "foo", "cat cat junk junk junk junk junk junk junk cats junk junk", "bar", "cat cat junk junk junk junk junk junk junk cats junk junk"); index("test", "type1", "3", "foo", "weird", "bar", "result"); refresh(); Field fooField = new Field("foo").numOfFragments(1).order("score").fragmentSize(25) .highlighterType("fvh").requireFieldMatch(requireFieldMatch); Field barField = new Field("bar").numOfFragments(1).order("score").fragmentSize(25) .highlighterType("fvh").requireFieldMatch(requireFieldMatch); SearchRequestBuilder req = client().prepareSearch("test").addHighlightedField(fooField); // First check highlighting without any matched fields set SearchResponse resp = req.setQuery(queryStringQuery("running scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); // And that matching a subfield doesn't automatically highlight it resp = req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("running with <em>scissors</em>")); // Add the subfield to the list of matched fields but don't match it. Everything should still work // like before we added it. fooField.matchedFields("foo", "foo.plain"); resp = req.setQuery(queryStringQuery("running scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); // Now make half the matches come from the stored field and half from just a matched field. resp = req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); // Now remove the stored field from the matched field list. That should work too. 
fooField.matchedFields("foo.plain"); resp = req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with scissors")); // Now make sure boosted fields don't blow up when matched fields is both the subfield and stored field. fooField.matchedFields("foo", "foo.plain"); resp = req.setQuery(queryStringQuery("foo.plain:running^5 scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); // Now just all matches are against the matched field. This still returns highlighting. resp = req.setQuery(queryStringQuery("foo.plain:running foo.plain:scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); // And all matched field via the queryString's field parameter, just in case resp = req.setQuery(queryStringQuery("running scissors").field("foo.plain")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); // Finding the same string two ways is ok too resp = req.setQuery(queryStringQuery("run foo.plain:running^5 scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); // But we use the best found score when sorting fragments resp = req.setQuery(queryStringQuery("cats foo.plain:cats^5").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk")); // which can also be written by searching on the subfield resp = req.setQuery(queryStringQuery("cats").field("foo").field("foo.plain^5")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk")); // Speaking of two fields, you can have two fields, only one of which has matchedFields enabled QueryBuilder twoFieldsQuery = queryStringQuery("cats").field("foo").field("foo.plain^5") .field("bar").field("bar.plain^5"); resp = 
req.setQuery(twoFieldsQuery).addHighlightedField(barField).get(); assertHighlight(resp, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk")); assertHighlight(resp, 0, "bar", 0, equalTo("<em>cat</em> <em>cat</em> junk junk junk junk")); // And you can enable matchedField highlighting on both barField.matchedFields("bar", "bar.plain"); resp = req.get(); assertHighlight(resp, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk")); assertHighlight(resp, 0, "bar", 0, equalTo("junk junk <em>cats</em> junk junk")); // Setting a matchedField that isn't searched/doesn't exist is simply ignored. barField.matchedFields("bar", "candy"); resp = req.get(); assertHighlight(resp, 0, "foo", 0, equalTo("junk junk <em>cats</em> junk junk")); assertHighlight(resp, 0, "bar", 0, equalTo("<em>cat</em> <em>cat</em> junk junk junk junk")); // If the stored field doesn't have a value it doesn't matter what you match, you get nothing. barField.matchedFields("bar", "foo.plain"); resp = req.setQuery(queryStringQuery("running scissors").field("foo.plain").field("bar")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); assertThat(resp.getHits().getAt(0).getHighlightFields(), not(hasKey("bar"))); // If the stored field is found but the matched field isn't then you don't get a result either. 
fooField.matchedFields("bar.plain"); resp = req.setQuery(queryStringQuery("running scissors").field("foo").field("foo.plain").field("bar").field("bar.plain")).get(); assertThat(resp.getHits().getAt(0).getHighlightFields(), not(hasKey("foo"))); // But if you add the stored field to the list of matched fields then you'll get a result again fooField.matchedFields("foo", "bar.plain"); resp = req.setQuery(queryStringQuery("running scissors").field("foo").field("foo.plain").field("bar").field("bar.plain")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>running</em> with <em>scissors</em>")); assertThat(resp.getHits().getAt(0).getHighlightFields(), not(hasKey("bar"))); // You _can_ highlight fields that aren't subfields of one another. resp = req.setQuery(queryStringQuery("weird").field("foo").field("foo.plain").field("bar").field("bar.plain")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("<em>weird</em>")); assertHighlight(resp, 0, "bar", 0, equalTo("<em>resul</em>t")); assertFailures(req.setQuery(queryStringQuery("result").field("foo").field("foo.plain").field("bar").field("bar.plain")), RestStatus.INTERNAL_SERVER_ERROR, containsString("IndexOutOfBoundsException")); } @Test public void testFastVectorHighlighterManyDocs() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); int COUNT = between(20, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[COUNT]; for (int i = 0; i < COUNT; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field1", "test " + i); } logger.info("--> indexing docs"); indexRandom(true, indexRequestBuilders); logger.info("--> searching explicitly on field1 and highlighting on it"); SearchResponse searchResponse = client().prepareSearch() .setSize(COUNT) .setQuery(termQuery("field1", "test")) .addHighlightedField("field1", 100, 0) .get(); for (int i = 0; i < COUNT; i++) { SearchHit hit = 
searchResponse.getHits().getHits()[i]; // LUCENE 3.1 UPGRADE: Caused adding the space at the end... assertHighlight(searchResponse, i, "field1", 0, 1, equalTo("<em>test</em> " + hit.id())); } logger.info("--> searching explicitly _all and highlighting on _all"); searchResponse = client().prepareSearch() .setSize(COUNT) .setQuery(termQuery("_all", "test")) .addHighlightedField("_all", 100, 0) .get(); for (int i = 0; i < COUNT; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; assertHighlight(searchResponse, i, "_all", 0, 1, equalTo("<em>test</em> " + hit.id() + " ")); } } public XContentBuilder type1TermVectorMapping() throws IOException { return XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("_all").field("store", "yes").field("termVector", "with_positions_offsets").endObject() .startObject("properties") .startObject("field1").field("type", "string").field("termVector", "with_positions_offsets").endObject() .startObject("field2").field("type", "string").field("termVector", "with_positions_offsets").endObject() .endObject() .endObject().endObject(); } @Test public void testSameContent() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets")); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a test on the highlighting bug present in elasticsearch"); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) .addHighlightedField("title", -1, 0) .get(); for (int i = 0; i < 5; i++) { assertHighlight(search, i, "title", 0, 1, equalTo("This is a test on the highlighting <em>bug</em> present in elasticsearch")); } } @Test public void testFastVectorHighlighterOffsetParameter() throws Exception 
{ assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets").get()); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a test on the highlighting bug present in elasticsearch"); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "bug")) .addHighlightedField("title", 30, 1, 10) .get(); for (int i = 0; i < 5; i++) { // LUCENE 3.1 UPGRADE: Caused adding the space at the end... assertHighlight(search, i, "title", 0, 1, equalTo("highlighting <em>bug</em> present in elasticsearch")); } } @Test public void testEscapeHtml() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string,store=yes")); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? elasticsearch"); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "test")) .setHighlighterEncoder("html") .addHighlightedField("title", 50, 1, 10) .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "title", 0, 1, equalTo("This is a html escaping highlighting <em>test</em> for *&amp;? 
elasticsearch")); } } @Test public void testEscapeHtml_vector() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets")); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? elasticsearch"); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "test")) .setHighlighterEncoder("html") .addHighlightedField("title", 30, 1, 10) .get(); for (int i = 0; i < 5; i++) { assertHighlight(search, i, "title", 0, 1, equalTo("highlighting <em>test</em> for *&amp;? elasticsearch")); } } @Test public void testMultiMapperVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("title").field("type", "multi_field").startObject("fields") .startObject("title").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "classic").endObject() .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .setHighlighterEncoder("html") .addHighlightedField("title", 50, 1) .get(); assertHighlight(search, 0, "title", 0, 1, equalTo("this is a <em>test</em>")); // search on title.key and highlight on title search 
= client().prepareSearch() .setQuery(matchQuery("title.key", "this is a test")) .setHighlighterEncoder("html") .addHighlightedField("title.key", 50, 1) .get(); assertHighlight(search, 0, "title.key", 0, 1, equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em>")); } @Test public void testMultiMapperVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("title").field("type", "multi_field").startObject("fields") .startObject("title").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "classic").endObject() .startObject("key").field("type", "string").field("store", "no").field("term_vector", "with_positions_offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .setHighlighterEncoder("html") .addHighlightedField("title", 50, 1) .get(); assertHighlight(search, 0, "title", 0, 1, equalTo("this is a <em>test</em>")); // search on title.key and highlight on title.key search = client().prepareSearch() .setQuery(matchQuery("title.key", "this is a test")) .setHighlighterEncoder("html") .addHighlightedField("title.key", 50, 1) .get(); assertHighlight(search, 0, "title.key", 0, 1, equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em>")); } @Test public void testMultiMapperNoVectorWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("title").field("type", "multi_field").startObject("fields") 
.startObject("title").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "classic").endObject() .startObject("key").field("type", "string").field("store", "yes").field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .setHighlighterEncoder("html") .addHighlightedField("title", 50, 1) .get(); assertHighlight(search, 0, "title", 0, 1, equalTo("this is a <em>test</em>")); // search on title.key and highlight on title search = client().prepareSearch() .setQuery(matchQuery("title.key", "this is a test")) .setHighlighterEncoder("html") .addHighlightedField("title.key", 50, 1) .get(); assertHighlight(search, 0, "title.key", 0, 1, equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em>")); } @Test public void testMultiMapperNoVectorFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("title").field("type", "multi_field").startObject("fields") .startObject("title").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "classic").endObject() .startObject("key").field("type", "string").field("store", "no").field("term_vector", "no").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "this is a 
test")) .setHighlighterEncoder("html") .addHighlightedField("title", 50, 1) .get(); assertHighlight(search, 0, "title", 0, 1, equalTo("this is a <em>test</em>")); // search on title.key and highlight on title.key search = client().prepareSearch() .setQuery(matchQuery("title.key", "this is a test")) .setHighlighterEncoder("html") .addHighlightedField("title.key", 50, 1) .get(); assertHighlight(search, 0, "title.key", 0, 1, equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em>")); } @Test public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string,store=yes,term_vector=no")); ensureGreen(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a test for the enabling fast vector highlighter"); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchPhraseQuery("title", "this is a test")) .addHighlightedField("title", 50, 1, 10) .get(); assertNoFailures(search); assertFailures(client().prepareSearch() .setQuery(matchPhraseQuery("title", "this is a test")) .addHighlightedField("title", 50, 1, 10) .setHighlighterType("fast-vector-highlighter"), RestStatus.BAD_REQUEST, containsString("the field [title] should be indexed with term vector with position offsets to be used with fast vector highlighter")); //should not fail if there is a wildcard assertNoFailures(client().prepareSearch() .setQuery(matchPhraseQuery("title", "this is a test")) .addHighlightedField("tit*", 50, 1, 10) .setHighlighterType("fast-vector-highlighter").get()); } @Test public void testDisableFastVectorHighlighter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string,store=yes,term_vector=with_positions_offsets,analyzer=classic")); 
ensureGreen(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a test for the workaround for the fast vector highlighting SOLR-3724"); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchPhraseQuery("title", "test for the workaround")) .addHighlightedField("title", 50, 1, 10) .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { // Because of SOLR-3724 nothing is highlighted when FVH is used assertNotHighlighted(search, i, "title"); } // Using plain highlighter instead of FVH search = client().prepareSearch() .setQuery(matchPhraseQuery("title", "test for the workaround")) .addHighlightedField("title", 50, 1, 10) .setHighlighterType("highlighter") .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "title", 0, 1, equalTo("This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724")); } // Using plain highlighter instead of FVH on the field level search = client().prepareSearch() .setQuery(matchPhraseQuery("title", "test for the workaround")) .addHighlightedField(new HighlightBuilder.Field("title").highlighterType("highlighter")) .setHighlighterType("highlighter") .get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(search, i, "title", 0, 1, equalTo("This is a <em>test</em> for the <em>workaround</em> for the fast vector highlighting SOLR-3724")); } } @Test public void testFSHHighlightAllMvFragments() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "tags", "type=string,term_vector=with_positions_offsets")); ensureGreen(); client().prepareIndex("test", "type1", "1") .setSource("tags", new String[]{ "this is a really long tag i would like to highlight", "here is another one that is very 
long and has the tag token near the end"}).get(); refresh(); SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "tag")) .addHighlightedField("tags", -1, 0).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really long <em>tag</em> i would like to highlight")); assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very long and has the <em>tag</em> token near the end")); } @Test public void testBoostingQuery() { createIndex("test"); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(boostingQuery().positive(termQuery("field2", "brown")).negative(termQuery("field2", "foobar")).negativeBoost(0.5f)) .highlight(highlight().field("field2").order("score").preTags("<x>").postTags("</x>")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick <x>brown</x> fox jumps over the lazy dog")); } @Test @AwaitsFix(bugUrl="Broken now that BoostingQuery does not extend BooleanQuery anymore") public void testBoostingQueryTermVector() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") .get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(boostingQuery().positive(termQuery("field2", "brown")).negative(termQuery("field2", "foobar")).negativeBoost(0.5f)) .highlight(highlight().field("field2").order("score").preTags("<x>").postTags("</x>")); SearchResponse searchResponse = 
client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick <x>brown</x> fox jumps over the lazy dog")); } @Test public void testCommonTermsQuery() { createIndex("test"); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog") .get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) .highlight(highlight().field("field2").order("score").preTags("<x>").postTags("</x>")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog")); } @Test public void testCommonTermsTermVector() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) .highlight(highlight().field("field2").order("score").preTags("<x>").postTags("</x>")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog")); } @Test public void testPhrasePrefix() throws IOException { Builder builder = settingsBuilder() .put(indexSettings()) .put("index.analysis.analyzer.synonym.tokenizer", "whitespace") .putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase") 
.put("index.analysis.filter.synonym.type", "synonym") .putArray("index.analysis.filter.synonym.synonyms", "quick => fast"); assertAcked(prepareCreate("test").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping()) .addMapping("type2", "_all", "store=yes,termVector=with_positions_offsets", "field4", "type=string,term_vector=with_positions_offsets,analyzer=synonym", "field3", "type=string,analyzer=synonym")); ensureGreen(); client().prepareIndex("test", "type1", "0") .setSource("field0", "The quick brown fox jumps over the lazy dog", "field1", "The quick brown fox jumps over the lazy dog").get(); client().prepareIndex("test", "type1", "1") .setSource("field1", "The quick browse button is a fancy thing, right bro?").get(); refresh(); logger.info("--> highlighting and searching on field0"); SearchSourceBuilder source = searchSource() .query(matchPhrasePrefixQuery("field0", "quick bro")) .highlight(highlight().field("field0").order("score").preTags("<x>").postTags("</x>")); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog")); logger.info("--> highlighting and searching on field1"); source = searchSource() .query(matchPhrasePrefixQuery("field1", "quick bro")) .highlight(highlight().field("field1").order("score").preTags("<x>").postTags("</x>")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf(equalTo("The <x>quick browse</x> button is a fancy thing, right bro?"), equalTo("The <x>quick brown</x> fox jumps over the lazy dog"))); assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf(equalTo("The <x>quick browse</x> button is a fancy thing, right bro?"), equalTo("The <x>quick brown</x> fox jumps over the lazy dog"))); // with synonyms client().prepareIndex("test", "type2", "0") 
.setSource("field4", "The quick brown fox jumps over the lazy dog", "field3", "The quick brown fox jumps over the lazy dog").get(); client().prepareIndex("test", "type2", "1") .setSource("field4", "The quick browse button is a fancy thing, right bro?").get(); client().prepareIndex("test", "type2", "2") .setSource("field4", "a quick fast blue car").get(); refresh(); source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field3", "fast bro")) .highlight(highlight().field("field3").order("score").preTags("<x>").postTags("</x>")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field3", 0, 1, equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog")); logger.info("--> highlighting and searching on field4"); source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field4", "the fast bro")) .highlight(highlight().field("field4").order("score").preTags("<x>").postTags("</x>")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf(equalTo("<x>The quick browse</x> button is a fancy thing, right bro?"), equalTo("<x>The quick brown</x> fox jumps over the lazy dog"))); assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf(equalTo("<x>The quick browse</x> button is a fancy thing, right bro?"), equalTo("<x>The quick brown</x> fox jumps over the lazy dog"))); logger.info("--> highlighting and searching on field4"); source = searchSource().postFilter(typeQuery("type2")).query(matchPhrasePrefixQuery("field4", "a fast quick blue ca")) .highlight(highlight().field("field4").order("score").preTags("<x>").postTags("</x>")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field4", 0, 1, equalTo("<x>a quick fast blue car</x>")); } @Test public void testPlainHighlightDifferentFragmenter() 
throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "tags", "type=string")); ensureGreen(); client().prepareIndex("test", "type1", "1") .setSource(jsonBuilder().startObject().field("tags", "this is a really long tag i would like to highlight", "here is another one that is very long tag and has the tag token near the end").endObject()).get(); refresh(); SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE)) .addHighlightedField(new HighlightBuilder.Field("tags") .fragmentSize(-1).numOfFragments(2).fragmenter("simple")).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really <em>long</em> <em>tag</em> i would like to highlight")); assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end")); response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE)) .addHighlightedField(new HighlightBuilder.Field("tags") .fragmentSize(-1).numOfFragments(2).fragmenter("span")).get(); assertHighlight(response, 0, "tags", 0, equalTo("this is a really <em>long</em> <em>tag</em> i would like to highlight")); assertHighlight(response, 0, "tags", 1, 2, equalTo("here is another one that is very <em>long</em> <em>tag</em> and has the <em>tag</em> token near the end")); assertFailures(client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("tags", "long tag").type(MatchQueryBuilder.Type.PHRASE)) .addHighlightedField(new HighlightBuilder.Field("tags") .fragmentSize(-1).numOfFragments(2).fragmenter("invalid")), RestStatus.BAD_REQUEST, containsString("unknown fragmenter option [invalid] for the field [tags]")); } @Test public void testPlainHighlighterMultipleFields() { createIndex("test"); ensureGreen(); index("test", "type1", "1", "field1", "The <b>quick<b> brown fox", "field2", "The 
<b>slow<b> brown fox"); refresh(); SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("field1", "fox")) .addHighlightedField(new HighlightBuilder.Field("field1").preTags("<1>").postTags("</1>").requireFieldMatch(true)) .addHighlightedField(new HighlightBuilder.Field("field2").preTags("<2>").postTags("</2>").requireFieldMatch(false)) .get(); assertHighlight(response, 0, "field1", 0, 1, equalTo("The <b>quick<b> brown <1>fox</1>")); assertHighlight(response, 0, "field2", 0, 1, equalTo("The <b>slow<b> brown <2>fox</2>")); } @Test public void testFastVectorHighlighterMultipleFields() { assertAcked(prepareCreate("test") .addMapping("type1", "field1", "type=string,term_vector=with_positions_offsets", "field2", "type=string,term_vector=with_positions_offsets")); ensureGreen(); index("test", "type1", "1", "field1", "The <b>quick<b> brown fox", "field2", "The <b>slow<b> brown fox"); refresh(); SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("field1", "fox")) .addHighlightedField(new HighlightBuilder.Field("field1").preTags("<1>").postTags("</1>").requireFieldMatch(true)) .addHighlightedField(new HighlightBuilder.Field("field2").preTags("<2>").postTags("</2>").requireFieldMatch(false)) .get(); assertHighlight(response, 0, "field1", 0, 1, equalTo("The <b>quick<b> brown <1>fox</1>")); assertHighlight(response, 0, "field2", 0, 1, equalTo("The <b>slow<b> brown <2>fox</2>")); } @Test public void testMissingStoredField() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "highlight_field", "type=string,store=yes")); ensureGreen(); client().prepareIndex("test", "type1", "1") .setSource(jsonBuilder().startObject() .field("field", "highlight") .endObject()).get(); refresh(); // This query used to fail when the field to highlight was absent SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("field", 
"highlight").type(MatchQueryBuilder.Type.BOOLEAN)) .addHighlightedField(new HighlightBuilder.Field("highlight_field") .fragmentSize(-1).numOfFragments(1).fragmenter("simple")).get(); assertThat(response.getHits().hits()[0].highlightFields().isEmpty(), equalTo(true)); } @Test // https://github.com/elasticsearch/elasticsearch/issues/3211 public void testNumericHighlighting() throws Exception { assertAcked(prepareCreate("test") .addMapping("test", "text", "type=string,index=analyzed", "byte", "type=byte", "short", "type=short", "int", "type=integer", "long", "type=long", "float", "type=float", "double", "type=double")); ensureGreen(); client().prepareIndex("test", "test", "1").setSource("text", "elasticsearch test", "byte", 25, "short", 42, "int", 100, "long", -1, "float", 3.2f, "double", 42.42).get(); refresh(); SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQueryBuilder.Type.BOOLEAN)) .addHighlightedField("text") .addHighlightedField("byte") .addHighlightedField("short") .addHighlightedField("int") .addHighlightedField("long") .addHighlightedField("float") .addHighlightedField("double") .get(); // Highlighting of numeric fields is not supported, but it should not raise errors // (this behavior is consistent with version 0.20) assertHitCount(response, 1l); } @Test // https://github.com/elasticsearch/elasticsearch/issues/3200 public void testResetTwice() throws Exception { assertAcked(prepareCreate("test") .setSettings(settingsBuilder() .put(indexSettings()) .put("analysis.analyzer.my_analyzer.type", "pattern") .put("analysis.analyzer.my_analyzer.pattern", "\\s+") .build()) .addMapping("type", "text", "type=string,analyzer=my_analyzer")); ensureGreen(); client().prepareIndex("test", "type", "1") .setSource("text", "elasticsearch test").get(); refresh(); SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("text", "test").type(MatchQueryBuilder.Type.BOOLEAN)) 
.addHighlightedField("text").execute().actionGet(); // PatternAnalyzer will throw an exception if it is resetted twice assertHitCount(response, 1l); } @Test public void testHighlightUsesHighlightQuery() throws IOException { assertAcked(prepareCreate("test") .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); index("test", "type1", "1", "text", "Testing the highlight query feature"); refresh(); HighlightBuilder.Field field = new HighlightBuilder.Field("text"); SearchRequestBuilder search = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing")) .addHighlightedField(field); Matcher<String> searchQueryMatcher = equalTo("<em>Testing</em> the highlight query feature"); field.highlighterType("plain"); SearchResponse response = search.get(); assertHighlight(response, 0, "text", 0, searchQueryMatcher); field.highlighterType("fvh"); response = search.get(); assertHighlight(response, 0, "text", 0, searchQueryMatcher); field.highlighterType("postings"); response = search.get(); assertHighlight(response, 0, "text", 0, searchQueryMatcher); Matcher<String> hlQueryMatcher = equalTo("Testing the highlight <em>query</em> feature"); field.highlightQuery(matchQuery("text", "query")); field.highlighterType("fvh"); response = search.get(); assertHighlight(response, 0, "text", 0, hlQueryMatcher); field.highlighterType("plain"); response = search.get(); assertHighlight(response, 0, "text", 0, hlQueryMatcher); field.highlighterType("postings"); response = search.get(); assertHighlight(response, 0, "text", 0, hlQueryMatcher); // Make sure the the highlightQuery is taken into account when it is set on the highlight context instead of the field search.setHighlighterQuery(matchQuery("text", "query")); field.highlighterType("fvh").highlightQuery(null); response = search.get(); assertHighlight(response, 0, "text", 0, hlQueryMatcher); field.highlighterType("plain"); response = 
search.get(); assertHighlight(response, 0, "text", 0, hlQueryMatcher); field.highlighterType("postings"); response = search.get(); assertHighlight(response, 0, "text", 0, hlQueryMatcher); } private static String randomStoreField() { if (randomBoolean()) { return "store=yes,"; } return ""; } @Test public void testHighlightNoMatchSize() throws IOException { assertAcked(prepareCreate("test") .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); String text = "I am pretty long so some of me should get cut off. Second sentence"; index("test", "type1", "1", "text", text); refresh(); // When you don't set noMatchSize you don't get any results if there isn't anything to highlight. HighlightBuilder.Field field = new HighlightBuilder.Field("text") .fragmentSize(21) .numOfFragments(1) .highlighterType("plain"); SearchResponse response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); // When noMatchSize is set to 0 you also shouldn't get any field.highlighterType("plain").noMatchSize(0); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); // When noMatchSize is between 0 and the size of the string field.highlighterType("plain").noMatchSize(21); response = 
client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so")); // The FVH also works but the fragment is longer than the plain highlighter because of boundary_max_scan field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); // Postings hl also works but the fragment is the whole first sentence (size ignored) field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); // We can also ask for a fragment longer than the input string and get the whole string field.highlighterType("plain").noMatchSize(text.length() * 2); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); //no difference using postings hl as the noMatchSize is ignored (just needs to be greater than 0) field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); // We can also ask for a fragment exactly the size of the input field and get the whole field field.highlighterType("plain").noMatchSize(text.length()); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo(text)); //no difference using postings hl as the 
noMatchSize is ignored (just needs to be greater than 0) field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); // You can set noMatchSize globally in the highlighter as well field.highlighterType("plain").noMatchSize(null); response = client().prepareSearch("test").setHighlighterNoMatchSize(21).addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so")); field.highlighterType("fvh"); response = client().prepareSearch("test").setHighlighterNoMatchSize(21).addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); field.highlighterType("postings"); response = client().prepareSearch("test").setHighlighterNoMatchSize(21).addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); // We don't break if noMatchSize is less than zero though field.highlighterType("plain").noMatchSize(randomIntBetween(Integer.MIN_VALUE, -1)); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); } @Test public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { assertAcked(prepareCreate("test") .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); String text1 = "I am pretty long so some of me should get cut off. 
We'll see how that goes."; String text2 = "I am short"; index("test", "type1", "1", "text", new String[] {text1, text2}); refresh(); // The no match fragment should come from the first value of a multi-valued field HighlightBuilder.Field field = new HighlightBuilder.Field("text") .fragmentSize(21) .numOfFragments(1) .highlighterType("plain") .noMatchSize(21); SearchResponse response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so")); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); // Postings hl also works but the fragment is the whole first sentence (size ignored) field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some of me should get cut off.")); // And noMatchSize returns nothing when the first entry is empty string! 
index("test", "type1", "2", "text", new String[] {"", text2}); refresh(); IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("2"); field.highlighterType("plain"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); // But if the field was actually empty then you should get no highlighting field index("test", "type1", "3", "text", new String[] {}); refresh(); idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("3"); field.highlighterType("plain"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); // Same for if the field doesn't even exist on the document index("test", "type1", "4"); refresh(); idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("4"); field.highlighterType("plain"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = 
client().prepareSearch("test") .setQuery(idsQueryBuilder) .addHighlightedField(field).get(); assertNotHighlighted(response, 0, "postings"); // Again same if the field isn't mapped field = new HighlightBuilder.Field("unmapped") .highlighterType("plain") .noMatchSize(21); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertNotHighlighted(response, 0, "text"); } @Test public void testHighlightNoMatchSizeNumberOfFragments() throws IOException { assertAcked(prepareCreate("test") .addMapping("type1", "text", "type=string," + randomStoreField() + "term_vector=with_positions_offsets,index_options=offsets")); ensureGreen(); String text1 = "This is the first sentence. This is the second sentence." + HighlightUtils.PARAGRAPH_SEPARATOR; String text2 = "This is the third sentence. 
This is the fourth sentence."; String text3 = "This is the fifth sentence"; index("test", "type1", "1", "text", new String[] {text1, text2, text3}); refresh(); // The no match fragment should come from the first value of a multi-valued field HighlightBuilder.Field field = new HighlightBuilder.Field("text") .fragmentSize(1) .numOfFragments(0) .highlighterType("plain") .noMatchSize(20); SearchResponse response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first")); field.highlighterType("fvh"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence")); // Postings hl also works but the fragment is the whole first sentence (size ignored) field.highlighterType("postings"); response = client().prepareSearch("test").addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence.")); //if there's a match we only return the values with matches (whole value as number_of_fragments == 0) MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery("text", "third fifth"); field.highlighterType("plain"); response = client().prepareSearch("test").setQuery(queryBuilder).addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence.")); assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence")); field.highlighterType("fvh"); response = client().prepareSearch("test").setQuery(queryBuilder).addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. 
This is the fourth sentence.")); assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence")); field.highlighterType("postings"); response = client().prepareSearch("test").setQuery(queryBuilder).addHighlightedField(field).get(); assertHighlight(response, 0, "text", 0, 2, equalTo("This is the <em>third</em> sentence. This is the fourth sentence.")); assertHighlight(response, 0, "text", 1, 2, equalTo("This is the <em>fifth</em> sentence")); } @Test public void testPostingsHighlighter() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy quick dog").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(termQuery("field1", "test")) .highlight(highlight().field("field1").preTags("<xxx>").postTags("</xxx>")); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>")); logger.info("--> searching on field1, highlighting on field1"); source = searchSource() .query(termQuery("field1", "test")) .highlight(highlight().field("field1").preTags("<xxx>").postTags("</xxx>")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a <xxx>test</xxx>")); logger.info("--> searching on field2, highlighting on field2"); source = searchSource() .query(termQuery("field2", "quick")) .highlight(highlight().field("field2").order("score").preTags("<xxx>").postTags("</xxx>")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> brown fox jumps over the lazy 
<xxx>quick</xxx> dog")); logger.info("--> searching on field2, highlighting on field2"); source = searchSource() .query(matchPhraseQuery("field2", "quick brown")) .highlight(highlight().field("field2").preTags("<xxx>").postTags("</xxx>")); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); //phrase query results in highlighting all different terms regardless of their positions assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> <xxx>brown</xxx> fox jumps over the lazy <xxx>quick</xxx> dog")); //lets fall back to the standard highlighter then, what people would do to highlight query matches logger.info("--> searching on field2, highlighting on field2, falling back to the plain highlighter"); source = searchSource() .query(matchPhraseQuery("_all", "quick brown")) .highlight(highlight().field("field2").preTags("<xxx>").postTags("</xxx>").highlighterType("highlighter").requireFieldMatch(false)); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <xxx>quick</xxx> <xxx>brown</xxx> fox jumps over the lazy <xxx>quick</xxx> dog")); } @Test public void testPostingsHighlighterMultipleFields() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping()).get()); ensureGreen(); index("test", "type1", "1", "field1", "The <b>quick<b> brown fox. Second sentence.", "field2", "The <b>slow<b> brown fox. 
Second sentence."); refresh(); SearchResponse response = client().prepareSearch("test") .setQuery(QueryBuilders.matchQuery("field1", "fox")) .addHighlightedField(new HighlightBuilder.Field("field1").preTags("<1>").postTags("</1>").requireFieldMatch(true)) .get(); assertHighlight(response, 0, "field1", 0, 1, equalTo("The <b>quick<b> brown <1>fox</1>.")); } @Test public void testPostingsHighlighterNumberOfFragments() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1", "1") .setSource("field1", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. The quick brown dog jumps over the lazy fox.", "field2", "The quick brown fox jumps over the lazy dog. The lazy red fox jumps over the quick dog. The quick brown dog jumps over the lazy fox.").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(termQuery("field1", "fox")) .highlight(highlight() .field(new HighlightBuilder.Field("field1").numOfFragments(5).preTags("<field1>").postTags("</field1>"))); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field1", 0, equalTo("The quick brown <field1>fox</field1> jumps over the lazy dog.")); assertHighlight(searchResponse, 0, "field1", 1, equalTo("The lazy red <field1>fox</field1> jumps over the quick dog.")); assertHighlight(searchResponse, 0, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy <field1>fox</field1>.")); client().prepareIndex("test", "type1", "2") .setSource("field1", new String[]{"The quick brown fox jumps over the lazy dog. 
Second sentence not finished", "The lazy red fox jumps over the quick dog.", "The quick brown dog jumps over the lazy fox."}).get(); refresh(); source = searchSource() .query(termQuery("field1", "fox")) .highlight(highlight() .field(new HighlightBuilder.Field("field1").numOfFragments(0).preTags("<field1>").postTags("</field1>"))); searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHitCount(searchResponse, 2l); for (SearchHit searchHit : searchResponse.getHits()) { if ("1".equals(searchHit.id())) { assertHighlight(searchHit, "field1", 0, 1, equalTo("The quick brown <field1>fox</field1> jumps over the lazy dog. The lazy red <field1>fox</field1> jumps over the quick dog. The quick brown dog jumps over the lazy <field1>fox</field1>.")); } else if ("2".equals(searchHit.id())) { assertHighlight(searchHit, "field1", 0, 3, equalTo("The quick brown <field1>fox</field1> jumps over the lazy dog. Second sentence not finished")); assertHighlight(searchHit, "field1", 1, 3, equalTo("The lazy red <field1>fox</field1> jumps over the quick dog.")); assertHighlight(searchHit, "field1", 2, 3, equalTo("The quick brown dog jumps over the lazy <field1>fox</field1>.")); } else { fail("Only hits with id 1 and 2 are returned"); } } } @Test public void testMultiMatchQueryHighlight() throws IOException { String[] highlighterTypes = new String[] {"fvh", "plain", "postings"}; XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("_all").field("store", "yes").field("index_options", "offsets").endObject() .startObject("properties") .startObject("field1").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject() .startObject("field2").field("type", "string").field("index_options", "offsets").field("term_vector", "with_positions_offsets").endObject() .endObject() .endObject().endObject(); assertAcked(prepareCreate("test").addMapping("type1", mapping)); 
ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", "The quick brown fox jumps over", "field2", "The quick brown fox jumps over").get(); refresh(); final int iters = scaledRandomIntBetween(20, 30); for (int i = 0; i < iters; i++) { String highlighterType = rarely() ? null : RandomPicks.randomFrom(getRandom(), highlighterTypes); MultiMatchQueryBuilder.Type[] supportedQueryTypes; if ("postings".equals(highlighterType)) { //phrase_prefix is not supported by postings highlighter, as it rewrites against an empty reader, the prefix will never match any term supportedQueryTypes = new MultiMatchQueryBuilder.Type[]{MultiMatchQueryBuilder.Type.BEST_FIELDS, MultiMatchQueryBuilder.Type.CROSS_FIELDS, MultiMatchQueryBuilder.Type.MOST_FIELDS, MultiMatchQueryBuilder.Type.PHRASE}; } else { supportedQueryTypes = MultiMatchQueryBuilder.Type.values(); } MultiMatchQueryBuilder.Type matchQueryType = rarely() ? null : RandomPicks.randomFrom(getRandom(), supportedQueryTypes); final MultiMatchQueryBuilder multiMatchQueryBuilder = multiMatchQuery("the quick brown fox", "field1", "field2").type(matchQueryType); SearchSourceBuilder source = searchSource() .query(multiMatchQueryBuilder) .highlight(highlight().highlightQuery(randomBoolean() ? 
multiMatchQueryBuilder : null).highlighterType(highlighterType) .field(new Field("field1").requireFieldMatch(true).preTags("<field1>").postTags("</field1>"))); logger.info("Running multi-match type: [" + matchQueryType + "] highlight with type: [" + highlighterType + "]"); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHitCount(searchResponse, 1l); assertHighlight(searchResponse, 0, "field1", 0, anyOf(equalTo("<field1>The quick brown fox</field1> jumps over"), equalTo("<field1>The</field1> <field1>quick</field1> <field1>brown</field1> <field1>fox</field1> jumps over"))); } } @Test public void testPostingsHighlighterOrderByScore() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1") .setSource("field1", new String[]{"This sentence contains one match, not that short. This sentence contains two sentence matches. This one contains no matches.", "This is the second value's first sentence. This one contains no matches. This sentence contains three sentence occurrences (sentence).", "One sentence match here and scored lower since the text is quite long, not that appealing. 
This one contains no matches."}).get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(termQuery("field1", "sentence")) .highlight(highlight().field("field1").order("score")); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); Map<String,HighlightField> highlightFieldMap = searchResponse.getHits().getAt(0).highlightFields(); assertThat(highlightFieldMap.size(), equalTo(1)); HighlightField field1 = highlightFieldMap.get("field1"); assertThat(field1.fragments().length, equalTo(5)); assertThat(field1.fragments()[0].string(), equalTo("This <em>sentence</em> contains three <em>sentence</em> occurrences (<em>sentence</em>).")); assertThat(field1.fragments()[1].string(), equalTo("This <em>sentence</em> contains two <em>sentence</em> matches.")); assertThat(field1.fragments()[2].string(), equalTo("This is the second value's first <em>sentence</em>.")); assertThat(field1.fragments()[3].string(), equalTo("This <em>sentence</em> contains one match, not that short.")); assertThat(field1.fragments()[4].string(), equalTo("One <em>sentence</em> match here and scored lower since the text is quite long, not that appealing.")); } @Test public void testPostingsHighlighterEscapeHtml() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=string," + randomStoreField() + "index_options=offsets")); ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a html escaping highlighting test for *&? 
elasticsearch"); } indexRandom(true, indexRequestBuilders); SearchResponse searchResponse = client().prepareSearch() .setQuery(matchQuery("title", "test")) .setHighlighterEncoder("html") .addHighlightedField("title").get(); for (int i = 0; i < indexRequestBuilders.length; i++) { assertHighlight(searchResponse, i, "title", 0, 1, equalTo("This is a html escaping highlighting <em>test</em> for *&amp;?")); } } @Test public void testPostingsHighlighterMultiMapperWithStore() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("title").field("type", "multi_field").startObject("fields") .startObject("title").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "classic").endObject() .startObject("key").field("type", "string").field("store", "yes").field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test . 
Second sentence.").get(); refresh(); // simple search on body with standard analyzer with a simple field query SearchResponse searchResponse = client().prepareSearch() //lets make sure we analyze the query and we highlight the resulting terms .setQuery(matchQuery("title", "This is a Test")) .addHighlightedField("title").get(); assertHitCount(searchResponse, 1l); SearchHit hit = searchResponse.getHits().getAt(0); //stopwords are not highlighted since not indexed assertHighlight(hit, "title", 0, 1, equalTo("this is a <em>test</em> .")); // search on title.key and highlight on title searchResponse = client().prepareSearch() .setQuery(matchQuery("title.key", "this is a test")) .addHighlightedField("title.key").get(); assertHitCount(searchResponse, 1l); //stopwords are now highlighted since we used only whitespace analyzer here assertHighlight(searchResponse, 0, "title.key", 0, 1, equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em> .")); } @Test public void testPostingsHighlighterMultiMapperFromSource() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("title").field("type", "multi_field").startObject("fields") .startObject("title").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "classic").endObject() .startObject("key").field("type", "string").field("store", "no").field("index_options", "offsets").field("analyzer", "whitespace").endObject() .endObject().endObject() .endObject().endObject().endObject())); ensureGreen(); client().prepareIndex("test", "type1", "1").setSource("title", "this is a test").get(); refresh(); // simple search on body with standard analyzer with a simple field query SearchResponse searchResponse = client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .addHighlightedField("title") .get(); assertHighlight(searchResponse, 0, "title", 0, 1, equalTo("this is a 
<em>test</em>")); // search on title.key and highlight on title.key searchResponse = client().prepareSearch() .setQuery(matchQuery("title.key", "this is a test")) .addHighlightedField("title.key").get(); assertHighlight(searchResponse, 0, "title.key", 0, 1, equalTo("<em>this</em> <em>is</em> <em>a</em> <em>test</em>")); } @Test public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("title").field("type", "string").field("store", "yes").field("index_options", "docs").endObject() .endObject().endObject().endObject())); ensureGreen(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)) .setSource("title", "This is a test for the postings highlighter"); } indexRandom(true, indexRequestBuilders); SearchResponse search = client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .addHighlightedField("title") .get(); assertNoFailures(search); assertFailures(client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .addHighlightedField("title") .setHighlighterType("postings-highlighter"), RestStatus.BAD_REQUEST, containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); assertFailures(client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .addHighlightedField("title") .setHighlighterType("postings"), RestStatus.BAD_REQUEST, containsString("the field [title] should be indexed with positions and offsets in the postings list to be used with postings highlighter")); //should not fail if there is a wildcard assertNoFailures(client().prepareSearch() .setQuery(matchQuery("title", "this is a test")) .addHighlightedField("tit*") 
.setHighlighterType("postings").get()); } @Test public void testPostingsHighlighterBoostingQuery() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.") .get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource() .query(boostingQuery().positive(termQuery("field2", "brown")).negative(termQuery("field2", "foobar")).negativeBoost(0.5f)) .highlight(highlight().field("field2").preTags("<x>").postTags("</x>")); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick <x>brown</x> fox jumps over the lazy dog!")); } @Test public void testPostingsHighlighterCommonTermsQuery() throws IOException { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(commonTermsQuery("field2", "quick brown").cutoffFrequency(100)) .highlight(highlight().field("field2").preTags("<x>").postTags("</x>")); SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet(); assertHitCount(searchResponse, 1l); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog!")); } private static XContentBuilder type1PostingsffsetsMapping() throws IOException { return XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field1").field("type", "string").field("index_options", "offsets").endObject() .startObject("field2").field("type", "string").field("index_options", "offsets").endObject() .endObject() .endObject().endObject(); } @Test public void testPostingsHighlighterPrefixQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(prefixQuery("field2", "qui")) .highlight(highlight().field("field2")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog!")); } @Test public void testPostingsHighlighterFuzzyQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(fuzzyQuery("field2", "quck")) .highlight(highlight().field("field2")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog!")); } @Test public void testPostingsHighlighterRegexpQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! 
Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(regexpQuery("field2", "qu[a-l]+k")) .highlight(highlight().field("field2")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog!")); } @Test public void testPostingsHighlighterWildcardQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(wildcardQuery("field2", "qui*")) .highlight(highlight().field("field2")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog!")); source = searchSource().query(wildcardQuery("field2", "qu*k")) .highlight(highlight().field("field2")); searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHitCount(searchResponse, 1l); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog!")); } @Test public void testPostingsHighlighterTermRangeQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "aaab").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(rangeQuery("field2").gte("aaaa").lt("zzzz")) 
.highlight(highlight().field("field2")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("<em>aaab</em>")); } @Test public void testPostingsHighlighterQueryString() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "this is a test", "field2", "The quick brown fox jumps over the lazy dog! Second sentence.").get(); refresh(); logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(queryStringQuery("qui*").defaultField("field2")) .highlight(highlight().field("field2")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The <em>quick</em> brown fox jumps over the lazy dog!")); } @Test public void testPostingsHighlighterRegexpQueryWithinConstantScoreQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(constantScoreQuery(regexpQuery("field1", "pho[a-z]+"))) .highlight(highlight().field("field1")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The <em>photography</em> word will get highlighted")); } @Test public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "The 
photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(boolQuery() .should(constantScoreQuery(QueryBuilders.missingQuery("field1"))) .should(matchQuery("field1", "test")) .should(constantScoreQuery(queryStringQuery("field1:photo*")))) .highlight(highlight().field("field1")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The <em>photography</em> word will get highlighted")); } @Test public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(boolQuery().must(prefixQuery("field1", "photo")).should(matchQuery("field1", "test").minimumShouldMatch("0"))) .highlight(highlight().field("field1")); SearchResponse searchResponse = client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The <em>photography</em> word will get highlighted")); } @Test public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "The photography word will get highlighted").get(); refresh(); logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(boolQuery().must(queryStringQuery("field1:photo*")).filter(missingQuery("field_null"))) .highlight(highlight().field("field1")); SearchResponse searchResponse = 
client().prepareSearch("test").setSource(source.buildAsBytes()).get(); assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The <em>photography</em> word will get highlighted")); } @Test public void testPostingsHighlighterManyDocs() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1PostingsffsetsMapping())); ensureGreen(); int COUNT = between(20, 100); Map<String, String> prefixes = new HashMap<>(COUNT); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[COUNT]; for (int i = 0; i < COUNT; i++) { //generating text with word to highlight in a different position //(https://github.com/elasticsearch/elasticsearch/issues/4103) String prefix = randomAsciiOfLengthBetween(5, 30); prefixes.put(String.valueOf(i), prefix); indexRequestBuilders[i] = client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field1", "Sentence " + prefix + " test. Sentence two."); } logger.info("--> indexing docs"); indexRandom(true, indexRequestBuilders); logger.info("--> searching explicitly on field1 and highlighting on it"); SearchRequestBuilder searchRequestBuilder = client().prepareSearch() .setSize(COUNT) .setQuery(termQuery("field1", "test")) .addHighlightedField("field1"); SearchResponse searchResponse = searchRequestBuilder.get(); assertHitCount(searchResponse, (long)COUNT); assertThat(searchResponse.getHits().hits().length, equalTo(COUNT)); for (SearchHit hit : searchResponse.getHits()) { String prefix = prefixes.get(hit.id()); assertHighlight(hit, "field1", 0, 1, equalTo("Sentence " + prefix + " <em>test</em>.")); } } @Test @AwaitsFix(bugUrl="Broken now that BoostingQuery does not extend BooleanQuery anymore") public void testFastVectorHighlighterPhraseBoost() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", type1TermVectorMapping())); phraseBoostTestCase("fvh"); } @Test public void testPostingsHighlighterPhraseBoost() throws Exception { assertAcked(prepareCreate("test").addMapping("type1", 
type1PostingsffsetsMapping())); phraseBoostTestCase("postings"); } /** * Test phrase boosting over normal term matches. Note that this will never pass with the plain highlighter * because it doesn't support the concept of terms having a different weight based on position. * @param highlighterType highlighter to test */ private void phraseBoostTestCase(String highlighterType) { ensureGreen(); StringBuilder text = new StringBuilder(); text.append("words words junk junk junk junk junk junk junk junk highlight junk junk junk junk together junk\n"); for (int i = 0; i<10; i++) { text.append("junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk\n"); } text.append("highlight words together\n"); for (int i = 0; i<10; i++) { text.append("junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk junk\n"); } index("test", "type1", "1", "field1", text.toString()); refresh(); // Match queries phraseBoostTestCaseForClauses(highlighterType, 100f, matchQuery("field1", "highlight words together"), matchPhraseQuery("field1", "highlight words together")); // Query string with a single field phraseBoostTestCaseForClauses(highlighterType, 100f, queryStringQuery("highlight words together").field("field1"), queryStringQuery("\"highlight words together\"").field("field1").autoGeneratePhraseQueries(true)); // Query string with a single field without dismax phraseBoostTestCaseForClauses(highlighterType, 100f, queryStringQuery("highlight words together").field("field1").useDisMax(false), queryStringQuery("\"highlight words together\"").field("field1").useDisMax(false).autoGeneratePhraseQueries(true)); // Query string with more than one field phraseBoostTestCaseForClauses(highlighterType, 100f, queryStringQuery("highlight words together").field("field1").field("field2"), queryStringQuery("\"highlight words together\"").field("field1").field("field2").autoGeneratePhraseQueries(true)); // Query string boosting 
the field phraseBoostTestCaseForClauses(highlighterType, 1f, queryStringQuery("highlight words together").field("field1"), queryStringQuery("\"highlight words together\"").field("field1^100").autoGeneratePhraseQueries(true)); } private <P extends QueryBuilder & BoostableQueryBuilder<?>> void phraseBoostTestCaseForClauses(String highlighterType, float boost, QueryBuilder terms, P phrase) { Matcher<String> highlightedMatcher = Matchers.either(containsString("<em>highlight words together</em>")).or( containsString("<em>highlight</em> <em>words</em> <em>together</em>")); SearchRequestBuilder search = client().prepareSearch("test").setHighlighterRequireFieldMatch(true) .setHighlighterOrder("score").setHighlighterType(highlighterType) .addHighlightedField("field1", 100, 1); // Try with a bool query phrase.boost(boost); SearchResponse response = search.setQuery(boolQuery().must(terms).should(phrase)).get(); assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); phrase.boost(1); // Try with a boosting query response = search.setQuery(boostingQuery().positive(phrase).negative(terms).boost(boost).negativeBoost(1)).get(); assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); // Try with a boosting query using a negative boost response = search.setQuery(boostingQuery().positive(phrase).negative(terms).boost(1).negativeBoost(1/boost)).get(); assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); } }
apache-2.0
spring-projects/spring-framework
spring-context/src/main/java/org/springframework/context/index/CandidateComponentsIndexLoader.java
4398
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.context.index; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import java.util.Properties; import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.core.SpringProperties; import org.springframework.core.io.UrlResource; import org.springframework.core.io.support.PropertiesLoaderUtils; import org.springframework.lang.Nullable; import org.springframework.util.ConcurrentReferenceHashMap; /** * Candidate components index loading mechanism for internal use within the framework. * * @author Stephane Nicoll * @since 5.0 */ public final class CandidateComponentsIndexLoader { /** * The location to look for components. * <p>Can be present in multiple JAR files. */ public static final String COMPONENTS_RESOURCE_LOCATION = "META-INF/spring.components"; /** * System property that instructs Spring to ignore the components index, i.e. * to always return {@code null} from {@link #loadIndex(ClassLoader)}. * <p>The default is "false", allowing for regular use of the index. Switching this * flag to {@code true} fulfills a corner case scenario when an index is partially * available for some libraries (or use cases) but couldn't be built for the whole * application. 
In this case, the application context fallbacks to a regular * classpath arrangement (i.e. as though no index were present at all). */ public static final String IGNORE_INDEX = "spring.index.ignore"; private static final boolean shouldIgnoreIndex = SpringProperties.getFlag(IGNORE_INDEX); private static final Log logger = LogFactory.getLog(CandidateComponentsIndexLoader.class); private static final ConcurrentMap<ClassLoader, CandidateComponentsIndex> cache = new ConcurrentReferenceHashMap<>(); private CandidateComponentsIndexLoader() { } /** * Load and instantiate the {@link CandidateComponentsIndex} from * {@value #COMPONENTS_RESOURCE_LOCATION}, using the given class loader. If no * index is available, return {@code null}. * @param classLoader the ClassLoader to use for loading (can be {@code null} to use the default) * @return the index to use or {@code null} if no index was found * @throws IllegalArgumentException if any module index cannot * be loaded or if an error occurs while creating {@link CandidateComponentsIndex} */ @Nullable public static CandidateComponentsIndex loadIndex(@Nullable ClassLoader classLoader) { ClassLoader classLoaderToUse = classLoader; if (classLoaderToUse == null) { classLoaderToUse = CandidateComponentsIndexLoader.class.getClassLoader(); } return cache.computeIfAbsent(classLoaderToUse, CandidateComponentsIndexLoader::doLoadIndex); } @Nullable private static CandidateComponentsIndex doLoadIndex(ClassLoader classLoader) { if (shouldIgnoreIndex) { return null; } try { Enumeration<URL> urls = classLoader.getResources(COMPONENTS_RESOURCE_LOCATION); if (!urls.hasMoreElements()) { return null; } List<Properties> result = new ArrayList<>(); while (urls.hasMoreElements()) { URL url = urls.nextElement(); Properties properties = PropertiesLoaderUtils.loadProperties(new UrlResource(url)); result.add(properties); } if (logger.isDebugEnabled()) { logger.debug("Loaded " + result.size() + " index(es)"); } int totalCount = 
result.stream().mapToInt(Properties::size).sum(); return (totalCount > 0 ? new CandidateComponentsIndex(result) : null); } catch (IOException ex) { throw new IllegalStateException("Unable to load indexes from location [" + COMPONENTS_RESOURCE_LOCATION + "]", ex); } } }
apache-2.0
Sellegit/j2objc
runtime/src/main/java/apple/foundation/NSHTTPCookieAttribute.java
1883
package apple.foundation;

import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;

import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.uikit.*;
import apple.coreanimation.*;
import apple.coredata.*;
import apple.coremedia.*;
import apple.security.*;
import apple.dispatch.*;

/*<javadoc>*/
/*</javadoc>*/
// J2ObjC binding: exposes the Foundation framework's NSHTTPCookie* attribute-key
// global constants as static native accessors. Each method returns the native
// NSString constant named in its @GlobalConstant annotation.
@Library("Foundation/Foundation.h")
public class NSHTTPCookieAttribute extends Object {

    // Native global constant NSHTTPCookieName.
    @GlobalConstant("NSHTTPCookieName")
    public static native NSString NameAttribute();

    // Native global constant NSHTTPCookieValue.
    @GlobalConstant("NSHTTPCookieValue")
    public static native NSString ValueAttribute();

    // Native global constant NSHTTPCookieOriginURL.
    @GlobalConstant("NSHTTPCookieOriginURL")
    public static native NSString OriginURLAttribute();

    // Native global constant NSHTTPCookieVersion.
    @GlobalConstant("NSHTTPCookieVersion")
    public static native NSString VersionAttribute();

    // Native global constant NSHTTPCookieDomain.
    @GlobalConstant("NSHTTPCookieDomain")
    public static native NSString DomainAttribute();

    // Native global constant NSHTTPCookiePath.
    @GlobalConstant("NSHTTPCookiePath")
    public static native NSString PathAttribute();

    // Native global constant NSHTTPCookieSecure.
    @GlobalConstant("NSHTTPCookieSecure")
    public static native NSString SecureAttribute();

    // Native global constant NSHTTPCookieExpires.
    @GlobalConstant("NSHTTPCookieExpires")
    public static native NSString ExpiresAttribute();

    // Native global constant NSHTTPCookieComment.
    @GlobalConstant("NSHTTPCookieComment")
    public static native NSString CommentAttribute();

    // Native global constant NSHTTPCookieCommentURL.
    @GlobalConstant("NSHTTPCookieCommentURL")
    public static native NSString CommentURLAttribute();

    // Native global constant NSHTTPCookieDiscard.
    @GlobalConstant("NSHTTPCookieDiscard")
    public static native NSString DiscardAttribute();

    // Native global constant NSHTTPCookieMaximumAge.
    @GlobalConstant("NSHTTPCookieMaximumAge")
    public static native NSString MaximumAgeAttribute();

    // Native global constant NSHTTPCookiePort.
    @GlobalConstant("NSHTTPCookiePort")
    public static native NSString PortAttribute();
}
apache-2.0
ceocoder/cloud-bigtable-client
bigtable-protos/src/generated/java/com/google/bigtable/admin/table/v1/BigtableTableServiceGrpc.java
28872
package com.google.bigtable.admin.table.v1; import static io.grpc.stub.Calls.createMethodDescriptor; import static io.grpc.stub.Calls.asyncUnaryCall; import static io.grpc.stub.Calls.asyncServerStreamingCall; import static io.grpc.stub.Calls.asyncClientStreamingCall; import static io.grpc.stub.Calls.duplexStreamingCall; import static io.grpc.stub.Calls.blockingUnaryCall; import static io.grpc.stub.Calls.blockingServerStreamingCall; import static io.grpc.stub.Calls.unaryFutureCall; import static io.grpc.stub.ServerCalls.createMethodDefinition; import static io.grpc.stub.ServerCalls.asyncUnaryRequestCall; import static io.grpc.stub.ServerCalls.asyncStreamingRequestCall; @javax.annotation.Generated("by gRPC proto compiler") public class BigtableTableServiceGrpc { private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.CreateTableRequest, com.google.bigtable.admin.table.v1.Table> METHOD_CREATE_TABLE = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "CreateTable", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.CreateTableRequest.PARSER), io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.Table.PARSER)); private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.ListTablesRequest, com.google.bigtable.admin.table.v1.ListTablesResponse> METHOD_LIST_TABLES = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "ListTables", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.ListTablesRequest.PARSER), io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.ListTablesResponse.PARSER)); private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.GetTableRequest, com.google.bigtable.admin.table.v1.Table> METHOD_GET_TABLE = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "GetTable", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.GetTableRequest.PARSER), 
io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.Table.PARSER)); private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.DeleteTableRequest, com.google.protobuf.Empty> METHOD_DELETE_TABLE = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "DeleteTable", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.DeleteTableRequest.PARSER), io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.Empty.PARSER)); private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.RenameTableRequest, com.google.protobuf.Empty> METHOD_RENAME_TABLE = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "RenameTable", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.RenameTableRequest.PARSER), io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.Empty.PARSER)); private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest, com.google.bigtable.admin.table.v1.ColumnFamily> METHOD_CREATE_COLUMN_FAMILY = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "CreateColumnFamily", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest.PARSER), io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.ColumnFamily.PARSER)); private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.ColumnFamily, com.google.bigtable.admin.table.v1.ColumnFamily> METHOD_UPDATE_COLUMN_FAMILY = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "UpdateColumnFamily", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.ColumnFamily.PARSER), io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.ColumnFamily.PARSER)); private static final io.grpc.stub.Method<com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest, com.google.protobuf.Empty> METHOD_DELETE_COLUMN_FAMILY = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, 
"DeleteColumnFamily", io.grpc.protobuf.ProtoUtils.marshaller(com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest.PARSER), io.grpc.protobuf.ProtoUtils.marshaller(com.google.protobuf.Empty.PARSER)); public static BigtableTableServiceStub newStub(io.grpc.Channel channel) { return new BigtableTableServiceStub(channel, CONFIG); } public static BigtableTableServiceBlockingStub newBlockingStub( io.grpc.Channel channel) { return new BigtableTableServiceBlockingStub(channel, CONFIG); } public static BigtableTableServiceFutureStub newFutureStub( io.grpc.Channel channel) { return new BigtableTableServiceFutureStub(channel, CONFIG); } public static final BigtableTableServiceServiceDescriptor CONFIG = new BigtableTableServiceServiceDescriptor(); @javax.annotation.concurrent.Immutable public static class BigtableTableServiceServiceDescriptor extends io.grpc.stub.AbstractServiceDescriptor<BigtableTableServiceServiceDescriptor> { public final io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.CreateTableRequest, com.google.bigtable.admin.table.v1.Table> createTable; public final io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.ListTablesRequest, com.google.bigtable.admin.table.v1.ListTablesResponse> listTables; public final io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.GetTableRequest, com.google.bigtable.admin.table.v1.Table> getTable; public final io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.DeleteTableRequest, com.google.protobuf.Empty> deleteTable; public final io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.RenameTableRequest, com.google.protobuf.Empty> renameTable; public final io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest, com.google.bigtable.admin.table.v1.ColumnFamily> createColumnFamily; public final io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.ColumnFamily, com.google.bigtable.admin.table.v1.ColumnFamily> updateColumnFamily; public final 
io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest, com.google.protobuf.Empty> deleteColumnFamily; private BigtableTableServiceServiceDescriptor() { createTable = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_CREATE_TABLE); listTables = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_LIST_TABLES); getTable = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_GET_TABLE); deleteTable = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_DELETE_TABLE); renameTable = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_RENAME_TABLE); createColumnFamily = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_CREATE_COLUMN_FAMILY); updateColumnFamily = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_UPDATE_COLUMN_FAMILY); deleteColumnFamily = createMethodDescriptor( "google.bigtable.admin.table.v1.BigtableTableService", METHOD_DELETE_COLUMN_FAMILY); } @SuppressWarnings("unchecked") private BigtableTableServiceServiceDescriptor( java.util.Map<java.lang.String, io.grpc.MethodDescriptor<?, ?>> methodMap) { createTable = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.CreateTableRequest, com.google.bigtable.admin.table.v1.Table>) methodMap.get( CONFIG.createTable.getName()); listTables = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.ListTablesRequest, com.google.bigtable.admin.table.v1.ListTablesResponse>) methodMap.get( CONFIG.listTables.getName()); getTable = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.GetTableRequest, com.google.bigtable.admin.table.v1.Table>) methodMap.get( CONFIG.getTable.getName()); deleteTable = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.DeleteTableRequest, com.google.protobuf.Empty>) methodMap.get( 
CONFIG.deleteTable.getName()); renameTable = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.RenameTableRequest, com.google.protobuf.Empty>) methodMap.get( CONFIG.renameTable.getName()); createColumnFamily = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest, com.google.bigtable.admin.table.v1.ColumnFamily>) methodMap.get( CONFIG.createColumnFamily.getName()); updateColumnFamily = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.ColumnFamily, com.google.bigtable.admin.table.v1.ColumnFamily>) methodMap.get( CONFIG.updateColumnFamily.getName()); deleteColumnFamily = (io.grpc.MethodDescriptor<com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest, com.google.protobuf.Empty>) methodMap.get( CONFIG.deleteColumnFamily.getName()); } @java.lang.Override protected BigtableTableServiceServiceDescriptor build( java.util.Map<java.lang.String, io.grpc.MethodDescriptor<?, ?>> methodMap) { return new BigtableTableServiceServiceDescriptor(methodMap); } @java.lang.Override public com.google.common.collect.ImmutableList<io.grpc.MethodDescriptor<?, ?>> methods() { return com.google.common.collect.ImmutableList.<io.grpc.MethodDescriptor<?, ?>>of( createTable, listTables, getTable, deleteTable, renameTable, createColumnFamily, updateColumnFamily, deleteColumnFamily); } } public static interface BigtableTableService { public void createTable(com.google.bigtable.admin.table.v1.CreateTableRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.Table> responseObserver); public void listTables(com.google.bigtable.admin.table.v1.ListTablesRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ListTablesResponse> responseObserver); public void getTable(com.google.bigtable.admin.table.v1.GetTableRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.Table> responseObserver); public void deleteTable(com.google.bigtable.admin.table.v1.DeleteTableRequest 
request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver); public void renameTable(com.google.bigtable.admin.table.v1.RenameTableRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver); public void createColumnFamily(com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ColumnFamily> responseObserver); public void updateColumnFamily(com.google.bigtable.admin.table.v1.ColumnFamily request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ColumnFamily> responseObserver); public void deleteColumnFamily(com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver); } public static interface BigtableTableServiceBlockingClient { public com.google.bigtable.admin.table.v1.Table createTable(com.google.bigtable.admin.table.v1.CreateTableRequest request); public com.google.bigtable.admin.table.v1.ListTablesResponse listTables(com.google.bigtable.admin.table.v1.ListTablesRequest request); public com.google.bigtable.admin.table.v1.Table getTable(com.google.bigtable.admin.table.v1.GetTableRequest request); public com.google.protobuf.Empty deleteTable(com.google.bigtable.admin.table.v1.DeleteTableRequest request); public com.google.protobuf.Empty renameTable(com.google.bigtable.admin.table.v1.RenameTableRequest request); public com.google.bigtable.admin.table.v1.ColumnFamily createColumnFamily(com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest request); public com.google.bigtable.admin.table.v1.ColumnFamily updateColumnFamily(com.google.bigtable.admin.table.v1.ColumnFamily request); public com.google.protobuf.Empty deleteColumnFamily(com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest request); } public static interface BigtableTableServiceFutureClient { public 
com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.Table> createTable( com.google.bigtable.admin.table.v1.CreateTableRequest request); public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.ListTablesResponse> listTables( com.google.bigtable.admin.table.v1.ListTablesRequest request); public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.Table> getTable( com.google.bigtable.admin.table.v1.GetTableRequest request); public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteTable( com.google.bigtable.admin.table.v1.DeleteTableRequest request); public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> renameTable( com.google.bigtable.admin.table.v1.RenameTableRequest request); public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.ColumnFamily> createColumnFamily( com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest request); public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.ColumnFamily> updateColumnFamily( com.google.bigtable.admin.table.v1.ColumnFamily request); public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteColumnFamily( com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest request); } public static class BigtableTableServiceStub extends io.grpc.stub.AbstractStub<BigtableTableServiceStub, BigtableTableServiceServiceDescriptor> implements BigtableTableService { private BigtableTableServiceStub(io.grpc.Channel channel, BigtableTableServiceServiceDescriptor config) { super(channel, config); } @java.lang.Override protected BigtableTableServiceStub build(io.grpc.Channel channel, BigtableTableServiceServiceDescriptor config) { return new BigtableTableServiceStub(channel, config); } @java.lang.Override public void 
createTable(com.google.bigtable.admin.table.v1.CreateTableRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.Table> responseObserver) { asyncUnaryCall( channel.newCall(config.createTable), request, responseObserver); } @java.lang.Override public void listTables(com.google.bigtable.admin.table.v1.ListTablesRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ListTablesResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.listTables), request, responseObserver); } @java.lang.Override public void getTable(com.google.bigtable.admin.table.v1.GetTableRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.Table> responseObserver) { asyncUnaryCall( channel.newCall(config.getTable), request, responseObserver); } @java.lang.Override public void deleteTable(com.google.bigtable.admin.table.v1.DeleteTableRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { asyncUnaryCall( channel.newCall(config.deleteTable), request, responseObserver); } @java.lang.Override public void renameTable(com.google.bigtable.admin.table.v1.RenameTableRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { asyncUnaryCall( channel.newCall(config.renameTable), request, responseObserver); } @java.lang.Override public void createColumnFamily(com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ColumnFamily> responseObserver) { asyncUnaryCall( channel.newCall(config.createColumnFamily), request, responseObserver); } @java.lang.Override public void updateColumnFamily(com.google.bigtable.admin.table.v1.ColumnFamily request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ColumnFamily> responseObserver) { asyncUnaryCall( channel.newCall(config.updateColumnFamily), request, responseObserver); } @java.lang.Override public void 
deleteColumnFamily(com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { asyncUnaryCall( channel.newCall(config.deleteColumnFamily), request, responseObserver); } } public static class BigtableTableServiceBlockingStub extends io.grpc.stub.AbstractStub<BigtableTableServiceBlockingStub, BigtableTableServiceServiceDescriptor> implements BigtableTableServiceBlockingClient { private BigtableTableServiceBlockingStub(io.grpc.Channel channel, BigtableTableServiceServiceDescriptor config) { super(channel, config); } @java.lang.Override protected BigtableTableServiceBlockingStub build(io.grpc.Channel channel, BigtableTableServiceServiceDescriptor config) { return new BigtableTableServiceBlockingStub(channel, config); } @java.lang.Override public com.google.bigtable.admin.table.v1.Table createTable(com.google.bigtable.admin.table.v1.CreateTableRequest request) { return blockingUnaryCall( channel.newCall(config.createTable), request); } @java.lang.Override public com.google.bigtable.admin.table.v1.ListTablesResponse listTables(com.google.bigtable.admin.table.v1.ListTablesRequest request) { return blockingUnaryCall( channel.newCall(config.listTables), request); } @java.lang.Override public com.google.bigtable.admin.table.v1.Table getTable(com.google.bigtable.admin.table.v1.GetTableRequest request) { return blockingUnaryCall( channel.newCall(config.getTable), request); } @java.lang.Override public com.google.protobuf.Empty deleteTable(com.google.bigtable.admin.table.v1.DeleteTableRequest request) { return blockingUnaryCall( channel.newCall(config.deleteTable), request); } @java.lang.Override public com.google.protobuf.Empty renameTable(com.google.bigtable.admin.table.v1.RenameTableRequest request) { return blockingUnaryCall( channel.newCall(config.renameTable), request); } @java.lang.Override public com.google.bigtable.admin.table.v1.ColumnFamily 
createColumnFamily(com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest request) { return blockingUnaryCall( channel.newCall(config.createColumnFamily), request); } @java.lang.Override public com.google.bigtable.admin.table.v1.ColumnFamily updateColumnFamily(com.google.bigtable.admin.table.v1.ColumnFamily request) { return blockingUnaryCall( channel.newCall(config.updateColumnFamily), request); } @java.lang.Override public com.google.protobuf.Empty deleteColumnFamily(com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest request) { return blockingUnaryCall( channel.newCall(config.deleteColumnFamily), request); } } public static class BigtableTableServiceFutureStub extends io.grpc.stub.AbstractStub<BigtableTableServiceFutureStub, BigtableTableServiceServiceDescriptor> implements BigtableTableServiceFutureClient { private BigtableTableServiceFutureStub(io.grpc.Channel channel, BigtableTableServiceServiceDescriptor config) { super(channel, config); } @java.lang.Override protected BigtableTableServiceFutureStub build(io.grpc.Channel channel, BigtableTableServiceServiceDescriptor config) { return new BigtableTableServiceFutureStub(channel, config); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.Table> createTable( com.google.bigtable.admin.table.v1.CreateTableRequest request) { return unaryFutureCall( channel.newCall(config.createTable), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.ListTablesResponse> listTables( com.google.bigtable.admin.table.v1.ListTablesRequest request) { return unaryFutureCall( channel.newCall(config.listTables), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.Table> getTable( com.google.bigtable.admin.table.v1.GetTableRequest request) { return unaryFutureCall( channel.newCall(config.getTable), request); } 
@java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteTable( com.google.bigtable.admin.table.v1.DeleteTableRequest request) { return unaryFutureCall( channel.newCall(config.deleteTable), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> renameTable( com.google.bigtable.admin.table.v1.RenameTableRequest request) { return unaryFutureCall( channel.newCall(config.renameTable), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.ColumnFamily> createColumnFamily( com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest request) { return unaryFutureCall( channel.newCall(config.createColumnFamily), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.bigtable.admin.table.v1.ColumnFamily> updateColumnFamily( com.google.bigtable.admin.table.v1.ColumnFamily request) { return unaryFutureCall( channel.newCall(config.updateColumnFamily), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteColumnFamily( com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest request) { return unaryFutureCall( channel.newCall(config.deleteColumnFamily), request); } } public static io.grpc.ServerServiceDefinition bindService( final BigtableTableService serviceImpl) { return io.grpc.ServerServiceDefinition.builder("google.bigtable.admin.table.v1.BigtableTableService") .addMethod(createMethodDefinition( METHOD_CREATE_TABLE, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.CreateTableRequest, com.google.bigtable.admin.table.v1.Table>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.CreateTableRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.Table> responseObserver) 
{ serviceImpl.createTable(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_LIST_TABLES, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.ListTablesRequest, com.google.bigtable.admin.table.v1.ListTablesResponse>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.ListTablesRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ListTablesResponse> responseObserver) { serviceImpl.listTables(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_GET_TABLE, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.GetTableRequest, com.google.bigtable.admin.table.v1.Table>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.GetTableRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.Table> responseObserver) { serviceImpl.getTable(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_DELETE_TABLE, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.DeleteTableRequest, com.google.protobuf.Empty>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.DeleteTableRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { serviceImpl.deleteTable(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_RENAME_TABLE, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.RenameTableRequest, com.google.protobuf.Empty>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.RenameTableRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { serviceImpl.renameTable(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_CREATE_COLUMN_FAMILY, 
asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest, com.google.bigtable.admin.table.v1.ColumnFamily>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.CreateColumnFamilyRequest request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ColumnFamily> responseObserver) { serviceImpl.createColumnFamily(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_UPDATE_COLUMN_FAMILY, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.ColumnFamily, com.google.bigtable.admin.table.v1.ColumnFamily>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.ColumnFamily request, io.grpc.stub.StreamObserver<com.google.bigtable.admin.table.v1.ColumnFamily> responseObserver) { serviceImpl.updateColumnFamily(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_DELETE_COLUMN_FAMILY, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest, com.google.protobuf.Empty>() { @java.lang.Override public void invoke( com.google.bigtable.admin.table.v1.DeleteColumnFamilyRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { serviceImpl.deleteColumnFamily(request, responseObserver); } }))).build(); } }
apache-2.0
ConnCollege/cas-5
src/main/java/edu/conncoll/cas/peci/restlet/PECIResource.java
34371
package edu.conncoll.cas.peci.restlet; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.HashMap; import javax.sql.DataSource; import javax.validation.constraints.NotNull; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONArray; import org.restlet.data.MediaType; import org.restlet.data.Status; import org.restlet.ext.json.JsonRepresentation; import org.restlet.resource.Representation; import org.restlet.resource.Resource; import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate; import org.springframework.dao.EmptyResultDataAccessException; import edu.conncoll.cas.jdbc.JDBCCamel; /** * <p>Reset services for PECI - Better description soon</p> * * @version 1.0 RC1 3/22/2016 * @see org.json.JSONObject * @see org.restlet.resource.Resource * @see org.restlet.resource.Representation * @see org.restlet.ext.json.JsonRepresentation * @see edu.conncoll.cas.jdbc.jdbcCamel * @author Andrew Tillinghast * */ public class PECIResource extends Resource { @NotNull private JDBCCamel jdbc; @NotNull private NamedParameterJdbcTemplate jdbcCAS; @NotNull private DataSource CASSource; public final boolean allowGet() { return false; } public final boolean allowPost() { return true; } public final boolean allowPut() { return false; } public final boolean allowDelete() { return false; } public final boolean allowOptions(){ return true; } public final void setCamelJdbc(final JDBCCamel jdbc) { this.jdbc = jdbc; DataSource cas = jdbc.getCASSource(); this.jdbcCAS = new NamedParameterJdbcTemplate(cas); } private Log log = LogFactory.getLog(this.getClass()); @Override public void acceptRepresentation( Representation resetEntity ) { //create json objects (one for response and one for incoming request) //and an array list for errors JSONObject json = null; JSONObject jsonResponse = new JSONObject(); 
ArrayList<String> reasons = new ArrayList<String>(); try { //ensure the incoming information is valid JSON data if ( !resetEntity.getMediaType().isCompatible(MediaType.APPLICATION_JSON) ) { jsonResponse.put("result", "error"); jsonResponse.put("message", "not a valid json string"); getResponse().setStatus( Status.CLIENT_ERROR_UNSUPPORTED_MEDIA_TYPE, jsonResponse.toString() ); } else { //get a JSON object from our incoming data JsonRepresentation jsonRep = new JsonRepresentation(resetEntity); json = jsonRep.toJsonObject(); log.debug("Json: "+ json.toString()); //validate that the required parameters were passed with the request if ( !json.has("PIDM") ) { reasons.add("no student PDIM was provided"); } if ( !json.has("PPID") ) { reasons.add("No Contact or Parent PPID was provided"); } if ( !json.has("DATA") ) { reasons.add("DATA Type was not specified"); } if ( !json.has("MODE") ) { reasons.add("Data MODE was not specified"); } //if there are valid error reasons, then send back an error response if ( !reasons.isEmpty() ) { jsonResponse.put("result", "error"); jsonResponse.put("message", "incomplete and/or invalid parameters"); jsonResponse.put("reasons", reasons); getResponse().setStatus( Status.CLIENT_ERROR_BAD_REQUEST ); getResponse().setEntity( jsonResponse.toString(), MediaType.APPLICATION_JSON ); log.debug("Bad request sent for password reset"); } else { //create the variables necessary for Action String pdim = json.getString("PIDM"); String ppid = json.getString("PPID"); String dataType = json.getString("DATA"); String dataMode = json.getString("MODE"); log.debug("Valid peci restlet request Data:" + dataType + " PDIM: " + pdim + " Mode: " + dataMode); String SQL; //Get the real PDIM from the UUID Map<String,Object> pidmData = new HashMap<String,Object>(); SQL="SELECT STUDENT_PIDM from peci_trans_start where STUDENT_UUID='" + pdim + "' and CONFIRMED=0"; pidmData = jdbcCAS.queryForMap(SQL,new HashMap<String,Object>()); Map<String,Object> namedParameters = new 
HashMap<String,Object>(); namedParameters.put("STUDENT_PIDM", pidmData.get("STUDENT_PIDM")); namedParameters.put("PARENT_PPID", ppid); namedParameters.put("PARENT_PIDM", 0); Map<String,Object> parentData = new HashMap<String,Object>(); Map<String,Object> emrgData = new HashMap<String,Object>(); List<Map<String,Object>> phoneData = new ArrayList<Map<String,Object>>(); Map<String,Object> emailData =new HashMap<String,Object>(); Map<String,Object> addressData = new HashMap<String,Object>(); if (dataType.equals("PHONES")) { SQL=" CALL `cas`.`PECI_Phone_Names`(:STUDENT_PIDM) "; phoneData = jdbcCAS.queryForList(SQL,namedParameters); jsonResponse.put("phones",phoneData ); jsonResponse.put("result", "success"); getResponse().setStatus( Status.SUCCESS_OK ); getResponse().setEntity( jsonResponse.toString(), MediaType.APPLICATION_JSON ); } else if (dataType.equals("PARENT")) { if (dataMode.equals("DELETE")){ SQL="update cc_adv_peci_parents_t set CHANGE_COLS='DELETE' where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); SQL="update cc_gen_peci_phone_data_t set CHANGE_COLS='DELETE' where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID" +" and PARENT_PPID not in (Select PARENT_PPID from cc_gen_peci_emergs_t where STUDENT_PIDM=:STUDENT_PIDM and CHANGE_COLS not like '%DELETE%')"; jdbcCAS.update(SQL,namedParameters); }else if (dataMode.equals("PROMOTE")){ //Make a parent into an emergency contact as well SQL = "SELECT COUNT(*) ct FROM cc_gen_peci_emergs_t where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; emrgData = jdbcCAS.queryForMap(SQL,namedParameters); if (Integer.parseInt(emrgData.get("ct").toString()) > 0) { //remove the "DELETE" text from CHANGE_COLS SQL="update cc_gen_peci_emergs_t set CHANGE_COLS='NEW' where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); } else { // Add a new record to the contacts table SQL="insert into cc_gen_peci_emergs_t ( STUDENT_PIDM, 
PARENT_PPID, CHANGE_COLS, EMERG_LEGAL_PREFIX_NAME, EMERG_LEGAL_FIRST_NAME," + " EMERG_LEGAL_MIDDLE_NAME, EMERG_LEGAL_LAST_NAME, EMERG_LEGAL_SUFFIX_NAME, EMERG_PREF_FIRST_NAME, EMERG_PREF_MIDDLE_NAME," + " EMERG_PREF_LAST_NAME, EMERG_RELT_CODE) " + " SELECT STUDENT_PIDM, PARENT_PPID, 'NEW' CHANGE_COLS, PARENT_LEGAL_PREFIX_NAME, PARENT_LEGAL_FIRST_NAME, " + " PARENT_LEGAL_MIDDLE_NAME, PARENT_LEGAL_LAST_NAME, PARENT_LEGAL_SUFFIX_NAME, PARENT_PREF_FIRST_NAME, " + " PARENT_PREF_MIDDLE_NAME, PARENT_PREF_LAST_NAME, PARENT_RELT_CODE" + " from cc_adv_peci_parents_t" + " where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); } }else { if (ppid == ""){ //new Parent response log.debug("New Temporaty Parent Record"); } else { try { //Parent Data SQL="select PARENT_LEGAL_PREFIX_NAME, PARENT_PREF_FIRST_NAME, PARENT_PREF_MIDDLE_NAME, PARENT_PREF_LAST_NAME, PARENT_LEGAL_SUFFIX_NAME," + " PARENT_RELT_CODE, EMERG_CONTACT_PRIORITY, EMERG_NO_CELL_PHONE, EMERG_PHONE_NUMBER_TYPE_CODE, EMERG_CELL_PHONE_CARRIER," + " EMERG_PHONE_TTY_DEVICE, DEPENDENT, PECI_ROLE, CONTACT_TYPE, PARENT_PIDM from cc_adv_peci_parents_t" + " where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; parentData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } try { //phones SQL="select PECI_PHONE_CODE,PHONE_CODE,PHONE_AREA_CODE,PHONE_NUMBER,PHONE_NUMBER_INTL,PHONE_SEQUENCE_NO,PHONE_STATUS_IND,PHONE_PRIMARY_IND," + " CELL_PHONE_CARRIER,PHONE_TTY_DEVICE,EMERG_AUTO_OPT_OUT,EMERG_SEND_TEXT,EMERG_NO_CELL_PHONE from cc_gen_peci_phone_data_t" + " where (PHONE_STATUS_IND is null or PHONE_STATUS_IND = 'A') and (CHANGE_COLS != 'DELETE' or CHANGE_COLS is null) and STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; phoneData = jdbcCAS.queryForList(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } try { //email SQL="select PECI_EMAIL_CODE,EMAIL_ADDRESS from 
cc_gen_peci_email_data_t where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; emailData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } try { //Address SQL="select EMERG_CONTACT_PRIORITY,PERSON_ROLE,PECI_ADDR_CODE,ADDR_CODE,ADDR_SEQUENCE_NO,ADDR_STREET_LINE1,ADDR_STREET_LINE2," + " ADDR_STREET_LINE3,ADDR_CITY,ADDR_STAT_CODE,ADDR_ZIP,ADDR_NATN_CODE,ADDR_STATUS_IND from cc_gen_peci_addr_data_t " + " where (ADDR_STATUS_IND is null or ADDR_STATUS_IND = 'A') and STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; addressData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } if (dataMode.equals("READ")) { jsonResponse.put("result", "success"); jsonResponse.put("parent",parentData ); jsonResponse.put("phones",phoneData ); jsonResponse.put("email",emailData ); jsonResponse.put("address",addressData ); getResponse().setStatus( Status.SUCCESS_OK ); getResponse().setEntity( jsonResponse.toString(), MediaType.APPLICATION_JSON ); } else{ Map<String,Object> parentDataIn = toMap(json.getJSONObject("parent")); Map<String,Object> emailDataIn = toMap(json.getJSONObject("email")); Map<String,Object> addressDataIn = toMap(json.getJSONObject("address")); List<Object> phoneDataList = toList(json.getJSONArray("phones")); List<Map<String,Object>> phoneDataIn = new ArrayList<Map<String,Object>>(); for (Object phone : phoneDataList){ if (phone.getClass().getName() == "java.util.HashMap") { phoneDataIn.add((Map<String,Object>)phone); } } //Find the values that have been updated Map<String,Object> updates = new HashMap<String,Object>(); if (ppid.equals("null")){ log.debug("Creating new parent record."); //Add a parent //Find an Alpha Seq No char seqNo = Parent_PPID(namedParameters); log.debug ("New parent record to create PPID: " + Character.toString(seqNo)); SQL = "INSERT cc_adv_peci_parents_t SET "; List<String> columns = new ArrayList(parentDataIn.keySet()); 
for(int y=0; y<columns.size(); y++) { String key = columns.get(y); if (!key.equals("PARENT_PPID")) { Object newValue = parentDataIn.get(key); if (newValue.getClass().getName().equals("java.lang.String")) { SQL = SQL + key +" = '" + newValue.toString().replace("'", "''") + "', "; } else { SQL = SQL + key +" = " + newValue + ", "; } } } SQL = SQL + "CHANGE_COLS = 'NEW', "; SQL = SQL + "STUDENT_PIDM=:STUDENT_PIDM, "; SQL = SQL + "PARENT_PPID=:PARENT_PPID"; log.debug("PECI Restlet generated new parent SQL: " + SQL); namedParameters.put("PARENT_PPID",String.valueOf(seqNo)); jdbcCAS.update(SQL,namedParameters); jsonResponse.put("PARENT_PPID",String.valueOf(seqNo)); //Per Tonm's requst on 5/6/2016 adding a new parent automatically adds them as an emergency Contact /* Per Tom's request on 6/3/2016 commenting this out SQL = "INSERT cc_gen_peci_emergs_t SET "; for(int y=0; y<columns.size(); y++) { String key = columns.get(y); if (!key.equals("PARENT_PPID")) { Object newValue = parentDataIn.get(key); key = key.replace("PARENT_","EMERG_"); if (newValue.getClass().getName().equals("java.lang.String")) { SQL = SQL + key +" = '" + newValue + "', "; } else { SQL = SQL + key +" = " + newValue + ", "; } } } SQL = SQL + "CHANGE_COLS = 'NEW', " + "STUDENT_PIDM=:STUDENT_PIDM, " + "PARENT_PPID=:PARENT_PPID"; namedParameters.put("PARENT_PPID",String.valueOf(seqNo)); jdbcCAS.update(SQL,namedParameters); */ } else { //Parent Data updates = compareMap(parentDataIn, parentData); writeUpdates(namedParameters,updates,"cc_adv_peci_parents_t"); //ensure that Contact data i.e. 
name is updated in sync with Parent SQL="UPDATE cc_gen_peci_emergs_t e " + " INNER JOIN cc_adv_peci_parents_t p " + " ON p.STUDENT_PIDM = e.STUDENT_PIDM " + " AND p.PARENT_PPID = e.PARENT_PPID " + " SET EMERG_LEGAL_PREFIX_NAME = PARENT_LEGAL_PREFIX_NAME, " + " EMERG_LEGAL_FIRST_NAME = PARENT_LEGAL_FIRST_NAME, " + " EMERG_LEGAL_MIDDLE_NAME = PARENT_LEGAL_MIDDLE_NAME, " + " EMERG_LEGAL_LAST_NAME = PARENT_LEGAL_LAST_NAME, " + " EMERG_LEGAL_SUFFIX_NAME = PARENT_LEGAL_SUFFIX_NAME, " + " EMERG_PREF_FIRST_NAME = PARENT_PREF_FIRST_NAME, " + " EMERG_PREF_MIDDLE_NAME = PARENT_PREF_MIDDLE_NAME, " + " EMERG_PREF_LAST_NAME = PARENT_PREF_LAST_NAME," + " EMERG_RELT_CODE = PARENT_RELT_CODE" + " WHERE p.STUDENT_PIDM=:STUDENT_PIDM " + " AND p.PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); } //Get Parent PIDM SQL="SELECT PARENT_PIDM FROM cc_adv_peci_parents_t WHERE"; SQL = SQL + " PARENT_PPID=:PARENT_PPID "; SQL = SQL + " and STUDENT_PIDM=:STUDENT_PIDM"; Map<String,Object> parentPIDM = jdbcCAS.queryForMap(SQL,namedParameters); String strPPIDM = parentPIDM.get("PARENT_PIDM").toString(); //email updates = compareMap(emailDataIn, emailData); if (!updates.isEmpty()) log.debug ("Updating email data"); updates.put("PARENT_PIDM", strPPIDM); writeUpdates(namedParameters,updates,"cc_gen_peci_email_data_t"); //adresses updates = compareMap(addressDataIn, addressData); if (!updates.isEmpty()) log.debug ("Updating adress data"); updates.put("PARENT_PIDM", strPPIDM); writeUpdates(namedParameters,updates,"cc_gen_peci_addr_data_t"); //phones namedParameters.put("PARENT_PIDM", strPPIDM); phoneUpdate (phoneDataIn, phoneData, namedParameters); getResponse().setStatus( Status.SUCCESS_OK ); getResponse().setEntity( jsonResponse.toString(), MediaType.APPLICATION_JSON ); } } } } else { if (dataMode.equals("DELETE")){ SQL="update cc_gen_peci_emergs_t set CHANGE_COLS='DELETE' where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); SQL="update 
cc_gen_peci_phone_data_t set CHANGE_COLS='DELETE' where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID" +" and PARENT_PPID not in (Select PARENT_PPID from cc_adv_peci_parents_t where STUDENT_PIDM=:STUDENT_PIDM and (CHANGE_COLS not like '%DELETE%' or CHANGE_COLS is null))"; jdbcCAS.update(SQL,namedParameters); }else{ if (ppid == ""){ //new Contact response log.debug("New Temporaty Contact Record"); } else { try { //Contact Data SQL="select EMERG_LEGAL_PREFIX_NAME,EMERG_LEGAL_PREFIX_NAME,EMERG_PREF_FIRST_NAME,EMERG_PREF_MIDDLE_NAME,EMERG_PREF_LAST_NAME," + "EMERG_LEGAL_SUFFIX_NAME,EMERG_RELT_CODE,EMERG_CONTACT_PRIORITY,EMERG_NO_CELL_PHONE,EMERG_PHONE_NUMBER_TYPE_CODE," + "EMERG_CELL_PHONE_CARRIER,EMERG_PHONE_TTY_DEVICE, PARENT_PIDM from cc_gen_peci_emergs_t where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; emrgData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } try { //phones SQL="select PECI_PHONE_CODE,PHONE_CODE,PHONE_AREA_CODE,PHONE_NUMBER,PHONE_NUMBER_INTL,PHONE_SEQUENCE_NO,PHONE_STATUS_IND,PHONE_PRIMARY_IND," + "CELL_PHONE_CARRIER,PHONE_TTY_DEVICE,EMERG_AUTO_OPT_OUT,EMERG_SEND_TEXT,EMERG_NO_CELL_PHONE from cc_gen_peci_phone_data_t " + "where (PHONE_STATUS_IND is null or PHONE_STATUS_IND = 'A') and (CHANGE_COLS != 'DELETE' or CHANGE_COLS is null) and STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; phoneData = jdbcCAS.queryForList(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } try { //email SQL="select PECI_EMAIL_CODE,EMAIL_ADDRESS from cc_gen_peci_email_data_t where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; emailData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } try{ //Address SQL="select EMERG_CONTACT_PRIORITY,PERSON_ROLE,PECI_ADDR_CODE,ADDR_CODE,ADDR_SEQUENCE_NO,ADDR_STREET_LINE1,ADDR_STREET_LINE2,ADDR_STREET_LINE3," + 
"ADDR_CITY,ADDR_STAT_CODE,ADDR_ZIP,ADDR_NATN_CODE,ADDR_STATUS_IND from cc_gen_peci_addr_data_t where (ADDR_STATUS_IND is null or " + " ADDR_STATUS_IND = 'A') and STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; addressData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } } if (dataMode.equals("READ")) { jsonResponse.put("result", "success"); jsonResponse.put("contact",emrgData ); jsonResponse.put("phones",phoneData ); jsonResponse.put("email",emailData ); jsonResponse.put("address",addressData ); getResponse().setStatus( Status.SUCCESS_OK ); getResponse().setEntity( jsonResponse.toString(), MediaType.APPLICATION_JSON ); } else { Map<String,Object> emrgDataIn = toMap(json.getJSONObject("contact")); Map<String,Object> emailDataIn = toMap(json.getJSONObject("email")); Map<String,Object> addressDataIn = toMap(json.getJSONObject("address")); List<Object> phoneDataList = toList(json.getJSONArray("phones")); List<Map<String,Object>> phoneDataIn = new ArrayList<Map<String,Object>>(); for (Object phone : phoneDataList){ if (phone.getClass().getName() == "java.util.HashMap") { phoneDataIn.add((Map<String,Object>)phone); } } //Find the values that have been updated Map<String,Object> updates = new HashMap<String,Object>(); //contact Data if (ppid.equals("null")){ log.debug("Creating new parent record."); //Add a contact //Find an Alpha Seq No Map<String,Object> maxData = new HashMap<String, Object>(); char seqNo = Parent_PPID(namedParameters); log.debug ("New contact record to create PPID: " + Character.toString(seqNo)); SQL = "INSERT cc_gen_peci_emergs_t SET "; List<String> columns = new ArrayList(emrgDataIn.keySet()); for(int y=0; y<columns.size(); y++) { String key = columns.get(y); if (!key.equals("PARENT_PPID")) { Object newValue = emrgDataIn.get(key); if (newValue.getClass().getName().equals("java.lang.String")) { SQL = SQL + key +" = '" + newValue.toString().replace("'", "''") + "', "; } else { SQL = 
SQL + key +" = " + newValue + ", "; } } } SQL = SQL + "CHANGE_COLS = 'NEW', " + "STUDENT_PIDM=:STUDENT_PIDM, " + "PARENT_PPID=:PARENT_PPID"; namedParameters.put("PARENT_PPID",String.valueOf(seqNo)); jdbcCAS.update(SQL,namedParameters); jsonResponse.put("PARENT_PPID",String.valueOf(seqNo)); } else { updates = compareMap(emrgDataIn, emrgData); writeUpdates(namedParameters,updates,"cc_gen_peci_emergs_t"); //ensure that Parent data i.e. name is updated in sync with contact SQL="UPDATE cc_adv_peci_parents_t p " + " INNER JOIN cc_gen_peci_emergs_t e" + " ON p.STUDENT_PIDM = e.STUDENT_PIDM" + " AND p.PARENT_PPID = e.PARENT_PPID" + " SET PARENT_LEGAL_PREFIX_NAME=EMERG_LEGAL_PREFIX_NAME, " + " PARENT_LEGAL_FIRST_NAME=EMERG_LEGAL_FIRST_NAME, " + " PARENT_LEGAL_MIDDLE_NAME=EMERG_LEGAL_MIDDLE_NAME, " + " PARENT_LEGAL_LAST_NAME=EMERG_LEGAL_LAST_NAME, " + " PARENT_LEGAL_SUFFIX_NAME=EMERG_LEGAL_SUFFIX_NAME, " + " PARENT_PREF_FIRST_NAME=EMERG_PREF_FIRST_NAME, " + " PARENT_PREF_MIDDLE_NAME=EMERG_PREF_MIDDLE_NAME, " + " PARENT_PREF_LAST_NAME=EMERG_PREF_LAST_NAME," + " PARENT_RELT_CODE=EMERG_RELT_CODE" + " WHERE p.STUDENT_PIDM=:STUDENT_PIDM " + " AND p.PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); } //email updates = compareMap(emailDataIn, emailData); writeUpdates(namedParameters,updates,"cc_gen_peci_email_data_t"); //adresses updates = compareMap(addressDataIn, addressData); writeUpdates(namedParameters,updates,"cc_gen_peci_addr_data_t"); //phones phoneUpdate (phoneDataIn, phoneData, namedParameters); getResponse().setStatus( Status.SUCCESS_OK ); getResponse().setEntity( jsonResponse.toString(), MediaType.APPLICATION_JSON ); } } } } } } catch (JSONException e) { getResponse().setStatus( Status.CLIENT_ERROR_BAD_REQUEST, e.getMessage() ); getResponse().setEntity( "{ \"JSON Error\": \"" + e.getMessage() + "\"}", MediaType.APPLICATION_JSON ); log.error( "JSON Error", e ); } catch (Exception e) { e.printStackTrace(); getResponse().setEntity( "{ \"Internal Server 
Error\": \"" + e.getMessage() + "\"}", MediaType.APPLICATION_JSON ); getResponse().setStatus( Status.SERVER_ERROR_INTERNAL, e.getMessage() ); log.error( "Restlet Error ", e ); } } public void writeUpdates (Map<String,Object> namedParameters, Map<String, Object> updates, String tableName) { writeUpdates (namedParameters, updates, tableName, jdbcCAS); } public static void writeUpdates (Map<String,Object> namedParameters, Map<String, Object> updates, String tableName, NamedParameterJdbcTemplate jdbcCAS) { Map<String, Object> sourceData = new HashMap<String, Object>(); if (updates.size() > 0 ) { //Write Data changes String SQL="select count(*) ct from "+ tableName +" where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; sourceData = jdbcCAS.queryForMap(SQL,namedParameters); int ct = Integer.valueOf(sourceData.get("ct").toString()); if (ct == 0){ SQL = "Insert "+ tableName +" SET "; List<String> columns = new ArrayList(updates.keySet()); for(int i=0; i<columns.size(); i++) { String key = columns.get(i); Object newValue = updates.get(key); if (newValue.getClass().getName().equals("java.lang.String")) { SQL = SQL + key +" = '" + newValue + "', "; } else { SQL = SQL + key +" = " + newValue + ", "; } } SQL = SQL + "CHANGE_COLS = 'NEW', " + "STUDENT_PIDM=:STUDENT_PIDM, " + "PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); } else{ SQL="select CHANGE_COLS from "+ tableName +" where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; sourceData = jdbcCAS.queryForMap(SQL,namedParameters); String changeCol = (String) sourceData.get("CHANGE_COLS"); if (changeCol == null) changeCol=""; SQL = "UPDATE "+ tableName +" SET "; List<String> columns = new ArrayList(updates.keySet()); for(int i=0; i<columns.size(); i++) { String key = columns.get(i); Object newValue = updates.get(key); if (newValue.getClass().getName().equals("java.lang.String")) { if (newValue.toString().equals("null")) { SQL = SQL + key +" = null, "; } else { SQL = SQL + key +" = '" + 
newValue + "', "; } }else{ SQL = SQL + key +" = " + newValue + ", "; } changeCol = changeCol + key + ","; } SQL = SQL + "CHANGE_COLS = '" + changeCol +"'" + " where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; jdbcCAS.update(SQL,namedParameters); } } } public static Map<String, Object> toMap(JSONObject object) throws JSONException { Map<String, Object> map = new HashMap<String, Object>(); Iterator<String> keysItr = object.keys(); while(keysItr.hasNext()) { String key = keysItr.next(); Object value = object.get(key); if(value instanceof JSONArray) { value = toList((JSONArray) value); } else if(value instanceof JSONObject) { value = toMap((JSONObject) value); } map.put(key, value); } return map; } public static List<Object> toList(JSONArray array) throws JSONException { List<Object> list = new ArrayList<Object>(); for(int i = 0; i < array.length(); i++) { Object value = array.get(i); if(value instanceof JSONArray) { value = toList((JSONArray) value); } else if(value instanceof JSONObject) { value = toMap((JSONObject) value); } list.add(value); } return list; } public char Parent_PPID (Map<String, Object> namedParameters){ Map<String,Object> maxData = new HashMap<String, Object>(); String SQL; char seqNo = 'A'; try{ SQL="select max(PARENT_PPID) seq from cc_adv_peci_parents_t where STUDENT_PIDM=:STUDENT_PIDM "; maxData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } if ((maxData.get("seq") == null) || (maxData.get("seq").toString().matches("^-?\\d+$")) ){ seqNo = 'A'; } else { seqNo = maxData.get("seq").toString().charAt(0); seqNo = (char)((int)seqNo + 1); } try{ SQL="select max(PARENT_PPID) seq from cc_gen_peci_emergs_t where STUDENT_PIDM=:STUDENT_PIDM "; maxData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } if (!((maxData.get("seq") == null) || (maxData.get("seq").toString().matches("^-?\\d+$")) )){ if 
(maxData.get("seq").toString().charAt(0) >= seqNo){ seqNo = maxData.get("seq").toString().charAt(0); seqNo = (char)((int)seqNo + 1); } } return seqNo; } public static Map<String, Object> compareMap(Map<String, Object> testMap, Map<String, Object> origMap) throws Exception { Map<String, Object> map = new HashMap<String, Object>(); List<String> columns = new ArrayList(testMap.keySet()); for(int i=0; i<columns.size(); i++) { String key = columns.get(i); Object testValue = testMap.get(key); if (origMap.containsKey(key) || origMap.size() == 0) { Object origValue = origMap.get(key); if (origValue != null) { if ( origValue.getClass().getName().equals("java.lang.String") ) { if (!(origValue.equals(testValue))){ map.put(key,testValue.toString()); } } else if ( origValue.getClass().getName().equals("java.lang.Integer") ){ if (!(origValue != testValue)){ map.put(key,testValue); } } } else if ( !(testValue.getClass().getName().equals("org.json.JSONObject$Null")) && testValue != null) { map.put(key,testValue); } } } return map; } public void phoneUpdate (List<Map<String,Object>> phoneDataIn, List<Map<String,Object>> phoneData, Map<String,Object> namedParameters) throws Exception { phoneUpdate (phoneDataIn, phoneData, namedParameters, jdbcCAS); } public static void phoneUpdate (List<Map<String,Object>> phoneDataIn, List<Map<String,Object>> phoneData, Map<String,Object> namedParameters, NamedParameterJdbcTemplate jdbcCAS) throws Exception { String SQL; for (int i=0;i<phoneDataIn.size();i++){ Map<String,Object> phoneRecordIn = phoneDataIn.get(i); phoneRecordIn.put("PARENT_PIDM", namedParameters.get("PARENT_PIDM")); String phoneNumber = ""; if (phoneRecordIn.containsKey("PHONE_NUMBER")){ if (!(phoneRecordIn.get("PHONE_NUMBER").getClass().getName().equals("org.json.JSONObject$Null"))) phoneNumber=(String)phoneRecordIn.get("PHONE_NUMBER"); } String phoneNumberIntl = ""; if (phoneRecordIn.containsKey("PHONE_NUMBER_INTL")) { if 
(!(phoneRecordIn.get("PHONE_NUMBER_INTL").getClass().getName().equals("org.json.JSONObject$Null"))) phoneNumberIntl = (String)phoneRecordIn.get("PHONE_NUMBER_INTL"); } if (( phoneRecordIn.get("PHONE_SEQUENCE_NO").getClass().getName().equals("org.json.JSONObject$Null") ) || phoneRecordIn.get("PHONE_SEQUENCE_NO")=="") { //Add a phone to the parent //Find an Alpha Seq No Map<String,Object> maxData = new HashMap<String, Object>(); char seqNo; try{ SQL="select max(PHONE_SEQUENCE_NO) seq from cc_gen_peci_phone_data_t where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID"; maxData = jdbcCAS.queryForMap(SQL,namedParameters); } catch (EmptyResultDataAccessException e){ // dataset empty } if ((maxData.get("seq") == null) || (maxData.get("seq").toString().matches("^-?\\d+$")) ){ seqNo = 'A'; } else { seqNo = maxData.get("seq").toString().charAt(0); seqNo = (char)((int)seqNo + 1); } if ( !phoneNumberIntl.equals("") || !phoneNumber.equals("")) { SQL = "INSERT cc_gen_peci_phone_data_t SET "; List<String> columns = new ArrayList(phoneRecordIn.keySet()); for(int y=0; y<columns.size(); y++) { String key = columns.get(y); if (!key.equals("PHONE_SEQUENCE_NO")) { Object newValue = phoneRecordIn.get(key); if (newValue.getClass().getName().equals("java.lang.String")) { SQL = SQL + key +" = '" + newValue + "', "; } else { SQL = SQL + key +" = " + newValue + ", "; } } } SQL = SQL + "CHANGE_COLS = 'NEW', " + "STUDENT_PIDM=:STUDENT_PIDM, " + "PARENT_PPID=:PARENT_PPID, " + "PHONE_SEQUENCE_NO='" + seqNo + "'"; jdbcCAS.update(SQL,namedParameters); } } else { for (int x=0;x<phoneData.size();x++) { Map<String,Object> phoneRecord = phoneData.get(x); if ( (phoneRecordIn.get("PHONE_CODE").equals(phoneRecord.get("PHONE_CODE"))) && (phoneRecordIn.get("PHONE_SEQUENCE_NO").equals(phoneRecord.get("PHONE_SEQUENCE_NO"))) ){ Map<String,Object> phoneParameters = namedParameters; phoneParameters.put("PHONE_SEQUENCE_NO", phoneRecordIn.get("PHONE_SEQUENCE_NO")); phoneParameters.put("PHONE_CODE", 
phoneRecordIn.get("PHONE_CODE")); if ((phoneNumber.isEmpty()) && (phoneNumberIntl.isEmpty())){ //Inactivate the phone record SQL = "UPDATE cc_gen_peci_phone_data_t SET " + " CHANGE_COLS = 'DELETE', " + " PHONE_STATUS_IND = 'I' " + " where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID" + " and PHONE_CODE=:PHONE_CODE and PHONE_SEQUENCE_NO=:PHONE_SEQUENCE_NO"; jdbcCAS.update(SQL,phoneParameters); }else{ // compare Map<String,Object> updates = compareMap(phoneRecordIn, phoneRecord); if (updates.size() > 0 ) { //Write Parent Data changes SQL="select CHANGE_COLS from cc_gen_peci_phone_data_t where STUDENT_PIDM=:STUDENT_PIDM and " + "PARENT_PPID=:PARENT_PPID and PHONE_CODE=:PHONE_CODE and PHONE_SEQUENCE_NO=:PHONE_SEQUENCE_NO"; Map<String,Object> sourceData = jdbcCAS.queryForMap(SQL,phoneParameters); String changeCol = (String) sourceData.get("CHANGE_COLS"); if (changeCol == null) changeCol=""; SQL = "UPDATE cc_gen_peci_phone_data_t SET "; List<String> columns = new ArrayList(updates.keySet()); for(int y=0; y<columns.size(); y++) { String key = columns.get(y); Object newValue = updates.get(key); if (newValue.getClass().getName().equals("java.lang.String")) { SQL = SQL + key +" = '" + newValue.toString().replace("'", "''") + "', "; } else { SQL = SQL + key +" = " + newValue + ", "; } changeCol = changeCol + key + ","; } SQL = SQL + "CHANGE_COLS = '" + changeCol +"'" + " where STUDENT_PIDM=:STUDENT_PIDM and PARENT_PPID=:PARENT_PPID" + " and PHONE_CODE=:PHONE_CODE and PHONE_SEQUENCE_NO=:PHONE_SEQUENCE_NO"; jdbcCAS.update(SQL,phoneParameters); } } } } } } } }
apache-2.0
zhishan332/hunt4j
hunt4j-core/src/main/java/com/yermoon/hunt4j/core/http/SimpleDownLoader.java
3829
package com.yermoon.hunt4j.core.http; import com.yermoon.hunt4j.core.exception.HuntException; import com.yermoon.hunt4j.core.io.FileUtils; import com.yermoon.hunt4j.core.thread.ThreadPool; import java.io.File; import java.io.FileOutputStream; import java.io.InputStream; import java.net.URL; import java.net.URLConnection; import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; /** * 基于多线程的下载器 * * @author wangqing * @since 1.0.0 */ public class SimpleDownLoader implements DownLoader { private int threadNum; private String tempFilePath; private ExecutorService executorService; public SimpleDownLoader(int threadNum) { if (threadNum <= 0) { this.threadNum = 1; } this.tempFilePath = FileUtils.getWorkPath(); executorService = ThreadPool.getInstance(this.threadNum).get(); } public void setExecutorService(ExecutorService executorService) { this.executorService = executorService; } public void setThreadNum(int threadNum) { this.threadNum = threadNum; } public void setTempFilePath(String tempFilePath) { this.tempFilePath = tempFilePath; } /** * 下载 * * @param url 下载地址 * @param path 下载路径 * @param timeout 超时时间 ,0的话为不限制时间,直到程序出现异常 单位秒 * @return 下载后文件路径 * @throws Exception */ @Override public File downLoad(String url, String path, int timeout) throws Exception { if (path == null || "".equals(path.trim())) path = FileUtils.getAutoFileName(tempFilePath) ; File tempFile = new File(path); if (tempFile.isDirectory()) throw new HuntException("路径不正确:" + path); File ff = new File(path); Future<Boolean> future = executorService.submit(new HttpDownloader(url, (new FileOutputStream(path)).getChannel())); if (timeout <= 0) { if (future.get()) { if (ff.exists()) return ff; } } else { if (future.get(timeout, TimeUnit.SECONDS)) { if (ff.exists()) return ff; } } return null; } 
class HttpDownloader implements Callable<Boolean> { private URLConnection connection; private FileChannel outputChann; public HttpDownloader(String url, FileChannel fileChannel) throws Exception { connection = (new URL(url)).openConnection(); connection.addRequestProperty("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8"); connection.addRequestProperty("Accept-Language", "zh-CN,zh;q=0.8,en;q=0.6"); connection.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/537.36"); this.outputChann = fileChannel; } /** * Computes a result, or throws an exception if unable to do so. * * @return computed result * @throws Exception if unable to compute a result */ @Override public Boolean call() throws Exception { connection.connect(); InputStream inputStream = connection.getInputStream(); ReadableByteChannel rChannel = Channels.newChannel(inputStream); outputChann.transferFrom(rChannel, 0, Integer.MAX_VALUE); inputStream.close(); outputChann.close(); return true; } } }
apache-2.0
zohar-mizrahi/flink
flink-runtime-web/src/test/java/org/apache/flink/runtime/webmonitor/handlers/SubtasksTimesHandlerTest.java
5245
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.webmonitor.handlers; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.AccessExecution; import org.apache.flink.runtime.executiongraph.AccessExecutionGraph; import org.apache.flink.runtime.executiongraph.AccessExecutionJobVertex; import org.apache.flink.runtime.webmonitor.ExecutionGraphHolder; import org.apache.flink.runtime.webmonitor.history.ArchivedJson; import org.apache.flink.runtime.webmonitor.history.JsonArchivist; import org.apache.flink.runtime.webmonitor.utils.ArchivedJobGenerationUtils; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import org.junit.Assert; import org.junit.Test; import java.io.IOException; import java.util.Collection; import static org.mockito.Mockito.mock; /** * Tests for the SubtasksTimesHandler. 
*/ public class SubtasksTimesHandlerTest { @Test public void testArchiver() throws Exception { JsonArchivist archivist = new SubtasksTimesHandler.SubtasksTimesJsonArchivist(); AccessExecutionGraph originalJob = ArchivedJobGenerationUtils.getTestJob(); AccessExecutionJobVertex originalTask = ArchivedJobGenerationUtils.getTestTask(); AccessExecution originalAttempt = ArchivedJobGenerationUtils.getTestAttempt(); Collection<ArchivedJson> archives = archivist.archiveJsonWithPath(originalJob); Assert.assertEquals(1, archives.size()); ArchivedJson archive = archives.iterator().next(); Assert.assertEquals("/jobs/" + originalJob.getJobID() + "/vertices/" + originalTask.getJobVertexId() + "/subtasktimes", archive.getPath()); compareSubtaskTimes(originalTask, originalAttempt, archive.getJson()); } @Test public void testGetPaths() { SubtasksTimesHandler handler = new SubtasksTimesHandler(mock(ExecutionGraphHolder.class)); String[] paths = handler.getPaths(); Assert.assertEquals(1, paths.length); Assert.assertEquals("/jobs/:jobid/vertices/:vertexid/subtasktimes", paths[0]); } @Test public void testJsonGeneration() throws Exception { AccessExecutionJobVertex originalTask = ArchivedJobGenerationUtils.getTestTask(); AccessExecution originalAttempt = ArchivedJobGenerationUtils.getTestAttempt(); String json = SubtasksTimesHandler.createSubtaskTimesJson(originalTask); compareSubtaskTimes(originalTask, originalAttempt, json); } private static void compareSubtaskTimes(AccessExecutionJobVertex originalTask, AccessExecution originalAttempt, String json) throws IOException { JsonNode result = ArchivedJobGenerationUtils.MAPPER.readTree(json); Assert.assertEquals(originalTask.getJobVertexId().toString(), result.get("id").asText()); Assert.assertEquals(originalTask.getName(), result.get("name").asText()); Assert.assertTrue(result.get("now").asLong() > 0L); ArrayNode subtasks = (ArrayNode) result.get("subtasks"); JsonNode subtask = subtasks.get(0); Assert.assertEquals(0, 
subtask.get("subtask").asInt()); Assert.assertEquals(originalAttempt.getAssignedResourceLocation().getHostname(), subtask.get("host").asText()); Assert.assertEquals(originalAttempt.getStateTimestamp(originalAttempt.getState()) - originalAttempt.getStateTimestamp(ExecutionState.SCHEDULED), subtask.get("duration").asLong()); JsonNode timestamps = subtask.get("timestamps"); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.CREATED), timestamps.get(ExecutionState.CREATED.name()).asLong()); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.SCHEDULED), timestamps.get(ExecutionState.SCHEDULED.name()).asLong()); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.DEPLOYING), timestamps.get(ExecutionState.DEPLOYING.name()).asLong()); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.RUNNING), timestamps.get(ExecutionState.RUNNING.name()).asLong()); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.FINISHED), timestamps.get(ExecutionState.FINISHED.name()).asLong()); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.CANCELING), timestamps.get(ExecutionState.CANCELING.name()).asLong()); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.CANCELED), timestamps.get(ExecutionState.CANCELED.name()).asLong()); Assert.assertEquals(originalAttempt.getStateTimestamp(ExecutionState.FAILED), timestamps.get(ExecutionState.FAILED.name()).asLong()); } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/ReservedInstancesModification.java
22289
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * Describes a Reserved Instance modification. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ReservedInstancesModification" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ReservedInstancesModification implements Serializable, Cloneable { /** * <p> * A unique, case-sensitive key supplied by the client to ensure that the request is idempotent. For more * information, see <a * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring * Idempotency</a>. * </p> */ private String clientToken; /** * <p> * The time when the modification request was created. * </p> */ private java.util.Date createDate; /** * <p> * The time for the modification to become effective. * </p> */ private java.util.Date effectiveDate; /** * <p> * Contains target configurations along with their corresponding new Reserved Instance IDs. * </p> */ private com.amazonaws.internal.SdkInternalList<ReservedInstancesModificationResult> modificationResults; /** * <p> * The IDs of one or more Reserved Instances. * </p> */ private com.amazonaws.internal.SdkInternalList<ReservedInstancesId> reservedInstancesIds; /** * <p> * A unique ID for the Reserved Instance modification. 
* </p> */ private String reservedInstancesModificationId; /** * <p> * The status of the Reserved Instances modification request. * </p> */ private String status; /** * <p> * The reason for the status. * </p> */ private String statusMessage; /** * <p> * The time when the modification request was last updated. * </p> */ private java.util.Date updateDate; /** * <p> * A unique, case-sensitive key supplied by the client to ensure that the request is idempotent. For more * information, see <a * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring * Idempotency</a>. * </p> * * @param clientToken * A unique, case-sensitive key supplied by the client to ensure that the request is idempotent. For more * information, see <a * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring * Idempotency</a>. */ public void setClientToken(String clientToken) { this.clientToken = clientToken; } /** * <p> * A unique, case-sensitive key supplied by the client to ensure that the request is idempotent. For more * information, see <a * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring * Idempotency</a>. * </p> * * @return A unique, case-sensitive key supplied by the client to ensure that the request is idempotent. For more * information, see <a * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring * Idempotency</a>. */ public String getClientToken() { return this.clientToken; } /** * <p> * A unique, case-sensitive key supplied by the client to ensure that the request is idempotent. For more * information, see <a * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring * Idempotency</a>. * </p> * * @param clientToken * A unique, case-sensitive key supplied by the client to ensure that the request is idempotent. 
For more * information, see <a * href="https://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html">Ensuring * Idempotency</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withClientToken(String clientToken) { setClientToken(clientToken); return this; } /** * <p> * The time when the modification request was created. * </p> * * @param createDate * The time when the modification request was created. */ public void setCreateDate(java.util.Date createDate) { this.createDate = createDate; } /** * <p> * The time when the modification request was created. * </p> * * @return The time when the modification request was created. */ public java.util.Date getCreateDate() { return this.createDate; } /** * <p> * The time when the modification request was created. * </p> * * @param createDate * The time when the modification request was created. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withCreateDate(java.util.Date createDate) { setCreateDate(createDate); return this; } /** * <p> * The time for the modification to become effective. * </p> * * @param effectiveDate * The time for the modification to become effective. */ public void setEffectiveDate(java.util.Date effectiveDate) { this.effectiveDate = effectiveDate; } /** * <p> * The time for the modification to become effective. * </p> * * @return The time for the modification to become effective. */ public java.util.Date getEffectiveDate() { return this.effectiveDate; } /** * <p> * The time for the modification to become effective. * </p> * * @param effectiveDate * The time for the modification to become effective. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public ReservedInstancesModification withEffectiveDate(java.util.Date effectiveDate) { setEffectiveDate(effectiveDate); return this; } /** * <p> * Contains target configurations along with their corresponding new Reserved Instance IDs. * </p> * * @return Contains target configurations along with their corresponding new Reserved Instance IDs. */ public java.util.List<ReservedInstancesModificationResult> getModificationResults() { if (modificationResults == null) { modificationResults = new com.amazonaws.internal.SdkInternalList<ReservedInstancesModificationResult>(); } return modificationResults; } /** * <p> * Contains target configurations along with their corresponding new Reserved Instance IDs. * </p> * * @param modificationResults * Contains target configurations along with their corresponding new Reserved Instance IDs. */ public void setModificationResults(java.util.Collection<ReservedInstancesModificationResult> modificationResults) { if (modificationResults == null) { this.modificationResults = null; return; } this.modificationResults = new com.amazonaws.internal.SdkInternalList<ReservedInstancesModificationResult>(modificationResults); } /** * <p> * Contains target configurations along with their corresponding new Reserved Instance IDs. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setModificationResults(java.util.Collection)} or {@link #withModificationResults(java.util.Collection)} * if you want to override the existing values. * </p> * * @param modificationResults * Contains target configurations along with their corresponding new Reserved Instance IDs. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withModificationResults(ReservedInstancesModificationResult... 
modificationResults) { if (this.modificationResults == null) { setModificationResults(new com.amazonaws.internal.SdkInternalList<ReservedInstancesModificationResult>(modificationResults.length)); } for (ReservedInstancesModificationResult ele : modificationResults) { this.modificationResults.add(ele); } return this; } /** * <p> * Contains target configurations along with their corresponding new Reserved Instance IDs. * </p> * * @param modificationResults * Contains target configurations along with their corresponding new Reserved Instance IDs. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withModificationResults(java.util.Collection<ReservedInstancesModificationResult> modificationResults) { setModificationResults(modificationResults); return this; } /** * <p> * The IDs of one or more Reserved Instances. * </p> * * @return The IDs of one or more Reserved Instances. */ public java.util.List<ReservedInstancesId> getReservedInstancesIds() { if (reservedInstancesIds == null) { reservedInstancesIds = new com.amazonaws.internal.SdkInternalList<ReservedInstancesId>(); } return reservedInstancesIds; } /** * <p> * The IDs of one or more Reserved Instances. * </p> * * @param reservedInstancesIds * The IDs of one or more Reserved Instances. */ public void setReservedInstancesIds(java.util.Collection<ReservedInstancesId> reservedInstancesIds) { if (reservedInstancesIds == null) { this.reservedInstancesIds = null; return; } this.reservedInstancesIds = new com.amazonaws.internal.SdkInternalList<ReservedInstancesId>(reservedInstancesIds); } /** * <p> * The IDs of one or more Reserved Instances. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setReservedInstancesIds(java.util.Collection)} or {@link #withReservedInstancesIds(java.util.Collection)} * if you want to override the existing values. 
* </p> * * @param reservedInstancesIds * The IDs of one or more Reserved Instances. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withReservedInstancesIds(ReservedInstancesId... reservedInstancesIds) { if (this.reservedInstancesIds == null) { setReservedInstancesIds(new com.amazonaws.internal.SdkInternalList<ReservedInstancesId>(reservedInstancesIds.length)); } for (ReservedInstancesId ele : reservedInstancesIds) { this.reservedInstancesIds.add(ele); } return this; } /** * <p> * The IDs of one or more Reserved Instances. * </p> * * @param reservedInstancesIds * The IDs of one or more Reserved Instances. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withReservedInstancesIds(java.util.Collection<ReservedInstancesId> reservedInstancesIds) { setReservedInstancesIds(reservedInstancesIds); return this; } /** * <p> * A unique ID for the Reserved Instance modification. * </p> * * @param reservedInstancesModificationId * A unique ID for the Reserved Instance modification. */ public void setReservedInstancesModificationId(String reservedInstancesModificationId) { this.reservedInstancesModificationId = reservedInstancesModificationId; } /** * <p> * A unique ID for the Reserved Instance modification. * </p> * * @return A unique ID for the Reserved Instance modification. */ public String getReservedInstancesModificationId() { return this.reservedInstancesModificationId; } /** * <p> * A unique ID for the Reserved Instance modification. * </p> * * @param reservedInstancesModificationId * A unique ID for the Reserved Instance modification. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public ReservedInstancesModification withReservedInstancesModificationId(String reservedInstancesModificationId) { setReservedInstancesModificationId(reservedInstancesModificationId); return this; } /** * <p> * The status of the Reserved Instances modification request. * </p> * * @param status * The status of the Reserved Instances modification request. */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the Reserved Instances modification request. * </p> * * @return The status of the Reserved Instances modification request. */ public String getStatus() { return this.status; } /** * <p> * The status of the Reserved Instances modification request. * </p> * * @param status * The status of the Reserved Instances modification request. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withStatus(String status) { setStatus(status); return this; } /** * <p> * The reason for the status. * </p> * * @param statusMessage * The reason for the status. */ public void setStatusMessage(String statusMessage) { this.statusMessage = statusMessage; } /** * <p> * The reason for the status. * </p> * * @return The reason for the status. */ public String getStatusMessage() { return this.statusMessage; } /** * <p> * The reason for the status. * </p> * * @param statusMessage * The reason for the status. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withStatusMessage(String statusMessage) { setStatusMessage(statusMessage); return this; } /** * <p> * The time when the modification request was last updated. * </p> * * @param updateDate * The time when the modification request was last updated. */ public void setUpdateDate(java.util.Date updateDate) { this.updateDate = updateDate; } /** * <p> * The time when the modification request was last updated. 
* </p> * * @return The time when the modification request was last updated. */ public java.util.Date getUpdateDate() { return this.updateDate; } /** * <p> * The time when the modification request was last updated. * </p> * * @param updateDate * The time when the modification request was last updated. * @return Returns a reference to this object so that method calls can be chained together. */ public ReservedInstancesModification withUpdateDate(java.util.Date updateDate) { setUpdateDate(updateDate); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getClientToken() != null) sb.append("ClientToken: ").append(getClientToken()).append(","); if (getCreateDate() != null) sb.append("CreateDate: ").append(getCreateDate()).append(","); if (getEffectiveDate() != null) sb.append("EffectiveDate: ").append(getEffectiveDate()).append(","); if (getModificationResults() != null) sb.append("ModificationResults: ").append(getModificationResults()).append(","); if (getReservedInstancesIds() != null) sb.append("ReservedInstancesIds: ").append(getReservedInstancesIds()).append(","); if (getReservedInstancesModificationId() != null) sb.append("ReservedInstancesModificationId: ").append(getReservedInstancesModificationId()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getStatusMessage() != null) sb.append("StatusMessage: ").append(getStatusMessage()).append(","); if (getUpdateDate() != null) sb.append("UpdateDate: ").append(getUpdateDate()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof 
ReservedInstancesModification == false) return false; ReservedInstancesModification other = (ReservedInstancesModification) obj; if (other.getClientToken() == null ^ this.getClientToken() == null) return false; if (other.getClientToken() != null && other.getClientToken().equals(this.getClientToken()) == false) return false; if (other.getCreateDate() == null ^ this.getCreateDate() == null) return false; if (other.getCreateDate() != null && other.getCreateDate().equals(this.getCreateDate()) == false) return false; if (other.getEffectiveDate() == null ^ this.getEffectiveDate() == null) return false; if (other.getEffectiveDate() != null && other.getEffectiveDate().equals(this.getEffectiveDate()) == false) return false; if (other.getModificationResults() == null ^ this.getModificationResults() == null) return false; if (other.getModificationResults() != null && other.getModificationResults().equals(this.getModificationResults()) == false) return false; if (other.getReservedInstancesIds() == null ^ this.getReservedInstancesIds() == null) return false; if (other.getReservedInstancesIds() != null && other.getReservedInstancesIds().equals(this.getReservedInstancesIds()) == false) return false; if (other.getReservedInstancesModificationId() == null ^ this.getReservedInstancesModificationId() == null) return false; if (other.getReservedInstancesModificationId() != null && other.getReservedInstancesModificationId().equals(this.getReservedInstancesModificationId()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getStatusMessage() == null ^ this.getStatusMessage() == null) return false; if (other.getStatusMessage() != null && other.getStatusMessage().equals(this.getStatusMessage()) == false) return false; if (other.getUpdateDate() == null ^ this.getUpdateDate() == null) return false; if (other.getUpdateDate() != null && 
other.getUpdateDate().equals(this.getUpdateDate()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getClientToken() == null) ? 0 : getClientToken().hashCode()); hashCode = prime * hashCode + ((getCreateDate() == null) ? 0 : getCreateDate().hashCode()); hashCode = prime * hashCode + ((getEffectiveDate() == null) ? 0 : getEffectiveDate().hashCode()); hashCode = prime * hashCode + ((getModificationResults() == null) ? 0 : getModificationResults().hashCode()); hashCode = prime * hashCode + ((getReservedInstancesIds() == null) ? 0 : getReservedInstancesIds().hashCode()); hashCode = prime * hashCode + ((getReservedInstancesModificationId() == null) ? 0 : getReservedInstancesModificationId().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getStatusMessage() == null) ? 0 : getStatusMessage().hashCode()); hashCode = prime * hashCode + ((getUpdateDate() == null) ? 0 : getUpdateDate().hashCode()); return hashCode; } @Override public ReservedInstancesModification clone() { try { return (ReservedInstancesModification) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-cloudsearch/src/main/java/com/amazonaws/services/cloudsearchv2/model/DescribeIndexFieldsRequest.java
10220
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.cloudsearchv2.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * Container for the parameters to the <code><a>DescribeIndexFields</a></code> operation. Specifies the name of the * domain you want to describe. To restrict the response to particular index fields, specify the names of the index * fields you want to describe. To show the active configuration and exclude any pending changes, set the * <code>Deployed</code> option to <code>true</code>. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeIndexFieldsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The name of the domain you want to describe. * </p> */ private String domainName; /** * <p> * A list of the index fields you want to describe. If not specified, information is returned for all configured * index fields. * </p> */ private com.amazonaws.internal.SdkInternalList<String> fieldNames; /** * <p> * Whether to display the deployed configuration (<code>true</code>) or include any pending changes ( * <code>false</code>). Defaults to <code>false</code>. * </p> */ private Boolean deployed; /** * <p> * The name of the domain you want to describe. * </p> * * @param domainName * The name of the domain you want to describe. 
*/
    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }

    /**
     * Returns the name of the domain being described.
     *
     * @return the domain name
     */
    public String getDomainName() {
        return this.domainName;
    }

    /**
     * Fluent variant of {@link #setDomainName(String)}.
     *
     * @param domainName the name of the domain to describe
     * @return this request, for call chaining
     */
    public DescribeIndexFieldsRequest withDomainName(String domainName) {
        setDomainName(domainName);
        return this;
    }

    /**
     * Returns the index-field names to describe. The backing list is created
     * lazily so callers never observe {@code null}; an empty list means
     * "describe every configured index field".
     *
     * @return the (possibly empty) list of field names
     */
    public java.util.List<String> getFieldNames() {
        if (fieldNames == null) {
            fieldNames = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return fieldNames;
    }

    /**
     * Replaces the list of index-field names to describe. A {@code null}
     * argument clears the list; otherwise the collection is copied
     * defensively.
     *
     * @param fieldNames field names to describe, or {@code null} to clear
     */
    public void setFieldNames(java.util.Collection<String> fieldNames) {
        this.fieldNames = (fieldNames == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<String>(fieldNames);
    }

    /**
     * Appends the given names to the current list, creating it when absent.
     * Use {@link #setFieldNames(java.util.Collection)} or
     * {@link #withFieldNames(java.util.Collection)} to replace the list
     * instead of appending.
     *
     * @param fieldNames field names to add
     * @return this request, for call chaining
     */
    public DescribeIndexFieldsRequest withFieldNames(String... fieldNames) {
        if (this.fieldNames == null) {
            setFieldNames(new com.amazonaws.internal.SdkInternalList<String>(fieldNames.length));
        }
        java.util.Collections.addAll(this.fieldNames, fieldNames);
        return this;
    }

    /**
     * Fluent variant of {@link #setFieldNames(java.util.Collection)}; replaces
     * any existing field names.
     *
     * @param fieldNames field names to describe
     * @return this request, for call chaining
     */
    public DescribeIndexFieldsRequest withFieldNames(java.util.Collection<String> fieldNames) {
        setFieldNames(fieldNames);
        return this;
    }

    /**
     * Sets whether to show the deployed configuration ({@code true}) or to
     * include pending changes ({@code false}, the service default).
     *
     * @param deployed whether to display only the deployed configuration
     */
    public void setDeployed(Boolean deployed) {
        this.deployed = deployed;
    }

    /**
     * Returns whether only the deployed configuration is displayed; may be
     * {@code null} when unset (service defaults to {@code false}).
     */
    public Boolean getDeployed() {
        return this.deployed;
    }

    /**
     * Fluent variant of {@link #setDeployed(Boolean)}.
     *
     * @param deployed whether to display only the deployed configuration
     * @return this request, for call chaining
     */
    public DescribeIndexFieldsRequest withDeployed(Boolean deployed) {
        setDeployed(deployed);
        return this;
    }

    /**
     * Boolean-style accessor; identical to {@link #getDeployed()}.
     */
    public Boolean isDeployed() {
        return this.deployed;
    }

    /**
     * Renders the request for logging and debugging; only fields that are set
     * are included.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getDomainName() != null) {
            sb.append("DomainName: ").append(getDomainName()).append(",");
        }
        if (getFieldNames() != null) {
            sb.append("FieldNames: ").append(getFieldNames()).append(",");
        }
        if (getDeployed() != null) {
            sb.append("Deployed: ").append(getDeployed());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeIndexFieldsRequest)) {
            return false;
        }
        DescribeIndexFieldsRequest other = (DescribeIndexFieldsRequest) obj;
        return fieldEquals(this.getDomainName(), other.getDomainName())
                && fieldEquals(this.getFieldNames(), other.getFieldNames())
                && fieldEquals(this.getDeployed(), other.getDeployed());
    }

    /** Null-tolerant equality check shared by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + fieldHash(getDomainName());
        result = prime * result + fieldHash(getFieldNames());
        result = prime * result + fieldHash(getDeployed());
        return result;
    }

    /** Null-tolerant hash used by {@link #hashCode()}; preserves the original formula. */
    private static int fieldHash(Object field) {
        return (field == null) ? 0 : field.hashCode();
    }

    @Override
    public DescribeIndexFieldsRequest clone() {
        return (DescribeIndexFieldsRequest) super.clone();
    }

}
apache-2.0
ganyao114/SwiftAndroid
eventposter/src/main/java/net/swiftos/eventposter/presenter/Presenter.java
3234
package net.swiftos.eventposter.presenter;

import android.os.Bundle;

import net.swiftos.eventposter.core.EventPoster;
import net.swiftos.eventposter.utils.LOG;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Created by gy939 on 2016/9/26.
 *
 * Base class of a global presenter tree. Every live Presenter is registered in
 * a static map keyed by its concrete class, so at most one instance per class
 * can be active at a time. Each presenter also tracks its parent and children
 * so that destroying one cascades down the tree. Presenters subscribe to
 * {@link EventPoster} on init and unsubscribe on destroy.
 */
public abstract class Presenter implements IPresenter {

    // Global registry: concrete presenter class -> the single live instance.
    private static Map<Class<? extends Presenter>, Presenter> presenterMap = new ConcurrentHashMap<>();
    // This presenter's direct children, keyed by their concrete class.
    private Map<Class<? extends Presenter>, Presenter> childs = new ConcurrentHashMap<>();
    // Root of the presenter tree, created once by establish().
    private static Presenter rootPresenter;
    // Parent in the tree; null for the root presenter.
    private Presenter parent;

    /**
     * Creates and registers the root presenter. Must be called once before any
     * other presenter is started or posted to.
     */
    public static void establish() {
        rootPresenter = new RootPresenter();
        rootPresenter.onPresenterInit(rootPresenter);
        presenterMap.put(rootPresenter.getClass(), rootPresenter);
    }

    /**
     * Entry point of the fluent API: returns an {@link ExtCall} bound to the
     * given presenter, falling back to the root presenter when null is passed.
     */
    public static ExtCall With(Presenter presenter) {
        if (presenter == null)
            return new ExtCall(rootPresenter);
        return new ExtCall(presenter);
    }

    /**
     * Looks up the live instance of the given presenter class, or null if none
     * is currently registered.
     */
    public static <T extends Presenter> T find(Class<T> presenterType) {
        T presenter = null;
        if (presenterMap.containsKey(presenterType))
            presenter = (T) presenterMap.get(presenterType);
        return presenter;
    }

    /**
     * Registers this presenter in the global map and records its parent.
     * Invoked from {@link ExtCall#start(Class)} after reflective construction.
     */
    protected void init(Presenter presenter) {
        onPresenterInit(presenter);
        presenterMap.put(getClass(), this);
        parent = presenter;
    }

    /** Hook for receiving a cross-presenter post; no-op by default. */
    @Override
    public void onPost(IPresenter context, Bundle data) {

    }

    /**
     * Tears this presenter down: removes it from the registry, notifies the
     * parent, then recursively destroys all children.
     *
     * NOTE(review): the result of presenterMap.remove(getClass()) is
     * dereferenced directly, so destroying a presenter that is no longer
     * registered would throw an NPE — presumably destory() is only ever
     * called once per instance; verify against callers.
     */
    public void destory(Presenter presenter) {
        presenterMap.remove(getClass()).onPresenterDestory(presenter);
        if (parent != null)
            parent.notifyChildDestory(this);
        // ConcurrentHashMap iteration tolerates children removing themselves
        // from `childs` (via notifyChildDestory) while this loop runs.
        for (Map.Entry<Class<? extends Presenter>, Presenter> entry : childs.entrySet()) {
            entry.getValue().destory(this);
        }
    }

    /** Lifecycle hook: subscribes this presenter to the event bus. */
    @Override
    public void onPresenterInit(IPresenter context) {
        EventPoster.register(this);
    }

    /** Lifecycle hook: unsubscribes this presenter from the event bus. */
    @Override
    public void onPresenterDestory(IPresenter context) {
        EventPoster.unRegister(this);
    }

    /**
     * Called by a child while it is being destroyed so this parent drops its
     * reference to it.
     */
    public void notifyChildDestory(Presenter presenter) {
        childs.remove(presenter.getClass());
    }

    /**
     * Fluent helper bound to a source presenter; used to post data to another
     * presenter or to start a child presenter.
     */
    public static class ExtCall {

        private Presenter presenter;

        public ExtCall(Presenter presenter) {
            this.presenter = presenter;
        }

        /**
         * Delivers a Bundle to the live instance of the target presenter
         * class, if one exists; silently does nothing otherwise.
         */
        public void sendPost(Class<? extends Presenter> tarPresenter, Bundle data) {
            Presenter tar = Presenter.find(tarPresenter);
            if (tar != null) {
                tar.onPost(presenter, data);
            }
        }

        /**
         * Reflectively instantiates a presenter of the given class, destroys
         * any previously registered instance of that class, and attaches the
         * new one as a child of the bound presenter.
         *
         * NOTE(review): a reflective construction failure logs an error and
         * then calls System.exit(1), killing the whole process — confirm this
         * hard-exit is intended rather than propagating the failure.
         */
        public <T extends Presenter> T start(Class<T> presenterType) {
            T presenter = null;
            try {
                presenter = presenterType.newInstance();
            } catch (Exception e) {
                LOG.e("presenter" + presenterType.getSimpleName() + "init error");
                System.exit(1);
                return null;
            }
            if (presenter == null)
                return null;
            // Replace any existing instance of the same presenter class.
            Presenter op = presenterMap.get(presenterType);
            if (op != null) {
                op.destory(this.presenter);
            }
            presenter.init(this.presenter);
            return presenter;
        }
    }
}
apache-2.0
lolay/citygrid-java-old
src/test/java/com/lolay/citygrid/content/places/detail/DetailIntegration.java
2549
/* * Licensed to Lolay, Inc. under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Lolay, Inc. licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://github.com/lolay/citygrid/raw/master/LICENSE * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.lolay.citygrid.content.places.detail; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.lolay.citygrid.ClientFactory; import com.lolay.citygrid.InvokerException; import com.lolay.citygrid.content.places.detail.DetailAddress; import com.lolay.citygrid.content.places.detail.DetailClient; import com.lolay.citygrid.content.places.detail.DetailInvoker; import com.lolay.citygrid.content.places.detail.DetailLocation; import com.lolay.citygrid.content.places.detail.DetailResults; import com.lolay.citygrid.content.places.search.SearchIntegration; import junit.framework.TestCase; public class DetailIntegration extends TestCase { private static final Log testDetailLog = LogFactory.getLog(SearchIntegration.class.getName() + ".testDetail"); private static final String baseUrl = "http://api.citygridmedia.com"; public void testDetail() throws Exception { Log log = testDetailLog; log.trace("ENTER"); DetailClient client = new ClientFactory(baseUrl).getDetail(); DetailInvoker invoker = DetailInvoker.builder().listingId(10100230).clientIp("127.0.0.1") .publisher("test").placement("search_page").build(); DetailResults results = null; try { long start = 
System.currentTimeMillis(); results = invoker.profile(client); long end = System.currentTimeMillis(); log.trace(String.format("Get detail took %s ms", end - start)); } catch (InvokerException e) { log.error(e.getErrorCodes(), e); fail(); } assertNotNull(results); DetailLocation location = results.getLocation(); assertNotNull(location); DetailAddress address = location.getAddress(); assertNotNull(address); assertNotNull(address.getPostalCode()); // TODO add validation } }
apache-2.0
slipperyseal/atomicobjects
atomicobjects-sql/src/main/java/net/catchpole/sql/data/Row.java
2744
package net.catchpole.sql.data;

// Copyright 2014 catchpole.net
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import java.io.Serializable;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Objects;

/**
 * A single mutable row of tabular data with change tracking: {@link #set}
 * flips a dirty flag whenever a cell's value actually changes, which callers
 * can inspect via {@link #hasChanged()} and reset via {@link #clearChanged()}.
 */
public class Row implements Iterable<Object>, Serializable {
    // True once any cell has been replaced with a different value.
    private boolean changed = false;
    private Object[] data;

    /**
     * Populates the row from the current position of a ResultSet.
     *
     * @param resultSet cursor positioned on the row to copy
     * @param width     number of columns to read
     * @throws SQLException if reading a column fails
     */
    public Row(ResultSet resultSet, int width) throws SQLException {
        this.data = new Object[width];
        for (int x = 0; x < width; x++) {
            data[x] = resultSet.getObject(x + 1); // JDBC columns are 1-based
        }
    }

    /**
     * Creates an empty row of the given width.
     * (The throws clause is retained for signature compatibility with
     * existing callers that catch SQLException.)
     */
    public Row(int width) throws SQLException {
        data = new Object[width];
    }

    /**
     * Creates a row holding a defensive copy of the given array.
     */
    public Row(Object[] array) throws SQLException {
        this(array.length);
        System.arraycopy(array, 0, data, 0, array.length);
    }

    /** @return the number of cells in this row */
    public int getWidth() {
        return data.length;
    }

    /** @return the value at the given 0-based index */
    public Object get(int index) {
        return data[index];
    }

    /**
     * Replaces the value at the given index, marking the row changed only if
     * the new value differs (null-safely) from the old one.
     *
     * @param index  0-based cell index
     * @param object new value, may be null
     */
    public void set(int index, Object object) {
        // Objects.equals replaces the previous hand-rolled three-clause
        // null-safe comparison with the identical semantics.
        if (!Objects.equals(data[index], object)) {
            changed = true;
        }
        data[index] = object;
    }

    /** Resets the dirty flag, e.g. after persisting the row. */
    public void clearChanged() {
        this.changed = false;
    }

    /** @return true if any cell was modified since the last clearChanged() */
    public boolean hasChanged() {
        return changed;
    }

    public Iterator<Object> iterator() {
        return new RowIterator();
    }

    /** Comma-separated rendering of the cell values. */
    public String toString() {
        StringBuilder sb = new StringBuilder(80);
        boolean first = true;
        for (Object value : data) {
            if (first) {
                first = false;
            } else {
                sb.append(',');
                sb.append(' ');
            }
            sb.append(value);
        }
        return sb.toString();
    }

    /** Read-only cursor over the cells; remove() is unsupported. */
    class RowIterator implements Iterator<Object> {
        private int pos;

        public boolean hasNext() {
            return pos < data.length;
        }

        public Object next() {
            return data[pos++];
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }
}
apache-2.0
google/or-tools
ortools/sat/samples/RankingSampleSat.java
7360
// Copyright 2010-2021 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.ortools.sat.samples;

import com.google.ortools.Loader;
import com.google.ortools.sat.CpModel;
import com.google.ortools.sat.CpSolver;
import com.google.ortools.sat.CpSolverStatus;
import com.google.ortools.sat.IntVar;
import com.google.ortools.sat.IntervalVar;
import com.google.ortools.sat.LinearExpr;
import com.google.ortools.sat.Literal;
import java.util.ArrayList;
import java.util.List;

/** Code sample to demonstrates how to rank intervals. */
public class RankingSampleSat {
  /**
   * This code takes a list of interval variables in a noOverlap constraint, and a parallel list of
   * integer variables and enforces the following constraint
   * <ul>
   * <li>rank[i] == -1 iff interval[i] is not active.
   * <li>rank[i] == number of active intervals that precede interval[i].
   * </ul>
   */
  static void rankTasks(CpModel model, IntVar[] starts, Literal[] presences, IntVar[] ranks) {
    int numTasks = starts.length;

    // Creates precedence variables between pairs of intervals.
    // precedences[i][j] is true iff task i is performed and starts before task j.
    // The diagonal entry precedences[i][i] doubles as "task i is present", so
    // that summing column i counts task i itself plus each active predecessor.
    Literal[][] precedences = new Literal[numTasks][numTasks];
    for (int i = 0; i < numTasks; ++i) {
      for (int j = 0; j < numTasks; ++j) {
        if (i == j) {
          precedences[i][i] = presences[i];
        } else {
          IntVar prec = model.newBoolVar(String.format("%d before %d", i, j));
          precedences[i][j] = prec;
          // Ensure that task i precedes task j if prec is true.
          model.addLessOrEqualWithOffset(starts[i], starts[j], 1).onlyEnforceIf(prec);
        }
      }
    }

    // Ties the precedence literals to the presence literals: for every pair
    // (i, j) of distinct tasks, one of the two precedence directions must hold
    // unless at least one of the two tasks is absent.
    for (int i = 0; i < numTasks - 1; ++i) {
      for (int j = i + 1; j < numTasks; ++j) {
        List<Literal> list = new ArrayList<>();
        list.add(precedences[i][j]);
        list.add(precedences[j][i]);
        list.add(presences[i].not());
        // Makes sure that if i is not performed, all precedences are false.
        model.addImplication(presences[i].not(), precedences[i][j].not());
        model.addImplication(presences[i].not(), precedences[j][i].not());
        list.add(presences[j].not());
        // Makes sure that if j is not performed, all precedences are false.
        model.addImplication(presences[j].not(), precedences[i][j].not());
        model.addImplication(presences[j].not(), precedences[j][i].not());
        // The following boolOr will enforce that for any two intervals:
        //   i precedes j or j precedes i or at least one interval is not
        //   performed.
        model.addBoolOr(list.toArray(new Literal[0]));
        // For efficiency, we add a redundant constraint declaring that only one of i precedes j and
        // j precedes i are true. This will speed up the solve because the reason of this
        // propagation is shorter that using interval bounds is true.
        model.addImplication(precedences[i][j], precedences[j][i].not());
        model.addImplication(precedences[j][i], precedences[i][j].not());
      }
    }

    // Links precedences and ranks: for each task i,
    //   sum_j precedences[j][i] - ranks[i] == 1,
    // i.e. rank[i] = (true literals in column i) - 1. When task i is absent the
    // whole column is false, forcing rank[i] to -1 as specified above.
    for (int i = 0; i < numTasks; ++i) {
      IntVar[] vars = new IntVar[numTasks + 1];
      int[] coefs = new int[numTasks + 1];
      for (int j = 0; j < numTasks; ++j) {
        vars[j] = (IntVar) precedences[j][i];
        coefs[j] = 1;
      }
      vars[numTasks] = ranks[i];
      coefs[numTasks] = -1;
      // ranks == sum(precedences) - 1;
      model.addEquality(LinearExpr.scalProd(vars, coefs), 1);
    }
  }

  /**
   * Builds a small scheduling model with four non-overlapping tasks (the
   * second half optional), ranks them, and minimizes a mix of makespan cost
   * and per-task gain.
   */
  public static void main(String[] args) throws Exception {
    Loader.loadNativeLibraries();
    CpModel model = new CpModel();
    int horizon = 100;
    int numTasks = 4;

    IntVar[] starts = new IntVar[numTasks];
    IntVar[] ends = new IntVar[numTasks];
    IntervalVar[] intervals = new IntervalVar[numTasks];
    Literal[] presences = new Literal[numTasks];
    IntVar[] ranks = new IntVar[numTasks];

    IntVar trueVar = model.newConstant(1);

    // Creates intervals, half of them are optional.
    for (int t = 0; t < numTasks; ++t) {
      starts[t] = model.newIntVar(0, horizon, "start_" + t);
      int duration = t + 1; // task t lasts t + 1 time units.
      ends[t] = model.newIntVar(0, horizon, "end_" + t);
      if (t < numTasks / 2) {
        // First half: mandatory intervals, presence pinned to the constant 1.
        intervals[t] = model.newIntervalVar(
            starts[t], LinearExpr.constant(duration), ends[t], "interval_" + t);
        presences[t] = trueVar;
      } else {
        // Second half: optional intervals guarded by their own presence literal.
        presences[t] = model.newBoolVar("presence_" + t);
        intervals[t] = model.newOptionalIntervalVar(
            starts[t], LinearExpr.constant(duration), ends[t], presences[t], "o_interval_" + t);
      }

      // The rank will be -1 iff the task is not performed.
      ranks[t] = model.newIntVar(-1, numTasks - 1, "rank_" + t);
    }

    // Adds NoOverlap constraint.
    model.addNoOverlap(intervals);

    // Adds ranking constraint.
    rankTasks(model, starts, presences, ranks);

    // Adds a constraint on ranks (ranks[0] < ranks[1]).
    model.addLessOrEqualWithOffset(ranks[0], ranks[1], 1);

    // Creates makespan variable; only performed tasks push it up.
    IntVar makespan = model.newIntVar(0, horizon, "makespan");
    for (int t = 0; t < numTasks; ++t) {
      model.addLessOrEqual(ends[t], makespan).onlyEnforceIf(presences[t]);
    }

    // The objective function is a mix of a fixed gain per task performed, and a fixed cost for each
    // additional day of activity.
    // The solver will balance both cost and gain and minimize makespan * per-day-penalty - number
    // of tasks performed * per-task-gain.
    //
    // On this problem, as the fixed cost is less than the duration of the last interval, the solver
    // will not perform the last interval.
    IntVar[] objectiveVars = new IntVar[numTasks + 1];
    int[] objectiveCoefs = new int[numTasks + 1];
    for (int t = 0; t < numTasks; ++t) {
      objectiveVars[t] = (IntVar) presences[t];
      objectiveCoefs[t] = -7; // gain of 7 per performed task.
    }
    objectiveVars[numTasks] = makespan;
    objectiveCoefs[numTasks] = 2; // cost of 2 per unit of makespan.
    model.minimize(LinearExpr.scalProd(objectiveVars, objectiveCoefs));

    // Creates a solver and solves the model.
    CpSolver solver = new CpSolver();
    CpSolverStatus status = solver.solve(model);

    if (status == CpSolverStatus.OPTIMAL) {
      System.out.println("Optimal cost: " + solver.objectiveValue());
      System.out.println("Makespan: " + solver.value(makespan));
      for (int t = 0; t < numTasks; ++t) {
        if (solver.booleanValue(presences[t])) {
          System.out.printf("Task %d starts at %d with rank %d%n", t, solver.value(starts[t]),
              solver.value(ranks[t]));
        } else {
          System.out.printf(
              "Task %d in not performed and ranked at %d%n", t, solver.value(ranks[t]));
        }
      }
    } else {
      System.out.println("Solver exited with nonoptimal status: " + status);
    }
  }
}
apache-2.0
BartoszJarocki/boilerpipe-android
src/main/java/mf/org/apache/xerces/dom/NodeImpl.java
75610
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package mf.org.apache.xerces.dom; import java.io.IOException; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.Hashtable; import mf.org.w3c.dom.DOMException; import mf.org.w3c.dom.Document; import mf.org.w3c.dom.DocumentType; import mf.org.w3c.dom.NamedNodeMap; import mf.org.w3c.dom.Node; import mf.org.w3c.dom.NodeList; import mf.org.w3c.dom.UserDataHandler; import mf.org.w3c.dom.events.Event; import mf.org.w3c.dom.events.EventListener; import mf.org.w3c.dom.events.EventTarget; /** * NodeImpl provides the basic structure of a DOM tree. It is never used * directly, but instead is subclassed to add type and data * information, and additional methods, appropriate to each node of * the tree. Only its subclasses should be instantiated -- and those, * with the exception of Document itself, only through a specific * Document's factory methods. * <P> * The Node interface provides shared behaviors such as siblings and * children, both for consistancy and so that the most common tree * operations may be performed without constantly having to downcast * to specific node types. When there is no obvious mapping for one of * these queries, it will respond with null. 
 * Note that the default behavior is that children are forbidden. To
 * permit them, the subclass ParentNode overrides several methods.
 * <P>
 * NodeImpl also implements NodeList, so it can return itself in
 * response to the getChildNodes() query. This eliminates the need
 * for a separate ChildNodeList object. Note that this is an
 * IMPLEMENTATION DETAIL; applications should _never_ assume that
 * this identity exists.
 * <P>
 * All nodes in a single document must originate
 * in that document. (Note that this is much tighter than "must be
 * same implementation") Nodes are all aware of their ownerDocument,
 * and attempts to mismatch will throw WRONG_DOCUMENT_ERR.
 * <P>
 * However, to save memory not all nodes always have a direct reference
 * to their ownerDocument. When a node is owned by another node it relies
 * on its owner to store its ownerDocument. Parent nodes always store it
 * though, so there is never more than one level of indirection.
 * And when a node doesn't have an owner, ownerNode refers to its
 * ownerDocument.
 * <p>
 * This class doesn't directly support mutation events; however, it still
 * implements the EventTarget interface and forwards all related calls to the
 * document so that the document class can do so.
 *
 * @xerces.internal
 *
 * @author Arnaud Le Hors, IBM
 * @author Joe Kesselman, IBM
 * @version $Id: NodeImpl.java 814769 2009-09-14 18:25:54Z mrglavas $
 * @since  PR-DOM-Level-1-19980818.
 */
public abstract class NodeImpl
    implements Node, NodeList, EventTarget, Cloneable, Serializable {

    //
    // Constants
    //

    // TreePosition Constants.
    // Taken from DOM L3 Node interface.
    /**
     * The node precedes the reference node.
     */
    public static final short TREE_POSITION_PRECEDING   = 0x01;
    /**
     * The node follows the reference node.
     */
    public static final short TREE_POSITION_FOLLOWING   = 0x02;
    /**
     * The node is an ancestor of the reference node.
     */
    public static final short TREE_POSITION_ANCESTOR    = 0x04;
    /**
     * The node is a descendant of the reference node.
     */
    public static final short TREE_POSITION_DESCENDANT  = 0x08;
    /**
     * The two nodes have an equivalent position. This is the case of two
     * attributes that have the same <code>ownerElement</code>, and two
     * nodes that are the same.
     */
    public static final short TREE_POSITION_EQUIVALENT  = 0x10;
    /**
     * The two nodes are the same. Two nodes that are the same have an
     * equivalent position, though the reverse may not be true.
     */
    public static final short TREE_POSITION_SAME_NODE   = 0x20;
    /**
     * The two nodes are disconnected, they do not have any common ancestor.
     * This is the case of two nodes that are not in the same document.
     */
    public static final short TREE_POSITION_DISCONNECTED = 0x00;

    // DocumentPosition constants (DOM Level 3 Node.compareDocumentPosition
    // return-value bit flags).
    public static final short DOCUMENT_POSITION_DISCONNECTED = 0x01;
    public static final short DOCUMENT_POSITION_PRECEDING = 0x02;
    public static final short DOCUMENT_POSITION_FOLLOWING = 0x04;
    public static final short DOCUMENT_POSITION_CONTAINS = 0x08;
    public static final short DOCUMENT_POSITION_IS_CONTAINED = 0x10;
    public static final short DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC = 0x20;

    /** Serialization version. */
    static final long serialVersionUID = -6316591992167219696L;

    // public

    /** Element definition node type. */
    public static final short ELEMENT_DEFINITION_NODE = 21;

    //
    // Data
    //

    // links

    // Memory-saving trick (see class javadoc): when this node has no owner,
    // ownerNode holds the owner *document* instead. The OWNED bit below
    // records which interpretation applies.
    protected NodeImpl ownerNode; // typically the parent but not always!

    // data

    // Packed boolean state for this node; tested and set through the bit
    // masks below. Only READONLY, SYNCDATA and OWNED are exercised by code
    // visible in this file (isReadOnly/needsSyncData/isOwned); the remaining
    // bits are used by subclasses.
    protected short flags;

    protected final static short READONLY     = 0x1<<0;
    protected final static short SYNCDATA     = 0x1<<1;
    protected final static short SYNCCHILDREN = 0x1<<2;
    protected final static short OWNED        = 0x1<<3;
    protected final static short FIRSTCHILD   = 0x1<<4;
    protected final static short SPECIFIED    = 0x1<<5;
    protected final static short IGNORABLEWS  = 0x1<<6;
    protected final static short HASSTRING    = 0x1<<7;
    protected final static short NORMALIZED   = 0x1<<8;
    protected final static short ID           = 0x1<<9;

    //
    // Constructors
    //

    /**
     * No public constructor; only subclasses of Node should be
     * instantiated, and those normally via a Document's factory methods
     * <p>
     * Every Node knows what Document it belongs to.
     */
    protected NodeImpl(CoreDocumentImpl ownerDocument) {
        // as long as we do not have any owner, ownerNode is our ownerDocument
        ownerNode = ownerDocument;
    } // <init>(CoreDocumentImpl)

    /** Constructor for serialization. */
    public NodeImpl() {}

    //
    // Node methods
    //

    /**
     * A short integer indicating what type of node this is. The named
     * constants for this value are defined in the org.w3c.dom.Node interface.
     */
    public abstract short getNodeType();

    /**
     * the name of this node.
     */
    public abstract String getNodeName();

    /**
     * Returns the node value.
     * @throws DOMException(DOMSTRING_SIZE_ERR)
     */
    public String getNodeValue()
        throws DOMException {
        return null;            // overridden in some subclasses
    }

    /**
     * Sets the node value.
     * @throws DOMException(NO_MODIFICATION_ALLOWED_ERR)
     */
    public void setNodeValue(String x)
        throws DOMException {
        // Default behavior is to do nothing, overridden in some subclasses
    }

    /**
     * Adds a child node to the end of the list of children for this node.
     * Convenience shorthand for insertBefore(newChild,null).
     * @see #insertBefore(Node, Node)
     * <P>
     * By default we do not accept any children, ParentNode overrides this.
     * @see ParentNode
     *
     * @return newChild, in its new state (relocated, or emptied in the case of
     * DocumentNode.)
* * @throws DOMException(HIERARCHY_REQUEST_ERR) if newChild is of a * type that shouldn't be a child of this node. * * @throws DOMException(WRONG_DOCUMENT_ERR) if newChild has a * different owner document than we do. * * @throws DOMException(NO_MODIFICATION_ALLOWED_ERR) if this node is * read-only. */ public Node appendChild(Node newChild) throws DOMException { return insertBefore(newChild, null); } /** * Returns a duplicate of a given node. You can consider this a * generic "copy constructor" for nodes. The newly returned object should * be completely independent of the source object's subtree, so changes * in one after the clone has been made will not affect the other. * <P> * Note: since we never have any children deep is meaningless here, * ParentNode overrides this behavior. * @see ParentNode * * <p> * Example: Cloning a Text node will copy both the node and the text it * contains. * <p> * Example: Cloning something that has children -- Element or Attr, for * example -- will _not_ clone those children unless a "deep clone" * has been requested. A shallow clone of an Attr node will yield an * empty Attr of the same name. * <p> * NOTE: Clones will always be read/write, even if the node being cloned * is read-only, to permit applications using only the DOM API to obtain * editable copies of locked portions of the tree. */ public Node cloneNode(boolean deep) { if (needsSyncData()) { synchronizeData(); } NodeImpl newnode; try { newnode = (NodeImpl)clone(); } catch (CloneNotSupportedException e) { // if we get here we have an error in our program we may as well // be vocal about it, so that people can take appropriate action. 
throw new RuntimeException("**Internal Error**" + e); } // Need to break the association w/ original kids newnode.ownerNode = ownerDocument(); newnode.isOwned(false); // By default we make all clones readwrite, // this is overriden in readonly subclasses newnode.isReadOnly(false); ownerDocument().callUserDataHandlers(this, newnode, UserDataHandler.NODE_CLONED); return newnode; } // cloneNode(boolean):Node /** * Find the Document that this Node belongs to (the document in * whose context the Node was created). The Node may or may not * currently be part of that Document's actual contents. */ public Document getOwnerDocument() { // if we have an owner simply forward the request // otherwise ownerNode is our ownerDocument if (isOwned()) { return ownerNode.ownerDocument(); } else { return (Document) ownerNode; } } /** * same as above but returns internal type and this one is not overridden * by CoreDocumentImpl to return null */ CoreDocumentImpl ownerDocument() { // if we have an owner simply forward the request // otherwise ownerNode is our ownerDocument if (isOwned()) { return ownerNode.ownerDocument(); } else { return (CoreDocumentImpl) ownerNode; } } /** * NON-DOM * set the ownerDocument of this node */ protected void setOwnerDocument(CoreDocumentImpl doc) { if (needsSyncData()) { synchronizeData(); } // if we have an owner we rely on it to have it right // otherwise ownerNode is our ownerDocument if (!isOwned()) { ownerNode = doc; } } /** * Returns the node number */ protected int getNodeNumber() { int nodeNumber; CoreDocumentImpl cd = (CoreDocumentImpl)(this.getOwnerDocument()); nodeNumber = cd.getNodeNumber(this); return nodeNumber; } /** * Obtain the DOM-tree parent of this node, or null if it is not * currently active in the DOM tree (perhaps because it has just been * created or removed). Note that Document, DocumentFragment, and * Attribute will never have parents. 
     */
    public Node getParentNode() {
        return null;            // overridden by ChildNode
    }

    /*
     * same as above but returns internal type
     */
    NodeImpl parentNode() {
        return null;
    }

    /** The next child of this node's parent, or null if none */
    public Node getNextSibling() {
        return null;            // default behavior, overridden in ChildNode
    }

    /** The previous child of this node's parent, or null if none */
    public Node getPreviousSibling() {
        return null;            // default behavior, overridden in ChildNode
    }

    /* NON-DOM: same as getPreviousSibling but returns the internal type */
    ChildNode previousSibling() {
        return null;            // default behavior, overridden in ChildNode
    }

    /**
     * Return the collection of attributes associated with this node,
     * or null if none. At this writing, Element is the only type of node
     * which will ever have attributes.
     *
     * @see ElementImpl
     */
    public NamedNodeMap getAttributes() {
        return null;            // overridden in ElementImpl
    }

    /**
     * Returns whether this node (if it is an element) has any attributes.
     * @return <code>true</code> if this node has any attributes,
     *         <code>false</code> otherwise.
     * @since DOM Level 2
     * @see ElementImpl
     */
    public boolean hasAttributes() {
        return false;           // overridden in ElementImpl
    }

    /**
     * Test whether this node has any children. Convenience shorthand
     * for (Node.getFirstChild()!=null)
     * <P>
     * By default we do not have any children, ParentNode overrides this.
     * @see ParentNode
     */
    public boolean hasChildNodes() {
        return false;
    }

    /**
     * Obtain a NodeList enumerating all children of this node. If there
     * are none, an (initially) empty NodeList is returned.
     * <p>
     * NodeLists are "live"; as children are added/removed the NodeList
     * will immediately reflect those changes. Also, the NodeList refers
     * to the actual nodes, so changes to those nodes made via the DOM tree
     * will be reflected in the NodeList and vice versa.
     * <p>
     * In this implementation, Nodes implement the NodeList interface and
     * provide their own getChildNodes() support. Other DOMs may solve this
     * differently.
     */
    public NodeList getChildNodes() {
        // NodeImpl itself is the (empty) live NodeList -- see class javadoc.
        return this;
    }

    /** The first child of this Node, or null if none.
     * <P>
     * By default we do not have any children, ParentNode overrides this.
     * @see ParentNode
     */
    public Node getFirstChild() {
        return null;
    }

    /** The last child of this Node, or null if none.
     * <P>
     * By default we do not have any children, ParentNode overrides this.
     * @see ParentNode
     */
    public Node getLastChild() {
        return null;
    }

    /**
     * Move one or more node(s) to our list of children. Note that this
     * implicitly removes them from their previous parent.
     * <P>
     * By default we do not accept any children, ParentNode overrides this.
     * @see ParentNode
     *
     * @param newChild The Node to be moved to our subtree. As a
     * convenience feature, inserting a DocumentNode will instead insert
     * all its children.
     *
     * @param refChild Current child which newChild should be placed
     * immediately before. If refChild is null, the insertion occurs
     * after all existing Nodes, like appendChild().
     *
     * @return newChild, in its new state (relocated, or emptied in the case of
     * DocumentNode.)
     *
     * @throws DOMException(HIERARCHY_REQUEST_ERR) if newChild is of a
     * type that shouldn't be a child of this node, or if newChild is an
     * ancestor of this node.
     *
     * @throws DOMException(WRONG_DOCUMENT_ERR) if newChild has a
     * different owner document than we do.
     *
     * @throws DOMException(NOT_FOUND_ERR) if refChild is not a child of
     * this node.
     *
     * @throws DOMException(NO_MODIFICATION_ALLOWED_ERR) if this node is
     * read-only.
     */
    public Node insertBefore(Node newChild, Node refChild)
        throws DOMException {
        throw new DOMException(DOMException.HIERARCHY_REQUEST_ERR,
            DOMMessageFormatter.formatMessage(DOMMessageFormatter.DOM_DOMAIN,
                "HIERARCHY_REQUEST_ERR", null));
    }

    /**
     * Remove a child from this Node. The removed child's subtree
     * remains intact so it may be re-inserted elsewhere.
     * <P>
     * By default we do not have any children, ParentNode overrides this.
     * @see ParentNode
     *
     * @return oldChild, in its new state (removed).
     *
     * @throws DOMException(NOT_FOUND_ERR) if oldChild is not a child of
     * this node.
     *
     * @throws DOMException(NO_MODIFICATION_ALLOWED_ERR) if this node is
     * read-only.
     */
    public Node removeChild(Node oldChild)
        throws DOMException {
        throw new DOMException(DOMException.NOT_FOUND_ERR,
            DOMMessageFormatter.formatMessage(DOMMessageFormatter.DOM_DOMAIN,
                "NOT_FOUND_ERR", null));
    }

    /**
     * Make newChild occupy the location that oldChild used to
     * have. Note that newChild will first be removed from its previous
     * parent, if any. Equivalent to inserting newChild before oldChild,
     * then removing oldChild.
     * <P>
     * By default we do not have any children, ParentNode overrides this.
     * @see ParentNode
     *
     * @return oldChild, in its new state (removed).
     *
     * @throws DOMException(HIERARCHY_REQUEST_ERR) if newChild is of a
     * type that shouldn't be a child of this node, or if newChild is
     * one of our ancestors.
     *
     * @throws DOMException(WRONG_DOCUMENT_ERR) if newChild has a
     * different owner document than we do.
     *
     * @throws DOMException(NOT_FOUND_ERR) if oldChild is not a child of
     * this node.
     *
     * @throws DOMException(NO_MODIFICATION_ALLOWED_ERR) if this node is
     * read-only.
     */
    public Node replaceChild(Node newChild, Node oldChild)
        throws DOMException {
        throw new DOMException(DOMException.HIERARCHY_REQUEST_ERR,
            DOMMessageFormatter.formatMessage(DOMMessageFormatter.DOM_DOMAIN,
                "HIERARCHY_REQUEST_ERR", null));
    }

    //
    // NodeList methods
    //

    /**
     * NodeList method: Count the immediate children of this node
     * <P>
     * By default we do not have any children, ParentNode overrides this.
     * @see ParentNode
     *
     * @return int
     */
    public int getLength() {
        return 0;
    }

    /**
     * NodeList method: Return the Nth immediate child of this node, or
     * null if the index is out of bounds.
     * <P>
     * By default we do not have any children, ParentNode overrides this.
     * @see ParentNode
     *
     * @return org.w3c.dom.Node
     * @param index int
     */
    public Node item(int index) {
        return null;
    }

    //
    // DOM2: methods, getters, setters
    //

    /**
     * Puts all <code>Text</code> nodes in the full depth of the sub-tree
     * underneath this <code>Node</code>, including attribute nodes, into a
     * "normal" form where only markup (e.g., tags, comments, processing
     * instructions, CDATA sections, and entity references) separates
     * <code>Text</code> nodes, i.e., there are no adjacent <code>Text</code>
     * nodes. This can be used to ensure that the DOM view of a document is
     * the same as if it were saved and re-loaded, and is useful when
     * operations (such as XPointer lookups) that depend on a particular
     * document tree structure are to be used.In cases where the document
     * contains <code>CDATASections</code>, the normalize operation alone may
     * not be sufficient, since XPointers do not differentiate between
     * <code>Text</code> nodes and <code>CDATASection</code> nodes.
     * <p>
     * Note that this implementation simply calls normalize() on this Node's
     * children. It is up to implementors or Node to override normalize()
     * to take action.
     */
    public void normalize() {
        /* by default we do not have any children,
           ParentNode overrides this behavior */
    }

    /**
     * Introduced in DOM Level 2. <p>
     * Tests whether the DOM implementation implements a specific feature and
     * that feature is supported by this node.
     * @param feature The package name of the feature to test. This is the same
     * name as what can be passed to the method hasFeature on
     * DOMImplementation.
     * @param version This is the version number of the package name to
     * test. In Level 2, version 1, this is the string "2.0". If the version is
     * not specified, supporting any version of the feature will cause the
     * method to return true.
     * @return boolean Returns true if this node defines a subtree within which
     * the specified feature is supported, false otherwise.
     * @since WD-DOM-Level-2-19990923
     */
    public boolean isSupported(String feature, String version)
    {
        // Delegated to the owning document's DOMImplementation.
        return ownerDocument().getImplementation().hasFeature(feature,
                                                              version);
    }

    /**
     * Introduced in DOM Level 2. <p>
     *
     * The namespace URI of this node, or null if it is unspecified. When this
     * node is of any type other than ELEMENT_NODE and ATTRIBUTE_NODE, this is
     * always null and setting it has no effect. <p>
     *
     * This is not a computed value that is the result of a namespace lookup
     * based on an examination of the namespace declarations in scope. It is
     * merely the namespace URI given at creation time.<p>
     *
     * For nodes created with a DOM Level 1 method, such as createElement
     * from the Document interface, this is null.
     * @since WD-DOM-Level-2-19990923
     * @see AttrNSImpl
     * @see ElementNSImpl
     */
    public String getNamespaceURI() {
        return null;
    }

    /**
     * Introduced in DOM Level 2. <p>
     *
     * The namespace prefix of this node, or null if it is unspecified. When
     * this node is of any type other than ELEMENT_NODE and ATTRIBUTE_NODE this
     * is always null and setting it has no effect.<p>
     *
     * For nodes created with a DOM Level 1 method, such as createElement
     * from the Document interface, this is null. <p>
     *
     * @since WD-DOM-Level-2-19990923
     * @see AttrNSImpl
     * @see ElementNSImpl
     */
    public String getPrefix() {
        return null;
    }

    /**
     * Introduced in DOM Level 2. <p>
     *
     * The namespace prefix of this node, or null if it is unspecified. When
     * this node is of any type other than ELEMENT_NODE and ATTRIBUTE_NODE
     * this is always null and setting it has no effect.<p>
     *
     * For nodes created with a DOM Level 1 method, such as createElement from
     * the Document interface, this is null.<p>
     *
     * Note that setting this attribute changes the nodeName attribute, which
     * holds the qualified name, as well as the tagName and name attributes of
     * the Element and Attr interfaces, when applicable.<p>
     *
     * @throws INVALID_CHARACTER_ERR Raised if the specified
     * prefix contains an invalid character.
     *
     * @since WD-DOM-Level-2-19990923
     * @see AttrNSImpl
     * @see ElementNSImpl
     */
    public void setPrefix(String prefix)
        throws DOMException {
        // Default: prefixes are not supported on this node type.
        throw new DOMException(DOMException.NAMESPACE_ERR,
            DOMMessageFormatter.formatMessage(DOMMessageFormatter.DOM_DOMAIN,
                "NAMESPACE_ERR", null));
    }

    /**
     * Introduced in DOM Level 2. <p>
     *
     * Returns the local part of the qualified name of this node.
     * For nodes created with a DOM Level 1 method, such as createElement
     * from the Document interface, and for nodes of any type other than
     * ELEMENT_NODE and ATTRIBUTE_NODE this is the same as the nodeName
     * attribute.
     * @since WD-DOM-Level-2-19990923
     * @see AttrNSImpl
     * @see ElementNSImpl
     */
    public String getLocalName() {
        return null;
    }

    //
    // EventTarget support
    //
    // Mutation events are not handled here; all EventTarget calls are
    // forwarded to the owning document (see class javadoc).

    public void addEventListener(String type, EventListener listener,
                                 boolean useCapture) {
        // simply forward to Document
        ownerDocument().addEventListener(this, type, listener, useCapture);
    }

    public void removeEventListener(String type, EventListener listener,
                                    boolean useCapture) {
        // simply forward to Document
        ownerDocument().removeEventListener(this, type, listener, useCapture);
    }

    public boolean dispatchEvent(Event event) {
        // simply forward to Document
        return ownerDocument().dispatchEvent(this, event);
    }

    //
    // Public DOM Level 3 methods
    //

    /**
     * The absolute base URI of this node or <code>null</code> if undefined.
     * This value is computed according to . However, when the
     * <code>Document</code> supports the feature "HTML" , the base URI is
     * computed using first the value of the href attribute of the HTML BASE
     * element if any, and the value of the <code>documentURI</code>
     * attribute from the <code>Document</code> interface otherwise.
     * <br> When the node is an <code>Element</code>, a <code>Document</code>
     * or a a <code>ProcessingInstruction</code>, this attribute represents
     * the properties [base URI] defined in .
     * When the node is a
     * <code>Notation</code>, an <code>Entity</code>, or an
     * <code>EntityReference</code>, this attribute represents the
     * properties [declaration base URI] in the . How will this be affected
     * by resolution of relative namespace URIs issue?It's not.Should this
     * only be on Document, Element, ProcessingInstruction, Entity, and
     * Notation nodes, according to the infoset? If not, what is it equal to
     * on other nodes? Null? An empty string? I think it should be the
     * parent's.No.Should this be read-only and computed or and actual
     * read-write attribute?Read-only and computed (F2F 19 Jun 2000 and
     * teleconference 30 May 2001).If the base HTML element is not yet
     * attached to a document, does the insert change the Document.baseURI?
     * Yes. (F2F 26 Sep 2001)
     * @since DOM Level 3
     */
    public String getBaseURI() {
        return null;            // overridden where a base URI is known
    }

    /**
     * Compares a node with this node with regard to their position in the
     * tree and according to the document order. This order can be extended
     * by module that define additional types of nodes.
     * @param other The node to compare against this node.
     * @return Returns how the given node is positioned relatively to this
     *         node.
     * @since DOM Level 3
     * @deprecated
     */
    public short compareTreePosition(Node other) {
        // Questions of clarification for this method - to be answered by the
        // DOM WG.  Current assumptions listed - LM
        //
        // 1. How do ENTITY nodes compare?
        //    Current assumption: TREE_POSITION_DISCONNECTED, as ENTITY nodes
        //    aren't really 'in the tree'
        //
        // 2. How do NOTATION nodes compare?
        //    Current assumption: TREE_POSITION_DISCONNECTED, as NOTATION nodes
        //    aren't really 'in the tree'
        //
        // 3. Are TREE_POSITION_ANCESTOR and TREE_POSITION_DESCENDANT
        //    only relevant for nodes that are "part of the document tree"?
        //     <outer>
        //         <inner myattr="true"/>
        //     </outer>
        //     Is the element node "outer" considered an ancestor of "myattr"?
        //     Current assumption: No.
        //
        // 4. How do children of ATTRIBUTE nodes compare (with each other, or
        //    with children of other attribute nodes with the same element)
        //    Current assumption: Children of ATTRIBUTE nodes are treated as if
        //    they are the attribute node itself, unless the 2 nodes
        //    are both children of the same attribute.
        //
        // 5. How does an ENTITY_REFERENCE node compare with its children?
        //    Given the DOM, it should precede its children as an ancestor.
        //    Given "document order",  does it represent the same position?
        //    Current assumption: An ENTITY_REFERENCE node is an ancestor of its
        //    children.
        //
        // 6. How do children of a DocumentFragment compare?
        //    Current assumption: If both nodes are part of the same document
        //    fragment, they are compared as if they were part of a document.

        // If the nodes are the same...
        if (this==other)
            return (TREE_POSITION_SAME_NODE | TREE_POSITION_EQUIVALENT);

        short thisType = this.getNodeType();
        short otherType = other.getNodeType();

        // If either node is of type ENTITY or NOTATION, compare as disconnected
        if (thisType == Node.ENTITY_NODE ||
            thisType == Node.NOTATION_NODE ||
            otherType == Node.ENTITY_NODE ||
            otherType == Node.NOTATION_NODE ) {
            return TREE_POSITION_DISCONNECTED;
        }

        // Find the ancestor of each node, and the distance each node is from
        // its ancestor.
        // During this traversal, look for ancestor/descendent relationships
        // between the 2 nodes in question.
        // We do this now, so that we get this info correct for attribute nodes
        // and their children.

        Node node;
        Node thisAncestor = this;
        Node otherAncestor = other;
        int thisDepth=0;
        int otherDepth=0;
        for (node=this; node != null; node = node.getParentNode()) {
            thisDepth +=1;
            if (node == other)
                // The other node is an ancestor of this one.
                return (TREE_POSITION_ANCESTOR | TREE_POSITION_PRECEDING);
            thisAncestor = node;
        }

        for (node=other; node!=null; node=node.getParentNode()) {
            otherDepth +=1;
            if (node == this)
                // The other node is a descendent of the reference node.
                return (TREE_POSITION_DESCENDANT | TREE_POSITION_FOLLOWING);
            otherAncestor = node;
        }

        Node thisNode = this;
        Node otherNode = other;

        int thisAncestorType = thisAncestor.getNodeType();
        int otherAncestorType = otherAncestor.getNodeType();

        // if the ancestor is an attribute, get owning element.
        // we are now interested in the owner to determine position.

        if (thisAncestorType == Node.ATTRIBUTE_NODE)  {
            thisNode = ((AttrImpl)thisAncestor).getOwnerElement();
        }
        if (otherAncestorType == Node.ATTRIBUTE_NODE)  {
            otherNode = ((AttrImpl)otherAncestor).getOwnerElement();
        }

        // Before proceeding, we should check if both ancestor nodes turned
        // out to be attributes for the same element
        if (thisAncestorType == Node.ATTRIBUTE_NODE &&
            otherAncestorType == Node.ATTRIBUTE_NODE &&
            thisNode==otherNode)
            return TREE_POSITION_EQUIVALENT;

        // Now, find the ancestor of the owning element, if the original
        // ancestor was an attribute

        // Note:  the following 2 loops are quite close to the ones above.
        // May want to common them up.  LM.
        if (thisAncestorType == Node.ATTRIBUTE_NODE) {
            thisDepth=0;
            for (node=thisNode; node != null; node=node.getParentNode()) {
                thisDepth +=1;
                if (node == otherNode)
                    // The other node is an ancestor of the owning element
                    {
                    return TREE_POSITION_PRECEDING;
                    }
                thisAncestor = node;
            }
        }

        // Now, find the ancestor of the owning element, if the original
        // ancestor was an attribute
        if (otherAncestorType == Node.ATTRIBUTE_NODE) {
            otherDepth=0;
            for (node=otherNode; node != null; node=node.getParentNode()) {
                otherDepth +=1;
                if (node == thisNode)
                    // The other node is a descendent of the reference
                    // node's element
                    return TREE_POSITION_FOLLOWING;
                otherAncestor = node;
            }
        }

        // thisAncestor and otherAncestor must be the same at this point,
        // otherwise, we are not in the same tree or document fragment
        if (thisAncestor != otherAncestor)
            return TREE_POSITION_DISCONNECTED;

        // Go up the parent chain of the deeper node, until we find a node
        // with the same depth as the shallower node

        if (thisDepth > otherDepth) {
            for (int i=0; i<thisDepth - otherDepth; i++)
                thisNode = thisNode.getParentNode();
            // Check if the node we have reached is in fact "otherNode". This can
            // happen in the case of attributes.  In this case, otherNode
            // "precedes" this.
            if (thisNode == otherNode)
                return TREE_POSITION_PRECEDING;
        }
        else {
            for (int i=0; i<otherDepth - thisDepth; i++)
                otherNode = otherNode.getParentNode();
            // Check if the node we have reached is in fact "thisNode".  This can
            // happen in the case of attributes.  In this case, otherNode
            // "follows" this.
            if (otherNode == thisNode)
                return TREE_POSITION_FOLLOWING;
        }

        // We now have nodes at the same depth in the tree.  Find a common
        // ancestor.
        Node thisNodeP, otherNodeP;
        for (thisNodeP=thisNode.getParentNode(),
                 otherNodeP=otherNode.getParentNode();
             thisNodeP!=otherNodeP;) {
            thisNode = thisNodeP;
            otherNode = otherNodeP;
            thisNodeP = thisNodeP.getParentNode();
            otherNodeP = otherNodeP.getParentNode();
        }

        // At this point, thisNode and otherNode are direct children of
        // the common ancestor.
        // See whether thisNode or otherNode is the leftmost
        for (Node current=thisNodeP.getFirstChild();
             current!=null;
             current=current.getNextSibling()) {
            if (current==otherNode) {
                return TREE_POSITION_PRECEDING;
            }
            else if (current==thisNode) {
                return TREE_POSITION_FOLLOWING;
            }
        }
        // REVISIT:  shouldn't get here.   Should probably throw an
        // exception
        return 0;
    }

    /**
     * Compares a node with this node with regard to their position in the
     * document.
     * @param other The node to compare against this node.
     * @return Returns how the given node is positioned relatively to this
     *         node.
     * @since DOM Level 3
     */
    public short compareDocumentPosition(Node other) throws DOMException {

        // If the nodes are the same, no flags should be set
        if (this==other)
            return 0;

        // check if other is from a different implementation
        // NOTE(review): 'other' is dereferenced below without a null check;
        // a null argument will throw NullPointerException -- confirm intended.
        if (other != null && !(other instanceof NodeImpl)) {
            // other comes from a different implementation
            String msg = DOMMessageFormatter.formatMessage(
               DOMMessageFormatter.DOM_DOMAIN, "NOT_SUPPORTED_ERR", null);
            throw new DOMException(DOMException.NOT_SUPPORTED_ERR, msg);
        }

        Document thisOwnerDoc, otherOwnerDoc;
        // get the respective Document owners.
        if (this.getNodeType() == Node.DOCUMENT_NODE)
            thisOwnerDoc = (Document)this;
        else
            thisOwnerDoc = this.getOwnerDocument();
        if (other.getNodeType() == Node.DOCUMENT_NODE)
            otherOwnerDoc = (Document)other;
        else
            otherOwnerDoc = other.getOwnerDocument();

        // If from different documents, we know they are disconnected.
// and have an implementation dependent order if (thisOwnerDoc != otherOwnerDoc && thisOwnerDoc !=null && otherOwnerDoc !=null) { int otherDocNum = ((CoreDocumentImpl)otherOwnerDoc).getNodeNumber(); int thisDocNum = ((CoreDocumentImpl)thisOwnerDoc).getNodeNumber(); if (otherDocNum > thisDocNum) return DOCUMENT_POSITION_DISCONNECTED | DOCUMENT_POSITION_FOLLOWING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC; else return DOCUMENT_POSITION_DISCONNECTED | DOCUMENT_POSITION_PRECEDING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC; } // Find the ancestor of each node, and the distance each node is from // its ancestor. // During this traversal, look for ancestor/descendent relationships // between the 2 nodes in question. // We do this now, so that we get this info correct for attribute nodes // and their children. Node node; Node thisAncestor = this; Node otherAncestor = other; int thisDepth=0; int otherDepth=0; for (node=this; node != null; node = node.getParentNode()) { thisDepth +=1; if (node == other) // The other node is an ancestor of this one. return (DOCUMENT_POSITION_CONTAINS | DOCUMENT_POSITION_PRECEDING); thisAncestor = node; } for (node=other; node!=null; node=node.getParentNode()) { otherDepth +=1; if (node == this) // The other node is a descendent of the reference node. return (DOCUMENT_POSITION_IS_CONTAINED | DOCUMENT_POSITION_FOLLOWING); otherAncestor = node; } int thisAncestorType = thisAncestor.getNodeType(); int otherAncestorType = otherAncestor.getNodeType(); Node thisNode = this; Node otherNode = other; // Special casing for ENTITY, NOTATION, DOCTYPE and ATTRIBUTES // LM: should rewrite this. 
switch (thisAncestorType) { case Node.NOTATION_NODE: case Node.ENTITY_NODE: { DocumentType container = thisOwnerDoc.getDoctype(); if (container == otherAncestor) return (DOCUMENT_POSITION_CONTAINS | DOCUMENT_POSITION_PRECEDING); switch (otherAncestorType) { case Node.NOTATION_NODE: case Node.ENTITY_NODE: { if (thisAncestorType != otherAncestorType) // the nodes are of different types return ((thisAncestorType>otherAncestorType) ? DOCUMENT_POSITION_PRECEDING:DOCUMENT_POSITION_FOLLOWING); else { // the nodes are of the same type. Find order. if (thisAncestorType == Node.NOTATION_NODE) if (((NamedNodeMapImpl)container.getNotations()).precedes(otherAncestor,thisAncestor)) return (DOCUMENT_POSITION_PRECEDING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC); else return (DOCUMENT_POSITION_FOLLOWING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC); else if (((NamedNodeMapImpl)container.getEntities()).precedes(otherAncestor,thisAncestor)) return (DOCUMENT_POSITION_PRECEDING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC); else return (DOCUMENT_POSITION_FOLLOWING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC); } } } thisNode = thisAncestor = thisOwnerDoc; break; } case Node.DOCUMENT_TYPE_NODE: { if (otherNode == thisOwnerDoc) return (DOCUMENT_POSITION_PRECEDING | DOCUMENT_POSITION_CONTAINS); else if (thisOwnerDoc!=null && thisOwnerDoc==otherOwnerDoc) return (DOCUMENT_POSITION_FOLLOWING); break; } case Node.ATTRIBUTE_NODE: { thisNode = ((AttrImpl)thisAncestor).getOwnerElement(); if (otherAncestorType==Node.ATTRIBUTE_NODE) { otherNode = ((AttrImpl)otherAncestor).getOwnerElement(); if (otherNode == thisNode) { if (((NamedNodeMapImpl)thisNode.getAttributes()).precedes(other,this)) return (DOCUMENT_POSITION_PRECEDING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC); else return (DOCUMENT_POSITION_FOLLOWING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC); } } // Now, find the ancestor of the element thisDepth=0; for (node=thisNode; node != null; node=node.getParentNode()) { thisDepth +=1; if (node 
== otherNode) { // The other node is an ancestor of the owning element return (DOCUMENT_POSITION_CONTAINS | DOCUMENT_POSITION_PRECEDING); } thisAncestor = node; } } } switch (otherAncestorType) { case Node.NOTATION_NODE: case Node.ENTITY_NODE: { DocumentType container = thisOwnerDoc.getDoctype(); if (container == this) return (DOCUMENT_POSITION_IS_CONTAINED | DOCUMENT_POSITION_FOLLOWING); otherNode = otherAncestor = thisOwnerDoc; break; } case Node.DOCUMENT_TYPE_NODE: { if (thisNode == otherOwnerDoc) return (DOCUMENT_POSITION_FOLLOWING | DOCUMENT_POSITION_IS_CONTAINED); else if (otherOwnerDoc!=null && thisOwnerDoc==otherOwnerDoc) return (DOCUMENT_POSITION_PRECEDING); break; } case Node.ATTRIBUTE_NODE: { otherDepth=0; otherNode = ((AttrImpl)otherAncestor).getOwnerElement(); for (node=otherNode; node != null; node=node.getParentNode()) { otherDepth +=1; if (node == thisNode) // The other node is a descendent of the reference // node's element return DOCUMENT_POSITION_FOLLOWING | DOCUMENT_POSITION_IS_CONTAINED; otherAncestor = node; } } } // thisAncestor and otherAncestor must be the same at this point, // otherwise, the original nodes are disconnected if (thisAncestor != otherAncestor) { int thisAncestorNum, otherAncestorNum; thisAncestorNum = ((NodeImpl)thisAncestor).getNodeNumber(); otherAncestorNum = ((NodeImpl)otherAncestor).getNodeNumber(); if (thisAncestorNum > otherAncestorNum) return DOCUMENT_POSITION_DISCONNECTED | DOCUMENT_POSITION_FOLLOWING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC; else return DOCUMENT_POSITION_DISCONNECTED | DOCUMENT_POSITION_PRECEDING | DOCUMENT_POSITION_IMPLEMENTATION_SPECIFIC; } // Go up the parent chain of the deeper node, until we find a node // with the same depth as the shallower node if (thisDepth > otherDepth) { for (int i=0; i<thisDepth - otherDepth; i++) thisNode = thisNode.getParentNode(); // Check if the node we have reached is in fact "otherNode". This can // happen in the case of attributes. 
In this case, otherNode // "precedes" this. if (thisNode == otherNode) { return DOCUMENT_POSITION_PRECEDING; } } else { for (int i=0; i<otherDepth - thisDepth; i++) otherNode = otherNode.getParentNode(); // Check if the node we have reached is in fact "thisNode". This can // happen in the case of attributes. In this case, otherNode // "follows" this. if (otherNode == thisNode) return DOCUMENT_POSITION_FOLLOWING; } // We now have nodes at the same depth in the tree. Find a common // ancestor. Node thisNodeP, otherNodeP; for (thisNodeP=thisNode.getParentNode(), otherNodeP=otherNode.getParentNode(); thisNodeP!=otherNodeP;) { thisNode = thisNodeP; otherNode = otherNodeP; thisNodeP = thisNodeP.getParentNode(); otherNodeP = otherNodeP.getParentNode(); } // At this point, thisNode and otherNode are direct children of // the common ancestor. // See whether thisNode or otherNode is the leftmost for (Node current=thisNodeP.getFirstChild(); current!=null; current=current.getNextSibling()) { if (current==otherNode) { return DOCUMENT_POSITION_PRECEDING; } else if (current==thisNode) { return DOCUMENT_POSITION_FOLLOWING; } } // REVISIT: shouldn't get here. Should probably throw an // exception return 0; } /** * This attribute returns the text content of this node and its * descendants. When it is defined to be null, setting it has no effect. * When set, any possible children this node may have are removed and * replaced by a single <code>Text</code> node containing the string * this attribute is set to. On getting, no serialization is performed, * the returned string does not contain any markup. No whitespace * normalization is performed, the returned string does not contain the * element content whitespaces . Similarly, on setting, no parsing is * performed either, the input string is taken as pure textual content. 
* <br>The string returned is made of the text content of this node * depending on its type, as defined below: * <table border='1'> * <tr> * <th>Node type</th> * <th>Content</th> * </tr> /** * This attribute returns the text content of this node and its * descendants. When it is defined to be null, setting it has no effect. * When set, any possible children this node may have are removed and * replaced by a single <code>Text</code> node containing the string * this attribute is set to. On getting, no serialization is performed, * the returned string does not contain any markup. No whitespace * normalization is performed, the returned string does not contain the * element content whitespaces . Similarly, on setting, no parsing is * performed either, the input string is taken as pure textual content. * <br>The string returned is made of the text content of this node * depending on its type, as defined below: * <table border='1'> * <tr> * <th>Node type</th> * <th>Content</th> * </tr> * <tr> * <td valign='top' rowspan='1' colspan='1'> * ELEMENT_NODE, ENTITY_NODE, ENTITY_REFERENCE_NODE, * DOCUMENT_FRAGMENT_NODE</td> * <td valign='top' rowspan='1' colspan='1'>concatenation of the <code>textContent</code> * attribute value of every child node, excluding COMMENT_NODE and * PROCESSING_INSTRUCTION_NODE nodes</td> * </tr> * <tr> * <td valign='top' rowspan='1' colspan='1'>ATTRIBUTE_NODE, TEXT_NODE, * CDATA_SECTION_NODE, COMMENT_NODE, PROCESSING_INSTRUCTION_NODE</td> * <td valign='top' rowspan='1' colspan='1'> * <code>nodeValue</code></td> * </tr> * <tr> * <td valign='top' rowspan='1' colspan='1'>DOCUMENT_NODE, DOCUMENT_TYPE_NODE, NOTATION_NODE</td> * <td valign='top' rowspan='1' colspan='1'> * null</td> * </tr> * </table> * @exception DOMException * NO_MODIFICATION_ALLOWED_ERR: Raised when the node is readonly. 
* @exception DOMException * DOMSTRING_SIZE_ERR: Raised when it would return more characters than * fit in a <code>DOMString</code> variable on the implementation * platform. * @since DOM Level 3 */ public String getTextContent() throws DOMException { return getNodeValue(); // overriden in some subclasses } // internal method taking a StringBuffer in parameter void getTextContent(StringBuffer buf) throws DOMException { String content = getNodeValue(); if (content != null) { buf.append(content); } } /** * This attribute returns the text content of this node and its * descendants. When it is defined to be null, setting it has no effect. * When set, any possible children this node may have are removed and * replaced by a single <code>Text</code> node containing the string * this attribute is set to. On getting, no serialization is performed, * the returned string does not contain any markup. No whitespace * normalization is performed, the returned string does not contain the * element content whitespaces . Similarly, on setting, no parsing is * performed either, the input string is taken as pure textual content. 
* <br>The string returned is made of the text content of this node * depending on its type, as defined below: * <table border='1'> * <tr> * <th>Node type</th> * <th>Content</th> * </tr> * <tr> * <td valign='top' rowspan='1' colspan='1'> * ELEMENT_NODE, ENTITY_NODE, ENTITY_REFERENCE_NODE, * DOCUMENT_FRAGMENT_NODE</td> * <td valign='top' rowspan='1' colspan='1'>concatenation of the <code>textContent</code> * attribute value of every child node, excluding COMMENT_NODE and * PROCESSING_INSTRUCTION_NODE nodes</td> * </tr> * <tr> * <td valign='top' rowspan='1' colspan='1'>ATTRIBUTE_NODE, TEXT_NODE, * CDATA_SECTION_NODE, COMMENT_NODE, PROCESSING_INSTRUCTION_NODE</td> * <td valign='top' rowspan='1' colspan='1'> * <code>nodeValue</code></td> * </tr> * <tr> * <td valign='top' rowspan='1' colspan='1'>DOCUMENT_NODE, DOCUMENT_TYPE_NODE, NOTATION_NODE</td> * <td valign='top' rowspan='1' colspan='1'> * null</td> * </tr> * </table> * @exception DOMException * NO_MODIFICATION_ALLOWED_ERR: Raised when the node is readonly. * @exception DOMException * DOMSTRING_SIZE_ERR: Raised when it would return more characters than * fit in a <code>DOMString</code> variable on the implementation * platform. * @since DOM Level 3 */ public void setTextContent(String textContent) throws DOMException { setNodeValue(textContent); } /** * Returns whether this node is the same node as the given one. * <br>This method provides a way to determine whether two * <code>Node</code> references returned by the implementation reference * the same object. When two <code>Node</code> references are references * to the same object, even if through a proxy, the references may be * used completely interchangably, such that all attributes have the * same values and calling the same DOM method on either reference * always has exactly the same effect. * @param other The node to test against. * @return Returns <code>true</code> if the nodes are the same, * <code>false</code> otherwise. 
* @since DOM Level 3 */ public boolean isSameNode(Node other) { // we do not use any wrapper so the answer is obvious return this == other; } /** * DOM Level 3: Experimental * This method checks if the specified <code>namespaceURI</code> is the * default namespace or not. * @param namespaceURI The namespace URI to look for. * @return <code>true</code> if the specified <code>namespaceURI</code> * is the default namespace, <code>false</code> otherwise. * @since DOM Level 3 */ public boolean isDefaultNamespace(String namespaceURI){ // REVISIT: remove casts when DOM L3 becomes REC. short type = this.getNodeType(); switch (type) { case Node.ELEMENT_NODE: { String namespace = this.getNamespaceURI(); String prefix = this.getPrefix(); // REVISIT: is it possible that prefix is empty string? if (prefix == null || prefix.length() == 0) { if (namespaceURI == null) { return (namespace == namespaceURI); } return namespaceURI.equals(namespace); } if (this.hasAttributes()) { ElementImpl elem = (ElementImpl)this; NodeImpl attr = (NodeImpl)elem.getAttributeNodeNS("http://www.w3.org/2000/xmlns/", "xmlns"); if (attr != null) { String value = attr.getNodeValue(); if (namespaceURI == null) { return (namespace == value); } return namespaceURI.equals(value); } } NodeImpl ancestor = (NodeImpl)getElementAncestor(this); if (ancestor != null) { return ancestor.isDefaultNamespace(namespaceURI); } return false; } case Node.DOCUMENT_NODE:{ return((NodeImpl)((Document)this).getDocumentElement()).isDefaultNamespace(namespaceURI); } case Node.ENTITY_NODE : case Node.NOTATION_NODE: case Node.DOCUMENT_FRAGMENT_NODE: case Node.DOCUMENT_TYPE_NODE: // type is unknown return false; case Node.ATTRIBUTE_NODE:{ if (this.ownerNode.getNodeType() == Node.ELEMENT_NODE) { return ownerNode.isDefaultNamespace(namespaceURI); } return false; } default:{ NodeImpl ancestor = (NodeImpl)getElementAncestor(this); if (ancestor != null) { return ancestor.isDefaultNamespace(namespaceURI); } return false; } } } /** * * DOM 
Level 3 - Experimental: * Look up the prefix associated to the given namespace URI, starting from this node. * * @param namespaceURI * @return the prefix for the namespace */ public String lookupPrefix(String namespaceURI){ // REVISIT: When Namespaces 1.1 comes out this may not be true // Prefix can't be bound to null namespace if (namespaceURI == null) { return null; } short type = this.getNodeType(); switch (type) { case Node.ELEMENT_NODE: { this.getNamespaceURI(); // to flip out children return lookupNamespacePrefix(namespaceURI, (ElementImpl)this); } case Node.DOCUMENT_NODE:{ return((NodeImpl)((Document)this).getDocumentElement()).lookupPrefix(namespaceURI); } case Node.ENTITY_NODE : case Node.NOTATION_NODE: case Node.DOCUMENT_FRAGMENT_NODE: case Node.DOCUMENT_TYPE_NODE: // type is unknown return null; case Node.ATTRIBUTE_NODE:{ if (this.ownerNode.getNodeType() == Node.ELEMENT_NODE) { return ownerNode.lookupPrefix(namespaceURI); } return null; } default:{ NodeImpl ancestor = (NodeImpl)getElementAncestor(this); if (ancestor != null) { return ancestor.lookupPrefix(namespaceURI); } return null; } } } /** * DOM Level 3 - Experimental: * Look up the namespace URI associated to the given prefix, starting from this node. * Use lookupNamespaceURI(null) to lookup the default namespace * * @param specifiedPrefix * @return the URI for the namespace * @since DOM Level 3 */ public String lookupNamespaceURI(String specifiedPrefix) { short type = this.getNodeType(); switch (type) { case Node.ELEMENT_NODE : { String namespace = this.getNamespaceURI(); String prefix = this.getPrefix(); if (namespace !=null) { // REVISIT: is it possible that prefix is empty string? 
if (specifiedPrefix== null && prefix==specifiedPrefix) { // looking for default namespace return namespace; } else if (prefix != null && prefix.equals(specifiedPrefix)) { // non default namespace return namespace; } } if (this.hasAttributes()) { NamedNodeMap map = this.getAttributes(); int length = map.getLength(); for (int i=0;i<length;i++) { Node attr = map.item(i); String attrPrefix = attr.getPrefix(); String value = attr.getNodeValue(); namespace = attr.getNamespaceURI(); if (namespace !=null && namespace.equals("http://www.w3.org/2000/xmlns/")) { // at this point we are dealing with DOM Level 2 nodes only if (specifiedPrefix == null && attr.getNodeName().equals("xmlns")) { // default namespace return value.length() > 0 ? value : null; } else if (attrPrefix !=null && attrPrefix.equals("xmlns") && attr.getLocalName().equals(specifiedPrefix)) { // non default namespace return value.length() > 0 ? value : null; } } } } NodeImpl ancestor = (NodeImpl)getElementAncestor(this); if (ancestor != null) { return ancestor.lookupNamespaceURI(specifiedPrefix); } return null; } case Node.DOCUMENT_NODE : { return((NodeImpl)((Document)this).getDocumentElement()).lookupNamespaceURI(specifiedPrefix); } case Node.ENTITY_NODE : case Node.NOTATION_NODE: case Node.DOCUMENT_FRAGMENT_NODE: case Node.DOCUMENT_TYPE_NODE: // type is unknown return null; case Node.ATTRIBUTE_NODE:{ if (this.ownerNode.getNodeType() == Node.ELEMENT_NODE) { return ownerNode.lookupNamespaceURI(specifiedPrefix); } return null; } default:{ NodeImpl ancestor = (NodeImpl)getElementAncestor(this); if (ancestor != null) { return ancestor.lookupNamespaceURI(specifiedPrefix); } return null; } } } Node getElementAncestor(Node currentNode) { Node parent = currentNode.getParentNode(); while (parent != null) { short type = parent.getNodeType(); if (type == Node.ELEMENT_NODE) { return parent; } parent = parent.getParentNode(); } return null; } String lookupNamespacePrefix(String namespaceURI, ElementImpl el){ String 
namespace = this.getNamespaceURI(); // REVISIT: if no prefix is available is it null or empty string, or // could be both? String prefix = this.getPrefix(); if (namespace!=null && namespace.equals(namespaceURI)) { if (prefix != null) { String foundNamespace = el.lookupNamespaceURI(prefix); if (foundNamespace !=null && foundNamespace.equals(namespaceURI)) { return prefix; } } } if (this.hasAttributes()) { NamedNodeMap map = this.getAttributes(); int length = map.getLength(); for (int i=0;i<length;i++) { Node attr = map.item(i); String attrPrefix = attr.getPrefix(); String value = attr.getNodeValue(); namespace = attr.getNamespaceURI(); if (namespace !=null && namespace.equals("http://www.w3.org/2000/xmlns/")) { // DOM Level 2 nodes if (((attr.getNodeName().equals("xmlns")) || (attrPrefix !=null && attrPrefix.equals("xmlns")) && value.equals(namespaceURI))) { String localname= attr.getLocalName(); String foundNamespace = el.lookupNamespaceURI(localname); if (foundNamespace !=null && foundNamespace.equals(namespaceURI)) { return localname; } } } } } NodeImpl ancestor = (NodeImpl)getElementAncestor(this); if (ancestor != null) { return ancestor.lookupNamespacePrefix(namespaceURI, el); } return null; } /** * Tests whether two nodes are equal. * <br>This method tests for equality of nodes, not sameness (i.e., * whether the two nodes are references to the same object) which can be * tested with <code>Node.isSameNode</code>. All nodes that are the same * will also be equal, though the reverse may not be true. * <br>Two nodes are equal if and only if the following conditions are * satisfied: The two nodes are of the same type.The following string * attributes are equal: <code>nodeName</code>, <code>localName</code>, * <code>namespaceURI</code>, <code>prefix</code>, <code>nodeValue</code> * , <code>baseURI</code>. This is: they are both <code>null</code>, or * they have the same length and are character for character identical. 
* The <code>attributes</code> <code>NamedNodeMaps</code> are equal. * This is: they are both <code>null</code>, or they have the same * length and for each node that exists in one map there is a node that * exists in the other map and is equal, although not necessarily at the * same index.The <code>childNodes</code> <code>NodeLists</code> are * equal. This is: they are both <code>null</code>, or they have the * same length and contain equal nodes at the same index. This is true * for <code>Attr</code> nodes as for any other type of node. Note that * normalization can affect equality; to avoid this, nodes should be * normalized before being compared. * <br>For two <code>DocumentType</code> nodes to be equal, the following * conditions must also be satisfied: The following string attributes * are equal: <code>publicId</code>, <code>systemId</code>, * <code>internalSubset</code>.The <code>entities</code> * <code>NamedNodeMaps</code> are equal.The <code>notations</code> * <code>NamedNodeMaps</code> are equal. * <br>On the other hand, the following do not affect equality: the * <code>ownerDocument</code> attribute, the <code>specified</code> * attribute for <code>Attr</code> nodes, the * <code>isWhitespaceInElementContent</code> attribute for * <code>Text</code> nodes, as well as any user data or event listeners * registered on the nodes. * @param arg The node to compare equality with. * @return If the nodes, and possibly subtrees are equal, * <code>true</code> otherwise <code>false</code>. * @since DOM Level 3 */ public boolean isEqualNode(Node arg) { if (arg == this) { return true; } if (arg.getNodeType() != getNodeType()) { return false; } // in theory nodeName can't be null but better be careful // who knows what other implementations may be doing?... 
if (getNodeName() == null) { if (arg.getNodeName() != null) { return false; } } else if (!getNodeName().equals(arg.getNodeName())) { return false; } if (getLocalName() == null) { if (arg.getLocalName() != null) { return false; } } else if (!getLocalName().equals(arg.getLocalName())) { return false; } if (getNamespaceURI() == null) { if (arg.getNamespaceURI() != null) { return false; } } else if (!getNamespaceURI().equals(arg.getNamespaceURI())) { return false; } if (getPrefix() == null) { if (arg.getPrefix() != null) { return false; } } else if (!getPrefix().equals(arg.getPrefix())) { return false; } if (getNodeValue() == null) { if (arg.getNodeValue() != null) { return false; } } else if (!getNodeValue().equals(arg.getNodeValue())) { return false; } return true; } /** * @since DOM Level 3 */ public Object getFeature(String feature, String version) { // we don't have any alternate node, either this node does the job // or we don't have anything that does return isSupported(feature, version) ? this : null; } /** * Associate an object to a key on this node. The object can later be * retrieved from this node by calling <code>getUserData</code> with the * same key. * @param key The key to associate the object to. * @param data The object to associate to the given key, or * <code>null</code> to remove any existing association to that key. * @param handler The handler to associate to that key, or * <code>null</code>. * @return Returns the <code>DOMObject</code> previously associated to * the given key on this node, or <code>null</code> if there was none. * @since DOM Level 3 */ public Object setUserData(String key, Object data, UserDataHandler handler) { return ownerDocument().setUserData(this, key, data, handler); } /** * Retrieves the object associated to a key on a this node. The object * must first have been set to this node by calling * <code>setUserData</code> with the same key. * @param key The key the object is associated to. 
* @return Returns the <code>DOMObject</code> associated to the given key * on this node, or <code>null</code> if there was none. * @since DOM Level 3 */ public Object getUserData(String key) { return ownerDocument().getUserData(this, key); } protected Hashtable getUserDataRecord(){ return ownerDocument().getUserDataRecord(this); } // // Public methods // /** * NON-DOM: PR-DOM-Level-1-19980818 mentions readonly nodes in conjunction * with Entities, but provides no API to support this. * <P> * Most DOM users should not touch this method. Its anticpated use * is during construction of EntityRefernces, where it will be used to * lock the contents replicated from Entity so they can't be casually * altered. It _could_ be published as a DOM extension, if desired. * <P> * Note: since we never have any children deep is meaningless here, * ParentNode overrides this behavior. * @see ParentNode * * @param readOnly True or false as desired. * @param deep If true, children are also toggled. Note that this will * not change the state of an EntityReference or its children, * which are always read-only. */ public void setReadOnly(boolean readOnly, boolean deep) { if (needsSyncData()) { synchronizeData(); } isReadOnly(readOnly); } // setReadOnly(boolean,boolean) /** * NON-DOM: Returns true if this node is read-only. This is a * shallow check. */ public boolean getReadOnly() { if (needsSyncData()) { synchronizeData(); } return isReadOnly(); } // getReadOnly():boolean /** * NON-DOM: As an alternative to subclassing the DOM, this implementation * has been extended with the ability to attach an object to each node. * (If you need multiple objects, you can attach a collection such as a * vector or hashtable, then attach your application information to that.) * <p><b>Important Note:</b> You are responsible for removing references * to your data on nodes that are no longer used. 
Failure to do so will * prevent the nodes, your data is attached to, to be garbage collected * until the whole document is. * * @param data the object to store or null to remove any existing reference */ public void setUserData(Object data) { ownerDocument().setUserData(this, data); } /** * NON-DOM: * Returns the user data associated to this node. */ public Object getUserData() { return ownerDocument().getUserData(this); } // // Protected methods // /** * Denotes that this node has changed. */ protected void changed() { // we do not actually store this information on every node, we only // have a global indicator on the Document. Doing otherwise cost us too // much for little gain. ownerDocument().changed(); } /** * Returns the number of changes to this node. */ protected int changes() { // we do not actually store this information on every node, we only // have a global indicator on the Document. Doing otherwise cost us too // much for little gain. return ownerDocument().changes(); } /** * Override this method in subclass to hook in efficient * internal data structure. */ protected void synchronizeData() { // By default just change the flag to avoid calling this method again needsSyncData(false); } /** * For non-child nodes, the node which "points" to this node. * For example, the owning element for an attribute */ protected Node getContainer() { return null; } /* * Flags setters and getters */ final boolean isReadOnly() { return (flags & READONLY) != 0; } final void isReadOnly(boolean value) { flags = (short) (value ? flags | READONLY : flags & ~READONLY); } final boolean needsSyncData() { return (flags & SYNCDATA) != 0; } final void needsSyncData(boolean value) { flags = (short) (value ? flags | SYNCDATA : flags & ~SYNCDATA); } final boolean needsSyncChildren() { return (flags & SYNCCHILDREN) != 0; } public final void needsSyncChildren(boolean value) { flags = (short) (value ? 
flags | SYNCCHILDREN : flags & ~SYNCCHILDREN); } final boolean isOwned() { return (flags & OWNED) != 0; } final void isOwned(boolean value) { flags = (short) (value ? flags | OWNED : flags & ~OWNED); } final boolean isFirstChild() { return (flags & FIRSTCHILD) != 0; } final void isFirstChild(boolean value) { flags = (short) (value ? flags | FIRSTCHILD : flags & ~FIRSTCHILD); } final boolean isSpecified() { return (flags & SPECIFIED) != 0; } final void isSpecified(boolean value) { flags = (short) (value ? flags | SPECIFIED : flags & ~SPECIFIED); } // inconsistent name to avoid clash with public method on TextImpl final boolean internalIsIgnorableWhitespace() { return (flags & IGNORABLEWS) != 0; } final void isIgnorableWhitespace(boolean value) { flags = (short) (value ? flags | IGNORABLEWS : flags & ~IGNORABLEWS); } final boolean hasStringValue() { return (flags & HASSTRING) != 0; } final void hasStringValue(boolean value) { flags = (short) (value ? flags | HASSTRING : flags & ~HASSTRING); } final boolean isNormalized() { return (flags & NORMALIZED) != 0; } final void isNormalized(boolean value) { // See if flag should propagate to parent. if (!value && isNormalized() && ownerNode != null) { ownerNode.isNormalized(false); } flags = (short) (value ? flags | NORMALIZED : flags & ~NORMALIZED); } final boolean isIdAttribute() { return (flags & ID) != 0; } final void isIdAttribute(boolean value) { flags = (short) (value ? flags | ID : flags & ~ID); } // // Object methods // /** NON-DOM method for debugging convenience. */ public String toString() { return "["+getNodeName()+": "+getNodeValue()+"]"; } // // Serialization methods // /** Serialize object. */ private void writeObject(ObjectOutputStream out) throws IOException { // synchronize data if (needsSyncData()) { synchronizeData(); } // write object out.defaultWriteObject(); } // writeObject(ObjectOutputStream) } // class NodeImpl
apache-2.0
zhoukang0107/Android-Practise
app/src/main/java/com/zack/imageloader/User.java
1085
package com.zack.imageloader;

import java.util.ArrayList;
import java.util.List;

/**
 * Simple data bean used as the sample payload for the image-loader demo.
 *
 * <p>All fields are intentionally package-private with no accessors; they
 * appear to be read reflectively by the demo code — TODO confirm before
 * tightening access or adding getters.
 */
public class User {
    String name;
    String pwd;
    int age;
    boolean ignore;
    String address;
    // Nested users; only populated by the second constructor.
    List<User> users;
    // Fixed sample strings; only populated by the first constructor.
    List<String> list;
    // Never assigned by either constructor; kept for reflective/demo use.
    String[] array;

    /**
     * Builds a user and pre-populates {@link #list} with fixed sample values.
     * {@link #users} and {@link #array} are left null.
     *
     * @param name    user name
     * @param pwd     password
     * @param age     age in years
     * @param ignore  demo flag (semantics defined by the caller)
     * @param address postal address
     */
    public User(String name, String pwd, int age, boolean ignore, String address) {
        this.name = name;
        this.pwd = pwd;
        this.age = age;
        this.ignore = ignore;
        this.address = address;
        list = new ArrayList<>();
        list.add("aaa");
        list.add("bbb");
        list.add("ccc");
        list.add("ddd");
        list.add("eee");
        list.add("fff");
        list.add("ggg");
    }

    /**
     * Builds a user with a nested list of users. Note that, unlike the other
     * constructor, this one leaves {@link #list} null.
     *
     * @param users nested users to attach to this instance
     */
    public User(String name, String pwd, int age, boolean ignore, String address, List<User> users) {
        this.name = name;
        this.pwd = pwd;
        this.age = age;
        this.ignore = ignore;
        this.address = address;
        this.users = users;
    }
}
apache-2.0
apache/openejb
itests/openejb-itests-client/src/main/java/org/apache/openejb/test/mdb/BmtMdbAllowedOperationsTests.java
9138
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.openejb.test.mdb;

import org.apache.openejb.test.object.OperationsPolicy;

import javax.jms.Destination;

/**
 *
 * [9] Should be run as the ninth test suite of the BasicStatelessTestClients
 *
 * NOTE(review): the table below appears to be copied from the stateless
 * session bean suite; the per-test javadoc tables describe the actual
 * message-driven bean expectations.
 *
 * <PRE>
 * =========================================================================
 * Operations allowed in the methods of a stateless SessionBean with
 * container-managed transaction demarcation
 * =========================================================================
 *
 * Bean method           | Bean method can perform the following operations
 * ______________________|__________________________________________________
 *                       |
 * constructor           | -
 * ______________________|__________________________________________________
 *                       |
 * setSessionContext     | SessionContext methods:
 *                       |    - getEJBHome
 *                       | JNDI access to java:comp/env
 * ______________________|__________________________________________________
 *                       |
 * ejbCreate             | SessionContext methods:
 * ejbRemove             |    - getEJBHome
 *                       |    - getEJBObject
 *                       | JNDI access to java:comp/env
 * ______________________|__________________________________________________
 *                       |
 * business method       | SessionContext methods:
 * from remote interface |    - getEJBHome
 *                       |    - getCallerPrincipal
 *                       |    - getRollbackOnly
 *                       |    - isCallerInRole
 *                       |    - setRollbackOnly
 *                       |    - getEJBObject
 *                       | JNDI access to java:comp/env
 *                       | Resource manager access
 *                       | Enterprise bean access
 * ______________________|__________________________________________________
 * </PRE>
 *
 * @author <a href="mailto:david.blevins@visi.com">David Blevins</a>
 * @author <a href="mailto:Richard@Monson-Haefel.com">Richard Monson-Haefel</a>
 */
public class BmtMdbAllowedOperationsTests extends MdbTestClient {

    // Proxy to the bean-managed-transaction MDB under test; each test pulls an
    // OperationsPolicy report recorded during a specific lifecycle callback.
    protected BasicMdbObject basicMdbObject;

    public BmtMdbAllowedOperationsTests() {
        super("AllowedOperations.");
    }

    protected void setUp() throws Exception {
        super.setUp();
        Destination destination = (Destination) initialContext.lookup("Basic BMT Mdb Bean");
        basicMdbObject = MdbProxy.newProxyInstance(BasicMdbObject.class, connectionFactory, destination);
        // Invoke the bean once up front — presumably so the container creates the
        // MDB and its lifecycle callbacks record their allowed-operations
        // reports before the tests query them. TODO confirm against MdbProxy.
        basicMdbObject.businessMethod("foo");
    }

    protected void tearDown() throws Exception {
        MdbProxy.destroyProxy(basicMdbObject);
        super.tearDown();
    }

    //=====================================
    // Test EJBContext allowed operations
    //

    /**
     * Verifies the operations recorded as allowed during dependency injection.
     *
     * <PRE>
     * Bean method           | Bean method can perform the following operations
     * ______________________|__________________________________________________
     *                       |
     * dependency injection  | MessageDrivenContext methods: lookup
     * methods (e.g., setMes-|
     * sageDrivenContext)    | JNDI access to java:comp/env
     * ______________________|__________________________________________________
     * </PRE>
     */
    public void test01_setSessionContext() {
        try {
            // Build the expected policy, then compare it with the report the
            // bean captured while setMessageDrivenContext ran.
            OperationsPolicy policy = new OperationsPolicy();
            policy.allow(OperationsPolicy.Context_lookup);
            policy.allow(OperationsPolicy.JNDI_access_to_java_comp_env);

            Object expected = policy;
            Object actual = basicMdbObject.getAllowedOperationsReport("setMessageDrivenContext");

            assertNotNull("The OperationsPolicy is null", actual);
            assertEquals(expected, actual);
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    /**
     * Verifies the operations recorded as allowed during ejbCreate.
     *
     * <PRE>
     * Bean method           | Bean method can perform the following operations
     * ______________________|__________________________________________________
     *                       |
     * ejbCreate             | SessionContext methods:
     * ejbRemove             |    - getTimerService
     *                       |    - lookup
     *                       |    - getUserTransaction,
     *                       | JNDI access to java:comp/env
     * ______________________|__________________________________________________
     * </PRE>
     */
    public void test02_ejbCreate() {
        try {
            OperationsPolicy policy = new OperationsPolicy();
            policy.allow(OperationsPolicy.Context_lookup);
            policy.allow(OperationsPolicy.Context_getUserTransaction);
            policy.allow(OperationsPolicy.JNDI_access_to_java_comp_env);

            Object expected = policy;
            Object actual = basicMdbObject.getAllowedOperationsReport("ejbCreate");

            assertNotNull("The OperationsPolicy is null", actual);
            assertEquals(expected, actual);
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    /**
     * Disabled (name does not start with "test" so the runner skips it): the
     * ejbRemove check needs functionality that does not exist yet — see the
     * inline TO DO note.
     *
     * <PRE>
     * Bean method           | Bean method can perform the following operations
     * ______________________|__________________________________________________
     *                       |
     * ejbCreate             | SessionContext methods:
     * ejbRemove             |    - getEJBHome
     *                       |    - getEJBObject
     *                       |    - getUserTransaction,
     *                       | JNDI access to java:comp/env
     * ______________________|__________________________________________________
     * </PRE>
     */
    public void TODO_test03_ejbRemove() {
        try {
            /* TO DO:  This test needs unique functionality to work */
            OperationsPolicy policy = new OperationsPolicy();
            policy.allow(OperationsPolicy.Context_getEJBHome);
            policy.allow(OperationsPolicy.Context_getEJBObject);
            policy.allow(OperationsPolicy.Context_getUserTransaction);
            policy.allow(OperationsPolicy.JNDI_access_to_java_comp_env);

            Object expected = policy;
            Object actual = basicMdbObject.getAllowedOperationsReport("ejbRemove");

            assertNotNull("The OperationsPolicy is null", actual);
            assertEquals(expected, actual);
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }

    /**
     * Verifies the operations recorded as allowed during a business method
     * invocation.
     *
     * <PRE>
     * Bean method           | Bean method can perform the following operations
     * ______________________|__________________________________________________
     *                       |
     * business method       | SessionContext methods:
     * from remote interface |    - getCallerPrincipal
     *                       |    - getUserTransaction,
     *                       |    - getTimerService
     *                       |    - lookup
     *                       | JNDI access to java:comp/env
     *                       | Resource manager access
     *                       | Enterprise bean access
     *                       | EntityManagerFactory access
     * ______________________|__________________________________________________
     * </PRE>
     */
    public void test04_businessMethod() {
        try {
            OperationsPolicy policy = new OperationsPolicy();
            policy.allow(OperationsPolicy.Context_getUserTransaction);
            policy.allow(OperationsPolicy.Context_getCallerPrincipal);
            policy.allow(OperationsPolicy.Context_lookup);
            policy.allow(OperationsPolicy.JNDI_access_to_java_comp_env);

            Object expected = policy;
            Object actual = basicMdbObject.getAllowedOperationsReport("businessMethod");

            assertNotNull("The OperationsPolicy is null", actual);
            assertEquals(expected, actual);
        } catch (Exception e) {
            fail("Received Exception " + e.getClass() + " : " + e.getMessage());
        }
    }
    //
    // Test EJBContext allowed operations
    //=====================================
}
apache-2.0
deeplearning4j/deeplearning4j
deeplearning4j/deeplearning4j-core/src/test/java/org/deeplearning4j/gradientcheck/GlobalPoolingGradientCheckTests.java
14381
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.deeplearning4j.gradientcheck;

import org.deeplearning4j.BaseDL4JTest;
import org.deeplearning4j.TestUtils;
import org.deeplearning4j.nn.conf.CNN2DFormat;
import org.deeplearning4j.nn.conf.ConvolutionMode;
import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.distribution.NormalDistribution;
import org.deeplearning4j.nn.conf.inputs.InputType;
import org.deeplearning4j.nn.conf.layers.*;
import org.deeplearning4j.nn.conf.layers.recurrent.SimpleRnn;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.junit.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.NoOp;
import org.nd4j.linalg.lossfunctions.LossFunctions;

import java.util.Random;

import static org.junit.Assert.assertTrue;

/**
 * Numeric gradient checks for {@link GlobalPoolingLayer} in RNN and CNN
 * networks, with and without input masking. Each test builds a small network,
 * compares analytic gradients against finite-difference estimates via
 * {@code GradientCheckUtil}, and round-trips the model through serialization.
 *
 * All tests use DOUBLE precision and fixed RNG seeds; statement order matters
 * for reproducibility and should not be changed casually.
 *
 * Created by Alex on 17/01/2017.
 */
public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {

    static {
        // Gradient checking requires double precision; float epsilons are too
        // coarse for the finite-difference comparison below.
        Nd4j.setDataType(DataType.DOUBLE);
    }

    private static final boolean PRINT_RESULTS = true;
    private static final boolean RETURN_ON_FIRST_FAILURE = false;
    // Finite-difference step and error tolerances for GradientCheckUtil.
    private static final double DEFAULT_EPS = 1e-6;
    private static final double DEFAULT_MAX_REL_ERROR = 1e-3;
    private static final double DEFAULT_MIN_ABS_ERROR = 1e-8;

    @Override
    public long getTimeoutMilliseconds() {
        return 90000L;
    }

    /**
     * Global pooling over the time dimension of a SimpleRnn output, for every
     * pooling type and two minibatch sizes.
     */
    @Test
    public void testRNNGlobalPoolingBasicMultiLayer() {
        //Basic test of global pooling w/ LSTM
        Nd4j.getRandom().setSeed(12345L);

        int timeSeriesLength = 5;
        int nIn = 5;
        int layerSize = 4;
        int nOut = 2;

        int[] minibatchSizes = new int[] {1, 3};
        PoolingType[] poolingTypes =
                        new PoolingType[] {PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

        for (int miniBatchSize : minibatchSizes) {
            for (PoolingType pt : poolingTypes) {

                // RNN -> global pooling (collapses time) -> softmax output.
                MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                                .dataType(DataType.DOUBLE)
                                .updater(new NoOp())
                                .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
                                .layer(0, new SimpleRnn.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
                                                .build())
                                .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                                .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                                .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
                                .build();

                MultiLayerNetwork mln = new MultiLayerNetwork(conf);
                mln.init();

                // NOTE(review): r is unused in this test (labels come from
                // TestUtils.randomOneHot); kept to preserve behavior/RNG usage.
                Random r = new Random(12345L);
                // Input shape: [minibatch, nIn, timeSeriesLength], centered on 0.
                INDArray input = Nd4j.rand(DataType.DOUBLE, miniBatchSize, nIn, timeSeriesLength).subi(0.5);

                INDArray labels = TestUtils.randomOneHot(miniBatchSize, nOut).castTo(DataType.DOUBLE);

                if (PRINT_RESULTS) {
                    System.out.println("testLSTMGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = "
                                    + miniBatchSize);
//                    for (int j = 0; j < mln.getnLayers(); j++)
//                        System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
                }

                boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                                DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

                assertTrue(gradOK);
                TestUtils.testModelSerialization(mln);
            }
        }
    }

    /**
     * Global pooling over the spatial dimensions of a convolution output, in
     * both NCHW and NHWC data formats.
     */
    @Test
    public void testCnnGlobalPoolingBasicMultiLayer() {
        //Basic test of global pooling w/ CNN
        Nd4j.getRandom().setSeed(12345L);

        for (boolean nchw : new boolean[]{true, false}) {
            int inputDepth = 3;
            int inputH = 5;
            int inputW = 4;
            int layerDepth = 4;
            int nOut = 2;

            int[] minibatchSizes = new int[]{1, 3};
            PoolingType[] poolingTypes =
                    new PoolingType[]{PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

            for (int miniBatchSize : minibatchSizes) {
                for (PoolingType pt : poolingTypes) {

                    // Conv -> global pooling (collapses H and W) -> softmax output.
                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .dataType(DataType.DOUBLE)
                            .updater(new NoOp())
                            .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
                            .layer(0, new ConvolutionLayer.Builder().kernelSize(2, 2).stride(1, 1).nOut(layerDepth)
                                    .build())
                            .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(nOut).build())
                            .setInputType(InputType.convolutional(inputH, inputW, inputDepth,
                                    nchw ? CNN2DFormat.NCHW : CNN2DFormat.NHWC)).build();

                    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
                    mln.init();

                    Random r = new Random(12345L);
                    // Input shape depends on the data format under test.
                    long[] inShape = nchw ? new long[]{miniBatchSize, inputDepth, inputH, inputW}
                                    : new long[]{miniBatchSize, inputH, inputW, inputDepth};
                    INDArray input = Nd4j.rand(DataType.DOUBLE, inShape).subi(0.5);
                    // Hand-rolled one-hot labels (deterministic via seeded r).
                    INDArray labels = Nd4j.zeros(miniBatchSize, nOut);
                    for (int i = 0; i < miniBatchSize; i++) {
                        int idx = r.nextInt(nOut);
                        labels.putScalar(i, idx, 1.0);
                    }

                    if (PRINT_RESULTS) {
                        System.out.println("testCnnGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = "
                                + miniBatchSize + " - " + (nchw ? "NCHW" : "NHWC"));
//                        for (int j = 0; j < mln.getnLayers(); j++)
//                            System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
                    }

                    boolean gradOK = GradientCheckUtil.checkGradients(mln, DEFAULT_EPS, DEFAULT_MAX_REL_ERROR,
                            DEFAULT_MIN_ABS_ERROR, PRINT_RESULTS, RETURN_ON_FIRST_FAILURE, input, labels);

                    assertTrue(gradOK);
                    TestUtils.testModelSerialization(mln);
                }
            }
        }
    }

    /**
     * LSTM + global pooling with a per-example feature mask of decreasing
     * lengths, checked through the mask-aware gradient check path.
     */
    @Test
    public void testLSTMWithMasking() {
        //Basic test of LSTM layer
        Nd4j.getRandom().setSeed(12345L);

        int timeSeriesLength = 5;
        int nIn = 4;
        int layerSize = 3;
        int nOut = 2;

        int miniBatchSize = 3;
        PoolingType[] poolingTypes =
                new PoolingType[] {PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

        for (PoolingType pt : poolingTypes) {

            MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                    .dataType(DataType.DOUBLE)
                    .updater(new NoOp())
                    .dist(new NormalDistribution(0, 1.0)).seed(12345L).list()
                    .layer(0, new LSTM.Builder().nIn(nIn).nOut(layerSize).activation(Activation.TANH)
                            .build())
                    .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                            .activation(Activation.SOFTMAX).nIn(layerSize).nOut(nOut).build())
                    .build();

            MultiLayerNetwork mln = new MultiLayerNetwork(conf);
            mln.init();

            // NOTE(review): r is unused here as well; kept to preserve behavior.
            Random r = new Random(12345L);
            INDArray input = Nd4j.rand(DataType.DOUBLE, miniBatchSize, nIn, timeSeriesLength).subi(0.5);

            // Mask example i to its first (timeSeriesLength - i) steps, giving
            // sequence lengths 5, 4, 3 for the three examples.
            INDArray featuresMask = Nd4j.create(miniBatchSize, timeSeriesLength);
            for (int i = 0; i < miniBatchSize; i++) {
                int to = timeSeriesLength - i;
                for (int j = 0; j < to; j++) {
                    featuresMask.putScalar(i, j, 1.0);
                }
            }

            INDArray labels = TestUtils.randomOneHot(miniBatchSize, nOut);
            mln.setLayerMaskArrays(featuresMask, null);

            if (PRINT_RESULTS) {
                System.out.println("testLSTMGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = "
                        + miniBatchSize);
//                for (int j = 0; j < mln.getnLayers(); j++)
//                    System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
            }

            // MLNConfig overload: mask-aware gradient check using default tolerances.
            boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln).input(input)
                    .labels(labels).inputMask(featuresMask));

            assertTrue(gradOK);
            TestUtils.testModelSerialization(mln);
        }
    }

    /**
     * CNN + global pooling with spatial masking, run once with the mask along
     * the height dimension (2) and once along the width dimension (3).
     */
    @Test
    public void testCnnGlobalPoolingMasking() {
        //Global pooling w/ CNN + masking, where mask is along dimension 2, then separately test along dimension 3
        Nd4j.getRandom().setSeed(12345L);

        int inputDepth = 2;
        int inputH = 5;
        int inputW = 5;
        int layerDepth = 3;
        int nOut = 2;

        for (int maskDim = 2; maskDim <= 3; maskDim++) {

            int[] minibatchSizes = new int[] {1, 3};
            PoolingType[] poolingTypes =
                    new PoolingType[] {PoolingType.AVG, PoolingType.SUM, PoolingType.MAX, PoolingType.PNORM};

            for (int miniBatchSize : minibatchSizes) {
                for (PoolingType pt : poolingTypes) {

                    // Kernel/stride span the full non-masked spatial dimension so
                    // that only the masked dimension remains variable-length.
                    int[] kernel;
                    int[] stride;
                    if (maskDim == 2) {
                        //"time" (variable length) dimension is dimension 2
                        kernel = new int[] {2, inputW};
                        stride = new int[] {1, inputW};
                    } else {
                        kernel = new int[] {inputH, 2};
                        stride = new int[] {inputH, 1};
                    }

                    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                            .dataType(DataType.DOUBLE)
                            .updater(new NoOp())
                            .dist(new NormalDistribution(0, 1.0)).convolutionMode(ConvolutionMode.Same)
                            .seed(12345L).list()
                            .layer(0, new ConvolutionLayer.Builder().kernelSize(kernel).stride(stride)
                                    .nOut(layerDepth).build())
                            .layer(1, new GlobalPoolingLayer.Builder().poolingType(pt).build())
                            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT)
                                    .activation(Activation.SOFTMAX).nOut(nOut).build())
                            .setInputType(InputType.convolutional(inputH, inputW, inputDepth)).build();

                    MultiLayerNetwork mln = new MultiLayerNetwork(conf);
                    mln.init();

                    Random r = new Random(12345L);
                    INDArray input = Nd4j.rand(new int[] {miniBatchSize, inputDepth, inputH, inputW}).subi(0.5);

                    // Build a [mb, 1, H or 1, 1 or W] mask: a singleton in the
                    // non-masked spatial dimension, with trailing zeros marking
                    // the "padding" positions in the masked dimension.
                    INDArray inputMask;
                    if (miniBatchSize == 1) {
                        inputMask = Nd4j.create(new double[] {1, 1, 1, 1, 0}).reshape(1, 1,
                                (maskDim == 2 ? inputH : 1), (maskDim == 3 ? inputW : 1));
                    } else if (miniBatchSize == 3) {
                        inputMask = Nd4j.create(new double[][] {{1, 1, 1, 1, 1}, {1, 1, 1, 1, 0}, {1, 1, 1, 0, 0}})
                                .reshape(miniBatchSize, 1, (maskDim == 2 ? inputH : 1), (maskDim == 3 ? inputW : 1));
                    } else {
                        // Only minibatch sizes 1 and 3 have masks defined above.
                        throw new RuntimeException();
                    }

                    INDArray labels = Nd4j.zeros(miniBatchSize, nOut);
                    for (int i = 0; i < miniBatchSize; i++) {
                        int idx = r.nextInt(nOut);
                        labels.putScalar(i, idx, 1.0);
                    }

                    if (PRINT_RESULTS) {
                        System.out.println("testCnnGlobalPoolingBasicMultiLayer() - " + pt + ", minibatch = "
                                + miniBatchSize);
//                        for (int j = 0; j < mln.getnLayers(); j++)
//                            System.out.println("Layer " + j + " # params: " + mln.getLayer(j).numParams());
                    }

                    boolean gradOK = GradientCheckUtil.checkGradients(new GradientCheckUtil.MLNConfig().net(mln)
                            .input(input)
                            .labels(labels).inputMask(inputMask));

                    assertTrue(gradOK);
                    TestUtils.testModelSerialization(mln);
                }
            }
        }
    }
}
apache-2.0
hirohanin/pig7hadoop21
src/org/apache/pig/impl/io/ReadToEndLoader.java
8814
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.impl.io; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.task.JobContextImpl; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.pig.LoadCaster; import org.apache.pig.LoadFunc; import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; import org.apache.pig.data.Tuple; import org.apache.pig.impl.plan.OperatorKey; /** * This is wrapper Loader which wraps a real LoadFunc underneath and allows * to read a file completely starting a given split (indicated by a split index * which is used to look in the List<InputSplit> returned by the underlying * InputFormat's getSplits() method). 
So if the supplied split index is 0, this * loader will read the entire file. If it is non zero it will read the partial * file beginning from that split to the last split. * * The call sequence to use this is: * 1) construct an object using the constructor * 2) Call getNext() in a loop till it returns null */ public class ReadToEndLoader extends LoadFunc { /** * the wrapped LoadFunc which will do the actual reading */ private LoadFunc wrappedLoadFunc; /** * the Configuration object used to locate the input location - this will * be used to call {@link LoadFunc#setLocation(String, Configuration)} on * the wrappedLoadFunc */ private Configuration conf; /** * the input location string (typically input file/dir name ) */ private String inputLocation; /** * If the splits to be read are not in increasing sequence of integers * this array can be used */ private int[] toReadSplits = null; /** * index into toReadSplits */ private int toReadSplitsIdx = 0; /** * the index of the split the loader is currently reading from */ private int curSplitIndex; /** * the input splits returned by underlying {@link InputFormat#getSplits(JobContext)} */ private List<InputSplit> inpSplits = null; /** * underlying RecordReader */ private RecordReader reader = null; /** * underlying InputFormat */ private InputFormat inputFormat = null; /** * @param wrappedLoadFunc * @param conf * @param inputLocation * @param splitIndex * @throws IOException * @throws InterruptedException */ public ReadToEndLoader(LoadFunc wrappedLoadFunc, Configuration conf, String inputLocation, int splitIndex) throws IOException { this.wrappedLoadFunc = wrappedLoadFunc; this.inputLocation = inputLocation; this.conf = conf; this.curSplitIndex = splitIndex; init(); } /** * This constructor takes an array of split indexes (toReadSplitIdxs) of the * splits to be read. 
* @param wrappedLoadFunc * @param conf * @param inputLocation * @param toReadSplitIdxs * @throws IOException * @throws InterruptedException */ public ReadToEndLoader(LoadFunc wrappedLoadFunc, Configuration conf, String inputLocation, int[] toReadSplitIdxs) throws IOException { this.wrappedLoadFunc = wrappedLoadFunc; this.inputLocation = inputLocation; this.toReadSplits = toReadSplitIdxs; this.conf = conf; this.curSplitIndex = toReadSplitIdxs.length > 0 ? toReadSplitIdxs[0] : Integer.MAX_VALUE; init(); } @SuppressWarnings("unchecked") private void init() throws IOException { // make a copy so that if the underlying InputFormat writes to the // conf, we don't affect the caller's copy conf = new Configuration(conf); // let's initialize the wrappedLoadFunc Job job = new Job(conf); wrappedLoadFunc.setLocation(inputLocation, job); // The above setLocation call could write to the conf within // the job - get a hold of the modified conf conf = job.getConfiguration(); inputFormat = wrappedLoadFunc.getInputFormat(); try { inpSplits = inputFormat.getSplits(new JobContextImpl(conf, new JobID())); } catch (InterruptedException e) { throw new IOException(e); } } private boolean initializeReader() throws IOException, InterruptedException { if(curSplitIndex > inpSplits.size() - 1) { // past the last split, we are done return false; } if(reader != null){ reader.close(); } InputSplit curSplit = inpSplits.get(curSplitIndex); TaskAttemptContext tAContext = new TaskAttemptContextImpl(conf, new TaskAttemptID()); reader = inputFormat.createRecordReader(curSplit, tAContext); reader.initialize(curSplit, tAContext); // create a dummy pigsplit - other than the actual split, the other // params are really not needed here where we are just reading the // input completely PigSplit pigSplit = new PigSplit(curSplit, -1, new ArrayList<OperatorKey>(), -1); wrappedLoadFunc.prepareToRead(reader, pigSplit); return true; } @Override public Tuple getNext() throws IOException { try { Tuple t = null; 
if(reader == null) { // first call return getNextHelper(); } else { // we already have a reader initialized t = wrappedLoadFunc.getNext(); if(t != null) { return t; } // if loadfunc returned null, we need to read next split // if there is one updateCurSplitIndex(); return getNextHelper(); } } catch (InterruptedException e) { throw new IOException(e); } } private Tuple getNextHelper() throws IOException, InterruptedException { Tuple t = null; while(initializeReader()) { t = wrappedLoadFunc.getNext(); if(t == null) { // try next split updateCurSplitIndex(); } else { return t; } } return null; } /** * Updates curSplitIndex , just increment if splitIndexes is null, * else get next split in splitIndexes */ private void updateCurSplitIndex() { if(toReadSplits == null){ ++curSplitIndex; }else{ ++toReadSplitsIdx; if(toReadSplitsIdx >= toReadSplits.length){ // finished all the splits in splitIndexes array curSplitIndex = Integer.MAX_VALUE; }else{ curSplitIndex = toReadSplits[toReadSplitsIdx]; } } } @Override public InputFormat getInputFormat() throws IOException { throw new UnsupportedOperationException(); } @Override public LoadCaster getLoadCaster() throws IOException { throw new UnsupportedOperationException(); } @Override public void prepareToRead(RecordReader reader, PigSplit split) { throw new UnsupportedOperationException(); } @Override public void setLocation(String location, Job job) throws IOException { throw new UnsupportedOperationException(); } }
apache-2.0
openmhealth/shimmer
shim-server/src/main/java/org/openmhealth/shim/googlefit/mapper/GoogleFitCaloriesBurnedDataPointMapper.java
2516
/* * Copyright 2015 Open mHealth * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openmhealth.shim.googlefit.mapper; import com.fasterxml.jackson.databind.JsonNode; import org.openmhealth.schema.domain.omh.CaloriesBurned2; import org.openmhealth.schema.domain.omh.DataPoint; import java.util.Optional; import static org.openmhealth.schema.domain.omh.KcalUnit.KILOCALORIE; import static org.openmhealth.shim.common.mapper.JsonNodeMappingSupport.*; /** * A mapper from Google Fit "merged calories expended" endpoint responses (derived:com.google.calories.expended:com.google.android.gms:merge_calories_expended) * to {@link CaloriesBurned2} objects. 
* * @author Chris Schaefbauer * @see <a href="https://developers.google.com/fit/rest/v1/data-types">Google Fit Data Type Documentation</a> */ public class GoogleFitCaloriesBurnedDataPointMapper extends GoogleFitDataPointMapper<CaloriesBurned2> { @Override protected Optional<DataPoint<CaloriesBurned2>> asDataPoint(JsonNode listNode) { JsonNode listValueNode = asRequiredNode(listNode, "value"); double caloriesBurnedValue = asRequiredDouble(listValueNode.get(0), "fpVal"); CaloriesBurned2.Builder measureBuilder = new CaloriesBurned2.Builder(KILOCALORIE.newUnitValue(caloriesBurnedValue), getTimeFrame(listNode)); CaloriesBurned2 caloriesBurned = measureBuilder.build(); Optional<String> originDataSourceId = asOptionalString(listNode, "originDataSourceId"); // Google Fit calories burned endpoint returns calories burned by basal metabolic rate (BMR), however these // are not activity related calories burned so we do not create a data point for values from this source if (originDataSourceId.isPresent()) { if (originDataSourceId.get().contains("bmr")) { return Optional.empty(); } } return Optional.of(newDataPoint(caloriesBurned, originDataSourceId.orElse(null))); } }
apache-2.0
yuri0x7c1/ofbiz-explorer
src/test/resources/apache-ofbiz-17.12.04/applications/humanres/src/main/java/org/apache/ofbiz/humanres/HumanResEvents.java
15018
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.humanres; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.UtilGenerics; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.entity.Delegator; import org.apache.ofbiz.entity.GenericEntityException; import org.apache.ofbiz.entity.GenericValue; import org.apache.ofbiz.entity.condition.EntityCondition; import org.apache.ofbiz.entity.condition.EntityOperator; import org.apache.ofbiz.entity.util.EntityQuery; public class HumanResEvents { public static final String module = HumanResEvents.class.getName(); public static final String resourceError = "HumanResErrorUiLabels"; // Please note : the structure of map in this function is according to the JSON data map of the jsTree public static String getChildHRCategoryTree(HttpServletRequest request, HttpServletResponse response){ Delegator 
delegator = (Delegator) request.getAttribute("delegator"); String partyId = request.getParameter("partyId"); String onclickFunction = request.getParameter("onclickFunction"); String additionParam = request.getParameter("additionParam"); String hrefString = request.getParameter("hrefString"); String hrefString2 = request.getParameter("hrefString2"); Map<String, Object> paramMap = new HashMap<String, Object>(); paramMap.put("delegator", delegator); paramMap.put("partyId", partyId); paramMap.put("onclickFunction", onclickFunction); paramMap.put("additionParam", additionParam); paramMap.put("hrefString", hrefString); paramMap.put("hrefString2", hrefString2); List<Map<String,Object>> categoryList = new ArrayList<Map<String,Object>>(); //check employee position try { categoryList.addAll(getCurrentEmployeeDetails(paramMap)); } catch (GenericEntityException e) { Debug.logError(e, module); return "error"; } try { GenericValue partyGroup = EntityQuery.use(delegator).from("PartyGroup").where("partyId", partyId).queryOne(); if (partyGroup != null) { paramMap.put("partyGroup", partyGroup); /* get the child departments of company or party */ categoryList.addAll(getChildComps(paramMap)); /* get employee which are working in company or party */ categoryList.addAll(getEmployeeInComp(paramMap)); } } catch (GenericEntityException e) { Debug.logError(e, module); return "error"; } request.setAttribute("hrTree", categoryList); return "success"; } private static List<Map<String, Object>> getCurrentEmployeeDetails(Map<String, Object> params) throws GenericEntityException{ Delegator delegator = (Delegator) params.get("delegator"); String partyId = (String) params.get("partyId"); String onclickFunction = (String) params.get("onclickFunction"); String additionParam = (String) params.get("additionParam"); String hrefString = (String) params.get("hrefString"); String hrefString2 = (String) params.get("hrefString2"); List<Map<String, Object>> responseList = new ArrayList<>(); long emplPosCount; 
try { emplPosCount = EntityQuery.use(delegator).from("EmplPosition") .where("emplPositionId", partyId).queryCount(); if (emplPosCount > 0) { String emplId = partyId; List<GenericValue> emlpfillCtxs = EntityQuery.use(delegator).from("EmplPositionFulfillment") .where("emplPositionId", emplId) .filterByDate().queryList(); if (UtilValidate.isNotEmpty(emlpfillCtxs)) { for (GenericValue emlpfillCtx : emlpfillCtxs ) { String memberId = emlpfillCtx.getString("partyId"); GenericValue memCtx = EntityQuery.use(delegator).from("Person").where("partyId", partyId).queryOne(); String title = null; if (UtilValidate.isNotEmpty(memCtx)) { String firstname = memCtx.getString("firstName"); String lastname = memCtx.getString("lastName"); if (UtilValidate.isEmpty(lastname)) { lastname = ""; } if (UtilValidate.isEmpty(firstname)) { firstname = ""; } title = firstname +" "+ lastname; } GenericValue memGroupCtx = EntityQuery.use(delegator).from("PartyGroup").where("partyId", partyId).queryOne(); if (UtilValidate.isNotEmpty(memGroupCtx)) { title = memGroupCtx.getString("groupName"); } Map<String,Object> josonMap = new HashMap<String, Object>(); Map<String,Object> dataMap = new HashMap<String, Object>(); Map<String,Object> dataAttrMap = new HashMap<String, Object>(); Map<String,Object> attrMap = new HashMap<String, Object>(); dataAttrMap.put("onClick", onclickFunction + "('" + memberId + additionParam + "')"); String hrefStr = hrefString + memberId; if (UtilValidate.isNotEmpty(hrefString2)) { hrefStr = hrefStr + hrefString2; } dataAttrMap.put("href", hrefStr); attrMap.put("rel", "P"); attrMap.put("id", memberId); dataMap.put("title", title); dataMap.put("attr", dataAttrMap); josonMap.put("attr",attrMap); josonMap.put("data", dataMap); responseList.add(josonMap) ; } } } } catch (GenericEntityException e) { Debug.logError(e, module); throw new GenericEntityException(e); } return responseList; } private static List<Map<String, Object>> getChildComps(Map<String, Object> params) throws 
GenericEntityException{ Delegator delegator = (Delegator) params.get("delegator"); String onclickFunction = (String) params.get("onclickFunction"); String additionParam = (String) params.get("additionParam"); String hrefString = (String) params.get("hrefString"); String hrefString2 = (String) params.get("hrefString2"); Map<String , Object> partyGroup = UtilGenerics.checkMap(params.get("partyGroup")); List<Map<String, Object>> resultList = new ArrayList<Map<String,Object>>(); List<GenericValue> childOfComs = null; try { childOfComs = EntityQuery.use(delegator).from("PartyRelationship") .where("partyIdFrom", partyGroup.get("partyId"), "partyRelationshipTypeId", "GROUP_ROLLUP") .filterByDate().queryList(); if (UtilValidate.isNotEmpty(childOfComs)) { for (GenericValue childOfCom : childOfComs ) { Object catId = null; String catNameField = null; String title = null; Map<String, Object> josonMap = new HashMap<String, Object>(); Map<String, Object> dataMap = new HashMap<String, Object>(); Map<String, Object> dataAttrMap = new HashMap<String, Object>(); Map<String, Object> attrMap = new HashMap<String, Object>(); catId = childOfCom.get("partyIdTo"); //Department or Sub department GenericValue childContext = EntityQuery.use(delegator).from("PartyGroup").where("partyId", catId).queryOne(); if (UtilValidate.isNotEmpty(childContext)) { catNameField = (String) childContext.get("groupName"); title = catNameField; josonMap.put("title",title); } //Check child existing List<GenericValue> childOfSubComs = EntityQuery.use(delegator).from("PartyRelationship") .where("partyIdFrom", catId, "partyRelationshipTypeId", "GROUP_ROLLUP") .filterByDate().queryList(); //check employee position List<GenericValue> isPosition = EntityQuery.use(delegator).from("EmplPosition").where("partyId", catId).queryList(); if (UtilValidate.isNotEmpty(childOfSubComs) || UtilValidate.isNotEmpty(isPosition)) { josonMap.put("state", "closed"); } //Employee GenericValue emContext = 
EntityQuery.use(delegator).from("Person").where("partyId", catId).queryOne(); if (UtilValidate.isNotEmpty(emContext)) { String firstname = (String) emContext.get("firstName"); String lastname = (String) emContext.get("lastName"); if (UtilValidate.isEmpty(lastname)) { lastname = ""; } if (UtilValidate.isEmpty(firstname)) { firstname = ""; } title = firstname +" "+ lastname; } dataAttrMap.put("onClick", onclickFunction + "('" + catId + additionParam + "')"); String hrefStr = hrefString + catId; if (UtilValidate.isNotEmpty(hrefString2)) { hrefStr = hrefStr + hrefString2; } dataAttrMap.put("href", hrefStr); dataMap.put("attr", dataAttrMap); dataMap.put("title", title); attrMap.put("rel", "Y"); attrMap.put("id", catId); josonMap.put("attr",attrMap); josonMap.put("data", dataMap); resultList.add(josonMap); } } } catch (GenericEntityException e) { Debug.logError(e, module); throw new GenericEntityException(e); } return resultList; } private static List<Map<String, Object>> getEmployeeInComp(Map<String, Object> params) throws GenericEntityException{ List<GenericValue> isEmpls = null; Delegator delegator = (Delegator) params.get("delegator"); String partyId = (String) params.get("partyId"); List<Map<String, Object>> resultList = new ArrayList<Map<String,Object>>(); try { isEmpls = EntityQuery.use(delegator).from("EmplPosition") .where(EntityCondition.makeCondition("partyId", EntityOperator.EQUALS, partyId), EntityCondition.makeCondition("statusId", EntityOperator.NOT_EQUAL, "EMPL_POS_INACTIVE")) .filterByDate("actualFromDate", "actualThruDate") .queryList(); if (UtilValidate.isNotEmpty(isEmpls)) { for (GenericValue childOfEmpl : isEmpls ) { Map<String, Object> emplMap = new HashMap<String, Object>(); Map<String, Object> emplAttrMap = new HashMap<String, Object>(); Map<String, Object> empldataMap = new HashMap<String, Object>(); Map<String, Object> emplDataAttrMap = new HashMap<String, Object>(); String emplId = (String) childOfEmpl.get("emplPositionId"); String typeId = 
(String) childOfEmpl.get("emplPositionTypeId"); //check child List<GenericValue> emlpfCtxs = EntityQuery.use(delegator).from("EmplPositionFulfillment") .where("emplPositionId", emplId) .filterByDate().queryList(); if (UtilValidate.isNotEmpty(emlpfCtxs)) { emplMap.put("state", "closed"); } GenericValue emplContext = EntityQuery.use(delegator).from("EmplPositionType").where("emplPositionTypeId", typeId).queryOne(); String title = null; if (UtilValidate.isNotEmpty(emplContext)) { title = (String) emplContext.get("description") + " " +"["+ emplId +"]"; } String hrefStr = "emplPositionView?emplPositionId=" + emplId; emplAttrMap.put("href", hrefStr); emplAttrMap.put("onClick", "callEmplDocument" + "('" + emplId + "')"); empldataMap.put("title", title); empldataMap.put("attr", emplAttrMap); emplDataAttrMap.put("id", emplId); emplDataAttrMap.put("rel", "N"); emplMap.put("data", empldataMap); emplMap.put("attr",emplDataAttrMap); emplMap.put("title",title); resultList.add(emplMap); } } } catch (GenericEntityException e) { Debug.logError(e, module); throw new GenericEntityException(e); } return resultList; } }
apache-2.0
BioGram/Android
app/src/main/java/co/biogram/main/ui/social/LikeUI.java
21211
package co.biogram.main.ui.social; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.GradientDrawable; import android.graphics.drawable.StateListDrawable; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import com.androidnetworking.AndroidNetworking; import com.androidnetworking.error.ANError; import com.androidnetworking.interfaces.StringRequestListener; import org.json.JSONArray; import org.json.JSONObject; import java.util.ArrayList; import java.util.List; import co.biogram.main.R; import co.biogram.main.fragment.FragmentView; import co.biogram.main.handler.GlideApp; import co.biogram.main.handler.Misc; import co.biogram.main.handler.RecyclerViewOnScroll; import co.biogram.main.ui.view.LoadingView; import co.biogram.main.ui.view.TextView; import de.hdodenhof.circleimageview.CircleImageView; public class LikeUI extends FragmentView { private List<Struct> PeopleList = new ArrayList<>(); private AdapterLike Adapter; private boolean IsComment; private String ID; public LikeUI(String id, boolean isComment) { ID = id; IsComment = isComment; } @Override public void OnCreate() { LinearLayout LinearLayoutMain = new LinearLayout(Activity); LinearLayoutMain.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT)); LinearLayoutMain.setBackgroundResource(Misc.IsDark() ? 
R.color.GroundDark : R.color.GroundWhite); LinearLayoutMain.setOrientation(LinearLayout.VERTICAL); LinearLayoutMain.setClickable(true); RelativeLayout RelativeLayoutHeader = new RelativeLayout(Activity); RelativeLayoutHeader.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, Misc.ToDP(56))); RelativeLayoutHeader.setBackgroundResource(Misc.IsDark() ? R.color.ActionBarDark : R.color.ActionBarWhite); LinearLayoutMain.addView(RelativeLayoutHeader); RelativeLayout.LayoutParams ImageViewBackParam = new RelativeLayout.LayoutParams(Misc.ToDP(56), Misc.ToDP(56)); ImageViewBackParam.addRule(Misc.Align("R")); ImageView ImageViewBack = new ImageView(Activity); ImageViewBack.setLayoutParams(ImageViewBackParam); ImageViewBack.setPadding(Misc.ToDP(13), Misc.ToDP(13), Misc.ToDP(13), Misc.ToDP(13)); ImageViewBack.setImageResource(Misc.IsRTL() ? R.drawable.z_general_back_blue : R.drawable.z_general_back_blue); ImageViewBack.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Activity.onBackPressed(); } }); ImageViewBack.setId(Misc.generateViewId()); RelativeLayoutHeader.addView(ImageViewBack); RelativeLayout.LayoutParams TextViewTitleParam = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT); TextViewTitleParam.addRule(Misc.AlignTo("R"), ImageViewBack.getId()); TextViewTitleParam.addRule(RelativeLayout.CENTER_VERTICAL); TextView TextViewTitle = new TextView(Activity, 16, true); TextViewTitle.setLayoutParams(TextViewTitleParam); TextViewTitle.SetColor(Misc.IsDark() ? R.color.TextDark : R.color.TextWhite); TextViewTitle.setText(Misc.String(R.string.LikeUI)); TextViewTitle.setPadding(0, Misc.ToDP(6), 0, 0); RelativeLayoutHeader.addView(TextViewTitle); View ViewLine = new View(Activity); ViewLine.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, Misc.ToDP(1))); ViewLine.setBackgroundResource(Misc.IsDark() ? 
R.color.LineDark : R.color.LineWhite); LinearLayoutMain.addView(ViewLine); RelativeLayout RelativeLayoutContent = new RelativeLayout(Activity); RelativeLayoutContent.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); LinearLayoutMain.addView(RelativeLayoutContent); LinearLayoutManager LinearLayoutManagerMain = new LinearLayoutManager(Activity); RecyclerView RecyclerViewMain = new RecyclerView(Activity); RecyclerViewMain.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT)); RecyclerViewMain.setAdapter(Adapter = new AdapterLike()); RecyclerViewMain.setLayoutManager(LinearLayoutManagerMain); RecyclerViewMain.addOnScrollListener(new RecyclerViewOnScroll() { @Override public void OnLoadMore() { Update(null); } }); RelativeLayoutContent.addView(RecyclerViewMain); LoadingView LoadingViewMain = new LoadingView(Activity); LoadingViewMain.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); LoadingViewMain.setBackgroundResource(R.color.GroundWhite); LoadingViewMain.Start(); RelativeLayoutContent.addView(LoadingViewMain); Update(LoadingViewMain); ViewMain = LinearLayoutMain; } @Override public void OnPause() { AndroidNetworking.forceCancel("LikeUI"); } private void Update(final LoadingView Loading) { if (IsComment) { AndroidNetworking.post(Misc.GetRandomServer("PostCommentLikeList")) .addBodyParameter("Skip", String.valueOf(PeopleList.size())) .addBodyParameter("CommentID", ID) .addHeaders("Token", Misc.GetString("Token")) .setTag("LikeUI") .build() .getAsString(new StringRequestListener() { @Override public void onResponse(String Response) { try { JSONObject Result = new JSONObject(Response); if (Result.getInt("Message") == 0 && !Result.isNull("Result")) { JSONArray ResultList = new JSONArray(Result.getString("Result")); for (int K = 0; K < 
ResultList.length(); K++) { JSONObject D = ResultList.getJSONObject(K); PeopleList.add(new Struct(D.getString("ID"), D.getString("Name"), D.getString("Username"), D.getString("Avatar"), D.getBoolean("Follow"))); } Adapter.notifyDataSetChanged(); } } catch (Exception e) { Misc.Debug("LikeUI-Update: " + e.toString()); } if (Loading != null) { Loading.Stop(); Loading.setVisibility(View.GONE); } } @Override public void onError(ANError e) { if (Loading != null) { Loading.Stop(); Loading.setVisibility(View.GONE); } } }); return; } AndroidNetworking.post(Misc.GetRandomServer("PostLikeList")) .addBodyParameter("Skip", String.valueOf(PeopleList.size())) .addBodyParameter("PostID", ID) .addHeaders("Token", Misc.GetString("Token")) .setTag("LikeUI") .build() .getAsString(new StringRequestListener() { @Override public void onResponse(String Response) { try { JSONObject Result = new JSONObject(Response); if (Result.getInt("Message") == 0 && !Result.isNull("Result")) { JSONArray ResultList = new JSONArray(Result.getString("Result")); for (int K = 0; K < ResultList.length(); K++) { JSONObject D = ResultList.getJSONObject(K); PeopleList.add(new Struct(D.getString("ID"), D.getString("Name"), D.getString("Username"), D.getString("Avatar"), D.getBoolean("Follow"))); } Adapter.notifyDataSetChanged(); } } catch (Exception e) { Misc.Debug("LikeUI-Update: " + e.toString()); } if (Loading != null) { Loading.Stop(); Loading.setVisibility(View.GONE); } } @Override public void onError(ANError e) { if (Loading != null) { Loading.Stop(); Loading.setVisibility(View.GONE); } } }); } private class AdapterLike extends RecyclerView.Adapter<AdapterLike.ViewHolderMain> { private int ID_PROFILE = Misc.generateViewId(); private int ID_NAME = Misc.generateViewId(); private int ID_USERNAME = Misc.generateViewId(); private int ID_FOLLOW = Misc.generateViewId(); private int ID_LINE = Misc.generateViewId(); private GradientDrawable DrawableFollow; private GradientDrawable DrawableUnfollow; AdapterLike() { 
DrawableFollow = new GradientDrawable(); DrawableFollow.setColor(Misc.Color(R.color.Primary)); DrawableFollow.setCornerRadius(Misc.ToDP(20)); DrawableUnfollow = new GradientDrawable(); DrawableUnfollow.setColor(Misc.Color(R.color.Gray)); DrawableUnfollow.setCornerRadius(Misc.ToDP(20)); } @Override public void onBindViewHolder(ViewHolderMain Holder, int p) { if (Holder.getItemViewType() == 0) return; final int Position = Holder.getAdapterPosition(); GlideApp.with(Activity).load(PeopleList.get(Position).Profile).placeholder(R.drawable._general_avatar).into(Holder.CircleImageViewProfile); Holder.CircleImageViewProfile.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // TODO Open Profile } }); Holder.TextViewName.setText(PeopleList.get(Position).Name); Holder.TextViewName.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // TODO Open Profile } }); Holder.TextViewUsername.setText(("@" + PeopleList.get(Position).Username)); Holder.TextViewUsername.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { // TODO Open Profile } }); if (Misc.GetString("ID").equals(PeopleList.get(Position).ID)) Holder.TextViewFollow.setVisibility(View.GONE); else Holder.TextViewFollow.setVisibility(View.VISIBLE); if (PeopleList.get(Position).Follow) { Holder.TextViewFollow.SetColor(R.color.TextDark); Holder.TextViewFollow.setText(Activity.getString(R.string.LikeUIUnfollow)); Holder.TextViewFollow.setBackground(DrawableUnfollow); } else { Holder.TextViewFollow.SetColor(R.color.TextDark); Holder.TextViewFollow.setText(Activity.getString(R.string.LikeUIFollow)); Holder.TextViewFollow.setBackground(DrawableFollow); } Holder.TextViewFollow.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { AndroidNetworking.post(Misc.GetRandomServer("ProfileFollow")) .addBodyParameter("Username", PeopleList.get(Position).Username) .addHeaders("Token", 
Misc.GetString("Token")) .setTag("LikeUI") .build() .getAsString(null); PeopleList.get(Position).Follow = !PeopleList.get(Position).Follow; notifyDataSetChanged(); Misc.ToastOld(PeopleList.get(Position).Follow ? Activity.getString(R.string.LikeUIFollowed) : Activity.getString(R.string.LikeUIUnfollowed)); } }); if (Position == PeopleList.size() - 1) Holder.ViewLine.setVisibility(View.GONE); else Holder.ViewLine.setVisibility(View.VISIBLE); } @Override public ViewHolderMain onCreateViewHolder(ViewGroup p, int ViewType) { if (ViewType == 0) { RelativeLayout RelativeLayoutMain = new RelativeLayout(Activity); RelativeLayoutMain.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)); RelativeLayout.LayoutParams LinearLayoutMainParam = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT); LinearLayoutMainParam.addRule(RelativeLayout.CENTER_IN_PARENT); LinearLayout LinearLayoutMain = new LinearLayout(Activity); LinearLayoutMain.setLayoutParams(LinearLayoutMainParam); LinearLayoutMain.setOrientation(LinearLayout.VERTICAL); LinearLayoutMain.setGravity(Gravity.CENTER); RelativeLayoutMain.addView(LinearLayoutMain); RelativeLayout.LayoutParams ImageViewContentParam = new RelativeLayout.LayoutParams(Misc.ToDP(56), Misc.ToDP(56)); ImageViewContentParam.addRule(RelativeLayout.CENTER_IN_PARENT); ImageView ImageViewContent = new CircleImageView(Activity); ImageViewContent.setLayoutParams(ImageViewContentParam); ImageViewContent.setImageResource(R.drawable._general_like); ImageViewContent.setId(Misc.generateViewId()); LinearLayoutMain.addView(ImageViewContent); RelativeLayout.LayoutParams TextViewContentParam = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT); TextViewContentParam.addRule(RelativeLayout.BELOW, ImageViewContent.getId()); 
TextViewContentParam.addRule(RelativeLayout.CENTER_HORIZONTAL); TextView TextViewContent = new TextView(Activity, 16, false); TextViewContent.setLayoutParams(TextViewContentParam); TextViewContent.SetColor(R.color.Gray); TextViewContent.setText(Activity.getString(R.string.LikeUINo)); LinearLayoutMain.addView(TextViewContent); return new ViewHolderMain(RelativeLayoutMain, true); } StateListDrawable StatePress = new StateListDrawable(); StatePress.addState(new int[]{android.R.attr.state_pressed}, new ColorDrawable(Color.parseColor("#b0eeeeee"))); RelativeLayout RelativeLayoutMain = new RelativeLayout(Activity); RelativeLayoutMain.setLayoutParams(new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT)); RelativeLayoutMain.setBackground(StatePress); RelativeLayoutMain.setOnClickListener(null); RelativeLayout.LayoutParams CircleImageViewProfileParam = new RelativeLayout.LayoutParams(Misc.ToDP(48), Misc.ToDP(48)); CircleImageViewProfileParam.setMargins(Misc.ToDP(10), Misc.ToDP(10), Misc.ToDP(10), Misc.ToDP(10)); CircleImageViewProfileParam.addRule(RelativeLayout.ALIGN_PARENT_LEFT); CircleImageView CircleImageViewProfile = new CircleImageView(Activity); CircleImageViewProfile.setLayoutParams(CircleImageViewProfileParam); //CircleImageViewProfile.SetBorderColor(R.color.LineWhite); //CircleImageViewProfile.SetBorderWidth(1); CircleImageViewProfile.setId(ID_PROFILE); RelativeLayoutMain.addView(CircleImageViewProfile); RelativeLayout.LayoutParams TextViewNameParam = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT); TextViewNameParam.addRule(RelativeLayout.RIGHT_OF, ID_PROFILE); TextViewNameParam.setMargins(0, Misc.ToDP(13), 0, 0); TextView TextViewName = new TextView(Activity, 14, false); TextViewName.setLayoutParams(TextViewNameParam); TextViewName.SetColor(Misc.IsDark() ? 
R.color.TextDark : R.color.TextWhite); TextViewName.setId(ID_NAME); RelativeLayoutMain.addView(TextViewName); RelativeLayout.LayoutParams TextViewUsernameParam = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT); TextViewUsernameParam.addRule(RelativeLayout.RIGHT_OF, ID_PROFILE); TextViewUsernameParam.setMargins(0, Misc.ToDP(35), 0, 0); TextView TextViewUsername = new TextView(Activity, 12, false); TextViewUsername.setLayoutParams(TextViewUsernameParam); TextViewUsername.SetColor(R.color.Gray); TextViewUsername.setId(ID_USERNAME); RelativeLayoutMain.addView(TextViewUsername); RelativeLayout.LayoutParams TextViewFollowParam = new RelativeLayout.LayoutParams(Misc.ToDP(90), RelativeLayout.LayoutParams.WRAP_CONTENT); TextViewFollowParam.addRule(RelativeLayout.ALIGN_PARENT_RIGHT); TextViewFollowParam.addRule(RelativeLayout.CENTER_VERTICAL); TextViewFollowParam.setMargins(0, 0, Misc.ToDP(10), 0); TextView TextViewFollow = new TextView(Activity, 12, true); TextViewFollow.setLayoutParams(TextViewFollowParam); TextViewFollow.setPadding(Misc.ToDP(8), Misc.ToDP(5), Misc.ToDP(8), Misc.ToDP(3)); TextViewFollow.setGravity(Gravity.CENTER_HORIZONTAL); TextViewFollow.setId(ID_FOLLOW); RelativeLayoutMain.addView(TextViewFollow); RelativeLayout.LayoutParams ViewLineParam = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, Misc.ToDP(1)); ViewLineParam.addRule(RelativeLayout.BELOW, ID_PROFILE); View ViewLine = new View(Activity); ViewLine.setLayoutParams(ViewLineParam); ViewLine.setBackgroundResource(Misc.IsDark() ? R.color.LineDark : R.color.LineWhite); ViewLine.setId(ID_LINE); RelativeLayoutMain.addView(ViewLine); return new ViewHolderMain(RelativeLayoutMain, false); } @Override public int getItemViewType(int Position) { return PeopleList.size() == 0 ? 0 : 1; } @Override public int getItemCount() { return PeopleList.size() == 0 ? 
1 : PeopleList.size(); } class ViewHolderMain extends RecyclerView.ViewHolder { CircleImageView CircleImageViewProfile; TextView TextViewName; TextView TextViewUsername; TextView TextViewFollow; View ViewLine; ViewHolderMain(View v, boolean NoContent) { super(v); if (NoContent) return; CircleImageViewProfile = v.findViewById(ID_PROFILE); TextViewName = v.findViewById(ID_NAME); TextViewUsername = v.findViewById(ID_USERNAME); TextViewFollow = v.findViewById(ID_FOLLOW); ViewLine = v.findViewById(ID_LINE); } } } private class Struct { String ID; String Name; String Username; String Profile; boolean Follow; Struct(String I, String N, String U, String P, boolean F) { ID = I; Name = N; Username = U; Profile = P; Follow = F; } } }
apache-2.0
hazendaz/assertj-core
src/test/java/org/assertj/core/api/Assertions_assertThat_with_primitive_double_Test.java
1215
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2021 the original author or authors.
 */
package org.assertj.core.api;

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.jupiter.api.Test;

/**
 * Tests for <code>{@link Assertions#assertThat(double)}</code>.
 *
 * @author Alex Ruiz
 */
class Assertions_assertThat_with_primitive_double_Test {

  @Test
  void should_create_Assert() {
    // The factory must always hand back a non-null assertion object.
    AbstractDoubleAssert<?> assertions = Assertions.assertThat(0d);
    assertThat(assertions).isNotNull();
  }

  @Test
  void should_pass_actual() {
    AbstractDoubleAssert<?> assertions = Assertions.assertThat(8d);
    // Double.valueOf replaces the boxing constructor new Double(8d), which is
    // deprecated since Java 9 and marked for removal since Java 16.
    assertThat(assertions.actual).isEqualTo(Double.valueOf(8d));
  }
}
apache-2.0
finmath/finmath-experiments
src/main/java/net/finmath/experiments/montecarlo/interestrates/LIBORMarketModelCalibrationATMTest.java
35405
/* * (c) Copyright Christian P. Fries, Germany. Contact: email@christian-fries.de. * * Created on 16.01.2015 */ package net.finmath.experiments.montecarlo.interestrates; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.time.LocalDate; import java.time.Month; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.Vector; import org.junit.Assert; import net.finmath.exception.CalculationException; import net.finmath.functions.AnalyticFormulas; import net.finmath.marketdata.calibration.ParameterObject; import net.finmath.marketdata.calibration.Solver; import net.finmath.marketdata.model.AnalyticModel; import net.finmath.marketdata.model.AnalyticModelFromCurvesAndVols; import net.finmath.marketdata.model.curves.Curve; import net.finmath.marketdata.model.curves.CurveInterpolation.ExtrapolationMethod; import net.finmath.marketdata.model.curves.CurveInterpolation.InterpolationEntity; import net.finmath.marketdata.model.curves.CurveInterpolation.InterpolationMethod; import net.finmath.marketdata.model.curves.DiscountCurve; import net.finmath.marketdata.model.curves.DiscountCurveFromForwardCurve; import net.finmath.marketdata.model.curves.DiscountCurveInterpolation; import net.finmath.marketdata.model.curves.ForwardCurve; import net.finmath.marketdata.model.curves.ForwardCurveFromDiscountCurve; import net.finmath.marketdata.products.AnalyticProduct; import net.finmath.marketdata.products.Swap; import net.finmath.marketdata.products.SwapAnnuity; import net.finmath.montecarlo.BrownianMotion; import net.finmath.montecarlo.BrownianMotionFromMersenneRandomNumbers; import net.finmath.montecarlo.RandomVariableFactory; import net.finmath.montecarlo.RandomVariableFromArrayFactory; import net.finmath.montecarlo.interestrate.CalibrationProduct; import net.finmath.montecarlo.interestrate.LIBORMarketModel; import 
net.finmath.montecarlo.interestrate.LIBORModelMonteCarloSimulationModel; import net.finmath.montecarlo.interestrate.LIBORMonteCarloSimulationFromLIBORModel; import net.finmath.montecarlo.interestrate.models.LIBORMarketModelFromCovarianceModel; import net.finmath.montecarlo.interestrate.models.covariance.AbstractLIBORCovarianceModelParametric; import net.finmath.montecarlo.interestrate.models.covariance.DisplacedLocalVolatilityModel; import net.finmath.montecarlo.interestrate.models.covariance.LIBORCorrelationModel; import net.finmath.montecarlo.interestrate.models.covariance.LIBORCorrelationModelExponentialDecay; import net.finmath.montecarlo.interestrate.models.covariance.LIBORCovarianceModelFromVolatilityAndCorrelation; import net.finmath.montecarlo.interestrate.models.covariance.LIBORVolatilityModel; import net.finmath.montecarlo.interestrate.models.covariance.LIBORVolatilityModelPiecewiseConstant; import net.finmath.montecarlo.interestrate.products.AbstractTermStructureMonteCarloProduct; import net.finmath.montecarlo.interestrate.products.SwaptionGeneralizedAnalyticApproximation; import net.finmath.montecarlo.interestrate.products.SwaptionSimple; import net.finmath.montecarlo.interestrate.products.TermStructureMonteCarloProduct; import net.finmath.montecarlo.process.EulerSchemeFromProcessModel; import net.finmath.optimizer.LevenbergMarquardt.RegularizationMethod; import net.finmath.optimizer.OptimizerFactory; import net.finmath.optimizer.OptimizerFactoryLevenbergMarquardt; import net.finmath.optimizer.SolverException; import net.finmath.time.Schedule; import net.finmath.time.ScheduleGenerator; import net.finmath.time.TimeDiscretization; import net.finmath.time.TimeDiscretizationFromArray; import net.finmath.time.businessdaycalendar.BusinessdayCalendarExcludingTARGETHolidays; import net.finmath.time.daycount.DayCountConvention_ACT_365; /** * This class does some experiments the LIBOR market model calibration. 
 *
 * @author Christian Fries
 */
public class LIBORMarketModelCalibrationATMTest {

	/** State space / local volatility variant of the LIBOR market model to calibrate. */
	public enum LIBORMarketModelType {
		NORMAL,
		DISPLACED,
	}

	/** How swaption calibration products are valued: brute-force Monte-Carlo or analytic approximation. */
	public enum CalibrationProductType {
		MONTECARLO,
		ANALYTIC,
	}

	// Toggles for detailed console output (per-product results and curve-calibration details).
	private static final boolean isPrintResults = false;
	private static final boolean isPrintResultsForCurves = false;

	// Console formatters: values in percent, parameters plain, deviations in scientific notation.
	private static DecimalFormat formatterValue = new DecimalFormat(" ##0.000%;-##0.000%", new DecimalFormatSymbols(Locale.ENGLISH));
	private static DecimalFormat formatterParam = new DecimalFormat(" #0.000;-#0.000", new DecimalFormatSymbols(Locale.ENGLISH));	// NOTE(review): not referenced in the visible code — possibly used elsewhere or left over.
	private static DecimalFormat formatterDeviation = new DecimalFormat(" 0.00000E00;-0.00000E00", new DecimalFormatSymbols(Locale.ENGLISH));

	// Factory for the random variable implementation (CPU array based; GPU variants commented out below).
	private final RandomVariableFactory randomVariableFactory = new RandomVariableFromArrayFactory();
	//	private final RandomVariableFactory randomVariableFactory = new RandomVariableOpenCLFactory();
	//	private final RandomVariableFactory randomVariableFactory = new RandomVariableCudaFactory();

	// Test configuration, fixed per instance (see constructor).
	private final LIBORMarketModelType modelType;
	private final CalibrationProductType calibrationProductType;
	private final int numberOfPathsCalibration;
	private final int numberOfPathsBenchmark;

	/**
	 * Runs the calibration test for a sequence of configurations with increasing path counts.
	 *
	 * @param args Not used.
	 * @throws Exception Propagates any failure of the calibration runs.
	 */
	public static void main(String[] args) throws Exception {
		/*
		 * You may modify the number of path (e.g. divide all by 10), depending on your machine.
		 * Also: the last run may need more memory: Use -Xmx12G as JVM option to run with 12 GB.
		 */
		(new LIBORMarketModelCalibrationATMTest(LIBORMarketModelType.NORMAL, CalibrationProductType.ANALYTIC, 1000 /* numberOfPathsCalibration */, 1000 /* numberOfPathBenchmark */)).testATMSwaptionCalibration();
		(new LIBORMarketModelCalibrationATMTest(LIBORMarketModelType.NORMAL, CalibrationProductType.MONTECARLO, 1000 /* numberOfPathsCalibration */, 1000 /* numberOfPathBenchmark */)).testATMSwaptionCalibration();
		(new LIBORMarketModelCalibrationATMTest(LIBORMarketModelType.NORMAL, CalibrationProductType.MONTECARLO, 1000 /* numberOfPathsCalibration */, 10000 /* numberOfPathBenchmark */)).testATMSwaptionCalibration();
		(new LIBORMarketModelCalibrationATMTest(LIBORMarketModelType.NORMAL, CalibrationProductType.ANALYTIC, 1000 /* numberOfPathsCalibration */, 10000 /* numberOfPathBenchmark */)).testATMSwaptionCalibration();
		(new LIBORMarketModelCalibrationATMTest(LIBORMarketModelType.NORMAL, CalibrationProductType.MONTECARLO, 10000 /* numberOfPathsCalibration */, 50000 /* numberOfPathBenchmark */)).testATMSwaptionCalibration();
		(new LIBORMarketModelCalibrationATMTest(LIBORMarketModelType.NORMAL, CalibrationProductType.ANALYTIC, 10000 /* numberOfPathsCalibration */, 50000 /* numberOfPathBenchmark */)).testATMSwaptionCalibration();
	}

	/**
	 * Creates the test for a given model type, calibration product type and path counts.
	 *
	 * @param modelType Model state space variant (NORMAL or DISPLACED).
	 * @param calibrationProductType Valuation method for the calibration products.
	 * @param numberOfPathsCalibration Monte-Carlo paths used during calibration.
	 * @param numberOfPathBenchmark Monte-Carlo paths used for the benchmark valuation.
	 */
	public LIBORMarketModelCalibrationATMTest(LIBORMarketModelType modelType, CalibrationProductType calibrationProductType, int numberOfPathsCalibration, int numberOfPathBenchmark) {
		super();
		this.modelType = modelType;
		this.calibrationProductType = calibrationProductType;
		this.numberOfPathsCalibration = numberOfPathsCalibration;
		this.numberOfPathsBenchmark = numberOfPathBenchmark;
	}

	/**
	 * Calibration of swaptions - using Brute force Monte-Carlo or Analytic approximation - depending on the calibrationProductType.
	 *
	 * @throws CalculationException Thrown if the model fails to calibrate.
	 * @throws SolverException Thrown if the solver fails to find a solution.
	 */
	public void testATMSwaptionCalibration() throws CalculationException, SolverException {

		/*
		 * Calibration test
		 */
		System.out.println("Calibration to Swaptions:");
		System.out.println("\tModel..........................: " + modelType);
		System.out.println("\tCalibration products...........: " + calibrationProductType);
		System.out.println("\tNumber of path (calibration)...: " + numberOfPathsCalibration);
		System.out.println("\tNumber of path (benchmarking)..: " + numberOfPathsBenchmark);

		/*
		 * Calibration of rate curves
		 */
		System.out.print("\nCalibration of rate curves...");
		final long millisCurvesStart = System.currentTimeMillis();
		final AnalyticModel curveModel = getCalibratedCurve();
		// Create the forward curve (initial value of the LIBOR market model)
		final ForwardCurve forwardCurve = curveModel.getForwardCurve("ForwardCurveFromDiscountCurve(discountCurve-EUR,6M)");
		final DiscountCurve discountCurve = curveModel.getDiscountCurve("discountCurve-EUR");
		final long millisCurvesEnd = System.currentTimeMillis();
		System.out.println("done (" + (millisCurvesEnd-millisCurvesStart)/1000.0 + " sec).");

		/*
		 * Calibration of model volatilities
		 */
		/*
		 * Create a set of calibration products.
		 */
		final ArrayList<String> calibrationItemNames = new ArrayList<>();
		final ArrayList<CalibrationProduct> calibrationProducts = new ArrayList<>();
		final ArrayList<CalibrationProduct> calibrationBenchmarks = new ArrayList<>();
		final ArrayList<CalibrationProduct> calibrationMonteCarloValue = new ArrayList<>();
		final double swapPeriodLength = 0.5;
		// ATM swaption grid: 14 expiries x 14 tenors, flattened row-wise; the three arrays below are index-aligned.
		final String[] atmExpiries = {
				"1M", "1M", "1M", "1M", "1M", "1M", "1M", "1M", "1M", "1M", "1M", "1M", "1M", "1M",
				"3M", "3M", "3M", "3M", "3M", "3M", "3M", "3M", "3M", "3M", "3M", "3M", "3M", "3M",
				"6M", "6M", "6M", "6M", "6M", "6M", "6M", "6M", "6M", "6M", "6M", "6M", "6M", "6M",
				"1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y", "1Y",
				"2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y", "2Y",
				"3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y", "3Y",
				"4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y", "4Y",
				"5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y", "5Y",
				"7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y", "7Y",
				"10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y", "10Y",
				"15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y", "15Y",
				"20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y", "20Y",
				"25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y", "25Y",
				"30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y", "30Y" };
		final String[] atmTenors = {
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y",
				"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "15Y", "20Y", "25Y", "30Y" };
		// Market-quoted ATM normal (Bachelier) volatilities for the grid above.
		final double[] atmNormalVolatilities = {
				0.00151, 0.00169, 0.0021, 0.00248, 0.00291, 0.00329, 0.00365, 0.004, 0.00437, 0.00466, 0.00527, 0.00571, 0.00604, 0.00625,
				0.0016, 0.00174, 0.00217, 0.00264, 0.00314, 0.00355, 0.00398, 0.00433, 0.00469, 0.00493, 0.00569, 0.00607, 0.00627, 0.00645,
				0.00182, 0.00204, 0.00238, 0.00286, 0.00339, 0.00384, 0.00424, 0.00456, 0.00488, 0.0052, 0.0059, 0.00623, 0.0064, 0.00654,
				0.00205, 0.00235, 0.00272, 0.0032, 0.00368, 0.00406, 0.00447, 0.00484, 0.00515, 0.00544, 0.00602, 0.00629, 0.0064, 0.00646,
				0.00279, 0.00319, 0.0036, 0.00396, 0.00436, 0.00469, 0.00503, 0.0053, 0.00557, 0.00582, 0.00616, 0.00628, 0.00638, 0.00641,
				0.00379, 0.00406, 0.00439, 0.00472, 0.00504, 0.00532, 0.0056, 0.00582, 0.00602, 0.00617, 0.0063, 0.00636, 0.00638, 0.00639,
				0.00471, 0.00489, 0.00511, 0.00539, 0.00563, 0.00583, 0.006, 0.00618, 0.0063, 0.00644, 0.00641, 0.00638, 0.00635, 0.00634,
				0.00544, 0.00557, 0.00572, 0.00591, 0.00604, 0.00617, 0.0063, 0.00641, 0.00651, 0.00661, 0.00645, 0.00634, 0.00627, 0.00624,
				0.00625, 0.00632, 0.00638, 0.00644, 0.0065, 0.00655, 0.00661, 0.00667, 0.00672, 0.00673, 0.00634, 0.00614, 0.00599, 0.00593,
				0.00664, 0.00671, 0.00675, 0.00676, 0.00676, 0.00675, 0.00676, 0.00674, 0.00672, 0.00669, 0.00616, 0.00586, 0.00569, 0.00558,
				0.00647, 0.00651, 0.00651, 0.00651, 0.00652, 0.00649, 0.00645, 0.0064, 0.00637, 0.00631, 0.00576, 0.00534, 0.00512, 0.00495,
				0.00615, 0.0062, 0.00618, 0.00613, 0.0061, 0.00607, 0.00602, 0.00596, 0.00591, 0.00586, 0.00536, 0.00491, 0.00469, 0.0045,
				0.00578, 0.00583, 0.00579, 0.00574, 0.00567, 0.00562, 0.00556, 0.00549, 0.00545, 0.00538, 0.00493, 0.00453, 0.00435, 0.0042,
				0.00542, 0.00547, 0.00539, 0.00532, 0.00522, 0.00516, 0.0051, 0.00504, 0.005, 0.00495, 0.00454, 0.00418, 0.00404, 0.00394 };

		final LocalDate referenceDate = LocalDate.of(2016, Month.SEPTEMBER, 30);
		final BusinessdayCalendarExcludingTARGETHolidays cal = new BusinessdayCalendarExcludingTARGETHolidays();
		final DayCountConvention_ACT_365 modelDC = new DayCountConvention_ACT_365();
		for(int i=0; i<atmNormalVolatilities.length; i++ ) {
			final LocalDate exerciseDate = cal.getDateFromDateAndOffsetCode(referenceDate, atmExpiries[i]);
			final LocalDate tenorEndDate = cal.getDateFromDateAndOffsetCode(exerciseDate, atmTenors[i]);
			double exercise = modelDC.getDaycountFraction(referenceDate, exerciseDate);
			double tenor = modelDC.getDaycountFraction(exerciseDate, tenorEndDate);
			// We consider an idealized tenor grid (alternative: adapt the model grid)
			exercise = Math.round(exercise/0.25)*0.25;
			tenor = Math.round(tenor/0.25)*0.25;
			// Skip swaptions expiring before one year (not included in the calibration set).
			if(exercise < 1.0) {
				continue;
			}
			final int numberOfPeriods = (int)Math.round(tenor / swapPeriodLength);
			final double moneyness = 0.0;
			final double targetVolatility = atmNormalVolatilities[i];
			final String targetVolatilityType = "VOLATILITYNORMAL";
			final double weight = 1.0;
			calibrationItemNames.add(atmExpiries[i]+"\t"+atmTenors[i]);
			// Three parallel product sets: calibration (per configured type), Monte-Carlo benchmark on volatility, Monte-Carlo on price.
			calibrationProducts.add(createCalibrationItem(weight, exercise, swapPeriodLength, numberOfPeriods, moneyness, targetVolatility, targetVolatilityType, forwardCurve, discountCurve, calibrationProductType));
			calibrationBenchmarks.add(createCalibrationItem(weight, exercise, swapPeriodLength, numberOfPeriods, moneyness, targetVolatility, targetVolatilityType, forwardCurve, discountCurve, CalibrationProductType.MONTECARLO));
			calibrationMonteCarloValue.add(createCalibrationItem(weight, exercise, swapPeriodLength, numberOfPeriods, moneyness, targetVolatility, "VALUE", forwardCurve, discountCurve, CalibrationProductType.MONTECARLO));
		}

		/*
		 * Create a simulation time discretization and forward rate curve discretization
		 */
		final double lastTime = 40.0;
		final double dt = 0.25;
		final TimeDiscretization timeDiscretization = new TimeDiscretizationFromArray(0.0, (int) (lastTime / dt), dt);
		final TimeDiscretization liborPeriodDiscretization = timeDiscretization;

		/*
		 * Create covariance model
		 */
		final int numberOfFactors = 1;
		final LIBORVolatilityModel volatilityModel = new LIBORVolatilityModelPiecewiseConstant(timeDiscretization, liborPeriodDiscretization, new TimeDiscretizationFromArray(0.00, 1.0, 2.0, 5.0, 10.0, 20.0, 30.0, 40.0), new TimeDiscretizationFromArray(0.00, 1.0, 2.0, 5.0, 10.0, 20.0, 30.0, 40.0), 0.50 / 100);
		final LIBORCorrelationModel correlationModel = new LIBORCorrelationModelExponentialDecay(timeDiscretization, liborPeriodDiscretization, numberOfFactors, 0.05, false);
		// Create a covariance model
		//AbstractLIBORCovarianceModelParametric covarianceModelParametric = new LIBORCovarianceModelExponentialForm5Param(timeDiscretizationFromArray, liborPeriodDiscretization, numberOfFactors, new double[] { 0.20/100.0, 0.05/100.0, 0.10, 0.05/100.0, 0.10} );
		final AbstractLIBORCovarianceModelParametric covarianceModelFromVolAndCor = new LIBORCovarianceModelFromVolatilityAndCorrelation(timeDiscretization, liborPeriodDiscretization, volatilityModel, correlationModel);
		// Create blended local volatility model with fixed parameter (0=lognormal, > 1 = almost a normal model).
		final AbstractLIBORCovarianceModelParametric covarianceModelDisplaced = new DisplacedLocalVolatilityModel(covarianceModelFromVolAndCor, 1.0/0.25, false /* isCalibrateable */);
		// Select the covariance model according to the configured model type.
		final AbstractLIBORCovarianceModelParametric covarianceModel;
		switch(modelType) {
		case NORMAL:
			covarianceModel = covarianceModelFromVolAndCor;
			break;
		case DISPLACED:
			covarianceModel = covarianceModelDisplaced;
			break;
		default:
			throw new IllegalArgumentException("Unknown " + modelType.getClass().getSimpleName() + ": " + modelType);
		}

		/*
		 * Create Brownian motion used for calibration
		 */
		final BrownianMotion brownianMotion = new net.finmath.montecarlo.BrownianMotionFromMersenneRandomNumbers(timeDiscretization, numberOfFactors, numberOfPathsCalibration, 31415 /* seed */, randomVariableFactory);

		/*
		 * Specify the optimizer used for calibration, set calibration properties (should use our brownianMotion for calibration).
		 */
		final Double accuracy = 1E-7;	// Lower accuracy to reduce runtime of the unit test
		final int maxIterations = 200;
		final int numberOfThreads = 1;
		final double lambda = 0.1;
		final OptimizerFactory optimizerFactory = new OptimizerFactoryLevenbergMarquardt( RegularizationMethod.LEVENBERG, lambda, maxIterations, accuracy, numberOfThreads);
		final double[] parameterStandardDeviation = new double[covarianceModelFromVolAndCor.getParameterAsDouble().length];
		final double[] parameterLowerBound = new double[covarianceModelFromVolAndCor.getParameterAsDouble().length];
		final double[] parameterUpperBound = new double[covarianceModelFromVolAndCor.getParameterAsDouble().length];
		Arrays.fill(parameterStandardDeviation, 0.20/100.0);
		Arrays.fill(parameterLowerBound, 0.0);
		Arrays.fill(parameterUpperBound, Double.POSITIVE_INFINITY);
		// NOTE(review): the three parameter arrays above are filled but not passed on in the visible code — confirm whether they are intentionally unused.

		// Set calibration properties (should use our brownianMotion for calibration - needed to have to right correlation).
		final Map<String, Object> calibrationParameters = Map.of( "brownianMotion", brownianMotion, "optimizerFactory", optimizerFactory, "parameterStep", 1E-4);

		/*
		 * Set model properties
		 */
		final Map<String, Object> properties = Map.of(
				// Choose the simulation measure
				"measure", LIBORMarketModelFromCovarianceModel.Measure.SPOT.name(),
				// Choose normal state space for the Euler scheme (the covariance model above carries a linear local volatility model, such that the resulting model is log-normal).
				"stateSpace", LIBORMarketModelFromCovarianceModel.StateSpace.NORMAL.name(),
				// Calibration parameters (from above)
				"calibrationParameters", calibrationParameters);

		System.out.print("\nCalibration of model volatilities....");
		final long millisCalibrationStart = System.currentTimeMillis();

		/*
		 * Create corresponding LIBOR Market Model
		 */
		final LIBORMarketModel liborMarketModelCalibrated = LIBORMarketModelFromCovarianceModel.of( liborPeriodDiscretization, curveModel, forwardCurve, new DiscountCurveFromForwardCurve(forwardCurve), randomVariableFactory, covarianceModel, calibrationProducts.toArray(new CalibrationProduct[calibrationProducts.size()]), properties);

		final long millisCalibrationEnd = System.currentTimeMillis();
		System.out.println("done (" + (millisCalibrationEnd-millisCalibrationStart)/1000.0 + " sec).");

		if(isPrintResults) {
			System.out.println("\nCalibrated parameters are:");
			final double[] param = ((AbstractLIBORCovarianceModelParametric)((LIBORMarketModelFromCovarianceModel) liborMarketModelCalibrated).getCovarianceModel()).getParameterAsDouble();
			for(final double p : param) {
				System.out.println(p);
			}
		}

		/*
		 * Simulation used to calibrate
		 */
		final EulerSchemeFromProcessModel process = new EulerSchemeFromProcessModel(liborMarketModelCalibrated, brownianMotion);
		final LIBORModelMonteCarloSimulationModel simulationCalibrated = new LIBORMonteCarloSimulationFromLIBORModel(process);

		/*
		 * Benchmark simulation (using the calibrated covariance model)
		 */
		final LIBORMarketModel liborMarketModelBenchmark = LIBORMarketModelFromCovarianceModel.of( liborPeriodDiscretization, curveModel, forwardCurve, new DiscountCurveFromForwardCurve(forwardCurve), randomVariableFactory, liborMarketModelCalibrated.getCovarianceModel(), null, properties);
		final BrownianMotion brownianMotionBenchmark = new BrownianMotionFromMersenneRandomNumbers(timeDiscretization, numberOfFactors, numberOfPathsBenchmark, 31415 /* seed */, randomVariableFactory);
		final EulerSchemeFromProcessModel processBenchmark = new EulerSchemeFromProcessModel(liborMarketModelBenchmark, brownianMotionBenchmark);
		final LIBORModelMonteCarloSimulationModel simulationBenchmark = new LIBORMonteCarloSimulationFromLIBORModel(processBenchmark);

		/*
		 * Check the calibrated model - with the analytic and the Monte-Carlo product
		 */
		double deviationCalibrationSum = 0.0;
		double deviationCalibrationSquaredSum = 0.0;
		double deviationValuationSum = 0.0;
		double deviationValuationSquaredSum = 0.0;
		for (int i = 0; i < calibrationProducts.size(); i++) {
			final TermStructureMonteCarloProduct calibrationProduct = calibrationProducts.get(i).getProduct();
			final TermStructureMonteCarloProduct calibrationBenchmark = calibrationBenchmarks.get(i).getProduct();
			try {
				final double valueModel = calibrationProduct.getValue(simulationCalibrated);
				final double valueBenchmarkModel = calibrationBenchmark.getValue(simulationBenchmark);
				final double valueTarget = calibrationProducts.get(i).getTargetValue().getAverage();
				final double priceModel = calibrationMonteCarloValue.get(i).getProduct().getValue(simulationBenchmark);
				final double priceTarget = calibrationMonteCarloValue.get(i).getTargetValue().getAverage();
				final double errorCalibration = valueModel-valueTarget;
				deviationCalibrationSum += errorCalibration;
				deviationCalibrationSquaredSum += errorCalibration*errorCalibration;
				final double errorValuation = valueBenchmarkModel-valueTarget;
				deviationValuationSum += errorValuation;
				deviationValuationSquaredSum += errorValuation*errorValuation;
				if(isPrintResults) {
					System.out.println(calibrationItemNames.get(i) + "\t Model: " + formatterValue.format(valueModel) + "\t Benchmark: " + formatterValue.format(valueBenchmarkModel) + "\t Target: " + formatterValue.format(valueTarget) + "\t Deviation: " + formatterDeviation.format(valueModel-valueTarget) + "\t Deviation benchmark: " + formatterDeviation.format(valueModel-valueBenchmarkModel) + "\t Price: " + formatterValue.format(priceModel) + "\t Target: " + formatterValue.format(priceTarget));
				}
			}
			catch(final Exception e) {
				// NOTE(review): valuation failures are silently ignored; the product is simply excluded from the deviation sums. Consider logging — confirm this is intentional best-effort behavior.
			}
		}

		// Report mean deviation and RMS error for both the calibration products and the benchmark products.
		final double averageCalibrationDeviation = deviationCalibrationSum/calibrationProducts.size();
		System.out.println("\nValuation using the calibration product (" + calibrationProductType + ") and calibration model (paths=" + numberOfPathsCalibration + "):");
		System.out.println("\tCalibration Mean Deviation:" + formatterValue.format(averageCalibrationDeviation));
		System.out.println("\tCalibration RMS Error.....:" + formatterValue.format(Math.sqrt(deviationCalibrationSquaredSum/calibrationProducts.size())));
		final double averageValuationDeviation = deviationValuationSum/calibrationProducts.size();
		System.out.println("\nValuation using the benchmark product (" + CalibrationProductType.MONTECARLO + ") and benchmark model (paths=" + numberOfPathsBenchmark + "):");
		System.out.println("\tValuation Mean Deviation:" + formatterValue.format(averageValuationDeviation));
		System.out.println("\tValuation RMS Error.....:" + formatterValue.format(Math.sqrt(deviationValuationSquaredSum/calibrationProducts.size())));
		System.out.println();
		System.out.println("_".repeat(120) + "\n");
	}

	/**
	 * Creates a single swaption calibration product for a regular swap schedule.
	 *
	 * @param weight Calibration weight of the product.
	 * @param exerciseDate Exercise time (in year fractions).
	 * @param swapPeriodLength Length of each swap period (in year fractions).
	 * @param numberOfPeriods Number of swap periods.
	 * @param moneyness Offset added to the par swap rate to obtain the strike.
	 * @param targetVolatility Target quote (volatility or price, see targetVolatilityType).
	 * @param targetVolatilityType One of "VOLATILITYNORMAL", "VOLATILITYLOGNORMAL", "VALUE".
	 * @param forwardCurve Forward curve used to determine the par swap rate.
	 * @param discountCurve Discount curve used for annuity / par swap rate.
	 * @param calibrationProductType Whether the product is valued by Monte-Carlo or analytic approximation.
	 * @return The calibration product (product, target value, weight).
	 * @throws CalculationException Thrown if the product construction fails.
	 */
	private CalibrationProduct createCalibrationItem(double weight, double exerciseDate, double swapPeriodLength, int numberOfPeriods, double moneyness, double targetVolatility, String targetVolatilityType, ForwardCurve forwardCurve, DiscountCurve discountCurve, CalibrationProductType calibrationProductType) throws CalculationException {

		// Build a regular schedule starting at the exercise date.
		final double[] fixingDates = new double[numberOfPeriods];
		final double[] paymentDates = new double[numberOfPeriods];
		final double[] swapTenor = new double[numberOfPeriods + 1];
		for (int periodStartIndex = 0; periodStartIndex < numberOfPeriods; periodStartIndex++) {
			fixingDates[periodStartIndex] = exerciseDate + periodStartIndex * swapPeriodLength;
			paymentDates[periodStartIndex] = exerciseDate + (periodStartIndex + 1) * swapPeriodLength;
			swapTenor[periodStartIndex] = exerciseDate + periodStartIndex * swapPeriodLength;
		}
		swapTenor[numberOfPeriods] = exerciseDate + numberOfPeriods * swapPeriodLength;

		// Swaptions swap rate
		final double swaprate = moneyness + getParSwaprate(forwardCurve, discountCurve, swapTenor);

		// Set swap rates for each period
		final double[] swaprates = new double[numberOfPeriods];
		Arrays.fill(swaprates, swaprate);

		/*
		 * We use Monte-Carlo calibration on implied volatility.
		 * Alternatively you may change here to Monte-Carlo valuation on price or
		 * use an analytic approximation formula, etc.
		 */
		Double targetValue;
		switch(targetVolatilityType) {
		case "VOLATILITYNORMAL":
		case "VOLATILITYLOGNORMAL":
			// Target is quoted directly as a volatility.
			targetValue = targetVolatility;
			break;
		case "VALUE":
			// Target is the Bachelier price implied by the normal volatility.
			targetValue = AnalyticFormulas.bachelierOptionValue(swaprate, targetVolatility, fixingDates[0], swaprate, SwapAnnuity.getSwapAnnuity(new TimeDiscretizationFromArray(swapTenor), discountCurve));
			break;
		default:
			throw new IllegalArgumentException("Unknown targetVolatilityType " + targetVolatilityType);
		}

		AbstractTermStructureMonteCarloProduct product;
		switch(calibrationProductType) {
		case MONTECARLO:
			product = new SwaptionSimple(swaprate, swapTenor, SwaptionSimple.ValueUnit.valueOf(targetVolatilityType));
			break;
		case ANALYTIC:
			product = new SwaptionGeneralizedAnalyticApproximation( swaprate, swapTenor, SwaptionGeneralizedAnalyticApproximation.ValueUnit.VOLATILITY, SwaptionGeneralizedAnalyticApproximation.StateSpace.NORMAL);
			break;
		default:
			throw new IllegalArgumentException("Unknown producType " + calibrationProductType);
		}
		return new CalibrationProduct(product, targetValue, weight);
	}

	/**
	 * Calibrates the EUR single-curve setup (discount curve, with a 6M forward curve derived from it)
	 * to a fixed set of swap par rates as of 2016-09-30.
	 *
	 * @return The calibrated analytic model holding discount and forward curve.
	 * @throws SolverException Thrown if the curve calibration solver fails.
	 */
	public AnalyticModel getCalibratedCurve() throws SolverException {
		// Swap market data: maturities, leg conventions and quoted par rates (index-aligned arrays).
		final String[] maturity = { "6M", "1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y", "11Y", "12Y", "15Y", "20Y", "25Y", "30Y", "35Y", "40Y", "45Y", "50Y" };
		final String[] frequency = { "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual", "annual" };
		final String[] frequencyFloat = { "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual", "semiannual" };
		final String[] daycountConventions = { "ACT/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360", "E30/360" };
		final String[] daycountConventionsFloat = { "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360", "ACT/360" };
		final double[] rates = { -0.00216 ,-0.00208 ,-0.00222 ,-0.00216 ,-0.0019 ,-0.0014 ,-0.00072 ,0.00011 ,0.00103 ,0.00196 ,0.00285 ,0.00367 ,0.0044 ,0.00604 ,0.00733 ,0.00767 ,0.00773 ,0.00765 ,0.00752 ,0.007138 ,0.007 };

		final HashMap<String, Object> parameters = new HashMap<>();
		parameters.put("referenceDate", LocalDate.of(2016, Month.SEPTEMBER, 30));
		parameters.put("currency", "EUR");
		parameters.put("forwardCurveTenor", "6M");
		parameters.put("maturities", maturity);
		parameters.put("fixLegFrequencies", frequency);
		parameters.put("floatLegFrequencies", frequencyFloat);
		parameters.put("fixLegDaycountConventions", daycountConventions);
		parameters.put("floatLegDaycountConventions", daycountConventionsFloat);
		parameters.put("rates", rates);
		return getCalibratedCurve(null, parameters);
	}

	/**
	 * Performs the actual curve calibration from the given parameter map.
	 *
	 * @param model2 Unused (kept for interface compatibility). NOTE(review): not referenced below — confirm.
	 * @param parameters Map with reference date, currency, tenor, leg conventions and par rates (see caller).
	 * @return The calibrated analytic model.
	 * @throws SolverException Thrown if the calibration solver fails.
	 */
	private static AnalyticModel getCalibratedCurve(final AnalyticModel model2, final Map<String, Object> parameters) throws SolverException {
		if(isPrintResultsForCurves) {
			System.out.println("Calibration of rate curves:");
		}

		// Unpack input parameters.
		final LocalDate referenceDate = (LocalDate) parameters.get("referenceDate");
		final String currency = (String) parameters.get("currency");
		final String forwardCurveTenor = (String) parameters.get("forwardCurveTenor");
		final String[] maturities = (String[]) parameters.get("maturities");
		final String[] frequency = (String[]) parameters.get("fixLegFrequencies");
		final String[] frequencyFloat = (String[]) parameters.get("floatLegFrequencies");
		final String[] daycountConventions = (String[]) parameters.get("fixLegDaycountConventions");
		final String[] daycountConventionsFloat = (String[]) parameters.get("floatLegDaycountConventions");
		final double[] rates = (double[]) parameters.get("rates");

		// Sanity check: all market data arrays must be index-aligned.
		Assert.assertEquals(maturities.length, frequency.length);
		Assert.assertEquals(maturities.length, daycountConventions.length);
		Assert.assertEquals(maturities.length, rates.length);
		Assert.assertEquals(frequency.length, frequencyFloat.length);
		Assert.assertEquals(daycountConventions.length, daycountConventionsFloat.length);

		final int spotOffsetDays = 2;
		final String forwardStartPeriod = "0D";
		final String curveNameDiscount = "discountCurve-" + currency;

		/*
		 * We create a forward curve by referencing the same discount curve, since
		 * this is a single curve setup.
		 *
		 * Note that using an independent NSS forward curve with its own NSS parameters
		 * would result in a problem where both, the forward curve and the discount curve
		 * have free parameters.
		 */
		final ForwardCurve forwardCurve = new ForwardCurveFromDiscountCurve(curveNameDiscount, referenceDate, forwardCurveTenor);

		// Create a collection of objective functions (calibration products)
		final Vector<AnalyticProduct> calibrationProducts = new Vector<>();
		final double[] curveMaturities = new double[rates.length+1];
		final double[] curveValue = new double[rates.length+1];
		final boolean[] curveIsParameter = new boolean[rates.length+1];
		// Pin the discount factor at t=0 to 1.0; it is not a free parameter.
		curveMaturities[0] = 0.0;
		curveValue[0] = 1.0;
		curveIsParameter[0] = false;
		for(int i=0; i<rates.length; i++) {
			final Schedule schedulePay = ScheduleGenerator.createScheduleFromConventions(referenceDate, spotOffsetDays, forwardStartPeriod, maturities[i], frequency[i], daycountConventions[i], "first", "following", new BusinessdayCalendarExcludingTARGETHolidays(), -2, 0);
			final Schedule scheduleRec = ScheduleGenerator.createScheduleFromConventions(referenceDate, spotOffsetDays, forwardStartPeriod, maturities[i], frequencyFloat[i], daycountConventionsFloat[i], "first", "following", new BusinessdayCalendarExcludingTARGETHolidays(), -2, 0);
			// Curve pillar at the last payment of either leg; initial discount factor guess 1.0.
			curveMaturities[i+1] = Math.max(schedulePay.getPayment(schedulePay.getNumberOfPeriods()-1),scheduleRec.getPayment(scheduleRec.getNumberOfPeriods()-1));
			curveValue[i+1] = 1.0;
			curveIsParameter[i+1] = true;
			calibrationProducts.add(new Swap(schedulePay, null, rates[i], curveNameDiscount, scheduleRec, forwardCurve.getName(), 0.0, curveNameDiscount));
		}

		final InterpolationMethod interpolationMethod = InterpolationMethod.LINEAR;

		// Create a discount curve
		final DiscountCurveInterpolation discountCurveInterpolation = DiscountCurveInterpolation.createDiscountCurveFromDiscountFactors(
				curveNameDiscount /* name */,
				referenceDate /* referenceDate */,
				curveMaturities /* maturities */,
				curveValue /* discount factors */,
				curveIsParameter,
				interpolationMethod ,
				ExtrapolationMethod.CONSTANT,
				InterpolationEntity.LOG_OF_VALUE
				);

		/*
		 * Model consists of the two curves, but only one of them provides free parameters.
		 */
		AnalyticModel model = new AnalyticModelFromCurvesAndVols(new Curve[] { discountCurveInterpolation, forwardCurve });

		/*
		 * Create a collection of curves to calibrate
		 */
		final Set<ParameterObject> curvesToCalibrate = new HashSet<>();
		curvesToCalibrate.add(discountCurveInterpolation);

		/*
		 * Calibrate the curve
		 */
		final Solver solver = new Solver(model, calibrationProducts, 0.0, 1E-4 /* target accuracy */);
		final AnalyticModel calibratedModel = solver.getCalibratedModel(curvesToCalibrate);
		if(isPrintResultsForCurves) {
			System.out.println("Solver reported acccurary....: " + solver.getAccuracy());
		}
		Assert.assertEquals("Calibration accurarcy", 0.0, solver.getAccuracy(), 1E-3);

		// Get best parameters
		final double[] parametersBest = calibratedModel.getDiscountCurve(discountCurveInterpolation.getName()).getParameter();
		// NOTE(review): parametersBest is fetched but not used below — confirm whether intentional.

		// Test calibration
		model = calibratedModel;
		// Independent re-check: each calibration swap should price to (approximately) zero.
		double squaredErrorSum = 0.0;
		for(final AnalyticProduct c : calibrationProducts) {
			final double value = c.getValue(0.0, model);
			final double valueTaget = 0.0;
			final double error = value - valueTaget;
			squaredErrorSum += error*error;
		}
		final double rms = Math.sqrt(squaredErrorSum/calibrationProducts.size());
		if(isPrintResultsForCurves) {
			System.out.println("Independent checked acccurary: " + rms);
		}

		if(isPrintResultsForCurves && isPrintResults) {
			System.out.println("Calibrated discount curve: ");
			for(int i=0; i<curveMaturities.length; i++) {
				final double maturity = curveMaturities[i];
				System.out.println(maturity + "\t" + calibratedModel.getDiscountCurve(discountCurveInterpolation.getName()).getDiscountFactor(maturity));
			}
		}
		return model;
	}

	/**
	 * Returns the par swap rate for the given (identical pay/receive) tenor discretization.
	 *
	 * @param forwardCurve Forward curve for the floating leg.
	 * @param discountCurve Discount curve for both legs.
	 * @param swapTenor Tenor grid of the swap (period start/end times).
	 * @return The forward par swap rate.
	 */
	private static double getParSwaprate(final ForwardCurve forwardCurve, final DiscountCurve discountCurve, final double[] swapTenor) {
		return net.finmath.marketdata.products.Swap.getForwardSwapRate(new TimeDiscretizationFromArray(swapTenor), new TimeDiscretizationFromArray(swapTenor), forwardCurve, discountCurve);
	}
}
apache-2.0
skalscheuer/jvaultconnector
src/test/java/de/stklcode/jvault/connector/model/response/HealthResponseTest.java
3780
/* * Copyright 2016-2020 Stefan Kalscheuer * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.stklcode.jvault.connector.model.response; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.jupiter.api.Test; import java.io.IOException; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.notNullValue; import static org.junit.jupiter.api.Assertions.fail; /** * JUnit Test for {@link HealthResponse} model. 
* * @author Stefan Kalscheuer * @since 0.7.0 */ public class HealthResponseTest { private static final String CLUSTER_ID = "c9abceea-4f46-4dab-a688-5ce55f89e228"; private static final String CLUSTER_NAME = "vault-cluster-5515c810"; private static final String VERSION = "0.9.2"; private static final Long SERVER_TIME_UTC = 1469555798L; private static final Boolean STANDBY = false; private static final Boolean SEALED = false; private static final Boolean INITIALIZED = true; private static final Boolean PERF_STANDBY = false; private static final String REPL_PERF_MODE = "disabled"; private static final String REPL_DR_MODE = "disabled"; private static final String RES_JSON = "{\n" + " \"cluster_id\": \"" + CLUSTER_ID + "\",\n" + " \"cluster_name\": \"" + CLUSTER_NAME + "\",\n" + " \"version\": \"" + VERSION + "\",\n" + " \"server_time_utc\": " + SERVER_TIME_UTC + ",\n" + " \"standby\": " + STANDBY + ",\n" + " \"sealed\": " + SEALED + ",\n" + " \"initialized\": " + INITIALIZED + ",\n" + " \"replication_perf_mode\": \"" + REPL_PERF_MODE + "\",\n" + " \"replication_dr_mode\": \"" + REPL_DR_MODE + "\",\n" + " \"performance_standby\": " + PERF_STANDBY + "\n" + "}"; /** * Test creation from JSON value as returned by Vault (JSON example copied from Vault documentation). 
*/ @Test public void jsonRoundtrip() { try { HealthResponse res = new ObjectMapper().readValue(RES_JSON, HealthResponse.class); assertThat("Parsed response is NULL", res, is(notNullValue())); assertThat("Incorrect cluster ID", res.getClusterID(), is(CLUSTER_ID)); assertThat("Incorrect cluster name", res.getClusterName(), is(CLUSTER_NAME)); assertThat("Incorrect version", res.getVersion(), is(VERSION)); assertThat("Incorrect server time", res.getServerTimeUTC(), is(SERVER_TIME_UTC)); assertThat("Incorrect standby state", res.isStandby(), is(STANDBY)); assertThat("Incorrect seal state", res.isSealed(), is(SEALED)); assertThat("Incorrect initialization state", res.isInitialized(), is(INITIALIZED)); assertThat("Incorrect performance standby state", res.isPerformanceStandby(), is(PERF_STANDBY)); assertThat("Incorrect replication perf mode", res.getReplicationPerfMode(), is(REPL_PERF_MODE)); assertThat("Incorrect replication DR mode", res.getReplicationDrMode(), is(REPL_DR_MODE)); } catch (IOException e) { fail("Health deserialization failed: " + e.getMessage()); } } }
apache-2.0
gstevey/gradle
subprojects/ide/src/main/java/org/gradle/plugins/ide/idea/IdeaPlugin.java
25003
/* * Copyright 2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.plugins.ide.idea; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.gradle.api.Action; import org.gradle.api.JavaVersion; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.ConfigurationContainer; import org.gradle.api.artifacts.PublishArtifact; import org.gradle.api.artifacts.component.ProjectComponentIdentifier; import org.gradle.api.file.FileCollection; import org.gradle.api.internal.ConventionMapping; import org.gradle.api.internal.IConventionAware; import org.gradle.api.internal.artifacts.ivyservice.projectmodule.LocalComponentRegistry; import org.gradle.api.internal.artifacts.ivyservice.projectmodule.ProjectLocalComponentProvider; import org.gradle.api.internal.artifacts.publish.DefaultPublishArtifact; import org.gradle.api.internal.project.ProjectInternal; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.plugins.JavaPluginConvention; import org.gradle.api.plugins.WarPlugin; import org.gradle.api.plugins.scala.ScalaBasePlugin; import org.gradle.api.tasks.SourceSetContainer; import 
org.gradle.api.tasks.TaskDependency; import org.gradle.initialization.ProjectPathRegistry; import org.gradle.internal.component.local.model.LocalComponentArtifactMetadata; import org.gradle.internal.component.local.model.PublishArtifactLocalArtifactMetadata; import org.gradle.internal.reflect.Instantiator; import org.gradle.internal.service.ServiceRegistry; import org.gradle.language.scala.plugins.ScalaLanguagePlugin; import org.gradle.plugins.ide.api.XmlFileContentMerger; import org.gradle.plugins.ide.idea.internal.IdeaScalaConfigurer; import org.gradle.plugins.ide.idea.model.IdeaLanguageLevel; import org.gradle.plugins.ide.idea.model.IdeaModel; import org.gradle.plugins.ide.idea.model.IdeaModule; import org.gradle.plugins.ide.idea.model.IdeaModuleIml; import org.gradle.plugins.ide.idea.model.IdeaProject; import org.gradle.plugins.ide.idea.model.IdeaWorkspace; import org.gradle.plugins.ide.idea.model.PathFactory; import org.gradle.plugins.ide.idea.model.internal.GeneratedIdeaScope; import org.gradle.plugins.ide.idea.model.internal.IdeaDependenciesProvider; import org.gradle.plugins.ide.internal.IdePlugin; import org.gradle.plugins.ide.internal.configurer.UniqueProjectNameProvider; import org.gradle.util.Path; import org.gradle.util.SingleMessageLogger; import javax.inject.Inject; import java.io.File; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import static org.gradle.internal.component.local.model.DefaultProjectComponentIdentifier.newProjectId; /** * Adds a GenerateIdeaModule task. When applied to a root project, also adds a GenerateIdeaProject task. For projects that have the Java plugin applied, the tasks receive additional Java-specific * configuration. 
*/ public class IdeaPlugin extends IdePlugin { private static final Predicate<Project> HAS_IDEA_AND_JAVA_PLUGINS = new Predicate<Project>() { @Override public boolean apply(Project project) { return project.getPlugins().hasPlugin(IdeaPlugin.class) && project.getPlugins().hasPlugin(JavaBasePlugin.class); } }; public static final Function<Project, JavaVersion> SOURCE_COMPATIBILITY = new Function<Project, JavaVersion>() { @Override public JavaVersion apply(Project p) { return p.getConvention().getPlugin(JavaPluginConvention.class).getSourceCompatibility(); } }; public static final Function<Project, JavaVersion> TARGET_COMPATIBILITY = new Function<Project, JavaVersion>() { @Override public JavaVersion apply(Project p) { return p.getConvention().getPlugin(JavaPluginConvention.class).getTargetCompatibility(); } }; private final Instantiator instantiator; private IdeaModel ideaModel; private List<Project> allJavaProjects; private final UniqueProjectNameProvider uniqueProjectNameProvider; @Inject public IdeaPlugin(Instantiator instantiator, UniqueProjectNameProvider uniqueProjectNameProvider) { this.instantiator = instantiator; this.uniqueProjectNameProvider = uniqueProjectNameProvider; } public IdeaModel getModel() { return ideaModel; } @Override protected String getLifecycleTaskName() { return "idea"; } @Override protected void onApply(final Project project) { getLifecycleTask().setDescription("Generates IDEA project files (IML, IPR, IWS)"); getCleanTask().setDescription("Cleans IDEA project files (IML, IPR)"); ideaModel = project.getExtensions().create("idea", IdeaModel.class); configureIdeaWorkspace(project); configureIdeaProject(project); configureIdeaModule(project); configureForJavaPlugin(project); configureForWarPlugin(project); configureForScalaPlugin(); registerImlArtifact(project); linkCompositeBuildDependencies((ProjectInternal) project); } // No one should be calling this. 
@Deprecated public void performPostEvaluationActions() { SingleMessageLogger.nagUserOfDiscontinuedMethod("performPostEvaluationActions"); } private void registerImlArtifact(Project project) { ProjectLocalComponentProvider projectComponentProvider = ((ProjectInternal) project).getServices().get(ProjectLocalComponentProvider.class); ProjectComponentIdentifier projectId = newProjectId(project); projectComponentProvider.registerAdditionalArtifact(projectId, createImlArtifact(projectId, project)); } private static LocalComponentArtifactMetadata createImlArtifact(ProjectComponentIdentifier projectId, Project project) { IdeaModule module = project.getExtensions().getByType(IdeaModel.class).getModule(); Task byName = project.getTasks().getByName("ideaModule"); PublishArtifact publishArtifact = new ImlArtifact(module, byName); return new PublishArtifactLocalArtifactMetadata(projectId, publishArtifact); } private void configureIdeaWorkspace(final Project project) { if (isRoot(project)) { GenerateIdeaWorkspace task = project.getTasks().create("ideaWorkspace", GenerateIdeaWorkspace.class); task.setDescription("Generates an IDEA workspace file (IWS)"); IdeaWorkspace workspace = new IdeaWorkspace(); workspace.setIws(new XmlFileContentMerger(task.getXmlTransformer())); task.setWorkspace(workspace); ideaModel.setWorkspace(task.getWorkspace()); task.setOutputFile(new File(project.getProjectDir(), project.getName() + ".iws")); addWorker(task, false); } } private void configureIdeaProject(final Project project) { if (isRoot(project)) { final GenerateIdeaProject task = project.getTasks().create("ideaProject", GenerateIdeaProject.class); task.setDescription("Generates IDEA project file (IPR)"); XmlFileContentMerger ipr = new XmlFileContentMerger(task.getXmlTransformer()); IdeaProject ideaProject = instantiator.newInstance(IdeaProject.class, project, ipr); task.setIdeaProject(ideaProject); ideaModel.setProject(ideaProject); ideaProject.setOutputFile(new File(project.getProjectDir(), 
project.getName() + ".ipr")); ConventionMapping conventionMapping = ((IConventionAware) ideaProject).getConventionMapping(); conventionMapping.map("jdkName", new Callable<String>() { @Override public String call() throws Exception { return JavaVersion.current().toString(); } }); conventionMapping.map("languageLevel", new Callable<IdeaLanguageLevel>() { @Override public IdeaLanguageLevel call() throws Exception { JavaVersion maxSourceCompatibility = getMaxJavaModuleCompatibilityVersionFor(SOURCE_COMPATIBILITY); return new IdeaLanguageLevel(maxSourceCompatibility); } }); conventionMapping.map("targetBytecodeVersion", new Callable<JavaVersion>() { @Override public JavaVersion call() throws Exception { return getMaxJavaModuleCompatibilityVersionFor(TARGET_COMPATIBILITY); } }); ideaProject.setWildcards(Sets.newHashSet("!?*.class", "!?*.scala", "!?*.groovy", "!?*.java")); conventionMapping.map("modules", new Callable<List<IdeaModule>>() { @Override public List<IdeaModule> call() throws Exception { return Lists.newArrayList(Iterables.transform(Sets.filter(project.getRootProject().getAllprojects(), new Predicate<Project>() { @Override public boolean apply(Project p) { return p.getPlugins().hasPlugin(IdeaPlugin.class); } }), new Function<Project, IdeaModule>() { @Override public IdeaModule apply(Project p) { return ideaModelFor(p).getModule(); } })); } }); conventionMapping.map("pathFactory", new Callable<PathFactory>() { @Override public PathFactory call() throws Exception { return new PathFactory().addPathVariable("PROJECT_DIR", task.getOutputFile().getParentFile()); } }); addWorker(task); } } private static IdeaModel ideaModelFor(Project project) { return project.getExtensions().getByType(IdeaModel.class); } private JavaVersion getMaxJavaModuleCompatibilityVersionFor(Function<Project, JavaVersion> toJavaVersion) { List<Project> allJavaProjects = getAllJavaProjects(); if (allJavaProjects.isEmpty()) { return JavaVersion.VERSION_1_6; } else { return 
Collections.max(Lists.transform(allJavaProjects, toJavaVersion)); } } private List<Project> getAllJavaProjects() { if (allJavaProjects != null) { // cache result because it is pretty expensive to compute return allJavaProjects; } allJavaProjects = Lists.newArrayList(Iterables.filter(project.getRootProject().getAllprojects(), HAS_IDEA_AND_JAVA_PLUGINS)); return allJavaProjects; } private void configureIdeaModule(final Project project) { final GenerateIdeaModule task = project.getTasks().create("ideaModule", GenerateIdeaModule.class); task.setDescription("Generates IDEA module files (IML)"); IdeaModuleIml iml = new IdeaModuleIml(task.getXmlTransformer(), project.getProjectDir()); final IdeaModule module = instantiator.newInstance(IdeaModule.class, project, iml); task.setModule(module); ideaModel.setModule(module); final String defaultModuleName = uniqueProjectNameProvider.getUniqueName(project); module.setName(defaultModuleName); ConventionMapping conventionMapping = ((IConventionAware) module).getConventionMapping(); conventionMapping.map("sourceDirs", new Callable<Set<File>>() { @Override public Set<File> call() throws Exception { return Sets.newHashSet(); } }); conventionMapping.map("contentRoot", new Callable<File>() { @Override public File call() throws Exception { return project.getProjectDir(); } }); conventionMapping.map("testSourceDirs", new Callable<Set<File>>() { @Override public Set<File> call() throws Exception { return Sets.newHashSet(); } }); conventionMapping.map("excludeDirs", new Callable<Set<File>>() { @Override public Set<File> call() throws Exception { return Sets.newHashSet(project.getBuildDir(), project.file(".gradle")); } }); conventionMapping.map("pathFactory", new Callable<PathFactory>() { @Override public PathFactory call() throws Exception { final PathFactory factory = new PathFactory(); factory.addPathVariable("MODULE_DIR", task.getOutputFile().getParentFile()); for (Map.Entry<String, File> entry : module.getPathVariables().entrySet()) { 
factory.addPathVariable(entry.getKey(), entry.getValue()); } return factory; } }); addWorker(task); } private void configureForJavaPlugin(final Project project) { project.getPlugins().withType(JavaPlugin.class, new Action<JavaPlugin>() { @Override public void execute(JavaPlugin javaPlugin) { configureIdeaModuleForJava(project); } }); } private void configureForWarPlugin(final Project project) { project.getPlugins().withType(WarPlugin.class, new Action<WarPlugin>() { @Override public void execute(WarPlugin warPlugin) { configureIdeaModuleForWar(project); } }); } private void configureIdeaModuleForJava(final Project project) { project.getTasks().withType(GenerateIdeaModule.class, new Action<GenerateIdeaModule>() { @Override public void execute(GenerateIdeaModule ideaModule) { // Defaults setupScopes(ideaModule); // Convention ConventionMapping convention = ((IConventionAware) ideaModule.getModule()).getConventionMapping(); convention.map("sourceDirs", new Callable<Set<File>>() { @Override public Set<File> call() throws Exception { SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); return sourceSets.getByName("main").getAllSource().getSrcDirs(); } }); convention.map("testSourceDirs", new Callable<Set<File>>() { @Override public Set<File> call() throws Exception { SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); return sourceSets.getByName("test").getAllSource().getSrcDirs(); } }); convention.map("singleEntryLibraries", new Callable<Map<String, FileCollection>>() { @Override public Map<String, FileCollection> call() throws Exception { SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); LinkedHashMap<String, FileCollection> map = new LinkedHashMap<String, FileCollection>(2); map.put("RUNTIME", sourceSets.getByName("main").getOutput().getDirs()); map.put("TEST", 
sourceSets.getByName("test").getOutput().getDirs()); return map; } }); convention.map("targetBytecodeVersion", new Callable<JavaVersion>() { @Override public JavaVersion call() throws Exception { JavaVersion moduleTargetBytecodeLevel = project.getConvention().getPlugin(JavaPluginConvention.class).getTargetCompatibility(); return includeModuleBytecodeLevelOverride(project.getRootProject(), moduleTargetBytecodeLevel) ? moduleTargetBytecodeLevel : null; } }); convention.map("languageLevel", new Callable<IdeaLanguageLevel>() { @Override public IdeaLanguageLevel call() throws Exception { IdeaLanguageLevel moduleLanguageLevel = new IdeaLanguageLevel(project.getConvention().getPlugin(JavaPluginConvention.class).getSourceCompatibility()); return includeModuleLanguageLevelOverride(project.getRootProject(), moduleLanguageLevel) ? moduleLanguageLevel : null; } }); // Dependencies ideaModule.dependsOn(new Callable<FileCollection>() { @Override public FileCollection call() throws Exception { SourceSetContainer sourceSets = project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); return sourceSets.getByName("main").getOutput().getDirs().plus(sourceSets.getByName("test").getOutput().getDirs()); } }); } }); } private void setupScopes(GenerateIdeaModule ideaModule) { Map<String, Map<String, Collection<Configuration>>> scopes = Maps.newLinkedHashMap(); for (GeneratedIdeaScope scope : GeneratedIdeaScope.values()) { Map<String, Collection<Configuration>> plusMinus = Maps.newLinkedHashMap(); plusMinus.put(IdeaDependenciesProvider.SCOPE_PLUS, Lists.<Configuration>newArrayList()); plusMinus.put(IdeaDependenciesProvider.SCOPE_MINUS, Lists.<Configuration>newArrayList()); scopes.put(scope.name(), plusMinus); } Project project = ideaModule.getProject(); ConfigurationContainer configurations = project.getConfigurations(); Collection<Configuration> provided = scopes.get(GeneratedIdeaScope.PROVIDED.name()).get(IdeaDependenciesProvider.SCOPE_PLUS); 
provided.add(configurations.getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME)); Collection<Configuration> runtime = scopes.get(GeneratedIdeaScope.RUNTIME.name()).get(IdeaDependenciesProvider.SCOPE_PLUS); runtime.add(configurations.getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)); Collection<Configuration> test = scopes.get(GeneratedIdeaScope.TEST.name()).get(IdeaDependenciesProvider.SCOPE_PLUS); test.add(configurations.getByName(JavaPlugin.TEST_COMPILE_CLASSPATH_CONFIGURATION_NAME)); test.add(configurations.getByName(JavaPlugin.TEST_RUNTIME_CLASSPATH_CONFIGURATION_NAME)); ideaModule.getModule().setScopes(scopes); } private void configureIdeaModuleForWar(final Project project) { project.getTasks().withType(GenerateIdeaModule.class, new Action<GenerateIdeaModule>() { @Override public void execute(GenerateIdeaModule ideaModule) { ConfigurationContainer configurations = project.getConfigurations(); Configuration providedRuntime = configurations.getByName(WarPlugin.PROVIDED_RUNTIME_CONFIGURATION_NAME); Collection<Configuration> providedPlus = ideaModule.getModule().getScopes().get(GeneratedIdeaScope.PROVIDED.name()).get(IdeaDependenciesProvider.SCOPE_PLUS); providedPlus.add(providedRuntime); Collection<Configuration> runtimeMinus = ideaModule.getModule().getScopes().get(GeneratedIdeaScope.RUNTIME.name()).get(IdeaDependenciesProvider.SCOPE_MINUS); runtimeMinus.add(providedRuntime); Collection<Configuration> testMinus = ideaModule.getModule().getScopes().get(GeneratedIdeaScope.TEST.name()).get(IdeaDependenciesProvider.SCOPE_MINUS); testMinus.add(providedRuntime); } }); } private static boolean includeModuleBytecodeLevelOverride(Project rootProject, JavaVersion moduleTargetBytecodeLevel) { if (!rootProject.getPlugins().hasPlugin(IdeaPlugin.class)) { return true; } IdeaProject ideaProject = ideaModelFor(rootProject).getProject(); return !moduleTargetBytecodeLevel.equals(ideaProject.getTargetBytecodeVersion()); } private static boolean 
includeModuleLanguageLevelOverride(Project rootProject, IdeaLanguageLevel moduleLanguageLevel) { if (!rootProject.getPlugins().hasPlugin(IdeaPlugin.class)) { return true; } IdeaProject ideaProject = ideaModelFor(rootProject).getProject(); return !moduleLanguageLevel.equals(ideaProject.getLanguageLevel()); } private void configureForScalaPlugin() { project.getPlugins().withType(ScalaBasePlugin.class, new Action<ScalaBasePlugin>() { @Override public void execute(ScalaBasePlugin scalaBasePlugin) { ideaModuleDependsOnRoot(); } }); project.getPlugins().withType(ScalaLanguagePlugin.class, new Action<ScalaLanguagePlugin>() { @Override public void execute(ScalaLanguagePlugin scalaLanguagePlugin) { ideaModuleDependsOnRoot(); } }); if (isRoot(project)) { new IdeaScalaConfigurer(project).configure(); } } private void ideaModuleDependsOnRoot() { // see IdeaScalaConfigurer which requires the ipr to be generated first project.getTasks().findByName("ideaModule").dependsOn(project.getRootProject().getTasks().findByName("ideaProject")); } private void linkCompositeBuildDependencies(final ProjectInternal project) { if (isRoot(project)) { getLifecycleTask().dependsOn(new Callable<List<TaskDependency>>() { @Override public List<TaskDependency> call() throws Exception { return allImlArtifactsInComposite(project); } }); } } private List<TaskDependency> allImlArtifactsInComposite(ProjectInternal project) { List<TaskDependency> dependencies = Lists.newArrayList(); ServiceRegistry services = project.getServices(); ProjectPathRegistry projectPathRegistry = services.get(ProjectPathRegistry.class); LocalComponentRegistry localComponentRegistry = services.get(LocalComponentRegistry.class); ProjectComponentIdentifier thisProjectId = projectPathRegistry.getProjectComponentIdentifier(project.getIdentityPath()); for (Path projectPath : projectPathRegistry.getAllProjectPaths()) { final ProjectComponentIdentifier otherProjectId = projectPathRegistry.getProjectComponentIdentifier(projectPath); if 
(thisProjectId.getBuild().equals(otherProjectId.getBuild())) { // IDEA Module for project in current build: handled via `modules` model elements. continue; } LocalComponentArtifactMetadata imlArtifact = localComponentRegistry.findAdditionalArtifact(otherProjectId, "iml"); if (imlArtifact != null) { dependencies.add(imlArtifact.getBuildDependencies()); } } return dependencies; } private static boolean isRoot(Project project) { return project.getParent() == null; } private static class ImlArtifact extends DefaultPublishArtifact { private final IdeaModule module; private final File projectDir; public ImlArtifact(IdeaModule module, Object... tasks) { super(null, "iml", "iml", null, null, null, tasks); this.module = module; this.projectDir = module.getProject().getProjectDir(); } @Override public String getName() { return module.getName(); } @Override public File getFile() { return new File(projectDir, getName() + ".iml"); } } }
apache-2.0
gstevey/gradle
subprojects/logging/src/main/java/org/gradle/internal/logging/console/DefaultTextArea.java
3081
/*
 * Copyright 2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.internal.logging.console;

import org.gradle.api.Action;
import org.gradle.internal.logging.text.AbstractLineChoppingStyledTextOutput;

/**
 * A {@link TextArea} that appends styled text to the console through an
 * {@link AnsiExecutor}, tracking its own write position ({@code writePos}).
 * Line chopping (splitting input at end-of-line sequences) is inherited from
 * {@link AbstractLineChoppingStyledTextOutput}; this class only renders the
 * resulting line fragments and newlines.
 */
public class DefaultTextArea extends AbstractLineChoppingStyledTextOutput implements TextArea {
    // Shared, stateless action that emits a single newline; reused for every doEndLine call.
    private static final Action<AnsiContext> NEW_LINE_ACTION = new Action<AnsiContext>() {
        @Override
        public void execute(AnsiContext ansi) {
            ansi.newLine();
        }
    };
    // Tab stops are assumed to occur every 8 columns when expanding '\t' to spaces.
    private static final int CHARS_PER_TAB_STOP = 8;
    // Current append position within the console; mutated by newLineAdjustment()
    // and passed to the executor so output lands at the right coordinates.
    private final Cursor writePos = new Cursor();
    private final AnsiExecutor ansiExecutor;

    public DefaultTextArea(AnsiExecutor ansiExecutor) {
        this.ansiExecutor = ansiExecutor;
    }

    /** Returns the cursor marking where the next write will occur. */
    public Cursor getWritePosition() {
        return writePos;
    }

    /**
     * Moves the tracked write position down one row. Presumably called by the
     * owner when something else scrolls/adds a console line — TODO confirm
     * against callers; only the row is adjusted, the column is untouched.
     */
    public void newLineAdjustment() {
        writePos.row++;
    }

    /**
     * Writes one chopped line fragment at the current write position, expanding
     * tab characters into spaces padded to the next 8-column tab stop.
     */
    @Override
    protected void doLineText(final CharSequence text) {
        if (text.length() == 0) {
            return; // nothing to render; avoid issuing an empty write
        }
        ansiExecutor.writeAt(writePos, new Action<AnsiContext>() {
            @Override
            public void execute(AnsiContext ansi) {
                ansi.withStyle(getStyle(), new Action<AnsiContext>() {
                    @Override
                    public void execute(AnsiContext ansi) {
                        String textStr = text.toString();
                        int pos = 0;
                        while (pos < text.length()) {
                            int next = textStr.indexOf('\t', pos);
                            if (next == pos) {
                                // Tab at the current position: pad with spaces up to the
                                // next multiple-of-8 column. NOTE(review): this reads
                                // writePos.col each iteration — assumes the executor keeps
                                // writePos.col current as characters are emitted; verify.
                                int charsToNextStop = CHARS_PER_TAB_STOP - (writePos.col % CHARS_PER_TAB_STOP);
                                for (int i = 0; i < charsToNextStop; i++) {
                                    ansi.a(" ");
                                }
                                pos++;
                            } else if (next > pos) {
                                // Emit the literal run up to (but excluding) the next tab.
                                ansi.a(textStr.substring(pos, next));
                                pos = next;
                            } else {
                                // No more tabs: emit the remainder and finish.
                                ansi.a(textStr.substring(pos, textStr.length()));
                                pos = textStr.length();
                            }
                        }
                    }
                });
            }
        });
    }

    /** Ends the current line by emitting a newline at the write position. */
    @Override
    protected void doEndLine(CharSequence endOfLine) {
        ansiExecutor.writeAt(writePos, NEW_LINE_ACTION);
    }
}
apache-2.0
ryctabo/udc-project-api
src/main/java/co/edu/unicartagena/platf/rest/converters/RoleTypeConverter.java
2340
/*
 * Copyright 2016 Gustavo Pacheco <ryctabo@gmail.com>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package co.edu.unicartagena.platf.rest.converters;

import co.edu.unicartagena.platf.entity.RoleType;

import java.lang.annotation.Annotation;
import java.lang.reflect.Type;

import javax.ws.rs.ext.ParamConverter;
import javax.ws.rs.ext.ParamConverterProvider;
import javax.ws.rs.ext.Provider;

/**
 * JAX-RS {@link ParamConverterProvider} that maps the Spanish role names used
 * in request parameters (e.g. {@code "ADMINISTRADOR"}) onto {@link RoleType}
 * constants.
 *
 * @author Gustavo Pacheco <ryctabo@gmail.com>
 * @version 1.0-SNAPSHOT
 */
@Provider
public class RoleTypeConverter implements ParamConverterProvider {

    /**
     * Returns a converter for {@link RoleType} parameters, or {@code null} for
     * any other type so the runtime falls back to other registered providers.
     *
     * @param rawType     the raw parameter type requested by the runtime
     * @param genericType the generic parameter type (unused)
     * @param annotations parameter annotations (unused)
     * @return a {@link ParamConverter} for {@link RoleType}, otherwise {@code null}
     */
    @Override
    public <T> ParamConverter<T> getConverter(final Class<T> rawType,
                                              Type genericType,
                                              Annotation[] annotations) {
        if (!rawType.getName().equals(RoleType.class.getName())) {
            return null;
        }
        return new ParamConverter<T>() {
            @Override
            public T fromString(String roleString) {
                if (roleString == null) {
                    return null;
                }
                switch (roleString) {
                    case "ADMINISTRADOR":
                        return rawType.cast(RoleType.ADMINISTRATOR);
                    case "DECANATURA":
                        return rawType.cast(RoleType.DEANCHIP);
                    case "FACULTAD":
                        return rawType.cast(RoleType.FACULTY);
                    case "PROGRAMA":
                        return rawType.cast(RoleType.PROGRAM);
                    default:
                        // The value comes from the client, so an unknown role is a
                        // bad request, not a programming error. The ParamConverter
                        // contract expects IllegalArgumentException here (the JAX-RS
                        // runtime maps it to a 4xx response); the previous
                        // AssertionError was an Error and surfaced as a 500.
                        throw new IllegalArgumentException(
                                "Unknown role type: " + roleString);
                }
            }

            @Override
            public String toString(T roleObject) {
                return roleObject == null ? null : roleObject.toString();
            }
        };
    }
}
apache-2.0
debop/debop4k
debop4k-data-orm/src/main/java/debop4k/data/orm/hibernate/usertypes/compress/CompressedByteArrayUserType.java
2463
/*
 * Copyright (c) 2016. Sunghyouk Bae <sunghyouk.bae@gmail.com>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package debop4k.data.orm.hibernate.usertypes.compress;

import debop4k.core.collections.Arrayx;
import org.hibernate.HibernateException;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.type.BinaryType;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Hibernate user type that persists a byte array in compressed form, using the
 * compressor supplied by the concrete subclass via {@code compressor()}.
 *
 * @author sunghyouk.bae@gmail.com
 * @since 2015. 8. 26.
 */
public abstract class CompressedByteArrayUserType extends CompressedUserType {

  /** Compresses the given bytes; a null or empty input yields {@code null}. */
  public byte[] compress(byte[] plainBytes) {
    return Arrayx.isNullOrEmpty(plainBytes) ? null : compressor().compress(plainBytes);
  }

  /** Decompresses the given bytes; a null or empty input yields {@code null}. */
  public byte[] decompress(byte[] compressedByte) {
    return Arrayx.isNullOrEmpty(compressedByte) ? null : compressor().decompress(compressedByte);
  }

  @Override
  public Class returnedClass() {
    return byte[].class;
  }

  /** Reads the stored compressed column and returns the decompressed bytes. */
  @Override
  public Object nullSafeGet(ResultSet rs,
                            String[] names,
                            SharedSessionContractImplementor session,
                            Object owner) throws HibernateException, SQLException {
    return decompress(BinaryType.INSTANCE.nullSafeGet(rs, names[0], session));
  }

  /** Compresses the value (when non-null) and binds it to the statement. */
  @Override
  public void nullSafeSet(PreparedStatement st,
                          Object value,
                          int index,
                          SharedSessionContractImplementor session) throws HibernateException, SQLException {
    byte[] payload = (value == null) ? null : compress((byte[]) value);
    BinaryType.INSTANCE.nullSafeSet(st, payload, index, session);
  }
}
apache-2.0
googleads/google-ads-java
google-ads-stubs-v9/src/main/java/com/google/ads/googleads/v9/services/stub/GrpcCampaignSharedSetServiceStub.java
8703
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v9.services.stub;

import com.google.ads.googleads.v9.resources.CampaignSharedSet;
import com.google.ads.googleads.v9.services.GetCampaignSharedSetRequest;
import com.google.ads.googleads.v9.services.MutateCampaignSharedSetsRequest;
import com.google.ads.googleads.v9.services.MutateCampaignSharedSetsResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.stub.GrpcOperationsStub;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * gRPC stub implementation for the CampaignSharedSetService service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 *
 * <p>NOTE(review): generated by gapic-generator-java — do not hand-edit logic;
 * regenerate instead.
 */
@Generated("by gapic-generator-java")
public class GrpcCampaignSharedSetServiceStub extends CampaignSharedSetServiceStub {
  // Descriptor for the unary GetCampaignSharedSet RPC (proto marshalling included).
  private static final MethodDescriptor<GetCampaignSharedSetRequest, CampaignSharedSet>
      getCampaignSharedSetMethodDescriptor =
          MethodDescriptor.<GetCampaignSharedSetRequest, CampaignSharedSet>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.CampaignSharedSetService/GetCampaignSharedSet")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetCampaignSharedSetRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(CampaignSharedSet.getDefaultInstance()))
              .build();

  // Descriptor for the unary MutateCampaignSharedSets RPC.
  private static final MethodDescriptor<
          MutateCampaignSharedSetsRequest, MutateCampaignSharedSetsResponse>
      mutateCampaignSharedSetsMethodDescriptor =
          MethodDescriptor
              .<MutateCampaignSharedSetsRequest, MutateCampaignSharedSetsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.CampaignSharedSetService/MutateCampaignSharedSets")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(MutateCampaignSharedSetsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(MutateCampaignSharedSetsResponse.getDefaultInstance()))
              .build();

  private final UnaryCallable<GetCampaignSharedSetRequest, CampaignSharedSet>
      getCampaignSharedSetCallable;
  private final UnaryCallable<MutateCampaignSharedSetsRequest, MutateCampaignSharedSetsResponse>
      mutateCampaignSharedSetsCallable;

  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  /** Creates a stub from explicit settings, deriving the client context from them. */
  public static final GrpcCampaignSharedSetServiceStub create(
      CampaignSharedSetServiceStubSettings settings) throws IOException {
    return new GrpcCampaignSharedSetServiceStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default settings and the given client context. */
  public static final GrpcCampaignSharedSetServiceStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcCampaignSharedSetServiceStub(
        CampaignSharedSetServiceStubSettings.newBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings, the given context, and a custom callable factory. */
  public static final GrpcCampaignSharedSetServiceStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcCampaignSharedSetServiceStub(
        CampaignSharedSetServiceStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcCampaignSharedSetServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcCampaignSharedSetServiceStub(
      CampaignSharedSetServiceStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcCampaignSharedSetServiceCallableFactory());
  }

  /**
   * Constructs an instance of GrpcCampaignSharedSetServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcCampaignSharedSetServiceStub(
      CampaignSharedSetServiceStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Each paramsExtractor below populates implicit routing headers for the RPC.
    GrpcCallSettings<GetCampaignSharedSetRequest, CampaignSharedSet>
        getCampaignSharedSetTransportSettings =
            GrpcCallSettings.<GetCampaignSharedSetRequest, CampaignSharedSet>newBuilder()
                .setMethodDescriptor(getCampaignSharedSetMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("resource_name", String.valueOf(request.getResourceName()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<MutateCampaignSharedSetsRequest, MutateCampaignSharedSetsResponse>
        mutateCampaignSharedSetsTransportSettings =
            GrpcCallSettings
                .<MutateCampaignSharedSetsRequest, MutateCampaignSharedSetsResponse>newBuilder()
                .setMethodDescriptor(mutateCampaignSharedSetsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("customer_id", String.valueOf(request.getCustomerId()));
                      return params.build();
                    })
                .build();

    this.getCampaignSharedSetCallable =
        callableFactory.createUnaryCallable(
            getCampaignSharedSetTransportSettings,
            settings.getCampaignSharedSetSettings(),
            clientContext);
    this.mutateCampaignSharedSetsCallable =
        callableFactory.createUnaryCallable(
            mutateCampaignSharedSetsTransportSettings,
            settings.mutateCampaignSharedSetsSettings(),
            clientContext);

    // Aggregate so close()/shutdown() fan out to every transport-level resource.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<GetCampaignSharedSetRequest, CampaignSharedSet>
      getCampaignSharedSetCallable() {
    return getCampaignSharedSetCallable;
  }

  @Override
  public UnaryCallable<MutateCampaignSharedSetsRequest, MutateCampaignSharedSetsResponse>
      mutateCampaignSharedSetsCallable() {
    return mutateCampaignSharedSetsCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
apache-2.0
dlwhitehurst/blackhole
src/test/java/org/ciwise/blackhole/web/rest/UserResourceIntTest.java
2571
package org.ciwise.blackhole.web.rest; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import javax.inject.Inject; import org.ciwise.blackhole.BlackholeApp; import org.ciwise.blackhole.repository.UserRepository; import org.ciwise.blackhole.service.UserService; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; //import org.springframework.boot.test.IntegrationTest; //import org.springframework.boot.test.SpringApplicationConfiguration; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.http.MediaType; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; /** * Test class for the UserResource REST controller. 
*
 * @see UserResource
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = BlackholeApp.class)
public class UserResourceIntTest {

    @Inject
    private UserRepository userRepository;

    @Inject
    private UserService userService;

    private MockMvc restUserMockMvc;

    /**
     * Builds a standalone MockMvc around a fresh {@link UserResource}.
     *
     * <p>Dependencies are injected via reflection because the resource exposes no
     * constructor or setter for them; the field names must match those declared in
     * {@code UserResource}.
     */
    @Before
    public void setup() {
        UserResource userResource = new UserResource();
        ReflectionTestUtils.setField(userResource, "userRepository", userRepository);
        ReflectionTestUtils.setField(userResource, "userService", userService);
        this.restUserMockMvc = MockMvcBuilders.standaloneSetup(userResource).build();
    }

    /**
     * GET /api/users/admin returns 200 with a JSON body whose {@code lastName}
     * is "Administrator" (seeded default admin account).
     */
    @Test
    public void testGetExistingUser() throws Exception {
        restUserMockMvc.perform(get("/api/users/admin")
                .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andExpect(content().contentType("application/json"))
            .andExpect(jsonPath("$.lastName").value("Administrator"));
    }

    /** GET /api/users/unknown returns 404 for a login that does not exist. */
    @Test
    public void testGetUnknownUser() throws Exception {
        restUserMockMvc.perform(get("/api/users/unknown")
                .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isNotFound());
    }
}
apache-2.0