repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
ConsecroMUD/ConsecroMUD | com/suscipio_solutions/consecro_mud/Abilities/Prayers/Prayer_MassFreedom.java | 4610 | package com.suscipio_solutions.consecro_mud.Abilities.Prayers;
import java.util.List;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.MendingSkill;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Locales.interfaces.Room;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMClass;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.CMProps;
import com.suscipio_solutions.consecro_mud.core.CMath;
import com.suscipio_solutions.consecro_mud.core.interfaces.Physical;
/**
 * "Mass Freedom" prayer: a restoration-domain prayer that attempts to strip
 * movement-restricting ("binding") effects from every inhabitant of the
 * caster's room at once.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class Prayer_MassFreedom extends Prayer implements MendingSkill
{
// Unique identifier used by the ability registry.
@Override public String ID() { return "Prayer_MassFreedom"; }
private final static String localizedName = CMLib.lang().L("Mass Freedom");
@Override public String name() { return localizedName; }
@Override public int classificationCode(){return Ability.ACODE_PRAYER|Ability.DOMAIN_RESTORATION;}
@Override public int abstractQuality(){ return Ability.QUALITY_OK_OTHERS;}
@Override public long flags(){return Ability.FLAG_HOLY;}
/**
 * Returns true if the given item is a MOB currently carrying at least one
 * effect this prayer could remove. The check is made with a throwaway
 * caster leveled to the last player level, so it is independent of any
 * real caster's level.
 */
@Override
public boolean supportsMending(Physical item)
{
// Only MOBs can be "mended" by this prayer (it removes effects, not damage).
if(!(item instanceof MOB)) return false;
final MOB caster=CMClass.getFactoryMOB();
// Probe at max player level so the level comparison in
// returnOffensiveAffects() is as permissive as possible.
caster.basePhyStats().setLevel(CMProps.getIntVar(CMProps.Int.LASTPLAYERLEVEL));
caster.phyStats().setLevel(CMProps.getIntVar(CMProps.Int.LASTPLAYERLEVEL));
final boolean canMend=returnOffensiveAffects(caster,item).size()>0;
// Factory MOBs must be destroyed explicitly to release their resources.
caster.destroy();
return canMend;
}
/**
 * Scans fromMe's personal effects and returns those this prayer would
 * remove: effects that leave a pristine test MOB unable to move or act
 * freely, carry the BINDING flag, or veto a harmless SIT action -
 * provided the effect's invoker (if any) is not too high-level for the
 * caster to overcome.
 */
public List<Ability> returnOffensiveAffects(MOB caster, Physical fromMe)
{
// Neutral test subject: each effect is applied to this fresh factory MOB
// to observe what it does to the MOB's physical stats.
final MOB newMOB=CMClass.getFactoryMOB();
final Vector offenders=new Vector(1);
// A harmless SIT message; an effect that vetoes it restricts movement.
final CMMsg msg=CMClass.getMsg(newMOB,null,null,CMMsg.MSG_SIT,null);
for(int a=0;a<fromMe.numEffects();a++) // personal
{
final Ability A=fromMe.fetchEffect(a);
if(A!=null)
{
// Any exception thrown by a misbehaving effect is deliberately
// swallowed below so one bad effect cannot abort the whole scan.
try
{
// Reset the test MOB, then apply only this effect's stat changes.
newMOB.recoverPhyStats();
A.affectPhyStats(newMOB,newMOB.phyStats());
final int clas=A.classificationCode()&Ability.ALL_ACODES;
// NOTE: braceless nested ifs - the second if only runs when this
// first condition (the effect restricts the test MOB) holds.
if((!CMLib.flags().aliveAwakeMobileUnbound(newMOB,true))
||(CMath.bset(A.flags(),Ability.FLAG_BINDING))
||(!A.okMessage(newMOB,msg)))
// Removable when: it has no invoker; or it is not a
// spell/chant/prayer/song; or its invoker's level is within the
// caster's reach (caster level + 1 + 2 per expertise level).
if((A.invoker()==null)
||((clas!=Ability.ACODE_SPELL)&&(clas!=Ability.ACODE_CHANT)&&(clas!=Ability.ACODE_PRAYER)&&(clas!=Ability.ACODE_SONG))
||((A.invoker()!=null)
&&(A.invoker().phyStats().level()<=(caster.phyStats().level()+1+(2*super.getXLEVELLevel(caster))))))
offenders.addElement(A);
}
catch(final Exception e)
{}
}
}
newMOB.destroy();
return offenders;
}
/**
 * Reports this cast as beneficial when the target MOB actually has
 * something removable; otherwise defers to the default quality.
 */
@Override
public int castingQuality(MOB mob, Physical target)
{
if(mob!=null)
{
if(target instanceof MOB)
{
if(supportsMending(target))
return super.castingQuality(mob, target,Ability.QUALITY_BENEFICIAL_OTHERS);
}
}
return super.castingQuality(mob,target);
}
/**
 * Casts the prayer: on a successful proficiency check, announces the
 * effect to the room and unInvokes every removable binding effect on each
 * inhabitant; on failure, emits the standard fizzle message.
 */
@Override
public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
{
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
final boolean success=proficiencyCheck(mob,0,auto);
if(success)
{
final CMMsg msg=CMClass.getMsg(mob,null,this,verbalCastCode(mob,null,auto),auto?L("A feeling of freedom flows through the air"):L("^S<S-NAME> @x1 for freedom, and the area begins to fill with divine glory.^?",prayWord(mob)));
final Room room=mob.location();
if((room!=null)&&(room.okMessage(mob,msg)))
{
room.send(mob,msg);
// Sweep every inhabitant of the room, not just a single target.
for(int i=0;i<room.numInhabitants();i++)
{
final MOB target=room.fetchInhabitant(i);
if(target==null) break;
final List<Ability> offensiveAffects=returnOffensiveAffects(mob,target);
if(offensiveAffects.size()>0)
{
// it worked, so build a copy of this ability,
// and add it to the affects list of the
// affected MOB. Then tell everyone else
// what happened.
for(int a=offensiveAffects.size()-1;a>=0;a--)
offensiveAffects.get(a).unInvoke();
// Only show the message if the effects actually came off.
if((!CMLib.flags().stillAffectedBy(target,offensiveAffects,false))&&(target.location()!=null))
target.location().show(target,null,CMMsg.MSG_OK_VISUAL,L("<S-NAME> seem(s) less constricted."));
}
}
}
}
else
this.beneficialWordsFizzle(mob,null,L("<S-NAME> @x1 for freedom, but nothing happens.",prayWord(mob)));
// return whether it worked
return success;
}
}
| apache-2.0 |
micrometer-metrics/micrometer | micrometer-binders/src/test/java/io/micrometer/binder/cache/CaffeineStatsCounterTest.java | 3644 | /*
* Copyright 2021 VMware, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.binder.cache;
import static org.assertj.core.api.Assertions.assertThat;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.RemovalCause;
import io.micrometer.core.instrument.DistributionSummary;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tags;
import io.micrometer.core.instrument.Timer;
import io.micrometer.core.instrument.search.RequiredSearch;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import java.util.concurrent.TimeUnit;
/**
 * Tests for {@link CaffeineStatsCounter}.
 *
 * @author John Karp
 */
class CaffeineStatsCounterTest {

    // Cache name; becomes the value of the "cache" tag on every registered meter.
    private static final String CACHE_NAME = "foo";
    // Caller-supplied tags that must be propagated to every meter.
    private static final Tags USER_TAGS = Tags.of("k", "v");
    // Complete tag set expected on the meters: user tags plus the cache tag.
    private static final Tags TAGS = Tags.concat(USER_TAGS, "cache", CACHE_NAME);

    private CaffeineStatsCounter stats;
    private MeterRegistry registry;

    @BeforeEach
    void setUp() {
        registry = new SimpleMeterRegistry();
        stats = new CaffeineStatsCounter(registry, CACHE_NAME, USER_TAGS);
    }

    @Test
    void registerSize() {
        // Wire a real cache to this stats counter and expose its size as a gauge.
        Cache<String, String> cache =
                Caffeine.newBuilder().maximumSize(10).recordStats(() -> stats).build();
        stats.registerSizeMetric(cache);

        // Gauge starts at zero and tracks the entry count as the cache fills.
        assertThat(fetch("cache.size").gauge().value()).isEqualTo(0);
        cache.put("foo", "bar");
        assertThat(fetch("cache.size").gauge().value()).isEqualTo(1);
    }

    @Test
    void hit() {
        stats.recordHits(2);
        double hits = fetch("cache.gets", "result", "hit").counter().count();
        assertThat(hits).isEqualTo(2);
    }

    @Test
    void miss() {
        stats.recordMisses(2);
        double misses = fetch("cache.gets", "result", "miss").counter().count();
        assertThat(misses).isEqualTo(2);
    }

    @Test
    void loadSuccess() {
        // Load durations are recorded in nanoseconds on the success timer.
        stats.recordLoadSuccess(256);
        Timer successTimer = fetch("cache.loads", "result", "success").timer();
        assertThat(successTimer.count()).isEqualTo(1);
        assertThat(successTimer.totalTime(TimeUnit.NANOSECONDS)).isEqualTo(256);
    }

    @Test
    void loadFailure() {
        // Failed loads are timed separately under result=failure.
        stats.recordLoadFailure(256);
        Timer failureTimer = fetch("cache.loads", "result", "failure").timer();
        assertThat(failureTimer.count()).isEqualTo(1);
        assertThat(failureTimer.totalTime(TimeUnit.NANOSECONDS)).isEqualTo(256);
    }

    @ParameterizedTest
    @EnumSource(RemovalCause.class)
    void evictionWithCause(RemovalCause cause) {
        // Each removal cause gets its own distribution summary of eviction weights.
        stats.recordEviction(3, cause);
        DistributionSummary evictions = fetch("cache.evictions", "cause", cause.name()).summary();
        assertThat(evictions.count()).isEqualTo(1);
        assertThat(evictions.totalAmount()).isEqualTo(3);
    }

    /** Looks up a meter by name, requiring the standard tags plus any extras. */
    private RequiredSearch fetch(String name, String... tags) {
        RequiredSearch search = registry.get(name).tags(TAGS);
        return search.tags(tags);
    }
}
| apache-2.0 |
scranton/camel | platforms/spring-boot/components-starter/camel-core-starter/src/main/java/org/apache/camel/language/property/springboot/ExchangePropertyLanguageAutoConfiguration.java | 5088 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.language.property.springboot;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.language.property.ExchangePropertyLanguage;
import org.apache.camel.util.IntrospectionSupport;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionMessage;
import org.springframework.boot.autoconfigure.condition.ConditionOutcome;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.SpringBootCondition;
import org.springframework.boot.bind.RelaxedPropertyResolver;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import org.springframework.core.type.AnnotatedTypeMetadata;
/**
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@Configuration
@ConditionalOnBean(type = "org.apache.camel.spring.boot.CamelAutoConfiguration")
@Conditional(ExchangePropertyLanguageAutoConfiguration.Condition.class)
@AutoConfigureAfter(name = "org.apache.camel.spring.boot.CamelAutoConfiguration")
@EnableConfigurationProperties(ExchangePropertyLanguageConfiguration.class)
public class ExchangePropertyLanguageAutoConfiguration {
// Registers a prototype-scoped ExchangePropertyLanguage bean: injects the
// CamelContext when the language is CamelContextAware, then copies all
// configuration properties onto the language instance via introspection.
@Bean(name = "exchangeProperty-language")
@Scope("prototype")
@ConditionalOnClass(CamelContext.class)
@ConditionalOnMissingBean(ExchangePropertyLanguage.class)
public ExchangePropertyLanguage configureExchangePropertyLanguage(
CamelContext camelContext,
ExchangePropertyLanguageConfiguration configuration)
throws Exception {
ExchangePropertyLanguage language = new ExchangePropertyLanguage();
if (CamelContextAware.class
.isAssignableFrom(ExchangePropertyLanguage.class)) {
CamelContextAware contextAware = CamelContextAware.class
.cast(language);
if (contextAware != null) {
contextAware.setCamelContext(camelContext);
}
}
Map<String, Object> parameters = new HashMap<>();
IntrospectionSupport.getProperties(configuration, parameters, null,
false);
IntrospectionSupport.setProperties(camelContext,
camelContext.getTypeConverter(), language, parameters);
return language;
}
// Spring Boot condition: the bean is created when
// "camel.language.exchangeproperty.enabled" is set, falling back to the
// group-wide "camel.language.enabled" flag (default true).
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
public static class Condition extends SpringBootCondition {
@Override
public ConditionOutcome getMatchOutcome(
ConditionContext conditionContext,
AnnotatedTypeMetadata annotatedTypeMetadata) {
boolean groupEnabled = isEnabled(conditionContext,
"camel.language.", true);
ConditionMessage.Builder message = ConditionMessage
.forCondition("camel.language.exchangeproperty");
if (isEnabled(conditionContext, "camel.language.exchangeproperty.",
groupEnabled)) {
return ConditionOutcome.match(message.because("enabled"));
}
return ConditionOutcome.noMatch(message.because("not enabled"));
}
// Reads "<prefix>enabled" from the environment with relaxed binding.
private boolean isEnabled(
org.springframework.context.annotation.ConditionContext context,
java.lang.String prefix, boolean defaultValue) {
RelaxedPropertyResolver resolver = new RelaxedPropertyResolver(
context.getEnvironment(), prefix);
return resolver.getProperty("enabled", Boolean.class, defaultValue);
}
}
}
| apache-2.0 |
b-slim/hive | beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java | 13992 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.beeline;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hive.common.util.HiveTestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
 * Unit test for Beeline arg parser.
 */
@RunWith(Parameterized.class)
public class TestBeelineArgParsing {
private static final Logger LOG = LoggerFactory.getLogger(TestBeelineArgParsing.class.getName());
private static final String dummyDriverClazzName = "DummyDriver";
// Parameters injected per run by the Parameterized runner; see data().
private String connectionString;
private String driverClazzName;
private String driverJarFileName;
// whether BeeLine supports this driver out of the box (no explicit
// "addlocaldrivername" needed after adding the jar)
private boolean defaultSupported;
public TestBeelineArgParsing(String connectionString, String driverClazzName, String driverJarFileName,
boolean defaultSupported) {
this.connectionString = connectionString;
this.driverClazzName = driverClazzName;
this.driverJarFileName = driverJarFileName;
this.defaultSupported = defaultSupported;
}
/**
 * Test double: records the commands BeeLine would dispatch (connect
 * arguments, property-file names and queries) instead of executing them.
 */
public class TestBeeline extends BeeLine {
String connectArgs = null;
List<String> properties = new ArrayList<String>();
List<String> queries = new ArrayList<String>();
@Override
boolean dispatch(String command) {
String connectCommand = "!connect";
String propertyCommand = "!properties";
if (command.startsWith(connectCommand)) {
// capture everything after "!connect "
this.connectArgs = command.substring(connectCommand.length() + 1, command.length());
} else if (command.startsWith(propertyCommand)) {
// capture everything after "!properties "
this.properties.add(command.substring(propertyCommand.length() + 1, command.length()));
} else {
this.queries.add(command);
}
return true;
}
public boolean addlocaldrivername(String driverName) {
String line = "addlocaldrivername " + driverName;
return getCommands().addlocaldrivername(line);
}
public boolean addLocalJar(String url){
String line = "addlocaldriverjar " + url;
return getCommands().addlocaldriverjar(line);
}
}
/**
 * Parameter sets: a real postgres driver jar from the local maven repo
 * (default-supported), and a dummy driver jar generated on the fly from
 * DummyDriver.txt (not default-supported).
 */
@Parameters(name="{1}")
public static Collection<Object[]> data() throws IOException, InterruptedException {
// generate the dummy driver by using txt file
String u = HiveTestUtils.getFileFromClasspath("DummyDriver.txt");
Map<File, String> extraContent=new HashMap<>();
extraContent.put(new File("META-INF/services/java.sql.Driver"), dummyDriverClazzName);
File jarFile = HiveTestUtils.genLocalJarForTest(u, dummyDriverClazzName, extraContent);
String pathToDummyDriver = jarFile.getAbsolutePath();
return Arrays.asList(new Object[][] {
{ "jdbc:postgresql://host:5432/testdb", "org.postgresql.Driver",
System.getProperty("maven.local.repository") + File.separator + "postgresql"
+ File.separator + "postgresql" + File.separator + "9.1-901.jdbc4" + File.separator
+ "postgresql-9.1-901.jdbc4.jar", true },
{ "jdbc:dummy://host:5432/testdb", dummyDriverClazzName, pathToDummyDriver, false } });
}
/**
 * Basic -u/-n/-p/-d/-a options are forwarded to the connect command and opts.
 */
@Test
public void testSimpleArgs() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "-a", "authType"};
org.junit.Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getAuthType().equals("authType"));
}
/**
 * The password is read from the file given with -w, overriding -p.
 */
@Test
public void testPasswordFileArgs() throws Exception {
TestBeeline bl = new TestBeeline();
File passFile = new File("file.password");
passFile.deleteOnExit();
FileOutputStream passFileOut = new FileOutputStream(passFile);
passFileOut.write("mypass\n".getBytes());
passFileOut.close();
String args[] = new String[] {"-u", "url", "-n", "name",
"-w", "file.password", "-p", "not-taken-if-w-is-present",
"-d", "driver", "-a", "authType"};
bl.initArgs(args);
System.out.println(bl.connectArgs);
// Password file contents are trimmed of trailing whitespaces and newlines
Assert.assertTrue(bl.connectArgs.equals("url name mypass driver"));
Assert.assertTrue(bl.getOpts().getAuthType().equals("authType"));
passFile.delete();
}
/**
 * The first flag is taken by the parser.
 */
@Test
public void testDuplicateArgs() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-u", "url2", "-n", "name",
"-p", "password", "-d", "driver"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
}
/**
 * Every -e option is dispatched as a separate query.
 */
@Test
public void testQueryScripts() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "-e", "select1", "-e", "select2"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.queries.contains("select1"));
Assert.assertTrue(bl.queries.contains("select2"));
}
/**
 * Test setting hive conf and hive vars with --hiveconf and --hivevar
 */
@Test
public void testHiveConfAndVars() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "--hiveconf", "a=avalue", "--hiveconf", "b=bvalue",
"--hivevar", "c=cvalue", "--hivevar", "d=dvalue"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getHiveConfVariables().get("a").equals("avalue"));
Assert.assertTrue(bl.getOpts().getHiveConfVariables().get("b").equals("bvalue"));
Assert.assertTrue(bl.getOpts().getHiveVariables().get("c").equals("cvalue"));
Assert.assertTrue(bl.getOpts().getHiveVariables().get("d").equals("dvalue"));
}
/**
 * Long-form beeline options (--autoCommit, --verbose, --truncateTable) are parsed.
 */
@Test
public void testBeelineOpts() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] =
new String[] { "-u", "url", "-n", "name", "-p", "password", "-d", "driver",
"--autoCommit=true", "--verbose", "--truncateTable" };
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getAutoCommit());
Assert.assertTrue(bl.getOpts().getVerbose());
Assert.assertTrue(bl.getOpts().getTruncateTable());
}
/**
 * --autoCommit toggles the option explicitly; it defaults to true.
 */
@Test
public void testBeelineAutoCommit() throws Exception {
TestBeeline bl = new TestBeeline();
String[] args = {};
bl.initArgs(args);
Assert.assertTrue(bl.getOpts().getAutoCommit());
args = new String[] {"--autoCommit=false"};
bl.initArgs(args);
Assert.assertFalse(bl.getOpts().getAutoCommit());
args = new String[] {"--autoCommit=true"};
bl.initArgs(args);
Assert.assertTrue(bl.getOpts().getAutoCommit());
bl.close();
}
/**
 * showDbInPrompt defaults to false, so no db suffix is added to the prompt.
 */
@Test
public void testBeelineShowDbInPromptOptsDefault() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] { "-u", "url" };
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertFalse(bl.getOpts().getShowDbInPrompt());
Assert.assertEquals("", bl.getFormattedDb());
}
/**
 * --showDbInPrompt=true makes the current database appear in the prompt.
 */
@Test
public void testBeelineShowDbInPromptOptsTrue() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] { "-u", "url", "--showDbInPrompt=true" };
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.getOpts().getShowDbInPrompt());
Assert.assertEquals(" (default)", bl.getFormattedDb());
}
/**
 * Test setting script file with -f option.
 */
@Test
public void testScriptFile() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "-f", "myscript"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getScriptFile().equals("myscript"));
}
/**
 * Test beeline with -f and -e simultaneously
 */
@Test
public void testCommandAndFileSimultaneously() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-e", "myselect", "-f", "myscript"};
Assert.assertEquals(1, bl.initArgs(args));
}
/**
 * Test beeline with multiple initfiles in -i.
 */
@Test
public void testMultipleInitFiles() {
TestBeeline bl = new TestBeeline();
String[] args = new String[] {"-i", "/url/to/file1", "-i", "/url/to/file2"};
Assert.assertEquals(0, bl.initArgs(args));
String[] files = bl.getOpts().getInitFiles();
Assert.assertEquals("/url/to/file1", files[0]);
Assert.assertEquals("/url/to/file2", files[1]);
}
/**
 * Displays the usage.
 */
@Test
public void testHelp() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"--help"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertEquals(true, bl.getOpts().isHelpAsked());
}
/**
 * An option missing its argument is reported as a parse error (-1).
 */
@Test
public void testUnmatchedArgs() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n"};
Assert.assertEquals(-1, bl.initArgs(args));
}
/**
 * After adding the driver jar and registering the driver class, the local
 * driver is found for the connection string.
 */
@Test
public void testAddLocalJar() throws Exception {
TestBeeline bl = new TestBeeline();
Assert.assertNull(bl.findLocalDriver(connectionString));
LOG.info("Add " + driverJarFileName + " for the driver class " + driverClazzName);
bl.addLocalJar(driverJarFileName);
bl.addlocaldrivername(driverClazzName);
Assert.assertEquals(bl.findLocalDriver(connectionString).getClass().getName(), driverClazzName);
}
/**
 * Adding only the jar suffices for default-supported drivers; other drivers
 * also require an explicit addlocaldrivername.
 */
@Test
public void testAddLocalJarWithoutAddDriverClazz() throws Exception {
TestBeeline bl = new TestBeeline();
LOG.info("Add " + driverJarFileName + " for the driver class " + driverClazzName);
assertTrue("expected to exists: "+driverJarFileName,new File(driverJarFileName).exists());
bl.addLocalJar(driverJarFileName);
if (!defaultSupported) {
Assert.assertNull(bl.findLocalDriver(connectionString));
} else {
// no need to add for the default supported local jar driver
Assert.assertNotNull(bl.findLocalDriver(connectionString));
Assert.assertEquals(bl.findLocalDriver(connectionString).getClass().getName(), driverClazzName);
}
}
/**
 * The password given with -p is masked in the verbose error output.
 */
@Test
public void testBeelinePasswordMask() throws Exception {
TestBeeline bl = new TestBeeline();
File errFile = File.createTempFile("test", "tmp");
bl.setErrorStream(new PrintStream(new FileOutputStream(errFile)));
String args[] =
new String[] { "-u", "url", "-n", "name", "-p", "password", "-d", "driver",
"--autoCommit=true", "--verbose", "--truncateTable" };
bl.initArgs(args);
bl.close();
String errContents = new String(Files.readAllBytes(Paths.get(errFile.toString())));
Assert.assertTrue(errContents.contains(BeeLine.PASSWD_MASK));
}
/**
 * Test property file parameter option.
 */
@Test
public void testPropertyFile() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"--property-file", "props"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.properties.get(0).equals("props"));
bl.close();
}
/**
 * Test maxHistoryRows parameter option.
 */
@Test
public void testMaxHistoryRows() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"--maxHistoryRows=100"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.getOpts().getMaxHistoryRows() == 100);
bl.close();
}
/**
 * Test the file parameter option
 * @throws Exception
 */
@Test
public void testFileParam() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"-u", "url", "-n", "name",
"-p", "password", "-d", "driver", "-f", "hdfs://myscript"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.connectArgs.equals("url name password driver"));
Assert.assertTrue(bl.getOpts().getScriptFile().equals("hdfs://myscript"));
}
/**
 * Test the report parameter option.
 * @throws Exception
 */
@Test
public void testReport() throws Exception {
TestBeeline bl = new TestBeeline();
String args[] = new String[] {"--report=true"};
Assert.assertEquals(0, bl.initArgs(args));
Assert.assertTrue(bl.getOpts().isReport());
bl.close();
}
}
| apache-2.0 |
maciejole/fBot | src/test/java/pl/hycom/pip/messanger/handler/processor/FindKeywordToAskProcessorTest.java | 8464 | /**
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pl.hycom.pip.messanger.handler.processor;
import org.assertj.core.api.Assertions;
import org.junit.Before;
import org.junit.Test;
import pl.hycom.pip.messanger.pipeline.PipelineException;
import pl.hycom.pip.messanger.repository.model.Keyword;
import pl.hycom.pip.messanger.repository.model.Product;
import java.util.*;
/**
* Created by szale_000 on 2017-04-06.
*/
public class FindKeywordToAskProcessorTest {
private FindKeywordToAskProcessor sut = new FindKeywordToAskProcessor();
private List<Keyword> keywords;
@Before
public void initKeywords() {
keywords = Arrays.asList(new Keyword("K0"), new Keyword("K1"), new Keyword("K2"), new Keyword("K3"), new Keyword("K4"));
}
private Product createProduct(List<Keyword> keywords) {
final Product product = new Product();
product.setId(1);
product.setName("Foo");
product.setDescription("Foo");
product.setImageUrl("Foo");
product.setKeywords(new HashSet<>(keywords));
return product;
}
@Test
public void findKeywordThatAppearsInHalfOfProducts() throws Exception {
// given
final List<Product> products = new ArrayList<>();
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(4))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(3), keywords.get(4))));
products.add(createProduct(Arrays.asList(keywords.get(1), keywords.get(2), keywords.get(3))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(2), keywords.get(4))));
List<Keyword> keywordsWanted = Collections.emptyList();
/*
* keyword 0 appears in 5 products
* keyword 1 appears in 4 products
* keyword 2 appears in 4 products
* keyword 3 appears in 2 products
* keyword 4 appears in 3 products
* Total 6 products, so we want keyword that appears in closest to 3 products (keyword 4)
*/
// when
Optional<Keyword> foundKeyword = sut.findKeywordToAsk(products, keywordsWanted);
// then
Assertions.assertThat(foundKeyword.get()).isEqualTo(keywords.get(4));
}
@Test
public void findKeywordThatAppearsInClosestToHalfOfProducts() throws Exception {
// given
final List<Product> products = new ArrayList<>();
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(3), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(3), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(3))));
List<Keyword> keywordsWanted = Collections.emptyList();
/*
* keyword 0 appears in 7 products
* keyword 1 appears in 7 products
* keyword 2 appears in 7 products
* keyword 3 appears in 3 products
* Total 8 products, so we want keyword that appears in closest to 4 products (keyword 3)
*/
// when
Optional<Keyword> foundKeyword = sut.findKeywordToAsk(products, keywordsWanted);
// then
Assertions.assertThat(foundKeyword.get()).isEqualTo(keywords.get(3));
}
@Test
public void findsClosestToMiddleBesidesWantedKeyword() throws Exception {
// given
final List<Product> products = new ArrayList<>();
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(4), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(3), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(3), keywords.get(4), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(3), keywords.get(2))));
products.add(createProduct(Arrays.asList(keywords.get(0), keywords.get(1), keywords.get(3))));
List<Keyword> keywordsWanted = Collections.singletonList(keywords.get(3));
/*
* keyword 0 appears in 7 products
* keyword 1 appears in 3 products
* keyword 2 appears in 7 products
* keyword 3 appears in 4 products, but appears in user's request
* keyword 4 appears in 2 products
* Total 8 products, so we want keyword that appears in closest to 4 products but not in keywordsWanted (keyword 1)
*/
// when
Optional<Keyword> foundKeyword = sut.findKeywordToAsk(products, keywordsWanted);
// then
Assertions.assertThat(foundKeyword.get()).isEqualTo(keywords.get(1));
}
@Test
public void findsNullIfAllKeywordsAreWanted() throws Exception {
    // given: eight products whose keywords are all drawn from the user's wanted set
    final List<Product> products = new ArrayList<>();
    final int[][] keywordIndexes = {
            {0, 1, 2}, {0, 1, 2}, {0, 1, 2}, {0, 1, 2}, {0, 1, 2},
            {3, 1, 2}, {0, 3, 2}, {0, 1, 3},
    };
    for (final int[] indexes : keywordIndexes) {
        products.add(createProduct(Arrays.asList(
                keywords.get(indexes[0]), keywords.get(indexes[1]), keywords.get(indexes[2]))));
    }
    List<Keyword> keywordsWanted = keywords;
    /*
     * Every keyword was already mentioned by the user, so there is no keyword left
     * that could possibly be asked about.
     */
    // when
    Optional<Keyword> candidate = sut.findKeywordToAsk(products, keywordsWanted);
    // then
    Assertions.assertThat(candidate.isPresent()).isFalse();
}
@Test
public void throwsExceptionWhenEmptyList() throws Exception {
    // given: the pipeline step receives nothing to work with
    final List<Product> noProducts = Collections.emptyList();
    final List<Keyword> noKeywords = Collections.emptyList();
    // when
    final Throwable caught = Assertions.catchThrowable(() -> sut.findKeywordToAsk(noProducts, noKeywords));
    // then: an empty product list is rejected with a PipelineException
    Assertions.assertThat(caught).isInstanceOf(PipelineException.class);
}
@Test
public void throwsExceptionWhenNullList() throws Exception {
    // given: a null product list (invalid input) and no wanted keywords
    final List<Product> noProducts = null;
    final List<Keyword> noKeywords = Collections.emptyList();
    // when
    final Throwable caught = Assertions.catchThrowable(() -> sut.findKeywordToAsk(noProducts, noKeywords));
    // then: a null product list is rejected with a PipelineException
    Assertions.assertThat(caught).isInstanceOf(PipelineException.class);
}
}
| apache-2.0 |
dzaiats/java.automation.library | src/test/java/ResponsiveValidatorCompatibleTest.java | 6307 | import net.itarray.automotion.tools.driver.DriverHelper;
import net.itarray.automotion.tools.driver.WebDriverFactory;
import net.itarray.automotion.tools.helpers.EnvironmentHelper;
import net.itarray.automotion.validation.ResponsiveUIValidator;
import net.itarray.automotion.validation.properties.Padding;
import org.assertj.core.api.SoftAssertions;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.phantomjs.PhantomJSDriverService;
import java.awt.*;
import java.util.HashMap;
import java.util.Map;
@Ignore
public class ResponsiveValidatorCompatibleTest {
private static WebDriver driver;
private static long start;
/**
 * Manual entry point: wipes any previous report output, runs the responsive-UI
 * validation scenario once, always tears the driver down, prints the elapsed
 * time, and finally opens the generated report in the default browser.
 *
 * @param args ignored
 */
public static void main(String[] args) {
    ManualTestSupport.deleteOutputDirectory();
    ResponsiveValidatorCompatibleTest test = new ResponsiveValidatorCompatibleTest();
    try {
        test.testThatResponsiveValidatorWorks();
    } finally {
        // Always clean up the WebDriver and report timing, even when validation fails.
        test.tearDown();
        long stop = System.currentTimeMillis();
        System.out.println((stop - start) + " ms");
        // Fixed: removed the stray empty statement (';;') after this call.
        ManualTestSupport.openReportInDefaultBrowser();
    }
}
/**
 * Drives the full responsive-layout validation scenario against
 * http://visual.itarray.net: single-element checks (slider, text block,
 * containers), grid alignment, per-card CSS checks, and page-zoom checks.
 * All outcomes are collected with AssertJ {@link SoftAssertions} so that every
 * failing section is reported at once instead of stopping at the first failure.
 *
 * <p>Fix: AssertJ's {@code overridingErrorMessage(...)} must be invoked BEFORE
 * the assertion method ({@code isTrue()}/{@code isEqualTo(...)}); chained after
 * it, the custom message is registered too late and is never used. All soft
 * assertions below have been reordered accordingly.
 */
@Test
public void testThatResponsiveValidatorWorks() {
    // Configure a headless PhantomJS driver through the environment helper.
    Map<String, String> sysProp = new HashMap<>();
    //sysProp.put("BROWSER", "Chrome");
    //sysProp.put("IS_LOCAL", "true");
    sysProp.put("IS_HEADLESS", "true");
    sysProp.put(PhantomJSDriverService.PHANTOMJS_EXECUTABLE_PATH_PROPERTY, "/Users/" + System.getProperty("user.name") + "/Downloads/phantomjs-2.1.1-macosx/bin/phantomjs");
    EnvironmentHelper.setEnv(sysProp);
    WebDriverFactory driverFactory = new WebDriverFactory();
    driver = driverFactory.getDriver();
    driver.get("http://visual.itarray.net");
    driver.manage().window().maximize();
    start = System.currentTimeMillis();
    TestPage page = new TestPage(driver);
    ResponsiveUIValidator uiValidator = new ResponsiveUIValidator(driver);
    uiValidator.setLinesColor(Color.BLACK);
    SoftAssertions softly = new SoftAssertions();
    boolean success1 = uiValidator.init("Validation of Top Slider Element")
            .findElement(page.topSlider(), "Top Slider")
            .sameOffsetLeftAs(page.gridContainer(), "Grid Container")
            .sameOffsetBottomAs(page.topTextBlock(), "Text Block")
            .changeMetricsUnitsTo(ResponsiveUIValidator.Units.PX)
            .widthBetween(300, 500)
            .sameSizeAs(page.gridElements())
            .equalLeftRightOffset()
            .equalTopBottomOffset()
            .isInsideOf(page.mainContainer(), "Main container", new Padding(10, 50, 10, 20))
            .drawMap()
            .validate();
    softly.assertThat(success1).overridingErrorMessage("Failed validation of Top Slider element").isTrue();
    boolean success0 = uiValidator.init("Validation of Grid view")
            .findElement(page.gridContainer(), "Grid Container")
            .equalLeftRightOffset()
            .drawMap()
            .validate();
    softly.assertThat(success0).overridingErrorMessage("Failed validation of Grid Container").isTrue();
    boolean success01 = uiValidator.init("Validation of Main container")
            .findElement(page.mainContainer(), "Main Container")
            .equalLeftRightOffset()
            .drawMap()
            .validate();
    softly.assertThat(success01).overridingErrorMessage("Failed validation of Main Container").isTrue();
    boolean success2 = uiValidator.init("Validation of Top Text block")
            .findElement(page.topTextBlock(), "Top Text block")
            .sameOffsetRightAs(page.gridContainer(), "Grid Container")
            .sameOffsetTopAs(page.topSlider(), "Top Slider")
            .drawMap()
            .validate();
    softly.assertThat(success2).overridingErrorMessage("Failed validation of Top Text block").isTrue();
    boolean success3 = uiValidator.init("Validation of a grid view")
            .findElements(page.gridElements())
            .alignedAsGrid(4, 3)
            .withSameSize()
            .areNotOverlappedWithEachOther()
            .sameTopOffset()
            .equalLeftRightOffset()
            .equalTopBottomOffset()
            .drawMap()
            .validate();
    softly.assertThat(success3).overridingErrorMessage("Failed validation of Grid").isTrue();
    // Per-card checks: style of the details block and its relation to the image container.
    for (WebElement card : page.gridElements()) {
        boolean success = uiValidator.init("Validation of style for each of cards in a grid view")
                .findElement(card.findElement(By.className("project-details")), "Project details block")
                .withCssValue("background", "#f8f8f8")
                .withCssValue("color", "#6f6f6f")
                .notOverlapWith(card.findElement(By.className("gallery-hover-4col")), "Image Container")
                .sameWidthAs(card.findElement(By.className("gallery-hover-4col")), "Image Container")
                .drawMap()
                .validate();
        softly.assertThat(success).overridingErrorMessage("Failed validation of Grid in a list").isTrue();
    }
    // Re-validate the main container at several browser zoom levels.
    int[] zoomRange = {50, 70, 100, 120, 150};
    for (int val : zoomRange) {
        DriverHelper.zoomInOutPage(driver, val);
        boolean success = uiValidator.init("Validate on page zoom " + val + "%")
                .findElement(page.mainContainer(), "Main container")
                .equalLeftRightOffset()
                .sameWidthAs(page.gridContainer(), "Grid Container")
                .drawMap()
                .validate();
        softly.assertThat(success).overridingErrorMessage("Failed validation of Container").isTrue();
    }
    uiValidator.generateReport("Home Page");
    softly.assertAll();
}
/**
 * Quits the shared WebDriver after the run, if one was ever created.
 */
@After
public void tearDown() {
    if (driver == null) {
        return;
    }
    driver.quit();
}
} | apache-2.0 |
googleapis/java-aiplatform | proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/IndexPrivateEndpoints.java | 28291 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/index_endpoint.proto
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* IndexPrivateEndpoints proto is used to provide paths for users to send
* requests via private endpoints (e.g. private service access, private service
* connect).
* To send request via private service access, use match_grpc_address.
* To send request via private service connect, use service_attachment.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints}
*/
// NOTE(review): protoc-generated message class (see header: "DO NOT EDIT").
// Do not hand-modify behavior here; regenerate from index_endpoint.proto instead.
public final class IndexPrivateEndpoints extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints)
    IndexPrivateEndpointsOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use IndexPrivateEndpoints.newBuilder() to construct.
  private IndexPrivateEndpoints(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private IndexPrivateEndpoints() {
    matchGrpcAddress_ = "";
    serviceAttachment_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new IndexPrivateEndpoints();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor, invoked via PARSER.parsePartialFrom below.
  // Tag 10 = field 1 (match_grpc_address), tag 18 = field 2 (service_attachment);
  // both are length-delimited (wire type 2). Tag 0 marks end of input.
  private IndexPrivateEndpoints(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              matchGrpcAddress_ = s;
              break;
            }
          case 18:
            {
              java.lang.String s = input.readStringRequireUtf8();
              serviceAttachment_ = s;
              break;
            }
          default:
            {
              // Unrecognized fields are preserved in unknownFields for round-tripping.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.IndexEndpointProto
        .internal_static_google_cloud_aiplatform_v1beta1_IndexPrivateEndpoints_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.IndexEndpointProto
        .internal_static_google_cloud_aiplatform_v1beta1_IndexPrivateEndpoints_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints.class,
            com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints.Builder.class);
  }

  public static final int MATCH_GRPC_ADDRESS_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached as a
  // String on first access (standard protobuf lazy-decoding pattern).
  private volatile java.lang.Object matchGrpcAddress_;
  /**
   *
   *
   * <pre>
   * Output only. The ip address used to send match gRPC requests.
   * </pre>
   *
   * <code>string match_grpc_address = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The matchGrpcAddress.
   */
  @java.lang.Override
  public java.lang.String getMatchGrpcAddress() {
    java.lang.Object ref = matchGrpcAddress_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      matchGrpcAddress_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. The ip address used to send match gRPC requests.
   * </pre>
   *
   * <code>string match_grpc_address = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for matchGrpcAddress.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getMatchGrpcAddressBytes() {
    java.lang.Object ref = matchGrpcAddress_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      matchGrpcAddress_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int SERVICE_ATTACHMENT_FIELD_NUMBER = 2;
  // Same lazy String/ByteString caching scheme as matchGrpcAddress_.
  private volatile java.lang.Object serviceAttachment_;
  /**
   *
   *
   * <pre>
   * Output only. The name of the service attachment resource. Populated if private service
   * connect is enabled.
   * </pre>
   *
   * <code>string service_attachment = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The serviceAttachment.
   */
  @java.lang.Override
  public java.lang.String getServiceAttachment() {
    java.lang.Object ref = serviceAttachment_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      serviceAttachment_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. The name of the service attachment resource. Populated if private service
   * connect is enabled.
   * </pre>
   *
   * <code>string service_attachment = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The bytes for serviceAttachment.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getServiceAttachmentBytes() {
    java.lang.Object ref = serviceAttachment_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      serviceAttachment_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization check: -1 = not computed yet, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Empty strings are the proto3 default and are not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(matchGrpcAddress_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, matchGrpcAddress_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAttachment_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceAttachment_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(matchGrpcAddress_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, matchGrpcAddress_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceAttachment_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceAttachment_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints other =
        (com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints) obj;
    if (!getMatchGrpcAddress().equals(other.getMatchGrpcAddress())) return false;
    if (!getServiceAttachment().equals(other.getServiceAttachment())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + MATCH_GRPC_ADDRESS_FIELD_NUMBER;
    hash = (53 * hash) + getMatchGrpcAddress().hashCode();
    hash = (37 * hash) + SERVICE_ATTACHMENT_FIELD_NUMBER;
    hash = (53 * hash) + getServiceAttachment().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * IndexPrivateEndpoints proto is used to provide paths for users to send
   * requests via private endpoints (e.g. private service access, private service
   * connect).
   * To send request via private service access, use match_grpc_address.
   * To send request via private service connect, use service_attachment.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints)
      com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpointsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.IndexEndpointProto
          .internal_static_google_cloud_aiplatform_v1beta1_IndexPrivateEndpoints_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.IndexEndpointProto
          .internal_static_google_cloud_aiplatform_v1beta1_IndexPrivateEndpoints_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints.class,
              com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      matchGrpcAddress_ = "";
      serviceAttachment_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.IndexEndpointProto
          .internal_static_google_cloud_aiplatform_v1beta1_IndexPrivateEndpoints_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints build() {
      com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints buildPartial() {
      com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints result =
          new com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints(this);
      result.matchGrpcAddress_ = matchGrpcAddress_;
      result.serviceAttachment_ = serviceAttachment_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints other) {
      if (other == com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints.getDefaultInstance())
        return this;
      // proto3 merge semantics: only non-default (non-empty) fields overwrite.
      if (!other.getMatchGrpcAddress().isEmpty()) {
        matchGrpcAddress_ = other.matchGrpcAddress_;
        onChanged();
      }
      if (!other.getServiceAttachment().isEmpty()) {
        serviceAttachment_ = other.serviceAttachment_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object matchGrpcAddress_ = "";
    /**
     *
     *
     * <pre>
     * Output only. The ip address used to send match gRPC requests.
     * </pre>
     *
     * <code>string match_grpc_address = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The matchGrpcAddress.
     */
    public java.lang.String getMatchGrpcAddress() {
      java.lang.Object ref = matchGrpcAddress_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        matchGrpcAddress_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The ip address used to send match gRPC requests.
     * </pre>
     *
     * <code>string match_grpc_address = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for matchGrpcAddress.
     */
    public com.google.protobuf.ByteString getMatchGrpcAddressBytes() {
      java.lang.Object ref = matchGrpcAddress_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        matchGrpcAddress_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The ip address used to send match gRPC requests.
     * </pre>
     *
     * <code>string match_grpc_address = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The matchGrpcAddress to set.
     * @return This builder for chaining.
     */
    public Builder setMatchGrpcAddress(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      matchGrpcAddress_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The ip address used to send match gRPC requests.
     * </pre>
     *
     * <code>string match_grpc_address = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMatchGrpcAddress() {
      matchGrpcAddress_ = getDefaultInstance().getMatchGrpcAddress();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The ip address used to send match gRPC requests.
     * </pre>
     *
     * <code>string match_grpc_address = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for matchGrpcAddress to set.
     * @return This builder for chaining.
     */
    public Builder setMatchGrpcAddressBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      matchGrpcAddress_ = value;
      onChanged();
      return this;
    }

    private java.lang.Object serviceAttachment_ = "";
    /**
     *
     *
     * <pre>
     * Output only. The name of the service attachment resource. Populated if private service
     * connect is enabled.
     * </pre>
     *
     * <code>string service_attachment = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The serviceAttachment.
     */
    public java.lang.String getServiceAttachment() {
      java.lang.Object ref = serviceAttachment_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        serviceAttachment_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The name of the service attachment resource. Populated if private service
     * connect is enabled.
     * </pre>
     *
     * <code>string service_attachment = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for serviceAttachment.
     */
    public com.google.protobuf.ByteString getServiceAttachmentBytes() {
      java.lang.Object ref = serviceAttachment_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        serviceAttachment_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. The name of the service attachment resource. Populated if private service
     * connect is enabled.
     * </pre>
     *
     * <code>string service_attachment = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The serviceAttachment to set.
     * @return This builder for chaining.
     */
    public Builder setServiceAttachment(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      serviceAttachment_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The name of the service attachment resource. Populated if private service
     * connect is enabled.
     * </pre>
     *
     * <code>string service_attachment = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearServiceAttachment() {
      serviceAttachment_ = getDefaultInstance().getServiceAttachment();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. The name of the service attachment resource. Populated if private service
     * connect is enabled.
     * </pre>
     *
     * <code>string service_attachment = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for serviceAttachment to set.
     * @return This builder for chaining.
     */
    public Builder setServiceAttachmentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      serviceAttachment_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints)
  private static final com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints();
  }

  public static com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<IndexPrivateEndpoints> PARSER =
      new com.google.protobuf.AbstractParser<IndexPrivateEndpoints>() {
        @java.lang.Override
        public IndexPrivateEndpoints parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new IndexPrivateEndpoints(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<IndexPrivateEndpoints> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<IndexPrivateEndpoints> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.IndexPrivateEndpoints getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| apache-2.0 |
IWSDevelopers/iws | iws-api/src/main/java/net/iaeste/iws/api/responses/exchange/FetchGroupsForSharingResponse.java | 3167 | /*
* Licensed to IAESTE A.s.b.l. (IAESTE) under one or more contributor
* license agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership. The Authors
* (See the AUTHORS file distributed with this work) licenses this file to
* You under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.iaeste.iws.api.responses.exchange;
import static net.iaeste.iws.api.util.Immutable.immutableList;
import net.iaeste.iws.api.constants.IWSConstants;
import net.iaeste.iws.api.constants.IWSError;
import net.iaeste.iws.api.dtos.Group;
import net.iaeste.iws.api.responses.Response;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import java.util.ArrayList;
import java.util.List;
/**
* Returns a list of national groups. The list is ordered by name.
*
* @author Michael Pickelbauer / last $Author:$
* @version $Revision:$ / $Date:$
* @since IWS 1.0
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "fetchGroupsForSharingResponse", propOrder = "groups")
public final class FetchGroupsForSharingResponse extends Response {

    /** {@link IWSConstants#SERIAL_VERSION_UID}. */
    private static final long serialVersionUID = IWSConstants.SERIAL_VERSION_UID;

    /** National groups available for sharing; never null, exposed immutably via {@link #getGroups()}. */
    @XmlElement(required = true, nillable = true)
    private final List<Group> groups = new ArrayList<>(0);

    // =========================================================================
    // Object Constructors
    // =========================================================================

    /**
     * Empty Constructor, to use if the setters are invoked. This is required
     * for WebServices to work properly.
     */
    public FetchGroupsForSharingResponse() {
        // Required for WebServices to work. Comment added to please Sonar.
    }

    /**
     * Default Constructor.
     *
     * @param groups Groups available for sharing; contents are copied into this response
     */
    public FetchGroupsForSharingResponse(final List<Group> groups) {
        setGroups(groups);
    }

    /**
     * Error Constructor.
     *
     * @param error   IWS Error Object
     * @param message Error Message
     */
    public FetchGroupsForSharingResponse(final IWSError error, final String message) {
        super(error, message);
    }

    // =========================================================================
    // Standard Setters & Getters
    // =========================================================================

    /**
     * Sets the Groups, replacing any previously stored content.
     * Bug fix: the previous implementation only appended to the internal list,
     * so invoking the setter more than once (or after the list constructor)
     * accumulated duplicate entries instead of replacing them.
     *
     * @param groups Groups to store; must not be null
     */
    public void setGroups(final List<Group> groups) {
        this.groups.clear();
        this.groups.addAll(groups);
    }

    /**
     * Returns an immutable view of the stored Groups.
     *
     * @return immutable List of Groups, possibly empty but never null
     */
    public List<Group> getGroups() {
        return immutableList(groups);
    }
}
| apache-2.0 |
Wenpei/incubator-systemml | src/main/java/org/apache/sysml/api/MLContext.java | 58727 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.api;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Scanner;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.api.jmlc.JMLCUtils;
import org.apache.sysml.api.monitoring.SparkMonitoringUtil;
import org.apache.sysml.conf.CompilerConfig;
import org.apache.sysml.conf.CompilerConfig.ConfigType;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.conf.DMLConfig;
import org.apache.sysml.hops.OptimizerUtils;
import org.apache.sysml.hops.OptimizerUtils.OptimizationLevel;
import org.apache.sysml.hops.globalopt.GlobalOptimizerWrapper;
import org.apache.sysml.hops.rewrite.ProgramRewriter;
import org.apache.sysml.hops.rewrite.RewriteRemovePersistentReadWrite;
import org.apache.sysml.parser.AParserWrapper;
import org.apache.sysml.parser.DMLProgram;
import org.apache.sysml.parser.DMLTranslator;
import org.apache.sysml.parser.DataExpression;
import org.apache.sysml.parser.Expression;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.parser.IntIdentifier;
import org.apache.sysml.parser.LanguageException;
import org.apache.sysml.parser.ParseException;
import org.apache.sysml.parser.StringIdentifier;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.LocalVariableMap;
import org.apache.sysml.runtime.controlprogram.Program;
import org.apache.sysml.runtime.controlprogram.caching.CacheableData;
import org.apache.sysml.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContextFactory;
import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
import org.apache.sysml.runtime.instructions.Instruction;
import org.apache.sysml.runtime.instructions.cp.Data;
import org.apache.sysml.runtime.instructions.spark.data.RDDObject;
import org.apache.sysml.runtime.instructions.spark.functions.ConvertStringToLongTextPair;
import org.apache.sysml.runtime.instructions.spark.functions.CopyBlockPairFunction;
import org.apache.sysml.runtime.instructions.spark.functions.CopyTextInputFunction;
import org.apache.sysml.runtime.instructions.spark.functions.SparkListener;
import org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.runtime.matrix.MatrixFormatMetaData;
import org.apache.sysml.runtime.matrix.data.CSVFileFormatProperties;
import org.apache.sysml.runtime.matrix.data.FileFormatProperties;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
import org.apache.sysml.runtime.matrix.data.OutputInfo;
import org.apache.sysml.utils.Explain;
import org.apache.sysml.utils.Explain.ExplainCounts;
import org.apache.sysml.utils.Statistics;
/**
* MLContext is useful for passing RDDs as input/output to SystemML. This API avoids the need to read/write
* from HDFS (which is another way to pass inputs to SystemML).
* <p>
* Typical usage for MLContext is as follows:
* <pre><code>
* scala> import org.apache.sysml.api.MLContext
* </code></pre>
* <p>
* Create input DataFrame from CSV file and potentially perform some feature transformation
* <pre><code>
* scala> val W = sqlContext.load("com.databricks.spark.csv", Map("path" -> "W.csv", "header" -> "false"))
* scala> val H = sqlContext.load("com.databricks.spark.csv", Map("path" -> "H.csv", "header" -> "false"))
* scala> val V = sqlContext.load("com.databricks.spark.csv", Map("path" -> "V.csv", "header" -> "false"))
* </code></pre>
* <p>
* Create MLContext
* <pre><code>
* scala> val ml = new MLContext(sc)
* </code></pre>
* <p>
* Register input and output DataFrame/RDD
* Supported format:
* <ol>
* <li> DataFrame
* <li> CSV/Text (as JavaRDD<String> or JavaPairRDD<LongWritable, Text>)
* <li> Binary blocked RDD (JavaPairRDD<MatrixIndexes,MatrixBlock>))
* </ol>
* Also overloaded to support metadata information such as format, rlen, clen, ...
* Please note the variable names given below in quotes correspond to the variables in DML script.
* These variables need to have corresponding read/write associated in DML script.
* Currently, only matrix variables are supported through registerInput/registerOutput interface.
* To pass scalar variables, use named/positional arguments (described later) or wrap them into matrix variable.
* <pre><code>
* scala> ml.registerInput("V", V)
* scala> ml.registerInput("W", W)
* scala> ml.registerInput("H", H)
* scala> ml.registerOutput("H")
* scala> ml.registerOutput("W")
* </code></pre>
* <p>
* Call script with default arguments:
* <pre><code>
* scala> val outputs = ml.execute("GNMF.dml")
* </code></pre>
* <p>
* Also supported: calling script with positional arguments (args) and named arguments (nargs):
* <pre><code>
* scala> val args = Array("V.mtx", "W.mtx", "H.mtx", "2000", "1500", "50", "1", "WOut.mtx", "HOut.mtx")
* scala> val nargs = Map("maxIter"->"1", "V" -> "")
* scala> val outputs = ml.execute("GNMF.dml", args) # or ml.execute("GNMF_namedArgs.dml", nargs)
* </code></pre>
* <p>
* To run the script again using different (or even same arguments), but using same registered input/outputs:
* <pre><code>
* scala> val new_outputs = ml.execute("GNMF.dml", new_args)
* </code></pre>
* <p>
* However, to register new input/outputs, you need to first reset MLContext
* <pre><code>
* scala> ml.reset()
* scala> ml.registerInput("V", newV)
* </code></pre>
* <p>
* Experimental API:
* To monitor performance (only supported for Spark 1.4.0 or higher),
* <pre><code>
* scala> val ml = new MLContext(sc, true)
* </code></pre>
* <p>
* If monitoring performance is enabled,
* <pre><code>
* scala> print(ml.getMonitoringUtil().getExplainOutput())
* scala> ml.getMonitoringUtil().getRuntimeInfoInHTML("runtime.html")
* </code></pre>
* <p>
 * Note: The execute(...) methods do not support parallel calls from the same or different MLContext.
* This is because current SystemML engine does not allow multiple invocation in same JVM.
* So, if you plan to create a system which potentially creates multiple MLContext,
* it is recommended to guard the execute(...) call using
* <pre><code>
* synchronized(MLContext.class) { ml.execute(...); }
* </code></pre>
*/
public class MLContext {
// ----------------------------------------------------
// TODO: To make MLContext multi-threaded, track getCurrentMLContext and also all singletons and
// static variables in SystemML codebase.
// The single context currently executing a script; null when no execute(...) is in flight.
private static MLContext _activeMLContext = null;
// Package protected so as to maintain a clean public API for MLContext.
// Use MLContextProxy.getActiveMLContext() if necessary
static MLContext getActiveMLContext() {
return _activeMLContext;
}
// ----------------------------------------------------
private SparkContext _sc = null; // Read while creating SystemML's spark context

/**
 * Returns the SparkContext this MLContext was constructed with.
 *
 * @return the associated SparkContext, never null
 * @throws IllegalStateException if no SparkContext has been set
 */
public SparkContext getSparkContext() {
    if (_sc == null) {
        // IllegalStateException is the idiomatic type for "object not initialized";
        // it subclasses RuntimeException, so existing callers catching RuntimeException still work.
        throw new IllegalStateException("No spark context set in MLContext");
    }
    return _sc;
}
// Names of variables registered as inputs via registerInput(...); lazily created.
private ArrayList<String> _inVarnames = null;
// Names of variables registered as outputs via registerOutput(...); lazily created.
private ArrayList<String> _outVarnames = null;
private LocalVariableMap _variables = null; // temporary symbol table
// Runtime program produced by the last compilation; reused across executions.
private Program _rtprog = null;
// User-supplied configuration overrides applied on top of the DML config (see setConfig).
private Map<String, String> _additionalConfigs = new HashMap<String, String>();
// --------------------------------------------------
// _monitorUtils is set only when MLContext(sc, true)
private SparkMonitoringUtil _monitorUtils = null;
/**
 * Experimental API. Not supported in Python MLContext API.
 * @return the monitoring utility, or null when monitoring was not enabled at construction
 */
public SparkMonitoringUtil getMonitoringUtil() {
return _monitorUtils;
}
// --------------------------------------------------
/**
 * Create an associated MLContext for the given Spark context.
 *
 * @param sc the SparkContext SystemML should execute on
 * @throws DMLRuntimeException if the Spark runtime cannot be initialized
 */
public MLContext(SparkContext sc) throws DMLRuntimeException {
initializeSpark(sc, false, false);
}
/**
 * Create an associated MLContext for the given Java Spark context.
 *
 * @param sc the JavaSparkContext whose underlying SparkContext SystemML should execute on
 * @throws DMLRuntimeException if the Spark runtime cannot be initialized
 */
public MLContext(JavaSparkContext sc) throws DMLRuntimeException {
initializeSpark(sc.sc(), false, false);
}
/**
 * Allow users to provide custom named-value configuration.
 * Entries are stored and applied on top of the DML configuration when a script is executed.
 *
 * @param paramName configuration parameter name
 * @param paramVal configuration parameter value
 */
public void setConfig(String paramName, String paramVal) {
_additionalConfigs.put(paramName, paramVal);
}
// ====================================================================================
// Register input APIs
// 1. DataFrame
/**
 * Register DataFrame as input. DataFrame is assumed to be in row format and each cell can be converted into double
 * through Double.parseDouble(cell.toString()). This is suitable for passing dense matrices. For sparse matrices,
 * consider passing through text format (using JavaRDD&lt;String&gt;, format="text")
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param df the input DataFrame (assumed to have no row-ID column)
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, DataFrame df) throws DMLRuntimeException {
registerInput(varName, df, false);
}
/**
 * Register DataFrame as input.
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param df the input DataFrame
 * @param containsID true if the DataFrame contains a column which denotes the row ID.
 *        NOTE(review): the original javadoc said "false if ..."; the flag is forwarded
 *        unchanged to dataFrameToBinaryBlock — confirm its polarity against that converter.
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, DataFrame df, boolean containsID) throws DMLRuntimeException {
MatrixCharacteristics mcOut = new MatrixCharacteristics();
JavaPairRDD<MatrixIndexes, MatrixBlock> rdd = RDDConverterUtilsExt.dataFrameToBinaryBlock(new JavaSparkContext(_sc), df, mcOut, containsID);
registerInput(varName, rdd, mcOut);
}
/**
 * Experimental API. Not supported in Python MLContext API.
 * Registers an MLMatrix by forwarding its lazily-materialized RDD and matrix characteristics.
 * @param varName variable name used in the DML script
 * @param df the MLMatrix to register
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, MLMatrix df) throws DMLRuntimeException {
registerInput(varName, MLMatrix.getRDDLazily(df), df.mc);
}
// ------------------------------------------------------------------------------------
// 2. CSV/Text: Usually JavaRDD<String>, but also supports JavaPairRDD<LongWritable, Text>
/**
 * Register CSV/Text as inputs: Method for supplying csv file format properties, but without dimensions or nnz
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param hasHeader whether the csv data contains a header row
 * @param delim csv field delimiter
 * @param fill whether missing csv values should be filled
 * @param fillValue value used when fill is enabled
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, JavaRDD<String> rdd, String format, boolean hasHeader,
String delim, boolean fill, double fillValue) throws DMLRuntimeException {
registerInput(varName, rdd, format, hasHeader, delim, fill, fillValue, -1, -1, -1);
}
/**
 * Register CSV/Text as inputs with csv file format properties but without dimensions or nnz.
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param hasHeader whether the csv data contains a header row
 * @param delim csv field delimiter
 * @param fill whether missing csv values should be filled
 * @param fillValue value used when fill is enabled
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, RDD<String> rdd, String format, boolean hasHeader,
        String delim, boolean fill, double fillValue) throws DMLRuntimeException {
    // Convert the Scala RDD once and reuse the metadata-free JavaRDD overload.
    JavaRDD<String> javaRdd = rdd.toJavaRDD();
    registerInput(varName, javaRdd, format, hasHeader, delim, fill, fillValue);
}
/**
 * Register CSV/Text as inputs: Method for supplying csv file format properties along with dimensions or nnz
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param hasHeader whether the csv data contains a header row
 * @param delim csv field delimiter
 * @param fill whether missing csv values should be filled
 * @param fillValue value used when fill is enabled
 * @param rlen number of rows (-1 if unknown)
 * @param clen number of columns (-1 if unknown)
 * @param nnz number of non-zeros (-1 if unknown)
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, RDD<String> rdd, String format, boolean hasHeader,
        String delim, boolean fill, double fillValue, long rlen, long clen, long nnz) throws DMLRuntimeException {
    // Bug fix: the supplied rlen/clen/nnz were previously discarded (hard-coded to -1),
    // silently dropping the caller's metadata. Forward them to the JavaRDD overload.
    registerInput(varName, rdd.toJavaRDD(), format, hasHeader, delim, fill, fillValue, rlen, clen, nnz);
}
/**
 * Register CSV/Text as inputs: Method for supplying csv file format properties along with dimensions or nnz
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param hasHeader whether the csv data contains a header row
 * @param delim csv field delimiter
 * @param fill whether missing csv values should be filled
 * @param fillValue value used when fill is enabled
 * @param rlen number of rows (-1 if unknown)
 * @param clen number of columns (-1 if unknown)
 * @param nnz number of non-zeros (-1 if unknown)
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, JavaRDD<String> rdd, String format, boolean hasHeader,
String delim, boolean fill, double fillValue, long rlen, long clen, long nnz) throws DMLRuntimeException {
// Bundle the csv properties and pair each line with a LongWritable key, as the
// shared private registerInput expects Hadoop text-input key/value pairs.
CSVFileFormatProperties props = new CSVFileFormatProperties(hasHeader, delim, fill, fillValue, "");
registerInput(varName, rdd.mapToPair(new ConvertStringToLongTextPair()), format, rlen, clen, nnz, props);
}
/**
 * Register CSV/Text as inputs without dimensions or nnz, using default file properties
 * (example: delim, fill, ..).
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, RDD<String> rdd, String format) throws DMLRuntimeException {
    // Reuse the JavaRDD overload, which applies the same defaults (-1 dims, no properties).
    registerInput(varName, rdd.toJavaRDD(), format);
}
/**
 * Register CSV/Text as inputs without dimensions or nnz, using default file properties
 * (example: delim, fill, ..).
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, JavaRDD<String> rdd, String format) throws DMLRuntimeException {
    // Pair each line with a LongWritable key before handing off to the shared registration path.
    JavaPairRDD<LongWritable, Text> textRdd = rdd.mapToPair(new ConvertStringToLongTextPair());
    registerInput(varName, textRdd, format, -1, -1, -1, null);
}
/**
 * Register CSV/Text as inputs: Convenience method with dimensions and but no nnz. It uses default file properties (example: delim, fill, ..)
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param rlen number of rows
 * @param clen number of columns
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, JavaRDD<String> rdd, String format, long rlen, long clen) throws DMLRuntimeException {
registerInput(varName, rdd.mapToPair(new ConvertStringToLongTextPair()), format, rlen, clen, -1, null);
}
/**
 * Register CSV/Text as inputs with dimensions but no nnz, using default file properties
 * (example: delim, fill, ..).
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param rlen number of rows
 * @param clen number of columns
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, RDD<String> rdd, String format, long rlen, long clen) throws DMLRuntimeException {
    // Same behavior as the JavaRDD overload; convert once and delegate.
    registerInput(varName, rdd.toJavaRDD(), format, rlen, clen);
}
/**
 * Register CSV/Text as inputs: with dimensions and nnz. It uses default file properties (example: delim, fill, ..)
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param rlen number of rows
 * @param clen number of columns
 * @param nnz number of non-zeros (-1 if unknown)
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, JavaRDD<String> rdd, String format, long rlen, long clen, long nnz) throws DMLRuntimeException {
registerInput(varName, rdd.mapToPair(new ConvertStringToLongTextPair()), format, rlen, clen, nnz, null);
}
/**
 * Register CSV/Text as inputs with dimensions and nnz, using default file properties
 * (example: delim, fill, ..).
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd one line of the text/csv file per element
 * @param format "csv" or "text"
 * @param rlen number of rows
 * @param clen number of columns
 * @param nnz number of non-zeros (-1 if unknown)
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, RDD<String> rdd, String format, long rlen, long clen, long nnz) throws DMLRuntimeException {
    // Same behavior as the JavaRDD overload; convert once and delegate.
    registerInput(varName, rdd.toJavaRDD(), format, rlen, clen, nnz);
}
// All CSV related methods call this ... It provides access to dimensions, nnz, file properties.
// Creates a MatrixObject with the appropriate input/output info for the format, attaches the
// RDD handle, and records the variable in the temporary symbol table and input-name list.
private void registerInput(String varName, JavaPairRDD<LongWritable, Text> textOrCsv_rdd, String format, long rlen, long clen, long nnz, FileFormatProperties props) throws DMLRuntimeException {
// Text/csv registration is only meaningful on the Spark (or hybrid-Spark) runtime.
if(!(DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK || DMLScript.rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)) {
throw new DMLRuntimeException("The registerInput functionality only supported for spark runtime. Please use MLContext(sc) instead of default constructor.");
}
// Lazily create the symbol table and input-name list on first registration.
if(_variables == null)
_variables = new LocalVariableMap();
if(_inVarnames == null)
_inVarnames = new ArrayList<String>();
MatrixObject mo;
if( format.equals("csv") ) {
//TODO replace default block size
// csv allows unknown dimensions (rlen/clen may be -1); they are inferred at read time.
MatrixCharacteristics mc = new MatrixCharacteristics(rlen, clen, OptimizerUtils.DEFAULT_BLOCKSIZE, OptimizerUtils.DEFAULT_BLOCKSIZE, nnz);
mo = new MatrixObject(ValueType.DOUBLE, null, new MatrixFormatMetaData(mc, OutputInfo.CSVOutputInfo, InputInfo.CSVInputInfo));
}
else if( format.equals("text") ) {
// text-cell format cannot infer dimensions, so they are mandatory here.
if(rlen == -1 || clen == -1) {
throw new DMLRuntimeException("The metadata is required in registerInput for format:" + format);
}
//TODO replace default block size
MatrixCharacteristics mc = new MatrixCharacteristics(rlen, clen, OptimizerUtils.DEFAULT_BLOCKSIZE, OptimizerUtils.DEFAULT_BLOCKSIZE, nnz);
mo = new MatrixObject(ValueType.DOUBLE, null, new MatrixFormatMetaData(mc, OutputInfo.TextCellOutputInfo, InputInfo.TextCellInputInfo));
}
else if( format.equals("mm") ) {
// TODO: Handle matrix market
throw new DMLRuntimeException("Matrixmarket format is not yet implemented in registerInput: " + format);
}
else {
throw new DMLRuntimeException("Incorrect format in registerInput: " + format);
}
// Defensive copy of key/value pairs (Hadoop input formats reuse Writable instances).
JavaPairRDD<LongWritable, Text> rdd = textOrCsv_rdd.mapToPair(new CopyTextInputFunction());
if(props != null)
mo.setFileFormatProperties(props);
mo.setRDDHandle(new RDDObject(rdd, varName));
_variables.put(varName, mo);
_inVarnames.add(varName);
checkIfRegisteringInputAllowed();
}
// ------------------------------------------------------------------------------------
// 3. Binary blocked RDD: Support JavaPairRDD<MatrixIndexes,MatrixBlock>
/**
 * Register binary blocked RDD with given dimensions, default block sizes and no nnz
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd binary blocked matrix data
 * @param rlen number of rows
 * @param clen number of columns
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, JavaPairRDD<MatrixIndexes,MatrixBlock> rdd, long rlen, long clen) throws DMLRuntimeException {
//TODO replace default blocksize
registerInput(varName, rdd, rlen, clen, OptimizerUtils.DEFAULT_BLOCKSIZE, OptimizerUtils.DEFAULT_BLOCKSIZE);
}
/**
 * Register binary blocked RDD with given dimensions, given block sizes and no nnz
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd binary blocked matrix data
 * @param rlen number of rows
 * @param clen number of columns
 * @param brlen number of rows per block
 * @param bclen number of columns per block
 * @throws DMLRuntimeException if the input cannot be registered
 */
public void registerInput(String varName, JavaPairRDD<MatrixIndexes,MatrixBlock> rdd, long rlen, long clen, int brlen, int bclen) throws DMLRuntimeException {
registerInput(varName, rdd, rlen, clen, brlen, bclen, -1);
}
/**
 * Register binary blocked RDD with given dimensions, given block sizes and given nnz (preferred).
 * <p>
 * Marks the variable in the DML script as input variable.
 * Note that this expects a "varName = read(...)" statement in the DML script which through non-MLContext invocation
 * would have been created by reading a HDFS file.
 * @param varName variable name used in the DML script
 * @param rdd binary blocked matrix data
 * @param rlen number of rows (must be known, i.e. not -1)
 * @param clen number of columns (must be known, i.e. not -1)
 * @param brlen number of rows per block
 * @param bclen number of columns per block
 * @param nnz number of non-zeros (-1 if unknown)
 * @throws DMLRuntimeException if dimensions are unknown or the input cannot be registered
 */
public void registerInput(String varName, JavaPairRDD<MatrixIndexes,MatrixBlock> rdd, long rlen, long clen, int brlen, int bclen, long nnz) throws DMLRuntimeException {
// Binary blocked data cannot infer its dimensions; they must be supplied.
if(rlen == -1 || clen == -1) {
throw new DMLRuntimeException("The metadata is required in registerInput for binary format");
}
MatrixCharacteristics mc = new MatrixCharacteristics(rlen, clen, brlen, bclen, nnz);
registerInput(varName, rdd, mc);
}
// All binary blocked method call this.
// NOTE(review): unlike the text/csv path, this method does not explicitly verify the
// Spark runtime platform up front — presumably checkIfRegisteringInputAllowed() covers
// that; confirm, otherwise a misleading error may surface later.
public void registerInput(String varName, JavaPairRDD<MatrixIndexes,MatrixBlock> rdd, MatrixCharacteristics mc) throws DMLRuntimeException {
// Lazily create the symbol table and input-name list on first registration.
if(_variables == null)
_variables = new LocalVariableMap();
if(_inVarnames == null)
_inVarnames = new ArrayList<String>();
// Bug in Spark is messing up blocks and indexes due to too eager reuse of data structures
JavaPairRDD<MatrixIndexes, MatrixBlock> copyRDD = rdd.mapToPair( new CopyBlockPairFunction() );
MatrixObject mo = new MatrixObject(ValueType.DOUBLE, "temp", new MatrixFormatMetaData(mc, OutputInfo.BinaryBlockOutputInfo, InputInfo.BinaryBlockInputInfo));
mo.setRDDHandle(new RDDObject(copyRDD, varName));
_variables.put(varName, mo);
_inVarnames.add(varName);
checkIfRegisteringInputAllowed();
}
// =============================================================================================
/**
 * Marks the variable in the DML script as output variable.
 * Note that this expects a "write(varName, ...)" statement in the DML script which through non-MLContext invocation
 * would have written the matrix to HDFS.
 * @param varName variable name used in the DML script's write(...) statement
 * @throws DMLRuntimeException if the runtime platform is not Spark or hybrid-Spark
 */
public void registerOutput(String varName) throws DMLRuntimeException {
// Output registration is only meaningful on the Spark (or hybrid-Spark) runtime.
if(!(DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK || DMLScript.rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK)) {
throw new DMLRuntimeException("The registerOutput functionality only supported for spark runtime. Please use MLContext(sc) instead of default constructor.");
}
if(_outVarnames == null)
_outVarnames = new ArrayList<String>();
_outVarnames.add(varName);
// Ensure a symbol table exists so results can be captured after execution.
if(_variables == null)
_variables = new LocalVariableMap();
}
// =============================================================================================
/**
 * Execute DML script by passing named arguments using specified config file.
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param namedArgs named arguments passed to the script
 * @param parsePyDML true to parse the script as PyDML
 * @param configFilePath path to a SystemML configuration file, or null for defaults
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, Map<String, String> namedArgs, boolean parsePyDML, String configFilePath) throws IOException, DMLException, ParseException {
    String[] args = new String[namedArgs.size()];
    int idx = 0;
    for (Entry<String, String> entry : namedArgs.entrySet()) {
        String value = entry.getValue();
        // Empty values are quoted so the argument parser still sees an explicit (empty) value.
        args[idx++] = value.trim().isEmpty()
                ? entry.getKey() + "=\"" + value + "\""
                : entry.getKey() + "=" + value;
    }
    return compileAndExecuteScript(dmlScriptFilePath, args, true, parsePyDML, configFilePath);
}
/**
 * Execute DML script by passing named arguments using specified config file.
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param namedArgs named arguments passed to the script
 * @param configFilePath path to a SystemML configuration file, or null for defaults
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, Map<String, String> namedArgs, String configFilePath) throws IOException, DMLException, ParseException {
    // Previously this duplicated the argument-building loop of the 4-arg overload
    // verbatim; delegate instead so the quoting logic lives in exactly one place.
    return execute(dmlScriptFilePath, namedArgs, false, configFilePath);
}
/**
 * Execute DML script by passing named arguments with default configuration.
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param namedArgs named arguments passed to the script
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, Map<String, String> namedArgs) throws IOException, DMLException, ParseException {
return execute(dmlScriptFilePath, namedArgs, false, null);
}
/**
 * Execute DML script by passing named arguments (Scala-friendly overload).
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param namedArgs named arguments as an immutable Scala map; converted to a Java map
 * @return the execution output
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, scala.collection.immutable.Map<String, String> namedArgs) throws IOException, DMLException, ParseException {
return execute(dmlScriptFilePath, new HashMap<String, String>(scala.collection.JavaConversions.mapAsJavaMap(namedArgs)));
}
/**
 * Experimental: Execute PyDML script by passing named arguments if parsePyDML=true.
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param namedArgs named arguments passed to the script
 * @param parsePyDML true to parse the script as PyDML
 * @return the execution output
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, Map<String, String> namedArgs, boolean parsePyDML) throws IOException, DMLException, ParseException {
return execute(dmlScriptFilePath, namedArgs, parsePyDML, null);
}
/**
 * Experimental: Execute PyDML script by passing named arguments if parsePyDML=true (Scala-friendly overload).
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param namedArgs named arguments as an immutable Scala map; converted to a Java map
 * @param parsePyDML true to parse the script as PyDML
 * @return the execution output
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, scala.collection.immutable.Map<String, String> namedArgs, boolean parsePyDML) throws IOException, DMLException, ParseException {
return execute(dmlScriptFilePath, new HashMap<String, String>(scala.collection.JavaConversions.mapAsJavaMap(namedArgs)), parsePyDML);
}
/**
 * Execute DML script by passing positional arguments using specified config file
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param args positional arguments passed to the script
 * @param configFilePath path to a SystemML configuration file, or null for defaults
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, String [] args, String configFilePath) throws IOException, DMLException, ParseException {
return execute(dmlScriptFilePath, args, false, configFilePath);
}
/**
 * Execute DML script by passing positional arguments using specified config file.
 * This method is implemented for compatibility with Python MLContext.
 * Java/Scala users should use 'MLOutput execute(String dmlScriptFilePath, String [] args, String configFilePath)' instead as
 * equivalent scala collections (Seq/ArrayBuffer) is not implemented.
 * @param dmlScriptFilePath the dml script can be in local filesystem or in HDFS
 * @param args positional arguments passed to the script
 * @param configFilePath path to a SystemML configuration file, or null for defaults
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, ArrayList<String> args, String configFilePath) throws IOException, DMLException, ParseException {
    // toArray with a zero-length array yields a correctly-sized String[].
    return execute(dmlScriptFilePath, args.toArray(new String[0]), false, configFilePath);
}
/**
 * Executes a DML script with positional arguments, using the default
 * SystemML configuration.
 * @param dmlScriptFilePath path of the DML script
 * @param args positional arguments ($1, $2, ...)
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, String [] args) throws IOException, DMLException, ParseException {
    // No PyDML parsing; null config selects the defaults.
    final String configFilePath = null;
    return execute(dmlScriptFilePath, args, false, configFilePath);
}
/**
 * Executes a DML script with positional arguments, using the default
 * SystemML configuration.
 * This method is implemented for compatibility with Python MLContext.
 * Java/Scala users should use 'MLOutput execute(String dmlScriptFilePath, String [] args)' instead as
 * equivalent scala collections (Seq/ArrayBuffer) is not implemented.
 * @param dmlScriptFilePath path of the DML script
 * @param args positional arguments ($1, $2, ...)
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, ArrayList<String> args) throws IOException, DMLException, ParseException {
    // Idiomatic single-expression copy instead of the redundant
    // allocate-then-toArray two-step.
    String[] argsArr = args.toArray(new String[args.size()]);
    return execute(dmlScriptFilePath, argsArr, false, null);
}
/**
 * Experimental: Executes a script with positional arguments, parsed as PyDML
 * when parsePyDML=true, using the default configuration.
 * This method is implemented for compatibility with Python MLContext.
 * Java/Scala users should use 'MLOutput execute(String dmlScriptFilePath, String [] args, boolean parsePyDML)' instead as
 * equivalent scala collections (Seq/ArrayBuffer) is not implemented.
 * @param dmlScriptFilePath path of the DML/PyDML script
 * @param args positional arguments ($1, $2, ...)
 * @param parsePyDML true to parse the script as PyDML
 * @return output variables registered via registerOutput
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, ArrayList<String> args, boolean parsePyDML) throws IOException, DMLException, ParseException {
    // Idiomatic single-expression copy instead of the redundant
    // allocate-then-toArray two-step.
    String[] argsArr = args.toArray(new String[args.size()]);
    return execute(dmlScriptFilePath, argsArr, parsePyDML, null);
}
/**
 * Experimental: Executes a script with positional arguments, parsed as PyDML
 * when parsePyDML=true, using the specified configuration file.
 * This method is implemented for compatibility with Python MLContext.
 * Java/Scala users should use 'MLOutput execute(String dmlScriptFilePath, String [] args, boolean parsePyDML, String configFilePath)' instead as
 * equivalent scala collections (Seq/ArrayBuffer) is not implemented.
 * @param dmlScriptFilePath path of the DML/PyDML script
 * @param args positional arguments ($1, $2, ...)
 * @param parsePyDML true to parse the script as PyDML
 * @param configFilePath SystemML configuration file path
 * @return output variables registered via registerOutput
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, ArrayList<String> args, boolean parsePyDML, String configFilePath) throws IOException, DMLException, ParseException {
    // Idiomatic single-expression copy instead of the redundant
    // allocate-then-toArray two-step.
    String[] argsArr = args.toArray(new String[args.size()]);
    return execute(dmlScriptFilePath, argsArr, parsePyDML, configFilePath);
}
/**
 * Experimental: Executes a script with positional arguments, parsed as PyDML
 * when parsePyDML=true, using the specified configuration file.
 * @param dmlScriptFilePath path of the DML/PyDML script
 * @param args positional arguments ($1, $2, ...)
 * @param parsePyDML true to parse the script as PyDML
 * @param configFilePath SystemML configuration file path
 * @return output variables registered via registerOutput
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, String [] args, boolean parsePyDML, String configFilePath) throws IOException, DMLException, ParseException {
    // Positional (not named) argument mode.
    final boolean isNamedArgument = false;
    return compileAndExecuteScript(dmlScriptFilePath, args, isNamedArgument, parsePyDML, configFilePath);
}
/**
 * Experimental: Executes a script with positional arguments, parsed as PyDML
 * when parsePyDML=true, using the default configuration.
 * @param dmlScriptFilePath path of the DML/PyDML script
 * @param args positional arguments ($1, $2, ...)
 * @param parsePyDML true to parse the script as PyDML
 * @return output variables registered via registerOutput
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, String [] args, boolean parsePyDML) throws IOException, DMLException, ParseException {
    // Null config selects the default configuration.
    final String configFilePath = null;
    return execute(dmlScriptFilePath, args, parsePyDML, configFilePath);
}
/**
 * Executes a DML script without arguments, using the specified configuration
 * file.
 * @param dmlScriptFilePath path of the DML script
 * @param configFilePath SystemML configuration file path
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, String configFilePath) throws IOException, DMLException, ParseException {
    // DML (not PyDML) parsing.
    final boolean parsePyDML = false;
    return execute(dmlScriptFilePath, parsePyDML, configFilePath);
}
/**
 * Executes a DML script without arguments, using the default configuration.
 * @param dmlScriptFilePath path of the DML script
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath) throws IOException, DMLException, ParseException {
    // No PyDML parsing; null config selects the defaults.
    final String configFilePath = null;
    return execute(dmlScriptFilePath, false, configFilePath);
}
/**
 * Experimental: Executes a script without arguments, parsed as PyDML when
 * parsePyDML=true, using the specified configuration file.
 * @param dmlScriptFilePath path of the DML/PyDML script
 * @param parsePyDML true to parse the script as PyDML
 * @param configFilePath SystemML configuration file path
 * @return output variables registered via registerOutput
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, boolean parsePyDML, String configFilePath) throws IOException, DMLException, ParseException {
    // No script arguments; positional-argument mode (isNamedArgument == false).
    final String[] noArgs = null;
    return compileAndExecuteScript(dmlScriptFilePath, noArgs, false, parsePyDML, configFilePath);
}
/**
 * Experimental: Executes a script without arguments, parsed as PyDML when
 * parsePyDML=true, using the default configuration.
 * @param dmlScriptFilePath path of the DML/PyDML script
 * @param parsePyDML true to parse the script as PyDML
 * @return output variables registered via registerOutput
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLOutput execute(String dmlScriptFilePath, boolean parsePyDML) throws IOException, DMLException, ParseException {
    // Null config selects the default configuration.
    final String configFilePath = null;
    return execute(dmlScriptFilePath, parsePyDML, configFilePath);
}
// -------------------------------- Utility methods begins ----------------------------------------------------------
/**
 * Clears any RDDs registered via registerInput/registerOutput.
 * This is required if ml.execute(..) has been called earlier and you want to
 * run a new DML script.
 * Note: by default this does not clear configuration set using the setConfig
 * method. To clear the configuration along with registered inputs/outputs,
 * please use reset(true).
 * @throws DMLRuntimeException
 */
public void reset()
        throws DMLRuntimeException
{
    reset(false);
}
/**
 * Clears registered input/output variable names and the cached symbol table;
 * when {@code cleanupConfig} is true, also drops values set through setConfig.
 * @param cleanupConfig true to additionally clear setConfig values
 * @throws DMLRuntimeException
 */
public void reset(boolean cleanupConfig)
        throws DMLRuntimeException
{
    //cleanup variables from bufferpool, incl evicted files
    //(otherwise memory leak because bufferpool holds references)
    CacheableData.cleanupCacheDir();
    //clear mlcontext state
    _inVarnames = null;
    _outVarnames = null;
    _variables = null;
    if(cleanupConfig)
        _additionalConfigs.clear();
}
/**
 * Used internally: when the target of a read() expression is a variable that
 * was registered via registerInput, disables metadata-file checking on the
 * expression and injects dimension/format parameters from the already-bound
 * MatrixObject, so the parser does not look for metadata files on disk.
 * @param source the parsed right-hand-side expression
 * @param target the left-hand-side variable name
 * @throws LanguageException
 */
void setAppropriateVarsForRead(Expression source, String target)
        throws LanguageException
{
    boolean isTargetRegistered = isRegisteredAsInput(target);
    boolean isReadExpression = (source instanceof DataExpression && ((DataExpression) source).isRead());
    if(isTargetRegistered && isReadExpression) {
        // Do not check metadata file for registered reads
        ((DataExpression) source).setCheckMetadata(false);
        MatrixObject mo = null;
        try {
            mo = getMatrixObject(target);
            // Source position of the expression, propagated into every injected identifier.
            int blp = source.getBeginLine(); int bcp = source.getBeginColumn();
            int elp = source.getEndLine(); int ecp = source.getEndColumn();
            // Inject dimensions, nnz, and data/value type from the bound matrix object.
            ((DataExpression) source).addVarParam(DataExpression.READROWPARAM, new IntIdentifier(mo.getNumRows(), source.getFilename(), blp, bcp, elp, ecp));
            ((DataExpression) source).addVarParam(DataExpression.READCOLPARAM, new IntIdentifier(mo.getNumColumns(), source.getFilename(), blp, bcp, elp, ecp));
            ((DataExpression) source).addVarParam(DataExpression.READNUMNONZEROPARAM, new IntIdentifier(mo.getNnz(), source.getFilename(), blp, bcp, elp, ecp));
            ((DataExpression) source).addVarParam(DataExpression.DATATYPEPARAM, new StringIdentifier("matrix", source.getFilename(), blp, bcp, elp, ecp));
            ((DataExpression) source).addVarParam(DataExpression.VALUETYPEPARAM, new StringIdentifier("double", source.getFilename(), blp, bcp, elp, ecp));
            // Translate the object's output format into the matching read() format parameter.
            if(mo.getMetaData() instanceof MatrixFormatMetaData) {
                MatrixFormatMetaData metaData = (MatrixFormatMetaData) mo.getMetaData();
                if(metaData.getOutputInfo() == OutputInfo.CSVOutputInfo) {
                    ((DataExpression) source).addVarParam(DataExpression.FORMAT_TYPE, new StringIdentifier(DataExpression.FORMAT_TYPE_VALUE_CSV, source.getFilename(), blp, bcp, elp, ecp));
                }
                else if(metaData.getOutputInfo() == OutputInfo.TextCellOutputInfo) {
                    ((DataExpression) source).addVarParam(DataExpression.FORMAT_TYPE, new StringIdentifier(DataExpression.FORMAT_TYPE_VALUE_TEXT, source.getFilename(), blp, bcp, elp, ecp));
                }
                else if(metaData.getOutputInfo() == OutputInfo.BinaryBlockOutputInfo) {
                    // Binary block additionally requires the per-block dimensions.
                    ((DataExpression) source).addVarParam(DataExpression.ROWBLOCKCOUNTPARAM, new IntIdentifier(mo.getNumRowsPerBlock(), source.getFilename(), blp, bcp, elp, ecp));
                    ((DataExpression) source).addVarParam(DataExpression.COLUMNBLOCKCOUNTPARAM, new IntIdentifier(mo.getNumColumnsPerBlock(), source.getFilename(), blp, bcp, elp, ecp));
                    ((DataExpression) source).addVarParam(DataExpression.FORMAT_TYPE, new StringIdentifier(DataExpression.FORMAT_TYPE_VALUE_BINARY, source.getFilename(), blp, bcp, elp, ecp));
                }
                else {
                    throw new LanguageException("Unsupported format through MLContext");
                }
            }
        } catch (DMLRuntimeException e) {
            throw new LanguageException(e);
        }
    }
}
/**
 * Used internally: strips instructions targeting the registered output
 * variables from a recompiled instruction list.
 * @param tmp instructions produced by runtime recompilation
 * @return the cleaned-up instruction list
 */
ArrayList<Instruction> performCleanupAfterRecompilation(ArrayList<Instruction> tmp) {
    String[] outputs;
    if(_outVarnames != null) {
        outputs = _outVarnames.toArray(new String[0]);
    }
    else {
        outputs = new String[0];
    }
    return JMLCUtils.cleanupRuntimeInstructions(tmp, outputs);
}
// -------------------------------- Utility methods ends ----------------------------------------------------------
// -------------------------------- Experimental API begins ----------------------------------------------------------
/**
* Experimental api:
* Setting monitorPerformance to true adds additional overhead of storing state. So, use it only if necessary.
* @param sc
* @param monitorPerformance
* @throws DMLRuntimeException
*/
public MLContext(SparkContext sc, boolean monitorPerformance) throws DMLRuntimeException {
initializeSpark(sc, monitorPerformance, false);
}
/**
* Experimental api:
* Setting monitorPerformance to true adds additional overhead of storing state. So, use it only if necessary.
* @param sc
* @param monitorPerformance
* @throws DMLRuntimeException
*/
public MLContext(JavaSparkContext sc, boolean monitorPerformance) throws DMLRuntimeException {
initializeSpark(sc.sc(), monitorPerformance, false);
}
/**
 * Experimental api:
 * Setting monitorPerformance to true adds additional overhead of storing state. So, use it only if necessary.
 * @param sc the SparkContext to attach to
 * @param monitorPerformance true to collect per-execution monitoring data
 * @param setForcedSparkExecType true to force pure SPARK execution instead of the hybrid runtime
 * @throws DMLRuntimeException
 */
public MLContext(SparkContext sc, boolean monitorPerformance, boolean setForcedSparkExecType) throws DMLRuntimeException {
    initializeSpark(sc, monitorPerformance, setForcedSparkExecType);
}
/**
* Experimental api:
* Setting monitorPerformance to true adds additional overhead of storing state. So, use it only if necessary.
* @param sc
* @param monitorPerformance
* @param setForcedSparkExecType
* @throws DMLRuntimeException
*/
public MLContext(JavaSparkContext sc, boolean monitorPerformance, boolean setForcedSparkExecType) throws DMLRuntimeException {
initializeSpark(sc.sc(), monitorPerformance, setForcedSparkExecType);
}
// -------------------------------- Experimental API ends ----------------------------------------------------------
// -------------------------------- Private methods begins ----------------------------------------------------------
/** Returns true when {@code varName} was registered as an input variable. */
private boolean isRegisteredAsInput(String varName) {
    // Guard clause: nothing registered yet.
    if(_inVarnames == null) {
        return false;
    }
    for(String registered : _inVarnames) {
        if(registered.equals(varName)) {
            return true;
        }
    }
    return false;
}
/**
 * Looks up {@code varName} in the current symbol table and returns it as a
 * MatrixObject; throws when the table is absent or the entry has a
 * different type (including an absent entry, which fails the type check).
 */
private MatrixObject getMatrixObject(String varName) throws DMLRuntimeException {
    if(_variables == null) {
        throw new DMLRuntimeException("ERROR: getMatrixObject not set for variable:" + varName);
    }
    Data mo = _variables.get(varName);
    if(!(mo instanceof MatrixObject)) {
        throw new DMLRuntimeException("ERROR: Incorrect type");
    }
    return (MatrixObject) mo;
}
/**
 * Numerically compares two dot-separated version strings component by
 * component (e.g. "1.3.0" vs "1.4").
 * @param versionStr1 first version string
 * @param versionStr2 second version string
 * @return -1, 0, or 1 when versionStr1 is respectively lower than, equal to,
 *         or greater than versionStr2
 */
private int compareVersion(String versionStr1, String versionStr2) {
    Scanner s1 = null;
    Scanner s2 = null;
    try {
        s1 = new Scanner(versionStr1); s1.useDelimiter("\\.");
        s2 = new Scanner(versionStr2); s2.useDelimiter("\\.");
        while(s1.hasNextInt() && s2.hasNextInt()) {
            int version1 = s1.nextInt();
            int version2 = s2.nextInt();
            if(version1 < version2) {
                return -1;
            } else if(version1 > version2) {
                return 1;
            }
        }
        // A version with remaining numeric components is the greater one,
        // e.g. "1.3.5" > "1.3". The s2 branch was previously missing, which
        // made the comparison asymmetric: compareVersion("1.3", "1.3.5")
        // returned 0 while the reversed call returned 1.
        if(s1.hasNextInt()) return 1;
        if(s2.hasNextInt()) return -1;
    }
    finally {
        if(s1 != null) s1.close();
        if(s2 != null) s2.close();
    }
    return 0;
}
/**
 * Common constructor body: activates the MLContext proxy, stores the Spark
 * context, validates the minimum Spark version, selects the runtime platform,
 * and optionally installs the performance-monitoring listener.
 * @param sc the SparkContext to attach to
 * @param monitorPerformance true to install the monitoring SparkListener (requires Spark >= 1.4.0)
 * @param setForcedSparkExecType true for pure SPARK execution, false for HYBRID_SPARK
 * @throws DMLRuntimeException when the Spark version is below 1.3.0
 */
private void initializeSpark(SparkContext sc, boolean monitorPerformance, boolean setForcedSparkExecType) throws DMLRuntimeException {
    // Mark MLContext as active process-wide before any other setup.
    MLContextProxy.setActive(true);
    this._sc = sc;
    if(compareVersion(sc.version(), "1.3.0") < 0 ) {
        throw new DMLRuntimeException("Expected spark version >= 1.3.0 for running SystemML");
    }
    // Note: mutates the global DMLScript.rtplatform for the whole process.
    if(setForcedSparkExecType)
        DMLScript.rtplatform = RUNTIME_PLATFORM.SPARK;
    else
        DMLScript.rtplatform = RUNTIME_PLATFORM.HYBRID_SPARK;
    if(monitorPerformance) {
        initializeSparkListener(sc);
    }
}
/**
 * Installs the performance-monitoring SparkListener on the given context;
 * requires Spark 1.4.0 or newer.
 */
private void initializeSparkListener(SparkContext sc) throws DMLRuntimeException {
    if(compareVersion(sc.version(), "1.4.0") < 0 ) {
        throw new DMLRuntimeException("Expected spark version >= 1.4.0 for monitoring MLContext performance");
    }
    SparkListener listener = new SparkListener(sc);
    sc.addSparkListener(listener);
    _monitorUtils = new SparkMonitoringUtil(listener);
}
/**
 * Executes a DML script given as a string (not a file path).
 * @param dmlScript the script source text
 * @return output variables registered via registerOutput
 * @throws IOException
 * @throws DMLException
 */
public MLOutput executeScript(String dmlScript)
        throws IOException, DMLException {
    // DML (not PyDML) parsing.
    final boolean isPyDML = false;
    return executeScript(dmlScript, isPyDML);
}
/** Executes a script string, parsed as PyDML when {@code isPyDML} is true. */
public MLOutput executeScript(String dmlScript, boolean isPyDML)
        throws IOException, DMLException {
    // Null config selects the default configuration.
    final String configFilePath = null;
    return executeScript(dmlScript, isPyDML, configFilePath);
}
/** Executes a DML script string with the specified configuration file. */
public MLOutput executeScript(String dmlScript, String configFilePath)
        throws IOException, DMLException {
    // DML (not PyDML) parsing.
    final boolean isPyDML = false;
    return executeScript(dmlScript, isPyDML, configFilePath);
}
/**
 * Executes a script string with the specified configuration file, parsed as
 * PyDML when {@code isPyDML} is true.
 */
public MLOutput executeScript(String dmlScript, boolean isPyDML, String configFilePath)
        throws IOException, DMLException {
    // The script is inline text, not a file; no script arguments are passed.
    final boolean isFile = false;
    final boolean isNamedArgument = false;
    return compileAndExecuteScript(dmlScript, null, isFile, isNamedArgument, isPyDML, configFilePath);
}
/** Scala-friendly variant: executes a script string with named arguments. */
public MLOutput executeScript(String dmlScript, scala.collection.immutable.Map<String, String> namedArgs)
        throws IOException, DMLException {
    // Bridge the Scala map into a mutable java.util.Map before delegating.
    HashMap<String, String> javaArgs = new HashMap<String, String>(scala.collection.JavaConversions.mapAsJavaMap(namedArgs));
    final String configFilePath = null;
    return executeScript(dmlScript, javaArgs, configFilePath);
}
/**
 * Scala-friendly variant: executes a script string with named arguments,
 * parsed as PyDML when {@code isPyDML} is true.
 */
public MLOutput executeScript(String dmlScript, scala.collection.immutable.Map<String, String> namedArgs, boolean isPyDML)
        throws IOException, DMLException {
    // Bridge the Scala map into a mutable java.util.Map before delegating.
    HashMap<String, String> javaArgs = new HashMap<String, String>(scala.collection.JavaConversions.mapAsJavaMap(namedArgs));
    final String configFilePath = null;
    return executeScript(dmlScript, javaArgs, isPyDML, configFilePath);
}
/**
 * Scala-friendly variant: executes a script string with named arguments and
 * the specified configuration file.
 */
public MLOutput executeScript(String dmlScript, scala.collection.immutable.Map<String, String> namedArgs, String configFilePath)
        throws IOException, DMLException {
    // Bridge the Scala map into a mutable java.util.Map before delegating.
    HashMap<String, String> javaArgs = new HashMap<String, String>(scala.collection.JavaConversions.mapAsJavaMap(namedArgs));
    return executeScript(dmlScript, javaArgs, configFilePath);
}
/**
 * Scala-friendly variant: executes a script string with named arguments and
 * the specified configuration file, parsed as PyDML when {@code isPyDML} is true.
 */
public MLOutput executeScript(String dmlScript, scala.collection.immutable.Map<String, String> namedArgs, boolean isPyDML, String configFilePath)
        throws IOException, DMLException {
    // Bridge the Scala map into a mutable java.util.Map before delegating.
    HashMap<String, String> javaArgs = new HashMap<String, String>(scala.collection.JavaConversions.mapAsJavaMap(namedArgs));
    return executeScript(dmlScript, javaArgs, isPyDML, configFilePath);
}
/** Executes a script string with named arguments, using default configuration. */
public MLOutput executeScript(String dmlScript, Map<String, String> namedArgs)
        throws IOException, DMLException {
    final String configFilePath = null;
    return executeScript(dmlScript, namedArgs, configFilePath);
}
/**
 * Executes a script string with named arguments, parsed as PyDML when
 * {@code isPyDML} is true, using the default configuration.
 */
public MLOutput executeScript(String dmlScript, Map<String, String> namedArgs, boolean isPyDML)
        throws IOException, DMLException {
    final String configFilePath = null;
    return executeScript(dmlScript, namedArgs, isPyDML, configFilePath);
}
/**
 * Executes a script string with named arguments and the specified
 * configuration file.
 */
public MLOutput executeScript(String dmlScript, Map<String, String> namedArgs, String configFilePath)
        throws IOException, DMLException {
    // DML (not PyDML) parsing.
    final boolean isPyDML = false;
    return executeScript(dmlScript, namedArgs, isPyDML, configFilePath);
}
/**
 * Executes a script string with named arguments: flattens the map into
 * "key=value" tokens (empty values are wrapped in quotes so the argument
 * parser does not drop them) and runs the script in named-argument mode.
 */
public MLOutput executeScript(String dmlScript, Map<String, String> namedArgs, boolean isPyDML, String configFilePath)
        throws IOException, DMLException {
    String[] args = new String[namedArgs.size()];
    int idx = 0;
    for(Entry<String, String> entry : namedArgs.entrySet()) {
        String value = entry.getValue();
        args[idx++] = value.trim().isEmpty()
                ? entry.getKey() + "=\"" + value + "\""
                : entry.getKey() + "=" + value;
    }
    return compileAndExecuteScript(dmlScript, args, false, true, isPyDML, configFilePath);
}
/** Rejects registerInput calls unless SystemML is executing on Spark. */
private void checkIfRegisteringInputAllowed() throws DMLRuntimeException {
    boolean isSparkMode = DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK
            || DMLScript.rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK;
    if(!isSparkMode) {
        throw new DMLRuntimeException("ERROR: registerInput is only allowed for spark execution mode");
    }
}
/** File-path entry point: delegates with isFile == true. */
private MLOutput compileAndExecuteScript(String dmlScriptFilePath, String [] args, boolean isNamedArgument, boolean isPyDML, String configFilePath) throws IOException, DMLException {
    final boolean isFile = true;
    return compileAndExecuteScript(dmlScriptFilePath, args, isFile, isNamedArgument, isPyDML, configFilePath);
}
/**
 * All the execute() methods funnel into this method, which sets up the
 * input/output variables and calls the simplified compilation chain.
 * Explicitly synchronized because MLContext/SystemML does not yet support
 * multi-threading; a process-wide active-context guard additionally rejects
 * overlapping calls from different MLContext instances.
 * @param dmlScriptFilePath script path (isFile==true) or the script text itself (isFile==false)
 * @param args positional or named script arguments; may be null
 * @param isFile true when dmlScriptFilePath is a file path
 * @param isNamedArgument true when args contain "key=value" named arguments
 * @param isPyDML true to parse the script as PyDML
 * @param configFilePath SystemML configuration file; null for defaults
 * @return the registered output variables as binary-block RDDs plus their metadata
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
private synchronized MLOutput compileAndExecuteScript(String dmlScriptFilePath, String [] args, boolean isFile, boolean isNamedArgument, boolean isPyDML, String configFilePath) throws IOException, DMLException {
    try {
        // Reject nested/concurrent execution: only one MLContext may be active.
        if(getActiveMLContext() != null) {
            throw new DMLRuntimeException("SystemML (and hence by definition MLContext) doesnot support parallel execute() calls from same or different MLContexts. "
                    + "As a temporary fix, please do explicit synchronization, i.e. synchronized(MLContext.class) { ml.execute(...) } ");
        }
        // Set active MLContext.
        _activeMLContext = this;
        if(_monitorUtils != null) {
            _monitorUtils.resetMonitoringData();
        }
        if(DMLScript.rtplatform == RUNTIME_PLATFORM.SPARK || DMLScript.rtplatform == RUNTIME_PLATFORM.HYBRID_SPARK) {
            // Lazily allocated: stays null when no outputs were registered.
            Map<String, JavaPairRDD<MatrixIndexes,MatrixBlock>> retVal = null;
            // Depending on whether registerInput/registerOutput was called initialize the variables
            String[] inputs; String[] outputs;
            if(_inVarnames != null) {
                inputs = _inVarnames.toArray(new String[0]);
            }
            else {
                inputs = new String[0];
            }
            if(_outVarnames != null) {
                outputs = _outVarnames.toArray(new String[0]);
            }
            else {
                outputs = new String[0];
            }
            Map<String, MatrixCharacteristics> outMetadata = new HashMap<String, MatrixCharacteristics>();
            Map<String, String> argVals = DMLScript.createArgumentsMap(isNamedArgument, args);
            // Run the DML script
            ExecutionContext ec = executeUsingSimplifiedCompilationChain(dmlScriptFilePath, isFile, argVals, isPyDML, inputs, outputs, _variables, configFilePath);
            // Now collect the output
            if(_outVarnames != null) {
                if(_variables == null) {
                    throw new DMLRuntimeException("The symbol table returned after executing the script is empty");
                }
                for( String ovar : _outVarnames ) {
                    if( _variables.keySet().contains(ovar) ) {
                        if(retVal == null) {
                            retVal = new HashMap<String, JavaPairRDD<MatrixIndexes,MatrixBlock>>();
                        }
                        retVal.put(ovar, ((SparkExecutionContext) ec).getBinaryBlockRDDHandleForVariable(ovar));
                        outMetadata.put(ovar, ec.getMatrixCharacteristics(ovar)); // For converting output to dataframe
                    }
                    else {
                        throw new DMLException("Error: The variable " + ovar + " is not available as output after the execution of the DMLScript.");
                    }
                }
            }
            return new MLOutput(retVal, outMetadata);
        }
        else {
            throw new DMLRuntimeException("Unsupported runtime:" + DMLScript.rtplatform.name());
        }
    }
    finally {
        // Remove global dml config and all thread-local configs
        // TODO enable cleanup whenever invalid GNMF MLcontext is fixed
        // (the test is invalid because it assumes that status of previous execute is kept)
        //ConfigurationManager.setGlobalConfig(new DMLConfig());
        //ConfigurationManager.clearLocalConfigs();
        // Reset active MLContext.
        _activeMLContext = null;
    }
}
/**
 * Runs the DML script through a simplified compilation chain
 * (parse -> validate -> HOP -> LOP -> runtime program -> execute) and returns
 * the ExecutionContext for the caller to extract the output variables.
 * The caller (compileAndExecuteScript) is expected to pass an inputSymbolTable
 * with the appropriate matrix representations (RDD, MatrixObject) already bound.
 *
 * @param dmlScriptFilePath script path (isFile==true) or the script text itself
 * @param isFile true when dmlScriptFilePath is a file path
 * @param argVals resolved script arguments ($-variables)
 * @param parsePyDML true to parse the script as PyDML
 * @param inputs names registered via registerInput
 * @param outputs names registered via registerOutput
 * @param inputSymbolTable pre-bound symbol table; may be null
 * @param configFilePath SystemML configuration file; null for defaults
 * @return the execution context after the program has run
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
private ExecutionContext executeUsingSimplifiedCompilationChain(String dmlScriptFilePath, boolean isFile, Map<String, String> argVals, boolean parsePyDML,
        String[] inputs, String[] outputs, LocalVariableMap inputSymbolTable, String configFilePath)
        throws IOException, DMLException
{
    //construct dml configuration
    DMLConfig config = (configFilePath == null) ? new DMLConfig() : new DMLConfig(configFilePath);
    for(Entry<String, String> param : _additionalConfigs.entrySet()) {
        config.setTextValue(param.getKey(), param.getValue());
    }
    //set global dml and specialized compiler configurations
    // NOTE(review): both calls mutate process-wide configuration state.
    ConfigurationManager.setGlobalConfig(config);
    CompilerConfig cconf = new CompilerConfig();
    cconf.set(ConfigType.IGNORE_UNSPECIFIED_ARGS, true);
    cconf.set(ConfigType.REJECT_READ_WRITE_UNKNOWNS, false);
    cconf.set(ConfigType.ALLOW_CSE_PERSISTENT_READS, false);
    ConfigurationManager.setGlobalConfig(cconf);
    //read dml script string (from file with "-f", or taken verbatim with "-s")
    String dmlScriptStr = DMLScript.readDMLScript( isFile?"-f":"-s", dmlScriptFilePath);
    if(_monitorUtils != null) {
        _monitorUtils.setDMLString(dmlScriptStr);
    }
    //simplified compilation chain
    _rtprog = null;
    //parsing
    AParserWrapper parser = AParserWrapper.createParser(parsePyDML);
    DMLProgram prog;
    if (isFile) {
        prog = parser.parse(dmlScriptFilePath, null, argVals);
    } else {
        prog = parser.parse(null, dmlScriptStr, argVals);
    }
    //language validate
    DMLTranslator dmlt = new DMLTranslator(prog);
    dmlt.liveVariableAnalysis(prog);
    dmlt.validateParseTree(prog);
    //hop construct/rewrite
    dmlt.constructHops(prog);
    dmlt.rewriteHopsDAG(prog);
    Explain.explain(prog);
    //rewrite persistent reads/writes for registered input/output variables
    if(inputSymbolTable != null) {
        RewriteRemovePersistentReadWrite rewrite = new RewriteRemovePersistentReadWrite(inputs, outputs);
        ProgramRewriter rewriter2 = new ProgramRewriter(rewrite);
        rewriter2.rewriteProgramHopDAGs(prog);
    }
    //lop construct and runtime prog generation
    dmlt.constructLops(prog);
    _rtprog = prog.getRuntimeProgram(config);
    //optional global data flow optimization
    if(OptimizerUtils.isOptLevel(OptimizationLevel.O4_GLOBAL_TIME_MEMORY) ) {
        _rtprog = GlobalOptimizerWrapper.optimizeProgram(prog, _rtprog);
    }
    // launch SystemML appmaster not required as it is already launched
    //count number compiled MR jobs / SP instructions
    ExplainCounts counts = Explain.countDistributedOperations(_rtprog);
    Statistics.resetNoOfCompiledJobs( counts.numJobs );
    // Initialize caching and scratch space
    DMLScript.initHadoopExecution(config);
    //final cleanup runtime prog
    JMLCUtils.cleanupRuntimeProgram(_rtprog, outputs);
    //create and populate execution context
    ExecutionContext ec = ExecutionContextFactory.createContext(_rtprog);
    if(inputSymbolTable != null) {
        ec.setVariables(inputSymbolTable);
    }
    //core execute runtime program
    _rtprog.execute( ec );
    if(_monitorUtils != null)
        _monitorUtils.setExplainOutput(Explain.explain(_rtprog));
    return ec;
}
// -------------------------------- Private methods ends ----------------------------------------------------------
// TODO: Add additional create to provide sep, missing values, etc. for CSV
/**
 * Experimental API: Might be discontinued in future release.
 * Reads a matrix from the given file into an MLMatrix by generating and
 * executing a small DML read script. NOTE: resets this MLContext (clearing
 * any registered inputs/outputs) as a side effect before running.
 * @param sqlContext the SQLContext used to wrap the result
 * @param filePath path of the matrix file to read
 * @param format matrix file format string passed to DML read()
 * @return the matrix as an MLMatrix backed by binary-block RDDs
 * @throws IOException
 * @throws DMLException
 * @throws ParseException
 */
public MLMatrix read(SQLContext sqlContext, String filePath, String format) throws IOException, DMLException, ParseException {
    this.reset();
    this.registerOutput("output");
    // NOTE(review): filePath/format are spliced into the script unescaped —
    // a double quote in either would break or alter the generated DML;
    // confirm callers only pass trusted values.
    MLOutput out = this.executeScript("output = read(\"" + filePath + "\", format=\"" + format + "\"); " + MLMatrix.writeStmt);
    JavaPairRDD<MatrixIndexes, MatrixBlock> blocks = out.getBinaryBlockedRDD("output");
    MatrixCharacteristics mcOut = out.getMatrixCharacteristics("output");
    return MLMatrix.createMLMatrix(this, sqlContext, blocks, mcOut);
}
// // TODO: Test this in different scenarios: sparse/dense/mixed
// /**
// * Experimental unstable API: Might be discontinued in future release
// * @param ml
// * @param sqlContext
// * @param mllibMatrix
// * @return
// * @throws DMLRuntimeException
// */
// public MLMatrix read(SQLContext sqlContext, BlockMatrix mllibMatrix) throws DMLRuntimeException {
// long nnz = -1; // TODO: Find number of non-zeros from mllibMatrix ... This is important !!
//
// JavaPairRDD<Tuple2<Object, Object>, Matrix> mllibBlocks = JavaPairRDD.fromJavaRDD(mllibMatrix.blocks().toJavaRDD());
// long rlen = mllibMatrix.numRows(); long clen = mllibMatrix.numCols();
// int brlen = mllibMatrix.numRowBlocks();
// int bclen = mllibMatrix.numColBlocks();
// if(mllibMatrix.numRowBlocks() != DMLTranslator.DMLBlockSize && mllibMatrix.numColBlocks() != DMLTranslator.DMLBlockSize) {
// System.err.println("WARNING: Since the block size of mllib matrix is not " + DMLTranslator.DMLBlockSize + ", it may cause "
// + "reblocks");
// }
//
// JavaPairRDD<MatrixIndexes, MatrixBlock> blocks = mllibBlocks
// .mapToPair(new ConvertMLLibBlocksToBinaryBlocks(rlen, clen, brlen, bclen));
//
// MatrixCharacteristics mc = new MatrixCharacteristics(rlen, clen, brlen, bclen, nnz);
// return MLMatrix.createMLMatrix(this, sqlContext, blocks, mc);
// }
}
| apache-2.0 |
jodzga/parseq | src/com/linkedin/parseq/CallableTask.java | 1848 | /*
* Copyright 2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.parseq;
import com.linkedin.parseq.promise.Promise;
import com.linkedin.parseq.promise.Promises;
import java.util.concurrent.Callable;
/**
 * A {@link Task} that runs a {@link Callable} and uses the value returned by
 * the callable as the task's result.
 * <p/>
 * Instances should be obtained via
 * {@link Tasks#callable(String, java.util.concurrent.Callable)} rather than
 * constructed directly.
 *
 * @author Chris Pettitt (cpettitt@linkedin.com)
 */
/* package private */ class CallableTask<T> extends BaseTask<T>
{
  private final ThrowableCallable<? extends T> _callable;

  public CallableTask(final String name, final Callable<? extends T> callable)
  {
    this(name, toThrowableCallable(callable));
  }

  public CallableTask(final String name, final ThrowableCallable<? extends T> callable)
  {
    super(name);
    _callable = callable;
  }

  @Override
  protected Promise<? extends T> run(final Context context) throws Throwable
  {
    // The callable's return value becomes the task's resolved value.
    return Promises.value(_callable.call());
  }

  /** Adapts a plain {@link Callable} to the {@link ThrowableCallable} contract. */
  private static <T> ThrowableCallable<T> toThrowableCallable(final Callable<? extends T> callable)
  {
    return new ThrowableCallable<T>()
    {
      @Override
      public T call() throws Throwable
      {
        return callable.call();
      }
    };
  }
}
| apache-2.0 |
jexp/idea2 | java/idea-ui/src/com/intellij/openapi/roots/ui/configuration/DefaultModuleConfigurationEditorFactoryImpl.java | 2085 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.ui.configuration;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleConfigurationEditor;
import com.intellij.openapi.roots.ModifiableRootModel;
/**
 * Default factory producing the standard module-settings editors: content
 * roots, classpath, javadoc and output paths.
 *
 * @author Eugene Zhuravlev
 * Date: Oct 28, 2004
 */
public class DefaultModuleConfigurationEditorFactoryImpl extends DefaultModuleConfigurationEditorFactory {
  public ModuleConfigurationEditor createModuleContentRootsEditor(ModuleConfigurationState state) {
    final ModifiableRootModel rootModel = state.getRootModel();
    final Module module = rootModel.getModule();
    return new ContentEntriesEditor(state.getProject(), module.getName(), rootModel, state.getModulesProvider());
  }

  public ModuleConfigurationEditor createClasspathEditor(ModuleConfigurationState state) {
    final ModifiableRootModel rootModel = state.getRootModel();
    return new ClasspathEditor(state.getProject(), rootModel, state.getModulesProvider());
  }

  public ModuleConfigurationEditor createJavadocEditor(ModuleConfigurationState state) {
    final ModifiableRootModel rootModel = state.getRootModel();
    return new JavadocEditor(state.getProject(), rootModel);
  }

  public ModuleConfigurationEditor createOutputEditor(ModuleConfigurationState state) {
    final ModifiableRootModel rootModel = state.getRootModel();
    return new OutputEditor(state.getProject(), rootModel);
  }

  @Deprecated
  public ModuleConfigurationEditor createCompilerOutputEditor(ModuleConfigurationState state) {
    final ModifiableRootModel rootModel = state.getRootModel();
    return new BuildElementsEditor(state.getProject(), rootModel);
  }
}
| apache-2.0 |
gawkermedia/googleads-java-lib | modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201509/cm/MediaBundle.java | 2738 |
package com.google.api.ads.adwords.jaxws.v201509.cm;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
 *
 * Represents a ZIP archive media the content of which contains HTML5 assets.
 *
 * <p>NOTE: JAXB-generated class; manual edits may be overwritten when the
 * bindings are regenerated from the WSDL/schema.
 *
 * <p>Java class for MediaBundle complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * <complexType name="MediaBundle">
 *   <complexContent>
 *     <extension base="{https://adwords.google.com/api/adwords/cm/v201509}Media">
 *       <sequence>
 *         <element name="data" type="{http://www.w3.org/2001/XMLSchema}base64Binary" minOccurs="0"/>
 *         <element name="mediaBundleUrl" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *         <element name="entryPoint" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
 *       </sequence>
 *     </extension>
 *   </complexContent>
 * </complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "MediaBundle", propOrder = {
    "data",
    "mediaBundleUrl",
    "entryPoint"
})
public class MediaBundle
    extends Media
{

    // Raw bytes of the ZIP archive (base64Binary on the wire per the schema).
    protected byte[] data;
    // URL of the media bundle — presumably where the uploaded bundle is hosted;
    // confirm against the AdWords API reference.
    protected String mediaBundleUrl;
    // Entry point within the bundle — presumably the main HTML asset path;
    // confirm against the AdWords API reference.
    protected String entryPoint;

    /**
     * Gets the value of the data property.
     *
     * @return
     *     possible object is
     *     byte[]
     */
    public byte[] getData() {
        return data;
    }

    /**
     * Sets the value of the data property.
     *
     * @param value
     *     allowed object is
     *     byte[]
     */
    public void setData(byte[] value) {
        this.data = value;
    }

    /**
     * Gets the value of the mediaBundleUrl property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getMediaBundleUrl() {
        return mediaBundleUrl;
    }

    /**
     * Sets the value of the mediaBundleUrl property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setMediaBundleUrl(String value) {
        this.mediaBundleUrl = value;
    }

    /**
     * Gets the value of the entryPoint property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getEntryPoint() {
        return entryPoint;
    }

    /**
     * Sets the value of the entryPoint property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setEntryPoint(String value) {
        this.entryPoint = value;
    }

}
| apache-2.0 |
b2ihealthcare/snow-owl | snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/core/store/SnomedAttributeValueReferenceSetMemberBuilder.java | 1449 | /*
* Copyright 2011-2018 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.core.store;
import com.b2international.snowowl.core.domain.TransactionContext;
import com.b2international.snowowl.snomed.common.SnomedRf2Headers;
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedRefSetMemberIndexEntry;
/**
* @since 4.5
*/
/**
 * Builder for SNOMED CT attribute value reference set members. Collects the
 * member's value identifier and writes it onto the index entry during init.
 *
 * @since 4.5
 */
public final class SnomedAttributeValueReferenceSetMemberBuilder extends SnomedMemberBuilder<SnomedAttributeValueReferenceSetMemberBuilder> {

	// Identifier recorded under SnomedRf2Headers.FIELD_VALUE_ID on the member.
	private String attributeValueId;

	/**
	 * Sets the value identifier to record on the built member.
	 *
	 * @param valueId the value identifier to use
	 * @return this builder, for chaining
	 */
	public SnomedAttributeValueReferenceSetMemberBuilder withValueId(final String valueId) {
		this.attributeValueId = valueId;
		return getSelf();
	}

	@Override
	public void init(final SnomedRefSetMemberIndexEntry.Builder component, final TransactionContext context) {
		super.init(component, context);
		component.field(SnomedRf2Headers.FIELD_VALUE_ID, attributeValueId);
	}
}
| apache-2.0 |
blusechen/venus | venus-backend/src/main/java/com/meidusa/venus/backend/interceptor/config/CacheOperation.java | 104 | package com.meidusa.venus.backend.interceptor.config;
/**
 * The cache actions a cache interceptor may be configured to perform.
 */
public enum CacheOperation {

    /** Evict the cached entry. */
    DELETE,

    /** Read the cached entry. */
    GET;
}
| apache-2.0 |
xhoong/incubator-calcite | linq4j/src/test/java/org/apache/calcite/linq4j/MemoryEnumerableTest.java | 2628 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.linq4j;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertThat;
/** Tests for {@link org.apache.calcite.linq4j.MemoryEnumerable} */
/** Tests for {@link org.apache.calcite.linq4j.MemoryEnumerable} */
public class MemoryEnumerableTest {

  /**
   * Wraps a 0..99 enumerable with a memory window (history 5, future 1) and
   * checks that history/future lookups behave at both ends of the stream.
   */
  @Test public void testHistoryAndFuture() {
    final List<Integer> source = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
      source.add(i);
    }
    final Enumerable<Integer> input = Linq4j.asEnumerable(source);
    final MemoryEnumerable<Integer> wrapped = new MemoryEnumerable<>(input, 5, 1);

    final List<MemoryFactory.Memory<Integer>> collected = new ArrayList<>();
    final Enumerator<MemoryFactory.Memory<Integer>> enumerator = wrapped.enumerator();
    while (enumerator.moveNext()) {
      collected.add(enumerator.current());
    }

    assertThat(collected.size(), is(100));
    // First entry: current value, one step of future, no history yet.
    assertThat((int) collected.get(0).get(), is(0));
    assertThat((int) collected.get(0).get(1), is(1));
    assertThat(collected.get(0).get(-2), nullValue());
    // Last entry: current value, history available, no future left.
    assertThat((int) collected.get(99).get(), is(99));
    assertThat((int) collected.get(99).get(-2), is(97));
    assertThat(collected.get(99).get(1), nullValue());
  }

  /** Verifies ModularInteger formatting and wrap-around arithmetic. */
  @Test public void testModularInteger() {
    final ModularInteger start = new ModularInteger(4, 5);
    assertThat(start.toString(), is("4 mod 5"));

    // Adding 1 wraps 4 mod 5 around to 0; adding -6 wraps down to 3.
    assertThat(start.plus(1).toString(), is("0 mod 5"));
    assertThat(start.plus(-6).toString(), is("3 mod 5"));
  }
}
// End MemoryEnumerableTest.java
| apache-2.0 |
vybs/sqoop-on-spark | common/src/main/java/org/apache/sqoop/schema/type/Bit.java | 1473 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.schema.type;
import org.apache.sqoop.classification.InterfaceAudience;
import org.apache.sqoop.classification.InterfaceStability;
/**
* True/False value.
*
* JDBC Types: bit, boolean
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class Bit extends Column {

  /**
   * Creates a BIT column with the default nullability of {@link Column}.
   *
   * @param name column name
   */
  public Bit(String name) {
    super(name);
  }

  /**
   * Creates a BIT column with explicit nullability.
   *
   * @param name column name
   * @param nullable whether the column accepts NULL values
   */
  public Bit(String name, Boolean nullable) {
    super(name, nullable);
  }

  @Override
  public ColumnType getType() {
    return ColumnType.BIT;
  }

  @Override
  public String toString() {
    // Plain concatenation is clearer than the previous StringBuilder chain;
    // the compiler emits an equivalent builder for this expression anyway.
    return "Bit{" + super.toString() + "}";
  }
}
| apache-2.0 |
rprobinson/MediPi | MediPiPatient/MediPi/src/main/java/org/medipi/PollDownloads.java | 8911 | /*
Copyright 2016 Richard Robinson @ NHS Digital <rrobinson@nhs.net>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.medipi;
import java.time.Instant;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import javax.ws.rs.ProcessingException;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.Response;
import org.medipi.logging.MediPiLogger;
import org.medipi.messaging.rest.RESTfulMessagingEngine;
import org.medipi.messaging.vpn.VPNServiceManager;
import org.medipi.model.DownloadableDO;
/**
* Class to poll the MediPi Concentrator and request any downloads for the user
* or device
*
* This class polls the concentrator receives the list of responses and calls
* the appropriate handler
*
* @author rick@robinsonhq.com
*/
/**
 * Class to poll the MediPi Concentrator and request any downloads for the user
 * or device.
 *
 * Each {@link #run()} invocation performs one poll: it opens the VPN (when
 * enabled), issues a GET for this device/patient pair, dispatches every
 * returned {@link DownloadableDO} to the registered handler, then closes the
 * VPN again.
 *
 * @author rick@robinsonhq.com
 */
public class PollDownloads
        implements Runnable {

    // Configuration property keys.
    private static final String MEDIPITRANSMITRESOURCEPATH = "medipi.transmit.resourcepath";
    private static final String MEDIPIDEVICECERTNAME = "medipi.device.cert.name";
    private static final String MEDIPIPATIENTCERTNAME = "medipi.patient.cert.name";
    private static final String MEDIPIDOWNLOADABLERESILIENCEATTEMPTS = "medipi.downloadable.resilienceattempts";

    // Patient certificate name; only set after the first login, so it is
    // re-read from system properties on every poll.
    private String patientCertName;
    // Device certificate name, fixed at construction.
    private final String deviceCertName;
    // Base REST resource path on the concentrator.
    private final String resourcePath;
    private final MediPi medipi;
    private RESTfulMessagingEngine rme;
    // Number of consecutive failed polls tolerated before surfacing an error.
    private int resilienceAttempts = 0;
    // Failures still tolerated in the current streak; reset after a
    // successful (non-ProcessingException) poll.
    private int remainingResilienceAttempts = 0;

    /**
     * Constructor for PollIncomingMessage class
     *
     * @param medipi main MediPi context used for properties and error reporting
     * @throws Exception when mandatory configuration is missing
     */
    public PollDownloads(MediPi medipi) throws Exception {
        this.medipi = medipi;
        resourcePath = medipi.getProperties().getProperty(MEDIPITRANSMITRESOURCEPATH);
        if (resourcePath == null || resourcePath.trim().equals("")) {
            MediPiLogger.getInstance().log(PollDownloads.class.getName() + ".error", "MediPi resource base path is not set");
            medipi.makeFatalErrorMessage(resourcePath + " - MediPi resource base path is not set", null);
        }
        // Get the device Cert
        deviceCertName = System.getProperty(MEDIPIDEVICECERTNAME);
        if (deviceCertName == null || deviceCertName.trim().length() == 0) {
            medipi.makeFatalErrorMessage("MediPi device cert not found", null);
        }
        // Template parameters substituted into the "download" resource URL.
        String[] params = {"{deviceId}", "{patientId}"};
        rme = new RESTfulMessagingEngine(resourcePath + "download", params);
        String s = medipi.getProperties().getProperty(MEDIPIDOWNLOADABLERESILIENCEATTEMPTS);
        if (s == null || s.trim().length() == 0) {
            s = "0"; // default: no resilience, report the first failure
        }
        resilienceAttempts = Integer.parseInt(s);
        remainingResilienceAttempts = resilienceAttempts;
    }

    @Override
    public void run() {
        System.out.println("PollDownloads run at: " + Instant.now());
        // Correlates the VPN open/close pair for this poll.
        UUID uuid = UUID.randomUUID();
        VPNServiceManager vpnm = null;
        try {
            // get the patient cert - this is only available after the first login
            // and therefore no downloads are attempted before the first login
            patientCertName = System.getProperty(MEDIPIPATIENTCERTNAME);
            if (!medipi.wifiSync.get()) {
                System.out.println("WIFI not available - no polling");
                // Do not try and download anything before wifi is available
            } else if (patientCertName == null || patientCertName.trim().length() == 0) {
                System.out.println("Patient Certificate Name not known");
                // Do not try and download anything before the user password is input for the first time
            } else {
                vpnm = VPNServiceManager.getInstance();
                if (vpnm.isEnabled()) {
                    vpnm.VPNConnection(VPNServiceManager.OPEN, uuid);
                }
                // URL template parameters: which device/patient to fetch for.
                HashMap<String, Object> hs = new HashMap<>();
                hs.put("deviceId", deviceCertName);
                hs.put("patientId", patientCertName);
                Response listResponse = rme.executeGet(hs);
                //
                if (listResponse != null) {
                    System.out.println("Poll Download returned status = " + listResponse.getStatus());
                    //POSITIVE RESPONSE
                    if (listResponse.getStatus() == Response.Status.OK.getStatusCode()) {
                        List<DownloadableDO> ld = listResponse.readEntity(new GenericType<List<DownloadableDO>>() {
                        });
                        // Dispatch each downloadable individually; a handler
                        // failure is reported but does not abort the rest.
                        for (DownloadableDO d : ld) {
                            MediPiLogger.getInstance().log(PollDownloads.class.getName() + ".info", "New Downloadable List detected - Downloadable UUID: " + d.getDownloadableUuid());
                            try {
                                medipi.getDownloadableHandlerManager().handle(d);
                            } catch (Exception e) {
                                MediPiMessageBox.getInstance().makeErrorMessage("Error in attempting to download an incoming message/update ", e);
                            }
                        }
                        // Remember that list may be empty - therefore no action
                    } else {
                        //ERROR RESPONSE
                        String err = listResponse.readEntity(String.class);
                        // NOTE(review): cases 404 and 426 below intentionally(?)
                        // fall through to the default branch - there is no
                        // break, so all error statuses currently just print
                        // err. Confirm before adding status-specific handling.
                        switch (listResponse.getStatus()) {
                            // NOT FOUND
                            case 404:
                                // This is returned when the hardware name and patientId do not match
                                // ***************** DO SOMETHING WITH 404 *******************
                            // UPDATE REQUIRED
                            case 426:
                                // ***************** DO SOMETHING WITH 426 *******************
                            // INTERNAL SERVER ERROR
                            case 500:
                            default:
                                // ***************** DO SOMETHING WITH EVERY OTHER STATUS CODE *******************
                                System.out.println(err);
                        }
                        MediPiLogger.getInstance().log(PollDownloads.class.getName() + ".error", "Error code: " + listResponse.getStatus() + " detected when trying to return downloadable list");
                    }
                }
            }
        } catch (ProcessingException pe) {
            // Connection-level failure: count down the resilience budget before
            // bothering the user with an error dialog.
            if (remainingResilienceAttempts == 0) {
                MediPiLogger.getInstance().log(PollDownloads.class.getName() + ".error", "Attempt(s) to retreive incoming messages have failed - MediPi Concentrator is not available - please try again later. " + pe.getLocalizedMessage());
                MediPiMessageBox.getInstance().makeErrorMessage("Attempt(s) to retreive incoming messages from the MediPi Server have failed - This may be an issue with your connection to the Internet or the MediPi Server could be down.\nIf this message appears persistently please check your connection and/or try again later.", null);
            } else {
                remainingResilienceAttempts--;
                System.out.println("remaining attempts at polling the concentrator:"+remainingResilienceAttempts);
                // Early return keeps the decremented counter for the next poll
                // (the reset at the bottom of run() is deliberately skipped).
                return;
            }
        } catch (Exception e) {
            MediPiLogger.getInstance().log(PollDownloads.class.getName() + ".error", "Error detected when attempting to poll the Concentrator: " + e.getLocalizedMessage());
            MediPiMessageBox.getInstance().makeErrorMessage("Error detected when attempting to poll the Concentrator: " + e.getLocalizedMessage(), e);
        } finally {
            // Always close the VPN tunnel opened above, even on failure.
            System.out.println("pollFinally1");
            if (vpnm != null && vpnm.isEnabled()) {
                try {
                    System.out.println("pollFinally2");
                    vpnm.VPNConnection(VPNServiceManager.CLOSE, uuid);
                    System.out.println("pollFinally3");
                } catch (Exception ex) {
                    MediPiLogger.getInstance().log(PollDownloads.class.getName(), ex);
                }
            }
        }
        // Poll completed (or failed in a non-retryable way): restart the
        // resilience countdown.
        remainingResilienceAttempts = resilienceAttempts;
    }
}
| apache-2.0 |
travisolbrich/315-P2-Reversi | Reversi/src/reversi/server/ReversiServerResponse.java | 2394 | package reversi.server;
import java.io.IOException;
import java.io.PrintWriter;
import base.models.IOModel;
import reversi.models.game.ReversiInput;
/**
* Class that wraps up the server responses.
* @author dereekb
*
*/
/**
 * Wraps the messages the Reversi server can send back to a client.
 *
 * @author dereekb
 */
public class ReversiServerResponse
{
	/** The message templates the server can emit. */
	public enum ServerResponseType
	{
		ServerResponseTypeWelcome("WELCOME"),
		ServerResponseTypeOk("OK"),
		ServerResponseTypeMove("%s"),
		ServerResponseTypeIllegal("ILLEGAL"),
		ServerResponseTypeComment(";%s");

		// printf-style template the response is rendered with
		private final String format;

		ServerResponseType(String format)
		{
			this.format = format;
		}

		public String getFormat() {
			return format;
		}
	}

	private final ServerResponseType type;
	private final IOModel client;
	private final String comments;

	public ReversiServerResponse(IOModel client, ServerResponseType type)
	{
		this(client, type, null);
	}

	public ReversiServerResponse(IOModel client, ServerResponseType type, String comments)
	{
		this.client = client;
		this.type = type;
		this.comments = comments;
	}

	/**
	 * Renders this response through its type's template and writes it as a
	 * line to the client's output stream.
	 */
	public void send() throws IOException
	{
		String rendered = String.format(type.getFormat(), comments);
		client.getWriter().println(rendered);
	}

	// Shared implementation for the static convenience senders below.
	private static void respond(IOModel client, ServerResponseType type, String comment) throws IOException
	{
		new ReversiServerResponse(client, type, comment).send();
	}

	public static void sendWelcome(IOModel client) throws IOException
	{
		respond(client, ServerResponseType.ServerResponseTypeWelcome, null);
	}

	public static void sendOk(IOModel client) throws IOException
	{
		respond(client, ServerResponseType.ServerResponseTypeOk, null);
	}

	public static void sendMove(IOModel client, ReversiInput input) throws IOException
	{
		respond(client, ServerResponseType.ServerResponseTypeMove, input.toString());
	}

	public static void sendIllegal(IOModel client) throws IOException
	{
		respond(client, ServerResponseType.ServerResponseTypeIllegal, null);
	}

	public static void sendComment(IOModel client, String comment) throws IOException
	{
		respond(client, ServerResponseType.ServerResponseTypeComment, comment);
	}
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-cognitoidp/src/main/java/com/amazonaws/services/cognitoidp/model/AdminDisableUserResult.java | 2458 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cognitoidp.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Represents the response received from the server to disable the user as an administrator.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/AdminDisableUser" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AdminDisableUserResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // The result carries no fields, so the rendered form is always "{}".
        return "{}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Stateless result: any other AdminDisableUserResult compares equal.
        return obj instanceof AdminDisableUserResult;
    }

    @Override
    public int hashCode() {
        // No state to mix in; every instance hashes identically.
        return 1;
    }

    @Override
    public AdminDisableUserResult clone() {
        try {
            return (AdminDisableUserResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| apache-2.0 |
drz0910/hackerrank | src/Algorithms/Strings/Gemstones/SolutionTest.java | 1691 | package Algorithms.Strings.Gemstones;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* @className SolutionTest
* @description Unit test
* @author dair
* @date Jun 14, 2016
*
*/
/**
 * Data-driven unit test: feeds inputN.txt resource files to Solution.main and
 * compares the captured stdout against the matching outputN.txt files.
 *
 * @author dair
 * @date Jun 14, 2016
 */
public class SolutionTest {

	// Resource directory derived from the compiled class location.
	// NOTE(review): replaceFirst("/", "") strips the leading slash of paths
	// such as "/C:/..." (Windows); confirm this resolves on other platforms.
	private final String path = this.getClass().getResource(".").getPath().toString().replaceFirst("/", "")
			.replaceAll("\\w+/classes", "resource");

	// Captures everything Solution.main prints to System.out.
	private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();

	// The real stdout, saved so cleanUpStreams can restore it.
	private PrintStream originalOut;

	@Before
	public void setUpStreams() {
		originalOut = System.out;
		System.setOut(new PrintStream(outContent));
	}

	@After
	public void cleanUpStreams() {
		// Restore the original stream rather than System.setOut(null): a null
		// stdout would NPE in any code that prints after this test finishes.
		System.setOut(originalOut);
	}

	/**
	 * Runs Solution.main once per inputN/outputN pair found in the resource
	 * directory, resetting the captured output between cases.
	 */
	@Test
	public void testMain() throws IOException {
		int index = 1;
		Path inputPath = Paths.get(path, "input.txt");
		Path outputPath = Paths.get(path, "output.txt");
		// for multiple test case
		do {
			System.setIn(Files.newInputStream(inputPath));
			Solution.main(null);
			// Normalize trailing whitespace and line endings before comparing.
			assertEquals(new String(Files.readAllBytes(outputPath)), outContent.toString().replaceAll("\\s+$", "").replaceAll("\\s+\\n", "\r\n"));
			outContent.reset();
			index++;
		} while (Files.exists(inputPath = Paths.get(path, "input" + index + ".txt"))
				&& Files.exists(outputPath = Paths.get(path, "output" + index + ".txt")));
	}
}
| apache-2.0 |
aricooperman/jLean | src/main/java/com/quantconnect/lean/data/market/Dividend.java | 4401 | /*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.quantconnect.lean.data.market;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.LocalDate;
import java.time.LocalDateTime;
import com.quantconnect.lean.MarketDataType;
import com.quantconnect.lean.Symbol;
import com.quantconnect.lean.data.BaseData;
import com.quantconnect.lean.data.SubscriptionDataConfig;
import com.quantconnect.lean.data.SubscriptionDataSource;
/**
* Dividend event from a security
*/
/**
 * Dividend event from a security.
 *
 * Carries the cash distribution amount for a symbol on a given date. The
 * reader/getSource methods are intentionally unsupported because dividends are
 * derived from factor files elsewhere in the engine.
 */
public class Dividend extends BaseData {

    // Cash amount paid per share.
    // NOTE(review): the setter rounds to 2 decimal places (HALF_UP), but both
    // parameterized constructors assign this field directly and therefore skip
    // that rounding - confirm whether constructor values should be rounded too.
    private BigDecimal distribution;

    /**
     * Initializes a new instance of the Dividend class
     */
    public Dividend() {
        setDataType( MarketDataType.Auxiliary );
    }

    /**
     * Initializes a new instance of the Dividend class, deriving the
     * distribution from the close price and the price factor ratio.
     * @param symbol The symbol
     * @param date The date
     * @param close The close
     * @param priceFactorRatio The ratio of the price factors, pf_i/pf_i+1
     */
    public Dividend( Symbol symbol, LocalDateTime date, BigDecimal close, BigDecimal priceFactorRatio ) {
        this();
        setSymbol( symbol );
        setTime( date );
        // distribution = close * (1 - priceFactorRatio); assigned directly,
        // bypassing setDistribution's 2-dp rounding (see field note).
        this.distribution = close.subtract( (close.multiply( priceFactorRatio )) );
    }

    /**
     * Initializes a new instance of the Dividend class
     * @param symbol The symbol
     * @param date The date
     * @param distribution The dividend amount
     */
    public Dividend( Symbol symbol, LocalDateTime date, BigDecimal distribution ) {
        this();
        setSymbol( symbol );
        setTime( date );
        // Assigned directly, bypassing setDistribution's rounding (see field note).
        this.distribution = distribution;
    }

    /**
     * Gets the dividend payment
     */
    public BigDecimal getDistribution() {
        return distribution;
    }

    /**
     * Sets the dividend payment, rounded to 2 decimal places (HALF_UP).
     */
    public void setDistribution( BigDecimal value ) {
        distribution = value.setScale( 2, RoundingMode.HALF_UP );
    }

    /**
     * Reader converts each line of the data source into BaseData objects. Each data type creates its own factory method, and returns a new instance of the object
     * each time it is called.
     * @param config Subscription data config setup object
     * @param line Line of the source document
     * @param date Date of the requested data
     * @param isLiveMode true if we're in live mode, false for backtesting mode
     * @returns Instance of the T:BaseData object generated by this line of the CSV
     * @throws UnsupportedOperationException always - dividends are produced in SubscriptionDataReader.CheckForDividend
     */
    @Override
    public BaseData reader( SubscriptionDataConfig config, String line, LocalDate date, boolean isLiveMode ) {
        // this is implemented in the SubscriptionDataReader.CheckForDividend
        throw new UnsupportedOperationException( "This method is not supposed to be called on the Dividend type." );
    }

    /**
     * Return the URL String source of the file. This will be converted to a stream
     * @param config Configuration object
     * @param date Date of this source file
     * @param isLiveMode true if we're in live mode, false for backtesting mode
     * @returns String URL of source file.
     * @throws UnsupportedOperationException always - dividend data comes from map/factor files
     */
    @Override
    public SubscriptionDataSource getSource( SubscriptionDataConfig config, LocalDate date, boolean isLiveMode ) {
        // this data is derived from map files and factor files in backtesting
        throw new UnsupportedOperationException( "This method is not supposed to be called on the Dividend type." );
    }

    /**
     * Return a new instance clone of this object, used in fill forward
     *
     * Note: clones via the 3-arg constructor, so the copy shares this
     * instance's symbol/time/distribution values.
     *
     * @returns A clone of the current object
     */
    @Override
    public BaseData clone() {
        return new Dividend( getSymbol(), getTime(), distribution );
    }
}
| apache-2.0 |
lockerfish/mysunshine | app/src/main/java/com/lockerfish/sunshine/DetailFragment.java | 10553 | package com.lockerfish.sunshine;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.LoaderManager.LoaderCallbacks;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.ShareActionProvider;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.ImageView;
import android.util.Log;
import android.net.Uri;
import com.lockerfish.sunshine.data.WeatherContract.WeatherEntry;
import com.lockerfish.sunshine.data.WeatherContract.LocationEntry;
/**
* A placeholder fragment containing a simple view.
*/
public class DetailFragment extends Fragment implements LoaderCallbacks<Cursor> {
private final String TAG = getClass().getSimpleName();
private final boolean D = Log.isLoggable(TAG, Log.DEBUG);
public static final String DETAIL_URI = "URI";
private static final String FORECAST_SHARE_HASHTAG = " #SunshineApp";
private ShareActionProvider mShareActionProvider;
private String mForecastStr;
private Uri mUri;
private static final int DETAIL_LOADER = 0;
private static final String[] FORECAST_COLUMNS = {
WeatherEntry.TABLE_NAME + "." + WeatherEntry._ID,
WeatherEntry.COLUMN_DATE,
WeatherEntry.COLUMN_SHORT_DESC,
WeatherEntry.COLUMN_MAX_TEMP,
WeatherEntry.COLUMN_MIN_TEMP,
WeatherEntry.COLUMN_HUMIDITY,
WeatherEntry.COLUMN_PRESSURE,
WeatherEntry.COLUMN_WIND_SPEED,
WeatherEntry.COLUMN_DEGREES,
WeatherEntry.COLUMN_WEATHER_ID,
// This works because the WeatherProvider returns location data joined with
// weather data, even though they're stored in two different tables.
LocationEntry.COLUMN_LOCATION_SETTING
};
// these constants correspond to the projection defined above, and must change if the
// projection changes
public static final int COL_WEATHER_ID = 0;
public static final int COL_WEATHER_DATE = 1;
public static final int COL_WEATHER_DESC = 2;
public static final int COL_WEATHER_MAX_TEMP = 3;
public static final int COL_WEATHER_MIN_TEMP = 4;
public static final int COL_WEATHER_HUMIDITY = 5;
public static final int COL_WEATHER_PRESSURE = 6;
public static final int COL_WEATHER_WIND_SPEED = 7;
public static final int COL_WEATHER_DEGREES = 8;
public static final int COL_WEATHER_CONDITION_ID = 9;
public DetailFragment() {
if (D) { Log.v(TAG, "DetailFragment"); }
setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
if (D) { Log.v(TAG, "onCreateView: inflater" + inflater
+ " container: " + container
+ " savedInstanceState: " + savedInstanceState);
}
Bundle arguments = getArguments();
if (D) { Log.v(TAG, "arguments: " + arguments); }
if (arguments != null) {
mUri = arguments.getParcelable(DetailFragment.DETAIL_URI);
}
if (D) { Log.v(TAG, "mUri: " + mUri); }
return inflater.inflate(R.layout.fragment_detail, container, false);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
if (D) { Log.v(TAG, "onCreateOptionsMenu: menu: " + menu + " inflater: " + inflater); }
// Inflate the menu; this adds items to the action bar if it is present.
inflater.inflate(R.menu.detail_fragment, menu);
// Locate MenuItem with ShareActionProvider
MenuItem item = menu.findItem(R.id.action_share);
// Fetch and store ShareActionProvider
ShareActionProvider mShareActionProvider = (ShareActionProvider) MenuItemCompat.getActionProvider(item);
if (mForecastStr != null) {
mShareActionProvider.setShareIntent(createShareForecastIntent());
} else {
if (D) { Log.d(TAG, "Share Action Provider is null?"); }
}
}
private Intent createShareForecastIntent() {
if (D) { Log.v(TAG, "createShareForecastIntent");}
Intent shareIntent = new Intent(Intent.ACTION_SEND);
shareIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
shareIntent.setType("text/plain");
shareIntent.putExtra(Intent.EXTRA_TEXT, mForecastStr + FORECAST_SHARE_HASHTAG);
return shareIntent;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
if (D) { Log.v(TAG, "onActivityCreated: savedInstanceState: " + savedInstanceState);}
if (mUri == null) {
String location = Utility.getPreferredLocation(getActivity());
// Bundle bundle = new Bundle();
Uri contentUri = WeatherEntry.buildWeatherLocationWithDate(
location, System.currentTimeMillis());
if (D) { Log.v(TAG, "contentUri: " + contentUri); }
// bundle.putParcelable(DetailFragment.DETAIL_URI, contentUri);
mUri = contentUri;
}
getLoaderManager().initLoader(DETAIL_LOADER, savedInstanceState, this);
super.onActivityCreated(savedInstanceState);
}
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle args) {
if (D) { Log.v(TAG, "onCreateLoader: id: " + id + " args: " + args);}
// if (args != null) {
// mUri = args.getParcelable(DetailFragment.DETAIL_URI);
// }
if ( null != mUri) {
// Now create and return a CursorLoader that will take care of
// creating a Cursor for the data being displayed.
return new CursorLoader(
getActivity(),
mUri,
FORECAST_COLUMNS,
null,
null,
null
);
}
if (D) { Log.v(TAG, "loader NOT created it is NULL"); }
return null;
}
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
if (D) { Log.v(TAG, "onLoadFinished: loader: " + loader + " data: " + data);}
if (!data.moveToFirst()) { return; }
int weatherId = data.getInt(data.getColumnIndex(WeatherEntry.COLUMN_WEATHER_ID));
long date = data.getLong(COL_WEATHER_DATE);
boolean isMetric = Utility.isMetric(getActivity());
String dateText = Utility.getFormattedMonthDay(getActivity(), date);
String dayName = Utility.getDayName(getActivity(), data.getLong(COL_WEATHER_DATE));
TextView dayNameView = (TextView) getView().findViewById(R.id.detail_day_textview);
dayNameView.setText(dayName);
String monthday = Utility.getFormattedMonthDay(getActivity(), data.getLong(COL_WEATHER_DATE));
TextView monthdayView = (TextView) getView().findViewById(R.id.detail_date_textview);
monthdayView.setText(monthday);
String high = Utility.formatTemperature(getActivity(),
data.getDouble(COL_WEATHER_MAX_TEMP), isMetric);
TextView highView = (TextView) getView().findViewById(R.id.detail_high_textview);
highView.setText(high);
String low = Utility.formatTemperature(getActivity(),
data.getDouble(COL_WEATHER_MIN_TEMP), isMetric);
TextView lowView = (TextView) getView().findViewById(R.id.detail_low_textview);
lowView.setText(low);
ImageView iconView = (ImageView) getView().findViewById(R.id.detail_icon);
iconView.setImageResource(Utility.getArtResourceForWeatherCondition(weatherId));
String condition = data.getString(COL_WEATHER_DESC);
TextView conditionView = (TextView) getView().findViewById(R.id.detail_forecast_textview);
conditionView.setText(condition);
// Read humidity from cursor and update view
float humidity = data.getFloat(COL_WEATHER_HUMIDITY);
TextView humidityView = (TextView) getView().findViewById(R.id.detail_humidity_textview);
humidityView.setText(getActivity().getString(R.string.format_humidity, humidity));
// Read wind speed and direction from cursor and update view
float windSpeedStr = data.getFloat(COL_WEATHER_WIND_SPEED);
float windDirStr = data.getFloat(COL_WEATHER_DEGREES);
TextView windView = (TextView) getView().findViewById(R.id.detail_wind_textview);
windView.setText(Utility.getFormattedWind(getActivity(), windSpeedStr, windDirStr));
// Read pressure from cursor and update view
float pressure = data.getFloat(COL_WEATHER_PRESSURE);
TextView pressureView = (TextView) getView().findViewById(R.id.detail_pressure_textview);
pressureView.setText(getActivity().getString(R.string.format_pressure, pressure));
TurbineView turbine = (TurbineView) getView().findViewById(R.id.turbine);
turbine.setSpeed(data.getFloat(data.getColumnIndex(WeatherEntry.COLUMN_WIND_SPEED)));
WindDirectionView windDirection = (WindDirectionView) getView().findViewById(R.id.wind_direction);
windDirection.setDegrees(data.getFloat(data.getColumnIndex(WeatherEntry.COLUMN_DEGREES)));
// We still need this for the share intent
mForecastStr = String.format("%s - %s - %s/%s", dateText, condition, high, low);
// If onCreateOptionsMenu has already happened, we need to update the share intent now.
if (mShareActionProvider != null) {
mShareActionProvider.setShareIntent(createShareForecastIntent());
}
}
@Override
public void onLoaderReset(Loader<Cursor> loader) {
}
/**
 * Called when the preferred location changes. Rebuilds the detail URI for the
 * new location (keeping the date from the current URI) and restarts the loader
 * so the fragment re-queries and redisplays the weather data.
 */
public void onLocationChanged( String newLocation ) {
    if (D) { Log.v(TAG, "onLocationChanged: newLocation: " + newLocation);}
    // Nothing to refresh until a URI has been assigned.
    final Uri currentUri = mUri;
    if (currentUri == null) {
        return;
    }
    // Swap the location component of the URI, preserving the date component.
    final long date = WeatherEntry.getDateFromUri(currentUri);
    mUri = WeatherEntry.buildWeatherLocationWithDate(newLocation, date);
    if (D) { Log.v(TAG, "mUri: " + mUri);}
    getLoaderManager().restartLoader(DETAIL_LOADER, null, this);
}
} | apache-2.0 |
dianfengxiawu/fixed-assets | safe4j-web/src/main/java/com/saicmotor/cms/dao/LinkDao.java | 742 | /**
* Copyright (c) 2007-2016 SAIC. All Rights Reserved.
* This software is published under the terms of the SAIC IS Dept.
*
* @Project: safe4j-web
* @Title: LinkDao.java
* @Package com.saicmotor.cms.dao
* @Description:
*
* @CreateDate : 2016年4月18日
* @CreateBy : kojrf
*/
package com.saicmotor.cms.dao;
import java.util.List;
import com.saicmotor.cms.entity.Link;
import com.saicmotor.framework.persistence.CrudDao;
import com.saicmotor.framework.persistence.MyBatisDao;
/**
* @ClassName: LinkDao
* @Description:
* @author kojrf
* @date 2016年4月18日 下午1:22:27
*/
@MyBatisDao
public interface LinkDao extends CrudDao<Link> {

    /**
     * Loads the links whose ids are contained in the given array.
     *
     * @param ids ids of the links to load
     * @return the matching links — presumably empty when none match; confirm against the mapper XML
     */
    List<Link> findByIdIn(String[] ids);

    /**
     * Updates the weight of the given link (named for expired links; exact filtering
     * is defined in the corresponding mapper statement).
     *
     * @param link entity carrying the values used by the update statement
     * @return the number of rows affected
     */
    int updateExpiredWeight(Link link);
}
| apache-2.0 |
ThomasYangLin/DbToJson | src/test/java/com/yanglin/test/ExportTaiheJson.java | 989 | package com.yanglin.test;
import org.junit.Before;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.yanglin.service.UserServiceI;
/**
* @desc 类功能描述:
* @author devuser
* @createTime 2016年8月15日 上午11:32:36
*
* @version V2.0
*/
public class ExportTaiheJson {
    // Spring application context built from the XML configuration files below.
    private ApplicationContext ac = null;
    // Service bean under test, resolved from the Spring container in before().
    private UserServiceI userServiceI;

    @Before
    public void before(){
        // Create the Spring context from the "spring-context.xml" and "spring-mybatis.xml" configuration files.
        ac = new ClassPathXmlApplicationContext(new String[]{"spring-context.xml","spring-mybatis.xml"});
        // Look up the userService bean by id from the Spring container.
        userServiceI = (UserServiceI) ac.getBean("userService");
    }

    @Test
    public void exportYingDa(){
        // TODO: test body not implemented yet.
    }
}
| apache-2.0 |
ashigeru/asakusafw-compiler | bridge-project/runtime-hadoop/src/main/java/com/asakusafw/bridge/hadoop/directio/package-info.java | 703 | /**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Classes for Direct I/O in Hadoop.
*/
package com.asakusafw.bridge.hadoop.directio; | apache-2.0 |
yutta13/java_36 | mantis-test/src/test/java/ru/stqa/pft/github/mantis/appmanager/FtpHelper.java | 1077 | package ru.stqa.pft.github.mantis.appmanager;
import org.apache.commons.net.ftp.FTPClient;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
/**
* Created by uttabondarenko on 16.02.17.
*/
/**
 * Helper around Apache Commons Net {@link FTPClient} that replaces a remote file
 * while keeping a backup copy, and can restore that backup later. Connection
 * parameters ({@code ftp.host}, {@code ftp.login}, {@code ftp.password}) are read
 * from the {@link ApplicationManager} properties.
 */
public class FtpHelper {

    private final ApplicationManager app;
    private FTPClient ftp;

    public FtpHelper(ApplicationManager app) {
        this.app = app;
        ftp = new FTPClient();
    }

    /**
     * Uploads {@code file} to {@code target} on the FTP server, first moving the
     * current {@code target} to {@code backup} (any previous backup is deleted).
     *
     * @param file   local file to upload
     * @param target remote path to replace
     * @param backup remote path that receives the previous version of {@code target}
     * @throws IOException on any FTP or local I/O failure
     */
    public void upload(File file, String target, String backup) throws IOException {
        ftp.connect(app.getProperty("ftp.host"));
        try {
            ftp.login(app.getProperty("ftp.login"), app.getProperty("ftp.password"));
            ftp.deleteFile(backup);
            ftp.rename(target, backup);
            ftp.enterLocalPassiveMode();
            // try-with-resources: the original leaked this stream when storeFile threw.
            try (FileInputStream in = new FileInputStream(file)) {
                ftp.storeFile(target, in);
            }
        } finally {
            // Always release the control connection, even when a step above throws;
            // the original skipped disconnect() on any failure.
            ftp.disconnect();
        }
    }

    /**
     * Restores {@code backup} over {@code target} on the FTP server: the current
     * {@code target} is deleted and {@code backup} is renamed into its place.
     *
     * @param target remote path to restore
     * @param backup remote path holding the saved version
     * @throws IOException on any FTP failure
     */
    public void restore(String target, String backup) throws IOException {
        ftp.connect(app.getProperty("ftp.host"));
        try {
            ftp.login(app.getProperty("ftp.login"), app.getProperty("ftp.password"));
            ftp.deleteFile(target);
            ftp.rename(backup, target);
        } finally {
            ftp.disconnect();
        }
    }
}
| apache-2.0 |
skarpushin/summerb | summerb-easycrud/src/main/java/org/summerb/easycrud/impl/dataset/DataSetUpdaterOnEntityChangedEventImpl.java | 2755 | /*******************************************************************************
* Copyright 2015-2021 Sergey Karpushin
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package org.summerb.easycrud.impl.dataset;
import org.springframework.beans.factory.annotation.Autowired;
import org.summerb.easycrud.api.EasyCrudService;
import org.summerb.easycrud.api.EasyCrudServiceResolver;
import org.summerb.easycrud.api.dataset.DataSetUpdaterOnEntityChangedEvent;
import org.summerb.easycrud.api.dto.EntityChangedEvent;
import org.summerb.easycrud.api.dto.EntityChangedEvent.ChangeType;
import org.summerb.easycrud.api.dto.HasId;
import org.summerb.easycrud.api.dto.datapackage.DataSet;
import org.summerb.easycrud.api.dto.datapackage.DataTable;
/**
* This impl will simply update dataSet tables with updated entities
*
* WARNING: It doesn't not update any back-refs. Only table rows and only if
* table is created before this operation. Former is actually hard to implement
* since DataSet is not carrying information regarding references and it's not
* clear how to avoid n+1 problems.
*
* @author sergeyk
*
*/
public class DataSetUpdaterOnEntityChangedEventImpl implements DataSetUpdaterOnEntityChangedEvent {
    private EasyCrudServiceResolver easyCrudServiceResolver;

    /**
     * Applies an entity-changed event to the matching table of the given data set:
     * removed entities are dropped from the table rows, other change types put the
     * entity back into the table. Events for non-{@link HasId} payloads, or for
     * tables not already present in the data set, are ignored. Back-references are
     * not updated (see class comment).
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Override
    public void updateDataSet(DataSet dataSet, EntityChangedEvent<?> e) {
        if (!e.isTypeOf(HasId.class)) {
            return;
        }
        // The service resolved by DTO class supplies the message code used as table key.
        EasyCrudService crudService = easyCrudServiceResolver.resolveByDtoClass(e.getValue().getClass());
        String tableKey = crudService.getEntityTypeMessageCode();
        if (!dataSet.getTables().containsKey(tableKey)) {
            return;
        }
        DataTable targetTable = (DataTable) dataSet.getTables().get(tableKey);
        HasId changedEntity = (HasId) e.getValue();
        if (e.getChangeType() == ChangeType.REMOVED) {
            targetTable.getRows().remove(changedEntity.getId());
        } else {
            targetTable.put(changedEntity);
        }
    }

    public EasyCrudServiceResolver getEasyCrudServiceResolver() {
        return easyCrudServiceResolver;
    }

    @Autowired
    public void setEasyCrudServiceResolver(EasyCrudServiceResolver easyCrudServiceResolver) {
        this.easyCrudServiceResolver = easyCrudServiceResolver;
    }
}
| apache-2.0 |
twitter-forks/presto | presto-hive-metastore/src/test/java/com/facebook/presto/hive/metastore/TestRecordingHiveMetastore.java | 12796 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive.metastore;
import com.facebook.presto.common.predicate.Domain;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.hive.HiveBasicStatistics;
import com.facebook.presto.hive.HiveBucketProperty;
import com.facebook.presto.hive.HiveType;
import com.facebook.presto.hive.MetastoreClientConfig;
import com.facebook.presto.hive.metastore.HivePrivilegeInfo.HivePrivilege;
import com.facebook.presto.hive.metastore.SortingColumn.Order;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ColumnStatisticType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.Duration;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.common.type.VarcharType.VARCHAR;
import static com.facebook.presto.common.type.VarcharType.createVarcharType;
import static com.facebook.presto.hive.BucketFunctionType.HIVE_COMPATIBLE;
import static com.facebook.presto.hive.HiveBasicStatistics.createEmptyStatistics;
import static com.facebook.presto.hive.metastore.MetastoreUtil.convertPredicateToParts;
import static com.facebook.presto.hive.metastore.PrestoTableType.OTHER;
import static com.facebook.presto.spi.security.PrincipalType.USER;
import static com.facebook.presto.spi.statistics.ColumnStatisticType.MAX_VALUE;
import static com.facebook.presto.spi.statistics.ColumnStatisticType.MIN_VALUE;
import static io.airlift.slice.Slices.utf8Slice;
import static org.testng.Assert.assertEquals;
/**
 * Exercises {@code RecordingHiveMetastore}: metadata calls are first served by a
 * stub metastore and recorded to a temp file, then replayed against an
 * {@code UnimplementedHiveMetastore} (which would throw on any real call),
 * proving that every response came from the recording.
 */
public class TestRecordingHiveMetastore
{
    // Shared fixture objects; the stub below returns these and validateMetadata()
    // asserts the recording layer reproduces them exactly.
    private static final Database DATABASE = new Database(
            "database",
            Optional.of("location"),
            "owner",
            USER,
            Optional.of("comment"),
            ImmutableMap.of("param", "value"));
    private static final Column TABLE_COLUMN = new Column(
            "column",
            HiveType.HIVE_INT,
            Optional.of("comment"));
    private static final Storage TABLE_STORAGE = new Storage(
            StorageFormat.create("serde", "input", "output"),
            "location",
            Optional.of(new HiveBucketProperty(
                    ImmutableList.of("column"),
                    10,
                    ImmutableList.of(new SortingColumn("column", Order.ASCENDING)),
                    HIVE_COMPATIBLE,
                    Optional.empty())),
            true,
            ImmutableMap.of("param", "value2"),
            ImmutableMap.of());
    private static final Table TABLE = new Table(
            "database",
            "table",
            "owner",
            OTHER,
            TABLE_STORAGE,
            ImmutableList.of(TABLE_COLUMN),
            ImmutableList.of(TABLE_COLUMN),
            ImmutableMap.of("param", "value3"),
            Optional.of("original_text"),
            Optional.of("expanded_text"));
    private static final Partition PARTITION = new Partition(
            "database",
            "table",
            ImmutableList.of("value"),
            TABLE_STORAGE,
            ImmutableList.of(TABLE_COLUMN),
            ImmutableMap.of("param", "value4"));
    private static final PartitionStatistics PARTITION_STATISTICS = new PartitionStatistics(
            new HiveBasicStatistics(10, 11, 10000, 10001),
            ImmutableMap.of("column", new HiveColumnStatistics(
                    Optional.of(new IntegerStatistics(
                            OptionalLong.of(-100),
                            OptionalLong.of(102))),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    OptionalLong.of(1234),
                    OptionalLong.of(1235),
                    OptionalLong.of(1),
                    OptionalLong.of(8))));
    private static final HivePrivilegeInfo PRIVILEGE_INFO = new HivePrivilegeInfo(HivePrivilege.SELECT, true, new PrestoPrincipal(USER, "grantor"), new PrestoPrincipal(USER, "grantee"));
    private static final RoleGrant ROLE_GRANT = new RoleGrant(new PrestoPrincipal(USER, "grantee"), "role", true);

    @Test
    public void testRecordingHiveMetastore()
            throws IOException
    {
        // Phase 1: record — delegate to the stub metastore and capture responses to a temp file.
        MetastoreClientConfig recordingHiveClientConfig = new MetastoreClientConfig()
                .setRecordingPath(File.createTempFile("recording_test", "json").getAbsolutePath())
                .setRecordingDuration(new Duration(10, TimeUnit.MINUTES));
        RecordingHiveMetastore recordingHiveMetastore = new RecordingHiveMetastore(new TestingHiveMetastore(), recordingHiveClientConfig);
        validateMetadata(recordingHiveMetastore);
        recordingHiveMetastore.dropDatabase("other_database");
        recordingHiveMetastore.writeRecording();
        // Phase 2: replay — the delegate would throw on any call, so all answers must
        // come from the recording loaded from disk.
        MetastoreClientConfig replayingHiveClientConfig = recordingHiveClientConfig
                .setReplay(true);
        recordingHiveMetastore = new RecordingHiveMetastore(new UnimplementedHiveMetastore(), replayingHiveClientConfig);
        recordingHiveMetastore.loadRecording();
        validateMetadata(recordingHiveMetastore);
    }

    /**
     * Issues one call per recorded metastore operation and asserts the fixture
     * values round-trip unchanged. Run once while recording and once while replaying.
     */
    private void validateMetadata(ExtendedHiveMetastore hiveMetastore)
    {
        assertEquals(hiveMetastore.getDatabase("database"), Optional.of(DATABASE));
        assertEquals(hiveMetastore.getAllDatabases(), ImmutableList.of("database"));
        assertEquals(hiveMetastore.getTable("database", "table"), Optional.of(TABLE));
        assertEquals(hiveMetastore.getSupportedColumnStatistics(createVarcharType(123)), ImmutableSet.of(MIN_VALUE, MAX_VALUE));
        assertEquals(hiveMetastore.getTableStatistics("database", "table"), PARTITION_STATISTICS);
        assertEquals(hiveMetastore.getPartitionStatistics("database", "table", ImmutableSet.of("value")), ImmutableMap.of("value", PARTITION_STATISTICS));
        assertEquals(hiveMetastore.getAllTables("database"), Optional.of(ImmutableList.of("table")));
        assertEquals(hiveMetastore.getAllViews("database"), Optional.empty());
        assertEquals(hiveMetastore.getPartition("database", "table", ImmutableList.of("value")), Optional.of(PARTITION));
        assertEquals(hiveMetastore.getPartitionNames("database", "table"), Optional.of(ImmutableList.of("value")));
        Map<Column, Domain> map = new HashMap<>();
        Column column = new Column("column", HiveType.HIVE_STRING, Optional.empty());
        map.put(column, Domain.singleValue(VARCHAR, utf8Slice("value")));
        assertEquals(hiveMetastore.getPartitionNamesByFilter("database", "table", map), ImmutableList.of("value"));
        assertEquals(hiveMetastore.getPartitionsByNames("database", "table", ImmutableList.of("value")), ImmutableMap.of("value", Optional.of(PARTITION)));
        assertEquals(hiveMetastore.listTablePrivileges("database", "table", new PrestoPrincipal(USER, "user")), ImmutableSet.of(PRIVILEGE_INFO));
        assertEquals(hiveMetastore.listRoles(), ImmutableSet.of("role"));
        assertEquals(hiveMetastore.listRoleGrants(new PrestoPrincipal(USER, "user")), ImmutableSet.of(ROLE_GRANT));
    }

    /**
     * Stub metastore used in the recording phase: returns the fixture values for the
     * expected arguments and empty results otherwise. Everything not overridden
     * inherits the throwing behavior of {@code UnimplementedHiveMetastore}.
     */
    private static class TestingHiveMetastore
            extends UnimplementedHiveMetastore
    {
        @Override
        public Optional<Database> getDatabase(String databaseName)
        {
            if (databaseName.equals("database")) {
                return Optional.of(DATABASE);
            }
            return Optional.empty();
        }

        @Override
        public List<String> getAllDatabases()
        {
            return ImmutableList.of("database");
        }

        @Override
        public Optional<Table> getTable(String databaseName, String tableName)
        {
            if (databaseName.equals("database") && tableName.equals("table")) {
                return Optional.of(TABLE);
            }
            return Optional.empty();
        }

        @Override
        public Set<ColumnStatisticType> getSupportedColumnStatistics(Type type)
        {
            if (type.equals(createVarcharType(123))) {
                return ImmutableSet.of(MIN_VALUE, MAX_VALUE);
            }
            return ImmutableSet.of();
        }

        @Override
        public PartitionStatistics getTableStatistics(String databaseName, String tableName)
        {
            if (databaseName.equals("database") && tableName.equals("table")) {
                return PARTITION_STATISTICS;
            }
            return new PartitionStatistics(createEmptyStatistics(), ImmutableMap.of());
        }

        @Override
        public Map<String, PartitionStatistics> getPartitionStatistics(String databaseName, String tableName, Set<String> partitionNames)
        {
            if (databaseName.equals("database") && tableName.equals("table") && partitionNames.contains("value")) {
                return ImmutableMap.of("value", PARTITION_STATISTICS);
            }
            return ImmutableMap.of();
        }

        @Override
        public Optional<List<String>> getAllTables(String databaseName)
        {
            if (databaseName.equals("database")) {
                return Optional.of(ImmutableList.of("table"));
            }
            return Optional.empty();
        }

        @Override
        public Optional<List<String>> getAllViews(String databaseName)
        {
            return Optional.empty();
        }

        @Override
        public void dropDatabase(String databaseName)
        {
            // noop for test purpose
        }

        @Override
        public Optional<Partition> getPartition(String databaseName, String tableName, List<String> partitionValues)
        {
            if (databaseName.equals("database") && tableName.equals("table") && partitionValues.equals(ImmutableList.of("value"))) {
                return Optional.of(PARTITION);
            }
            return Optional.empty();
        }

        @Override
        public Optional<List<String>> getPartitionNames(String databaseName, String tableName)
        {
            if (databaseName.equals("database") && tableName.equals("table")) {
                return Optional.of(ImmutableList.of("value"));
            }
            return Optional.empty();
        }

        @Override
        public List<String> getPartitionNamesByFilter(
                String databaseName,
                String tableName,
                Map<Column, Domain> partitionPredicates)
        {
            List<String> parts = convertPredicateToParts(partitionPredicates);
            if (databaseName.equals("database") && tableName.equals("table") && parts.equals(ImmutableList.of("value"))) {
                return ImmutableList.of("value");
            }
            return ImmutableList.of();
        }

        @Override
        public Map<String, Optional<Partition>> getPartitionsByNames(String databaseName, String tableName, List<String> partitionNames)
        {
            if (databaseName.equals("database") && tableName.equals("table") && partitionNames.contains("value")) {
                return ImmutableMap.of("value", Optional.of(PARTITION));
            }
            return ImmutableMap.of();
        }

        @Override
        public Set<HivePrivilegeInfo> listTablePrivileges(String database, String table, PrestoPrincipal prestoPrincipal)
        {
            if (database.equals("database") && table.equals("table") && prestoPrincipal.getType() == USER && prestoPrincipal.getName().equals("user")) {
                return ImmutableSet.of(PRIVILEGE_INFO);
            }
            return ImmutableSet.of();
        }

        @Override
        public Set<String> listRoles()
        {
            return ImmutableSet.of("role");
        }

        @Override
        public Set<RoleGrant> listRoleGrants(PrestoPrincipal principal)
        {
            return ImmutableSet.of(ROLE_GRANT);
        }
    }
}
| apache-2.0 |
lburgazzoli/apache-activemq-artemis | tests/activemq5-unit-tests/src/test/java/org/apache/activemq/usecases/DurableSubsOfflineSelectorIndexUseTest.java | 7922 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.usecases;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.jms.Connection;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.Session;
import junit.framework.Test;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.command.ActiveMQTopic;
import org.apache.activemq.store.PersistenceAdapter;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
import org.apache.activemq.store.kahadb.KahaDBStore;
import org.apache.activemq.util.Wait;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Verifies that offline durable topic subscriptions with selectors do not leak
 * KahaDB index pages: messages are published while the subscribers are offline,
 * then fully consumed, and the store's page usage is asserted to settle back to
 * a fixed baseline (10 in-use pages).
 */
public class DurableSubsOfflineSelectorIndexUseTest extends org.apache.activemq.TestSupport {

    private static final Logger LOG = LoggerFactory.getLogger(DurableSubsOfflineSelectorIndexUseTest.class);
    // Varied by initCombosForTestIndexPageUsage(); default for a single run.
    public int messageCount = 400;
    private BrokerService broker;
    private ActiveMQTopic topic;
    private List<Throwable> exceptions = new ArrayList<>();

    @Override
    protected ActiveMQConnectionFactory createConnectionFactory() throws Exception {
        ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory("vm://" + getName(true));
        // Avoid advisory consumers skewing the page-count accounting.
        connectionFactory.setWatchTopicAdvisories(false);
        return connectionFactory;
    }

    @Override
    protected Connection createConnection() throws Exception {
        return createConnection("id");
    }

    // Durable subscriptions require a fixed client id; connection is started before return.
    protected Connection createConnection(String name) throws Exception {
        Connection con = super.createConnection();
        con.setClientID(name);
        con.start();
        return con;
    }

    public static Test suite() {
        return suite(DurableSubsOfflineSelectorIndexUseTest.class);
    }

    @Override
    protected void setUp() throws Exception {
        exceptions.clear();
        topic = (ActiveMQTopic) createDestination();
        createBroker();
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        destroyBroker();
    }

    private void createBroker() throws Exception {
        createBroker(true);
    }

    private void createBroker(boolean deleteAllMessages) throws Exception {
        broker = BrokerFactory.createBroker("broker:(vm://" + getName(true) + ")");
        broker.setBrokerName(getName(true));
        broker.setDeleteAllMessagesOnStartup(deleteAllMessages);
        broker.getManagementContext().setCreateConnector(false);
        broker.setAdvisorySupport(false);
        broker.addConnector("tcp://0.0.0.0:0");
        setDefaultPersistenceAdapter(broker);
        broker.start();
    }

    private void destroyBroker() throws Exception {
        if (broker != null)
            broker.stop();
    }

    // Combination values picked up by the TestSupport combo runner.
    public void initCombosForTestIndexPageUsage() {
        addCombinationValues("messageCount", new Integer[]{890, 900, 400});
    }

    public void testIndexPageUsage() throws Exception {
        // Register two durable subscriptions (selector 'true' / 'false') and take them offline.
        Connection con = createConnection();
        Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
        session.close();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
        session.close();
        con.close();
        // send messages
        final Connection sendCon = createConnection("send");
        final Session sendSession = sendCon.createSession(false, Session.AUTO_ACKNOWLEDGE);
        final MessageProducer producer = sendSession.createProducer(null);
        Thread sendThread = new Thread() {
            @Override
            public void run() {
                try {
                    // Alternate the 'filter' property so each subscription matches half the messages.
                    for (int i = 0; i < messageCount; i++) {
                        boolean filter = i % 2 == 1;
                        Message message = sendSession.createMessage();
                        message.setStringProperty("filter", filter ? "true" : "false");
                        producer.send(topic, message);
                        if (i > 0 && i % 1000 == 0) {
                            LOG.info("Sent:" + i);
                        }
                    }
                    sendSession.close();
                    sendCon.close();
                }
                catch (Exception e) {
                    exceptions.add(e);
                }
            }
        };
        sendThread.start();
        sendThread.join();
        // settle with sent messages
        TimeUnit.SECONDS.sleep(4);
        // consume messages
        con = createConnection();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumerTrue = session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
        Listener listenerT = new Listener();
        consumerTrue.setMessageListener(listenerT);
        waitFor(listenerT, messageCount / 2);
        MessageConsumer consumerFalse = session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
        Listener listenerF = new Listener();
        consumerFalse.setMessageListener(listenerF);
        waitFor(listenerF, messageCount / 2);
        assertEquals(messageCount / 2, listenerT.count);
        assertEquals(messageCount / 2, listenerF.count);
        // Unsubscribe both so the store can reclaim the subscription state.
        consumerTrue.close();
        session.unsubscribe("true");
        consumerFalse.close();
        session.unsubscribe("false");
        session.close();
        con.close();
        // Assert the KahaDB index settles back to the 10-page baseline (no leaked pages).
        PersistenceAdapter persistenceAdapter = broker.getPersistenceAdapter();
        if (persistenceAdapter instanceof KahaDBPersistenceAdapter) {
            final KahaDBStore store = ((KahaDBPersistenceAdapter) persistenceAdapter).getStore();
            LOG.info("Store page count: " + store.getPageFile().getPageCount());
            LOG.info("Store free page count: " + store.getPageFile().getFreePageCount());
            LOG.info("Store page in-use: " + (store.getPageFile().getPageCount() - store.getPageFile().getFreePageCount()));
            assertTrue("no leak of pages, always use just 10", Wait.waitFor(new Wait.Condition() {
                @Override
                public boolean isSatisified() throws Exception {
                    return 10 == store.getPageFile().getPageCount() - store.getPageFile().getFreePageCount();
                }
            }, TimeUnit.SECONDS.toMillis(10)));
        }
    }

    // Blocks until the listener has seen the expected number of messages (10 min cap).
    private void waitFor(final Listener listener, final int count) throws Exception {
        assertTrue("got all messages on time", Wait.waitFor(new Wait.Condition() {
            @Override
            public boolean isSatisified() throws Exception {
                return listener.count == count;
            }
        }, TimeUnit.MINUTES.toMillis(10)));
    }

    /** Simple counting MessageListener; optionally logs message ids when {@code id} is set. */
    public static class Listener implements MessageListener {
        int count = 0;
        String id = null;

        Listener() {
        }

        @Override
        public void onMessage(Message message) {
            count++;
            if (id != null) {
                try {
                    LOG.info(id + ", " + message.getJMSMessageID());
                }
                catch (Exception ignored) {
                }
            }
        }
    }
}
| apache-2.0 |
m-m-m/util | value/src/main/java/net/sf/mmm/util/value/api/StringValueConverter.java | 561 | /* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.value.api;
/**
* This is a sub-interface of {@link GenericValueConverter} for the most common value type {@link String}. It
* is typically used for dealing with values (e.g. when reading configurations).
*
* @see ComposedValueConverter
*
* @author Joerg Hohwiller (hohwille at users.sourceforge.net)
* @since 1.0.2
*/
public interface StringValueConverter extends GenericValueConverter<String> {
    // Marker specialization: all conversion operations are inherited from
    // GenericValueConverter with the value type fixed to String.
}
| apache-2.0 |
Donnerbart/hazelcast-simulator | simulator/src/test/java/com/hazelcast/simulator/worker/testcontainer/TimeStepRunStrategyIntegrationTest.java | 5183 | package com.hazelcast.simulator.worker.testcontainer;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.simulator.common.TestCase;
import com.hazelcast.simulator.common.TestPhase;
import com.hazelcast.simulator.protocol.connector.WorkerConnector;
import com.hazelcast.simulator.test.BaseThreadState;
import com.hazelcast.simulator.test.annotations.AfterRun;
import com.hazelcast.simulator.test.annotations.BeforeRun;
import com.hazelcast.simulator.test.annotations.TimeStep;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;
import static com.hazelcast.simulator.TestEnvironmentUtils.setupFakeUserDir;
import static com.hazelcast.simulator.TestEnvironmentUtils.teardownFakeUserDir;
import static com.hazelcast.simulator.TestSupport.spawn;
import static com.hazelcast.simulator.common.TestPhase.RUN;
import static com.hazelcast.simulator.common.TestPhase.SETUP;
import static com.hazelcast.simulator.utils.CommonUtils.sleepSeconds;
import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
 * Integration tests for the time-step run strategy of {@code TestContainer}:
 * verifies that @BeforeRun/@AfterRun fire once per worker thread and that each
 * worker thread receives its own distinct thread-state instance.
 */
public class TimeStepRunStrategyIntegrationTest {

    private static final String TEST_ID = "SomeId";

    @Before
    public void before() {
        // Tests write worker artifacts; point the user dir at a throwaway location.
        setupFakeUserDir();
    }

    @After
    public void after() {
        teardownFakeUserDir();
    }

    @Test
    public void testWithAllRunPhases() throws Exception {
        int threadCount = 2;
        TestWithAllRunPhases testInstance = new TestWithAllRunPhases();
        TestCase testCase = new TestCase(TEST_ID)
                .setProperty("threadCount", threadCount)
                .setProperty("class", testInstance.getClass());

        TestContextImpl testContext = new TestContextImpl(
                mock(HazelcastInstance.class), testCase.getId(), "localhost", mock(WorkerConnector.class));
        final TestContainer container = new TestContainer(testContext, testInstance, testCase);
        container.invoke(SETUP);

        // Run phase executes on a background thread; let it spin, then stop the context.
        Future runFuture = spawn(new Callable() {
            @Override
            public Object call() throws Exception {
                container.invoke(RUN);
                return null;
            }
        });
        sleepSeconds(5);
        testContext.stop();
        runFuture.get();

        container.invoke(TestPhase.LOCAL_TEARDOWN);
        System.out.println("done");

        // Each worker thread must run beforeRun/afterRun exactly once.
        assertEquals(threadCount, testInstance.beforeRunCount.get());
        assertEquals(threadCount, testInstance.afterRunCount.get());
        System.out.println(testInstance.timeStepCount);
    }

    /** Test fixture counting invocations of each run-phase annotation. */
    public static class TestWithAllRunPhases {
        private final AtomicLong beforeRunCount = new AtomicLong();
        private final AtomicLong afterRunCount = new AtomicLong();
        private final AtomicLong timeStepCount = new AtomicLong();

        @BeforeRun
        public void beforeRun() {
            beforeRunCount.incrementAndGet();
        }

        @TimeStep
        public void timeStep() {
            timeStepCount.incrementAndGet();
        }

        @AfterRun
        public void afterRun() {
            afterRunCount.incrementAndGet();
        }
    }

    @Test
    public void testWithThreadContext() throws Exception {
        int threadCount = 2;
        TestWithThreadState testInstance = new TestWithThreadState();
        TestCase testCase = new TestCase("someid")
                .setProperty("threadCount", threadCount)
                .setProperty("class", testInstance.getClass());

        TestContextImpl testContext = new TestContextImpl(
                mock(HazelcastInstance.class), testCase.getId(), "localhost", mock(WorkerConnector.class));
        final TestContainer container = new TestContainer(testContext, testInstance, testCase);
        container.invoke(SETUP);

        Future runFuture = spawn(new Callable() {
            @Override
            public Object call() throws Exception {
                container.invoke(RUN);
                return null;
            }
        });
        Thread.sleep(5000);
        testContext.stop();
        runFuture.get();

        container.invoke(TestPhase.LOCAL_TEARDOWN);

        assertEquals(threadCount, testInstance.map.size());
        // each context should be unique
        Set<BaseThreadState> contexts = new HashSet<BaseThreadState>(testInstance.map.values());
        assertEquals(threadCount, contexts.size());
    }

    /**
     * Test fixture that records, per worker thread, which thread-state instance the
     * container handed in, and fails fast if a thread ever sees a different instance.
     */
    public static class TestWithThreadState {
        private final Map<Thread, BaseThreadState> map = new ConcurrentHashMap<Thread, BaseThreadState>();

        @TimeStep
        public void timeStep(BaseThreadState state) {
            BaseThreadState found = map.get(Thread.currentThread());
            if (found == null) {
                map.put(Thread.currentThread(), state);
            } else if (found != state) {
                throw new RuntimeException("Unexpected context");
            }
        }
    }
}
| apache-2.0 |
Sellegit/j2objc | runtime/src/main/java/apple/security/SSLConnectionType.java | 645 | package apple.security;
import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;
import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.dispatch.*;
@Library("Security/Security.h")
@Mapping("SSLConnectionType")
public final class SSLConnectionType extends ObjCEnum {
    // Maps the native kSSLStreamType constant (stream-oriented connections).
    @GlobalConstant("kSSLStreamType")
    public static final long StreamType = 0L;
    // Maps the native kSSLDatagramType constant (datagram-oriented connections).
    @GlobalConstant("kSSLDatagramType")
    public static final long DatagramType = 1L;
}
| apache-2.0 |
HuaJiTeam/ZhBus | app/src/main/java/huajiteam/zhuhaibus/GetWebContent.java | 508 | package huajiteam.zhuhaibus;
import java.io.IOException;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
/**
* Created by KelaKim on 2016/5/15.
*/
public class GetWebContent {
    OkHttpClient client = new OkHttpClient();

    /**
     * Performs a blocking HTTP GET against {@code url} and returns the raw response.
     * The caller should close the returned {@link Response} (OkHttp requirement).
     *
     * @throws IOException if the request cannot be executed
     */
    Response httpGet(String url) throws IOException {
        // Build and execute in one expression; execute() blocks until the response arrives.
        return client.newCall(new Request.Builder().url(url).build()).execute();
    }
}
| apache-2.0 |
Tema/screenshot | src/com/screenshot/gui/ScreenshotApp.java | 3635 | /*
* Copyright (C) Tema
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.screenshot.gui;
import java.awt.Cursor;
import java.awt.Rectangle;
import java.awt.event.*;
import javax.swing.*;
import com.screenshot.ScreenshotListener;
import com.screenshot.Settings;
import com.screenshot.util.Messenger;
import com.screenshot.util.ScreenUtils;
/**
 * Full-screen undecorated window that hosts the screenshot selection panel.
 * The window is closed (and optionally the JVM exited) on ESC, focus loss,
 * or iconification. In system-tray mode the frame is disposed between
 * snapshots and an explicit GC keeps the heap small.
 */
public class ScreenshotApp {

    private final JFrame frame;
    ScreenshotPanel screenshotPanel;
    private final Messenger messenger;
    // Guards against double-closing; close() can be triggered by several listeners.
    // NOTE(review): accessed from Swing listeners — presumably EDT-only; confirm.
    private boolean closed = false;

    public ScreenshotApp(ScreenshotListener listener, Messenger messenger) {
        this.messenger = messenger;
        frame = new JFrame("Screenshot");
        screenshotPanel = new ScreenshotPanel(listener, messenger);
        frame.getContentPane().add(screenshotPanel);
        frame.setUndecorated(true);
        frame.getContentPane().setCursor(Cursor.getPredefinedCursor(Cursor.CROSSHAIR_CURSOR));
        // ESC aborts the screenshot.
        frame.addKeyListener(new KeyAdapter() {
            @Override
            public void keyPressed(KeyEvent e) {
                if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
                    close("ESC button");
                }
            }
        });
        // Losing focus or minimizing also aborts.
        frame.addFocusListener(new FocusAdapter() {
            public void focusLost(FocusEvent e) {
                close("focus lost");
            }
        });
        frame.addWindowListener(new WindowAdapter() {
            public void windowIconified(WindowEvent e) {
                close("inconified");
            }
        });
        open();
    }

    // Sizes the frame to cover the whole (virtual) screen and shows it.
    private void open() {
        Rectangle screen = ScreenUtils.getScreenBounds();
        frame.setLocation(screen.getLocation());
        frame.setSize(screen.width, screen.height);
        frame.setVisible(true);
        //If we don't dispose but simply hide window between snapshots
        // then we need the code below to cope with iconified windows
        //frame.setState(Frame.NORMAL);
        //frame.setVisible(true);
        //frame.setVisible(true);
    }

    public void close(String reason){
        close(reason, false, true);
    }

    /**
     * Closes the window once (subsequent calls are no-ops).
     *
     * @param reason  human-readable trigger, logged for debugging
     * @param synchGC run the post-close GC synchronously instead of on the EDT queue
     * @param exit    when not in system-tray mode, whether to terminate the JVM
     */
    public void close(String reason, boolean synchGC, boolean exit) {
        if (closed){
            return;
        } else {
            closed = true;
        }
        messenger.debug("Window is closed: " + reason);
        screenshotPanel.clear();
        frame.dispose();
        if (Settings.getInstance().isSystemTrayMode()) {
            // need explicit GC for two reason
            // first keep heap small and don't waste PC memory
            // second avoid OOM in case of abnormal frequent user clicks
            if (synchGC) {
                GC.run();
            } else {
                SwingUtilities.invokeLater(GC);
            }
        } else if (exit) {
            System.exit(0);
        }
    }

    // Triggers GC and pauses briefly to let it take effect before the next snapshot.
    private final static Runnable GC = new Runnable() {
        public void run() {
            System.gc();
            try {
                Thread.sleep(100L);
            } catch (InterruptedException e) {
                //ignore
            }
        }
    };
}
| apache-2.0 |
joewalnes/idea-community | plugins/svn4idea/src/org/jetbrains/idea/svn/update/UpdateEventHandler.java | 9296 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.update;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.update.FileGroup;
import com.intellij.openapi.vcs.update.UpdatedFiles;
import com.intellij.openapi.wm.StatusBar;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.svn.SvnBundle;
import org.jetbrains.idea.svn.SvnFileUrlMapping;
import org.jetbrains.idea.svn.SvnVcs;
import org.tmatesoft.svn.core.SVNCancelException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.internal.wc.SVNErrorManager;
import org.tmatesoft.svn.core.wc.ISVNEventHandler;
import org.tmatesoft.svn.core.wc.SVNEvent;
import org.tmatesoft.svn.core.wc.SVNEventAction;
import org.tmatesoft.svn.core.wc.SVNStatusType;
import org.tmatesoft.svn.util.SVNLogType;
import java.io.File;
/**
* @author lesya
*/
/**
 * Translates SVNKit update events into IDEA's {@link UpdatedFiles} groups and
 * progress-indicator text. One instance handles a whole update session,
 * including nested svn:externals (tracked via {@code myExternalsCount}).
 */
public class UpdateEventHandler implements ISVNEventHandler {
  private ProgressIndicator myProgressIndicator;
  private UpdatedFiles myUpdatedFiles;
  // Starts at 1 for the root working copy; incremented per external being updated.
  private int myExternalsCount;
  private final SvnVcs myVCS;
  @Nullable private final SvnUpdateContext mySequentialUpdatesContext;
  // Progress text set by handleEvent and pushed to the indicator afterwards.
  protected String myText;
  protected String myText2;

  public UpdateEventHandler(SvnVcs vcs, ProgressIndicator progressIndicator,
                            @Nullable final SvnUpdateContext sequentialUpdatesContext) {
    myProgressIndicator = progressIndicator;
    myVCS = vcs;
    mySequentialUpdatesContext = sequentialUpdatesContext;
    myExternalsCount = 1;
  }

  public void setUpdatedFiles(final UpdatedFiles updatedFiles) {
    myUpdatedFiles = updatedFiles;
  }

  /**
   * Classifies a single update event into a file group and updates the
   * progress text. The branch order matters: tree conflicts are registered
   * first, and UPDATE_NONE events are skipped entirely.
   */
  public void handleEvent(final SVNEvent event, double progress) {
    if (event == null || event.getFile() == null) {
      return;
    }
    String path = event.getFile().getAbsolutePath();
    String displayPath = event.getFile().getName();
    myText2 = null;
    myText = null;
    // Give subclasses first refusal on the event.
    if (handleInDescendants(event)) {
      updateProgressIndicator();
      return;
    }
    if (event.getAction() == SVNEventAction.TREE_CONFLICT) {
      myText2 = SvnBundle.message("progress.text2.treeconflicted", displayPath);
      updateProgressIndicator();
      myUpdatedFiles.registerGroup(createFileGroup(VcsBundle.message("update.group.name.merged.with.tree.conflicts"),
                                                   FileGroup.MERGED_WITH_TREE_CONFLICT));
      addFileToGroup(FileGroup.MERGED_WITH_TREE_CONFLICT, event);
    }
    if (event.getAction() == SVNEventAction.UPDATE_ADD ||
        event.getAction() == SVNEventAction.ADD) {
      myText2 = SvnBundle.message("progress.text2.added", displayPath);
      if (event.getContentsStatus() == SVNStatusType.CONFLICTED || event.getPropertiesStatus() == SVNStatusType.CONFLICTED) {
        addFileToGroup(FileGroup.MERGED_WITH_CONFLICT_ID, event);
        myText2 = SvnBundle.message("progress.text2.conflicted", displayPath);
      } else if (myUpdatedFiles.getGroupById(FileGroup.REMOVED_FROM_REPOSITORY_ID).getFiles().contains(path)) {
        // Deleted-then-added in the same update session => report as "replaced".
        myUpdatedFiles.getGroupById(FileGroup.REMOVED_FROM_REPOSITORY_ID).getFiles().remove(path);
        if (myUpdatedFiles.getGroupById(AbstractSvnUpdateIntegrateEnvironment.REPLACED_ID) == null) {
          myUpdatedFiles.registerGroup(createFileGroup(SvnBundle.message("status.group.name.replaced"),
                                                       AbstractSvnUpdateIntegrateEnvironment.REPLACED_ID));
        }
        addFileToGroup(AbstractSvnUpdateIntegrateEnvironment.REPLACED_ID, event);
      } else {
        addFileToGroup(FileGroup.CREATED_ID, event);
      }
    }
    else if (event.getAction() == SVNEventAction.UPDATE_NONE) {
      // skip it
      return;
    }
    else if (event.getAction() == SVNEventAction.UPDATE_DELETE) {
      myText2 = SvnBundle.message("progress.text2.deleted", displayPath);
      addFileToGroup(FileGroup.REMOVED_FROM_REPOSITORY_ID, event);
    }
    else if (event.getAction() == SVNEventAction.UPDATE_UPDATE) {
      // Conflicted > merged > changed > unchanged > unknown, in that order.
      if (event.getContentsStatus() == SVNStatusType.CONFLICTED || event.getPropertiesStatus() == SVNStatusType.CONFLICTED) {
        if (event.getContentsStatus() == SVNStatusType.CONFLICTED) {
          addFileToGroup(FileGroup.MERGED_WITH_CONFLICT_ID, event);
        }
        if (event.getPropertiesStatus() == SVNStatusType.CONFLICTED) {
          addFileToGroup(FileGroup.MERGED_WITH_PROPERTY_CONFLICT_ID, event);
        }
        myText2 = SvnBundle.message("progress.text2.conflicted", displayPath);
      }
      else if (event.getContentsStatus() == SVNStatusType.MERGED || event.getPropertiesStatus() == SVNStatusType.MERGED) {
        myText2 = SvnBundle.message("progres.text2.merged", displayPath);
        addFileToGroup(FileGroup.MERGED_ID, event);
      }
      else if (event.getContentsStatus() == SVNStatusType.CHANGED || event.getPropertiesStatus() == SVNStatusType.CHANGED) {
        myText2 = SvnBundle.message("progres.text2.updated", displayPath);
        addFileToGroup(FileGroup.UPDATED_ID, event);
      }
      else if (event.getContentsStatus() == SVNStatusType.UNCHANGED &&
               (event.getPropertiesStatus() == SVNStatusType.UNCHANGED || event.getPropertiesStatus() == SVNStatusType.UNKNOWN)) {
        myText2 = SvnBundle.message("progres.text2.updated", displayPath);
      }
      else {
        myText2 = "";
        addFileToGroup(FileGroup.UNKNOWN_ID, event);
      }
    }
    else if (event.getAction() == SVNEventAction.UPDATE_EXTERNAL) {
      // An svn:external root is about to be updated; track it for sequential updates.
      if (mySequentialUpdatesContext != null) {
        mySequentialUpdatesContext.registerExternalRootBeingUpdated(event.getFile());
      }
      myExternalsCount++;
      if (myUpdatedFiles.getGroupById(AbstractSvnUpdateIntegrateEnvironment.EXTERNAL_ID) == null) {
        myUpdatedFiles.registerGroup(new FileGroup(SvnBundle.message("status.group.name.externals"),
                                                   SvnBundle.message("status.group.name.externals"),
                                                   false, AbstractSvnUpdateIntegrateEnvironment.EXTERNAL_ID, true));
      }
      addFileToGroup(AbstractSvnUpdateIntegrateEnvironment.EXTERNAL_ID, event);
      myText = SvnBundle.message("progress.text.updating.external.location", event.getFile().getAbsolutePath());
    }
    else if (event.getAction() == SVNEventAction.RESTORE) {
      myText2 = SvnBundle.message("progress.text2.restored.file", displayPath);
      addFileToGroup(FileGroup.RESTORED_ID, event);
    }
    else if (event.getAction() == SVNEventAction.UPDATE_COMPLETED && event.getRevision() >= 0) {
      myExternalsCount--;
      myText2 = SvnBundle.message("progres.text2.updated.to.revision", event.getRevision());
      // Only when the outermost (root) update completes, show the status-bar message.
      if (myExternalsCount == 0) {
        myExternalsCount = 1;
        StatusBar.Info.set(SvnBundle.message("status.text.updated.to.revision", event.getRevision()), myVCS.getProject());
      }
    }
    else if (event.getAction() == SVNEventAction.SKIP) {
      myText2 = SvnBundle.message("progress.text2.skipped.file", displayPath);
      addFileToGroup(FileGroup.SKIPPED_ID, event);
    }
    updateProgressIndicator();
  }

  // NOTE(review): private and never called within this class — candidate for removal.
  // Reports whether the event's URL differs from the file's currently mapped URL.
  private boolean itemSwitched(final SVNEvent event) {
    final File file = event.getFile();
    final SvnFileUrlMapping urlMapping = myVCS.getSvnFileUrlMapping();
    final SVNURL currentUrl = urlMapping.getUrlForFile(file);
    return (currentUrl != null) && (! currentUrl.equals(event.getURL()));
  }

  // Pushes whichever of myText/myText2 were set by handleEvent to the indicator.
  private void updateProgressIndicator() {
    if (myProgressIndicator != null) {
      if (myText != null) {
        myProgressIndicator.setText(myText);
      }
      if (myText2 != null) {
        myProgressIndicator.setText2(myText2);
      }
    }
  }

  // Extension point: subclasses return true to claim the event and stop default handling.
  protected boolean handleInDescendants(final SVNEvent event) {
    return false;
  }

  // Adds the event's file (and any error message) to the group with the given id.
  // Assumes the group was registered beforehand — getGroupById must not return null.
  protected void addFileToGroup(final String id, final SVNEvent event) {
    final FileGroup fileGroup = myUpdatedFiles.getGroupById(id);
    final String path = event.getFile().getAbsolutePath();
    fileGroup.add(path, SvnVcs.getKey(), null);
    if (event.getErrorMessage() != null) {
      fileGroup.addError(path, event.getErrorMessage().getMessage());
    }
  }

  // ISVNEventHandler cancellation hook: converts IDEA cancellation into SVNCancelException.
  public void checkCancelled() throws SVNCancelException {
    if (myProgressIndicator != null) {
      myProgressIndicator.checkCanceled();
      if (myProgressIndicator.isCanceled()) {
        SVNErrorManager.cancel(SvnBundle.message("exception.text.update.operation.cancelled"), SVNLogType.DEFAULT);
      }
    }
  }

  private static FileGroup createFileGroup(String name, String id) {
    return new FileGroup(name, name, false, id, true);
  }

  public void setProgressIndicator(ProgressIndicator progressIndicator) {
    myProgressIndicator = progressIndicator;
  }
}
| apache-2.0 |
magott/spring-social-yammer | spring-social-yammer/src/main/java/org/springframework/social/yammer/api/SubscriptionOperations.java | 2029 | /*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.yammer.api;
import org.springframework.social.OperationNotPermittedException;
/**
 * Sub-API for Yammer subscriptions: following and unfollowing users, topics
 * and threads, plus checking the current follow state.
 *
 * @author Morten Andersen-Gott
 */
public interface SubscriptionOperations {

    /**
     * Follow the user with the given id.
     *
     * @param userId id of the user to follow
     * @throws OperationNotPermittedException if the user does not exist
     */
    void followUser(long userId);

    /**
     * Follow the topic with the given id.
     *
     * @param topicId id of the topic to follow
     * @throws OperationNotPermittedException if the topic does not exist
     */
    void followTopic(long topicId);

    /**
     * Check whether you are following the given user.
     *
     * @param userId id of the user to check
     * @return <code>true</code> if you are following the user, otherwise <code>false</code>
     */
    boolean isFollowingUser(long userId);

    /**
     * Check whether you are following the given topic.
     *
     * @param topicId id of the topic to check
     * @return <code>true</code> if you are following the topic, otherwise <code>false</code>
     */
    boolean isFollowingTopic(long topicId);

    /**
     * Check whether you are following the given thread.
     *
     * @param threadId id of the thread to check
     * @return <code>true</code> if you are following the thread, otherwise <code>false</code>
     */
    boolean isFollowingThread(long threadId);

    /**
     * Stop following a topic.
     *
     * @param topicId id of the topic to unfollow
     */
    void unfollowTopic(long topicId);

    /**
     * Stop following a user.
     *
     * @param userId id of the user to unfollow
     */
    void unfollowUser(long userId);
}
| apache-2.0 |
IHTSDO/MLDS | src/main/java/ca/intelliware/ihtsdo/mlds/config/CacheConfiguration.java | 3416 | package ca.intelliware.ihtsdo.mlds.config;
import java.util.Set;
import java.util.SortedSet;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.metamodel.EntityType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.ehcache.EhCacheCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.util.Assert;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ehcache.InstrumentedEhcache;
@Configuration
@EnableCaching
//@AutoConfigureAfter(value = {MetricsConfiguration.class, DatabaseConfiguration.class})
public class CacheConfiguration {

    private final Logger log = LoggerFactory.getLogger(CacheConfiguration.class);

    @PersistenceContext
    private EntityManager entityManager;

    @Inject
    private Environment env;

    @Inject
    private MetricRegistry metricRegistry;

    // Held statically so that repeated context refreshes reuse one Ehcache
    // manager instead of creating duplicates (cacheManager() checks for null).
    private static net.sf.ehcache.CacheManager cacheManager;
    private static EhCacheCacheManager ehCacheManager;

    /**
     * Clears all registered metrics on context shutdown. The cache manager
     * itself is intentionally left running (see commented-out shutdown below).
     */
    @PreDestroy
    public void destroy() {
        log.info("Remove Cache Manager metrics");
        SortedSet<String> names = metricRegistry.getNames();
        for (String name : names) {
            metricRegistry.remove(name);
        }
        //log.info("Closing Cache Manager");
        //cacheManager.shutdown();
    }

    /**
     * Creates (or reuses) the Ehcache-backed Spring CacheManager, instrumenting
     * each JPA entity's cache with Dropwizard metrics gauges.
     */
    @Bean
    public CacheManager cacheManager() {
        if (cacheManager != null) {
            log.debug("Skipping creation of EHcache manager - already exists");
        } else {
            log.debug("Starting Ehcache");
            net.sf.ehcache.config.Configuration config = new net.sf.ehcache.config.Configuration();
            net.sf.ehcache.config.CacheConfiguration cacheConfiguration = new net.sf.ehcache.config.CacheConfiguration().maxElementsInMemory(1600);
            config.setDefaultCacheConfiguration(cacheConfiguration);
            cacheManager = net.sf.ehcache.CacheManager.create(config);
            log.debug("Registring Ehcache Metrics gauges");
            Set<EntityType<?>> entities = entityManager.getMetamodel().getEntities();
            for (EntityType<?> entity : entities) {
                String name = entity.getName();
                // NOTE(review): with '||' this overwrites the entity name whenever a
                // Java type is present, and would NPE if both name and Java type were
                // null; '&&' (fall back only when the name is missing) may have been
                // intended — confirm before changing.
                if (name == null || entity.getJavaType() != null) {
                    name = entity.getJavaType().getName();
                }
                Assert.notNull(name, "entity cannot exist without a identifier");
                net.sf.ehcache.Cache cache = cacheManager.getCache(name);
                if (cache != null) {
                    // TTL is configurable via 'cache.timeToLiveSeconds', default one hour.
                    cache.getCacheConfiguration().setTimeToLiveSeconds(env.getProperty("cache.timeToLiveSeconds", Integer.class, 3600));
                    // Wrap the cache so hit/miss statistics are reported to the MetricRegistry.
                    net.sf.ehcache.Ehcache decoratedCache = InstrumentedEhcache.instrument(metricRegistry, cache);
                    cacheManager.replaceCacheWithDecoratedCache(cache, decoratedCache);
                }
            }
            ehCacheManager = new EhCacheCacheManager();
            ehCacheManager.setCacheManager(cacheManager);
        }
        return ehCacheManager;
    }
}
| apache-2.0 |
21592464/coolweather | app/src/main/java/com/coolweather/android/gson/Weather.java | 393 | package com.coolweather.android.gson;
import com.google.gson.annotations.SerializedName;
import java.util.List;
/**
* Created by peekaboo on 2017/5/27.
*/
/**
 * Top-level GSON model for the weather service's JSON response.
 * Field names match the JSON keys except where remapped via
 * {@link SerializedName}.
 */
public class Weather {
    // Response status flag — presumably "ok" on success; confirm against the API payload.
    public String status;
    public Basic basic;
    public AQI aqi;
    public Now now;
    public Suggestion suggestion;
    // Mapped from the "daily_forecast" JSON array.
    @SerializedName("daily_forecast")
    public List<Forecast> forecastList;
}
| apache-2.0 |
inab/GOPHER | GOPHERPrepare/src/org/cnio/scombio/jmfernandez/GOPHER/CIFDict.java | 7461 | package org.cnio.scombio.jmfernandez.GOPHER;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.logging.Logger;
import java.util.Map;
import java.util.zip.GZIPInputStream;
/**
*
* @author jmfernandez
*
*/
/**
 * Parser for the (possibly gzip-compressed) PDB chemical component dictionary
 * in CIF format. Builds a map from three-letter component codes to one-letter
 * amino-acid codes (resolving non-standard residues through their parent
 * components), plus the set of codes known not to be peptide components.
 *
 * @author jmfernandez
 */
public class CIFDict {
	protected final static Logger LOG=Logger.getLogger(CIFDict.class.getName());

	static {
		LOG.setUseParentHandlers(false);
	};

	// Three-letter component code -> one-letter amino-acid code.
	protected HashMap<String, Character> toOneAA;
	// Three-letter codes of components that are not peptide-linking.
	protected HashSet<String> notAA;

	/**
	 * Parses the dictionary file and populates the lookup tables.
	 *
	 * @param cifdict the dictionary file, plain text or gzip-compressed
	 * @throws IOException if the file cannot be read
	 */
	public CIFDict(File cifdict)
		throws IOException
	{
		toOneAA = new HashMap<String,Character>();
		// Components whose one-letter code is '?' but that list parent components;
		// resolved transitively after the whole file has been read.
		HashMap<String,String[]> toOneAADeriv = new HashMap<String,String[]>();
		notAA = new HashSet<String>();

		FileInputStream fis = null;
		GZIPInputStream gis = null;
		fis = new FileInputStream(cifdict);
		// Probe for gzip; fall back to the raw stream on failure.
		// NOTE(review): if the GZIP header check throws, 'fis' has already been
		// partially consumed, so the plain-text fallback may start mid-file for
		// some stream implementations — confirm against the expected inputs.
		try {
			gis = new GZIPInputStream(fis);
		} catch(IOException ioe) {
			// IgnoreIT(R)
		}
		InputStreamReader isr = new InputStreamReader((gis!=null)?gis:fis);
		BufferedReader br = new BufferedReader(isr);

		// Per-component parse state, reset each time a new component starts
		// (signalled by a _chem_comp.type / _chem_comp.pdbx_type line).
		boolean isPep = false;
		String threeLet = null;
		// boolean isAmb = false;
		Character oneLet = null;
		String[] parents = new String[]{};
		try {
			String line;
			while((line = br.readLine())!=null) {
				// '_chem_comp.type' => if($type eq 'ATOMP' || $type eq 'L-PEPTIDE LINKING');
				// '_chem_comp.pdbx_type' => if($type eq 'ATOMP' || $type eq 'L-PEPTIDE LINKING');
				if(line.startsWith("_chem_comp.type") || line.startsWith("_chem_comp.pdbx_type")) {
					// A type line starts a new component: first flush the previous one.
					if(isPep && threeLet!=null) {
						// Let's save it!
						// if(isAmb) {
						// LOG.warning("aminoacid "+threeLet+" is ambiguous (one letter "+oneLet+", parents "+parents.toString()+")");
						// }
						// toOneAA.put(threeLet, (oneLet!=null && oneLet.equals('?'))?((parents.length>0)?parents:PDBAmino.UnknownAmino):oneLet);
						if(oneLet!=null && oneLet.equals('?')) {
							// No direct one-letter code: defer to parent resolution, or mark unknown.
							if(parents.length>0) {
								toOneAADeriv.put(threeLet, parents);
							} else {
								toOneAA.put(threeLet, PDBAmino.UnknownAmino);
							}
						} else {
							toOneAA.put(threeLet,oneLet);
						}
						// LOG.fine("aminoacid "+threeLet+" is "+oneLet);
					}
					String[] tokens=line.split("[ \t]+",2);
					String first = tokens[0];
					// Strip quotes and trailing whitespace, normalize case.
					String type = tokens[1].toUpperCase().replaceAll("[\"']+", "").replaceFirst("[ \t]+$","");
					if("_chem_comp.type".equals(first)) {
						isPep = ("ATOMP".equals(type) || "L-PEPTIDE LINKING".equals(type));
					} else {
						// pdbx_type can only widen, never narrow, the peptide flag.
						isPep = isPep || ("ATOMP".equals(type) || "L-PEPTIDE LINKING".equals(type));
					}
					// isAmb=false;
					oneLet=null;
					threeLet=null;
					parents=new String[]{};
				} else if(!isPep && line.startsWith("_chem_comp.three_letter_code")) {
					// Non-peptide component: remember its code in the negative set.
					String[] tokens=line.split("[ \t]+",2);
					String elem = tokens[1].toUpperCase().replaceAll("[\"']+", "").replaceFirst("[ \t]+$","");
					notAA.add(elem);
				} else if(isPep) {
					if(
						line.startsWith("_chem_comp.pdbx_ambiguous_flag") ||
						line.startsWith("_chem_comp.one_letter_code") ||
						line.startsWith("_chem_comp.three_letter_code") ||
						line.startsWith("_chem_comp.mon_nstd_parent_comp_id") ||
						line.startsWith("_chem_comp.pdbx_replaced_by")
					) {
						String[] tokens=line.split("[ \t]+",2);
						String first = tokens[0];
						String elem = tokens[1].toUpperCase().replaceAll("[\"']+", "").replaceFirst("[ \t]+$","");
						if("_chem_comp.pdbx_ambiguous_flag".equals(first)) {
							// if(!"N".equals(elem))
							// isAmb=true;
						} else if("_chem_comp.one_letter_code".equals(first)) {
							oneLet=elem.charAt(0);
						} else if("_chem_comp.three_letter_code".equals(first)) {
							threeLet=elem;
						} else if("_chem_comp.mon_nstd_parent_comp_id".equals(first)) {
							// '?' means no parent declared.
							parents=elem.split("[ ,]+");
							if("?".equals(parents[0])) {
								parents=new String[] {};
							}
						} else if("_chem_comp.pdbx_replaced_by".equals(first)) {
							// Replacement component only counts when no parents were declared.
							if(! "?".equals(elem) && parents.length==0) {
								parents=new String[] {elem};
							}
						}
					}
				}
			}
		} finally {
			// Close every layer individually, swallowing close failures.
			try {
				br.close();
			} catch(IOException ioe) {
				// IgnoreIT(R)
			}
			try {
				isr.close();
			} catch(IOException ioe) {
				// IgnoreIT(R)
			}
			try {
				if(gis!=null)
					gis.close();
			} catch(IOException ioe) {
				// IgnoreIT(R)
			}
			try {
				if(fis!=null)
					fis.close();
			} catch(IOException ioe) {
				// IgnoreIT(R)
			}
		}
		// Flush the last component (the loop only flushes when a new one starts).
		if(isPep && threeLet!=null) {
			// Let's save it!
			// if(isAmb) {
			// LOG.warning("aminoacid $threeLet is ambiguous (one letter "+oneLet+" parents "+parents.toString()+")");
			// }
			// toOneAA.put(threeLet, (oneLet!=null && oneLet.equals('?'))?((parents.length>0)?parents:PDBAmino.UnknownAmino):oneLet);
			if(oneLet!=null && oneLet.equals('?')) {
				if(parents.length>0) {
					toOneAADeriv.put(threeLet, parents);
				} else {
					toOneAA.put(threeLet, PDBAmino.UnknownAmino);
				}
			} else {
				toOneAA.put(threeLet,oneLet);
			}
			// LOG.fine("aminoacid "+threeLet+" is "+oneLet);
		}
		// Last, setting up the hashes!
		// Resolve deferred components by walking their parent chains: stop on the
		// first parent with a known one-letter code, or fall back to UnknownAmino.
		// NOTE(review): the loop condition is always true for a String[]; exit is
		// via the break statements, so a cycle among parents would loop forever —
		// confirm the dictionary cannot contain parent cycles.
		for(Map.Entry<String,String[]> kv: toOneAADeriv.entrySet()) {
			// Character one;
			String[] tval = kv.getValue();
			do {
				String alt = (tval.length>0)?tval[0]:"UNK";
				if(toOneAA.containsKey(alt)) {
					toOneAA.put(kv.getKey(), toOneAA.get(alt));
					break;
				} else if(toOneAADeriv.containsKey(alt)) {
					tval = toOneAADeriv.get(alt);
				} else {
					toOneAA.put(kv.getKey(), PDBAmino.UnknownAmino);
					break;
				}
			} while(tval.getClass().isArray());
			// LOG.fine(kv.getKey()+" interpreted as "+alt);
			// one=(Character)tval;
			// } else {
			// one=(Character)val;
			// LOG.fine(kv.getKey()+" is "+one);
		}
	}

	/**
	 * Some PDBPre sequences contain surrounded in parentheses non-standard aminoacids
	 * in their three-code representation. So this function 'purifies' the sequence,
	 * replacing each parenthesized code with its one-letter equivalent (or the
	 * unknown-amino placeholder when the code cannot be resolved).
	 *
	 * @param seq The sequence
	 * @return The purified sequence
	 */
	public StringBuilder purifySequence(final String seq) {
		StringBuilder result=new StringBuilder();
		for(int base=0;base<seq.length();) {
			int leftPar=seq.indexOf("(",base);
			if(leftPar!=-1) {
				// Do we append?
				if(base<leftPar)
					result.append(seq.subSequence(base, leftPar));
				int rightPar=seq.indexOf(")",leftPar+1);
				if(rightPar!=-1) {
					String amino=seq.substring(leftPar+1,rightPar);
					// Although by definition aminoacids in PDB are expressed as 3-letter codes,
					// we are bypassing that fact here.
					if(toOneAA.containsKey(amino)) {
						result.append(toOneAA.get(amino));
					} else if(notAA.contains(amino)) {
						LOG.warning("Jammed chain: '"+amino+"' in "+seq);
						result.append(PDBAmino.UnknownAmino);
					} else {
						result.append(PDBAmino.UnknownAmino);
						LOG.warning("Unknown aminoacid '"+amino+"' in "+seq+"!!!");
					}
					// Skipping to right parentheses
					base=rightPar+1;
				} else {
					// Jammed content, collapsed to an unknown amino
					result.append(PDBAmino.UnknownAmino);
					break;
				}
			} else {
				result.append(seq.subSequence(base,seq.length()));
				break;
			}
		}
		return result;
	}

	// Exposes the three-letter -> one-letter mapping (live reference, not a copy).
	public HashMap<String,Character> getMapping()
	{
		return toOneAA;
	}

	// Exposes the set of non-amino-acid codes (live reference, not a copy).
	public HashSet<String> getNotMapping()
	{
		return notAA;
	}
}
| apache-2.0 |
blademainer/MessagingSystem | src/main/java/com/xiongyingqi/util/EncryptMD5.java | 2321 | /**
* YIXUN_1.5_EE
*/
package com.xiongyingqi.util;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
 * MD5 digest helper.
 *
 * <p>NOTE: MD5 is cryptographically broken; it must not be used for password
 * hashing or integrity protection of untrusted data. Kept for legacy
 * fingerprinting use.
 *
 * @author 瑛琪 <a href="http://xiongyingqi.com">xiongyingqi.com</a>
 * @version 2013-11-4 下午6:06:54
 */
public class EncryptMD5 extends Encrypt{

    /**
     * MD5-digest a message. Content is encoded as UTF-8; the result is
     * rendered lower-case.
     *
     * @param message content to digest
     * @return the hex digest
     */
    private static String encrypt(String message) {
        return encrypt(message, "UTF-8", false);
    }

    /**
     * MD5-digest a message with a caller-chosen charset; the result is
     * rendered lower-case.
     *
     * @param message content to digest
     * @param encode  charset name used to encode {@code message}
     * @return the hex digest
     */
    private static String encrypt(String message, String encode) {
        return encrypt(message, encode, false);
    }

    /**
     * MD5-digest a message encoded as UTF-8.
     *
     * @param message     content to digest
     * @param toUpperCase whether to render the hex digest in upper case
     * @return the hex digest
     */
    private static String encrypt(String message, boolean toUpperCase) {
        return encrypt(message, "UTF-8", toUpperCase);
    }

    /**
     * MD5-digest a message.
     *
     * @param message     content to digest
     * @param encode      charset name used to encode {@code message}
     * @param toUpperCase whether to render the hex digest in upper case
     * @return the hex digest
     * @throws IllegalArgumentException if {@code encode} is not a supported charset
     * @throws IllegalStateException    if the MD5 algorithm is unavailable
     *                                  (every conforming JVM must provide it)
     */
    private static String encrypt(String message, String encode, boolean toUpperCase) {
        try {
            MessageDigest messageDigest = MessageDigest.getInstance("MD5");
            messageDigest.reset();
            messageDigest.update(message.getBytes(encode));
            // bytesToString (inherited from Encrypt) renders the digest as hex.
            return bytesToString(messageDigest.digest(), toUpperCase);
        } catch (NoSuchAlgorithmException e) {
            // Previously only printed the stack trace and went on to digest
            // nothing (or NPE); fail loudly instead.
            throw new IllegalStateException("MD5 algorithm not available", e);
        } catch (UnsupportedEncodingException e) {
            // Previously swallowed, silently producing the digest of an empty
            // input; surface the bad charset name to the caller instead.
            throw new IllegalArgumentException("Unsupported charset: " + encode, e);
        }
    }

    public static void main(String[] args) {
        System.out.println(encrypt(""));
    }
}
| apache-2.0 |
adamkewley/jobson | jobson/src/main/java/com/github/jobson/api/v1/APIRestLink.java | 1340 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.github.jobson.api.v1;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.media.Schema;
import java.net.URI;
@Schema(description = "A related resource link")
public final class APIRestLink {

    @Schema(description = "The link's URL")
    private URI href;

    /**
     * No-arg constructor required by the JSON deserializer; not for direct use.
     *
     * @deprecated Used by JSON deserializer.
     */
    public APIRestLink() {}

    public APIRestLink(URI href) {
        this.href = href;
    }

    public URI getHref() {
        return href;
    }
}
| apache-2.0 |
crate/crate | libs/sql-parser/src/main/java/io/crate/sql/tree/NotNullColumnConstraint.java | 1927 | /*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.sql.tree;
import java.util.function.Consumer;
import java.util.function.Function;
/**
 * AST node for a {@code NOT NULL} column constraint. The constraint is
 * stateless, so all instances (of any type parameter) compare equal by class.
 */
public class NotNullColumnConstraint<T> extends ColumnConstraint<T> {

    private static final String NAME = "NOT NULL";

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        // Stateless: any two instances of the same class are equal.
        return o != null && getClass() == o.getClass();
    }

    @Override
    public int hashCode() {
        return NAME.hashCode();
    }

    @Override
    public String toString() {
        return NAME;
    }

    @Override
    public <R, C> R accept(AstVisitor<R, C> visitor, C context) {
        return visitor.visitNotNullColumnConstraint(this, context);
    }

    @Override
    public <U> ColumnConstraint<U> map(Function<? super T, ? extends U> mapper) {
        // No expressions to transform; just re-type the constraint.
        return new NotNullColumnConstraint<>();
    }

    @Override
    public void visit(Consumer<? super T> consumer) {
        // Intentionally empty: a NOT NULL constraint holds no expressions.
    }
}
| apache-2.0 |
alibaba/easyexcel | src/test/java/com/alibaba/easyexcel/test/core/style/StyleDataTest.java | 12710 | package com.alibaba.easyexcel.test.core.style;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import com.alibaba.easyexcel.test.core.StyleTestUtils;
import com.alibaba.easyexcel.test.util.TestFileUtil;
import com.alibaba.excel.EasyExcel;
import com.alibaba.excel.annotation.write.style.HeadFontStyle;
import com.alibaba.excel.annotation.write.style.HeadStyle;
import com.alibaba.excel.metadata.Head;
import com.alibaba.excel.metadata.data.DataFormatData;
import com.alibaba.excel.metadata.property.FontProperty;
import com.alibaba.excel.metadata.property.StyleProperty;
import com.alibaba.excel.write.merge.LoopMergeStrategy;
import com.alibaba.excel.write.merge.OnceAbsoluteMergeStrategy;
import com.alibaba.excel.write.metadata.style.WriteCellStyle;
import com.alibaba.excel.write.metadata.style.WriteFont;
import com.alibaba.excel.write.style.AbstractVerticalCellStyleStrategy;
import com.alibaba.excel.write.style.HorizontalCellStyleStrategy;
import com.alibaba.excel.write.style.column.SimpleColumnWidthStyleStrategy;
import com.alibaba.excel.write.style.row.SimpleRowHeightStyleStrategy;
import org.apache.poi.ss.usermodel.BorderStyle;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.FillPatternType;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.HorizontalAlignment;
import org.apache.poi.ss.usermodel.IndexedColors;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.VerticalAlignment;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
/**
* @author Jiaju Zhuang
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class StyleDataTest {
private static File file07;
private static File file03;
private static File fileVerticalCellStyleStrategy07;
private static File fileVerticalCellStyleStrategy207;
private static File fileLoopMergeStrategy;
@BeforeClass
public static void init() {
file07 = TestFileUtil.createNewFile("style07.xlsx");
file03 = TestFileUtil.createNewFile("style03.xls");
fileVerticalCellStyleStrategy07 = TestFileUtil.createNewFile("verticalCellStyle.xlsx");
fileVerticalCellStyleStrategy207 = TestFileUtil.createNewFile("verticalCellStyle2.xlsx");
fileLoopMergeStrategy = TestFileUtil.createNewFile("loopMergeStrategy.xlsx");
}
@Test
public void t01ReadAndWrite07() throws Exception {
readAndWrite(file07);
}
@Test
public void t02ReadAndWrite03() throws Exception {
readAndWrite(file03);
}
/**
 * Writes a sheet styled by a custom {@code AbstractVerticalCellStyleStrategy}:
 * head cells get a thin red border, centered alignment, a data format and a
 * per-column fill (yellow for column 0 with a decorated font, blue otherwise);
 * content cells get a per-column fill (dark green / pink). Output goes to
 * fileVerticalCellStyleStrategy07; there are no read-back assertions.
 */
@Test
public void t03AbstractVerticalCellStyleStrategy() {
    AbstractVerticalCellStyleStrategy verticalCellStyleStrategy = new AbstractVerticalCellStyleStrategy() {
        @Override
        protected WriteCellStyle headCellStyle(Head head) {
            WriteCellStyle writeCellStyle = new WriteCellStyle();
            writeCellStyle.setFillPatternType(FillPatternType.SOLID_FOREGROUND);
            DataFormatData dataFormatData = new DataFormatData();
            dataFormatData.setIndex((short)0);
            writeCellStyle.setDataFormatData(dataFormatData);
            writeCellStyle.setHidden(false);
            writeCellStyle.setLocked(true);
            writeCellStyle.setQuotePrefix(true);
            writeCellStyle.setHorizontalAlignment(HorizontalAlignment.CENTER);
            writeCellStyle.setWrapped(true);
            writeCellStyle.setVerticalAlignment(VerticalAlignment.CENTER);
            writeCellStyle.setRotation((short)0);
            writeCellStyle.setIndent((short)10);
            writeCellStyle.setBorderLeft(BorderStyle.THIN);
            writeCellStyle.setBorderRight(BorderStyle.THIN);
            writeCellStyle.setBorderTop(BorderStyle.THIN);
            writeCellStyle.setBorderBottom(BorderStyle.THIN);
            writeCellStyle.setLeftBorderColor(IndexedColors.RED.getIndex());
            writeCellStyle.setRightBorderColor(IndexedColors.RED.getIndex());
            writeCellStyle.setTopBorderColor(IndexedColors.RED.getIndex());
            writeCellStyle.setBottomBorderColor(IndexedColors.RED.getIndex());
            writeCellStyle.setFillBackgroundColor(IndexedColors.RED.getIndex());
            writeCellStyle.setShrinkToFit(Boolean.TRUE);
            if (head.getColumnIndex() == 0) {
                writeCellStyle.setFillForegroundColor(IndexedColors.YELLOW.getIndex());
                WriteFont writeFont = new WriteFont();
                writeFont.setItalic(true);
                writeFont.setStrikeout(true);
                writeFont.setTypeOffset(Font.SS_NONE);
                writeFont.setUnderline(Font.U_DOUBLE);
                writeFont.setBold(true);
                writeFont.setCharset((int)Font.DEFAULT_CHARSET);
                // FIX: the configured font was previously built but never attached to the
                // style, so none of the font settings reached the written workbook.
                writeCellStyle.setWriteFont(writeFont);
            } else {
                writeCellStyle.setFillForegroundColor(IndexedColors.BLUE.getIndex());
            }
            return writeCellStyle;
        }

        @Override
        protected WriteCellStyle contentCellStyle(Head head) {
            WriteCellStyle writeCellStyle = new WriteCellStyle();
            writeCellStyle.setFillPatternType(FillPatternType.SOLID_FOREGROUND);
            if (head.getColumnIndex() == 0) {
                writeCellStyle.setFillForegroundColor(IndexedColors.DARK_GREEN.getIndex());
            } else {
                writeCellStyle.setFillForegroundColor(IndexedColors.PINK.getIndex());
            }
            return writeCellStyle;
        }
    };
    EasyExcel.write(fileVerticalCellStyleStrategy07, StyleData.class).registerWriteHandler(
        verticalCellStyleStrategy).sheet()
        .doWrite(data());
}
/**
 * Same vertical-style scenario as t03, but the base head style comes from the
 * {@code @HeadStyle}/{@code @HeadFontStyle} annotations on StyleData (via
 * StyleProperty/FontProperty), then the fill is overridden per column.
 * Output goes to fileVerticalCellStyleStrategy207; no read-back assertions.
 */
@Test
public void t04AbstractVerticalCellStyleStrategy02() {
    final StyleProperty styleProperty = StyleProperty.build(StyleData.class.getAnnotation(HeadStyle.class));
    final FontProperty fontProperty = FontProperty.build(StyleData.class.getAnnotation(HeadFontStyle.class));
    AbstractVerticalCellStyleStrategy verticalCellStyleStrategy = new AbstractVerticalCellStyleStrategy() {
        @Override
        protected WriteCellStyle headCellStyle(Head head) {
            WriteCellStyle writeCellStyle = WriteCellStyle.build(styleProperty, fontProperty);
            if (head.getColumnIndex() == 0) {
                writeCellStyle.setFillForegroundColor(IndexedColors.YELLOW.getIndex());
                WriteFont writeFont = new WriteFont();
                writeFont.setItalic(true);
                writeFont.setStrikeout(true);
                writeFont.setTypeOffset(Font.SS_NONE);
                writeFont.setUnderline(Font.U_DOUBLE);
                writeFont.setBold(true);
                writeFont.setCharset((int)Font.DEFAULT_CHARSET);
                // FIX: as in t03, the font was configured but never attached, leaving the
                // written head cells with the default font.
                writeCellStyle.setWriteFont(writeFont);
            } else {
                writeCellStyle.setFillForegroundColor(IndexedColors.BLUE.getIndex());
            }
            return writeCellStyle;
        }

        @Override
        protected WriteCellStyle contentCellStyle(Head head) {
            WriteCellStyle writeCellStyle = new WriteCellStyle();
            writeCellStyle.setFillPatternType(FillPatternType.SOLID_FOREGROUND);
            if (head.getColumnIndex() == 0) {
                writeCellStyle.setFillForegroundColor(IndexedColors.DARK_GREEN.getIndex());
            } else {
                writeCellStyle.setFillForegroundColor(IndexedColors.PINK.getIndex());
            }
            return writeCellStyle;
        }
    };
    EasyExcel.write(fileVerticalCellStyleStrategy207, StyleData.class).registerWriteHandler(
        verticalCellStyleStrategy).sheet()
        .doWrite(data());
}
/**
 * Writes data10() (ten identical rows) with a LoopMergeStrategy(2, 1) registered on the
 * sheet, producing a repeating cell merge (args presumably eachRow=2, columnIndex=1 —
 * confirm against LoopMergeStrategy's constructor). No read-back assertions.
 */
@Test
public void t05LoopMergeStrategy() {
EasyExcel.write(fileLoopMergeStrategy, StyleData.class).sheet().registerWriteHandler(
new LoopMergeStrategy(2, 1))
.doWrite(data10());
}
/**
 * Round-trip helper: writes data() with width/height/style/merge handlers, reads it back
 * through StyleDataListener, then re-opens the file with raw POI and asserts the styles
 * were actually applied.
 *
 * Expected values follow POI's units: column width is measured in 1/256 of a character
 * (50 chars -> 50 * 256), row height in twips (1/20 pt, so 40pt -> 800 and 50pt -> 1000),
 * and colors are compared as RGB byte triples (e.g. YELLOW = FFFF00 -> {-1, -1, 0}).
 *
 * @param file target workbook (xlsx or xls depending on the caller)
 * @throws Exception on any write/read/POI failure
 */
private void readAndWrite(File file) throws Exception {
SimpleColumnWidthStyleStrategy simpleColumnWidthStyleStrategy = new SimpleColumnWidthStyleStrategy(50);
SimpleRowHeightStyleStrategy simpleRowHeightStyleStrategy =
new SimpleRowHeightStyleStrategy((short)40, (short)50);
// Head style: yellow fill, 20pt dark-yellow font.
WriteCellStyle headWriteCellStyle = new WriteCellStyle();
headWriteCellStyle.setFillForegroundColor(IndexedColors.YELLOW.getIndex());
WriteFont headWriteFont = new WriteFont();
headWriteFont.setFontHeightInPoints((short)20);
headWriteFont.setColor(IndexedColors.DARK_YELLOW.getIndex());
headWriteCellStyle.setWriteFont(headWriteFont);
// Content style: teal fill, 30pt dark-teal font.
WriteCellStyle contentWriteCellStyle = new WriteCellStyle();
contentWriteCellStyle.setFillPatternType(FillPatternType.SOLID_FOREGROUND);
contentWriteCellStyle.setFillForegroundColor(IndexedColors.TEAL.getIndex());
WriteFont contentWriteFont = new WriteFont();
contentWriteFont.setFontHeightInPoints((short)30);
contentWriteFont.setColor(IndexedColors.DARK_TEAL.getIndex());
contentWriteCellStyle.setWriteFont(contentWriteFont);
HorizontalCellStyleStrategy horizontalCellStyleStrategy =
new HorizontalCellStyleStrategy(headWriteCellStyle, contentWriteCellStyle);
// One absolute merge region; args (2, 2, 0, 1) are first/last row, first/last column.
OnceAbsoluteMergeStrategy onceAbsoluteMergeStrategy = new OnceAbsoluteMergeStrategy(2, 2, 0, 1);
EasyExcel.write(file, StyleData.class).registerWriteHandler(simpleColumnWidthStyleStrategy)
.registerWriteHandler(simpleRowHeightStyleStrategy).registerWriteHandler(horizontalCellStyleStrategy)
.registerWriteHandler(onceAbsoluteMergeStrategy).sheet().doWrite(data());
EasyExcel.read(file, StyleData.class, new StyleDataListener()).sheet().doRead();
// Verify the written file with raw POI.
Workbook workbook = WorkbookFactory.create(file);
Sheet sheet = workbook.getSheetAt(0);
Assert.assertEquals(50 * 256, sheet.getColumnWidth(0), 0);
// Head row: 40pt (= 800 twips), yellow fill (FFFF00), dark-yellow (808000) 20pt font.
Row row0 = sheet.getRow(0);
Assert.assertEquals(800, row0.getHeight(), 0);
Cell cell00 = row0.getCell(0);
Assert.assertArrayEquals(new byte[] {-1, -1, 0}, StyleTestUtils.getFillForegroundColor(cell00));
Assert.assertArrayEquals(new byte[] {-128, -128, 0}, StyleTestUtils.getFontColor(cell00, workbook));
Assert.assertEquals(20, StyleTestUtils.getFontHeightInPoints(cell00, workbook));
Cell cell01 = row0.getCell(1);
Assert.assertArrayEquals(new byte[] {-1, -1, 0}, StyleTestUtils.getFillForegroundColor(cell01));
Assert.assertArrayEquals(new byte[] {-128, -128, 0}, StyleTestUtils.getFontColor(cell01, workbook));
Assert.assertEquals(20, StyleTestUtils.getFontHeightInPoints(cell01, workbook));
// First content row: 50pt (= 1000 twips), teal fill (008080), dark-teal (003366) 30pt font.
Row row1 = sheet.getRow(1);
Assert.assertEquals(1000, row1.getHeight(), 0);
Cell cell10 = row1.getCell(0);
Assert.assertArrayEquals(new byte[] {0, -128, -128}, StyleTestUtils.getFillForegroundColor(cell10));
Assert.assertArrayEquals(new byte[] {0, 51, 102}, StyleTestUtils.getFontColor(cell10, workbook));
Assert.assertEquals(30, StyleTestUtils.getFontHeightInPoints(cell10, workbook));
Cell cell11 = row1.getCell(1);
Assert.assertArrayEquals(new byte[] {0, -128, -128}, StyleTestUtils.getFillForegroundColor(cell11));
Assert.assertArrayEquals(new byte[] {0, 51, 102}, StyleTestUtils.getFontColor(cell11, workbook));
Assert.assertEquals(30, StyleTestUtils.getFontHeightInPoints(cell11, workbook));
}
/**
 * Builds the two-row fixture used by the style write tests.
 *
 * @return two StyleData rows with fixed string content
 */
private List<StyleData> data() {
    StyleData first = new StyleData();
    first.setString("字符串0");
    first.setString1("字符串01");
    StyleData second = new StyleData();
    second.setString("字符串1");
    second.setString1("字符串11");
    List<StyleData> rows = new ArrayList<StyleData>();
    rows.add(first);
    rows.add(second);
    return rows;
}
/**
 * Builds the ten-row fixture used by the loop-merge test; every row carries
 * identical content.
 *
 * @return ten identical StyleData rows
 */
private List<StyleData> data10() {
    List<StyleData> rows = new ArrayList<StyleData>(10);
    int remaining = 10;
    while (remaining-- > 0) {
        StyleData row = new StyleData();
        row.setString("字符串0");
        row.setString1("字符串01");
        rows.add(row);
    }
    return rows;
}
}
| apache-2.0 |
jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/VirtualMachineRuntimeInfoDasProtectionState.java | 1378 |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for VirtualMachineRuntimeInfoDasProtectionState complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="VirtualMachineRuntimeInfoDasProtectionState">
* <complexContent>
* <extension base="{urn:vim25}DynamicData">
* <sequence>
* <element name="dasProtected" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated binding class for the vim25 schema fragment shown in the Javadoc
// above; regenerate from the WSDL/schema rather than hand-editing if the schema changes.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "VirtualMachineRuntimeInfoDasProtectionState", propOrder = {
"dasProtected"
})
public class VirtualMachineRuntimeInfoDasProtectionState
extends DynamicData
{
// Schema element "dasProtected" (xsd:boolean) — whether the VM currently has
// DAS (vSphere HA) protection.
protected boolean dasProtected;
/**
* Gets the value of the dasProtected property.
*
* @return {@code true} if the virtual machine is DAS-protected
*/
public boolean isDasProtected() {
return dasProtected;
}
/**
* Sets the value of the dasProtected property.
*
* @param value the new dasProtected value
*/
public void setDasProtected(boolean value) {
this.dasProtected = value;
}
}
| apache-2.0 |
stserp/erp1 | source/src/com/baosight/sts/st/cw/domain/STCW0201.java | 26598 | /**
* Generate time : 2009-06-15 18:12:38
* Version : 1.0.1.V20070717
*/
package com.baosight.sts.st.cw.domain;
//import com.baosight.bfms.as.as.common.bill.IBillHead;
import com.baosight.bfms.as.as.common.bill.IBillHead;
import com.baosight.iplat4j.util.NumberUtils;
import java.math.BigDecimal;
import java.util.Date;
import com.baosight.iplat4j.util.DateUtils;
import com.baosight.iplat4j.core.ei.EiColumn;
import com.baosight.iplat4j.ep.DaoEPBase;
import java.util.HashMap;
import java.util.Map;
import com.baosight.iplat4j.util.StringUtils;
/**
* TApSupplierRefund
*
*/
public class STCW0201 extends DaoEPBase implements IBillHead {
private String segNo = " "; // 业务单元代码
private Long accsetNo = new Long(0); // 帐套序号
private String refundId = " "; // 退款管理号
private Date refundDate; // 退款日期
private String refundType = " "; // 退款类型
private String refundName = " "; // 退款类型name
private String providerId = " "; // 供应商代码
private String providerName = " "; // 供应商name
private String bankAccNum = " "; // 供应商银行帐号
private String bankAccName = " "; // 供应商银行名称
private String bankSubjectid = " "; // 退款银行
private String bankSubjectidName = " "; // 退款银行name
private String noteType = " "; // 票据种类
private String noteTypeName = " "; // 票据种类name
private String noteId = " "; // 票据号
private BigDecimal amount = new BigDecimal("0"); // 退款金额
private BigDecimal bankFee = new BigDecimal("0"); // 银行手续费
private String currencyId = " "; // 币种
private String currencyIdName = " "; // 币种name
private BigDecimal currencyRate = new BigDecimal("0"); // 汇率
private String refundReason = " "; // 退款说明
private String redFlag = " "; // 红冲标记
private String redFlagName = " "; // 红冲标记name
private String originSettleId = " "; // 红冲清单号
private String factStatus = " "; // 实收状态
private Date factDate; // 实收日期
private Date forecastDate; // 预计实收日
private String accountStatus = " "; // 记帐状态
private String accountStatusName = " "; // 记帐状态name
private String voucherId = " "; // 凭证号
private String periodId = " "; // 期间编号
private Long noteCounts = new Long(0); // 票据张数
private String modiPerson = " "; // 更新人
private Date modiDate; // 更新时间
private String companyCode = "";
private String accountId = "";
private String voucherDate = " "; //凭证日期
/**
* @return the voucherDate
*/
public String getVoucherDate() {
return voucherDate;
}
/**
* @param voucherDate the voucherDate to set
*/
public void setVoucherDate(String voucherDate) {
this.voucherDate = voucherDate;
}
/**
* @return the accountId
*/
public String getAccountId() {
return accountId;
}
/**
* @param accountId the accountId to set
*/
public void setAccountId(String accountId) {
this.accountId = accountId;
}
/**
 * Registers the EiColumn metadata for every mapped column: primary keys, types
 * ("N" = numeric), field/scale lengths and Chinese display names used by the
 * iplat4j DAO layer. Note that the accountId and voucherDate fields are NOT
 * registered here (and are not mapped in fromMap/toMap) — presumably transient;
 * confirm before relying on them being persisted.
 */
public void initMetaData() {
EiColumn eiColumn;
// segNo: business unit code (primary key)
eiColumn = new EiColumn("segNo");
eiColumn.setPrimaryKey(true);
eiColumn.setFieldLength(20);
eiColumn.setDescName("业务单元代码");
eiMetadata.addMeta(eiColumn);
// companyCode: account set / company book (primary key)
eiColumn = new EiColumn("companyCode");
eiColumn.setPrimaryKey(true);
eiColumn.setFieldLength(20);
eiColumn.setDescName("帐套");
eiMetadata.addMeta(eiColumn);
// accsetNo: account set sequence number (primary key, numeric 11,0)
eiColumn = new EiColumn("accsetNo");
eiColumn.setPrimaryKey(true);
eiColumn.setType("N");
eiColumn.setScaleLength(0);
eiColumn.setFieldLength(11);
eiColumn.setDescName("帐套序号");
eiMetadata.addMeta(eiColumn);
// refundId: refund management number (primary key)
eiColumn = new EiColumn("refundId");
eiColumn.setPrimaryKey(true);
eiColumn.setFieldLength(20);
eiColumn.setDescName("退款管理号");
eiMetadata.addMeta(eiColumn);
// refundDate: refund date
eiColumn = new EiColumn("refundDate");
eiColumn.setDescName("退款日期");
eiMetadata.addMeta(eiColumn);
// refundType: refund type code
eiColumn = new EiColumn("refundType");
eiColumn.setFieldLength(2);
eiColumn.setDescName("退款类型");
eiMetadata.addMeta(eiColumn);
// providerId: supplier code
eiColumn = new EiColumn("providerId");
eiColumn.setFieldLength(10);
eiColumn.setDescName("供应商代码");
eiMetadata.addMeta(eiColumn);
// bankAccNum: supplier bank account number
eiColumn = new EiColumn("bankAccNum");
eiColumn.setFieldLength(30);
eiColumn.setDescName("供应商银行帐号");
eiMetadata.addMeta(eiColumn);
// bankAccName: supplier bank name
eiColumn = new EiColumn("bankAccName");
eiColumn.setFieldLength(100);
eiColumn.setDescName("供应商银行名称");
eiMetadata.addMeta(eiColumn);
// bankSubjectid: refunding bank
eiColumn = new EiColumn("bankSubjectid");
eiColumn.setFieldLength(13);
eiColumn.setDescName("退款银行");
eiMetadata.addMeta(eiColumn);
// noteType: note/bill type code
eiColumn = new EiColumn("noteType");
eiColumn.setFieldLength(10);
eiColumn.setDescName("票据种类");
eiMetadata.addMeta(eiColumn);
// noteId: note number
eiColumn = new EiColumn("noteId");
eiColumn.setFieldLength(20);
eiColumn.setDescName("票据号");
eiMetadata.addMeta(eiColumn);
// amount: refund amount (numeric 14,2)
eiColumn = new EiColumn("amount");
eiColumn.setType("N");
eiColumn.setScaleLength(2);
eiColumn.setFieldLength(14);
eiColumn.setDescName("退款金额");
eiMetadata.addMeta(eiColumn);
// bankFee: bank service charge (numeric 14,2)
eiColumn = new EiColumn("bankFee");
eiColumn.setType("N");
eiColumn.setScaleLength(2);
eiColumn.setFieldLength(14);
eiColumn.setDescName("银行手续费");
eiMetadata.addMeta(eiColumn);
// currencyId: currency code
eiColumn = new EiColumn("currencyId");
eiColumn.setFieldLength(2);
eiColumn.setDescName("币种");
eiMetadata.addMeta(eiColumn);
// currencyRate: exchange rate (numeric 12,8)
eiColumn = new EiColumn("currencyRate");
eiColumn.setType("N");
eiColumn.setScaleLength(8);
eiColumn.setFieldLength(12);
eiColumn.setDescName("汇率");
eiMetadata.addMeta(eiColumn);
// refundReason: refund remarks
eiColumn = new EiColumn("refundReason");
eiColumn.setFieldLength(200);
eiColumn.setDescName("退款说明");
eiMetadata.addMeta(eiColumn);
// redFlag: red-flush (reversal) flag
eiColumn = new EiColumn("redFlag");
eiColumn.setFieldLength(1);
eiColumn.setDescName("红冲标记");
eiMetadata.addMeta(eiColumn);
// originSettleId: reversed settlement list number
eiColumn = new EiColumn("originSettleId");
eiColumn.setFieldLength(20);
eiColumn.setDescName("红冲清单号");
eiMetadata.addMeta(eiColumn);
// factStatus: actual-receipt status
eiColumn = new EiColumn("factStatus");
eiColumn.setFieldLength(2);
eiColumn.setDescName("实收状态");
eiMetadata.addMeta(eiColumn);
// factDate: actual receipt date
eiColumn = new EiColumn("factDate");
eiColumn.setDescName("实收日期");
eiMetadata.addMeta(eiColumn);
// forecastDate: expected receipt date
eiColumn = new EiColumn("forecastDate");
eiColumn.setDescName("预计实收日");
eiMetadata.addMeta(eiColumn);
// accountStatus: posting status
eiColumn = new EiColumn("accountStatus");
eiColumn.setFieldLength(1);
eiColumn.setDescName("记帐状态");
eiMetadata.addMeta(eiColumn);
// accountStatusName: posting status display name
eiColumn = new EiColumn("accountStatusName");
eiColumn.setFieldLength(10);
eiColumn.setDescName("记帐状态名称");
eiMetadata.addMeta(eiColumn);
// voucherId: voucher number
eiColumn = new EiColumn("voucherId");
eiColumn.setFieldLength(9);
eiColumn.setDescName("凭证号");
eiMetadata.addMeta(eiColumn);
// periodId: accounting period id
eiColumn = new EiColumn("periodId");
eiColumn.setFieldLength(6);
eiColumn.setDescName("期间编号");
eiMetadata.addMeta(eiColumn);
// noteCounts: number of notes (numeric 11,0)
eiColumn = new EiColumn("noteCounts");
eiColumn.setType("N");
eiColumn.setScaleLength(0);
eiColumn.setFieldLength(11);
eiColumn.setDescName("票据张数");
eiMetadata.addMeta(eiColumn);
// modiPerson: last modified by
eiColumn = new EiColumn("modiPerson");
eiColumn.setFieldLength(30);
eiColumn.setDescName("更新人");
eiMetadata.addMeta(eiColumn);
// modiDate: last modified time
eiColumn = new EiColumn("modiDate");
eiColumn.setDescName("更新时间");
eiMetadata.addMeta(eiColumn);
// Display-name companion columns (no explicit lengths configured).
// providerName: supplier display name
eiColumn = new EiColumn("providerName");
eiColumn.setDescName("供应商名称");
eiMetadata.addMeta(eiColumn);
// redFlagName: reversal flag display name
eiColumn = new EiColumn("redFlagName");
eiColumn.setDescName("红冲标记");
eiMetadata.addMeta(eiColumn);
// bankSubjectidName: refunding bank display name
eiColumn = new EiColumn("bankSubjectidName");
eiColumn.setDescName("退款银行");
eiMetadata.addMeta(eiColumn);
// refundName: refund type display name
eiColumn = new EiColumn("refundName");
eiColumn.setDescName("退款类型");
eiMetadata.addMeta(eiColumn);
// noteTypeName: note type display name
eiColumn = new EiColumn("noteTypeName");
eiColumn.setDescName("票据类型");
eiMetadata.addMeta(eiColumn);
// currencyIdName: currency display name
eiColumn = new EiColumn("currencyIdName");
eiColumn.setDescName("币种");
eiMetadata.addMeta(eiColumn);
}
/**
 * Constructs an empty supplier-refund record and registers the column metadata
 * (field lengths, scales, display names) required by the iplat4j DAO layer.
 */
public STCW0201() {
initMetaData();
}
/**
* get the segNo - 业务单元代码
* @return the segNo
*/
public String getSegNo() {
return this.segNo;
}
/**
* set the segNo - 业务单元代码
*/
public void setSegNo(String segNo) {
this.segNo = segNo;
}
/**
* get the accsetNo - 帐套序号
* @return the accsetNo
*/
public Long getAccsetNo() {
return this.accsetNo;
}
/**
* set the accsetNo - 帐套序号
*/
public void setAccsetNo(Long accsetNo) {
this.accsetNo = accsetNo;
}
/**
* get the refundId - 退款管理号
* @return the refundId
*/
public String getRefundId() {
return this.refundId;
}
/**
* set the refundId - 退款管理号
*/
public void setRefundId(String refundId) {
this.refundId = refundId;
}
/**
* get the refundDate - 退款日期
* @return the refundDate
*/
public Date getRefundDate() {
return this.refundDate;
}
/**
* set the refundDate - 退款日期
*/
public void setRefundDate(Date refundDate) {
this.refundDate = refundDate;
}
/**
* get the refundType - 退款类型
* @return the refundType
*/
public String getRefundType() {
return this.refundType;
}
/**
* set the refundType - 退款类型
*/
public void setRefundType(String refundType) {
this.refundType = refundType;
}
/**
* get the providerId - 供应商代码
* @return the providerId
*/
public String getProviderId() {
return this.providerId;
}
/**
* set the providerId - 供应商代码
*/
public void setProviderId(String providerId) {
this.providerId = providerId;
}
/**
* get the bankAccNum - 供应商银行帐号
* @return the bankAccNum
*/
public String getBankAccNum() {
return this.bankAccNum;
}
/**
* set the bankAccNum - 供应商银行帐号
*/
public void setBankAccNum(String bankAccNum) {
this.bankAccNum = bankAccNum;
}
/**
* get the bankAccName - 供应商银行名称
* @return the bankAccName
*/
public String getBankAccName() {
return this.bankAccName;
}
/**
* set the bankAccName - 供应商银行名称
*/
public void setBankAccName(String bankAccName) {
this.bankAccName = bankAccName;
}
/**
* get the bankSubjectid - 退款银行
* @return the bankSubjectid
*/
public String getBankSubjectid() {
return this.bankSubjectid;
}
/**
* set the bankSubjectid - 退款银行
*/
public void setBankSubjectid(String bankSubjectid) {
this.bankSubjectid = bankSubjectid;
}
/**
* get the noteType - 票据种类
* @return the noteType
*/
public String getNoteType() {
return this.noteType;
}
/**
* set the noteType - 票据种类
*/
public void setNoteType(String noteType) {
this.noteType = noteType;
}
/**
* get the noteId - 票据号
* @return the noteId
*/
public String getNoteId() {
return this.noteId;
}
/**
* set the noteId - 票据号
*/
public void setNoteId(String noteId) {
this.noteId = noteId;
}
/**
* get the amount - 退款金额
* @return the amount
*/
public BigDecimal getAmount() {
return this.amount;
}
/**
* set the amount - 退款金额
*/
public void setAmount(BigDecimal amount) {
this.amount = amount;
}
/**
* get the bankFee - 银行手续费
* @return the bankFee
*/
public BigDecimal getBankFee() {
return this.bankFee;
}
/**
* set the bankFee - 银行手续费
*/
public void setBankFee(BigDecimal bankFee) {
this.bankFee = bankFee;
}
/**
* get the currencyId - 币种
* @return the currencyId
*/
public String getCurrencyId() {
return this.currencyId;
}
/**
* set the currencyId - 币种
*/
public void setCurrencyId(String currencyId) {
this.currencyId = currencyId;
}
/**
* get the currencyRate - 汇率
* @return the currencyRate
*/
public BigDecimal getCurrencyRate() {
return this.currencyRate;
}
/**
* set the currencyRate - 汇率
*/
public void setCurrencyRate(BigDecimal currencyRate) {
this.currencyRate = currencyRate;
}
/**
* get the refundReason - 退款说明
* @return the refundReason
*/
public String getRefundReason() {
return this.refundReason;
}
/**
* set the refundReason - 退款说明
*/
public void setRefundReason(String refundReason) {
this.refundReason = refundReason;
}
/**
* get the redFlag - 红冲标记
* @return the redFlag
*/
public String getRedFlag() {
return this.redFlag;
}
/**
* set the redFlag - 红冲标记
*/
public void setRedFlag(String redFlag) {
this.redFlag = redFlag;
}
/**
* get the originSettleId - 红冲清单号
* @return the originSettleId
*/
public String getOriginSettleId() {
return this.originSettleId;
}
/**
* set the originSettleId - 红冲清单号
*/
public void setOriginSettleId(String originSettleId) {
this.originSettleId = originSettleId;
}
/**
* get the factStatus - 实收状态
* @return the factStatus
*/
public String getFactStatus() {
return this.factStatus;
}
/**
* set the factStatus - 实收状态
*/
public void setFactStatus(String factStatus) {
this.factStatus = factStatus;
}
/**
* get the factDate - 实收日期
* @return the factDate
*/
public Date getFactDate() {
return this.factDate;
}
/**
* set the factDate - 实收日期
*/
public void setFactDate(Date factDate) {
this.factDate = factDate;
}
/**
* get the forecastDate - 预计实收日
* @return the forecastDate
*/
public Date getForecastDate() {
return this.forecastDate;
}
/**
* set the forecastDate - 预计实收日
*/
public void setForecastDate(Date forecastDate) {
this.forecastDate = forecastDate;
}
/**
* get the accountStatus - 记帐状态
* @return the accountStatus
*/
public String getAccountStatus() {
return this.accountStatus;
}
/**
* set the accountStatus - 记帐状态
*/
public void setAccountStatus(String accountStatus) {
this.accountStatus = accountStatus;
}
/**
* get the voucherId - 凭证号
* @return the voucherId
*/
public String getVoucherId() {
return this.voucherId;
}
/**
* set the voucherId - 凭证号
*/
public void setVoucherId(String voucherId) {
this.voucherId = voucherId;
}
/**
* get the periodId - 期间编号
* @return the periodId
*/
public String getPeriodId() {
return this.periodId;
}
/**
* set the periodId - 期间编号
*/
public void setPeriodId(String periodId) {
this.periodId = periodId;
}
/**
* get the noteCounts - 票据张数
* @return the noteCounts
*/
public Long getNoteCounts() {
return this.noteCounts;
}
/**
* set the noteCounts - 票据张数
*/
public void setNoteCounts(Long noteCounts) {
this.noteCounts = noteCounts;
}
/**
* get the modiPerson - 更新人
* @return the modiPerson
*/
public String getModiPerson() {
return this.modiPerson;
}
/**
* set the modiPerson - 更新人
*/
public void setModiPerson(String modiPerson) {
this.modiPerson = modiPerson;
}
/**
* get the modiDate - 更新时间
* @return the modiDate
*/
public Date getModiDate() {
return this.modiDate;
}
/**
* set the modiDate - 更新时间
*/
public void setModiDate(Date modiDate) {
this.modiDate = modiDate;
}
/**
 * Populates this bean from a String-valued Map (e.g. a request/EI block row).
 * Missing or empty String entries leave the current field value unchanged
 * (StringUtils.defaultIfEmpty / NumberUtils.to* fall back to the existing value);
 * date fields are parsed with DateUtils.toDate and may become null when absent.
 * NOTE(review): accountId and voucherDate are never read here — presumably
 * intentional (they are also absent from the metadata and toMap); confirm.
 */
public void fromMap(Map map) {
setSegNo(StringUtils.defaultIfEmpty(((String)map.get("segNo")), segNo));
setAccsetNo(NumberUtils.toLong(((String)map.get("accsetNo")), accsetNo));
setRefundId(StringUtils.defaultIfEmpty(((String)map.get("refundId")), refundId));
setRefundDate(DateUtils.toDate((String)map.get("refundDate")));
setRefundType(StringUtils.defaultIfEmpty(((String)map.get("refundType")), refundType));
setProviderId(StringUtils.defaultIfEmpty(((String)map.get("providerId")), providerId));
setBankAccNum(StringUtils.defaultIfEmpty(((String)map.get("bankAccNum")), bankAccNum));
setBankAccName(StringUtils.defaultIfEmpty(((String)map.get("bankAccName")), bankAccName));
setBankSubjectid(StringUtils.defaultIfEmpty(((String)map.get("bankSubjectid")), bankSubjectid));
setNoteType(StringUtils.defaultIfEmpty(((String)map.get("noteType")), noteType));
setNoteId(StringUtils.defaultIfEmpty(((String)map.get("noteId")), noteId));
setAmount(NumberUtils.toBigDecimal(((String)map.get("amount")), amount));
setBankFee(NumberUtils.toBigDecimal(((String)map.get("bankFee")), bankFee));
setCurrencyId(StringUtils.defaultIfEmpty(((String)map.get("currencyId")), currencyId));
setCurrencyRate(NumberUtils.toBigDecimal(((String)map.get("currencyRate")), currencyRate));
setRefundReason(StringUtils.defaultIfEmpty(((String)map.get("refundReason")), refundReason));
setRedFlag(StringUtils.defaultIfEmpty(((String)map.get("redFlag")), redFlag));
setOriginSettleId(StringUtils.defaultIfEmpty(((String)map.get("originSettleId")), originSettleId));
setFactStatus(StringUtils.defaultIfEmpty(((String)map.get("factStatus")), factStatus));
setFactDate(DateUtils.toDate((String)map.get("factDate")));
setForecastDate(DateUtils.toDate((String)map.get("forecastDate")));
setAccountStatus(StringUtils.defaultIfEmpty(((String)map.get("accountStatus")), accountStatus));
setVoucherId(StringUtils.defaultIfEmpty(((String)map.get("voucherId")), voucherId));
setPeriodId(StringUtils.defaultIfEmpty(((String)map.get("periodId")), periodId));
setNoteCounts(NumberUtils.toLong(((String)map.get("noteCounts")), noteCounts));
setModiPerson(StringUtils.defaultIfEmpty(((String)map.get("modiPerson")), modiPerson));
setModiDate(DateUtils.toDate((String)map.get("modiDate")));
setBankSubjectidName(StringUtils.defaultIfEmpty(((String)map.get("bankSubjectidName")), bankSubjectidName));
setCurrencyIdName(StringUtils.defaultIfEmpty(((String)map.get("currencyIdName")), currencyIdName));
setNoteTypeName(StringUtils.defaultIfEmpty(((String)map.get("noteTypeName")), noteTypeName));
setProviderName(StringUtils.defaultIfEmpty(((String)map.get("providerName")), providerName));
setRedFlagName(StringUtils.defaultIfEmpty(((String)map.get("redFlagName")), redFlagName));
setRefundName(StringUtils.defaultIfEmpty(((String)map.get("refundName")), refundName));
setAccountStatusName(StringUtils.defaultIfEmpty(((String)map.get("accountStatusName")), accountStatusName));
setCompanyCode(StringUtils.defaultIfEmpty(((String)map.get("companyCode")), companyCode));
}
/**
 * Serializes this bean into a String-valued Map, formatting each field via
 * StringUtils.toString with the field length/scale registered in initMetaData.
 * Mirrors fromMap; accountId and voucherDate are not exported (see the note on
 * initMetaData — presumably transient fields; confirm).
 *
 * @return a new Map keyed by property name with formatted String values
 */
public Map toMap() {
Map map = new HashMap();
map.put("segNo",StringUtils.toString(segNo, eiMetadata.getMeta("segNo").getFieldLength(), eiMetadata.getMeta("segNo").getScaleLength()));
map.put("accsetNo",StringUtils.toString(accsetNo, eiMetadata.getMeta("accsetNo").getFieldLength(), eiMetadata.getMeta("accsetNo").getScaleLength()));
map.put("refundId",StringUtils.toString(refundId, eiMetadata.getMeta("refundId").getFieldLength(), eiMetadata.getMeta("refundId").getScaleLength()));
map.put("refundDate",StringUtils.toString(refundDate, eiMetadata.getMeta("refundDate").getFieldLength(), eiMetadata.getMeta("refundDate").getScaleLength()));
map.put("refundType",StringUtils.toString(refundType, eiMetadata.getMeta("refundType").getFieldLength(), eiMetadata.getMeta("refundType").getScaleLength()));
map.put("providerId",StringUtils.toString(providerId, eiMetadata.getMeta("providerId").getFieldLength(), eiMetadata.getMeta("providerId").getScaleLength()));
map.put("bankAccNum",StringUtils.toString(bankAccNum, eiMetadata.getMeta("bankAccNum").getFieldLength(), eiMetadata.getMeta("bankAccNum").getScaleLength()));
map.put("bankAccName",StringUtils.toString(bankAccName, eiMetadata.getMeta("bankAccName").getFieldLength(), eiMetadata.getMeta("bankAccName").getScaleLength()));
map.put("bankSubjectid",StringUtils.toString(bankSubjectid, eiMetadata.getMeta("bankSubjectid").getFieldLength(), eiMetadata.getMeta("bankSubjectid").getScaleLength()));
map.put("noteType",StringUtils.toString(noteType, eiMetadata.getMeta("noteType").getFieldLength(), eiMetadata.getMeta("noteType").getScaleLength()));
map.put("noteId",StringUtils.toString(noteId, eiMetadata.getMeta("noteId").getFieldLength(), eiMetadata.getMeta("noteId").getScaleLength()));
map.put("amount",StringUtils.toString(amount, eiMetadata.getMeta("amount").getFieldLength(), eiMetadata.getMeta("amount").getScaleLength()));
map.put("bankFee",StringUtils.toString(bankFee, eiMetadata.getMeta("bankFee").getFieldLength(), eiMetadata.getMeta("bankFee").getScaleLength()));
map.put("currencyId",StringUtils.toString(currencyId, eiMetadata.getMeta("currencyId").getFieldLength(), eiMetadata.getMeta("currencyId").getScaleLength()));
map.put("currencyRate",StringUtils.toString(currencyRate, eiMetadata.getMeta("currencyRate").getFieldLength(), eiMetadata.getMeta("currencyRate").getScaleLength()));
map.put("refundReason",StringUtils.toString(refundReason, eiMetadata.getMeta("refundReason").getFieldLength(), eiMetadata.getMeta("refundReason").getScaleLength()));
map.put("redFlag",StringUtils.toString(redFlag, eiMetadata.getMeta("redFlag").getFieldLength(), eiMetadata.getMeta("redFlag").getScaleLength()));
map.put("originSettleId",StringUtils.toString(originSettleId, eiMetadata.getMeta("originSettleId").getFieldLength(), eiMetadata.getMeta("originSettleId").getScaleLength()));
map.put("factStatus",StringUtils.toString(factStatus, eiMetadata.getMeta("factStatus").getFieldLength(), eiMetadata.getMeta("factStatus").getScaleLength()));
map.put("factDate",StringUtils.toString(factDate, eiMetadata.getMeta("factDate").getFieldLength(), eiMetadata.getMeta("factDate").getScaleLength()));
map.put("forecastDate",StringUtils.toString(forecastDate, eiMetadata.getMeta("forecastDate").getFieldLength(), eiMetadata.getMeta("forecastDate").getScaleLength()));
map.put("accountStatus",StringUtils.toString(accountStatus, eiMetadata.getMeta("accountStatus").getFieldLength(), eiMetadata.getMeta("accountStatus").getScaleLength()));
map.put("voucherId",StringUtils.toString(voucherId, eiMetadata.getMeta("voucherId").getFieldLength(), eiMetadata.getMeta("voucherId").getScaleLength()));
map.put("periodId",StringUtils.toString(periodId, eiMetadata.getMeta("periodId").getFieldLength(), eiMetadata.getMeta("periodId").getScaleLength()));
map.put("noteCounts",StringUtils.toString(noteCounts, eiMetadata.getMeta("noteCounts").getFieldLength(), eiMetadata.getMeta("noteCounts").getScaleLength()));
map.put("modiPerson",StringUtils.toString(modiPerson, eiMetadata.getMeta("modiPerson").getFieldLength(), eiMetadata.getMeta("modiPerson").getScaleLength()));
map.put("modiDate",StringUtils.toString(modiDate, eiMetadata.getMeta("modiDate").getFieldLength(), eiMetadata.getMeta("modiDate").getScaleLength()));
map.put("bankSubjectidName",StringUtils.toString(bankSubjectidName, eiMetadata.getMeta("bankSubjectidName").getFieldLength(), eiMetadata.getMeta("bankSubjectidName").getScaleLength()));
map.put("currencyIdName",StringUtils.toString(currencyIdName, eiMetadata.getMeta("currencyIdName").getFieldLength(), eiMetadata.getMeta("currencyIdName").getScaleLength()));
map.put("noteTypeName",StringUtils.toString(noteTypeName, eiMetadata.getMeta("noteTypeName").getFieldLength(), eiMetadata.getMeta("noteTypeName").getScaleLength()));
map.put("providerName",StringUtils.toString(providerName, eiMetadata.getMeta("providerName").getFieldLength(), eiMetadata.getMeta("providerName").getScaleLength()));
map.put("redFlagName",StringUtils.toString(redFlagName, eiMetadata.getMeta("redFlagName").getFieldLength(), eiMetadata.getMeta("redFlagName").getScaleLength()));
map.put("refundName",StringUtils.toString(refundName, eiMetadata.getMeta("refundName").getFieldLength(), eiMetadata.getMeta("refundName").getScaleLength()));
map.put("accountStatusName",StringUtils.toString(accountStatusName, eiMetadata.getMeta("accountStatusName").getFieldLength(), eiMetadata.getMeta("accountStatusName").getScaleLength()));
map.put("companyCode",StringUtils.toString(companyCode, eiMetadata.getMeta("companyCode").getFieldLength(), eiMetadata.getMeta("companyCode").getScaleLength()));
return map;
}
public String getBankSubjectidName() {
return bankSubjectidName;
}
public void setBankSubjectidName(String bankSubjectidName) {
this.bankSubjectidName = bankSubjectidName;
}
public String getCurrencyIdName() {
return currencyIdName;
}
public void setCurrencyIdName(String currencyIdName) {
this.currencyIdName = currencyIdName;
}
public String getNoteTypeName() {
return noteTypeName;
}
public void setNoteTypeName(String noteTypeName) {
this.noteTypeName = noteTypeName;
}
public String getProviderName() {
return providerName;
}
public void setProviderName(String providerName) {
this.providerName = providerName;
}
public String getRedFlagName() {
return redFlagName;
}
public void setRedFlagName(String redFlagName) {
this.redFlagName = redFlagName;
}
public String getRefundName() {
return refundName;
}
public void setRefundName(String refundName) {
this.refundName = refundName;
}
public String getAccountStatusName() {
return accountStatusName;
}
public void setAccountStatusName(String accountStatusName) {
this.accountStatusName = accountStatusName;
}
public String getCompanyCode() {
return companyCode;
}
public void setCompanyCode(String companyCode) {
this.companyCode = companyCode;
}
} | apache-2.0 |
wisobi/leanbean | leanbean-api/src/main/java/com/wisobi/leanbean/restlet/resource/MeetingResource.java | 2584 | package com.wisobi.leanbean.restlet.resource;
import com.wisobi.leanbean.Hashids;
import com.wisobi.leanbean.LeanBeanDao;
import com.wisobi.leanbean.LeanBeanUtil;
import com.wisobi.leanbean.dto.DAO2DTOMapper;
import com.wisobi.leanbean.dto.DTO2DAOMapper;
import com.wisobi.leanbean.dto.MeetingTO;
import com.wisobi.leanbean.dto.MeetingViewTO;
import com.wisobi.leanbean.jpa.LeanBeanJpaDao;
import com.wisobi.leanbean.jpa.entity.Meeting;
import org.restlet.data.Status;
import org.restlet.resource.Get;
import org.restlet.resource.Post;
import org.restlet.resource.ResourceException;
import org.restlet.resource.ServerResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Created by bjork on 25/08/14.
*/
/**
 * Restlet resource exposing meetings: GET resolves a hashed meeting id to a
 * MeetingViewTO; POST creates a new meeting from a MeetingTO.
 *
 * Fixes vs. the previous revision: DAO close failures were logged with only
 * {@code e.getMessage()} and at inconsistent levels (debug in one method, error
 * in the other), losing the stack trace; the duplicated close/catch boilerplate
 * is now a single {@code closeQuietly()} helper, and the DAO is also closed when
 * the lookup itself throws.
 */
public class MeetingResource extends ServerResource {

    private static final Logger logger = LoggerFactory.getLogger(MeetingResource.class);

    // One resource instance is created per request, so this DAO is request-scoped.
    private final LeanBeanDao dao = new LeanBeanJpaDao();

    /**
     * Resolves the {@code meeting-id} URL attribute (a hashid) and returns the meeting.
     *
     * @return the meeting view transfer object
     * @throws ResourceException 404 if the hash is invalid or no such meeting exists
     */
    @Get("json")
    public MeetingViewTO findMeetingById() {
        String hash = getRequestAttributes().get("meeting-id").toString();
        long meetingId = LeanBeanUtil.idDecode(hash);
        return findMeetingById(meetingId);
    }

    /**
     * Looks up a meeting by its numeric id.
     *
     * @param meetingId decoded meeting id; negative means the hash was invalid
     * @return the meeting view transfer object
     * @throws ResourceException 404 if the id is negative or unknown
     */
    public MeetingViewTO findMeetingById(long meetingId) {
        if (meetingId < 0) {
            // The hash could not be decoded to an id; no need to hit the database.
            getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND);
            throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND);
        }
        Meeting meeting;
        try {
            meeting = dao.findByMeetingId(meetingId);
        } finally {
            // Previously the DAO leaked if the lookup threw; always release it.
            closeQuietly();
        }
        if (meeting == null) {
            getResponse().setStatus(Status.CLIENT_ERROR_NOT_FOUND);
            throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND);
        }
        return DAO2DTOMapper.mapMeeting(meeting);
    }

    /**
     * Persists a new meeting mapped from the posted transfer object.
     *
     * @param meetingTO incoming meeting payload
     * @return the stored meeting as a view transfer object
     * @throws ResourceException 400 if persistence fails
     */
    @Post("json")
    public MeetingViewTO addMeeting(MeetingTO meetingTO) {
        Meeting meeting = DTO2DAOMapper.mapMeeting(meetingTO);
        logger.info("Meeting: {}, Device: {}", meeting.getTitle(), meeting.getDevice().getId());
        try {
            dao.addMeeting(meeting);
            getResponse().setStatus(Status.SUCCESS_CREATED);
        } catch (Exception e) {
            // Log the full stack trace (the old code only logged e.getMessage() at debug).
            logger.warn("Failed to add meeting", e);
            throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, e.getMessage(), e);
        } finally {
            closeQuietly();
        }
        return DAO2DTOMapper.mapMeeting(meeting);
    }

    /** Closes the DAO, logging (but never propagating) any failure. */
    private void closeQuietly() {
        try {
            dao.close();
        } catch (Exception e) {
            logger.warn("Failed to close LeanBean DAO", e);
        }
    }
}
| apache-2.0 |
adragomir/hbaseindex | src/java/org/apache/hadoop/hbase/rest/TableModel.java | 8310 | /**
* Copyright 2008 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.rest.exception.HBaseRestException;
import org.apache.hadoop.hbase.rest.serializer.IRestSerializer;
import org.apache.hadoop.hbase.rest.serializer.ISerializable;
import org.apache.hadoop.hbase.util.Bytes;
import agilejson.TOJSON;
/**
 * REST model for whole-table operations: scanning, schema updates, metadata,
 * region listing, creation and deletion.
 */
public class TableModel extends AbstractModel {

  @SuppressWarnings("unused")
  private Log LOG = LogFactory.getLog(TableModel.class);

  public TableModel(HBaseConfiguration config, HBaseAdmin admin) {
    super.initialize(config, admin);
  }

  // Get Methods

  /** Returns all cells of all rows from every column of the given table. */
  public Result[] get(byte [] tableName) throws HBaseRestException {
    return get(tableName, getColumns(tableName));
  }

  /**
   * Returns all cells from all rows from the given table in the given columns.
   * The output is in the order that the columns are given.
   *
   * @param tableName
   *          table name
   * @param columnNames
   *          column names
   * @return resultant rows
   * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
   */
  public Result[] get(byte [] tableName, byte[][] columnNames)
  throws HBaseRestException {
    try {
      ArrayList<Result> a = new ArrayList<Result>();
      HTable table = new HTable(tableName);
      Scan scan = new Scan();
      scan.addColumns(columnNames);
      ResultScanner s = table.getScanner(scan);
      try {
        Result r;
        while ((r = s.next()) != null) {
          a.add(r);
        }
      } finally {
        // Always release the scanner lease, even if next() throws.
        s.close();
      }
      return a.toArray(new Result[0]);
    } catch (Exception e) {
      throw new HBaseRestException(e);
    }
  }

  protected boolean doesTableExist(byte [] tableName) throws HBaseRestException {
    try {
      return this.admin.tableExists(tableName);
    } catch (IOException e) {
      throw new HBaseRestException(e);
    }
  }

  protected void disableTable(byte [] tableName) throws HBaseRestException {
    try {
      this.admin.disableTable(tableName);
    } catch (IOException e) {
      throw new HBaseRestException("IOException disabling table", e);
    }
  }

  protected void enableTable(byte [] tableName) throws HBaseRestException {
    try {
      this.admin.enableTable(tableName);
    } catch (IOException e) {
      throw new HBaseRestException("IOException enabling table", e);
    }
  }

  /**
   * Modifies existing column families and adds new ones; the table is
   * disabled/re-enabled around each schema change as HBase requires.
   *
   * @param tableName table to alter
   * @param columns desired column descriptors
   * @return true on success
   * @throws HBaseRestException if the table is missing or a change fails
   */
  public boolean updateTable(String tableName,
      ArrayList<HColumnDescriptor> columns) throws HBaseRestException {
    HTableDescriptor htc = null;
    try {
      htc = this.admin.getTableDescriptor(Bytes.toBytes(tableName));
    } catch (IOException e) {
      // Preserve the underlying cause for diagnostics.
      throw new HBaseRestException("Table does not exist", e);
    }

    for (HColumnDescriptor column : columns) {
      if (htc.hasFamily(Bytes.toBytes(column.getNameAsString()))) {
        try {
          this.admin.disableTable(tableName);
          this.admin.modifyColumn(tableName, column.getNameAsString(), column);
          this.admin.enableTable(tableName);
        } catch (IOException e) {
          throw new HBaseRestException("unable to modify column "
              + column.getNameAsString(), e);
        }
      } else {
        try {
          this.admin.disableTable(tableName);
          this.admin.addColumn(tableName, column);
          this.admin.enableTable(tableName);
        } catch (IOException e) {
          throw new HBaseRestException("unable to add column "
              + column.getNameAsString(), e);
        }
      }
    }
    return true;
  }

  /**
   * Get table metadata.
   *
   * @param tableName
   * @return HTableDescriptor, or null if no table with that name exists
   * @throws HBaseRestException
   */
  public HTableDescriptor getTableMetadata(final String tableName)
      throws HBaseRestException {
    try {
      HTableDescriptor[] tables = this.admin.listTables();
      for (int i = 0; i < tables.length; i++) {
        if (Bytes.toString(tables[i].getName()).equals(tableName)) {
          return tables[i];
        }
      }
      // Not found: callers historically received null here.
      return null;
    } catch (IOException e) {
      // Preserve the underlying cause for diagnostics.
      throw new HBaseRestException("error processing request.", e);
    }
  }

  /**
   * Return region offsets.
   * @param tableName
   * @return Regions
   * @throws HBaseRestException
   */
  public Regions getTableRegions(final String tableName)
      throws HBaseRestException {
    try {
      HTable table = new HTable(this.conf, tableName);
      // Region boundaries are derived from the table's start keys.
      Regions regions = new Regions(table.getStartKeys());
      return regions;
    } catch (IOException e) {
      throw new HBaseRestException("Unable to get regions from table", e);
    }
  }

  // Post Methods

  /**
   * Creates table tableName described by the json in input.
   *
   * @param tableName
   *          table name
   * @param htd
   *          HBaseTableDescriptor for the table to be created
   *
   * @return true if operation does not fail due to a table with the given
   *         tableName not existing.
   * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
   */
  public boolean post(byte [] tableName, HTableDescriptor htd)
      throws HBaseRestException {
    try {
      if (!this.admin.tableExists(tableName)) {
        this.admin.createTable(htd);
        return true;
      }
    } catch (IOException e) {
      throw new HBaseRestException(e);
    }
    return false;
  }

  /**
   * Deletes table tableName
   *
   * @param tableName
   *          name of the table.
   * @return true if table exists and deleted, false if table does not exist.
   * @throws org.apache.hadoop.hbase.rest.exception.HBaseRestException
   */
  public boolean delete(byte [] tableName) throws HBaseRestException {
    try {
      if (this.admin.tableExists(tableName)) {
        this.admin.disableTable(tableName);
        this.admin.deleteTable(tableName);
        return true;
      }
      return false;
    } catch (Exception e) {
      throw new HBaseRestException(e);
    }
  }

  /** Serializable wrapper around a table's region start keys. */
  public static class Regions implements ISerializable {

    byte[][] regionKey;

    public Regions(byte [][] bs) {
      super();
      this.regionKey = bs;
    }

    @SuppressWarnings("unused")
    private Regions() {
    }

    /**
     * @return the regionKey
     */
    @TOJSON(fieldName = "region")
    public byte[][] getRegionKey() {
      return regionKey;
    }

    /**
     * @param regionKey
     *          the regionKey to set
     */
    public void setRegionKey(byte[][] regionKey) {
      this.regionKey = regionKey;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.apache.hadoop.hbase.rest.xml.IOutputXML#toXML()
     */
    public void restSerialize(IRestSerializer serializer)
        throws HBaseRestException {
      serializer.serializeRegionData(this);
    }
  }
}
| apache-2.0 |
jonathanlocke/biokit | src/main/java/org/locke/biokit/biology/genetics/bases/ambiguous/Any.java | 200 | package org.locke.biokit.biology.genetics.bases.ambiguous;
import org.locke.biokit.biology.genetics.Base;
/**
 * The ambiguous DNA base matching any nucleotide.
 */
public class Any extends Base
{
    /**
     * @return the IUPAC ambiguity code for "any base"
     */
    @Override
    public char asCharacter()
    {
        final char anyBaseCode = 'N';
        return anyBaseCode;
    }
}
| apache-2.0 |
wu-sheng/sky-walking | oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/counter/ID.java | 1179 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.core.metric.promethues.counter;
import com.google.common.collect.ImmutableMap;
import lombok.EqualsAndHashCode;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
// Identity of a Prometheus counter: metric name plus its label set.
// Lombok generates the all-args constructor, equals/hashCode and toString;
// both fields participate, so their declaration order matters for toString.
@RequiredArgsConstructor
@EqualsAndHashCode
@ToString
class ID {
// Prometheus metric name.
private final String name;
// Immutable label key/value pairs; part of identity via generated equals/hashCode.
private final ImmutableMap<String, String> labels;
}
| apache-2.0 |
joachimbjorklund/jibe-tools-fsm | src/main/java/jibe/tools/fsm/annotations/TransitionOnTimeout.java | 403 | package jibe.tools.fsm.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.concurrent.TimeUnit;
/**
 * Marks a method as a timeout-driven FSM transition: the annotated transition
 * is triggered after the configured period elapses.
 */
@Target({ ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
public @interface TransitionOnTimeout {
/** Timeout length, measured in units of {@link #timeUnit()}. */
long period();
/** Unit in which {@link #period()} is expressed. */
TimeUnit timeUnit();
}
| apache-2.0 |
doodelicious/cas | core/cas-server-core-configuration/src/main/java/org/apereo/cas/configuration/model/core/audit/AuditProperties.java | 3841 | package org.apereo.cas.configuration.model.core.audit;
import org.apereo.cas.configuration.model.support.jpa.AbstractJpaProperties;
import org.apereo.inspektr.audit.support.AbstractStringAuditTrailManager;
/**
 * This is {@link AuditProperties}: configuration holder for CAS audit output
 * (format, separators, header names) and the JDBC audit-trail backend.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
public class AuditProperties {

    /** Application code stamped on each audit record. */
    private String appCode = "CAS";

    /** Field separator used when single-line output is enabled. */
    private String singlelineSeparator = "|";

    private String alternateServerAddrHeaderName;

    private String alternateClientAddrHeaderName;

    private boolean useServerHostAddress;

    private boolean useSingleLine;

    private Jdbc jdbc = new Jdbc();

    /** Output format handed to the inspektr audit trail manager. */
    private AbstractStringAuditTrailManager.AuditFormats auditFormat =
            AbstractStringAuditTrailManager.AuditFormats.DEFAULT;

    private boolean ignoreAuditFailures;

    public Jdbc getJdbc() {
        return this.jdbc;
    }

    public void setJdbc(final Jdbc value) {
        this.jdbc = value;
    }

    public String getAppCode() {
        return this.appCode;
    }

    public void setAppCode(final String value) {
        this.appCode = value;
    }

    public String getSinglelineSeparator() {
        return this.singlelineSeparator;
    }

    public void setSinglelineSeparator(final String value) {
        this.singlelineSeparator = value;
    }

    public boolean isUseSingleLine() {
        return this.useSingleLine;
    }

    public void setUseSingleLine(final boolean value) {
        this.useSingleLine = value;
    }

    public AbstractStringAuditTrailManager.AuditFormats getAuditFormat() {
        return this.auditFormat;
    }

    public void setAuditFormat(final AbstractStringAuditTrailManager.AuditFormats value) {
        this.auditFormat = value;
    }

    public boolean isIgnoreAuditFailures() {
        return this.ignoreAuditFailures;
    }

    public void setIgnoreAuditFailures(final boolean value) {
        this.ignoreAuditFailures = value;
    }

    public String getAlternateServerAddrHeaderName() {
        return this.alternateServerAddrHeaderName;
    }

    public void setAlternateServerAddrHeaderName(final String value) {
        this.alternateServerAddrHeaderName = value;
    }

    public String getAlternateClientAddrHeaderName() {
        return this.alternateClientAddrHeaderName;
    }

    public void setAlternateClientAddrHeaderName(final String value) {
        this.alternateClientAddrHeaderName = value;
    }

    public boolean isUseServerHostAddress() {
        return this.useServerHostAddress;
    }

    public void setUseServerHostAddress(final boolean value) {
        this.useServerHostAddress = value;
    }

    /** JDBC settings for the audit trail store. */
    public static class Jdbc extends AbstractJpaProperties {

        private static final long serialVersionUID = 4227475246873515918L;

        /** Records older than this many days are eligible for cleanup. */
        private int maxAgeDays = 180;

        private String isolationLevelName = "ISOLATION_READ_COMMITTED";

        private String propagationBehaviorName = "PROPAGATION_REQUIRED";

        public int getMaxAgeDays() {
            return this.maxAgeDays;
        }

        public void setMaxAgeDays(final int value) {
            this.maxAgeDays = value;
        }

        public String getPropagationBehaviorName() {
            return this.propagationBehaviorName;
        }

        public void setPropagationBehaviorName(final String value) {
            this.propagationBehaviorName = value;
        }

        public String getIsolationLevelName() {
            return this.isolationLevelName;
        }

        public void setIsolationLevelName(final String value) {
            this.isolationLevelName = value;
        }
    }
}
| apache-2.0 |
zkidkid/elasticsearch | core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java | 36391 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.tasks;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.action.support.tasks.BaseTasksRequest;
import org.elasticsearch.action.support.tasks.BaseTasksResponse;
import org.elasticsearch.action.support.tasks.TransportTasksAction;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.test.tasks.MockTaskManager;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.action.support.PlainActionFuture.newFuture;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.not;
public class TransportTasksActionTests extends TaskManagerTestCase {
// Per-node request carrying the test name and a flag controlling whether the
// request registers itself with the task manager.
// NOTE: readFrom/writeTo field order must stay mirrored — wire format.
public static class NodeRequest extends BaseNodeRequest {
protected String requestName;
private boolean enableTaskManager;
public NodeRequest() {
super();
}
// Copies the test fields from the fan-out parent request.
public NodeRequest(NodesRequest request, String nodeId) {
super(nodeId);
requestName = request.requestName;
enableTaskManager = request.enableTaskManager;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
requestName = in.readString();
enableTaskManager = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(requestName);
out.writeBoolean(enableTaskManager);
}
@Override
public String getDescription() {
return "CancellableNodeRequest[" + requestName + ", " + enableTaskManager + "]";
}
// Returning null opts this request out of task-manager registration.
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
if (enableTaskManager) {
return super.createTask(id, type, action, parentTaskId);
} else {
return null;
}
}
}
// Coordinating (fan-out) request; mirrors NodeRequest's fields and task
// opt-out behavior. readFrom/writeTo field order must stay mirrored.
public static class NodesRequest extends BaseNodesRequest<NodesRequest> {
private String requestName;
private boolean enableTaskManager;
NodesRequest() {
super();
}
// Task management enabled by default.
public NodesRequest(String requestName, String... nodesIds) {
this(requestName, true, nodesIds);
}
public NodesRequest(String requestName, boolean enableTaskManager, String... nodesIds) {
super(nodesIds);
this.requestName = requestName;
this.enableTaskManager = enableTaskManager;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
requestName = in.readString();
enableTaskManager = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(requestName);
out.writeBoolean(enableTaskManager);
}
@Override
public String getDescription() {
return "CancellableNodesRequest[" + requestName + ", " + enableTaskManager + "]";
}
// Returning null opts the coordinating request out of task-manager registration.
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId) {
if (enableTaskManager) {
return super.createTask(id, type, action, parentTaskId);
} else {
return null;
}
}
}
/**
 * Simulates node-based task that can be used to block node tasks so they are guaranteed to be registered by task manager.
 * Subclasses supply the per-node operation; this base wires request/response factories.
 */
abstract class TestNodesAction extends AbstractTestNodesAction<NodesRequest, NodeRequest> {
TestNodesAction(Settings settings, String actionName, ThreadPool threadPool,
ClusterService clusterService, TransportService transportService) {
super(settings, actionName, threadPool, clusterService, transportService, NodesRequest::new, NodeRequest::new);
}
// Fans the coordinating NodesRequest out into one NodeRequest per node.
@Override
protected NodeRequest newNodeRequest(String nodeId, NodesRequest request) {
return new NodeRequest(request, nodeId);
}
@Override
protected NodeResponse newNodeResponse() {
return new NodeResponse();
}
}
/**
 * Per-task response carrying a single status string; the wire format is
 * exactly one string.
 */
static class TestTaskResponse implements Writeable {

    private final String status;

    public TestTaskResponse(String status) {
        this.status = status;
    }

    /** Deserializes the status string written by {@link #writeTo}. */
    public TestTaskResponse(StreamInput in) throws IOException {
        this.status = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(this.status);
    }

    public String getStatus() {
        return this.status;
    }
}
// Marker request with no extra fields; inherits all task filtering from BaseTasksRequest.
static class TestTasksRequest extends BaseTasksRequest<TestTasksRequest> {
}
// Aggregated response: a list of per-task results plus inherited failure lists.
// NOTE: readFrom/writeTo must stay mirrored (count followed by that many entries).
static class TestTasksResponse extends BaseTasksResponse {
private List<TestTaskResponse> tasks;
public TestTasksResponse() {
}
// Defensively copies into an unmodifiable list; null means empty.
public TestTasksResponse(List<TestTaskResponse> tasks, List<TaskOperationFailure> taskFailures,
List<? extends FailedNodeException> nodeFailures) {
super(taskFailures, nodeFailures);
this.tasks = tasks == null ? Collections.emptyList() : Collections.unmodifiableList(new ArrayList<>(tasks));
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int taskCount = in.readVInt();
List<TestTaskResponse> builder = new ArrayList<>();
for (int i = 0; i < taskCount; i++) {
builder.add(new TestTaskResponse(in));
}
tasks = Collections.unmodifiableList(builder);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVInt(tasks.size());
for (TestTaskResponse task : tasks) {
task.writeTo(out);
}
}
}
/**
 * Test class for testing task operations. Base transport action operating on
 * running tasks; subclasses implement the per-task operation.
 */
abstract static class TestTasksAction extends TransportTasksAction<Task, TestTasksRequest, TestTasksResponse, TestTaskResponse> {
protected TestTasksAction(Settings settings, String actionName, ThreadPool threadPool,
ClusterService clusterService, TransportService transportService) {
super(settings, actionName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()),
new IndexNameExpressionResolver(Settings.EMPTY), TestTasksRequest::new, TestTasksResponse::new,
ThreadPool.Names.MANAGEMENT);
}
// Bundles per-task results with any task/node failures into one response.
@Override
protected TestTasksResponse newResponse(TestTasksRequest request, List<TestTaskResponse> tasks,
List<TaskOperationFailure> taskOperationFailures, List<FailedNodeException> failedNodeExceptions) {
return new TestTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
}
@Override
protected TestTaskResponse readTaskResponse(StreamInput in) throws IOException {
return new TestTaskResponse(in);
}
// Collect exceptions into the response rather than failing fast.
@Override
protected boolean accumulateExceptions() {
return true;
}
}
/** Convenience overload that starts the blocking action with a default request name. */
private ActionFuture<NodesResponse> startBlockingTestNodesAction(CountDownLatch checkLatch) throws InterruptedException {
    NodesRequest defaultRequest = new NodesRequest("Test Request");
    return startBlockingTestNodesAction(checkLatch, defaultRequest);
}
/** Bridges the listener-based overload into a blocking future. */
private ActionFuture<NodesResponse> startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request)
    throws InterruptedException {
    PlainActionFuture<NodesResponse> result = newFuture();
    startBlockingTestNodesAction(checkLatch, request, result);
    return result;
}
/** Convenience overload that starts the blocking action with a default request name. */
private Task startBlockingTestNodesAction(CountDownLatch checkLatch, ActionListener<NodesResponse> listener)
    throws InterruptedException {
    NodesRequest defaultRequest = new NodesRequest("Test Request");
    return startBlockingTestNodesAction(checkLatch, defaultRequest, listener);
}
/**
 * Starts one blocking TestNodesAction per test node and waits until every
 * node operation has begun (so each task is registered with its task
 * manager). Each node operation then parks on checkLatch until the caller
 * counts it down. Returns the coordinating task.
 */
private Task startBlockingTestNodesAction(CountDownLatch checkLatch, NodesRequest request, ActionListener<NodesResponse> listener)
throws InterruptedException {
// Counted down once per node when its operation starts running.
CountDownLatch actionLatch = new CountDownLatch(nodesCount);
TestNodesAction[] actions = new TestNodesAction[nodesCount];
for (int i = 0; i < testNodes.length; i++) {
final int node = i;
actions[i] = new TestNodesAction(CLUSTER_SETTINGS, "testAction", threadPool, testNodes[i].clusterService,
testNodes[i].transportService) {
@Override
protected NodeResponse nodeOperation(NodeRequest request) {
logger.info("Action on node {}", node);
actionLatch.countDown();
try {
checkLatch.await();
} catch (InterruptedException ex) {
// Preserve the interrupt flag for the caller.
Thread.currentThread().interrupt();
}
logger.info("Action on node {} finished", node);
return new NodeResponse(testNodes[node].discoveryNode);
}
};
}
// Make sure no tasks are running
for (TestNode node : testNodes) {
assertEquals(0, node.transportService.getTaskManager().getTasks().size());
}
Task task = actions[0].execute(request, listener);
logger.info("Awaiting for all actions to start");
assertTrue(actionLatch.await(10, TimeUnit.SECONDS));
logger.info("Done waiting for all actions to start");
return task;
}
/**
 * Verifies running-task bookkeeping: the coordinating node holds the main
 * task plus its own node task, every other node holds exactly one node task,
 * the list-tasks API groups children under the main task, and all tasks
 * vanish once the blocking operations are released.
 */
public void testRunningTasksCount() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
CountDownLatch responseLatch = new CountDownLatch(1);
final AtomicReference<NodesResponse> responseReference = new AtomicReference<>();
Task mainTask = startBlockingTestNodesAction(checkLatch, new ActionListener<NodesResponse>() {
@Override
public void onResponse(NodesResponse listTasksResponse) {
responseReference.set(listTasksResponse);
responseLatch.countDown();
}
@Override
public void onFailure(Exception e) {
logger.warn("Couldn't get list of tasks", e);
responseLatch.countDown();
}
});
// Check task counts using taskManager
Map<Long, Task> localTasks = testNodes[0].transportService.getTaskManager().getTasks();
assertEquals(2, localTasks.size()); // all node tasks + 1 coordinating task
// The coordinating task was registered first, so it has the smaller id.
Task coordinatingTask = localTasks.get(Collections.min(localTasks.keySet()));
Task subTask = localTasks.get(Collections.max(localTasks.keySet()));
assertThat(subTask.getAction(), endsWith("[n]"));
assertThat(coordinatingTask.getAction(), not(endsWith("[n]")));
for (int i = 1; i < testNodes.length; i++) {
Map<Long, Task> remoteTasks = testNodes[i].transportService.getTaskManager().getTasks();
assertEquals(1, remoteTasks.size());
Task remoteTask = remoteTasks.values().iterator().next();
assertThat(remoteTask.getAction(), endsWith("[n]"));
}
// Check task counts using transport
int testNodeNum = randomIntBetween(0, testNodes.length - 1);
TestNode testNode = testNodes[testNodeNum];
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("testAction*"); // pick all test actions
logger.info("Listing currently running tasks using node [{}]", testNodeNum);
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
logger.info("Checking currently running tasks");
assertEquals(testNodes.length, response.getPerNodeTasks().size());
// Coordinating node
assertEquals(2, response.getPerNodeTasks().get(testNodes[0].discoveryNode.getId()).size());
// Other nodes node
for (int i = 1; i < testNodes.length; i++) {
assertEquals(1, response.getPerNodeTasks().get(testNodes[i].discoveryNode.getId()).size());
}
// There should be a single main task when grouped by tasks
assertEquals(1, response.getTaskGroups().size());
// And as many child tasks as we have nodes
assertEquals(testNodes.length, response.getTaskGroups().get(0).getChildTasks().size());
// Check task counts using transport with filtering
testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("testAction[n]"); // only pick node actions
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertNull(entry.getValue().get(0).getDescription());
}
// Since the main task is not in the list - all tasks should be by themselves
assertEquals(testNodes.length, response.getTaskGroups().size());
for (TaskGroup taskGroup : response.getTaskGroups()) {
assertEquals(0, taskGroup.getChildTasks().size());
}
// Check task counts using transport with detailed description
listTasksRequest.setDetailed(true); // same request only with detailed description
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertEquals("CancellableNodeRequest[Test Request, true]", entry.getValue().get(0).getDescription());
}
// Make sure that the main task on coordinating node is the task that was returned to us by execute()
listTasksRequest.setActions("testAction"); // only pick the main task
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(1, response.getTasks().size());
assertEquals(mainTask.getId(), response.getTasks().get(0).getId());
// Release all tasks and wait for response
checkLatch.countDown();
assertTrue(responseLatch.await(10, TimeUnit.SECONDS));
NodesResponse responses = responseReference.get();
assertEquals(0, responses.failureCount());
// Make sure that we don't have any lingering tasks
for (TestNode node : testNodes) {
assertEquals(0, node.transportService.getTaskManager().getTasks().size());
}
}
/**
 * Verifies that child node tasks can be located through the list-tasks API
 * by filtering on the parent task id of the coordinating task.
 */
public void testFindChildTasks() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
// Get the parent task
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("testAction");
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(1, response.getTasks().size());
String parentNode = response.getTasks().get(0).getTaskId().getNodeId();
long parentTaskId = response.getTasks().get(0).getId();
// Find tasks with common parent
listTasksRequest = new ListTasksRequest();
listTasksRequest.setParentTaskId(new TaskId(parentNode, parentTaskId));
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getTasks().size());
// Every child is a node action pointing back at the same parent.
for (TaskInfo task : response.getTasks()) {
assertEquals("testAction[n]", task.getAction());
assertEquals(parentNode, task.getParentTaskId().getNodeId());
assertEquals(parentTaskId, task.getParentTaskId().getId());
}
// Release all tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
/**
 * Verifies that requests which opt out of task management (createTask
 * returning null) are invisible to the list-tasks API.
 */
public void testTaskManagementOptOut() throws Exception {
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
// Starting actions that disable task manager
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch, new NodesRequest("Test Request", false));
TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
// Get the parent task
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("testAction*");
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(0, response.getTasks().size());
// Release all tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
/**
 * Verifies that task descriptions are omitted by default, appear when
 * "detailed" is requested, and that reported start time / running time fall
 * within the bounds measured around the action's start.
 */
public void testTasksDescriptions() throws Exception {
// Lower bound for the tasks' reported wall-clock start time.
long minimalStartTime = System.currentTimeMillis();
setupTestNodes(Settings.EMPTY);
connectNodes(testNodes);
CountDownLatch checkLatch = new CountDownLatch(1);
ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
long maximumStartTimeNanos = System.nanoTime();
// Check task counts using transport with filtering
TestNode testNode = testNodes[randomIntBetween(0, testNodes.length - 1)];
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("testAction[n]"); // only pick node actions
ListTasksResponse response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertNull(entry.getValue().get(0).getDescription());
}
// Check task counts using transport with detailed description
// Lower bound for each task's reported running time.
long minimalDurationNanos = System.nanoTime() - maximumStartTimeNanos;
listTasksRequest.setDetailed(true); // same request only with detailed description
response = testNode.transportListTasksAction.execute(listTasksRequest).get();
assertEquals(testNodes.length, response.getPerNodeTasks().size());
for (Map.Entry<String, List<TaskInfo>> entry : response.getPerNodeTasks().entrySet()) {
assertEquals(1, entry.getValue().size());
assertEquals("CancellableNodeRequest[Test Request, true]", entry.getValue().get(0).getDescription());
assertThat(entry.getValue().get(0).getStartTime(), greaterThanOrEqualTo(minimalStartTime));
assertThat(entry.getValue().get(0).getRunningTimeNanos(), greaterThanOrEqualTo(minimalDurationNanos));
}
// Release all tasks and wait for response
checkLatch.countDown();
NodesResponse responses = future.get();
assertEquals(0, responses.failureCount());
}
/**
 * Verifies cancellation behavior for tasks that do not support cancellation:
 * cancelling by action name matches nothing, cancelling by explicit task id
 * produces a node-level "doesn't support cancellation" failure, and the task
 * keeps running (and is reported as non-cancellable) afterwards.
 */
public void testCancellingTasksThatDontSupportCancellation() throws Exception {
    setupTestNodes(Settings.EMPTY);
    connectNodes(testNodes);
    CountDownLatch checkLatch = new CountDownLatch(1);
    CountDownLatch responseLatch = new CountDownLatch(1);
    // Count down on either outcome so the final await() always completes.
    Task task = startBlockingTestNodesAction(checkLatch, new ActionListener<NodesResponse>() {
        @Override
        public void onResponse(NodesResponse nodeResponses) {
            responseLatch.countDown();
        }

        @Override
        public void onFailure(Exception e) {
            responseLatch.countDown();
        }
    });
    String actionName = "testAction"; // only pick the main action
    // Try to cancel main task using action name
    CancelTasksRequest request = new CancelTasksRequest();
    request.setNodesIds(testNodes[0].discoveryNode.getId());
    request.setReason("Testing Cancellation");
    request.setActions(actionName);
    CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request)
        .get();
    // Shouldn't match any tasks since testAction doesn't support cancellation
    assertEquals(0, response.getTasks().size());
    assertEquals(0, response.getTaskFailures().size());
    assertEquals(0, response.getNodeFailures().size());
    // Try to cancel main task using id
    request = new CancelTasksRequest();
    request.setReason("Testing Cancellation");
    request.setTaskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId()));
    response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request).get();
    // Shouldn't match any tasks since testAction doesn't support cancellation;
    // targeting by id surfaces an explicit node-level failure instead.
    assertEquals(0, response.getTasks().size());
    assertEquals(0, response.getTaskFailures().size());
    assertEquals(1, response.getNodeFailures().size());
    assertThat(response.getNodeFailures().get(0).getDetailedMessage(), containsString("doesn't support cancellation"));
    // Make sure that task is still running
    ListTasksRequest listTasksRequest = new ListTasksRequest();
    listTasksRequest.setActions(actionName);
    ListTasksResponse listResponse = testNodes[randomIntBetween(0, testNodes.length - 1)].transportListTasksAction.execute
        (listTasksRequest).get();
    assertEquals(1, listResponse.getPerNodeTasks().size());
    // Verify that tasks are marked as non-cancellable
    for (TaskInfo taskInfo : listResponse.getTasks()) {
        assertFalse(taskInfo.isCancellable());
    }
    // Release all tasks and wait for response
    checkLatch.countDown();
    responseLatch.await(10, TimeUnit.SECONDS);
}
/**
 * Verifies that node operations which throw still go through normal task
 * registration/unregistration on every node: each node action fails, the
 * top-level response counts one failure per node, and the recorded task
 * manager events show balanced register/unregister pairs (two pairs on the
 * coordinating node — main task plus its local node task — one pair elsewhere).
 */
public void testFailedTasksCount() throws ExecutionException, InterruptedException, IOException {
    // The mock task manager is required so listeners can observe the events.
    Settings settings = Settings.builder().put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true).build();
    setupTestNodes(settings);
    connectNodes(testNodes);
    TestNodesAction[] actions = new TestNodesAction[nodesCount];
    RecordingTaskManagerListener[] listeners = setupListeners(testNodes, "testAction*");
    for (int i = 0; i < testNodes.length; i++) {
        final int node = i;
        // Every node operation throws, simulating a per-node failure.
        actions[i] = new TestNodesAction(CLUSTER_SETTINGS, "testAction", threadPool, testNodes[i].clusterService,
            testNodes[i].transportService) {
            @Override
            protected NodeResponse nodeOperation(NodeRequest request) {
                logger.info("Action on node {}", node);
                throw new RuntimeException("Test exception");
            }
        };
    }
    // Sanity check: nothing is registered before the action runs.
    for (TestNode testNode : testNodes) {
        assertEquals(0, testNode.transportService.getTaskManager().getTasks().size());
    }
    NodesRequest request = new NodesRequest("Test Request");
    NodesResponse responses = actions[0].execute(request).get();
    assertEquals(nodesCount, responses.failureCount());
    // Make sure that actions are still registered in the task manager on all nodes
    // Twice on the coordinating node and once on all other nodes.
    assertEquals(4, listeners[0].getEvents().size());
    assertEquals(2, listeners[0].getRegistrationEvents().size());
    assertEquals(2, listeners[0].getUnregistrationEvents().size());
    for (int i = 1; i < listeners.length; i++) {
        assertEquals(2, listeners[i].getEvents().size());
        assertEquals(1, listeners[i].getRegistrationEvents().size());
        assertEquals(1, listeners[i].getUnregistrationEvents().size());
    }
}
/**
 * Verifies partial failure reporting of a tasks action: when the task
 * operation throws on exactly one node, the response carries successful
 * results from the other nodes plus a single task-level failure and no
 * node-level failures.
 */
public void testTaskLevelActionFailures() throws ExecutionException, InterruptedException, IOException {
    setupTestNodes(Settings.EMPTY);
    connectNodes(testNodes);
    CountDownLatch checkLatch = new CountDownLatch(1);
    ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);
    TestTasksAction[] tasksActions = new TestTasksAction[nodesCount];
    // Starts at 1 so the coordinating node (0) never fails — presumably to keep
    // the failure at task level rather than at the coordinator. TODO confirm.
    final int failTaskOnNode = randomIntBetween(1, nodesCount - 1);
    for (int i = 0; i < testNodes.length; i++) {
        final int node = i;
        // Simulate task action that fails on one of the tasks on one of the nodes
        tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool, testNodes[i].clusterService,
            testNodes[i].transportService) {
            @Override
            protected TestTaskResponse taskOperation(TestTasksRequest request, Task task) {
                logger.info("Task action on node {}", node);
                // Only fail the child task (one with a parent), not the main task.
                if (failTaskOnNode == node && task.getParentTaskId().isSet()) {
                    logger.info("Failing on node {}", node);
                    throw new RuntimeException("Task level failure");
                }
                return new TestTaskResponse("Success on node " + node);
            }
        };
    }
    // Run task action on node tasks that are currently running
    // should be successful on all nodes except one
    TestTasksRequest testTasksRequest = new TestTasksRequest();
    testTasksRequest.setActions("testAction[n]"); // pick all test actions
    TestTasksResponse response = tasksActions[0].execute(testTasksRequest).get();
    // Get successful responses from all nodes except one
    assertEquals(testNodes.length - 1, response.tasks.size());
    assertEquals(1, response.getTaskFailures().size()); // one task failed
    assertThat(response.getTaskFailures().get(0).getReason(), containsString("Task level failure"));
    assertEquals(0, response.getNodeFailures().size()); // no nodes failed
    // Release all node tasks and wait for response
    checkLatch.countDown();
    NodesResponse responses = future.get();
    assertEquals(0, responses.failureCount());
}
/**
 * This test starts nodes actions that blocks on all nodes. While node actions are blocked in the middle of execution
 * it executes a tasks action that targets these blocked node actions. The test verifies that task actions are only
 * getting executed on nodes that are not listed in the node filter, and that filtered nodes contribute no responses.
 */
public void testTaskNodeFiltering() throws ExecutionException, InterruptedException, IOException {
    setupTestNodes(Settings.EMPTY);
    connectNodes(testNodes);
    CountDownLatch checkLatch = new CountDownLatch(1);
    // Start some test nodes action so we could have something to run tasks actions on
    ActionFuture<NodesResponse> future = startBlockingTestNodesAction(checkLatch);

    String[] allNodes = new String[testNodes.length];
    for (int i = 0; i < testNodes.length; i++) {
        allNodes[i] = testNodes[i].getNodeId();
    }
    // Random subset (possibly empty, possibly all nodes) to be filtered out.
    int filterNodesSize = randomInt(allNodes.length);
    Set<String> filterNodes = new HashSet<>(randomSubsetOf(filterNodesSize, allNodes));
    logger.info("Filtering out nodes {} size: {}", filterNodes, filterNodesSize);
    TestTasksAction[] tasksActions = new TestTasksAction[nodesCount];
    for (int i = 0; i < testNodes.length; i++) {
        final int node = i;
        // Simulate a task action that works on all nodes except nodes listed in filterNodes.
        // We are testing that it works.
        tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool,
            testNodes[i].clusterService, testNodes[i].transportService) {
            @Override
            protected String[] filterNodeIds(DiscoveryNodes nodes, String[] nodesIds) {
                // Apply the default filtering first, then drop the test's filtered nodes.
                String[] superNodes = super.filterNodeIds(nodes, nodesIds);
                List<String> filteredNodes = new ArrayList<>();
                for (String node : superNodes) {
                    if (filterNodes.contains(node) == false) {
                        filteredNodes.add(node);
                    }
                }
                return filteredNodes.toArray(new String[filteredNodes.size()]);
            }

            @Override
            protected TestTaskResponse taskOperation(TestTasksRequest request, Task task) {
                // Echo the responding node's id so the assertions below can
                // check which nodes actually executed the operation.
                return new TestTaskResponse(testNodes[node].getNodeId());
            }
        };
    }

    // Run task action on node tasks that are currently running
    // should be successful on all nodes except nodes that we filtered out
    TestTasksRequest testTasksRequest = new TestTasksRequest();
    testTasksRequest.setActions("testAction[n]"); // pick all test actions
    TestTasksResponse response = tasksActions[randomIntBetween(0, nodesCount - 1)].execute(testTasksRequest).get();

    // Get successful responses from all nodes except nodes that we filtered out
    assertEquals(testNodes.length - filterNodes.size(), response.tasks.size());
    assertEquals(0, response.getTaskFailures().size()); // no task failed
    assertEquals(0, response.getNodeFailures().size()); // no nodes failed

    // Make sure that filtered nodes didn't send any responses
    for (TestTaskResponse taskResponse : response.tasks) {
        String nodeId = taskResponse.getStatus();
        assertFalse("Found response from filtered node " + nodeId, filterNodes.contains(nodeId));
    }

    // Release all node tasks and wait for response
    checkLatch.countDown();
    NodesResponse responses = future.get();
    assertEquals(0, responses.failureCount());
}
/**
 * Verifies the two XContent grouping modes of {@code ListTasksResponse}:
 * "group_by=nodes" buckets tasks per node, while "group_by=parents" nests
 * child tasks under their parent. Uses the list-tasks action itself as the
 * workload (one coordinating task plus one child per node).
 */
@SuppressWarnings("unchecked")
public void testTasksToXContentGrouping() throws Exception {
    setupTestNodes(Settings.EMPTY);
    connectNodes(testNodes);
    // Get the parent task
    ListTasksRequest listTasksRequest = new ListTasksRequest();
    listTasksRequest.setActions(ListTasksAction.NAME + "*");
    ListTasksResponse response = testNodes[0].transportListTasksAction.execute(listTasksRequest).get();
    // One child task per node plus the coordinating (parent) task itself.
    assertEquals(testNodes.length + 1, response.getTasks().size());
    Map<String, Object> byNodes = serialize(response, new ToXContent.MapParams(Collections.singletonMap("group_by", "nodes")));
    byNodes = (Map<String, Object>) byNodes.get("nodes");
    // One bucket per node at the top level.
    assertEquals(testNodes.length, byNodes.size());
    Map<String, Object> firstNode = (Map<String, Object>) byNodes.get(testNodes[0].discoveryNode.getId());
    firstNode = (Map<String, Object>) firstNode.get("tasks");
    assertEquals(2, firstNode.size()); // two tasks for the first node
    for (int i = 1; i < testNodes.length; i++) {
        Map<String, Object> otherNode = (Map<String, Object>) byNodes.get(testNodes[i].discoveryNode.getId());
        otherNode = (Map<String, Object>) otherNode.get("tasks");
        assertEquals(1, otherNode.size()); // one tasks for the all other nodes
    }
    // Group by parents
    Map<String, Object> byParent = serialize(response, new ToXContent.MapParams(Collections.singletonMap("group_by", "parents")));
    byParent = (Map<String, Object>) byParent.get("tasks");
    // One element on the top level
    assertEquals(1, byParent.size()); // Only one top level task
    Map<String, Object> topTask = (Map<String, Object>) byParent.values().iterator().next();
    List<Object> children = (List<Object>) topTask.get("children");
    // One child under the parent for every node in the cluster.
    assertEquals(testNodes.length, children.size());
    for (int i = 0; i < testNodes.length; i++) {
        Map<String, Object> child = (Map<String, Object>) children.get(i);
        // Children are leaves — no further nesting.
        assertNull(child.get("children"));
    }
}
/**
 * Renders the given object as a JSON object (logging the raw JSON for
 * debugging) and parses it back into a nested {@code Map} so tests can make
 * structural assertions on it.
 */
private Map<String, Object> serialize(ToXContent response, ToXContent.Params params) throws IOException {
    final XContentBuilder json = XContentFactory.contentBuilder(XContentType.JSON);
    json.startObject();
    response.toXContent(json, params);
    json.endObject();
    json.flush();
    logger.info(json.string());
    return XContentHelper.convertToMap(json.bytes(), false).v2();
}
}
| apache-2.0 |
igor-sfdc/aura | aura-impl/src/main/java/org/auraframework/impl/javascript/controller/JavascriptControllerDef.java | 4470 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.impl.javascript.controller;
import static org.auraframework.instance.ValueProviderType.LABEL;
import java.io.IOException;
import java.util.Map;
import java.util.Set;
import org.auraframework.Aura;
import org.auraframework.def.ActionDef;
import org.auraframework.def.ControllerDef;
import org.auraframework.def.DefDescriptor;
import org.auraframework.expression.PropertyReference;
import org.auraframework.impl.system.DefinitionImpl;
import org.auraframework.impl.system.SubDefDescriptorImpl;
import org.auraframework.impl.util.AuraUtil;
import org.auraframework.instance.Action;
import org.auraframework.instance.GlobalValueProvider;
import org.auraframework.throwable.quickfix.DefinitionNotFoundException;
import org.auraframework.throwable.quickfix.QuickFixException;
import org.auraframework.util.json.Json;
import com.google.common.collect.Sets;
/**
 * Definition for client-side (Javascript) controllers. Holds an immutable map
 * of action name to {@link JavascriptActionDef} and knows how to serialize
 * itself for the client.
 */
public class JavascriptControllerDef extends DefinitionImpl<ControllerDef> implements ControllerDef {

    /**
     * Serialization version marker; bump when the serialized shape changes.
     */
    private static final long serialVersionUID = 133829572661899255L;

    // Immutable view of action name -> action def (built in the constructor).
    private final Map<String, JavascriptActionDef> actionMap;
    // Expressions referenced by the controller; scanned for $Label references.
    private final Set<PropertyReference> expressionRefs;

    protected JavascriptControllerDef(Builder builder) {
        super(builder);
        this.actionMap = AuraUtil.immutableMap(builder.actionDefs);
        this.expressionRefs = builder.expressionRefs;
    }

    /** Looks up a single action def by name; returns null if not present. */
    @Override
    public JavascriptActionDef getSubDefinition(String name) {
        return actionMap.get(name);
    }

    /** Writes this controller (descriptor plus all action defs) as a JSON map. */
    @Override
    public void serialize(Json json) throws IOException {
        json.writeMapBegin();
        json.writeMapEntry("descriptor", descriptor);
        json.writeMapEntry("actionDefs", actionMap.values());
        json.writeMapEnd();
    }

    @Override
    public Map<String, JavascriptActionDef> getActionDefs() {
        return actionMap;
    }

    /**
     * We cannot sensibly <em>run</em> Javascript actions at the server, but the objects
     * are sometimes created for bookkeeping. In particular, if a client-side action execution
     * fails, the failure is logged via ExceptionAdapter, which likes to have an action object,
     * including the action instance identifier in case that helps debugging.
     *
     * Note: {@code paramValues} is accepted for interface compatibility but is
     * not used by the pseudo action.
     *
     * @throws DefinitionNotFoundException if no action with the given name exists
     *
     * @return an Action for the given action name.
     */
    @Override
    public Action createAction(String actionName, Map<String, Object> paramValues) throws DefinitionNotFoundException {
        JavascriptActionDef actionDef = actionMap.get(actionName);
        if(actionDef == null){
            DefDescriptor<ActionDef> desc = SubDefDescriptorImpl.getInstance(actionName, getDescriptor(), ActionDef.class);
            throw new DefinitionNotFoundException(desc);
        }
        return new JavascriptPseudoAction(actionDef);
    }

    /** Property access resolves to the action def of the same name. */
    @Override
    public Object getValue(PropertyReference key) {
        return getSubDefinition(key.toString());
    }

    /**
     * Pre-fetches every $Label expression referenced by this controller via the
     * current context's label value provider, so labels are available client-side.
     */
    @Override
    public void retrieveLabels() throws QuickFixException {
        GlobalValueProvider labelProvider = Aura.getContextService().getCurrentContext().getGlobalProviders()
                .get(LABEL);
        for (PropertyReference e : expressionRefs) {
            if (e.getRoot().equals(LABEL.getPrefix())) {
                labelProvider.getValue(e.getStem());
            }
        }
    }

    /** Builder collecting action defs and expression refs before construction. */
    public static class Builder extends DefinitionImpl.BuilderImpl<ControllerDef> {

        public Builder() {
            super(ControllerDef.class);
        }

        public Map<String, JavascriptActionDef> actionDefs;
        public Set<PropertyReference> expressionRefs = Sets.newHashSet();

        @Override
        public JavascriptControllerDef build() {
            return new JavascriptControllerDef(this);
        }
    }
}
| apache-2.0 |
vbousquet/libgdx-jbullet | src/com/bulletphysics/collision/shapes/StaticPlaneShape.java | 4957 | /*
* Java port of Bullet (c) 2008 Martin Dvorak <jezek2@advel.cz>
*
* Bullet Continuous Collision Detection and Physics Library
* Copyright (c) 2003-2008 Erwin Coumans http://www.bulletphysics.com/
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from
* the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
package com.bulletphysics.collision.shapes;
import com.badlogic.gdx.math.Vector3;
import com.bulletphysics.collision.broadphase.BroadphaseNativeType;
import com.bulletphysics.linearmath.Transform;
import com.bulletphysics.linearmath.TransformUtil;
import com.bulletphysics.linearmath.VectorUtil;
import com.bulletphysics.util.Stack;
/** StaticPlaneShape simulates an infinite non-moving (static) collision plane.
 * The plane is defined by the equation {@code planeNormal . x = planeConstant}.
 *
 * @author jezek2 */
public class StaticPlaneShape extends ConcaveShape {

    protected final Vector3 localAabbMin = new Vector3();
    protected final Vector3 localAabbMax = new Vector3();

    // Unit normal of the plane (normalized in the constructor).
    protected final Vector3 planeNormal = new Vector3();
    // Signed distance of the plane from the origin along the normal.
    protected float planeConstant;
    protected final Vector3 localScaling = new Vector3(0f, 0f, 0f);

    public StaticPlaneShape (Vector3 planeNormal, float planeConstant) {
        this.planeNormal.set(planeNormal).nor();
        this.planeConstant = planeConstant;
    }

    public Vector3 getPlaneNormal (Vector3 out) {
        out.set(planeNormal);
        return out;
    }

    public float getPlaneConstant () {
        return planeConstant;
    }

    /**
     * Generates two triangles covering the projection of the query AABB onto
     * the plane and feeds them to the callback, as in the reference Bullet
     * implementation of btStaticPlaneShape::processAllTriangles.
     */
    @Override
    public void processAllTriangles (TriangleCallback callback, Vector3 aabbMin, Vector3 aabbMax) {
        Stack stack = Stack.enter();
        Vector3 tmp = stack.allocVector3();
        Vector3 tmp1 = stack.allocVector3();
        Vector3 tmp2 = stack.allocVector3();

        Vector3 halfExtents = stack.allocVector3();
        // BUGFIX: was halfExtents.set(aabbMax).set(aabbMin), which discarded
        // aabbMax entirely. Half extents are (aabbMax - aabbMin) * 0.5.
        halfExtents.set(aabbMax).sub(aabbMin);
        halfExtents.scl(0.5f);

        // Radius of a sphere bounding the query AABB; the generated quad is
        // sized to cover it.
        float radius = halfExtents.len();
        Vector3 center = stack.allocVector3();
        center.set(aabbMax).add(aabbMin);
        center.scl(0.5f);

        // this is where the triangles are generated, given AABB and plane equation (normal/constant)
        Vector3 tangentDir0 = stack.allocVector3(), tangentDir1 = stack.allocVector3();
        // tangentDir0/tangentDir1 can be precalculated
        TransformUtil.planeSpace1(planeNormal, tangentDir0, tangentDir1);

        Vector3 supVertex0 = stack.allocVector3(), supVertex1 = stack.allocVector3();

        // Project the AABB center onto the plane.
        Vector3 projectedCenter = stack.allocVector3();
        tmp.set(planeNormal).scl(planeNormal.dot(center) - planeConstant);
        projectedCenter.set(center).sub(tmp);

        Vector3[] triangle = new Vector3[] {stack.allocVector3(), stack.allocVector3(), stack.allocVector3()};

        // First triangle: (c+t0+t1, c+t0-t1, c-(t0-t1)) scaled by radius.
        tmp1.set(tangentDir0).scl(radius);
        tmp2.set(tangentDir1).scl(radius);
        VectorUtil.add(triangle[0], projectedCenter, tmp1, tmp2);

        tmp1.set(tangentDir0).scl(radius);
        tmp2.set(tangentDir1).scl(radius);
        tmp.set(tmp1).sub(tmp2);
        VectorUtil.add(triangle[1], projectedCenter, tmp);

        tmp1.set(tangentDir0).scl(radius);
        tmp2.set(tangentDir1).scl(radius);
        tmp.set(tmp1).sub(tmp2);
        triangle[2].set(projectedCenter).sub(tmp);

        callback.processTriangle(triangle, 0, 0);

        // Second triangle covering the other half of the quad.
        tmp1.set(tangentDir0).scl(radius);
        tmp2.set(tangentDir1).scl(radius);
        tmp.set(tmp1).sub(tmp2);
        triangle[0].set(projectedCenter).sub(tmp);

        tmp1.set(tangentDir0).scl(radius);
        tmp2.set(tangentDir1).scl(radius);
        tmp.set(tmp1).add(tmp2);
        triangle[1].set(projectedCenter).sub(tmp);

        tmp1.set(tangentDir0).scl(radius);
        tmp2.set(tangentDir1).scl(radius);
        VectorUtil.add(triangle[2], projectedCenter, tmp1, tmp2);

        callback.processTriangle(triangle, 0, 1);
        stack.leave();
    }

    /** An infinite plane has an effectively unbounded AABB. */
    @Override
    public void getAabb (Transform t, Vector3 aabbMin, Vector3 aabbMax) {
        aabbMin.set(-1e30f, -1e30f, -1e30f);
        aabbMax.set(1e30f, 1e30f, 1e30f);
    }

    @Override
    public BroadphaseNativeType getShapeType () {
        return BroadphaseNativeType.STATIC_PLANE_PROXYTYPE;
    }

    @Override
    public void setLocalScaling (Vector3 scaling) {
        localScaling.set(scaling);
    }

    @Override
    public Vector3 getLocalScaling (Vector3 out) {
        out.set(localScaling);
        return out;
    }

    @Override
    public void calculateLocalInertia (float mass, Vector3 inertia) {
        // moving concave objects not supported
        inertia.set(0f, 0f, 0f);
    }

    @Override
    public String getName () {
        return "STATICPLANE";
    }
}
| apache-2.0 |
google-code-export/google-api-dfp-java | src/com/google/api/ads/dfp/v201306/LineItemOperationError.java | 4414 | /**
* LineItemOperationError.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Apr 22, 2006 (06:55:48 PDT) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.v201306;
/**
 * Lists all errors for executing operations on line items.
 *
 * NOTE: this class was auto-generated by Apache Axis WSDL2Java (see the file
 * header); edit the WSDL and regenerate rather than hand-modifying it.
 */
public class LineItemOperationError extends com.google.api.ads.dfp.v201306.ApiError implements java.io.Serializable {

    /* The error reason represented by an enum. */
    private com.google.api.ads.dfp.v201306.LineItemOperationErrorReason reason;

    public LineItemOperationError() {
    }

    public LineItemOperationError(
           java.lang.String fieldPath,
           java.lang.String trigger,
           java.lang.String errorString,
           java.lang.String apiErrorType,
           com.google.api.ads.dfp.v201306.LineItemOperationErrorReason reason) {
        super(
            fieldPath,
            trigger,
            errorString,
            apiErrorType);
        this.reason = reason;
    }

    /**
     * Gets the reason value for this LineItemOperationError.
     *
     * @return reason * The error reason represented by an enum.
     */
    public com.google.api.ads.dfp.v201306.LineItemOperationErrorReason getReason() {
        return reason;
    }

    /**
     * Sets the reason value for this LineItemOperationError.
     *
     * @param reason * The error reason represented by an enum.
     */
    public void setReason(com.google.api.ads.dfp.v201306.LineItemOperationErrorReason reason) {
        this.reason = reason;
    }

    // Guard used by the generated equals() to break cycles in object graphs;
    // holds the object currently being compared.
    private java.lang.Object __equalsCalc = null;

    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof LineItemOperationError)) return false;
        LineItemOperationError other = (LineItemOperationError) obj;
        if (obj == null) return false;
        if (this == obj) return true;
        // If we are already comparing against obj (re-entrant call), treat the
        // pair as equal only if it is the same comparison in progress.
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) &&
            ((this.reason==null && other.getReason()==null) ||
             (this.reason!=null &&
              this.reason.equals(other.getReason())));
        __equalsCalc = null;
        return _equals;
    }

    // Guard used by the generated hashCode() to break cycles (mirrors __equalsCalc).
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getReason() != null) {
            _hashCode += getReason().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(LineItemOperationError.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "LineItemOperationError"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("reason");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "reason"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201306", "LineItemOperationError.Reason"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
| apache-2.0 |
FRedEnergy/bright-sunset | core/src/ru/redenergy/bs/screen/FinishScreen.java | 2457 | package ru.redenergy.bs.screen;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.*;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import ru.redenergy.bs.BrightSunsetGame;
/**
 * End-of-game screen showing the final score with "Main Menu" and "Restart"
 * buttons. UI is built lazily in {@link #show()}.
 */
public class FinishScreen implements Screen {

    private Stage stage;
    private final int score;
    private Skin skin;

    /**
     * @param score final score to display
     */
    public FinishScreen(int score) {
        this.score = score;
    }

    @Override
    public void show() {
        skin = new Skin(Gdx.files.internal("skin/uiskin.json"));
        stage = new Stage();
        // A Window is used purely as a full-screen container table.
        Table mainTable = new Window("", skin);
        mainTable.setBounds(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        mainTable.center();
        mainTable.add(new Label("You've scored " + score + " points!", skin.get("title", Label.LabelStyle.class)));
        mainTable.row().padTop(50F);
        TextButton toMain = new TextButton("Main Menu", skin);
        TextButton restart = new TextButton("Restart", skin);
        toMain.addListener(new ClickListener(){
            @Override
            public boolean touchDown(InputEvent event, float x, float y, int pointer, int button) {
                BrightSunsetGame.instance.setScreen(new MainMenuScreen());
                return true;
            }
        });
        restart.addListener(new ClickListener(){
            @Override
            public boolean touchDown(InputEvent event, float x, float y, int pointer, int button) {
                BrightSunsetGame.instance.setScreen(new GameScreen());
                return true;
            }
        });
        mainTable.add(toMain);
        mainTable.row().pad(5F);
        mainTable.add(restart);
        stage.addActor(mainTable);
        Gdx.input.setInputProcessor(stage);
    }

    @Override
    public void render(float delta) {
        Gdx.gl20.glClearColor(0, 0, 0, 1);
        Gdx.gl20.glClear(GL20.GL_COLOR_BUFFER_BIT);
        stage.draw();
    }

    @Override
    public void resize(int width, int height) {
    }

    @Override
    public void pause() {
    }

    @Override
    public void resume() {
    }

    @Override
    public void hide() {
    }

    @Override
    public void dispose() {
        // BUGFIX: Stage and Skin own native resources (vertex buffers, the
        // skin's texture atlas) and were previously leaked. Null checks guard
        // against dispose() being called before show() and against double
        // disposal.
        if (stage != null) {
            stage.dispose();
            stage = null;
        }
        if (skin != null) {
            skin.dispose();
            skin = null;
        }
    }
}
| apache-2.0 |
gentics/mesh | tests/tests-common/src/main/java/com/gentics/mesh/core/webroot/PathPrefixUtilTest.java | 1290 | package com.gentics.mesh.core.webroot;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import org.junit.Test;
import org.mockito.Mockito;
import com.gentics.mesh.core.data.branch.HibBranch;
/** Unit tests for {@code PathPrefixUtil} using a mocked {@code HibBranch}. */
public class PathPrefixUtilTest {

    /** Builds a mocked branch whose path prefix is the given value. */
    private static HibBranch branchWithPrefix(String prefix) {
        HibBranch mockBranch = Mockito.mock(HibBranch.class);
        when(mockBranch.getPathPrefix()).thenReturn(prefix);
        return mockBranch;
    }

    @Test
    public void testSanitize() {
        // Empty input and a lone slash both sanitize to the empty string;
        // anything else gains a leading slash and loses a trailing one.
        assertEquals("", PathPrefixUtil.sanitize(""));
        assertEquals("/bla", PathPrefixUtil.sanitize("bla"));
        assertEquals("/bla", PathPrefixUtil.sanitize("bla/"));
        assertEquals("", PathPrefixUtil.sanitize("/"));
    }

    @Test
    public void testStrip() {
        assertEquals("", PathPrefixUtil.strip(branchWithPrefix(""), ""));
        assertEquals("", PathPrefixUtil.strip(branchWithPrefix("abc"), ""));
        assertEquals("", PathPrefixUtil.strip(branchWithPrefix("abc"), "/abc"));
    }

    @Test
    public void testStartsWithPrefix() {
        assertFalse(PathPrefixUtil.startsWithPrefix(branchWithPrefix("cba"), "/abc"));
        assertTrue(PathPrefixUtil.startsWithPrefix(branchWithPrefix("abc"), "/abc"));
    }
}
| apache-2.0 |
jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/InvalidDasRestartPriorityForFtVm.java | 2225 |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for InvalidDasRestartPriorityForFtVm complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="InvalidDasRestartPriorityForFtVm">
* <complexContent>
* <extension base="{urn:vim25}InvalidArgument">
* <sequence>
* <element name="vm" type="{urn:vim25}ManagedObjectReference"/>
* <element name="vmName" type="{http://www.w3.org/2001/XMLSchema}string"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated fault type (see the header comment above); regenerate from
// the vSphere WSDL rather than editing by hand. Field order is fixed by
// propOrder to match the schema sequence.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "InvalidDasRestartPriorityForFtVm", propOrder = {
    "vm",
    "vmName"
})
public class InvalidDasRestartPriorityForFtVm
    extends InvalidArgument
{

    // Reference to the offending virtual machine (required by the schema).
    @XmlElement(required = true)
    protected ManagedObjectReference vm;
    // Display name of that virtual machine (required by the schema).
    @XmlElement(required = true)
    protected String vmName;

    /**
     * Gets the value of the vm property.
     *
     * @return
     *     possible object is
     *     {@link ManagedObjectReference }
     *
     */
    public ManagedObjectReference getVm() {
        return vm;
    }

    /**
     * Sets the value of the vm property.
     *
     * @param value
     *     allowed object is
     *     {@link ManagedObjectReference }
     *
     */
    public void setVm(ManagedObjectReference value) {
        this.vm = value;
    }

    /**
     * Gets the value of the vmName property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getVmName() {
        return vmName;
    }

    /**
     * Sets the value of the vmName property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setVmName(String value) {
        this.vmName = value;
    }

}
| apache-2.0 |
streamingpool/streamingpool-ext-analysis | src/demo/demo/analysis/DemoStreams.java | 464 | /**
* Copyright (c) 2016 European Organisation for Nuclear Research (CERN), All Rights Reserved.
*/
package demo.analysis;
import static cern.streaming.pool.core.testing.NamedStreamId.ofName;
import cern.streaming.pool.core.service.StreamId;
/** Stream ids used by the analysis demo. */
public class DemoStreams {
    // Named id "start a" — presumably triggers analysis "a"; confirm against the demo code.
    public static final StreamId<Integer> START_ANALYSIS_A = ofName("start a");
    // Named id "boolean interval" — presumably a periodically emitting boolean stream.
    public static final StreamId<Boolean> BOOLEAN_INTERVAL = ofName("boolean interval");
}
| apache-2.0 |
Comcast/Oscar | src/com/comcast/oscar/cablelabsdefinitions/Constants.java | 13139 | package com.comcast.oscar.cablelabsdefinitions;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* @bannerLicense
Copyright 2015 Comcast Cable Communications Management, LLC<br>
___________________________________________________________________<br>
Licensed under the Apache License, Version 2.0 (the "License")<br>
you may not use this file except in compliance with the License.<br>
You may obtain a copy of the License at<br>
http://www.apache.org/licenses/LICENSE-2.0<br>
Unless required by applicable law or agreed to in writing, software<br>
distributed under the License is distributed on an "AS IS" BASIS,<br>
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<br>
See the License for the specific language governing permissions and<br>
limitations under the License.<br>
* @author Maurice Garcia (maurice.garcia.2015@gmail.com)
*/
public class Constants {

    /** Pure constants holder; prevents instantiation. */
    private Constants() {
    }

    /** Sentinel: the TLV payload is variable length. */
    public static final Integer VARIABLE_LENGTH = -1;
    /** Sentinel: the TLV carries no length field. */
    public static final Integer NO_LENGTH = -2;
    /** Sentinel: the TLV is deprecated. */
    public static final Integer DEPRECATED_TLV = -100;
    /** Sentinel: no documented maximum for the TLV value. */
    public static final Integer NO_MAX_VALUE = -200;
    /** Sentinel: no documented minimum for the TLV value. */
    public static final Integer NO_MIN_VALUE = -201;

    // Payload-type discriminators used when decoding TLV values.
    public static final Integer STRING_TYPE = 0;
    public static final Integer INTEGER_TYPE = 1;
    public static final Integer TEXT_TYPE = 2;
    public static final Integer OID_TYPE = 3;
    public static final Integer BITS_TYPE = 4;

    // Whether a TLV contains nested sub-TLVs.
    public static final Integer SUB_TLV_NOT_INCLUDED = 0;
    public static final Integer SUB_TLV_INCLUDED = 1;

    /** Configuration-file type code: DOCSIS. */
    public static final Integer CONFIGURATION_FILE_TYPE_DOCSIS = 0;

    // Valid range of top-level DOCSIS TLV type codes.
    public static final Integer DOCSIS_TLV_MIN = 1;
    public static final Integer DOCSIS_TLV_MAX = 254;

    // Top-level DOCSIS configuration-file TLV type codes.
    public static final Integer PAD = 0;
    public static final Integer DS_FREQUENCY = 1;
    public static final Integer US_CHANN_ID = 2;
    public static final Integer NETWORK_ACC_CTRL_OBJ = 3;
    public static final Integer DOC_10_CLASS_OF_SERV = 4;
    public static final Integer MODEM_CAP = 5;
    public static final Integer CM_MIC = 6;
    public static final Integer CMTS_MIC = 7;
    public static final Integer VENDOR_ID_ENC = 8;
    public static final Integer SW_UPGRADE_FILENAME = 9;
    public static final Integer SNMP_WRITE_ACC_CTRL = 10;
    public static final Integer SNMP_MIB_OBJ = 11;
    public static final Integer MODEM_IP_ADDR = 12;
    public static final Integer SRV_NOT_AVA_RSP = 13;
    public static final Integer CPE_ETH_MAC_ADDR = 14;
    public static final Integer TELEPHONE_SETTINGS_OPTION = 15;
    // NOTE(review): type 16 is intentionally absent from this table — confirm against the spec.
    public static final Integer BPI = 17;
    public static final Integer MAX_CPES = 18;
    public static final Integer TFTP_SRV_TIMESTAMP = 19;
    public static final Integer TFTP_SRV_PROV_MODEM_IPV4_ADDR = 20;
    public static final Integer SW_UPGRADE_IPV4_TFTP_SRV = 21;
    public static final Integer US_PKT_CLASS = 22;
    public static final Integer DS_PKT_CLASS = 23;
    public static final Integer US_SERVICE_FLOW = 24;
    public static final Integer DS_SERVICE_FLOW = 25;
    public static final Integer PHS = 26;
    public static final Integer HMAC_DIGEST = 27;
    public static final Integer MAX_NUM_CLASS = 28;
    public static final Integer PRIVACY_ENABLE = 29;
    public static final Integer AUTHORIZATION_BLOCK = 30;
    public static final Integer KEY_SEQUENCE_NUMBER = 31;
    public static final Integer MANUFACTURER_CVC = 32;
    public static final Integer CO_SIGNER_CVC = 33;
    public static final Integer SNMPV3_KICKSTART_VALUE = 34;
    public static final Integer SUB_MGMT_CTRL = 35;
    public static final Integer SUB_MGMT_CPE_IPV4_LIST = 36;
    public static final Integer SUB_MGMT_FILTER_GROUPS = 37;
    public static final Integer SNMPV3_NOTIFICATION_RCVR = 38;
    public static final Integer ENABLE_20_MODE = 39;
    public static final Integer ENABLE_TEST_MODES = 40;
    public static final Integer DS_CHANN_LIST = 41;
    public static final Integer STATIC_MULTICAST_MAC_ADDRESS = 42;
    public static final Integer DOC_EXT_FIELD = 43;
    public static final Integer VENDOR_SPECIFIC_CAP = 44;
    public static final Integer DS_UNENCRYPTED_TRAFFIC_FLTR = 45;
    public static final Integer TRANSMIT_CHANN_CONFIG = 46;
    public static final Integer SERVICE_FLOW_SID_CLUSTER_ASSIGNMENT = 47;
    public static final Integer RECEIVE_CHANN_PROFILE = 48;
    public static final Integer RECEIVE_CHANN_CONFIG = 49;
    public static final Integer DSID_ENCS = 50;
    public static final Integer SECURITY_ASSOCIATION_ENC = 51;
    public static final Integer INITIALIZING_CHANN_TIMEOUT = 52;
    public static final Integer SNMPV1V2C_COEXIST = 53;
    public static final Integer SNMPV3_ACC_VIEW = 54;
    public static final Integer SNMP_CPE_ACC_CTRL = 55;
    public static final Integer CHANN_ASSIGNMENT = 56;
    public static final Integer CM_INIT_REASON = 57;
    public static final Integer SW_UPGRADE_IPV6_TFTP_SRV = 58;
    public static final Integer TFTP_SRV_PROVISIONED_MODEMIPV6_ADDRESS = 59;
    public static final Integer US_DROP_PKT_CLASS = 60;
    public static final Integer SUB_MGMT_CPE_IPV6_PREFIXLIST = 61;
    public static final Integer US_DROP_CLASSIFIER_GROUP_ID = 62;
    public static final Integer SUB_MGMT_CTRL_MAX_CPEIPV6_ADDRESSES = 63;
    public static final Integer CMTS_STATIC_MULTICAST_SESSION_ENC = 64;
    public static final Integer L2VPN_MAC_AGING_ENC = 65;
    public static final Integer MGT_EVENT_CTRL_ENC = 66;
    public static final Integer SUB_MGMT_CPE_IPV6_LIST = 67;
    public static final Integer DEFAULT_US_TARGET_BUFFER_CONFIG = 68;
    public static final Integer MAC_ADDR_LEARN_CTRL_ENC = 69;
    public static final Integer US_AGGREGATE_SERVICE_FLOW = 70;
    public static final Integer DS_AGGREGATE_SERVICEFLOW = 71;
    public static final Integer MESP = 72;
    public static final Integer NETWORK_TIMING_PROFILE = 73;
    public static final Integer ENERGY_MGT_PARA_ENC = 74;
    public static final Integer ENERGY_MGT_MODE_INDICATOR = 75;
    /** End-of-data marker TLV. */
    public static final Integer END_OF_FILE = 255;

    /**
     * D.2.1 CMTS MIC Calculation.
     * <p>
     * The CMTS MUST calculate a CMTS MIC Digest value on TLVs of the REG-REQ/REG-REQ-MP message and
     * compare it to the CMTS Message Integrity Check configuration setting in TLV7. If the Extended
     * CMTS MIC Encoding is present but does not include an Explicit E-MIC Digest subtype, the
     * Extended CMTS MIC digest is implicitly provided in the CMTS MIC Configuration Setting of TLV7.
     * If the Extended CMTS MIC Encoding provides an Explicit E-MIC Digest subtype, the CMTS
     * calculates both an Extended MIC Digest value and a "pre-3.0 DOCSIS" CMTS MIC digest value.
     * When the pre-3.0 DOCSIS CMTS MIC digest matches but the explicit Extended CMTS MIC does not,
     * the CMTS silently ignores TLVs marked as protected by the Extended CMTS MIC Bitmap that are
     * not in this pre-3.0 DOCSIS CMTS MIC TLV list.
     * <p>
     * This is the ordered list of configuration setting fields included in the MD5 digest when
     * present in the REG-REQ or REG-REQ-MP messages:
     * Downstream Frequency, Upstream Channel ID, Network Access, DOCSIS 1.0 Class of Service,
     * Baseline Privacy, DOCSIS Extension Field (including Extended CMTS MIC Params), CM MIC,
     * Maximum Number of CPEs, TFTP Server Timestamp, TFTP Server Provisioned Modem Address,
     * Upstream Packet Classification, Downstream Packet Classification, Upstream Service Flow,
     * Downstream Service Flow, Maximum Number of Classifiers, Privacy Enable,
     * Payload Header Suppression, Subscriber Management Control, Subscriber Management CPE IP
     * Table, Subscriber Management Filter Groups, Enable Test Modes.
     * <p>
     * The list is unmodifiable; attempts to mutate it throw {@link UnsupportedOperationException}.
     */
    public static final List<Integer> DOCSIS_CMTS_MIC_TLV_LIST =
            Collections.unmodifiableList(Arrays.asList(
                    DS_FREQUENCY,
                    US_CHANN_ID,
                    NETWORK_ACC_CTRL_OBJ,
                    DOC_10_CLASS_OF_SERV,
                    BPI,
                    DOC_EXT_FIELD,
                    CM_MIC,
                    MAX_CPES,
                    TFTP_SRV_TIMESTAMP,
                    TFTP_SRV_PROV_MODEM_IPV4_ADDR,
                    US_PKT_CLASS,
                    DS_PKT_CLASS,
                    US_SERVICE_FLOW,
                    DS_SERVICE_FLOW,
                    MAX_NUM_CLASS,
                    PRIVACY_ENABLE,
                    PHS,
                    SUB_MGMT_CTRL,
                    SUB_MGMT_CPE_IPV4_LIST,
                    SUB_MGMT_FILTER_GROUPS,
                    ENABLE_TEST_MODES));

    /**
     * D.1.2 Configuration File Settings.
     * <p>
     * The following configuration settings are mandatory in the configuration file and MUST be
     * supported by all CMs; a CM MUST NOT send a REG-REQ or REG-REQ-MP based on a configuration
     * file lacking them: Network Access, CM MIC, CMTS MIC, End Configuration Setting — plus either
     * the DOCSIS 1.0 Class of Service setting or the Upstream and Downstream Service Flow settings
     * (see the version-specific lists below).
     * <p>
     * The list is unmodifiable.
     */
    public static final List<Integer> DOCSIS_MIN_TLV =
            Collections.unmodifiableList(Arrays.asList(
                    NETWORK_ACC_CTRL_OBJ,
                    CM_MIC,
                    CMTS_MIC,
                    END_OF_FILE));

    /**
     * Additional mandatory TLVs for DOCSIS 1.0 or greater. Unmodifiable.
     */
    public static final List<Integer> DOCSIS_10_GTR_MIN_TLV =
            Collections.unmodifiableList(Arrays.asList(DOC_10_CLASS_OF_SERV));

    /**
     * Additional mandatory TLVs for DOCSIS 1.1 or greater. Unmodifiable.
     */
    public static final List<Integer> DOCSIS_11_GTR_MIN_TLV =
            Collections.unmodifiableList(Arrays.asList(US_SERVICE_FLOW, DS_SERVICE_FLOW));
}
| apache-2.0 |
alancnet/artifactory | base/config/src/main/java/org/artifactory/logging/version/v1/LogbackConfigSwapper.java | 2072 | /*
* Artifactory is a binaries repository manager.
* Copyright (C) 2012 JFrog Ltd.
*
* Artifactory is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Artifactory is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Artifactory. If not, see <http://www.gnu.org/licenses/>.
*/
package org.artifactory.logging.version.v1;
import org.artifactory.common.ArtifactoryHome;
import org.artifactory.util.XmlUtils;
import org.artifactory.version.converter.XmlConverter;
import org.jdom2.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
/**
* TO BE USED ONLY UP TO v210<p/> Early logback config "converter". Replaces the existing config to keep it up to date
* With different changes.
*
* @author Noam Tenne
*/
public class LogbackConfigSwapper implements XmlConverter {
private static final Logger log = LoggerFactory.getLogger(LogbackConfigSwapper.class);
/**
 * Replaces the content of the given logback configuration with the content of the latest
 * bundled default configuration, in place.
 *
 * @param doc Logback configuration document whose root element is swapped out
 */
@Override
public void convert(Document doc) {
    // Get the updated config shipped on the classpath
    InputStream newConfigFile =
            getClass().getResourceAsStream("/META-INF/default/" + ArtifactoryHome.LOGBACK_CONFIG_FILE_NAME);
    if (newConfigFile == null) {
        log.error("Replacement logback configuration file was not found in '/META-INF/default/'.");
        return;
    }
    try {
        // Swap the document's root for the root parsed from the packaged default config
        doc.detachRootElement();
        doc.setRootElement(XmlUtils.parse(newConfigFile).detachRootElement());
    } finally {
        // Fix: the resource stream was previously never closed (leak); close it regardless of parse outcome
        try {
            newConfigFile.close();
        } catch (IOException e) {
            log.warn("Failed to close replacement logback configuration stream.", e);
        }
    }
}
} | apache-2.0 |
MobileManAG/Project-H-Backend | src/main/java/com/mobileman/projecth/business/ConfigurationService.java | 1652 | /*******************************************************************************
* Copyright 2015 MobileMan GmbH
* www.mobileman.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
/**
* ConfigurationService.java
*
* Project: projecth
*
* @author mobileman
* @date 3.2.2011
* @version 1.0
*
* (c) 2010 MobileMan GmbH
*/
package com.mobileman.projecth.business;
/**
* @author mobileman
*
*/
public interface ConfigurationService {
/**
* @return root directory path of the directory containing projecth images
*/
String getImagesRootDirectoryPath();
/**
* @return sender e-mail address used by the message center
*         (nachrichten@projecth.com per the original note)
*/
String getMessageCenterSenderEmail();
/**
* @return receiver e-mail address for public contact requests
*         (kontakt@projecth.com per the original note)
*/
String getPublicContactReceiverEmail();
/**
* @return minimum allowed length of a user's password
*/
int getMinPasswordLength();
/**
* @return maximum allowed length of a user's password
*/
int getMaxPasswordLength();
/**
* @return minimum allowed length of a user's login
*/
int getMinLoginLength();
/**
* @return maximum allowed length of a user's login
*/
int getMaxLoginLength();
}
| apache-2.0 |
mrdon/AMPS | plugin-module-codegen-engine/src/test/java/com/atlassian/plugins/codegen/modules/common/web/WebPanelRendererTest.java | 2219 | package com.atlassian.plugins.codegen.modules.common.web;
import java.io.File;
import java.util.regex.Matcher;
import com.atlassian.plugins.codegen.AbstractCodegenTestCase;
import com.atlassian.plugins.codegen.modules.PluginModuleLocation;
import org.dom4j.Document;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* @since 3.6
*/
// Verifies that the web-panel-renderer module creator generates the expected source,
// test and plugin.xml artifacts, and that the generated module element is well-formed.
public class WebPanelRendererTest extends AbstractCodegenTestCase<WebPanelRendererProperties>
{
public static final String PACKAGE_NAME = "com.atlassian.plugins.web";
// Runs before each test: configures the creator, target locations and module properties,
// then generates the module once.
@Before
public void runGenerator() throws Exception
{
setCreator(new WebPanelRendererModuleCreator());
setModuleLocation(new PluginModuleLocation.Builder(srcDir)
.resourcesDirectory(resourcesDir)
.testDirectory(testDir)
.templateDirectory(templateDir)
.build());
setProps(new WebPanelRendererProperties(PACKAGE_NAME + ".MyWebPanelRenderer"));
props.setIncludeExamples(false);
creator.createModule(moduleLocation, props);
}
@Test
public void allFilesAreGenerated() throws Exception
{
// Convert the dotted package to a platform-specific path (quoteReplacement protects '\' on Windows)
String packagePath = PACKAGE_NAME.replaceAll("\\.", Matcher.quoteReplacement(File.separator));
assertTrue("main class not generated", new File(srcDir, packagePath + File.separator + "MyWebPanelRenderer.java").exists());
assertTrue("test class not generated", new File(testDir, packagePath + File.separator + "MyWebPanelRendererTest.java").exists());
assertTrue("plugin.xml not generated", new File(resourcesDir, "atlassian-plugin.xml").exists());
}
@Test
public void moduleIsValid() throws Exception
{
String xpath = "/atlassian-plugin/web-panel-renderer[@name='My Web Panel Renderer' and @key='my-web-panel-renderer' and @i18n-name-key='my-web-panel-renderer.name' and @class='" + PACKAGE_NAME + ".MyWebPanelRenderer']";
// NOTE(review): createModule was already invoked in runGenerator(); this second call looks
// redundant — confirm whether a duplicate invocation is intentional (e.g. idempotency check).
creator.createModule(moduleLocation, props);
Document pluginDoc = getXmlDocument(pluginXml);
assertNotNull("valid web-panel-renderer not found", pluginDoc.selectSingleNode(xpath));
}
}
| apache-2.0 |
elahrvivaz/geomesa | geomesa-convert/geomesa-convert-common/src/main/java/org/locationtech/geomesa/convert/ConverterConfigProvider.java | 651 | /***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert;
import com.typesafe.config.Config;
import java.util.Map;
// SPI for supplying converter configurations keyed by name.
public interface ConverterConfigProvider {
/**
* @return map of configuration name to its typesafe {@link Config}
*/
Map<String, Config> loadConfigs();
}
| apache-2.0 |
googleapis/java-compute | proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/DeleteDiskRequest.java | 42420 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for Disks.Delete. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.DeleteDiskRequest}
*/
public final class DeleteDiskRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.DeleteDiskRequest)
DeleteDiskRequestOrBuilder {
// NOTE(review): protoc-generated code — regenerate from the .proto rather than hand-editing.
private static final long serialVersionUID = 0L;
// Use DeleteDiskRequest.newBuilder() to construct.
private DeleteDiskRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg ctor: initializes every string field to "" (the proto3 default).
private DeleteDiskRequest() {
disk_ = "";
project_ = "";
requestId_ = "";
zone_ = "";
}
// Generated reflection hook used by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeleteDiskRequest();
}
// Exposes fields that were present on the wire but unknown to this generated schema.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor (generated). Each case value is the full tag:
// (field_number << 3) | wire_type, with wire_type 2 = length-delimited string.
private DeleteDiskRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 24669418:
// 3083677 << 3 | 2 -> disk
{
java.lang.String s = input.readStringRequireUtf8();
disk_ = s;
break;
}
case 29957474:
// 3744684 << 3 | 2 -> zone
{
java.lang.String s = input.readStringRequireUtf8();
zone_ = s;
break;
}
case 296879706:
// 37109963 << 3 | 2 -> request_id (optional: presence tracked in bitField0_)
{
java.lang.String s = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
requestId_ = s;
break;
}
case 1820481738:
// 227560217 << 3 | 2 -> project
{
java.lang.String s = input.readStringRequireUtf8();
project_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Generated descriptor accessors wired to the file-level descriptor in Compute.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_DeleteDiskRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_DeleteDiskRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.DeleteDiskRequest.class,
com.google.cloud.compute.v1.DeleteDiskRequest.Builder.class);
}
private int bitField0_;
public static final int DISK_FIELD_NUMBER = 3083677;
private volatile java.lang.Object disk_;
/**
*
*
* <pre>
* Name of the persistent disk to delete.
* </pre>
*
* <code>string disk = 3083677 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The disk.
*/
// Lazily decodes disk_ from ByteString to String and caches the decoded form.
@java.lang.Override
public java.lang.String getDisk() {
java.lang.Object ref = disk_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
disk_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the persistent disk to delete.
* </pre>
*
* <code>string disk = 3083677 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for disk.
*/
// Lazily encodes disk_ from String to UTF-8 ByteString and caches the encoded form.
@java.lang.Override
public com.google.protobuf.ByteString getDiskBytes() {
java.lang.Object ref = disk_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
disk_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
private volatile java.lang.Object project_;
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The project.
*/
// Lazily decodes project_ from ByteString to String and caches the decoded form.
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The bytes for project.
*/
// Lazily encodes project_ from String to UTF-8 ByteString and caches the encoded form.
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUEST_ID_FIELD_NUMBER = 37109963;
private volatile java.lang.Object requestId_;
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return Whether the requestId field is set.
*/
// request_id is an optional proto3 field; presence is tracked in bit 0 of bitField0_.
@java.lang.Override
public boolean hasRequestId() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The requestId.
*/
// Lazily decodes requestId_ from ByteString to String and caches the decoded form.
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The bytes for requestId.
*/
// Lazily encodes requestId_ from String to UTF-8 ByteString and caches the encoded form.
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ZONE_FIELD_NUMBER = 3744684;
private volatile java.lang.Object zone_;
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>
* string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
* </code>
*
* @return The zone.
*/
// Lazily decodes zone_ from ByteString to String and caches the decoded form.
@java.lang.Override
public java.lang.String getZone() {
java.lang.Object ref = zone_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
zone_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>
* string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
* </code>
*
* @return The bytes for zone.
*/
// Lazily encodes zone_ from String to UTF-8 ByteString and caches the encoded form.
@java.lang.Override
public com.google.protobuf.ByteString getZoneBytes() {
java.lang.Object ref = zone_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
zone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
// No required fields in this message, so initialization always succeeds and is cached.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in ascending field-number order: disk (3083677), zone (3744684),
// request_id (37109963, only when its presence bit is set), project (227560217).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(disk_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3083677, disk_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3744684, zone_);
}
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 37109963, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size; mirrors the field logic of writeTo.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(disk_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3083677, disk_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3744684, zone_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(37109963, requestId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all fields, including request_id presence and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.DeleteDiskRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.DeleteDiskRequest other =
(com.google.cloud.compute.v1.DeleteDiskRequest) obj;
if (!getDisk().equals(other.getDisk())) return false;
if (!getProject().equals(other.getProject())) return false;
if (hasRequestId() != other.hasRequestId()) return false;
if (hasRequestId()) {
if (!getRequestId().equals(other.getRequestId())) return false;
}
if (!getZone().equals(other.getZone())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash consistent with equals(): folds each field number and value.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DISK_FIELD_NUMBER;
hash = (53 * hash) + getDisk().hashCode();
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
if (hasRequestId()) {
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
}
hash = (37 * hash) + ZONE_FIELD_NUMBER;
hash = (53 * hash) + getZone().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
/**
 * Parses a {@code DeleteDiskRequest} from an already-open coded input stream,
 * resolving extensions against the supplied registry.
 */
public static com.google.cloud.compute.v1.DeleteDiskRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
/** Returns a fresh, empty builder for this message type. */
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
/** Creates a new builder initialized with default (empty) field values. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
/**
 * Creates a new builder pre-populated with all field values of {@code prototype}.
 *
 * @param prototype message whose fields seed the new builder
 * @return a builder carrying a copy of {@code prototype}'s state
 */
public static Builder newBuilder(com.google.cloud.compute.v1.DeleteDiskRequest prototype) {
  Builder builder = DEFAULT_INSTANCE.toBuilder();
  return builder.mergeFrom(prototype);
}
/**
 * Converts this message into a builder.
 *
 * <p>The shared default instance yields an empty builder; any other instance
 * yields a builder seeded with this message's field values.
 */
@java.lang.Override
public Builder toBuilder() {
  if (this == DEFAULT_INSTANCE) {
    return new Builder();
  }
  return new Builder().mergeFrom(this);
}
/**
 * Creates a builder attached to the given parent, so field changes can be
 * propagated back up the builder tree.
 */
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  return new Builder(parent);
}
/**
 * Builder for {@code google.cloud.compute.v1.DeleteDiskRequest}.
 *
 * <pre>
 * A request message for Disks.Delete. See the method description for details.
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.DeleteDiskRequest}
 *
 * <p>Generated code — do not edit by hand. Bit {@code 0x00000001} of
 * {@code bitField0_} tracks explicit presence of the optional
 * {@code request_id} field; the required string fields (disk, project, zone)
 * are proto3 singular strings and have no presence bit.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.DeleteDiskRequest)
    com.google.cloud.compute.v1.DeleteDiskRequestOrBuilder {

  /** Returns the protobuf descriptor for {@code DeleteDiskRequest}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_DeleteDiskRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_DeleteDiskRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.DeleteDiskRequest.class,
            com.google.cloud.compute.v1.DeleteDiskRequest.Builder.class);
  }

  // Construct using com.google.cloud.compute.v1.DeleteDiskRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  /** No-op here: this message has no message-typed or repeated fields that need eager builders. */
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
  }

  /** Resets every field to its default and clears the request_id presence bit. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    disk_ = "";

    project_ = "";

    requestId_ = "";
    bitField0_ = (bitField0_ & ~0x00000001);
    zone_ = "";

    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_DeleteDiskRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.DeleteDiskRequest getDefaultInstanceForType() {
    return com.google.cloud.compute.v1.DeleteDiskRequest.getDefaultInstance();
  }

  /** Builds the message, throwing if it is not fully initialized. */
  @java.lang.Override
  public com.google.cloud.compute.v1.DeleteDiskRequest build() {
    com.google.cloud.compute.v1.DeleteDiskRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  /** Builds the message without an initialization check, copying the request_id presence bit. */
  @java.lang.Override
  public com.google.cloud.compute.v1.DeleteDiskRequest buildPartial() {
    com.google.cloud.compute.v1.DeleteDiskRequest result =
        new com.google.cloud.compute.v1.DeleteDiskRequest(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    result.disk_ = disk_;
    result.project_ = project_;
    // Transfer the presence bit for the optional request_id field.
    if (((from_bitField0_ & 0x00000001) != 0)) {
      to_bitField0_ |= 0x00000001;
    }
    result.requestId_ = requestId_;
    result.zone_ = zone_;
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  /** Dispatches to the typed merge when {@code other} is a {@code DeleteDiskRequest}. */
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.compute.v1.DeleteDiskRequest) {
      return mergeFrom((com.google.cloud.compute.v1.DeleteDiskRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /**
   * Merges {@code other} into this builder: non-empty strings overwrite, and
   * request_id is taken only when explicitly set on {@code other}.
   */
  public Builder mergeFrom(com.google.cloud.compute.v1.DeleteDiskRequest other) {
    if (other == com.google.cloud.compute.v1.DeleteDiskRequest.getDefaultInstance()) return this;
    if (!other.getDisk().isEmpty()) {
      disk_ = other.disk_;
      onChanged();
    }
    if (!other.getProject().isEmpty()) {
      project_ = other.project_;
      onChanged();
    }
    if (other.hasRequestId()) {
      bitField0_ |= 0x00000001;
      requestId_ = other.requestId_;
      onChanged();
    }
    if (!other.getZone().isEmpty()) {
      zone_ = other.zone_;
      onChanged();
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  /** Always true: this message has no required proto2-style fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  /**
   * Parses from the stream and merges the result into this builder. On a parse
   * failure, whatever was decoded before the failure is still merged (see the
   * {@code finally} block) and the underlying IOException is rethrown.
   */
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.cloud.compute.v1.DeleteDiskRequest parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (com.google.cloud.compute.v1.DeleteDiskRequest) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // Bit 0x00000001 records whether request_id was explicitly set.
  private int bitField0_;

  // Stored as String or ByteString; lazily converted on access (standard protobuf idiom).
  private java.lang.Object disk_ = "";
  /**
   * <pre>
   * Name of the persistent disk to delete.
   * </pre>
   *
   * <code>string disk = 3083677 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The disk.
   */
  public java.lang.String getDisk() {
    java.lang.Object ref = disk_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String for subsequent calls.
      disk_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <pre>
   * Name of the persistent disk to delete.
   * </pre>
   *
   * <code>string disk = 3083677 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for disk.
   */
  public com.google.protobuf.ByteString getDiskBytes() {
    java.lang.Object ref = disk_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the ByteString form for subsequent calls.
      disk_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <pre>
   * Name of the persistent disk to delete.
   * </pre>
   *
   * <code>string disk = 3083677 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The disk to set.
   * @return This builder for chaining.
   */
  public Builder setDisk(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }

    disk_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Name of the persistent disk to delete.
   * </pre>
   *
   * <code>string disk = 3083677 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearDisk() {
    disk_ = getDefaultInstance().getDisk();
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Name of the persistent disk to delete.
   * </pre>
   *
   * <code>string disk = 3083677 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for disk to set.
   * @return This builder for chaining.
   */
  public Builder setDiskBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);

    disk_ = value;
    onChanged();
    return this;
  }

  private java.lang.Object project_ = "";
  /**
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>
   * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
   * </code>
   *
   * @return The project.
   */
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>
   * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
   * </code>
   *
   * @return The bytes for project.
   */
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>
   * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
   * </code>
   *
   * @param value The project to set.
   * @return This builder for chaining.
   */
  public Builder setProject(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }

    project_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>
   * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearProject() {
    project_ = getDefaultInstance().getProject();
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>
   * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
   * </code>
   *
   * @param value The bytes for project to set.
   * @return This builder for chaining.
   */
  public Builder setProjectBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);

    project_ = value;
    onChanged();
    return this;
  }

  private java.lang.Object requestId_ = "";
  /**
   * <pre>
   * An optional request ID used as an idempotency token: retries carrying the
   * same ID are ignored by the server if the original request already
   * completed. Must be a valid UUID, and the zero UUID
   * (00000000-0000-0000-0000-000000000000) is not supported.
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @return Whether the requestId field is set.
   */
  public boolean hasRequestId() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * Optional idempotency token for retries; see {@link #hasRequestId()}.
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @return The requestId.
   */
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <pre>
   * Optional idempotency token for retries; see {@link #hasRequestId()}.
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @return The bytes for requestId.
   */
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <pre>
   * Optional idempotency token for retries; see {@link #hasRequestId()}.
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @param value The requestId to set.
   * @return This builder for chaining.
   */
  public Builder setRequestId(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    // Mark the optional field as explicitly present.
    bitField0_ |= 0x00000001;
    requestId_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Optional idempotency token for retries; see {@link #hasRequestId()}.
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearRequestId() {
    // Drop the presence bit along with the value.
    bitField0_ = (bitField0_ & ~0x00000001);
    requestId_ = getDefaultInstance().getRequestId();
    onChanged();
    return this;
  }
  /**
   * <pre>
   * Optional idempotency token for retries; see {@link #hasRequestId()}.
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @param value The bytes for requestId to set.
   * @return This builder for chaining.
   */
  public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    bitField0_ |= 0x00000001;
    requestId_ = value;
    onChanged();
    return this;
  }

  private java.lang.Object zone_ = "";
  /**
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>
   * string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
   * </code>
   *
   * @return The zone.
   */
  public java.lang.String getZone() {
    java.lang.Object ref = zone_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      zone_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>
   * string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
   * </code>
   *
   * @return The bytes for zone.
   */
  public com.google.protobuf.ByteString getZoneBytes() {
    java.lang.Object ref = zone_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      zone_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>
   * string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
   * </code>
   *
   * @param value The zone to set.
   * @return This builder for chaining.
   */
  public Builder setZone(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }

    zone_ = value;
    onChanged();
    return this;
  }
  /**
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>
   * string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearZone() {
    zone_ = getDefaultInstance().getZone();
    onChanged();
    return this;
  }
  /**
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>
   * string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
   * </code>
   *
   * @param value The bytes for zone to set.
   * @return This builder for chaining.
   */
  public Builder setZoneBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);

    zone_ = value;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.DeleteDiskRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.DeleteDiskRequest)
// Shared singleton default instance; protobuf messages are immutable, so a
// single empty instance can be handed to every caller.
private static final com.google.cloud.compute.v1.DeleteDiskRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.DeleteDiskRequest();
}
/** Returns the shared immutable default (all-fields-empty) instance. */
public static com.google.cloud.compute.v1.DeleteDiskRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser backing every static parseFrom/parseDelimitedFrom overload above.
private static final com.google.protobuf.Parser<DeleteDiskRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteDiskRequest>() {
      @java.lang.Override
      public DeleteDiskRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // The stream-based constructor performs the actual field decoding.
        return new DeleteDiskRequest(input, extensionRegistry);
      }
    };
/** Returns the static parser for {@code DeleteDiskRequest}. */
public static com.google.protobuf.Parser<DeleteDiskRequest> parser() {
  return PARSER;
}
/** Returns the parser for this message type (same instance as {@link #parser()}). */
@java.lang.Override
public com.google.protobuf.Parser<DeleteDiskRequest> getParserForType() {
  return PARSER;
}
/** Returns the shared default instance (same as {@link #getDefaultInstance()}). */
@java.lang.Override
public com.google.cloud.compute.v1.DeleteDiskRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| apache-2.0 |
zimmermatt/flink | flink-contrib/flink-statebackend-rocksdb/src/test/java/org/apache/flink/contrib/streaming/state/RocksDBAsyncSnapshotTest.java | 16165 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.contrib.streaming.state;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.common.typeutils.base.VoidSerializer;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState;
import org.apache.flink.runtime.checkpoint.TaskStateSnapshot;
import org.apache.flink.runtime.execution.CancelTaskException;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.operators.testutils.DummyEnvironment;
import org.apache.flink.runtime.operators.testutils.MockInputSplitProvider;
import org.apache.flink.runtime.state.AbstractKeyedStateBackend;
import org.apache.flink.runtime.state.AbstractStateBackend;
import org.apache.flink.runtime.state.CheckpointStreamFactory;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.KeyedStateHandle;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.runtime.state.memory.MemCheckpointStreamFactory;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;
import org.apache.flink.runtime.util.BlockerCheckpointStreamFactory;
import org.apache.flink.streaming.api.graph.StreamConfig;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTask;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness;
import org.apache.flink.streaming.runtime.tasks.StreamMockEnvironment;
import org.apache.flink.streaming.runtime.tasks.StreamTask;
import org.apache.flink.util.FutureUtil;
import org.apache.flink.util.IOUtils;
import org.apache.flink.util.TestLogger;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.modules.junit4.PowerMockRunner;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
/**
* Tests for asynchronous RocksDB Key/Value state checkpoints.
*/
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"javax.management.*", "com.sun.jndi.*", "org.apache.log4j.*"})
@SuppressWarnings("serial")
public class RocksDBAsyncSnapshotTest extends TestLogger {
/**
 * This ensures that asynchronous state handles are actually materialized asynchronously.
 *
 * <p>We use latches to block at various stages and see if the code still continues through
 * the parts that are not asynchronous. If the checkpoint is not done asynchronously the
 * test will simply lock forever.
 */
@Test
public void testFullyAsyncSnapshot() throws Exception {

	final OneInputStreamTask<String, String> task = new OneInputStreamTask<>();

	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(task, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	// key by the element itself so the operator runs on a keyed (RocksDB) state backend
	testHarness.configureForKeyedStream(new KeySelector<String, String>() {
		@Override
		public String getKey(String value) throws Exception {
			return value;
		}
	}, BasicTypeInfo.STRING_TYPE_INFO);

	StreamConfig streamConfig = testHarness.getStreamConfig();

	// local RocksDB working directory; checkpoint data goes to the wrapped MemoryStateBackend
	File dbDir = new File(new File(ConfigConstants.DEFAULT_TASK_MANAGER_TMP_PATH, UUID.randomUUID().toString()), "state");

	RocksDBStateBackend backend = new RocksDBStateBackend(new MemoryStateBackend());
	backend.setDbStoragePath(dbDir.getAbsolutePath());

	streamConfig.setStateBackend(backend);

	streamConfig.setStreamOperator(new AsyncCheckpointOperator());
	streamConfig.setOperatorID(new OperatorID());

	// delayCheckpointLatch holds the acknowledge callback back until the main thread allows it;
	// ensureCheckpointLatch signals that the checkpoint was fully acknowledged
	final OneShotLatch delayCheckpointLatch = new OneShotLatch();
	final OneShotLatch ensureCheckpointLatch = new OneShotLatch();

	StreamMockEnvironment mockEnv = new StreamMockEnvironment(
		testHarness.jobConfig,
		testHarness.taskConfig,
		testHarness.memorySize,
		new MockInputSplitProvider(),
		testHarness.bufferSize) {

		// intercept the acknowledgement so we can observe when the async snapshot finishes
		@Override
		public void acknowledgeCheckpoint(
			long checkpointId,
			CheckpointMetrics checkpointMetrics,
			TaskStateSnapshot checkpointStateHandles) {

			super.acknowledgeCheckpoint(checkpointId, checkpointMetrics);

			// block on the latch, to verify that triggerCheckpoint returns below,
			// even though the async checkpoint would not finish
			try {
				delayCheckpointLatch.await();
			} catch (InterruptedException e) {
				throw new RuntimeException(e);
			}

			boolean hasManagedKeyedState = false;
			for (Map.Entry<OperatorID, OperatorSubtaskState> entry : checkpointStateHandles.getSubtaskStateMappings()) {
				OperatorSubtaskState state = entry.getValue();
				if (state != null) {
					hasManagedKeyedState |= state.getManagedKeyedState() != null;
				}
			}

			// should be one k/v state
			assertTrue(hasManagedKeyedState);

			// we now know that the checkpoint went through
			ensureCheckpointLatch.trigger();
		}
	};

	testHarness.invoke(mockEnv);

	// wait for the task to be running; the private "isRunning" flag is polled via reflection
	for (Field field: StreamTask.class.getDeclaredFields()) {
		if (field.getName().equals("isRunning")) {
			field.setAccessible(true);
			while (!field.getBoolean(task)) {
				Thread.sleep(10);
			}
		}
	}

	// trigger the checkpoint; this call must return even while the snapshot is still blocked
	task.triggerCheckpoint(new CheckpointMetaData(42, 17), CheckpointOptions.forCheckpoint());

	testHarness.processElement(new StreamRecord<>("Wohoo", 0));

	// now we allow the checkpoint
	delayCheckpointLatch.trigger();

	// wait for the checkpoint to go through
	ensureCheckpointLatch.await();

	testHarness.endInput();

	// the async snapshot runs on this pool; a clean, bounded shutdown proves it completed
	ExecutorService threadPool = task.getAsyncOperationsThreadPool();
	threadPool.shutdown();
	Assert.assertTrue(threadPool.awaitTermination(60_000, TimeUnit.MILLISECONDS));

	testHarness.waitForTaskCompletion();
	if (mockEnv.wasFailedExternally()) {
		fail("Unexpected exception during execution.");
	}
}
/**
 * This tests ensures that canceling of asynchronous snapshots works as expected and does not block.
 *
 * <p>The checkpoint stream is blocked via {@link BlockerCheckpointStreamFactory}; the task is
 * then canceled while the async snapshot is stuck, and the test verifies that the snapshot
 * stream is closed and the task terminates with a {@link CancelTaskException}.
 */
@Test
public void testCancelFullyAsyncCheckpoints() throws Exception {
	final OneInputStreamTask<String, String> task = new OneInputStreamTask<>();

	final OneInputStreamTaskTestHarness<String, String> testHarness = new OneInputStreamTaskTestHarness<>(task, BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);

	testHarness.setupOutputForSingletonOperatorChain();

	// key by the element itself so the operator runs on a keyed (RocksDB) state backend
	testHarness.configureForKeyedStream(new KeySelector<String, String>() {
		@Override
		public String getKey(String value) throws Exception {
			return value;
		}
	}, BasicTypeInfo.STRING_TYPE_INFO);

	StreamConfig streamConfig = testHarness.getStreamConfig();

	File dbDir = new File(new File(ConfigConstants.DEFAULT_TASK_MANAGER_TMP_PATH, UUID.randomUUID().toString()), "state");

	BlockingStreamMemoryStateBackend memoryStateBackend = new BlockingStreamMemoryStateBackend();

	BlockerCheckpointStreamFactory blockerCheckpointStreamFactory =
		new BlockerCheckpointStreamFactory(4 * 1024 * 1024) {

			int count = 1;

			@Override
			public MemCheckpointStreamFactory.MemoryCheckpointOutputStream createCheckpointStateOutputStream(
				long checkpointID,
				long timestamp) throws Exception {

				// we skip the first created stream, because it is used to checkpoint the timer service, which is
				// currently not asynchronous.
				if (count > 0) {
					--count;
					return new MemCheckpointStreamFactory.MemoryCheckpointOutputStream(maxSize);
				} else {
					return super.createCheckpointStateOutputStream(checkpointID, timestamp);
				}
			}
		};

	// make the blocking factory visible to the state backend used by the task
	BlockingStreamMemoryStateBackend.blockerCheckpointStreamFactory = blockerCheckpointStreamFactory;

	RocksDBStateBackend backend = new RocksDBStateBackend(memoryStateBackend);
	backend.setDbStoragePath(dbDir.getAbsolutePath());

	streamConfig.setStateBackend(backend);

	streamConfig.setStreamOperator(new AsyncCheckpointOperator());
	streamConfig.setOperatorID(new OperatorID());

	StreamMockEnvironment mockEnv = new StreamMockEnvironment(
		testHarness.jobConfig,
		testHarness.taskConfig,
		testHarness.memorySize,
		new MockInputSplitProvider(),
		testHarness.bufferSize);

	// blocker latch stalls the snapshot stream; waiter latch tells us the snapshot reached the block
	blockerCheckpointStreamFactory.setBlockerLatch(new OneShotLatch());
	blockerCheckpointStreamFactory.setWaiterLatch(new OneShotLatch());

	testHarness.invoke(mockEnv);

	// wait for the task to be running; the private "isRunning" flag is polled via reflection
	for (Field field: StreamTask.class.getDeclaredFields()) {
		if (field.getName().equals("isRunning")) {
			field.setAccessible(true);
			while (!field.getBoolean(task)) {
				Thread.sleep(10);
			}
		}
	}

	task.triggerCheckpoint(
		new CheckpointMetaData(42, 17),
		CheckpointOptions.forCheckpoint());

	testHarness.processElement(new StreamRecord<>("Wohoo", 0));

	// wait until the async snapshot is blocked inside the checkpoint stream, then cancel
	blockerCheckpointStreamFactory.getWaiterLatch().await();
	task.cancel();

	// unblock the snapshot so cancellation can proceed
	blockerCheckpointStreamFactory.getBlockerLatch().trigger();
	testHarness.endInput();

	// cancellation must have closed the in-flight snapshot stream
	Assert.assertTrue(blockerCheckpointStreamFactory.getLastCreatedStream().isClosed());

	try {
		ExecutorService threadPool = task.getAsyncOperationsThreadPool();
		threadPool.shutdown();
		Assert.assertTrue(threadPool.awaitTermination(60_000, TimeUnit.MILLISECONDS));
		testHarness.waitForTaskCompletion();

		fail("Operation completed. Cancel failed.");
	} catch (Exception expected) {

		Throwable cause = expected.getCause();

		// the only acceptable outcome is termination through cancellation
		if (!(cause instanceof CancelTaskException)) {
			fail("Unexpected exception: " + expected);
		}
	}
}
/**
 * Test that the snapshot files are cleaned up in case of a failure during the snapshot
 * procedure.
 *
 * <p>The checkpoint output stream is mocked to throw on every write; the test asserts that
 * the snapshot future fails with exactly that exception and that the stream is closed.
 */
@Test
public void testCleanupOfSnapshotsInFailureCase() throws Exception {
	long checkpointId = 1L;
	long timestamp = 42L;

	Environment env = new DummyEnvironment("test task", 1, 0);

	CheckpointStreamFactory.CheckpointStateOutputStream outputStream = mock(CheckpointStreamFactory.CheckpointStateOutputStream.class);
	CheckpointStreamFactory checkpointStreamFactory = mock(CheckpointStreamFactory.class);
	AbstractStateBackend stateBackend = mock(AbstractStateBackend.class);

	final IOException testException = new IOException("Test exception");

	// every write to the checkpoint stream fails with testException
	doReturn(checkpointStreamFactory).when(stateBackend).createStreamFactory(any(JobID.class), anyString());
	doThrow(testException).when(outputStream).write(anyInt());
	doReturn(outputStream).when(checkpointStreamFactory).createCheckpointStateOutputStream(eq(checkpointId), eq(timestamp));

	RocksDBStateBackend backend = new RocksDBStateBackend(stateBackend);

	backend.setDbStoragePath("file:///tmp/foobar");

	AbstractKeyedStateBackend<Void> keyedStateBackend = backend.createKeyedStateBackend(
		env,
		new JobID(),
		"test operator",
		VoidSerializer.INSTANCE,
		1,
		new KeyGroupRange(0, 0),
		null);

	try {
		keyedStateBackend.restore(null);

		// register a state so that the state backend has to checkpoint something
		keyedStateBackend.getPartitionedState(
			"namespace",
			StringSerializer.INSTANCE,
			new ValueStateDescriptor<>("foobar", String.class));

		RunnableFuture<KeyedStateHandle> snapshotFuture = keyedStateBackend.snapshot(
			checkpointId, timestamp, checkpointStreamFactory, CheckpointOptions.forCheckpoint());

		try {
			FutureUtil.runIfNotDoneAndGet(snapshotFuture);
			fail("Expected an exception to be thrown here.");
		} catch (ExecutionException e) {
			// the write failure must surface unchanged as the snapshot failure cause
			Assert.assertEquals(testException, e.getCause());
		}

		// the failing stream must have been closed during cleanup
		verify(outputStream).close();
	} finally {
		IOUtils.closeQuietly(keyedStateBackend);
		keyedStateBackend.dispose();
	}
}
@Test
public void testConsistentSnapshotSerializationFlagsAndMasks() {
    // These constants are part of the on-disk snapshot format and must never change,
    // otherwise previously written snapshots would become unreadable.
    Assert.assertEquals(0xFFFF, RocksDBKeyedStateBackend.RocksDBFullSnapshotOperation.END_OF_KEY_GROUP_MARK);
    Assert.assertEquals(0x80, RocksDBKeyedStateBackend.RocksDBFullSnapshotOperation.FIRST_BIT_IN_BYTE_MASK);

    final byte[] originalKey = new byte[] {42, 42};
    final byte[] workingKey = originalKey.clone();

    // Setting and then clearing the meta-data-follows flag must round-trip cleanly:
    // not set initially, observable after set, gone after clear.
    Assert.assertFalse(
        RocksDBKeyedStateBackend.RocksDBFullSnapshotOperation.hasMetaDataFollowsFlag(workingKey));
    RocksDBKeyedStateBackend.RocksDBFullSnapshotOperation.setMetaDataFollowsFlagInKey(workingKey);
    Assert.assertTrue(RocksDBKeyedStateBackend.RocksDBFullSnapshotOperation.hasMetaDataFollowsFlag(workingKey));
    RocksDBKeyedStateBackend.RocksDBFullSnapshotOperation.clearMetaDataFollowsFlag(workingKey);
    Assert.assertFalse(
        RocksDBKeyedStateBackend.RocksDBFullSnapshotOperation.hasMetaDataFollowsFlag(workingKey));

    // ...and the key bytes must end up exactly as they started.
    Assert.assertTrue(Arrays.equals(originalKey, workingKey));
}
// ------------------------------------------------------------------------
/**
 * Creates us a CheckpointStateOutputStream that blocks write ops on a latch to delay writing of snapshots.
 */
static class BlockingStreamMemoryStateBackend extends MemoryStateBackend {

    // Test hook: the test assigns this before the task starts; every instance of this
    // backend then hands out the same blocking factory. Volatile so the assignment is
    // visible to the task thread.
    public static volatile BlockerCheckpointStreamFactory blockerCheckpointStreamFactory = null;

    /** Ignores the job/operator arguments and returns the statically configured blocking factory. */
    @Override
    public CheckpointStreamFactory createStreamFactory(JobID jobId, String operatorIdentifier) throws IOException {
        return blockerCheckpointStreamFactory;
    }
}
/**
 * Simple pass-through operator that registers a keyed {@code ValueState} and writes
 * each incoming element into it, so that a checkpoint has keyed state to snapshot.
 */
private static class AsyncCheckpointOperator
    extends AbstractStreamOperator<String>
    implements OneInputStreamOperator<String, String> {

    @Override
    public void open() throws Exception {
        super.open();
        // also get the state in open, this way we are sure that it was created before
        // we trigger the test checkpoint
        ValueState<String> state = getPartitionedState(
            VoidNamespace.INSTANCE,
            VoidNamespaceSerializer.INSTANCE,
            new ValueStateDescriptor<>("count", StringSerializer.INSTANCE));
    }

    @Override
    public void processElement(StreamRecord<String> element) throws Exception {
        // we also don't care
        ValueState<String> state = getPartitionedState(
            VoidNamespace.INSTANCE,
            VoidNamespaceSerializer.INSTANCE,
            new ValueStateDescriptor<>("count", StringSerializer.INSTANCE));
        // Store the element value so the checkpoint has something non-trivial to write.
        state.update(element.getValue());
    }
}
}
| apache-2.0 |
leapframework/framework | base/core/src/test/java/tests/core/ioc/BeanProxyTest.java | 2385 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tests.core.ioc;
import leap.core.annotation.Inject;
import org.junit.Test;
import tested.beans.proxy.TAddiType;
import tested.beans.proxy.TBeanProxy;
import tested.beans.proxy.TBeanType;
import tested.beans.proxy.TBeanType1;
import tests.core.CoreTestCase;
/**
 * Tests that bean proxying works for beans resolved by primary type, by id,
 * by name, and through an additional registered type.
 */
public class BeanProxyTest extends CoreTestCase {

    // primary (injected by type only, no qualifier)
    private @Inject TBeanType bean;

    // injected by bean id
    private @Inject(id = "testProxyBean1") TBeanType idBean1;
    private @Inject(id = "testProxyBean2") TBeanType idBean2;

    // injected by bean name (same names resolved against two different types)
    private @Inject(name = "bean1") TBeanType nameBean1;
    private @Inject(name = "bean2") TBeanType nameBean2;
    private @Inject(name = "bean1") TBeanType1 nameBean11;
    private @Inject(name = "bean2") TBeanType1 nameBean12;

    // additional type registered for the proxied bean
    private @Inject TAddiType addiBean;
    private @Inject TAddiType[] addiBeans;

    /** The primary bean must be wrapped in a proxy that still exposes its target. */
    @Test
    public void testPrimaryBeanProxy() {
        assertEquals("proxy", bean.getTestValue());
        TBeanProxy proxy = (TBeanProxy) bean;
        assertNotNull(proxy.getTargetBean());
    }

    /** Only the bean configured for proxying ("testProxyBean2") is proxied; the other stays raw. */
    @Test
    public void testIdentifiedBeanProxy() {
        assertEquals("impl", idBean1.getTestValue());
        assertEquals("proxy", idBean2.getTestValue());
    }

    /** Proxying by name: "bean1" is proxied, "bean2" is the plain implementation. */
    @Test
    public void testNamedBeanProxy() {
        assertEquals("proxy", nameBean1.getTestValue());
        assertEquals("impl", nameBean2.getTestValue());
    }

    /** Type-based proxying affects the invocation count observed through TBeanType1. */
    @Test
    public void testTypedBeanProxy() {
        assertEquals(2, nameBean11.getCount());
        assertEquals(1, nameBean12.getCount());
    }

    /** The additional type resolves to the very same proxied instance as the primary bean. */
    @Test
    public void testProxyWithAdditionalType() {
        assertNotNull(addiBean);
        assertEquals(1, addiBeans.length);
        assertSame(addiBean, addiBeans[0]);
        assertSame(bean, addiBean);
    }
}
| apache-2.0 |
christophd/citrus | vintage/citrus-java-dsl/src/test/java/com/consol/citrus/integration/runner/SendSoapAttachmentTestRunnerIT.java | 6070 | /*
* Copyright 2006-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.integration.runner;
import com.consol.citrus.annotations.CitrusTest;
import com.consol.citrus.dsl.testng.TestNGCitrusTestRunner;
import org.springframework.core.io.ClassPathResource;
import org.testng.annotations.Test;
/**
 * Integration test: sends SOAP requests carrying an attachment and verifies the
 * attachment on the receiving side. Runs two rounds — first with the attachment
 * loaded from a classpath resource, then with inline string content.
 *
 * @author Christoph Deppisch
 */
@Test
public class SendSoapAttachmentTestRunnerIT extends TestNGCitrusTestRunner {

    @CitrusTest
    public void sendSoapAttachment() {
        // Round 1: attachment content comes from a classpath resource.
        // Client send and server receive/respond run in parallel because the client
        // call blocks until the server side has answered.
        parallel().actions(
            soap(builder -> builder.client("webServiceClient")
                .send()
                .payload("<ns0:SoapMessageWithAttachmentRequest xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                        "<ns0:Operation>Read the attachment</ns0:Operation>" +
                    "</ns0:SoapMessageWithAttachmentRequest>")
                .attachment("MySoapAttachment", "text/plain", new ClassPathResource("com/consol/citrus/ws/soapAttachment.txt"))),
            sequential().actions(
                soap(builder -> builder.server("webServiceRequestReceiver")
                    .receive()
                    .payload("<ns0:SoapMessageWithAttachmentRequest xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                            "<ns0:Operation>Read the attachment</ns0:Operation>" +
                        "</ns0:SoapMessageWithAttachmentRequest>")
                    .schemaValidation(false)
                    // remember the JMS message id so the response can be correlated
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")
                    .attachment("MySoapAttachment", "text/plain", new ClassPathResource("com/consol/citrus/ws/soapAttachment.txt"))
                    .timeout(5000L)),
                soap(builder -> builder.server("webServiceResponseSender")
                    .send()
                    .payload("<ns0:SoapMessageWithAttachmentResponse xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                            "<ns0:Operation>Read the attachment</ns0:Operation>" +
                            "<ns0:Success>true</ns0:Success>" +
                        "</ns0:SoapMessageWithAttachmentResponse>")
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        // Verify the client receives the correlated response for round 1.
        soap(builder -> builder.client("webServiceClient")
            .receive()
            .payload("<ns0:SoapMessageWithAttachmentResponse xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                    "<ns0:Operation>Read the attachment</ns0:Operation>" +
                    "<ns0:Success>true</ns0:Success>" +
                "</ns0:SoapMessageWithAttachmentResponse>")
            .schemaValidation(false));

        // Round 2: same flow, but the attachment content is an inline string.
        parallel().actions(
            soap(builder -> builder.client("webServiceClient")
                .send()
                .payload("<ns0:SoapMessageWithAttachmentRequest xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                        "<ns0:Operation>Read the attachment</ns0:Operation>" +
                    "</ns0:SoapMessageWithAttachmentRequest>")
                .attachment("MySoapAttachment", "text/plain", "This is an attachment!")),
            sequential().actions(
                soap(builder -> builder.server("webServiceRequestReceiver")
                    .receive()
                    .payload("<ns0:SoapMessageWithAttachmentRequest xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                            "<ns0:Operation>Read the attachment</ns0:Operation>" +
                        "</ns0:SoapMessageWithAttachmentRequest>")
                    .schemaValidation(false)
                    .extractFromHeader("citrus_jms_messageId", "internal_correlation_id")
                    .attachment("MySoapAttachment", "text/plain", "This is an attachment!")
                    .timeout(5000L)),
                soap(builder -> builder.server("webServiceResponseSender")
                    .send()
                    .payload("<ns0:SoapMessageWithAttachmentResponse xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                            "<ns0:Operation>Read the attachment</ns0:Operation>" +
                            "<ns0:Success>true</ns0:Success>" +
                        "</ns0:SoapMessageWithAttachmentResponse>")
                    .header("citrus_jms_correlationId", "${internal_correlation_id}"))
            )
        );

        // Verify the client receives the correlated response for round 2.
        soap(builder -> builder.client("webServiceClient")
            .receive()
            .payload("<ns0:SoapMessageWithAttachmentResponse xmlns:ns0=\"http://www.consol.de/schemas/samples/sayHello.xsd\">" +
                    "<ns0:Operation>Read the attachment</ns0:Operation>" +
                    "<ns0:Success>true</ns0:Success>" +
                "</ns0:SoapMessageWithAttachmentResponse>")
            .schemaValidation(false));
    }
}
| apache-2.0 |
ThiagoGarciaAlves/intellij-community | platform/lang-api/src/com/intellij/execution/configuration/RunConfigurationExtensionsManager.java | 6878 | /*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.execution.configuration;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.Location;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.configurations.RunConfigurationBase;
import com.intellij.execution.configurations.RunnerSettings;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.options.SettingsEditorGroup;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.util.SmartList;
import gnu.trove.THashMap;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
/**
 * Manages the set of {@link RunConfigurationExtensionBase} extensions attached to a run
 * configuration: (de)serialization of their settings, editor UI contribution, validation,
 * and hooking them into command-line construction and process start-up.
 *
 * @author traff
 */
public class RunConfigurationExtensionsManager<U extends RunConfigurationBase, T extends RunConfigurationExtensionBase<U>> {
    // Holds serialized settings of extensions whose plugin is currently unavailable,
    // so they survive a read/write round trip (see readExternal/writeExternal).
    private static final Key<List<Element>> RUN_EXTENSIONS = Key.create("run.extension.elements");
    private static final String EXT_ID_ATTR = "ID";
    private static final String EXTENSION_ROOT_ATTR = "EXTENSION";
    private final ExtensionPointName<T> myExtensionPointName;

    public RunConfigurationExtensionsManager(ExtensionPointName<T> extensionPointName) {
        myExtensionPointName = extensionPointName;
    }

    /**
     * Loads each applicable extension's settings from {@code parentNode}. Settings whose
     * extension is not present (e.g. plugin disabled) are preserved verbatim as copyable
     * user data so they are not lost on the next save.
     */
    public void readExternal(@NotNull U configuration, @NotNull Element parentNode) {
        Map<String, T> extensions = new THashMap<>();
        for (T extension : getApplicableExtensions(configuration)) {
            extensions.put(extension.getSerializationId(), extension);
        }

        List<Element> children = parentNode.getChildren(getExtensionRootAttr());
        // if some of extensions settings weren't found we should just keep it because some plugin with extension
        // may be turned off
        boolean hasUnknownExtension = false;
        for (Element element : children) {
            final T extension = extensions.remove(element.getAttributeValue(getIdAttrName()));
            if (extension == null) {
                hasUnknownExtension = true;
            }
            else {
                extension.readExternal(configuration, element);
            }
        }
        if (hasUnknownExtension) {
            // Keep the full original child list (interned to save memory) for writeExternal.
            List<Element> copy = children.stream().map(JDOMUtil::internElement).collect(Collectors.toList());
            configuration.putCopyableUserData(RUN_EXTENSIONS, copy);
        }
    }

    /** Name of the XML attribute that identifies an extension; overridable by subclasses. */
    @NotNull
    protected String getIdAttrName() {
        return EXT_ID_ATTR;
    }

    /** Name of the XML element that wraps one extension's settings; overridable by subclasses. */
    @NotNull
    protected String getExtensionRootAttr() {
        return EXTENSION_ROOT_ATTR;
    }

    /**
     * Writes all extension settings under {@code parentNode}, merging previously preserved
     * settings of unavailable extensions. Output is sorted by serialization id (TreeMap),
     * and empty elements (no content, only the id attribute) are omitted.
     */
    public void writeExternal(@NotNull U configuration, @NotNull Element parentNode) {
        Map<String, Element> map = new TreeMap<>();
        final List<Element> elements = configuration.getCopyableUserData(RUN_EXTENSIONS);
        if (elements != null) {
            for (Element element : elements) {
                map.put(element.getAttributeValue(getIdAttrName()), element.clone());
            }
        }

        for (T extension : getApplicableExtensions(configuration)) {
            Element element = new Element(getExtensionRootAttr());
            element.setAttribute(getIdAttrName(), extension.getSerializationId());
            try {
                extension.writeExternal(configuration, element);
            }
            catch (WriteExternalException ignored) {
                // an extension that refuses to serialize is simply skipped
                continue;
            }
            if (!element.getContent().isEmpty() || element.getAttributes().size() > 1) {
                map.put(extension.getSerializationId(), element);
            }
        }

        for (Element values : map.values()) {
            parentNode.addContent(values);
        }
    }

    /** Adds each applicable extension's settings editor (if any) to the editor group. */
    public <V extends U> void appendEditors(@NotNull final U configuration,
                                            @NotNull final SettingsEditorGroup<V> group) {
        for (T extension : getApplicableExtensions(configuration)) {
            @SuppressWarnings("unchecked")
            final SettingsEditor<V> editor = extension.createEditor((V)configuration);
            if (editor != null) {
                group.addEditor(extension.getEditorTitle(), editor);
            }
        }
    }

    /** Lets every enabled extension validate the configuration; first failure propagates. */
    public void validateConfiguration(@NotNull final U configuration,
                                      final boolean isExecution) throws Exception {
        // only for enabled extensions
        for (T extension : getEnabledExtensions(configuration, null)) {
            extension.validateConfiguration(configuration, isExecution);
        }
    }

    /** Lets applicable extensions initialize a configuration freshly created at {@code location}. */
    public void extendCreatedConfiguration(@NotNull final U configuration,
                                           @NotNull final Location location) {
        for (T extension : getApplicableExtensions(configuration)) {
            extension.extendCreatedConfiguration(configuration, location);
        }
    }

    /** Lets applicable extensions contribute defaults to a template configuration. */
    public void extendTemplateConfiguration(@NotNull final U configuration) {
        for (T extension : getApplicableExtensions(configuration)) {
            extension.extendTemplateConfiguration(configuration);
        }
    }

    /** Lets every enabled extension modify the command line before the process is started. */
    public void patchCommandLine(@NotNull final U configuration,
                                 final RunnerSettings runnerSettings,
                                 @NotNull final GeneralCommandLine cmdLine,
                                 @NotNull final String runnerId) throws ExecutionException {
        // only for enabled extensions
        for (T extension : getEnabledExtensions(configuration, runnerSettings)) {
            extension.patchCommandLine(configuration, runnerSettings, cmdLine, runnerId);
        }
    }

    /** Lets every enabled extension attach (e.g. listeners) to the started process. */
    public void attachExtensionsToProcess(@NotNull final U configuration,
                                          @NotNull final ProcessHandler handler,
                                          RunnerSettings runnerSettings) {
        // only for enabled extensions
        for (T extension : getEnabledExtensions(configuration, runnerSettings)) {
            extension.attachToProcess(configuration, handler, runnerSettings);
        }
    }

    /** All registered extensions that report themselves applicable to this configuration. */
    @NotNull
    protected List<T> getApplicableExtensions(@NotNull U configuration) {
        List<T> extensions = new SmartList<>();
        for (T extension : Extensions.getExtensions(myExtensionPointName)) {
            if (extension.isApplicableFor(configuration)) {
                extensions.add(extension);
            }
        }
        return extensions;
    }

    /** Applicable extensions that are additionally enabled for the given runner settings. */
    @NotNull
    protected List<T> getEnabledExtensions(@NotNull U configuration, @Nullable RunnerSettings runnerSettings) {
        List<T> extensions = new SmartList<>();
        for (T extension : Extensions.getExtensions(myExtensionPointName)) {
            if (extension.isApplicableFor(configuration) && extension.isEnabledFor(configuration, runnerSettings)) {
                extensions.add(extension);
            }
        }
        return extensions;
    }
}
| apache-2.0 |
torch2424/CECS343JavaResturant | JavaFXProject/src/appElements/FxAlert.java | 606 | package appElements;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
/** Small helper for showing JavaFX information dialogs. */
public class FxAlert {

    /**
     * Shows a blocking information dialog with the given header and body text,
     * then mirrors the same message to the console.
     *
     * @param header the dialog's header text (also printed to stdout)
     * @param body   the dialog's content text (also printed to stdout)
     */
    public static void alertInfo(String header, String body) {
        // Build and configure the information dialog.
        Alert alert = new Alert(AlertType.INFORMATION);
        alert.setTitle("Resturant Application");
        alert.setHeaderText(header);
        alert.setResizable(true);
        alert.setContentText(body);

        // Block until the user dismisses the dialog.
        alert.showAndWait();

        // Echo the message to the console, followed by a blank separator line.
        System.out.println(header);
        System.out.println(body);
        System.out.println();
    }
}
| apache-2.0 |
jnuyanfa/YanFa-LeetCode-with-JAVA | src/leetcode033_SearchInRotatedSortedArray/LeetCode_033_SearchInRotated.java | 657 | import org.junit.Test;
import java.util.Arrays;
/**
* @author yanfa
* @version 1.0 2016-05-02
*/
public class LeetCode_033_SearchInRotated
{
public int search(int[] nums, int target)
{
int pivot = 0;
while(pivot < nums.length - 1 && nums[pivot] < nums[pivot + 1])
pivot++;
int res;
if(nums[0] <= target)
res = Arrays.binarySearch(nums,0,pivot+1,target);
else
res = Arrays.binarySearch(nums,pivot+1,nums.length,target);
return res < 0 ? -1 : res;
}
@Test
public void testSearch()
{
System.out.println(search(new int[]{1}, 1));
}
}
| apache-2.0 |
zhouzhuo810/ZzApiDoc | src/main/java/me/zhouzhuo810/zzapidoc/project/service/ErrorCodeService.java | 843 | package me.zhouzhuo810.zzapidoc.project.service;
import me.zhouzhuo810.zzapidoc.common.result.BaseResult;
import me.zhouzhuo810.zzapidoc.common.service.BaseService;
import me.zhouzhuo810.zzapidoc.project.entity.ErrorCodeEntity;
/**
 * Service for managing API error-code entries at global, project, group, and
 * interface scope.
 *
 * <p>NOTE(review): parameter semantics below are inferred from names — verify
 * against the implementing class.
 *
 * Created by zz on 2017/12/29.
 */
public interface ErrorCodeService extends BaseService<ErrorCodeEntity> {

    /** Creates an error-code entry; {@code isGlobal}/{@code isGroup} select its scope. */
    BaseResult addErrorCode(int code, String note, String interfaceId, String groupId, String projectId, boolean isGlobal, boolean isGroup, String userId);

    /** Lists error codes visible at the given scope for the given user. */
    BaseResult getAllErrorCode(boolean global, boolean group, String projectId, String groupId, String interfaceId, String userId);

    /** Deletes a single error code by id. */
    BaseResult deleteErrorCode(String id, String userId);

    /** Batch delete — presumably {@code ids} is a delimited id list; confirm the expected separator. */
    BaseResult deleteErrorCodeWeb(String ids, String userId);

    /** Updates an existing error code's numeric value and note. */
    BaseResult updateErrorCode(String codeId, int code, String note, String userId);
}
| apache-2.0 |
YojhanLR/ProyectoSTPI-JavaEE | src/java/com/stpi/controller/MonitoreoTransfer_Index.java | 2425 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.stpi.controller;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Servlet that serves the administrator's transfer-monitoring index page.
 *
 * @author YojhanLR
 */
public class MonitoreoTransfer_Index extends HttpServlet {

    /**
     * Common handler for both GET and POST: forwards the request to the
     * transfer-monitoring JSP view.
     *
     * @param request  servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        getServletContext()
                .getRequestDispatcher("/views/Administrador/Transfer/Monitoreo/index.jsp")
                .forward(request, response);
    }

    /**
     * Handles HTTP GET by delegating to {@link #processRequest}.
     *
     * @param request  servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles HTTP POST by delegating to {@link #processRequest}.
     *
     * @param request  servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }
}
| apache-2.0 |
JNDX25219/XiaoShangXing-old | app/src/main/java/com/xiaoshangxing/Private/QCW/tongxuededian/AKyifudianActivity.java | 10573 | package com.xiaoshangxing.Private.QCW.tongxuededian;
import android.app.AlertDialog;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.BitmapDrawable;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.PopupWindow;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.xiaoshangxing.R;
import util.ImageTools;
/**
 * Activity for the "AK clothing" detail screen. Wires up the toolbar, a "more" popup
 * menu, a share overlay panel, and the share-to (WeChat/Weibo/QQ/friends) buttons.
 */
public class AKyifudianActivity extends AppCompatActivity implements View.OnClickListener {

    // Popup shown from the "more" toolbar button; created lazily in showRelese().
    private PopupWindow fabuPop = null;
    private Button back;
    private TextView textTile;
    private ImageButton btn_more;
    private Toolbar tb_reward;
    // Share targets: hy = friends, xyq = moments/feed, wx = WeChat, wb = Weibo, qq = QQ.
    private ImageButton ib_AK_zfhy;
    private ImageButton ib_AK_zfxyq;
    private ImageButton ib_AK_zfwx;
    private ImageButton ib_AK_zfwb;
    private ImageButton ib_AK_zfqq;
    private Button btn_surround_info_cancel;
    private LinearLayout ll_surround_info_zhuanfa;
    // Semi-transparent overlay that hosts the share panel.
    private RelativeLayout relativeLayoutTransprant;
    private RelativeLayout rl_main;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_akyifudian);
        initView();
        // If launched with extra data == "dialog", immediately show the share-to-friend dialog.
        Intent k = getIntent();
        String data = k.getStringExtra("data");
        if (data!=null&&!data.equals("")) {
            if (data.equals("dialog")) {
                showShareFriend(this);
            }
        }
    }

    /** Looks up all views from the layout and registers this activity as their click listener. */
    private void initView() {
        relativeLayoutTransprant = (RelativeLayout)findViewById(R.id.rl_akfz_transprant);
        back = (Button) findViewById(R.id.back);
        rl_main = (RelativeLayout)findViewById(R.id.rl_ak_main);
        textTile = (TextView) findViewById(R.id.textTile);
        btn_more = (ImageButton) findViewById(R.id.btn_more);
        tb_reward = (Toolbar) findViewById(R.id.tb_reward);
        ib_AK_zfhy = (ImageButton) findViewById(R.id.ib_AK_zfhy);
        ib_AK_zfxyq = (ImageButton) findViewById(R.id.ib_AK_zfxyq);
        ib_AK_zfwx = (ImageButton) findViewById(R.id.ib_AK_zfwx);
        ib_AK_zfwb = (ImageButton) findViewById(R.id.ib_AK_zfwb);
        ib_AK_zfqq = (ImageButton) findViewById(R.id.ib_AK_zfqq);
        btn_surround_info_cancel = (Button) findViewById(R.id.btn_surround_info_cancel);
        ll_surround_info_zhuanfa = (LinearLayout) findViewById(R.id.ll_surround_info_zhuanfa);
        back.setOnClickListener(this);
        btn_more.setOnClickListener(this);
        ib_AK_zfhy.setOnClickListener(this);
        ib_AK_zfxyq.setOnClickListener(this);
        ib_AK_zfwx.setOnClickListener(this);
        ib_AK_zfwb.setOnClickListener(this);
        ib_AK_zfqq.setOnClickListener(this);
        btn_surround_info_cancel.setOnClickListener(this);
    }

    /** Central click dispatcher for the toolbar, share buttons, and overlay cancel button. */
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.back:
                finish();
                break;
            case R.id.btn_more:
                // Open the "more" popup anchored to the toolbar button.
                showRelese(v);
                break;
            case R.id.ib_AK_zfhy:
                // Share to friends: open the friend picker pre-set to show a dialog.
                Intent zfhy = new Intent(AKyifudianActivity.this,FenXiangHaoYou.class);
                zfhy.putExtra("data","dialog");
                startActivity(zfhy);
                break;
            case R.id.ib_AK_zfxyq:
                // Share to the feed: open the "publish moment" screen.
                Intent releseDongTai = new Intent(AKyifudianActivity.this,ReleseDongTaiActivity.class);
                startActivity(releseDongTai);
                break;
            case R.id.ib_AK_zfwx:
                // Forward to WeChat (confirmation dialog only).
                showZhuanFaDia(AKyifudianActivity.this,"微信");
                break;
            case R.id.ib_AK_zfwb:
                // Forward to Weibo (confirmation dialog only).
                showZhuanFaDia(AKyifudianActivity.this,"微博");
                break;
            case R.id.ib_AK_zfqq:
                // Forward to QQ (confirmation dialog only).
                showZhuanFaDia(AKyifudianActivity.this,"QQ");
                break;
            case R.id.btn_surround_info_cancel:
                // Dismiss the share overlay and restore the main layout.
                relativeLayoutTransprant.setVisibility(View.GONE);
                back.setEnabled(true);
                btn_more.setEnabled(true);
                rl_main.setAlpha(1.0f);
                break;
        }
    }

    /**
     * Shows the "more" popup menu (publish / published / share / favorites)
     * anchored below the given toolbar button. The popup is created once and reused.
     */
    private void showRelese(View moreBtnView) {
        Log.d("我要发布","你好");
        LayoutInflater li = (LayoutInflater) AKyifudianActivity.this.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        View content = li.inflate(R.layout.pop_akclothes_relese, null);
        if (fabuPop == null) {
            fabuPop = new PopupWindow(content, ViewGroup.LayoutParams.WRAP_CONTENT,
                    ViewGroup.LayoutParams.WRAP_CONTENT);
            // int i = 5 * fl.getMeasuredWidth()/18;
            // int a = (new Double(0.25*layout.getMeasuredHeight())).intValue();
            //
            // window = new PopupWindow(v, i,a);
            Button wyfb = (Button) content.findViewById(R.id.btn_pop_reward_relese_wyfb);
            Button yfb = (Button) content.findViewById(R.id.btn_pop_reward_relese_yfb);
            Button share = (Button)content.findViewById(R.id.btn_pop_reward_relese_share);
            Button sc = (Button) content.findViewById(R.id.btn_pop_reward_relese_sc);
            // "I want to publish": open the conversation screen and close the popup.
            wyfb.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Intent intent = new Intent(AKyifudianActivity.this, AKyFaqiHuiHua.class);
                    startActivity(intent);
                    fabuPop.dismiss();
                }
            });
            // "Share": reveal the share overlay, dim the main layout, disable toolbar buttons.
            share.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    relativeLayoutTransprant.setVisibility(View.VISIBLE);
                    rl_main.setAlpha(0.6f);
                    fabuPop.dismiss();
                    back.setEnabled(false);
                    btn_more.setEnabled(false);
                }
            });
            // "Published": navigation not implemented yet (kept as a stub).
            yfb.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // Intent intent = new Intent(SchoolRewardActivity.this,MyRewardActiviy.class);
                    // startActivity(intent);
                    // fabuPop.dismiss();
                }
            });
            // "Favorites": navigation not implemented yet (kept as a stub).
            sc.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    // Intent intent = new Intent(SchoolRewardActivity.this,RewardFavoriteActivity.class);
                    // startActivity(intent);
                    // fabuPop.dismiss();
                }
            });
        }
        fabuPop.setBackgroundDrawable(new BitmapDrawable());
        fabuPop.setFocusable(true);
        fabuPop.update();
        // NOTE(review): popupWidth/popupHeight and the parent/size locals below are
        // computed but never used for positioning — candidates for cleanup.
        int popupWidth;
        int popupHeight;
        content.measure(View.MeasureSpec.UNSPECIFIED, View.MeasureSpec.UNSPECIFIED);
        popupWidth = content.getMeasuredWidth();
        popupHeight = content.getMeasuredHeight();
        View parent = fabuPop.getContentView();
        int heightMoreBtnView = moreBtnView.getHeight();
        int widthMoreBtnView = moreBtnView.getWidth();
        WindowManager m = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
        Display d = m.getDefaultDisplay(); // used to obtain the screen width/height
        // Show the popup below the anchor with a fixed pixel offset.
        fabuPop.showAsDropDown(moreBtnView, -230,
                50);
    }

    /**
     * Back key: first press while the share overlay is visible hides the overlay;
     * otherwise finishes the activity. Always returns false to allow default handling.
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event)
    {
        if (keyCode == KeyEvent.KEYCODE_BACK )
        {
            if (relativeLayoutTransprant.getVisibility()==View.VISIBLE){
                relativeLayoutTransprant.setVisibility(View.GONE);
                back.setEnabled(true);
                btn_more.setEnabled(true);
                rl_main.setAlpha(1.0f);
            }
            else {
                finish();
            }
        }
        return false;
    }

    /**
     * Shows the "share to friend" dialog with a message field and a preview image.
     * Both buttons currently just dismiss the dialog (no send action yet).
     */
    public void showShareFriend(Context context) {
        final AlertDialog customDia = new AlertDialog.Builder(context).create();
        final View viewDia = LayoutInflater.from(context).inflate(R.layout.dialog_akyifu_share_friend, null);
        customDia.setView(viewDia);
        EditText et = (EditText)viewDia.findViewById(R.id.et_dialog_akyifu_share);
        ImageView iv = (ImageView)viewDia.findViewById(R.id.iv_yili);
        // Show the preview image (converted through the project's ImageTools helper).
        iv.setImageBitmap(ImageTools.bigdrawableToBitmap(getResources().getDrawable(R.mipmap.dian1)));
        Button open = (Button) viewDia.findViewById(R.id.btn_reward_list_dialog_send);
        Button cancel = (Button) viewDia.findViewById(R.id.btn_reward_list_dialog_cancel);
        customDia.setCanceledOnTouchOutside(false);
        cancel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                customDia.dismiss();
            }
        });
        open.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                customDia.dismiss();
            }
        });
        customDia.setCanceledOnTouchOutside(false);
        customDia.show();
    }

    /**
     * Shows a confirmation dialog asking whether to open the named external app
     * (e.g. WeChat/Weibo/QQ). Both buttons currently just dismiss the dialog.
     *
     * @param name display name of the target app, inserted into the dialog text
     */
    public void showZhuanFaDia(Context context,String name) {
        final AlertDialog customDia = new AlertDialog.Builder(context).create();
        final View viewDia = LayoutInflater.from(context).inflate(R.layout.dialog_fasong, null);
        customDia.setView(viewDia);
        TextView textView = (TextView)viewDia.findViewById(R.id.tv_dialog_ak_zhuanfa);
        Button open = (Button) viewDia.findViewById(R.id.btn_dialog_open);
        Button cancel = (Button) viewDia.findViewById(R.id.btn_dialog_cancel);
        textView.setText("“校上行”想要打开“"+name+"”");
        cancel.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                customDia.dismiss();
            }
        });
        open.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                customDia.dismiss();
            }
        });
        customDia.setCanceledOnTouchOutside(false);
        customDia.show();
    }
}
| apache-2.0 |
Sca09/intelygenz_rss | app/src/main/java/es/intelygenz/rss/presentation/internal/di/PerActivity.java | 1072 | /**
* Copyright (C) 2015 Fernando Cejas Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.intelygenz.rss.presentation.internal.di;
import java.lang.annotation.Retention;
import javax.inject.Scope;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Created by davidtorralbo on 02/11/16.
*/
/**
* A scoping annotation to permit objects whose lifetime should
* conform to the life of the activity to be memorized in the
* correct component.
*/
// JSR-330 scope annotation (javax.inject.Scope): marks dependencies whose lifetime
// should match the Activity that owns the injecting component.
@Scope
@Retention(RUNTIME)
public @interface PerActivity {}
| apache-2.0 |
allga/java_pft | mantis-tests/src/test/java/ru/stqa/ptf/mantis/appmanager/UserHelper.java | 999 | package ru.stqa.ptf.mantis.appmanager;
import java.util.NoSuchElementException;

import org.openqa.selenium.By;
import ru.stqa.ptf.mantis.model.User;
import ru.stqa.ptf.mantis.model.Users;
/**
 * Helper for user-related actions in the Mantis UI and database:
 * picking users from the DB, logging in, and starting a password reset.
 *
 * Created by Olga on 15.04.2016.
 */
public class UserHelper extends HelperBase{

    /** Mantis access level of administrator accounts, excluded when picking a "plain" user. */
    private static final int ADMIN_ACCESS_LEVEL = 90;

    public UserHelper(ApplicationManager app) {
        super(app);
    }

    /**
     * Returns an arbitrary non-administrator user from the database.
     *
     * @throws NoSuchElementException if the database contains no non-administrator user
     */
    public User getAnyUserFromBD() {
        Users users = app.db().getUsersFromBD();
        return users.stream()
                .filter((u) -> u.getAccessLevel() != ADMIN_ACCESS_LEVEL)
                .findFirst()
                .orElseThrow(() -> new NoSuchElementException("No non-administrator user found in the database"));
    }

    /** Clicks the "Reset Password" button on the currently open user page. */
    public void startResetPassword() {
        click(By.cssSelector("input[value=\"Reset Password\"]"));
    }

    /** Fills the login form with the given credentials and submits it. */
    public void login(String username, String password) {
        type(By.name("username"), username);
        type(By.name("password"), password);
        click(By.cssSelector("input[value=\"Login\"]"));
    }

    /**
     * Returns the user with the given id from the database.
     *
     * @throws NoSuchElementException if no user with that id exists
     *         (same exception type as the previous unchecked {@code Optional.get()},
     *         but with a diagnostic message)
     */
    public User getUserByIdFromBD(int id) {
        Users users = app.db().getUsersFromBD();
        return users.stream()
                .filter((u) -> u.getId() == id)
                .findFirst()
                .orElseThrow(() -> new NoSuchElementException("No user with id " + id + " found in the database"));
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-elasticloadbalancingv2/src/main/java/com/amazonaws/services/elasticloadbalancingv2/model/DescribeSSLPoliciesRequest.java | 7666 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticloadbalancingv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the DescribeSSLPolicies API call.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticloadbalancingv2-2015-12-01/DescribeSSLPolicies"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeSSLPoliciesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The names of the policies. */
    private java.util.List<String> names;

    /** The marker for the next set of results. (You received this marker from a previous call.) */
    private String marker;

    /** The maximum number of results to return with this call. */
    private Integer pageSize;

    /**
     * Returns the names of the policies.
     *
     * @return the names of the policies, or {@code null} if none were set
     */
    public java.util.List<String> getNames() {
        return names;
    }

    /**
     * Sets the names of the policies. A {@code null} argument clears the list;
     * otherwise the collection is defensively copied.
     *
     * @param names the names of the policies
     */
    public void setNames(java.util.Collection<String> names) {
        if (names == null) {
            this.names = null;
            return;
        }
        this.names = new java.util.ArrayList<String>(names);
    }

    /**
     * Appends the given values to the existing list (if any). Use
     * {@link #setNames(java.util.Collection)} or {@link #withNames(java.util.Collection)}
     * if you want to override the existing values.
     *
     * @param names the names of the policies
     * @return this request, so that method calls can be chained together
     */
    public DescribeSSLPoliciesRequest withNames(String... names) {
        if (this.names == null) {
            setNames(new java.util.ArrayList<String>(names.length));
        }
        java.util.Collections.addAll(this.names, names);
        return this;
    }

    /**
     * Replaces the names of the policies.
     *
     * @param names the names of the policies
     * @return this request, so that method calls can be chained together
     */
    public DescribeSSLPoliciesRequest withNames(java.util.Collection<String> names) {
        setNames(names);
        return this;
    }

    /**
     * Sets the marker for the next set of results.
     *
     * @param marker the marker for the next set of results (received from a previous call)
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Returns the marker for the next set of results.
     *
     * @return the marker for the next set of results (received from a previous call)
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * Sets the marker for the next set of results.
     *
     * @param marker the marker for the next set of results (received from a previous call)
     * @return this request, so that method calls can be chained together
     */
    public DescribeSSLPoliciesRequest withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Sets the maximum number of results to return with this call.
     *
     * @param pageSize the maximum number of results to return with this call
     */
    public void setPageSize(Integer pageSize) {
        this.pageSize = pageSize;
    }

    /**
     * Returns the maximum number of results to return with this call.
     *
     * @return the maximum number of results to return with this call
     */
    public Integer getPageSize() {
        return this.pageSize;
    }

    /**
     * Sets the maximum number of results to return with this call.
     *
     * @param pageSize the maximum number of results to return with this call
     * @return this request, so that method calls can be chained together
     */
    public DescribeSSLPoliciesRequest withPageSize(Integer pageSize) {
        setPageSize(pageSize);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and
     * debugging. Sensitive data will be redacted from this string using a placeholder
     * value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getNames() != null)
            sb.append("Names: ").append(getNames()).append(",");
        if (getMarker() != null)
            sb.append("Marker: ").append(getMarker()).append(",");
        if (getPageSize() != null)
            sb.append("PageSize: ").append(getPageSize());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Two requests are equal when their names, marker and page size are all equal.
     * The {@code instanceof} check below also rejects {@code null}.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof DescribeSSLPoliciesRequest))
            return false;
        DescribeSSLPoliciesRequest other = (DescribeSSLPoliciesRequest) obj;
        return java.util.Objects.equals(getNames(), other.getNames())
                && java.util.Objects.equals(getMarker(), other.getMarker())
                && java.util.Objects.equals(getPageSize(), other.getPageSize());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation as the previous
        // hand-rolled implementation, so hash values are unchanged.
        return java.util.Objects.hash(getNames(), getMarker(), getPageSize());
    }

    @Override
    public DescribeSSLPoliciesRequest clone() {
        return (DescribeSSLPoliciesRequest) super.clone();
    }
}
| apache-2.0 |
AndreJCL/JCL | JCL_Android/app/src/main/java/org/jf/dexlib2/iface/value/LongEncodedValue.java | 3048 | /*
* Copyright 2012, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib2.iface.value;
/**
 * This interface represents an encoded long value.
 */
public interface LongEncodedValue extends EncodedValue {
    /**
     * Gets the long value.
     *
     * @return the long value
     */
    long getValue();

    /**
     * Returns a hashcode for this LongEncodedValue.
     *
     * This hashCode is defined to be the following:
     *
     * <pre>
     * {@code
     * long v = getValue();
     * int hashCode = (int)(v^(v>>>32));
     * }</pre>
     *
     * @return The hash code value for this LongEncodedValue
     */
    @Override int hashCode();

    /**
     * Compares this LongEncodedValue to another LongEncodedValue for equality.
     *
     * This LongEncodedValue is equal to another LongEncodedValue if the values returned by getValue() are equal.
     *
     * @param o The object to be compared for equality with this LongEncodedValue
     * @return true if the specified object is equal to this LongEncodedValue
     */
    @Override boolean equals(Object o);

    /**
     * Compare this LongEncodedValue to another EncodedValue.
     *
     * The comparison is first done on the return values of getValueType(). If the other value is another
     * LongEncodedValue, the return values of getValue() are compared.
     *
     * @param o The EncodedValue to compare with this LongEncodedValue
     * @return An integer representing the result of the comparison
     */
    @Override int compareTo(EncodedValue o);
}
| apache-2.0 |
emag/codereading-undertow | core/src/main/java/io/undertow/util/QValueParser.java | 7813 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Utility class for parsing headers that accept q values
 *
 * @author Stuart Douglas
 */
public class QValueParser {

    private QValueParser() {
    }

    /**
     * Parses a set of headers that take q values to determine the most preferred one.
     *
     * It returns the result in the form of a sorted list of lists, with every element
     * of an inner list having the same q value. This means the highest priority items
     * are at the front of the outer list. The container should use its own internal
     * preferred ordering to determinately pick the correct item to use.
     *
     * @param headers The headers
     * @return The q value results, grouped by q value, highest q value first
     */
    public static List<List<QValueResult>> parse(List<String> headers) {
        final List<QValueResult> found = new ArrayList<>();
        QValueResult current = null;
        for (final String header : headers) {
            final int l = header.length();
            //we do not use a string builder
            //we just keep track of where the current string starts and call substring()
            int stringStart = 0;
            for (int i = 0; i < l; ++i) {
                char c = header.charAt(i);
                switch (c) {
                    case ',': {
                        if (current != null &&
                                (i - stringStart > 2 && header.charAt(stringStart) == 'q' &&
                                        header.charAt(stringStart + 1) == '=')) {
                            //if this is a valid qvalue it belongs to the current token
                            current.qvalue = header.substring(stringStart + 2, i);
                            current = null;
                        } else if (stringStart != i) {
                            current = handleNewEncoding(found, header, stringStart, i);
                        }
                        stringStart = i + 1;
                        break;
                    }
                    case ';': {
                        //separator between a token and its parameters
                        if (stringStart != i) {
                            current = handleNewEncoding(found, header, stringStart, i);
                            stringStart = i + 1;
                        }
                        break;
                    }
                    case ' ': {
                        if (stringStart != i) {
                            if (current != null &&
                                    (i - stringStart > 2 && header.charAt(stringStart) == 'q' &&
                                            header.charAt(stringStart + 1) == '=')) {
                                //if this is a valid qvalue
                                current.qvalue = header.substring(stringStart + 2, i);
                            } else {
                                current = handleNewEncoding(found, header, stringStart, i);
                            }
                        }
                        stringStart = i + 1;
                    }
                }
            }

            //handle whatever token the header ends with
            if (stringStart != l) {
                if (current != null &&
                        (l - stringStart > 2 && header.charAt(stringStart) == 'q' &&
                                header.charAt(stringStart + 1) == '=')) {
                    //if this is a valid qvalue
                    current.qvalue = header.substring(stringStart + 2, l);
                } else {
                    current = handleNewEncoding(found, header, stringStart, l);
                }
            }
        }

        //sort descending, so the highest q value comes first
        Collections.sort(found, Collections.reverseOrder());

        //group consecutive results that share the same q value string
        String currentQValue = null;
        List<List<QValueResult>> values = new ArrayList<>();
        List<QValueResult> currentSet = null;
        for (QValueResult val : found) {
            if (!val.qvalue.equals(currentQValue)) {
                currentQValue = val.qvalue;
                currentSet = new ArrayList<>();
                values.add(currentSet);
            }
            currentSet.add(val);
        }
        return values;
    }

    //records a new token (with the default q value of 1) in the result list
    private static QValueResult handleNewEncoding(final List<QValueResult> found, final String header, final int stringStart, final int i) {
        final QValueResult current = new QValueResult();
        current.value = header.substring(stringStart, i);
        found.add(current);
        return current;
    }

    public static class QValueResult implements Comparable<QValueResult> {

        /**
         * The string value of the result
         */
        private String value;

        /**
         * we keep the qvalue as a string to avoid parsing the double.
         * <p/>
         * This should give both performance and also possible security improvements
         */
        private String qvalue = "1";

        public String getValue() {
            return value;
        }

        public String getQvalue() {
            return qvalue;
        }

        @Override
        public int compareTo(final QValueResult other) {
            //we compare the strings as if they were decimal values.
            final String t = qvalue;
            final String o = other.qvalue;
            if (t == null && o == null) {
                //neither of them has a q value
                //we compare them via the server specified default precedence
                //note that encoding is never null here, a * without a q value is meaningless
                //and will be discarded before this
                return 0;
            }
            if (o == null) {
                return 1;
            } else if (t == null) {
                return -1;
            }
            final int tl = t.length();
            final int ol = o.length();
            //we only compare the first 5 characters as per spec
            for (int i = 0; i < 5; ++i) {
                if (tl == i || ol == i) {
                    //one side ran out of digits with a common prefix so far; the longer
                    //value is the higher one, as the extra digits can only add value
                    //(e.g. q=0.55 > q=0.5).
                    //BUG FIX: this previously returned 'ol - tl', which ranked the
                    //shorter string higher and contradicted the intent stated above.
                    return tl - ol;
                }
                if (i == 1) continue; // this is just the decimal point
                final int tc = t.charAt(i);
                final int oc = o.charAt(i);
                int res = tc - oc;
                if (res != 0) {
                    return res;
                }
            }
            return 0;
        }

        public boolean isQValueZero() {
            //we ignore * without a qvalue
            if (qvalue != null) {
                int length = Math.min(5, qvalue.length());
                //we need to find out if this is prohibiting identity
                //encoding (q=0). Otherwise we just treat it as the identity encoding
                boolean zero = true;
                for (int j = 0; j < length; ++j) {
                    if (j == 1) continue;//decimal point
                    if (qvalue.charAt(j) != '0') {
                        zero = false;
                        break;
                    }
                }
                return zero;
            }
            return false;
        }
    }
}
| apache-2.0 |
IHTSDO/snow-owl | snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/index/change/RelationshipChangeProcessor.java | 5246 | /*
* Copyright 2011-2016 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.datastore.index.change;
import static com.google.common.collect.Sets.newHashSet;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import com.b2international.index.Hits;
import com.b2international.index.query.Query;
import com.b2international.index.revision.RevisionSearcher;
import com.b2international.snowowl.core.api.ComponentUtils;
import com.b2international.snowowl.datastore.ICDOCommitChangeSet;
import com.b2international.snowowl.datastore.index.ChangeSetProcessorBase;
import com.b2international.snowowl.snomed.Relationship;
import com.b2international.snowowl.snomed.SnomedPackage;
import com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants;
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedRelationshipIndexEntry;
import com.b2international.snowowl.snomed.datastore.index.entry.SnomedRelationshipIndexEntry.Builder;
import com.b2international.snowowl.snomed.datastore.index.refset.RefSetMemberChange;
import com.b2international.snowowl.snomed.datastore.index.update.ReferenceSetMembershipUpdater;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
/**
 * Processes the relationship changes of a CDO commit and maintains the
 * corresponding {@link SnomedRelationshipIndexEntry} index documents.
 *
 * @since 4.3
 */
public class RelationshipChangeProcessor extends ChangeSetProcessorBase {

    // collects reference set member changes that refer to relationships
    private final ReferringMemberChangeProcessor memberChangeProcessor;

    public RelationshipChangeProcessor() {
        super("relationship changes");
        this.memberChangeProcessor = new ReferringMemberChangeProcessor(SnomedTerminologyComponentConstants.RELATIONSHIP_NUMBER);
    }

    /**
     * Translates the relationship-related changes of the commit into index operations:
     * new relationships are indexed, detached relationships are deleted, and changed or
     * member-referenced relationships are re-indexed with refreshed membership fields.
     *
     * @param commitChangeSet the CDO commit change set to process
     * @param searcher        used to load the current revisions of changed relationships
     * @throws IOException if searching the index fails
     */
    @Override
    public void process(ICDOCommitChangeSet commitChangeSet, RevisionSearcher searcher) throws IOException {
        // reference set member changes keyed by the id of the relationship they refer to
        final Multimap<String, RefSetMemberChange> referringRefSets = memberChangeProcessor.process(commitChangeSet, searcher);

        // drop the index entries of detached (deleted) relationships
        deleteRevisions(SnomedRelationshipIndexEntry.class, commitChangeSet.getDetachedComponents(SnomedPackage.Literals.RELATIONSHIP));

        final Map<String, Relationship> newRelationshipsById = StreamSupport
                .stream(commitChangeSet.getNewComponents(Relationship.class).spliterator(), false)
                .collect(Collectors.toMap(relationship -> relationship.getId(), relationship -> relationship));

        // index a fresh document for every newly created relationship
        for (Relationship relationship : commitChangeSet.getNewComponents(Relationship.class)) {
            final Builder doc = SnomedRelationshipIndexEntry.builder(relationship);
            indexNewRevision(relationship.cdoID(), doc.build());
        }

        final Map<String, Relationship> changedRelationshipsById = StreamSupport
                .stream(commitChangeSet.getDirtyComponents(Relationship.class).spliterator(), false)
                .collect(Collectors.toMap(relationship -> relationship.getId(), relationship -> relationship));

        // re-index both dirty relationships and relationships referenced by member
        // changes -- excluding the ones that were just created (already indexed above)
        final Set<String> changedRelationshipIds = newHashSet(changedRelationshipsById.keySet());
        final Set<String> referencedRelationshipIds = newHashSet(referringRefSets.keySet());
        referencedRelationshipIds.removeAll(newRelationshipsById.keySet());
        changedRelationshipIds.addAll(referencedRelationshipIds);

        // load the current index revision of every affected relationship in one query
        final Query<SnomedRelationshipIndexEntry> query = Query.select(SnomedRelationshipIndexEntry.class)
                .where(SnomedRelationshipIndexEntry.Expressions.ids(changedRelationshipIds))
                .limit(changedRelationshipIds.size())
                .build();
        final Hits<SnomedRelationshipIndexEntry> changedRelationshipHits = searcher.search(query);
        final ImmutableMap<String, SnomedRelationshipIndexEntry> changedRelationshipRevisionsById = Maps
                .uniqueIndex(changedRelationshipHits, ComponentUtils.<String> getIdFunction());

        for (final String id : changedRelationshipIds) {
            final SnomedRelationshipIndexEntry currentDoc = changedRelationshipRevisionsById.get(id);
            if (currentDoc == null) {
                throw new IllegalStateException(String.format("Current relationship revision should not be null for %s", id));
            }
            final Relationship relationship = changedRelationshipsById.get(id);
            // start from the dirty CDO object when available, otherwise from the current doc
            final Builder doc;
            if (relationship != null) {
                doc = SnomedRelationshipIndexEntry.builder(relationship);
            } else {
                doc = SnomedRelationshipIndexEntry.builder(currentDoc);
            }
            // refresh the reference set membership fields of the document; removeAll
            // consumes this id's member changes from the multimap
            final Collection<String> currentMemberOf = currentDoc.getMemberOf();
            final Collection<String> currentActiveMemberOf = currentDoc.getActiveMemberOf();
            new ReferenceSetMembershipUpdater(referringRefSets.removeAll(id), currentMemberOf, currentActiveMemberOf)
                    .update(doc);
            indexChangedRevision(currentDoc.getStorageKey(), doc.build());
        }
    }
}
| apache-2.0 |
eemirtekin/Sakai-10.6-TR | samigo/samigo-services/src/java/org/sakaiproject/tool/assessment/facade/authz/AuthorizationFacadeQueriesAPI.java | 1719 | /**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/sam/tags/sakai-10.6/samigo-services/src/java/org/sakaiproject/tool/assessment/facade/authz/AuthorizationFacadeQueriesAPI.java $
* $Id: AuthorizationFacadeQueriesAPI.java 106463 2012-04-02 12:20:09Z david.horwitz@uct.ac.za $
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.tool.assessment.facade.authz;
import org.sakaiproject.tool.assessment.data.ifc.authz.AuthorizationIfc;
import org.sakaiproject.tool.assessment.data.ifc.authz.QualifierIfc;
/**
 * Query API for navigating qualifier hierarchies and persisting
 * authorization and qualifier records.
 */
public interface AuthorizationFacadeQueriesAPI
{
  /**
   * Looks up the parent qualifiers of the given qualifier.
   *
   * @param qualifierId id of the qualifier whose parents are requested
   * @return an iterator over the parent qualifiers
   */
  public QualifierIteratorFacade getQualifierParents(String qualifierId);

  /**
   * Looks up the child qualifiers of the given qualifier.
   *
   * @param qualifierId id of the qualifier whose children are requested
   * @return an iterator over the child qualifiers
   */
  public QualifierIteratorFacade getQualifierChildren(String qualifierId);

  /**
   * Displays the qualifiers produced by the given iterator -- presumably for
   * debugging/inspection; confirm against the implementation.
   *
   * @param iter iterator over the qualifiers to show
   */
  public void showQualifiers(QualifierIteratorFacade iter);

  /**
   * Persists the given authorization record.
   *
   * @param a the authorization to add
   */
  public void addAuthz(AuthorizationIfc a);

  /**
   * Persists the given qualifier record.
   *
   * @param q the qualifier to add
   */
  public void addQualifier(QualifierIfc q);
}
| apache-2.0 |
iraghumitra/incubator-metron | metron-analytics/metron-profiler/src/main/java/org/apache/metron/profiler/bolt/ProfileBuilderBolt.java | 17502 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.metron.profiler.bolt;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.curator.RetryPolicy;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.recipes.cache.TreeCacheEvent;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.metron.common.Constants;
import org.apache.metron.common.configuration.ConfigurationType;
import org.apache.metron.common.configuration.ConfigurationsUtils;
import org.apache.metron.common.configuration.profiler.ProfileConfig;
import org.apache.metron.common.configuration.profiler.ProfilerConfigurations;
import org.apache.metron.common.zookeeper.configurations.ConfigurationsUpdater;
import org.apache.metron.common.zookeeper.configurations.ProfilerUpdater;
import org.apache.metron.common.zookeeper.configurations.Reloadable;
import org.apache.metron.profiler.DefaultMessageDistributor;
import org.apache.metron.profiler.MessageDistributor;
import org.apache.metron.profiler.MessageRoute;
import org.apache.metron.profiler.ProfileMeasurement;
import org.apache.metron.stellar.common.utils.ConversionUtils;
import org.apache.metron.stellar.dsl.Context;
import org.apache.metron.zookeeper.SimpleEventListener;
import org.apache.metron.zookeeper.ZKCache;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseWindowedBolt;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.windowing.TupleWindow;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static java.lang.String.format;
import static org.apache.metron.profiler.bolt.ProfileSplitterBolt.ENTITY_TUPLE_FIELD;
import static org.apache.metron.profiler.bolt.ProfileSplitterBolt.MESSAGE_TUPLE_FIELD;
import static org.apache.metron.profiler.bolt.ProfileSplitterBolt.PROFILE_TUPLE_FIELD;
import static org.apache.metron.profiler.bolt.ProfileSplitterBolt.TIMESTAMP_TUPLE_FIELD;
/**
* A Storm bolt that is responsible for building a profile.
*
* <p>This bolt maintains the state required to build a Profile. When the window
* period expires, the data is summarized as a {@link ProfileMeasurement}, all state is
* flushed, and the {@link ProfileMeasurement} is emitted.
*/
public class ProfileBuilderBolt extends BaseWindowedBolt implements Reloadable {
protected static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private OutputCollector collector;
/**
* The URL to connect to Zookeeper.
*/
private String zookeeperUrl;
/**
* The Zookeeper client connection.
*/
protected CuratorFramework zookeeperClient;
/**
* The Zookeeper cache.
*/
protected ZKCache zookeeperCache;
/**
* Manages configuration for the Profiler.
*/
private ProfilerConfigurations configurations;
/**
* The duration of each profile period in milliseconds.
*/
private long periodDurationMillis;
/**
* The duration of Storm's event window.
*/
private long windowDurationMillis;
/**
* If a message has not been applied to a Profile in this number of milliseconds,
* the Profile will be forgotten and its resources will be cleaned up.
*
* <p>WARNING: The TTL must be at least greater than the period duration.
*/
private long profileTimeToLiveMillis;
/**
* The maximum number of {@link MessageRoute} routes that will be maintained by
* this bolt. After this value is exceeded, lesser used routes will be evicted
* from the internal cache.
*/
private long maxNumberOfRoutes;
/**
* Distributes messages to the profile builders.
*
* <p>Since expired profiles are flushed on a separate thread, all access to this
* {@code MessageDistributor} needs to be protected.
*/
private MessageDistributor messageDistributor;
/**
* Parses JSON messages.
*/
private transient JSONParser parser;
/**
* Responsible for emitting {@link ProfileMeasurement} values.
*
* <p>The {@link ProfileMeasurement} values generated by a profile can be written to
* multiple endpoints like HBase or Kafka. Each endpoint is handled by a separate
* {@link ProfileMeasurementEmitter}.
*/
private List<ProfileMeasurementEmitter> emitters;
/**
* Signals when it is time to flush the active profiles.
*/
private FlushSignal activeFlushSignal;
/**
* An executor that flushes expired profiles at a regular interval on a separate
* thread.
*
* <p>Flushing expired profiles ensures that any profiles that stop receiving messages
* for an extended period of time will continue to be flushed.
*
* <p>This introduces concurrency issues as the bolt is no longer single threaded. Due
* to this, all access to the {@code MessageDistributor} needs to be protected.
*/
private transient ScheduledExecutorService flushExpiredExecutor;
/**
 * Creates the bolt with an initially empty set of emitters.
 */
public ProfileBuilderBolt() {
  this.emitters = new ArrayList<>();
}

/**
 * Validates the timing configuration and initializes the bolt's runtime state:
 * the message distributor, the Profiler configuration (via Zookeeper) and the
 * background flush of expired profiles.
 *
 * <p>All durations must be positive, the profile TTL must be at least one
 * period, and the period must be a whole multiple of the event window.
 *
 * @param stormConf the Storm configuration
 * @param context   the topology context
 * @param collector the collector used to emit tuples and report errors
 * @throws IllegalArgumentException if the timing configuration is inconsistent
 */
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
  super.prepare(stormConf, context, collector);

  // fixed: the first two messages previously said ">= 0" although the checks require "> 0"
  if(periodDurationMillis <= 0) {
    throw new IllegalArgumentException("expect 'profiler.period.duration' > 0");
  }
  if(profileTimeToLiveMillis <= 0) {
    throw new IllegalArgumentException("expect 'profiler.ttl' > 0");
  }
  if(profileTimeToLiveMillis < periodDurationMillis) {
    throw new IllegalArgumentException("expect 'profiler.ttl' >= 'profiler.period.duration'");
  }
  if(maxNumberOfRoutes <= 0) {
    throw new IllegalArgumentException("expect 'profiler.max.routes.per.bolt' > 0");
  }
  if(windowDurationMillis <= 0) {
    throw new IllegalArgumentException("expect 'profiler.window.duration' > 0");
  }
  if(windowDurationMillis > periodDurationMillis) {
    throw new IllegalArgumentException("expect 'profiler.period.duration' >= 'profiler.window.duration'");
  }
  if(periodDurationMillis % windowDurationMillis != 0) {
    throw new IllegalArgumentException("expect 'profiler.period.duration' % 'profiler.window.duration' == 0");
  }

  this.collector = collector;
  this.parser = new JSONParser();
  this.messageDistributor = new DefaultMessageDistributor(periodDurationMillis, profileTimeToLiveMillis, maxNumberOfRoutes);
  this.configurations = new ProfilerConfigurations();
  this.activeFlushSignal = new FixedFrequencyFlushSignal(periodDurationMillis);
  setupZookeeper();
  startFlushingExpiredProfiles();
}
/**
 * Releases the external resources held by the bolt: the Zookeeper cache and
 * client and the background executor that flushes expired profiles.
 */
@Override
public void cleanup() {
  // best-effort shutdown; a failure is logged rather than propagated
  try {
    zookeeperCache.close();
    zookeeperClient.close();
    flushExpiredExecutor.shutdown();
  } catch(Throwable e) {
    LOG.error("Exception when cleaning up", e);
  }
}

/**
 * Setup connectivity to Zookeeper which provides the necessary configuration for the bolt.
 *
 * <p>The client and cache are only created when they are still null -- presumably
 * so a pre-injected instance is reused (TODO confirm); any failure is rethrown
 * as a RuntimeException to fail the bolt fast.
 */
private void setupZookeeper() {
  try {
    if (zookeeperClient == null) {
      RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
      zookeeperClient = CuratorFrameworkFactory.newClient(zookeeperUrl, retryPolicy);
    }
    zookeeperClient.start();

    // this is temporary to ensure that any validation passes. the individual bolt
    // will reinitialize stellar to dynamically pull from zookeeper.
    ConfigurationsUtils.setupStellarStatically(zookeeperClient);

    if (zookeeperCache == null) {
      // apply configuration when nodes are added/updated, remove it on delete
      ConfigurationsUpdater<ProfilerConfigurations> updater = createUpdater();
      SimpleEventListener listener = new SimpleEventListener.Builder()
              .with( updater::update, TreeCacheEvent.Type.NODE_ADDED, TreeCacheEvent.Type.NODE_UPDATED)
              .with( updater::delete, TreeCacheEvent.Type.NODE_REMOVED)
              .build();
      zookeeperCache = new ZKCache.Builder()
              .withClient(zookeeperClient)
              .withListener(listener)
              .withRoot(Constants.ZOOKEEPER_TOPOLOGY_ROOT)
              .build();
      // pull the current configuration once before relying on cache events
      updater.forceUpdate(zookeeperClient);
      zookeeperCache.start();
    }
  } catch (Exception e) {
    LOG.error(e.getMessage(), e);
    throw new RuntimeException(e);
  }
}
/**
 * @return the updater that applies Zookeeper configuration changes to this bolt
 */
protected ConfigurationsUpdater<ProfilerConfigurations> createUpdater() {
  return new ProfilerUpdater(this, this::getConfigurations);
}

/**
 * @return the current Profiler configuration held by this bolt
 */
public ProfilerConfigurations getConfigurations() {
  return configurations;
}

/**
 * Callback invoked when the configuration is reloaded; this bolt requires no
 * additional handling beyond the updater's own bookkeeping.
 */
@Override
public void reloadCallback(String name, ConfigurationType type) {
  // nothing to do
}
/**
 * Declares one output stream per configured emitter.
 *
 * @param declarer used to declare the output streams
 * @throws IllegalStateException if no emitters have been configured, since the
 *         bolt would otherwise silently produce no output
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
  if(emitters.isEmpty()) {
    throw new IllegalStateException("At least one destination handler must be defined.");
  }

  // allow each emitter to define its own stream
  emitters.forEach(emitter -> emitter.declareOutputFields(declarer));
}
/**
 * Builds a Stellar execution context backed by the Zookeeper client and the
 * global configuration.
 *
 * @return a new Stellar {@link Context}
 */
private Context getStellarContext() {
  Map<String, Object> global = getConfigurations().getGlobalConfig();
  // the global configuration doubles as the Stellar configuration
  return new Context.Builder()
          .with(Context.Capabilities.ZOOKEEPER_CLIENT, () -> zookeeperClient)
          .with(Context.Capabilities.GLOBAL_CONFIG, () -> global)
          .with(Context.Capabilities.STELLAR_CONFIG, () -> global)
          .build();
}
/**
 * Called by Storm with each window of tuples. Applies every tuple in the
 * window to the profiles and flushes the active profiles when the flush
 * signal indicates it is time.
 *
 * <p>Any error is reported to the collector rather than allowed to kill the bolt.
 *
 * @param window the current window of tuples
 */
@Override
public void execute(TupleWindow window) {
  LOG.debug("Tuple window contains {} tuple(s), {} expired, {} new",
          CollectionUtils.size(window.get()),
          CollectionUtils.size(window.getExpired()),
          CollectionUtils.size(window.getNew()));

  try {
    // handle each tuple in the window
    for(Tuple tuple : window.get()) {
      handleMessage(tuple);
    }

    // time to flush active profiles?
    if(activeFlushSignal.isTimeToFlush()) {
      flushActive();
    }

  } catch (Throwable e) {
    LOG.error("Unexpected error", e);
    collector.reportError(e);
  }
}
/**
 * Flush all active profiles.
 *
 * <p>Resets the flush signal first, then flushes the distributor and emits the
 * resulting measurements while holding the distributor lock, so no message can be
 * distributed concurrently with the flush.
 */
protected void flushActive() {
    activeFlushSignal.reset();
    // flush the active profiles
    List<ProfileMeasurement> measurements;
    synchronized(messageDistributor) {
        measurements = messageDistributor.flush();
        emitMeasurements(measurements);
    }
    LOG.debug("Flushed active profiles and found {} measurement(s).", measurements.size());
}
/**
 * Flushes all expired profiles.
 *
 * <p>If a profile has not received a message for an extended period of time then it is
 * marked as expired. Periodically we need to flush these expired profiles to ensure
 * that their state is not lost.
 *
 * <p>Runs on the scheduled flush-expired thread (see startFlushingExpiredProfiles),
 * hence the synchronization against concurrent distribution/active flushes.
 */
protected void flushExpired() {
    // flush the expired profiles
    List<ProfileMeasurement> measurements;
    synchronized (messageDistributor) {
        measurements = messageDistributor.flushExpired();
        emitMeasurements(measurements);
    }
    LOG.debug("Flushed expired profiles and found {} measurement(s).", measurements.size());
}
/**
 * Handles the processing of a single tuple.
 *
 * <p>Extracts message, profile definition, entity and timestamp from the tuple,
 * advances the flush signal's clock, and hands the message to the distributor.
 *
 * @param input The tuple containing a telemetry message.
 * @throws IllegalStateException if a required tuple field is missing or invalid
 *         (thrown by getField)
 */
private void handleMessage(Tuple input) {
    // crack open the tuple
    JSONObject message = getField(MESSAGE_TUPLE_FIELD, input, JSONObject.class);
    ProfileConfig definition = getField(PROFILE_TUPLE_FIELD, input, ProfileConfig.class);
    String entity = getField(ENTITY_TUPLE_FIELD, input, String.class);
    Long timestamp = getField(TIMESTAMP_TUPLE_FIELD, input, Long.class);
    // keep track of time
    activeFlushSignal.update(timestamp);
    // distribute the message
    MessageRoute route = new MessageRoute(definition, entity);
    synchronized (messageDistributor) {
        messageDistributor.distribute(message, timestamp, route, getStellarContext());
    }
    LOG.debug("Message distributed: profile={}, entity={}, timestamp={}", definition.getProfile(), entity, timestamp);
}
/**
 * Handles the {@code ProfileMeasurement}s that are created when a profile is flushed.
 *
 * <p>Each measurement is offered to every registered emitter; an emitter decides
 * itself whether and on which stream to emit it.
 *
 * @param measurements The measurements to handle.
 */
private void emitMeasurements(List<ProfileMeasurement> measurements) {
    // flush each profile
    for(ProfileMeasurement measurement: measurements) {
        // allow each 'emitter' to emit the measurement
        for (ProfileMeasurementEmitter emitter : emitters) {
            emitter.emit(measurement, collector);
            LOG.debug("Measurement emitted; stream={}, profile={}, entity={}, value={}, start={}, end={}, duration={}, period={}",
                    emitter.getStreamId(),
                    measurement.getProfileName(),
                    measurement.getEntity(),
                    measurement.getProfileValue(),
                    measurement.getPeriod().getStartTimeMillis(),
                    measurement.getPeriod().getEndTimeMillis(),
                    measurement.getPeriod().getDurationMillis(),
                    measurement.getPeriod().getPeriod());
        }
    }
    LOG.debug("Emitted {} measurement(s).", measurements.size());
}
/**
 * Retrieves an expected field from a Tuple. If the field is missing an exception is thrown to
 * indicate a fatal error.
 * @param fieldName The name of the field.
 * @param tuple The tuple from which to retrieve the field.
 * @param clazz The type of the field value.
 * @param <T> The type of the field value.
 * @return the field value converted to {@code clazz}; never {@code null}
 * @throws IllegalStateException if the field is absent or cannot be converted
 */
private <T> T getField(String fieldName, Tuple tuple, Class<T> clazz) {
    // ConversionUtils returns null both for a missing field and a failed conversion
    T value = ConversionUtils.convert(tuple.getValueByField(fieldName), clazz);
    if(value == null) {
        throw new IllegalStateException(format("Invalid tuple: missing or invalid field '%s'", fieldName));
    }
    return value;
}
/**
 * Creates a separate thread that regularly flushes expired profiles.
 *
 * <p>The task is wrapped in a catch-all because
 * {@code ScheduledExecutorService.scheduleAtFixedRate} suppresses all subsequent
 * executions once a task throws; without the guard a single failed flush would
 * silently stop expired profiles from ever being flushed again.
 */
private void startFlushingExpiredProfiles() {
    flushExpiredExecutor = Executors.newSingleThreadScheduledExecutor();
    flushExpiredExecutor.scheduleAtFixedRate(() -> {
        try {
            flushExpired();
        } catch (Throwable t) {
            // log and keep the schedule alive
            LOG.error("Unable to flush expired profiles", t);
        }
    }, 0, profileTimeToLiveMillis, TimeUnit.MILLISECONDS);
}
/**
 * Records the tumbling window duration before delegating to the parent class.
 *
 * @param duration the tumbling window duration
 * @return this bolt, configured with the given window
 */
@Override
public BaseWindowedBolt withTumblingWindow(BaseWindowedBolt.Duration duration) {
    // need to capture the window duration to validate it along with other profiler settings
    this.windowDurationMillis = duration.value;
    return super.withTumblingWindow(duration);
}
/* --- getters and fluent with-setters used when building the topology --- */

/** @return duration in milliseconds of a single profile period */
public long getPeriodDurationMillis() {
    return periodDurationMillis;
}

/** Sets the profile period duration in milliseconds. */
public ProfileBuilderBolt withPeriodDurationMillis(long periodDurationMillis) {
    this.periodDurationMillis = periodDurationMillis;
    return this;
}

/** Sets the profile period duration in the given time unit. */
public ProfileBuilderBolt withPeriodDuration(int duration, TimeUnit units) {
    return withPeriodDurationMillis(units.toMillis(duration));
}

/** Sets the time-to-live after which an idle profile is considered expired, in milliseconds. */
public ProfileBuilderBolt withProfileTimeToLiveMillis(long timeToLiveMillis) {
    this.profileTimeToLiveMillis = timeToLiveMillis;
    return this;
}

/** @return duration in milliseconds of the tumbling window driving this bolt */
public long getWindowDurationMillis() {
    return windowDurationMillis;
}

/** Sets the profile time-to-live in the given time unit. */
public ProfileBuilderBolt withProfileTimeToLive(int duration, TimeUnit units) {
    return withProfileTimeToLiveMillis(units.toMillis(duration));
}

/** Registers a destination emitter; at least one is required. */
public ProfileBuilderBolt withEmitter(ProfileMeasurementEmitter emitter) {
    this.emitters.add(emitter);
    return this;
}

/** @return the distributor that routes messages to profile builders */
public MessageDistributor getMessageDistributor() {
    return messageDistributor;
}

/** Sets the Zookeeper connect string. */
public ProfileBuilderBolt withZookeeperUrl(String zookeeperUrl) {
    this.zookeeperUrl = zookeeperUrl;
    return this;
}

/** Sets a pre-built Curator client; otherwise one is created lazily. */
public ProfileBuilderBolt withZookeeperClient(CuratorFramework zookeeperClient) {
    this.zookeeperClient = zookeeperClient;
    return this;
}

/** Sets a pre-built Zookeeper cache; otherwise one is created lazily. */
public ProfileBuilderBolt withZookeeperCache(ZKCache zookeeperCache) {
    this.zookeeperCache = zookeeperCache;
    return this;
}

/** Sets the profiler configuration container. */
public ProfileBuilderBolt withProfilerConfigurations(ProfilerConfigurations configurations) {
    this.configurations = configurations;
    return this;
}

/** Sets the maximum number of message routes. */
public ProfileBuilderBolt withMaxNumberOfRoutes(long maxNumberOfRoutes) {
    this.maxNumberOfRoutes = maxNumberOfRoutes;
    return this;
}

/** Sets the signal that decides when active profiles are flushed. */
public ProfileBuilderBolt withFlushSignal(FlushSignal flushSignal) {
    this.activeFlushSignal = flushSignal;
    return this;
}

/** Sets the message distributor. */
public ProfileBuilderBolt withMessageDistributor(MessageDistributor messageDistributor) {
    this.messageDistributor = messageDistributor;
    return this;
}
}
| apache-2.0 |
profesorfalken/jSensors | src/main/java/com/profesorfalken/jsensors/model/sensors/Fan.java | 864 | /*
* Copyright 2016-2018 Javier Garcia Alonso.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.profesorfalken.jsensors.model.sensors;
/**
 * Immutable value object holding a single fan sensor reading.
 *
 * @author Javier Garcia Alonso
 */
public class Fan {

    /** Human-readable sensor name. */
    public final String name;

    /** Value reported by the fan sensor. */
    public final Double value;

    /**
     * Creates a fan reading.
     *
     * @param name  sensor name
     * @param value reported sensor value
     */
    public Fan(String name, Double value) {
        this.value = value;
        this.name = name;
    }
}
| apache-2.0 |
method76/android-MarvelApp | app/src/main/java/com/method76/comics/marvel/data/MarvelCharacters.java | 436 | package com.method76.comics.marvel.data;
import com.method76.comics.marvel.data.substr.MarvelCharacter;
import java.util.List;
/**
 * Container for the list of character results — presumably deserialized from a
 * Marvel API characters response by a JSON mapper (TODO confirm against caller).
 *
 * Created by Sungjoon Kim on 2016-01-30.
 */
public class MarvelCharacters {
    // character entries of the response; may be null until set by the mapper
    private List<MarvelCharacter> results;

    public List<MarvelCharacter> getResults() {
        return results;
    }

    public void setResults(List<MarvelCharacter> results) {
        this.results = results;
    }
}
| apache-2.0 |
stevenhva/InfoLearn_OpenOLAT | src/main/java/org/olat/ims/qti/statistics/manager/QTIStatisticsManagerImpl.java | 18480 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) frentix GmbH<br>
* http://www.frentix.com<br>
* <p>
*/
package org.olat.ims.qti.statistics.manager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.persistence.TypedQuery;
import org.olat.basesecurity.SecurityGroupMembershipImpl;
import org.olat.core.commons.persistence.DB;
import org.olat.course.assessment.AssessmentManager;
import org.olat.ims.qti.editor.beecom.objects.Item;
import org.olat.ims.qti.editor.beecom.objects.Response;
import org.olat.ims.qti.statistics.QTIStatisticSearchParams;
import org.olat.ims.qti.statistics.QTIStatisticsManager;
import org.olat.ims.qti.statistics.model.StatisticsItem;
import org.olat.ims.qti.statistics.model.QTIStatisticResult;
import org.olat.ims.qti.statistics.model.QTIStatisticResultSet;
import org.olat.ims.qti.statistics.model.StatisticAnswerOption;
import org.olat.ims.qti.statistics.model.StatisticAssessment;
import org.olat.ims.qti.statistics.model.StatisticChoiceOption;
import org.olat.ims.qti.statistics.model.StatisticItem;
import org.olat.ims.qti.statistics.model.StatisticKPrimOption;
import org.olat.ims.qti.statistics.model.StatisticSurveyItem;
import org.olat.ims.qti.statistics.model.StatisticSurveyItemResponse;
import org.olat.properties.Property;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
*
* @author srosse, stephane.rosse@frentix.com, http://www.frentix.com
*
*/
@Service
public class QTIStatisticsManagerImpl implements QTIStatisticsManager {
@Autowired
private DB dbInstance;
/**
 * Appends the where-clause shared by all statistics queries: restricts to the
 * requested resource / sub-path and keeps, per identity, only the most recent
 * result set (max lastModified). Optionally narrows to members of the given
 * security groups and, when all users' assessments may be viewed, to identities
 * that have score/passed properties on the course node.
 *
 * <p>Parameter binding happens in decorateRSetQuery; both must stay in sync.
 */
private StringBuilder decorateRSet(StringBuilder sb, QTIStatisticSearchParams searchParams) {
    sb.append(" where rset.olatResource=:resourceId and rset.olatResourceDetail=:resSubPath")
      .append(" and rset.lastModified = (select max(r2set.lastModified) from ").append(QTIStatisticResultSet.class.getName()).append(" r2set")
      .append(" where r2set.identityKey=rset.identityKey and r2set.olatResource=rset.olatResource and r2set.olatResourceDetail=rset.olatResourceDetail")
      .append(" )");
    // restrict to members of the selected security groups, if any
    if(searchParams.getLimitToSecGroups() != null && searchParams.getLimitToSecGroups().size() > 0) {
        sb.append(" and rset.identityKey in ( select secMembership.identity.key from ").append(SecurityGroupMembershipImpl.class.getName()).append(" secMembership ")
          .append(" where secMembership.securityGroup in (:secGroups)")
          .append(" )");
    }
    // restrict to identities with assessment properties (score/passed) on this course node
    if(searchParams.isMayViewAllUsersAssessments()) {
        sb.append(" and rset.identityKey in ( select p.identity.key from ").append(Property.class.getName()).append(" p ")
          .append(" where p.resourceTypeId=:resourceId and p.resourceTypeName='CourseModule'")
          .append(" and p.name in ('").append(AssessmentManager.SCORE).append("','").append(AssessmentManager.PASSED).append("')")
          .append(" )");
    }
    return sb;
}
/**
 * Binds the query parameters referenced by the where-clause built in decorateRSet.
 */
private void decorateRSetQuery(TypedQuery<?> query, QTIStatisticSearchParams searchParams) {
    query.setParameter("resourceId", searchParams.getResourceableId())
         .setParameter("resSubPath", searchParams.getResSubPath());
    if(searchParams.getLimitToSecGroups() != null && searchParams.getLimitToSecGroups().size() > 0) {
        query.setParameter("secGroups", searchParams.getLimitToSecGroups());
    }
}
/**
 * Computes the overall assessment statistics (pass/fail counts, score mean,
 * range, standard deviation, median, mode, and per-participant durations in
 * minutes) over the newest result set of every matching participant.
 *
 * @param searchParams restricts which result sets are evaluated
 * @return the aggregated assessment statistics
 */
@Override
public StatisticAssessment getAssessmentStatistics(QTIStatisticSearchParams searchParams) {
    StringBuilder sb = new StringBuilder();
    sb.append("select rset.score, rset.duration, rset.isPassed from ").append(QTIStatisticResultSet.class.getName()).append(" rset ");
    decorateRSet(sb, searchParams);
    sb.append(" order by rset.duration asc");
    TypedQuery<Object[]> rawDataQuery = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Object[].class);
    decorateRSetQuery(rawDataQuery, searchParams);
    List<Object[]> rawDatas = rawDataQuery.getResultList();

    int numOfPassed = 0;
    int numOfFailed = 0;
    double totalDuration = 0.0;
    double maxScore = 0.0;
    double minScore = Double.MAX_VALUE;
    double[] scores = new double[rawDatas.size()];
    long[] durationMinutes = new long[rawDatas.size()];
    // NOTE(review): minDuration/maxDuration are computed below but never stored
    // on the result object — confirm whether they are still needed
    long minDuration = Integer.MAX_VALUE;
    long maxDuration = 0;
    int dataPos = 0;
    for(Object[] rawData:rawDatas) {
        // rawData layout: [0]=score (Float), [1]=duration ms (Long), [2]=isPassed (Boolean)
        Boolean passed = (Boolean)rawData[2];
        if(passed != null) {
            if(passed.booleanValue()) {
                numOfPassed++;
            } else {
                numOfFailed++;
            }
        }
        Float score = (Float)rawData[0];
        if(score != null) {
            double scored = score.doubleValue();
            scores[dataPos] = scored;
            maxScore = Math.max(maxScore, scored);
            minScore = Math.min(minScore, scored);
        }
        Long duration = (Long)rawData[1];
        if(duration != null) {
            double durationd = duration.doubleValue();
            // durations are stored in milliseconds; report per-participant minutes
            long durationMinute = Math.round(durationd / 60000.0d);
            durationMinutes[dataPos] = durationMinute;
            totalDuration += durationd;
            minDuration = Math.min(minDuration, durationMinute);
            maxDuration = Math.max(maxDuration, durationMinute);
        }
        dataPos++;
    }

    Statistics statisticsHelper = new Statistics(scores);
    int numOfParticipants = rawDatas.size();
    StatisticAssessment stats = new StatisticAssessment();
    stats.setNumOfParticipants(numOfParticipants);
    stats.setNumOfPassed(numOfPassed);
    stats.setNumOfFailed(numOfFailed);
    long averageDuration = Math.round(totalDuration / numOfParticipants);
    stats.setAverageDuration(averageDuration);
    stats.setAverage(statisticsHelper.getMean());
    double range = maxScore - minScore;
    stats.setRange(range);
    stats.setMaxScore(maxScore);
    stats.setMinScore(minScore);
    stats.setStandardDeviation(statisticsHelper.getStdDev());
    stats.setMedian(statisticsHelper.median());
    stats.setMode(statisticsHelper.mode());
    stats.setDurations(durationMinutes);
    stats.setScores(scores);
    return stats;
}
/**
 * Loads the newest result set of every matching participant, ordered by
 * ascending test duration.
 *
 * @param searchParams restricts which result sets are loaded
 * @return the matching result sets
 */
@Override
public List<QTIStatisticResultSet> getAllResultSets(QTIStatisticSearchParams searchParams) {
    StringBuilder sb = new StringBuilder();
    sb.append("select rset from qtistatsresultset rset ");
    decorateRSet(sb, searchParams);
    sb.append(" order by rset.duration asc");
    TypedQuery<QTIStatisticResultSet> query = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), QTIStatisticResultSet.class);
    decorateRSetQuery(query, searchParams);
    return query.getResultList();
}
/**
 * Loads all individual item results belonging to the newest result set of every
 * matching participant.
 *
 * @param searchParams restricts which results are loaded
 * @return the matching item results
 */
@Override
public List<QTIStatisticResult> getResults(QTIStatisticSearchParams searchParams) {
    StringBuilder sb = new StringBuilder();
    sb.append("select res from qtistatsresult res ")
      .append(" inner join res.resultSet rset");
    decorateRSet(sb, searchParams);
    TypedQuery<QTIStatisticResult> query = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), QTIStatisticResult.class);
    decorateRSetQuery(query, searchParams);
    return query.getResultList();
}
/**
 * Aggregates per-item statistics: number of answers, number of answers that
 * reached the item's maximum score, and the average score both per answering
 * participant and relative to the total number of participants.
 *
 * @param items all items of the test
 * @param searchParams restricts which results are evaluated
 * @param numOfParticipants total participant count, denominator of the second average
 * @return one StatisticItem per item; items without any answer get -1 placeholders
 */
@Override
public List<StatisticItem> getStatisticPerItem(List<Item> items, QTIStatisticSearchParams searchParams,
        double numOfParticipants) {
    StringBuilder sb = new StringBuilder();
    sb.append("select res.itemIdent, res.score, count(res.key) from qtistatsresult res ")
      .append(" inner join res.resultSet rset");
    decorateRSet(sb, searchParams);
    sb.append(" group by res.itemIdent, res.score");

    TypedQuery<Object[]> query = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Object[].class);
    decorateRSetQuery(query, searchParams);
    List<Object[]> results = query.getResultList();

    // accumulate per item over the (itemIdent, score, count) rows
    Map<String, StatisticItemHelper> itemToHelpers = new HashMap<>();
    for (Object[] result : results) {
        String itemIdent = (String)result[0];
        Float score = (Float)result[1];
        Long count = (Long)result[2];
        if(count == null || score == null || itemIdent == null) continue;

        StatisticItemHelper helper = itemToHelpers.get(itemIdent);
        if(helper == null) {
            helper = new StatisticItemHelper();
            itemToHelpers.put(itemIdent, helper);
        }
        helper.count += count.longValue();
        helper.totalScore += (count.longValue() * score.doubleValue());

        // an answer counts as correct when its score equals the item's max score
        // (with a small tolerance for float rounding)
        for (Item item:items) {
            if(item.getIdent().equals(itemIdent)) {
                double maxValue = item.getQuestion().getMaxValue();
                if(Math.abs(score.doubleValue() - maxValue) < 0.0001) {
                    helper.countCorrectAnswers += count.longValue();
                }
            }
        }
    }

    List<StatisticItem> averages = new ArrayList<>();
    for (Item item:items) {
        StatisticItemHelper helper = itemToHelpers.get(item.getIdent());
        if(helper == null) {
            // no recorded answer for this item
            averages.add(new StatisticItem(item, -1.0, -1.0, -1, -1));
        } else {
            long numOfAnswersItem = helper.count;
            long numOfCorrectAnswers = helper.countCorrectAnswers;
            double average = (helper.totalScore / helper.count);
            double averageParticipants = (helper.totalScore / numOfParticipants);
            averages.add(new StatisticItem(item, average, averageParticipants, numOfAnswersItem, numOfCorrectAnswers));
        }
    }
    return averages;
}
/** Mutable per-item accumulator used while grouping raw query rows. */
private static class StatisticItemHelper {
    // number of answers recorded for the item
    private long count;
    // sum of all scores achieved for the item
    private double totalScore;
    // number of answers that reached the item's maximum score
    private long countCorrectAnswers;
}
/**
 * Computes aggregated statistics for a single item: number of results, average
 * score and duration, difficulty (average score / max score), and correct vs.
 * incorrect answer counts.
 *
 * @param itemIdent identifier of the item
 * @param maxScore maximum achievable score of the item
 * @param searchParams restricts which results are evaluated
 * @return the item statistics; an empty StatisticsItem if there are no results
 */
@Override
public StatisticsItem getItemStatistics(String itemIdent, double maxScore, QTIStatisticSearchParams searchParams) {
    StringBuilder sb = new StringBuilder();
    sb.append("select res.score, count(res.key), avg(res.duration) from qtistatsresult res ")
      .append(" inner join res.resultSet rset");
    decorateRSet(sb, searchParams);
    // only answered results (duration > 0), grouped by achieved score
    sb.append(" and res.itemIdent=:itemIdent and res.duration > 0 group by res.score");

    TypedQuery<Object[]> query = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Object[].class)
            .setParameter("itemIdent", itemIdent);
    decorateRSetQuery(query, searchParams);
    List<Object[]> results = query.getResultList();
    if(results.isEmpty()) {
        return new StatisticsItem();
    }

    int totalResults = 0;
    double totalScore = 0.0;
    double totalDuration = 0.0;
    long numOfCorrectAnswers = 0;
    long numOfIncorrectAnswers = 0;
    for(Object[] result:results) {
        // result layout: [0]=score (Float), [1]=count (Long), [2]=avg duration (Double)
        long numOfResults = ((Long)result[1]).longValue();
        //average
        double score = ((Float)result[0]).doubleValue();
        totalScore += (score * numOfResults);
        totalResults += numOfResults;
        // NOTE(review): unlike getStatisticPerItem this does not use Math.abs,
        // so a score above maxScore would also count as correct — confirm intended
        if((maxScore - score) < 0.0001) {
            numOfCorrectAnswers += numOfResults;
        } else {
            numOfIncorrectAnswers += numOfResults;
        }
        double averageDuration = ((Double)result[2]).doubleValue();
        totalDuration += (averageDuration * numOfResults);
    }

    double averageScore = totalScore / totalResults;
    double difficulty = averageScore / maxScore;
    double averageDuration = totalDuration / totalResults;

    StatisticsItem stats = new StatisticsItem();
    stats.setAverageDuration(Math.round(averageDuration));
    stats.setAverageScore(averageScore);
    stats.setNumOfResults(totalResults);
    stats.setDifficulty(difficulty);
    stats.setNumOfCorrectAnswers(numOfCorrectAnswers);
    stats.setNumOfIncorrectAnswers(numOfIncorrectAnswers);
    return stats;
}
/**
 * Calculates how many participants selected answer option 1 and/or option 2
 * and/or option 3...
 *
 * @param item the single choice item
 * @param searchParams restricts which results are evaluated
 * @return one StatisticChoiceOption per response option with its selection count
 */
@Override
public List<StatisticChoiceOption> getNumOfAnswersPerSingleChoiceAnswerOption(Item item, QTIStatisticSearchParams searchParams) {
    // NOTE(review): getStatisticAnswerOptionsOfItem may return null when there are
    // no recorded answers, which would NPE in the loop below — confirm callers
    // never reach this with an unanswered item
    List<StatisticAnswerOption> answerToNumberList = getStatisticAnswerOptionsOfItem(item.getIdent(), searchParams);
    List<Response> answerOptions = item.getQuestion().getResponses();
    List<StatisticChoiceOption> numOfAnswersPerOption = new ArrayList<>();
    for(int i=0; i<answerOptions.size(); i++) {
        Response response = answerOptions.get(i);
        String responseIdent = response.getIdent();
        // count every raw answer string that contains this option's identifier
        long num = 0;
        for(StatisticAnswerOption answerToNumber:answerToNumberList) {
            String answer = answerToNumber.getAnswer();
            if(answer.indexOf(responseIdent) >= 0) {
                num += answerToNumber.getCount();
            }
        }
        numOfAnswersPerOption.add(new StatisticChoiceOption(response, num));
    }
    return numOfAnswersPerOption;
}
/**
 * Counts, per answer option of a multiple choice question, how many participants
 * selected that option.
 *
 * @param item the multiple choice item
 * @param searchParams restricts which results are evaluated
 * @return one StatisticChoiceOption per response option with its selection count
 */
@Override
public List<StatisticChoiceOption> getNumOfRightAnsweredMultipleChoice(Item item, QTIStatisticSearchParams searchParams) {
    List<StatisticAnswerOption> answerToNumberList = getStatisticAnswerOptionsOfItem(item.getIdent(), searchParams);
    if(answerToNumberList == null) {
        // the helper returns null when no answers exist at all; guard against the
        // NPE the unconditional iteration below would otherwise cause
        answerToNumberList = new ArrayList<>();
    }
    List<Response> responses = item.getQuestion().getResponses();
    List<StatisticChoiceOption> percentageRightAnswered = new ArrayList<>();
    for (Response response:responses) {
        String answerIdent = response.getIdent();
        // count every raw answer string that contains this option's identifier
        long num = 0;
        for(StatisticAnswerOption answerToNumber:answerToNumberList) {
            String answer = answerToNumber.getAnswer();
            if(answer.indexOf(answerIdent) >= 0) {
                num += answerToNumber.getCount();
            }
        }
        percentageRightAnswered.add(new StatisticChoiceOption(response, num));
    }
    return percentageRightAnswered;
}
/**
 * For each kprim response option, counts how many participants answered it
 * correctly, incorrectly, or not at all. Raw answer strings contain per-option
 * flags of the form "&lt;ident&gt;:correct" / "&lt;ident&gt;:wrong".
 *
 * @param item the kprim item
 * @param searchParams restricts which results are evaluated
 * @return one StatisticKPrimOption per response option
 */
@Override
public List<StatisticKPrimOption> getNumbersInKPrim(Item item, QTIStatisticSearchParams searchParams) {
    // NOTE(review): may be null when there are no recorded answers, which would
    // NPE in the inner loop below — confirm callers guard against this
    List<StatisticAnswerOption> rawDatas = getStatisticAnswerOptionsOfItem(item.getIdent(), searchParams);
    List<Response> responses = item.getQuestion().getResponses();
    List<StatisticKPrimOption> kprimPoints = new ArrayList<>();
    for(Response response:responses) {
        String answerIdent = response.getIdent();
        boolean isCorrect = response.isCorrect();
        // a participant answered correctly when the recorded flag matches the
        // option's own correct/wrong classification
        String rightFlag = answerIdent + ":" + (isCorrect ? "correct" : "wrong");
        String wrongFlag = answerIdent + ":" + (isCorrect ? "wrong" : "correct");

        long numCorrect = 0;
        long numIncorrect = 0;
        long numUnanswered = 0;
        for(StatisticAnswerOption rawData:rawDatas) {
            String answer = rawData.getAnswer();
            if(answer.indexOf(rightFlag) >= 0) {
                numCorrect += rawData.getCount();
            } else if(answer.indexOf(wrongFlag) >= 0) {
                numIncorrect += rawData.getCount();
            } else {
                // neither flag present: the option was left unanswered
                numUnanswered += rawData.getCount();
            }
        }
        kprimPoints.add(new StatisticKPrimOption(response, numCorrect, numIncorrect, numUnanswered));
    }
    return kprimPoints;
}
/**
 * Loads the raw answer strings of an item together with how often each distinct
 * answer occurred (only answered results, i.e. duration &gt; 0).
 *
 * @param itemIdent identifier of the item
 * @param searchParams restricts which results are evaluated
 * @return the distinct answers with their counts; an empty list if there are no
 *         results. (Previously this returned {@code null} for "no results",
 *         which caused NullPointerExceptions in callers that iterate the list
 *         unconditionally, e.g. the choice/kprim aggregation methods.)
 */
@Override
public List<StatisticAnswerOption> getStatisticAnswerOptionsOfItem(String itemIdent, QTIStatisticSearchParams searchParams) {
    StringBuilder sb = new StringBuilder();
    sb.append("select res.answer, count(res.key) from qtistatsresult res ")
      .append(" inner join res.resultSet rset");
    decorateRSet(sb, searchParams);
    sb.append(" and res.itemIdent=:itemIdent and res.duration > 0 group by res.answer");

    TypedQuery<Object[]> query = dbInstance.getCurrentEntityManager().createQuery(sb.toString(), Object[].class)
            .setParameter("itemIdent", itemIdent);
    decorateRSetQuery(query, searchParams);
    List<Object[]> results = query.getResultList();

    List<StatisticAnswerOption> answerToNumberList = new ArrayList<>();
    for(Object[] result:results) {
        String answer = (String)result[0];
        Long numOfAnswers = (Long)result[1];
        answerToNumberList.add(new StatisticAnswerOption(answer, numOfAnswers.longValue()));
    }
    return answerToNumberList;
}
/**
 * Loads, for all given survey items, the distinct raw answers with their counts,
 * grouped per item in the order of the query result.
 *
 * @param searchParams restricts which results are evaluated
 * @param items the survey items, used to resolve identifiers and response options
 * @return one StatisticSurveyItem per answered item, or {@code null} if there are
 *         no results at all — callers must null-check
 */
@Override
public List<StatisticSurveyItem> getStatisticAnswerOptions(QTIStatisticSearchParams searchParams, List<Item> items) {
    StringBuilder sb = new StringBuilder();
    sb.append("select res.itemIdent, res.answer, count(res.key) from qtistatsresult res ")
      .append(" inner join res.resultSet rset");
    decorateRSet(sb, searchParams)
      .append(" and res.duration > 0")
      .append(" group by res.itemIdent, res.answer")
      .append(" order by res.itemIdent");

    TypedQuery<Object[]> query = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), Object[].class);
    decorateRSetQuery(query, searchParams);
    List<Object[]> results = query.getResultList();
    if(results.isEmpty()) {
        return null;
    }

    // resolve item identifiers to their Item objects
    Map<String, Item> identToItemMap = new HashMap<>();
    for(Item item:items) {
        identToItemMap.put(item.getIdent(), item);
    }

    // rows are ordered by itemIdent, so a new group starts whenever the ident changes
    StatisticSurveyItem currentItem = null;
    List<StatisticSurveyItem> answerToNumberList = new ArrayList<>();
    for(Object[] result:results) {
        String itemIdent = (String)result[0];
        String answer = (String)result[1];
        Long numOfAnswers = (Long)result[2];
        Item item = identToItemMap.get(itemIdent);
        if(currentItem == null || !currentItem.getItem().getIdent().equals(itemIdent)) {
            currentItem = new StatisticSurveyItem(item);
            answerToNumberList.add(currentItem);
        }
        Response response = findResponses(item, answer);
        currentItem.getResponses().add(new StatisticSurveyItemResponse(response, answer, numOfAnswers));
    }
    return answerToNumberList;
}
/**
 * Finds the response option of the given item whose identifier occurs in the raw
 * answer string.
 *
 * @param item the item whose response options are searched
 * @param answer the raw answer string as stored with the result
 * @return the matching response, or {@code null} if none matches
 */
private Response findResponses(Item item, String answer) {
    List<Response> responses = item.getQuestion().getResponses();
    if(responses != null) {
        for(Response response:responses) {
            // >= 0, not > 0: the identifier may start at position 0 of the answer
            // string; all other answer matching in this class uses indexOf(...) >= 0
            if(answer.indexOf(response.getIdent()) >= 0) {
                return response;
            }
        }
    }
    return null;
}
/**
 * Loads the raw answer strings given for an item (only answered results,
 * i.e. duration &gt; 0).
 *
 * @param itemIdent identifier of the item
 * @param searchParams restricts which results are evaluated
 * @return the raw answer strings
 */
@Override
public List<String> getAnswers(String itemIdent, QTIStatisticSearchParams searchParams) {
    StringBuilder sb = new StringBuilder();
    sb.append("select res.answer from qtistatsresult res ")
      .append(" inner join res.resultSet rset");
    decorateRSet(sb, searchParams);
    sb.append(" and res.itemIdent=:itemIdent and res.duration > 0");

    TypedQuery<String> query = dbInstance.getCurrentEntityManager()
            .createQuery(sb.toString(), String.class)
            .setParameter("itemIdent", itemIdent);
    decorateRSetQuery(query, searchParams);
    return query.getResultList();
}
} | apache-2.0 |
ferstl/pedantic-pom-enforcers | src/test/java/com/github/ferstl/maven/pomenforcers/ErrorReportMatcher.java | 1894 | /*
* Copyright (c) 2012 - 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.ferstl.maven.pomenforcers;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
/**
 * Matcher that shows the {@link ErrorReport} in case of an unexpected failure.
 */
class ErrorReportMatcher extends TypeSafeMatcher<ErrorReport> {

    /** Whether the matched report is expected to contain errors. */
    private final boolean expectedErrors;

    private ErrorReportMatcher(boolean expectedErrors) {
        this.expectedErrors = expectedErrors;
    }

    /** @return a matcher asserting that the report contains errors */
    public static ErrorReportMatcher hasErrors() {
        return new ErrorReportMatcher(true);
    }

    /** @return a matcher asserting that the report contains no errors */
    public static ErrorReportMatcher hasNoErrors() {
        return new ErrorReportMatcher(false);
    }

    @Override
    protected boolean matchesSafely(ErrorReport item) {
        return this.expectedErrors == item.hasErrors();
    }

    @Override
    public void describeTo(Description description) {
        String negation = this.expectedErrors ? "" : "no ";
        description.appendText("There should be " + negation + "errors in the error report");
    }

    @Override
    protected void describeMismatchSafely(ErrorReport item, Description mismatchDescription) {
        String negation = this.expectedErrors ? "no " : "";
        mismatchDescription.appendText("There were " + negation + "errors\n");
        if (!this.expectedErrors) {
            // dump the full report so the unexpected errors are visible
            mismatchDescription.appendValue(item);
        }
    }
}
| apache-2.0 |
Lab41/tinkerpop3 | gremlin-console/src/test/java/com/tinkerpop/gremlin/console/plugin/UtilitiesGremlinPluginTest.java | 2280 | package com.tinkerpop.gremlin.console.plugin;
import com.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;
import org.codehaus.groovy.tools.shell.Groovysh;
import org.codehaus.groovy.tools.shell.IO;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.isA;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
 * Tests for the utilities Gremlin console plugin.
 *
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
public class UtilitiesGremlinPluginTest {
    @Test
    public void shouldPluginToAndDoImports() throws Exception {
        final UtilitiesGremlinPlugin plugin = new UtilitiesGremlinPlugin();
        final SpyPluginAcceptor spy = new SpyPluginAcceptor();
        plugin.pluginTo(spy);
        // the plugin is expected to contribute exactly four imports
        assertEquals(4, spy.getImports().size());
    }
    @Test
    public void shouldFailWithoutUtilitiesPlugin() throws Exception {
        final Groovysh groovysh = new Groovysh();
        try {
            // describeGraph is only defined once the plugin is installed
            groovysh.execute("describeGraph(g.class)");
            fail("Utilities were not loaded - this should fail.");
        } catch (Exception ignored) { }
    }
    @Test
    public void shouldPluginUtilities() throws Exception {
        final UtilitiesGremlinPlugin plugin = new UtilitiesGremlinPlugin();
        final Groovysh groovysh = new Groovysh();
        // expose a toy graph as "g" inside the Groovy shell
        groovysh.getInterp().getContext().setProperty("g", TinkerFactory.createClassic());
        final Map<String,Object> env = new HashMap<>();
        env.put("ConsolePluginAcceptor.io", new IO());
        env.put("ConsolePluginAcceptor.shell", groovysh);
        final SpyPluginAcceptor spy = new SpyPluginAcceptor(groovysh::execute, () -> env);
        plugin.pluginTo(spy);
        // once plugged in, the utility functions must be callable from the shell
        assertThat(groovysh.execute("describeGraph(com.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph)").toString(), containsString("IMPLEMENTATION - com.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph"));
        assertThat(groovysh.execute("clock {g.V().count().next()}"), is(instanceOf(Number.class)));
    }
}
| apache-2.0 |
AndreJCL/JCL | JCL_Android/app/src/main/java/org/jf/dexlib2/dexbacked/instruction/DexBackedInstruction4rcc.java | 3009 | /*
* Copyright 2016, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib2.dexbacked.instruction;
import org.jf.dexlib2.Opcode;
import org.jf.dexlib2.dexbacked.DexBackedDexFile;
import org.jf.dexlib2.dexbacked.reference.DexBackedReference;
import org.jf.dexlib2.iface.instruction.formats.Instruction4rcc;
import org.jf.dexlib2.iface.reference.Reference;
/**
 * A dex-backed instruction in format 4rcc (e.g. invoke-polymorphic/range):
 * code units {@code AA|op BBBB CCCC HHHH}, where AA is the register count,
 * BBBB the primary (method) reference, CCCC the first register of the range,
 * and HHHH the secondary (proto) reference.
 */
public class DexBackedInstruction4rcc extends DexBackedInstruction {
    public DexBackedInstruction4rcc( DexBackedDexFile dexFile,
                                     Opcode opcode,
                                     int instructionStart) {
        super(dexFile, opcode, instructionStart);
    }

    // AA: register count, byte 1 of the instruction
    @Override public int getRegisterCount() {
        return dexFile.readUbyte(instructionStart + 1);
    }

    // CCCC: first register of the range, bytes 4-5
    @Override
    public int getStartRegister() {
        return dexFile.readUshort(instructionStart + 4);
    }

    // BBBB: primary reference, bytes 2-3
    @Override
    public Reference getReference() {
        return DexBackedReference.makeReference(dexFile, opcode.referenceType,
                dexFile.readUshort(instructionStart + 2));
    }

    @Override
    public int getReferenceType() {
        return opcode.referenceType;
    }

    // HHHH: secondary reference, bytes 6-7. Was read at instructionStart + 3,
    // which overlaps the ushort already read at offset 2 and cannot address the
    // fourth code unit of a 4rcc instruction.
    @Override
    public Reference getReference2() {
        return DexBackedReference.makeReference(dexFile, opcode.referenceType2,
                dexFile.readUshort(instructionStart + 6));
    }

    @Override
    public int getReferenceType2() {
        return opcode.referenceType2;
    }
}
| apache-2.0 |
dodaro/ea2014 | Lesson3/src/simple/Computer.java | 268 | package simple;
/**
 * A computer that owns a single {@link CPU} and delegates printing to it.
 */
public class Computer {

    /** The installed CPU; remains null until assigned. */
    private CPU cpu;

    /** Creates a computer with no CPU installed. */
    public Computer() {
    }

    /** Creates a computer with the supplied CPU installed. */
    public Computer(CPU c) {
        this.cpu = c;
    }

    public CPU getCpu() {
        return cpu;
    }

    public void setCpu(CPU cpu) {
        this.cpu = cpu;
    }

    /**
     * Prints the CPU details. Note: dereferences {@code cpu}, so calling this
     * before a CPU has been set results in a NullPointerException.
     */
    public void print() {
        cpu.print();
    }
}
| apache-2.0 |
maritime-web/NoGoService | nogo/src/main/java/dk/dma/nogoservice/service/RemoteWeatherService.java | 3020 | /* Copyright (c) 2011 Danish Maritime Authority.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.dma.nogoservice.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.io.CharStreams;
import dk.dma.common.dto.JSonError;
import dk.dma.common.dto.JsonErrorException;
import dk.dma.dmiweather.dto.GridRequest;
import dk.dma.dmiweather.dto.GridResponse;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.client.DefaultResponseErrorHandler;
import org.springframework.web.client.RestTemplate;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
/**
* @author Klaus Groenbaek
* Created 04/04/17.
*/
@Component
public class RemoteWeatherService implements WeatherService {

    /** Spring REST client backed by a pooled Apache HTTP connection manager. */
    private final RestTemplate template;

    /**
     * Shared JSON mapper used to decode error payloads.
     * Made final: it is fully configured at construction time and ObjectMapper
     * is thread-safe once configured, so there is no reason to leave it mutable.
     */
    private final ObjectMapper mapper = new ObjectMapper();

    /**
     * Base URL of the remote weather service. The request path "grid?..." is
     * appended directly, so the configured value is assumed to end with a
     * slash — TODO confirm against the deployment configuration.
     */
    @Value("${weatherservice.url}")
    private String weatherServiceURL;

    @Autowired
    public RemoteWeatherService(PoolingHttpClientConnectionManager connectionManager) {
        CloseableHttpClient httpClient = HttpClients.custom().setConnectionManager(connectionManager).build();
        template = new RestTemplate(new HttpComponentsClientHttpRequestFactory(httpClient));
        // Translate non-2xx responses into JsonErrorException instead of the
        // default Spring client exceptions.
        template.setErrorHandler(new RemoteErrorHandler());
    }

    /**
     * Posts the grid request to the remote weather service.
     *
     * @param request the area/time grid to fetch weather data for
     * @return the decoded grid response body
     */
    @Override
    public GridResponse getWeather(GridRequest request) {
        ResponseEntity<GridResponse> postForEntity = template.postForEntity(weatherServiceURL + "grid?gridMetrics=true", request, GridResponse.class);
        return postForEntity.getBody();
    }

    /**
     * Error handler that parses the remote service's JSON error body and
     * rethrows it as a {@link JsonErrorException} so callers see the
     * structured error rather than a raw HTTP status.
     */
    private class RemoteErrorHandler extends DefaultResponseErrorHandler {
        @Override
        public void handleError(ClientHttpResponse response) throws IOException {
            JSonError jSonError = mapper.readValue(CharStreams.toString(new InputStreamReader(response.getBody(), StandardCharsets.UTF_8)), JSonError.class);
            throw new JsonErrorException(jSonError);
        }
    }
}
| apache-2.0 |
DHASA2017/slide_viewpager | app/src/test/java/com/dl/commonviewpager/ExampleUnitTest.java | 315 | package com.dl.commonviewpager;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | apache-2.0 |
reallyinsane/mathan-latex-maven-plugin | mathan-latex-it/src/test/java/io/mathan/gradle/latex/configuration/MakeindexstylefileTest.java | 1233 | /*
* Copyright 2017 Matthias Hanisch
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.mathan.gradle.latex.configuration;
import io.mathan.gradle.latex.AbstractIntegrationTest;
import io.mathan.latex.core.Step;
import io.mathan.maven.it.Verifier;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class MakeindexstylefileTest extends AbstractIntegrationTest {

    public MakeindexstylefileTest(Build build) {
        super(build);
    }

    /** Builds the sample "makeindexstylefile" project and verifies the makeindex step ran. */
    @Test
    public void stylefileExists() throws Exception {
        Verifier buildVerifier = verifier("configuration", "makeindexstylefile");
        assertStepExecuted(buildVerifier, Step.STEP_MAKEINDEX);
    }
}
| apache-2.0 |
govindrgaikwad/java_repo | EmpresaMarco/src/main/java/com/empresa/marco/data/ObjectDefinationData.java | 3170 | package com.empresa.marco.data;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonFormat;
public class ObjectDefinationData {
private Integer objectId;
private String name;
private String schemaName;
private String dataBaseName;
private String userDefinedName;
private String camelCaseName;
private Boolean embaddable;
private Boolean updated;
private Boolean primaryKey;
private Integer projectId;
private Integer projectVersionId;
private String createdBy;
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd,HH:00", timezone = "CET")
private Date createdDate;
private String updatedBy;
@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd,HH:00", timezone = "CET")
private Date updatedDate;
public void setObjectId(Integer objectId) {
this.objectId = objectId;
}
public Integer getObjectId() {
return this.objectId;
}
public void setName(String name) {
this.name = name;
}
public String getName() {
return this.name;
}
public void setSchemaName(String schemaName) {
this.schemaName = schemaName;
}
public String getSchemaName() {
return this.schemaName;
}
public void setDataBaseName(String dataBaseName) {
this.dataBaseName = dataBaseName;
}
public String getDataBaseName() {
return this.dataBaseName;
}
public void setUserDefinedName(String userDefinedName) {
this.userDefinedName = userDefinedName;
}
public String getUserDefinedName() {
return this.userDefinedName;
}
public void setCamelCaseName(String camelCaseName) {
this.camelCaseName = camelCaseName;
}
public String getCamelCaseName() {
return this.camelCaseName;
}
public void setEmbaddable(Boolean embaddable) {
this.embaddable = embaddable;
}
public Boolean getEmbaddable() {
return this.embaddable;
}
public void setUpdated(Boolean updated) {
this.updated = updated;
}
public Boolean getUpdated() {
return this.updated;
}
public void setPrimaryKey(Boolean primaryKey) {
this.primaryKey = primaryKey;
}
public Boolean getPrimaryKey() {
return this.primaryKey;
}
public void setProjectId(Integer projectId) {
this.projectId = projectId;
}
public Integer getProjectId() {
return this.projectId;
}
public void setProjectVersionId(Integer projectVersionId) {
this.projectVersionId = projectVersionId;
}
public Integer getProjectVersionId() {
return this.projectVersionId;
}
public void setCreatedBy(String createdBy) {
this.createdBy = createdBy;
}
public String getCreatedBy() {
return this.createdBy;
}
public void setCreatedDate(Date createdDate) {
this.createdDate = createdDate;
}
public Date getCreatedDate() {
return this.createdDate;
}
public void setUpdatedBy(String updatedBy) {
this.updatedBy = updatedBy;
}
public String getUpdatedBy() {
return this.updatedBy;
}
public void setUpdatedDate(Date updatedDate) {
this.updatedDate = updatedDate;
}
public Date getUpdatedDate() {
return this.updatedDate;
}
} | apache-2.0 |
darlyhellen/oto | DLVideo/src/com/darly/dlvideo/bean/Person.java | 671 | /**上午11:13:57
* @author zhangyh2
* Person.java
* TODO
*/
package com.darly.dlvideo.bean;
/**
* @author zhangyh2 Person 上午11:13:57 TODO
*/
/**
 * Simple value holder for one video entry shown in list views:
 * a numeric id plus url, title and icon strings.
 */
public class Person {

    private int id;
    private String url;
    private String title;
    private String icon;

    public int getId() {
        return id;
    }

    public void setId(int newId) {
        id = newId;
    }

    public String getUrl() {
        return url;
    }

    public void setUrl(String newUrl) {
        url = newUrl;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String newTitle) {
        title = newTitle;
    }

    public String getIcon() {
        return icon;
    }

    public void setIcon(String newIcon) {
        icon = newIcon;
    }
}
| apache-2.0 |
lucasponce/rhq-alerts | rhq-alerts/src/main/java/org/rhq/alerts/impl/BasicCepEngineImpl.java | 4929 | package org.rhq.alerts.impl;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieRepository;
import org.kie.api.builder.Message;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.FactHandle;
import org.rhq.alerts.cep.CepEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.PostConstruct;
import javax.inject.Singleton;
import java.util.Collection;
/**
TODO
*/
@Singleton
public class BasicCepEngineImpl implements CepEngine {

    private static final Logger LOG = LoggerFactory.getLogger(BasicCepEngineImpl.class);

    /** Virtual folder inside the KieFileSystem where rule files are stored. */
    private static final String PATH = "src/main/resources/org/poc/rules";

    // KIE plumbing: services/repository/filesystem are created in init();
    // the container and session are (re)created as rules change.
    private KieServices ks;
    private KieRepository kr;
    private KieFileSystem kfs;
    private KieBuilder kb;
    private KieContainer kc;
    private KieSession kSession;

    /** Maps a rule id to its virtual .drl path inside the KieFileSystem. */
    private String path(String id) {
        return PATH + "/" + id + ".drl";
    }

    public BasicCepEngineImpl() {
        LOG.info("Creating INSTANCE...");
    }

    /** Initializes the KIE artifacts once the singleton has been constructed. */
    @PostConstruct
    public void init() {
        initKieArtifacts();
    }

    private void initKieArtifacts() {
        ks = KieServices.Factory.get();
        kr = ks.getRepository();
        kfs = ks.newKieFileSystem();
    }

    /**
     * Lazily creates the session from the current container.
     *
     * @return false when no rules have been added yet (no container exists)
     */
    private boolean initSession() {
        if (kc == null) {
            LOG.warn("No rules detected.");
            return false;
        }
        if (kSession == null) {
            kSession = kc.newKieSession();
        }
        return true;
    }

    /**
     * Rebuilds the KieContainer from the current KieFileSystem contents and
     * disposes any live session so the next operation picks up the change.
     * Extracted helper: this sequence was previously duplicated verbatim in
     * addRule and removeRule.
     *
     * @throws RuntimeException when the rules fail to compile
     */
    private void rebuildContainer() {
        kb = ks.newKieBuilder(kfs);
        kb.buildAll();
        if (kb.getResults().hasMessages(Message.Level.ERROR)) {
            throw new RuntimeException("Build Errors:\n" + kb.getResults().toString());
        }
        kc = ks.newKieContainer(kr.getDefaultReleaseId());
        if (kSession != null) {
            kSession.dispose();
            kSession = null;
        }
    }

    /**
     * Adds a new rule under the given id and rebuilds the container.
     *
     * @param id   unique rule id; must not already exist
     * @param rule DRL source text
     * @throws IllegalArgumentException when id is null or already registered
     */
    @Override
    public void addRule(String id, String rule) {
        if (id == null) {
            throw new IllegalArgumentException("Id must not be null");
        }
        LOG.info("Adding rule " + id + " ...");
        String path = path(id);
        if (kfs.read(path) != null) {
            throw new IllegalArgumentException("Id argument exists on current repository");
        }
        kfs.write(path, rule);
        rebuildContainer();
    }

    /**
     * Removes the rule registered under the given id and rebuilds the container.
     *
     * @param id unique rule id; must exist
     * @throws IllegalArgumentException when id is null or not registered
     */
    @Override
    public void removeRule(String id) {
        if (id == null) {
            throw new IllegalArgumentException("Id must not be null");
        }
        LOG.info("Removing rule " + id + " ...");
        String path = path(id);
        if (kfs.read(path) == null) {
            throw new IllegalArgumentException("Id argument does not exist on current repository");
        }
        kfs.delete(path);
        rebuildContainer();
    }

    /** Registers a global on the session; no-op until at least one rule exists. */
    @Override
    public void addGlobal(String name, Object global) {
        if (!initSession()) return;
        LOG.info("Adding global " + name + " - " + global + " ...");
        kSession.setGlobal(name, global);
    }

    /** Inserts a single fact into the session; no-op until at least one rule exists. */
    @Override
    public void addFact(Object fact) {
        if (!initSession()) return;
        LOG.info("Adding fact " + fact + " ...");
        kSession.insert(fact);
    }

    /** Inserts each fact of the collection; null/empty collections are ignored. */
    @Override
    public void addFacts(Collection facts) {
        if (!initSession()) return;
        if (facts != null && !facts.isEmpty()) {
            for (Object fact : facts) {
                LOG.info("Adding fact " + fact + " ...");
                kSession.insert(fact);
            }
        }
    }

    /** Retracts a fact if it is currently known to the session. */
    @Override
    public void removeFact(Object fact) {
        if (!initSession()) return;
        FactHandle fh = kSession.getFactHandle(fact);
        if (fh != null) {
            kSession.delete(fh);
        }
    }

    /** Fires all rules, logging the fact count before and after. */
    @Override
    public void fire() {
        if (!initSession()) return;
        LOG.info("Firing rules ... ");
        LOG.info("BEFORE Facts: " + kSession.getFactCount());
        kSession.fireAllRules();
        LOG.info("AFTER Facts: " + kSession.getFactCount());
    }

    /** Retracts every fact from the session (rules stay registered). */
    @Override
    public void clear() {
        if (kSession != null) {
            Collection<FactHandle> facts = kSession.getFactHandles();
            for (FactHandle fact : facts) {
                kSession.delete(fact);
            }
        }
    }

    /** Drops all rules (fresh KieFileSystem) and disposes the session. */
    @Override
    public void reset() {
        kfs = ks.newKieFileSystem();
        if (kSession != null) {
            kSession.dispose();
            kSession = null;
        }
    }
}
| apache-2.0 |
asomov/snakeyaml | src/test/java/org/yaml/snakeyaml/issues/issue318/ContextClassLoaderTest.java | 4598 | /**
* Copyright (c) 2008, http://www.snakeyaml.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yaml.snakeyaml.issues.issue318;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Properties;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.yaml.snakeyaml.Yaml;
public class ContextClassLoaderTest {
static public class DomainBean {
private int value = 0;
public void setValue(int value) {
this.value = value;
}
public int getValue() {
return value;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + value;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
DomainBean other = (DomainBean) obj;
if (value != other.value)
return false;
return true;
}
}
private URLClassLoader yamlCL;
@Before
public void before() throws MalformedURLException {
Properties classpath = new Properties();
InputStream cpProperties = getClass().getResourceAsStream("classpath.properties");
try {
classpath.load(cpProperties);
} catch (IOException e2) {
fail(e2.getLocalizedMessage());
}
File runtimeClassesDir = new File(classpath.getProperty("runtime_classes_dir"));
ClassLoader noSnakeYAMLClassLoader = new ClassLoader(
Thread.currentThread().getContextClassLoader()) {
@Override
protected Class<?> loadClass(String name, boolean resolve)
throws ClassNotFoundException {
if (!name.startsWith("org.yaml.snakeyaml")) {
return super.loadClass(name, resolve);
}
throw new ClassNotFoundException(
"Can't load SnakeYaml classes by this ClassLoader");
}
};
yamlCL = new URLClassLoader(new URL[]{runtimeClassesDir.toURI().toURL()},
noSnakeYAMLClassLoader);
}
@After
public void after() {
if (yamlCL != null) {
try {
yamlCL.close();
} catch (IOException e) {
e.printStackTrace();
} finally {
yamlCL = null;
}
}
}
@Test(expected = ClassNotFoundException.class)
public void expectNoDomainClassInYamlCL() throws ClassNotFoundException {
yamlCL.loadClass(DomainBean.class.getName());
}
@Test
public void yamlClassInYAMLCL() throws ClassNotFoundException {
yamlCL.loadClass(Yaml.class.getName());
}
@Test
public void domainInDifferentConstructor() throws ClassNotFoundException,
InstantiationException, IllegalAccessException, NoSuchMethodException,
SecurityException, IllegalArgumentException, InvocationTargetException {
Class<?> yamlClass = yamlCL.loadClass(Yaml.class.getName());
DomainBean bean = new DomainBean();
bean.setValue(13);
Object yaml = yamlClass.newInstance();
Method dumpMethod = yaml.getClass().getMethod("dump", new Class<?>[]{Object.class});
String dump = dumpMethod.invoke(yaml, bean).toString();
Method loadMethod = yaml.getClass().getMethod("load", new Class<?>[]{String.class});
DomainBean object = (DomainBean) loadMethod.invoke(yaml, dump);
assertEquals(bean, object);
}
}
| apache-2.0 |
allanbank/mongodb-async-examples | src/main/java/aggregation/AggregationLetDemo.java | 6637 | /*
* Copyright 2014 - Allanbank Consulting, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package aggregation;
import static com.allanbank.mongodb.bson.builder.BuilderFactory.d;
import static com.allanbank.mongodb.bson.builder.BuilderFactory.e;
import static com.allanbank.mongodb.builder.AggregationProjectFields.include;
import static com.allanbank.mongodb.builder.expression.Expressions.add;
import static com.allanbank.mongodb.builder.expression.Expressions.cond;
import static com.allanbank.mongodb.builder.expression.Expressions.constant;
import static com.allanbank.mongodb.builder.expression.Expressions.field;
import static com.allanbank.mongodb.builder.expression.Expressions.let;
import static com.allanbank.mongodb.builder.expression.Expressions.multiply;
import static com.allanbank.mongodb.builder.expression.Expressions.set;
import static com.allanbank.mongodb.builder.expression.Expressions.var;
import java.io.IOException;
import com.allanbank.mongodb.MongoClient;
import com.allanbank.mongodb.MongoCollection;
import com.allanbank.mongodb.MongoFactory;
import com.allanbank.mongodb.bson.Document;
import com.allanbank.mongodb.bson.builder.DocumentBuilder;
import com.allanbank.mongodb.builder.Aggregate;
import com.allanbank.mongodb.builder.Find;
/**
* AggregationMapDemo provides a simple example of using the aggregation
* framework with a {@code $let} expression.
* <p>
* Inspired by the <a href=
* "http://docs.mongodb.org/master/reference/operator/aggregation/let/">
* <code>let</code> expression's documentation</a>.
* </p>
*
* @copyright 2014, Allanbank Consulting, Inc., All Rights Reserved
*/
/**
 * Demonstrates the aggregation framework's {@code $let} expression: a
 * {@code $project} stage computes {@code finalTotal} from per-document
 * {@code total} and {@code discounted} variables.
 *
 * @copyright 2014, Allanbank Consulting, Inc., All Rights Reserved
 */
public class AggregationLetDemo {

    /** Client for a MongoDB instance on localhost at the default port. */
    private final static MongoClient client = MongoFactory
            .createClient("mongodb://localhost:27017/");

    /** The demo collection: db.collection. */
    private final static MongoCollection theCollection = client.getDatabase(
            "db").getCollection("collection");

    /**
     * Runs the demo: seeds two documents, builds the $let pipeline, and
     * prints both the pipeline and its results.
     *
     * @param args Command line arguments. Ignored.
     * @throws IOException On a failure closing the MongoClient.
     */
    public static void main(final String[] args) throws IOException {
        // Start from an empty collection.
        theCollection.delete(Find.ALL);

        // Seed the two sample documents:
        //   { _id: 1, price: 10, tax: 0.50, applyDiscount: true }
        //   { _id: 2, price: 10, tax: 0.25, applyDiscount: false }
        DocumentBuilder seed = d(e("_id", 1), e("price", 10),
                e("tax", 0.50), e("applyDiscount", true));
        theCollection.insert(seed);
        seed = d(e("_id", 2), e("price", 10), e("tax", 0.25),
                e("applyDiscount", false));
        theCollection.insert(seed);

        System.out.println("Inserted : ");
        for (final Document document : theCollection.find(Find.ALL)) {
            System.out.println(document);
        }

        // Equivalent shell pipeline:
        //   $project: { finalTotal: { $let: {
        //     vars: { total:      { $add: [ '$price', '$tax' ] },
        //             discounted: { $cond: { if: '$applyDiscount', then: 0.9, else: 1 } } },
        //     in:   { $multiply: [ "$$total", "$$discounted" ] } } } }
        final Aggregate.Builder pipeline = Aggregate.builder();
        pipeline.project(
                include(),
                set("finalTotal",
                        let("total", add(field("price"), field("tax"))).let(
                                "discounted",
                                cond(field("applyDiscount"), constant(0.9),
                                        constant(1))).in(
                                multiply(var("total"), var("discounted")))));

        System.out.println("Aggregation Pipeline : " + pipeline);

        // Expected: _id 1 -> ~9.45 (discount applied), _id 2 -> 10.25.
        System.out.println("Results : ");
        for (final Document document : theCollection.aggregate(pipeline)) {
            System.out.println(document);
        }

        // Always remember to close your client!
        client.close();
    }
}
| apache-2.0 |
STRiDGE/dozer | core/src/test/java/org/dozer/functional_tests/MapTypeTest.java | 26706 | /*
* Copyright 2005-2017 Dozer Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dozer.functional_tests;
import java.io.Serializable;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.dozer.Mapper;
import org.dozer.vo.TestObject;
import org.dozer.vo.TestObjectPrime;
import org.dozer.vo.map.ChildDOM;
import org.dozer.vo.map.CustomMap;
import org.dozer.vo.map.CustomMapIF;
import org.dozer.vo.map.GenericDOM;
import org.dozer.vo.map.MapTestObject;
import org.dozer.vo.map.MapTestObjectPrime;
import org.dozer.vo.map.MapToMap;
import org.dozer.vo.map.MapToMapPrime;
import org.dozer.vo.map.MapToProperty;
import org.dozer.vo.map.NestedObj;
import org.dozer.vo.map.NestedObjPrime;
import org.dozer.vo.map.ParentDOM;
import org.dozer.vo.map.PropertyToMap;
import org.dozer.vo.map.SimpleObj;
import org.dozer.vo.map.SimpleObjPrime;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
* @author tierney.matt
* @author garsombke.franz
*/
public class MapTypeTest extends AbstractFunctionalTest {
@Test
public void testMapToVo() throws Exception {
// Test simple Map --> Vo with custom mappings defined.
mapper = getMapper(new String[] { "mapMapping2.xml" });
NestedObj nestedObj = newInstance(NestedObj.class);
nestedObj.setField1("nestedfield1value");
Map<String, Serializable> src = newInstance(HashMap.class);
src.put("field1", "mapnestedfield1value");
src.put("nested", nestedObj);
SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class, "caseA");
assertEquals(src.get("field1"), result.getField1());
assertEquals(nestedObj.getField1(), result.getNested().getField1());
}
@Test
public void testMapToVoSimple() throws Exception {
mapper = getMapper(new String[] { });
NestedObj nestedObj = newInstance(NestedObj.class);
nestedObj.setField1("nestedfield1value");
Map<String, Serializable> src = newInstance(HashMap.class);
src.put("field1", "mapnestedfield1value");
SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class);
assertEquals(src.get("field1"), result.getField1());
}
@Test
public void testMapToVoWithRenameField() throws Exception {
// Test simple Map --> Vo with custom mappings defined.
mapper = getMapper(new String[] { "mapMapping2.xml" });
NestedObj nestedObj = newInstance(NestedObj.class);
nestedObj.setField1("nestedfield1value");
Map<String, Object> src = new HashMap<String, Object>();
src.put("first", "mapnestedfield1value");
src.put("nested", nestedObj);
SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class, "caseC");
assertEquals(src.get("first"), result.getField1());
assertEquals(nestedObj.getField1(), result.getNested().getField1());
}
@Test
public void testMapToVoWithRenameFieldReverse() throws Exception {
// Test simple Map --> Vo with custom mappings defined.
mapper = getMapper(new String[] { "mapMapping2.xml" });
NestedObj nestedObj = newInstance(NestedObj.class);
nestedObj.setField1("nestedfield1value");
Map<String, Object> src = new HashMap<String, Object>();
src.put("first", "mapnestedfield1value");
src.put("nested", nestedObj);
SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class, "caseD");
assertEquals(src.get("first"), result.getField1());
assertEquals(nestedObj.getField1(), result.getNested().getField1());
}
@Test
public void testMapToVo_CustomMappings() throws Exception {
// Test simple Map --> Vo with custom mappings defined.
mapper = getMapper(new String[] { "mapMapping2.xml" });
Map<String, String> src = newInstance(HashMap.class);
src.put("field1", "field1value");
src.put("field2", "field2value");
SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class, "caseB");
assertNull(result.getField1());
assertEquals(src.get("field2"), result.getField2());
}
@Test
public void testMapToVoUsingMapId() {
// Simple map --> vo using a map-id
mapper = super.getMapper(new String[] { "mapMapping.xml" });
Map<String, String> src = newInstance(HashMap.class);
src.put("field1", "field1value");
src.put("field2", "field2value");
NestedObjPrime dest = mapper.map(src, NestedObjPrime.class, "caseB");
assertEquals(src.get("field1"), dest.getField1());
assertEquals(src.get("field2"), dest.getField2());
}
@Test
public void testMapToVoUsingMapId_FieldExclude() {
// Simple map --> vo using a map-id
mapper = super.getMapper(new String[] { "mapMapping.xml" });
Map<String, String> src = newInstance(HashMap.class);
src.put("field1", "field1value");
src.put("field2", "field2value");
NestedObjPrime dest = mapper.map(src, NestedObjPrime.class, "caseC");
assertNull("field was excluded and should be null", dest.getField1());
assertEquals(src.get("field2"), dest.getField2());
}
@Test
public void testNestedMapToVoUsingMapId() {
// Another test for nested Map --> Vo using <field map-id=....>
mapper = super.getMapper("mapMapping.xml");
SimpleObj src = newInstance(SimpleObj.class);
src.setField1("field1");
NestedObj nested = newInstance(NestedObj.class);
nested.setField1("nestedfield1");
src.setNested(nested);
Map<String, String> nested2 = newInstance(HashMap.class);
nested2.put("field1", "field1MapValue");
src.setNested2(nested2);
SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class, "caseA2");
assertNull(result.getNested2().getField1());// field was excluded
assertEquals(src.getField1(), result.getField1());
assertEquals(src.getNested().getField1(), result.getNested().getField1());
}
@Test
public void testMapToVo_NoCustomMappings() throws Exception {
// Test simple Map --> Vo without any custom mappings defined.
NestedObj nestedObj = newInstance(NestedObj.class);
nestedObj.setField1("nestedfield1value");
Map<String, Serializable> src = newInstance(HashMap.class);
src.put("field1", "mapnestedfield1value");
src.put("nested", nestedObj);
SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class);
assertEquals(src.get("field1"), result.getField1());
assertEquals(nestedObj.getField1(), result.getNested().getField1());
}
@Test
public void testVoToMap_NoCustomMappings() throws Exception {
// Test simple Vo --> Map without any custom mappings defined.
SimpleObjPrime src = newInstance(SimpleObjPrime.class);
src.setField1("someValueField1");
src.setField2("someValueField2");
src.setSimpleobjprimefield("someOtherValue");
NestedObjPrime nested = newInstance(NestedObjPrime.class);
nested.setField1("field1Value");
nested.setField2("field2Value");
src.setNested(nested);
NestedObjPrime nested2 = newInstance(NestedObjPrime.class);
src.setNested2(nested2);
// Map complex object to HashMap
Map<?, ?> destMap = newInstance(HashMap.class);
mapper.map(src, destMap);
// Map HashMap back to new instance of the complex object
SimpleObjPrime mappedSrc = mapper.map(destMap, SimpleObjPrime.class);
// Remapped complex type should equal original src if all fields were mapped both ways.
assertEquals(src, mappedSrc);
}
@Test
public void testMapToMap() throws Exception {
Mapper mapper = getMapper(new String[] { "mapInterfaceMapping.xml", "dozerBeanMapping.xml" });
TestObject to = newInstance(TestObject.class);
to.setOne("one");
TestObject to2 = newInstance(TestObject.class);
to2.setTwo(new Integer(2));
Map<String, TestObject> map = newInstance(HashMap.class);
map.put("to", to);
map.put("to2", to2);
Map<String, TestObject> map2 = newInstance(HashMap.class);
map2.put("to", to);
map2.put("to2", to2);
MapToMap mtm = new MapToMap(map, map2);
MapToMapPrime mtmp = mapper.map(mtm, MapToMapPrime.class);
assertEquals("one", ((TestObject) mtmp.getStandardMap().get("to")).getOne());
assertEquals(2, ((TestObject) mtmp.getStandardMap().get("to2")).getTwo().intValue());
// verify that we transformed from object to object prime
assertEquals("one", ((TestObjectPrime) mtmp.getStandardMapWithHint().get("to")).getOnePrime());
assertEquals(2, ((TestObjectPrime) mtmp.getStandardMapWithHint().get("to2")).getTwoPrime().intValue());
}
@Test
public void testMapToMapExistingDestination() throws Exception {
Mapper mapper = getMapper(new String[] { "mapInterfaceMapping.xml", "dozerBeanMapping.xml" });
TestObject to = newInstance(TestObject.class);
to.setOne("one");
TestObject to2 = newInstance(TestObject.class);
to2.setTwo(new Integer(2));
Map<String, TestObject> map = newInstance(HashMap.class);
map.put("to", to);
map.put("to2", to2);
MapToMap mtm = newInstance(MapToMap.class);
mtm.setStandardMap(map);
// create an existing map and set a value so we can test if it exists after
// mapping
MapToMapPrime mtmp = newInstance(MapToMapPrime.class);
Map<String, Serializable> map2 = newInstance(Hashtable.class);
map2.put("toDest", to);
mtmp.setStandardMap(map2);
mapper.map(mtm, mtmp);
assertEquals("one", ((TestObject) mtmp.getStandardMap().get("to")).getOne());
assertEquals(2, ((TestObject) mtmp.getStandardMap().get("to2")).getTwo().intValue());
assertEquals("one", ((TestObject) mtmp.getStandardMap().get("toDest")).getOne());
}
@Test
public void testPropertyClassLevelMap() throws Exception {
mapper = getMapper(new String[] { "dozerBeanMapping.xml" });
PropertyToMap ptm = newInstance(PropertyToMap.class);
ptm.setStringProperty("stringPropertyValue");
ptm.addStringProperty2("stringProperty2Value");
Map<?, ?> map = mapper.map(ptm, HashMap.class, "myTestMapping");
assertEquals("stringPropertyValue", map.get("stringProperty"));
assertEquals("stringProperty2Value", map.get("myStringProperty"));
CustomMapIF customMap = mapper.map(ptm, CustomMap.class, "myCustomTestMapping");
assertEquals("stringPropertyValue", customMap.getValue("stringProperty"));
assertEquals("stringProperty2Value", customMap.getValue("myStringProperty"));
CustomMapIF custom = newInstance(CustomMap.class);
custom.putValue("myKey", "myValue");
mapper.map(ptm, custom, "myCustomTestMapping");
assertEquals("stringPropertyValue", custom.getValue("stringProperty"));
assertEquals("myValue", custom.getValue("myKey"));
}
@Test
public void testPropertyClassLevelMap2() throws Exception {
    // Same scenario as testPropertyClassLevelMap, but only the
    // custom-map destination path.
    mapper = getMapper(new String[] { "dozerBeanMapping.xml" });
    PropertyToMap source = newInstance(PropertyToMap.class);
    source.setStringProperty("stringPropertyValue");
    source.addStringProperty2("stringProperty2Value");
    CustomMapIF result = mapper.map(source, CustomMap.class, "myCustomTestMapping");
    assertEquals("stringPropertyValue", result.getValue("stringProperty"));
    assertEquals("stringProperty2Value", result.getValue("myStringProperty"));
}
@Test
public void testPropertyClassLevelMapBack() throws Exception {
    // Reverse direction: plain Map and custom map -> property bean,
    // then re-map into an existing bean to verify new keys are merged.
    mapper = getMapper(new String[] { "dozerBeanMapping.xml" });
    Map<String, Object> map = newInstance(HashMap.class);
    map.put("stringProperty", "stringPropertyValue");
    map.put("integerProperty", Integer.valueOf(567)); // valueOf: the Integer(String) constructor is deprecated
    PropertyToMap property = mapper.map(map, PropertyToMap.class, "myTestMapping");
    assertEquals("stringPropertyValue", property.getStringProperty());
    CustomMapIF custom = newInstance(CustomMap.class);
    custom.putValue("stringProperty", "stringPropertyValue");
    PropertyToMap property2 = mapper.map(custom, PropertyToMap.class, "myCustomTestMapping");
    assertEquals("stringPropertyValue", property2.getStringProperty());
    // Mapping into an existing bean must pick up newly added keys.
    map.put("stringProperty3", "myValue");
    mapper.map(map, property, "myTestMapping");
    assertEquals("myValue", property.getStringProperty3());
}
@Test
public void testPropertyToMap() throws Exception {
    // Property -> Map field-level mapping, including a nested "reverse" map,
    // then map back and verify the round trip.
    mapper = getMapper(new String[] { "dozerBeanMapping.xml" });
    PropertyToMap ptm = newInstance(PropertyToMap.class);
    ptm.setStringProperty("stringPropertyValue");
    ptm.addStringProperty2("stringProperty2Value");
    ptm.setStringProperty6("string6Value");
    Map<String, Object> hashMap = newInstance(HashMap.class);
    hashMap.put("reverseMapString", "reverseMapStringValue");
    hashMap.put("reverseMapInteger", Integer.valueOf(567)); // valueOf: the Integer(String) constructor is deprecated
    ptm.setReverseMap(hashMap);
    MapToProperty mtp = mapper.map(ptm, MapToProperty.class);
    assertTrue(mtp.getHashMap().containsKey("stringProperty"));
    assertTrue(mtp.getHashMap().containsValue("stringPropertyValue"));
    assertTrue(mtp.getHashMap().containsKey("myStringProperty"));
    assertTrue(mtp.getHashMap().containsValue("stringProperty2Value"));
    assertFalse(mtp.getHashMap().containsValue("nullStringProperty"));
    assertTrue(mtp.getNullHashMap().containsValue("string6Value"));
    assertEquals("reverseMapStringValue", mtp.getReverseMapString());
    assertEquals(((Integer) hashMap.get("reverseMapInteger")).toString(), mtp.getReverseMapInteger());
    // Map Back
    PropertyToMap dest = mapper.map(mtp, PropertyToMap.class);
    // assertEquals instead of assertTrue(x.equals(y)): NPE-safe and yields a
    // useful expected/actual message on failure.
    assertEquals("stringPropertyValue", dest.getStringProperty());
    assertEquals("stringProperty2Value", dest.getStringProperty2());
    assertTrue(dest.getReverseMap().containsKey("reverseMapString"));
    assertTrue(dest.getReverseMap().containsValue("reverseMapStringValue"));
    assertNull(dest.getNullStringProperty());
}
@Test
public void testPropertyToCustomMap() throws Exception {
    // Bean properties <-> custom map type, including a null-backed custom map
    // and a custom map exposed through a differently named setter.
    mapper = getMapper(new String[] { "dozerBeanMapping.xml" });
    PropertyToMap source = newInstance(PropertyToMap.class);
    source.setStringProperty3("stringProperty3Value");
    source.setStringProperty4("stringProperty4Value");
    source.setStringProperty5("stringProperty5Value");

    MapToProperty mapped = mapper.map(source, MapToProperty.class);
    assertEquals("stringProperty3Value", mapped.getCustomMap().getValue("myCustomProperty"));
    assertEquals("stringProperty5Value", mapped.getCustomMap().getValue("stringProperty5"));
    assertEquals("stringProperty4Value", mapped.getNullCustomMap().getValue("myCustomNullProperty"));
    assertEquals("stringProperty5Value", mapped.getCustomMapWithDiffSetMethod().getValue("stringProperty5"));

    // Map Back
    PropertyToMap roundTripped = mapper.map(mapped, PropertyToMap.class);
    assertEquals("stringProperty3Value", roundTripped.getStringProperty3());
    assertEquals("stringProperty4Value", roundTripped.getStringProperty4());
    assertEquals("stringProperty5Value", roundTripped.getStringProperty5());
}
@Test
public void testPropertyToClassLevelMap() throws Exception {
    // Class-level property -> Map mapping on a container object, with an
    // exclusion, a reverse class-level map, a null destination map, and a
    // full round trip back.
    mapper = getMapper(new String[] { "dozerBeanMapping.xml" });
    MapTestObject src = newInstance(MapTestObject.class);
    PropertyToMap props = newInstance(PropertyToMap.class);
    Map<String, String> reverseMap = newInstance(HashMap.class);
    reverseMap.put("reverseClassLevelMapString", "reverseClassLevelMapStringValue");
    src.setPropertyToMapMapReverse(reverseMap);
    props.setStringProperty("stringPropertyValue");
    props.addStringProperty2("stringProperty2Value");
    props.setStringProperty3("stringProperty3Value");
    props.setStringProperty4("stringProperty4Value");
    props.setStringProperty5("stringProperty5Value");
    src.setPropertyToMap(props);
    PropertyToMap nullMapProps = newInstance(PropertyToMap.class);
    nullMapProps.setStringProperty("stringPropertyValue");
    src.setPropertyToMapToNullMap(nullMapProps);

    MapTestObjectPrime prime = mapper.map(src, MapTestObjectPrime.class);
    // Every mapped property lands under its (possibly renamed) key...
    assertTrue(prime.getPropertyToMapMap().containsKey("stringProperty"));
    assertTrue(prime.getPropertyToMapMap().containsKey("myStringProperty"));
    assertTrue(prime.getPropertyToMapMap().containsKey("stringProperty3"));
    assertTrue(prime.getPropertyToMapMap().containsKey("stringProperty4"));
    assertTrue(prime.getPropertyToMapMap().containsKey("stringProperty5"));
    assertTrue(prime.getPropertyToMapMap().containsKey("nullStringProperty"));
    assertTrue(prime.getPropertyToMapMap().containsValue("stringPropertyValue"));
    assertTrue(prime.getPropertyToMapMap().containsValue("stringProperty2Value"));
    assertTrue(prime.getPropertyToMapMap().containsValue("stringProperty3Value"));
    assertTrue(prime.getPropertyToMapMap().containsValue("stringProperty4Value"));
    assertTrue(prime.getPropertyToMapMap().containsValue("stringProperty5Value"));
    assertFalse(prime.getPropertyToMapMap().containsValue("nullStringProperty"));
    // ...except the explicitly excluded field.
    assertFalse(prime.getPropertyToMapMap().containsKey("excludeMe"));
    assertEquals("reverseClassLevelMapStringValue", prime.getPropertyToMapReverse().getReverseClassLevelMapString());
    assertTrue(prime.getNullPropertyToMapMap().containsKey("stringProperty"));
    assertEquals("stringPropertyValue", prime.getNullPropertyToMapMap().get("stringProperty"));

    // Map Back
    MapTestObject roundTripped = mapper.map(prime, MapTestObject.class);
    assertEquals("stringPropertyValue", roundTripped.getPropertyToMap().getStringProperty());
    assertEquals("stringProperty2Value", roundTripped.getPropertyToMap().getStringProperty2());
    assertEquals("stringProperty3Value", roundTripped.getPropertyToMap().getStringProperty3());
    assertEquals("stringProperty4Value", roundTripped.getPropertyToMap().getStringProperty4());
    assertEquals("stringProperty5Value", roundTripped.getPropertyToMap().getStringProperty5());
    assertTrue(roundTripped.getPropertyToMapMapReverse().containsKey("reverseClassLevelMapString"));
    assertEquals("reverseClassLevelMapStringValue", roundTripped.getPropertyToMapMapReverse().get("reverseClassLevelMapString"));
}
@Test
public void testPropertyToCustomClassLevelMap() throws Exception {
    // Class-level mapping where the destination map is a custom type, plus a
    // custom map exposed through an interface-typed property; round trip back.
    mapper = getMapper(new String[] { "dozerBeanMapping.xml" });
    MapTestObject src = newInstance(MapTestObject.class);
    PropertyToMap props = newInstance(PropertyToMap.class);
    props.setStringProperty("stringPropertyValue");
    props.setStringProperty2("stringProperty2Value");
    src.setPropertyToCustomMap(props);
    CustomMapIF ifaceMap = newInstance(CustomMap.class);
    ifaceMap.putValue("stringProperty", "stringPropertyValue");
    src.setPropertyToCustomMapMapWithInterface(ifaceMap);

    MapTestObjectPrime prime = mapper.map(src, MapTestObjectPrime.class);
    assertEquals("stringPropertyValue", prime.getPropertyToCustomMapMap().getValue("stringProperty"));
    assertNull(prime.getPropertyToCustomMapMap().getValue("excludeMe"));
    assertEquals("stringProperty2Value", prime.getPropertyToCustomMapMap().getValue("myStringProperty"));
    assertEquals("stringPropertyValue", prime.getPropertyToCustomMapWithInterface().getStringProperty());

    // Map Back
    MapTestObject roundTripped = mapper.map(prime, MapTestObject.class);
    assertEquals("stringPropertyValue", roundTripped.getPropertyToCustomMap().getStringProperty());
    assertEquals("stringProperty2Value", roundTripped.getPropertyToCustomMap().getStringProperty2());
    assertNull(roundTripped.getPropertyToCustomMap().getExcludeMe());
    assertEquals("stringPropertyValue", roundTripped.getPropertyToCustomMapMapWithInterface().getValue("stringProperty"));
}
@Test
// Custom map get/set methods configured at the CLASS level ("useCase1" map-id).
public void testMapGetSetMethod_ClassLevel() throws Exception {
runMapGetSetMethodTest("useCase1");
}
@Test
// Custom map get/set methods configured at the FIELD level ("useCase2" map-id).
public void testMapGetSetMethod_FieldLevel() throws Exception {
runMapGetSetMethodTest("useCase2");
}
@Test
public void testDateFormat_CustomMapType() throws Exception {
    // Date-format conversion must apply when mapping between a String entry in
    // a custom map type and a date field, in both directions.
    mapper = getMapper(new String[] { "mapMapping3.xml" });
    DateFormat format = new SimpleDateFormat("MM/dd/yyyy");
    String dateString = "10/15/2005";
    CustomMap source = newInstance(CustomMap.class);
    source.putValue("fieldA", dateString);

    org.dozer.vo.SimpleObj mapped = mapper.map(source, org.dozer.vo.SimpleObj.class);
    assertNotNull("dest field should not be null", mapped.getField5());
    assertEquals("dest field contains wrong date value", format.parse(dateString), mapped.getField5().getTime());

    CustomMap roundTripped = mapper.map(mapped, CustomMap.class);
    assertEquals("remapped src field contains wrong date string", dateString, roundTripped.getValue("fieldA"));
}
/**
 * Shared driver: verifies custom field converters work for custom Map types
 * under the given map-id, then remaps to check bi-directional behavior.
 */
private void runMapGetSetMethodTest(String mapId) throws Exception {
    // Test that custom field converter works for Custom Map Types
    mapper = getMapper(new String[] { "mapGetSetMethodMapping.xml" });
    CustomMap src = newInstance(CustomMap.class);
    src.putValue("fieldA", "someStringValue");
    src.putValue("field2", "someOtherStringValue");
    src.putValue("fieldC", "1");
    src.putValue("fieldD", "2");
    src.putValue("fieldE", "10-15-2005");
    SimpleObj dest = mapper.map(src, SimpleObj.class, mapId);
    assertEquals("wrong value for field1", src.getValue("fieldA"), dest.getField1());
    assertEquals("wrong value for field2", src.getValue("field2"), dest.getField2());
    assertEquals("wrong value for field3", Integer.valueOf("1"), dest.getField3());
    assertEquals("wrong value for field4", Integer.valueOf("2"), dest.getField4());
    // "fieldE" is 10-15-2005 (October 15). Calendar months are 0-based, so use
    // the OCTOBER constant instead of the raw index 10 (which is November, as
    // the original code had it) and assert the month via `expected` like the
    // other fields.
    Calendar expected = Calendar.getInstance();
    expected.set(2005, Calendar.OCTOBER, 15);
    assertEquals(expected.get(Calendar.YEAR), dest.getField5().get(Calendar.YEAR));
    assertEquals(expected.get(Calendar.MONTH), dest.getField5().get(Calendar.MONTH));
    assertEquals(expected.get(Calendar.DATE), dest.getField5().get(Calendar.DATE));
    // Remap to test bi-directional mapping
    CustomMap remappedSrc = mapper.map(dest, CustomMap.class, mapId);
    assertTrue("remapped src should equal original src", EqualsBuilder.reflectionEquals(src.getMap(), remappedSrc.getMap()));
}
@Test
public void testMapType_NestedMapToVo_NoCustomMappings() throws Exception {
    // Maps should map onto a VO with no custom mappings or map-id at all,
    // behaving like Vo --> Vo by matching on common attribute (key) names.
    Map<String, String> nestedSource = newInstance(HashMap.class);
    nestedSource.put("field1", "mapnestedfield1");
    nestedSource.put("field2", null);
    SimpleObj source = newInstance(SimpleObj.class);
    source.setNested2(nestedSource);

    SimpleObjPrime mapped = mapper.map(source, SimpleObjPrime.class);
    assertNotNull(mapped.getNested2());
    assertEquals(nestedSource.get("field1"), mapped.getNested2().getField1());

    // Round trip must reproduce the original source object.
    SimpleObj roundTripped = mapper.map(mapped, SimpleObj.class);
    assertEquals(source, roundTripped);
}
@Test
public void testMapType_MapToVo_CustomMapping_NoMapId() {
    // Test nested Map --> Vo using custom mappings without map-id.
    // (Removed an unused NestedObj local that was created but never attached
    // to the source object — dead code in the original.)
    mapper = getMapper("mapMapping3.xml");
    Map<String, String> nested2 = newInstance(HashMap.class);
    nested2.put("field1", "mapnestedfield1value");
    nested2.put("field2", "mapnestedfield2value");
    SimpleObj src = newInstance(SimpleObj.class);
    src.setNested2(nested2);
    SimpleObjPrime result = mapper.map(src, SimpleObjPrime.class);
    assertNull(result.getNested2().getField1());// field exclude in mappings file
    assertEquals(nested2.get("field2"), result.getNested2().getField2());
}
@Ignore("Started failing for some reason. Tests very exotic functionality.")
@Test
public void testNestedCustomMap() {
    // A parent VO with a nested child VO should map into a generic DOM-like
    // map, with the child itself becoming a nested GenericDOM.
    mapper = getMapper("mapMapping4.xml");
    ParentDOM source = newInstance(ParentDOM.class);
    source.setTest("someTestValue");
    ChildDOM childSource = newInstance(ChildDOM.class);
    childSource.setChildName("someChildName");
    source.setChild(childSource);

    GenericDOM mapped = mapper.map(source, GenericDOM.class);
    assertEquals("someTestValue", mapped.get("test"));
    GenericDOM mappedChild = (GenericDOM) mapped.get("child");
    assertEquals("someChildName", mappedChild.get("childName"));
}
@Test
public void testMapToVoUsingMapInterface() throws Exception {
    // Simple Map --> Vo with custom mappings defined under a map-id.
    mapper = getMapper("mapMapping5.xml");
    Map<String, String> source = newInstance(HashMap.class);
    source.put("stringValue", "somevalue");
    SimpleObj mapped = mapper.map(source, SimpleObj.class, "test-id");
    assertEquals("wrong value found for field1", "somevalue", mapped.getField1());
}
@Test
@Ignore("Known bug")
public void testEmptyMapToVo() throws Exception {
    // Mapping an EMPTY source map into an existing VO should leave the
    // destination's existing values untouched.
    mapper = getMapper("mapMapping5.xml");
    Map<String, String> emptySource = newInstance(HashMap.class);
    assertTrue(emptySource.isEmpty());
    SimpleObj destination = new SimpleObj();
    destination.setField1("existingValue");
    mapper.map(emptySource, destination, "test-id");
    assertEquals("existingValue", destination.getField1());
}
@Test
public void testMapToVoOverwritesExistingValue() throws Exception {
    // A populated source map entry must overwrite the destination's
    // pre-existing value for the mapped field.
    mapper = getMapper("mapMapping5.xml");
    Map<String, String> source = newInstance(HashMap.class);
    source.put("stringValue", "overwritten");
    SimpleObj destination = new SimpleObj();
    destination.setField1("existingValue");
    mapper.map(source, destination, "test-id");
    assertEquals("overwritten", destination.getField1());
}
@Test
public void testTreeMap() {
    // Mapping a TreeMap to TreeMap.class should produce a non-null map with
    // the same entries. Parameterized types replace the raw TreeMap usage
    // (raw types defeat generics checking).
    TreeMap<String, String> map = new TreeMap<String, String>();
    map.put("a", "b");
    TreeMap<?, ?> result = mapper.map(map, TreeMap.class);
    assertNotNull(result);
    assertEquals(1, result.size());
}
}
| apache-2.0 |
ryansgot/forsuredbcompiler | forsuredbcompiler/src/main/java/com/fsryan/forsuredb/annotationprocessor/generator/code/JavaSourceGenerator.java | 3018 | package com.fsryan.forsuredb.annotationprocessor.generator.code;
import com.fsryan.forsuredb.annotationprocessor.FSAnnotationProcessor;
import com.fsryan.forsuredb.annotationprocessor.generator.BaseGenerator;
import com.fsryan.forsuredb.api.TypedRecordContainer;
import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.CodeBlock;
import javax.annotation.Generated;
import javax.annotation.processing.ProcessingEnvironment;
import javax.tools.JavaFileObject;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static com.fsryan.forsuredb.annotationprocessor.util.PropertyRetriever.properties;
/**
 * Base class for generators that emit Java source files during annotation
 * processing. Holds the fully-qualified output class name and lazily derives
 * the package name, simple class name, and the configured result-parameter and
 * record-container class names.
 */
public abstract class JavaSourceGenerator extends BaseGenerator<JavaFileObject> {

    // @Generated annotation naming the processor that produced the file.
    private static final AnnotationSpec GENERATED_ANNOTATION = AnnotationSpec.builder(Generated.class)
            .addMember("value", CodeBlock.builder()
                    .add("$S", ClassName.get(FSAnnotationProcessor.class))
                    .build())
            .build();

    private final String fqClassName;
    // Lazily computed caches; assumes single-threaded use (typical for
    // annotation processing) — TODO confirm if ever shared across threads.
    private String packageName;
    private String simpleClassName;
    private String resultParameter;
    private String recordContainer;

    /**
     * @param processingEnv the current annotation-processing environment
     * @param fqClassName   fully-qualified name of the class to generate
     */
    public JavaSourceGenerator(ProcessingEnvironment processingEnv, String fqClassName) {
        super(processingEnv);
        this.fqClassName = fqClassName;
    }

    @Override
    protected JavaFileObject createFileObject(ProcessingEnvironment processingEnv) throws IOException {
        return processingEnv.getFiler().createSourceFile(getOutputClassName(true));
    }

    /**
     * @return the class-level annotations for the generated type: the
     * {@code @Generated} annotation when enabled via properties, else an
     * empty list. Both branches now return immutable lists
     * ({@code singletonList} replaces the fixed-size {@code Arrays.asList}).
     */
    protected List<AnnotationSpec> getClassAnnotations() {
        if (properties().addGeneratedAnnotation()) {
            return Collections.singletonList(GENERATED_ANNOTATION);
        }
        return Collections.emptyList();
    }

    /**
     * @param fullyQualified whether to return the fully-qualified name
     * @return the output class name, fully qualified or simple
     */
    protected String getOutputClassName(boolean fullyQualified) {
        if (fullyQualified) {
            return fqClassName;
        }
        if (simpleClassName == null) {
            simpleClassName = CodeUtil.simpleClassNameFrom(fqClassName);
        }
        return simpleClassName;
    }

    /** @return the package portion of the output class name (cached). */
    protected String getOutputPackageName() {
        if (packageName == null) {
            packageName = CodeUtil.packageNameFrom(fqClassName);
        }
        return packageName;
    }

    /**
     * @return the configured result-parameter class name, defaulting to
     * {@code java.lang.Object} when not set (cached).
     */
    protected String getResultParameter() {
        if (resultParameter == null) {
            resultParameter = properties().resultParameter();
            resultParameter = resultParameter == null ? "java.lang.Object" : resultParameter;
        }
        return resultParameter;
    }

    /**
     * @return the configured record-container class name, defaulting to
     * {@link TypedRecordContainer} when unset or empty (cached).
     */
    protected String getRecordContainer() {
        if (recordContainer == null) {
            recordContainer = properties().recordContainer();
            recordContainer = recordContainer == null || recordContainer.isEmpty()
                    ? TypedRecordContainer.class.getName()
                    : recordContainer;
        }
        return recordContainer;
    }
}
| apache-2.0 |
flyhero/flyapi2.0 | flyapi-dao/src/main/java/com/flyhero/flyapi/dao/OperateLogMapper.java | 617 | package com.flyhero.flyapi.dao;
import java.util.List;
import com.flyhero.flyapi.entity.OperateLog;
import com.flyhero.flyapi.pojo.LogPojo;
/**
 * MyBatis mapper for {@code OperateLog} records (operation/audit log).
 * Query semantics beyond what the signatures show live in the corresponding
 * XML mapper file, which is not visible here.
 */
public interface OperateLogMapper {
// Log summaries (LogPojo) for one project.
List<LogPojo> findLog(Integer projectId);
// Full log rows for one project.
List<OperateLog> findLogDetialByProId(Integer projectId);
// All log rows created by one user.
List<OperateLog> findAllLogByUserId(Integer userId);
// Returns affected row count.
int deleteByPrimaryKey(Integer logId);
// Inserts a full record; returns affected row count.
int insert(OperateLog record);
// Presumably a selective/convenience insert — semantics defined in the XML mapper; verify there.
int addLog(OperateLog record);
OperateLog selectByPrimaryKey(Integer logId);
// Updates only the non-null fields of the record.
int updateByPrimaryKeySelective(OperateLog record);
// Updates all columns from the record.
int updateByPrimaryKey(OperateLog record);
} | apache-2.0 |