| repo_name (string, 5–108 chars) | path (string, 6–333 chars) | size (string, 1–6 chars) | content (string, 4–977k chars) | license (15 classes) |
|---|---|---|---|---|
nate-rcl/irplus | dspace_urresearch_import/test/edu/ur/dspace/test/PropertiesLoader.java | 1786 | /**
Copyright 2008 University of Rochester
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.ur.dspace.test;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
* Properties loader to help with testing on both unix and
* windows platforms.
*
* @author Nathan Sarr
*
*/
public class PropertiesLoader {

    /** Path separator used on unix-like platforms. */
    public static final String UNIX_PATH_SEPERATOR = "/";

    /** Path separator used on windows platforms. */
    public static final String WINDOWS_PATH_SEPERATOR = "\\";

    /**
     * Loads the platform specific testing properties file from the classpath:
     * {@code testing_unix.properties} on unix-like systems and
     * {@code testing_windows.properties} on windows.
     *
     * @return the loaded properties, never null
     * @throws RuntimeException if the platform cannot be determined from the file separator
     * @throws IllegalStateException if the resource is missing or cannot be read
     */
    public Properties getProperties()
    {
        ClassLoader cl = this.getClass().getClassLoader();
        // Pick the resource name based on the platform's path separator.
        String resourceName;
        if( File.separator.equals(UNIX_PATH_SEPERATOR))
        {
            resourceName = "testing_unix.properties";
        }
        else if( File.separator.equals(WINDOWS_PATH_SEPERATOR))
        {
            resourceName = "testing_windows.properties";
        }
        else
        {
            throw new RuntimeException("Could not determine system type");
        }
        InputStream is = cl.getResourceAsStream(resourceName);
        if( is == null )
        {
            // Name the actual missing resource instead of the old opaque "IS is NULL".
            throw new IllegalStateException("Could not find " + resourceName + " on the classpath");
        }
        Properties properties = new Properties();
        try {
            properties.load(is);
        } catch (IOException e) {
            // Preserve the underlying cause; the original discarded it.
            throw new IllegalStateException("Could not read " + resourceName + " file", e);
        } finally {
            // The original leaked the stream; close it on every path.
            try {
                is.close();
            } catch (IOException ignored) {
                // Best-effort close; nothing useful can be done here.
            }
        }
        return properties;
    }
}
| apache-2.0 |
mazl123321/LuceneInAction.Version_46 | src/main/java/com/mathworks/xzheng/tools/XmlQueryParser/AgoFilterBuilder.java | 3030 | package com.mathworks.xzheng.tools.XmlQueryParser;
/**
* Copyright Manning Publications Co.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific lan
*/
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.HashMap;
import org.apache.lucene.queryparser.xml.DOMUtils;
import org.apache.lucene.queryparser.xml.FilterBuilder;
import org.apache.lucene.queryparser.xml.ParserException;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.NumericRangeFilter;
import org.w3c.dom.Element;
// From chapter 9
public class AgoFilterBuilder implements FilterBuilder {

    /** Maps the supported time unit names onto their {@link Calendar} field constants. */
    static HashMap<String,Integer> timeUnits=new HashMap<String,Integer>();

    static {
        timeUnits.put("days", Calendar.DAY_OF_YEAR);
        timeUnits.put("months",Calendar.MONTH);
        timeUnits.put("years", Calendar.YEAR);
    }

    /**
     * Builds a numeric range filter over a field holding dates encoded as
     * yyyyMMdd integers, matching documents between "from" and "to" time
     * units before now (both endpoints inclusive).
     *
     * @param element XML element carrying fieldName, timeUnit, from and to attributes
     * @return an inclusive {@link NumericRangeFilter} over the encoded dates
     * @throws ParserException if the time unit is not days, months or years
     */
    public Filter getFilter(Element element) throws ParserException {
        // Extract field name, time unit and the from/to offsets.
        String field = DOMUtils.getAttributeWithInheritanceOrFail(element, "fieldName");
        String unitName = DOMUtils.getAttribute(element, "timeUnit", "days");
        Integer calendarField = timeUnits.get(unitName);
        if (calendarField == null) {
            throw new ParserException("Illegal time unit:"
                + unitName + " - must be days, months or years");
        }

        int from = DOMUtils.getAttribute(element, "from", 0);
        int to = DOMUtils.getAttribute(element, "to", 0);
        // Normalize so that "from" always holds the larger (older) offset.
        if (from < to) {
            int larger = to;
            to = from;
            from = larger;
        }

        // Translate the offsets into concrete calendar dates relative to now.
        SimpleDateFormat dayFormat = new SimpleDateFormat("yyyyMMdd");
        Calendar rangeStart = Calendar.getInstance();
        rangeStart.add(calendarField, -from);
        Calendar rangeEnd = Calendar.getInstance();
        rangeEnd.add(calendarField, -to);

        // Encode both endpoints as yyyyMMdd integers and build the filter.
        return NumericRangeFilter.newIntRange(
            field,
            Integer.valueOf(dayFormat.format(rangeStart.getTime())),
            Integer.valueOf(dayFormat.format(rangeEnd.getTime())),
            true, true);
    }
}
/*
#A Extract field, time unit, from and to
#B Parse date/times
#C Create NumericRangeFilter
*/
| apache-2.0 |
crate/crate | server/src/main/java/io/crate/metadata/information/InformationSqlFeaturesTableInfo.java | 2557 | /*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.metadata.information;
import static io.crate.types.DataTypes.BOOLEAN;
import static io.crate.types.DataTypes.STRING;
import io.crate.execution.engine.collect.files.SqlFeatureContext;
import io.crate.metadata.ColumnIdent;
import io.crate.metadata.RelationName;
import io.crate.metadata.SystemTable;
/**
 * Definition of the {@code information_schema.sql_features} virtual table,
 * which lists the SQL standard features and whether this server supports them.
 */
public class InformationSqlFeaturesTableInfo {

    /** Unqualified table name within the information schema. */
    public static final String NAME = "sql_features";

    /** Fully qualified relation name: information_schema.sql_features. */
    public static final RelationName IDENT = new RelationName(InformationSchemaInfo.NAME, NAME);

    /**
     * Builds the system table, mapping each column onto the matching
     * {@link SqlFeatureContext} accessor.
     */
    public static SystemTable<SqlFeatureContext> create() {
        // The primary key spans every column except the free-form comments column.
        ColumnIdent[] primaryKey = {
            new ColumnIdent("feature_id"),
            new ColumnIdent("feature_name"),
            new ColumnIdent("sub_feature_id"),
            new ColumnIdent("sub_feature_name"),
            new ColumnIdent("is_supported"),
            new ColumnIdent("is_verified_by")
        };
        return SystemTable.<SqlFeatureContext>builder(IDENT)
            .add("feature_id", STRING, SqlFeatureContext::getFeatureId)
            .add("feature_name", STRING, SqlFeatureContext::getFeatureName)
            .add("sub_feature_id", STRING, SqlFeatureContext::getSubFeatureId)
            .add("sub_feature_name", STRING, SqlFeatureContext::getSubFeatureName)
            .add("is_supported", BOOLEAN, SqlFeatureContext::isSupported)
            .add("is_verified_by", STRING, SqlFeatureContext::getIsVerifiedBy)
            .add("comments", STRING, SqlFeatureContext::getComments)
            .setPrimaryKeys(primaryKey)
            .build();
    }
}
| apache-2.0 |
kiritbasu/datacollector | miniIT/src/test/java/com/streamsets/datacollector/multiple/TestMultiplePipelinesComplex.java | 8149 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.multiple;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
import com.streamsets.datacollector.base.TestMultiplePipelinesBase;
import com.streamsets.datacollector.util.TestUtil;
import com.streamsets.pipeline.lib.KafkaTestUtil;
import com.streamsets.pipeline.lib.util.ThreadUtil;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import org.apache.flume.Channel;
import org.apache.flume.ChannelSelector;
import org.apache.flume.Context;
import org.apache.flume.channel.ChannelProcessor;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.channel.ReplicatingChannelSelector;
import org.apache.flume.conf.Configurables;
import org.apache.flume.source.AvroSource;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Integration test driving three pipelines at once: a random-data-to-Kafka
 * pipeline, a Kafka-to-Flume pipeline and a Kafka-to-HDFS pipeline. The class
 * stands up the required external services (Zookeeper/Kafka brokers, an Avro
 * Flume source backed by a memory channel, and a MiniDFSCluster) before the
 * pipelines run, and tears them down afterwards.
 */
@Ignore
public class TestMultiplePipelinesComplex extends TestMultiplePipelinesBase {

    // Topic consumed by the Kafka-to-Flume pipeline.
    private static final String TOPIC1 = "KafkaToFlume";
    // Topic consumed by the Kafka-to-HDFS pipeline.
    private static final String TOPIC2 = "KafkaToHDFS";
    // Topic written by the random-data-to-Kafka pipeline.
    private static final String TOPIC3 = "randomToKafka";

    //Flume destination related
    private static AvroSource source;
    private static Channel ch;
    private static Producer<String, String> producer1;
    private static Producer<String, String> producer2;
    private static int flumePort;
    // Executor running the two endless producer loops; stopped via shutdownNow() in afterClass().
    private static ExecutorService e;

    //HDFS
    private static MiniDFSCluster miniDFS;

    /**
     * Loads the three pipeline definitions from classpath resources and rewrites
     * their placeholder topic names and endpoint addresses to match the services
     * started by this test (Kafka broker, Zookeeper, Flume port, MiniDFS URI).
     *
     * @return the rewritten JSON for the three pipelines, in start order
     */
    private static List<String> getPipelineJson() throws URISyntaxException, IOException {
        //random to kafka
        URI uri = Resources.getResource("kafka_destination_pipeline_operations.json").toURI();
        String randomToKafka = new String(Files.readAllBytes(Paths.get(uri)), StandardCharsets.UTF_8);
        randomToKafka = randomToKafka.replace("topicName", TOPIC3);
        randomToKafka = randomToKafka.replaceAll("localhost:9092", KafkaTestUtil.getMetadataBrokerURI());
        randomToKafka = randomToKafka.replaceAll("localhost:2181", KafkaTestUtil.getZkServer().connectString());
        //kafka to flume pipeline
        uri = Resources.getResource("cluster_kafka_flume.json").toURI();
        String kafkaToFlume = new String(Files.readAllBytes(Paths.get(uri)), StandardCharsets.UTF_8);
        kafkaToFlume = kafkaToFlume.replace("topicName", TOPIC1);
        kafkaToFlume = kafkaToFlume.replaceAll("localhost:9092", KafkaTestUtil.getMetadataBrokerURI());
        kafkaToFlume = kafkaToFlume.replaceAll("localhost:2181", KafkaTestUtil.getZkConnect());
        // The Flume port is picked at runtime; substitute it for the placeholder port.
        kafkaToFlume = kafkaToFlume.replaceAll("localhost:9050", "localhost:" + flumePort);
        kafkaToFlume = kafkaToFlume.replaceAll("CLUSTER", "STANDALONE");
        //kafka to hdfs pipeline
        uri = Resources.getResource("cluster_kafka_hdfs.json").toURI();
        String kafkaToHDFS = new String(Files.readAllBytes(Paths.get(uri)), StandardCharsets.UTF_8);
        kafkaToHDFS = kafkaToHDFS.replace("topicName", TOPIC2);
        kafkaToHDFS = kafkaToHDFS.replaceAll("localhost:9092", KafkaTestUtil.getMetadataBrokerURI());
        kafkaToHDFS = kafkaToHDFS.replaceAll("localhost:2181", KafkaTestUtil.getZkConnect());
        kafkaToHDFS = kafkaToHDFS.replaceAll("CLUSTER", "STANDALONE");
        // Point the pipeline at the MiniDFS cluster started in beforeClass().
        kafkaToHDFS = kafkaToHDFS.replaceAll("/uri", miniDFS.getURI().toString());
        return ImmutableList.of(randomToKafka, kafkaToFlume, kafkaToHDFS);
    }

    /** Maps each pipeline name to the revision ("0") used by the base class. */
    @Override
    protected Map<String, String> getPipelineNameAndRev() {
        return ImmutableMap.of("kafka_destination_pipeline", "0", "kafka_origin_pipeline_cluster", "0", "cluster_kafka_hdfs", "0");
    }

    /**
     * The extending test must call this method in the method scheduled to run before class.
     * Starts Kafka, two endless message producers, a Flume Avro source and a
     * MiniDFS cluster, then registers the three pipelines with the base class.
     * @throws Exception
     */
    @BeforeClass
    public static void beforeClass() throws Exception {
        //setup kafka to read from
        KafkaTestUtil.startZookeeper();
        KafkaTestUtil.startKafkaBrokers(1);
        KafkaTestUtil.createTopic(TOPIC1, 1, 1);
        KafkaTestUtil.createTopic(TOPIC2, 1, 1);
        KafkaTestUtil.createTopic(TOPIC3, 1, 1);
        producer1 = KafkaTestUtil.createProducer(KafkaTestUtil.getMetadataBrokerURI(), true);
        producer2 = KafkaTestUtil.createProducer(KafkaTestUtil.getMetadataBrokerURI(), true);
        e = Executors.newFixedThreadPool(2);
        // Endless loop feeding TOPIC1; terminated only by shutdownNow() in afterClass().
        e.submit(new Runnable() {
            @Override
            public void run() {
                int index = 0;
                while (true) {
                    producer1.send(new KeyedMessage<>(TOPIC1, "0", "Hello Kafka" + index));
                    ThreadUtil.sleep(200);
                    index = (index+1)%10;
                }
            }
        });
        // Endless loop feeding TOPIC2, same lifecycle as the first.
        e.submit(new Runnable() {
            @Override
            public void run() {
                int index = 0;
                while (true) {
                    producer2.send(new KeyedMessage<>(TOPIC2, "0", "Hello Kafka" + index));
                    ThreadUtil.sleep(200);
                    index = (index+1)%10;
                }
            }
        });
        //setup flume to write to
        source = new AvroSource();
        ch = new MemoryChannel();
        Configurables.configure(ch, new Context());
        Context context = new Context();
        //This should match whats present in the pipeline.json file
        flumePort = TestUtil.getFreePort();
        context.put("port", String.valueOf(flumePort));
        context.put("bind", "localhost");
        Configurables.configure(source, context);
        List<Channel> channels = new ArrayList<>();
        channels.add(ch);
        ChannelSelector rcs = new ReplicatingChannelSelector();
        rcs.setChannels(channels);
        source.setChannelProcessor(new ChannelProcessor(rcs));
        source.start();
        //HDFS settings
        // setting some dummy kerberos settings to be able to test a mis-setting
        System.setProperty("java.security.krb5.realm", "foo");
        System.setProperty("java.security.krb5.kdc", "localhost:0");
        File minidfsDir = new File("target/minidfs").getAbsoluteFile();
        if (!minidfsDir.exists()) {
            Assert.assertTrue(minidfsDir.mkdirs());
        }
        System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, minidfsDir.getPath());
        Configuration conf = new HdfsConfiguration();
        // Allow the current user to proxy from any host/group when talking to MiniDFS.
        conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", "*");
        conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".groups", "*");
        UserGroupInformation.createUserForTesting("foo", new String[]{"all", "supergroup"});
        // Skip fsync in the edit log to speed up the test cluster.
        EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
        miniDFS = new MiniDFSCluster.Builder(conf).build();
        // Must run last: getPipelineJson() needs flumePort and miniDFS to be initialized.
        TestMultiplePipelinesBase.beforeClass(getPipelineJson());
    }

    /**
     * Stops the producer loops and shuts down MiniDFS, the Flume source/channel
     * and Kafka, then lets the base class clean up the pipelines.
     * NOTE(review): producer1/producer2 are never closed here — TODO confirm
     * whether KafkaTestUtil.shutdown() takes care of them.
     */
    @AfterClass
    public static void afterClass() throws Exception {
        e.shutdownNow();
        if (miniDFS != null) {
            miniDFS.shutdown();
            miniDFS = null;
        }
        source.stop();
        ch.stop();
        KafkaTestUtil.shutdown();
        TestMultiplePipelinesBase.afterClass();
    }
}
| apache-2.0 |
JulienDelRio/AndroidKitKatForTAUG | sources/src/com/juliendelrio/kitkat4taug/subjects/SubjectFragmentNewSMSFramework.java | 4829 | package com.juliendelrio.kitkat4taug.subjects;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.provider.ContactsContract;
import android.provider.ContactsContract.PhoneLookup;
import android.support.v4.widget.CursorAdapter;
import android.telephony.SmsManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.juliendelrio.kitkat4taug.R;
/**
 * Fragment demonstrating the SMS APIs: a header form that sends an SMS either
 * directly via {@link SmsManager} or through an external app via an intent,
 * above a list showing the messages currently in the SMS inbox.
 */
public class SubjectFragmentNewSMSFramework extends AbstractSubjectFragment {

    // Root list view; the send form is attached as its header.
    private ListView listView;
    // Recipient phone number input.
    private EditText editTextReceiver;
    // Message body input.
    private EditText editTextMessage;
    // Sends directly through SmsManager (no user interaction).
    private Button buttonSendManager;
    // Hands the message off to an external SMS app via an intent.
    private Button buttonSendApi;

    /**
     * Inflates the list and its header form, wires up the two send buttons and
     * binds the SMS inbox content provider to the list.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        listView = (ListView) inflater.inflate(R.layout.fragment_newsms_root, container, false);
        View header = inflater.inflate(R.layout.fragment_newsms_header, listView, false);
        // Header must be added before the adapter is set.
        listView.addHeaderView(header);
        // header
        editTextReceiver = (EditText) header.findViewById(R.id.editText_receiver);
        editTextMessage = (EditText) header.findViewById(R.id.editText_message);
        buttonSendManager = (Button) header.findViewById(R.id.button_send_manager);
        buttonSendManager.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Send silently through the platform SMS manager.
                // NOTE(review): presumably requires the SEND_SMS permission — confirm in the manifest.
                SmsManager smsManager = SmsManager.getDefault();
                String phoneNumber = editTextReceiver.getText().toString();
                String smsMessage = editTextMessage.getText().toString();
                smsManager.sendTextMessage(phoneNumber, null, smsMessage, null, null);
            }
        });
        buttonSendApi = (Button) header.findViewById(R.id.button_send_api);
        buttonSendApi.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Delegate sending to an external SMS application.
                Intent smsIntent = new Intent(Intent.ACTION_VIEW);
                String phoneNumber = editTextReceiver.getText().toString();
                String smsMessage = editTextMessage.getText().toString();
                smsIntent.setType("vnd.android-dir/mms-sms");
                smsIntent.putExtra("address", phoneNumber);
                smsIntent.putExtra("sms_body", smsMessage);
                try {
                    startActivity(smsIntent);
                } catch (ActivityNotFoundException e) {
                    // No app can handle the intent; tell the user instead of crashing.
                    Toast.makeText(getActivity(), R.string.no_app_to_handle, Toast.LENGTH_SHORT)
                            .show();
                }
            }
        });
        // list content
        Uri uriSms = Uri.parse("content://sms/inbox");
        // NOTE(review): query runs on the UI thread — consider a CursorLoader.
        Cursor c = inflater.getContext().getContentResolver().query(uriSms, null, null, null, null);
        listView.setAdapter(new CustomAdapter(inflater.getContext(), c,
                CursorAdapter.FLAG_REGISTER_CONTENT_OBSERVER));
        return listView;
    }

    /**
     * Cursor adapter rendering each inbox SMS as a two-line item:
     * resolved contact name (or raw address) on top, message body below.
     */
    private class CustomAdapter extends CursorAdapter {

        public CustomAdapter(Context context, Cursor c, boolean autoRequery) {
            super(context, c, autoRequery);
        }

        public CustomAdapter(Context context, Cursor c, int flags) {
            super(context, c, flags);
        }

        /** Fills a recycled row with the data at the cursor's current position. */
        @Override
        public void bindView(View view, Context context, Cursor cursor) {
            Holder holder = (Holder) view.getTag();
            setContent(cursor, holder);
        }

        /** Refreshes the cursor and redraws the list when the SMS provider changes. */
        @Override
        protected void onContentChanged() {
            super.onContentChanged();
            // NOTE(review): Cursor.requery() is deprecated and runs on the UI thread — confirm acceptable here.
            getCursor().requery();
            notifyDataSetChanged();
        }

        /** Inflates a new two-line row and caches its text views in a Holder tag. */
        @Override
        public View newView(Context context, Cursor cursor, ViewGroup parent) {
            // Init view
            View view = LayoutInflater.from(context).inflate(android.R.layout.simple_list_item_2,
                    parent, false);
            Holder holder = new Holder();
            holder.text1 = (TextView) view.findViewById(android.R.id.text1);
            holder.text2 = (TextView) view.findViewById(android.R.id.text2);
            view.setTag(holder);
            // Add content
            setContent(cursor, holder);
            return view;
        }

        /**
         * Writes the message body and the sender's display name (looked up via
         * the contacts provider; falls back to the raw address) into the row.
         */
        private void setContent(Cursor cursor, Holder holder) {
            // Message
            int bodyIndex = cursor.getColumnIndex("body");
            String body = cursor.getString(bodyIndex);
            holder.text2.setText(body);
            // Contact
            String personName = cursor.getString(cursor.getColumnIndex("address"));
            Uri personUri = Uri.withAppendedPath(ContactsContract.PhoneLookup.CONTENT_FILTER_URI,
                    personName);
            // mContext is inherited from CursorAdapter.
            // NOTE(review): this per-row contacts query runs on the UI thread and the
            // returned cursor is not null-checked — TODO confirm both are acceptable.
            Cursor curPerson = mContext.getContentResolver().query(personUri,
                    new String[] { PhoneLookup._ID, PhoneLookup.DISPLAY_NAME }, null, null, null);
            if (curPerson.moveToFirst()) {
                int nameIndex = curPerson.getColumnIndex(PhoneLookup.DISPLAY_NAME);
                personName = curPerson.getString(nameIndex);
            }
            curPerson.close();
            holder.text1.setText(personName);
        }

        /** View cache for a row: title (contact) and subtitle (body) text views. */
        private class Holder {
            TextView text1;
            TextView text2;
        }
    }
}
| apache-2.0 |
Geomatys/sis | core/sis-metadata/src/main/java/org/apache/sis/internal/jaxb/metadata/MI_RangeElementDescription.java | 3431 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.internal.jaxb.metadata;
import javax.xml.bind.annotation.XmlElementRef;
import org.opengis.metadata.content.RangeElementDescription;
import org.apache.sis.metadata.iso.content.DefaultRangeElementDescription;
import org.apache.sis.internal.jaxb.gco.PropertyType;
/**
* JAXB adapter mapping implementing class to the GeoAPI interface. See
* package documentation for more information about JAXB and interface.
*
* @author Cédric Briançon (Geomatys)
* @author Martin Desruisseaux (Geomatys)
* @version 0.3
* @since 0.3
* @module
*/
/**
 * JAXB adapter that surrounds a {@link RangeElementDescription} metadata value
 * with a {@code <gmi:MI_RangeElementDescription>} property element, following
 * the wrapping pattern shared by all adapters in this package.
 *
 * @author Cédric Briançon (Geomatys)
 * @author Martin Desruisseaux (Geomatys)
 * @version 0.3
 * @since 0.3
 * @module
 */
public final class MI_RangeElementDescription extends
        PropertyType<MI_RangeElementDescription, RangeElementDescription>
{
    /**
     * Empty constructor for JAXB only.
     */
    public MI_RangeElementDescription() {
    }

    /**
     * Constructor used solely by {@link #wrap(RangeElementDescription)}.
     */
    private MI_RangeElementDescription(final RangeElementDescription metadata) {
        super(metadata);
    }

    /**
     * Returns the GeoAPI interface bound by this adapter. Called indirectly
     * from the private constructor above, so it must not rely on instance state.
     *
     * @return {@code RangeElementDescription.class}
     */
    @Override
    protected Class<RangeElementDescription> getBoundType() {
        return RangeElementDescription.class;
    }

    /**
     * Wraps a metadata value at marshalling time so that it appears inside a
     * {@code <gmi:MI_RangeElementDescription>} XML element.
     *
     * @param metadata the metadata element to marshall.
     * @return a {@code PropertyType} wrapping the given the metadata element.
     */
    @Override
    protected MI_RangeElementDescription wrap(final RangeElementDescription metadata) {
        return new MI_RangeElementDescription(metadata);
    }

    /**
     * Invoked by JAXB at marshalling time to obtain the concrete metadata
     * object written inside the {@code <gmi:MI_RangeElementDescription>}
     * element — the value (or a copy of it) passed to {@code wrap}.
     *
     * @return the metadata to be marshalled.
     */
    @XmlElementRef
    public DefaultRangeElementDescription getElement() {
        return DefaultRangeElementDescription.castOrCopy(metadata);
    }

    /**
     * Invoked by JAXB at unmarshalling time to stash the parsed value.
     *
     * @param metadata the unmarshalled metadata.
     */
    public void setElement(final DefaultRangeElementDescription metadata) {
        this.metadata = metadata;
    }
}
| apache-2.0 |
claucece/twu-biblioteca-sofiaceli | src/com/twu/biblioteca/helpers/ErrorPrinter.java | 266 | package com.twu.biblioteca.helpers;
import com.twu.biblioteca.models.color.ColorList;
/**
 * Contract for components that render error messages for display,
 * using a red/reset color pair looked up from the project's ColorList.
 */
public interface ErrorPrinter {

    // Interface fields are implicitly public static final.
    // Color code used to highlight error text (looked up by the "RED" key).
    String errorColor = ColorList.getColor("RED");
    // Color code that restores normal output after the error text ("RESET" key).
    String resetErrorColor = ColorList.getColor("RESET");

    // Builds the formatted error message ready for display.
    StringBuilder printError();
}
| apache-2.0 |
pengood/CodeInterview | src/com/nowcoder/offer/FindNumsAppearOnceTest.java | 1050 | package com.nowcoder.offer;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map.Entry;
/*
 * An integer array contains exactly two numbers that appear once;
 * every other number appears exactly twice. Find those two numbers.
 */
public class FindNumsAppearOnceTest {

    /**
     * Finds the two numbers that appear exactly once in an array in which every
     * other number appears exactly twice, using the classic XOR partition:
     * O(n) time and O(1) extra space (the original HashMap version used O(n) space).
     *
     * num1 and num2 are length-1 output arrays; the results are written to
     * num1[0] and num2[0]. If the input is null or shorter than 2 elements,
     * both outputs are set to 0 (matching the original behavior).
     *
     * @param array input array containing exactly two unique values
     * @param num1  length-1 array receiving one of the unique values
     * @param num2  length-1 array receiving the other unique value
     */
    public void FindNumsAppearOnce(int[] array, int num1[], int num2[]) {
        if (array == null || array.length < 2) {
            num1[0] = 0;
            num2[0] = 0;
            return;
        }
        // XOR of everything: paired values cancel, leaving a ^ b for the two uniques.
        int xorAll = 0;
        for (int value : array) {
            xorAll ^= value;
        }
        // Lowest set bit of (a ^ b) is a bit position where a and b differ.
        int diffBit = xorAll & (-xorAll);
        // Partition by that bit; each partition holds one unique value plus full pairs,
        // so XOR-ing each partition isolates one unique value.
        int a = 0;
        int b = 0;
        for (int value : array) {
            if ((value & diffBit) != 0) {
                a ^= value;
            } else {
                b ^= value;
            }
        }
        num1[0] = a;
        num2[0] = b;
    }
}
| apache-2.0 |
Yorxxx/playednext | app/src/test/java/com/piticlistudio/playednext/relationinterval/model/entity/RelationIntervalTest.java | 1673 | package com.piticlistudio.playednext.relationinterval.model.entity;
import com.piticlistudio.playednext.BaseTest;
import org.junit.Test;
import java.util.Calendar;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.*;
/**
* Test cases
* Created by jorge.garcia on 27/02/2017.
*/
public class RelationIntervalTest extends BaseTest {
@Test
public void given_empty_When_CreatesNewInterval_Then_ReturnsNewInterval() throws Exception {
RelationInterval data = RelationInterval.create(50, RelationInterval.RelationType.PLAYING, 5000);
assertNotNull(data);
assertEquals(50, data.id());
assertEquals(RelationInterval.RelationType.PLAYING, data.type());
assertEquals(5000, data.startAt());
assertEquals(0, data.getEndAt());
}
@Test
public void given_4hoursInterval_When_getHours_Then_Returns4() throws Exception {
long startAt = 5000;
long endAt = startAt + TimeUnit.HOURS.toMillis(4);
RelationInterval data = RelationInterval.create(50, RelationInterval.RelationType.PLAYING, 5000);
data.setEndAt(endAt);
// Act
double value = data.getHours();
// Assert
assertEquals(4, value, 0);
}
@Test
public void given_notFinishedInterval_When_getHours_Then_ComparesAgainstCurrentTime() throws Exception {
long startAt = System.currentTimeMillis() - TimeUnit.HOURS.toMillis(10);
RelationInterval data = RelationInterval.create(50, RelationInterval.RelationType.PLAYING, startAt);
// Act
double value = data.getHours();
// Assert
assertEquals(10, value, 0);
}
} | apache-2.0 |
triceo/zonkybot | robozonky-api/src/main/java/com/github/robozonky/internal/remote/EntityCollectionPageSource.java | 2080 | /*
* Copyright 2020 The RoboZonky Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.robozonky.internal.remote;
import java.util.List;
import java.util.function.Function;
import java.util.function.LongConsumer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import com.github.robozonky.internal.util.stream.PageSource;
/**
 * {@link PageSource} that serves entity pages from a paginated remote API.
 * The page size is fixed at construction time and fully determines page
 * boundaries; the caller-supplied limit cannot be honored (see fetch).
 *
 * @param <T> type of the entities returned
 * @param <S> type of the underlying API endpoint
 */
class EntityCollectionPageSource<T, S> implements PageSource<T> {

    private static final Logger LOGGER = LogManager.getLogger(EntityCollectionPageSource.class);

    private final PaginatedApi<T, S> api;
    private final Function<S, List<T>> function;
    private final Select select;
    private final int pageSize;

    public EntityCollectionPageSource(final PaginatedApi<T, S> api, final Function<S, List<T>> function,
            final Select select, final int pageSize) {
        this.api = api;
        this.function = function;
        this.select = select;
        this.pageSize = pageSize;
    }

    /**
     * Fetches the page containing the requested offset and reports the total
     * result count through the supplied sink.
     */
    @Override
    public List<T> fetch(final long offset, final long limit, final LongConsumer totalSizeSink) {
        LOGGER.trace("Requested with offset {}, limit {}.", offset, limit);
        // Offsets below 1 map to the first page; otherwise page = offset / pageSize.
        final int pageNumber = (offset < 1) ? 0 : (int) (offset / pageSize);
        // limit is ignored, as the page size determines the page number; offset+limit is not supported by Zonky
        final PaginatedResult<T> page = api.execute(function, this.select, pageNumber, pageSize);
        totalSizeSink.accept(page.getTotalResultCount());
        return page.getPage();
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-connect/src/main/java/com/amazonaws/services/connect/model/transform/UserPhoneConfigMarshaller.java | 2976 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.connect.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.connect.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * UserPhoneConfigMarshaller — serializes a {@code UserPhoneConfig} model into
 * the wire protocol via the SDK's {@code ProtocolMarshaller}.
 *
 * NOTE: this class is code-generated (see {@code @Generated}); hand edits
 * will be lost when the SDK is regenerated.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UserPhoneConfigMarshaller {

    // One MarshallingInfo per payload field, binding the wire name to its type.
    private static final MarshallingInfo<String> PHONETYPE_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("PhoneType").build();
    private static final MarshallingInfo<Boolean> AUTOACCEPT_BINDING = MarshallingInfo.builder(MarshallingType.BOOLEAN)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("AutoAccept").build();
    private static final MarshallingInfo<Integer> AFTERCONTACTWORKTIMELIMIT_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("AfterContactWorkTimeLimit").build();
    private static final MarshallingInfo<String> DESKPHONENUMBER_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("DeskPhoneNumber").build();

    // Stateless, so a single shared instance suffices.
    private static final UserPhoneConfigMarshaller instance = new UserPhoneConfigMarshaller();

    public static UserPhoneConfigMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * Throws SdkClientException when the input is null or when the underlying
     * protocol marshaller fails (the cause is preserved).
     */
    public void marshall(UserPhoneConfig userPhoneConfig, ProtocolMarshaller protocolMarshaller) {
        if (userPhoneConfig == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(userPhoneConfig.getPhoneType(), PHONETYPE_BINDING);
            protocolMarshaller.marshall(userPhoneConfig.getAutoAccept(), AUTOACCEPT_BINDING);
            protocolMarshaller.marshall(userPhoneConfig.getAfterContactWorkTimeLimit(), AFTERCONTACTWORKTIMELIMIT_BINDING);
            protocolMarshaller.marshall(userPhoneConfig.getDeskPhoneNumber(), DESKPHONENUMBER_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
ty1er/incubator-asterixdb | hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndex.java | 7995 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.lsm.invertedindex.inmemory;
import java.util.List;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.FileReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
import org.apache.hyracks.storage.am.common.api.IPageManager;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearcher;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IPartitionedInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedListPartitions;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.PartitionedTOccurrenceSearcher;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.PartitionedInvertedIndexTokenizingTupleIterator;
import org.apache.hyracks.storage.common.IIndexAccessParameters;
import org.apache.hyracks.storage.common.buffercache.IBufferCache;
/**
 * In-memory inverted index whose inverted lists are additionally partitioned by the
 * number of tokens of the indexed tuples. Tracks the smallest and largest token
 * counts seen so far so that searches can restrict themselves to the relevant
 * partitions.
 */
public class PartitionedInMemoryInvertedIndex extends InMemoryInvertedIndex implements IPartitionedInvertedIndex {

    // Guards minPartitionIndex/maxPartitionIndex; fair lock so writers are not starved.
    protected final ReentrantReadWriteLock partitionIndexLock = new ReentrantReadWriteLock(true);

    // Sentinel values (MAX_VALUE/MIN_VALUE) together mean "no tuple inserted yet".
    protected short minPartitionIndex = Short.MAX_VALUE;
    protected short maxPartitionIndex = Short.MIN_VALUE;

    public PartitionedInMemoryInvertedIndex(IBufferCache memBufferCache, IPageManager memFreePageManager,
            ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
            ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
            IBinaryTokenizerFactory tokenizerFactory, FileReference btreeFileRef) throws HyracksDataException {
        super(memBufferCache, memFreePageManager, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
                tokenCmpFactories, tokenizerFactory, btreeFileRef);
    }

    /**
     * Inserts the tuple and updates the observed token-count bounds with the number
     * of tokens the tokenizer produced for it.
     */
    @Override
    public void insert(ITupleReference tuple, BTreeAccessor btreeAccessor, IIndexOperationContext ictx)
            throws HyracksDataException {
        super.insert(tuple, btreeAccessor, ictx);
        PartitionedInMemoryInvertedIndexOpContext ctx = (PartitionedInMemoryInvertedIndexOpContext) ictx;
        PartitionedInvertedIndexTokenizingTupleIterator tupleIter =
                (PartitionedInvertedIndexTokenizingTupleIterator) ctx.getTupleIter();
        updatePartitionIndexes(tupleIter.getNumTokens());
    }

    @Override
    public void clear() throws HyracksDataException {
        super.clear();
        // Reset the bounds back to the "empty index" sentinels.
        minPartitionIndex = Short.MAX_VALUE;
        maxPartitionIndex = Short.MIN_VALUE;
    }

    /**
     * Widens the [min, max] token-count interval to include numTokens.
     */
    public void updatePartitionIndexes(short numTokens) {
        partitionIndexLock.writeLock().lock();
        try {
            if (numTokens < minPartitionIndex) {
                minPartitionIndex = numTokens;
            }
            if (numTokens > maxPartitionIndex) {
                maxPartitionIndex = numTokens;
            }
        } finally {
            partitionIndexLock.writeLock().unlock();
        }
    }

    @Override
    public PartitionedInMemoryInvertedIndexAccessor createAccessor(IIndexAccessParameters iap)
            throws HyracksDataException {
        return new PartitionedInMemoryInvertedIndexAccessor(this,
                new PartitionedInMemoryInvertedIndexOpContext(btree, tokenCmpFactories, tokenizerFactory));
    }

    @Override
    public PartitionedInMemoryInvertedIndexAccessor createAccessor(int[] nonIndexFields) throws HyracksDataException {
        return new PartitionedInMemoryInvertedIndexAccessor(this,
                new PartitionedInMemoryInvertedIndexOpContext(btree, tokenCmpFactories, tokenizerFactory),
                nonIndexFields);
    }

    /**
     * Opens one inverted-list cursor per candidate partition in
     * [numTokensLowerBound, numTokensUpperBound] (clamped to the observed bounds).
     *
     * @return false if the index is empty, true otherwise.
     */
    @Override
    public boolean openInvertedListPartitionCursors(IInvertedIndexSearcher searcher, IIndexOperationContext ictx,
            short numTokensLowerBound, short numTokensUpperBound, InvertedListPartitions invListPartitions,
            List<IInvertedListCursor> cursorsOrderedByTokens) throws HyracksDataException {
        short minPartitionIndex;
        short maxPartitionIndex;
        // Snapshot both bounds atomically; try/finally guarantees the lock is
        // released even if an exception is thrown (consistent with updatePartitionIndexes).
        partitionIndexLock.readLock().lock();
        try {
            minPartitionIndex = this.minPartitionIndex;
            maxPartitionIndex = this.maxPartitionIndex;
        } finally {
            partitionIndexLock.readLock().unlock();
        }
        if (minPartitionIndex == Short.MAX_VALUE && maxPartitionIndex == Short.MIN_VALUE) {
            // Index must be empty.
            return false;
        }
        // Clamp the caller's requested token-count range to what actually exists.
        // Negative bounds mean "unbounded" on that side.
        short partitionStartIndex = minPartitionIndex;
        short partitionEndIndex = maxPartitionIndex;
        if (numTokensLowerBound >= 0) {
            partitionStartIndex = (short) Math.max(minPartitionIndex, numTokensLowerBound);
        }
        if (numTokensUpperBound >= 0) {
            partitionEndIndex = (short) Math.min(maxPartitionIndex, numTokensUpperBound);
        }
        PartitionedTOccurrenceSearcher partSearcher = (PartitionedTOccurrenceSearcher) searcher;
        PartitionedInMemoryInvertedIndexOpContext ctx = (PartitionedInMemoryInvertedIndexOpContext) ictx;
        ctx.setOperation(IndexOperation.SEARCH);
        // We can pick either of the full low or high search key, since they should be identical here.
        ITupleReference searchKey = partSearcher.getFullLowSearchKey();
        ctx.getBtreePred().setLowKey(searchKey, true);
        ctx.getBtreePred().setHighKey(searchKey, true);
        // Go through all possibly partitions and see if the token matches.
        // TODO: This procedure could be made more efficient by determining the next partition to search
        // using the last existing partition and re-searching the BTree with an open interval as low key.
        for (short i = partitionStartIndex; i <= partitionEndIndex; i++) {
            partSearcher.setNumTokensBoundsInSearchKeys(i, i);
            InMemoryInvertedListCursor inMemListCursor =
                    (InMemoryInvertedListCursor) partSearcher.getCachedInvertedListCursor();
            inMemListCursor.prepare(ctx.getBtreeAccessor(), ctx.getBtreePred(), ctx.getTokenFieldsCmp(),
                    ctx.getBtreeCmp());
            inMemListCursor.reset(searchKey);
            invListPartitions.addInvertedListCursor(inMemListCursor, i);
        }
        return true;
    }

    @Override
    public boolean isEmpty() {
        // try/finally so the read lock cannot leak; the sentinel pair means no
        // tuple has ever been inserted (or the index was cleared).
        partitionIndexLock.readLock().lock();
        try {
            return minPartitionIndex == Short.MAX_VALUE && maxPartitionIndex == Short.MIN_VALUE;
        } finally {
            partitionIndexLock.readLock().unlock();
        }
    }
}
| apache-2.0 |
nortal/spring-mvc-component-web | modules/component-web-jsp/src/main/java/com/nortal/spring/cw/jsp/util/enums/PrivilegeEnum.java | 462 | package com.nortal.spring.cw.jsp.util.enums;
import com.nortal.spring.cw.core.security.CwPrivilege;
/**
 * Portal privileges.
 *
 * <p>Currently declares no constants; serves as the enum type implementing the
 * {@link CwPrivilege} contract for the portal's privilege checks.
 *
 * @author Lauri Lättemäe (lauri.lattemae@nortal.com)
 * @since 12.09.2013
 */
public enum PrivilegeEnum implements CwPrivilege {
    // @formatter:off
    ;
    //@formatter:on
    // Privilege code exposed through the CwPrivilege interface.
    private String code;
    private PrivilegeEnum(String code) {
        this.code = code;
    }
    public String getCode() {
        return code;
    }
}
| apache-2.0 |
termsuite/termsuite-core | src/test/java/fr/univnantes/termsuite/tools/ClearTempFiles.java | 337 | package fr.univnantes.termsuite.tools;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import fr.univnantes.termsuite.test.func.FunctionalTests;
/**
 * Command-line utility that deletes the temporary directory shared by the
 * functional tests.
 */
public class ClearTempFiles {

    public static void main(String[] args) throws IOException {
        // Resolve the functional-test temp directory, then remove it recursively.
        java.io.File tmpDir = FunctionalTests.getTestTmpDir().toFile();
        FileUtils.deleteDirectory(tmpDir);
    }
}
| apache-2.0 |
aleo72/ww-ceem-radar | src/main/java/gov/nasa/worldwind/ogc/OGCContactInformation.java | 6994 | /*
* Copyright (C) 2012 United States Government as represented by the Administrator of the
* National Aeronautics and Space Administration.
* All Rights Reserved.
*/
package gov.nasa.worldwind.ogc;
import gov.nasa.worldwind.util.xml.*;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.XMLEvent;
/**
 * Parses an OGC ContactInformation element.
 *
 * <p>Holds the contact's primary person/organization, position, telephone
 * numbers, e-mail address and postal address as parsed from a WMS/OGC
 * capabilities document.
 *
 * @author tag
 * @version $Id: OGCContactInformation.java 1171 2013-02-11 21:45:02Z dcollins $
 */
public class OGCContactInformation extends AbstractXMLEventParser
{
    // Qualified names of the child elements this parser recognizes,
    // initialized against the instance's namespace URI.
    protected QName CONTACT_POSITION;
    protected QName CONTACT_VOICE_TELEPHONE;
    protected QName CONTACT_FACSIMILE_TELEPHONE;
    protected QName CONTACT_ELECTRONIC_MAIL_ADDRESS;
    protected QName CONTACT_PERSON_PRIMARY;
    protected QName CONTACT_ADDRESS;
    protected QName CONTACT_PERSON;
    protected QName CONTACT_ORGANIZATION;

    protected String personPrimary;
    protected String organization;
    protected String position;
    protected String voiceTelephone;
    protected String facsimileTelephone;
    protected String electronicMailAddress;
    protected OGCAddress contactAddress;

    /**
     * Creates a parser for the given namespace.
     *
     * @param namespaceURI the namespace URI used to qualify the element names.
     */
    public OGCContactInformation(String namespaceURI)
    {
        super(namespaceURI);

        this.initialize();
    }

    // Builds the element QNames from this parser's namespace URI.
    private void initialize()
    {
        CONTACT_POSITION = new QName(this.getNamespaceURI(), "ContactPosition");
        CONTACT_VOICE_TELEPHONE = new QName(this.getNamespaceURI(), "ContactVoiceTelephone");
        CONTACT_FACSIMILE_TELEPHONE = new QName(this.getNamespaceURI(), "ContactFacsimileTelephone");
        CONTACT_ELECTRONIC_MAIL_ADDRESS = new QName(this.getNamespaceURI(), "ContactElectronicMailAddress");
        CONTACT_PERSON_PRIMARY = new QName(this.getNamespaceURI(), "ContactPersonPrimary");
        CONTACT_ADDRESS = new QName(this.getNamespaceURI(), "ContactAddress");
        CONTACT_PERSON = new QName(this.getNamespaceURI(), "ContactPerson");
        CONTACT_ORGANIZATION = new QName(this.getNamespaceURI(), "ContactOrganization");
    }

    @Override
    public XMLEventParser allocate(XMLEventParserContext ctx, XMLEvent event)
    {
        // Only ContactAddress requires a dedicated sub-parser; everything else
        // is parsed as simple string content in doParseEventContent.
        XMLEventParser defaultParser = null;

        if (ctx.isStartElement(event, CONTACT_ADDRESS))
            defaultParser = new OGCAddress(this.getNamespaceURI());

        return ctx.allocate(event, defaultParser);
    }

    @Override
    protected void doParseEventContent(XMLEventParserContext ctx, XMLEvent event, Object... args)
        throws XMLStreamException
    {
        if (ctx.isStartElement(event, CONTACT_POSITION))
        {
            this.setPosition(ctx.getStringParser().parseString(ctx, event));
        }
        else if (ctx.isStartElement(event, CONTACT_VOICE_TELEPHONE))
        {
            this.setVoiceTelephone(ctx.getStringParser().parseString(ctx, event));
        }
        else if (ctx.isStartElement(event, CONTACT_FACSIMILE_TELEPHONE))
        {
            this.setFacsimileTelephone(ctx.getStringParser().parseString(ctx, event));
        }
        else if (ctx.isStartElement(event, CONTACT_ELECTRONIC_MAIL_ADDRESS))
        {
            this.setElectronicMailAddress(ctx.getStringParser().parseString(ctx, event));
        }
        else if (ctx.isStartElement(event, CONTACT_PERSON_PRIMARY))
        {
            String[] sa = this.parseContactPersonPrimary(ctx, event);
            this.setPersonPrimary(sa[0]);
            this.setOrganization(sa[1]);
        }
        else if (ctx.isStartElement(event, CONTACT_ADDRESS))
        {
            XMLEventParser parser = this.allocate(ctx, event);
            if (parser != null)
            {
                Object o = parser.parse(ctx, event, args);
                if (o != null && o instanceof OGCAddress)
                    this.setContactAddress((OGCAddress) o);
            }
        }
    }

    /**
     * Parses a ContactPersonPrimary element into its ContactPerson and
     * ContactOrganization child values.
     *
     * @param ctx      the parser context.
     * @param cppEvent the ContactPersonPrimary start element.
     *
     * @return a two-element array: index 0 is the person, index 1 the
     *         organization; either may be null if the corresponding child is
     *         absent or the stream ends prematurely.
     *
     * @throws XMLStreamException if an error occurs reading the event stream.
     */
    protected String[] parseContactPersonPrimary(XMLEventParserContext ctx, XMLEvent cppEvent) throws XMLStreamException
    {
        String[] items = new String[2];

        for (XMLEvent event = ctx.nextEvent(); event != null; event = ctx.nextEvent())
        {
            if (ctx.isEndElement(event, cppEvent))
                return items;

            if (ctx.isStartElement(event, CONTACT_PERSON))
            {
                items[0] = ctx.getStringParser().parseString(ctx, event);
            }
            else if (ctx.isStartElement(event, CONTACT_ORGANIZATION))
            {
                items[1] = ctx.getStringParser().parseString(ctx, event);
            }
        }

        // Fix: the stream ended before the closing ContactPersonPrimary element
        // (truncated/malformed document). Previously this returned null, which
        // caused a NullPointerException in doParseEventContent when it
        // dereferenced sa[0]. Return whatever was collected instead.
        return items;
    }

    public String getPersonPrimary()
    {
        return personPrimary;
    }

    protected void setPersonPrimary(String personPrimary)
    {
        this.personPrimary = personPrimary;
    }

    public String getOrganization()
    {
        return organization;
    }

    protected void setOrganization(String organization)
    {
        this.organization = organization;
    }

    public String getPosition()
    {
        return position;
    }

    protected void setPosition(String position)
    {
        this.position = position;
    }

    public String getVoiceTelephone()
    {
        return voiceTelephone;
    }

    protected void setVoiceTelephone(String voiceTelephone)
    {
        this.voiceTelephone = voiceTelephone;
    }

    public String getFacsimileTelephone()
    {
        return facsimileTelephone;
    }

    protected void setFacsimileTelephone(String facsimileTelephone)
    {
        this.facsimileTelephone = facsimileTelephone;
    }

    public String getElectronicMailAddress()
    {
        return electronicMailAddress;
    }

    protected void setElectronicMailAddress(String electronicMailAddress)
    {
        this.electronicMailAddress = electronicMailAddress;
    }

    public OGCAddress getContactAddress()
    {
        return contactAddress;
    }

    protected void setContactAddress(OGCAddress contactAddress)
    {
        this.contactAddress = contactAddress;
    }

    /**
     * Returns a multi-line, human-readable dump of all contact fields; absent
     * fields are rendered as "none".
     */
    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();

        sb.append("PersonPrimary: ").append(this.personPrimary != null ? this.personPrimary : "none").append("\n");
        sb.append("Organization: ").append(this.organization != null ? this.organization : "none").append("\n");
        sb.append("Position: ").append(this.position != null ? this.position : "none").append("\n");
        sb.append("VoiceTelephone: ").append(this.voiceTelephone != null ? this.voiceTelephone : "none").append("\n");
        sb.append("FacsimileTelephone: ").append(
            this.facsimileTelephone != null ? this.facsimileTelephone : "none").append("\n");
        sb.append("ElectronicMailAddress: ").append(
            this.electronicMailAddress != null ? this.electronicMailAddress : "none").append("\n");
        sb.append(this.contactAddress != null ? this.contactAddress : "none");

        return sb.toString();
    }
}
| apache-2.0 |
mosaic-cloud/mosaic-java-platform | tools-callbacks/src/main/java/eu/mosaic_cloud/tools/callbacks/tools/CallbackCompletionWorkflows.java | 2894 | /*
* #%L
* mosaic-tools-callbacks
* %%
* Copyright (C) 2010 - 2013 Institute e-Austria Timisoara (Romania)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package eu.mosaic_cloud.tools.callbacks.tools;
import java.util.ArrayDeque;
import java.util.concurrent.Callable;
import eu.mosaic_cloud.tools.callbacks.core.CallbackCompletion;
import eu.mosaic_cloud.tools.callbacks.core.CallbackCompletionObserver;
import com.google.common.base.Preconditions;
/**
 * Helpers for composing asynchronous operations expressed as
 * {@link CallbackCompletion}s into larger workflows.
 */
public class CallbackCompletionWorkflows
{
    /**
     * Executes the given operations strictly one after another: each operation is
     * invoked only after the completion returned by the previous one has succeeded.
     *
     * <p>The returned completion succeeds once every operation has succeeded, and
     * fails with the first exception thrown by an operation (or reported by one of
     * the intermediate completions), in which case the remaining operations are
     * not executed.
     *
     * @param operations_ the operations to chain, in execution order; neither the
     *            array nor any element may be {@code null}.
     * @return a completion tracking the whole sequence.
     */
    @SafeVarargs
    public static final CallbackCompletion<Void> executeSequence (final Callable<CallbackCompletion<Void>> ... operations_) {
        Preconditions.checkNotNull (operations_);
        // Snapshot the varargs into a queue that the chainer consumes one element
        // per step; this also validates each operation up front.
        final ArrayDeque<Callable<CallbackCompletion<Void>>> operations = new ArrayDeque<Callable<CallbackCompletion<Void>>> (operations_.length);
        for (final Callable<CallbackCompletion<Void>> operation : operations_) {
            Preconditions.checkNotNull (operation);
            operations.add (operation);
        }
        // Deferred future whose trigger is fired when the whole chain ends.
        final CallbackCompletionDeferredFuture<Void> future = CallbackCompletionDeferredFuture.create (Void.class);
        // Self-referencing runnable: each run() executes one operation and then
        // either recurses synchronously (if the operation completed immediately)
        // or re-schedules itself via the completion observer.
        final Runnable chainer = new Runnable () {
            @Override
            public void run () {
                final Runnable chainer = this;
                // No operations left -> the whole sequence succeeded.
                if (operations.isEmpty ()) {
                    future.trigger.triggerSucceeded (null);
                    return;
                }
                final CallbackCompletion<Void> completion;
                try {
                    final Callable<CallbackCompletion<Void>> operation = operations.removeFirst ();
                    completion = operation.call ();
                } catch (final Throwable exception) {
                    // An operation threw synchronously: fail the sequence and stop.
                    // FIXME: log...
                    future.trigger.triggerFailed (exception);
                    return;
                }
                // Fast path: the operation's completion is already resolved, so
                // continue (or fail) without registering an observer.
                if (completion.isCompleted ()) {
                    final Throwable exception = completion.getException ();
                    if (exception != null) {
                        future.trigger.triggerFailed (exception);
                        return;
                    } else {
                        chainer.run ();
                        return;
                    }
                }
                // Slow path: resume the chain when the completion resolves.
                completion.observe (new CallbackCompletionObserver () {
                    @Override
                    public CallbackCompletion<Void> completed (final CallbackCompletion<?> completion_) {
                        // Sanity check: we must only be notified for the
                        // completion we registered on.
                        Preconditions.checkArgument (completion_ == completion);
                        final Throwable exception = completion.getException ();
                        if (exception != null)
                            future.trigger.triggerFailed (exception);
                        else
                            chainer.run ();
                        return (null);
                    }
                });
            }
        };
        // Kick off the first step; the chain drives itself from here on.
        chainer.run ();
        return (future.completion);
    }
}
| apache-2.0 |
OpenUniversity/ovirt-engine | frontend/webadmin/modules/gwt-common/src/main/java/org/ovirt/engine/ui/common/widget/renderer/ClusterTypeRenderer.java | 916 | package org.ovirt.engine.ui.common.widget.renderer;
import org.ovirt.engine.ui.common.CommonApplicationConstants;
import org.ovirt.engine.ui.common.gin.AssetProvider;
import org.ovirt.engine.ui.uicommonweb.models.clusters.ClusterGeneralModel;
import com.google.gwt.text.shared.AbstractRenderer;
/**
 * Renders a {@link ClusterGeneralModel.ClusterType} as its localized display
 * label ("Virt", "Gluster", or both joined by a line break).
 */
public class ClusterTypeRenderer extends AbstractRenderer<ClusterGeneralModel.ClusterType> {

    private static final CommonApplicationConstants constants = AssetProvider.getConstants();

    @Override
    public String render(ClusterGeneralModel.ClusterType clusterType) {
        switch (clusterType) {
            case VIRT:
                return constants.virt();
            case GLUSTER:
                return constants.gluster();
            case BOTH:
                // Both services enabled: show the two labels separated by a break.
                return constants.virt() + constants.andBreak() + constants.gluster();
            default:
                return ""; //$NON-NLS-1$
        }
    }
}
| apache-2.0 |
rahulmaddineni/Stayfit | app/libs/MPAndroidChart-2.2.3/MPAndroidChart-2.2.3/MPChartLib/src/com/github/mikephil/charting/charts/BarLineChartBase.java | 49563 |
package com.github.mikephil.charting.charts;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.PointF;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import com.github.mikephil.charting.components.Legend.LegendPosition;
import com.github.mikephil.charting.components.XAxis;
import com.github.mikephil.charting.components.XAxis.XAxisPosition;
import com.github.mikephil.charting.components.YAxis;
import com.github.mikephil.charting.components.YAxis.AxisDependency;
import com.github.mikephil.charting.data.BarData;
import com.github.mikephil.charting.data.BarEntry;
import com.github.mikephil.charting.data.BarLineScatterCandleBubbleData;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.highlight.ChartHighlighter;
import com.github.mikephil.charting.highlight.Highlight;
import com.github.mikephil.charting.interfaces.dataprovider.BarLineScatterCandleBubbleDataProvider;
import com.github.mikephil.charting.interfaces.datasets.IBarLineScatterCandleBubbleDataSet;
import com.github.mikephil.charting.jobs.AnimatedMoveViewJob;
import com.github.mikephil.charting.jobs.AnimatedZoomJob;
import com.github.mikephil.charting.jobs.MoveViewJob;
import com.github.mikephil.charting.jobs.ZoomJob;
import com.github.mikephil.charting.listener.BarLineChartTouchListener;
import com.github.mikephil.charting.listener.OnDrawListener;
import com.github.mikephil.charting.renderer.XAxisRenderer;
import com.github.mikephil.charting.renderer.YAxisRenderer;
import com.github.mikephil.charting.utils.PointD;
import com.github.mikephil.charting.utils.Transformer;
import com.github.mikephil.charting.utils.Utils;
/**
* Base-class of LineChart, BarChart, ScatterChart and CandleStickChart.
*
* @author Philipp Jahoda
*/
@SuppressLint("RtlHardcoded")
public abstract class BarLineChartBase<T extends BarLineScatterCandleBubbleData<? extends IBarLineScatterCandleBubbleDataSet<? extends Entry>>>
extends Chart<T> implements BarLineScatterCandleBubbleDataProvider {
/**
* the maximum number of entries to which values will be drawn
* (entry numbers greater than this value will cause value-labels to disappear)
*/
protected int mMaxVisibleCount = 100;
/**
* flag that indicates if auto scaling on the y axis is enabled
*/
private boolean mAutoScaleMinMaxEnabled = false;
private Integer mAutoScaleLastLowestVisibleXIndex = null;
private Integer mAutoScaleLastHighestVisibleXIndex = null;
/**
* flag that indicates if pinch-zoom is enabled. if true, both x and y axis
* can be scaled with 2 fingers, if false, x and y axis can be scaled
* separately
*/
protected boolean mPinchZoomEnabled = false;
/**
* flag that indicates if double tap zoom is enabled or not
*/
protected boolean mDoubleTapToZoomEnabled = true;
/**
* flag that indicates if highlighting per dragging over a fully zoomed out
* chart is enabled
*/
protected boolean mHighlightPerDragEnabled = true;
/**
* if true, dragging is enabled for the chart
*/
private boolean mDragEnabled = true;
private boolean mScaleXEnabled = true;
private boolean mScaleYEnabled = true;
/**
* paint object for the (by default) lightgrey background of the grid
*/
protected Paint mGridBackgroundPaint;
protected Paint mBorderPaint;
/**
* flag indicating if the grid background should be drawn or not
*/
protected boolean mDrawGridBackground = false;
protected boolean mDrawBorders = false;
/**
* Sets the minimum offset (padding) around the chart, defaults to 15
*/
protected float mMinOffset = 15.f;
/**
* the listener for user drawing on the chart
*/
protected OnDrawListener mDrawListener;
/**
* the object representing the labels on the left y-axis
*/
protected YAxis mAxisLeft;
/**
* the object representing the labels on the right y-axis
*/
protected YAxis mAxisRight;
/**
* the object representing the labels on the x-axis
*/
protected XAxis mXAxis;
protected YAxisRenderer mAxisRendererLeft;
protected YAxisRenderer mAxisRendererRight;
protected Transformer mLeftAxisTransformer;
protected Transformer mRightAxisTransformer;
protected XAxisRenderer mXAxisRenderer;
// /** the approximator object used for data filtering */
// private Approximator mApproximator;
    /** Constructor used when the chart is created from XML with a style resource. */
    public BarLineChartBase(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    /** Constructor used when the chart is inflated from an XML layout. */
    public BarLineChartBase(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    /** Constructor used when the chart is created programmatically. */
    public BarLineChartBase(Context context) {
        super(context);
    }
    @Override
    protected void init() {
        super.init();

        // Axes: left and right y-axis plus the shared x-axis.
        mAxisLeft = new YAxis(AxisDependency.LEFT);
        mAxisRight = new YAxis(AxisDependency.RIGHT);

        mXAxis = new XAxis();

        // One value<->pixel transformer per y-axis dependency; the renderers
        // below are wired to these, so the transformers must exist first.
        mLeftAxisTransformer = new Transformer(mViewPortHandler);
        mRightAxisTransformer = new Transformer(mViewPortHandler);

        mAxisRendererLeft = new YAxisRenderer(mViewPortHandler, mAxisLeft, mLeftAxisTransformer);
        mAxisRendererRight = new YAxisRenderer(mViewPortHandler, mAxisRight, mRightAxisTransformer);

        // The x-axis is rendered against the left-axis transformer.
        mXAxisRenderer = new XAxisRenderer(mViewPortHandler, mXAxis, mLeftAxisTransformer);

        setHighlighter(new ChartHighlighter(this));

        mChartTouchListener = new BarLineChartTouchListener(this, mViewPortHandler.getMatrixTouch());

        mGridBackgroundPaint = new Paint();
        mGridBackgroundPaint.setStyle(Style.FILL);
        // mGridBackgroundPaint.setColor(Color.WHITE);
        mGridBackgroundPaint.setColor(Color.rgb(240, 240, 240)); // light grey

        mBorderPaint = new Paint();
        mBorderPaint.setStyle(Style.STROKE);
        mBorderPaint.setColor(Color.BLACK);
        mBorderPaint.setStrokeWidth(Utils.convertDpToPixel(1f));
    }
// for performance tracking
private long totalTime = 0;
private long drawCycles = 0;
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        // Nothing to draw without data.
        if (mData == null)
            return;

        long starttime = System.currentTimeMillis();

        // Recompute how many x-labels to skip for the current zoom level.
        calcModulus();

        mXAxisRenderer.calcXBounds(this, mXAxis.mAxisLabelModulus);
        mRenderer.calcXBounds(this, mXAxis.mAxisLabelModulus);

        // execute all drawing commands
        drawGridBackground(canvas);

        if (mAxisLeft.isEnabled())
            mAxisRendererLeft.computeAxis(mAxisLeft.mAxisMinimum, mAxisLeft.mAxisMaximum);
        if (mAxisRight.isEnabled())
            mAxisRendererRight.computeAxis(mAxisRight.mAxisMinimum, mAxisRight.mAxisMaximum);

        mXAxisRenderer.renderAxisLine(canvas);
        mAxisRendererLeft.renderAxisLine(canvas);
        mAxisRendererRight.renderAxisLine(canvas);

        // When auto-scaling is on, recompute min/max (and offsets) whenever the
        // visible x-range changed since the last draw.
        if (mAutoScaleMinMaxEnabled) {
            final int lowestVisibleXIndex = getLowestVisibleXIndex();
            final int highestVisibleXIndex = getHighestVisibleXIndex();

            if (mAutoScaleLastLowestVisibleXIndex == null ||
                    mAutoScaleLastLowestVisibleXIndex != lowestVisibleXIndex ||
                    mAutoScaleLastHighestVisibleXIndex == null ||
                    mAutoScaleLastHighestVisibleXIndex != highestVisibleXIndex) {

                calcMinMax();
                calculateOffsets();

                mAutoScaleLastLowestVisibleXIndex = lowestVisibleXIndex;
                mAutoScaleLastHighestVisibleXIndex = highestVisibleXIndex;
            }
        }

        // make sure the graph values and grid cannot be drawn outside the
        // content-rect
        int clipRestoreCount = canvas.save();
        canvas.clipRect(mViewPortHandler.getContentRect());

        mXAxisRenderer.renderGridLines(canvas);
        mAxisRendererLeft.renderGridLines(canvas);
        mAxisRendererRight.renderGridLines(canvas)<!-- -->;

        // Limit lines configured to sit behind the data are drawn before it.
        if (mXAxis.isDrawLimitLinesBehindDataEnabled())
            mXAxisRenderer.renderLimitLines(canvas);
        if (mAxisLeft.isDrawLimitLinesBehindDataEnabled())
            mAxisRendererLeft.renderLimitLines(canvas);
        if (mAxisRight.isDrawLimitLinesBehindDataEnabled())
            mAxisRendererRight.renderLimitLines(canvas);

        mRenderer.drawData(canvas);

        // ... and the remaining limit lines are drawn on top of the data.
        if (!mXAxis.isDrawLimitLinesBehindDataEnabled())
            mXAxisRenderer.renderLimitLines(canvas);
        if (!mAxisLeft.isDrawLimitLinesBehindDataEnabled())
            mAxisRendererLeft.renderLimitLines(canvas);
        if (!mAxisRight.isDrawLimitLinesBehindDataEnabled())
            mAxisRendererRight.renderLimitLines(canvas);

        // if highlighting is enabled
        if (valuesToHighlight())
            mRenderer.drawHighlighted(canvas, mIndicesToHighlight);

        // Removes clipping rectangle
        canvas.restoreToCount(clipRestoreCount);

        mRenderer.drawExtras(canvas);

        // Labels, values, legend and markers are drawn outside the clip region.
        mXAxisRenderer.renderAxisLabels(canvas);
        mAxisRendererLeft.renderAxisLabels(canvas);
        mAxisRendererRight.renderAxisLabels(canvas);

        mRenderer.drawValues(canvas);

        mLegendRenderer.renderLegend(canvas);

        drawMarkers(canvas);

        drawDescription(canvas);

        // Optional draw-time instrumentation (see resetTracking()).
        if (mLogEnabled) {
            long drawtime = (System.currentTimeMillis() - starttime);
            totalTime += drawtime;
            drawCycles += 1;
            long average = totalTime / drawCycles;
            Log.i(LOG_TAG, "Drawtime: " + drawtime + " ms, average: " + average + " ms, cycles: "
                    + drawCycles);
        }
    }
/**
* RESET PERFORMANCE TRACKING FIELDS
*/
public void resetTracking() {
totalTime = 0;
drawCycles = 0;
}
protected void prepareValuePxMatrix() {
if (mLogEnabled)
Log.i(LOG_TAG, "Preparing Value-Px Matrix, xmin: " + mXChartMin + ", xmax: "
+ mXChartMax + ", xdelta: " + mDeltaX);
mRightAxisTransformer.prepareMatrixValuePx(mXChartMin, mDeltaX, mAxisRight.mAxisRange,
mAxisRight.mAxisMinimum);
mLeftAxisTransformer.prepareMatrixValuePx(mXChartMin, mDeltaX, mAxisLeft.mAxisRange,
mAxisLeft.mAxisMinimum);
}
protected void prepareOffsetMatrix() {
mRightAxisTransformer.prepareMatrixOffset(mAxisRight.isInverted());
mLeftAxisTransformer.prepareMatrixOffset(mAxisLeft.isInverted());
}
    @Override
    public void notifyDataSetChanged() {

        if (mData == null) {
            if (mLogEnabled)
                Log.i(LOG_TAG, "Preparing... DATA NOT SET.");
            return;
        } else {
            if (mLogEnabled)
                Log.i(LOG_TAG, "Preparing...");
        }

        // Rebuild the renderer's draw buffers for the (possibly resized) data.
        if (mRenderer != null)
            mRenderer.initBuffers();

        calcMinMax();

        // Recompute axis label positions from the fresh min/max values.
        mAxisRendererLeft.computeAxis(mAxisLeft.mAxisMinimum, mAxisLeft.mAxisMaximum);
        mAxisRendererRight.computeAxis(mAxisRight.mAxisMinimum, mAxisRight.mAxisMaximum);

        mXAxisRenderer.computeAxis(mData.getXValMaximumLength(), mData.getXVals());

        if (mLegend != null)
            mLegendRenderer.computeLegend(mData);

        // Offsets depend on axis/legend sizes, so this must run last.
        calculateOffsets();
    }
    @Override
    protected void calcMinMax() {

        // With auto-scaling, min/max are computed over the visible x-range only.
        if (mAutoScaleMinMaxEnabled)
            mData.calcMinMax(getLowestVisibleXIndex(), getHighestVisibleXIndex());

        // A custom axis min/max (non-NaN) overrides the value computed from the data.
        float minLeft = !Float.isNaN(mAxisLeft.getAxisMinValue())
                ? mAxisLeft.getAxisMinValue()
                : mData.getYMin(AxisDependency.LEFT);
        float maxLeft = !Float.isNaN(mAxisLeft.getAxisMaxValue())
                ? mAxisLeft.getAxisMaxValue()
                : mData.getYMax(AxisDependency.LEFT);

        float minRight = !Float.isNaN(mAxisRight.getAxisMinValue())
                ? mAxisRight.getAxisMinValue()
                : mData.getYMin(AxisDependency.RIGHT);
        float maxRight = !Float.isNaN(mAxisRight.getAxisMaxValue())
                ? mAxisRight.getAxisMaxValue()
                : mData.getYMax(AxisDependency.RIGHT);

        float leftRange = Math.abs(maxLeft - minLeft);
        float rightRange = Math.abs(maxRight - minRight);

        // in case all values are equal, widen the range by +/-1 so it is non-zero
        if (leftRange == 0f) {
            maxLeft = maxLeft + 1f;
            minLeft = minLeft - 1f;
        }

        if (rightRange == 0f) {
            maxRight = maxRight + 1f;
            minRight = minRight - 1f;
        }

        // Extra head-/foot-room as a percentage of the range.
        float topSpaceLeft = leftRange / 100f * mAxisLeft.getSpaceTop();
        float topSpaceRight = rightRange / 100f * mAxisRight.getSpaceTop();
        float bottomSpaceLeft = leftRange / 100f * mAxisLeft.getSpaceBottom();
        float bottomSpaceRight = rightRange / 100f * mAxisRight.getSpaceBottom();

        mXChartMax = mData.getXVals().size() - 1;
        mDeltaX = Math.abs(mXChartMax - mXChartMin);

        // Use the values as they are: a custom axis min/max is applied verbatim,
        // otherwise the data min/max padded by the space computed above.
        mAxisLeft.mAxisMinimum = !Float.isNaN(mAxisLeft.getAxisMinValue())
                ? mAxisLeft.getAxisMinValue()
                : (minLeft - bottomSpaceLeft);
        mAxisLeft.mAxisMaximum = !Float.isNaN(mAxisLeft.getAxisMaxValue())
                ? mAxisLeft.getAxisMaxValue()
                : (maxLeft + topSpaceLeft);

        mAxisRight.mAxisMinimum = !Float.isNaN(mAxisRight.getAxisMinValue())
                ? mAxisRight.getAxisMinValue()
                : (minRight - bottomSpaceRight);
        mAxisRight.mAxisMaximum = !Float.isNaN(mAxisRight.getAxisMaxValue())
                ? mAxisRight.getAxisMaxValue()
                : (maxRight + topSpaceRight);

        mAxisLeft.mAxisRange = Math.abs(mAxisLeft.mAxisMaximum - mAxisLeft.mAxisMinimum);
        mAxisRight.mAxisRange = Math.abs(mAxisRight.mAxisMaximum - mAxisRight.mAxisMinimum);
    }
    @Override
    public void calculateOffsets() {

        // A custom viewport (set by the user) skips automatic offset calculation.
        if (!mCustomViewPortEnabled) {

            float offsetLeft = 0f, offsetRight = 0f, offsetTop = 0f, offsetBottom = 0f;

            // setup offsets for legend
            if (mLegend != null && mLegend.isEnabled()) {

                // Side legends consume horizontal space, top/bottom legends
                // vertical space; all are capped by the legend's max-size percent.
                if (mLegend.getPosition() == LegendPosition.RIGHT_OF_CHART
                        || mLegend.getPosition() == LegendPosition.RIGHT_OF_CHART_CENTER) {

                    offsetRight += Math.min(mLegend.mNeededWidth, mViewPortHandler.getChartWidth()
                            * mLegend.getMaxSizePercent())
                            + mLegend.getXOffset() * 2f;

                } else if (mLegend.getPosition() == LegendPosition.LEFT_OF_CHART
                        || mLegend.getPosition() == LegendPosition.LEFT_OF_CHART_CENTER) {

                    offsetLeft += Math.min(mLegend.mNeededWidth, mViewPortHandler.getChartWidth()
                            * mLegend.getMaxSizePercent())
                            + mLegend.getXOffset() * 2f;

                } else if (mLegend.getPosition() == LegendPosition.BELOW_CHART_LEFT
                        || mLegend.getPosition() == LegendPosition.BELOW_CHART_RIGHT
                        || mLegend.getPosition() == LegendPosition.BELOW_CHART_CENTER) {

                    // It's possible that we do not need this offset anymore as it
                    // is available through the extraOffsets, but changing it can mean
                    // changing default visibility for existing apps.
                    float yOffset = mLegend.mTextHeightMax;

                    offsetBottom += Math.min(mLegend.mNeededHeight + yOffset,
                            mViewPortHandler.getChartHeight() * mLegend.getMaxSizePercent());

                } else if (mLegend.getPosition() == LegendPosition.ABOVE_CHART_LEFT
                        || mLegend.getPosition() == LegendPosition.ABOVE_CHART_RIGHT
                        || mLegend.getPosition() == LegendPosition.ABOVE_CHART_CENTER) {

                    // It's possible that we do not need this offset anymore as it
                    // is available through the extraOffsets, but changing it can mean
                    // changing default visibility for existing apps.
                    float yOffset = mLegend.mTextHeightMax;

                    offsetTop += Math.min(mLegend.mNeededHeight + yOffset,
                            mViewPortHandler.getChartHeight() * mLegend.getMaxSizePercent());
                }
            }

            // offsets for y-labels
            if (mAxisLeft.needsOffset()) {
                offsetLeft += mAxisLeft.getRequiredWidthSpace(mAxisRendererLeft
                        .getPaintAxisLabels());
            }

            if (mAxisRight.needsOffset()) {
                offsetRight += mAxisRight.getRequiredWidthSpace(mAxisRendererRight
                        .getPaintAxisLabels());
            }

            if (mXAxis.isEnabled() && mXAxis.isDrawLabelsEnabled()) {

                float xlabelheight = mXAxis.mLabelRotatedHeight + mXAxis.getYOffset();

                // offsets for x-labels, depending on which side(s) the axis is drawn on
                if (mXAxis.getPosition() == XAxisPosition.BOTTOM) {

                    offsetBottom += xlabelheight;

                } else if (mXAxis.getPosition() == XAxisPosition.TOP) {

                    offsetTop += xlabelheight;

                } else if (mXAxis.getPosition() == XAxisPosition.BOTH_SIDED) {

                    offsetBottom += xlabelheight;
                    offsetTop += xlabelheight;
                }
            }

            // User-configured extra offsets are added on top of everything.
            offsetTop += getExtraTopOffset();
            offsetRight += getExtraRightOffset();
            offsetBottom += getExtraBottomOffset();
            offsetLeft += getExtraLeftOffset();

            // Never shrink below the configured minimum offset (default 15dp).
            float minOffset = Utils.convertDpToPixel(mMinOffset);

            mViewPortHandler.restrainViewPort(
                    Math.max(minOffset, offsetLeft),
                    Math.max(minOffset, offsetTop),
                    Math.max(minOffset, offsetRight),
                    Math.max(minOffset, offsetBottom));

            if (mLogEnabled) {
                Log.i(LOG_TAG, "offsetLeft: " + offsetLeft + ", offsetTop: " + offsetTop
                        + ", offsetRight: " + offsetRight + ", offsetBottom: " + offsetBottom);
                Log.i(LOG_TAG, "Content: " + mViewPortHandler.getContentRect().toString());
            }
        }

        // Transformer matrices must be rebuilt after the viewport changed.
        prepareOffsetMatrix();
        prepareValuePxMatrix();
    }
    /**
     * Calculates the modulus for x-labels and grid: how many labels to skip
     * between drawn labels so that the (rotated) label width fits into the
     * currently zoomed content width. A custom modulus set by the user is
     * left untouched.
     */
    protected void calcModulus() {

        if (mXAxis == null || !mXAxis.isEnabled())
            return;

        if (!mXAxis.isAxisModulusCustom()) {

            // Read the current touch-matrix x-scale to account for zoom.
            float[] values = new float[9];
            mViewPortHandler.getMatrixTouch().getValues(values);

            mXAxis.mAxisLabelModulus = (int) Math
                    .ceil((mData.getXValCount() * mXAxis.mLabelRotatedWidth)
                            / (mViewPortHandler.contentWidth() * values[Matrix.MSCALE_X]));
        }

        if (mLogEnabled)
            Log.i(LOG_TAG, "X-Axis modulus: " + mXAxis.mAxisLabelModulus +
                    ", x-axis label width: " + mXAxis.mLabelWidth +
                    ", x-axis label rotated width: " + mXAxis.mLabelRotatedWidth +
                    ", content width: " + mViewPortHandler.contentWidth());

        // Guard against a zero/negative modulus (would break the label loop).
        if (mXAxis.mAxisLabelModulus < 1)
            mXAxis.mAxisLabelModulus = 1;
    }
@Override
protected float[] getMarkerPosition(Entry e, Highlight highlight) {
// Computes the pixel position at which the marker for the given entry
// should be drawn. For bar charts the x-position must account for
// group spacing and the data-set index within each group.
int dataSetIndex = highlight.getDataSetIndex();
float xPos = e.getXIndex();
float yPos = e.getVal();
if (this instanceof BarChart) {
BarData bd = (BarData) mData;
float space = bd.getGroupSpace();
int setCount = mData.getDataSetCount();
int i = e.getXIndex();
if (this instanceof HorizontalBarChart) {
// horizontal chart: axes are swapped, so the grouped-bar offset
// applies to the y coordinate and the value to the x coordinate
float y = i + i * (setCount - 1) + dataSetIndex + space * i + space / 2f;
yPos = y;
BarEntry entry = (BarEntry) e;
// stacked bars: marker sits at the top of the highlighted stack segment
if (entry.getVals() != null) {
xPos = highlight.getRange().to;
} else {
xPos = e.getVal();
}
xPos *= mAnimator.getPhaseY();
} else {
float x = i + i * (setCount - 1) + dataSetIndex + space * i + space / 2f;
xPos = x;
BarEntry entry = (BarEntry) e;
// stacked bars: marker sits at the top of the highlighted stack segment
if (entry.getVals() != null) {
yPos = highlight.getRange().to;
} else {
yPos = e.getVal();
}
yPos *= mAnimator.getPhaseY();
}
} else {
// non-bar charts: only scale the value by the animation phase
yPos *= mAnimator.getPhaseY();
}
// position of the marker depends on selected value index and value
float[] pts = new float[]{
xPos, yPos
};
// convert chart values to on-screen pixels using the data set's axis
getTransformer(mData.getDataSetByIndex(dataSetIndex).getAxisDependency())
.pointValuesToPixel(pts);
return pts;
}
/**
 * draws the grid background
 *
 * Fills the content rectangle with the grid background paint and/or
 * strokes it with the border paint, depending on the enabled flags.
 * Both rectangles cover the same content area.
 */
protected void drawGridBackground(Canvas c) {
if (mDrawGridBackground) {
// draw the grid background
c.drawRect(mViewPortHandler.getContentRect(), mGridBackgroundPaint);
}
if (mDrawBorders) {
c.drawRect(mViewPortHandler.getContentRect(), mBorderPaint);
}
}
/**
 * Returns the Transformer responsible for converting chart values to
 * pixels (and back) for the given axis side.
 *
 * @param which the axis dependency (LEFT or RIGHT)
 * @return the left transformer for LEFT, otherwise the right transformer
 */
public Transformer getTransformer(AxisDependency which) {
    return (which == AxisDependency.LEFT) ? mLeftAxisTransformer : mRightAxisTransformer;
}
@Override
public boolean onTouchEvent(MotionEvent event) {
    // let the superclass see the event first
    super.onTouchEvent(event);
    // without a listener, data, or enabled touch gestures there is nothing to handle
    if (mChartTouchListener == null || mData == null || !mTouchEnabled) {
        return false;
    }
    return mChartTouchListener.onTouch(this, event);
}
@Override
public void computeScroll() {
// delegate fling/scroll continuation to the touch listener, if it supports it
if (mChartTouchListener instanceof BarLineChartTouchListener)
((BarLineChartTouchListener) mChartTouchListener).computeScroll();
}
/**
* ################ ################ ################ ################
*/
/**
* CODE BELOW THIS RELATED TO SCALING AND GESTURES AND MODIFICATION OF THE
* VIEWPORT
*/
/**
 * Zooms in by 1.4f, into the charts center.
 */
public void zoomIn() {
PointF center = mViewPortHandler.getContentCenter();
// NOTE(review): the y coordinate is negated here (and in zoomOut/zoom) —
// presumably to convert between view and chart coordinate orientation; confirm
// against the ViewPortHandler implementation.
Matrix save = mViewPortHandler.zoomIn(center.x, -center.y);
mViewPortHandler.refresh(save, this, false);
// Range might have changed, which means that Y-axis labels
// could have changed in size, affecting Y-axis size.
// So we need to recalculate offsets.
calculateOffsets();
postInvalidate();
}
/**
 * Zooms out by 0.7f, from the charts center.
 */
public void zoomOut() {
PointF center = mViewPortHandler.getContentCenter();
Matrix save = mViewPortHandler.zoomOut(center.x, -center.y);
mViewPortHandler.refresh(save, this, false);
// Range might have changed, which means that Y-axis labels
// could have changed in size, affecting Y-axis size.
// So we need to recalculate offsets.
calculateOffsets();
postInvalidate();
}
/**
 * Zooms in or out by the given scale factor. x and y are the coordinates
 * (in pixels) of the zoom center.
 *
 * @param scaleX if < 1f --> zoom out, if > 1f --> zoom in
 * @param scaleY if < 1f --> zoom out, if > 1f --> zoom in
 * @param x
 * @param y
 */
public void zoom(float scaleX, float scaleY, float x, float y) {
Matrix save = mViewPortHandler.zoom(scaleX, scaleY, x, -y);
mViewPortHandler.refresh(save, this, false);
// Range might have changed, which means that Y-axis labels
// could have changed in size, affecting Y-axis size.
// So we need to recalculate offsets.
calculateOffsets();
postInvalidate();
}
/**
 * Zooms in or out by the given scale factor.
 * x and y are the values (NOT PIXELS) which to zoom to or from (the values of the zoom center).
 *
 * Unlike the pixel-based overload this variant is executed as a viewport
 * job, so it runs once the view has been laid out.
 *
 * @param scaleX
 * @param scaleY
 * @param xValue
 * @param yValue
 * @param axis the axis relative to which the zoom should take place
 */
public void zoom(float scaleX, float scaleY, float xValue, float yValue, AxisDependency axis) {
Runnable job = new ZoomJob(mViewPortHandler, scaleX, scaleY, xValue, yValue, getTransformer(axis), axis, this);
addViewportJob(job);
}
/**
 * Zooms by the specified scale factor to the specified values on the specified axis.
 *
 * Animated variant; requires API level 11 (value animators) and logs an
 * error on older platforms instead of zooming.
 *
 * @param scaleX
 * @param scaleY
 * @param xValue
 * @param yValue
 * @param axis
 * @param duration
 */
@TargetApi(11)
public void zoomAndCenterAnimated(float scaleX, float scaleY, float xValue, float yValue, AxisDependency axis, long duration) {
if (android.os.Build.VERSION.SDK_INT >= 11) {
// current top-left corner in chart values — used as the animation start point
PointD origin = getValuesByTouchPoint(mViewPortHandler.contentLeft(), mViewPortHandler.contentTop(), axis);
Runnable job = new AnimatedZoomJob(mViewPortHandler, this, getTransformer(axis), getAxis(axis), mXAxis.getValues().size(), scaleX, scaleY, mViewPortHandler.getScaleX(), mViewPortHandler.getScaleY(), xValue, yValue, (float) origin.x, (float) origin.y, duration);
addViewportJob(job);
} else {
Log.e(LOG_TAG, "Unable to execute zoomAndCenterAnimated(...) on API level < 11");
}
}
/**
 * Resets all zooming and dragging and makes the chart fit exactly it's
 * bounds.
 */
public void fitScreen() {
Matrix save = mViewPortHandler.fitScreen();
mViewPortHandler.refresh(save, this, false);
// scale reset may change axis label sizes, so offsets must be recalculated
calculateOffsets();
postInvalidate();
}
/**
 * Sets the minimum scale factor value to which can be zoomed out. 1f =
 * fitScreen
 *
 * @param scaleX minimum horizontal scale factor
 * @param scaleY minimum vertical scale factor
 */
public void setScaleMinima(float scaleX, float scaleY) {
mViewPortHandler.setMinimumScaleX(scaleX);
mViewPortHandler.setMinimumScaleY(scaleY);
}
/**
 * Sets the size of the area (range on the x-axis) that should be maximum
 * visible at once (no further zooming out allowed). If this is e.g. set to
 * 10, no more than 10 values on the x-axis can be viewed at once without
 * scrolling.
 *
 * NOTE(review): passing 0 here divides by zero — callers are expected to
 * supply a positive range; confirm whether validation is needed.
 *
 * @param maxXRange The maximum visible range of x-values.
 */
public void setVisibleXRangeMaximum(float maxXRange) {
float xScale = mDeltaX / (maxXRange);
mViewPortHandler.setMinimumScaleX(xScale);
}
/**
 * Sets the size of the area (range on the x-axis) that should be minimum
 * visible at once (no further zooming in allowed). If this is e.g. set to
 * 10, no less than 10 values on the x-axis can be viewed at once without
 * scrolling.
 *
 * @param minXRange The minimum visible range of x-values.
 */
public void setVisibleXRangeMinimum(float minXRange) {
float xScale = mDeltaX / (minXRange);
mViewPortHandler.setMaximumScaleX(xScale);
}
/**
 * Limits the maximum and minimum value count that can be visible by
 * pinching and zooming. e.g. minRange=10, maxRange=100 no less than 10
 * values and no more that 100 values can be viewed at once without
 * scrolling
 *
 * @param minXRange
 * @param maxXRange
 */
public void setVisibleXRange(float minXRange, float maxXRange) {
// a smaller visible range means a larger scale factor
float maxScale = mDeltaX / minXRange;
float minScale = mDeltaX / maxXRange;
mViewPortHandler.setMinMaxScaleX(minScale, maxScale);
}
/**
 * Sets the size of the area (range on the y-axis) that should be maximum
 * visible at once.
 *
 * @param maxYRange the maximum visible range on the y-axis
 * @param axis - the axis for which this limit should apply
 */
public void setVisibleYRangeMaximum(float maxYRange, AxisDependency axis) {
float yScale = getDeltaY(axis) / maxYRange;
mViewPortHandler.setMinimumScaleY(yScale);
}
/**
 * Moves the left side of the current viewport to the specified x-index.
 * This also refreshes the chart by calling invalidate().
 *
 * @param xIndex
 */
public void moveViewToX(float xIndex) {
Runnable job = new MoveViewJob(mViewPortHandler, xIndex, 0f,
getTransformer(AxisDependency.LEFT), this);
addViewportJob(job);
}
/**
 * Centers the viewport to the specified y-value on the y-axis.
 * This also refreshes the chart by calling invalidate().
 *
 * @param yValue
 * @param axis - which axis should be used as a reference for the y-axis
 */
public void moveViewToY(float yValue, AxisDependency axis) {
// number of y-values currently visible at the present zoom level;
// adding half of it to yValue vertically centers the target value
float valsInView = getDeltaY(axis) / mViewPortHandler.getScaleY();
Runnable job = new MoveViewJob(mViewPortHandler, 0f, yValue + valsInView / 2f,
getTransformer(axis), this);
addViewportJob(job);
}
/**
 * This will move the left side of the current viewport to the specified
 * x-value on the x-axis, and center the viewport to the specified y-value
 * on the y-axis.
 * This also refreshes the chart by calling invalidate().
 *
 * @param xIndex
 * @param yValue
 * @param axis - which axis should be used as a reference for the y-axis
 */
public void moveViewTo(float xIndex, float yValue, AxisDependency axis) {
float valsInView = getDeltaY(axis) / mViewPortHandler.getScaleY();
Runnable job = new MoveViewJob(mViewPortHandler, xIndex, yValue + valsInView / 2f,
getTransformer(axis), this);
addViewportJob(job);
}
/**
 * This will move the left side of the current viewport to the specified x-position
 * and center the viewport to the specified y-position animated.
 * This also refreshes the chart by calling invalidate().
 *
 * Requires API level 11; logs an error on older platforms instead of moving.
 *
 * @param xIndex
 * @param yValue
 * @param axis
 * @param duration the duration of the animation in milliseconds
 */
@TargetApi(11)
public void moveViewToAnimated(float xIndex, float yValue, AxisDependency axis, long duration) {
if (android.os.Build.VERSION.SDK_INT >= 11) {
// current top-left corner in chart values — the animation start point
PointD bounds = getValuesByTouchPoint(mViewPortHandler.contentLeft(), mViewPortHandler.contentTop(), axis);
float valsInView = getDeltaY(axis) / mViewPortHandler.getScaleY();
Runnable job = new AnimatedMoveViewJob(mViewPortHandler, xIndex, yValue + valsInView / 2f,
getTransformer(axis), this, (float) bounds.x, (float) bounds.y, duration);
addViewportJob(job);
} else {
Log.e(LOG_TAG, "Unable to execute moveViewToAnimated(...) on API level < 11");
}
}
/**
 * This will move the center of the current viewport to the specified
 * x-value and y-value.
 * This also refreshes the chart by calling invalidate().
 *
 * @param xIndex
 * @param yValue
 * @param axis - which axis should be used as a reference for the y-axis
 */
public void centerViewTo(float xIndex, float yValue, AxisDependency axis) {
// half of the visible value count is subtracted/added to center the target
float valsInView = getDeltaY(axis) / mViewPortHandler.getScaleY();
float xsInView = getXAxis().getValues().size() / mViewPortHandler.getScaleX();
Runnable job = new MoveViewJob(mViewPortHandler,
xIndex - xsInView / 2f, yValue + valsInView / 2f,
getTransformer(axis), this);
addViewportJob(job);
}
/**
 * This will move the center of the current viewport to the specified
 * x-value and y-value animated.
 *
 * Requires API level 11; logs an error on older platforms instead of moving.
 *
 * @param xIndex
 * @param yValue
 * @param axis
 * @param duration the duration of the animation in milliseconds
 */
@TargetApi(11)
public void centerViewToAnimated(float xIndex, float yValue, AxisDependency axis, long duration) {
if (android.os.Build.VERSION.SDK_INT >= 11) {
PointD bounds = getValuesByTouchPoint(mViewPortHandler.contentLeft(), mViewPortHandler.contentTop(), axis);
float valsInView = getDeltaY(axis) / mViewPortHandler.getScaleY();
float xsInView = getXAxis().getValues().size() / mViewPortHandler.getScaleX();
Runnable job = new AnimatedMoveViewJob(mViewPortHandler,
xIndex - xsInView / 2f, yValue + valsInView / 2f,
getTransformer(axis), this, (float) bounds.x, (float) bounds.y, duration);
addViewportJob(job);
} else {
Log.e(LOG_TAG, "Unable to execute centerViewToAnimated(...) on API level < 11");
}
}
/**
 * flag that indicates if a custom viewport offset has been set
 */
private boolean mCustomViewPortEnabled = false;
/**
 * Sets custom offsets for the current ViewPort (the offsets on the sides of
 * the actual chart window). Setting this will prevent the chart from
 * automatically calculating it's offsets. Use resetViewPortOffsets() to
 * undo this. ONLY USE THIS WHEN YOU KNOW WHAT YOU ARE DOING, else use
 * setExtraOffsets(...).
 *
 * @param left
 * @param top
 * @param right
 * @param bottom
 */
public void setViewPortOffsets(final float left, final float top,
final float right, final float bottom) {
mCustomViewPortEnabled = true;
// posted so the restrain happens after layout, on the UI thread
post(new Runnable() {
@Override
public void run() {
mViewPortHandler.restrainViewPort(left, top, right, bottom);
prepareOffsetMatrix();
prepareValuePxMatrix();
}
});
}
/**
 * Resets all custom offsets set via setViewPortOffsets(...) method. Allows
 * the chart to again calculate all offsets automatically.
 */
public void resetViewPortOffsets() {
mCustomViewPortEnabled = false;
calculateOffsets();
}
/**
* ################ ################ ################ ################
*/
/** CODE BELOW IS GETTERS AND SETTERS */
/**
 * Returns the delta-y value (y-value range) of the specified axis.
 *
 * @param axis the axis side whose range is requested
 * @return the axis range of the left axis for LEFT, otherwise of the right axis
 */
public float getDeltaY(AxisDependency axis) {
    return (axis == AxisDependency.LEFT) ? mAxisLeft.mAxisRange : mAxisRight.mAxisRange;
}
/**
 * Sets the OnDrawListener
 *
 * @param drawListener the listener to notify while drawing
 */
public void setOnDrawListener(OnDrawListener drawListener) {
this.mDrawListener = drawListener;
}
/**
 * Gets the OnDrawListener. May be null.
 *
 * @return the currently set listener, or null if none was set
 */
public OnDrawListener getDrawListener() {
return mDrawListener;
}
/**
 * Returns the position (in pixels) the provided Entry has inside the chart
 * view or null, if the provided Entry is null.
 *
 * @param e    the entry to locate; may be null
 * @param axis the axis used for the value-to-pixel transformation
 * @return the on-screen position, or null when e is null
 */
public PointF getPosition(Entry e, AxisDependency axis) {
    if (e == null) {
        return null;
    }
    // transform (x-index, value) into pixel space in place
    float[] pixel = {e.getXIndex(), e.getVal()};
    getTransformer(axis).pointValuesToPixel(pixel);
    return new PointF(pixel[0], pixel[1]);
}
/**
 * sets the number of maximum visible drawn values on the chart only active
 * when setDrawValues() is enabled
 *
 * @param count the maximum number of value labels drawn at once
 */
public void setMaxVisibleValueCount(int count) {
this.mMaxVisibleCount = count;
}
/** @return the maximum number of value labels drawn at once */
public int getMaxVisibleCount() {
return mMaxVisibleCount;
}
/**
 * Set this to true to allow highlighting per dragging over the chart
 * surface when it is fully zoomed out. Default: true
 *
 * @param enabled
 */
public void setHighlightPerDragEnabled(boolean enabled) {
mHighlightPerDragEnabled = enabled;
}
/** @return true if highlighting by dragging over the surface is enabled */
public boolean isHighlightPerDragEnabled() {
return mHighlightPerDragEnabled;
}
/**
 * Sets the color for the background of the chart-drawing area (everything
 * behind the grid lines).
 *
 * @param color
 */
public void setGridBackgroundColor(int color) {
mGridBackgroundPaint.setColor(color);
}
/**
 * Set this to true to enable dragging (moving the chart with the finger)
 * for the chart (this does not effect scaling).
 *
 * @param enabled
 */
public void setDragEnabled(boolean enabled) {
this.mDragEnabled = enabled;
}
/**
 * Returns true if dragging is enabled for the chart, false if not.
 *
 * @return whether drag gestures are enabled
 */
public boolean isDragEnabled() {
return mDragEnabled;
}
/**
 * Set this to true to enable scaling (zooming in and out by gesture) for
 * the chart (this does not effect dragging) on both X- and Y-Axis.
 *
 * @param enabled
 */
public void setScaleEnabled(boolean enabled) {
this.mScaleXEnabled = enabled;
this.mScaleYEnabled = enabled;
}
/** Enables/disables scaling on the x-axis only. */
public void setScaleXEnabled(boolean enabled) {
mScaleXEnabled = enabled;
}
/** Enables/disables scaling on the y-axis only. */
public void setScaleYEnabled(boolean enabled) {
mScaleYEnabled = enabled;
}
/** @return true if scaling on the x-axis is enabled */
public boolean isScaleXEnabled() {
return mScaleXEnabled;
}
/** @return true if scaling on the y-axis is enabled */
public boolean isScaleYEnabled() {
return mScaleYEnabled;
}
/**
 * Set this to true to enable zooming in by double-tap on the chart.
 * Default: enabled
 *
 * @param enabled
 */
public void setDoubleTapToZoomEnabled(boolean enabled) {
mDoubleTapToZoomEnabled = enabled;
}
/**
 * Returns true if zooming via double-tap is enabled false if not.
 *
 * @return whether double-tap zoom is enabled
 */
public boolean isDoubleTapToZoomEnabled() {
return mDoubleTapToZoomEnabled;
}
/**
 * set this to true to draw the grid background, false if not
 *
 * @param enabled
 */
public void setDrawGridBackground(boolean enabled) {
mDrawGridBackground = enabled;
}
/**
 * Sets drawing the borders rectangle to true. If this is enabled, there is
 * no point drawing the axis-lines of x- and y-axis.
 *
 * @param enabled
 */
public void setDrawBorders(boolean enabled) {
mDrawBorders = enabled;
}
/**
 * Sets the width of the border lines in dp.
 *
 * @param width border stroke width in dp (converted to pixels internally)
 */
public void setBorderWidth(float width) {
mBorderPaint.setStrokeWidth(Utils.convertDpToPixel(width));
}
/**
 * Sets the color of the chart border lines.
 *
 * @param color
 */
public void setBorderColor(int color) {
mBorderPaint.setColor(color);
}
/**
 * Gets the minimum offset (padding) around the chart, defaults to 15.f
 */
public float getMinOffset() {
return mMinOffset;
}
/**
 * Sets the minimum offset (padding) around the chart, defaults to 15.f
 */
public void setMinOffset(float minOffset) {
mMinOffset = minOffset;
}
/**
 * Returns the Highlight object (contains x-index and DataSet index) of the
 * selected value at the given touch point inside the Line-, Scatter-, or
 * CandleStick-Chart.
 *
 * @param x touch x position in pixels
 * @param y touch y position in pixels
 * @return the Highlight at the touch point, or null when no data is set
 */
public Highlight getHighlightByTouchPoint(float x, float y) {
    if (mData != null) {
        return getHighlighter().getHighlight(x, y);
    }
    Log.e(LOG_TAG, "Can't select by touch. No data set.");
    return null;
}
/**
 * Returns the x and y values in the chart at the given touch point
 * (encapsulated in a PointD). This method transforms pixel coordinates to
 * coordinates / values in the chart. This is the opposite method to
 * getPixelsForValues(...).
 *
 * @param x    pixel x coordinate
 * @param y    pixel y coordinate
 * @param axis the axis used for the pixel-to-value transformation
 * @return the chart values corresponding to the touch point
 */
public PointD getValuesByTouchPoint(float x, float y, AxisDependency axis) {
    // transform the touch point from pixel space into value space in place
    float[] touch = {x, y};
    getTransformer(axis).pixelsToValue(touch);
    return new PointD(touch[0], touch[1]);
}
/**
 * Transforms the given chart values into pixels. This is the opposite
 * method to getValuesByTouchPoint(...).
 *
 * @param x    chart x value
 * @param y    chart y value
 * @param axis the axis used for the value-to-pixel transformation
 * @return the on-screen pixel position of the value pair
 */
public PointD getPixelsForValues(float x, float y, AxisDependency axis) {
    float[] pixels = new float[2];
    pixels[0] = x;
    pixels[1] = y;
    getTransformer(axis).pointValuesToPixel(pixels);
    return new PointD(pixels[0], pixels[1]);
}
/**
 * returns the y-value at the given touch position (must not necessarily be
 * a value contained in one of the datasets)
 *
 * @param x
 * @param y
 * @return the chart y-value at the touch position
 */
public float getYValueByTouchPoint(float x, float y, AxisDependency axis) {
return (float) getValuesByTouchPoint(x, y, axis).y;
}
/**
 * returns the Entry object displayed at the touched position of the chart
 *
 * @param x
 * @param y
 * @return the touched Entry, or null when nothing is highlighted there
 */
public Entry getEntryByTouchPoint(float x, float y) {
Highlight h = getHighlightByTouchPoint(x, y);
if (h != null) {
return mData.getEntryForHighlight(h);
}
return null;
}
/**
 * returns the DataSet object displayed at the touched position of the chart
 *
 * @param x
 * @param y
 * @return the touched DataSet, or null when nothing is highlighted there
 */
public IBarLineScatterCandleBubbleDataSet getDataSetByTouchPoint(float x, float y) {
Highlight h = getHighlightByTouchPoint(x, y);
if (h != null) {
return mData.getDataSetByIndex(h.getDataSetIndex());
}
return null;
}
/**
 * Returns the lowest x-index (value on the x-axis) that is still visible on
 * the chart.
 *
 * @return the lowest visible x-index, clamped to 0
 */
@Override
public int getLowestVisibleXIndex() {
// bottom-left content corner, converted from pixels into chart values
float[] pts = new float[]{
mViewPortHandler.contentLeft(), mViewPortHandler.contentBottom()
};
getTransformer(AxisDependency.LEFT).pixelsToValue(pts);
// +1.0f rounds up to the first index that is fully inside the viewport
return (pts[0] <= 0) ? 0 : (int) (pts[0] + 1.0f);
}
/**
 * Returns the highest x-index (value on the x-axis) that is still visible
 * on the chart.
 *
 * @return the highest visible x-index, clamped to the last data index
 */
@Override
public int getHighestVisibleXIndex() {
// bottom-right content corner, converted from pixels into chart values
float[] pts = new float[]{
mViewPortHandler.contentRight(), mViewPortHandler.contentBottom()
};
getTransformer(AxisDependency.LEFT).pixelsToValue(pts);
return (pts[0] >= mData.getXValCount()) ? mData.getXValCount() - 1 : (int) pts[0];
}
/**
 * returns the current x-scale factor
 *
 * Returns 1f (unscaled) while the ViewPortHandler is not yet initialized.
 */
public float getScaleX() {
if (mViewPortHandler == null)
return 1f;
else
return mViewPortHandler.getScaleX();
}
/**
 * returns the current y-scale factor
 *
 * Returns 1f (unscaled) while the ViewPortHandler is not yet initialized.
 */
public float getScaleY() {
if (mViewPortHandler == null)
return 1f;
else
return mViewPortHandler.getScaleY();
}
/**
 * if the chart is fully zoomed out, return true
 *
 * @return true when the viewport is at its minimum scale on both axes
 */
public boolean isFullyZoomedOut() {
return mViewPortHandler.isFullyZoomedOut();
}
/**
 * Returns the left y-axis object. In the horizontal bar-chart, this is the
 * top axis.
 *
 * @return the left y-axis
 */
public YAxis getAxisLeft() {
return mAxisLeft;
}
/**
 * Returns the right y-axis object. In the horizontal bar-chart, this is the
 * bottom axis.
 *
 * @return the right y-axis
 */
public YAxis getAxisRight() {
return mAxisRight;
}
/**
 * Returns the y-axis object to the corresponding AxisDependency. In the
 * horizontal bar-chart, LEFT == top, RIGHT == BOTTOM
 *
 * @param axis
 * @return the y-axis matching the given dependency
 */
public YAxis getAxis(AxisDependency axis) {
if (axis == AxisDependency.LEFT)
return mAxisLeft;
else
return mAxisRight;
}
@Override
public boolean isInverted(AxisDependency axis) {
// delegates to the matching y-axis
return getAxis(axis).isInverted();
}
/**
 * Returns the object representing all x-labels, this method can be used to
 * acquire the XAxis object and modify it (e.g. change the position of the
 * labels)
 *
 * @return the x-axis of this chart
 */
public XAxis getXAxis() {
return mXAxis;
}
/**
 * If set to true, both x and y axis can be scaled simultaneously with 2 fingers, if false,
 * x and y axis can be scaled separately. default: false
 *
 * @param enabled
 */
public void setPinchZoom(boolean enabled) {
mPinchZoomEnabled = enabled;
}
/**
 * returns true if pinch-zoom is enabled, false if not
 *
 * @return whether pinch-zoom is enabled
 */
public boolean isPinchZoomEnabled() {
return mPinchZoomEnabled;
}
/**
 * Set an offset in dp that allows the user to drag the chart over it's
 * bounds on the x-axis.
 *
 * @param offset
 */
public void setDragOffsetX(float offset) {
mViewPortHandler.setDragOffsetX(offset);
}
/**
 * Set an offset in dp that allows the user to drag the chart over it's
 * bounds on the y-axis.
 *
 * @param offset
 */
public void setDragOffsetY(float offset) {
mViewPortHandler.setDragOffsetY(offset);
}
/**
 * Returns true if both drag offsets (x and y) are zero or smaller.
 *
 * @return whether no drag offset is configured
 */
public boolean hasNoDragOffset() {
return mViewPortHandler.hasNoDragOffset();
}
/** @return the renderer currently drawing the x-axis */
public XAxisRenderer getRendererXAxis() {
return mXAxisRenderer;
}
/**
 * Sets a custom XAxisRenderer and overrides the existing (default) one.
 *
 * @param xAxisRenderer
 */
public void setXAxisRenderer(XAxisRenderer xAxisRenderer) {
mXAxisRenderer = xAxisRenderer;
}
/** @return the renderer currently drawing the left y-axis */
public YAxisRenderer getRendererLeftYAxis() {
return mAxisRendererLeft;
}
/**
 * Sets a custom axis renderer for the left axis and overwrites the existing one.
 *
 * @param rendererLeftYAxis
 */
public void setRendererLeftYAxis(YAxisRenderer rendererLeftYAxis) {
mAxisRendererLeft = rendererLeftYAxis;
}
/** @return the renderer currently drawing the right y-axis */
public YAxisRenderer getRendererRightYAxis() {
return mAxisRendererRight;
}
/**
 * Sets a custom axis renderer for the right acis and overwrites the existing one.
 *
 * @param rendererRightYAxis
 */
public void setRendererRightYAxis(YAxisRenderer rendererRightYAxis) {
mAxisRendererRight = rendererRightYAxis;
}
@Override
public float getYChartMax() {
// the overall chart maximum is the larger of the two y-axis maxima
return Math.max(mAxisLeft.mAxisMaximum, mAxisRight.mAxisMaximum);
}
@Override
public float getYChartMin() {
// the overall chart minimum is the smaller of the two y-axis minima
return Math.min(mAxisLeft.mAxisMinimum, mAxisRight.mAxisMinimum);
}
/**
 * Returns true if either the left or the right or both axes are inverted.
 *
 * @return true when at least one y-axis is inverted
 */
public boolean isAnyAxisInverted() {
    return mAxisLeft.isInverted() || mAxisRight.isInverted();
}
/**
 * Flag that indicates if auto scaling on the y axis is enabled. This is
 * especially interesting for charts displaying financial data.
 *
 * @param enabled the y axis automatically adjusts to the min and max y
 *                values of the current x axis range whenever the viewport
 *                changes
 */
public void setAutoScaleMinMaxEnabled(boolean enabled) {
mAutoScaleMinMaxEnabled = enabled;
}
/**
 * @return true if auto scaling on the y axis is enabled.
 * @default false
 */
public boolean isAutoScaleMinMaxEnabled() {
return mAutoScaleMinMaxEnabled;
}
@Override
public void setPaint(Paint p, int which) {
// let the superclass handle its own paint slots first
super.setPaint(p, which);
// additionally capture the grid background paint, which only this class owns
switch (which) {
case PAINT_GRID_BACKGROUND:
mGridBackgroundPaint = p;
break;
}
}
@Override
public Paint getPaint(int which) {
    // paints managed by the superclass take precedence
    Paint p = super.getPaint(which);
    if (p == null && which == PAINT_GRID_BACKGROUND) {
        p = mGridBackgroundPaint;
    }
    return p;
}
}
| apache-2.0 |
virendergit1/APMCore | src/main/java/com/apm/service/APMUserService.java | 13082 | package com.apm.service;
import java.util.Calendar;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.httpclient.HttpStatus;
import org.hsqldb.lib.StringUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.hateoas.ExposesResourceFor;
import org.springframework.http.MediaType;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.view.RedirectView;
import com.apm.Mappings;
import com.apm.models.APMUser;
import com.apm.models.Organization;
import com.apm.models.PasswordProfile;
import com.apm.models.Role;
import com.apm.models.VerificationToken;
import com.apm.repos.APMUserRepository;
import com.apm.repos.OrganizationRepository;
import com.apm.repos.PasswordProfileRepository;
import com.apm.repos.RoleRepository;
import com.apm.repos.VerificationTokenRepository;
import com.apm.utils.APMResponse;
import com.apm.utils.JSONView;
import com.apm.utils.OnRegistrationCompleteEvent;
import com.apm.utils.exception.InvalidUserIdNameCombinationException;
import com.apm.utils.exception.InvalidVerificationTokenException;
import com.apm.utils.exception.MissingMandatoryDataException;
import com.apm.utils.exception.RecordExistsException;
import com.apm.utils.exception.RecordNotFoundException;
import com.fasterxml.jackson.annotation.JsonView;
@RestController
@ExposesResourceFor(APMUser.class)
@RequestMapping(Mappings.API_BASE_PATH)
public class APMUserService {
public static final String API_USERS_PASSWORDPROFILE_PATH = "/users/{userId}/passwordProfile";
public static final String API_USER_CONFIRM_REGISTRATION = "/users/{userId}/registrationConfirm";
@Autowired
ApplicationEventPublisher eventPublisher;
@Autowired
private APMUserRepository userRepo;
@Autowired
private PasswordProfileRepository passwordProfileRepo;
@Autowired
private VerificationTokenRepository verificationTokenRepo;
@Autowired
private OrganizationRepository orgRepo;
@Autowired
private RoleRepository roleRepo;
// GET All Users
// GET User by various Search Operations
// Following operators are supported
// AND, OR, LIKE
//
// NOTE(review): when firstName and lastName are BOTH supplied but
// searchByOperator is missing or not "AND"/"OR", this silently falls
// through and returns ALL users — confirm that is the intended behavior.
@JsonView(JSONView.ParentObject.class)
@RequestMapping(value = "/users", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.GET)
public List<APMUser> findAll(@RequestParam(value = "searchByOperator", required = false) String searchByOperator,
@RequestParam(value = "firstName", required = false) String firstName,
@RequestParam(value = "lastName", required = false) String lastName) {
// both names supplied: combine them with the requested operator
if (StringUtils.hasLength(searchByOperator) && StringUtils.hasLength(firstName)
&& StringUtils.hasLength(lastName)) {
if (searchByOperator.equals("AND"))
return userRepo.findByFirstNameAndLastName(firstName, lastName);
else if (searchByOperator.equals("OR"))
return userRepo.findByFirstNameOrLastName(firstName, lastName);
} else if (StringUtils.hasLength(firstName) && !StringUtils.hasLength(lastName)) {
// first name only: LIKE uses a prefix match (trailing %)
if (StringUtils.hasLength(searchByOperator) && searchByOperator.equals("LIKE"))
return userRepo.findByFirstNameLike(firstName + "%");
return userRepo.findByFirstName(firstName);
} else if (!StringUtils.hasLength(firstName) && StringUtils.hasLength(lastName)) {
// last name only: LIKE uses a prefix match (trailing %)
if (StringUtils.hasLength(searchByOperator) && searchByOperator.equals("LIKE"))
return userRepo.findByLastNameLike(lastName + "%");
return userRepo.findByLastName(lastName);
}
// no usable filter: return every user
return userRepo.findAll();
}
// GET User By Id
// Returns the user serialized with the parent-only JSON view.
// NOTE(review): findOne returns null for an unknown id, producing an
// empty 200 response rather than a 404 — confirm that is intended.
@JsonView(JSONView.ParentObject.class)
@RequestMapping(value = "/users/{userId}", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.GET)
public APMUser getUserById(@PathVariable(value = "userId") Long userId) {
return userRepo.findOne(userId);
}
// GET User By Id with Child objects
// Same lookup, but the JSON view also serializes child associations.
@JsonView(JSONView.ParentObjectWithChildren.class)
@RequestMapping(value = "/users/{userId}/with-children", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.GET)
public APMUser getUserByIdWithChildren(@PathVariable(value = "userId") Long userId) {
return userRepo.findOne(userId);
}
// ADD or Register a new User
//
// Validates the payload, links child objects (org/role/password profile),
// persists the user plus an initial PasswordProfile, then publishes a
// registration event so a confirmation e-mail is sent.
//
// Fix: the original dereferenced addedUser.getUserId() BEFORE its
// "addedUser != null" guard, so the guard could never trigger and only
// obscured the flow. save() either returns the persisted entity or throws,
// so the event is now published unconditionally after persistence.
@RequestMapping(value = "/users", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.PUT)
@Transactional
public @ResponseBody APMResponse addUser(@RequestBody APMUser user) throws RecordExistsException,
        RecordNotFoundException, InvalidUserIdNameCombinationException, MissingMandatoryDataException {
    validateUserExistance("ADD_USER", user);
    // validate mandatory fields are supplied
    if (StringUtils.isEmpty(user.getFirstName()) || StringUtils.isEmpty(user.getLastName())) {
        throw new MissingMandatoryDataException("MISSING_MANDATORY_DATA",
                "FirstName and LastName both the mandatory fields");
    }
    // verify and associate child objects i.e. Org, Role and PasswordProfile
    associateChildObjects(user);
    // save the user
    APMUser addedUser = userRepo.save(user);
    // also initialize PasswordProfile
    passwordProfileRepo.save(new PasswordProfile(addedUser.getUserId()));
    // publish the event that triggers the e-mail-validation message
    eventPublisher.publishEvent(new OnRegistrationCompleteEvent(addedUser, LocaleContextHolder.getLocale()));
    return new APMResponse("USER_CREATED", "User is created successfully").success();
}
// UPDATE User
// Overwrites the stored user with the supplied payload; the path id wins
// over any id in the body. Validation mirrors addUser (NOTE(review): the
// mandatory-field check is duplicated from addUser and could be shared).
@RequestMapping(value = "/users/{userId}", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST)
public @ResponseBody APMResponse updateUser(@PathVariable(value = "userId") Long userId, @RequestBody APMUser user)
throws InvalidUserIdNameCombinationException, RecordExistsException, RecordNotFoundException, MissingMandatoryDataException {
// validate the supplied user first
user.setUserId(userId);
validateUserExistance("UPDATE_USER", user);
// validate mandatory fields are supplied
if (StringUtils.isEmpty(user.getFirstName()) || StringUtils.isEmpty(user.getLastName())) {
throw new MissingMandatoryDataException("MISSING_MANDATORY_DATA",
"FirstName and LastName both the mandatory fields");
}
// verify and associate child objects i.e. Org, Role and Password
// Profile
associateChildObjects(user);
// save the user
userRepo.save(user);
return new APMResponse("USER_UPDATED", "User is updated successfully").success();
}
// DELETE User
//
// Fix: findOne returns null for an unknown id, and the original passed
// that null straight to userRepo.delete(), producing an opaque framework
// exception. Now a missing user raises RecordNotFoundException, matching
// the error style used by the other endpoints in this controller.
@RequestMapping(value = "/users/{userId}", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.DELETE)
@Transactional
public @ResponseBody APMResponse deleteUser(@PathVariable(value = "userId") Long userId)
        throws RecordNotFoundException {
    APMUser user = userRepo.findOne(userId);
    if (user == null) {
        throw new RecordNotFoundException("USER_NOT_FOUND", "No user exists with id " + userId);
    }
    userRepo.delete(user);
    return new APMResponse("USER_DELETED", "User is deleted successfully").success();
}
// User Email Validation
//
// Looks up the verification token, rejects missing/expired tokens, enables
// the user and redirects to the configured confirmation page.
//
// Fix: the redirect URL was built by concatenating raw user-controlled
// values (username/first/last name). Characters such as '&', '=', '#' or
// spaces would corrupt the query string. Values are now URL-encoded.
// NOTE(review): the consumed token is not deleted/invalidated here —
// confirm whether token reuse is acceptable.
@RequestMapping(value = API_USER_CONFIRM_REGISTRATION, method = RequestMethod.GET)
public RedirectView confirmRegistration(@RequestParam("token") String token)
        throws InvalidVerificationTokenException {
    VerificationToken verificationToken = verificationTokenRepo.findByToken(token);
    if (verificationToken == null) {
        throw new InvalidVerificationTokenException("INVALID_TOKEN_ID", "Invalid Verification Token Id");
    }
    APMUser user = verificationToken.getUser();
    Calendar cal = Calendar.getInstance();
    if ((verificationToken.getExpiryDate().getTime() - cal.getTime().getTime()) <= 0) {
        throw new InvalidVerificationTokenException("TOKEN_EXPIRED", "Verification Token has expired");
    }
    user.setEnabled(true);
    userRepo.save(user);
    return new RedirectView(Mappings.REDIRECT_URL_AFTER_REGISTRATION_CONFIRMATION
            + "?uservalidated=true&username=" + encodeQueryParam(user.getUsername())
            + "&firstname=" + encodeQueryParam(user.getFirstName())
            + "&lastname=" + encodeQueryParam(user.getLastName()));
}

// URL-encodes a single query-string value; null becomes the empty string.
private static String encodeQueryParam(String value) {
    if (value == null) {
        return "";
    }
    try {
        return java.net.URLEncoder.encode(value, "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        // UTF-8 is guaranteed by the JVM specification; this cannot happen.
        throw new IllegalStateException("UTF-8 not supported", e);
    }
}
// GET User's Password Profile
@RequestMapping(value = API_USERS_PASSWORDPROFILE_PATH, produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.GET)
public PasswordProfile getUserPasswordProfileById(@PathVariable(value = "userId") Long userId) {
return passwordProfileRepo.findOne(userId);
}
// POST User's Password Profile
@RequestMapping(value = API_USERS_PASSWORDPROFILE_PATH, produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST)
public @ResponseBody APMResponse updateUserPasswordProfile(@PathVariable(value = "userId") Long userId,
@RequestBody PasswordProfile passwordProfile) {
passwordProfile.setUserId(userId);
passwordProfileRepo.save(passwordProfile);
return new APMResponse("PROFILE_UPDATED", "Password Profile is updated successfully").success();
}
private boolean validateUserExistance(String action, APMUser suppliedUser)
throws RecordNotFoundException, InvalidUserIdNameCombinationException, RecordExistsException {
boolean proceed = true;
if (suppliedUser.getUsername() == null || StringUtil.isEmpty(suppliedUser.getUsername()))
throw new RecordNotFoundException("USERNAME_NOT_SUPPLIED", "Username is mandatory field in the request");
APMUser user = userRepo.findByUsername(suppliedUser.getUsername());
if (user != null) {
// case of user add
if (action.equals("ADD_USER"))
throw new RecordExistsException("USER_EXISTS",
"User name " + suppliedUser.getUsername() + " already exists");
// case of User update/delete
else if (!action.equals("ADD_USER") && user.getUserId() != suppliedUser.getUserId())
throw new InvalidUserIdNameCombinationException("USERID_USERNAME_COMBINATION_DOES_NOT_MATCH",
"UserId and Username combination does not match");
} else if (!action.equals("ADD_USER"))
throw new RecordNotFoundException("USER_NOT_FOUND",
"No account found using this User name: " + suppliedUser.getUsername());
return proceed;
}
    /**
     * Resolves the user's child references (Organization, Role, PasswordProfile)
     * against the database and replaces the payload stubs with managed entities.
     *
     * @param user the user whose child objects should be attached
     * @throws RecordNotFoundException if any referenced child does not exist
     */
    private void associateChildObjects(APMUser user) throws RecordNotFoundException {
        if (user.getOrganization() != null) {
            // check if the organization exists
            Long organizationId = user.getOrganization().getOrganizationId();
            Organization organization = orgRepo.findOne(organizationId);
            if (organization != null)
                user.setOrganization(organization);
            else
                throw new RecordNotFoundException("ORGANIZATION_NOT_FOUND",
                        "No Organization found with supplied OrganizationId");
        }
        if (user.getRole() != null) {
            // check if the role exists
            Long roleId = user.getRole().getRoleId();
            Role role = roleRepo.findOne(roleId);
            if (role != null)
                user.setRole(role);
            else
                throw new RecordNotFoundException("ROLE_NOT_FOUND", "No Role found with supplied RoleId");
        }
        if (user.getPasswordProfile() != null) {
            // check if the Password Profile exists.
            // NOTE(review): this looks up by user.getUserId(), which is null
            // for a brand-new user — confirm this branch is only reached on
            // update paths.
            PasswordProfile passwordProfile = passwordProfileRepo.findOne(user.getUserId());
            if (passwordProfile != null)
                user.setPasswordProfile(passwordProfile);
            else
                throw new RecordNotFoundException("PASSWORDPROFILE_NOT_FOUND",
                        "No Password Profile found with supplied UserId");
        }
    }
    /**
     * Maps duplicate-record failures to an APMResponse error payload.
     * NOTE(review): all handlers below deliberately keep the HTTP status at 200
     * (HttpStatus.SC_OK) and signal failure only inside the body — confirm
     * this is the intended API contract.
     */
    @ExceptionHandler(RecordExistsException.class)
    @ResponseBody
    public APMResponse recordExistsResponse(RecordExistsException ex, HttpServletResponse response) {
        response.setStatus(HttpStatus.SC_OK);
        return new APMResponse(ex.getCode(), ex.getMessage()).error();
    }

    /** Maps missing-record failures to an APMResponse error payload. */
    @ExceptionHandler(RecordNotFoundException.class)
    @ResponseBody
    public APMResponse recordNotFoundResponse(RecordNotFoundException ex, HttpServletResponse response) {
        response.setStatus(HttpStatus.SC_OK);
        return new APMResponse(ex.getCode(), ex.getMessage()).error();
    }

    /** Maps invalid or expired verification tokens to an APMResponse error payload. */
    @ExceptionHandler(InvalidVerificationTokenException.class)
    @ResponseBody
    public APMResponse invalidTokenResponse(InvalidVerificationTokenException ex, HttpServletResponse response) {
        response.setStatus(HttpStatus.SC_OK);
        return new APMResponse(ex.getCode(), ex.getMessage()).error();
    }

    /** Maps userId/username mismatches to an APMResponse error payload. */
    @ExceptionHandler(InvalidUserIdNameCombinationException.class)
    @ResponseBody
    public APMResponse invalidUserIdNameCombinationResponse(InvalidUserIdNameCombinationException ex,
            HttpServletResponse response) {
        response.setStatus(HttpStatus.SC_OK);
        return new APMResponse(ex.getCode(), ex.getMessage()).error();
    }

    /** Maps missing-mandatory-field failures to an APMResponse error payload. */
    @ExceptionHandler(MissingMandatoryDataException.class)
    @ResponseBody
    public APMResponse missingMandatoryDataResponse(MissingMandatoryDataException ex, HttpServletResponse response) {
        response.setStatus(HttpStatus.SC_OK);
        return new APMResponse(ex.getCode(), ex.getMessage()).error();
    }
}
| apache-2.0 |
grubmatt/15-121 | HW3/DoubleNode.java | 341 | package HW3;
/*
*
* DO NOT CHANGE/EDIT THIS FILE!
*
*
*/
public class DoubleNode<E> {
public DoubleNode<E> prev;
public E data;
public DoubleNode<E> next;
public DoubleNode(E data) {
this.data = data;
}
public boolean equals(DoubleNode<E> otherNode) {
return this.data.equals(otherNode.data);
}
}
| apache-2.0 |
oskopek/optaplanner | optaplanner-examples/src/test/java/org/optaplanner/examples/nqueens/persistence/NQueensDaoTest.java | 1338 | /*
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.nqueens.persistence;
import java.io.File;
import java.util.Collection;
import org.junit.runners.Parameterized;
import org.optaplanner.examples.common.persistence.SolutionDao;
import org.optaplanner.examples.common.persistence.SolutionDaoTest;
/**
 * Parameterized persistence round-trip test for NQueens solution files; the
 * shared test logic lives in {@link SolutionDaoTest}.
 */
public class NQueensDaoTest extends SolutionDaoTest {
    @Override
    protected SolutionDao createSolutionDao() {
        return new NQueensDao();
    }

    /** Supplies every persisted NQueens solution file as a JUnit parameter. */
    @Parameterized.Parameters(name = "{index}: {0}")
    public static Collection<Object[]> getSolutionFilesAsParameters() {
        return getSolutionFilesAsParameters(new NQueensDao());
    }

    /** @param solutionFile the solution file exercised by this test instance */
    public NQueensDaoTest(File solutionFile) {
        super(solutionFile);
    }
}
| apache-2.0 |
lsmaira/gradle | subprojects/workers/src/main/java/org/gradle/workers/internal/DefaultWorkerConfiguration.java | 3563 | /*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.workers.internal;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.gradle.api.Action;
import org.gradle.api.internal.DefaultActionConfiguration;
import org.gradle.internal.file.PathToFileResolver;
import org.gradle.process.JavaForkOptions;
import org.gradle.process.internal.DefaultJavaForkOptions;
import org.gradle.util.GUtil;
import org.gradle.workers.ForkMode;
import org.gradle.workers.IsolationMode;
import org.gradle.workers.WorkerConfiguration;
import java.io.File;
import java.util.List;
/**
 * Default {@link WorkerConfiguration} implementation backed by
 * {@link DefaultJavaForkOptions}. Also bridges between the legacy
 * {@link ForkMode} API and the newer {@link IsolationMode} API.
 */
public class DefaultWorkerConfiguration extends DefaultActionConfiguration implements WorkerConfiguration {
    private final JavaForkOptions forkOptions;
    private IsolationMode isolationMode = IsolationMode.AUTO;
    private List<File> classpath = Lists.newArrayList();
    private String displayName;

    public DefaultWorkerConfiguration(PathToFileResolver fileResolver) {
        // Fork options start out with an empty, mutable environment map.
        forkOptions = new DefaultJavaForkOptions(fileResolver);
        forkOptions.setEnvironment(Maps.<String, Object>newHashMap());
    }

    @Override
    public Iterable<File> getClasspath() {
        return classpath;
    }

    @Override
    public void setClasspath(Iterable<File> classpath) {
        // Snapshot the supplied files into a fresh mutable list.
        this.classpath = Lists.newArrayList(classpath);
    }

    @Override
    public IsolationMode getIsolationMode() {
        return isolationMode;
    }

    /** Null is normalized to {@link IsolationMode#AUTO}. */
    public void setIsolationMode(IsolationMode isolationMode) {
        if (isolationMode == null) {
            this.isolationMode = IsolationMode.AUTO;
        } else {
            this.isolationMode = isolationMode;
        }
    }

    /** Projects the current isolation mode onto the legacy ForkMode enum. */
    @Override
    public ForkMode getForkMode() {
        if (isolationMode == IsolationMode.AUTO) {
            return ForkMode.AUTO;
        }
        if (isolationMode == IsolationMode.NONE || isolationMode == IsolationMode.CLASSLOADER) {
            return ForkMode.NEVER;
        }
        if (isolationMode == IsolationMode.PROCESS) {
            return ForkMode.ALWAYS;
        }
        throw new IllegalStateException();
    }

    /** Translates the legacy ForkMode into the equivalent isolation mode. */
    @Override
    public void setForkMode(ForkMode forkMode) {
        if (forkMode == ForkMode.AUTO) {
            setIsolationMode(IsolationMode.AUTO);
        } else if (forkMode == ForkMode.NEVER) {
            setIsolationMode(IsolationMode.CLASSLOADER);
        } else if (forkMode == ForkMode.ALWAYS) {
            setIsolationMode(IsolationMode.PROCESS);
        }
    }

    @Override
    public JavaForkOptions getForkOptions() {
        return forkOptions;
    }

    @Override
    public void classpath(Iterable<File> files) {
        // Append (rather than replace) the given files.
        GUtil.addToCollection(classpath, files);
    }

    @Override
    public void forkOptions(Action<? super JavaForkOptions> forkOptionsAction) {
        forkOptionsAction.execute(forkOptions);
    }

    @Override
    public String getDisplayName() {
        return displayName;
    }

    @Override
    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }
}
| apache-2.0 |
qtproject/qtqa-gerrit | java/com/google/gerrit/server/extensions/events/ChangeRestored.java | 3311 | // Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.extensions.events;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.exceptions.StorageException;
import com.google.gerrit.extensions.api.changes.NotifyHandling;
import com.google.gerrit.extensions.common.AccountInfo;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.RevisionInfo;
import com.google.gerrit.extensions.events.ChangeRestoredListener;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.server.GpgException;
import com.google.gerrit.server.account.AccountState;
import com.google.gerrit.server.patch.PatchListNotAvailableException;
import com.google.gerrit.server.patch.PatchListObjectTooLargeException;
import com.google.gerrit.server.permissions.PermissionBackendException;
import com.google.gerrit.server.plugincontext.PluginSetContext;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import java.io.IOException;
import java.sql.Timestamp;
@Singleton
public class ChangeRestored {
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  // Registered plugin listeners; the context isolates individual listener failures.
  private final PluginSetContext<ChangeRestoredListener> listeners;
  private final EventUtil util;

  @Inject
  ChangeRestored(PluginSetContext<ChangeRestoredListener> listeners, EventUtil util) {
    this.listeners = listeners;
    this.util = util;
  }

  /**
   * Fires a change-restored event to all registered listeners.
   * Event-construction failures are logged and swallowed so listener
   * notification never breaks the restoring operation.
   */
  public void fire(
      Change change, PatchSet ps, AccountState restorer, String reason, Timestamp when) {
    // Skip event construction entirely when nobody is listening.
    if (listeners.isEmpty()) {
      return;
    }
    try {
      Event event =
          new Event(
              util.changeInfo(change),
              util.revisionInfo(change.getProject(), ps),
              util.accountInfo(restorer),
              reason,
              when);
      listeners.runEach(l -> l.onChangeRestored(event));
    } catch (PatchListObjectTooLargeException e) {
      // Oversized patch lists are a known condition; warn without a stack trace.
      logger.atWarning().log("Couldn't fire event: %s", e.getMessage());
    } catch (PatchListNotAvailableException
        | GpgException
        | IOException
        | StorageException
        | PermissionBackendException e) {
      logger.atSevere().withCause(e).log("Couldn't fire event");
    }
  }

  /** Event payload exposing the restore reason to listeners. */
  private static class Event extends AbstractRevisionEvent implements ChangeRestoredListener.Event {
    private String reason;

    Event(
        ChangeInfo change,
        RevisionInfo revision,
        AccountInfo restorer,
        String reason,
        Timestamp when) {
      super(change, revision, restorer, when, NotifyHandling.ALL);
      this.reason = reason;
    }

    @Override
    public String getReason() {
      return reason;
    }
  }
}
| apache-2.0 |
adragomir/hbaseindex | src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowResultGenerator.java | 2804 | /*
* Copyright 2009 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.stargate;
import java.io.IOException;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Result;
/**
 * Result generator that serves the KeyValues of a single HBase row, fetched
 * eagerly in the constructor via a pooled HTable.
 */
public class RowResultGenerator extends ResultGenerator {
  // Iterator over the fetched row's KeyValues; stays null when the row is
  // missing or empty, which makes hasNext()/next() report end-of-stream.
  private Iterator<KeyValue> valuesI;

  /**
   * Fetches the row described by the given row spec.
   *
   * @param tableName table to read from (borrowed from the servlet's pool)
   * @param rowspec   row key plus optional columns, time range and max versions
   * @throws IOException if the underlying Get fails
   */
  public RowResultGenerator(String tableName, RowSpec rowspec)
    throws IllegalArgumentException, IOException {
    HTablePool pool = RESTServlet.getInstance().getTablePool(tableName);
    HTable table = pool.get();
    try {
      Get get = new Get(rowspec.getRow());
      if (rowspec.hasColumns()) {
        get.addColumns(rowspec.getColumns());
      } else {
        // rowspec does not explicitly specify columns, return them all
        for (HColumnDescriptor family:
            table.getTableDescriptor().getFamilies()) {
          get.addFamily(family.getName());
        }
      }
      get.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
      get.setMaxVersions(rowspec.getMaxVersions());
      Result result = table.get(get);
      if (result != null && !result.isEmpty()) {
        valuesI = result.list().iterator();
      }
    } finally {
      // Always return the table to the pool, even when the Get fails.
      pool.put(table);
    }
  }

  /** Nothing to release: the pooled table was already returned by the constructor. */
  public void close() {
  }

  public boolean hasNext() {
    if (valuesI == null) {
      return false;
    }
    return valuesI.hasNext();
  }

  /** Returns the next KeyValue, or null when the row is absent or exhausted. */
  public KeyValue next() {
    if (valuesI == null) {
      return null;
    }
    try {
      return valuesI.next();
    } catch (NoSuchElementException e) {
      // Treat exhaustion as end-of-stream rather than an error.
      return null;
    }
  }

  public void remove() {
    throw new UnsupportedOperationException("remove not supported");
  }
}
| apache-2.0 |
davidrudder23/OpenNotification | src/net/reliableresponse/notification/sender/RTSender.java | 6742 | /*
* Created on May 27, 2005
*
*Copyright Reliable Response, 2005
*/
package net.reliableresponse.notification.sender;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import net.reliableresponse.notification.Notification;
import net.reliableresponse.notification.broker.BrokerFactory;
import net.reliableresponse.notification.usermgmt.Member;
/**
 * Notification sender that pushes responses into an RT (Request Tracker)
 * database over JDBC.
 *
 * <p>Recognized responses transition the ticket status (open / new / stalled /
 * rejected / resolved); any other response text is stored as a plain-text
 * attachment on the configured transaction. Free-form comment text supplied
 * with a response is always attached first.
 *
 * @author drig
 *
 * Copyright 2004 - David Rudder
 */
public class RTSender extends AbstractNotificationSender {
	// Indices understood by addVariable() for configuring this sender.
	public static final int DBTYPE = 1;
	public static final int DBNAME = 2;
	public static final int DBHOST = 3;
	public static final int DBUSER = 4;
	public static final int DBPASSWORD = 5;
	public static final int TRANSACTIONID = 6;

	private String dbType;
	private String dbName;
	private String dbHost;
	private String dbUser;
	private String dbPassword;
	private int transactionID;

	/* (non-Javadoc)
	 * @see net.reliableresponse.notification.sender.NotificationSender#addVariable(int, java.lang.String)
	 */
	public void addVariable(int index, String value) {
		switch (index) {
		case DBTYPE: dbType = value;
			break;
		case DBNAME: dbName = value;
			break;
		case DBHOST: dbHost = value;
			break;
		case DBUSER: dbUser = value;
			break;
		case DBPASSWORD: dbPassword = value;
			break;
		case TRANSACTIONID: try {
				// A malformed id is logged and ignored; transactionID keeps its value.
				transactionID = Integer.parseInt(value);
			} catch (NumberFormatException e) {
				BrokerFactory.getLoggingBroker().logError(e);
			}
			break;
		}
	}

	/** Returns the configuration values in the same order as the index constants. */
	public String[] getVariables() {
		return new String[] {dbType, dbName, dbHost, dbUser, dbPassword, transactionID+""};
	}

	/** Responses offered to a user for any RT-backed notification. */
	public String[] getAvailableResponses(Notification notification) {
		String[] responses = {"Open", "Set as New", "Set as Stalled", "Reject", "Resolve"};
		return responses;
	}

	/**
	 * Inserts the given text as a plain-text attachment on the configured
	 * transaction. SQL failures are logged, not rethrown.
	 */
	private void addAttachment (Connection connection, Member responder, Notification notification, String text) {
		String sql = "INSERT INTO attachments (transactionID, parent, messageid, subject, "+
			"contenttype, contentencoding, content, headers, creator, created) "+
			"VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
		PreparedStatement stmt = null;
		try {
			stmt = connection.prepareStatement(sql);
			stmt.setInt (1, transactionID);
			stmt.setInt (2, 0); // no parent attachment
			stmt.setString(3, "Reliable-Response-Notification_UUID_"+System.currentTimeMillis()+": "+notification.getUuid());
			stmt.setString (4, "Reliable Response Notification response by "+responder.toString());
			stmt.setString (5, "text/plain");
			stmt.setString (6, "none");
			stmt.setString (7, text);
			stmt.setString (8, "");
			stmt.setInt(9, 0); // creator 0 — presumably the system user; confirm against RT schema
			stmt.setDate (10, new Date(System.currentTimeMillis()));
			stmt.executeUpdate();
		} catch (SQLException e) {
			BrokerFactory.getLoggingBroker().logError(e);
		} finally {
			try {
				if (stmt != null)
					stmt.close();
			} catch (SQLException e1) {
				BrokerFactory.getLoggingBroker().logError(e1);
			}
		}
	}

	/**
	 * Updates the status of the ticket owning the configured transaction.
	 * SQL failures are logged, not rethrown.
	 */
	private void setStatus (Connection connection, String status) {
		String sql = "UPDATE tickets SET status=? WHERE id=(SELECT objectid FROM transactions WHERE id=?)";
		PreparedStatement stmt = null;
		try {
			stmt = connection.prepareStatement(sql);
			stmt.setString(1, status);
			stmt.setInt (2, transactionID);
			stmt.executeUpdate();
		} catch (SQLException e) {
			BrokerFactory.getLoggingBroker().logError(e);
		} finally {
			try {
				if (stmt != null)
					stmt.close();
			} catch (SQLException e1) {
				BrokerFactory.getLoggingBroker().logError(e1);
			}
		}
	}

	/**
	 * Applies the user's response to the RT ticket: attaches any free-form
	 * text, then either transitions the ticket status or records the response
	 * itself as an attachment. Responses to expired notifications are logged
	 * and ignored.
	 */
	public void handleResponse(Notification notification, Member responder, String response, String text) {
		super.handleResponse(notification, responder, response, text);
		if (notification.getStatus() == Notification.EXPIRED) {
			BrokerFactory.getLoggingBroker().logInfo(responder+" tried to confirm an expired notification with uuid "+notification.getUuid());
			return;
		}
		String url = null;
		Connection connection = null;
		try {
			// Build the JDBC URL for the configured database type. An
			// unrecognized type leaves url null, which fails below with a
			// logged SQLException.
			if ((dbType.equalsIgnoreCase("pg")) ||(dbType.equalsIgnoreCase("postgresql"))) {
				Class.forName("org.postgresql.Driver");
				url = "jdbc:postgresql://"+dbHost+"/"+dbName;
			} else if (dbType.equalsIgnoreCase("mysql")) {
				Class.forName("com.mysql.jdbc.Driver");
				url = "jdbc:mysql://"+dbHost+"/"+dbName;
			} else if (dbType.equalsIgnoreCase("oracle")) {
				Class.forName("oracle.jdbc.driver.OracleDriver");
				// BUG FIX: the URL previously began with "dbc:oracle:..."
				// (missing the leading 'j') and could never connect.
				url = "jdbc:oracle:thin:@"+dbHost+":1521:"+dbName;
			}
			connection = DriverManager.getConnection(url, dbUser, dbPassword);
			if ((text != null) && (text.length()>0)) {
				addAttachment(connection, responder, notification, text);
			}
			if (response.equalsIgnoreCase("open")) {
				notification.addMessage("Notification opened", responder);
				notification.setStatus(Notification.PENDING, responder);
				setStatus(connection, "open");
			} else if (response.equalsIgnoreCase("set as new")) {
				notification.addMessage("Notification set as new", responder);
				notification.setStatus(Notification.CONFIRMED, responder);
				BrokerFactory.getNotificationBroker().logConfirmation(responder, notification);
				setStatus(connection, "new");
			} else if (response.equalsIgnoreCase("set as stalled")) {
				notification.addMessage("Notification set as stalled", responder);
				notification.setStatus(Notification.CONFIRMED, responder);
				BrokerFactory.getNotificationBroker().logConfirmation(responder, notification);
				setStatus(connection, "stalled");
			} else if (response.equalsIgnoreCase("reject")) {
				notification.addMessage("Notification rejected", responder);
				notification.setStatus(Notification.CONFIRMED, responder);
				BrokerFactory.getNotificationBroker().logConfirmation(responder, notification);
				setStatus(connection, "rejected");
			} else if (response.equalsIgnoreCase("resolve")) {
				notification.addMessage("Notification resolved", responder);
				notification.setStatus(Notification.CONFIRMED, responder);
				BrokerFactory.getNotificationBroker().logConfirmation(responder, notification);
				setStatus(connection, "resolved");
			} else {
				// Unrecognized response: record it verbatim as an attachment.
				addAttachment(connection, responder, notification, response);
			}
		} catch (ClassNotFoundException e) {
			BrokerFactory.getLoggingBroker().logError(e);
		} catch (SQLException e) {
			BrokerFactory.getLoggingBroker().logError(e);
		} finally {
			try {
				if (connection != null) connection.close();
			} catch (SQLException e1) {
				BrokerFactory.getLoggingBroker().logError(e1);
			}
		}
	}

	public String toString() {
		return "RT: Request Tracker";
	}
}
| apache-2.0 |
seanchenxi/gwt-wordpress | src/main/java/com/seanchenxi/gwt/wordpress/json/util/Configuration.java | 2079 | /*
* Copyright 2013 Xi CHEN
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.seanchenxi.gwt.wordpress.json.util;
import com.google.gwt.i18n.client.Dictionary;
/**
 * Client-side access to dynamic settings published through the
 * {@link com.google.gwt.i18n.client.Dictionary} named "WPJsonAPIConfig".
 *
 * @author Xi
 *
 */
public class Configuration {

  private static final String ConfigName = "WPJsonAPIConfig";
  private static final String ServicePathKey = "servicePath";
  private static final String RequestTimeoutKey = "requestTimeout";

  private static Dictionary jServiceConfig;
  private static Configuration instance;

  /** Lazily creates and returns the singleton instance. */
  public static Configuration getInstance() {
    if (instance == null) {
      instance = new Configuration();
    }
    return instance;
  }

  /**
   * Returns the plugin Json API's service path, trimmed.
   * Falls back to "/api" when the dictionary is unavailable or does not
   * define "servicePath".
   *
   * @return the relative service path
   */
  public String getServicePath(){
    try {
      String path = jServiceConfig.get(ServicePathKey);
      return path.trim();
    } catch (Exception e) {
      return "/api";
    }
  }

  /**
   * Returns the HTTP request timeout in milliseconds.
   * Falls back to 20000 (20s) when the dictionary is unavailable or the
   * "requestTimeout" value is not an integer.
   *
   * @return the defined HTTP request timeout time
   */
  public int getRequestTimeoutTime(){
    try {
      String timeout = jServiceConfig.get(RequestTimeoutKey);
      return Integer.parseInt(timeout);
    } catch (Exception e) {
      return 20000;
    }
  }

  private Configuration(){
    Dictionary config;
    try {
      config = Dictionary.getDictionary(ConfigName);
    } catch (Exception e) {
      config = null;
    }
    jServiceConfig = config;
  }
}
| apache-2.0 |
vzagnitko/TestProject | src/main/java/ua/test/repository/file/FileRepository.java | 198 | package ua.test.repository.file;
import ua.test.repository.Repository;
/**
 * Marker repository describing methods to work with files. Concrete
 * operations are inherited from {@link Repository}; this type exists so
 * file-backed implementations can be injected and identified distinctly.
 *
 * @author vzagnitko
 */
public interface FileRepository extends Repository {
}
| apache-2.0 |
algohub/judge-engine | src/test/java/org/algohub/engine/serde/DeserializerTest.java | 3914 | package org.algohub.engine.serde;
import com.fasterxml.jackson.databind.node.BooleanNode;
import com.fasterxml.jackson.databind.node.DoubleNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.TextNode;
import java.util.ArrayList;
import org.algohub.engine.collection.LinkedListNode;
import org.algohub.engine.type.TypeNode;
import org.junit.Test;
import java.util.HashMap;
import java.util.HashSet;
import static org.algohub.engine.codegenerator.DataTypes.*;
import static org.algohub.engine.serde.SharedData.*;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
@SuppressWarnings({"PMD.CommentRequired"})
public class DeserializerTest {
  /** Round-trips each primitive type from its Jackson JSON node representation. */
  @Test public void deserializePrimitiveTest() {
    assertEquals(Boolean.TRUE,
        Deserializer.fromJson(TypeNode.fromString("bool"), BooleanNode.TRUE));
    assertEquals(Boolean.FALSE,
        Deserializer.fromJson(TypeNode.fromString("bool"), BooleanNode.FALSE));
    assertEquals('a',
        Deserializer.fromJson(TypeNode.fromString("char"), TextNode.valueOf("a")));
    assertEquals(Integer.valueOf(123),
        Deserializer.fromJson(TypeNode.fromString("int"), IntNode.valueOf(123)));
    assertEquals(Long.valueOf(123),
        Deserializer.fromJson(TypeNode.fromString("long"), IntNode.valueOf(123)));
    assertEquals(Double.valueOf(123.0),
        Deserializer.fromJson(TypeNode.fromString("double"), DoubleNode.valueOf(123.0)));
    assertEquals("algohub",
        Deserializer.fromJson(TypeNode.fromString("string"), TextNode.valueOf("algohub")));
  }

  /**
   * Deserializes the shared JSON fixtures into each supported container type
   * (arrays, lists, sets, linked lists, maps, binary trees) and their nested
   * combinations, comparing against the expected values from SharedData.
   */
  @Test public void deserializeCollectionTest() {
    assertArrayEquals(arrayInt, (int[]) Deserializer.fromJson(ARRAY_INT, arrayIntJson));
    assertEquals(listInt, Deserializer.fromJson(LIST_INT, arrayIntJson));
    assertEquals(setInt, Deserializer.fromJson(SET_INT, arrayIntJson));
    assertEquals(linkedListInt, Deserializer.fromJson(LINKED_LIST_INT, arrayIntJson));
    // empty linked list deserializes to null
    assertEquals(null, Deserializer.fromJson(LINKED_LIST_INT, emptyArrayJson));
    assertEquals(mapStringInt, Deserializer.fromJson(MAP_STRING_INT, mapStringIntJson));
    assertEquals(mapIntDouble, Deserializer.fromJson(MAP_INT_DOUBLE, mapIntDoubleJson));
    // empty binary tree deserializes to null
    assertEquals(null, Deserializer.fromJson(BINARY_TREE_INT, emptyArrayJson));
    assertEquals(binaryTree, Deserializer.fromJson(BINARY_TREE_INT, binaryTreeJson));
    // nested containers built from the same two-row fixture
    final int[][] arrayArrayIntActual = (int[][]) Deserializer.fromJson(
        ARRAY_ARRAY_INT, arrayArrayIntJson);
    assertArrayEquals(arrayArrayInt[0], arrayArrayIntActual[0]);
    assertArrayEquals(arrayArrayInt[1], arrayArrayIntActual[1]);
    final ArrayList<ArrayList<Integer>> listListIntActual =
        (ArrayList<ArrayList<Integer>>) Deserializer.fromJson(LIST_LIST_INT, arrayArrayIntJson);
    assertEquals(listListInt, listListIntActual);
    final LinkedListNode<LinkedListNode<Integer>> linkedListLinkedListIntActual =
        (LinkedListNode) Deserializer.fromJson(LINKED_LIST_LINKED_LIST_INT, arrayArrayIntJson);
    assertEquals(linkedListLinkedListInt, linkedListLinkedListIntActual);
    final LinkedListNode<Integer>[] arrayLinkedListIntActual = (LinkedListNode[]) Deserializer
        .fromJson(ARRAY_LINKED_LIST_INT, arrayArrayIntJson);
    assertArrayEquals(arrayLinkedListInt, arrayLinkedListIntActual);
    final HashSet<LinkedListNode<Integer>> setLinkedListIntActual = (HashSet) Deserializer.fromJson(
        SET_LINKED_LIST_INT, arrayArrayIntJson);
    assertEquals(setLinkedListInt, setLinkedListIntActual);
    final HashMap<String, LinkedListNode<Integer>> mapStringLinkedListIntActual =
        (HashMap<String, LinkedListNode<Integer>>) Deserializer.fromJson(
            MAP_STRING_LINKED_LIST_INT, mapStringLinkedListIntJson);
    assertEquals(mapStringLinkedListInt, mapStringLinkedListIntActual);
  }
}
| apache-2.0 |
realityforge/arez | doc-examples/src/main/java/arez/doc/examples/reference/GroupRepository.java | 449 | package arez.doc.examples.reference;
import arez.annotations.ArezComponent;
import arez.component.internal.AbstractRepository;
import javax.annotation.Nullable;
@ArezComponent
public abstract class GroupRepository
  extends AbstractRepository<Integer, Group, GroupRepository>
{
  //DOC ELIDE START
  //DOC ELIDE END

  /**
   * Locates the Group with the specified id, if any.
   *
   * @param id the group id.
   * @return the Group with the specified id, or null if no such group exists.
   */
  @Nullable
  public Group findById( final int id )
  {
    return findByArezId( id );
  }
  //DOC ELIDE START
  //DOC ELIDE END
}
| apache-2.0 |
zafartahirov/AndroidGameDev | src/cc/rafazz/framework/Sound.java | 115 | package cc.rafazz.framework;
/** Abstraction over a loaded audio clip in the game framework. */
public interface Sound {
	/**
	 * Plays the clip at the given volume.
	 *
	 * @param volume playback volume; assumed to be in the range 0..1 — TODO confirm
	 */
	public void play(float volume);

	/** Releases the underlying audio resources; the sound is unusable afterwards. */
	public void dispose();
}
| apache-2.0 |
horsy/SchoolManage | src/com/school/view/web/AddBloodGroup.java | 347 | package com.school.view.web;
import com.opensymphony.xwork2.ActionSupport;
/**
 * Struts 2 action backing the "add blood group" page. It performs no
 * processing of its own and always forwards to the SUCCESS result.
 */
public class AddBloodGroup extends ActionSupport {
	private static final long serialVersionUID = -166671487869915375L;

	@Override
	public String execute() throws Exception {
		// No work required; simply render the configured success view.
		return SUCCESS;
	}
}
| apache-2.0 |
johnjohndoe/ScheduleParser | library/src/main/java/info/metadude/android/library/schedule/parser/model/Lang.java | 1661 | package info.metadude.android.library.schedule.parser.model;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Generated;
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("org.jsonschema2pojo")
@JsonPropertyOrder({
"id",
"label_en"
})
public class Lang {
@JsonProperty("id")
private String id;
@JsonProperty("label_en")
private String labelEn;
@JsonIgnore
private Map<String, Object> additionalProperties = new HashMap<String, Object>();
/**
* @return The id
*/
@JsonProperty("id")
public String getId() {
return id;
}
/**
* @param id The id
*/
@JsonProperty("id")
public void setId(String id) {
this.id = id;
}
/**
* @return The labelEn
*/
@JsonProperty("label_en")
public String getLabelEn() {
return labelEn;
}
/**
* @param labelEn The label_en
*/
@JsonProperty("label_en")
public void setLabelEn(String labelEn) {
this.labelEn = labelEn;
}
@JsonAnyGetter
public Map<String, Object> getAdditionalProperties() {
return this.additionalProperties;
}
@JsonAnySetter
public void setAdditionalProperty(String name, Object value) {
this.additionalProperties.put(name, value);
}
}
| apache-2.0 |
SSEHUB/EASyProducer | Plugins/VarModel/Model/src/net/ssehub/easy/varModel/cstEvaluation/LocalConfiguration.java | 8660 | /*
* Copyright 2009-2014 University of Hildesheim, Software Systems Engineering
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ssehub.easy.varModel.cstEvaluation;
import java.util.HashMap;
import java.util.Map;
import net.ssehub.easy.basics.modelManagement.IRestrictionEvaluationContext;
import net.ssehub.easy.basics.modelManagement.IVariable;
import net.ssehub.easy.basics.modelManagement.RestrictionEvaluationException;
import net.ssehub.easy.basics.modelManagement.Version;
import net.ssehub.easy.varModel.confModel.AssignmentState;
import net.ssehub.easy.varModel.confModel.ConfigurationException;
import net.ssehub.easy.varModel.confModel.IConfiguration;
import net.ssehub.easy.varModel.confModel.IDecisionVariable;
import net.ssehub.easy.varModel.model.AbstractVariable;
import net.ssehub.easy.varModel.model.DecisionVariableDeclaration;
import net.ssehub.easy.varModel.model.datatypes.Compound;
import net.ssehub.easy.varModel.model.datatypes.CustomOperation;
import net.ssehub.easy.varModel.model.datatypes.IDatatype;
import net.ssehub.easy.varModel.model.datatypes.Reference;
import net.ssehub.easy.varModel.model.datatypes.Set;
import net.ssehub.easy.varModel.model.datatypes.TypeQueries;
import net.ssehub.easy.varModel.model.values.ReferenceValue;
import net.ssehub.easy.varModel.model.values.Value;
import net.ssehub.easy.varModel.model.values.ValueDoesNotMatchTypeException;
import net.ssehub.easy.varModel.model.values.ValueFactory;
/**
* Creates a local configuration instance.
*
* @author Holger Eichelberger
*/
public class LocalConfiguration implements IConfiguration, IRestrictionEvaluationContext {

    // Backing store: declaration -> local decision. Insertion order is not
    // guaranteed (HashMap); bind(..) below iterates over values() and takes the
    // first type match, so ordering-sensitive callers should be aware of this.
    private Map<AbstractVariable, IDecisionVariable> map = new HashMap<AbstractVariable, IDecisionVariable>();

    /**
     * Creates a local configuration instance.
     */
    public LocalConfiguration() {
    }

    @Override
    public IDecisionVariable getDecision(AbstractVariable declaration) {
        return map.get(declaration);
    }

    /**
     * Adds a (local) decision to this configuration.
     *
     * @param decision the related decision
     * @return <code>decision</code> (builder pattern style)
     */
    public IDecisionVariable addDecision(IDecisionVariable decision) {
        map.put(decision.getDeclaration(), decision);
        return decision;
    }

    // Returns an empty set value of the given element type; this local
    // configuration does not track instances itself.
    @Override
    public Value getAllInstances(IDatatype type) {
        Value result;
        try {
            Set set = new Set("", type, null);
            result = ValueFactory.createValue(set, new Object[]{});
        } catch (ValueDoesNotMatchTypeException e) {
            result = null; // shall not happen
        }
        return result;
    }

    /**
     * Binds the given <code>type</code> to the first matching decision variable. Rationale: Static (all-quantized)
     * variable access needs to be bound automatically. In that case, there is an implicitly created enclosing quantor
     * (rewritten expression), and only one, i.e., the first one can be bound.
     *
     * @param type the type to bind to
     * @param context the evaluation context
     * @return the bound value (may be <b>null</b> if there is none)
     */
    Value bind(IDatatype type, EvaluationContext context) {
        Value result = null;
        // is this unique??
        for (IDecisionVariable var : map.values()) {
            AbstractVariable decl = var.getDeclaration();
            // skip the synthetic iterator-result variable used by quantors
            if (!LocalDecisionVariable.ITERATOR_RESULT_VARNAME.equals(decl.getName())) {
                IDatatype varType = decl.getType();
                if (TypeQueries.sameTypes(type, varType)) {
                    // direct type match -> bind to this variable's value
                    result = var.getValue();
                    break;
                } else {
                    if (varType instanceof Reference) { // explicitly only 1 step dereference due to allInstances
                        varType = ((Reference) varType).getType();
                        if (TypeQueries.sameTypes(type, varType)) {
                            ReferenceValue ref = (ReferenceValue) var.getValue();
                            if (null != ref) {
                                IDecisionVariable refVar = context.getDecision(ref.getValue());
                                // refVar may be null if compound value is stated in container initializer
                                result = null == refVar ? null : refVar.getValue();
                                break;
                            }
                        }
                    }
                }
            }
        }
        return result;
    }

    // restriction evaluation

    // Assigns a version value to the given (declaration) variable, creating a
    // local decision on demand. Compounds receive the version via their
    // "version" slot (legacy IVML notation project.variable).
    @Override
    public void setValue(IVariable variable, Version version) throws RestrictionEvaluationException {
        if (variable instanceof DecisionVariableDeclaration) {
            DecisionVariableDeclaration decl = (DecisionVariableDeclaration) variable;
            IDecisionVariable var = getDecision(decl);
            if (null == var) {
                var = addDecision(new LocalDecisionVariable(decl, this, null));
            }
            try {
                Value val;
                if (Compound.TYPE.isAssignableFrom(decl.getType())) {
                    // special case for legacy IVML notation project.variable
                    if (null == version) { // otherwise version itself is set to null in compound
                        version = Version.NULL_VALUE;
                    }
                    val = ValueFactory.createValue(decl.getType(), new Object[]{"version", version});
                } else {
                    val = ValueFactory.createValue(decl.getType(), version);
                }
                var.setValue(val, AssignmentState.ASSIGNED);
            } catch (ValueDoesNotMatchTypeException e) {
                throw new RestrictionEvaluationException(e.getMessage(), e.getId());
            } catch (ConfigurationException e) {
                throw new RestrictionEvaluationException(e.getMessage(), e.getId());
            }
        } else {
            throw new RestrictionEvaluationException("unsupported type", RestrictionEvaluationException.ID_INTERNAL);
        }
    }

    // Removes the local decision for the given variable, if any.
    @Override
    public void unsetValue(IVariable variable) throws RestrictionEvaluationException {
        if (variable instanceof DecisionVariableDeclaration) {
            map.remove((DecisionVariableDeclaration) variable);
        } else {
            throw new RestrictionEvaluationException("unsupported type", RestrictionEvaluationException.ID_INTERNAL);
        }
    }

    @Override
    public Object startEvaluation() throws RestrictionEvaluationException {
        // not relevant here as no contexts are considered and in the evaluation context this class
        // is supposed to be used as a single instance rather than a stack
        return null; // unused
    }

    @Override
    public void endEvaluation(Object processor) throws RestrictionEvaluationException {
        // not relevant here as no contexts are considered and in the evaluation context this class
        // is supposed to be used as a single instance rather than a stack
    }

    /**
     * Rebinds the parameters from <code>actual</code> to <code>replacement</code>, e.g., for dynamic
     * dispatch. As a prerequisite, <code>actual</code> and <code>replacement</code> must have the same
     * number of parameters and compatible parameter types, i.e., <code>replacement</code> must have the
     * same or more specific parameter types then <code>actual</code>.
     *
     * @param actual the operation for which the parameters shall be replace
     * @param replacement the operation replacing <code>actual</code>
     */
    void rebind(CustomOperation actual, CustomOperation replacement) {
        // silently a no-op if the parameter counts differ (precondition violated)
        if (actual.getParameterCount() == replacement.getParameterCount()) {
            for (int p = 0, n = actual.getParameterCount(); p < n; p++) {
                map.put(replacement.getParameterDeclaration(p), map.remove(actual.getParameterDeclaration(p)));
            }
        }
    }

    @Override
    public String toString() {
        return map.toString();
    }

}
| apache-2.0 |
mtunique/flink | flink-runtime/src/test/java/org/apache/flink/runtime/query/netty/message/KvStateRequestSerializerTest.java | 20016 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.query.netty.message;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.query.KvStateID;
import org.apache.flink.runtime.query.TaskKvStateRegistry;
import org.apache.flink.runtime.state.KeyGroupRange;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.runtime.state.heap.HeapKeyedStateBackend;
import org.apache.flink.runtime.state.internal.InternalKvState;
import org.apache.flink.runtime.state.internal.InternalListState;
import org.apache.flink.runtime.state.internal.InternalMapState;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.UnpooledByteBufAllocator;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
/**
 * Tests for {@link KvStateRequestSerializer}.
 */
@RunWith(Parameterized.class)
public class KvStateRequestSerializerTest {

	private final ByteBufAllocator alloc = UnpooledByteBufAllocator.DEFAULT;

	/** Runs each test once with the synchronous and once with the async heap backend. */
	@Parameterized.Parameters
	public static Collection<Boolean> parameters() {
		return Arrays.asList(false, true);
	}

	/** Whether the heap keyed state backend uses asynchronous snapshots. */
	@Parameterized.Parameter
	public boolean async;

	/**
	 * Tests KvState request serialization.
	 */
	@Test
	public void testKvStateRequestSerialization() throws Exception {
		long requestId = Integer.MAX_VALUE + 1337L;
		KvStateID kvStateId = new KvStateID();
		byte[] serializedKeyAndNamespace = randomByteArray(1024);

		ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequest(
			alloc,
			requestId,
			kvStateId,
			serializedKeyAndNamespace);

		int frameLength = buf.readInt();
		assertEquals(KvStateRequestType.REQUEST, KvStateRequestSerializer.deserializeHeader(buf));
		KvStateRequest request = KvStateRequestSerializer.deserializeKvStateRequest(buf);
		// expected first, actual second (JUnit convention) for meaningful failure messages
		assertEquals(frameLength + 4, buf.readerIndex());

		assertEquals(requestId, request.getRequestId());
		assertEquals(kvStateId, request.getKvStateId());
		assertArrayEquals(serializedKeyAndNamespace, request.getSerializedKeyAndNamespace());
	}

	/**
	 * Tests KvState request serialization with zero-length serialized key and namespace.
	 */
	@Test
	public void testKvStateRequestSerializationWithZeroLengthKeyAndNamespace() throws Exception {
		byte[] serializedKeyAndNamespace = new byte[0];

		ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequest(
			alloc,
			1823,
			new KvStateID(),
			serializedKeyAndNamespace);

		int frameLength = buf.readInt();
		assertEquals(KvStateRequestType.REQUEST, KvStateRequestSerializer.deserializeHeader(buf));
		KvStateRequest request = KvStateRequestSerializer.deserializeKvStateRequest(buf);
		assertEquals(frameLength + 4, buf.readerIndex());

		assertArrayEquals(serializedKeyAndNamespace, request.getSerializedKeyAndNamespace());
	}

	/**
	 * Tests that we don't try to be smart about <code>null</code> key and namespace.
	 * They should be treated explicitly.
	 */
	@Test(expected = NullPointerException.class)
	public void testNullPointerExceptionOnNullSerializedKeyAndNamepsace() throws Exception {
		new KvStateRequest(0, new KvStateID(), null);
	}

	/**
	 * Tests KvState request result serialization.
	 */
	@Test
	public void testKvStateRequestResultSerialization() throws Exception {
		long requestId = Integer.MAX_VALUE + 72727278L;
		byte[] serializedResult = randomByteArray(1024);

		ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequestResult(
			alloc,
			requestId,
			serializedResult);

		int frameLength = buf.readInt();
		assertEquals(KvStateRequestType.REQUEST_RESULT, KvStateRequestSerializer.deserializeHeader(buf));
		KvStateRequestResult request = KvStateRequestSerializer.deserializeKvStateRequestResult(buf);
		assertEquals(frameLength + 4, buf.readerIndex());

		assertEquals(requestId, request.getRequestId());
		assertArrayEquals(serializedResult, request.getSerializedResult());
	}

	/**
	 * Tests KvState request result serialization with zero-length serialized result.
	 */
	@Test
	public void testKvStateRequestResultSerializationWithZeroLengthSerializedResult() throws Exception {
		byte[] serializedResult = new byte[0];

		ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequestResult(
			alloc,
			72727278,
			serializedResult);

		int frameLength = buf.readInt();
		assertEquals(KvStateRequestType.REQUEST_RESULT, KvStateRequestSerializer.deserializeHeader(buf));
		KvStateRequestResult request = KvStateRequestSerializer.deserializeKvStateRequestResult(buf);
		assertEquals(frameLength + 4, buf.readerIndex());

		assertArrayEquals(serializedResult, request.getSerializedResult());
	}

	/**
	 * Tests that we don't try to be smart about <code>null</code> results.
	 * They should be treated explicitly.
	 */
	@Test(expected = NullPointerException.class)
	public void testNullPointerExceptionOnNullSerializedResult() throws Exception {
		new KvStateRequestResult(0, null);
	}

	/**
	 * Tests KvState request failure serialization.
	 */
	@Test
	public void testKvStateRequestFailureSerialization() throws Exception {
		long requestId = Integer.MAX_VALUE + 1111222L;
		IllegalStateException cause = new IllegalStateException("Expected test");

		ByteBuf buf = KvStateRequestSerializer.serializeKvStateRequestFailure(
			alloc,
			requestId,
			cause);

		int frameLength = buf.readInt();
		assertEquals(KvStateRequestType.REQUEST_FAILURE, KvStateRequestSerializer.deserializeHeader(buf));
		KvStateRequestFailure request = KvStateRequestSerializer.deserializeKvStateRequestFailure(buf);
		assertEquals(frameLength + 4, buf.readerIndex());

		assertEquals(requestId, request.getRequestId());
		assertEquals(cause.getClass(), request.getCause().getClass());
		assertEquals(cause.getMessage(), request.getCause().getMessage());
	}

	/**
	 * Tests KvState server failure serialization.
	 */
	@Test
	public void testServerFailureSerialization() throws Exception {
		IllegalStateException cause = new IllegalStateException("Expected test");

		ByteBuf buf = KvStateRequestSerializer.serializeServerFailure(alloc, cause);

		int frameLength = buf.readInt();
		assertEquals(KvStateRequestType.SERVER_FAILURE, KvStateRequestSerializer.deserializeHeader(buf));
		Throwable request = KvStateRequestSerializer.deserializeServerFailure(buf);
		assertEquals(frameLength + 4, buf.readerIndex());

		assertEquals(cause.getClass(), request.getClass());
		assertEquals(cause.getMessage(), request.getMessage());
	}

	/**
	 * Tests key and namespace serialization utils.
	 */
	@Test
	public void testKeyAndNamespaceSerialization() throws Exception {
		TypeSerializer<Long> keySerializer = LongSerializer.INSTANCE;
		TypeSerializer<String> namespaceSerializer = StringSerializer.INSTANCE;

		long expectedKey = Integer.MAX_VALUE + 12323L;
		String expectedNamespace = "knilf";

		byte[] serializedKeyAndNamespace = KvStateRequestSerializer.serializeKeyAndNamespace(
			expectedKey, keySerializer, expectedNamespace, namespaceSerializer);

		Tuple2<Long, String> actual = KvStateRequestSerializer.deserializeKeyAndNamespace(
			serializedKeyAndNamespace, keySerializer, namespaceSerializer);

		assertEquals(expectedKey, actual.f0.longValue());
		assertEquals(expectedNamespace, actual.f1);
	}

	/**
	 * Tests key and namespace deserialization utils with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testKeyAndNamespaceDeserializationEmpty() throws Exception {
		KvStateRequestSerializer.deserializeKeyAndNamespace(
			new byte[] {}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
	}

	/**
	 * Tests key and namespace deserialization utils with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testKeyAndNamespaceDeserializationTooShort() throws Exception {
		KvStateRequestSerializer.deserializeKeyAndNamespace(
			new byte[] {1}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
	}

	/**
	 * Tests key and namespace deserialization utils with too many bytes.
	 */
	@Test(expected = IOException.class)
	public void testKeyAndNamespaceDeserializationTooMany1() throws Exception {
		// Long + null String + 1 byte
		KvStateRequestSerializer.deserializeKeyAndNamespace(
			new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 42, 0, 2}, LongSerializer.INSTANCE,
			StringSerializer.INSTANCE);
	}

	/**
	 * Tests key and namespace deserialization utils with too many bytes.
	 */
	@Test(expected = IOException.class)
	public void testKeyAndNamespaceDeserializationTooMany2() throws Exception {
		// Long + null String + 2 bytes
		KvStateRequestSerializer.deserializeKeyAndNamespace(
			new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 42, 0, 2, 2}, LongSerializer.INSTANCE,
			StringSerializer.INSTANCE);
	}

	/**
	 * Tests value serialization utils.
	 */
	@Test
	public void testValueSerialization() throws Exception {
		TypeSerializer<Long> valueSerializer = LongSerializer.INSTANCE;
		long expectedValue = Long.MAX_VALUE - 1292929292L;

		byte[] serializedValue = KvStateRequestSerializer.serializeValue(expectedValue, valueSerializer);
		long actualValue = KvStateRequestSerializer.deserializeValue(serializedValue, valueSerializer);

		assertEquals(expectedValue, actualValue);
	}

	/**
	 * Tests value deserialization with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeValueEmpty() throws Exception {
		KvStateRequestSerializer.deserializeValue(new byte[] {}, LongSerializer.INSTANCE);
	}

	/**
	 * Tests value deserialization with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeValueTooShort() throws Exception {
		// 1 byte (incomplete Long)
		KvStateRequestSerializer.deserializeValue(new byte[] {1}, LongSerializer.INSTANCE);
	}

	/**
	 * Tests value deserialization with too many bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeValueTooMany1() throws Exception {
		// Long + 1 byte
		KvStateRequestSerializer.deserializeValue(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2},
			LongSerializer.INSTANCE);
	}

	/**
	 * Tests value deserialization with too many bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeValueTooMany2() throws Exception {
		// Long + 2 bytes
		KvStateRequestSerializer.deserializeValue(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 2},
			LongSerializer.INSTANCE);
	}

	/**
	 * Tests list serialization utils.
	 */
	@Test
	public void testListSerialization() throws Exception {
		final long key = 0L;

		// objects for heap state list serialisation
		final HeapKeyedStateBackend<Long> longHeapKeyedStateBackend =
			new HeapKeyedStateBackend<>(
				mock(TaskKvStateRegistry.class),
				LongSerializer.INSTANCE,
				ClassLoader.getSystemClassLoader(),
				1,
				new KeyGroupRange(0, 0),
				async,
				new ExecutionConfig()
			);
		longHeapKeyedStateBackend.setCurrentKey(key);

		final InternalListState<VoidNamespace, Long> listState = longHeapKeyedStateBackend.createListState(
			VoidNamespaceSerializer.INSTANCE,
			new ListStateDescriptor<>("test", LongSerializer.INSTANCE));

		testListSerialization(key, listState);
	}

	/**
	 * Verifies that the serialization of a list using the given list state
	 * matches the deserialization with {@link KvStateRequestSerializer#deserializeList}.
	 *
	 * @param key
	 * 		key of the list state
	 * @param listState
	 * 		list state using the {@link VoidNamespace}, must also be a {@link InternalKvState} instance
	 *
	 * @throws Exception
	 */
	public static void testListSerialization(
			final long key,
			final InternalListState<VoidNamespace, Long> listState) throws Exception {

		TypeSerializer<Long> valueSerializer = LongSerializer.INSTANCE;
		listState.setCurrentNamespace(VoidNamespace.INSTANCE);

		// List
		final int numElements = 10;

		final List<Long> expectedValues = new ArrayList<>();
		for (int i = 0; i < numElements; i++) {
			final long value = ThreadLocalRandom.current().nextLong();
			expectedValues.add(value);
			listState.add(value);
		}

		final byte[] serializedKey =
			KvStateRequestSerializer.serializeKeyAndNamespace(
				key, LongSerializer.INSTANCE,
				VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE);

		final byte[] serializedValues = listState.getSerializedValue(serializedKey);

		List<Long> actualValues = KvStateRequestSerializer.deserializeList(serializedValues, valueSerializer);
		assertEquals(expectedValues, actualValues);

		// Single value
		long expectedValue = ThreadLocalRandom.current().nextLong();
		byte[] serializedValue = KvStateRequestSerializer.serializeValue(expectedValue, valueSerializer);
		List<Long> actualValue = KvStateRequestSerializer.deserializeList(serializedValue, valueSerializer);
		assertEquals(1, actualValue.size());
		assertEquals(expectedValue, actualValue.get(0).longValue());
	}

	/**
	 * Tests list deserialization with too few bytes.
	 */
	@Test
	public void testDeserializeListEmpty() throws Exception {
		List<Long> actualValue = KvStateRequestSerializer
			.deserializeList(new byte[] {}, LongSerializer.INSTANCE);
		assertEquals(0, actualValue.size());
	}

	/**
	 * Tests list deserialization with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeListTooShort1() throws Exception {
		// 1 byte (incomplete Long)
		KvStateRequestSerializer.deserializeList(new byte[] {1}, LongSerializer.INSTANCE);
	}

	/**
	 * Tests list deserialization with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeListTooShort2() throws Exception {
		// Long + 1 byte (separator) + 1 byte (incomplete Long)
		KvStateRequestSerializer.deserializeList(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 2, 3},
			LongSerializer.INSTANCE);
	}

	/**
	 * Tests map serialization utils.
	 */
	@Test
	public void testMapSerialization() throws Exception {
		final long key = 0L;

		// objects for heap state list serialisation
		final HeapKeyedStateBackend<Long> longHeapKeyedStateBackend =
			new HeapKeyedStateBackend<>(
				mock(TaskKvStateRegistry.class),
				LongSerializer.INSTANCE,
				ClassLoader.getSystemClassLoader(),
				1,
				new KeyGroupRange(0, 0),
				async,
				new ExecutionConfig()
			);
		longHeapKeyedStateBackend.setCurrentKey(key);

		final InternalMapState<VoidNamespace, Long, String> mapState = (InternalMapState<VoidNamespace, Long, String>) longHeapKeyedStateBackend.getPartitionedState(
			VoidNamespace.INSTANCE,
			VoidNamespaceSerializer.INSTANCE,
			new MapStateDescriptor<>("test", LongSerializer.INSTANCE, StringSerializer.INSTANCE));

		testMapSerialization(key, mapState);
	}

	/**
	 * Verifies that the serialization of a map using the given map state
	 * matches the deserialization with {@link KvStateRequestSerializer#deserializeMap}.
	 *
	 * @param key
	 * 		key of the map state
	 * @param mapState
	 * 		map state using the {@link VoidNamespace}, must also be a {@link InternalKvState} instance
	 *
	 * @throws Exception
	 */
	public static void testMapSerialization(
			final long key,
			final InternalMapState<VoidNamespace, Long, String> mapState) throws Exception {

		TypeSerializer<Long> userKeySerializer = LongSerializer.INSTANCE;
		TypeSerializer<String> userValueSerializer = StringSerializer.INSTANCE;
		mapState.setCurrentNamespace(VoidNamespace.INSTANCE);

		// Map
		final int numElements = 10;

		final Map<Long, String> expectedValues = new HashMap<>();
		for (int i = 1; i <= numElements; i++) {
			final long value = ThreadLocalRandom.current().nextLong();
			expectedValues.put(value, Long.toString(value));
			mapState.put(value, Long.toString(value));
		}

		// also cover a null value entry
		expectedValues.put(0L, null);
		mapState.put(0L, null);

		final byte[] serializedKey =
			KvStateRequestSerializer.serializeKeyAndNamespace(
				key, LongSerializer.INSTANCE,
				VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE);

		final byte[] serializedValues = mapState.getSerializedValue(serializedKey);

		Map<Long, String> actualValues = KvStateRequestSerializer.deserializeMap(serializedValues, userKeySerializer, userValueSerializer);
		assertEquals(expectedValues.size(), actualValues.size());
		for (Map.Entry<Long, String> actualEntry : actualValues.entrySet()) {
			assertEquals(expectedValues.get(actualEntry.getKey()), actualEntry.getValue());
		}

		// Single value: key bytes + null-marker byte + value bytes
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		long expectedKey = ThreadLocalRandom.current().nextLong();
		String expectedValue = Long.toString(expectedKey);
		byte[] isNull = {0};

		baos.write(KvStateRequestSerializer.serializeValue(expectedKey, userKeySerializer));
		baos.write(isNull);
		baos.write(KvStateRequestSerializer.serializeValue(expectedValue, userValueSerializer));
		byte[] serializedValue = baos.toByteArray();

		Map<Long, String> actualValue = KvStateRequestSerializer.deserializeMap(serializedValue, userKeySerializer, userValueSerializer);
		assertEquals(1, actualValue.size());
		assertEquals(expectedValue, actualValue.get(expectedKey));
	}

	/**
	 * Tests map deserialization with too few bytes.
	 */
	@Test
	public void testDeserializeMapEmpty() throws Exception {
		Map<Long, String> actualValue = KvStateRequestSerializer
			.deserializeMap(new byte[] {}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
		assertEquals(0, actualValue.size());
	}

	/**
	 * Tests map deserialization with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeMapTooShort1() throws Exception {
		// 1 byte (incomplete Key)
		KvStateRequestSerializer.deserializeMap(new byte[] {1}, LongSerializer.INSTANCE, StringSerializer.INSTANCE);
	}

	/**
	 * Tests map deserialization with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeMapTooShort2() throws Exception {
		// Long (Key) + 1 byte (incomplete Value)
		KvStateRequestSerializer.deserializeMap(new byte[]{1, 1, 1, 1, 1, 1, 1, 1, 0},
			LongSerializer.INSTANCE, LongSerializer.INSTANCE);
	}

	/**
	 * Tests map deserialization with too few bytes.
	 */
	@Test(expected = IOException.class)
	public void testDeserializeMapTooShort3() throws Exception {
		// Long (Key1) + Boolean (false) + Long (Value1) + 1 byte (incomplete Key2)
		KvStateRequestSerializer.deserializeMap(new byte[] {1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3},
			LongSerializer.INSTANCE, LongSerializer.INSTANCE);
	}

	/**
	 * Creates a byte array of the given size filled with random bytes.
	 *
	 * @param capacity size of the array to create
	 * @return a freshly allocated random byte array
	 */
	private byte[] randomByteArray(int capacity) {
		byte[] bytes = new byte[capacity];
		ThreadLocalRandom.current().nextBytes(bytes);
		return bytes;
	}
}
| apache-2.0 |
boyangyuu/ILoveMovie | app/src/main/java/com/example/yuboyang/ilovemovie1/listing/MoviesListingInteractor.java | 288 | package com.example.yuboyang.ilovemovie1.listing;
import com.example.yuboyang.ilovemovie1.Movie;
import java.util.List;
import io.reactivex.Observable;
/**
 * Created by yuboyang on 10/8/17.
 */
/**
 * Contract for the data layer of the movie listing feature.
 */
public interface MoviesListingInteractor {

    /**
     * Fetches the list of movies.
     *
     * @return an {@link Observable} emitting the fetched movie list
     */
    Observable<List<Movie>> fetchMovies();
}
| apache-2.0 |
caskdata/cdap | cdap-watchdog/src/main/java/co/cask/cdap/logging/run/LogSaverTwillRunnable.java | 8306 | /*
* Copyright © 2014-2016 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.logging.run;
import co.cask.cdap.api.metrics.MetricsCollectionService;
import co.cask.cdap.common.conf.CConfiguration;
import co.cask.cdap.common.conf.Constants;
import co.cask.cdap.common.guice.ConfigModule;
import co.cask.cdap.common.guice.DiscoveryRuntimeModule;
import co.cask.cdap.common.guice.IOModule;
import co.cask.cdap.common.guice.KafkaClientModule;
import co.cask.cdap.common.guice.LocationRuntimeModule;
import co.cask.cdap.common.guice.ZKClientModule;
import co.cask.cdap.common.namespace.guice.NamespaceClientRuntimeModule;
import co.cask.cdap.common.security.RemoteUGIProvider;
import co.cask.cdap.common.security.UGIProvider;
import co.cask.cdap.data.runtime.DataFabricModules;
import co.cask.cdap.data.runtime.DataSetsModules;
import co.cask.cdap.data2.audit.AuditModule;
import co.cask.cdap.logging.LoggingConfiguration;
import co.cask.cdap.logging.guice.LogSaverServiceModule;
import co.cask.cdap.logging.guice.LoggingModules;
import co.cask.cdap.logging.save.KafkaLogSaverService;
import co.cask.cdap.logging.service.LogSaverStatusService;
import co.cask.cdap.metrics.guice.MetricsClientRuntimeModule;
import co.cask.cdap.security.auth.context.AuthenticationContextModules;
import co.cask.cdap.security.authorization.AuthorizationEnforcementModule;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.SettableFuture;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Scopes;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ShutdownHookManager;
import org.apache.twill.api.AbstractTwillRunnable;
import org.apache.twill.api.TwillContext;
import org.apache.twill.api.TwillRunnableSpecification;
import org.apache.twill.internal.Services;
import org.apache.twill.kafka.client.KafkaClientService;
import org.apache.twill.zookeeper.ZKClientService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
* Twill wrapper for running LogSaver through Twill.
*/
public final class LogSaverTwillRunnable extends AbstractTwillRunnable {
private static final Logger LOG = LoggerFactory.getLogger(LogSaverTwillRunnable.class);
private SettableFuture<?> completion;
private String name;
private String hConfName;
private String cConfName;
private ZKClientService zkClientService;
private KafkaClientService kafkaClientService;
private KafkaLogSaverService logSaverService;
private LogSaverStatusService logSaverStatusService;
private MetricsCollectionService metricsCollectionService;
public LogSaverTwillRunnable(String name, String hConfName, String cConfName) {
this.name = name;
this.hConfName = hConfName;
this.cConfName = cConfName;
}
@Override
public TwillRunnableSpecification configure() {
return TwillRunnableSpecification.Builder.with()
.setName(name)
.withConfigs(ImmutableMap.of(
"hConf", hConfName,
"cConf", cConfName
))
.build();
}
@Override
public void initialize(TwillContext context) {
super.initialize(context);
completion = SettableFuture.create();
name = context.getSpecification().getName();
Map<String, String> configs = context.getSpecification().getConfigs();
LOG.info("Initialize runnable: " + name);
try {
// Load configuration
Configuration hConf = new Configuration();
hConf.clear();
hConf.addResource(new File(configs.get("hConf")).toURI().toURL());
UserGroupInformation.setConfiguration(hConf);
CConfiguration cConf = CConfiguration.create(new File(configs.get("cConf")));
cConf.set(Constants.LogSaver.ADDRESS, context.getHost().getCanonicalHostName());
// Initialize ZK client
String zookeeper = cConf.get(Constants.Zookeeper.QUORUM);
if (zookeeper == null) {
LOG.error("No ZooKeeper quorum provided.");
throw new IllegalStateException("No ZooKeeper quorum provided.");
}
Injector injector = createGuiceInjector(cConf, hConf);
zkClientService = injector.getInstance(ZKClientService.class);
kafkaClientService = injector.getInstance(KafkaClientService.class);
logSaverService = injector.getInstance(KafkaLogSaverService.class);
int numPartitions = Integer.parseInt(cConf.get(LoggingConfiguration.NUM_PARTITIONS,
LoggingConfiguration.DEFAULT_NUM_PARTITIONS));
LOG.info("Num partitions = {}", numPartitions);
logSaverStatusService = injector.getInstance(LogSaverStatusService.class);
metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
LOG.info("Runnable initialized: " + name);
} catch (Throwable t) {
LOG.error(t.getMessage(), t);
throw Throwables.propagate(t);
}
}
@Override
public void run() {
  LOG.info("Starting runnable " + name);
  // Register shutdown hook to stop Log Saver before Hadoop Filesystem shuts down.
  // Priority is one above the FileSystem hook so this hook runs first.
  ShutdownHookManager.get().addShutdownHook(new Runnable() {
    @Override
    public void run() {
      LOG.info("Shutdown hook triggered.");
      stop();
    }
  }, FileSystem.SHUTDOWN_HOOK_PRIORITY + 1);
  // Start all services in dependency order and block until every one is running.
  Futures.getUnchecked(Services.chainStart(zkClientService, kafkaClientService, metricsCollectionService,
                                           logSaverService, logSaverStatusService));
  LOG.info("Runnable started " + name);
  try {
    // Block here until stop() completes the future.
    completion.get();
    LOG.info("Runnable stopped " + name);
  } catch (InterruptedException e) {
    LOG.error("Waiting on completion interrupted", e);
    // Restore the interrupt status so callers up the stack can observe it.
    Thread.currentThread().interrupt();
  } catch (ExecutionException e) {
    // Propagating the execution exception causes the TwillRunnable to terminate
    // with an error, so the AM can detect the failure and restart it.
    LOG.error("Completed with exception. Exception get propagated", e);
    throw Throwables.propagate(e);
  }
}
@Override
public void stop() {
  LOG.info("Stopping runnable " + name);
  // Stop services in the reverse of the start order and block until all have stopped.
  Futures.getUnchecked(Services.chainStop(logSaverStatusService, logSaverService,
                                          metricsCollectionService, kafkaClientService, zkClientService));
  // Unblock run(), which is waiting on this future.
  completion.set(null);
}
/**
 * Assembles the Guice injector with all modules required by the log saver in
 * distributed mode (config, ZK/Kafka clients, metrics, discovery, data fabric,
 * logging, audit and authorization). Exposed for unit tests.
 */
@VisibleForTesting
static Injector createGuiceInjector(CConfiguration cConf, Configuration hConf) {
  return Guice.createInjector(
    new ConfigModule(cConf, hConf),
    new IOModule(),
    new ZKClientModule(),
    new KafkaClientModule(),
    new MetricsClientRuntimeModule().getDistributedModules(),
    new DiscoveryRuntimeModule().getDistributedModules(),
    new LocationRuntimeModule().getDistributedModules(),
    new NamespaceClientRuntimeModule().getDistributedModules(),
    new DataFabricModules().getDistributedModules(),
    new DataSetsModules().getDistributedModules(),
    new LogSaverServiceModule(),
    new LoggingModules().getDistributedModules(),
    new AuditModule().getDistributedModules(),
    new AuthorizationEnforcementModule().getDistributedModules(),
    new AuthenticationContextModules().getMasterModule(),
    new AbstractModule() {
      @Override
      protected void configure() {
        // UGIs are resolved remotely in distributed mode.
        bind(UGIProvider.class).to(RemoteUGIProvider.class).in(Scopes.SINGLETON);
      }
    }
  );
}
}
| apache-2.0 |
GuMengYu/AndroidLearnTest | app/src/main/java/com/example/administrator/androidlearntest/Service/MyIntentService.java | 615 | package com.example.administrator.androidlearntest.Service;
import android.app.IntentService;
import android.content.Intent;
import android.util.Log;
/**
* Created by Administrator on 2017/5/5.
*/
/**
 * Minimal {@link IntentService} demo: logs the id of the worker thread that
 * handles each intent, and logs when the service is destroyed.
 */
public class MyIntentService extends IntentService {

    /** Names the worker thread "MyIntentService". */
    public MyIntentService() {
        super("MyIntentService");
    }

    @Override
    protected void onHandleIntent(Intent intent) {
        // Runs on the service's background worker thread, not the main thread.
        long workerThreadId = Thread.currentThread().getId();
        Log.d("MyIntentService", "Thread is" + workerThreadId);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        Log.d("MyIntentService", "onDestroy executed");
    }
}
| apache-2.0 |
reynoldsm88/droolsjbpm-integration | kie-server-parent/kie-server-controller-plugin/src/main/java/org/kie/server/gateway/KieServerGateway.java | 2433 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.gateway;
import java.util.concurrent.TimeUnit;
import javax.ws.rs.core.MediaType;
import org.jboss.resteasy.client.jaxrs.ResteasyClient;
import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder;
import org.kie.server.api.model.KieContainerResource;
import org.kie.server.api.model.ServiceResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Thin REST gateway to a remote KIE (process) server, backed by a
 * single-connection RESTEasy client with basic authentication.
 */
public class KieServerGateway {

    private static final Logger LOG = LoggerFactory.getLogger(KieServerGateway.class);

    private final ResteasyClient client;

    /**
     * Builds the underlying REST client.
     *
     * @param username          basic-auth user
     * @param password          basic-auth password
     * @param connectionTimeout connection timeout in seconds
     * @param socketTimeout     socket read timeout in seconds
     */
    public KieServerGateway(String username, String password, Integer connectionTimeout, Integer socketTimeout) {
        ResteasyClientBuilder builder = new ResteasyClientBuilder()
                .connectionPoolSize(1)
                .establishConnectionTimeout(connectionTimeout, TimeUnit.SECONDS)
                .socketTimeout(socketTimeout, TimeUnit.SECONDS);
        builder.register(new Authenticator(username, password));
        builder.register(new ErrorResponseFilter());
        this.client = builder.build();
    }

    /**
     * Verify the container status on remote process server.
     *
     * @param serverUrl remote process server URL
     * @param container container id
     * @return KieContainerResource with status or null if the container is not instantiated
     */
    public KieContainerResource getContainer(String serverUrl, String container) {
        // When the container is not instantiated the response does not parse a ServiceResponse.
        ServiceResponse<KieContainerResource> response = client
                .target(serverUrl)
                .path("containers")
                .path(container)
                .request(MediaType.APPLICATION_JSON)
                .get(ServiceResponse.class);
        return response.getResult();
    }

    /** Releases the underlying HTTP client and its pooled connection. */
    public void close() {
        client.close();
    }
}
| apache-2.0 |
ehsane/rainbownlp | src/main/java/rainbownlp/analyzer/sentenceclause/SentenceClauseManager.java | 23625 | package rainbownlp.analyzer.sentenceclause;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import rainbownlp.core.Artifact;
import rainbownlp.parser.DependencyLine;
import rainbownlp.parser.StanfordParser;
import rainbownlp.util.StanfordDependencyUtil;
public class SentenceClauseManager {
private Artifact relatedSentence;
private String sentContent;
private String posTags;
private String stanDependenciesStr;
public ArrayList<DependencyLine> sentDepLines = new ArrayList<DependencyLine>();
ArrayList<Clause> clauses;
public HashMap<Integer, String> offsetMap = new HashMap<Integer, String>();
//this keeps the offsets as the key and the value is the lemma
public HashMap<Integer, String> lemmaMap = new HashMap<Integer, String>();
// the same as above just has all the original tokens
public HashMap<Integer, String> tokenMap = new HashMap<Integer, String>();
//the mapping of lemmas to originals
//TODO: if we have repeated that are different originals it will be overwritten
public HashMap<String, String> lemmaTokenMap = new HashMap<String, String>();
// this hash keep the location of each observed offset in the sentence
public HashMap<Integer, Clause> clauseMap = new HashMap<Integer, Clause>();
//////////////////////////////////////
//this array will keep all the lines that the governor or dependent clause could not be resolved
ArrayList<DependencyLine> phrases = new ArrayList<DependencyLine>();
public String filename;
public String[] normalized_dependencies;
/**
 * Renders every unresolved dependency line (see {@code phrases}) as a
 * two-word string, ordering the words by their sentence offsets.
 *
 * @return one "left right" string per unresolved dependency line
 */
public ArrayList<String> getPhrases()
{
    ArrayList<String> phraseStrings = new ArrayList<String>();
    for (DependencyLine line : phrases)
    {
        boolean governorComesFirst = line.firstOffset < line.secondOffset;
        String left = governorComesFirst ? line.firstPart : line.secondPart;
        String right = governorComesFirst ? line.secondPart : line.firstPart;
        phraseStrings.add(left + " " + right);
    }
    return phraseStrings;
}
/**
 * Builds a clause manager for the given sentence artifact and immediately
 * parses it into clauses.
 *
 * @param relatedSentence sentence-level artifact providing content, POS tags
 *                        and Stanford dependencies
 * @throws Exception if parsing or clause extraction fails
 */
public SentenceClauseManager(Artifact relatedSentence) throws Exception
{
    setRelatedSentence(relatedSentence);
    setSentContent(relatedSentence.getContent());
    setPosTags(relatedSentence.getPOS());
    loadClauses();
}
/**
 * Populates POS tags, dependency strings and the token/lemma maps for the
 * sentence, then runs clause analysis. The Stanford parser is invoked only
 * when the artifact carries no precomputed POS tags.
 */
void loadClauses() throws Exception
{
    if (relatedSentence.getPOS() ==null)
    {
        // No cached annotation on the artifact: parse the raw sentence text.
        StanfordParser s_parser = new StanfordParser();
        s_parser.parse(sentContent);
        setPosTags(s_parser.getTagged());
        setStanDependenciesStr(s_parser.getDependencies());
    }
    else
    {
        // Reuse the annotations already stored on the artifact.
        setPosTags(relatedSentence.getPOS());
        setStanDependenciesStr(relatedSentence.getStanDependency());
    }
    tokenMap = StanfordDependencyUtil.getTokens(posTags);
    // Populate lemma maps: offset -> lemma, and lemma -> original token.
    lemmaMap = StanfordDependencyUtil.getLemmaMap(posTags);
    lemmaTokenMap = StanfordDependencyUtil.getLemmaTokenmaps(posTags);
    analyzeSentence();
}
// TODO: generally improve this method, it is not perfect
private void analyzeSentence() throws Exception {
sentDepLines =StanfordDependencyUtil.parseDepLinesFromString(getStanDependenciesStr());
clauses = new ArrayList<Clause>();
Clause curClause = new Clause();
ArrayList<DependencyLine> toBeProcessesd = sentDepLines;
for(int i=0; i<sentDepLines.size();i++)
{
DependencyLine curLine = sentDepLines.get(i);
if(curLine.relationName == null) continue;
offsetMap.put(curLine.firstOffset, curLine.firstPart);
offsetMap.put(curLine.secondOffset, curLine.secondPart);
if(curLine.relationName.equals("nsubj") || curLine.relationName.equals("xsubj"))
{
// if (curLine.firstOffset -curLine.secondOffset>10)
// continue;
Clause governor_cl = clauseMap.get(curLine.firstOffset);
Artifact related_word = relatedSentence.getChildByWordIndex(curLine.firstOffset-1);
String pos = related_word.getPOS();
//if the verb is already observed
if (governor_cl !=null)
{
governor_cl.clauseSubject.add(curLine);
clauseMap.put(curLine.secondOffset, governor_cl);
}
else
{
governor_cl = new Clause();
// subj and verb will be added to the new clause
governor_cl.clauseSubject.add(curLine);
if (pos!= null && (pos.startsWith("VB") || pos.startsWith("MD")))
{
governor_cl.clauseVerb.verbMainPart = curLine.firstPart;
governor_cl.clauseVerb.offset = curLine.firstOffset;
clauseMap.put(curLine.firstOffset, governor_cl);
clauseMap.put(curLine.secondOffset, governor_cl);
}
//TODO: process more
else if(pos!= null && (pos.startsWith("JJ") || pos.startsWith("NN")))
{
//if the relation cop also is present where the first part is the complement
boolean is_comp = false;
for (DependencyLine d:sentDepLines)
{
if (d.relationName.equals("cop") && d.firstOffset==curLine.firstOffset)
is_comp = true;
}
if (is_comp==true)
{
governor_cl.complement = curLine.firstPart;
governor_cl.complementOffset = curLine.firstOffset;
clauseMap.put(curLine.firstOffset, governor_cl);
clauseMap.put(curLine.secondOffset, governor_cl);
}
}
}
//get all dep lines that are related to this
}
if(curLine.relationName.equals("dobj")||
curLine.relationName.equals("iobj")||
curLine.relationName.equals("nsubjpass"))
{
Clause governor_cl = getGovernorVerbOrComplement(curLine);
// String dep_tag = getPOSTag(curLine.secondOffset);
// if (dep_tag != null && dep_tag.startsWith("JJ"))
// {
// governor_cl.complement = curLine.secondPart;
// governor_cl.complementOffset= curLine.secondOffset;
//
// }
// else
// {
SentenceObject new_object = new SentenceObject();
new_object.content = curLine.secondPart;
new_object.contentOffset = curLine.secondOffset;
governor_cl.clauseObject.add(new_object);
// }
clauseMap.put(curLine.secondOffset, governor_cl);
}
if(curLine.relationName.equals("cop"))
{
Clause governor = clauseMap.get(curLine.firstOffset);
Clause dependent = clauseMap.get(curLine.secondOffset);
if (governor != null ||dependent != null)
{
// it means that we have observed the verb
if (dependent != null && governor == null)
{
dependent.complement = curLine.firstPart;
dependent.complementOffset = curLine.firstOffset;
clauseMap.put(curLine.firstOffset, dependent);
}
else if(governor != null)
{
governor.clauseVerb.verbMainPart = curLine.secondPart;
governor.clauseVerb.offset = curLine.secondOffset;
clauseMap.put(curLine.secondOffset, governor);
}
}
//we should add the verb and the complement
else
{
curClause = new Clause();
// complement and verb will be added to the new clause
curClause.complement =curLine.firstPart;
curClause.complementOffset = curLine.firstOffset;
curClause.clauseVerb.verbMainPart = curLine.secondPart;
curClause.clauseVerb.offset = curLine.secondOffset;
clauseMap.put(curLine.firstOffset, curClause);
clauseMap.put(curLine.secondOffset, curClause);
}
}
// toBeProcessesd.remove(i);
}
// xcomp, ccomp
for(int i=0; i<toBeProcessesd.size();i++)
{
DependencyLine curLine = sentDepLines.get(i);
handleComp(curLine);
// toBeProcessesd.remove(i);
}
// for(DependencyLine curLine:sentDepLines)
// {
// handleComp(curLine);
// }
// for(DependencyLine curLine:sentDepLines)
// {
// handleVerbDependencies(curLine);
// handleNegation(curLine);
// handleModifiers(curLine);
// handleIobj(curLine);
// handleMarks(curLine);
// }
for(int i=0; i<toBeProcessesd.size();i++)
{
DependencyLine curLine = sentDepLines.get(i);
handleVerbDependencies(curLine);
handleNegation(curLine);
handleModifiers(curLine);
handleIobj(curLine);
handleMarks(curLine);
// toBeProcessesd.remove(i);
}
for(int i=0; i<toBeProcessesd.size();i++)
{
DependencyLine curLine = sentDepLines.get(i);
handleNPClMod(curLine);
}
//add unique sentence clauses to clause
for (Clause c : clauseMap.values()) {
if (!clauses.contains(c) && c!= null) {
clauses.add(c);
}
}
}
void handleComp(DependencyLine curLine) throws SQLException
{
//“He says that you like to swim” ccomp(says, like)
Artifact related_word = relatedSentence.getChildByWordIndex(curLine.secondOffset-1);
String d_tag= related_word.getPOS();
if(curLine.relationName.equals("ccomp")|| curLine.relationName.equals("xcomp"))
{
Clause governor_clause= clauseMap.get(curLine.firstOffset);
Clause dependent_clause = clauseMap.get(curLine.secondOffset);
if (clauseMap.containsKey(curLine.firstOffset)&&
clauseMap.containsKey(curLine.secondOffset))
{
governor_clause.clauseComplements.add(dependent_clause);
dependent_clause.governer = governor_clause;
// if (d_tag.startsWith("JJ"))
// {
// governor_clause.complement = curLine.secondPart;
// governor_clause.complementOffset = curLine.secondOffset;
// }
}
else if (clauseMap.containsKey(curLine.firstOffset)&&
!clauseMap.containsKey(curLine.secondOffset))
{
dependent_clause = new Clause();
if (d_tag != null && d_tag.startsWith("VB"))
{
dependent_clause.clauseVerb.verbMainPart =curLine.secondPart;
dependent_clause.clauseVerb.offset =curLine.secondOffset;
clauseMap.put(curLine.secondOffset, dependent_clause);
governor_clause.clauseComplements.add(dependent_clause);
dependent_clause.governer = governor_clause;
}
// if (d_tag.startsWith("JJ"))
// {
// governor_clause.complement = curLine.secondPart;
// governor_clause.complementOffset = curLine.secondOffset;
//
// }
}
else if (!clauseMap.containsKey(curLine.firstOffset)&&
clauseMap.containsKey(curLine.secondOffset))
{
governor_clause = getGovernorVerbOrComplement(curLine);
ArrayList<Clause> cl_comps = new ArrayList<Clause>();
if (!governor_clause.clauseComplements.isEmpty())
{
cl_comps = governor_clause.clauseComplements;
}
cl_comps.add(dependent_clause);
governor_clause.clauseComplements = cl_comps;
}
else if (!clauseMap.containsKey(curLine.firstOffset)&&
!clauseMap.containsKey(curLine.secondOffset))
{
//create both clauses and add
governor_clause = getGovernorVerbOrComplement(curLine);
dependent_clause = new Clause();
if (d_tag.startsWith("VB"))
{
dependent_clause.clauseVerb.verbMainPart = curLine.secondPart;
dependent_clause.clauseVerb.offset = curLine.secondOffset;
clauseMap.put(curLine.secondOffset, dependent_clause);
}
// else
// {
// dependent_clause.complement = curLine.secondPart;
// dependent_clause.complementOffset = curLine.secondOffset;
// }
// clauseMap.put(curLine.secondOffset, dependent_clause);
}
}
else
{
// throw exception
}
}
/**
 * Attaches verb particles (prt) and auxiliaries (aux/auxpass) to the verb of
 * the governor's clause; auxpass additionally marks the verb as passive.
 * Other relation types are ignored.
 *
 * @param depLine dependency line whose governor is expected to be a verb
 * @throws SQLException if the word artifact lookup fails
 */
void handleVerbDependencies(DependencyLine depLine) throws SQLException
{
    if(depLine.relationName.equals("prt")|| depLine.relationName.equals("aux")
        || depLine.relationName.equals("auxpass"))
    {
        Clause governor_clause = getGovernorVerbOrComplement(depLine);
        if(depLine.relationName.equals("aux") || depLine.relationName.equals("auxpass"))
        {
            governor_clause.clauseVerb.auxs.add(depLine.secondPart);
            if (depLine.relationName.equals("auxpass"))
            {
                // Passive auxiliary ("was eaten") marks the whole verb passive.
                governor_clause.clauseVerb.isPassive = true;
            }
        }
        else if(depLine.relationName.equals("prt"))
        {
            // Phrasal-verb particle, e.g. "shut down" -> prt = "down".
            governor_clause.clauseVerb.prt = depLine.secondPart;
        }
        // Map the dependent token to the same clause as the verb.
        clauseMap.put(depLine.secondOffset, governor_clause);
    }
}
/**
 * Marks clauses as negated. Two cases: an explicit "neg" relation negates the
 * governor's clause (and its verb when the verb is the governor), and a
 * determiner "no" ("det" relation with dependent "no") negates the clause of
 * its governor and is also recorded as a modifier.
 *
 * @param depLine dependency line to inspect
 * @throws SQLException if the word artifact lookup fails
 */
void handleNegation(DependencyLine depLine) throws SQLException
{
    if(depLine.relationName.equals("neg"))
    {
        Clause governor = getGovernorVerbOrComplement(depLine);
        if (governor.clauseVerb.offset == depLine.firstOffset)
        {
            // The negation attaches directly to the clause verb.
            governor.clauseVerb.isNegated = true;
        }
        governor.isNegated = true;
        clauseMap.put(depLine.secondOffset, governor);
    }
    if(depLine.relationName.equals("det") && depLine.secondPart.equalsIgnoreCase("no") )
    {
        Clause governor = clauseMap.get(depLine.firstOffset);
        if (governor != null)
        {
            // Record "no" as a modifier of the governor token as well.
            ArrayList<String> modifiers = new ArrayList<String>();
            if (governor.modifierDepMap.containsKey(depLine.firstOffset))
            {
                modifiers = governor.modifierDepMap.get(depLine.firstOffset);
            }
            modifiers.add(depLine.secondPart);
            governor.modifierDepMap.put(depLine.firstOffset,modifiers);
            governor.isNegated = true;
            clauseMap.put(depLine.secondOffset, governor);
        }
        else
        {
            // Governor not mapped to any clause: keep the line as an unresolved phrase.
            phrases.add(depLine);
        }
    }
}
/**
 * Resolves (or lazily creates) the clause for the governor token of a
 * dependency line. A new clause is created when the governor has no clause
 * yet, or when it is a verb/modal whose offset differs from the mapped
 * clause's verb offset (i.e. the mapped clause belongs to a different verb).
 * In the new clause, a verb/modal governor becomes the clause verb; any other
 * POS becomes the complement.
 *
 * @param depLine dependency line whose governor clause is required
 * @return the clause now associated with the governor token (never null)
 * @throws SQLException if the word artifact lookup fails
 */
Clause getGovernorVerbOrComplement(DependencyLine depLine) throws SQLException
{
    Clause governor_clause = clauseMap.get(depLine.firstOffset);
    boolean create_new_required =false;
    // If the governor is a verb but the mapped clause's verb is a different
    // token, the existing mapping is stale and a fresh clause is needed.
    if (governor_clause != null)
    {
        Artifact related_word = relatedSentence.getChildByWordIndex(depLine.firstOffset-1);
        String g_tag = related_word.getPOS();
        if (g_tag!= null && (g_tag.startsWith("VB") || g_tag.startsWith("MD")))
        {
            if (governor_clause.clauseVerb.offset != depLine.firstOffset)
            {
                create_new_required =true;
            }
        }
    }
    if (governor_clause == null || create_new_required)
    {
        governor_clause = new Clause();
        Artifact related_word =relatedSentence.getChildByWordIndex(depLine.firstOffset-1);
        String g_tag = related_word.getPOS();
        if (g_tag!= null && (g_tag.startsWith("VB") || g_tag.startsWith("MD")))
        {
            governor_clause.clauseVerb.verbMainPart = depLine.firstPart;
            governor_clause.clauseVerb.offset = depLine.firstOffset;
        }
        else//TODO: non-verb governors all land in complement; should be checked further
        {
            governor_clause.complement = depLine.firstPart;
            governor_clause.complementOffset = depLine.firstOffset;
        }
        clauseMap.put(depLine.firstOffset, governor_clause);
    }
    return governor_clause;
}
void handleModifiers(DependencyLine depLine) throws SQLException
{
if (!(depLine.relationName.equals("amod")||
depLine.relationName.equals("advmod")
|| depLine.relationName.equals("dep")
|| depLine.relationName.equals("nn")
|| depLine.relationName.equals("det")
|| depLine.relationName.equals("tmod")
|| depLine.relationName.equals("poss")
|| depLine.relationName.startsWith("prepc_")
|| depLine.relationName.startsWith("prep_")))
{
return;
}
// TODO: may nor working fine
if (depLine.relationName.startsWith("prep_"))
{
Artifact related_word =relatedSentence.getChildByWordIndex(depLine.firstOffset-1);
String gov_pos = related_word.getPOS();
// if (!gov_pos.startsWith("NN"))
// {
// return;
// }
}
Clause governor_cl = clauseMap.get(depLine.firstOffset);
Clause dependent_cl = clauseMap.get(depLine.secondOffset);
if (governor_cl == null)
{
//TODO: Find a solid solution....
//try to find the related clause of the current governor
List<DependencyLine> related_dep_lines = StanfordDependencyUtil.getAllGovernors(sentDepLines, depLine.firstPart);
for (DependencyLine rel_dep:related_dep_lines)
{
if(rel_dep.secondOffset==depLine.firstOffset)
{
governor_cl = clauseMap.get(rel_dep.firstOffset);
break;
}
}
governor_cl = findMissingClause(depLine);
//if it is still not found
if (governor_cl==null)
{
phrases.add(depLine);
}
}
if (governor_cl != null && governor_cl != null)
{
ArrayList<String> modifiers = new ArrayList<String>();
if(governor_cl.modifierDepMap.containsKey(depLine.firstOffset))
{
modifiers =governor_cl.modifierDepMap.get(depLine.firstOffset);
}
modifiers.add(depLine.secondPart);
governor_cl.modifierDepMap.put(depLine.firstOffset, modifiers);
if (depLine.relationName.equals("amod")||
depLine.relationName.equals("advmod")
|| depLine.relationName.equals("nn") )
{
governor_cl.adjModifierDepMap.put(depLine.firstOffset, modifiers);
}
if (dependent_cl==null)
{
clauseMap.put(depLine.secondOffset, governor_cl);
}
}
}
/**
 * Handles clausal noun-phrase modifiers: infinitival modifiers ("infmod") and
 * relative-clause modifiers ("rcmod"). Links the dependent clause as a
 * complement of the governor's clause, building the dependent clause from the
 * token's POS tag when it is not mapped yet. When governor and dependent
 * resolve to the same clause, attempts to re-map the governor to a sibling
 * clause found among its other governing relations.
 *
 * @param depLine dependency line to inspect
 * @throws SQLException if the word artifact lookup fails
 */
void handleNPClMod(DependencyLine depLine) throws SQLException
{
    if (!(depLine.relationName.equals("infmod") || depLine.relationName.equals("rcmod")))
    {
        return;
    }
    Clause governor_cl = clauseMap.get(depLine.firstOffset);
    Clause dependent_cl = clauseMap.get(depLine.secondOffset);
    if (governor_cl == null)
    {
        // Fall back to searching the governors of the first part.
        governor_cl = findMissingClause(depLine);
    }
    if (governor_cl==null)
    {
        // Still unresolved: record the line as a plain phrase.
        phrases.add(depLine);
    }
    else if ((dependent_cl != null && governor_cl !=dependent_cl))
    {
        governor_cl.clauseComplements.add(dependent_cl);
        dependent_cl.governer = governor_cl;
    }
    else if (dependent_cl == null)
    {
        // Try to build the dependent clause from the token's POS tag.
        dependent_cl = buildDependentClause(depLine);
        if (dependent_cl != null)
        {
            clauseMap.put(depLine.secondOffset, dependent_cl);
            governor_cl.clauseComplements.add(dependent_cl);
            dependent_cl.governer = governor_cl;
        }
        else//this should not happen
        {
            phrases.add(depLine);
        }
    }
    // Governor and dependent should be different clauses; if they collapsed
    // into one, try to re-map the governor token to another clause.
    if (dependent_cl==governor_cl)
    {
        //get all governors of the second part
        List<DependencyLine> governing_dep_lines =
            StanfordDependencyUtil.getAllGovernors(sentDepLines, depLine.firstPart, depLine.firstOffset);
        // from there select another one if it exists
        for(DependencyLine dep:governing_dep_lines)
        {
            if(dep.relationName.equals("nsubj") || dep.relationName.equals("xsubj")||
                dep.relationName.equals("dobj")||
                dep.relationName.equals("iobj")||
                dep.relationName.equals("nsubjpass")||
                dep.relationName.equals("cop"))
            {
                // Only accept a clause different from the current one.
                if (dep.firstOffset != depLine.secondOffset)
                {
                    Clause new_cl = clauseMap.get(dep.firstOffset);
                    if (new_cl!=null)
                    {
                        clauseMap.put(depLine.firstOffset, new_cl);
                    }
                }
            }
        }
    }
}
/**
 * Tries to resolve the clause of a dependency line whose governor token has
 * no clause mapping yet, by walking the governors of the line's first part
 * and returning the first one that is already associated with a clause.
 *
 * @param depLine dependency line whose governor clause is missing
 * @return the first mapped clause among the governors, or null if none is mapped
 */
private Clause findMissingClause(DependencyLine depLine)
{
    List<DependencyLine> relatedDepLines =
        StanfordDependencyUtil.getAllGovernors(sentDepLines, depLine.firstPart, depLine.firstOffset);
    for (DependencyLine relDep : relatedDepLines)
    {
        Clause cl = clauseMap.get(relDep.firstOffset);
        // Keep scanning rather than stopping at the first governor: the
        // previous break-on-first returned null even when a later governor
        // was mapped to a clause.
        if (cl != null)
        {
            return cl;
        }
    }
    return null;
}
/**
 * Builds a fresh Clause for the dependent side of a dependency line based on
 * the dependent token's POS tag: verbs/modals (VB*, MD) become the clause
 * verb, adjectives (JJ*) become the clause complement. Any other POS yields
 * null. TODO (original note): remaining POS categories are not handled yet.
 *
 * @param depLine dependency line whose second part is the dependent token
 * @return a newly built clause, or null when the POS tag is unsupported
 * @throws SQLException if the word artifact lookup fails
 */
Clause buildDependentClause(DependencyLine depLine) throws SQLException
{
    Artifact related_word =relatedSentence.getChildByWordIndex(depLine.secondOffset-1);
    String d_pos = related_word.getPOS();
    Clause dependent_clause =null;
    if (d_pos != null && (d_pos.startsWith("VB") || d_pos.startsWith("MD")))
    {
        dependent_clause = new Clause();
        dependent_clause.clauseVerb.verbMainPart = depLine.secondPart;
        dependent_clause.clauseVerb.offset = depLine.secondOffset;
    }
    else if (d_pos != null && d_pos.startsWith("JJ") )
    {
        dependent_clause = new Clause();
        dependent_clause.complement = depLine.secondPart;
        dependent_clause.complementOffset = depLine.secondOffset;
    }
    return dependent_clause;
}
/**
 * Records prepositional dependents ("prep_*" relations) as indirect objects
 * of the governor's clause, together with the extracted preposition. Lines
 * whose governor is a noun are skipped here — those are treated as modifiers
 * in handleModifiers. Lines whose governor has no clause are kept as
 * unresolved phrases.
 *
 * @param depLine dependency line to inspect
 * @throws SQLException if the word artifact lookup fails
 */
void handleIobj(DependencyLine depLine) throws SQLException
{
    if (!(depLine.relationName.startsWith("prep_")))
    {
        return;
    }
    // If the governor is a noun, the prep_* relation is handled as a modifier.
    Artifact related_word =relatedSentence.getChildByWordIndex(depLine.firstOffset-1);
    String gov_pos = related_word.getPOS();
    if (gov_pos.startsWith("NN"))
    {
        return;
    }
    Clause gov_cl = clauseMap.get(depLine.firstOffset);
    Clause dep_cl = clauseMap.get(depLine.secondOffset);
    if(gov_cl != null && dep_cl!= null )
    {
        // Dependent is itself a clause: link the clause as the indirect object.
        SentenceObject indirect_object_cl = new SentenceObject();
        indirect_object_cl.clause = dep_cl;
        gov_cl.clauseIObjPrep.put(indirect_object_cl,getPrep(depLine.relationName) );
        gov_cl.clauseIObjs.add(depLine.secondPart);
    }
    else if (gov_cl != null && dep_cl== null)
    {
        // Dependent is a plain token: record its text and offset.
        SentenceObject indirect_object = new SentenceObject();
        indirect_object.content = depLine.secondPart;
        indirect_object.contentOffset = depLine.secondOffset;
        gov_cl.clauseIObjPrep.put(indirect_object,getPrep(depLine.relationName) );
        gov_cl.clauseIObjs.add(depLine.secondPart);
    }
    else
    {
        // Governor unresolved: keep the line as an unresolved phrase.
        phrases.add(depLine);
    }
}
/**
 * Handles "mark" relations (subordinating conjunctions such as "that",
 * "because"): flags the governor's clause as marked and stores the marker
 * word. Lines whose governor has no clause are kept as unresolved phrases.
 *
 * @param depLine dependency line to inspect
 */
void handleMarks(DependencyLine depLine)
{
    if (!(depLine.relationName.equals("mark")))
    {
        return;
    }
    Clause gov_cl = clauseMap.get(depLine.firstOffset);
    if(gov_cl != null)
    {
        gov_cl.isMarked = true;
        gov_cl.clauseMark = depLine.secondPart;
        // Map the marker token to the same clause.
        clauseMap.put(depLine.secondOffset, gov_cl);
    }
    else
    {
        phrases.add(depLine);
    }
}
/**
 * Extracts the preposition from a collapsed dependency relation name, e.g.
 * "prep_of" -> "of".
 *
 * @param rel_name relation name such as "prep_of"
 * @return the preposition, or null when the name does not match "prep_&lt;word&gt;"
 */
public String getPrep(String rel_name)
{
    Matcher matcher = Pattern.compile("prep_(\\w+)").matcher(rel_name);
    if (!matcher.matches())
    {
        return null;
    }
    return matcher.group(1);
}
/**
 * Extracts the conjunction from a collapsed dependency relation name, e.g.
 * "conj_and" -> "and".
 *
 * @param rel_name relation name such as "conj_and"
 * @return the conjunction, or null when the name does not match "conj_&lt;word&gt;"
 */
String getConj(String rel_name)
{
    Matcher matcher = Pattern.compile("conj_(\\w+)").matcher(rel_name);
    if (!matcher.matches())
    {
        return null;
    }
    return matcher.group(1);
}
/**
 * Handles collapsed conjunction relations ("conj_*"). NOTE(review): despite
 * the field name, {@code conjuctedBut} is set for EVERY conj_* relation
 * (conj_and, conj_or, ...), not only "conj_but" — confirm whether this is
 * intended. Lines whose dependent has no clause are kept as unresolved phrases.
 *
 * @param depLine dependency line to inspect
 */
void handleConjuction(DependencyLine depLine)
{
    if(!depLine.relationName.startsWith("conj_"))
    {
        return;
    }
    Clause dep_cl = clauseMap.get(depLine.secondOffset);
    if (dep_cl != null)
    {
        dep_cl.conjuctedBut = true;
    }
    else
    {
        phrases.add(depLine);
    }
}
/**
 * Returns up to {@code token_count} lemmas that immediately follow the token
 * at {@code offset}. Offsets are 1-based, matching the Stanford dependency
 * offsets used throughout this class (cf. getChildByWordIndex(offset-1)).
 * An unknown offset yields an empty list.
 *
 * @param offset      1-based token offset to start after
 * @param token_count maximum number of following lemmas to collect
 * @return the following lemmas, in sentence order
 */
public ArrayList<String> getNextLemmaTokens(Integer offset, Integer token_count)
{
    ArrayList<String> next_tokens = new ArrayList<String>();
    int sent_token_count = lemmaMap.size();
    if (lemmaMap.containsKey(offset))
    {
        // Bound is inclusive: with 1-based offsets the last token has
        // offset == lemmaMap.size(), which the previous exclusive bound
        // (i < sent_token_count) incorrectly skipped.
        for (int i = offset + 1; i <= token_count + offset && i <= sent_token_count; i++) {
            if (lemmaMap.containsKey(i))
            {
                next_tokens.add(lemmaMap.get(i));
            }
        }
    }
    return next_tokens;
}
/**
 * Collects up to {@code token_count} lemmas that precede the token at the
 * given 1-based offset, nearest first. An unknown offset yields an empty list.
 *
 * @param offset      1-based token offset to start before
 * @param token_count maximum number of preceding lemmas to collect
 * @return the preceding lemmas, nearest first
 */
public ArrayList<String> getPreviousLemmaTokens(Integer offset, Integer token_count)
{
    ArrayList<String> collected = new ArrayList<String>();
    if (!lemmaMap.containsKey(offset))
    {
        return collected;
    }
    int lowerBound = offset - token_count;
    for (int i = offset - 1; i >= lowerBound && i >= 0; i--)
    {
        if (lemmaMap.containsKey(i))
        {
            collected.add(lemmaMap.get(i));
        }
    }
    return collected;
}
/**
 * Collects up to {@code token_count} lemmas on each side of the token at the
 * given 1-based offset: first the preceding lemmas (nearest first), then the
 * following lemmas (in sentence order). An unknown offset yields an empty list.
 *
 * @param offset      1-based token offset to look around
 * @param token_count maximum number of lemmas to collect per side
 * @return preceding lemmas followed by following lemmas
 */
public ArrayList<String> getArroundLemmaTokens(Integer offset, Integer token_count)
{
    ArrayList<String> arround_tokens = new ArrayList<String>();
    int sent_token_count = lemmaMap.size();
    if (lemmaMap.containsKey(offset))
    {
        for (int i = offset - 1; i >= offset - token_count && i >= 0; i--) {
            if (lemmaMap.containsKey(i))
            {
                arround_tokens.add(lemmaMap.get(i));
            }
        }
        // Forward bound is inclusive: with 1-based offsets the last token has
        // offset == lemmaMap.size(), which the previous exclusive bound
        // (i < sent_token_count) incorrectly skipped (same fix as
        // getNextLemmaTokens).
        for (int i = offset + 1; i <= token_count + offset && i <= sent_token_count; i++) {
            if (lemmaMap.containsKey(i))
            {
                arround_tokens.add(lemmaMap.get(i));
            }
        }
    }
    return arround_tokens;
}
/**
 * Returns the POS tag of the 1-based token at {@code offset}, read from the
 * slash-delimited tagged string (e.g. "dog/NN"). Returns "missing" when the
 * offset is out of range or the token carries no tag — previously such
 * offsets threw ArrayIndexOutOfBoundsException.
 *
 * @param offset 1-based token offset
 * @return the POS tag, or "missing" when unavailable
 */
public String getPOSTag(Integer offset)
{
    if (offset <1)
    {
        return "missing";
    }
    String[] taggedTokens = posTags.split(" ");
    if (offset > taggedTokens.length)
    {
        // Out-of-range offset: be consistent with the offset < 1 case.
        return "missing";
    }
    String token = taggedTokens[offset - 1];
    // Use the last '/' so words that themselves contain a slash
    // (e.g. "either/or/CC") still yield the trailing tag.
    int slash = token.lastIndexOf('/');
    if (slash < 0 || slash == token.length() - 1)
    {
        return "missing";
    }
    return token.substring(slash + 1);
}
/** @return the clauses extracted from the sentence during analysis. */
public ArrayList<Clause> getClauses() {
    return clauses;
}
/** @return the raw text of the related sentence artifact. */
public String getContent() {
    return getRelatedSentence().getContent();
}
/** No-arg constructor for callers that populate state manually. */
public SentenceClauseManager() {
}
public void setRelatedSentence(Artifact relatedSentence) {
    this.relatedSentence = relatedSentence;
}
public Artifact getRelatedSentence() {
    return relatedSentence;
}
public void setSentContent(String sentContent) {
    this.sentContent = sentContent;
}
public String getSentContent() {
    return sentContent;
}
/** POS tags in slash-delimited "word/TAG" form, space-separated. */
public void setPosTags(String posTags) {
    this.posTags = posTags;
}
public String getPosTags() {
    return posTags;
}
/** Raw Stanford dependency string for the sentence. */
public void setStanDependenciesStr(String stanDependenciesStr) {
    this.stanDependenciesStr = stanDependenciesStr;
}
public String getStanDependenciesStr() {
    return stanDependenciesStr;
}
}
| apache-2.0 |
ctripcorp/dal | dal-client/src/main/java/com/ctrip/platform/dal/dao/configure/DataSourceConfigure.java | 22074 | package com.ctrip.platform.dal.dao.configure;
import com.ctrip.framework.dal.cluster.client.base.HostSpec;
import com.ctrip.platform.dal.common.enums.DBModel;
import com.ctrip.platform.dal.common.enums.DatabaseCategory;
import com.ctrip.platform.dal.dao.datasource.DataSourceIdentity;
import com.ctrip.platform.dal.dao.datasource.cluster.strategy.multi.MultiMasterStrategy;
import com.ctrip.platform.dal.dao.datasource.cluster.strategy.multi.validator.HostConnectionValidator;
import com.ctrip.platform.dal.dao.helper.EncryptionHelper;
import com.ctrip.platform.dal.exceptions.DalRuntimeException;
import org.apache.commons.lang.StringUtils;
import java.util.*;
public class DataSourceConfigure extends AbstractDataSourceConfigure
implements DataSourceConfigureConstants, DalConnectionStringConfigure, DalPoolPropertiesConfigure {
private String name;
private Properties properties = new Properties();
private String version;
private DalConnectionString connectionString;
private DataSourceIdentity dataSourceId;
private HostSpec host;
private HostConnectionValidator validator;
public DataSourceConfigure() {
}
public DataSourceConfigure(String name) {
this.name = name;
}
public DataSourceConfigure(String name, Properties properties) {
this(name);
this.properties = properties;
}
public DataSourceConfigure(String name, Map<String, String> propertyMap) {
this(name);
merge(propertyMap);
}
/**
 * Copies every entry of the given properties into this configure,
 * overwriting keys that already exist.
 *
 * @param properties source properties (defaults are not copied)
 */
public void merge(Properties properties) {
    Enumeration<Object> keyEnum = properties.keys();
    while (keyEnum.hasMoreElements()) {
        String key = (String) keyEnum.nextElement();
        setProperty(key, properties.getProperty(key));
    }
}
public void merge(Map<String, String> propertyMap) {
if (propertyMap != null) {
for (Map.Entry<String, String> entry : propertyMap.entrySet())
properties.setProperty(entry.getKey(), entry.getValue());
}
}
@Override
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public String getUserName() {
return getProperty(USER_NAME);
}
public void setUserName(String userName) {
setProperty(USER_NAME, userName != null ? userName : "");
}
@Override
public String getPassword() {
return getProperty(PASSWORD);
}
public void setPassword(String password) {
setProperty(PASSWORD, password != null ? password : "");
}
@Override
public String getConnectionUrl() {
return getProperty(CONNECTION_URL);
}
public void setConnectionUrl(String connectionUrl) {
setProperty(CONNECTION_URL, connectionUrl);
}
@Override
public String getDriverClass() {
return getProperty(DRIVER_CLASS_NAME);
}
public void setDriverClass(String driverClass) {
setProperty(DRIVER_CLASS_NAME, driverClass);
}
@Override
public String getVersion() {
return version;
}
public void setHostName(String hostName) {
setProperty(HOST_NAME, hostName);
}
@Override
public String getHostName() {
return getProperty(HOST_NAME);
}
public void setVersion(String version) {
this.version = version;
}
public DalConnectionString getConnectionString() {
return connectionString;
}
public void setConnectionString(DalConnectionString connectionString) {
this.connectionString = connectionString;
}
public Properties getProperties() {
return properties;
}
/**
 * Snapshots all pool properties as a string map. Returns a defensive copy,
 * so later changes to this configure do not affect the returned map.
 * The previous raw {@code (Map)} cast produced a heap-polluted
 * {@code Map<String, String>} that could throw ClassCastException at use
 * sites; this builds a type-safe copy instead.
 *
 * @return a mutable copy of all properties as string key/value pairs
 */
public Map<String, String> getPoolProperties() {
    Map<String, String> poolProperties = new HashMap<>();
    for (String key : properties.stringPropertyNames()) {
        poolProperties.put(key, properties.getProperty(key));
    }
    return poolProperties;
}
public void setProperties(Properties properties) {
this.properties = properties;
}
public String getProperty(String key) {
return properties.getProperty(key);
}
public String getProperty(String key, String defaultValue) {
return properties.getProperty(key, defaultValue);
}
public void setProperty(String key, String value) {
if (key != null && value != null)
properties.setProperty(key, value);
}
public int getIntProperty(String key, int defaultValue) {
return properties.containsKey(key) ? Integer.parseInt(getProperty(key)) : defaultValue;
}
public long getLongProperty(String key, long defaultValue) {
return properties.containsKey(key) ? Long.parseLong(getProperty(key)) : defaultValue;
}
public boolean getBooleanProperty(String key, boolean defaultValue) {
return properties.containsKey(key) ? Boolean.parseBoolean(getProperty(key)) : defaultValue;
}
// ---------------------------------------------------------------------------
// Typed accessors for the standard connection-pool settings. Each getter reads
// the backing Properties object and falls back to its DEFAULT_* constant; the
// strategy-related getters below return null when the key is unset.
// ---------------------------------------------------------------------------

public Boolean getTestWhileIdle() {
    return getBooleanProperty(TESTWHILEIDLE, DEFAULT_TESTWHILEIDLE);
}

public Boolean getTestOnBorrow() {
    return getBooleanProperty(TESTONBORROW, DEFAULT_TESTONBORROW);
}

public Boolean getTestOnReturn() {
    return getBooleanProperty(TESTONRETURN, DEFAULT_TESTONRETURN);
}

public String getValidationQuery() {
    return getProperty(VALIDATIONQUERY, DEFAULT_VALIDATIONQUERY);
}

public Integer getValidationQueryTimeout() {
    return getIntProperty(VALIDATIONQUERYTIMEOUT, DEFAULT_VALIDATIONQUERYTIMEOUT);
}

public Long getValidationInterval() {
    return getLongProperty(VALIDATIONINTERVAL, DEFAULT_VALIDATIONINTERVAL);
}

public Integer getTimeBetweenEvictionRunsMillis() {
    return getIntProperty(TIMEBETWEENEVICTIONRUNSMILLIS, DEFAULT_TIMEBETWEENEVICTIONRUNSMILLIS);
}

public Integer getMinEvictableIdleTimeMillis() {
    return getIntProperty(MINEVICTABLEIDLETIMEMILLIS, DEFAULT_MINEVICTABLEIDLETIMEMILLIS);
}

public Integer getMaxAge() {
    return getIntProperty(MAX_AGE, DEFAULT_MAXAGE);
}

public Integer getMaxActive() {
    return getIntProperty(MAXACTIVE, DEFAULT_MAXACTIVE);
}

public Integer getMinIdle() {
    return getIntProperty(MINIDLE, DEFAULT_MINIDLE);
}

public Integer getMaxWait() {
    return getIntProperty(MAXWAIT, DEFAULT_MAXWAIT);
}

public Integer getInitialSize() {
    return getIntProperty(INITIALSIZE, DEFAULT_INITIALSIZE);
}

public Integer getRemoveAbandonedTimeout() {
    return getIntProperty(REMOVEABANDONEDTIMEOUT, DEFAULT_REMOVEABANDONEDTIMEOUT);
}

public Boolean getRemoveAbandoned() {
    return getBooleanProperty(REMOVEABANDONED, DEFAULT_REMOVEABANDONED);
}

public Boolean getLogAbandoned() {
    return getBooleanProperty(LOGABANDONED, DEFAULT_LOGABANDONED);
}

public String getConnectionProperties() {
    return getProperty(CONNECTIONPROPERTIES, DEFAULT_CONNECTIONPROPERTIES);
}

public String getValidatorClassName() {
    return getProperty(VALIDATORCLASSNAME, DEFAULT_VALIDATORCLASSNAME);
}

// NOTE(review): falls back to DEFAULT_CONNECTIONPROPERTIES rather than a
// dedicated OPTION default — looks like a copy/paste; confirm intended.
public String getOption() {
    return getProperty(OPTION, DEFAULT_CONNECTIONPROPERTIES);
}

@Override
public String getDBToken() {
    return getProperty(DB_TOKEN);
}

@Override
public Integer getCallMysqlApiPeriod() {
    return getIntProperty(CALL_MYSQL_API_PERIOD, DEFAULT_CALL_MYSQL_API_PERIOD);
}

@Override
public DBModel getDBModel() {
    return DBModel.toDBModel(getProperty(DB_MODEL, DEFAULT_DB_MODEL));
}

@Override
public String getLocalAccess() {
    return getProperty(LOCAL_ACCESS);
}

// Prefers the multi-master ZONES_PRIORITY key; falls back to the legacy
// IDC_PRIORITY key when the former is unset or empty.
@Override
public String getZonesPriority() {
    String value = getProperty(MultiMasterStrategy.ZONES_PRIORITY);
    return StringUtils.isNotEmpty(value) ? value : getProperty(IDC_PRIORITY);
}

// Returns null (meaning "not configured") when the key is absent or empty.
@Override
public Long getFailoverTimeMS() {
    String value = getProperty(MultiMasterStrategy.FAILOVER_TIME_MS);
    return StringUtils.isNotEmpty(value) ? Long.parseLong(value) : null;
}

@Override
public Long getBlacklistTimeoutMS() {
    String value = getProperty(MultiMasterStrategy.BLACKLIST_TIMEOUT_MS);
    return StringUtils.isNotEmpty(value) ? Long.parseLong(value) : null;
}

@Override
public Long getFixedValidatePeriodMS() {
    String value = getProperty(MultiMasterStrategy.FIXED_VALIDATE_PERIOD_MS);
    return StringUtils.isNotEmpty(value) ? Long.parseLong(value) : null;
}

/**
 * Returns the connection-init SQL, preferring the primary INIT_SQL key over
 * the legacy INIT_SQL2 key; null when neither is set to a non-empty value.
 */
public String getInitSQL() {
    String initSQL = getProperty(INIT_SQL);
    if (initSQL != null && !initSQL.isEmpty())
        return initSQL;
    String initSQL2 = getProperty(INIT_SQL2);
    if (initSQL2 != null && !initSQL2.isEmpty())
        return initSQL2;
    return null;
}

// These are currently hard coded to their default values (not configurable).
public boolean getJmxEnabled() {
    return DEFAULT_JMXENABLED;
}

public String getJdbcInterceptors() {
    return getProperty(JDBC_INTERCEPTORS, DEFAULT_JDBCINTERCEPTORS);
}

/**
 * Diagnostic string combining the connection URL, configuration version and
 * the credentials CRC; used for logging/compare, not for connecting.
 */
public String toConnectionUrl() {
    return String.format("{ConnectionUrl:%s,Version:%s,CRC:%s}", getConnectionUrl(), version, getCRC());
}
/**
 * Returns a copy of the configuration properties with the sensitive /
 * connection-identifying entries (user name, password, connection URL and
 * driver class) filtered out.
 */
public Properties toProperties() {
    final Set<String> excluded = new HashSet<>();
    excluded.add(USER_NAME);
    excluded.add(PASSWORD);
    excluded.add(CONNECTION_URL);
    excluded.add(DRIVER_CLASS_NAME);
    final Properties copy = new Properties();
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        if (excluded.contains(entry.getKey())) {
            continue; // never expose credentials or connection basics
        }
        copy.setProperty(entry.getKey().toString(), entry.getValue().toString());
    }
    return copy;
}
/**
 * Whether dynamic (runtime-refreshable) pool properties are enabled via the
 * ENABLE_DYNAMIC_POOL_PROPERTIES key.
 *
 * @return true only when the key is present and equals "true" (case-insensitive)
 */
public boolean dynamicPoolPropertiesEnabled() {
    if (properties == null) {
        return false;
    }
    // Boolean.parseBoolean(null) is false, so the original's explicit
    // isEmpty()/null-value checks are subsumed by this single call.
    return Boolean.parseBoolean(properties.getProperty(ENABLE_DYNAMIC_POOL_PROPERTIES));
}
/**
 * Session wait timeout, resolved in priority order: SESSION_WAIT_TIMEOUT,
 * then the legacy SERVER_WAIT_TIMEOUT key, then the built-in default.
 */
public Integer getSessionWaitTimeout() {
    return getIntProperty(SESSION_WAIT_TIMEOUT, getIntProperty(SERVER_WAIT_TIMEOUT, DEFAULT_SESSION_WAIT_TIMEOUT));
}

/** Derives the database category (e.g. MySQL/SqlServer) from the connection URL. */
public DatabaseCategory getDatabaseCategory() {
    return DatabaseCategory.matchWithConnectionUrl(getConnectionUrl());
}
/**
 * Computes a short fingerprint of the credentials.
 * Rule: concatenate username and password, then take 8 characters of the MD5
 * code from the beginning (the truncation is delegated to EncryptionHelper).
 *
 * @return the CRC string, or null if the computation fails for any reason
 */
private String getCRC() {
    String crc = null;
    try {
        // BUG FIX: String.concat() returns a NEW string; the original code
        // discarded that result and hashed the bare username only, so the
        // password never contributed to the CRC as the rule requires.
        String credentials = getUserName().concat(getPassword());
        crc = EncryptionHelper.getCRC(credentials);
    } catch (Throwable e) {
        // Best effort: the CRC is diagnostic only, so failures (e.g. null
        // username) deliberately fall through and yield null.
    }
    return crc;
}
/**
 * Deep-copies this configuration: the Properties map and connection string
 * are cloned; version and dataSourceId are shared by reference.
 * NOTE(review): host and validator are intentionally NOT copied here (see the
 * commented-out lines), while cloneWithoutValidator() does copy host —
 * confirm this asymmetry is intended.
 */
public synchronized DataSourceConfigure clone() {
    DataSourceConfigure dataSourceConfigure = new DataSourceConfigure(name);
    Properties p = new Properties();
    // Copy entries one by one so the clone owns an independent Properties map.
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        p.setProperty(entry.getKey().toString(), entry.getValue().toString());
    }
    dataSourceConfigure.setProperties(p);
    dataSourceConfigure.setVersion(version);
    dataSourceConfigure.setConnectionString(connectionString == null ? null : connectionString.clone());
    dataSourceConfigure.setDataSourceId(dataSourceId);
    // dataSourceConfigure.setHost(host);
    // dataSourceConfigure.setValidator(validator);
    return dataSourceConfigure;
}
/**
 * Same as {@code clone()} but additionally carries over the host, while the
 * validator is left unset (hence the name).
 */
public synchronized DataSourceConfigure cloneWithoutValidator() {
    DataSourceConfigure dataSourceConfigure = new DataSourceConfigure(name);
    Properties p = new Properties();
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        p.setProperty(entry.getKey().toString(), entry.getValue().toString());
    }
    dataSourceConfigure.setProperties(p);
    dataSourceConfigure.setVersion(version);
    dataSourceConfigure.setConnectionString(connectionString == null ? null : connectionString.clone());
    dataSourceConfigure.setDataSourceId(dataSourceId);
    dataSourceConfigure.setHost(host);
    return dataSourceConfigure;
}
/**
 * Adapts any IDataSourceConfigure into a concrete DataSourceConfigure.
 * If the argument already is one it is returned as-is; otherwise every
 * supported setting is copied into a fresh Properties-backed instance.
 * Null-valued optional settings are simply skipped (the defaults apply).
 *
 * @throws DalRuntimeException when the connection URL is null
 */
public static DataSourceConfigure valueOf(IDataSourceConfigure configure) {
    if (configure instanceof DataSourceConfigure)
        return (DataSourceConfigure) configure;
    else {
        DataSourceConfigure dataSourceConfigure = new DataSourceConfigure();
        Properties properties = new Properties();
        // User name / password may legitimately be null -> store "".
        String username = configure.getUserName();
        properties.setProperty(USER_NAME, username != null ? username : "");
        String password = configure.getPassword();
        properties.setProperty(PASSWORD, password != null ? password : "");
        String connectionUrl = configure.getConnectionUrl();
        if (connectionUrl == null)
            throw new DalRuntimeException("connection url cannot be null");
        properties.setProperty(CONNECTION_URL, connectionUrl);
        // Best-effort extraction of the host name for diagnostics; parsing
        // failures are deliberately ignored.
        try {
            HostAndPort hostAndPort = ConnectionStringParser.parseHostPortFromURL(connectionUrl);
            if (StringUtils.isEmpty(hostAndPort.getHost())) {
                properties.setProperty(HOST_NAME, "unknown");
            } else {
                properties.setProperty(HOST_NAME, hostAndPort.getHost());
            }
        } catch (Throwable t) {
            // ignore
        }
        // Optional settings: copy only when explicitly provided.
        if (configure.getDriverClass() != null)
            properties.setProperty(DRIVER_CLASS_NAME, configure.getDriverClass());
        if (configure.getTestWhileIdle() != null)
            properties.setProperty(TESTWHILEIDLE, String.valueOf(configure.getTestWhileIdle()));
        if (configure.getTestOnBorrow() != null)
            properties.setProperty(TESTONBORROW, String.valueOf(configure.getTestOnBorrow()))
;
        if (configure.getTestOnReturn() != null)
            properties.setProperty(TESTONRETURN, String.valueOf(configure.getTestOnReturn()));
        if (configure.getValidationQuery() != null)
            properties.setProperty(VALIDATIONQUERY, configure.getValidationQuery());
        if (configure.getValidationQueryTimeout() != null)
            properties.setProperty(VALIDATIONQUERYTIMEOUT, String.valueOf(configure.getValidationQueryTimeout()));
        if (configure.getValidationInterval() != null)
            properties.setProperty(VALIDATIONINTERVAL, String.valueOf(configure.getValidationInterval()));
        if (configure.getTimeBetweenEvictionRunsMillis() != null)
            properties.setProperty(TIMEBETWEENEVICTIONRUNSMILLIS, String.valueOf(configure.getTimeBetweenEvictionRunsMillis()));
        if (configure.getMaxAge() != null)
            properties.setProperty(MAX_AGE, String.valueOf(configure.getMaxAge()));
        if (configure.getMaxActive() != null)
            properties.setProperty(MAXACTIVE, String.valueOf(configure.getMaxActive()));
        if (configure.getMinIdle() != null)
            properties.setProperty(MINIDLE, String.valueOf(configure.getMinIdle()));
        if (configure.getMaxWait() != null)
            properties.setProperty(MAXWAIT, String.valueOf(configure.getMaxWait()));
        if (configure.getInitialSize() != null)
            properties.setProperty(INITIALSIZE, String.valueOf(configure.getInitialSize()));
        if (configure.getRemoveAbandonedTimeout() != null)
            properties.setProperty(REMOVEABANDONEDTIMEOUT, String.valueOf(configure.getRemoveAbandonedTimeout()));
        if (configure.getRemoveAbandoned() != null)
            properties.setProperty(REMOVEABANDONED, String.valueOf(configure.getRemoveAbandoned()));
        if (configure.getLogAbandoned() != null)
            properties.setProperty(LOGABANDONED, String.valueOf(configure.getLogAbandoned()));
        if (configure.getMinEvictableIdleTimeMillis() != null)
            properties.setProperty(MINEVICTABLEIDLETIMEMILLIS, String.valueOf(configure.getMinEvictableIdleTimeMillis()));
        if (configure.getConnectionProperties() != null)
            properties.setProperty(CONNECTIONPROPERTIES, configure.getConnectionProperties());
        if (configure.getInitSQL() != null)
            properties.setProperty(INIT_SQL, configure.getInitSQL());
        if (configure.getValidatorClassName() != null)
            properties.setProperty(VALIDATORCLASSNAME, configure.getValidatorClassName());
        if (configure.getJdbcInterceptors() != null)
            properties.setProperty(JDBC_INTERCEPTORS, configure.getJdbcInterceptors());
        // Session wait timeout only exists on the abstract subtype.
        if (configure instanceof AbstractDataSourceConfigure) {
            AbstractDataSourceConfigure configure1 = (AbstractDataSourceConfigure) configure;
            if (configure1.getSessionWaitTimeout() != null)
                properties.setProperty(SESSION_WAIT_TIMEOUT, String.valueOf(configure1.getSessionWaitTimeout()));
        }
        dataSourceConfigure.setProperties(properties);
        return dataSourceConfigure;
    }
}
/**
 * Rewrites the stored connection URL so it points at the given ip/port,
 * keeping every other part of the URL unchanged.
 */
public void replaceURL(String ip, int port) {
    final String currentUrl = getConnectionUrl();
    final String updatedUrl =
            ConnectionStringParser.replaceHostAndPort(currentUrl, ip, String.valueOf(port));
    setConnectionUrl(updatedUrl);
}
// --- Plain accessors for the identity / host / validator collaborators. ---

public DataSourceIdentity getDataSourceId() {
    return dataSourceId;
}

public void setDataSourceId(DataSourceIdentity dataSourceId) {
    this.dataSourceId = dataSourceId;
}

public HostSpec getHost() {
    return host;
}

public void setHost(HostSpec host) {
    this.host = host;
}

public HostConnectionValidator getValidator() {
    return validator;
}

public void setValidator(HostConnectionValidator validator) {
    this.validator = validator;
}
/**
 * Value equality over every pool-relevant setting (credentials, URL, driver,
 * validation/eviction/abandonment settings, interceptors, host, validator).
 * Note: version, dataSourceId and connectionString deliberately do not
 * participate; keep this list in sync with hashCode() below.
 */
@Override
public boolean equals(Object obj) {
    if (obj instanceof DataSourceConfigure) {
        DataSourceConfigure ref = (DataSourceConfigure) obj;
        return equals(getConnectionUrl(), ref.getConnectionUrl()) &&
                equals(getUserName(), ref.getUserName()) &&
                equals(getPassword(), ref.getPassword()) &&
                equals(getDriverClass(), ref.getDriverClass()) &&
                equals(getTestOnBorrow(), ref.getTestOnBorrow()) &&
                equals(getTestOnReturn(), ref.getTestOnReturn()) &&
                equals(getTestWhileIdle(), ref.getTestWhileIdle()) &&
                equals(getValidationInterval(), ref.getValidationInterval()) &&
                equals(getValidationQuery(), ref.getValidationQuery()) &&
                equals(getValidationQueryTimeout(), ref.getValidationQueryTimeout()) &&
                equals(getValidatorClassName(), ref.getValidatorClassName()) &&
                equals(getMaxActive(), ref.getMaxActive()) &&
                equals(getMaxAge(), ref.getMaxAge()) &&
                equals(getMaxWait(), ref.getMaxWait()) &&
                equals(getMinIdle(), ref.getMinIdle()) &&
                equals(getTimeBetweenEvictionRunsMillis(), ref.getTimeBetweenEvictionRunsMillis()) &&
                equals(getMinEvictableIdleTimeMillis(), ref.getMinEvictableIdleTimeMillis()) &&
                equals(getInitialSize(), ref.getInitialSize()) &&
                equals(getInitSQL(), ref.getInitSQL()) &&
                equals(getLogAbandoned(), ref.getLogAbandoned()) &&
                equals(getRemoveAbandoned(), ref.getRemoveAbandoned()) &&
                equals(getRemoveAbandonedTimeout(), ref.getRemoveAbandonedTimeout()) &&
                equals(getJdbcInterceptors(), ref.getJdbcInterceptors()) &&
                equals(getConnectionProperties(), ref.getConnectionProperties()) &&
                equals(getJmxEnabled(), ref.getJmxEnabled()) &&
                equals(getSessionWaitTimeout(), ref.getSessionWaitTimeout()) &&
                equals(getHost(), ref.getHost()) &&
                equals(getValidator(), ref.getValidator());
    }
    return false;
}
/**
 * Null-safe equality check used by {@code equals(Object)} above.
 * Delegates to the standard-library helper rather than re-deriving the
 * null logic by hand (behavior is identical: two nulls are equal, a null
 * and a non-null are not).
 */
private boolean equals(Object obj1, Object obj2) {
    return java.util.Objects.equals(obj1, obj2);
}
/**
 * Hash over exactly the same fields, in the same order, as equals() above —
 * keep the two lists in sync to preserve the equals/hashCode contract.
 */
@Override
public int hashCode() {
    return new HashCodeGenerator().
            append(getConnectionUrl()).
            append(getUserName()).
            append(getPassword()).
            append(getDriverClass()).
            append(getTestOnBorrow()).
            append(getTestOnReturn()).
            append(getTestWhileIdle()).
            append(getValidationInterval()).
            append(getValidationQuery()).
            append(getValidationQueryTimeout()).
            append(getValidatorClassName()).
            append(getMaxActive()).
            append(getMaxAge()).
            append(getMaxWait()).
            append(getMinIdle()).
            append(getTimeBetweenEvictionRunsMillis()).
            append(getMinEvictableIdleTimeMillis()).
            append(getInitialSize()).
            append(getInitSQL()).
            append(getLogAbandoned()).
            append(getRemoveAbandoned()).
            append(getRemoveAbandonedTimeout()).
            append(getJdbcInterceptors()).
            append(getConnectionProperties()).
            append(getJmxEnabled()).
            append(getSessionWaitTimeout()).
            append(getHost()).
            append(getValidator()).
            generate();
}
/**
 * Small fluent accumulator for a 31-based hash over a sequence of possibly
 * null values; equivalent to Objects.hash without the varargs array.
 */
private static class HashCodeGenerator {

    private int result = 0;

    /** Folds one value into the running hash (null contributes 0). */
    public HashCodeGenerator append(Object obj) {
        result = 31 * result + java.util.Objects.hashCode(obj);
        return this;
    }

    /** Returns the accumulated hash. */
    public int generate() {
        return result;
    }
}
}
| apache-2.0 |
paulnguyen/cmpe279 | eclipse/Roller/src/org/apache/roller/ui/rendering/util/WeblogPreviewRequest.java | 3350 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. For additional information regarding
* copyright in this work, please see the NOTICE file in the top level
* directory of this distribution.
*/
package org.apache.roller.ui.rendering.util;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.roller.RollerException;
import org.apache.roller.ThemeNotFoundException;
import org.apache.roller.model.RollerFactory;
import org.apache.roller.model.ThemeManager;
import org.apache.roller.pojos.Theme;
/**
* Represents a request for a weblog preview.
*/
/**
 * Represents a request for a weblog preview. Extends the normal page request
 * with an optional "theme" query parameter so a weblog can be rendered with a
 * theme other than its configured one. Previews always appear logged-out.
 */
public class WeblogPreviewRequest extends WeblogPageRequest {

    private static Log log = LogFactory.getLog(WeblogPreviewRequest.class);

    private static final String PREVIEW_SERVLET = "/roller-ui/authoring/preview";

    // lightweight attribute: name of the theme to preview with, if any
    private String themeName = null;

    // heavyweight attribute: lazily resolved Theme object for themeName
    private Theme theme = null;

    public WeblogPreviewRequest(HttpServletRequest request)
            throws InvalidRequestException {
        // parent parses the common weblog page request parts first
        super(request);

        // the only query param we expect here is "theme"
        String requestedTheme = request.getParameter("theme");
        if (requestedTheme != null) {
            this.themeName = requestedTheme;
        }

        if (log.isDebugEnabled()) {
            log.debug("theme = "+this.themeName);
        }
    }

    /** True only when the destination servlet is the preview servlet. */
    boolean isValidDestination(String servlet) {
        if (servlet == null) {
            return false;
        }
        return PREVIEW_SERVLET.equals(servlet);
    }

    public String getThemeName() {
        return themeName;
    }

    public void setThemeName(String name) {
        this.themeName = name;
    }

    // override so that previews never show login status
    public String getAuthenticUser() {
        return null;
    }

    // override so that previews never show login status
    public boolean isLoggedIn() {
        return false;
    }

    /**
     * Lazily resolves the Theme for the requested theme name. An unknown
     * theme name is silently ignored (null is returned); other lookup
     * failures are logged.
     */
    public Theme getTheme() {
        if (theme != null || themeName == null) {
            return theme;
        }
        try {
            ThemeManager themeMgr = RollerFactory.getRoller().getThemeManager();
            theme = themeMgr.getTheme(themeName);
        } catch (ThemeNotFoundException tnfe) {
            // bogus theme specified ... don't worry about it
        } catch (RollerException re) {
            log.error("Error looking up theme "+themeName, re);
        }
        return theme;
    }

    public void setTheme(Theme newTheme) {
        this.theme = newTheme;
    }
}
| apache-2.0 |
gvgreat/hrillekha | crown/retail/crown-jpa/src/main/java/com/techlords/crown/CrownServiceLocator.java | 1456 | package com.techlords.crown;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Locale;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.techlords.crown.service.CrownService;
import com.techlords.crown.service.GeneralService;
import com.techlords.crown.service.ItemService;
import com.techlords.crown.service.ReceiptService;
/**
 * Eagerly-initialized singleton that looks up Crown service beans from a
 * classpath Spring application context.
 */
public final class CrownServiceLocator {

    // Singleton instance; the context is built once when the class loads.
    public static final CrownServiceLocator INSTANCE = new CrownServiceLocator();

    private final BeanFactory factory;

    private CrownServiceLocator() {
        // Private: construction only happens via INSTANCE.
        factory = new ClassPathXmlApplicationContext(
                "/com/techlords/crown/app-context.xml");
    }

    /**
     * Returns the Spring-managed bean implementing the given Crown service
     * interface.
     */
    public final <T extends CrownService> T getCrownService(
            Class<T> serviceClass) {
        return factory.getBean(serviceClass);
    }

    // NOTE(review): this main() is leftover ad-hoc debug code (stray prints,
    // doubled semicolons, System.err misuse). It is kept only because removing
    // a public entry point is an interface change; consider deleting it.
    public static void main(String[] args) {
        System.err.println(5.4444 < 5.44441);
        System.err.println(Math.round(5.334444));
        System.err.println(Math.round(5.6334444));
        NumberFormat format = DecimalFormat.getIntegerInstance(Locale.getDefault());
        format.setGroupingUsed(false);
        System.err.println(format.format(23.0d));;
        double curr = -5000;
        curr += 2000;
        System.out.println(format.format(curr));
        ItemService ser = INSTANCE.getCrownService(ItemService.class);
        System.err.println(ser.findAllItems());;
    }
}
| apache-2.0 |
danielyzc/integrado | integrado-ejb/src/java/be/DetalleCambioProducto.java | 5501 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package be;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;
/**
*
* @author root
*/
@Entity
@Table(name = "detalle_cambio_producto")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "DetalleCambioProducto.findAll", query = "SELECT d FROM DetalleCambioProducto d"),
    @NamedQuery(name = "DetalleCambioProducto.findByIdCambio", query = "SELECT d FROM DetalleCambioProducto d WHERE d.detalleCambioProductoPK.idCambio = :idCambio"),
    @NamedQuery(name = "DetalleCambioProducto.findByIdProductoCambio", query = "SELECT d FROM DetalleCambioProducto d WHERE d.detalleCambioProductoPK.idProductoCambio = :idProductoCambio"),
    @NamedQuery(name = "DetalleCambioProducto.findByIdProductoCambiada", query = "SELECT d FROM DetalleCambioProducto d WHERE d.detalleCambioProductoPK.idProductoCambiada = :idProductoCambiada"),
    @NamedQuery(name = "DetalleCambioProducto.findByEstadoExistencia", query = "SELECT d FROM DetalleCambioProducto d WHERE d.estadoExistencia = :estadoExistencia"),
    @NamedQuery(name = "DetalleCambioProducto.findByFechaRegistro", query = "SELECT d FROM DetalleCambioProducto d WHERE d.fechaRegistro = :fechaRegistro")})
// JPA entity for one line of a product-exchange ("cambio") transaction. The
// composite primary key ties together the exchange id, the product handed in,
// and the product given out; equals/hashCode are based solely on that key.
public class DetalleCambioProducto implements Serializable {

    private static final long serialVersionUID = 1L;

    // Composite key: (idCambio, idProductoCambio, idProductoCambiada).
    @EmbeddedId
    protected DetalleCambioProductoPK detalleCambioProductoPK;

    // Stock/existence state flag for this exchange line (required column).
    @Basic(optional = false)
    @NotNull
    @Column(name = "estado_existencia")
    private int estadoExistencia;

    // Timestamp the row was recorded; may be null.
    @Column(name = "fecha_registro")
    @Temporal(TemporalType.TIMESTAMP)
    private Date fechaRegistro;

    // Product received in exchange (maps id_producto_cambiada; read-only join).
    @JoinColumn(name = "id_producto_cambiada", referencedColumnName = "id_producto", insertable = false, updatable = false)
    @ManyToOne(optional = false, fetch = FetchType.EAGER)
    private Producto producto;

    // Product given out (maps id_producto_cambio; read-only join).
    @JoinColumn(name = "id_producto_cambio", referencedColumnName = "id_producto", insertable = false, updatable = false)
    @ManyToOne(optional = false, fetch = FetchType.EAGER)
    private Producto producto1;

    // Owning exchange transaction (read-only join over id_cambio).
    @JoinColumn(name = "id_cambio", referencedColumnName = "id_cambio", insertable = false, updatable = false)
    @ManyToOne(optional = false, fetch = FetchType.EAGER)
    private Cambio cambio;

    /** No-arg constructor required by JPA. */
    public DetalleCambioProducto() {
    }

    public DetalleCambioProducto(DetalleCambioProductoPK detalleCambioProductoPK) {
        this.detalleCambioProductoPK = detalleCambioProductoPK;
    }

    public DetalleCambioProducto(DetalleCambioProductoPK detalleCambioProductoPK, int estadoExistencia) {
        this.detalleCambioProductoPK = detalleCambioProductoPK;
        this.estadoExistencia = estadoExistencia;
    }

    /** Convenience constructor building the embedded key from its parts. */
    public DetalleCambioProducto(int idCambio, int idProductoCambio, int idProductoCambiada) {
        this.detalleCambioProductoPK = new DetalleCambioProductoPK(idCambio, idProductoCambio, idProductoCambiada);
    }

    public DetalleCambioProductoPK getDetalleCambioProductoPK() {
        return detalleCambioProductoPK;
    }

    public void setDetalleCambioProductoPK(DetalleCambioProductoPK detalleCambioProductoPK) {
        this.detalleCambioProductoPK = detalleCambioProductoPK;
    }

    public int getEstadoExistencia() {
        return estadoExistencia;
    }

    public void setEstadoExistencia(int estadoExistencia) {
        this.estadoExistencia = estadoExistencia;
    }

    public Date getFechaRegistro() {
        return fechaRegistro;
    }

    public void setFechaRegistro(Date fechaRegistro) {
        this.fechaRegistro = fechaRegistro;
    }

    public Producto getProducto() {
        return producto;
    }

    public void setProducto(Producto producto) {
        this.producto = producto;
    }

    public Producto getProducto1() {
        return producto1;
    }

    public void setProducto1(Producto producto1) {
        this.producto1 = producto1;
    }

    public Cambio getCambio() {
        return cambio;
    }

    public void setCambio(Cambio cambio) {
        this.cambio = cambio;
    }

    @Override
    public int hashCode() {
        // Hash derives solely from the embedded primary key (0 while unset).
        int hash = 0;
        hash += (detalleCambioProductoPK != null ? detalleCambioProductoPK.hashCode() : 0);
        return hash;
    }

    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof DetalleCambioProducto)) {
            return false;
        }
        DetalleCambioProducto other = (DetalleCambioProducto) object;
        if ((this.detalleCambioProductoPK == null && other.detalleCambioProductoPK != null) || (this.detalleCambioProductoPK != null && !this.detalleCambioProductoPK.equals(other.detalleCambioProductoPK))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "be.DetalleCambioProducto[ detalleCambioProductoPK=" + detalleCambioProductoPK + " ]";
    }
}
| apache-2.0 |
qafedev/qafe-platform | qafe-core/src/main/java/com/qualogy/qafe/bind/io/Writer.java | 6508 | /**
* Copyright 2008-2017 Qualogy Solutions B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.qualogy.qafe.bind.io;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import org.jibx.runtime.BindingDirectory;
import org.jibx.runtime.IBindingFactory;
import org.jibx.runtime.IMarshallingContext;
import org.jibx.runtime.JiBXException;
import org.xml.sax.SAXException;
import com.qualogy.qafe.bind.core.application.ApplicationStack;
import com.qualogy.qafe.bind.core.messages.Messages;
import com.qualogy.qafe.bind.domain.ApplicationMapping;
import com.qualogy.qafe.bind.io.document.DocumentLoader;
import com.qualogy.qafe.bind.orm.jibx.BindException;
import com.qualogy.qafe.bind.resource.query.QueryContainer;
import com.qualogy.qafe.bind.rules.FilterRules;
/**
 * Writes JiBX-bound domain objects out as XML, optionally validating the
 * produced document against its schema. Output is always UTF-8.
 */
public class Writer {

    /** Encoding used for every produced document. */
    public final static String OUTPUT_ENCODING_TYPE = "UTF-8";

    private final static Logger log = Logger.getLogger(Writer.class.getName());

    /**
     * Writes the object to dir/fileName (creating dir if necessary) with
     * validation enabled.
     */
    public void write(Object domain, String dir, String fileName){
        write(domain, dir, fileName, true);
    }

    /**
     * Writes the object to fileName with validation enabled. If you require
     * no validation, use write(Object, String, boolean) with the last arg
     * set to false.
     */
    public void write(Object domain, String fileName){
        write(domain, fileName, true);
    }

    /**
     * Writes the given object to an XML file according to its JiBX binding.
     *
     * @param domain     the bound object to marshal
     * @param fileName   target file (created if absent)
     * @param validating whether to schema-validate the produced XML
     * @throws BindException on any I/O or marshalling failure
     */
    public void write(Object domain, String fileName, boolean validating){
        File file = getFile(fileName);
        OutputStream out = null;
        try {
            out = new FileOutputStream(file);
            write(domain, out, validating);
        } catch (FileNotFoundException e) {
            throw new BindException(e);
        } finally {
            // Always release the file handle, even when marshalling failed.
            if (out != null) {
                try {
                    out.close();
                } catch (IOException e) {
                    throw new BindException(e);
                }
            }
        }
    }

    /** Writes the object to the given stream; validation defaults to true. */
    public void write(Object domain, OutputStream out){
        write(domain, out, true);
    }

    /**
     * Writes the object to the given stream, wrapping it so the root node
     * and schema location for the object's type (see WriterMapping) are
     * applied.
     */
    public void write(Object domain, OutputStream out, boolean validating){
        String rootNode = WriterMapping.getRootNode(domain.getClass());
        String schemaLocation = WriterMapping.getSchemaLocation(domain.getClass());
        doWrite(domain, new BindOutputStream(out, rootNode, schemaLocation), validating);
    }

    /**
     * Marshals the object via JiBX and, when requested, re-parses the bytes
     * with a validating parser to catch schema violations.
     */
    private void doWrite(Object domain, BindOutputStream out, boolean validating){
        try {
            IBindingFactory bfact = BindingDirectory.getFactory(domain.getClass());
            IMarshallingContext mctx = bfact.createMarshallingContext();
            mctx.setIndent(2);
            mctx.startDocument(OUTPUT_ENCODING_TYPE, Boolean.TRUE, out);
            mctx.marshalDocument(domain);
        } catch (JiBXException e) {
            throw new BindException(e);
        }
        if (validating) {
            try {
                // Validate against the buffered copy of what was just written.
                InputStream is = new ByteArrayInputStream(out.toByteArray());
                validate(is);
            } catch (SAXException e) {
                throw new BindException(e);
            } catch (IOException e) {
                throw new BindException(e);
            }
        }
    }

    /** Parses the stream with validation turned on; throws on invalid XML. */
    private void validate(InputStream in) throws SAXException, IOException {
        new DocumentLoader().loadDocument(in, true);
    }

    /**
     * Writes the object into the given directory, creating the directory
     * first when it does not exist.
     *
     * @throws BindException when the directory cannot be created
     */
    public void write(Object domain, String dir, String fileName, boolean validating){
        String path = "";
        if (dir != null) {
            File directory = new File(dir);
            if (!directory.exists()) {
                if (!directory.mkdir())
                    throw new BindException("directory ["+directory+"] does not exist and cannot be created");
            }
            path = directory.getAbsolutePath() + File.separator;
        }
        write(domain, path + fileName, validating);
    }

    /** Returns the target file, creating it (and logging) when absent. */
    private File getFile(String fileName) {
        File file = null;
        try {
            file = new File(fileName);
            if (!file.exists()) {
                log.info("creating new file for writting with name " + fileName);
                file.createNewFile();
            }
        } catch (IOException e) {
            throw new BindException(e);
        }
        return file;
    }

    /**
     * Static registry mapping each writable domain type to its XML root node
     * name and schema location.
     */
    // TODO: move this mapping into a config file?
    public static class WriterMapping {

        // FIX: use Class<?> instead of the raw Class type throughout.
        private static Map<Class<?>, String[]> MAPPING = new HashMap<Class<?>, String[]>();
        static {
            MAPPING.put(ApplicationMapping.class, new String[]{"application-mapping", "http://qafe.com/schema http://www.qafe.com/schema/2.2/application-mapping.xsd"});
            MAPPING.put(ApplicationStack.class, new String[]{"applications", "http://qafe.com/schema http://www.qafe.com/schema/application-context.xsd"});
            MAPPING.put(QueryContainer.class, new String[]{QueryContainer.ROOT_ELEMENT_NAME, "http://qafe.com/schema http://www.qafe.com/schema/application-statements.xsd"});
            MAPPING.put(Messages.class, new String[]{"messages", "http://qafe.com/schema http://www.qafe.com/schema/application-messages.xsd"});
            MAPPING.put(FilterRules.class, new String[]{"filter-rules", "http://qafe.com/schema http://www.qafe.com/schema/filter-rule.xsd"});
        }

        private static String getRootNode(Class<?> clazz){
            return get(clazz)[0];
        }

        private static String getSchemaLocation(Class<?> clazz){
            return get(clazz)[1];
        }

        private static String[] get(Class<?> clazz){
            if (!MAPPING.containsKey(clazz)) {
                throw new IllegalArgumentException("Unimplemented writer object ["+clazz+"]");
            }
            return MAPPING.get(clazz);
        }
    }
}
| apache-2.0 |
jexp/idea2 | xml/impl/src/com/intellij/codeInspection/htmlInspections/AddHtmlTagOrAttributeToCustomsIntention.java | 3833 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.htmlInspections;
import com.intellij.CommonBundle;
import com.intellij.codeInsight.daemon.HighlightDisplayKey;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.codeInspection.ModifiableModel;
import com.intellij.codeInspection.ex.LocalInspectionToolWrapper;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.psi.PsiFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.xml.XmlBundle;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
/**
* Created by IntelliJ IDEA.
* User: Maxim.Mossienko
* Date: Jul 6, 2006
* Time: 5:08:37 PM
* To change this template use File | Settings | File Templates.
*/
public class AddHtmlTagOrAttributeToCustomsIntention implements IntentionAction {
private final String myName;
private final int myType;
private final String myInspectionName;
public AddHtmlTagOrAttributeToCustomsIntention(String shortName, String name, int type) {
myInspectionName = shortName;
myName = name;
myType = type;
}
@NotNull
public String getText() {
if (myType == XmlEntitiesInspection.UNKNOWN_TAG) {
return XmlBundle.message("add.custom.html.tag", myName);
}
if (myType == XmlEntitiesInspection.UNKNOWN_ATTRIBUTE) {
return XmlBundle.message("add.custom.html.attribute", myName);
}
if (myType == XmlEntitiesInspection.NOT_REQUIRED_ATTRIBUTE) {
return XmlBundle.message("add.optional.html.attribute", myName);
}
return getFamilyName();
}
@NotNull
public String getFamilyName() {
return XmlBundle.message("fix.html.family");
}
public boolean isAvailable(@NotNull Project project, Editor editor, PsiFile file) {
return true;
}
public void invoke(@NotNull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
final InspectionProjectProfileManager profileManager = InspectionProjectProfileManager.getInstance(project);
final InspectionProfile inspectionProfile = profileManager.getInspectionProfile();
final ModifiableModel model = inspectionProfile.getModifiableModel();
final LocalInspectionToolWrapper wrapper = (LocalInspectionToolWrapper)model.getInspectionTool(myInspectionName, file);
final XmlEntitiesInspection xmlEntitiesInspection = (XmlEntitiesInspection)wrapper.getTool();
xmlEntitiesInspection.setAdditionalEntries(myType, appendName(xmlEntitiesInspection.getAdditionalEntries(myType)));
model.isProperSetting(HighlightDisplayKey.find(myInspectionName));//update map with non-default settings
try {
model.commit();
}
catch (IOException e) {
Messages.showErrorDialog(project, e.getMessage(), CommonBundle.getErrorTitle());
}
}
public boolean startInWriteAction() {
return false;
}
private String appendName(String toAppend) {
if (toAppend.length() > 0) {
toAppend += "," + myName;
}
else {
toAppend = myName;
}
return toAppend;
}
}
| apache-2.0 |
yb76/cafedemo | Application/src/main/java/com/example/android/cardreader/LoyaltyCardReader.java | 13712 | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.cardreader;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.nfc.NfcAdapter;
import android.nfc.Tag;
import android.nfc.tech.IsoDep;
import android.nfc.tech.MifareUltralight;
import android.widget.Toast;
import com.example.android.common.logger.Log;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.charset.Charset;
import java.util.Arrays;
/**
* Callback class, invoked when an NFC card is scanned while the device is running in reader mode.
*
* Reader mode can be invoked by calling NfcAdapter
*/
public class LoyaltyCardReader implements NfcAdapter.ReaderCallback {
    // NOTE(review): static mutable handle to the currently connected tag; writeTag()
    // depends on it being set by onTagDiscovered() first — confirm no concurrent use.
    public static MifareUltralight mfDep = null;
    private static final String TAG = "LoyaltyCardReader";
    // MIFARE Ultralight page offsets for each stored field. Each field occupies
    // 4 consecutive 4-byte pages (16 ASCII characters), per the writeTag() layout.
    private static final int PAGE_FIRSTNAME = 4;
    private static final int PAGE_LASTNAME = 8;
    private static final int PAGE_EMAIL = 12;
    private static final int PAGE_MOBILE = 18;
    private static final int PAGE_BAL = 22;
    // AID for our loyalty card service.
    private static final String SAMPLE_LOYALTY_CARD_AID = "F222222222";
    // ISO-DEP command HEADER for selecting an AID.
    // Format: [Class | Instruction | Parameter 1 | Parameter 2]
    private static final String SELECT_APDU_HEADER = "00A40400";
    // "OK" status word sent in response to SELECT AID command (0x9000)
    private static final byte[] SELECT_OK_SW = {(byte) 0x90, (byte) 0x00};
    // Weak reference to prevent retain loop. mAccountCallback is responsible for exiting
    // foreground mode before it becomes invalid (e.g. during onPause() or onStop()).
    private WeakReference<AccountCallback> mAccountCallback;

    // Listener notified with the account number once a tag interaction completes.
    public interface AccountCallback {
        public void onAccountReceived(String account);
    }

    public LoyaltyCardReader(AccountCallback accountCallback) {
        mAccountCallback = new WeakReference<AccountCallback>(accountCallback);
    }

    /**
     * Callback when a new tag is discovered by the system.
     *
     * <p>Communication with the card should take place here. Depending on the step
     * recorded in the {@code cardInfo} singleton, this either debits/credits the
     * stored balance, writes customer details, or reads all fields back.
     *
     * @param tag Discovered tag
     */
    @Override
    public void onTagDiscovered(Tag tag) {
        Log.i(TAG, "New tag discovered");
        // Android's Host-based Card Emulation (HCE) feature implements the ISO-DEP (ISO 14443-4)
        // protocol.
        //
        // In order to communicate with a device using HCE, the discovered tag should be processed
        // using the IsoDep class.
        /* IsoDep isoDep = IsoDep.get(tag);
        if (isoDep != null) {
            try {
                // Connect to the remote NFC device
                isoDep.connect();
                // Build SELECT AID command for our loyalty card service.
                // This command tells the remote device which service we wish to communicate with.
                Log.i(TAG, "Requesting remote AID: " + SAMPLE_LOYALTY_CARD_AID);
                byte[] command = BuildSelectApdu(SAMPLE_LOYALTY_CARD_AID);
                // Send command to remote device
                Log.i(TAG, "Sending: " + ByteArrayToHexString(command));
                byte[] result = isoDep.transceive(command);
                // If AID is successfully selected, 0x9000 is returned as the status word (last 2
                // bytes of the result) by convention. Everything before the status word is
                // optional payload, which is used here to hold the account number.
                int resultLength = result.length;
                byte[] statusWord = {result[resultLength-2], result[resultLength-1]};
                byte[] payload = Arrays.copyOf(result, resultLength-2);
                if (Arrays.equals(SELECT_OK_SW, statusWord)) {
                    // The remote NFC device will immediately respond with its stored account number
                    String accountNumber = new String(payload, "UTF-8");
                    Log.i(TAG, "Received: " + accountNumber);
                    // Inform CardReaderFragment of received account number
                    mAccountCallback.get().onAccountReceived(accountNumber);
                }
            } catch (IOException e) {
                Log.e(TAG, "Error communicating with card: " + e.toString());
            }
        }
        */
        // Which operation to perform is taken from the shared cardInfo state machine.
        int nextstep = cardInfo.getInstance().getNextStep() ;
        mfDep = MifareUltralight.get(tag);
        if (mfDep != null) {
            try {
                // Connect to the remote NFC device
                mfDep.connect();
                Log.i(TAG, "Connected");
                if (nextstep == cardInfo.step_action.STEP_PURCHASE.ordinal()) {
                    // Purchase: read the stored balance, subtract the purchase amount,
                    // clamp at zero, and write the new balance back to the card.
                    Log.i(TAG, "purchase");
                    byte[] payload = mfDep.readPages(PAGE_BAL); // 16bytes
                    long bal = 0;
                    try {
                        bal = Long.parseLong(ByteArrayToAscii(payload));
                    } catch (NumberFormatException nfe) {
                        // Uninitialized/garbage balance field — treat as zero.
                        bal = 0;
                    }
                    //Log.i(TAG, "bal ="+ bal+",purchase="+cardInfo.getInstance().getPurchase());
                    bal = bal - cardInfo.getInstance().getPurchase();
                    if (bal < 0) {
                        bal = 0;// TODO
                    }
                    // Balance is stored as a zero-padded 16-digit ASCII number.
                    writeTag(PAGE_BAL, String.format("%-16s", String.format("%016d", bal)));
                    cardInfo.getInstance().setBal(bal);
                }
                if (nextstep == cardInfo.step_action.STEP_DEPOSIT.ordinal() ||
                        nextstep == cardInfo.step_action.STEP_TAPCARD_WRITE_CUST_DEPOSIT.ordinal()) {
                    // Deposit: read the stored balance, add the deposit amount, write it back.
                    byte[] payload = mfDep.readPages(PAGE_BAL); // 16bytes
                    long bal = 0;
                    try {
                        bal = Long.parseLong(ByteArrayToAscii(payload));
                    } catch (NumberFormatException nfe) {
                        bal = 0;
                    }
                    bal = bal + cardInfo.getInstance().getDeposit();
                    Log.i(TAG, "readpages :" + ByteArrayToAscii(payload));
                    writeTag(PAGE_BAL, String.format("%-16s", String.format("%016d", bal)));
                    Log.i(TAG, "writepages :" + String.format("%016d", bal));
                    cardInfo.getInstance().setBal(bal);
                }
                if (nextstep == cardInfo.step_action.STEP_TAPCARD_WRITE_CUST.ordinal() ||
                        nextstep == cardInfo.step_action.STEP_TAPCARD_WRITE_CUST_DEPOSIT.ordinal()) {
                    // Write customer details. Each field is left-padded to exactly 16 chars;
                    // NOTE(review): writeTag() assumes the string is at least 16 chars — a longer
                    // source value would be silently truncated to 16. Confirm field limits upstream.
                    writeTag(PAGE_FIRSTNAME, String.format("%-16s", cardInfo.getInstance().getFirstname()));
                    writeTag(PAGE_LASTNAME, String.format("%-16s", cardInfo.getInstance().getLastname()));
                    writeTag(PAGE_EMAIL, String.format("%-16s", cardInfo.getInstance().getEmail()));
                    writeTag(PAGE_MOBILE, String.format("%-16s", cardInfo.getInstance().getMobile()));
                    Log.i(TAG, "writepages done" );
                }
                if (nextstep == cardInfo.step_action.STEP_TAPCARD_READ.ordinal())
                {
                    // Read-back: pull all stored fields and publish them into cardInfo.
                    Log.i(TAG, "check balance");
                    byte[] payload_fname = mfDep.readPages(PAGE_FIRSTNAME); // 16bytes
                    byte[] payload_lname = mfDep.readPages(PAGE_LASTNAME); // 16bytes
                    byte[] payload_email = mfDep.readPages(PAGE_EMAIL); // 16bytes
                    byte[] payload_mobile = mfDep.readPages(PAGE_MOBILE); // 16bytes
                    byte[] payload_bal = mfDep.readPages(PAGE_BAL); // 16bytes
                    if (true) {
                        //if(payload_fname.length==16&&payload_lname.length==16 &&payload_email.length==16&&payload_mobile.length==16 ) {
                        cardInfo.getInstance().setCust( ByteArrayToAscii(payload_fname),
                                ByteArrayToAscii(payload_lname), ByteArrayToAscii(payload_email), ByteArrayToAscii(payload_mobile)
                        );
                    } else {
                        // Unreachable while the guard above is hard-coded to true.
                        cardInfo.getInstance().setCust(" ", " ", " ", " ");
                    }
                    if (payload_bal.length == 16) {
                        long bal = 0;
                        try {
                            bal = Long.parseLong(ByteArrayToAscii(payload_bal));
                        } catch (NumberFormatException nfe) {
                            bal = 0;
                        }
                        cardInfo.getInstance().setBal(bal);
                    }
                    Log.i(TAG, "pages: " + ByteArrayToAscii(payload_fname));
                }
            } catch (IOException e) {
                Log.e(TAG, "Error communicating with card: " + e.toString());
            } finally {
                // Always release the tag connection, even after an I/O failure.
                if (mfDep != null) {
                    try {
                        mfDep.close();
                    }
                    catch (IOException e) {
                        Log.e(TAG, "Error closing tag...", e);
                    }
                }
            }
        }
        // NOTE(review): hard-coded demo account number — always reported, even when the
        // tag interaction above failed. Confirm this is intentional for the demo flow.
        String accountNumber = "98654321";
        mAccountCallback.get().onAccountReceived(accountNumber);
    }

    /**
     * Build APDU for SELECT AID command. This command indicates which service a reader is
     * interested in communicating with. See ISO 7816-4.
     *
     * @param aid Application ID (AID) to select
     * @return APDU for SELECT AID command
     */
    public static byte[] BuildSelectApdu(String aid) {
        // Format: [CLASS | INSTRUCTION | PARAMETER 1 | PARAMETER 2 | LENGTH | DATA]
        return HexStringToByteArray(SELECT_APDU_HEADER + String.format("%02X", aid.length() / 2) + aid);
    }

    /**
     * Utility class to convert a byte array to a hexadecimal string.
     *
     * @param bytes Bytes to convert
     * @return String, containing hexadecimal representation.
     */
    public static String ByteArrayToHexString(byte[] bytes) {
        final char[] hexArray = {'0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F'};
        char[] hexChars = new char[bytes.length * 2];
        int v;
        for ( int j = 0; j < bytes.length; j++ ) {
            v = bytes[j] & 0xFF;
            hexChars[j * 2] = hexArray[v >>> 4];
            hexChars[j * 2 + 1] = hexArray[v & 0x0F];
        }
        return new String(hexChars);
    }

    /**
     * Utility class to convert a hexadecimal string to a byte string.
     *
     * <p>Behavior with input strings containing non-hexadecimal characters is undefined.
     *
     * @param s String containing hexadecimal characters to convert
     * @return Byte array generated from input
     */
    public static byte[] HexStringToByteArray(String s) {
        int len = s.length();
        byte[] data = new byte[len / 2];
        for (int i = 0; i < len; i += 2) {
            data[i / 2] = (byte) ((Character.digit(s.charAt(i), 16) << 4)
                    + Character.digit(s.charAt(i+1), 16));
        }
        return data;
    }

    // Converts a byte array to a String, one char per byte.
    // Throws IllegalArgumentException on any byte outside the 0..127 ASCII range.
    public static String ByteArrayToAscii(byte[] data) {
        StringBuilder sb = new StringBuilder(data.length);
        for (int i = 0; i < data.length; ++ i) {
            if (data[i] < 0) throw new IllegalArgumentException();
            sb.append((char) data[i]);
        }
        return sb.toString();
    }

    // Writes the first 16 characters of tagText into 4 consecutive 4-byte pages,
    // starting at pageoffset, using the already-connected static mfDep handle.
    // NOTE(review): tagText shorter than 16 chars triggers StringIndexOutOfBoundsException,
    // which is not caught here (only IOException is). Callers pad with %-16s.
    public void writeTag( int pageoffset, String tagText) {
        //MifareUltralight ultralight = MifareUltralight.get(tag);
        try {
            //ultralight.connect();
            int idx = 0;
            mfDep.writePage(pageoffset, tagText.substring(idx,idx+4).getBytes(Charset.forName("US-ASCII")));
            idx = idx + 4;
            mfDep.writePage(pageoffset+1, tagText.substring(idx,idx+4).getBytes(Charset.forName("US-ASCII")));
            idx = idx + 4;
            mfDep.writePage(pageoffset+2, tagText.substring(idx,idx+4).getBytes(Charset.forName("US-ASCII")));
            idx = idx + 4;
            mfDep.writePage(pageoffset+3, tagText.substring(idx,idx+4).getBytes(Charset.forName("US-ASCII")));
        } catch (IOException e) {
            Log.e(TAG, "IOException while closing MifareUltralight...", e);
        } finally {
            //try {
            //ultralight.close();
            //} catch (IOException e) {
            // Log.e(TAG, "IOException while closing MifareUltralight...", e);
            //}
        }
    }

    // Reads 16 bytes starting at page 4 (the first-name field) from the given tag
    // and decodes them as US-ASCII. Returns null if the read fails.
    public String readTag(Tag tag) {
        MifareUltralight mifare = MifareUltralight.get(tag);
        try {
            mifare.connect();
            byte[] payload = mifare.readPages(4);
            return new String(payload, Charset.forName("US-ASCII"));
        } catch (IOException e) {
            Log.e(TAG, "IOException while writing MifareUltralight message...", e);
        } finally {
            if (mifare != null) {
                try {
                    mifare.close();
                }
                catch (IOException e) {
                    Log.e(TAG, "Error closing tag...", e);
                }
            }
        }
        return null;
    }
}
| apache-2.0 |
sacjaya/siddhi | modules/siddhi-extensions/string/src/test/java/org/wso2/siddhi/extension/string/ContainsFunctionExtensionTestCase.java | 3695 | /*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.extension.string;
import junit.framework.Assert;
import org.apache.log4j.Logger;
import org.junit.Before;
import org.junit.Test;
import org.wso2.siddhi.core.ExecutionPlanRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.event.Event;
import org.wso2.siddhi.core.query.output.callback.QueryCallback;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.core.util.EventPrinter;
import org.wso2.siddhi.extension.string.test.util.SiddhiTestHelper;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Tests the {@code str:contains} Siddhi extension: sends three events through a
 * query projecting {@code str:contains(symbol, 'WSO2')} and asserts the boolean
 * result delivered to the query callback for each event, in arrival order.
 */
public class ContainsFunctionExtensionTestCase {
    static final Logger log = Logger.getLogger(ContainsFunctionExtensionTestCase.class);
    // Number of output events received so far; also used to order the per-event asserts.
    private AtomicInteger count = new AtomicInteger(0);
    // Set from the Siddhi callback thread; volatile so the test thread sees the update.
    private volatile boolean eventArrived;

    @Before
    public void init() {
        // Reset shared state so each test starts from a clean slate.
        count.set(0);
        eventArrived = false;
    }

    @Test
    public void testContainsFunctionExtension() throws InterruptedException {
        log.info("ContainsFunctionExtensionTestCase TestCase");
        SiddhiManager siddhiManager = new SiddhiManager();
        String inStreamDefinition = "@config(async = 'true')define stream inputStream (symbol string, price long, " +
                "volume long);";
        String query = ("@info(name = 'query1') " +
                "from inputStream " +
                "select symbol , str:contains(symbol, 'WSO2') as isContains " +
                "insert into outputStream;");
        ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(inStreamDefinition +
                query);
        executionPlanRuntime.addCallback("query1", new QueryCallback() {
            @Override
            public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) {
                EventPrinter.print(timeStamp, inEvents, removeEvents);
                for (Event inEvent : inEvents) {
                    count.incrementAndGet();
                    // Expected results, keyed by arrival order:
                    // 1: "IBM" does not contain "WSO2"; 2 and 3 both do.
                    if (count.get() == 1) {
                        Assert.assertEquals(false, inEvent.getData(1));
                    }
                    if (count.get() == 2) {
                        Assert.assertEquals(true, inEvent.getData(1));
                    }
                    if (count.get() == 3) {
                        Assert.assertEquals(true, inEvent.getData(1));
                    }
                    eventArrived = true;
                }
            }
        });
        InputHandler inputHandler = executionPlanRuntime.getInputHandler("inputStream");
        executionPlanRuntime.start();
        inputHandler.send(new Object[]{"IBM", 700f, 100l});
        inputHandler.send(new Object[]{"WSO2", 60.5f, 200l});
        inputHandler.send(new Object[]{"One of the best middleware is from WSO2.", 60.5f, 200l});
        // The stream is async: poll until all 3 events arrive (or the 60s timeout lapses).
        SiddhiTestHelper.waitForEvents(100, 3, count, 60000);
        Assert.assertEquals(3, count.get());
        Assert.assertTrue(eventArrived);
        executionPlanRuntime.shutdown();
    }
}
| apache-2.0 |
jbeecham/ovirt-engine | backend/manager/modules/restapi/jaxrs/src/main/java/org/ovirt/engine/api/restapi/resource/BackendReadOnlyDeviceResource.java | 1675 | package org.ovirt.engine.api.restapi.resource;
import org.ovirt.engine.api.model.BaseDevice;
import org.ovirt.engine.api.model.BaseDevices;
import org.ovirt.engine.api.resource.ReadOnlyDeviceResource;
import org.ovirt.engine.api.resource.CreationResource;
import org.ovirt.engine.core.common.businessentities.IVdcQueryable;
import org.ovirt.engine.core.compat.Guid;
/**
 * Read-only REST resource for a single device, backed by its parent devices
 * collection. Lookups and parent-link resolution are delegated to that collection.
 *
 * @param <D> the REST model type of the device
 * @param <C> the REST collection model type
 * @param <Q> the backend business-entity type
 */
public class BackendReadOnlyDeviceResource<D extends BaseDevice, C extends BaseDevices, Q extends IVdcQueryable> extends AbstractBackendActionableResource<D, Q> implements ReadOnlyDeviceResource<D> {
    protected AbstractBackendReadOnlyDevicesResource<D, C, Q> collection;

    public BackendReadOnlyDeviceResource(Class<D> modelType,
            Class<Q> entityType,
            Guid guid,
            AbstractBackendReadOnlyDevicesResource<D, C, Q> collection,
            String... subCollections) {
        super(guid.toString(), modelType, entityType, subCollections);
        this.collection = collection;
    }

    /** Fetches the device by id; answers 404 when the backend has no such entity. */
    @Override
    public D get() {
        final Q backendEntity = collection.lookupEntity(guid);
        return backendEntity == null
                ? notFound()
                : addLinks(populate(map(backendEntity), backendEntity));
    }

    @Override
    public CreationResource getCreationSubresource(String ids) {
        return inject(new BackendCreationResource(ids));
    }

    /** Parent references are owned by the collection, so delegate to it. */
    @Override
    public D addParents(D device) {
        return collection.addParents(device);
    }

    // Package-private accessor used by sibling resources/tests.
    AbstractBackendReadOnlyDevicesResource<D, C, Q> getCollection() {
        return collection;
    }
}
| apache-2.0 |
alex09x/qsh2data | src/main/java/com/alex09x/qsh/reader/Stream.java | 754 | package com.alex09x.qsh.reader;
import com.alex09x.qsh.reader.Utils;
import java.io.DataInput;
import java.io.IOException;
import java.sql.Timestamp;
/**
* Created by alex on 12.01.14.
*/
/**
 * Base class for typed QSH record streams. The constructor consumes the stream
 * header (a colon-separated line read via {@link Utils#readString}) and extracts
 * the instrument symbol (field 1) and price step (field 4).
 *
 * @param <T> the record type produced by {@link #read}
 */
public abstract class Stream<T> {
    protected final DataInput dataInput;
    // Instrument symbol parsed from the stream header.
    protected final String symbol;
    // Price step (minimal price increment) parsed from the stream header.
    protected final double stepPrice;

    /**
     * Reads and parses the stream header from {@code dataInput}.
     *
     * @throws IOException if the header cannot be read or has fewer than 5 fields
     */
    public Stream(DataInput dataInput) throws IOException {
        this.dataInput = dataInput;
        String data = Utils.readString(dataInput);
        String[] split = data.split(":");
        if (split.length < 5) {
            // Previously a malformed header surfaced as a raw ArrayIndexOutOfBoundsException;
            // report it as an I/O problem with context instead.
            throw new IOException("Malformed stream header: " + data);
        }
        this.symbol = split[1];
        // parseDouble yields the primitive directly (Double.valueOf would box then unbox).
        this.stepPrice = Double.parseDouble(split[4]);
        System.out.printf("Instrument = %s%n", symbol);
    }

    /**
     * Reads the next record from the stream.
     *
     * @param currentDateTime the current stream timestamp context
     * @return the decoded record
     * @throws IOException on read failure
     */
    public abstract T read(Timestamp currentDateTime) throws IOException;
}
| apache-2.0 |
ScalaSthlm/alpakka-integration-patterns | playground/src/main/scala/scalasthlm/alpakka/playground/filesystem/impl/JimfsView.java | 7630 | package scalasthlm.alpakka.playground.filesystem.impl;
import org.apache.ftpserver.ftplet.FileSystemView;
import org.apache.ftpserver.ftplet.FtpException;
import org.apache.ftpserver.ftplet.FtpFile;
import org.apache.ftpserver.ftplet.User;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
/**
* File system view based on the in-memory jimfs file system. The root in this
* class is the user virtual root (/).
*/
public class JimfsView implements FileSystemView {
    private final Logger LOG = LoggerFactory.getLogger(JimfsView.class);
    // this will be the jimfs file system in runtime.
    private FileSystem fileSystem;
    // the root directory will always end with '/'.
    private String rootDir;
    // the first and the last character will always be '/'
    // It is always with respect to the root directory.
    private String currDir;
    private User user;
    // When true, path segments are matched case-insensitively against directory entries.
    private boolean caseInsensitive = false;

    /**
     * Creates a view rooted at the user's home directory inside the given
     * (jimfs-backed) file system.
     *
     * @throws IllegalArgumentException if any argument or the user's home directory is null
     */
    public JimfsView(FileSystem fileSystem, User user, boolean caseInsensitive)
            throws FtpException {
        if (fileSystem == null) {
            throw new IllegalArgumentException("filesystem can not be null");
        }
        if (user == null) {
            throw new IllegalArgumentException("user can not be null");
        }
        if (user.getHomeDirectory() == null) {
            throw new IllegalArgumentException("user home directory can not be null");
        }
        this.fileSystem = fileSystem;
        this.caseInsensitive = caseInsensitive;
        // add last '/' if necessary
        String rootDir = user.getHomeDirectory();
        rootDir = normalizeSeparateChar(rootDir);
        if (!rootDir.endsWith("/")) {
            rootDir += '/';
        }
        LOG.debug("Jimfs filesystem view created by user \"{}\" with root \"{}\"", user.getName(), rootDir);
        this.rootDir = rootDir;
        this.user = user;
        currDir = "/";
    }

    /**
     * Get the user home directory. It would be the file system root
     * for the specific user.
     */
    public FtpFile getHomeDirectory() throws FtpException {
        return new JimfsFtpFile("/", fileSystem.getPath(rootDir), user);
    }

    /**
     * Get the current directory.
     */
    public FtpFile getWorkingDirectory() throws FtpException {
        FtpFile fileObj;
        if (currDir.equals("/")) {
            fileObj = getHomeDirectory();
        } else {
            // currDir is root-relative and starts with '/', so drop that first char
            // before resolving it against the physical root.
            Path path = fileSystem.getPath(rootDir, currDir.substring(1));
            fileObj = new JimfsFtpFile(currDir, path, user);
        }
        return fileObj;
    }

    /**
     * Get the file object for a user-supplied (possibly relative) path.
     */
    public FtpFile getFile(String file) {
        String physicalName = getPhysicalName(file);
        Path filePath = fileSystem.getPath(physicalName);
        // strip the root directory and return
        String userFileName = physicalName.substring(rootDir.length() - 1);
        return new JimfsFtpFile(userFileName, filePath, user);
    }

    /**
     * Change directory. Returns false (without changing state) when the target
     * is not an existing directory.
     */
    public boolean changeWorkingDirectory(String dir) throws FtpException {
        // not a directory - return false
        dir = getPhysicalName(dir);
        Path dirPath = fileSystem.getPath(dir);
        if (!Files.isDirectory(dirPath)) {
            return false;
        }
        // strip user root and add last '/' if necessary
        dir = dir.substring(rootDir.length() - 1);
        if (dir.charAt(dir.length() - 1) != '/') {
            dir = dir + '/';
        }
        currDir = dir;
        return true;
    }

    /**
     * Is the file content random accessible?
     */
    public boolean isRandomAccessible() {
        return true;
    }

    /**
     * Dispose the file system.
     */
    public void dispose() {
        // Nothing to do
    }

    // Converts the file system's separator (and backslashes) to forward slashes.
    private String normalizeSeparateChar(final String pathName) {
        String normalizePathName = pathName.replace(fileSystem.getSeparator(), "/");
        return normalizePathName.replace('\\', '/');
    }

    /**
     * Resolves a user-supplied path to an absolute physical path, handling
     * relative paths, '.', '..', '~', and (optionally) case-insensitive segment
     * matching. '..' never escapes the user root, and the result is clamped to
     * the root if it would fall outside it.
     */
    private String getPhysicalName(final String file) {
        // get the starting directory
        String normalizedRootDir = normalizeSeparateChar(rootDir);
        if (normalizedRootDir.charAt(normalizedRootDir.length() - 1) != '/') {
            normalizedRootDir += '/';
        }
        String normalizedFileName = normalizeSeparateChar(file);
        String resArg;
        String normalizedCurrDir = currDir;
        if (normalizedFileName.charAt(0) != '/') {
            // Relative path: anchor the resolution at the current directory.
            if (normalizedCurrDir == null || normalizedCurrDir.length() == 0) {
                normalizedCurrDir = "/";
            }
            normalizedCurrDir = normalizeSeparateChar(normalizedCurrDir);
            if (normalizedCurrDir.charAt(0) != '/') {
                normalizedCurrDir = '/' + normalizedCurrDir;
            }
            if (normalizedCurrDir.charAt(normalizedCurrDir.length() - 1) != '/') {
                normalizedCurrDir += '/';
            }
            resArg = normalizedRootDir + normalizedCurrDir.substring(1);
        } else {
            resArg = normalizedRootDir;
        }
        // strip last '/'
        if (resArg.charAt(resArg.length() - 1) == '/') {
            resArg = resArg.substring(0, resArg.length() - 1);
        }
        // replace ., ~ and ..
        // in this loop resArg will never end with '/'
        StringTokenizer st = new StringTokenizer(normalizedFileName, "/");
        while (st.hasMoreTokens()) {
            String tok = st.nextToken();
            // . => current directory
            if (tok.equals(".")) {
                continue;
            }
            // .. => parent directory (if not root)
            if (tok.equals("..")) {
                if (resArg.startsWith(normalizedRootDir)) {
                    int slashIndex = resArg.lastIndexOf("/");
                    if (slashIndex != -1) {
                        resArg = resArg.substring(0, slashIndex);
                    }
                }
                continue;
            }
            // ~ => home directory (in this case is the root directory)
            if (tok.equals("~")) {
                resArg = normalizedRootDir.substring(0, normalizedRootDir.length() - 1);
                continue;
            }
            if (caseInsensitive) {
                // Replace the token with the actual directory entry it matches,
                // ignoring case. Keep the token as typed when nothing matches.
                Path dir = fileSystem.getPath(resArg);
                List<Path> matches = new ArrayList<>(0);
                // FIX: the original leaked this DirectoryStream (it was never closed);
                // use try-with-resources so it is released on every path.
                try (DirectoryStream<Path> dirStream =
                        Files.newDirectoryStream(dir, new NameEqualsPathFilter(tok, true))) {
                    for (Path match : dirStream) {
                        matches.add(match);
                    }
                } catch (IOException t) {
                    // ignore — e.g. resArg is not a listable directory yet
                }
                if (matches.size() > 0) {
                    tok = matches.get(0).getFileName().toString();
                }
            }
            resArg = resArg + '/' + tok;
        }
        // add last slash if necessary
        if ((resArg.length()) + 1 == normalizedRootDir.length()) {
            resArg += '/';
        }
        // final check: clamp anything that escaped the root back to the root
        if (!resArg.regionMatches(0, normalizedRootDir, 0, normalizedRootDir
                .length())) {
            resArg = normalizedRootDir;
        }
        return resArg;
    }
}
| apache-2.0 |
xasx/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/engine/impl/history/event/HistoryEvent.java | 9101 | /*
* Copyright © 2013-2018 camunda services GmbH and various authors (info@camunda.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.history.event;
import java.io.Serializable;
import java.util.Date;
import org.camunda.bpm.engine.impl.db.DbEntity;
import org.camunda.bpm.engine.impl.db.HistoricEntity;
import org.camunda.bpm.engine.impl.db.entitymanager.DbEntityManager;
import org.camunda.bpm.engine.impl.history.handler.HistoryEventHandler;
/**
* <p>The base class for all history events.</p>
*
* <p>A history event contains data about an event that has happened
* in a process instance. Such an event may be the start of an activity,
* the end of an activity, a task instance that is created or other similar
* events...</p>
*
* <p>History events contain data in a serializable form. Some
* implementations may persist events directly or may serialize
* them as an intermediate representation for later processing
* (ie. in an asynchronous implementation).</p>
*
* <p>This class implements {@link DbEntity}. This was chosen so
* that {@link HistoryEvent}s can be easily persisted using the
* {@link DbEntityManager}. This may not be used by all {@link HistoryEventHandler}
* implementations but it does also not cause harm.</p>
*
* @author Daniel Meyer
*
*/
public class HistoryEvent implements Serializable, DbEntity, HistoricEntity {
private static final long serialVersionUID = 1L;
// constants deprecated since 7.2
@Deprecated
public static final String ACTIVITY_EVENT_TYPE_START = HistoryEventTypes.ACTIVITY_INSTANCE_START.getEventName();
@Deprecated
public static final String ACTIVITY_EVENT_TYPE_UPDATE = HistoryEventTypes.ACTIVITY_INSTANCE_END.getEventName();
@Deprecated
public static final String ACTIVITY_EVENT_TYPE_END = HistoryEventTypes.ACTIVITY_INSTANCE_END.getEventName();
@Deprecated
public static final String TASK_EVENT_TYPE_CREATE = HistoryEventTypes.TASK_INSTANCE_CREATE.getEventName();
@Deprecated
public static final String TASK_EVENT_TYPE_UPDATE = HistoryEventTypes.TASK_INSTANCE_UPDATE.getEventName();
@Deprecated
public static final String TASK_EVENT_TYPE_COMPLETE = HistoryEventTypes.TASK_INSTANCE_COMPLETE.getEventName();
@Deprecated
public static final String TASK_EVENT_TYPE_DELETE = HistoryEventTypes.TASK_INSTANCE_DELETE.getEventName();
@Deprecated
public static final String VARIABLE_EVENT_TYPE_CREATE = HistoryEventTypes.VARIABLE_INSTANCE_CREATE.getEventName();
@Deprecated
public static final String VARIABLE_EVENT_TYPE_UPDATE = HistoryEventTypes.VARIABLE_INSTANCE_UPDATE.getEventName();
@Deprecated
public static final String VARIABLE_EVENT_TYPE_DELETE = HistoryEventTypes.VARIABLE_INSTANCE_DELETE.getEventName();
@Deprecated
public static final String FORM_PROPERTY_UPDATE = HistoryEventTypes.FORM_PROPERTY_UPDATE.getEventName();
@Deprecated
public static final String INCIDENT_CREATE = HistoryEventTypes.INCIDENT_CREATE.getEventName();
@Deprecated
public static final String INCIDENT_DELETE = HistoryEventTypes.INCIDENT_DELETE.getEventName();
@Deprecated
public static final String INCIDENT_RESOLVE = HistoryEventTypes.INCIDENT_RESOLVE.getEventName();
public static final String IDENTITY_LINK_ADD = HistoryEventTypes.IDENTITY_LINK_ADD.getEventName();
public static final String IDENTITY_LINK_DELETE = HistoryEventTypes.IDENTITY_LINK_DELETE.getEventName();
/** each {@link HistoryEvent} has a unique id */
protected String id;
/** the root process instance in which the event has happened */
protected String rootProcessInstanceId;
/** the process instance in which the event has happened */
protected String processInstanceId;
/** the id of the execution in which the event has happened */
protected String executionId;
/** the id of the process definition */
protected String processDefinitionId;
/** the key of the process definition */
protected String processDefinitionKey;
/** the name of the process definition */
protected String processDefinitionName;
/** the version of the process definition */
protected Integer processDefinitionVersion;
/** the case instance in which the event has happened */
protected String caseInstanceId;
/** the id of the case execution in which the event has happened */
protected String caseExecutionId;
/** the id of the case definition */
protected String caseDefinitionId;
/** the key of the case definition */
protected String caseDefinitionKey;
/** the name of the case definition */
protected String caseDefinitionName;
/**
* The type of the activity audit event.
* @see HistoryEventType#getEventName()
* */
protected String eventType;
protected long sequenceCounter;
/* the time when the history event will be deleted */
protected Date removalTime;
// getters / setters ///////////////////////////////////
public String getProcessInstanceId() {
return processInstanceId;
}
public void setProcessInstanceId(String processInstanceId) {
this.processInstanceId = processInstanceId;
}
public String getRootProcessInstanceId() {
return rootProcessInstanceId;
}
public void setRootProcessInstanceId(String rootProcessInstanceId) {
this.rootProcessInstanceId = rootProcessInstanceId;
}
public String getExecutionId() {
return executionId;
}
public void setExecutionId(String executionId) {
this.executionId = executionId;
}
public String getProcessDefinitionId() {
return processDefinitionId;
}
public void setProcessDefinitionId(String processDefinitionId) {
this.processDefinitionId = processDefinitionId;
}
public String getProcessDefinitionKey() {
return processDefinitionKey;
}
public void setProcessDefinitionKey(String processDefinitionKey) {
this.processDefinitionKey = processDefinitionKey;
}
public String getProcessDefinitionName() {
return processDefinitionName;
}
public void setProcessDefinitionName(String processDefinitionName) {
this.processDefinitionName = processDefinitionName;
}
public Integer getProcessDefinitionVersion() {
return processDefinitionVersion;
}
public void setProcessDefinitionVersion(Integer processDefinitionVersion) {
this.processDefinitionVersion = processDefinitionVersion;
}
public String getCaseDefinitionName() {
return caseDefinitionName;
}
public void setCaseDefinitionName(String caseDefinitionName) {
this.caseDefinitionName = caseDefinitionName;
}
public String getCaseDefinitionKey() {
return caseDefinitionKey;
}
public void setCaseDefinitionKey(String caseDefinitionKey) {
this.caseDefinitionKey = caseDefinitionKey;
}
public String getCaseDefinitionId() {
return caseDefinitionId;
}
public void setCaseDefinitionId(String caseDefinitionId) {
this.caseDefinitionId = caseDefinitionId;
}
public String getCaseInstanceId() {
return caseInstanceId;
}
public void setCaseInstanceId(String caseInstanceId) {
this.caseInstanceId = caseInstanceId;
}
public String getCaseExecutionId() {
return caseExecutionId;
}
public void setCaseExecutionId(String caseExecutionId) {
this.caseExecutionId = caseExecutionId;
}
public void setId(String id) {
this.id = id;
}
public String getId() {
return id;
}
public String getEventType() {
return eventType;
}
public void setEventType(String eventType) {
this.eventType = eventType;
}
public long getSequenceCounter() {
return sequenceCounter;
}
public void setSequenceCounter(long sequenceCounter) {
this.sequenceCounter = sequenceCounter;
}
public Date getRemovalTime() {
return removalTime;
}
public void setRemovalTime(Date removalTime) {
this.removalTime = removalTime;
}
// persistent object implementation ///////////////
public Object getPersistentState() {
// events are immutable
return HistoryEvent.class;
}
// state inspection
public boolean isEventOfType(HistoryEventType type) {
return type.getEventName().equals(eventType);
}
@Override
public String toString() {
return this.getClass().getSimpleName()
+ "[id=" + id
+ ", eventType=" + eventType
+ ", executionId=" + executionId
+ ", processDefinitionId=" + processDefinitionId
+ ", processInstanceId=" + processInstanceId
+ ", rootProcessInstanceId=" + rootProcessInstanceId
+ ", removalTime=" + removalTime
+ "]";
}
}
| apache-2.0 |
wangjiegulu/RapidRouter | app/src/main/java/com/wangjie/rapidrouter/example/ThisRapidRouterMapping.java | 2113 | package com.wangjie.rapidrouter.example;
import com.wangjie.rapidrouter.core.RapidRouterMapping;
import com.wangjie.rapidrouter.core.target.RouterTarget;
import com.wangjie.rapidrouter.example.activity.AActivity;
import com.wangjie.rapidrouter.example.activity.BActivity;
import com.wangjie.rapidrouter.example.activity.CActivity;
import java.util.HashMap;
/**
* Author: wangjie Email: tiantian.china.2@gmail.com Date: 2/8/17.
*/
/**
 * Hand-written router mapping table: registers scheme/host URI pairs against
 * target activities, with the expected query-parameter names and types.
 */
public class ThisRapidRouterMapping extends RapidRouterMapping {
    /** Registers exact-match routes, grouped by scheme ("rr", "sc"). */
    @Override
    public HashMap<String, HashMap<String, RouterTarget>> calcSimpleRouterMapper(HashMap<String, HashMap<String, RouterTarget>> routerMapper) {
        HashMap<String, Class> params;
        // rr://rapidrouter.a -> com.wangjie.rapidrouter.example.activity.AActivity
        params = new HashMap<>();
        params.put("p_name", String.class);
        params.put("p_age", int.class);
        getEnsureMap(routerMapper, "rr").put("rapidrouter.a", new RouterTarget(AActivity.class, params));
        // rr://rapidrouter_extra.a -> com.wangjie.rapidrouter.example.activity.AActivity
        params = new HashMap<>();
        params.put("p_name", String.class);
        params.put("p_age", int.class);
        getEnsureMap(routerMapper, "rr").put("rapidrouter_extra.a", new RouterTarget(AActivity.class, params));
        // rr://rapidrouter.b -> com.wangjie.rapidrouter.example.activity.BActivity
        params = new HashMap<>();
        params.put("id", long.class);
        getEnsureMap(routerMapper, "rr").put("rapidrouter.b", new RouterTarget(BActivity.class, params));
        // NOTE(review): the comment here names CActivity, but the route registers
        // AActivity.class — confirm which target is intended for sc://wangL0vjie.c.
        // // com.wangjie.rapidrouter.example.activity.CActivity
        getEnsureMap(routerMapper, "sc").put("wangL0vjie.c", new RouterTarget(AActivity.class, null));
        return routerMapper;
    }

    /** Registers regex-match routes; keys are full-URI regular expressions. */
    @Override
    public HashMap<String, RouterTarget> calcRegRouterMapper(HashMap<String, RouterTarget> routerMapper) {
        HashMap<String, Class> params;
        params = new HashMap<>();
        params.put("paramOfCActivity", float.class);
        // Matches e.g. rr://wangXjie.c... or sc://wang...jie.x... -> CActivity
        routerMapper.put("((rr)|(sc))://wang.*jie\\.[cx].*", new RouterTarget(CActivity.class, params));
        return routerMapper;
    }
}
| apache-2.0 |
daisy/pipeline-issues | libs/com.xmlcalabash/src/main/java/com/xmlcalabash/drivers/Piperack.java | 2108 | package com.xmlcalabash.drivers;
import com.xmlcalabash.core.XProcConfiguration;
import com.xmlcalabash.core.XProcException;
import com.xmlcalabash.core.XProcRuntime;
import com.xmlcalabash.piperack.PiperackApplication;
import com.xmlcalabash.util.ParseArgs;
import com.xmlcalabash.util.UserArgs;
import net.sf.saxon.s9api.SaxonApiException;
import org.restlet.Component;
import org.restlet.data.Protocol;
/**
* Ths file is part of XMLCalabash.
* Created by ndw on 10/25/13.
*/
public class Piperack {

    /**
     * Entry point for the Piperack web server.
     *
     * <p>Parses command-line arguments, builds the XProc runtime, attaches a
     * {@link PiperackApplication} to a Restlet {@link Component} listening on
     * the configured HTTP port, then polls every 10 seconds until the
     * application reports it has stopped. Argument or configuration errors are
     * reported to stderr and terminate the process with exit code -1.
     *
     * @param args command-line arguments, parsed by {@link ParseArgs#parsePiperack}
     * @throws Exception if the Restlet component fails to start
     */
    public static void main(String[] args) throws Exception {
        UserArgs userArgs = null;
        try {
            userArgs = new ParseArgs().parsePiperack(args);
        } catch (XProcException xe) {
            System.err.println(xe.getMessage());
            System.exit(-1);
        }

        XProcConfiguration config = null;
        XProcRuntime runtime = null;
        try {
            config = userArgs.createConfiguration();
            runtime = new XProcRuntime(config);
        } catch (SaxonApiException e) {
            System.err.println(e.getMessage());
            System.exit(-1);
        }

        if (userArgs.isShowVersion()) {
            System.out.println("Piperack, a web server for running XProc pipelines.");
            XProcConfiguration.showVersion(runtime);
        }

        // Create a component
        Component component = new Component();
        component.getServers().add(Protocol.HTTP, config.piperackPort);

        // Create an application
        PiperackApplication application = new PiperackApplication(config, runtime);

        // Attach the application to the component and start it
        component.getDefaultHost().attach(application);
        component.start();

        // Poll until the application stops, expiring idle pipelines on each pass.
        while (true) {
            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
                // Fix: restore the interrupt status rather than silently swallowing
                // it, so an external interrupt stays observable to the JVM/daemon.
                Thread.currentThread().interrupt();
            }

            // It doesn't seem like this should be necessary!
            if (application.stopped()) {
                System.exit(0);
            }

            application.expirePipelines();
        }
    }
}
| apache-2.0 |
danielgarrard001/umbtransit | ParkingInfoPage.java | 8035 | import javax.swing.*;
import java.awt.*;
import javax.imageio.*;
import java.io.*;
import java.time.*;
import java.time.temporal.*;
import java.lang.Math;
import java.awt.event.*;
import java.awt.Color;
import java.awt.Graphics;
import java.applet.Applet;
import java.awt.geom.Rectangle2D;
import javax.swing.BorderFactory;
/**
 * Undecorated Swing window showing estimated free spaces in four campus parking
 * lots (A–D). Occupancy is not measured: it is estimated from the current time
 * of day by the rational-function models in parkingLotA..parkingLotD below.
 */
public class ParkingInfoPage extends JFrame {

    // Maximum number of spaces in each lot.
    final static int MAX_CAPACITY_A = 1000;
    final static int MAX_CAPACITY_B = 800;
    final static int MAX_CAPACITY_C = 500;
    final static int MAX_CAPACITY_D = 200;

    // Free spaces = capacity minus the modeled occupancy at class-load time.
    // NOTE(review): computed once via static initializers, so the display does
    // not refresh as time passes — confirm that is intended.
    static int spacesAvailableLotA = MAX_CAPACITY_A - (int)parkingLotA(time());
    static int spacesAvailableLotB = MAX_CAPACITY_B - (int)(parkingLotB(time()));
    static int spacesAvailableLotC = MAX_CAPACITY_C - (int)(parkingLotC(time()));
    static int spacesAvailableLotD = MAX_CAPACITY_D - (int)(parkingLotD(time()));

    /** Builds and shows the window immediately on construction. */
    public ParkingInfoPage ()
    {
        createAndShowGUI();
    }

    /**
     * Assembles the frame: a banner label plus one button per lot, each showing
     * the lot name, the free-space count, and a fill-level icon. Clicking any
     * button disposes this frame and opens a dummyFrame.
     */
    public void createAndShowGUI() {
        JPanel panel = new JPanel();
        Font font = new Font("franklin gothic book", Font.BOLD, 12);
        JLabel banner = new JLabel("<html><font color = white>UMass Boston</font>", SwingConstants.CENTER);
        setUndecorated(true);
        getRootPane().setBorder(BorderFactory.createMatteBorder(6,6,6,6,Color.BLACK));
        font(banner, font);
        setSize(250,400);
        setLocation(500,280);
        //setVisible(true);
        panel.setLayout(null);
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        //setTitle("UMass Parking app");
        //setIconImage(new ImageIcon("unnamed.png").getImage());
        // setUndecorated(true);
        panel.setBackground(new Color(167,158,112));
        //buttons
        // Lot A button.
        JButton a = new JButton();
        a.setFocusPainted(false);
        a.setForeground(new Color(0,90,139));
        a.setText("<html><center>"+ "Lot A" +"<br/>"+ spacesAvailableLotA + " Spaces available");
        a.setFont(font);
        a.setBackground(Color.WHITE);
        a.setBorderPainted(false);
        setParkingIcon(a,spacesAvailableLotA ,MAX_CAPACITY_A);
        a.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                button1ActionPerformed( );
            }});
        // Lot B button.
        JButton b = new JButton();
        b.setForeground(new Color(0,90,139));
        b.setText("<html><center>"+ "Lot B" +"<br/>"+ spacesAvailableLotB + " Spaces available");
        b.setFont(font);
        b.setBackground(Color.WHITE);
        b.setBorderPainted(false);
        setParkingIcon(b,spacesAvailableLotB ,MAX_CAPACITY_B);
        b.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                button1ActionPerformed( );
            }});
        // Lot C button.
        JButton c = new JButton();
        c.setForeground(new Color(0,90,139));
        c.setText("<html><center>"+ "Lot C" +"<br/>"+ spacesAvailableLotC + " Spaces available");
        c.setFont(font);
        c.setBackground(Color.WHITE);
        c.setBorderPainted(false);
        setParkingIcon(c,spacesAvailableLotC ,MAX_CAPACITY_C);
        c.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                button1ActionPerformed( );
            }});
        // Lot D button.
        JButton d = new JButton();
        d.setForeground(new Color(0,90,139));
        d.setText("<html><center>"+ "Lot D" +"<br/>"+ spacesAvailableLotD + " Spaces available");
        d.setFont(font);
        d.setBackground(Color.WHITE);
        d.setBorderPainted(false);
        setParkingIcon(d,spacesAvailableLotD,MAX_CAPACITY_D);
        d.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                button1ActionPerformed( );
            }});
        // Lay out banner + buttons top to bottom.
        positionLabel(banner, panel);
        addComponentsToPane(panel,a);
        addComponentsToPane(panel,b);
        addComponentsToPane(panel,c);
        addComponentsToPane(panel,d);
        banner.setOpaque(true);
        banner.setBackground(new Color(0,90,139));
        getContentPane().add(panel);
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setVisible(true);
    }

    public static void main(String[] args) {
        new ParkingInfoPage ();
    }

    /**
     * Chooses a fill-level icon (0%..100% PNGs loaded from the working
     * directory) based on the fraction of free spaces x out of capacity y.
     * Thresholds: ==y -> 100%, (7/8,1) -> 90%, (5/8,7/8] -> 75%,
     * (3/8,5/8] -> 50%, (1/8,3/8] -> 25%, (0,1/8] -> 10%, ==0 -> 0%.
     * NOTE(review): a negative x (occupancy model exceeding capacity) matches
    * no branch and leaves the button without an icon — confirm intended.
     */
    private static void setParkingIcon(JButton b,double x,final int y){
        if(x == y){
            ImageIcon img4 = new ImageIcon("100%.png");
            b.setIcon(img4);}
        if(x > (.875)*y && x < y){
            ImageIcon img4 = new ImageIcon("90%.png");
            b.setIcon(img4);}
        if(x > (.625)*y && x <= (.875)*y){
            ImageIcon img4 = new ImageIcon("75%.png");
            b.setIcon(img4);}
        if(x >(.375)* y && x <= (.625)*y){
            ImageIcon img4 = new ImageIcon("50%.png");
            b.setIcon(img4);}
        if(x > (.125)*y && x <= (.375)*y){
            ImageIcon img4 = new ImageIcon("25%.png");
            b.setIcon(img4);}
        if(x > 0 && x <= (.125)*y){
            ImageIcon img4 = new ImageIcon("10%.png");
            b.setIcon(img4);}
        if(x == 0){
            ImageIcon img4 = new ImageIcon("0%.png");
            b.setIcon(img4); }
    }

    /** Adds the label to the container, centered horizontally. */
    private static void positionLabel(JLabel label, Container container) {
        label.setAlignmentX(Component.CENTER_ALIGNMENT);
        container.add(label);
    }

    /** Adds the button preceded by a vertical spacer (despite the name, left-aligned). */
    private static void centerButton(JButton button, Container container) {
        button.setAlignmentX(Component.LEFT_ALIGNMENT);
        container.add(Box.createRigidArea(new Dimension(5,30)));
        container.add(button);
    }

    /** Switches the pane to a vertical BoxLayout and appends the button. */
    private static void addComponentsToPane(Container pane, JButton a) {
        pane.setLayout(new BoxLayout(pane, BoxLayout.Y_AXIS));
        centerButton(a, pane);
    }

    /** Sets the banner font; the f parameter is ignored — a fixed 24pt font is used. */
    public void font(JLabel banner, Font f){
        banner.setFont(new Font("Franklin gothic book heavy", Font.BOLD, 24));
    }

    /** Shared click handler: opens a dummyFrame and disposes this window. */
    private void button1ActionPerformed() {
        dummyFrame x = new dummyFrame();
        dispose();
    }

    /**
     * Current time of day as a fractional hour (e.g. 14.5 for 2:30pm).
     * MINUTE_OF_DAY is total minutes since midnight, so (minute - hour*60)/60
     * is the fractional part within the current hour.
     */
    static double time(){
        LocalDateTime now = LocalDateTime.now();
        double hour = now.get(ChronoField.HOUR_OF_DAY);
        double minute = now.get(ChronoField.MINUTE_OF_DAY);
        return (hour + (minute - hour*60)/60 ) ;
    }

    /**
     * Modeled occupancy of lot A at fractional hour x: a morning peak (curve a,
     * 8:00-17:00), an evening peak (curve b, 17:00-22:00), and a flat overnight
     * baseline of 35. NOTE(review): x exactly 22 matches no branch and returns 0.
     */
    static double parkingLotA(double x){
        double a = 200*Math.pow(x-8, 2)/Math.pow(1 + Math.pow(.25*(x-8),3),2);
        double b = 100*Math.pow(x-16, 2)/Math.pow(1 + Math.pow(.29*(x-16),3),2);
        if(x<=17 && x>=8)
            return a;
        if(x > 17 && x < 22 )
            return b;
        if(x < 8 || x>22)
            return 35;
        return 0;
    }

    /** Same shape of model as parkingLotA with lot-B coefficients (same x==22 gap). */
    static double parkingLotB(double x){
        double a = 170*Math.pow(x-8, 2)/Math.pow(1 + Math.pow(.33*(x-8),3),2);
        double b = 40*Math.pow(x-16, 2)/Math.pow(1 + Math.pow(.29*(x-16),3),2);
        if(x<=17 && x>=8)
            return a;
        if(x > 17 && x < 22 )
            return b;
        if(x < 8 || x>22)
            return 35;
        return 0;
    }

    /** Same shape of model as parkingLotA with lot-C coefficients (same x==22 gap). */
    static double parkingLotC(double x){
        double a = 60*Math.pow(x-8, 2)/Math.pow(1 + Math.pow(.33*(x-8),3),2);
        double b = 60*Math.pow(x-16, 2)/Math.pow(1 + Math.pow(.29*(x-16),3),2);
        if(x<=17 && x>=8)
            return a;
        if(x > 17 && x < 22 )
            return b;
        if(x < 8 || x>22)
            return 35;
        return 0;
    }

    /** Lot D uses a single curve for both day ranges (same x==22 gap as the others). */
    static double parkingLotD(double x){
        double a = 40*Math.pow(x-8, 2)/Math.pow(1 + Math.pow(.29*(x-8),2),1.5);
        if(x<=17 && x>=8)
            return a;
        if(x > 17 && x < 22 )
            return a;
        if(x < 8 || x>22)
            return 35;
        return 0;
    }
}
| apache-2.0 |
Rahul-Batra/Web-Crawler | Investopedia/src/com/rahul/investopedia/util/WebParser.java | 3063 | package com.rahul.investopedia.util;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.rahul.investopedia.dao.Alphadetails;
/**
 * Scrapes Investopedia's alphabetical term-index pages. For each index URL
 * stored in the {@code alphaurl} table, extracts every term link between the
 * ".alphabet" and "ad_sponsorlinks" HTML markers and records the term title
 * and absolute URL in the {@code terms} table.
 */
public class WebParser {

    /** Entry point: refreshes the index table, then scrapes every recorded URL. */
    public static void main(String[] args) {
        Alphadetails.getdetails();
        ArrayList<AlphaurlDTO> url = geturl();
        System.out.println("size of url is"+url.size());
        for (int i = 0; i < url.size(); i++) {
            processPage(url.get(i).getUrl());
        }
    }

    /**
     * Loads every row of the {@code alphaurl} table.
     *
     * @return the index-page URLs with their status flags; empty if the query fails
     */
    public static ArrayList<AlphaurlDTO> geturl()
    {
        ArrayList<AlphaurlDTO> geturlarry = new ArrayList<AlphaurlDTO>();
        String sql = "SELECT * FROM alphaurl";
        Connection con = null;
        PreparedStatement statement = null;
        ResultSet resultSet = null;
        try {
            con = DBconnection.getConnection();
            statement = con.prepareStatement(sql);
            resultSet = statement.executeQuery();
            while (resultSet.next()) {
                AlphaurlDTO Geturl = new AlphaurlDTO();
                Geturl.setUrl(resultSet.getString(1));
                Geturl.setStatus(Integer.parseInt(resultSet.getString(2)));
                geturlarry.add(Geturl);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Fix: the original leaked the ResultSet and PreparedStatement.
            closeQuietly(resultSet);
            closeQuietly(statement);
            // NOTE(review): the Connection is deliberately NOT closed here because
            // DBconnection.getConnection() may hand out a shared/pooled connection —
            // confirm its lifecycle before closing it.
        }
        return geturlarry;
    }

    /**
     * Fetches one index page and extracts every term anchor between the
     * ".alphabet" and "ad_sponsorlinks" markers, persisting each (title, URL)
     * pair via {@link #writedetails}. Image links (.jpg) are skipped.
     *
     * @param url absolute URL of the index page to scrape
     */
    public static void processPage(String url) {
        try {
            System.out.println("url is"+url);
            String webContent = URLReader.readURL(url);
            // Non-greedy span between the two structural HTML comment markers.
            String pattern = String.format("(%s).*?(%s)",Pattern.quote("<!-- .alphabet -->"),Pattern.quote("<!-- ad_sponsorlinks -->"));
            Matcher m = Pattern.compile(pattern, Pattern.DOTALL).matcher(
                    webContent);
            int i = 1;
            while (m.find()) {
                String matchingvalue = m.group();
                Pattern pattrn = Pattern.compile("<a\\b[^>]*href=\"[^>]*>(.*?)</a>");
                Matcher match = pattrn.matcher(matchingvalue);
                while (match.find()) {
                    String matchlist = match.group();
                    // Strip markup noise before splitting out the title and href.
                    matchlist = matchlist.replaceAll(LoadUtilConstants.regExp2,
                            "");
                    matchlist = matchlist.replaceAll(LoadUtilConstants.scriptRegex, "");
                    matchlist = matchlist.replaceAll(LoadUtilConstants.commentsRegex, "");
                    if (!matchlist.contains(".jpg")) {
                        String title=matchlist.split("\"")[2].split("</")[0].replaceAll(">", "").trim();
                        String titleURL="http://www.investopedia.com"+matchlist.split("\"")[1];
                        System.out.println(i++ + " : title :" + title+ "\t url :"+ titleURL);
                        writedetails(title,titleURL);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Inserts one (title, url) pair into the {@code terms} table using a
     * parameterized statement.
     */
    static void writedetails(String title,String titleURL)
    {
        Connection con = null;
        PreparedStatement statement = null;
        String insertqry = "Insert into terms(title,url) values(?,?)";
        try{
            con = DBconnection.getConnection();
            statement = con.prepareStatement(insertqry);
            statement.setString(1, title);
            statement.setString(2, titleURL);
            System.out.println(title+""+titleURL);
            statement.execute();
        }catch(Exception e)
        {
            e.printStackTrace();
        } finally {
            // Fix: the original leaked the PreparedStatement on every insert.
            closeQuietly(statement);
        }
    }

    /** Best-effort close of a ResultSet; never throws. */
    private static void closeQuietly(ResultSet rs) {
        if (rs != null) {
            try {
                rs.close();
            } catch (Exception ignored) {
                // best-effort cleanup only
            }
        }
    }

    /** Best-effort close of a statement; never throws. */
    private static void closeQuietly(PreparedStatement st) {
        if (st != null) {
            try {
                st.close();
            } catch (Exception ignored) {
                // best-effort cleanup only
            }
        }
    }
}
| apache-2.0 |
gdutxiaoxu/FunAPP | Fun/src/main/java/com/xujun/funapp/common/network/HttpException.java | 628 | package com.xujun.funapp.common.network;
/**
* @author meitu.xujun on 2017/4/8 17:26
* @version 0.1
*/
public class HttpException {
public static final int CUSTOM_ERROR_CODE=100;
public int code;
public Throwable e;
public String errMsg;
public HttpException(int code, Throwable e, String errMsg) {
this.code = code;
this.e = e;
this.errMsg = errMsg;
}
@Override
public String toString() {
return "HttpException{" +
"code=" + code +
", e=" + e +
", errMsg='" + errMsg + '\'' +
'}';
}
}
| apache-2.0 |
MarkusBernhardt/spring-context-template | src/main/java/com/github/markusbernhardt/springcontexttemplate/ImportStringValueResolver.java | 2551 | package com.github.markusbernhardt.springcontexttemplate;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.springframework.core.env.PropertyResolver;
import org.springframework.util.StringValueResolver;
/**
 * {@link StringValueResolver} that substitutes {@code ${name}} tokens using a
 * fixed name-to-value map, then runs each substituted value through a Spring
 * {@link PropertyResolver} so mapped values may themselves contain property
 * placeholders. Substitution repeats until the string stops changing.
 */
public class ImportStringValueResolver implements StringValueResolver {

    /**
     * The regex pattern used later to match the variables to replace: an
     * alternation of {@code ${key}} literals, one per mapping key.
     */
    protected final Pattern pattern;

    /**
     * The map storing all the mappings (variable name to replacement text)
     */
    protected final Map<String, String> mappings;

    private final PropertyResolver propertyResolver;

    /**
     * Constructor
     *
     * @param mappings the map containing all value to resolved value mappings
     * @param propertyResolver global property resolver
     */
    public ImportStringValueResolver(Map<String, String> mappings, PropertyResolver propertyResolver) {
        super();
        this.propertyResolver = propertyResolver;
        this.pattern = getPattern(mappings);
        this.mappings = mappings;
    }

    /**
     * Resolve the given String value, for example parsing placeholders.
     * Repeats until a fixed point is reached, so mapped values may reference
     * other mappings. NOTE: a mapping whose value contains its own token will
     * never reach a fixed point (pre-existing behavior).
     *
     * @param value
     *            the original String value
     * @return the resolved String value
     */
    @Override
    public String resolveStringValue(String value) {
        String resolvedValue = value;
        do {
            value = resolvedValue;
            StringBuffer sb = new StringBuffer();
            Matcher matcher = pattern.matcher(value);
            while (matcher.find()) {
                String val = mappings.get(unbracket(matcher.group(1)));
                // Fix: quote the replacement so a literal '$' or '\' in the
                // resolved value is not treated as a group reference by
                // appendReplacement (which would corrupt output or throw).
                matcher.appendReplacement(sb,
                        Matcher.quoteReplacement(propertyResolver.resolvePlaceholders(val)));
            }
            matcher.appendTail(sb);
            resolvedValue = sb.toString();
        } while (!value.equals(resolvedValue));
        return resolvedValue;
    }

    /** Strips the leading "${" and trailing "}" from a matched token. */
    private String unbracket(String expression) {
        String unbracketed = expression.substring("${".length(), expression.length() - "}".length());
        return unbracketed;
    }

    /**
     * Build the regex pattern used later to match the variables to replace
     *
     * @param mappings
     *            the mappings to match
     * @return the pattern
     */
    protected Pattern getPattern(Map<String, String> mappings) {
        String join = "|";
        StringBuilder patternString = new StringBuilder();
        for (String key : mappings.keySet()) {
            if (patternString.length() > 0) {
                patternString.append(join);
            }
            // Fix: quote the key so keys containing regex metacharacters cannot
            // break the alternation or match unintended strings.
            patternString.append("\\$\\{" + Pattern.quote(key) + "\\}");
        }
        return Pattern.compile("(" + patternString.toString() + ")");
    }

    public PropertyResolver getPropertyResolver() {
        return propertyResolver;
    }
}
| apache-2.0 |
marcinkwiatkowski/buck | test/com/facebook/buck/randomizedtrial/RandomizedTrialTest.java | 2556 | /*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.randomizedtrial;
import static org.junit.Assert.assertThat;
import org.hamcrest.Matchers;
import org.junit.Test;
public class RandomizedTrialTest {

    /** Trial whose group probabilities sum to zero — an invalid configuration. */
    public enum BrokenEnum implements WithProbability {
        GROUP1,
        GROUP2,
        ;

        @Override
        public double getProbability() {
            return 0.0;
        }
    }

    /** Trial whose per-group probabilities can be adjusted by each test. */
    public enum MutableEnum implements WithProbability {
        GROUP1,
        GROUP2,
        ;

        public static double probabilityGroup1 = 0.0;
        public static double probabilityGroup2 = 0.0;

        @Override
        public double getProbability() {
            return this == GROUP1 ? probabilityGroup1 : probabilityGroup2;
        }
    }

    @Test
    public void testCreatingWithWrongConfiguration() throws Exception {
        boolean threw = false;
        try {
            RandomizedTrial.getGroup("name", BrokenEnum.class, BrokenEnum.GROUP1);
        } catch (RuntimeException e) {
            threw = true;
            assertThat(e.getMessage(), Matchers.containsString("misconfigured"));
        }
        if (!threw) {
            throw new RuntimeException("Expected to fail");
        }
    }

    @Test
    public void testPointStaysStable() throws Exception {
        double first = RandomizedTrial.getPoint("test");
        double second = RandomizedTrial.getPoint("test");
        assertThat(first, Matchers.equalTo(second));
    }

    // The following test has caused some flakiness on Windows, so we disable this for now.
    // @Test
    // public void testPointDifferentForDifferentTests() throws Exception {
    //   assertThat(
    //       RandomizedTrial.getPoint("test1"),
    //       Matchers.not(Matchers.equalTo(RandomizedTrial.getPoint("test2"))));
    // }

    @Test
    public void testReturnsCorrectGroup() throws Exception {
        // Put GROUP1's probability mass exactly up to this trial's point so the
        // selection must land in GROUP2.
        double point = RandomizedTrial.getPoint("name");
        MutableEnum.probabilityGroup1 = point;
        MutableEnum.probabilityGroup2 = 1.0 - point;
        MutableEnum chosen = RandomizedTrial.getGroup("name", MutableEnum.class, MutableEnum.GROUP1);
        assertThat(chosen, Matchers.equalTo(MutableEnum.GROUP2));
    }
}
| apache-2.0 |
marcinkwiatkowski/buck | src/com/facebook/buck/rules/BuildInfoRecorder.java | 12816 | /*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules;
import com.facebook.buck.artifact_cache.ArtifactCache;
import com.facebook.buck.artifact_cache.ArtifactInfo;
import com.facebook.buck.event.ArtifactCompressionEvent;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.io.BorrowablePath;
import com.facebook.buck.io.MoreFiles;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.util.cache.FileHashCache;
import com.facebook.buck.util.collect.SortedSets;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.SortedSet;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nullable;
/**
* Utility for recording the paths to the output files generated by a build rule, as well as any
* metadata about those output files. This data will be packaged up into an artifact that will be
* stored in the cache. The metadata will also be written to disk so it can be read on a subsequent
* build by an {@link OnDiskBuildInfo}.
*/
/**
 * Utility for recording the paths to the output files generated by a build rule, as well as any
 * metadata about those output files. This data will be packaged up into an artifact that will be
 * stored in the cache. The metadata will also be written to disk so it can be read on a subsequent
 * build by an {@link OnDiskBuildInfo}.
 */
public class BuildInfoRecorder {

  // Fix: the logger was attributed to BuildRuleResolver.class (copy/paste slip);
  // attribute it to this class so log categories line up with the source.
  private static final Logger LOG = Logger.get(BuildInfoRecorder.class);

  @VisibleForTesting
  static final String ABSOLUTE_PATH_ERROR_FORMAT =
      "Error! '%s' is trying to record artifacts with absolute path: '%s'.";

  private static final String BUCK_CACHE_DATA_ENV_VAR = "BUCK_CACHE_DATA";

  private final BuildTarget buildTarget;
  private final Path pathToMetadataDirectory;
  private final ProjectFilesystem projectFilesystem;
  private final BuildInfoStore buildInfoStore;
  private final Clock clock;
  private final BuildId buildId;
  // Extra key/value pairs folded into the ADDITIONAL_INFO metadata entry.
  private final ImmutableMap<String, String> artifactExtraData;
  // Per-file metadata, written under the metadata directory.
  private final Map<String, String> metadataToWrite;
  // Build-level metadata, persisted via the BuildInfoStore.
  private final Map<String, String> buildMetadata;
  // Ensures the "failed storing an artifact" console warning is shown at most once.
  private final AtomicBoolean warnedUserOfCacheStoreFailure;

  /** Every value in this set is a path relative to the project root. */
  private final Set<Path> pathsToOutputs;

  BuildInfoRecorder(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildInfoStore buildInfoStore,
      Clock clock,
      BuildId buildId,
      ImmutableMap<String, String> environment) {
    this.buildTarget = buildTarget;
    this.pathToMetadataDirectory =
        BuildInfo.getPathToMetadataDirectory(buildTarget, projectFilesystem);
    this.projectFilesystem = projectFilesystem;
    this.buildInfoStore = buildInfoStore;
    this.clock = clock;
    this.buildId = buildId;
    // "null" (the literal string) is recorded when BUCK_CACHE_DATA is unset.
    this.artifactExtraData =
        ImmutableMap.<String, String>builder()
            .put(
                "artifact_data",
                Optional.ofNullable(environment.get(BUCK_CACHE_DATA_ENV_VAR)).orElse("null"))
            .build();
    this.metadataToWrite = new LinkedHashMap<>();
    this.buildMetadata = new LinkedHashMap<>();
    this.pathsToOutputs = new HashSet<>();
    this.warnedUserOfCacheStoreFailure = new AtomicBoolean(false);
  }

  /** Serializes {@code value} to JSON, rethrowing serialization failures unchecked. */
  private String toJson(Object value) {
    try {
      return ObjectMappers.WRITER.writeValueAsString(value);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Renders entries as concatenated "key=value," pairs (trailing comma per entry). */
  private static String formatAdditionalArtifactInfo(Map<String, String> entries) {
    StringBuilder builder = new StringBuilder();
    for (Map.Entry<String, String> entry : entries.entrySet()) {
      builder.append(entry.getKey());
      builder.append('=');
      builder.append(entry.getValue());
      builder.append(',');
    }
    return builder.toString();
  }

  /**
   * @return the recorded build metadata plus a synthesized ADDITIONAL_INFO entry carrying the
   *     build id, a second-resolution timestamp, and any extra artifact data.
   */
  private ImmutableMap<String, String> getBuildMetadata() {
    return ImmutableMap.<String, String>builder()
        .put(
            BuildInfo.MetadataKey.ADDITIONAL_INFO,
            formatAdditionalArtifactInfo(
                ImmutableMap.<String, String>builder()
                    .put("build_id", buildId.toString())
                    .put(
                        "timestamp",
                        String.valueOf(TimeUnit.MILLISECONDS.toSeconds(clock.currentTimeMillis())))
                    .putAll(artifactExtraData)
                    .build()))
        .putAll(buildMetadata)
        .build();
  }

  /**
   * Writes the metadata currently stored in memory to the directory returned by {@link
   * BuildInfo#getPathToMetadataDirectory(BuildTarget, ProjectFilesystem)}.
   *
   * @param clearExistingMetadata whether any previously persisted metadata (on disk and in the
   *     BuildInfoStore) should be deleted first
   */
  public void writeMetadataToDisk(boolean clearExistingMetadata) throws IOException {
    if (clearExistingMetadata) {
      projectFilesystem.deleteRecursivelyIfExists(pathToMetadataDirectory);
      buildInfoStore.deleteMetadata(buildTarget);
    }
    projectFilesystem.mkdirs(pathToMetadataDirectory);

    buildInfoStore.updateMetadata(buildTarget, getBuildMetadata());

    for (Map.Entry<String, String> entry : metadataToWrite.entrySet()) {
      projectFilesystem.writeContentsToPath(
          entry.getValue(), pathToMetadataDirectory.resolve(entry.getKey()));
    }
  }

  /**
   * Used by the build engine to record metadata describing the build (e.g. rule key, build UUID).
   */
  public BuildInfoRecorder addBuildMetadata(String key, String value) {
    buildMetadata.put(key, value);
    return this;
  }

  /** Like {@link #addBuildMetadata(String, String)}, serializing the map as JSON. */
  public BuildInfoRecorder addBuildMetadata(String key, ImmutableMap<String, String> value) {
    return addBuildMetadata(key, toJson(value));
  }

  /**
   * This key/value pair is stored in memory until {@link #writeMetadataToDisk(boolean)} is invoked.
   */
  public void addMetadata(String key, String value) {
    metadataToWrite.put(key, value);
  }

  /** Like {@link #addMetadata(String, String)}, serializing the list as JSON. */
  public void addMetadata(String key, ImmutableList<String> value) {
    addMetadata(key, toJson(value));
  }

  /** @return the recorded metadata keys resolved as paths under the metadata directory. */
  private ImmutableSortedSet<Path> getRecordedMetadataFiles() {
    return FluentIterable.from(metadataToWrite.keySet())
        .transform(Paths::get)
        .transform(pathToMetadataDirectory::resolve)
        .toSortedSet(Ordering.natural());
  }

  /** @return every file and directory reachable under the recorded output paths. */
  private ImmutableSortedSet<Path> getRecordedOutputDirsAndFiles() throws IOException {
    final ImmutableSortedSet.Builder<Path> paths = ImmutableSortedSet.naturalOrder();

    // Add files from output directories.
    for (final Path output : pathsToOutputs) {
      projectFilesystem.walkRelativeFileTree(
          output,
          new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                throws IOException {
              paths.add(file);
              return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                throws IOException {
              paths.add(dir);
              return FileVisitResult.CONTINUE;
            }
          });
    }

    return paths.build();
  }

  /** @return metadata files plus all recorded output files/directories, recursively expanded. */
  private SortedSet<Path> getRecordedDirsAndFiles() throws IOException {
    return SortedSets.union(getRecordedMetadataFiles(), getRecordedOutputDirsAndFiles());
  }

  /** @return the outputs paths as recorded by the rule. */
  public ImmutableSortedSet<Path> getOutputPaths() {
    return ImmutableSortedSet.copyOf(pathsToOutputs);
  }

  /** @return metadata file paths plus recorded output paths (not recursively expanded). */
  public ImmutableSortedSet<Path> getRecordedPaths() {
    return ImmutableSortedSet.<Path>naturalOrder()
        .addAll(getRecordedMetadataFiles())
        .addAll(pathsToOutputs)
        .build();
  }

  /** @return an MD5 digest over the content hashes of every recorded path, in sorted order. */
  public HashCode getOutputHash(FileHashCache fileHashCache) throws IOException {
    Hasher hasher = Hashing.md5().newHasher();
    for (Path path : getRecordedPaths()) {
      hasher.putBytes(fileHashCache.get(projectFilesystem.resolve(path)).asBytes());
    }
    return hasher.hash();
  }

  /** @return the total size in bytes of all recorded regular files (directories excluded). */
  public long getOutputSize() throws IOException {
    long size = 0;
    for (Path path : getRecordedDirsAndFiles()) {
      if (projectFilesystem.isFile(path)) {
        size += projectFilesystem.getFileSize(path);
      }
    }
    return size;
  }

  /**
   * Creates a zip file of the metadata and recorded artifacts and stores it in the artifact cache.
   * No-op when the cache is not writable. The temporary zip is deleted after the (asynchronous)
   * store completes, successfully or not.
   */
  public void performUploadToArtifactCache(
      final ImmutableSet<RuleKey> ruleKeys,
      ArtifactCache artifactCache,
      final BuckEventBus eventBus) {

    // Skip all of this if caching is disabled. Although artifactCache.store() will be a noop,
    // building up the zip is wasted I/O.
    if (!artifactCache.getCacheReadMode().isWritable()) {
      return;
    }

    ArtifactCompressionEvent.Started started =
        ArtifactCompressionEvent.started(ArtifactCompressionEvent.Operation.COMPRESS, ruleKeys);
    eventBus.post(started);

    final Path zip;
    SortedSet<Path> pathsToIncludeInZip = ImmutableSortedSet.of();
    ImmutableMap<String, String> buildMetadata;
    try {
      pathsToIncludeInZip = getRecordedDirsAndFiles();
      zip =
          Files.createTempFile(
              "buck_artifact_" + MoreFiles.sanitize(buildTarget.getShortName()), ".zip");
      buildMetadata = getBuildMetadata();
      projectFilesystem.createZip(pathsToIncludeInZip, zip);
    } catch (IOException e) {
      eventBus.post(
          ConsoleEvent.info(
              "Failed to create zip for %s containing:\n%s",
              buildTarget, Joiner.on('\n').join(ImmutableSortedSet.copyOf(pathsToIncludeInZip))));
      // NOTE(review): stack trace goes to stderr rather than LOG — confirm intended.
      e.printStackTrace();
      return;
    } finally {
      eventBus.post(ArtifactCompressionEvent.finished(started));
    }

    // Store the artifact, including any additional metadata.
    ListenableFuture<Void> storeFuture =
        artifactCache.store(
            ArtifactInfo.builder().setRuleKeys(ruleKeys).setMetadata(buildMetadata).build(),
            BorrowablePath.borrowablePath(zip));
    Futures.addCallback(
        storeFuture,
        new FutureCallback<Void>() {
          @Override
          public void onSuccess(Void result) {
            onCompletion();
          }

          @Override
          public void onFailure(Throwable t) {
            onCompletion();
            LOG.info(t, "Failed storing RuleKeys %s to the cache.", ruleKeys);
            if (warnedUserOfCacheStoreFailure.compareAndSet(false, true)) {
              eventBus.post(
                  ConsoleEvent.severe(
                      "Failed storing an artifact to the cache," + "see log for details."));
            }
          }

          private void onCompletion() {
            try {
              // Delete the temporary zip regardless of store outcome.
              Files.deleteIfExists(zip);
            } catch (IOException e) {
              throw new RuntimeException(e);
            }
          }
        });
  }

  /** @param pathToArtifact Relative path to the project root. */
  public void recordArtifact(Path pathToArtifact) {
    Preconditions.checkArgument(
        !pathToArtifact.isAbsolute(), ABSOLUTE_PATH_ERROR_FORMAT, buildTarget, pathToArtifact);
    pathsToOutputs.add(pathToArtifact);
  }

  @Nullable
  @VisibleForTesting
  String getMetadataFor(String key) {
    return metadataToWrite.get(key);
  }

  /** @return the recorded build-level metadata for {@code key}, if any. */
  Optional<String> getBuildMetadataFor(String key) {
    return Optional.ofNullable(buildMetadata.get(key));
  }
}
| apache-2.0 |
sequenceiq/cloudbreak | integration-test/src/main/java/com/sequenceiq/it/IntegrationTestContext.java | 1998 | package com.sequenceiq.it;
import java.util.HashMap;
import java.util.Map;
public class IntegrationTestContext {
public static final String IDENTITY_URL = "IDENTITY_URL";
public static final String AUTH_USER = "AUTH_USER";
public static final String AUTH_PASSWORD = "AUTH_PASSWORD";
private Map<String, Object> contextParameters = new HashMap<>();
private Map<String, Object> cleanUpParameters = new HashMap<>();
public IntegrationTestContext() {
}
public IntegrationTestContext(Map<String, Object> contextParameters) {
this.contextParameters = contextParameters;
}
public String getContextParam(String paramKey) {
return getContextParam(paramKey, String.class);
}
public <T> T getContextParam(String paramKey, Class<T> clazz) {
Object val = contextParameters.get(paramKey);
if (val == null || clazz.isInstance(val)) {
return clazz.cast(val);
} else {
throw new IllegalArgumentException("Param value is not type of " + clazz);
}
}
public void putContextParam(String paramKey, Object paramValue) {
putContextParam(paramKey, paramValue, false);
}
public void putContextParam(String paramKey, Object paramValue, boolean cleanUp) {
contextParameters.put(paramKey, paramValue);
if (cleanUp) {
putCleanUpParam(paramKey, paramValue);
}
}
public void putCleanUpParam(String paramKey, Object paramValue) {
cleanUpParameters.put(paramKey, paramValue);
}
public String getCleanUpParameter(String key) {
return getCleanUpParameter(key, String.class);
}
public <T> T getCleanUpParameter(String key, Class<T> clazz) {
Object val = cleanUpParameters.get(key);
if (val == null || clazz.isInstance(val)) {
return clazz.cast(val);
} else {
throw new IllegalArgumentException("Param value is not type of " + clazz);
}
}
}
| apache-2.0 |
rdblue/incubator-nifi | nar-bundles/standard-bundle/standard-processors/src/test/java/org/apache/nifi/processors/standard/TestGetFile.java | 8106 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import org.apache.nifi.processors.standard.GetFile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermissions;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Test;
public class TestGetFile {
    @Test
    public void testFilePickedUp() throws IOException {
        // Stage a fresh input directory and copy the fixture file into it.
        final File directory = new File("target/test/data/in");
        deleteDirectory(directory);
        assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());

        final File inFile = new File("src/test/resources/hello.txt");
        final Path inPath = inFile.toPath();
        final File destFile = new File(directory, inFile.getName());
        final Path targetPath = destFile.toPath();
        final Path absTargetPath = targetPath.toAbsolutePath();
        // Expected absolute-path attribute: the parent directory plus a trailing "/".
        final String absTargetPathStr = absTargetPath.getParent() + "/";
        Files.copy(inPath, targetPath);

        // Run GetFile against the staged directory; exactly one flowfile should succeed.
        final TestRunner runner = TestRunners.newTestRunner(new GetFile());
        runner.setProperty(GetFile.DIRECTORY, directory.getAbsolutePath());
        runner.run();

        runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
        final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
        successFiles.get(0).assertContentEquals("Hello, World!".getBytes("UTF-8"));

        // The flowfile should carry both the relative and absolute path attributes.
        final String path = successFiles.get(0).getAttribute("path");
        assertEquals("/", path);
        final String absolutePath = successFiles.get(0).getAttribute(CoreAttributes.ABSOLUTE_PATH.key());
        assertEquals(absTargetPathStr, absolutePath);
    }
private void deleteDirectory(final File directory) throws IOException {
if (directory.exists()) {
for (final File file : directory.listFiles()) {
if (file.isDirectory()) {
deleteDirectory(file);
}
assertTrue("Could not delete " + file.getAbsolutePath(), file.delete());
}
}
}
@Test
public void testTodaysFilesPickedUp() throws IOException {
final SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd");
final String dirStruc = sdf.format(new Date());
final File directory = new File("target/test/data/in/" + dirStruc);
deleteDirectory(directory);
assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());
final File inFile = new File("src/test/resources/hello.txt");
final Path inPath = inFile.toPath();
final File destFile = new File(directory, inFile.getName());
final Path targetPath = destFile.toPath();
Files.copy(inPath, targetPath);
final TestRunner runner = TestRunners.newTestRunner(new GetFile());
runner.setProperty(GetFile.DIRECTORY, "target/test/data/in/${now():format('yyyy/MM/dd')}");
runner.run();
runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
successFiles.get(0).assertContentEquals("Hello, World!".getBytes("UTF-8"));
}
@Test
public void testPath() throws IOException {
final SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd/");
final String dirStruc = sdf.format(new Date());
final File directory = new File("target/test/data/in/" + dirStruc);
deleteDirectory(new File("target/test/data/in"));
assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());
final File inFile = new File("src/test/resources/hello.txt");
final Path inPath = inFile.toPath();
final File destFile = new File(directory, inFile.getName());
final Path targetPath = destFile.toPath();
final Path absTargetPath = targetPath.toAbsolutePath();
final String absTargetPathStr = absTargetPath.getParent().toString() + "/";
Files.copy(inPath, targetPath);
final TestRunner runner = TestRunners.newTestRunner(new GetFile());
runner.setProperty(GetFile.DIRECTORY, "target/test/data/in");
runner.run();
runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
successFiles.get(0).assertContentEquals("Hello, World!".getBytes("UTF-8"));
final String path = successFiles.get(0).getAttribute("path");
assertEquals(dirStruc, path.replace('\\', '/'));
final String absolutePath = successFiles.get(0).getAttribute(CoreAttributes.ABSOLUTE_PATH.key());
assertEquals(absTargetPathStr, absolutePath);
}
@Test
public void testAttributes() throws IOException {
final File directory = new File("target/test/data/in/");
deleteDirectory(directory);
assertTrue("Unable to create test data directory " + directory.getAbsolutePath(), directory.exists() || directory.mkdirs());
final File inFile = new File("src/test/resources/hello.txt");
final Path inPath = inFile.toPath();
final File destFile = new File(directory, inFile.getName());
final Path targetPath = destFile.toPath();
Files.copy(inPath, targetPath);
boolean verifyLastModified = false;
try {
destFile.setLastModified(1000000000);
verifyLastModified = true;
} catch (Exception donothing) {
}
boolean verifyPermissions = false;
try {
Files.setPosixFilePermissions(targetPath, PosixFilePermissions.fromString("r--r-----"));
verifyPermissions = true;
} catch (Exception donothing) {
}
final TestRunner runner = TestRunners.newTestRunner(new GetFile());
runner.setProperty(GetFile.DIRECTORY, "target/test/data/in");
runner.run();
runner.assertAllFlowFilesTransferred(GetFile.REL_SUCCESS, 1);
final List<MockFlowFile> successFiles = runner.getFlowFilesForRelationship(GetFile.REL_SUCCESS);
if (verifyLastModified) {
try {
final DateFormat formatter = new SimpleDateFormat(GetFile.FILE_MODIFY_DATE_ATTR_FORMAT);
final Date fileModifyTime = formatter.parse(successFiles.get(0).getAttribute("file.lastModifiedTime"));
assertEquals(new Date(1000000000), fileModifyTime);
} catch (ParseException e) {
fail();
}
}
if (verifyPermissions) {
successFiles.get(0).assertAttributeEquals("file.permissions", "r--r-----");
}
}
}
| apache-2.0 |
langlan/sqldsl | src/main/java/langlan/sql/weaver/d/RealThisSupport.java | 195 | package langlan.sql.weaver.d;
/**
 * Base class implementing the "curiously recurring template parameter"
 * idiom: a subclass declares itself as {@code T} so that fluent/builder
 * methods can return the concrete subtype via {@link #realThis()}.
 *
 * @param <T> the concrete subclass type
 */
public abstract class RealThisSupport<T extends RealThisSupport<T>> {

	/**
	 * Returns this instance viewed as its concrete subtype {@code T}.
	 * The unchecked cast is safe as long as subclasses honor the
	 * self-type contract of the type parameter.
	 */
	protected T realThis() {
		@SuppressWarnings("unchecked")
		final T self = (T) this;
		return self;
	}
}
| apache-2.0 |
DSttr/SystemUI | SystemUI/src/com/android/systemui/tuner/StatusBarIconBlacklistFragment.java | 2564 | /*
* Copyright (C) 2016 The CyanogenMod Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.systemui.tuner;
import android.annotation.Nullable;
import android.os.Bundle;
import android.preference.Preference;
import android.preference.PreferenceFragment;
import android.preference.PreferenceGroup;
import com.android.systemui.R;
import com.android.systemui.statusbar.phone.StatusBarIconController;
/**
 * Preference fragment listing status bar icons that can be blacklisted.
 * While visible it registers every {@link StatusBarSwitch} preference as a
 * tunable for {@link StatusBarIconController#ICON_BLACKLIST} and removes the
 * registrations again when paused.
 */
public class StatusBarIconBlacklistFragment extends PreferenceFragment {

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.tuner_statusbar_icons);
    }

    @Override
    public void onResume() {
        super.onResume();
        // Start listening for icon-blacklist changes while the fragment is visible.
        registerPrefs(getPreferenceScreen());
    }

    @Override
    public void onPause() {
        super.onPause();
        unregisterPrefs(getPreferenceScreen());
    }

    /**
     * Recursively registers every StatusBarSwitch in the group (and any
     * nested groups) as a tunable for the icon blacklist setting.
     */
    private void registerPrefs(PreferenceGroup group) {
        TunerService tunerService = TunerService.get(getContext());
        final int N = group.getPreferenceCount();
        for (int i = 0; i < N; i++) {
            Preference pref = group.getPreference(i);
            if (pref instanceof StatusBarSwitch) {
                tunerService.addTunable((TunerService.Tunable) pref, StatusBarIconController.ICON_BLACKLIST);
            } else if (pref instanceof PreferenceGroup) {
                registerPrefs((PreferenceGroup) pref);
            }
        }
    }

    /**
     * Recursively removes every previously registered tunable in the group
     * (and any nested groups).
     */
    private void unregisterPrefs(PreferenceGroup group) {
        TunerService tunerService = TunerService.get(getContext());
        final int N = group.getPreferenceCount();
        for (int i = 0; i < N; i++) {
            Preference pref = group.getPreference(i);
            if (pref instanceof TunerService.Tunable) {
                tunerService.removeTunable((TunerService.Tunable) pref);
            } else if (pref instanceof PreferenceGroup) {
                // Bug fix: recurse with unregisterPrefs (was registerPrefs),
                // which re-registered nested groups on pause instead of
                // removing them, leaking tunable registrations.
                unregisterPrefs((PreferenceGroup) pref);
            }
        }
    }
}
| apache-2.0 |
cran/rkafkajars | java/javax/mail/internet/UniqueValue.java | 3432 | /*
* The contents of this file are subject to the terms
* of the Common Development and Distribution License
* (the "License"). You may not use this file except
* in compliance with the License.
*
* You can obtain a copy of the license at
* glassfish/bootstrap/legal/CDDLv1.0.txt or
* https://glassfish.dev.java.net/public/CDDLv1.0.html.
* See the License for the specific language governing
* permissions and limitations under the License.
*
* When distributing Covered Code, include this CDDL
* HEADER in each file and include the License file at
* glassfish/bootstrap/legal/CDDLv1.0.txt. If applicable,
* add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your
* own identifying information: Portions Copyright [yyyy]
* [name of copyright owner]
*/
/*
* @(#)UniqueValue.java 1.8 05/08/29
*
* Copyright 1997-2005 Sun Microsystems, Inc. All Rights Reserved.
*/
package javax.mail.internet;
import java.net.*;
import javax.mail.Session;
/**
* This is a utility class that generates unique values. The generated
* String contains only US-ASCII characters and hence is safe for use
* in RFC822 headers. <p>
*
* This is a package private class.
*
* @author John Mani
* @author Max Spivak
* @author Bill Shannon
*/
/**
 * This is a utility class that generates unique values. The generated
 * String contains only US-ASCII characters and hence is safe for use
 * in RFC822 headers. <p>
 *
 * Uniqueness comes from combining a process-wide counter, an object
 * identity hash code, and the current wall-clock time; none of these
 * pieces is guaranteed unique on its own.
 *
 * This is a package private class.
 *
 * @author John Mani
 * @author Max Spivak
 * @author Bill Shannon
 */
class UniqueValue {
    /**
     * A global part number. Access is not synchronized because the
     * value is only one part of the unique value and so doesn't need
     * to be accurate.
     */
    private static int part = 0;

    /**
     * Likewise, a global id for Message-ID generation; unsynchronized
     * for the same reason as {@link #part}.
     */
    private static int id = 0;

    /**
     * Get a unique value for use in a multipart boundary string.
     *
     * This implementation generates it by concatenating a global
     * part number, a newly created object's <code>hashCode()</code>,
     * and the current time (in milliseconds).
     *
     * Note: <code>s.hashCode()</code> is the identity hash of the
     * StringBuffer itself (StringBuffer does not override hashCode),
     * so it varies per buffer instance — that is the intent here.
     */
    public static String getUniqueBoundaryValue() {
	StringBuffer s = new StringBuffer();

	// Unique string is ----=_Part_<part>_<hashcode>.<currentTime>
	s.append("----=_Part_").append(part++).append("_").
	  append(s.hashCode()).append('.').
	  append(System.currentTimeMillis());
	return s.toString();
    }

    /**
     * Get a unique value for use in a Message-ID.
     *
     * This implementation generates it by concatenating a newly
     * created object's <code>hashCode()</code>, a global ID
     * (incremented on every use), the current
     * time (in milliseconds), the string "JavaMail", and
     * this user's local address generated by
     * <code>InternetAddress.getLocalAddress()</code>.
     * (The address defaults to "javamailuser@localhost" if
     * <code>getLocalAddress()</code> returns null.)
     *
     * @param ssn Session object used to get the local address
     * @see javax.mail.internet.InternetAddress
     */
    public static String getUniqueMessageIDValue(Session ssn) {
	String suffix = null;

	// Local address supplies the domain part of the Message-ID.
	InternetAddress addr = InternetAddress.getLocalAddress(ssn);
	if (addr != null)
	    suffix = addr.getAddress();
	else {
	    suffix = "javamailuser@localhost"; // worst-case default
	}

	StringBuffer s = new StringBuffer();

	// Unique string is <hashcode>.<id>.<currentTime>.JavaMail.<suffix>
	// (as above, s.hashCode() is the buffer's identity hash)
	s.append(s.hashCode()).append('.').append(id++).
	  append(System.currentTimeMillis()).append('.').
	  append("JavaMail.").
	  append(suffix);
	return s.toString();
    }
}
| apache-2.0 |
VT-Visionarium/osnap | src/main/java/edu/vt/arc/vis/osnap/layout/simpleComponents/SimpleScaleLayoutComponent.java | 5161 | /*******************************************************************************
* Copyright 2014 Virginia Tech Visionarium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package edu.vt.arc.vis.osnap.layout.simpleComponents;
import edu.vt.arc.vis.osnap.layout.common.BaseScaleLayoutComponent;
import edu.vt.arc.vis.osnap.visualization.VisualEdge;
import edu.vt.arc.vis.osnap.visualization.VisualHyperEdge;
import edu.vt.arc.vis.osnap.visualization.VisualNode;
import edu.vt.arc.vis.osnap.visualization.VisualProperty;
import java.util.Set;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import org.jutility.math.geometry.IScale;
import org.jutility.math.geometry.Scale;
import org.jutility.math.geometry.Scalef;
/**
* The <code>SimpleScaleLayoutComponent</code> class provides a basic
* implementation of the
* {@link edu.vt.arc.vis.osnap.layout.common.IScaleLayoutComponent} interface,
* setting the {@link Scalef Scale} of nodes, edges, and hyperedges to (1, 1,
* 1).
*
* @author Peter J. Radics
* @version 0.1
*/
/**
 * The <code>SimpleScaleLayoutComponent</code> class provides a basic
 * implementation of the
 * {@link edu.vt.arc.vis.osnap.layout.common.IScaleLayoutComponent} interface,
 * applying a single fixed {@link Scalef Scale} to nodes, edges, and
 * hyperedges (defaulting to (1, 1, 1)).
 *
 * @author Peter J. Radics
 * @version 0.1
 */
@XmlType(name = "SimpleScaleLayoutComponent")
public class SimpleScaleLayoutComponent
        extends BaseScaleLayoutComponent {

    @XmlElement(name = "Scale", type = Scale.class)
    private IScale<?> scale;

    /**
     * Returns the name of this <code>ILayoutComponent</code>.
     *
     * @return the name.
     */
    public static String name() {
        return "Simple Scale Layout Component";
    }

    /**
     * Returns the description of this <code>ILayoutComponent</code>.
     *
     * @return the description.
     */
    public static String description() {
        // Fixed: the original concatenation produced "...Componentprovides a
        // single scale for graph objects(nodes or edges)." with missing spaces.
        return "The " + SimpleScaleLayoutComponent.name()
                + " provides a single scale for graph objects (nodes or edges).";
    }

    /**
     * Returns the capabilities (the set of
     * {@link edu.vt.arc.vis.osnap.visualization.VisualProperty VisualProperties}
     * that can be provided) of this <code>ILayoutComponent</code>.
     *
     * @return the capabilities.
     */
    public static Set<VisualProperty> capabilities() {
        return BaseScaleLayoutComponent.capabilities();
    }

    /**
     * Returns the scale.
     *
     * @return the scale.
     */
    public IScale<?> getScale() {
        return this.scale;
    }

    /**
     * Sets the scale.
     *
     * Note that this does not update the name/description text derived from
     * the scale in the constructor.
     *
     * @param scale
     *            the scale.
     */
    public void setScale(IScale<?> scale) {
        this.scale = scale;
    }

    /**
     * Creates a new instance of the <code>SimpleScaleLayoutComponent</code>
     * class which sets the scale of graph objects to (1, 1, 1).
     */
    public SimpleScaleLayoutComponent() {
        this(new Scalef());
    }

    /**
     * Creates a new instance of the <code>SimpleScaleLayoutComponent</code>
     * class with the provided {@link Scalef Scale}.
     *
     * @param scale
     *            the intended scale for the graph objects.
     */
    public SimpleScaleLayoutComponent(final IScale<?> scale) {
        super(SimpleScaleLayoutComponent.capabilities(),
                SimpleScaleLayoutComponent.name(), SimpleScaleLayoutComponent
                        .description(), false);
        this.scale = scale;
        // Embed the concrete scale in the display name and description.
        this.setName(this.getName() + " (" + this.scale.toString() + ")");
        this.setDescription(this.getDescription() + "\n\tThe scale is set to "
                + this.scale.toString() + ".");
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * edu.vt.arc.vis.osnap.layout.common.ILayoutComponent#layout(edu.vt.arc.vis.osnap
     * .visualization.VisualNode)
     */
    @Override
    public void layout(VisualNode visualNode) {
        if (this.isEnabled(VisualProperty.NODE_SCALE)) {
            visualNode.setScale(this.scale);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * edu.vt.arc.vis.osnap.layout.common.ILayoutComponent#layout(edu.vt.arc.vis.osnap
     * .visualization.VisualEdge)
     */
    @Override
    public void layout(VisualEdge visualEdge) {
        if (this.isEnabled(VisualProperty.EDGE_SCALE)) {
            visualEdge.setScale(this.scale);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * edu.vt.arc.vis.osnap.layout.common.ILayoutComponent#layout(edu.vt.arc.vis.osnap
     * .visualization.VisualHyperEdge)
     */
    @Override
    public void layout(VisualHyperEdge visualHyperEdge) {
        if (this.isEnabled(VisualProperty.HYPEREDGE_SCALE)) {
            visualHyperEdge.setScale(this.scale);
        }
    }
}
| apache-2.0 |
gouravshenoy/airavata | modules/gfac/gfac-impl/src/main/java/org/apache/airavata/gfac/impl/task/utils/bes/FileTransferBase.java | 5407 | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.gfac.impl.task.utils.bes;
import de.fzj.unicore.uas.client.StorageClient;
import de.fzj.unicore.uas.util.PropertyHelper;
import org.unigrids.services.atomic.types.GridFileType;
import org.unigrids.services.atomic.types.ProtocolType;
import java.io.File;
import java.io.FilenameFilter;
import java.util.*;
import java.util.regex.Pattern;
/**
 * Base class for file transfers between the local file system and a UNICORE
 * storage. Holds the transfer configuration (source/target, protocol
 * preferences, optional byte range) and provides shared helpers such as
 * wildcard expansion and remote directory validation.
 */
public class FileTransferBase {

    /** optional source of protocol-specific extra parameters */
    protected Properties extraParameterSource;

    protected boolean timing = false;

    protected boolean recurse = false;

    protected String from;

    protected String to;

    /** index of first byte to download (null when no range is requested) */
    protected Long startByte;

    /** index of last byte to download (null when no range is requested) */
    protected Long endByte;

    /**
     * the creation mode
     */
    protected Mode mode;

    /**
     * whether the job processing should fail if an error occurs
     */
    protected boolean failOnError;

    protected List<ProtocolType.Enum> preferredProtocols = new ArrayList<ProtocolType.Enum>();

    public FileTransferBase() {
        // BFT is the baseline protocol every UNICORE storage supports.
        preferredProtocols.add(ProtocolType.BFT);
    }

    /**
     * Collects the extra parameters configured for the given protocol from
     * {@link #extraParameterSource}. Returns an empty map when no parameter
     * source was configured.
     */
    protected Map<String, String> makeExtraParameters(ProtocolType.Enum protocol) {
        Map<String, String> res;
        if (extraParameterSource == null) {
            res = new HashMap<String, String>();
        } else {
            String p = String.valueOf(protocol);
            PropertyHelper ph = new PropertyHelper(extraParameterSource, new String[]{p, p.toLowerCase()});
            res = ph.getFilteredMap();
        }
        if (res.size() > 0) {
            // TODO: change it to logger
            System.out.println("Have " + res.size() + " extra parameters for protocol " + protocol);
        }
        return res;
    }

    public String getTo() {
        return to;
    }

    public String getFrom() {
        return from;
    }

    public void setTo(String to) {
        this.to = to;
    }

    public void setFrom(String from) {
        this.from = from;
    }

    public Mode getMode() {
        return mode;
    }

    public boolean isFailOnError() {
        return failOnError;
    }

    public boolean isTiming() {
        return timing;
    }

    public void setTiming(boolean timing) {
        this.timing = timing;
    }

    public void setFailOnError(boolean failOnError) {
        this.failOnError = failOnError;
    }

    public List<ProtocolType.Enum> getPreferredProtocols() {
        return preferredProtocols;
    }

    public void setPreferredProtocols(List<ProtocolType.Enum> preferredProtocols) {
        this.preferredProtocols = preferredProtocols;
    }

    public void setExtraParameterSource(Properties properties) {
        this.extraParameterSource = properties;
    }

    public void setRecurse(boolean recurse) {
        this.recurse = recurse;
    }

    /**
     * check if the given path denotes a valid remote directory
     *
     * @param remotePath - the path
     * @param sms - the storage
     * @return <code>true</code> if the remote directory exists and is a directory
     */
    protected boolean isValidDirectory(String remotePath, StorageClient sms) {
        boolean result = false;
        // "/" and "." are always accepted without a remote round-trip.
        if (!("/".equals(remotePath) || ".".equals(remotePath))) {
            try {
                GridFileType gft = sms.listProperties(remotePath);
                result = gft.getIsDirectory();
            } catch (Exception ex) {
                // any failure (missing path, access denied, ...) => not valid
                result = false;
            }
        } else {
            result = true;
        }
        return result;
    }

    /**
     * Expands wildcards ('*' and '?') in the name of the given file against
     * its parent directory. Returns the file itself when the name contains no
     * wildcards, and an empty array when the parent cannot be listed.
     */
    public File[] resolveWildCards(File original) {
        final String name = original.getName();
        if (!hasWildCards(original)) {
            return new File[]{original};
        }
        File parent = original.getParentFile();
        if (parent == null) {
            parent = new File(".");
        }
        FilenameFilter filter = new FilenameFilter() {
            Pattern p = createPattern(name);

            public boolean accept(File dir, String fileName) {
                return p.matcher(fileName).matches();
            }
        };
        File[] matches = parent.listFiles(filter);
        // listFiles() returns null on I/O errors; normalize to an empty result
        // so callers can iterate safely.
        return matches != null ? matches : new File[0];
    }

    protected boolean hasWildCards(File file) {
        return hasWildCards(file.getName());
    }

    public boolean hasWildCards(String name) {
        return name.contains("*") || name.contains("?");
    }

    /**
     * Translates a file name with shell-style wildcards into a regex.
     * Literal segments are quoted so that regex metacharacters in the name
     * (e.g. '.', '(', '[') match literally; the original implementation left
     * them unescaped, making "a.txt" match "aXtxt" and names containing
     * '(' throw PatternSyntaxException.
     */
    private Pattern createPattern(String nameWithWildcards) {
        StringBuilder regex = new StringBuilder();
        StringBuilder literal = new StringBuilder();
        for (int i = 0; i < nameWithWildcards.length(); i++) {
            char c = nameWithWildcards.charAt(i);
            if (c == '*' || c == '?') {
                if (literal.length() > 0) {
                    regex.append(Pattern.quote(literal.toString()));
                    literal.setLength(0);
                }
                regex.append(c == '*' ? ".*" : ".");
            } else {
                literal.append(c);
            }
        }
        if (literal.length() > 0) {
            regex.append(Pattern.quote(literal.toString()));
        }
        return Pattern.compile(regex.toString());
    }

    protected ProtocolType.Enum chosenProtocol = null;

    public ProtocolType.Enum getChosenProtocol() {
        return chosenProtocol;
    }

    public Long getStartByte() {
        return startByte;
    }

    public void setStartByte(Long startByte) {
        this.startByte = startByte;
    }

    public Long getEndByte() {
        return endByte;
    }

    public void setEndByte(Long endByte) {
        this.endByte = endByte;
    }

    /**
     * checks if a byte range is defined
     * @return <code>true</code> iff both startByte and endByte are defined
     */
    protected boolean isRange() {
        return startByte != null && endByte != null;
    }

    /**
     * get the number of bytes in the byte range, or "-1" if the range is open-ended
     * @return the range size, or -1 when endByte is Long.MAX_VALUE
     */
    protected long getRangeSize() {
        if (Long.MAX_VALUE == endByte) return -1;
        return endByte - startByte;
    }
}
| apache-2.0 |
NLeSC/Platinum | ptk-vbrowser-vrs-sftp/src/test/java/nl/esciencecenter/vbrowser/vrs/sftp/TestSftpExists.java | 1453 | package nl.esciencecenter.vbrowser.vrs.sftp;
import nl.esciencecenter.vbrowser.vrs.sftp.jsch.SftpChannel;
import nl.esciencecenter.vbrowser.vrs.sftp.jsch.SftpConfig;
import nl.esciencecenter.vbrowser.vrs.sftp.jsch.SshSession;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.SftpException;
/**
 * Manual integration check for SFTP exists() handling: connects to a local
 * SSH server and probes an existing file, a directory, and a missing file.
 * Requires a reachable sshd on localhost:22 accepting the hard-coded
 * credentials below; failures are printed, not asserted.
 */
public class TestSftpExists {

    public static void main(String args[]) {
        try {
            testConnect();
        } catch (Exception e) {
            // manual harness: dump any connection/authentication failure
            e.printStackTrace();
        }
    }

    public static void testConnect() throws Exception {
        // Route JSch's internal logging through the project's debug logger.
        JSch.setLogger(new DebuggingLogger());

        SftpConfig config = new SftpConfig();
        config.host = "localhost";
        config.port = 22;
        config.user = "sftptest";
        config.passwd = "test1234".toCharArray();

        SshSession session = new SshSession(new JSch(), config, false);
        // UserRobot auto-answers authentication prompts with the credentials above.
        session.setUserUI(new UserRobot(config.user, config.passwd, null, true));
        session.connect();
        SftpChannel channel = session.createSftpChannel();
        channel.connect();

        // Existing file: expected to succeed without throwing.
        channel.exists("/tmp/testdir/testfile");

        // Directory path: exercise exists() on a directory; any SftpException
        // is printed for inspection rather than failing the run.
        try {
            channel.exists("/tmp");
        } catch (SftpException e) {
            e.printStackTrace();
        }

        // Missing file: exercise the not-found path the same way.
        try {
            channel.exists("/tmp/testdir/testfileNOTFOUND");
        } catch (SftpException e) {
            e.printStackTrace();
        }
    }
}
| apache-2.0 |
pitchpoint-solutions/sfs | sfs-server/src/main/java/org/sfs/nodes/compute/container/ImportContainer.java | 32065 | /*
* Copyright 2016 The Simple File Server Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sfs.nodes.compute.container;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.protobuf.InvalidProtocolBufferException;
import io.vertx.core.Handler;
import io.vertx.core.MultiMap;
import io.vertx.core.http.HttpServerResponse;
import io.vertx.core.json.JsonObject;
import io.vertx.core.logging.Logger;
import org.sfs.Server;
import org.sfs.SfsRequest;
import org.sfs.SfsVertx;
import org.sfs.VertxContext;
import org.sfs.auth.Authenticate;
import org.sfs.elasticsearch.container.LoadAccountAndContainer;
import org.sfs.elasticsearch.object.LoadObject;
import org.sfs.elasticsearch.object.PersistOrUpdateVersion;
import org.sfs.elasticsearch.object.UpdateObject;
import org.sfs.encryption.Algorithm;
import org.sfs.encryption.AlgorithmDef;
import org.sfs.filesystem.JournalFile;
import org.sfs.io.BufferEndableWriteStream;
import org.sfs.io.InflaterEndableWriteStream;
import org.sfs.io.PipedEndableWriteStream;
import org.sfs.io.PipedReadStream;
import org.sfs.nodes.all.segment.AcknowledgeSegment;
import org.sfs.nodes.compute.object.WriteNewSegment;
import org.sfs.rx.ConnectionCloseTerminus;
import org.sfs.rx.ObservableFuture;
import org.sfs.rx.RxHelper;
import org.sfs.rx.ToVoid;
import org.sfs.util.HttpRequestValidationException;
import org.sfs.validate.ValidateActionAdmin;
import org.sfs.validate.ValidateContainerIsEmpty;
import org.sfs.validate.ValidateContainerPath;
import org.sfs.validate.ValidateHeaderBetweenLong;
import org.sfs.validate.ValidateHeaderExists;
import org.sfs.validate.ValidateHeaderIsBase64Encoded;
import org.sfs.validate.ValidateObjectPath;
import org.sfs.validate.ValidateOptimisticObjectLock;
import org.sfs.validate.ValidatePath;
import org.sfs.vo.ObjectPath;
import org.sfs.vo.PersistentObject;
import org.sfs.vo.TransientObject;
import org.sfs.vo.TransientSegment;
import org.sfs.vo.XObject;
import rx.Observable;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.zip.InflaterOutputStream;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Predicates.notNull;
import static com.google.common.base.Splitter.on;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.collect.FluentIterable.from;
import static com.google.common.io.BaseEncoding.base64;
import static com.google.common.primitives.Longs.tryParse;
import static io.vertx.core.logging.LoggerFactory.getLogger;
import static java.lang.Boolean.TRUE;
import static java.lang.String.format;
import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.nio.file.Paths.get;
import static java.util.Calendar.getInstance;
import static org.sfs.encryption.AlgorithmDef.fromNameIfExists;
import static org.sfs.filesystem.containerdump.DumpFileWriter.DUMP_FILE_NAME;
import static org.sfs.protobuf.XVolume.XDumpFile.CompressionType;
import static org.sfs.protobuf.XVolume.XDumpFile.CompressionType.DEFLATE;
import static org.sfs.protobuf.XVolume.XDumpFile.CompressionType.NONE;
import static org.sfs.protobuf.XVolume.XDumpFile.FirstHeader.parseFrom;
import static org.sfs.protobuf.XVolume.XDumpFile.Header;
import static org.sfs.protobuf.XVolume.XDumpFile.Header.Type;
import static org.sfs.protobuf.XVolume.XDumpFile.Header.Type.VERSION_01;
import static org.sfs.protobuf.XVolume.XDumpFile.Version01;
import static org.sfs.rx.Defer.aVoid;
import static org.sfs.rx.Defer.just;
import static org.sfs.rx.RxHelper.combineSinglesDelayError;
import static org.sfs.util.ExceptionHelper.unwrapCause;
import static org.sfs.util.KeepAliveHttpServerResponse.DELIMITER_BUFFER;
import static org.sfs.util.SfsHttpHeaders.X_SFS_IMPORT_SKIP_POSITIONS;
import static org.sfs.util.SfsHttpHeaders.X_SFS_KEEP_ALIVE_TIMEOUT;
import static org.sfs.util.SfsHttpHeaders.X_SFS_SECRET;
import static org.sfs.util.SfsHttpHeaders.X_SFS_SRC_DIRECTORY;
import static org.sfs.vo.ObjectPath.DELIMITER;
import static org.sfs.vo.ObjectPath.fromPaths;
import static org.sfs.vo.ObjectPath.fromSfsRequest;
import static rx.Observable.error;
public class ImportContainer implements Handler<SfsRequest> {
private static final Logger LOGGER = getLogger(ImportContainer.class);
@Override
public void handle(final SfsRequest httpServerRequest) {
VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
aVoid()
.flatMap(new Authenticate(httpServerRequest))
.flatMap(new ValidateActionAdmin(httpServerRequest))
.map(aVoid -> httpServerRequest)
.map(new ValidateHeaderExists(X_SFS_SRC_DIRECTORY))
.map(new ValidateHeaderBetweenLong(X_SFS_KEEP_ALIVE_TIMEOUT, 10000, 300000))
.map(aVoid -> fromSfsRequest(httpServerRequest))
.map(new ValidateContainerPath())
.flatMap(new LoadAccountAndContainer(vertxContext))
.flatMap(new ValidateContainerIsEmpty(vertxContext))
.flatMap(targetPersistentContainer -> {
MultiMap headers = httpServerRequest.headers();
String importDirectory = headers.get(X_SFS_SRC_DIRECTORY);
String unparsedSkipPositions = headers.get(X_SFS_IMPORT_SKIP_POSITIONS);
Set<Long> skipPositions;
if (!isNullOrEmpty(unparsedSkipPositions)) {
skipPositions =
from(on(',').trimResults().split(unparsedSkipPositions))
.transform(input -> tryParse(input))
.filter(notNull())
.toSet();
} else {
skipPositions = new HashSet<>(0);
}
return aVoid()
.flatMap(aVoid -> {
ObservableFuture<Boolean> handler = RxHelper.observableFuture();
vertxContext.vertx().fileSystem().exists(importDirectory, handler.toHandler());
return handler
.map(destDirectoryExists -> {
if (!TRUE.equals(destDirectoryExists)) {
JsonObject jsonObject = new JsonObject()
.put("message", format("%s does not exist", importDirectory));
throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
} else {
return (Void) null;
}
});
})
.flatMap(oVoid -> {
ObservableFuture<List<String>> handler = RxHelper.observableFuture();
vertxContext.vertx().fileSystem().readDir(importDirectory, handler.toHandler());
return handler
.map(listing -> {
if (listing.size() <= 0) {
JsonObject jsonObject = new JsonObject()
.put("message", format("%s is empty", importDirectory));
throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
} else {
return (Void) null;
}
});
})
.flatMap(aVoid -> {
LOGGER.info("Importing into container " + targetPersistentContainer.getId() + " from " + importDirectory);
JournalFile journalFile = new JournalFile(get(importDirectory).resolve(DUMP_FILE_NAME));
return journalFile.open(vertxContext.vertx())
.map(aVoid1 -> journalFile);
})
.flatMap(journalFile -> {
SfsVertx sfsVertx = vertxContext.vertx();
return journalFile.getFirstEntry(sfsVertx)
.map(entryOptional -> {
checkState(entryOptional.isPresent(), "First dump file entry is corrupt");
return entryOptional.get();
})
.flatMap(entry ->
entry.getMetadata(sfsVertx)
.map(buffer -> {
try {
return parseFrom(buffer.getBytes());
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException(e);
}
})
.flatMap(firstHeader -> {
if (firstHeader.getEncrypted()) {
return just(httpServerRequest)
.map(new ValidateHeaderExists(X_SFS_SECRET))
.map(new ValidateHeaderIsBase64Encoded(X_SFS_SECRET))
.map(new ToVoid<>())
.map(aVoid -> {
String cipherName = firstHeader.getCipherName();
checkState(!isNullOrEmpty(cipherName), "Encryption is enabled by cipher name is not specified");
AlgorithmDef algorithmDef = fromNameIfExists(cipherName);
checkState(algorithmDef != null, "Algorithm %s not found", cipherName);
return new ImportStartState(
journalFile,
entry.getNextHeaderPosition(),
algorithmDef,
base64().decode(headers.get(X_SFS_SECRET)));
});
} else {
return just(new ImportStartState(
journalFile,
entry.getNextHeaderPosition(),
null,
null));
}
}));
})
.flatMap(importStartState -> {
JournalFile journalFile = importStartState.getJournalFile();
long startPosition = importStartState.getStartPosition();
boolean encrypted = importStartState.getAlgorithmDef() != null;
byte[] secret = importStartState.getSecret();
AlgorithmDef algorithmDef = importStartState.getAlgorithmDef();
httpServerRequest.startProxyKeepAlive();
SfsVertx sfsVertx = vertxContext.vertx();
return journalFile.scan(sfsVertx, startPosition, entry -> {
// skip over any positions that should be skipped
if (skipPositions.contains(entry.getHeaderPosition())) {
return just(true);
}
Observable<Boolean> oImport = entry.getMetadata(sfsVertx)
.flatMap(buffer -> {
try {
Header header = Header.parseFrom(buffer.getBytes());
Type type = header.getType();
checkState(VERSION_01.equals(type), "Type was %s, expected %s", type, VERSION_01);
byte[] cipherDataSalt = header.getCipherDataSalt() != null ? header.getCipherDataSalt().toByteArray() : null;
byte[] cipherMetadataSalt = header.getCipherMetadataSalt() != null ? header.getCipherMetadataSalt().toByteArray() : null;
CompressionType metadataCompressionType = header.getMetadataCompressionType();
checkState(NONE.equals(metadataCompressionType) || DEFLATE.equals(metadataCompressionType), "Metadata compression type was %s, expected %s", metadataCompressionType, DEFLATE);
CompressionType dataCompressionType = header.getDataCompressionType();
checkState(NONE.equals(dataCompressionType) || DEFLATE.equals(dataCompressionType), "Data compression type was %s, expected %s", dataCompressionType, DEFLATE);
byte[] marshaledExportObject = header.getData().toByteArray();
if (encrypted) {
checkState(cipherMetadataSalt != null && cipherMetadataSalt.length > 0);
Algorithm algorithm = algorithmDef.create(secret, cipherMetadataSalt);
marshaledExportObject = algorithm.decrypt(marshaledExportObject);
}
if (DEFLATE.equals(metadataCompressionType)) {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
try (InflaterOutputStream inflaterOutputStream = new InflaterOutputStream(byteArrayOutputStream)) {
inflaterOutputStream.write(marshaledExportObject);
} catch (IOException e) {
throw new RuntimeException(e);
}
marshaledExportObject = byteArrayOutputStream.toByteArray();
}
Version01 exportObject = Version01.parseFrom(marshaledExportObject);
ObjectPath originalObjectPath = fromPaths(exportObject.getObjectId());
String originalAccountName = originalObjectPath.accountName().get();
String originalContainerName = originalObjectPath.containerName().get();
String originalObjectName = originalObjectPath.objectName().get();
ObjectPath targetObjectPath = fromPaths(targetPersistentContainer.getId(), originalObjectName);
ValidatePath validatePath = new ValidateObjectPath();
validatePath.call(targetObjectPath);
String targetObjectId = targetObjectPath.objectPath().get();
String targetAccountName = targetObjectPath.accountName().get();
String targetContainerName = targetObjectPath.containerName().get();
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Importing object {}", exportObject.toString());
}
return just(targetObjectId)
.flatMap(new LoadObject(vertxContext, targetPersistentContainer))
.map(oPersistentObject -> {
if (oPersistentObject.isPresent()) {
PersistentObject persistentObject = oPersistentObject.get();
return persistentObject.newVersion().merge(exportObject);
} else {
final TransientObject transientObject = new TransientObject(targetPersistentContainer, targetObjectId)
.setOwnerGuid(exportObject.getOwnerGuid());
return transientObject
.newVersion()
.merge(exportObject);
}
})
.flatMap(transientVersion -> {
long length = transientVersion.getContentLength().get();
if (length > 0 && !transientVersion.isDeleted()) {
return aVoid()
.flatMap(aVoid -> {
PipedReadStream pipedReadStream = new PipedReadStream();
BufferEndableWriteStream bufferStreamConsumer = new PipedEndableWriteStream(pipedReadStream);
if (DEFLATE.equals(dataCompressionType)) {
bufferStreamConsumer = new InflaterEndableWriteStream(bufferStreamConsumer);
}
if (encrypted) {
checkState(cipherDataSalt != null && cipherDataSalt.length > 0);
Algorithm algorithm = algorithmDef.create(secret, cipherDataSalt);
bufferStreamConsumer = algorithm.decrypt(bufferStreamConsumer);
}
Observable<Void> oProducer = entry.produceData(sfsVertx, bufferStreamConsumer);
Observable<TransientSegment> oConsumer =
just(transientVersion)
.flatMap(new WriteNewSegment(vertxContext, pipedReadStream));
return combineSinglesDelayError(oProducer, oConsumer, (aVoid1, transientSegment) -> transientSegment);
})
.map(transientSegment -> transientSegment.getParent())
.doOnNext(xVersion -> {
LOGGER.debug("Finished Segment Write for {}", xVersion.getParent().getId());
});
} else {
return just(transientVersion);
}
})
.doOnNext(transientVersion -> {
Optional<String> oObjectManifest = transientVersion.getObjectManifest();
if (oObjectManifest.isPresent()) {
String objectManifest = oObjectManifest.get();
int indexOfObjectName = objectManifest.indexOf(DELIMITER);
if (indexOfObjectName > 0) {
String containerName = objectManifest.substring(0, indexOfObjectName);
// only adjust the object manifest if the manifest references objects
// in the container that was exported
if (Objects.equals(containerName, originalContainerName)) {
objectManifest = targetContainerName + DELIMITER + objectManifest.substring(indexOfObjectName + 1);
transientVersion.setObjectManifest(objectManifest);
}
}
}
})
.flatMap(new PersistOrUpdateVersion(vertxContext))
.flatMap(transientVersion -> {
long length = transientVersion.getContentLength().get();
if (length > 0 && !transientVersion.getSegments().isEmpty()) {
TransientSegment latestSegment = transientVersion.getNewestSegment().get();
return just(latestSegment)
.flatMap(new AcknowledgeSegment(httpServerRequest.vertxContext()))
.map(modified -> latestSegment.getParent());
} else {
return just(transientVersion);
}
})
.flatMap(transientVersion -> {
final long versionId = transientVersion.getId();
XObject xObject = transientVersion.getParent();
return just((PersistentObject) xObject)
.map(persistentObject -> persistentObject.setUpdateTs(getInstance()))
.flatMap(new UpdateObject(httpServerRequest.vertxContext()))
.map(new ValidateOptimisticObjectLock())
.map(persistentObject -> persistentObject.getVersion(versionId).get());
})
.map(version -> TRUE);
} catch (InvalidProtocolBufferException e) {
throw new RuntimeException(e);
}
})
.onErrorResumeNext(throwable -> error(new IgnorePositionRuntimeException(throwable, entry.getHeaderPosition())));
return RxHelper.onErrorResumeNextExponential(100, 5, () -> oImport);
});
})
.doOnNext(aVoid -> LOGGER.info("Done importing into container " + targetPersistentContainer.getId() + " from " + importDirectory))
.map(new ToVoid<>())
.map(aVoid -> {
JsonObject jsonResponse = new JsonObject();
jsonResponse.put("code", HTTP_OK);
return jsonResponse;
})
.onErrorResumeNext(throwable -> {
LOGGER.info("Failed importing into container " + targetPersistentContainer.getId() + " from " + importDirectory, throwable);
Optional<IgnorePositionRuntimeException> oIgnorePosition = unwrapCause(IgnorePositionRuntimeException.class, throwable);
if (oIgnorePosition.isPresent()) {
IgnorePositionRuntimeException ignorePositionRuntimeException = oIgnorePosition.get();
LOGGER.error("Handling Exception", ignorePositionRuntimeException);
long positionToIgnore = ignorePositionRuntimeException.getPosition();
JsonObject jsonResponse = new JsonObject();
skipPositions.add(positionToIgnore);
String joined = Joiner.on(',').join(skipPositions);
jsonResponse.put("code", HTTP_INTERNAL_ERROR);
jsonResponse.put("message", format("If you would like to ignore this position set the %s header with the value %s", X_SFS_IMPORT_SKIP_POSITIONS, joined));
jsonResponse.put(X_SFS_IMPORT_SKIP_POSITIONS, joined);
return just(jsonResponse);
} else {
return error(throwable);
}
});
})
.single()
.subscribe(new ConnectionCloseTerminus<JsonObject>(httpServerRequest) {
@Override
public void onNext(JsonObject jsonResponse) {
HttpServerResponse httpResponse = httpServerRequest.response();
httpResponse.write(jsonResponse.encode(), UTF_8.toString())
.write(DELIMITER_BUFFER);
}
});
}
/**
 * Wraps a failure that occurred while importing the journal entry at a
 * particular file position, so the top-level error handler can tell the
 * caller which position to add to the skip list before retrying.
 */
private static class IgnorePositionRuntimeException extends RuntimeException {

    // RuntimeException is Serializable; declare an explicit version id so
    // serialization compatibility does not depend on compiler-generated values.
    private static final long serialVersionUID = 1L;

    /** Byte position (journal entry header) of the entry whose import failed. */
    private final long position;

    public IgnorePositionRuntimeException(Throwable cause, long position) {
        super(cause);
        this.position = position;
    }

    public long getPosition() {
        return position;
    }
}
/**
 * Immutable holder for the state needed to begin scanning an import journal:
 * the open journal (dump) file, the byte position of the first entry to
 * process, and — when the dump was encrypted — the cipher definition plus the
 * caller-supplied secret (both are null for unencrypted dumps).
 */
private static class ImportStartState {

    // Journal (dump) file the import reads entries from.
    private final JournalFile journalFile;

    // Byte offset of the first journal entry to import.
    private final long startPosition;

    // Cipher used to encrypt the dump; null when the dump is unencrypted.
    private final AlgorithmDef algorithmDef;

    // Decryption secret from the X-SFS-SECRET header; null when unencrypted.
    private final byte[] secret;

    public ImportStartState(JournalFile journalFile, long startPosition, AlgorithmDef algorithmDef, byte[] secret) {
        this.journalFile = journalFile;
        this.startPosition = startPosition;
        this.algorithmDef = algorithmDef;
        this.secret = secret;
    }

    public JournalFile getJournalFile() {
        return journalFile;
    }

    public long getStartPosition() {
        return startPosition;
    }

    public AlgorithmDef getAlgorithmDef() {
        return algorithmDef;
    }

    public byte[] getSecret() {
        return secret;
    }
}
}
| apache-2.0 |
watson-developer-cloud/java-sdk | natural-language-understanding/src/test/java/com/ibm/watson/natural_language_understanding/v1/model/SemanticRolesEntityTest.java | 1494 | /*
* (C) Copyright IBM Corp. 2020.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.ibm.watson.natural_language_understanding.v1.model;
import static org.testng.Assert.*;
import com.ibm.cloud.sdk.core.service.model.FileWithMetadata;
import com.ibm.watson.natural_language_understanding.v1.utils.TestUtilities;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import org.testng.annotations.Test;
/**
 * Unit tests covering the {@link SemanticRolesEntity} model class.
 */
public class SemanticRolesEntityTest {
  final HashMap<String, InputStream> mockStreamMap = TestUtilities.createMockStreamMap();
  final List<FileWithMetadata> mockListFileWithMetadata =
      TestUtilities.creatMockListFileWithMetadata();

  /** A freshly constructed entity exposes neither a text nor a type value. */
  @Test
  public void testSemanticRolesEntity() throws Throwable {
    final SemanticRolesEntity model = new SemanticRolesEntity();
    assertNull(model.getText());
    assertNull(model.getType());
  }
}
| apache-2.0 |
googleads/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202202/CreativeTemplatePage.java | 9792 | // Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* CreativeTemplatePage.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202202;
/**
 * Captures a page of {@link CreativeTemplate} objects.
 *
 * <p>NOTE(review): per the file header this class was auto-generated by Axis
 * WSDL2Java; only comments are added here so a future regeneration stays a
 * trivial diff.
 */
public class CreativeTemplatePage implements java.io.Serializable , Iterable<com.google.api.ads.admanager.axis.v202202.CreativeTemplate>{
    /* The size of the total result set to which this page belongs. */
    private java.lang.Integer totalResultSetSize;

    /* The absolute index in the total result set on which this page
     * begins. */
    private java.lang.Integer startIndex;

    /* The collection of creative templates contained within this
     * page. */
    private com.google.api.ads.admanager.axis.v202202.CreativeTemplate[] results;

    /** No-arg constructor required by the Axis bean (de)serializer. */
    public CreativeTemplatePage() {
    }

    public CreativeTemplatePage(
           java.lang.Integer totalResultSetSize,
           java.lang.Integer startIndex,
           com.google.api.ads.admanager.axis.v202202.CreativeTemplate[] results) {
           this.totalResultSetSize = totalResultSetSize;
           this.startIndex = startIndex;
           this.results = results;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            // Only include length of results to avoid overly verbose output
            .add("results.length", getResults() == null ? 0 : getResults().length)
            .add("startIndex", getStartIndex())
            .add("totalResultSetSize", getTotalResultSetSize())
            .toString();
    }

    /**
     * Gets the totalResultSetSize value for this CreativeTemplatePage.
     *
     * @return totalResultSetSize   * The size of the total result set to which this page belongs.
     */
    public java.lang.Integer getTotalResultSetSize() {
        return totalResultSetSize;
    }

    /**
     * Sets the totalResultSetSize value for this CreativeTemplatePage.
     *
     * @param totalResultSetSize   * The size of the total result set to which this page belongs.
     */
    public void setTotalResultSetSize(java.lang.Integer totalResultSetSize) {
        this.totalResultSetSize = totalResultSetSize;
    }

    /**
     * Gets the startIndex value for this CreativeTemplatePage.
     *
     * @return startIndex   * The absolute index in the total result set on which this page
     * begins.
     */
    public java.lang.Integer getStartIndex() {
        return startIndex;
    }

    /**
     * Sets the startIndex value for this CreativeTemplatePage.
     *
     * @param startIndex   * The absolute index in the total result set on which this page
     * begins.
     */
    public void setStartIndex(java.lang.Integer startIndex) {
        this.startIndex = startIndex;
    }

    /**
     * Gets the results value for this CreativeTemplatePage.
     *
     * @return results   * The collection of creative templates contained within this
     * page.
     */
    public com.google.api.ads.admanager.axis.v202202.CreativeTemplate[] getResults() {
        return results;
    }

    /**
     * Sets the results value for this CreativeTemplatePage.
     *
     * @param results   * The collection of creative templates contained within this
     * page.
     */
    public void setResults(com.google.api.ads.admanager.axis.v202202.CreativeTemplate[] results) {
        this.results = results;
    }

    /** Indexed accessor used by the Axis bean introspector. */
    public com.google.api.ads.admanager.axis.v202202.CreativeTemplate getResults(int i) {
        return this.results[i];
    }

    /** Indexed mutator used by the Axis bean introspector. */
    public void setResults(int i, com.google.api.ads.admanager.axis.v202202.CreativeTemplate _value) {
        this.results[i] = _value;
    }

    /**
     * Returns an iterator over this page's {@code results} that:
     * <ul>
     * <li>Will not be {@code null}.</li>
     * <li>Will not support {@link java.util.Iterator#remove()}.</li>
     * </ul>
     *
     * @return a non-null iterator.
     */
    @Override
    public java.util.Iterator<com.google.api.ads.admanager.axis.v202202.CreativeTemplate> iterator() {
        if (results == null) {
            return java.util.Collections.<com.google.api.ads.admanager.axis.v202202.CreativeTemplate>emptyIterator();
        }
        return java.util.Arrays.<com.google.api.ads.admanager.axis.v202202.CreativeTemplate>asList(results).iterator();
    }

    // Re-entrancy guard used by equals(): records the object currently being
    // compared so cyclic object graphs terminate instead of recursing forever.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof CreativeTemplatePage)) return false;
        CreativeTemplatePage other = (CreativeTemplatePage) obj;
        // NOTE(review): unreachable — a null obj already fails the instanceof
        // check above; retained verbatim as emitted by the generator.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.totalResultSetSize==null && other.getTotalResultSetSize()==null) ||
             (this.totalResultSetSize!=null &&
              this.totalResultSetSize.equals(other.getTotalResultSetSize()))) &&
            ((this.startIndex==null && other.getStartIndex()==null) ||
             (this.startIndex!=null &&
              this.startIndex.equals(other.getStartIndex()))) &&
            ((this.results==null && other.getResults()==null) ||
             (this.results!=null &&
              java.util.Arrays.equals(this.results, other.getResults())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard used by hashCode(), analogous to __equalsCalc above:
    // a cycle contributes 0 instead of recursing.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getTotalResultSetSize() != null) {
            _hashCode += getTotalResultSetSize().hashCode();
        }
        if (getStartIndex() != null) {
            _hashCode += getStartIndex().hashCode();
        }
        if (getResults() != null) {
            // Nested array elements are skipped (isArray check) — the
            // generator only hashes non-array members of the results array.
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getResults());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getResults(), i);
                if (obj != null &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata: maps each bean field to its XML element name and schema
    // type so the Axis runtime can (de)serialize instances of this class.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(CreativeTemplatePage.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "CreativeTemplatePage"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("totalResultSetSize");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "totalResultSetSize"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("startIndex");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "startIndex"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("results");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "results"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202202", "CreativeTemplate"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| apache-2.0 |
GIP-RECIA/cas | support/cas-server-support-surrogate-webflow/src/test/java/org/apereo/cas/web/flow/action/SurrogateAuthorizationActionTests.java | 3852 | package org.apereo.cas.web.flow.action;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.authentication.PrincipalException;
import org.apereo.cas.authentication.surrogate.SurrogateAuthenticationService;
import org.apereo.cas.services.SurrogateRegisteredServiceAccessStrategy;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.web.support.WebUtils;
import lombok.SneakyThrows;
import lombok.val;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockServletContext;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.execution.Action;
import org.springframework.webflow.test.MockRequestContext;
import java.util.LinkedHashMap;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
/**
* This is {@link SurrogateAuthorizationActionTests}.
*
* @author Misagh Moayyed
* @since 5.3.0
*/
public class SurrogateAuthorizationActionTests extends BaseSurrogateInitialAuthenticationActionTests {
@Autowired
@Qualifier("surrogateAuthorizationCheck")
private Action surrogateAuthorizationCheck;
@Test
public void verifyAuthorized() {
try {
val context = new MockRequestContext();
WebUtils.putServiceIntoFlowScope(context, CoreAuthenticationTestUtils.getWebApplicationService());
WebUtils.putAuthentication(CoreAuthenticationTestUtils.getAuthentication(), context);
val registeredService = CoreAuthenticationTestUtils.getRegisteredService();
val strategy = new SurrogateRegisteredServiceAccessStrategy();
when(registeredService.getAccessStrategy()).thenReturn(strategy);
WebUtils.putRegisteredService(context, registeredService);
val request = new MockHttpServletRequest();
context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse()));
assertEquals("success", surrogateAuthorizationCheck.execute(context).getId());
} catch (final Exception e) {
throw new AssertionError(e);
}
}
@Test
@SneakyThrows
public void verifyNotAuthorized() {
val context = new MockRequestContext();
WebUtils.putServiceIntoFlowScope(context, CoreAuthenticationTestUtils.getWebApplicationService());
val attributes = new LinkedHashMap<String, Object>();
attributes.put(SurrogateAuthenticationService.AUTHENTICATION_ATTR_SURROGATE_ENABLED, true);
attributes.putAll(CoreAuthenticationTestUtils.getAttributeRepository().getBackingMap());
val p = CoreAuthenticationTestUtils.getPrincipal("casuser", attributes);
WebUtils.putAuthentication(CoreAuthenticationTestUtils.getAuthentication(p), context);
val registeredService = CoreAuthenticationTestUtils.getRegisteredService();
val strategy = new SurrogateRegisteredServiceAccessStrategy();
strategy.setSurrogateEnabled(true);
strategy.setSurrogateRequiredAttributes(CollectionUtils.wrap("surrogateAttribute", CollectionUtils.wrapSet("someValue")));
when(registeredService.getAccessStrategy()).thenReturn(strategy);
WebUtils.putRegisteredService(context, registeredService);
val request = new MockHttpServletRequest();
context.setExternalContext(new ServletExternalContext(new MockServletContext(), request, new MockHttpServletResponse()));
assertThrows(PrincipalException.class, () -> surrogateAuthorizationCheck.execute(context));
}
}
| apache-2.0 |
nuxleus/closure-compiler | test/com/google/javascript/jscomp/AstValidatorTest.java | 3734 | /*
* Copyright 2011 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.jscomp.AstValidator.ViolationHandler;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
/**
 * Unit tests for {@link AstValidator}.
 *
 * @author johnlenz@google.com (John Lenz)
 */
public class AstValidatorTest extends CompilerTestCase {

  /** Records whether the most recent validation completed without violations. */
  private boolean lastCheckWasValid = true;

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    return createValidator();
  }

  /**
   * Creates a fresh validator and resets {@link #lastCheckWasValid}; the
   * violation handler merely records the failure instead of throwing.
   */
  private AstValidator createValidator() {
    lastCheckWasValid = true;
    return new AstValidator(new ViolationHandler() {
      @Override
      public void handleViolation(String message, Node n) {
        lastCheckWasValid = false;
      }
    });
  }

  @Override
  protected int getNumRepetitions() {
    return 1;
  }

  @Override
  protected void setUp() throws Exception {
    // This test drives validation manually, so the harness-level checks
    // are switched off before the base class initializes.
    super.enableAstValidation(false);
    super.disableNormalize();
    super.enableLineNumberCheck(false);
    super.setUp();
  }

  public void testForIn() {
    valid("for(var a in b);");
    valid("for(var a = 1 in b);");
    valid("for(a in b);");
    valid("for(a in []);");
    valid("for(a in {});");
  }

  public void testDebugger() {
    valid("debugger;");
  }

  public void testValidScript() {
    Node script = new Node(Token.SCRIPT);
    expectInvalid(script, Check.SCRIPT);
    script.setIsSyntheticBlock(true);
    expectInvalid(script, Check.SCRIPT);
    script.putProp(Node.SOURCENAME_PROP, "something");
    // With a source name attached the node is a valid script, but it is
    // still neither a statement nor an expression.
    expectValid(script, Check.SCRIPT);
    expectInvalid(script, Check.STATEMENT);
    expectInvalid(script, Check.EXPRESSION);
  }

  public void testValidStatement1() {
    Node returnNode = new Node(Token.RETURN);
    expectInvalid(returnNode, Check.EXPRESSION);
    expectValid(returnNode, Check.STATEMENT);
    expectInvalid(returnNode, Check.SCRIPT);
  }

  public void testValidExpression1() {
    Node arrayLit = new Node(Token.ARRAYLIT, new Node(Token.EMPTY));
    expectValid(arrayLit, Check.EXPRESSION);
    expectInvalid(arrayLit, Check.STATEMENT);
    expectInvalid(arrayLit, Check.SCRIPT);
  }

  public void testValidExpression2() {
    Node notNode = new Node(Token.NOT, new Node(Token.TRUE));
    expectValid(notNode, Check.EXPRESSION);
    expectInvalid(notNode, Check.STATEMENT);
    expectInvalid(notNode, Check.SCRIPT);
  }

  public void testInvalidEmptyStatement() {
    Node empty = new Node(Token.EMPTY, new Node(Token.TRUE));
    expectInvalid(empty, Check.STATEMENT);
    empty.detachChildren();
    expectValid(empty, Check.STATEMENT);
  }

  /** Compiles {@code code} through the validator pass and expects no violations. */
  private void valid(String code) {
    testSame(code);
    assertTrue(lastCheckWasValid);
  }

  /** The validation entry points a node can be checked against. */
  private enum Check {
    SCRIPT,
    STATEMENT,
    EXPRESSION
  }

  private boolean doCheck(Node node, Check level) {
    AstValidator validator = createValidator();
    if (level == Check.SCRIPT) {
      validator.validateScript(node);
    } else if (level == Check.STATEMENT) {
      validator.validateStatement(node);
    } else {
      validator.validateExpression(node);
    }
    return lastCheckWasValid;
  }

  private void expectInvalid(Node node, Check level) {
    assertFalse(doCheck(node, level));
  }

  private void expectValid(Node node, Check level) {
    assertTrue(doCheck(node, level));
  }
}
| apache-2.0 |
italycappuccino/work | src/test/java/com/stone/core/work/test/cache/Foo.java | 1109 | package com.stone.core.work.test.cache;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Repository;
import com.stone.core.work.entity.Car;
@Repository(value = "foo")
public class Foo {

    private static final Logger logger = LoggerFactory.getLogger(Foo.class);

    /**
     * Cache key built with {@code #car.id+#car.carNo}. NOTE(review): if both
     * SpEL operands evaluate as numbers this performs arithmetic addition, so
     * distinct (id, carNo) pairs could collide on one key — presumably this
     * method exists to exercise exactly that; confirm before changing.
     */
    @Cacheable(value = "default", key = "#car.id+#car.carNo")
    public String c(Car car) {
        // Guard: reflectionToString is expensive reflection work; only build
        // the message when DEBUG logging is actually enabled.
        if (logger.isDebugEnabled()) {
            logger.debug("Foo.c()" + ToStringBuilder.reflectionToString(car));
        }
        return "c";
    }

    /** Key concatenates id and carNo as strings via the interposed {@code +''}. */
    @Cacheable(value = "default", key = "#car.id+''+#car.carNo")
    public String a(Car car) {
        if (logger.isDebugEnabled()) {
            logger.debug("Foo.a()" + ToStringBuilder.reflectionToString(car));
        }
        return "a";
    }

    /** Key concatenates MD5 digests of id and carNo computed via a static SpEL call. */
    @Cacheable(value = "default", key = "T(com.stone.core.work.util.test.MD5Utils).md5(#car.id)+T(com.stone.core.work.util.test.MD5Utils).md5(#car.carNo)")
    public String b(Car car) {
        if (logger.isDebugEnabled()) {
            logger.debug("Foo.b()" + ToStringBuilder.reflectionToString(car));
        }
        return "b";
    }
}
| apache-2.0 |
Chasego/kafka | streams/src/test/java/org/apache/kafka/streams/kstream/internals/SuppressScenarioTest.java | 41883 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.kstream.internals;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.KeyValueTimestamp;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.TopologyTestDriver;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.KGroupedStream;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.Named;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.SessionWindows;
import org.apache.kafka.streams.kstream.SlidingWindows;
import org.apache.kafka.streams.kstream.Suppressed;
import org.apache.kafka.streams.kstream.TimeWindows;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.SessionStore;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.TestInputTopic;
import org.apache.kafka.streams.test.TestRecord;
import org.apache.kafka.test.TestUtils;
import org.junit.Test;
import java.time.Duration;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import static java.time.Duration.ZERO;
import static java.time.Duration.ofMillis;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxBytes;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.maxRecords;
import static org.apache.kafka.streams.kstream.Suppressed.BufferConfig.unbounded;
import static org.apache.kafka.streams.kstream.Suppressed.untilTimeLimit;
import static org.apache.kafka.streams.kstream.Suppressed.untilWindowCloses;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Scenario tests for the KTable / windowed {@code suppress()} operator, driven
 * entirely through {@code TopologyTestDriver} (no running broker). Each test
 * pipes records into an input topic and compares the raw (unsuppressed) and
 * suppressed output topics record-for-record, including timestamps.
 */
public class SuppressScenarioTest {
    // Shared (de)serializers for the String keys/values and Long counts used throughout.
    private static final StringDeserializer STRING_DESERIALIZER = new StringDeserializer();
    private static final StringSerializer STRING_SERIALIZER = new StringSerializer();
    private static final Serde<String> STRING_SERDE = Serdes.String();
    private static final LongDeserializer LONG_DESERIALIZER = new LongDeserializer();
    // Fresh temporary state directory per test instance so drivers don't collide.
    private final Properties config = Utils.mkProperties(Utils.mkMap(
        Utils.mkEntry(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath())
    ));
    // With a zero time limit, suppress() should pass every update straight through,
    // so the suppressed topic must mirror the raw topic exactly.
    @Test
    public void shouldImmediatelyEmitEventsWithZeroEmitAfter() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, Long> valueCounts = builder
            .table(
                "input",
                Consumed.with(STRING_SERDE, STRING_SERDE),
                Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                    .withCachingDisabled()
                    .withLoggingDisabled()
            )
            .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
            .count();
        valueCounts
            .suppress(untilTimeLimit(ZERO, unbounded()))
            .toStream()
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v2", 1L);
            inputTopic.pipeInput("k2", "v1", 2L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("v1", 1L, 0L),
                    new KeyValueTimestamp<>("v1", 0L, 1L),
                    new KeyValueTimestamp<>("v2", 1L, 1L),
                    new KeyValueTimestamp<>("v1", 1L, 2L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("v1", 1L, 0L),
                    new KeyValueTimestamp<>("v1", 0L, 1L),
                    new KeyValueTimestamp<>("v2", 1L, 1L),
                    new KeyValueTimestamp<>("v1", 1L, 2L)
                )
            );
            inputTopic.pipeInput("x", "x", 3L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(
                    new KeyValueTimestamp<>("x", 1L, 3L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(
                    new KeyValueTimestamp<>("x", 1L, 3L)
                )
            );
            inputTopic.pipeInput("x", "y", 4L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("x", 0L, 4L),
                    new KeyValueTimestamp<>("y", 1L, 4L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("x", 0L, 4L),
                    new KeyValueTimestamp<>("y", 1L, 4L)
                )
            );
        }
    }
    // With a 2ms time limit, records are buffered until stream time advances past
    // their insertion time + limit; only the latest update per key is emitted.
    @Test
    public void shouldSuppressIntermediateEventsWithTimeLimit() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, Long> valueCounts = builder
            .table(
                "input",
                Consumed.with(STRING_SERDE, STRING_SERDE),
                Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                    .withCachingDisabled()
                    .withLoggingDisabled()
            )
            .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
            .count();
        valueCounts
            .suppress(untilTimeLimit(ofMillis(2L), unbounded()))
            .toStream()
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v2", 1L);
            inputTopic.pipeInput("k2", "v1", 2L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("v1", 1L, 0L),
                    new KeyValueTimestamp<>("v1", 0L, 1L),
                    new KeyValueTimestamp<>("v2", 1L, 1L),
                    new KeyValueTimestamp<>("v1", 1L, 2L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("v1", 1L, 2L))
            );
            // inserting a dummy "tick" record just to advance stream time
            inputTopic.pipeInput("tick", "tick", 3L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("tick", 1L, 3L))
            );
            // the stream time is now 3, so it's time to emit this record
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("v2", 1L, 1L))
            );
            inputTopic.pipeInput("tick", "tock", 4L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("tick", 0L, 4L),
                    new KeyValueTimestamp<>("tock", 1L, 4L)
                )
            );
            // tick is still buffered, since it was first inserted at time 3, and it is only time 4 right now.
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                emptyList()
            );
        }
    }
    // With maxRecords(1).emitEarlyWhenFull(), a buffered record is evicted (emitted)
    // as soon as a second key needs buffer space.
    @Test
    public void shouldSuppressIntermediateEventsWithRecordLimit() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, Long> valueCounts = builder
            .table(
                "input",
                Consumed.with(STRING_SERDE, STRING_SERDE),
                Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                    .withCachingDisabled()
                    .withLoggingDisabled()
            )
            .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
            .count(Materialized.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .suppress(untilTimeLimit(ofMillis(Long.MAX_VALUE), maxRecords(1L).emitEarlyWhenFull()))
            .toStream()
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        System.out.println(topology.describe());
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v2", 1L);
            inputTopic.pipeInput("k2", "v1", 2L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("v1", 1L, 0L),
                    new KeyValueTimestamp<>("v1", 0L, 1L),
                    new KeyValueTimestamp<>("v2", 1L, 1L),
                    new KeyValueTimestamp<>("v1", 1L, 2L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    // consecutive updates to v1 get suppressed into only the latter.
                    new KeyValueTimestamp<>("v1", 0L, 1L),
                    new KeyValueTimestamp<>("v2", 1L, 1L)
                    // the last update won't be evicted until another key comes along.
                )
            );
            inputTopic.pipeInput("x", "x", 3L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(
                    new KeyValueTimestamp<>("x", 1L, 3L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(
                    // now we see that last update to v1, but we won't see the update to x until it gets evicted
                    new KeyValueTimestamp<>("v1", 1L, 2L)
                )
            );
        }
    }
    // Same scenario as the record-limit test, but eviction is driven by a byte
    // budget instead of a record count.
    @Test
    public void shouldSuppressIntermediateEventsWithBytesLimit() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, Long> valueCounts = builder
            .table(
                "input",
                Consumed.with(STRING_SERDE, STRING_SERDE),
                Materialized.<String, String, KeyValueStore<Bytes, byte[]>>with(STRING_SERDE, STRING_SERDE)
                    .withCachingDisabled()
                    .withLoggingDisabled()
            )
            .groupBy((k, v) -> new KeyValue<>(v, k), Grouped.with(STRING_SERDE, STRING_SERDE))
            .count();
        valueCounts
            // this is a bit brittle, but I happen to know that the entries are a little over 100 bytes in size.
            .suppress(untilTimeLimit(ofMillis(Long.MAX_VALUE), maxBytes(200L).emitEarlyWhenFull()))
            .toStream()
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        System.out.println(topology.describe());
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v2", 1L);
            inputTopic.pipeInput("k2", "v1", 2L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("v1", 1L, 0L),
                    new KeyValueTimestamp<>("v1", 0L, 1L),
                    new KeyValueTimestamp<>("v2", 1L, 1L),
                    new KeyValueTimestamp<>("v1", 1L, 2L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    // consecutive updates to v1 get suppressed into only the latter.
                    new KeyValueTimestamp<>("v1", 0L, 1L),
                    new KeyValueTimestamp<>("v2", 1L, 1L)
                    // the last update won't be evicted until another key comes along.
                )
            );
            inputTopic.pipeInput("x", "x", 3L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(
                    new KeyValueTimestamp<>("x", 1L, 3L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(
                    // now we see that last update to v1, but we won't see the update to x until it gets evicted
                    new KeyValueTimestamp<>("v1", 1L, 2L)
                )
            );
        }
    }
    // untilWindowCloses: only the final per-window result is emitted, once the
    // window end + grace period has passed; late records are dropped.
    @Test
    public void shouldSupportFinalResultsForTimeWindows() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<Windowed<String>, Long> valueCounts = builder
            .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
            .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
            .windowedBy(TimeWindows.of(ofMillis(2L)).grace(ofMillis(1L)))
            .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts").withCachingDisabled());
        valueCounts
            .suppress(untilWindowCloses(unbounded()))
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        System.out.println(topology.describe());
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v1", 1L);
            inputTopic.pipeInput("k1", "v1", 2L);
            inputTopic.pipeInput("k1", "v1", 1L);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v1", 5L);
            // note this last record gets dropped because it is out of the grace period
            inputTopic.pipeInput("k1", "v1", 0L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("[k1@0/2]", 1L, 0L),
                    new KeyValueTimestamp<>("[k1@0/2]", 2L, 1L),
                    new KeyValueTimestamp<>("[k1@2/4]", 1L, 2L),
                    new KeyValueTimestamp<>("[k1@0/2]", 3L, 1L),
                    new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
                    new KeyValueTimestamp<>("[k1@4/6]", 1L, 5L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
                    new KeyValueTimestamp<>("[k1@2/4]", 1L, 2L)
                )
            );
        }
    }
    // Like the previous test, but a large stream-time jump (to 30) must flush all
    // closed windows at once.
    @Test
    public void shouldSupportFinalResultsForTimeWindowsWithLargeJump() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<Windowed<String>, Long> valueCounts = builder
            .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
            .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
            .windowedBy(TimeWindows.of(ofMillis(2L)).grace(ofMillis(2L)))
            .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts").withCachingDisabled().withKeySerde(STRING_SERDE));
        valueCounts
            .suppress(untilWindowCloses(unbounded()))
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        System.out.println(topology.describe());
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v1", 1L);
            inputTopic.pipeInput("k1", "v1", 2L);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v1", 3L);
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v1", 4L);
            // this update should get dropped, since the previous event advanced the stream time and closed the window.
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v1", 30L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("[k1@0/2]", 1L, 0L),
                    new KeyValueTimestamp<>("[k1@0/2]", 2L, 1L),
                    new KeyValueTimestamp<>("[k1@2/4]", 1L, 2L),
                    new KeyValueTimestamp<>("[k1@0/2]", 3L, 1L),
                    new KeyValueTimestamp<>("[k1@2/4]", 2L, 3L),
                    new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
                    new KeyValueTimestamp<>("[k1@4/6]", 1L, 4L),
                    new KeyValueTimestamp<>("[k1@30/32]", 1L, 30L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("[k1@0/2]", 4L, 1L),
                    new KeyValueTimestamp<>("[k1@2/4]", 2L, 3L),
                    new KeyValueTimestamp<>("[k1@4/6]", 1L, 4L)
                )
            );
        }
    }
    // Sliding windows: raw output order is an implementation detail, so the raw
    // records are sorted by (key, timestamp) before comparison.
    @Test
    public void shouldSupportFinalResultsForSlidingWindows() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<Windowed<String>, Long> valueCounts = builder
            .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
            .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
            .windowedBy(SlidingWindows.withTimeDifferenceAndGrace(ofMillis(5L), ofMillis(15L)))
            .count(Materialized.<String, Long, WindowStore<Bytes, byte[]>>as("counts").withCachingDisabled().withKeySerde(STRING_SERDE));
        valueCounts
            .suppress(untilWindowCloses(unbounded()))
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        System.out.println(topology.describe());
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("k1", "v1", 10L);
            inputTopic.pipeInput("k1", "v1", 11L);
            inputTopic.pipeInput("k1", "v1", 10L);
            inputTopic.pipeInput("k1", "v1", 13L);
            inputTopic.pipeInput("k1", "v1", 10L);
            inputTopic.pipeInput("k1", "v1", 24L);
            // this update should get dropped, since the previous event advanced the stream time and closed the window.
            inputTopic.pipeInput("k1", "v1", 5L);
            inputTopic.pipeInput("k1", "v1", 7L);
            // final record to advance stream time and flush windows
            inputTopic.pipeInput("k1", "v1", 90L);
            final Comparator<TestRecord<String, Long>> comparator =
                Comparator.comparing((TestRecord<String, Long> o) -> o.getKey())
                    .thenComparing((TestRecord<String, Long> o) -> o.timestamp());
            final List<TestRecord<String, Long>> actual = drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER);
            actual.sort(comparator);
            verify(
                actual,
                asList(
                    // right window for k1@10 created when k1@11 is processed
                    new KeyValueTimestamp<>("[k1@11/16]", 1L, 11L),
                    // right window for k1@10 updated when k1@13 is processed
                    new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L),
                    // right window for k1@11 created when k1@13 is processed
                    new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L),
                    // left window for k1@24 created when k1@24 is processed
                    new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L),
                    // left window for k1@10 created when k1@10 is processed
                    new KeyValueTimestamp<>("[k1@5/10]", 1L, 10L),
                    // left window for k1@10 updated when k1@10 is processed
                    new KeyValueTimestamp<>("[k1@5/10]", 2L, 10L),
                    // left window for k1@10 updated when k1@10 is processed
                    new KeyValueTimestamp<>("[k1@5/10]", 3L, 10L),
                    // left window for k1@10 updated when k1@5 is processed
                    new KeyValueTimestamp<>("[k1@5/10]", 4L, 10L),
                    // left window for k1@10 updated when k1@7 is processed
                    new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L),
                    // left window for k1@11 created when k1@11 is processed
                    new KeyValueTimestamp<>("[k1@6/11]", 2L, 11L),
                    // left window for k1@11 updated when k1@10 is processed
                    new KeyValueTimestamp<>("[k1@6/11]", 3L, 11L),
                    // left window for k1@11 updated when k1@10 is processed
                    new KeyValueTimestamp<>("[k1@6/11]", 4L, 11L),
                    // left window for k1@11 updated when k1@7 is processed
                    new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L),
                    // left window for k1@13 created when k1@13 is processed
                    new KeyValueTimestamp<>("[k1@8/13]", 4L, 13L),
                    // left window for k1@13 updated when k1@10 is processed
                    new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L),
                    // right window for k1@90 created when k1@90 is processed
                    new KeyValueTimestamp<>("[k1@85/90]", 1L, 90L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("[k1@5/10]", 5L, 10L),
                    new KeyValueTimestamp<>("[k1@6/11]", 5L, 11L),
                    new KeyValueTimestamp<>("[k1@8/13]", 5L, 13L),
                    new KeyValueTimestamp<>("[k1@11/16]", 2L, 13L),
                    new KeyValueTimestamp<>("[k1@12/17]", 1L, 13L),
                    new KeyValueTimestamp<>("[k1@19/24]", 1L, 24L)
                )
            );
        }
    }
    // Session windows: raw output includes tombstones (null values) for merged
    // sessions; suppressed output holds only the final session results.
    @Test
    public void shouldSupportFinalResultsForSessionWindows() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<Windowed<String>, Long> valueCounts = builder
            .stream("input", Consumed.with(STRING_SERDE, STRING_SERDE))
            .groupBy((String k, String v) -> k, Grouped.with(STRING_SERDE, STRING_SERDE))
            .windowedBy(SessionWindows.with(ofMillis(5L)).grace(ofMillis(0L)))
            .count(Materialized.<String, Long, SessionStore<Bytes, byte[]>>as("counts").withCachingDisabled());
        valueCounts
            .suppress(untilWindowCloses(unbounded()))
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-suppressed", Produced.with(STRING_SERDE, Serdes.Long()));
        valueCounts
            .toStream()
            .map((final Windowed<String> k, final Long v) -> new KeyValue<>(k.toString(), v))
            .to("output-raw", Produced.with(STRING_SERDE, Serdes.Long()));
        final Topology topology = builder.build();
        System.out.println(topology.describe());
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("input", STRING_SERIALIZER, STRING_SERIALIZER);
            // first window
            inputTopic.pipeInput("k1", "v1", 0L);
            inputTopic.pipeInput("k1", "v1", 5L);
            // arbitrarily disordered records are admitted, because the *window* is not closed until stream-time > window-end + grace
            inputTopic.pipeInput("k1", "v1", 1L);
            // any record in the same partition advances stream time (note the key is different)
            inputTopic.pipeInput("k2", "v1", 6L);
            // late event for first window - this should get dropped from all streams, since the first window is now closed.
            inputTopic.pipeInput("k1", "v1", 5L);
            // just pushing stream time forward to flush the other events through.
            inputTopic.pipeInput("k1", "v1", 30L);
            verify(
                drainProducerRecords(driver, "output-raw", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("[k1@0/0]", 1L, 0L),
                    new KeyValueTimestamp<>("[k1@0/0]", null, 0L),
                    new KeyValueTimestamp<>("[k1@0/5]", 2L, 5L),
                    new KeyValueTimestamp<>("[k1@0/5]", null, 5L),
                    new KeyValueTimestamp<>("[k1@0/5]", 3L, 5L),
                    new KeyValueTimestamp<>("[k2@6/6]", 1L, 6L),
                    new KeyValueTimestamp<>("[k1@30/30]", 1L, 30L)
                )
            );
            verify(
                drainProducerRecords(driver, "output-suppressed", STRING_DESERIALIZER, LONG_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("[k1@0/5]", 3L, 5L),
                    new KeyValueTimestamp<>("[k2@6/6]", 1L, 6L)
                )
            );
        }
    }
    // suppress() applied upstream of a groupBy/count must still feed the
    // aggregation correctly once the buffer flushes.
    @Test
    public void shouldWorkBeforeGroupBy() {
        final StreamsBuilder builder = new StreamsBuilder();
        builder
            .table("topic", Consumed.with(Serdes.String(), Serdes.String()))
            .suppress(untilTimeLimit(ofMillis(10), unbounded()))
            .groupBy(KeyValue::pair, Grouped.with(Serdes.String(), Serdes.String()))
            .count()
            .toStream()
            .to("output", Produced.with(Serdes.String(), Serdes.Long()));
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
            final TestInputTopic<String, String> inputTopic =
                    driver.createInputTopic("topic", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopic.pipeInput("A", "a", 0L);
            inputTopic.pipeInput("tick", "tick", 10L);
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, LONG_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("A", 1L, 0L))
            );
        }
    }
    // suppress() on the right-hand table of an outer join: join results reflect
    // only the flushed (emitted) right-side values.
    @Test
    public void shouldWorkBeforeJoinRight() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, String> left = builder
            .table("left", Consumed.with(Serdes.String(), Serdes.String()));
        final KTable<String, String> right = builder
            .table("right", Consumed.with(Serdes.String(), Serdes.String()))
            .suppress(untilTimeLimit(ofMillis(10), unbounded()));
        left
            .outerJoin(right, (l, r) -> String.format("(%s,%s)", l, r))
            .toStream()
            .to("output", Produced.with(Serdes.String(), Serdes.String()));
        try (final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), config)) {
            final TestInputTopic<String, String> inputTopicRight =
                    driver.createInputTopic("right", STRING_SERIALIZER, STRING_SERIALIZER);
            final TestInputTopic<String, String> inputTopicLeft =
                    driver.createInputTopic("left", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopicRight.pipeInput("B", "1", 0L);
            inputTopicRight.pipeInput("A", "1", 0L);
            // buffered, no output
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                emptyList()
            );
            inputTopicRight.pipeInput("tick", "tick", 10L);
            // flush buffer
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("A", "(null,1)", 0L),
                    new KeyValueTimestamp<>("B", "(null,1)", 0L)
                )
            );
            inputTopicRight.pipeInput("A", "2", 11L);
            // buffered, no output
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                emptyList()
            );
            inputTopicLeft.pipeInput("A", "a", 12L);
            // should join with previously emitted right side
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("A", "(a,1)", 12L))
            );
            inputTopicLeft.pipeInput("B", "b", 12L);
            // should view through to the parent KTable, since B is no longer buffered
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("B", "(b,1)", 12L))
            );
            inputTopicLeft.pipeInput("A", "b", 13L);
            // should join with previously emitted right side
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("A", "(b,1)", 13L))
            );
            inputTopicRight.pipeInput("tick", "tick1", 21L);
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("tick", "(null,tick1)", 21), // just a testing artifact
                    new KeyValueTimestamp<>("A", "(b,2)", 13L)
                )
            );
        }
    }
    // Mirror image of shouldWorkBeforeJoinRight: suppress() on the left-hand table.
    @Test
    public void shouldWorkBeforeJoinLeft() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KTable<String, String> left = builder
            .table("left", Consumed.with(Serdes.String(), Serdes.String()))
            .suppress(untilTimeLimit(ofMillis(10), unbounded()));
        final KTable<String, String> right = builder
            .table("right", Consumed.with(Serdes.String(), Serdes.String()));
        left
            .outerJoin(right, (l, r) -> String.format("(%s,%s)", l, r))
            .toStream()
            .to("output", Produced.with(Serdes.String(), Serdes.String()));
        final Topology topology = builder.build();
        try (final TopologyTestDriver driver = new TopologyTestDriver(topology, config)) {
            final TestInputTopic<String, String> inputTopicRight =
                    driver.createInputTopic("right", STRING_SERIALIZER, STRING_SERIALIZER);
            final TestInputTopic<String, String> inputTopicLeft =
                    driver.createInputTopic("left", STRING_SERIALIZER, STRING_SERIALIZER);
            inputTopicLeft.pipeInput("B", "1", 0L);
            inputTopicLeft.pipeInput("A", "1", 0L);
            // buffered, no output
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                emptyList()
            );
            inputTopicLeft.pipeInput("tick", "tick", 10L);
            // flush buffer
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("A", "(1,null)", 0L),
                    new KeyValueTimestamp<>("B", "(1,null)", 0L)
                )
            );
            inputTopicLeft.pipeInput("A", "2", 11L);
            // buffered, no output
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                emptyList()
            );
            inputTopicRight.pipeInput("A", "a", 12L);
            // should join with previously emitted left side
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("A", "(1,a)", 12L))
            );
            inputTopicRight.pipeInput("B", "b", 12L);
            // should view through to the parent KTable, since B is no longer buffered
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("B", "(1,b)", 12L))
            );
            inputTopicRight.pipeInput("A", "b", 13L);
            // should join with previously emitted left side
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                singletonList(new KeyValueTimestamp<>("A", "(1,b)", 13L))
            );
            inputTopicLeft.pipeInput("tick", "tick1", 21L);
            verify(
                drainProducerRecords(driver, "output", STRING_DESERIALIZER, STRING_DESERIALIZER),
                asList(
                    new KeyValueTimestamp<>("tick", "(tick1,null)", 21), // just a testing artifact
                    new KeyValueTimestamp<>("A", "(2,b)", 13L)
                )
            );
        }
    }
    // NOTE(review): this test only builds the topology and never executes or
    // asserts anything - it appears to be a compile-time smoke test that
    // suppress() composes with cogroup(); consider adding runtime assertions.
    @Test
    public void shouldWorkWithCogrouped() {
        final StreamsBuilder builder = new StreamsBuilder();
        final KGroupedStream<String, String> stream1 = builder.stream("one", Consumed.with(Serdes.String(), Serdes.String())).groupByKey(Grouped.with(Serdes.String(), Serdes.String()));
        final KGroupedStream<String, String> stream2 = builder.stream("two", Consumed.with(Serdes.String(), Serdes.String())).groupByKey(Grouped.with(Serdes.String(), Serdes.String()));
        final KStream<Windowed<String>, Object> cogrouped = stream1.cogroup((key, value, aggregate) -> aggregate + value).cogroup(stream2, (key, value, aggregate) -> aggregate + value)
            .windowedBy(TimeWindows.of(Duration.ofMinutes(15)))
            .aggregate(() -> "", Named.as("test"), Materialized.as("store"))
            .suppress(Suppressed.untilWindowCloses(unbounded()))
            .toStream();
    }
    // Asserts that the drained records match the expected (key, value, timestamp)
    // sequence exactly, printing both sides on mismatch.
    private static <K, V> void verify(final List<TestRecord<K, V>> results,
                                      final List<KeyValueTimestamp<K, V>> expectedResults) {
        if (results.size() != expectedResults.size()) {
            throw new AssertionError(printRecords(results) + " != " + expectedResults);
        }
        final Iterator<KeyValueTimestamp<K, V>> expectedIterator = expectedResults.iterator();
        for (final TestRecord<K, V> result : results) {
            final KeyValueTimestamp<K, V> expected = expectedIterator.next();
            try {
                assertThat(result, equalTo(new TestRecord<>(expected.key(), expected.value(), null, expected.timestamp())));
            } catch (final AssertionError e) {
                throw new AssertionError(printRecords(results) + " != " + expectedResults, e);
            }
        }
    }
    // Reads and returns all records currently available on the given output topic.
    private static <K, V> List<TestRecord<K, V>> drainProducerRecords(final TopologyTestDriver driver,
                                                                      final String topic,
                                                                      final Deserializer<K> keyDeserializer,
                                                                      final Deserializer<V> valueDeserializer) {
        return driver.createOutputTopic(topic, keyDeserializer, valueDeserializer).readRecordsToList();
    }
    // Renders a record list one-per-line for readable assertion failure messages.
    private static <K, V> String printRecords(final List<TestRecord<K, V>> result) {
        final StringBuilder resultStr = new StringBuilder();
        resultStr.append("[\n");
        for (final TestRecord<?, ?> record : result) {
            resultStr.append("    ").append(record).append("\n");
        }
        resultStr.append("]");
        return resultStr.toString();
    }
}
| apache-2.0 |
kbabioch/arx | src/gui/org/deidentifier/arx/gui/view/impl/wizard/HierarchyWizardModelIntervals.java | 4117 | /*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2017 Fabian Prasser, Florian Kohlmayer and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.gui.view.impl.wizard;
import java.util.List;
import org.deidentifier.arx.DataType;
import org.deidentifier.arx.aggregates.HierarchyBuilder;
import org.deidentifier.arx.aggregates.HierarchyBuilderIntervalBased;
import org.deidentifier.arx.aggregates.HierarchyBuilderIntervalBased.Range;
/**
* A model for interval-based builders.
*
* @author Fabian Prasser
* @param <T>
*/
/**
 * Wizard model backing interval-based hierarchy builders. Translates the
 * generic grouping model state into a {@link HierarchyBuilderIntervalBased}
 * and materializes the resulting hierarchy.
 *
 * @author Fabian Prasser
 * @param <T> the attribute's value type
 */
public class HierarchyWizardModelIntervals<T> extends HierarchyWizardModelGrouping<T>{

    /**
     * Creates an initial interval-based model for the given attribute.
     *
     * @param dataType the attribute's data type
     * @param data the attribute's values
     */
    public HierarchyWizardModelIntervals(final DataType<T> dataType, String[] data) {
        super(data, dataType, true);
        update();
    }

    @Override
    public HierarchyBuilderIntervalBased<T> getBuilder(boolean serializable) throws Exception{
        // Translate the lower/upper adjustment settings into builder ranges.
        final Range<T> lower = new Range<T>(getLowerRange().repeat,
                                            getLowerRange().snap,
                                            getLowerRange().label);
        final Range<T> upper = new Range<T>(getUpperRange().repeat,
                                            getUpperRange().snap,
                                            getUpperRange().label);
        final HierarchyBuilderIntervalBased<T> result =
                HierarchyBuilderIntervalBased.create(getDataType(), lower, upper);
        result.setAggregateFunction(getDefaultFunction());

        // Register every interval on the base level.
        for (HierarchyWizardGroupingInterval<T> entry : getIntervals()) {
            result.addInterval(entry.min, entry.max, entry.function);
        }

        // Register the grouping fanout for each higher level, in order.
        int levelIndex = 0;
        for (List<HierarchyWizardGroupingGroup<T>> groups : getModelGroups()) {
            for (HierarchyWizardGroupingGroup<T> group : groups) {
                result.getLevel(levelIndex).addGroup(group.size, group.function);
            }
            levelIndex++;
        }
        return result;
    }

    @Override
    public void parse(HierarchyBuilder<T> builder) throws IllegalArgumentException {
        // Builders of any other kind are silently ignored.
        if (builder instanceof HierarchyBuilderIntervalBased) {
            super.parse((HierarchyBuilderIntervalBased<T>) builder);
        }
    }

    @Override
    protected void build() {
        // Discard any previous result before attempting a rebuild.
        super.hierarchy = null;
        super.error = null;
        super.groupsizes = null;
        if (data == null) {
            return;
        }

        final HierarchyBuilderIntervalBased<T> builder;
        try {
            builder = getBuilder(false);
        } catch (Exception e) {
            super.error = e.getMessage();
            return;
        }

        final String validationError = builder.isValid();
        if (validationError != null) {
            super.error = validationError;
            return;
        }

        // Any failure in preparation or construction is surfaced via super.error.
        try {
            super.groupsizes = builder.prepare(data);
            super.hierarchy = builder.build();
        } catch (Exception e) {
            super.error = e.getMessage();
        }
    }
}
| apache-2.0 |
muxiaolin/cube-sdk-dev | core/src/in/srain/cube/concurrent/SimpleTask.java | 3270 | package in.srain.cube.concurrent;
import android.os.Handler;
import android.os.Message;
import in.srain.cube.util.CLog;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A class which encapsulate a task that can execute in background thread and can be cancelled.
* memory require:
* <p/>
* Shadow heap size of AtomicInteger: 12 + 4 = 16 bytes;
* Shadow heap size of SimpleTask: 12 + 4 + 4 = 20. After aligned: 24 bytes;
* Retained heap size of SimpleTask: 16 + 24 = 40 bytes.
*
* @author http://www.liaohuqiu.net
*/
public abstract class SimpleTask implements Runnable {

    // Lifecycle states held in mState. Transitions: NEW -> RUNNING -> FINISH,
    // with CANCELLED reachable from NEW or RUNNING via cancel().
    private static final int STATE_NEW = 0x01;
    private static final int STATE_RUNNING = 0x02;
    private static final int STATE_FINISH = 0x04;
    private static final int STATE_CANCELLED = 0x08;

    private static final int MSG_TASK_DONE = 0x01;

    // Bound to the thread that loads this class (typically the main thread);
    // completion callbacks are delivered on that thread.
    private static InternalHandler sHandler = new InternalHandler();

    // The thread currently executing run(); used by cancel() to interrupt it.
    private Thread mCurrentThread;
    private AtomicInteger mState = new AtomicInteger(STATE_NEW);

    /**
     * A worker will execute this method in a background thread
     */
    public abstract void doInBackground();

    /**
     * Called on the handler thread after doInBackground() completes.
     *
     * @param canceled true if cancel() was invoked before completion
     */
    public abstract void onFinish(boolean canceled);

    /**
     * When the Task is Cancelled.
     */
    protected void onCancel() {
    }

    /**
     * Restart the task, just set the state to {@link #STATE_NEW}.
     * NOTE(review): this does not wait for a running execution to stop;
     * calling it while the task is RUNNING may lose state transitions.
     */
    public void restart() {
        mState.set(STATE_NEW);
    }

    @Override
    public void run() {
        // Only a NEW task may start; a cancelled/finished/running task is a no-op.
        if (!mState.compareAndSet(STATE_NEW, STATE_RUNNING)) {
            return;
        }
        mCurrentThread = Thread.currentThread();
        doInBackground();
        // Hand the completion callback over to the handler thread.
        sHandler.obtainMessage(MSG_TASK_DONE, this).sendToTarget();
    }

    /**
     * check whether this work is canceled.
     */
    public boolean isCancelled() {
        return mState.get() == STATE_CANCELLED;
    }

    /**
     * check whether this work has done
     *
     * @return
     */
    @SuppressWarnings({"unused"})
    public boolean isDone() {
        return mState.get() == STATE_FINISH;
    }

    /**
     * Cancels the task: interrupts the worker thread if it is running and
     * invokes {@link #onCancel()}. Has no effect once the task has finished
     * or was already cancelled.
     */
    public void cancel() {
        CLog.d("cube-simple-task", "cancel: %s", this);
        // FINISH (0x04) and CANCELLED (0x08) are both >= STATE_FINISH: nothing to do.
        if (mState.get() >= STATE_FINISH) {
            return;
        }
        if (mState.get() == STATE_RUNNING && null != mCurrentThread) {
            try {
                mCurrentThread.interrupt();
                CLog.d("cube-simple-task", "cancel: %s", this);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        mState.set(STATE_CANCELLED);
        onCancel();
    }

    private static class InternalHandler extends Handler {
        @Override
        public void handleMessage(Message msg) {
            SimpleTask work = (SimpleTask) msg.obj;
            switch (msg.what) {
                case MSG_TASK_DONE:
                    // BUG FIX: capture the cancelled flag BEFORE overwriting the
                    // state. The previous code set STATE_FINISH first and then
                    // queried isCancelled(), so onFinish() was always invoked
                    // with canceled == false, even for cancelled tasks.
                    final boolean canceled = work.isCancelled();
                    work.mState.set(STATE_FINISH);
                    work.onFinish(canceled);
                    break;
                default:
                    break;
            }
        }
    }

    /** Schedules r on the handler thread after the given delay. */
    public static void postDelay(Runnable r, long delayMillis) {
        sHandler.postDelayed(r, delayMillis);
    }
} | apache-2.0 |
VHAINNOVATIONS/Telepathology | Source/Java/CoreRouter/main/src/java/gov/va/med/imaging/core/router/storage/StorageDataSourceRouter.java | 8534 | /**
*
Package: MAG - VistA Imaging
WARNING: Per VHA Directive 2004-038, this routine should not be modified.
Date Created: Apr, 2010
Site Name: Washington OI Field Office, Silver Spring, MD
Developer: vhaiswlouthj
Description: DICOM Study cache manager. Maintains the cache of study instances
and expires old studies after 15 minutes.
;; +--------------------------------------------------------------------+
;; Property of the US Government.
;; No permission to copy or redistribute this software is given.
;; Use of unreleased versions of this software requires the user
;; to execute a written test agreement with the VistA Imaging
;; Development Office of the Department of Veterans Affairs,
;; telephone (301) 734-0100.
;;
;; The Food and Drug Administration classifies this software as
;; a Class II medical device. As such, it may not be changed
;; in any way. Modifications to this software may result in an
;; adulterated medical device under 21CFR820, the use of which
;; is considered to be a violation of US Federal Statutes.
;; +--------------------------------------------------------------------+
*/
package gov.va.med.imaging.core.router.storage;
import gov.va.med.imaging.core.annotations.routerfacade.FacadeRouterInterface;
import gov.va.med.imaging.core.annotations.routerfacade.FacadeRouterInterfaceCommandTester;
import gov.va.med.imaging.core.annotations.routerfacade.FacadeRouterMethod;
import gov.va.med.imaging.core.interfaces.FacadeRouter;
import gov.va.med.imaging.core.interfaces.exceptions.ConnectionException;
import gov.va.med.imaging.core.interfaces.exceptions.MethodException;
import gov.va.med.imaging.exchange.business.TimePeriod;
import gov.va.med.imaging.exchange.business.storage.Artifact;
import gov.va.med.imaging.exchange.business.storage.ArtifactDescriptor;
import gov.va.med.imaging.exchange.business.storage.ArtifactInstance;
import gov.va.med.imaging.exchange.business.storage.ArtifactRetentionPolicy;
import gov.va.med.imaging.exchange.business.storage.KeyList;
import gov.va.med.imaging.exchange.business.storage.NetworkLocationInfo;
import gov.va.med.imaging.exchange.business.storage.Provider;
import gov.va.med.imaging.exchange.business.storage.ProviderAvailability;
import gov.va.med.imaging.exchange.business.storage.ProviderFactory;
import gov.va.med.imaging.exchange.business.storage.RetentionPolicy;
import gov.va.med.imaging.exchange.business.storage.RetentionPolicyFulfillment;
import gov.va.med.imaging.exchange.business.storage.RetentionPolicyProviderMapping;
import gov.va.med.imaging.exchange.business.storage.StorageServerDatabaseConfiguration;
import gov.va.med.imaging.exchange.business.storage.StorageTransaction;
import gov.va.med.imaging.exchange.business.storage.TransferStatistics;
@FacadeRouterInterface
@FacadeRouterInterfaceCommandTester
public interface StorageDataSourceRouter
extends FacadeRouter
{
	// All methods are synchronous child commands executed through the facade
	// router. They may throw MethodException (business failure) or
	// ConnectionException (data-source failure).

	// --- Creation (post*) operations: persist new storage business objects ---
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact postArtifact(Artifact artifact) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	ArtifactInstance postArtifactInstance(ArtifactInstance artifactInstance) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	ArtifactRetentionPolicy postArtifactRetentionPolicy(ArtifactRetentionPolicy artifactRetentionPolicy) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	ProviderAvailability postProviderAvailability(ProviderAvailability providerAvailability) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Provider postProvider(Provider provider) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	RetentionPolicyFulfillment postRetentionPolicyFulfillment(RetentionPolicyFulfillment retentionPolicyFulfillment) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	RetentionPolicyProviderMapping postRetentionPolicyProviderMapping(RetentionPolicyProviderMapping mapping) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	StorageTransaction postStorageTransaction(StorageTransaction storageTransaction) throws MethodException, ConnectionException;
	// Count of storage transactions of the given type/status within the period.
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Integer getStorageTransactionsWithinTimePeriod(String type, String status, TimePeriod period) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	TransferStatistics postTransferStatistics(TransferStatistics transferStatistics) throws MethodException, ConnectionException;

	// --- Deletion operations (return true on success) ---
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Boolean deleteProviderAvailability(Integer providerAvailabilityId) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Boolean deleteRetentionPolicyProviderMapping(Integer mappingId) throws MethodException, ConnectionException;

	// --- Retrieval (get*) operations: look up artifacts and locations ---
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact getArtifactAndInstancesById(Integer artifactId) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact getArtifactAndInstancesByKeyList(KeyList keyList) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact getArtifactAndInstancesByToken(String artifactToken) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact getArtifactAsGraphById(String artifactId) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact getArtifactAsGraphByToken(String artifactToken) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact getArtifactByToken(String artifactToken) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	NetworkLocationInfo getCurrentWriteLocation(Provider provider) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	NetworkLocationInfo getCurrentJukeboxWriteLocation(Provider provider) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	NetworkLocationInfo getNetworkLocationDetails(String networkLocationIEN) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	StorageServerDatabaseConfiguration getStorageServerDatabaseConfiguration(ProviderFactory providerFactory) throws MethodException, ConnectionException;

	// --- Update (put*) operations: modify existing storage business objects ---
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Artifact putArtifact(Artifact artifact) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	ArtifactInstance putArtifactInstanceLastAccessed(ArtifactInstance artifactInstance) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	ArtifactInstance putArtifactInstanceUrl(ArtifactInstance artifactInstance) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	ArtifactRetentionPolicy putArtifactRetentionPolicy(ArtifactRetentionPolicy artifactRetentionPolicy) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	ProviderAvailability putProviderAvailability(ProviderAvailability providerAvailability) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	Provider putProvider(Provider provider) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	RetentionPolicy putRetentionPolicy(ArtifactDescriptor artifactDescriptor, RetentionPolicy retentionPolicy) throws MethodException, ConnectionException;
	@FacadeRouterMethod(asynchronous=false, isChildCommand=true)
	RetentionPolicyProviderMapping putRetentionPolicyProviderMapping(RetentionPolicyProviderMapping mapping) throws MethodException, ConnectionException;
}
| apache-2.0 |
Bibliome/alvisnlp | alvisnlp-core/src/main/java/fr/inra/maiage/bibliome/alvisnlp/core/corpus/dump/codec/SectionDecoder.java | 3221 | /*
Copyright 2016, 2017 Institut National de la Recherche Agronomique
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fr.inra.maiage.bibliome.alvisnlp.core.corpus.dump.codec;
import java.io.IOException;
import java.nio.ByteBuffer;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.Document;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.Layer;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.Relation;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.Section;
import fr.inra.maiage.bibliome.alvisnlp.core.corpus.creators.SectionCreator;
import fr.inra.maiage.bibliome.alvisnlp.core.module.types.Mapping;
import fr.inra.maiage.bibliome.util.marshall.MapReadCache;
import fr.inra.maiage.bibliome.util.marshall.ReadCache;
import fr.inra.maiage.bibliome.util.marshall.Unmarshaller;
/**
 * Decodes {@link Section} objects from a dump, together with their layers and
 * relations. The target {@link Document} must be injected via {@link #setDoc}
 * before decoding.
 */
public class SectionDecoder extends ElementDecoder<Section> implements SectionCreator {

    private final LayerDecoder layerDecoder;
    private final Unmarshaller<Layer> layerUnmarshaller;
    private final RelationDecoder relationDecoder;
    private final Unmarshaller<Relation> relationUnmarshaller;
    private Document doc;

    SectionDecoder(Unmarshaller<String> stringUnmarshaller) throws IOException {
        super(stringUnmarshaller);
        this.layerDecoder = new LayerDecoder(stringUnmarshaller);
        this.layerUnmarshaller = new Unmarshaller<Layer>(stringUnmarshaller.getChannel(), layerDecoder);
        this.relationDecoder = new RelationDecoder(stringUnmarshaller);
        // Relations may be referenced several times, so they get a read cache.
        ReadCache<Relation> cache = MapReadCache.hashMap();
        this.relationUnmarshaller = new Unmarshaller<Relation>(stringUnmarshaller.getChannel(), relationDecoder, cache);
    }

    @Override
    public Section decode1(ByteBuffer buffer) {
        final String sectionName = readString(buffer);
        final String sectionContents = readString(buffer);
        final Section section = new Section(this, doc, sectionName, sectionContents);

        // Layers attach themselves to the section as a side effect of reading.
        layerDecoder.setSection(section);
        final int layerCount = buffer.getInt();
        for (int remaining = layerCount; remaining > 0; --remaining) {
            layerUnmarshaller.read(buffer.getLong());
        }

        // Same pattern for relations.
        relationDecoder.setSection(section);
        final int relationCount = buffer.getInt();
        for (int remaining = relationCount; remaining > 0; --remaining) {
            relationUnmarshaller.read(buffer.getLong());
        }
        return section;
    }

    @Override
    public Mapping getConstantSectionFeatures() {
        // Dump decoding never applies constant features.
        return null;
    }

    @Override
    public void setConstantSectionFeatures(Mapping constantSectionFeatures) {
        // Intentionally a no-op; see getConstantSectionFeatures().
    }

    Document getDoc() {
        return doc;
    }

    void setDoc(Document doc) {
        this.doc = doc;
    }

    public LayerDecoder getLayerDecoder() {
        return layerDecoder;
    }

    public RelationDecoder getRelationDecoder() {
        return relationDecoder;
    }

    public Unmarshaller<Relation> getRelationUnmarshaller() {
        return relationUnmarshaller;
    }
}
| apache-2.0 |
anuraaga/armeria | core/src/main/java/com/linecorp/armeria/unsafe/ByteBufHttpData.java | 4505 | /*
* Copyright 2017 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.unsafe;
import static java.util.Objects.requireNonNull;
import java.io.InputStream;
import java.nio.charset.Charset;
import com.google.common.base.MoreObjects;
import com.linecorp.armeria.common.AbstractHttpData;
import com.linecorp.armeria.common.HttpData;
import com.linecorp.armeria.common.util.UnstableApi;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufHolder;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.Unpooled;
/**
* An {@link HttpData} that is backed by a {@link ByteBuf} for optimizing certain internal use cases. Not for
* general use.
*/
@UnstableApi
public final class ByteBufHttpData extends AbstractHttpData implements ByteBufHolder {

    // Backing buffer; this object owns one reference to it (see constructor).
    private final ByteBuf buf;
    private final boolean endOfStream;
    // Readable byte count captured at construction time; the buffer must not
    // be mutated afterwards, so this stays valid for the object's lifetime.
    private final int length;

    /**
     * Constructs a new {@link ByteBufHttpData}. Ownership of {@code buf} is taken by this
     * {@link ByteBufHttpData}, which must not be mutated anymore.
     */
    public ByteBufHttpData(ByteBuf buf, boolean endOfStream) {
        length = requireNonNull(buf, "buf").readableBytes();
        if (length != 0) {
            this.buf = buf;
        } else {
            // Empty input: release the caller's buffer immediately and share
            // the singleton empty buffer instead.
            buf.release();
            this.buf = Unpooled.EMPTY_BUFFER;
        }
        this.endOfStream = endOfStream;
    }

    @Override
    public boolean isEndOfStream() {
        return endOfStream;
    }

    @Override
    public byte[] array() {
        // Return the backing array directly only when it maps 1:1 onto the
        // readable region; otherwise copy the readable bytes out.
        if (buf.hasArray() && buf.arrayOffset() == 0 && buf.array().length == length) {
            return buf.array();
        } else {
            return ByteBufUtil.getBytes(buf);
        }
    }

    @Override
    public int length() {
        return length;
    }

    @Override
    public boolean isEmpty() {
        // touch() records access for leak detection before delegating.
        buf.touch();
        return super.isEmpty();
    }

    // --- ReferenceCounted: all calls delegate to the backing ByteBuf. ---

    @Override
    public int refCnt() {
        return buf.refCnt();
    }

    @Override
    public ByteBufHttpData retain() {
        buf.retain();
        return this;
    }

    @Override
    public ByteBufHttpData retain(int increment) {
        buf.retain(increment);
        return this;
    }

    @Override
    public ByteBufHttpData touch() {
        buf.touch();
        return this;
    }

    @Override
    public ByteBufHttpData touch(Object hint) {
        buf.touch(hint);
        return this;
    }

    @Override
    public boolean release() {
        return buf.release();
    }

    @Override
    public boolean release(int decrement) {
        return buf.release(decrement);
    }

    @Override
    public ByteBuf content() {
        buf.touch();
        return buf;
    }

    // --- ByteBufHolder: each variant wraps the corresponding ByteBuf op. ---

    @Override
    public ByteBufHttpData copy() {
        return new ByteBufHttpData(buf.copy(), endOfStream);
    }

    @Override
    public ByteBufHttpData duplicate() {
        return new ByteBufHttpData(buf.duplicate(), endOfStream);
    }

    @Override
    public ByteBufHttpData retainedDuplicate() {
        return new ByteBufHttpData(buf.retainedDuplicate(), endOfStream);
    }

    @Override
    public ByteBufHttpData replace(ByteBuf content) {
        requireNonNull(content, "content");
        content.touch();
        return new ByteBufHttpData(content, endOfStream);
    }

    @Override
    protected byte getByte(int index) {
        return buf.getByte(index);
    }

    @Override
    public String toString(Charset charset) {
        return buf.toString(charset);
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                          .add("buf", buf.toString()).toString();
    }

    @Override
    public InputStream toInputStream() {
        // The stream owns its own duplicate reference and releases it on close.
        return new ByteBufInputStream(buf.retainedDuplicate(), true);
    }

    @Override
    public ByteBufHttpData withEndOfStream() {
        // NOTE(review): shares buf with this instance without retaining —
        // presumably the caller transfers ownership; confirm against call sites.
        return new ByteBufHttpData(buf, true);
    }
}
| apache-2.0 |
trasa/aws-sdk-java | aws-java-sdk-autoscaling/src/main/java/com/amazonaws/services/autoscaling/model/transform/ScalingActivityInProgressExceptionUnmarshaller.java | 1627 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.autoscaling.model.transform;
import org.w3c.dom.Node;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.util.XpathUtils;
import com.amazonaws.transform.StandardErrorUnmarshaller;
import com.amazonaws.services.autoscaling.model.ScalingActivityInProgressException;
/**
 * Unmarshals the {@code ScalingActivityInProgress} error response into a
 * {@link ScalingActivityInProgressException}; returns {@code null} for any
 * other error code so later unmarshallers can try.
 */
public class ScalingActivityInProgressExceptionUnmarshaller extends
        StandardErrorUnmarshaller {

    public ScalingActivityInProgressExceptionUnmarshaller() {
        super(ScalingActivityInProgressException.class);
    }

    @Override
    public AmazonServiceException unmarshall(Node node) throws Exception {
        // Constant-first equals handles a null error code without a separate check.
        if (!"ScalingActivityInProgress".equals(parseErrorCode(node))) {
            return null;
        }
        return (ScalingActivityInProgressException) super.unmarshall(node);
    }
}
| apache-2.0 |
ruks/geowave | extensions/datastores/accumulo/src/main/java/mil/nga/giat/geowave/datastore/accumulo/query/InputFormatAccumuloRangeQuery.java | 2833 | package mil.nga.giat.geowave.datastore.accumulo.query;
import java.util.Iterator;
import java.util.List;
import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.store.adapter.AdapterStore;
import mil.nga.giat.geowave.core.store.filter.FilterList;
import mil.nga.giat.geowave.core.store.filter.QueryFilter;
import mil.nga.giat.geowave.core.store.index.Index;
import mil.nga.giat.geowave.datastore.accumulo.AccumuloOperations;
import mil.nga.giat.geowave.datastore.accumulo.util.InputFormatIteratorWrapper;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.ScannerBase;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.data.Range;
import org.apache.hadoop.io.Text;
import org.apache.log4j.Logger;
/**
* * Represents a query operation for a range of Accumulo row IDs. This class is
* particularly used by the InputFormat as the iterator that it returns will
* contain Entry<GeoWaveInputKey, Object> entries rather than just the object.
* This is so the input format has a way of getting the adapter ID and data ID
* to define the key.
*/
/**
 * Represents a query operation for a range of Accumulo row IDs, used by the
 * InputFormat. The iterator it produces yields Entry&lt;GeoWaveInputKey, Object&gt;
 * entries (not bare objects) so the input format can derive the adapter ID and
 * data ID for its keys.
 */
public class InputFormatAccumuloRangeQuery extends
        AccumuloConstraintsQuery
{
    private final static Logger LOGGER = Logger.getLogger(InputFormatAccumuloRangeQuery.class);

    private final Range accumuloRange;
    private final boolean isOutputWritable;

    public InputFormatAccumuloRangeQuery(
            final List<ByteArrayId> adapterIds,
            final Index index,
            final Range accumuloRange,
            final List<QueryFilter> queryFilters,
            final boolean isOutputWritable,
            final String[] authorizations ) {
        // No additional constraints: only the explicit row range applies.
        super(
                adapterIds,
                index,
                null,
                queryFilters,
                authorizations);
        this.accumuloRange = accumuloRange;
        this.isOutputWritable = isOutputWritable;
    }

    @Override
    protected ScannerBase getScanner(
            final AccumuloOperations accumuloOperations,
            final Integer limit ) {
        final String tableName = index.getId().getString();
        try {
            final Scanner scanner = accumuloOperations.createScanner(
                    tableName,
                    getAdditionalAuthorizations());
            scanner.setRange(accumuloRange);
            // Restrict the scan to the requested adapters, when given.
            if ((adapterIds != null) && !adapterIds.isEmpty()) {
                for (final ByteArrayId adapterId : adapterIds) {
                    scanner.fetchColumnFamily(new Text(
                            adapterId.getBytes()));
                }
            }
            return scanner;
        }
        catch (final TableNotFoundException e) {
            // A missing table is treated as an empty result, not a failure.
            LOGGER.warn(
                    "Unable to query table '" + tableName + "'. Table does not exist.",
                    e);
            return null;
        }
    }

    @Override
    protected Iterator initIterator(
            final AdapterStore adapterStore,
            final ScannerBase scanner ) {
        // Wrap the raw scanner so each entry carries its GeoWave input key.
        return new InputFormatIteratorWrapper(
                adapterStore,
                index,
                scanner.iterator(),
                isOutputWritable,
                new FilterList<QueryFilter>(
                        clientFilters));
    }
}
| apache-2.0 |
tpb1908/AndroidProjectsClient | app/src/main/java/com/tpb/projects/editors/CommentEditor.java | 6793 | package com.tpb.projects.editors;
import android.app.Dialog;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AlertDialog;
import android.view.ViewStub;
import android.view.inputmethod.InputMethodManager;
import android.widget.Button;
import android.widget.LinearLayout;
import com.tpb.github.data.models.Comment;
import com.tpb.github.data.models.Issue;
import com.tpb.mdtext.Markdown;
import com.tpb.mdtext.imagegetter.HttpImageGetter;
import com.tpb.mdtext.views.MarkdownEditText;
import com.tpb.projects.R;
import com.tpb.projects.util.SettingsActivity;
import com.tpb.projects.util.Util;
import com.tpb.projects.util.input.KeyBoardVisibilityChecker;
import com.tpb.projects.util.input.SimpleTextChangeWatcher;
import java.io.IOException;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
* Created by theo on 14/02/17.
*/
public class CommentEditor extends EditorActivity {

    // Request codes used by callers to distinguish the editing mode.
    public static final int REQUEST_CODE_NEW_COMMENT = 1799;
    public static final int REQUEST_CODE_EDIT_COMMENT = 5734;
    public static final int REQUEST_CODE_COMMENT_FOR_STATE = 1400;

    @BindView(R.id.comment_body_edit) MarkdownEditText mEditor;
    @BindView(R.id.markdown_edit_buttons) LinearLayout mEditButtons;
    @BindView(R.id.markdown_editor_discard) Button mDiscardButton;
    @BindView(R.id.markdown_editor_done) Button mDoneButton;

    private KeyBoardVisibilityChecker mKeyBoardChecker;
    // True once the user has typed while in edit mode; gates the discard dialog.
    private boolean mHasBeenEdited;
    // Comment being edited; created lazily in onDone() for new comments.
    private Comment mComment;
    // Optional issue context used for repository-aware markdown rendering.
    private Issue mIssue;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Apply the user's theme preference before any view inflation.
        final SettingsActivity.Preferences prefs = SettingsActivity.Preferences
                .getPreferences(this);
        setTheme(prefs.isDarkThemeEnabled() ? R.style.AppTheme_Dark : R.style.AppTheme);
        setContentView(R.layout.activity_markdown_editor);
        final ViewStub stub = (ViewStub) findViewById(R.id.editor_stub);
        stub.setLayoutResource(R.layout.stub_comment_editor);
        stub.inflate();
        //Bind after inflating the stub
        ButterKnife.bind(this);
        // Pre-populate from extras when editing an existing comment.
        final Intent launchIntent = getIntent();
        if(launchIntent.hasExtra(getString(R.string.parcel_comment))) {
            mComment = launchIntent.getParcelableExtra(getString(R.string.parcel_comment));
            mEditor.setText(mComment.getBody());
        }
        if(launchIntent.hasExtra(getString(R.string.parcel_issue))) {
            mIssue = launchIntent.getParcelableExtra(getString(R.string.parcel_issue));
        }
        // Track edits only while in edit mode (ignores preview-mode changes).
        mEditor.addTextChangedListener(new SimpleTextChangeWatcher() {
            @Override
            public void textChanged() {
                mHasBeenEdited |= mEditor.isEditing();
            }
        });
        // Wire the markdown toolbar: snippet insertion and edit/preview toggling.
        new MarkdownButtonAdapter(this, mEditButtons,
                new MarkdownButtonAdapter.MarkdownButtonListener() {
                    @Override
                    public void snippetEntered(String snippet, int relativePosition) {
                        if(mEditor.hasFocus() && mEditor.isEnabled() && mEditor.isEditing()) {
                            Util.insertString(mEditor, snippet, relativePosition);
                        }
                    }

                    @Override
                    public String getText() {
                        return mEditor.getInputText().toString();
                    }

                    @Override
                    public void previewCalled() {
                        if(mEditor.isEditing()) {
                            // Switch to preview: save raw text, render markdown.
                            mEditor.saveText();
                            final String repo = mIssue == null ? null : mIssue.getRepoFullName();
                            mEditor.disableEditing();
                            mEditor.setMarkdown(
                                    Markdown.formatMD(mEditor.getInputText().toString(), repo),
                                    new HttpImageGetter(mEditor)
                            );
                        } else {
                            // Switch back to editing: restore the raw text.
                            mEditor.restoreText();
                            mEditor.enableEditing();
                        }
                    }
                }
        );
        mKeyBoardChecker = new KeyBoardVisibilityChecker(findViewById(android.R.id.content));
    }

    @Override
    protected void emojiChosen(String emoji) {
        // Insert the emoji in :shortcode: form at the cursor.
        Util.insertString(mEditor, String.format(":%1$s:", emoji));
    }

    @Override
    protected void insertString(String c) {
        Util.insertString(mEditor, c);
    }

    // Pack the edited comment (and issue context, if any) into the result intent.
    @OnClick(R.id.markdown_editor_done)
    void onDone() {
        final Intent done = new Intent();
        if(mComment == null) mComment = new Comment();
        mComment.setBody(mEditor.getInputText().toString());
        done.putExtra(getString(R.string.parcel_comment), mComment);
        if(mIssue != null) done.putExtra(getString(R.string.parcel_issue), mIssue);
        setResult(RESULT_OK, done);
        mHasBeenEdited = false;
        finish();
    }

    @OnClick(R.id.markdown_editor_discard)
    void onDiscard() {
        onBackPressed();
    }

    // Called when an image upload completes; inserts its URL at the cursor.
    @Override
    void imageLoadComplete(String url) {
        Util.insertString(mEditor, url);
    }

    @Override
    void imageLoadException(IOException ioe) {
    }

    @Override
    public void finish() {
        if(mHasBeenEdited && !mEditor.getText().toString().isEmpty()) {
            // Unsaved edits: confirm before discarding.
            final AlertDialog.Builder builder = new AlertDialog.Builder(this);
            builder.setTitle(R.string.title_discard_changes);
            builder.setPositiveButton(R.string.action_yes, (dialogInterface, i) -> {
                // Hide the keyboard first, then finish after a short delay so
                // the dismiss animation completes cleanly.
                final InputMethodManager imm = (InputMethodManager) getSystemService(
                        Context.INPUT_METHOD_SERVICE);
                imm.hideSoftInputFromWindow(findViewById(android.R.id.content).getWindowToken(), 0);
                mDoneButton.postDelayed(super::finish, 150);
            });
            builder.setNegativeButton(R.string.action_no, null);
            final Dialog deleteDialog = builder.create();
            deleteDialog.getWindow().getAttributes().windowAnimations = R.style.DialogAnimation;
            deleteDialog.show();
        } else {
            if(mKeyBoardChecker.isKeyboardOpen()) {
                // Same keyboard-dismiss-then-finish sequence as above.
                final InputMethodManager imm = (InputMethodManager) getSystemService(
                        Context.INPUT_METHOD_SERVICE);
                imm.hideSoftInputFromWindow(findViewById(android.R.id.content).getWindowToken(), 0);
                mDoneButton.postDelayed(super::finish, 150);
            } else {
                super.finish();
            }
        }
    }
}
| apache-2.0 |
tdj-br/red5-server | src/main/java/org/red5/server/net/rtmp/RTMPConnManager.java | 15303 | /*
* RED5 Open Source Flash Server - https://github.com/Red5/
*
* Copyright 2006-2016 by respective authors (see below). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.red5.server.net.rtmp;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.management.JMX;
import javax.management.ObjectName;
import org.apache.mina.core.session.IoSession;
import org.red5.server.api.Red5;
import org.red5.server.api.scope.IBasicScope;
import org.red5.server.jmx.mxbeans.RTMPMinaTransportMXBean;
import org.red5.server.net.IConnectionManager;
import org.red5.server.net.rtmp.codec.RTMP;
import org.red5.server.net.rtmpt.RTMPTConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.scheduling.concurrent.CustomizableThreadFactory;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
/**
* Responsible for management and creation of RTMP based connections.
*
* @author The Red5 Project
*/
public class RTMPConnManager implements IConnectionManager<RTMPConnection>, ApplicationContextAware, DisposableBean {
private static final Logger log = LoggerFactory.getLogger(RTMPConnManager.class);
protected static ApplicationContext applicationContext;
private ScheduledExecutorService executor = Executors.newScheduledThreadPool(1, new CustomizableThreadFactory("ConnectionChecker-"));
protected ConcurrentMap<String, RTMPConnection> connMap = new ConcurrentHashMap<String, RTMPConnection>();
protected AtomicInteger conns = new AtomicInteger();
protected static IConnectionManager<RTMPConnection> instance;
protected boolean debug;
    // Instance initializer: schedules a periodic job (initial delay 7 s,
    // every 30 s thereafter) that inspects all managed connections, removes
    // disconnected ones, and force-closes connections whose I/O has been idle
    // longer than their configured maxInactivity.
    {
        // create a scheduled job to check for dead or hung connections
        executor.scheduleAtFixedRate(new Runnable() {
            public void run() {
                // count the connections that need closing
                int closedConnections = 0;
                // get all the current connections
                Collection<RTMPConnection> allConns = getAllConnections();
                log.debug("Checking {} connections", allConns.size());
                for (RTMPConnection conn : allConns) {
                    if (log.isTraceEnabled()) {
                        // Detailed per-connection diagnostics, only at trace level.
                        log.trace("{} session: {} state: {} keep-alive running: {}", new Object[] { conn.getClass().getSimpleName(), conn.getSessionId(), RTMP.states[conn.getStateCode()], conn.running });
                        log.trace("Decoder lock - permits: {} queue length: {}", conn.decoderLock.availablePermits(), conn.decoderLock.getQueueLength());
                        log.trace("Encoder lock - permits: {} queue length: {}", conn.encoderLock.availablePermits(), conn.encoderLock.getQueueLength());
                        log.trace("Client streams: {} used: {}", conn.getStreams().size(), conn.getUsedStreamCount());
                        if (!conn.getAttributes().isEmpty()) {
                            log.trace("Attributes: {}", conn.getAttributes());
                        }
                        Iterator<IBasicScope> scopes = conn.getBasicScopes();
                        while (scopes.hasNext()) {
                            IBasicScope scope = scopes.next();
                            log.trace("Scope: {}", scope);
                        }
                    }
                    String sessionId = conn.getSessionId();
                    RTMP rtmp = conn.getState();
                    switch (rtmp.getState()) {
                        case RTMP.STATE_DISCONNECTED:
                        case RTMP.STATE_DISCONNECTING:
                            // Already torn down: just drop it from the map.
                            removeConnection(sessionId);
                            break;
                        default:
                            // XXX implement as a task and fix logic
                            // ghost clean up
                            // if (max time allowed for no response from client exceeded, ping)
                            // // Ping client
                            // conn.ping();
                            // // FIXME: getLastPingTime doesn't get updated right after ping
                            // // wait x time for lastPingTime and if exceeded, disconnect
                            // if (conn.getLastPingTime() > clientTTL * 1000) {
                            // log.info("TTL exceeded, disconnecting {}", conn);
                            // conn.close();
                            // }
                            // }
                            // Compute idle time since the last I/O activity;
                            // the source differs by transport type.
                            long ioTime = 0L;
                            IoSession session = conn.getIoSession();
                            if (conn instanceof RTMPMinaConnection) {
                                // get io time
                                ioTime = System.currentTimeMillis() - session.getLastIoTime();
                                if (log.isTraceEnabled()) {
                                    log.trace("Session - write queue: {} session count: {}", session.getWriteRequestQueue().size(), session.getService().getManagedSessionCount());
                                }
                            } else if (conn instanceof RTMPTConnection) {
                                ioTime = System.currentTimeMillis() - ((RTMPTConnection) conn).getLastDataReceived();
                            }
                            if (log.isDebugEnabled()) {
                                log.debug("Session last io time: {} ms", ioTime);
                            }
                            // if exceeds max inactivity kill and clean up
                            if (ioTime >= conn.maxInactivity) {
                                log.warn("Connection {} has exceeded the max inactivity threshold of {} ms", conn.getSessionId(), conn.maxInactivity);
                                if (session != null) {
                                    if (log.isDebugEnabled()) {
                                        log.debug("Prepared to clear write queue, if session is connected: {}; closing? {}", session.isConnected(), session.isClosing());
                                    }
                                    if (session.isConnected()) {
                                        // clear the write queue
                                        session.getWriteRequestQueue().clear(session);
                                    }
                                }
                                // call onInactive on the connection, this should cleanly close everything out
                                conn.onInactive();
                                if (!conn.isClosed()) {
                                    log.debug("Connection {} is not closed", conn.getSessionId());
                                } else {
                                    closedConnections++;
                                }
                            }
                    }
                }
                // if there is more than one connection that needed to be closed, request a GC to clean up memory.
                if (closedConnections > 0) {
                    System.gc();
                }
            }
        }, 7000, 30000, TimeUnit.MILLISECONDS);
    }
/**
 * Returns the shared connection manager, lazily creating it from the
 * "rtmpConnManager" Spring bean when one is configured, otherwise via direct
 * construction.
 * <p>
 * Declared {@code synchronized}: the previous unsynchronized check-then-act
 * on {@link #instance} could let two concurrent first callers each observe
 * {@code null} and create two managers with separate connection maps.
 *
 * @return the singleton connection manager instance
 */
public static synchronized IConnectionManager<RTMPConnection> getInstance() {
    if (instance == null) {
        log.trace("Connection manager instance does not exist");
        if (applicationContext != null && applicationContext.containsBean("rtmpConnManager")) {
            log.trace("Connection manager bean exists");
            instance = (RTMPConnManager) applicationContext.getBean("rtmpConnManager");
        } else {
            log.trace("Connection manager bean doesnt exist, creating new instance");
            instance = new RTMPConnManager();
        }
    }
    return instance;
}
/**
 * {@inheritDoc}
 * <p>
 * Creates a connection of the requested type, registers it in the session
 * map, and attaches the shared "rtmpScheduler" bean when one is configured.
 *
 * @param connCls concrete RTMPConnection subclass to instantiate
 * @return the new connection, or null when connCls is not an RTMPConnection
 *         type or creation failed (failure is logged, not rethrown)
 */
public RTMPConnection createConnection(Class<?> connCls) {
    RTMPConnection conn = null;
    if (RTMPConnection.class.isAssignableFrom(connCls)) {
        try {
            // create connection
            conn = createConnectionInstance(connCls);
            // add to local map
            connMap.put(conn.getSessionId(), conn);
            log.trace("Connections: {}", conns.incrementAndGet());
            // set the scheduler
            if (applicationContext.containsBean("rtmpScheduler") && conn.getScheduler() == null) {
                conn.setScheduler((ThreadPoolTaskScheduler) applicationContext.getBean("rtmpScheduler"));
            }
            log.trace("Connection created: {}", conn);
        } catch (Exception ex) {
            log.warn("Exception creating connection", ex);
        }
    }
    return conn;
}
/**
 * {@inheritDoc}
 * <p>
 * Creating a connection with a caller-supplied session id is not supported
 * by this manager.
 *
 * @throws UnsupportedOperationException always
 */
public RTMPConnection createConnection(Class<?> connCls, String sessionId) {
    throw new UnsupportedOperationException("Not implemented");
}
/**
 * Adds a connection.
 * <p>
 * NOTE(review): despite the name and the "Adding connection" log line, this
 * method only logs -- it never inserts the connection into connMap
 * (registration happens in createConnection). Confirm that is the intent.
 *
 * @param conn
 *            connection
 */
public void setConnection(RTMPConnection conn) {
    log.trace("Adding connection: {}", conn);
    int id = conn.getId();
    if (id == -1) {
        // fall back to the session id hash when the connection has no usable id
        log.debug("Connection has unsupported id, using session id hash");
        id = conn.getSessionId().hashCode();
    }
    log.debug("Connection id: {} session id hash: {}", conn.getId(), conn.getSessionId().hashCode());
    if (debug) {
        log.info("Connection count (map): {}", connMap.size());
        try {
            // pull live transport statistics from the JMX bean, if registered
            RTMPMinaTransportMXBean proxy = JMX.newMXBeanProxy(ManagementFactory.getPlatformMBeanServer(), new ObjectName("org.red5.server:type=RTMPMinaTransport"), RTMPMinaTransportMXBean.class, true);
            if (proxy != null) {
                log.info("{}", proxy.getStatistics());
            }
        } catch (Exception e) {
            log.warn("Error on jmx lookup", e);
        }
    }
}
/**
 * Looks up a connection by its client id.
 *
 * @param clientId
 *            client id
 * @return the registered connection with that id, or null when none matches
 */
public RTMPConnection getConnection(int clientId) {
    log.trace("Getting connection by client id: {}", clientId);
    for (RTMPConnection candidate : connMap.values()) {
        if (candidate.getId() != clientId) {
            continue;
        }
        // re-read through the map so the caller gets the currently registered entry
        return connMap.get(candidate.getSessionId());
    }
    return null;
}
/**
 * Looks up a connection by its session id.
 *
 * @param sessionId
 *            session id
 * @return the registered connection, or null when none is mapped
 */
public RTMPConnection getConnectionBySessionId(String sessionId) {
    log.trace("Getting connection by session id: {}", sessionId);
    // ConcurrentHashMap never maps to null, so a single get() suffices
    RTMPConnection registered = connMap.get(sessionId);
    if (registered != null) {
        return registered;
    }
    log.debug("Connection not found for {}", sessionId);
    if (log.isTraceEnabled()) {
        log.trace("Connections ({}) {}", connMap.size(), connMap.values());
    }
    return null;
}
/**
 * {@inheritDoc}
 * <p>
 * Scans the registered connections for a matching client id and removes the
 * first one found via its session id.
 */
public RTMPConnection removeConnection(int clientId) {
    log.trace("Removing connection with id: {}", clientId);
    // remove from map
    for (RTMPConnection candidate : connMap.values()) {
        if (candidate.getId() != clientId) {
            continue;
        }
        // remove the conn
        return removeConnection(candidate.getSessionId());
    }
    log.warn("Connection was not removed by id: {}", clientId);
    return null;
}
/**
 * {@inheritDoc}
 * <p>
 * Removes the connection for the given session id and, when one was found,
 * clears the thread-local connection via Red5.setConnectionLocal(null).
 * NOTE(review): the thread-local is cleared regardless of which connection
 * it currently holds -- confirm callers always run on the owning thread.
 *
 * @param sessionId session id of the connection to remove
 * @return the removed connection, or null when none was registered
 */
public RTMPConnection removeConnection(String sessionId) {
    log.trace("Removing connection with session id: {}", sessionId);
    if (log.isTraceEnabled()) {
        log.trace("Connections ({}) at pre-remove: {}", connMap.size(), connMap.values());
    }
    // remove from map
    RTMPConnection conn = connMap.remove(sessionId);
    if (conn != null) {
        log.trace("Connections: {}", conns.decrementAndGet());
        Red5.setConnectionLocal(null);
    }
    return conn;
}
/**
 * {@inheritDoc}
 * <p>
 * Returns a snapshot copy, so callers can iterate without seeing
 * concurrent registration changes.
 */
public Collection<RTMPConnection> getAllConnections() {
    return new ArrayList<RTMPConnection>(connMap.values());
}
/**
 * {@inheritDoc}
 * <p>
 * Atomically-ish drains the manager: snapshots every registered
 * connection, clears the map, and resets the counter.
 */
public Collection<RTMPConnection> removeConnections() {
    ArrayList<RTMPConnection> snapshot = new ArrayList<RTMPConnection>(connMap.values());
    connMap.clear();
    conns.set(0);
    return snapshot;
}
/**
 * Creates a connection instance based on the supplied type.
 * <p>
 * Mina and RTMPT connections are obtained as Spring beans so they are fully
 * wired; any other RTMPConnection subclass is constructed reflectively via
 * its no-arg constructor.
 *
 * @param cls
 *            class
 * @return connection
 * @throws Exception
 *             on error
 */
public RTMPConnection createConnectionInstance(Class<?> cls) throws Exception {
    RTMPConnection conn = null;
    if (cls == RTMPMinaConnection.class) {
        conn = (RTMPMinaConnection) applicationContext.getBean(RTMPMinaConnection.class);
    } else if (cls == RTMPTConnection.class) {
        conn = (RTMPTConnection) applicationContext.getBean(RTMPTConnection.class);
    } else {
        conn = (RTMPConnection) cls.newInstance();
    }
    return conn;
}
/**
 * @param debug
 *            the debug to set; when true, setConnection logs extra
 *            connection-count and JMX transport statistics
 */
public void setDebug(boolean debug) {
    this.debug = debug;
}
/**
 * Stores the Spring application context in a static field so it is reachable
 * from the static {@link #getInstance()} factory.
 * NOTE(review): writing a static from an instance setter means the last
 * initialized manager wins -- confirm only one is ever configured.
 */
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
    RTMPConnManager.applicationContext = applicationContext;
}
/**
 * Shuts down the connection-checker scheduler immediately; pending and
 * in-flight checks are abandoned.
 */
public void destroy() throws Exception {
    executor.shutdownNow();
}
}
| apache-2.0 |
wendal/alipay-sdk | src/main/java/com/alipay/api/domain/EquipmentAuthRemoveQueryBypageDTO.java | 821 | package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
 * Paged query result entry describing a device that has been unbound
 * (released from its authorization).
 *
 * @author auto create
 * @since 1.0, 2016-10-26 17:43:38
 */
public class EquipmentAuthRemoveQueryBypageDTO extends AlipayObject {

    private static final long serialVersionUID = 6192993535998982353L;

    /**
     * Device identifier.
     */
    @ApiField("device_id")
    private String deviceId;

    /**
     * Time at which the device was unbound.
     */
    @ApiField("unbind_time")
    private String unbindTime;

    public String getDeviceId() {
        return deviceId;
    }

    public void setDeviceId(String deviceId) {
        this.deviceId = deviceId;
    }

    public String getUnbindTime() {
        return unbindTime;
    }

    public void setUnbindTime(String unbindTime) {
        this.unbindTime = unbindTime;
    }
}
| apache-2.0 |
lecousin/java-framework-core | net.lecousin.core/src/test/java/net/lecousin/framework/core/tests/io/provider/TestFileIOProviderFromSubPath.java | 731 | package net.lecousin.framework.core.tests.io.provider;
import java.io.File;
import net.lecousin.framework.concurrent.threads.Task;
import net.lecousin.framework.core.test.LCCoreAbstractTest;
import net.lecousin.framework.io.TemporaryFiles;
import net.lecousin.framework.io.provider.FileIOProviderFromSubPath;
import org.junit.Test;
/**
 * Verifies that a FileIOProviderFromSubPath rooted at a directory can open
 * (and close) a readable IO for a file inside that directory, addressed by
 * its simple name.
 */
public class TestFileIOProviderFromSubPath extends LCCoreAbstractTest {

    @Test
    public void test() throws Exception {
        // create a real temporary file, then resolve it through a provider rooted at its parent
        File f = TemporaryFiles.get().createFileSync("test", "fileioproviderfromname");
        FileIOProviderFromSubPath provider = new FileIOProviderFromSubPath(f.getParentFile());
        // open a readable IO for the file by sub-path and immediately close it
        provider.get(f.getName()).provideIOReadable(Task.Priority.NORMAL).close();
    }
}
| apache-2.0 |
phax/ph-commons | ph-collection/src/main/java/com/helger/collection/pair/IMutablePair.java | 1507 | /*
* Copyright (C) 2014-2022 Philip Helger (www.helger.com)
* philip[at]helger[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.helger.collection.pair;
import javax.annotation.Nonnull;
import com.helger.commons.state.EChange;
/**
 * Interface for a writable pair: extends the read-only {@code IPair} with
 * setters for both elements.
 *
 * @author Philip Helger
 * @param <DATA1TYPE>
 *        First type.
 * @param <DATA2TYPE>
 *        Second type.
 */
public interface IMutablePair <DATA1TYPE, DATA2TYPE> extends IPair <DATA1TYPE, DATA2TYPE>
{
  /**
   * Set the first value.
   *
   * @param aFirst
   *        The first value. May be <code>null</code> depending on the
   *        implementation.
   * @return {@link EChange} indicating whether the stored value was altered.
   */
  @Nonnull
  EChange setFirst (DATA1TYPE aFirst);

  /**
   * Set the second value.
   *
   * @param aSecond
   *        The second value. May be <code>null</code> depending on the
   *        implementation.
   * @return {@link EChange} indicating whether the stored value was altered.
   */
  @Nonnull
  EChange setSecond (DATA2TYPE aSecond);
}
| apache-2.0 |
mcollovati/camel | components/camel-aws2-msk/src/generated/java/org/apache/camel/component/aws2/msk/MSK2EndpointConfigurer.java | 6290 | /* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.component.aws2.msk;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.spi.ConfigurerStrategy;
import org.apache.camel.spi.GeneratedPropertyConfigurer;
import org.apache.camel.spi.PropertyConfigurerGetter;
import org.apache.camel.util.CaseInsensitiveMap;
import org.apache.camel.support.component.PropertyConfigurerSupport;
/**
 * Generated by camel build tools - do NOT edit this file!
 * <p>
 * Reflection-free property configurer for the AWS2 MSK endpoint: maps option
 * names (case-insensitively) to typed setters/getters on {@code MSK2Endpoint}.
 * NOTE(review): any comments added here will be lost on regeneration.
 */
@SuppressWarnings("unchecked")
public class MSK2EndpointConfigurer extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {

    // Option name -> option type, shared across instances; case-insensitive keys.
    private static final Map<String, Object> ALL_OPTIONS;
    static {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("label", java.lang.String.class);
        map.put("autoDiscoverClient", boolean.class);
        map.put("lazyStartProducer", boolean.class);
        map.put("mskClient", software.amazon.awssdk.services.kafka.KafkaClient.class);
        map.put("operation", org.apache.camel.component.aws2.msk.MSK2Operations.class);
        map.put("pojoRequest", boolean.class);
        map.put("proxyHost", java.lang.String.class);
        map.put("proxyPort", java.lang.Integer.class);
        map.put("proxyProtocol", software.amazon.awssdk.core.Protocol.class);
        map.put("region", java.lang.String.class);
        map.put("trustAllCertificates", boolean.class);
        map.put("basicPropertyBinding", boolean.class);
        map.put("synchronous", boolean.class);
        map.put("accessKey", java.lang.String.class);
        map.put("secretKey", java.lang.String.class);
        ALL_OPTIONS = map;
        ConfigurerStrategy.addConfigurerClearer(MSK2EndpointConfigurer::clearConfigurers);
    }

    // Applies one named option to the endpoint; both lowercase and camelCase names accepted.
    @Override
    public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
        MSK2Endpoint target = (MSK2Endpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accesskey":
        case "accessKey": target.getConfiguration().setAccessKey(property(camelContext, java.lang.String.class, value)); return true;
        case "autodiscoverclient":
        case "autoDiscoverClient": target.getConfiguration().setAutoDiscoverClient(property(camelContext, boolean.class, value)); return true;
        case "basicpropertybinding":
        case "basicPropertyBinding": target.setBasicPropertyBinding(property(camelContext, boolean.class, value)); return true;
        case "lazystartproducer":
        case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
        case "mskclient":
        case "mskClient": target.getConfiguration().setMskClient(property(camelContext, software.amazon.awssdk.services.kafka.KafkaClient.class, value)); return true;
        case "operation": target.getConfiguration().setOperation(property(camelContext, org.apache.camel.component.aws2.msk.MSK2Operations.class, value)); return true;
        case "pojorequest":
        case "pojoRequest": target.getConfiguration().setPojoRequest(property(camelContext, boolean.class, value)); return true;
        case "proxyhost":
        case "proxyHost": target.getConfiguration().setProxyHost(property(camelContext, java.lang.String.class, value)); return true;
        case "proxyport":
        case "proxyPort": target.getConfiguration().setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true;
        case "proxyprotocol":
        case "proxyProtocol": target.getConfiguration().setProxyProtocol(property(camelContext, software.amazon.awssdk.core.Protocol.class, value)); return true;
        case "region": target.getConfiguration().setRegion(property(camelContext, java.lang.String.class, value)); return true;
        case "secretkey":
        case "secretKey": target.getConfiguration().setSecretKey(property(camelContext, java.lang.String.class, value)); return true;
        case "synchronous": target.setSynchronous(property(camelContext, boolean.class, value)); return true;
        case "trustallcertificates":
        case "trustAllCertificates": target.getConfiguration().setTrustAllCertificates(property(camelContext, boolean.class, value)); return true;
        default: return false;
        }
    }

    @Override
    public Map<String, Object> getAllOptions(Object target) {
        return ALL_OPTIONS;
    }

    public static void clearBootstrapConfigurers() {
    }

    public static void clearConfigurers() {
        ALL_OPTIONS.clear();
    }

    // Reads one named option from the endpoint; returns null for unknown names.
    @Override
    public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
        MSK2Endpoint target = (MSK2Endpoint) obj;
        switch (ignoreCase ? name.toLowerCase() : name) {
        case "accesskey":
        case "accessKey": return target.getConfiguration().getAccessKey();
        case "autodiscoverclient":
        case "autoDiscoverClient": return target.getConfiguration().isAutoDiscoverClient();
        case "basicpropertybinding":
        case "basicPropertyBinding": return target.isBasicPropertyBinding();
        case "lazystartproducer":
        case "lazyStartProducer": return target.isLazyStartProducer();
        case "mskclient":
        case "mskClient": return target.getConfiguration().getMskClient();
        case "operation": return target.getConfiguration().getOperation();
        case "pojorequest":
        case "pojoRequest": return target.getConfiguration().isPojoRequest();
        case "proxyhost":
        case "proxyHost": return target.getConfiguration().getProxyHost();
        case "proxyport":
        case "proxyPort": return target.getConfiguration().getProxyPort();
        case "proxyprotocol":
        case "proxyProtocol": return target.getConfiguration().getProxyProtocol();
        case "region": return target.getConfiguration().getRegion();
        case "secretkey":
        case "secretKey": return target.getConfiguration().getSecretKey();
        case "synchronous": return target.isSynchronous();
        case "trustallcertificates":
        case "trustAllCertificates": return target.getConfiguration().isTrustAllCertificates();
        default: return null;
        }
    }
}
| apache-2.0 |
akdiallo/AppScool | src/appscool/model/ClasseTestModel.java | 280 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package appscool.model;
/**
 * Placeholder model class for "classe" tests.
 * NOTE(review): currently empty -- confirm whether it is still needed or can
 * be removed.
 *
 * @author Abibatou
 */
public class ClasseTestModel {
}
| apache-2.0 |
jamiemccrindle/bpmscript | bpmscript-core/src/main/java/org/bpmscript/js/reload/ILibraryToFile.java | 1157 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bpmscript.js.reload;
/**
 * A value object that links a library to the file that it is referenced in.
 */
public interface ILibraryToFile {

    /**
     * @return the URL for a library, typically a file path
     */
    String getLibrary();

    /**
     * @return the file (as a path string) that references the library
     */
    String getFile();
}
| apache-2.0 |
coverclock/com-diag-deringer | RainbowHatThing/app/src/main/java/com/diag/deringer/rainbowhatthing/MainActivity.java | 16295 | package com.diag.deringer.rainbowhatthing;
// Copyright 2017 by the Digital Aggregates Corporation, Arvada Colorado USA.
// Licensed under the terms of the Apache License version 2.0.
// https://github.com/androidthings/contrib-drivers/tree/master/rainbowhat
// mailto:coverclock@diag.com
// https://github.com/coverclock/com-diag-deringer
import android.app.Activity;
import android.os.Bundle;
import com.google.android.things.contrib.driver.rainbowhat.RainbowHat;
import com.google.android.things.contrib.driver.ht16k33.Ht16k33;
import com.google.android.things.contrib.driver.ht16k33.AlphanumericDisplay;
import com.google.android.things.contrib.driver.pwmspeaker.Speaker;
import com.google.android.things.contrib.driver.apa102.Apa102;
import com.google.android.things.contrib.driver.bmx280.Bmx280;
import com.google.android.things.pio.Gpio;
import java.io.IOException;
import java.lang.InterruptedException;
import java.lang.Boolean;
import android.util.Log;
import android.graphics.Color;
public class MainActivity extends Activity {
private static final String TAG = "MainActivity";
/**
 * Sleeps for the given number of milliseconds. Zero yields the processor
 * instead; a negative value does nothing.
 * NOTE(review): an interrupt during the sleep is swallowed without restoring
 * the thread's interrupt status; the worker loops rely on their own running
 * flag (lifecycleDone) to exit instead -- confirm this is intentional.
 */
public void pause(long millis) {
    try {
        if (millis > 0) {
            Thread.sleep(millis);
        } else if (millis == 0) {
            Thread.yield();
        } else {
            // Do nothing.
        }
    } catch (InterruptedException e) {
        /* Do nothing. */
    }
}
/**
 * Base class for the demo workers: a Thread with an explicit lifecycle of
 * open -> enable -> start ... stop/wait -> disable -> close, mirroring how
 * the peripherals are acquired and released in onCreate().
 */
protected class Lifecycle extends Thread {

    // True while the worker loop should keep going; guarded by synchronized (this).
    private boolean running = false;

    public Lifecycle() {
        Log.i(getClass().getSimpleName(), "constructor");
    }

    // Acquire the peripheral(s); subclasses override and call super first.
    public void lifecycleOpen() throws java.io.IOException {
        Log.i(getClass().getSimpleName(), "open");
    }

    // Release the peripheral(s); counterpart of lifecycleOpen().
    public void lifecycleClose() throws java.io.IOException {
        Log.i(getClass().getSimpleName(), "close");
    }

    // Put the peripheral into its active/configured state.
    public void lifecycleEnable() throws java.io.IOException {
        Log.i(getClass().getSimpleName(), "enable");
    }

    // Return the peripheral to its quiescent state.
    public void lifecycleDisable() throws java.io.IOException {
        Log.i(getClass().getSimpleName(), "disable");
    }

    // Start the worker thread exactly once.
    public void lifecycleStart() {
        Log.i(getClass().getSimpleName(), "start");
        synchronized (this) {
            if (!running) {
                running = true;
                start();
            }
        }
    }

    // Request the worker to stop and interrupt any sleep/wait it is in.
    public void lifecycleStop() {
        Log.i(getClass().getSimpleName(), "stop");
        synchronized (this) {
            running = false;
            interrupt();
        }
    }

    // Stop the worker and block until its thread has terminated.
    public void lifecycleWait() {
        Log.i(getClass().getSimpleName(), "wait");
        boolean done;
        synchronized (this) {
            running = false;
            interrupt();
        }
        do {
            try {
                join();
                done = true;
            } catch (InterruptedException e) {
                done = false;
            }
        } while (!done);
    }

    // Polled by worker loops: true once a stop has been requested.
    public boolean lifecycleDone() {
        boolean done;
        synchronized (this) {
            done = !running;
        }
        if (done) {
            Log.i(getClass().getSimpleName(), "done");
        }
        return done;
    }
}
/**
 * Worker that drives the three discrete LEDs, counting 0..7 in binary
 * (red = bit 2, green = bit 1, blue = bit 0) at 100 ms per step.
 */
protected class Leds extends Lifecycle {

    protected Gpio red;
    protected Gpio green;
    protected Gpio blue;

    @Override
    public void lifecycleOpen() throws java.io.IOException {
        super.lifecycleOpen();
        red = RainbowHat.openLedRed();
        green = RainbowHat.openLedGreen();
        blue = RainbowHat.openLedBlue();
    }

    @Override
    public void lifecycleClose() throws java.io.IOException {
        super.lifecycleClose();
        // close in reverse order of open
        blue.close();
        green.close();
        red.close();
    }

    @Override
    public void lifecycleEnable() throws java.io.IOException {
        super.lifecycleEnable();
        // all LEDs off initially
        red.setValue(false);
        green.setValue(false);
        blue.setValue(false);
    }

    @Override
    public void lifecycleDisable() throws java.io.IOException {
        super.lifecycleDisable();
        // all LEDs off on teardown
        blue.setValue(false);
        green.setValue(false);
        red.setValue(false);
    }

    @Override
    public void run() {
        Log.i(getClass().getSimpleName(), "begin");
        while (!lifecycleDone()) {
            for (int ii = 0; ii < 8; ++ii) {
                try {
                    // map the counter's bits onto the three LEDs
                    red.setValue((ii & 0x4) != 0);
                    green.setValue((ii & 0x2) != 0);
                    blue.setValue((ii & 0x1) != 0);
                } catch (IOException e) {
                    // Do nothing.
                }
                pause(100);
            }
        }
        Log.i(getClass().getSimpleName(), "end");
    }
}
/**
 * Worker that samples the Bmx280 temperature/pressure sensor once a second
 * and logs the readings in C, F, hPa, and inches of mercury.
 */
protected class Sensor extends Lifecycle {

    protected Bmx280 sensor;

    @Override
    public void lifecycleOpen() throws java.io.IOException {
        super.lifecycleOpen();
        sensor = RainbowHat.openSensor();
    }

    @Override
    public void lifecycleClose() throws java.io.IOException {
        super.lifecycleClose();
        sensor.close();
    }

    @Override
    public void lifecycleEnable() throws java.io.IOException {
        super.lifecycleEnable();
        // minimal oversampling for both channels
        sensor.setTemperatureOversampling(Bmx280.OVERSAMPLING_1X);
        sensor.setPressureOversampling(Bmx280.OVERSAMPLING_1X);
    }

    @Override
    public void run() {
        Log.i(getClass().getSimpleName(), "begin");
        while (!lifecycleDone()) {
            try {
                // readings[0] is temperature (Celsius), readings[1] is pressure (hPa)
                float[] readings = sensor.readTemperatureAndPressure();
                float centigrade = readings[0];
                // Celsius -> Fahrenheit
                float fahrenheit = (centigrade * 9.0f / 5.0f) + 32.0f;
                float hectopascals = readings[1];
                // hPa -> inches of mercury
                float inches = hectopascals * 0.02953f;
                Log.i(getClass().getSimpleName(), centigrade + "C " + fahrenheit + "F " + hectopascals + "hPa " + inches + "in");
                pause(1000);
            } catch (IOException e) {
                // Do nothing.
            }
        }
        Log.i(getClass().getSimpleName(), "end");
    }
}
/**
 * Worker that animates a spinner (-, \, |, /) on the four-character
 * alphanumeric display at 100 ms per frame.
 * NOTE(review): several spinner frames below are identical; leading spaces
 * that would march the bar across the digits may have been lost in
 * formatting -- confirm against the upstream source.
 */
protected class Segment extends Lifecycle {

    protected AlphanumericDisplay segment;

    // Animation frames, shown in order; each string is written to the display whole.
    private String[] spinner = new String[] {
        "-", "\\", "|", "/",
        " -", " \\", " |", " /",
        " -", " \\", " |", " /",
        " -", " \\", " |", " /"
    };

    @Override
    public void lifecycleOpen() throws java.io.IOException {
        super.lifecycleOpen();
        segment = RainbowHat.openDisplay();
    }

    @Override
    public void lifecycleClose() throws java.io.IOException {
        super.lifecycleClose();
        segment.close();
    }

    @Override
    public void lifecycleEnable() throws java.io.IOException {
        super.lifecycleEnable();
        segment.setBrightness(Ht16k33.HT16K33_BRIGHTNESS_MAX);
        segment.setEnabled(true);
    }

    @Override
    public void lifecycleDisable() throws java.io.IOException {
        super.lifecycleDisable();
        segment.setEnabled(false);
    }

    @Override
    public void run() {
        Log.i(getClass().getSimpleName(), "begin");
        while (!lifecycleDone()) {
            try {
                // play one full animation cycle
                for (int ii = 0; ii < spinner.length; ++ii) {
                    segment.display(spinner[ii]);
                    pause(100);
                }
            } catch (IOException e) {
                // Do nothing.
            }
        }
        Log.i(getClass().getSimpleName(), "end");
    }
}
/**
 * Worker that drives the APA102 LED strip, alternating between a rainbow
 * and its mirror image once a second.
 * NOTE(review): flush() rewrites every element of the buffer with ~0 and
 * writes it to the strip immediately after each rainbow write, so the
 * rainbow may never be visible -- confirm the flush() calls in run() are
 * intended.
 */
protected class Strip extends Lifecycle {

    protected Apa102 strip;
    // Pixel buffer, one packed color int per LED.
    protected int[] rainbow = new int[RainbowHat.LEDSTRIP_LENGTH];

    // Overwrites the whole buffer (all bits set) and pushes it to the strip.
    protected void flush() throws java.io.IOException {
        for (int i = 0; i < rainbow.length; i++) {
            rainbow[i] = ~0;
        }
        strip.write(rainbow);
    }

    @Override
    public void lifecycleOpen() throws java.io.IOException {
        super.lifecycleOpen();
        strip = RainbowHat.openLedStrip();
    }

    @Override
    public void lifecycleClose() throws java.io.IOException {
        super.lifecycleClose();
        strip.close();
    }

    @Override
    public void lifecycleEnable() throws java.io.IOException {
        super.lifecycleEnable();
        strip.setBrightness(Apa102.MAX_BRIGHTNESS);
    }

    @Override
    public void lifecycleDisable() throws java.io.IOException {
        super.lifecycleDisable();
        // dim to zero, write a hue-0 pattern, then flush the buffer
        strip.setBrightness(0);
        for (int i = 0; i < rainbow.length; i++) {
            rainbow[i] = Color.HSVToColor(0xff, new float[] { 0.0f, 1.0f, 1.0f });
        }
        strip.write(rainbow);
        flush();
    }

    @Override
    public void run() {
        Log.i(getClass().getSimpleName(), "begin");
        while (!lifecycleDone()) {
            try {
                // rainbow: hue increases along the strip
                for (int ii = 0; ii < rainbow.length; ii++) {
                    rainbow[ii] = Color.HSVToColor(0xff, new float[]{ii * 360.0f / rainbow.length, 1.0f, 1.0f});
                }
                strip.write(rainbow);
                flush();
                pause(1000);
                // mirrored rainbow: hue decreases along the strip
                for (int ii = 0; ii < rainbow.length; ii++) {
                    rainbow[ii] = Color.HSVToColor(0xff, new float[]{(rainbow.length - 1 - ii) * 360.0f / rainbow.length, 1.0f, 1.0f});
                }
                strip.write(rainbow);
                flush();
                pause(1000);
            } catch (IOException e) {
                // Do nothing.
            }
        }
        Log.i(getClass().getSimpleName(), "end");
    }
}
// Kinds of melody a Buzzer worker can play.
public enum Noise { NONE, HOTLINE, WELCOME, TINNITUS }

// Monitor guarding access to the single shared piezo buzzer.
private static final Object noises = new Object();

// True while some Buzzer worker holds the piezo; guarded by noises.
private static boolean noisy = false;

// Acquire ("P") the binary semaphore guarding the piezo; blocks until free.
public void p() {
    synchronized (noises) {
        while (noisy) {
            try {
                noises.wait();
            } catch (InterruptedException e) {
                // Do nothing.
            }
        }
        noisy = true;
    }
}

// Release ("V") the binary semaphore and wake any waiting Buzzer workers.
public void v() {
    synchronized (noises) {
        noisy = false;
        noises.notifyAll();
    }
}
/**
 * Worker that repeatedly plays one fixed melody on the shared piezo buzzer.
 * Access to the piezo is serialized across Buzzer instances via p()/v();
 * the piezo is opened and closed around every playback cycle.
 */
protected class Buzzer extends Lifecycle {

    protected Speaker buzzer;
    // Which melody this worker plays; fixed at construction.
    protected Noise noise = Noise.NONE;

    public Buzzer(Noise nn) {
        noise = nn;
    }

    // Plays a fixed three-phrase note sequence (short/long tone pattern).
    private void hotline() {
        try {
            buzzer.play(460);
            pause(150);
            buzzer.stop();
            buzzer.play(500);
            pause(250);
            buzzer.stop();
            for (int ii = 0; ii < 2; ++ii) {
                buzzer.play(500);
                pause(150);
                buzzer.stop();
            }
            buzzer.play(550);
            pause(250);
            buzzer.stop();
            for (int ii = 0; ii < 2; ++ii) {
                buzzer.play(550);
                pause(150);
                buzzer.stop();
            }
            buzzer.play(610);
            pause(250);
            buzzer.stop();
            for (int ii = 0; ii < 2; ++ii) {
                buzzer.play(610);
                pause(150);
                buzzer.stop();
            }
            buzzer.play(470);
            pause(250);
            buzzer.stop();
            for (int ii = 0; ii < 2; ++ii) {
                buzzer.play(470);
                pause(150);
                buzzer.stop();
            }
            buzzer.play(500);
            pause(250);
            buzzer.stop();
            pause(500);
        } catch (IOException e) {
            // Do nothing.
        }
    }

    // Plays a slow five-note sequence, one second per note.
    private void welcome() {
        try {
            buzzer.play(294);
            pause(1000);
            buzzer.play(330);
            pause(1000);
            buzzer.play(262);
            pause(1000);
            buzzer.play(131);
            pause(1000);
            buzzer.play(196);
            pause(1000);
            buzzer.stop();
            pause(500);
        } catch (IOException e) {
            // Do nothing.
        }
    }

    // Sweeps the audible range from 20 Hz to 20 kHz in 10% frequency steps.
    private void tinnitus() {
        try {
            for (float frequency = 20.0f; frequency <= 20000.0f; frequency *= 1.1f) {
                Log.i(getClass().getSimpleName(), frequency + "Hz");
                buzzer.play(frequency);
                pause(100);
            }
            buzzer.stop();
            pause(500);
        } catch (IOException e) {
            // Do nothing.
        }
    }

    @Override
    public void run() {
        Log.i(getClass().getSimpleName(), "begin");
        while (!lifecycleDone()) {
            // serialize use of the single piezo across all Buzzer workers
            p();
            do {
                if (lifecycleDone()) {
                    break;
                }
                try {
                    buzzer = RainbowHat.openPiezo();
                    switch (noise) {
                        case HOTLINE:
                            hotline();
                            break;
                        case WELCOME:
                            welcome();
                            break;
                        case TINNITUS:
                            tinnitus();
                            break;
                        default:
                            break;
                    }
                    buzzer.close();
                } catch (IOException e) {
                    // Do nothing.
                }
                if (lifecycleDone()) {
                    break;
                }
            } while (false);
            v();
            pause(0);
        }
        Log.i(getClass().getSimpleName(), "end");
    }
}
/**
 * Entry point: builds all workers, runs them for 30 seconds, then tears
 * everything down in the reverse order (stop, wait, disable, close).
 * NOTE(review): pause(30000) blocks this thread for the full demo duration
 * -- acceptable for an Android Things demo, but confirm it is intended.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Lifecycle[] lifecycles = new Lifecycle[] { new Leds(), new Sensor(), new Segment(), new Strip(), new Buzzer(Noise.HOTLINE), new Buzzer(Noise.WELCOME), new Buzzer(Noise.TINNITUS) };
    try {
        // bring everything up: open, enable, start
        for (int ii = 0; ii < lifecycles.length; ++ii) {
            lifecycles[ii].lifecycleOpen();
        }
        for (int ii = 0; ii < lifecycles.length; ++ii) {
            lifecycles[ii].lifecycleEnable();
        }
        for (int ii = 0; ii < lifecycles.length; ++ii) {
            lifecycles[ii].lifecycleStart();
        }
        // let the demo run
        pause(30000);
        // tear everything down: stop, join, disable, close
        for (int ii = 0; ii < lifecycles.length; ++ii) {
            lifecycles[ii].lifecycleStop();
        }
        for (int ii = 0; ii < lifecycles.length; ++ii) {
            lifecycles[ii].lifecycleWait();
        }
        for (int ii = 0; ii < lifecycles.length; ++ii) {
            lifecycles[ii].lifecycleDisable();
        }
        for (int ii = 0; ii < lifecycles.length; ++ii) {
            lifecycles[ii].lifecycleClose();
        }
    } catch (IOException e) {
        Log.e(TAG, "Failed! " + e);
    }
}
}
| apache-2.0 |
emsouza/beanlib | core/src/main/java/net/sf/beanlib/spi/Transformable.java | 1429 | /*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.beanlib.spi;
import net.sf.beanlib.PropertyInfo;
/**
 * Object Transformer.
 *
 * @author Joe D. Velopar
 */
public interface Transformable {
    /**
     * Returns an object transformed from the input object with the given target class.
     *
     * @param <T> the target type the input object is transformed to.
     * @param in the input object to be transformed. If propertyInfo is non-null, the input object is a JavaBean
     *            property value.
     * @param toClass the target class to be transformed to.
     * @param propertyInfo If null, it means the in object is a root level object. Otherwise, propertyInfo contains
     *            information about the input object as a java bean property value to be transformed.
     * @return the transformed object.
     */
    <T> T transform(Object in, Class<T> toClass, PropertyInfo propertyInfo);
}
| apache-2.0 |
zoneXcoding/Mineworld | mods/craft/src/main/java/org/terasology/craft/events/crafting/ChangeLevelEvent.java | 494 | package org.terasology.craft.events.crafting;
import org.terasology.entitySystem.AbstractEvent;
import org.terasology.entitySystem.EntityRef;
/**
 * Event describing a pending crafting level change.
 */
public class ChangeLevelEvent extends AbstractEvent {

    // Signed level change; negative values denote a decrease (see isDecreaseEvent).
    private float nextLevel;

    // Entity that triggered the change.
    // NOTE(review): stored but never read here and no getter is visible -- confirm it is used elsewhere.
    private EntityRef instigator;

    public ChangeLevelEvent(float nextLevel, EntityRef instigator) {
        this.nextLevel = nextLevel;
        this.instigator = instigator;
    }

    /**
     * @return true when the pending level change is negative (a decrease)
     */
    public boolean isDecreaseEvent(){
        return nextLevel < 0;
    }
}
| apache-2.0 |
codegist/common | src/main/java/org/codegist/common/lang/Strings.java | 2510 | /*
* Copyright 2011 CodeGist.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ===================================================================
*
* More information at http://www.codegist.org.
*/
package org.codegist.common.lang;
/**
 * Static helpers for null-safe string emptiness checks, defaults, and
 * right-hand substrings. Not instantiable.
 *
 * @author Laurent Gilles (laurent.gilles@codegist.org)
 */
public final class Strings {

    private Strings() {
        throw new IllegalStateException();
    }

    /**
     * Tells whether the given string is null or contains only whitespace.
     *
     * @param str string to test, may be null
     * @return true when null or blank after trimming
     */
    public static boolean isBlank(String str) {
        if (str == null) {
            return true;
        }
        return str.trim().length() == 0;
    }

    /**
     * Tells whether the given string is null or has zero length.
     *
     * @param str string to test, may be null
     * @return true when null or empty
     */
    public static boolean isEmpty(String str) {
        if (str == null) {
            return true;
        }
        return str.length() == 0;
    }

    /**
     * Negation of {@link #isBlank(String)}.
     *
     * @param str string to test, may be null
     * @return true when non-null and not blank
     */
    public static boolean isNotBlank(String str) {
        return !isBlank(str);
    }

    /**
     * Negation of {@link #isEmpty(String)}.
     *
     * @param str string to test, may be null
     * @return true when non-null and non-empty
     */
    public static boolean isNotEmpty(String str) {
        return !isEmpty(str);
    }

    /**
     * Returns str when it is not blank, otherwise def.
     *
     * @param str string to check
     * @param def fallback returned when str is blank
     * @return str or def
     */
    public static String defaultIfBlank(String str, String def) {
        if (isBlank(str)) {
            return def;
        }
        return str;
    }

    /**
     * Returns str when it is not empty, otherwise def.
     *
     * @param str string to check
     * @param def fallback returned when str is empty
     * @return str or def
     */
    public static String defaultIfEmpty(String str, String def) {
        if (isEmpty(str)) {
            return def;
        }
        return str;
    }

    /**
     * Returns the rightmost {@code length} characters of str, or str itself
     * when it is shorter than {@code length}.
     *
     * @param str source string (must not be null)
     * @param length number of trailing characters to keep
     * @return trailing substring of at most {@code length} characters
     */
    public static String substringRight(String str, int length) {
        int strlen = str.length();
        return strlen >= length ? str.substring(strlen - length) : str;
    }
}
| apache-2.0 |
Nickname0806/Test_Q4 | java/org/apache/tomcat/util/net/AbstractJsseEndpoint.java | 9986 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.net;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.channels.NetworkChannel;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;
import javax.net.ssl.SSLSessionContext;
import org.apache.tomcat.util.compat.JreCompat;
import org.apache.tomcat.util.net.SSLHostConfig.Type;
import org.apache.tomcat.util.net.openssl.OpenSSLImplementation;
import org.apache.tomcat.util.net.openssl.ciphers.Cipher;
/**
 * Base class for endpoints that provide TLS via a pluggable
 * {@link SSLImplementation}. On top of {@link AbstractEndpoint} it adds
 * per-virtual-host SSL context creation/teardown ({@link SSLHostConfig}) and
 * {@link SSLEngine} construction, including SNI-based host/certificate
 * selection and ALPN protocol negotiation.
 */
public abstract class AbstractJsseEndpoint<S,U> extends AbstractEndpoint<S,U> {
    // Fully qualified class name of the SSLImplementation to use; null selects
    // the default implementation (see SSLImplementation.getInstance(null)).
    private String sslImplementationName = null;
    // Upper bound (in bytes) used when parsing SNI data from the ClientHello.
    private int sniParseLimit = 64 * 1024;
    private SSLImplementation sslImplementation = null;
    public String getSslImplementationName() {
        return sslImplementationName;
    }
    public void setSslImplementationName(String s) {
        this.sslImplementationName = s;
    }
    public SSLImplementation getSslImplementation() {
        return sslImplementation;
    }
    public int getSniParseLimit() {
        return sniParseLimit;
    }
    public void setSniParseLimit(int sniParseLimit) {
        this.sniParseLimit = sniParseLimit;
    }
    /**
     * {@inheritDoc}
     * <p>
     * OpenSSL-style configuration is accepted as well as JSSE-style when the
     * configured implementation is the OpenSSL one; otherwise only JSSE.
     */
    @Override
    protected Type getSslConfigType() {
        if (OpenSSLImplementation.class.getName().equals(sslImplementationName)) {
            return SSLHostConfig.Type.EITHER;
        } else {
            return SSLHostConfig.Type.JSSE;
        }
    }
    /**
     * Creates the SSLImplementation and an SSL context for every configured
     * virtual host. No-op when SSL is disabled.
     *
     * @throws Exception if the implementation cannot be loaded, a context
     *                   cannot be created, or no SSLHostConfig exists for the
     *                   default SSL host name
     */
    protected void initialiseSsl() throws Exception {
        if (isSSLEnabled()) {
            sslImplementation = SSLImplementation.getInstance(getSslImplementationName());
            for (SSLHostConfig sslHostConfig : sslHostConfigs.values()) {
                createSSLContext(sslHostConfig);
            }
            // Validate default SSLHostConfigName
            if (sslHostConfigs.get(getDefaultSSLHostConfigName()) == null) {
                throw new IllegalArgumentException(sm.getString("endpoint.noSslHostConfig",
                        getDefaultSSLHostConfigName(), getName()));
            }
        }
    }
    /**
     * Builds and initialises one SSLContext per certificate of the given
     * virtual host and attaches it to the certificate. The first certificate
     * processed also back-fills the host's effective enabled protocols and
     * ciphers (as resolved by the SSLUtil).
     *
     * @param sslHostConfig the virtual host configuration to process
     * @throws IllegalArgumentException wrapping any failure during context
     *                                  creation/initialisation
     */
    @Override
    protected void createSSLContext(SSLHostConfig sslHostConfig) throws IllegalArgumentException {
        boolean firstCertificate = true;
        for (SSLHostConfigCertificate certificate : sslHostConfig.getCertificates(true)) {
            SSLUtil sslUtil = sslImplementation.getSSLUtil(certificate);
            if (firstCertificate) {
                firstCertificate = false;
                sslHostConfig.setEnabledProtocols(sslUtil.getEnabledProtocols());
                sslHostConfig.setEnabledCiphers(sslUtil.getEnabledCiphers());
            }
            SSLContext sslContext;
            try {
                sslContext = sslUtil.createSSLContext(negotiableProtocols);
                sslContext.init(sslUtil.getKeyManagers(), sslUtil.getTrustManagers(), null);
            } catch (Exception e) {
                throw new IllegalArgumentException(e);
            }
            SSLSessionContext sessionContext = sslContext.getServerSessionContext();
            if (sessionContext != null) {
                sslUtil.configureSessionContext(sessionContext);
            }
            certificate.setSslContext(sslContext);
        }
    }
    /**
     * Releases the SSL contexts of every virtual host. No-op when SSL is
     * disabled.
     *
     * @throws Exception declared for subclass overrides; this implementation
     *                   does not throw
     */
    protected void destroySsl() throws Exception {
        if (isSSLEnabled()) {
            for (SSLHostConfig sslHostConfig : sslHostConfigs.values()) {
                releaseSSLContext(sslHostConfig);
            }
        }
    }
    /**
     * Destroys the SSLContext attached to each certificate of the given
     * virtual host, if any.
     */
    @Override
    protected void releaseSSLContext(SSLHostConfig sslHostConfig) {
        for (SSLHostConfigCertificate certificate : sslHostConfig.getCertificates(true)) {
            if (certificate.getSslContext() != null) {
                SSLContext sslContext = certificate.getSslContext();
                if (sslContext != null) {
                    sslContext.destroy();
                }
            }
        }
    }
    /**
     * Creates and configures a server-mode SSLEngine for a new connection:
     * resolves the SSLHostConfig from the SNI host name, selects the
     * certificate compatible with the client's cipher list, applies client
     * auth, enabled ciphers/protocols, cipher-order preference and (Java 9+)
     * the ALPN protocols common to client and server.
     *
     * @param sniHostName host name extracted from the SNI extension
     * @param clientRequestedCiphers ciphers offered by the client, in client
     *        preference order
     * @param clientRequestedApplicationProtocols ALPN protocols offered by the
     *        client
     * @return the configured engine
     */
    protected SSLEngine createSSLEngine(String sniHostName, List<Cipher> clientRequestedCiphers,
            List<String> clientRequestedApplicationProtocols) {
        SSLHostConfig sslHostConfig = getSSLHostConfig(sniHostName);
        SSLHostConfigCertificate certificate = selectCertificate(sslHostConfig, clientRequestedCiphers);
        SSLContext sslContext = certificate.getSslContext();
        if (sslContext == null) {
            throw new IllegalStateException(
                    sm.getString("endpoint.jsse.noSslContext", sniHostName));
        }
        SSLEngine engine = sslContext.createSSLEngine();
        switch (sslHostConfig.getCertificateVerification()) {
        case NONE:
            engine.setNeedClientAuth(false);
            engine.setWantClientAuth(false);
            break;
        case OPTIONAL:
        case OPTIONAL_NO_CA:
            engine.setWantClientAuth(true);
            break;
        case REQUIRED:
            engine.setNeedClientAuth(true);
            break;
        }
        engine.setUseClientMode(false);
        engine.setEnabledCipherSuites(sslHostConfig.getEnabledCiphers());
        engine.setEnabledProtocols(sslHostConfig.getEnabledProtocols());
        SSLParameters sslParameters = engine.getSSLParameters();
        sslParameters.setUseCipherSuitesOrder(sslHostConfig.getHonorCipherOrder());
        if (JreCompat.isJre9Available() && clientRequestedApplicationProtocols.size() > 0 &&
                negotiableProtocols.size() > 0) {
            // Only try to negotiate if both client and server have at least
            // one protocol in common
            // Note: Tomcat does not explicitly negotiate http/1.1
            // TODO: Is this correct? Should it change?
            List<String> commonProtocols = new ArrayList<>();
            commonProtocols.addAll(negotiableProtocols);
            commonProtocols.retainAll(clientRequestedApplicationProtocols);
            if (commonProtocols.size() > 0) {
                String[] commonProtocolsArray = commonProtocols.toArray(new String[commonProtocols.size()]);
                JreCompat.getInstance().setApplicationProtocols(sslParameters, commonProtocolsArray);
            }
        }
        // In case the getter returns a defensive copy
        engine.setSSLParameters(sslParameters);
        return engine;
    }
    /**
     * Picks the certificate to present: with a single configured certificate
     * that one is returned directly; otherwise the candidate cipher list
     * (ordered by server or client preference depending on honorCipherOrder)
     * is scanned for the first cipher whose authentication type matches a
     * configured certificate.
     */
    private SSLHostConfigCertificate selectCertificate(
            SSLHostConfig sslHostConfig, List<Cipher> clientCiphers) {
        Set<SSLHostConfigCertificate> certificates = sslHostConfig.getCertificates(true);
        if (certificates.size() == 1) {
            return certificates.iterator().next();
        }
        LinkedHashSet<Cipher> serverCiphers = sslHostConfig.getCipherList();
        List<Cipher> candidateCiphers = new ArrayList<>();
        if (sslHostConfig.getHonorCipherOrder()) {
            candidateCiphers.addAll(serverCiphers);
            candidateCiphers.retainAll(clientCiphers);
        } else {
            candidateCiphers.addAll(clientCiphers);
            candidateCiphers.retainAll(serverCiphers);
        }
        Iterator<Cipher> candidateIter = candidateCiphers.iterator();
        while (candidateIter.hasNext()) {
            Cipher candidate = candidateIter.next();
            for (SSLHostConfigCertificate certificate : certificates) {
                if (certificate.getType().isCompatibleWith(candidate.getAu())) {
                    return certificate;
                }
            }
        }
        // No matches. Just return the first certificate. The handshake will
        // then fail due to no matching ciphers.
        return certificates.iterator().next();
    }
    /**
     * ALPN support requires TLS to be enabled AND the configured
     * SSLImplementation to support it.
     */
    @Override
    public boolean isAlpnSupported() {
        // ALPN requires TLS so if TLS is not enabled, ALPN cannot be supported
        if (!isSSLEnabled()) {
            return false;
        }
        // Depends on the SSLImplementation.
        SSLImplementation sslImplementation;
        try {
            sslImplementation = SSLImplementation.getInstance(getSslImplementationName());
        } catch (ClassNotFoundException e) {
            // Ignore the exception. It will be logged when trying to start the
            // end point.
            return false;
        }
        return sslImplementation.isAlpnSupported();
    }
    /**
     * Clears the SSLContext reference from every configured certificate so the
     * contexts can be garbage collected / recreated on the next bind.
     */
    @Override
    public void unbind() throws Exception {
        for (SSLHostConfig sslHostConfig : sslHostConfigs.values()) {
            for (SSLHostConfigCertificate certificate : sslHostConfig.getCertificates(true)) {
                certificate.setSslContext(null);
            }
        }
    }
    /** Returns the channel this endpoint accepts connections on, or null if not bound. */
    protected abstract NetworkChannel getServerSocket();
    /**
     * Returns the local address of the server socket, or {@code null} when the
     * socket is not yet created or its address is not an InetSocketAddress.
     */
    @Override
    protected final InetSocketAddress getLocalAddress() throws IOException {
        NetworkChannel serverSock = getServerSocket();
        if (serverSock == null) {
            return null;
        }
        SocketAddress sa = serverSock.getLocalAddress();
        if (sa instanceof InetSocketAddress) {
            return (InetSocketAddress) sa;
        }
        return null;
    }
}
| apache-2.0 |
awholegunch/loom | integration-testing/src/test/java/com/continuuity/test/pagetest/CreateHardwaretypeTest.java | 3926 | /**
* Copyright 2012-2014, Continuuity, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.continuuity.test.pagetest;
import com.continuuity.loom.spec.HardwareType;
import com.continuuity.test.Constants;
import com.continuuity.test.GenericTest;
import com.continuuity.test.drivers.Global;
import com.continuuity.test.input.ExampleReader;
import org.junit.AfterClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import java.util.List;
import static com.continuuity.test.drivers.Global.globalDriver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Test GET v1/loom/hardwaretypes/hardwaretype/<hardwaretype-id>
*/
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class CreateHardwaretypeTest extends GenericTest {
    // Reads the example hardware-type definitions used to populate the form.
    private static final ExampleReader EXAMPLE_READER = new ExampleReader();
    /**
     * Fills in the create-hardwaretype form with the "small" example (name,
     * description and one flavor per provider) and submits it, expecting a
     * redirect to the hardware types listing page.
     * Runs first due to {@code @FixMethodOrder(NAME_ASCENDING)}.
     */
    @Test
    public void test_01_submitSmall() throws Exception {
        globalDriver.get(Constants.HARDWARETYPE_CREATE_URI);
        HardwareType hardwareType = EXAMPLE_READER.getHardwareTypes(Constants.HARDWARETYPES_PATH).get("small");
        WebElement inputName = globalDriver.findElement(By.cssSelector("#inputName"));
        inputName.sendKeys(hardwareType.getName());
        WebElement inputDescription = globalDriver.findElement(By.cssSelector("#inputDescription"));
        inputDescription.sendKeys(hardwareType.getDescription());
        WebElement addProvider = globalDriver.findElement(By.cssSelector("#add-provider"));
        // Two clicks, but three .provider-entry rows are used below —
        // presumably the form starts with one empty row; confirm against the page template.
        addProvider.click();
        addProvider.click();
        List<WebElement> providerEntries = globalDriver.findElements(By.cssSelector(".provider-entry"));
        providerEntries.get(0).findElement(By.cssSelector("select")).sendKeys("joyent");
        providerEntries.get(0).findElement(By.cssSelector("input")).sendKeys(
            hardwareType.getProviderMap().get("joyent").get("flavor"));
        providerEntries.get(1).findElement(By.cssSelector("select")).sendKeys("rackspace");
        providerEntries.get(1).findElement(By.cssSelector("input")).sendKeys(
            hardwareType.getProviderMap().get("rackspace").get("flavor"));
        providerEntries.get(2).findElement(By.cssSelector("select")).sendKeys("openstack");
        providerEntries.get(2).findElement(By.cssSelector("input")).sendKeys(
            hardwareType.getProviderMap().get("openstack").get("flavor"));
        globalDriver.findElement(By.cssSelector("#create-hardwaretype-form")).submit();
        Global.driverWait(1);
        // Successful creation redirects to the hardware types listing.
        assertEquals(Constants.HARDWARETYPES_URL, globalDriver.getCurrentUrl());
    }
    /**
     * Submits the form with only name and description (no provider flavors)
     * and expects the error notification element to become visible instead of
     * a redirect.
     */
    @Test
    public void test_02_submitFail() throws Exception {
        globalDriver.get(Constants.HARDWARETYPE_CREATE_URI);
        // Notification must start hidden so the later check is meaningful.
        assertFalse(globalDriver.findElement(By.cssSelector("#notification")).isDisplayed());
        WebElement inputName = globalDriver.findElement(By.cssSelector("#inputName"));
        inputName.sendKeys("asdf");
        WebElement inputDescription = globalDriver.findElement(By.cssSelector("#inputDescription"));
        inputDescription.sendKeys("asdfsadf");
        globalDriver.findElement(By.cssSelector("#create-hardwaretype-form")).submit();
        Global.driverWait(1);
        assertTrue(globalDriver.findElement(By.cssSelector("#notification")).isDisplayed());
    }
    /** Shuts down the shared WebDriver once all tests in this class have run. */
    @AfterClass
    public static void tearDown() {
        closeDriver();
    }
}
| apache-2.0 |
lawrencezcc/Moneco-V6 | app/src/main/java/Entity/ClimateWatchMedia.java | 6828 | package Entity;
/**
* Created by liangchenzhou on 16/09/16.
*/
/**
 * Mutable data holder for a single ClimateWatch species/media record:
 * taxonomy fields, image URLs, observation guidance text and status flags.
 * <p>
 * Every property is a {@link String} exposed through a standard getter/setter
 * pair. The long constructor populates all fields except {@link #allergic}
 * and {@link #diffE}, which must be set individually.
 */
public class ClimateWatchMedia {
    // Taxonomy / naming.
    String climateKingdom;
    String climateScientific;
    String climateCommon;
    // Imagery.
    String generalImageUrl;
    String sizeIamgeUrl;
    // Dynamic display slots.
    String dynamicA;
    String dynamicB;
    String dynamicC;
    String cwIconText;
    String description;
    // Distinguishing-feature slots.
    String diffA;
    String diffB;
    String diffC;
    String diffD;
    // Observation guidance.
    String whatToObserve;
    String whenToObserve;
    String whereToObserve;
    String distributionUrl;
    // Status.
    String nativeStatus;
    String conservationStatus;
    // Classification.
    String phylum;
    String climateClass;
    String order;
    String family;
    String genus;
    // Not covered by the full constructor.
    String allergic;
    String diffE;

    /** Creates an empty record; all properties start as {@code null}. */
    public ClimateWatchMedia() {}

    /**
     * Creates a fully populated record. Covers every property except
     * {@code allergic} and {@code diffE}.
     */
    public ClimateWatchMedia(String climateKingdom, String climateScientific, String climateCommon, String generalImageUrl, String sizeIamgeUrl,
                             String dynamicA, String dynamicB, String dynamicC, String cwIconText, String description, String diffA, String diffB,
                             String diffC, String diffD, String whatToObserve, String whenToObserve, String whereToObserve, String distributionUrl,
                             String nativeStatus, String conservationStatus, String phylum, String climateClass, String order, String family, String genus) {
        this.climateKingdom = climateKingdom;
        this.climateScientific = climateScientific;
        this.climateCommon = climateCommon;
        this.generalImageUrl = generalImageUrl;
        this.sizeIamgeUrl = sizeIamgeUrl;
        this.dynamicA = dynamicA;
        this.dynamicB = dynamicB;
        this.dynamicC = dynamicC;
        this.cwIconText = cwIconText;
        this.description = description;
        this.diffA = diffA;
        this.diffB = diffB;
        this.diffC = diffC;
        this.diffD = diffD;
        this.whatToObserve = whatToObserve;
        this.whenToObserve = whenToObserve;
        this.whereToObserve = whereToObserve;
        this.distributionUrl = distributionUrl;
        this.nativeStatus = nativeStatus;
        this.conservationStatus = conservationStatus;
        this.phylum = phylum;
        this.climateClass = climateClass;
        this.order = order;
        this.family = family;
        this.genus = genus;
    }

    // Accessors, in field declaration order.
    public String getClimateKingdom() { return climateKingdom; }
    public void setClimateKingdom(String climateKingdom) { this.climateKingdom = climateKingdom; }
    public String getClimateScientific() { return climateScientific; }
    public void setClimateScientific(String climateScientific) { this.climateScientific = climateScientific; }
    public String getClimateCommon() { return climateCommon; }
    public void setClimateCommon(String climateCommon) { this.climateCommon = climateCommon; }
    public String getGeneralImageUrl() { return generalImageUrl; }
    public void setGeneralImageUrl(String generalImageUrl) { this.generalImageUrl = generalImageUrl; }
    public String getSizeIamgeUrl() { return sizeIamgeUrl; }
    public void setSizeIamgeUrl(String sizeIamgeUrl) { this.sizeIamgeUrl = sizeIamgeUrl; }
    public String getDynamicA() { return dynamicA; }
    public void setDynamicA(String dynamicA) { this.dynamicA = dynamicA; }
    public String getDynamicB() { return dynamicB; }
    public void setDynamicB(String dynamicB) { this.dynamicB = dynamicB; }
    public String getDynamicC() { return dynamicC; }
    public void setDynamicC(String dynamicC) { this.dynamicC = dynamicC; }
    public String getCwIconText() { return cwIconText; }
    public void setCwIconText(String cwIconText) { this.cwIconText = cwIconText; }
    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }
    public String getDiffA() { return diffA; }
    public void setDiffA(String diffA) { this.diffA = diffA; }
    public String getDiffB() { return diffB; }
    public void setDiffB(String diffB) { this.diffB = diffB; }
    public String getDiffC() { return diffC; }
    public void setDiffC(String diffC) { this.diffC = diffC; }
    public String getDiffD() { return diffD; }
    public void setDiffD(String diffD) { this.diffD = diffD; }
    public String getWhatToObserve() { return whatToObserve; }
    public void setWhatToObserve(String whatToObserve) { this.whatToObserve = whatToObserve; }
    public String getWhenToObserve() { return whenToObserve; }
    public void setWhenToObserve(String whenToObserve) { this.whenToObserve = whenToObserve; }
    public String getWhereToObserve() { return whereToObserve; }
    public void setWhereToObserve(String whereToObserve) { this.whereToObserve = whereToObserve; }
    public String getDistributionUrl() { return distributionUrl; }
    public void setDistributionUrl(String distributionUrl) { this.distributionUrl = distributionUrl; }
    public String getNativeStatus() { return nativeStatus; }
    public void setNativeStatus(String nativeStatus) { this.nativeStatus = nativeStatus; }
    public String getConservationStatus() { return conservationStatus; }
    public void setConservationStatus(String conservationStatus) { this.conservationStatus = conservationStatus; }
    public String getPhylum() { return phylum; }
    public void setPhylum(String phylum) { this.phylum = phylum; }
    public String getClimateClass() { return climateClass; }
    public void setClimateClass(String climateClass) { this.climateClass = climateClass; }
    public String getOrder() { return order; }
    public void setOrder(String order) { this.order = order; }
    public String getFamily() { return family; }
    public void setFamily(String family) { this.family = family; }
    public String getGenus() { return genus; }
    public void setGenus(String genus) { this.genus = genus; }
    public String getAllergic() { return allergic; }
    public void setAllergic(String allergic) { this.allergic = allergic; }
    public String getDiffE() { return diffE; }
    public void setDiffE(String diffE) { this.diffE = diffE; }
}
| apache-2.0 |
capergroup/bayou | src/main/java/edu/rice/cs/caper/bayou/core/dsl/DLoop.java | 7305 | /*
Copyright 2017 Rice University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package edu.rice.cs.caper.bayou.core.dsl;
import edu.rice.cs.caper.bayou.core.dom_driver.Visitor;
import edu.rice.cs.caper.bayou.core.synthesizer.*;
import edu.rice.cs.caper.bayou.core.synthesizer.Type;
import org.eclipse.jdt.core.dom.*;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * DSL node representing a while-loop: a condition made up of API calls
 * (combined with logical AND) and a body of arbitrary DSL nodes. Can be
 * synthesized into an Eclipse JDT {@link WhileStatement}.
 */
public class DLoop extends DASTNode {
    // Node type tag used by the serialization format.
    String node = "DLoop";
    // API calls whose results form the loop condition.
    List<DAPICall> _cond;
    // Statements making up the loop body.
    List<DASTNode> _body;
    /** Creates an empty loop (no condition calls, empty body). */
    public DLoop() {
        this._cond = new ArrayList<>();
        this._body = new ArrayList<>();
        this.node = "DLoop";
    }
    /** Creates a loop with the given condition calls and body nodes. */
    public DLoop(List<DAPICall> cond, List<DASTNode> _body) {
        this._cond = cond;
        this._body = _body;
        this.node = "DLoop";
    }
    /**
     * Extends the API-call sequences with this loop's calls: the condition,
     * then one unrolling of (body, condition).
     *
     * @throws TooManySequencesException if {@code soFar} already holds
     *         {@code max} or more sequences
     * @throws TooLongSequenceException propagated from nested updates when a
     *         sequence exceeds {@code max_length}
     */
    @Override
    public void updateSequences(List<Sequence> soFar, int max, int max_length) throws TooManySequencesException, TooLongSequenceException {
        if (soFar.size() >= max)
            throw new TooManySequencesException();
        for (DAPICall call : _cond)
            call.updateSequences(soFar, max, max_length);
        int num_unrolls = 1;
        for (int i = 0; i < num_unrolls; i++) {
            for (DASTNode node : _body)
                node.updateSequences(soFar, max, max_length);
            for (DAPICall call : _cond)
                call.updateSequences(soFar, max, max_length);
        }
    }
    /** Counts condition calls plus the statements of all body nodes. */
    @Override
    public int numStatements() {
        int num = _cond.size();
        for (DASTNode b : _body)
            num += b.numStatements();
        return num;
    }
    /** Counts this loop plus any loops nested in the body. */
    @Override
    public int numLoops() {
        int num = 1; // this loop
        for (DASTNode b : _body)
            num += b.numLoops();
        return num;
    }
    /** Counts branches in the body (the loop itself is not a branch). */
    @Override
    public int numBranches() {
        int num = 0;
        for (DASTNode b : _body)
            num += b.numBranches();
        return num;
    }
    /** Counts try/catch constructs in the body. */
    @Override
    public int numExcepts() {
        int num = 0;
        for (DASTNode b : _body)
            num += b.numExcepts();
        return num;
    }
    /** Returns the set of all API calls in the condition and (recursively) the body. */
    @Override
    public Set<DAPICall> bagOfAPICalls() {
        Set<DAPICall> bag = new HashSet<>();
        bag.addAll(_cond);
        for (DASTNode b : _body)
            bag.addAll(b.bagOfAPICalls());
        return bag;
    }
    /** Returns the union of exception types thrown by condition calls and body nodes. */
    @Override
    public Set<Class> exceptionsThrown() {
        Set<Class> ex = new HashSet<>();
        for (DAPICall c : _cond)
            ex.addAll(c.exceptionsThrown());
        for (DASTNode b : _body)
            ex.addAll(b.exceptionsThrown());
        return ex;
    }
    // Eliminated variables are ignored for loops; delegates to exceptionsThrown().
    @Override
    public Set<Class> exceptionsThrown(Set<String> eliminatedVars) {
        return this.exceptionsThrown();
    }
    /** Two loops are equal iff their condition lists and body lists are equal. */
    @Override
    public boolean equals(Object o) {
        if (o == null || ! (o instanceof DLoop))
            return false;
        DLoop loop = (DLoop) o;
        return _cond.equals(loop._cond) && _body.equals(loop._body);
    }
    @Override
    public int hashCode() {
        return 7* _cond.hashCode() + 17* _body.hashCode();
    }
    @Override
    public String toString() {
        return "while (\n" + _cond + "\n) {\n" + _body + "\n}";
    }
    /**
     * Synthesizes a JDT while-statement. Each condition call becomes a clause:
     * calls that do not return boolean are compared against {@code null} (or
     * {@code 0} for primitives); multiple clauses are AND-ed together, and an
     * empty condition triggers a search for a boolean variable. The body is
     * synthesized in a fresh scope which is then joined back into the parent.
     *
     * @throws SynthesisException if a condition call is malformed (returns
     *         void) or synthesis of any part fails
     */
    @Override
    public WhileStatement synthesize(Environment env) throws SynthesisException {
        AST ast = env.ast();
        WhileStatement statement = ast.newWhileStatement();
        /* synthesize the condition */
        List<Expression> clauses = new ArrayList<>();
        for (DAPICall call : _cond) {
            ASTNode synth = call.synthesize(env);
            if (! (synth instanceof Assignment)) /* a call that returns void cannot be in condition */
                throw new SynthesisException(SynthesisException.MalformedASTFromNN);
            Assignment assignment = (Assignment) synth;
            ParenthesizedExpression pAssignment = ast.newParenthesizedExpression();
            pAssignment.setExpression(assignment);
            // if the method does not return a boolean, add != null or != 0 to the condition
            if (call.method == null || (!call.method.getReturnType().equals(Boolean.class) &&
                    !call.method.getReturnType().equals(boolean.class))) {
                InfixExpression notEqualsNull = ast.newInfixExpression();
                notEqualsNull.setLeftOperand(pAssignment);
                notEqualsNull.setOperator(InfixExpression.Operator.NOT_EQUALS);
                if (call.method != null && call.method.getReturnType().isPrimitive())
                    notEqualsNull.setRightOperand(ast.newNumberLiteral("0")); // primitive but not boolean
                else // some object
                    notEqualsNull.setRightOperand(ast.newNullLiteral());
                clauses.add(notEqualsNull);
            }
            else
                clauses.add(pAssignment);
        }
        switch (clauses.size()) {
            case 0:
                // No condition calls: search the environment for a boolean to loop on.
                SearchTarget target = new SearchTarget(
                        new Type(ast.newPrimitiveType(PrimitiveType.toCode("boolean")), boolean.class));
                target.setSingleUseVariable(true);
                Expression var = env.search(target).getExpression();
                statement.setExpression(var);
                break;
            case 1:
                statement.setExpression(clauses.get(0));
                break;
            default:
                // Left-fold the clauses into a chain of && expressions.
                InfixExpression expr = ast.newInfixExpression();
                expr.setLeftOperand(clauses.get(0));
                expr.setOperator(InfixExpression.Operator.CONDITIONAL_AND);
                expr.setRightOperand(clauses.get(1));
                for (int i = 2; i < clauses.size(); i++) {
                    InfixExpression joined = ast.newInfixExpression();
                    joined.setLeftOperand(expr);
                    joined.setOperator(InfixExpression.Operator.CONDITIONAL_AND);
                    joined.setRightOperand(clauses.get(i));
                    expr = joined;
                }
                statement.setExpression(expr);
        }
        /* synthesize the body under a new scope */
        env.pushScope();
        Block body = ast.newBlock();
        for (DASTNode dNode : _body) {
            ASTNode aNode = dNode.synthesize(env);
            if (aNode instanceof Statement)
                body.statements().add(aNode);
            else
                body.statements().add(ast.newExpressionStatement((Expression) aNode));
        }
        statement.setBody(body);
        /* join with parent scope itself (the "sub-scope" of a loop if condition was false) */
        List<Scope> scopes = new ArrayList<>();
        scopes.add(env.popScope());
        scopes.add(new Scope(env.getScope()));
        env.getScope().join(scopes);
        return statement;
    }
}
| apache-2.0 |
jiwhiz/jiwhizblogjpa | domain-post/src/main/java/com/jiwhiz/domain/post/CommentPostRepository.java | 1808 | /*
* Copyright 2013-2014 JIWHIZ Consulting Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jiwhiz.domain.post;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import com.jiwhiz.domain.account.UserAccount;
/**
* JPA Repository for CommentPost entity.
*
* @author Yuan Ji
*
*/
public interface CommentPostRepository extends JpaRepository<CommentPost, String> {
    /** Returns a page of comments attached to the given blog post, in store order. */
    Page<CommentPost> findByBlogPost(BlogPost blogPost, Pageable pageable);
    /** Returns a page of comments with the given status, newest first. */
    Page<CommentPost> findByStatusOrderByCreatedTimeDesc(CommentStatusType status, Pageable pageable);
    /** Counts comments on the given post that have the given status. */
    int countByBlogPostAndStatus(BlogPost blogPost, CommentStatusType status);
    /** Counts all comments on the given post regardless of status. */
    int countByBlogPost(BlogPost blogPost);
    /** Returns a page of comments on the given post, newest first. */
    Page<CommentPost> findByBlogPostOrderByCreatedTimeDesc(BlogPost blogPost, Pageable pageable);
    /** Returns a page of the author's comments with the given status, newest first. */
    Page<CommentPost> findByAuthorAndStatusOrderByCreatedTimeDesc(UserAccount author, CommentStatusType status, Pageable pageable);
    /** Returns a page of all comments by the given author, newest first. */
    Page<CommentPost> findByAuthorOrderByCreatedTimeDesc(UserAccount author, Pageable pageable);
    /** Returns a page of comments on the given post with the given status, oldest first (display order). */
    Page<CommentPost> findByBlogPostAndStatusOrderByCreatedTimeAsc(BlogPost blogPost, CommentStatusType status, Pageable pageable);
}
| apache-2.0 |
sagiegurari/x2fax | src/test/java/org/fax4j/x2fax/cli/CLI2FaxRunnerTest.java | 2336 | package org.fax4j.x2fax.cli;
import java.io.File;
import java.util.Properties;
import org.fax4j.FaxClient;
import org.fax4j.FaxJob;
import org.fax4j.bridge.FaxBridge;
import org.fax4j.bridge.process.Process2FaxBridge;
import org.fax4j.spi.FaxClientSpi;
import org.fax4j.util.IOHelper;
import org.fax4j.util.ReflectionHelper;
import org.fax4j.x2fax.test.TestUtil;
import org.fax4j.x2fax.test.TestUtil.EmptyFaxClientSpi;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Test Class
*
* @author Sagie Gur-Ari
*/
public class CLI2FaxRunnerTest
{
    /**The runner under test; created fresh per test with a stubbed configuration.*/
    private CLI2FaxRunner runner;
    /**
     * Sets up the test objects. Overrides the fax bridge configuration so the
     * runner uses the empty (no-op) fax client SPI instead of real settings.
     *
     * @throws Exception
     * Any exception
     */
    @Before
    public void setUp() throws Exception
    {
        this.runner=new CLI2FaxRunner()
        {
            @Override
            protected Properties getFaxBridgeConfiguration()
            {
                return TestUtil.createEmptyFaxClientSpiConfiguration(null);
            }
        };
    }
    /**
     * Verifies that initialization wires up a Process2FaxBridge whose fax
     * client is backed by the EmptyFaxClientSpi (the SPI is read out of the
     * private FAX_CLIENT_SPI field via reflection).
     *
     * @throws Exception
     * Any exception
     */
    @Test
    public void initializeTest() throws Exception
    {
        this.runner.initialize();
        FaxBridge faxBridge=this.runner.flowHelper.getFaxBridge();
        Assert.assertNotNull(faxBridge);
        Assert.assertEquals(Process2FaxBridge.class,faxBridge.getClass());
        FaxClient faxClient=faxBridge.getFaxClient();
        FaxClientSpi faxClientSpi=(FaxClientSpi)ReflectionHelper.getField(FaxClient.class,"FAX_CLIENT_SPI").get(faxClient);
        Assert.assertNotNull(faxClientSpi);
        Assert.assertEquals(EmptyFaxClientSpi.class,faxClientSpi.getClass());
    }
    /**
     * Verifies the default SPI type is null (no explicit type configured).
     *
     * @throws Exception
     * Any exception
     */
    @Test
    public void getFaxClientSpiTypeTest() throws Exception
    {
        String output=this.runner.getFaxClientSpiType();
        Assert.assertNull(output);
    }
    /**
     * Submits a fax job for a temporary text file via CLI-style arguments and
     * checks that a job referencing an existing file comes back.
     *
     * @throws Exception
     * Any exception
     */
    @Test
    public void submitFaxJobTest() throws Exception
    {
        this.runner.initialize();
        File file=File.createTempFile("temp_",".txt");
        file.deleteOnExit();
        IOHelper.writeTextFile("abc",file);
        String[] input=new String[]{"-target_address","12345","-file",file.getPath()};
        FaxJob faxJob=this.runner.submitFaxJob(input);
        Assert.assertNotNull(faxJob);
        Assert.assertNotNull(faxJob.getFile());
        Assert.assertTrue(faxJob.getFile().exists());
        file.delete();
    }
} | apache-2.0 |
cniweb/ant-contrib | ant-contrib/test/resources/design/src/mod/arraydepend3/ClassDependsOnArray.java | 844 | /*
* Copyright (c) 2001-2004 Ant-Contrib project. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created on Jan 9, 2005
*/
package mod.arraydepend3;
/**
*
* @author dhiller
*/
public class ClassDependsOnArray {
    // Intentionally empty: this fixture class exists only so the ant-contrib
    // design task can analyze its dependencies. The class name suggests an
    // array dependency is exercised — presumably elsewhere in the fixture,
    // since no array appears in this body; TODO confirm against the test setup.
    public void testArray() throws Exception {
    }
}
| apache-2.0 |