text
stringlengths 7
1.01M
|
|---|
// This file was generated by Mendix Modeler.
//
// WARNING: Only the following code will be retained when actions are regenerated:
// - the import list
// - the code between BEGIN USER CODE and END USER CODE
// - the code between BEGIN EXTRA CODE and END EXTRA CODE
// Other code you write will be lost the next time you deploy the project.
// Special characters, e.g., é, ö, à, etc. are supported in comments.
package boxconnector.actions;
import com.mendix.systemwideinterfaces.core.IContext;
import com.mendix.webui.CustomJavaAction;
import com.mendix.systemwideinterfaces.core.IMendixObject;
import static boxconnector.proxies.microflows.Microflows.deleteFileImpl;
/**
 * Moves a file to the Box trash.
 *
 * Required
 * BoxFile: the file to delete.
 * _id attribute is required.
 *
 * Optional
 * IfMatchEtag: The etag of the file. If-Match header to prevent race conditions.
 */
public class DeleteFile extends CustomJavaAction<java.lang.Boolean>
{
    private IMendixObject __BoxFileParam;
    private boxconnector.proxies.BoxFile BoxFileParam;
    private java.lang.String IfMatchEtag;

    public DeleteFile(IContext context, IMendixObject BoxFileParam, java.lang.String IfMatchEtag)
    {
        super(context);
        this.__BoxFileParam = BoxFileParam;
        this.IfMatchEtag = IfMatchEtag;
    }

    @Override
    public java.lang.Boolean executeAction() throws Exception
    {
        // Wrap the raw IMendixObject in its typed proxy (null stays null).
        this.BoxFileParam = (__BoxFileParam != null)
            ? boxconnector.proxies.BoxFile.initialize(getContext(), __BoxFileParam)
            : null;
        // BEGIN USER CODE
        // Delegate the actual delete to the microflow implementation.
        return deleteFileImpl(getContext(), BoxFileParam, IfMatchEtag);
        // END USER CODE
    }

    /**
     * Returns a string representation of this action
     */
    @Override
    public java.lang.String toString()
    {
        return "DeleteFile";
    }

    // BEGIN EXTRA CODE
    // END EXTRA CODE
}
|
/*
* This file was automatically generated by EvoSuite
* Sat Dec 12 18:03:26 GMT 2015
*/
package org.exolab.jms.config;
import static org.junit.Assert.*;
import org.junit.Test;
import java.util.Enumeration;
import org.evosuite.runtime.EvoRunner;
import org.evosuite.runtime.EvoRunnerParameters;
import org.evosuite.runtime.testdata.EvoSuiteFile;
import org.evosuite.runtime.testdata.EvoSuiteLocalAddress;
import org.evosuite.runtime.testdata.EvoSuiteRemoteAddress;
import org.evosuite.runtime.testdata.EvoSuiteURL;
import org.exolab.jms.config.ConnectionFactories;
import org.exolab.jms.config.ConnectionFactory;
import org.exolab.jms.config.QueueConnectionFactory;
import org.exolab.jms.config.TopicConnectionFactory;
import org.exolab.jms.config.XAConnectionFactory;
import org.exolab.jms.config.XAQueueConnectionFactory;
import org.exolab.jms.config.XATopicConnectionFactory;
import org.junit.runner.RunWith;
import org.xml.sax.ContentHandler;
import org.xml.sax.ext.DefaultHandler2;
@RunWith(EvoRunner.class) @EvoRunnerParameters(mockJVMNonDeterminism = true, useVFS = true, useVNET = true, resetStaticState = true)
public class ConnectionFactories_ESTest extends ConnectionFactories_ESTest_scaffolding {
// setXATopicConnectionFactory(array) replaces the list; count follows the array length.
@Test
public void test00() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    XATopicConnectionFactory[] replacements = new XATopicConnectionFactory[3];
    factories.setXATopicConnectionFactory(replacements);
    assertEquals(3, factories.getXATopicConnectionFactoryCount());
}
// Indexed setter rejects an index beyond the current (empty) XATopic list.
@Test
public void test01() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    XATopicConnectionFactory factory = new XATopicConnectionFactory();
    try {
        factories.setXATopicConnectionFactory(28, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// Index 0 is still out of bounds on an empty XATopic list.
@Test
public void test02() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.setXATopicConnectionFactory(0, (XATopicConnectionFactory) null);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// Negative index on the XATopic indexed setter is rejected.
@Test
public void test03() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    XATopicConnectionFactory factory = new XATopicConnectionFactory();
    try {
        factories.setXATopicConnectionFactory(-20, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// setXAQueueConnectionFactory(array) replaces the list; count follows the array length.
@Test
public void test04() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    XAQueueConnectionFactory[] replacements = new XAQueueConnectionFactory[2];
    factories.setXAQueueConnectionFactory(replacements);
    assertEquals(2, factories.getXAQueueConnectionFactoryCount());
}
// Indexed XAQueue setter rejects an index far past the end of the (empty) list.
@Test
public void test05() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.setXAQueueConnectionFactory(1264, (XAQueueConnectionFactory) null);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// Index 0 is still out of bounds on an empty XAQueue list (null element, same as
// passing an unfilled array slot).
@Test
public void test06() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.setXAQueueConnectionFactory(0, (XAQueueConnectionFactory) null);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// Indexed XAConnectionFactory setter rejects an index past the end of the list.
@Test
public void test07() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.setXAConnectionFactory(8, (XAConnectionFactory) null);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// Index 0 is still out of bounds on an empty XAConnectionFactory list.
@Test
public void test08() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.setXAConnectionFactory(0, (XAConnectionFactory) null);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// Negative index on the XAConnectionFactory indexed setter is rejected.
@Test
public void test09() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    XAConnectionFactory factory = new XAConnectionFactory();
    try {
        factories.setXAConnectionFactory(-950, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// setTopicConnectionFactory(array) replaces the list; count follows the array length.
@Test
public void test10() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    TopicConnectionFactory[] replacements = new TopicConnectionFactory[9];
    factories.setTopicConnectionFactory(replacements);
    assertEquals(9, factories.getTopicConnectionFactoryCount());
}
// Indexed QueueConnectionFactory setter rejects an index past the end of the list.
@Test
public void test11() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    QueueConnectionFactory factory = new QueueConnectionFactory();
    try {
        factories.setQueueConnectionFactory(47, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// Index 0 is still out of bounds on an empty QueueConnectionFactory list.
@Test
public void test12() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    QueueConnectionFactory factory = new QueueConnectionFactory();
    try {
        factories.setQueueConnectionFactory(0, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// Negative index on the QueueConnectionFactory indexed setter is rejected.
@Test
public void test13() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    QueueConnectionFactory factory = new QueueConnectionFactory();
    try {
        factories.setQueueConnectionFactory(-266, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// Indexed ConnectionFactory setter rejects an index past the end of the list.
@Test
public void test14() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    ConnectionFactory factory = new ConnectionFactory();
    try {
        factories.setConnectionFactory(286, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// Index 0 is still out of bounds on an empty ConnectionFactory list.
@Test
public void test15() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    ConnectionFactory factory = new ConnectionFactory();
    try {
        factories.setConnectionFactory(0, factory);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// XATopic getter rejects an index far past the end of the (empty) list.
@Test
public void test16() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXATopicConnectionFactory(1571);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// XATopic getter rejects index 0 on an empty list.
@Test
public void test17() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXATopicConnectionFactory(0);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// XATopic getter rejects a negative index.
@Test
public void test18() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXATopicConnectionFactory(-2601);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// XAQueue getter rejects an index far past the end of the (empty) list.
@Test
public void test19() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXAQueueConnectionFactory(1310);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// XAQueue getter rejects index 0 on an empty list.
@Test
public void test20() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXAQueueConnectionFactory(0);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// XAQueue getter rejects a negative index.
@Test
public void test21() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXAQueueConnectionFactory(-97);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// XAConnectionFactory getter rejects index 0 on an empty list.
@Test
public void test22() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXAConnectionFactory(0);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// XAConnectionFactory getter rejects an index far past the end of the list.
@Test
public void test23() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXAConnectionFactory(1558);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// XAConnectionFactory getter rejects a negative index.
@Test
public void test24() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getXAConnectionFactory(-244);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// TopicConnectionFactory getter rejects index 0 on an empty list.
@Test
public void test25() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getTopicConnectionFactory(0);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// TopicConnectionFactory getter rejects an index past the end of the list.
@Test
public void test26() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getTopicConnectionFactory(8);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// TopicConnectionFactory getter rejects a negative index.
@Test
public void test27() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getTopicConnectionFactory(-2847);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// QueueConnectionFactory getter rejects index 0 on an empty list.
@Test
public void test28() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getQueueConnectionFactory(0);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// QueueConnectionFactory getter rejects an index past the end of the list.
@Test
public void test29() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getQueueConnectionFactory(70);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// QueueConnectionFactory getter rejects a negative index.
@Test
public void test30() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getQueueConnectionFactory(-853);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// ConnectionFactory getter rejects an index far past the end of the list.
@Test
public void test31() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getConnectionFactory(2273);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// ConnectionFactory getter rejects index 0 on an empty list.
@Test
public void test32() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getConnectionFactory(0);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: 0, Size: 0
    }
}
// ConnectionFactory getter rejects a negative index.
@Test
public void test33() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.getConnectionFactory(-1389);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected: exception carries no message (getMessage() is null)
    }
}
// Adding a TopicConnectionFactory succeeds, but the configuration as a whole
// is still invalid (regression capture of isValid()).
@Test
public void test34() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    TopicConnectionFactory factory = new TopicConnectionFactory();
    factories.addTopicConnectionFactory(factory);
    assertFalse(factories.isValid());
}
// Indexed add rejects a negative insertion index.
@Test
public void test35() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.addXATopicConnectionFactory(-990, (XATopicConnectionFactory) null);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: -990, Size: 0
    }
}
// clearConnectionFactory() empties the ConnectionFactory list and leaves the
// other lists untouched.
// Fix: the generated test cleared the ConnectionFactory list but only asserted
// the unrelated XAQueue count, so the cleared list was never verified.
@Test
public void test36() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    factories.clearConnectionFactory();
    // The list that was actually cleared must be empty.
    assertEquals(0, factories.getConnectionFactoryCount());
    // Original (weaker) regression assertion, kept for compatibility.
    assertEquals(0, factories.getXAQueueConnectionFactoryCount());
}
// Adding an XATopicConnectionFactory succeeds, but the configuration as a whole
// is still invalid (regression capture of isValid()).
@Test
public void test37() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    XATopicConnectionFactory factory = new XATopicConnectionFactory();
    factories.addXATopicConnectionFactory(factory);
    assertFalse(factories.isValid());
}
// addQueueConnectionFactory(factory) appends to the queue list only.
// Fix: the generated test added to the queue list but only asserted the
// unrelated XA count, so the add itself was never verified.
@Test
public void test38() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    QueueConnectionFactory factory = new QueueConnectionFactory();
    factories.addQueueConnectionFactory(factory);
    // The list that was appended to must have grown by one.
    assertEquals(1, factories.getQueueConnectionFactoryCount());
    // Original (weaker) regression assertion, kept for compatibility.
    assertEquals(0, factories.getXAConnectionFactoryCount());
}
// Removing a factory that is not in the list returns false (null included).
@Test
public void test39() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    assertFalse(factories.removeConnectionFactory((ConnectionFactory) null));
}
// enumerateXATopicConnectionFactory() never returns null, even when empty.
@Test
public void test40() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    Enumeration xaTopics = factories.enumerateXATopicConnectionFactory();
    assertNotNull(xaTopics);
}
// marshal() on this classpath trips a bytecode verification failure inside
// castor's NestedIOException (regression capture of the VerifyError).
@Test
public void test41() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    DefaultHandler2 handler = new DefaultHandler2();
    // Undeclared exception!
    try {
        factories.marshal((ContentHandler) handler);
        fail("Expecting exception: VerifyError");
    } catch (VerifyError e) {
        // expected message:
        // (class: org/exolab/castor/util/NestedIOException, method: printStackTrace
        // signature: ()V) Illegal use of nonvirtual function call
    }
}
// Adding a null ConnectionFactory is tolerated and leaves the queue list empty.
@Test
public void test42() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    factories.addConnectionFactory((ConnectionFactory) null);
    assertEquals(0, factories.getQueueConnectionFactoryCount());
}
// Indexed add rejects a negative insertion index.
@Test
public void test43() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    try {
        factories.addTopicConnectionFactory(-2331, (TopicConnectionFactory) null);
        fail("Expecting exception: IndexOutOfBoundsException");
    } catch (IndexOutOfBoundsException e) {
        // expected message: Index: -2331, Size: 0
    }
}
// enumerateConnectionFactory() never returns null, even when empty.
@Test
public void test44() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    Enumeration all = factories.enumerateConnectionFactory();
    assertNotNull(all);
}
// clearTopicConnectionFactory() leaves the topic list empty.
@Test
public void test45() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    factories.clearTopicConnectionFactory();
    assertEquals(0, factories.getTopicConnectionFactoryCount());
}
// clearXAConnectionFactory() empties the XAConnectionFactory list.
// Fix: the generated test cleared the XA list but only asserted the unrelated
// XAQueue count, so the cleared list was never verified.
@Test
public void test46() throws Throwable {
    ConnectionFactories factories = new ConnectionFactories();
    factories.clearXAConnectionFactory();
    // The list that was actually cleared must be empty.
    assertEquals(0, factories.getXAConnectionFactoryCount());
    // Original (weaker) regression assertion, kept for compatibility.
    assertEquals(0, factories.getXAQueueConnectionFactoryCount());
}
//Test case number: 47
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.getXAQueueConnectionFactoryCount()I: root-Branch
*/
@Test
public void test47() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
int int0 = connectionFactories0.getXAQueueConnectionFactoryCount();
assertEquals(0, int0);
}
//Test case number: 48
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.getConnectionFactoryCount()I: root-Branch
*/
@Test
public void test48() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
int int0 = connectionFactories0.getConnectionFactoryCount();
assertEquals(0, int0);
}
//Test case number: 49
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.removeXATopicConnectionFactory(Lorg/exolab/jms/config/XATopicConnectionFactory;)Z: root-Branch
*/
@Test
public void test49() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
XATopicConnectionFactory xATopicConnectionFactory0 = new XATopicConnectionFactory();
boolean boolean0 = connectionFactories0.removeXATopicConnectionFactory(xATopicConnectionFactory0);
assertFalse(boolean0);
}
//Test case number: 50
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.getTopicConnectionFactoryCount()I: root-Branch
*/
@Test
public void test50() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
int int0 = connectionFactories0.getTopicConnectionFactoryCount();
assertEquals(0, int0);
}
//Test case number: 51
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.getXATopicConnectionFactoryCount()I: root-Branch
*/
@Test
public void test51() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
int int0 = connectionFactories0.getXATopicConnectionFactoryCount();
assertEquals(0, int0);
}
//Test case number: 52
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.clearXATopicConnectionFactory()V: root-Branch
*/
@Test
public void test52() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
connectionFactories0.clearXATopicConnectionFactory();
assertEquals(0, connectionFactories0.getQueueConnectionFactoryCount());
}
//Test case number: 53
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.addXAConnectionFactory(ILorg/exolab/jms/config/XAConnectionFactory;)V: root-Branch
*/
@Test
public void test53() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
XAConnectionFactory xAConnectionFactory0 = new XAConnectionFactory();
try {
connectionFactories0.addXAConnectionFactory((-1681), xAConnectionFactory0);
fail("Expecting exception: IndexOutOfBoundsException");
} catch(IndexOutOfBoundsException e) {
//
// Index: -1681, Size: 0
//
}
}
//Test case number: 54
/*
* 9 covered goals:
* Goal 1. org.exolab.jms.config.ConnectionFactories.addXAQueueConnectionFactory(ILorg/exolab/jms/config/XAQueueConnectionFactory;)V: root-Branch
* Goal 2. org.exolab.jms.config.ConnectionFactories.isValid()Z: root-Branch
* Goal 3. org.exolab.jms.config.ConnectionFactories.validate()V: root-Branch
* Goal 4. org.exolab.jms.config.ConnectionFactories.getConnectionFactory()[Lorg/exolab/jms/config/ConnectionFactory;: I21 Branch 3 IF_ICMPGE L348 - true
* Goal 5. org.exolab.jms.config.ConnectionFactories.getQueueConnectionFactory()[Lorg/exolab/jms/config/QueueConnectionFactory;: I21 Branch 6 IF_ICMPGE L385 - true
* Goal 6. org.exolab.jms.config.ConnectionFactories.getTopicConnectionFactory()[Lorg/exolab/jms/config/TopicConnectionFactory;: I21 Branch 9 IF_ICMPGE L422 - true
* Goal 7. org.exolab.jms.config.ConnectionFactories.getXAConnectionFactory()[Lorg/exolab/jms/config/XAConnectionFactory;: I21 Branch 12 IF_ICMPGE L459 - true
* Goal 8. org.exolab.jms.config.ConnectionFactories.getXAQueueConnectionFactory()[Lorg/exolab/jms/config/XAQueueConnectionFactory;: I21 Branch 15 IF_ICMPGE L496 - true
* Goal 9. org.exolab.jms.config.ConnectionFactories.getXAQueueConnectionFactory()[Lorg/exolab/jms/config/XAQueueConnectionFactory;: I21 Branch 15 IF_ICMPGE L496 - false
*/
@Test
public void test54() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
connectionFactories0.addXAQueueConnectionFactory(0, (XAQueueConnectionFactory) null);
boolean boolean0 = connectionFactories0.isValid();
assertFalse(boolean0);
}
//Test case number: 55
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.removeXAQueueConnectionFactory(Lorg/exolab/jms/config/XAQueueConnectionFactory;)Z: root-Branch
*/
@Test
public void test55() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
XAQueueConnectionFactory xAQueueConnectionFactory0 = new XAQueueConnectionFactory();
boolean boolean0 = connectionFactories0.removeXAQueueConnectionFactory(xAQueueConnectionFactory0);
assertFalse(boolean0);
}
//Test case number: 56
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.addConnectionFactory(ILorg/exolab/jms/config/ConnectionFactory;)V: root-Branch
*/
@Test
public void test56() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
ConnectionFactory connectionFactory0 = new ConnectionFactory();
try {
connectionFactories0.addConnectionFactory(4939, connectionFactory0);
fail("Expecting exception: IndexOutOfBoundsException");
} catch(IndexOutOfBoundsException e) {
//
// Index: 4939, Size: 0
//
}
}
//Test case number: 57
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.enumerateXAConnectionFactory()Ljava/util/Enumeration;: root-Branch
*/
@Test
public void test57() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
Enumeration enumeration0 = connectionFactories0.enumerateXAConnectionFactory();
assertNotNull(enumeration0);
}
//Test case number: 58
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.enumerateQueueConnectionFactory()Ljava/util/Enumeration;: root-Branch
*/
@Test
public void test58() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
Enumeration enumeration0 = connectionFactories0.enumerateQueueConnectionFactory();
assertNotNull(enumeration0);
}
//Test case number: 59
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.getXAConnectionFactoryCount()I: root-Branch
*/
@Test
public void test59() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
int int0 = connectionFactories0.getXAConnectionFactoryCount();
assertEquals(0, int0);
}
//Test case number: 60
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.clearQueueConnectionFactory()V: root-Branch
*/
@Test
public void test60() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
connectionFactories0.clearQueueConnectionFactory();
assertEquals(0, connectionFactories0.getXAQueueConnectionFactoryCount());
}
//Test case number: 61
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.addXAQueueConnectionFactory(Lorg/exolab/jms/config/XAQueueConnectionFactory;)V: root-Branch
*/
@Test
public void test61() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
XAQueueConnectionFactory xAQueueConnectionFactory0 = new XAQueueConnectionFactory();
connectionFactories0.addXAQueueConnectionFactory(xAQueueConnectionFactory0);
assertEquals(0, connectionFactories0.getQueueConnectionFactoryCount());
}
//Test case number: 62
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.removeXAConnectionFactory(Lorg/exolab/jms/config/XAConnectionFactory;)Z: root-Branch
*/
@Test
public void test62() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
XAConnectionFactory xAConnectionFactory0 = new XAConnectionFactory();
boolean boolean0 = connectionFactories0.removeXAConnectionFactory(xAConnectionFactory0);
assertFalse(boolean0);
}
//Test case number: 63
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.getQueueConnectionFactoryCount()I: root-Branch
*/
@Test
public void test63() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
int int0 = connectionFactories0.getQueueConnectionFactoryCount();
assertEquals(0, int0);
}
//Test case number: 64
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.enumerateXAQueueConnectionFactory()Ljava/util/Enumeration;: root-Branch
*/
@Test
public void test64() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
Enumeration enumeration0 = connectionFactories0.enumerateXAQueueConnectionFactory();
assertNotNull(enumeration0);
}
//Test case number: 65
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.clearXAQueueConnectionFactory()V: root-Branch
*/
@Test
public void test65() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
connectionFactories0.clearXAQueueConnectionFactory();
assertEquals(0, connectionFactories0.getQueueConnectionFactoryCount());
}
//Test case number: 66
/*
* 1 covered goal:
* Goal 1. org.exolab.jms.config.ConnectionFactories.removeQueueConnectionFactory(Lorg/exolab/jms/config/QueueConnectionFactory;)Z: root-Branch
*/
@Test
public void test66() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
QueueConnectionFactory queueConnectionFactory0 = new QueueConnectionFactory();
boolean boolean0 = connectionFactories0.removeQueueConnectionFactory(queueConnectionFactory0);
assertFalse(boolean0);
}
//Test case number: 67
/*
* 2 covered goals:
* Goal 1. org.exolab.jms.config.ConnectionFactories.enumerateTopicConnectionFactory()Ljava/util/Enumeration;: root-Branch
* Goal 2. org.exolab.jms.config.ConnectionFactories.<init>()V: root-Branch
*/
@Test
public void test67() throws Throwable {
ConnectionFactories connectionFactories0 = new ConnectionFactories();
Enumeration enumeration0 = connectionFactories0.enumerateTopicConnectionFactory();
assertNotNull(enumeration0);
}
}
|
package org.puneetha.InvertedIndex;
import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class WordcountReducer extends Reducer<Text, Text, Text, Text> {
    /**
     * Joins all posting-list values for {@code key} with " -> " and emits the
     * joined string as the reducer output value.
     *
     * @param key     the term being reduced
     * @param values  the postings collected for this term
     * @param context MapReduce context used to emit the (key, joined) pair
     */
    @Override
    public void reduce(final Text key, final Iterable<Text> values,
            final Context context) throws IOException, InterruptedException {
        StringBuilder stringBuilder = new StringBuilder();
        // Drive the loop with an explicit iterator so the "is there a next
        // element?" check uses the SAME iterator that is being consumed.
        // The original called values.iterator().hasNext() inside a for-each:
        // on a plain Iterable that creates a fresh iterator and always appends
        // a trailing " -> "; on Hadoop it only worked because the framework
        // recycles a single iterator instance — an undocumented quirk.
        Iterator<Text> it = values.iterator();
        while (it.hasNext()) {
            stringBuilder.append(it.next().toString());
            if (it.hasNext()) {
                stringBuilder.append(" -> ");
            }
        }
        context.write(key, new Text(stringBuilder.toString()));
    }
}
|
/*
* Copyright (C) 2010 Garen J. Torikian
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package biz.varkon.shelvesom.activity.videogames;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.provider.LiveFolders;
import biz.varkon.shelvesom.R;
import biz.varkon.shelvesom.provider.videogames.VideoGamesStore;
public class VideoGamesLiveFolder extends Activity {
    /** Content URI the live-folder framework queries for video-game rows. */
    public static final Uri CONTENT_URI = Uri
            .parse("content://VideoGamesProvider/live_folders/videogames");

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Only the live-folder creation request is honoured; any other
        // action is refused. Either way this activity finishes immediately.
        final String action = getIntent().getAction();
        if (!LiveFolders.ACTION_CREATE_LIVE_FOLDER.equals(action)) {
            setResult(RESULT_CANCELED);
        } else {
            final Intent folder = createLiveFolder(this, CONTENT_URI,
                    getString(R.string.live_folder_videogames),
                    R.drawable.ic_livefolder_videogame_icon);
            setResult(RESULT_OK, folder);
        }
        finish();
    }

    /**
     * Builds the result intent describing a list-mode live folder.
     *
     * @param context context used to resolve the icon resource
     * @param uri     content URI the folder will query
     * @param name    display name of the folder
     * @param icon    drawable resource id for the folder icon
     * @return a fully populated live-folder description intent
     */
    private static Intent createLiveFolder(Context context, Uri uri,
            String name, int icon) {
        final Intent folder = new Intent();
        folder.setData(uri);
        folder.putExtra(LiveFolders.EXTRA_LIVE_FOLDER_NAME, name);
        folder.putExtra(LiveFolders.EXTRA_LIVE_FOLDER_ICON,
                Intent.ShortcutIconResource.fromContext(context, icon));
        folder.putExtra(LiveFolders.EXTRA_LIVE_FOLDER_DISPLAY_MODE,
                LiveFolders.DISPLAY_MODE_LIST);
        // Tapping an item opens its detail view via ACTION_VIEW on the store URI.
        folder.putExtra(LiveFolders.EXTRA_LIVE_FOLDER_BASE_INTENT, new Intent(
                Intent.ACTION_VIEW, VideoGamesStore.VideoGame.CONTENT_URI));
        return folder;
    }
}
|
package dt.pilot.shared.repo;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
import dt.pilot.shared.entity.MongoTestEntity;
/**
 * Spring Data MongoDB repository for {@link MongoTestEntity} documents keyed
 * by their String id. All CRUD operations are inherited from
 * {@link MongoRepository}; no custom query methods are declared.
 */
@Repository
public interface TestEntityRepo extends MongoRepository<MongoTestEntity, String> {
}
|
/*
-------------------------------------------------------------------
|
| CRUDyLeaf - A Domain Specific Language for generating Spring Boot
| REST resources from entity CRUD operations.
| Author: Omar S. Gómez (2020)
| File Date: Thu Jan 14 19:34:36 ECT 2021
|
-------------------------------------------------------------------
*/
package com.tienda.nomina.repository;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
import com.tienda.nomina.model.Cargo;
import java.util.Optional;
/**
 * Spring Data MongoDB repository for {@link Cargo} documents, keyed by their
 * String document id. Query methods below are derived by Spring Data from
 * their names against the {@code idCargo} attribute of {@code Cargo}.
 */
@Repository
public interface CargoRepository extends MongoRepository<Cargo, String> {
    // Derived query: looks up a single Cargo by its idCargo business attribute.
    Optional <Cargo> findByIdCargo(String idCargo);
    // Derived delete: removes the Cargo whose idCargo matches, if any.
    void deleteByIdCargo(String idCargo);
}
|
/*
* Copyright 2018 Broadband Forum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.broadband_forum.obbaa.pma.impl;
import static org.broadband_forum.obbaa.dmyang.entities.DeviceManagerNSConstants.ALIGNED;
import static org.broadband_forum.obbaa.dmyang.entities.DeviceManagerNSConstants.IN_ERROR;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import org.broadband_forum.obbaa.device.adapter.AdapterContext;
import org.broadband_forum.obbaa.device.adapter.AdapterManager;
import org.broadband_forum.obbaa.device.adapter.DeviceInterface;
import org.broadband_forum.obbaa.dmyang.dao.DeviceDao;
import org.broadband_forum.obbaa.dmyang.entities.Device;
import org.broadband_forum.obbaa.dmyang.entities.DeviceMgmt;
import org.broadband_forum.obbaa.dmyang.tx.TxService;
import org.broadband_forum.obbaa.netconf.api.messages.DocumentToPojoTransformer;
import org.broadband_forum.obbaa.netconf.api.messages.NetConfResponse;
import org.broadband_forum.obbaa.netconf.api.messages.Notification;
import org.broadband_forum.obbaa.netconf.api.util.DocumentUtils;
import org.broadband_forum.obbaa.netconf.api.util.NetconfMessageBuilderException;
import org.broadband_forum.obbaa.netconf.persistence.EntityDataStoreManager;
import org.broadband_forum.obbaa.netconf.persistence.PersistenceManagerUtil;
import org.broadband_forum.obbaa.pma.PmaRegistry;
import org.broadband_forum.obbaa.pma.PmaSession;
import org.broadband_forum.obbaa.pma.PmaSessionTemplate;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/**
 * Unit tests for {@code AlignmentTimer}. Verifies that a timer run aligns
 * never-aligned devices, uploads existing device configuration into the PMA
 * when {@code pushPmaConfigurationToDevice} is "false", and records the
 * resulting alignment state via the {@code DeviceDao}. All collaborators
 * (PMA registry/session, DAO, adapter manager, JPA plumbing) are Mockito mocks.
 */
public class AlignmentTimerTest {
    // Canned NETCONF payloads used as stubbed PMA/device responses. They use
    // Unix line endings by default and are rewritten with \r\n on Windows
    // (see reviseStrForWindows), because the production code compares strings
    // produced with platform-dependent line separators.
    private static String SAMPLE_EMPTY_NC_RES = "<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" " +
            "message-id=\"101012\">\n" +
            "  <data>\n" +
            "  </data>\n" +
            "</rpc-reply>";
    private static String SAMPLE_NON_EMPTY_NC_RES = "<rpc-reply " +
            "xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" " +
            "message-id=\"101012\">\n" +
            "  <data>\n" +
            "    <root-config xmlns=\"urn:bbf:yang:obbaa:network-manager\">\n" +
            "      <config1>\n" +
            "      </config1>\n" +
            "    </root-config>\n" +
            "  </data>\n" +
            "</rpc-reply>";
    // edit-config request the timer is expected to send when uploading the
    // device configuration (from SAMPLE_NON_EMPTY_NC_RES) into the PMA store.
    private static String EDIT_CONFIG_REQ = "<rpc message-id=\"1\" xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n" +
            "  <edit-config>\n" +
            "    <target>\n" +
            "      <running/>\n" +
            "    </target>\n" +
            "    <default-operation>merge</default-operation>\n" +
            "    <test-option>set</test-option>\n" +
            "    <error-option>stop-on-error</error-option>\n" +
            "    <config>\n" +
            "      <root-config xmlns=\"urn:bbf:yang:obbaa:network-manager\">\n" +
            "        <config1>\n" +
            "        </config1>\n" +
            "      </root-config>\n" +
            "    </config>\n" +
            "  </edit-config>\n" +
            "</rpc>\n";
    private static String OK_RESPONSE = "<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n" +
            "  <ok/>\n" +
            "</rpc-reply>";
    private static String ERROR_RESPONSE = "<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\n" +
            "  <rpc-error>\n" +
            "    <error-type>application</error-type>\n" +
            "    <error-tag>operation-failed</error-tag>\n" +
            "    <error-severity>error</error-severity>\n" +
            "  </rpc-error>\n" +
            "</rpc-reply>";
    // Object under test and the fixture devices in each alignment state.
    private AlignmentTimer m_timer;
    private List<Device> m_devices;
    private Device m_newDevice;
    private Device m_alignedDevice;
    private Device m_misalignedDevice;
    private Device m_inErrorDevice;
    @Mock
    private PmaRegistry m_pmaRegistry;
    @Mock
    private PmaSession m_pmaSession;
    @Mock
    private DeviceDao m_deviceDao;
    private TxService m_txService;
    @Mock
    private PersistenceManagerUtil m_persistenceMgrUtil;
    @Mock
    private EntityDataStoreManager entityDSM;
    @Mock
    private EntityManager entityMgr;
    @Mock
    private EntityTransaction entityTx;
    @Mock
    private AdapterManager m_adapterMgr;
    @Mock
    private DeviceInterface m_devInterface;
    @Mock
    private AdapterContext m_context;
    // Rewrites every canned payload with CRLF line endings so string
    // comparisons still hold when the JVM runs on Windows.
    private void reviseStrForWindows() {
        SAMPLE_EMPTY_NC_RES = "<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" " +
                "message-id=\"101012\">\r\n" +
                "  <data>\r\n" +
                "  </data>\r\n" +
                "</rpc-reply>";
        SAMPLE_NON_EMPTY_NC_RES = "<rpc-reply " +
                "xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" " +
                "message-id=\"101012\">\r\n" +
                "  <data>\r\n" +
                "    <root-config xmlns=\"urn:bbf:yang:obbaa:network-manager\">\r\n" +
                "      <config1>\r\n" +
                "      </config1>\r\n" +
                "    </root-config>\r\n" +
                "  </data>\r\n" +
                "</rpc-reply>";
        EDIT_CONFIG_REQ = "<rpc message-id=\"1\" xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\r\n" +
                "  <edit-config>\r\n" +
                "    <target>\r\n" +
                "      <running/>\r\n" +
                "    </target>\r\n" +
                "    <default-operation>merge</default-operation>\r\n" +
                "    <test-option>set</test-option>\r\n" +
                "    <error-option>stop-on-error</error-option>\r\n" +
                "    <config>\r\n" +
                "      <root-config xmlns=\"urn:bbf:yang:obbaa:network-manager\">\r\n" +
                "        <config1>\r\n" +
                "        </config1>\r\n" +
                "      </root-config>\r\n" +
                "    </config>\r\n" +
                "  </edit-config>\r\n" +
                "</rpc>\r\n";
        OK_RESPONSE = "<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\r\n" +
                "  <ok/>\r\n" +
                "</rpc-reply>";
        ERROR_RESPONSE = "<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">\r\n" +
                "  <rpc-error>\r\n" +
                "    <error-type>application</error-type>\r\n" +
                "    <error-tag>operation-failed</error-tag>\r\n" +
                "    <error-severity>error</error-severity>\r\n" +
                "  </rpc-error>\r\n" +
                "</rpc-reply>";
    }
    @Before
    public void setUp() throws ExecutionException, NetconfMessageBuilderException {
        if (System.getProperty("os.name").startsWith("Windows")) {
            reviseStrForWindows();
        }
        MockitoAnnotations.initMocks(this);
        m_txService = new TxService();
        // JPA plumbing: make the TxService believe an active transaction exists.
        when(m_persistenceMgrUtil.getEntityDataStoreManager()).thenReturn(entityDSM);
        when(entityDSM.getEntityManager()).thenReturn(entityMgr);
        when(entityMgr.getTransaction()).thenReturn(entityTx);
        when(entityTx.isActive()).thenReturn(true);
        // One device per alignment state: never aligned, aligned, misaligned,
        // and in-error. Only the first two kinds should trigger work.
        m_devices = new ArrayList<>();
        m_newDevice = createDevice("new-device");
        m_devices.add(m_newDevice);
        m_alignedDevice = createDevice("aligned-device");
        m_alignedDevice.getDeviceManagement().getDeviceState().setConfigAlignmentState(ALIGNED);
        m_devices.add(m_alignedDevice);
        m_misalignedDevice = createDevice("misaligned-device");
        m_misalignedDevice.getDeviceManagement().getDeviceState().setConfigAlignmentState("2 Edits pending");
        m_devices.add(m_misalignedDevice);
        m_inErrorDevice = createDevice("in-error");
        m_inErrorDevice.getDeviceManagement().getDeviceState().setConfigAlignmentState(IN_ERROR + ", blah blah");
        m_devices.add(m_inErrorDevice);
        when(m_deviceDao.findAllDevices()).thenReturn(m_devices);
        m_timer = new AlignmentTimer(m_pmaRegistry, m_adapterMgr, m_deviceDao, m_txService);
        when(m_adapterMgr.getAdapterContext(any())).thenReturn(m_context);
        when(m_context.getDeviceInterface()).thenReturn(m_devInterface);
        // Default PMA session response: an empty <data/> NETCONF reply.
        Map<NetConfResponse, List<Notification>> map = new HashMap<>();
        NetConfResponse netConfResponse = DocumentToPojoTransformer.getNetconfResponse(DocumentUtils.stringToDocument(SAMPLE_EMPTY_NC_RES));
        map.put(netConfResponse, Collections.emptyList());
        when(m_pmaSession.executeNC(anyString())).thenReturn(map);
        // Route executeWithPmaSession callbacks straight to the mocked session.
        doAnswer(invocation -> {
            Object[] args = invocation.getArguments();
            return ((PmaSessionTemplate) args[1]).execute(m_pmaSession);
        }).when(m_pmaRegistry).executeWithPmaSession(anyString(), anyObject());
    }
    // Builds a minimal Device fixture with an attached DeviceMgmt record.
    private Device createDevice(String deviceName) {
        Device device = new Device();
        device.setDeviceName(deviceName);
        DeviceMgmt deviceMgmt = new DeviceMgmt();
        device.setDeviceManagement(deviceMgmt);
        return device;
    }
    // Never-aligned and misaligned devices get align(); aligned/in-error
    // devices are skipped and force-align is never used.
    @Test
    public void testNewDevicesAreAligned() throws ExecutionException {
        m_timer.runAlignment();
        verify(m_pmaSession, times(2)).align();
        verify(m_pmaSession, never()).forceAlign();
    }
    // Same expectation when the PMA copy already holds configuration.
    @Test
    public void testNewDevicesWithConfigurationsAreAligned() throws ExecutionException, NetconfMessageBuilderException {
        Map<NetConfResponse, List<Notification>> map = new HashMap<>();
        NetConfResponse netConfResponse = DocumentToPojoTransformer.getNetconfResponse(DocumentUtils.stringToDocument(SAMPLE_NON_EMPTY_NC_RES));
        map.put(netConfResponse, Collections.emptyList());
        when(m_pmaSession.executeNC(anyString())).thenReturn(map);
        m_timer.runAlignment();
        verify(m_pmaSession, times(2)).align();
    }
    // Upload path: device config is fetched, pushed via edit-config, the
    // push flag flips to "true" and the device is recorded as ALIGNED.
    @Test
    public void testUploadConfig() throws ExecutionException, InterruptedException, NetconfMessageBuilderException {
        m_newDevice.getDeviceManagement().setPushPmaConfigurationToDevice("false");
        Future<NetConfResponse> responseFuture = mock(Future.class);
        NetConfResponse netConfResponse = null;
        try {
            netConfResponse = DocumentToPojoTransformer.getNetconfResponse(DocumentUtils.stringToDocument(SAMPLE_NON_EMPTY_NC_RES));
        } catch (NetconfMessageBuilderException e) {
            throw new RuntimeException(e);
        }
        when(responseFuture.get()).thenReturn(netConfResponse);
        when(m_devInterface.getConfig(eq(m_newDevice), anyObject())).thenReturn(responseFuture);
        Map<NetConfResponse, List<Notification>> map = new HashMap<>();
        NetConfResponse netConfRespons = DocumentToPojoTransformer.getNetconfResponse(DocumentUtils.stringToDocument(OK_RESPONSE));
        map.put(netConfRespons, Collections.emptyList());
        when(m_pmaSession.executeNC(EDIT_CONFIG_REQ)).thenReturn(map);
        m_timer.runAlignment();
        assertEquals("true", m_newDevice.getDeviceManagement().getPushPmaConfigurationToDevice());
        verify(m_deviceDao).updateDeviceAlignmentState("new-device", ALIGNED);
    }
    // Upload path with an rpc-error: the push flag stays "false", no state
    // is persisted and the device remains never-aligned.
    @Test
    public void testUploadConfigError() throws ExecutionException, InterruptedException, NetconfMessageBuilderException {
        m_newDevice.getDeviceManagement().setPushPmaConfigurationToDevice("false");
        Future<NetConfResponse> responseFuture = mock(Future.class);
        NetConfResponse netConfResponse = null;
        try {
            netConfResponse = DocumentToPojoTransformer.getNetconfResponse(DocumentUtils.stringToDocument(SAMPLE_NON_EMPTY_NC_RES));
        } catch (NetconfMessageBuilderException e) {
            throw new RuntimeException(e);
        }
        when(responseFuture.get()).thenReturn(netConfResponse);
        when(m_devInterface.getConfig(eq(m_newDevice), anyObject())).thenReturn(responseFuture);
        Map<NetConfResponse, List<Notification>> map = new HashMap<>();
        NetConfResponse netConfRespons = DocumentToPojoTransformer.getNetconfResponse(DocumentUtils.stringToDocument(ERROR_RESPONSE));
        map.put(netConfRespons, Collections.emptyList());
        when(m_pmaSession.executeNC(EDIT_CONFIG_REQ)).thenReturn(map);
        m_timer.runAlignment();
        assertEquals("false", m_newDevice.getDeviceManagement().getPushPmaConfigurationToDevice());
        verify(m_deviceDao, never()).updateDeviceAlignmentState(anyString(), anyString());
        assertTrue(m_newDevice.isNeverAligned());
    }
    // Upload path when the device has no configuration at all: nothing to
    // push, but the flag still flips and the device is marked ALIGNED.
    @Test
    public void testUploadConfigEmptyResponseFromDevice() throws ExecutionException, InterruptedException {
        m_newDevice.getDeviceManagement().setPushPmaConfigurationToDevice("false");
        Future<NetConfResponse> responseFuture = mock(Future.class);
        NetConfResponse netConfResponse = null;
        try {
            netConfResponse = DocumentToPojoTransformer.getNetconfResponse(DocumentUtils.stringToDocument(SAMPLE_EMPTY_NC_RES));
        } catch (NetconfMessageBuilderException e) {
            throw new RuntimeException(e);
        }
        when(responseFuture.get()).thenReturn(netConfResponse);
        when(m_devInterface.getConfig(eq(m_newDevice), anyObject())).thenReturn(responseFuture);
        m_timer.runAlignment();
        assertEquals("true", m_newDevice.getDeviceManagement().getPushPmaConfigurationToDevice());
        verify(m_deviceDao).updateDeviceAlignmentState("new-device", ALIGNED);
    }
}
|
/*
Copyright (C) 2009 Volker Berlin (i-net software)
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
Jeroen Frijters
jeroen@frijters.net
*/
package java_.awt;
import java.awt.*;
import junit.ikvm.ReferenceData;
import org.junit.*;
import static org.junit.Assert.*;
/**
 * Frame-specific variant of {@link WindowTest}: runs the inherited window
 * tests against a concrete {@link Frame} instance, recording results through
 * the shared {@code reference} store.
 *
 * @author Volker Berlin
 */
public class FrameTest extends WindowTest{
    @BeforeClass
    public static void setUpBeforeClass() throws Exception{
        // Fresh reference store for this class; inherited tests write into it.
        reference = new ReferenceData();
    }
    @AfterClass
    public static void tearDownAfterClass() throws Exception{
        // Persist the collected reference data (skip if setup never ran).
        if(reference != null){
            reference.save();
        }
    }
    // Supplies the concrete Window subtype exercised by the inherited tests.
    @Override
    protected Window createWindow(){
        return new Frame();
    }
}
|
package com.technologies.future;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Created by Orgie on 05.02.2017.
*/
public class JSONMapper {
private Map<String, Object> map = new HashMap<String, Object>();
private String type;
private JSONObject json;
public List<Object> JSONArrayLoop(JSONArray json){
List<Object> list = new ArrayList<Object>();
Object value = null;
for(int i = 0; i < json.length(); i++){
value = null;
try {
value = json.get(i);
} catch (JSONException e) {
// Something went wrong!
}
if(value.getClass() == JSONArray.class) {
value = JSONArrayLoop((JSONArray) value);
}else if(value.getClass() == JSONObject.class){
value = JSONObjectLoop((JSONObject) value);
}
list.add(value);
}
return list;
}
public Map<String, Object> JSONObjectLoop(JSONObject json){
Map<String, Object> localMap = new HashMap<String, Object>();
Iterator<String> iter = json.keys();
while (iter.hasNext()) {
String key = iter.next();
Object value = null;
try {
value = json.get(key);
} catch (JSONException e) {
// Something went wrong!
}
if(value.getClass() == JSONArray.class) {
value = JSONArrayLoop((JSONArray) value);
}else if(value.getClass() == JSONObject.class){
value = JSONObjectLoop((JSONObject) value);
}
localMap.put(key, value);
}
return localMap;
}
public JSONArray ListLoop(List _list){
JSONArray localJson = new JSONArray();
for(int i = 0; i < _list.size(); i++){
Object value = null;
value = _list.get(i);
if(value.getClass() == ArrayList.class) {
value = ListLoop((List) value);
}else if(value.getClass() == HashMap.class){
value = MapLoop((Map) value);
}
localJson.put(value);
}
return localJson;
}
public JSONObject MapLoop(Map _map){
JSONObject localJson = new JSONObject();
Iterator<Map.Entry> iter = _map.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, Object> key = iter.next();
Object value = null;
value = key.getValue();
if(value.getClass() == ArrayList.class) {
value = ListLoop((List) value);
}else if(value.getClass() == HashMap.class){
value = MapLoop((Map) value);
}
try {
localJson.put(key.getKey(), value);
} catch (JSONException e) {
e.printStackTrace();
}
}
return localJson;
}
public Object ioStream(String _path, String _option, Object _value){
String[] array = _path.split("\\.");
int number = 0;
Object target = map;
Class cls;
if(array.length < 1){
if(_option.equals("write")) {
map.put(_path, _value);
}
target = map.get(_path);
//Log.e("!!", _path);
}
for(int i = 0; i < array.length; i++){
cls = target.getClass();
//Log.e(array[i], cls.getName() + " " + target.toString());
if(cls == HashMap.class){
if( _option.equals("write") && i == (array.length - 1) ) {
((Map)target).put(array[i], _value);
}
target = ((Map)target).get(array[i]);
}else if(cls == ArrayList.class){
try {
number = Integer.parseInt(array[i]);
if( _option.equals("write") && i == (array.length - 1) ) {
((List) target).set(number, _value);
}
target = ((List) target).get(number);
}catch(NumberFormatException e){
target = null;
}
}
}
return target;
}
public Object get(String _path){
return ioStream(_path, "read", "");
}
public String getString(String _path) {
Object target = get(_path);
if(target.getClass() == String.class){
return (String) target;
}
return null;
}
public Integer getInt(String _path) {
try {
return new Integer( getString(_path) );
}catch(NumberFormatException e){
return null;
}
}
public Map getMap(String _path) {
Object target = get(_path);
if(target.getClass() == HashMap.class){
return (Map) target;
}
return null;
}
public List getList(String _path) {
Object target = get(_path);
if(target.getClass() == ArrayList.class){
return (List) target;
}
return null;
}
public String getJSON(){
json = MapLoop(map);
return json.toString();
}
public Object set(String _path, Object _value){
return ioStream(_path, "write", _value);
}
/**
 * Parses a JSON document and (re)builds the internal map.
 * Objects are parsed directly; arrays are wrapped under the "array" key so
 * the root is always a JSON object.
 *
 * @param _stringJSON JSON text starting with '{' or '['
 */
public void setJSON(String _stringJSON){
    // Guard against null/empty/whitespace input: the original charAt(0)
    // would throw StringIndexOutOfBoundsException on an empty string.
    String text = (_stringJSON == null) ? "" : _stringJSON.trim();
    if(!text.isEmpty()){
        char first = text.charAt(0);
        if(first == '{'){
            type = "object";
        }else if(first == '['){
            type = "array";
        }
    }
    json = new JSONObject();
    // Compare with equals(): the original used == on Strings, which only
    // worked because both sides happened to be interned literals.
    if("array".equals(type)){
        // Keep the original two-step behavior: on a parse failure an empty
        // JSONArray is still stored under "array".
        JSONArray array = new JSONArray();
        try {
            array = new JSONArray(text);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        try {
            json.put("array", array);
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }else if("object".equals(type)){
        try {
            json = new JSONObject(text);
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
    map = JSONObjectLoop(json);
}
/**
 * Builds a mapper from a JSON document.
 *
 * @param _stringJSON JSON text; an object ("{...}") or an array ("[...]")
 */
public JSONMapper(String _stringJSON){
    setJSON(_stringJSON);
}
}
|
/**
* Copyright 2016 William Van Woensel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*
*
* @author wvw
*
*/
package wvw.mobibench.devserv.server.serial;
import wvw.mobibench.devserv.server.msg.ResponseTypes;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
/**
 * JSON (de)serialization helper built on Gson with custom
 * {@link ResponseTypes} enum adapters.
 */
public class Serializer {

    // Built once in the constructor, AFTER the type adapters are registered.
    // The original code created the Gson instance from the builder at
    // field-initialization time, which runs before the constructor body, so
    // the adapters registered in the constructor were silently ignored.
    private final Gson gson;

    public Serializer() {
        GsonBuilder gsonBuilder = new GsonBuilder();
        EnumSerializer enumSerial = new EnumSerializer();
        EnumDeserializer enumDeserial = new EnumDeserializer();
        gsonBuilder.registerTypeAdapter(ResponseTypes.class, enumSerial);
        gsonBuilder.registerTypeAdapter(ResponseTypes.class, enumDeserial);
        // Create Gson only after registration so the adapters take effect.
        gson = gsonBuilder.create();
    }

    /**
     * Serializes an object to its JSON representation.
     *
     * @param data object to serialize
     * @return JSON text
     */
    public String serialize(Object data) {
        return gson.toJson(data);
    }

    /**
     * Deserializes JSON text into an instance of the given class.
     *
     * @param json     JSON text
     * @param classOfT target class
     * @return the deserialized instance
     */
    public <T> T deserialize(String json, Class<T> classOfT) {
        return gson.fromJson(json, classOfT);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.client.python;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
import java.util.Optional;
import static java.util.Objects.requireNonNull;
/** Options for the {@link PythonDriver}. */
final class PythonDriverOptions {

    @Nullable private final String entryPointModule;
    @Nullable private final String entryPointScript;
    @Nonnull private final List<String> programArgs;

    /**
     * Creates the driver options.
     *
     * @param entryPointModule the entry-point module name, may be null
     * @param entryPointScript the entry-point script path, may be null
     * @param programArgs      the program arguments, must not be null
     */
    PythonDriverOptions(
            @Nullable String entryPointModule,
            @Nullable String entryPointScript,
            List<String> programArgs) {
        this.entryPointModule = entryPointModule;
        this.entryPointScript = entryPointScript;
        this.programArgs = requireNonNull(programArgs, "programArgs");
    }

    /** Returns the entry-point module name, or null when none was supplied. */
    @Nullable
    String getEntryPointModule() {
        return entryPointModule;
    }

    /** Returns the entry-point script path, when one was supplied. */
    Optional<String> getEntryPointScript() {
        return Optional.ofNullable(entryPointScript);
    }

    /** Returns the program arguments; never null. */
    @Nonnull
    List<String> getProgramArgs() {
        return programArgs;
    }
}
|
/*
* Copyright 2014-2022 Real Logic Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.cluster;
import io.aeron.*;
import io.aeron.cluster.client.ClusterException;
import io.aeron.cluster.service.ClusterCounters;
import io.aeron.cluster.service.ClusteredServiceContainer;
import org.agrona.*;
import org.agrona.concurrent.status.AtomicCounter;
import org.agrona.concurrent.status.CountersReader;
import java.io.File;
import java.nio.MappedByteBuffer;
import java.nio.charset.StandardCharsets;
import static io.aeron.CncFileDescriptor.*;
/**
* Toggle control {@link ToggleState}s for a cluster node such as {@link ToggleState#SUSPEND} or
* {@link ToggleState#RESUME}. This can only be applied to the {@link io.aeron.cluster.service.Cluster.Role#LEADER}.
*/
public class ClusterControl
{
    /**
     * Toggle states for controlling the cluster node once it has entered the active state after initialising.
     * The toggle can only be switched into a new state from {@link #NEUTRAL} and will be reset by the
     * {@link io.aeron.cluster.ConsensusModule} once the triggered action is complete.
     */
    public enum ToggleState
    {
        /**
         * Inactive state, not accepting new actions.
         */
        INACTIVE(0),
        /**
         * Neutral state ready to accept a new action.
         */
        NEUTRAL(1),
        /**
         * Suspend processing of ingress and timers.
         */
        SUSPEND(2),
        /**
         * Resume processing of ingress and timers.
         */
        RESUME(3),
        /**
         * Take a snapshot of cluster state.
         */
        SNAPSHOT(4),
        /**
         * Shut down the cluster in an orderly fashion by taking a snapshot first then terminating.
         */
        SHUTDOWN(5),
        /**
         * Abort processing and terminate the cluster without taking a snapshot.
         */
        ABORT(6);

        private final int code;

        // Cached once to avoid allocating a new array on every values() call.
        private static final ToggleState[] STATES = values();

        ToggleState(final int code)
        {
            // The counter value is decoded back via STATES[ordinal], so code
            // and ordinal must stay in lock-step.
            if (code != ordinal())
            {
                throw new IllegalArgumentException(name() + " - code must equal ordinal value: code=" + code);
            }
            this.code = code;
        }

        /**
         * Code to be used as the indicator in the control toggle counter.
         *
         * @return code to be used as the indicator in the control toggle counter.
         */
        public final int code()
        {
            return code;
        }

        /**
         * Toggle the control counter to trigger the requested {@link ToggleState}.
         * <p>
         * This action is thread safe and will succeed if the toggle is in the {@link ToggleState#NEUTRAL} state,
         * or if toggle is {@link ToggleState#SUSPEND} and requested state is {@link ToggleState#RESUME}.
         *
         * @param controlToggle to change to the trigger state.
         * @return true if the counter toggles or false if it is in a state other than {@link ToggleState#NEUTRAL}.
         */
        public final boolean toggle(final AtomicCounter controlToggle)
        {
            // RESUME is the one transition allowed from a non-NEUTRAL state.
            if (code() == RESUME.code() && controlToggle.get() == SUSPEND.code())
            {
                return controlToggle.compareAndSet(SUSPEND.code(), RESUME.code());
            }

            // CAS guarantees only one contender wins the transition.
            return controlToggle.compareAndSet(NEUTRAL.code(), code());
        }

        /**
         * Reset the toggle to the {@link #NEUTRAL} state.
         *
         * @param controlToggle to be reset.
         */
        public static void reset(final AtomicCounter controlToggle)
        {
            controlToggle.set(NEUTRAL.code());
        }

        /**
         * Activate the toggle by setting it to the {@link #NEUTRAL} state.
         *
         * @param controlToggle to be activated.
         */
        public static void activate(final AtomicCounter controlToggle)
        {
            controlToggle.set(NEUTRAL.code());
        }

        /**
         * Deactivate the toggle by setting it to the {@link #INACTIVE} state.
         *
         * @param controlToggle to be deactivated.
         */
        public static void deactivate(final AtomicCounter controlToggle)
        {
            controlToggle.set(INACTIVE.code());
        }

        /**
         * Get the {@link ToggleState} for a given control toggle.
         *
         * @param controlToggle to get the current state for.
         * @return the state for the current control toggle.
         * @throws ClusterException if the counter is not one of the valid values.
         */
        public static ToggleState get(final AtomicCounter controlToggle)
        {
            if (controlToggle.isClosed())
            {
                throw new ClusterException("counter is closed");
            }

            final long toggleValue = controlToggle.get();
            // Reject values outside the known enum range before indexing.
            if (toggleValue < 0 || toggleValue > (STATES.length - 1))
            {
                throw new ClusterException("invalid toggle value: " + toggleValue);
            }

            return STATES[(int)toggleValue];
        }
    }

    /**
     * Counter type id for the control toggle.
     */
    public static final int CONTROL_TOGGLE_TYPE_ID = AeronCounters.CLUSTER_CONTROL_TOGGLE_TYPE_ID;

    /**
     * Map a {@link CountersReader} over the provided {@link File} for the CnC file.
     *
     * @param cncFile for the counters.
     * @return a {@link CountersReader} over the provided CnC file.
     */
    public static CountersReader mapCounters(final File cncFile)
    {
        final MappedByteBuffer cncByteBuffer = IoUtil.mapExistingFile(cncFile, "cnc");
        final DirectBuffer cncMetaData = createMetaDataBuffer(cncByteBuffer);
        final int cncVersion = cncMetaData.getInt(cncVersionOffset(0));

        CncFileDescriptor.checkVersion(cncVersion);

        // Counter labels are decoded as US-ASCII from the mapped CnC file.
        return new CountersReader(
            createCountersMetaDataBuffer(cncByteBuffer, cncMetaData),
            createCountersValuesBuffer(cncByteBuffer, cncMetaData),
            StandardCharsets.US_ASCII);
    }

    /**
     * Find the control toggle counter or return null if not found.
     *
     * @param counters to search within.
     * @param clusterId to which the allocated counter belongs.
     * @return the control toggle counter or return null if not found.
     */
    public static AtomicCounter findControlToggle(final CountersReader counters, final int clusterId)
    {
        final int counterId = ClusterCounters.find(counters, CONTROL_TOGGLE_TYPE_ID, clusterId);
        if (Aeron.NULL_VALUE != counterId)
        {
            return new AtomicCounter(counters.valuesBuffer(), counterId, null);
        }

        return null;
    }

    /**
     * Main method for launching the process.
     *
     * @param args passed to the process.
     */
    public static void main(final String[] args)
    {
        checkUsage(args);

        final ToggleState toggleState = ToggleState.valueOf(args[0].toUpperCase());
        final File cncFile = CommonContext.newDefaultCncFile();

        System.out.println("Command `n Control file " + cncFile);

        final CountersReader countersReader = mapCounters(cncFile);
        final int clusterId = ClusteredServiceContainer.Configuration.clusterId();
        final AtomicCounter controlToggle = findControlToggle(countersReader, clusterId);

        if (null == controlToggle)
        {
            // NOTE(review): exits with status 0 even on failure - confirm no
            // script relies on a non-zero exit code here.
            System.out.println("Failed to find control toggle");
            System.exit(0);
        }

        if (toggleState.toggle(controlToggle))
        {
            System.out.println(toggleState + " toggled successfully");
        }
        else
        {
            System.out.println(toggleState + " did NOT toggle: current state=" + ToggleState.get(controlToggle));
        }
    }

    private static void checkUsage(final String[] args)
    {
        if (1 != args.length)
        {
            // NOTE(review): usage error also exits with status 0 - confirm
            // callers do not depend on the exit code.
            System.out.format("Usage: [-Daeron.dir=<directory containing CnC file> -Daeron.cluster.id=<id>] " +
                ClusterControl.class.getName() + " <action>%n");
            System.exit(0);
        }
    }
}
|
package org.im97mori.rbt.ble.characteristic.as;
import android.os.Parcel;
import android.os.Parcelable;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
/**
* 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034)
*
* @see AccelerationMemoryData1
* @see AccelerationMemoryData2
* @see AccelerationMemoryData3
* @see AccelerationMemoryData4
* @see AccelerationMemoryData5
* @see AccelerationMemoryData6
* @see AccelerationMemoryData7
* @see AccelerationMemoryData8
* @see AccelerationMemoryData9
* @see AccelerationMemoryData10
* @see AccelerationMemoryData11
* @see AccelerationMemoryData12
* @see AccelerationMemoryData13
*/
@SuppressWarnings("unused")
public class AccelerationMemoryData implements Parcelable {

    /**
     * @see android.os.Parcelable.Creator
     */
    public static final Creator<AccelerationMemoryData> CREATOR = new Creator<AccelerationMemoryData>() {

        /**
         * {@inheritDoc}
         */
        @Override
        @NonNull
        public AccelerationMemoryData createFromParcel(@NonNull Parcel in) {
            return new AccelerationMemoryData(in);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        @NonNull
        public AccelerationMemoryData[] newArray(int size) {
            return new AccelerationMemoryData[size];
        }

    };

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 1 / 13
     */
    private AccelerationMemoryData1 mAccelerationMemoryData1;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 2 / 13
     */
    private AccelerationMemoryData2 mAccelerationMemoryData2;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 3 / 13
     */
    private AccelerationMemoryData3 mAccelerationMemoryData3;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 4 / 13
     */
    private AccelerationMemoryData4 mAccelerationMemoryData4;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 5 / 13
     */
    private AccelerationMemoryData5 mAccelerationMemoryData5;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 6 / 13
     */
    private AccelerationMemoryData6 mAccelerationMemoryData6;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 7 / 13
     */
    private AccelerationMemoryData7 mAccelerationMemoryData7;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 8 / 13
     */
    private AccelerationMemoryData8 mAccelerationMemoryData8;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 9 / 13
     */
    private AccelerationMemoryData9 mAccelerationMemoryData9;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 10 / 13
     */
    private AccelerationMemoryData10 mAccelerationMemoryData10;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 11 / 13
     */
    private AccelerationMemoryData11 mAccelerationMemoryData11;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 12 / 13
     */
    private AccelerationMemoryData12 mAccelerationMemoryData12;

    /**
     * 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 13 / 13
     */
    private AccelerationMemoryData13 mAccelerationMemoryData13;

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 1 / 13
     */
    @Nullable
    public AccelerationMemoryData1 getAccelerationMemoryData1() {
        return mAccelerationMemoryData1;
    }

    /**
     * Constructor
     */
    public AccelerationMemoryData() {
    }

    /**
     * Constructor from {@link Parcel}
     *
     * @param in Parcel
     */
    private AccelerationMemoryData(@NonNull Parcel in) {
        mAccelerationMemoryData1 = in.readParcelable(AccelerationMemoryData1.class.getClassLoader());
        mAccelerationMemoryData2 = in.readParcelable(AccelerationMemoryData2.class.getClassLoader());
        mAccelerationMemoryData3 = in.readParcelable(AccelerationMemoryData3.class.getClassLoader());
        mAccelerationMemoryData4 = in.readParcelable(AccelerationMemoryData4.class.getClassLoader());
        mAccelerationMemoryData5 = in.readParcelable(AccelerationMemoryData5.class.getClassLoader());
        mAccelerationMemoryData6 = in.readParcelable(AccelerationMemoryData6.class.getClassLoader());
        mAccelerationMemoryData7 = in.readParcelable(AccelerationMemoryData7.class.getClassLoader());
        mAccelerationMemoryData8 = in.readParcelable(AccelerationMemoryData8.class.getClassLoader());
        mAccelerationMemoryData9 = in.readParcelable(AccelerationMemoryData9.class.getClassLoader());
        mAccelerationMemoryData10 = in.readParcelable(AccelerationMemoryData10.class.getClassLoader());
        mAccelerationMemoryData11 = in.readParcelable(AccelerationMemoryData11.class.getClassLoader());
        mAccelerationMemoryData12 = in.readParcelable(AccelerationMemoryData12.class.getClassLoader());
        mAccelerationMemoryData13 = in.readParcelable(AccelerationMemoryData13.class.getClassLoader());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public int describeContents() {
        return 0;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void writeToParcel(@NonNull Parcel dest, int flags) {
        dest.writeParcelable(mAccelerationMemoryData1, flags);
        dest.writeParcelable(mAccelerationMemoryData2, flags);
        dest.writeParcelable(mAccelerationMemoryData3, flags);
        dest.writeParcelable(mAccelerationMemoryData4, flags);
        dest.writeParcelable(mAccelerationMemoryData5, flags);
        dest.writeParcelable(mAccelerationMemoryData6, flags);
        dest.writeParcelable(mAccelerationMemoryData7, flags);
        dest.writeParcelable(mAccelerationMemoryData8, flags);
        dest.writeParcelable(mAccelerationMemoryData9, flags);
        dest.writeParcelable(mAccelerationMemoryData10, flags);
        dest.writeParcelable(mAccelerationMemoryData11, flags);
        dest.writeParcelable(mAccelerationMemoryData12, flags);
        dest.writeParcelable(mAccelerationMemoryData13, flags);
    }

    /**
     * @param accelerationMemoryData1 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 1 / 13
     */
    public void setAccelerationMemoryData1(@NonNull AccelerationMemoryData1 accelerationMemoryData1) {
        this.mAccelerationMemoryData1 = accelerationMemoryData1;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 2 / 13
     */
    @Nullable
    public AccelerationMemoryData2 getAccelerationMemoryData2() {
        return mAccelerationMemoryData2;
    }

    /**
     * @param accelerationMemoryData2 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 2 / 13
     */
    public void setAccelerationMemoryData2(@NonNull AccelerationMemoryData2 accelerationMemoryData2) {
        this.mAccelerationMemoryData2 = accelerationMemoryData2;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 3 / 13
     */
    @Nullable
    public AccelerationMemoryData3 getAccelerationMemoryData3() {
        return mAccelerationMemoryData3;
    }

    /**
     * @param accelerationMemoryData3 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 3 / 13
     */
    // @NonNull added for consistency with the other twelve setters.
    public void setAccelerationMemoryData3(@NonNull AccelerationMemoryData3 accelerationMemoryData3) {
        this.mAccelerationMemoryData3 = accelerationMemoryData3;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 4 / 13
     */
    @Nullable
    public AccelerationMemoryData4 getAccelerationMemoryData4() {
        return mAccelerationMemoryData4;
    }

    /**
     * @param accelerationMemoryData4 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 4 / 13
     */
    public void setAccelerationMemoryData4(@NonNull AccelerationMemoryData4 accelerationMemoryData4) {
        this.mAccelerationMemoryData4 = accelerationMemoryData4;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 5 / 13
     */
    @Nullable
    public AccelerationMemoryData5 getAccelerationMemoryData5() {
        return mAccelerationMemoryData5;
    }

    /**
     * @param accelerationMemoryData5 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 5 / 13
     */
    // @NonNull added for consistency with the other twelve setters.
    public void setAccelerationMemoryData5(@NonNull AccelerationMemoryData5 accelerationMemoryData5) {
        this.mAccelerationMemoryData5 = accelerationMemoryData5;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 6 / 13
     */
    @Nullable
    public AccelerationMemoryData6 getAccelerationMemoryData6() {
        return mAccelerationMemoryData6;
    }

    /**
     * @param accelerationMemoryData6 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 6 / 13
     */
    public void setAccelerationMemoryData6(@NonNull AccelerationMemoryData6 accelerationMemoryData6) {
        this.mAccelerationMemoryData6 = accelerationMemoryData6;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 7 / 13
     */
    @Nullable
    public AccelerationMemoryData7 getAccelerationMemoryData7() {
        return mAccelerationMemoryData7;
    }

    /**
     * @param accelerationMemoryData7 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 7 / 13
     */
    public void setAccelerationMemoryData7(@NonNull AccelerationMemoryData7 accelerationMemoryData7) {
        this.mAccelerationMemoryData7 = accelerationMemoryData7;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 8 / 13
     */
    @Nullable
    public AccelerationMemoryData8 getAccelerationMemoryData8() {
        return mAccelerationMemoryData8;
    }

    /**
     * @param accelerationMemoryData8 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 8 / 13
     */
    public void setAccelerationMemoryData8(@NonNull AccelerationMemoryData8 accelerationMemoryData8) {
        this.mAccelerationMemoryData8 = accelerationMemoryData8;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 9 / 13
     */
    @Nullable
    public AccelerationMemoryData9 getAccelerationMemoryData9() {
        return mAccelerationMemoryData9;
    }

    /**
     * @param accelerationMemoryData9 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 9 / 13
     */
    public void setAccelerationMemoryData9(@NonNull AccelerationMemoryData9 accelerationMemoryData9) {
        this.mAccelerationMemoryData9 = accelerationMemoryData9;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 10 / 13
     */
    @Nullable
    public AccelerationMemoryData10 getAccelerationMemoryData10() {
        return mAccelerationMemoryData10;
    }

    /**
     * @param accelerationMemoryData10 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 10 / 13
     */
    public void setAccelerationMemoryData10(@NonNull AccelerationMemoryData10 accelerationMemoryData10) {
        this.mAccelerationMemoryData10 = accelerationMemoryData10;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 11 / 13
     */
    @Nullable
    public AccelerationMemoryData11 getAccelerationMemoryData11() {
        return mAccelerationMemoryData11;
    }

    /**
     * @param accelerationMemoryData11 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 11 / 13
     */
    public void setAccelerationMemoryData11(@NonNull AccelerationMemoryData11 accelerationMemoryData11) {
        this.mAccelerationMemoryData11 = accelerationMemoryData11;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 12 / 13
     */
    @Nullable
    public AccelerationMemoryData12 getAccelerationMemoryData12() {
        return mAccelerationMemoryData12;
    }

    /**
     * @param accelerationMemoryData12 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 12 / 13
     */
    public void setAccelerationMemoryData12(@NonNull AccelerationMemoryData12 accelerationMemoryData12) {
        this.mAccelerationMemoryData12 = accelerationMemoryData12;
    }

    /**
     * @return 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 13 / 13
     */
    @Nullable
    public AccelerationMemoryData13 getAccelerationMemoryData13() {
        return mAccelerationMemoryData13;
    }

    /**
     * @param accelerationMemoryData13 2.3.5 Acceleration memory data [Data] (Characteristics UUID: 0x5034) 13 / 13
     */
    public void setAccelerationMemoryData13(@NonNull AccelerationMemoryData13 accelerationMemoryData13) {
        this.mAccelerationMemoryData13 = accelerationMemoryData13;
    }

}
|
/*
* Copyright 2017 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nativescript.tns.arlib.rendering;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import de.javagl.obj.Obj;
import de.javagl.obj.ObjData;
import de.javagl.obj.ObjReader;
import de.javagl.obj.ObjUtils;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
/** Renders an object loaded from an OBJ file in OpenGL. */
public class ObjectRenderer {
private static final String TAG = ObjectRenderer.class.getSimpleName();
/**
* Blend mode.
*
* @see #setBlendMode(BlendMode)
*/
public enum BlendMode {
    /** Multiplies the destination color by the source alpha. Used for shadow rendering. */
    Shadow,
    /** Normal alpha blending. */
    Grid
}

// Shader names.
private static final String VERTEX_SHADER_NAME = "shaders/object.vert";
private static final String FRAGMENT_SHADER_NAME = "shaders/object.frag";

private static final int COORDS_PER_VERTEX = 3;

// Note: the last component must be zero to avoid applying the translational part of the matrix.
private static final float[] LIGHT_DIRECTION = new float[] {0.250f, 0.866f, 0.433f, 0.0f};
// Light direction transformed into view space each frame (see draw()).
private final float[] viewLightDirection = new float[4];

// Object vertex buffer variables.
private int vertexBufferId;
// Byte offsets of each attribute within the single interleaved-by-section VBO.
private int verticesBaseAddress;
private int texCoordsBaseAddress;
private int normalsBaseAddress;
private int indexBufferId;
private int indexCount;

private int program;
// One texture handle: the diffuse texture map.
private final int[] textures = new int[1];

// Shader location: model view projection matrix.
private int modelViewUniform;
private int modelViewProjectionUniform;

// Shader location: object attributes.
private int positionAttribute;
private int normalAttribute;
private int texCoordAttribute;

// Shader location: texture sampler.
private int textureUniform;

// Shader location: environment properties.
private int lightingParametersUniform;

// Shader location: material properties.
private int materialParametersUniform;

// Shader location: color correction property
private int colorCorrectionParameterUniform;

private BlendMode blendMode = null;

// Temporary matrices allocated here to reduce number of allocations for each frame.
private final float[] modelMatrix = new float[16];
private final float[] modelViewMatrix = new float[16];
private final float[] modelViewProjectionMatrix = new float[16];

// Set some default material properties to use for lighting.
private float ambient = 0.3f;
private float diffuse = 1.0f;
private float specular = 1.0f;
private float specularPower = 6.0f;

/** Creates an uninitialized renderer; call {@link #createOnGlThread} on the GL thread before drawing. */
public ObjectRenderer() {}
/**
 * Creates and initializes OpenGL resources needed for rendering the model.
 * Must be called on the GL thread.
 *
 * @param context Context for loading the shader and below-named model and texture assets.
 * @param objAssetName Name of the OBJ file containing the model geometry.
 * @param diffuseTextureAssetName Name of the PNG file containing the diffuse texture map.
 * @throws IOException if an asset cannot be opened, decoded, or parsed.
 */
public void createOnGlThread(Context context, String objAssetName, String diffuseTextureAssetName) throws IOException {
    // Compile and link the shader program.
    final int vertexShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER_NAME);
    final int fragmentShader = ShaderUtil.loadGLShader(TAG, context, GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER_NAME);

    program = GLES20.glCreateProgram();
    GLES20.glAttachShader(program, vertexShader);
    GLES20.glAttachShader(program, fragmentShader);
    GLES20.glLinkProgram(program);
    GLES20.glUseProgram(program);

    ShaderUtil.checkGLError(TAG, "Program creation");

    modelViewUniform = GLES20.glGetUniformLocation(program, "u_ModelView");
    modelViewProjectionUniform = GLES20.glGetUniformLocation(program, "u_ModelViewProjection");

    positionAttribute = GLES20.glGetAttribLocation(program, "a_Position");
    normalAttribute = GLES20.glGetAttribLocation(program, "a_Normal");
    texCoordAttribute = GLES20.glGetAttribLocation(program, "a_TexCoord");

    textureUniform = GLES20.glGetUniformLocation(program, "u_Texture");

    lightingParametersUniform = GLES20.glGetUniformLocation(program, "u_LightingParameters");
    materialParametersUniform = GLES20.glGetUniformLocation(program, "u_MaterialParameters");
    colorCorrectionParameterUniform = GLES20.glGetUniformLocation(program, "u_ColorCorrectionParameters");

    ShaderUtil.checkGLError(TAG, "Program parameters");

    // Read the texture. try-with-resources closes the asset stream, which
    // the original code leaked.
    Bitmap textureBitmap;
    try (InputStream textureStream = context.getAssets().open(diffuseTextureAssetName)) {
        textureBitmap = BitmapFactory.decodeStream(textureStream);
    }
    if (textureBitmap == null) {
        // decodeStream returns null for undecodable data instead of throwing.
        throw new IOException("Failed to decode texture asset: " + diffuseTextureAssetName);
    }

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glGenTextures(textures.length, textures, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);

    GLES20.glTexParameteri(
        GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, textureBitmap, 0);
    GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);

    textureBitmap.recycle();

    ShaderUtil.checkGLError(TAG, "Texture loading");

    // Read the obj file, closing the asset stream (previously leaked).
    Obj obj;
    try (InputStream objInputStream = context.getAssets().open(objAssetName)) {
        obj = ObjReader.read(objInputStream);
    }

    // Prepare the Obj so that its structure is suitable for
    // rendering with OpenGL:
    // 1. Triangulate it
    // 2. Make sure that texture coordinates are not ambiguous
    // 3. Make sure that normals are not ambiguous
    // 4. Convert it to single-indexed data
    obj = ObjUtils.convertToRenderable(obj);

    // OpenGL does not use Java arrays. ByteBuffers are used instead to provide data in a format
    // that OpenGL understands.

    // Obtain the data from the OBJ, as direct buffers:
    IntBuffer wideIndices = ObjData.getFaceVertexIndices(obj, 3);
    FloatBuffer vertices = ObjData.getVertices(obj);
    FloatBuffer texCoords = ObjData.getTexCoords(obj, 2);
    FloatBuffer normals = ObjData.getNormals(obj);

    // Convert int indices to shorts for GL ES 2.0 compatibility
    ShortBuffer indices =
        ByteBuffer.allocateDirect(2 * wideIndices.limit())
            .order(ByteOrder.nativeOrder())
            .asShortBuffer();
    while (wideIndices.hasRemaining()) {
        indices.put((short) wideIndices.get());
    }
    indices.rewind();

    int[] buffers = new int[2];
    GLES20.glGenBuffers(2, buffers, 0);
    vertexBufferId = buffers[0];
    indexBufferId = buffers[1];

    // Load vertex buffer: vertices, tex coords and normals are packed into
    // one VBO, section by section.
    verticesBaseAddress = 0;
    texCoordsBaseAddress = verticesBaseAddress + 4 * vertices.limit();
    normalsBaseAddress = texCoordsBaseAddress + 4 * texCoords.limit();
    final int totalBytes = normalsBaseAddress + 4 * normals.limit();

    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
    GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, totalBytes, null, GLES20.GL_STATIC_DRAW);
    GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, verticesBaseAddress, 4 * vertices.limit(), vertices);
    GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, texCoordsBaseAddress, 4 * texCoords.limit(), texCoords);
    GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, normalsBaseAddress, 4 * normals.limit(), normals);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    // Load index buffer
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
    indexCount = indices.limit();
    GLES20.glBufferData(GLES20.GL_ELEMENT_ARRAY_BUFFER, 2 * indexCount, indices, GLES20.GL_STATIC_DRAW);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "OBJ buffer load");

    Matrix.setIdentityM(modelMatrix, 0);
}
/**
 * Selects the blending mode for rendering. This only stores the mode; no GL
 * state is touched here.
 *
 * @param blendMode The blending mode. Null indicates no blending (opaque rendering).
 */
public void setBlendMode(BlendMode blendMode) {
    this.blendMode = blendMode;
}
/**
 * Updates the object's model matrix, pre-applying a uniform scale.
 *
 * @param modelMatrix A 4x4 model-to-world transformation matrix, stored in column-major order.
 * @param scaleFactor A separate scaling factor to apply before the {@code modelMatrix}.
 * @see android.opengl.Matrix
 */
public void updateModelMatrix(float[] modelMatrix, float scaleFactor) {
    final float[] scale = new float[16];
    Matrix.setIdentityM(scale, 0);
    // Uniform scale on x, y and z; the w component stays 1.
    Matrix.scaleM(scale, 0, scaleFactor, scaleFactor, scaleFactor);
    // modelMatrix * scale, stored into this renderer's model matrix.
    Matrix.multiplyMM(this.modelMatrix, 0, modelMatrix, 0, scale, 0);
}
/**
 * Sets the surface characteristics of the rendered model. The values are
 * uploaded to the shader's u_MaterialParameters uniform by draw().
 *
 * @param ambient Intensity of non-directional surface illumination.
 * @param diffuse Diffuse (matte) surface reflectivity.
 * @param specular Specular (shiny) surface reflectivity.
 * @param specularPower Surface shininess. Larger values result in a smaller, sharper specular
 *     highlight.
 */
public void setMaterialProperties(
    float ambient, float diffuse, float specular, float specularPower) {
    this.ambient = ambient;
    this.diffuse = diffuse;
    this.specular = specular;
    this.specularPower = specularPower;
}
/**
 * Draws the model.
 *
 * @param cameraView A 4x4 view matrix, in column-major order.
 * @param cameraPerspective A 4x4 projection matrix, in column-major order.
 * @param colorCorrectionRgba Illumination intensity. Combined with diffuse and specular material properties.
 * @see #setBlendMode(BlendMode)
 * @see #updateModelMatrix(float[], float)
 * @see #setMaterialProperties(float, float, float, float)
 * @see android.opengl.Matrix
 */
public void draw(float[] cameraView, float[] cameraPerspective, float[] colorCorrectionRgba) {
    ShaderUtil.checkGLError(TAG, "Before draw");
    // Build the ModelView and ModelViewProjection matrices
    // for calculating object position and light.
    Matrix.multiplyMM(modelViewMatrix, 0, cameraView, 0, modelMatrix, 0);
    Matrix.multiplyMM(modelViewProjectionMatrix, 0, cameraPerspective, 0, modelViewMatrix, 0);
    GLES20.glUseProgram(program);
    // Set the lighting environment properties: the light direction is
    // transformed into view space and normalized before upload.
    Matrix.multiplyMV(viewLightDirection, 0, modelViewMatrix, 0, LIGHT_DIRECTION, 0);
    normalizeVec3(viewLightDirection);
    GLES20.glUniform4f(
        lightingParametersUniform,
        viewLightDirection[0],
        viewLightDirection[1],
        viewLightDirection[2],
        1.f);
    GLES20.glUniform4f(
        colorCorrectionParameterUniform,
        colorCorrectionRgba[0],
        colorCorrectionRgba[1],
        colorCorrectionRgba[2],
        colorCorrectionRgba[3]);
    // Set the object material properties.
    GLES20.glUniform4f(materialParametersUniform, ambient, diffuse, specular, specularPower);
    // Attach the object texture to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
    GLES20.glUniform1i(textureUniform, 0);
    // Set the vertex attributes, all sourced from one VBO at different offsets.
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vertexBufferId);
    GLES20.glVertexAttribPointer(
        positionAttribute, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, verticesBaseAddress);
    GLES20.glVertexAttribPointer(normalAttribute, 3, GLES20.GL_FLOAT, false, 0, normalsBaseAddress);
    GLES20.glVertexAttribPointer(
        texCoordAttribute, 2, GLES20.GL_FLOAT, false, 0, texCoordsBaseAddress);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
    // Set the ModelView and ModelViewProjection matrices in the shader.
    GLES20.glUniformMatrix4fv(modelViewUniform, 1, false, modelViewMatrix, 0);
    GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjectionMatrix, 0);
    // Enable vertex arrays.
    GLES20.glEnableVertexAttribArray(positionAttribute);
    GLES20.glEnableVertexAttribArray(normalAttribute);
    GLES20.glEnableVertexAttribArray(texCoordAttribute);
    if (blendMode != null) {
        // Blended objects must not write depth, so geometry behind them stays visible.
        GLES20.glDepthMask(false);
        GLES20.glEnable(GLES20.GL_BLEND);
        switch (blendMode) {
            case Shadow:
                // Multiplicative blending function for Shadow.
                GLES20.glBlendFunc(GLES20.GL_ZERO, GLES20.GL_ONE_MINUS_SRC_ALPHA);
                break;
            case Grid:
                // Grid: standard (non-premultiplied) alpha blending.
                GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
                break;
        }
    }
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, indexBufferId);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, indexCount, GLES20.GL_UNSIGNED_SHORT, 0);
    GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, 0);
    if (blendMode != null) {
        // Restore default state for subsequent draws.
        GLES20.glDisable(GLES20.GL_BLEND);
        GLES20.glDepthMask(true);
    }
    // Disable vertex arrays and unbind the texture.
    GLES20.glDisableVertexAttribArray(positionAttribute);
    GLES20.glDisableVertexAttribArray(normalAttribute);
    GLES20.glDisableVertexAttribArray(texCoordAttribute);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    ShaderUtil.checkGLError(TAG, "After draw");
}
/**
 * Normalizes a 3-component vector in place.
 *
 * <p>A zero-length vector is left unchanged rather than being filled with
 * NaN/Infinity by the division by zero the previous implementation performed.
 *
 * @param v the vector to normalize; only the first three components are used.
 */
private static void normalizeVec3(float[] v) {
    float length = (float) Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
    if (length == 0.0f) {
        return; // Degenerate input; there is no direction to normalize.
    }
    float reciprocalLength = 1.0f / length;
    v[0] *= reciprocalLength;
    v[1] *= reciprocalLength;
    v[2] *= reciprocalLength;
}
}
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.sagemaker.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.sagemaker.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * DescribeCompilationJobRequest Marshaller
 *
 * <p>Generated protocol marshaller: binds the {@code DescribeCompilationJob}
 * operation's static metadata (AWS-JSON protocol, POST to "/", operation
 * identifier {@code SageMaker.DescribeCompilationJob}) and delegates member
 * marshalling to {@link DescribeCompilationJobRequestMarshaller}. Generated
 * code - do not hand-edit; non-comment changes are lost on regeneration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class DescribeCompilationJobRequestProtocolMarshaller implements Marshaller<Request<DescribeCompilationJobRequest>, DescribeCompilationJobRequest> {

    // Static operation binding shared by all instances of this marshaller.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("SageMaker.DescribeCompilationJob").serviceName("AmazonSageMaker").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public DescribeCompilationJobRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given request into an HTTP request.
     *
     * @throws SdkClientException when the request is null or marshalling fails
     *         (the original exception is preserved as the cause)
     */
    public Request<DescribeCompilationJobRequest> marshall(DescribeCompilationJobRequest describeCompilationJobRequest) {
        if (describeCompilationJobRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            final ProtocolRequestMarshaller<DescribeCompilationJobRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    describeCompilationJobRequest);
            protocolMarshaller.startMarshalling();
            DescribeCompilationJobRequestMarshaller.getInstance().marshall(describeCompilationJobRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
package org.sag.acminer.phases.acminerdebug.handler;
/**
 * Marker interface for debug-output handlers.
 *
 * <p>NOTE(review): no methods are declared here; the contract is presumably
 * defined by the implementing classes - confirm against the implementations.
 */
public interface OutputHandler {
}
|
/**
* This file is part of the Harmony package.
*
* (c) Mickael Gaillard <mickael.gaillard@tactfactory.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
package com.tactfactory.harmony.command;
import java.util.LinkedHashMap;
import net.xeoh.plugins.base.annotations.PluginImplementation;
import com.tactfactory.harmony.Console;
import com.tactfactory.harmony.command.base.CommandBase;
import com.tactfactory.harmony.generator.MenuGenerator;
import com.tactfactory.harmony.platform.IAdapter;
import com.tactfactory.harmony.utils.ConsoleUtils;
/**
 * Command class for Menu generation.
 */
@PluginImplementation
public class MenuCommand extends CommandBase {
    /** Bundle name. */
    public static final String BUNDLE = "orm";
    /** Subject. */
    public static final String SUBJECT = "menu";
    /** Action crud. */
    public static final String ACTION_UPDATE = "update";
    /** Command : orm:menu:update. */
    public static final String UPDATE_MENU =
        BUNDLE + SEPARATOR + SUBJECT + SEPARATOR + ACTION_UPDATE;

    /**
     * Runs the menu-update command on every registered platform adapter.
     * Failures on one adapter are reported and do not stop the others.
     *
     * @param action the full command string (e.g. {@code orm:menu:update})
     * @param args raw command-line arguments, parsed into command args
     * @param option extra option string (unused here)
     */
    @Override
    public final void execute(
            final String action,
            final String[] args,
            final String option) {
        // NOTE(review): the banner says "ORM Generator" although this is the
        // menu command - kept as-is because it is user-facing output.
        ConsoleUtils.display("> ORM Generator");

        this.setCommandArgs(Console.parseCommandArgs(args));
        for (IAdapter adapter : this.getAdapters()) {
            try {
                // Constant-first comparison is null-safe for action.
                if (UPDATE_MENU.equals(action)) {
                    this.generateMetas(); //TODO MG : why ?
                    new MenuGenerator(adapter).updateMenu();
                }
            } catch (final Exception e) {
                ConsoleUtils.displayError(e);
            }
        }
    }

    /** Displays a summary of the commands provided by this bundle. */
    @Override
    public final void summary() {
        final LinkedHashMap<String, String> commands =
                new LinkedHashMap<String, String>();
        commands.put(UPDATE_MENU, "Update the menu");
        ConsoleUtils.displaySummary(
                "Menu",
                commands);
    }

    /**
     * @param command the command string to test; may be null
     * @return true when this class handles the given command
     */
    @Override
    public final boolean isAvailableCommand(final String command) {
        return UPDATE_MENU.equals(command);
    }
}
|
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package co.elastic.apm.agent.impl.context.web;
import co.elastic.apm.agent.impl.transaction.Outcome;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import static co.elastic.apm.agent.impl.context.web.ResultUtil.getOutcomeByHttpClientStatus;
import static co.elastic.apm.agent.impl.context.web.ResultUtil.getOutcomeByHttpServerStatus;
import static co.elastic.apm.agent.impl.context.web.ResultUtil.getResultByHttpStatus;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.SoftAssertions.assertSoftly;
/**
 * Parameterized tests for {@code ResultUtil}'s HTTP status mappings.
 *
 * <p>CSV columns: HTTP status code, expected client-side outcome, expected
 * server-side outcome, expected result string (a blank last column means a
 * null expected result, used for statuses outside the 1xx-5xx range).
 */
class ResultUtilTest {
    @ParameterizedTest
    @CsvSource({
        // status, client outcome, server outcome, result string
        "-1,FAILURE,FAILURE,",
        "0,FAILURE,FAILURE,",
        "1,FAILURE,FAILURE,",
        "99,FAILURE,FAILURE,",
        "100,SUCCESS,SUCCESS,HTTP 1xx",
        "199,SUCCESS,SUCCESS,HTTP 1xx",
        "200,SUCCESS,SUCCESS,HTTP 2xx",
        "299,SUCCESS,SUCCESS,HTTP 2xx",
        "300,SUCCESS,SUCCESS,HTTP 3xx",
        "399,SUCCESS,SUCCESS,HTTP 3xx",
        // 4xx is a client-side failure but a server-side success.
        "400,FAILURE,SUCCESS,HTTP 4xx",
        "499,FAILURE,SUCCESS,HTTP 4xx",
        "500,FAILURE,FAILURE,HTTP 5xx",
        "599,FAILURE,FAILURE,HTTP 5xx",
        "600,FAILURE,FAILURE,"
    })
    void testHttpStatus(int status, Outcome clientOutcome, Outcome serverOutcome, String expectedResult) {
        assertThat(getOutcomeByHttpClientStatus(status)).isEqualTo(clientOutcome);
        assertThat(getOutcomeByHttpServerStatus(status)).isEqualTo(serverOutcome);
        assertThat(getResultByHttpStatus(status)).isEqualTo(expectedResult);
    }
}
|
/*
* Copyright 2018-2020 adorsys GmbH & Co KG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.adorsys.psd2.xs2a.service.mapper.spi_xs2a_mappers;
import de.adorsys.psd2.xs2a.core.sca.ChallengeData;
import de.adorsys.psd2.xs2a.spi.domain.common.SpiChallengeData;
import org.mapstruct.Mapper;
/**
 * MapStruct mapper (Spring component) converting the SPI-level
 * {@link SpiChallengeData} into the XS2A core {@link ChallengeData}.
 * The implementation class is generated by MapStruct at build time.
 */
@Mapper(componentModel = "spring")
public interface SpiToXs2aChallengeDataMapper {
    /**
     * Maps an SPI challenge data object to its core representation.
     *
     * @param spiChallengeData the SPI-side challenge data
     * @return the mapped {@link ChallengeData}
     */
    ChallengeData toChallengeData(SpiChallengeData spiChallengeData);
}
|
/*
* Copyright Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags and
* the COPYRIGHT.txt file distributed with this work.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teiid.dqp.internal.datamgr;
import java.util.*;
import org.teiid.api.exception.query.FunctionExecutionException;
import org.teiid.api.exception.query.QueryMetadataException;
import org.teiid.client.metadata.ParameterInfo;
import org.teiid.common.buffer.TupleBuffer;
import org.teiid.common.buffer.TupleSource;
import org.teiid.core.CoreConstants;
import org.teiid.core.TeiidComponentException;
import org.teiid.core.TeiidProcessingException;
import org.teiid.core.TeiidRuntimeException;
import org.teiid.core.types.ArrayImpl;
import org.teiid.core.types.DataTypeManager;
import org.teiid.language.*;
import org.teiid.language.Argument.Direction;
import org.teiid.language.Comparison.Operator;
import org.teiid.language.DerivedColumn;
import org.teiid.language.Select;
import org.teiid.language.SortSpecification.NullOrdering;
import org.teiid.language.SortSpecification.Ordering;
import org.teiid.language.SubqueryComparison.Quantifier;
import org.teiid.language.WindowSpecification;
import org.teiid.metadata.BaseColumn;
import org.teiid.metadata.Column;
import org.teiid.metadata.Procedure;
import org.teiid.metadata.ProcedureParameter;
import org.teiid.metadata.Table;
import org.teiid.query.QueryPlugin;
import org.teiid.query.function.FunctionDescriptor;
import org.teiid.query.function.FunctionMethods;
import org.teiid.query.metadata.QueryMetadataInterface;
import org.teiid.query.metadata.TempMetadataID;
import org.teiid.query.optimizer.relational.rules.RulePlaceAccess;
import org.teiid.query.sql.lang.*;
import org.teiid.query.sql.lang.Command;
import org.teiid.query.sql.lang.Delete;
import org.teiid.query.sql.lang.GroupBy;
import org.teiid.query.sql.lang.Insert;
import org.teiid.query.sql.lang.Limit;
import org.teiid.query.sql.lang.OrderBy;
import org.teiid.query.sql.lang.SetClause;
import org.teiid.query.sql.lang.SetQuery;
import org.teiid.query.sql.lang.Update;
import org.teiid.query.sql.symbol.AggregateSymbol;
import org.teiid.query.sql.symbol.AliasSymbol;
import org.teiid.query.sql.symbol.Array;
import org.teiid.query.sql.symbol.Constant;
import org.teiid.query.sql.symbol.ElementSymbol;
import org.teiid.query.sql.symbol.Expression;
import org.teiid.query.sql.symbol.ExpressionSymbol;
import org.teiid.query.sql.symbol.Function;
import org.teiid.query.sql.symbol.GroupSymbol;
import org.teiid.query.sql.symbol.ScalarSubquery;
import org.teiid.query.sql.symbol.SearchedCaseExpression;
import org.teiid.query.sql.symbol.Symbol;
import org.teiid.query.sql.symbol.WindowFrame;
import org.teiid.query.sql.symbol.WindowFrame.FrameBound;
import org.teiid.query.sql.symbol.WindowFunction;
import org.teiid.query.util.CommandContext;
import org.teiid.translator.ExecutionFactory.NullOrder;
import org.teiid.translator.SourceSystemFunctions;
import org.teiid.translator.TranslatorException;
public class LanguageBridgeFactory {
/**
 * Read-only, random-access {@link List} view over a {@link TupleBuffer},
 * used to expose buffered rows (e.g. dependent-join values) without copying.
 */
private final class TupleBufferList extends AbstractList<List<?>> implements RandomAccess {
    private final TupleBuffer tb;

    private TupleBufferList(TupleBuffer tb) {
        this.tb = tb;
        // size() narrows the row count to int, so reject buffers that cannot
        // be indexed by an int.
        if (tb.getRowCount() > Integer.MAX_VALUE) {
            throw new AssertionError("TupleBuffer too large for TupleBufferList"); //$NON-NLS-1$
        }
    }

    @Override
    public List<?> get(int index) {
        if (index < 0 || index >= size()) {
            throw new IndexOutOfBoundsException(String.valueOf(index));
        }
        try {
            // +1 converts the 0-based list index to the buffer's 1-based row number.
            return tb.getBatch(index+1).getTuple(index+1);
        } catch (TeiidComponentException e) {
            throw new TeiidRuntimeException(QueryPlugin.Event.TEIID30483, e);
        }
    }

    @Override
    public int size() {
        return (int)tb.getRowCount();
    }
}
/**
 * Single-pass {@link Iterator} adapter over a {@link TupleSource}. Checked
 * processing/component exceptions are rethrown as
 * {@link TeiidRuntimeException} since {@link Iterator} cannot declare them.
 */
private final class TupleSourceIterator implements Iterator<List<?>> {
    private final TupleSource ts;
    // Look-ahead row fetched by hasNext() and handed out (then cleared) by next().
    List<?> nextRow;

    private TupleSourceIterator(TupleSource ts) {
        this.ts = ts;
    }

    @Override
    public boolean hasNext() {
        if (nextRow == null) {
            try {
                nextRow = ts.nextTuple();
            } catch (TeiidComponentException e) {
                throw new TeiidRuntimeException(QueryPlugin.Event.TEIID30484, e);
            } catch (TeiidProcessingException e) {
                throw new TeiidRuntimeException(QueryPlugin.Event.TEIID30485, e);
            }
        }
        return nextRow != null;
    }

    @Override
    public List<?> next() {
        if (nextRow == null && !hasNext()) {
            throw new NoSuchElementException();
        }
        List<?> result = nextRow;
        nextRow = null;
        return result;
    }

    @Override
    public void remove() {
        // Removal from the underlying tuple source is not supported.
        throw new UnsupportedOperationException();
    }
}
// Runtime metadata used to resolve metadata objects during translation; may be null.
private RuntimeMetadataImpl metadataFactory = null;
// Running index assigned to parameter values; reset after each translate(Command).
private int valueIndex = 0;
// Literal value rows collected for the current command; cleared after each translate(Command).
private List<List<?>> allValues = new LinkedList<List<?>>();
// Dependent-join value sets keyed by context symbol; lazily created per command.
private Map<String, List<? extends List<?>>> dependentSets;
// When true, IN predicates are rewritten as chains of comparisons.
private boolean convertIn;
// Whether the source supports the CONCAT2 form (used outside this excerpt).
private boolean supportsConcat2;
// Max values per IN predicate before it is split; <= 0 disables splitting.
private int maxInCriteriaSize;
//state to handle with name exclusion
private IdentityHashMap<Object, GroupSymbol> remappedGroups;
private String excludeWithName;
private CommandContext commandContext;
//state to handle null ordering
private boolean supportsNullOrdering;
private NullOrder sourceNullOrder;
/**
 * Creates a factory backed by the given query metadata.
 *
 * @param metadata may be null, in which case no runtime metadata is available
 */
public LanguageBridgeFactory(QueryMetadataInterface metadata) {
    if (metadata != null) {
        metadataFactory = new RuntimeMetadataImpl(metadata);
    }
}
/**
 * Creates a factory that reuses an existing runtime metadata wrapper.
 */
public LanguageBridgeFactory(RuntimeMetadataImpl metadata) {
    this.metadataFactory = metadata;
}
/**
 * When enabled, IN predicates are translated into chains of equality/inequality
 * comparisons instead of {@code In} conditions (see {@code translate(SetCriteria)}).
 */
public void setConvertIn(boolean convertIn) {
    this.convertIn = convertIn;
}
/**
 * Indicates whether the target source supports the CONCAT2 function form.
 * NOTE(review): the flag is only stored here; its consumer is outside this excerpt.
 */
public void setSupportsConcat2(boolean supportsConcat2) {
    this.supportsConcat2 = supportsConcat2;
}
/**
 * Sets the name of a WITH-clause entry that must be renamed during translation
 * to avoid a collision; see the handling in {@code translate(List<WithQueryCommand>)}.
 */
public void setExcludeWithName(String excludeWithName) {
    this.excludeWithName = excludeWithName;
}
/**
 * Translates a Teiid internal {@link Command} into its connector-API
 * ({@code org.teiid.language}) equivalent, dispatching on the concrete type.
 * Per-command translation state (collected values, dependent sets, value
 * index) is reset in the {@code finally} block so the factory can be reused.
 *
 * @param command the internal command; may be null
 * @return the translated command, or null when {@code command} is null
 */
public org.teiid.language.Command translate(Command command) {
    try {
        if (command == null) {
            return null;
        }
        if (command instanceof Query) {
            Select result = translate((Query)command);
            // Only top-level queries carry the dependent value sets.
            result.setDependentValues(this.dependentSets);
            setProjected(result);
            return result;
        } else if (command instanceof SetQuery) {
            org.teiid.language.SetQuery result = translate((SetQuery)command);
            setProjected(result);
            return result;
        } else if (command instanceof Insert) {
            return translate((Insert)command);
        } else if (command instanceof Update) {
            return translate((Update)command);
        } else if (command instanceof Delete) {
            return translate((Delete)command);
        } else if (command instanceof StoredProcedure) {
            return translate((StoredProcedure)command);
        } else if (command instanceof BatchedUpdateCommand) {
            return translate((BatchedUpdateCommand)command);
        }
        // An unknown command type indicates a programming error upstream.
        throw new AssertionError(command.getClass().getName() + " " + command); //$NON-NLS-1$
    } finally {
        // Reset per-command state so this factory instance can be reused.
        this.allValues.clear();
        this.dependentSets = null;
        this.valueIndex = 0;
    }
}
/**
 * Marks every output column of the given query expression as projected,
 * recursing through both branches of a set query.
 */
private void setProjected(QueryExpression qe) {
    if (qe instanceof Select) {
        for (DerivedColumn derived : ((Select)qe).getDerivedColumns()) {
            derived.setProjected(true);
        }
        return;
    }
    org.teiid.language.SetQuery setQuery = (org.teiid.language.SetQuery)qe;
    setProjected(setQuery.getLeftQuery());
    setProjected(setQuery.getRightQuery());
}
/**
 * Dispatches a query command to the {@code Query} or {@code SetQuery}
 * specific translation overload.
 */
QueryExpression translate(QueryCommand command) {
    return command instanceof Query
            ? translate((Query)command)
            : translate((SetQuery)command);
}
/**
 * Translates a set query (UNION/INTERSECT/EXCEPT), including its WITH clause,
 * both branch queries, ORDER BY, and LIMIT.
 */
org.teiid.language.SetQuery translate(SetQuery union) {
    org.teiid.language.SetQuery result = new org.teiid.language.SetQuery();
    result.setWith(translate(union.getWith()));
    result.setAll(union.isAll());
    switch (union.getOperation()) {
    case UNION:
        result.setOperation(org.teiid.language.SetQuery.Operation.UNION);
        break;
    case INTERSECT:
        result.setOperation(org.teiid.language.SetQuery.Operation.INTERSECT);
        break;
    case EXCEPT:
        result.setOperation(org.teiid.language.SetQuery.Operation.EXCEPT);
        break;
    }
    result.setLeftQuery(translate(union.getLeftQuery()));
    result.setRightQuery(translate(union.getRightQuery()));
    // set == true: a set-query ORDER BY may only reference output column names.
    result.setOrderBy(translate(union.getOrderBy(), true));
    result.setLimit(translate(union.getLimit()));
    return result;
}
/* Query */
/**
 * Translates a SELECT query: projected symbols (unwrapping aliases so the
 * alias is carried on the derived column), FROM clauses, WHERE, GROUP BY,
 * HAVING, ORDER BY, LIMIT, and WITH.
 */
Select translate(Query query) {
    With with = translate(query.getWith());
    List<Expression> symbols = query.getSelect().getSymbols();
    List<DerivedColumn> translatedSymbols = new ArrayList<DerivedColumn>(symbols.size());
    for (Iterator<Expression> i = symbols.iterator(); i.hasNext();) {
        Expression symbol = i.next();
        String alias = null;
        // Unwrap alias symbols; the alias becomes the derived column's name.
        if (symbol instanceof AliasSymbol) {
            alias = ((AliasSymbol)symbol).getOutputName();
            symbol = ((AliasSymbol)symbol).getSymbol();
        }
        org.teiid.language.Expression iExp = translate(symbol);
        DerivedColumn selectSymbol = new DerivedColumn(alias, iExp);
        translatedSymbols.add(selectSymbol);
    }
    List<TableReference> items = null;
    if (query.getFrom() != null) {
        List<FromClause> clauses = query.getFrom().getClauses();
        items = new ArrayList<TableReference>(clauses.size());
        for (Iterator<FromClause> i = clauses.iterator(); i.hasNext();) {
            items.add(translate(i.next()));
        }
    }
    Select q = new Select(translatedSymbols, query
            .getSelect().isDistinct(), items,
            translate(query.getCriteria()), translate(query.getGroupBy()),
            translate(query.getHaving()), translate(query.getOrderBy(), false));
    q.setLimit(translate(query.getLimit()));
    q.setWith(with);
    return q;
}
/**
 * Translates a list of WITH (common table expression) entries.
 *
 * <p>An entry whose name matches {@code excludeWithName} is renamed to avoid
 * a collision; the remapping is recorded in {@code remappedGroups} keyed by
 * metadata id. Entries without a command carry pre-computed rows, which are
 * exposed through a {@link TupleBufferList} instead of a subquery.
 *
 * @return null when {@code with} is null or empty
 */
public With translate(List<WithQueryCommand> with) {
    if (with == null || with.isEmpty()) {
        return null;
    }
    With result = new With();
    ArrayList<WithItem> items = new ArrayList<WithItem>(with.size());
    for (WithQueryCommand withQueryCommand : with) {
        WithItem item = new WithItem();
        GroupSymbol group = withQueryCommand.getGroupSymbol();
        if (withQueryCommand.getCommand() != null && excludeWithName != null && excludeWithName.equalsIgnoreCase(group.getName())) {
            // Rename the conflicting group and remember the mapping.
            group = RulePlaceAccess.recontextSymbol(withQueryCommand.getGroupSymbol(), commandContext.getGroups());
            group.setDefinition(null);
            if (remappedGroups == null) {
                remappedGroups = new IdentityHashMap<Object, GroupSymbol>();
            }
            this.remappedGroups.put(group.getMetadataID(), group);
        }
        item.setTable(translate(group));
        if (withQueryCommand.getColumns() != null) {
            List<ColumnReference> translatedElements = new ArrayList<ColumnReference>(withQueryCommand.getColumns().size());
            for (ElementSymbol es: withQueryCommand.getColumns()) {
                ColumnReference cr = translate(es);
                translatedElements.add(cr);
                if (withQueryCommand.getCommand() == null) {
                    //we want to convey the metadata to the source layer if possible
                    Object mid = es.getMetadataID();
                    if (mid instanceof TempMetadataID) {
                        TempMetadataID tid = (TempMetadataID)mid;
                        mid = tid.getOriginalMetadataID();
                    }
                    if (mid instanceof Column) {
                        cr.setMetadataObject((Column)mid);
                    }
                }
            }
            item.setColumns(translatedElements);
        }
        if (withQueryCommand.getCommand() != null) {
            item.setSubquery(translate(withQueryCommand.getCommand()));
        } else {
            // No subquery: supply the buffered rows directly as dependent values.
            item.setDependentValues(new TupleBufferList(withQueryCommand.getTupleBuffer()));
        }
        item.setRecusive(withQueryCommand.isRecursive());
        items.add(item);
    }
    result.setItems(items);
    return result;
}
/**
 * Dispatches translation of a FROM clause to the join, subquery, or unary
 * (plain table) specific overload.
 *
 * @return null when {@code clause} is null
 */
public TableReference translate(FromClause clause) {
    if (clause == null) {
        return null;
    }
    if (clause instanceof JoinPredicate) {
        return translate((JoinPredicate)clause);
    } else if (clause instanceof SubqueryFromClause) {
        return translate((SubqueryFromClause)clause);
    } else if (clause instanceof UnaryFromClause) {
        return translate((UnaryFromClause)clause);
    }
    // An unknown clause type indicates a programming error upstream.
    throw new AssertionError(clause.getClass().getName() + " " + clause); //$NON-NLS-1$
}
/**
 * Translates a join. Multiple join criteria are wrapped in a single
 * {@link CompoundCriteria}, and the internal join type is mapped to the
 * connector-API join type (defaulting to INNER for unmatched types).
 */
Join translate(JoinPredicate join) {
    List crits = join.getJoinCriteria();
    Criteria crit = null;
    if (crits.size() == 1) {
        crit = (Criteria)crits.get(0);
    } else if (crits.size() > 1) {
        // NOTE(review): CompoundCriteria's default operator is presumed to be
        // AND for combined join criteria - confirm against its constructor.
        crit = new CompoundCriteria(crits);
    }
    Join.JoinType joinType = Join.JoinType.INNER_JOIN;
    if (join.getJoinType().equals(JoinType.JOIN_INNER)) {
        joinType = Join.JoinType.INNER_JOIN;
    } else if (join.getJoinType().equals(JoinType.JOIN_LEFT_OUTER)) {
        joinType = Join.JoinType.LEFT_OUTER_JOIN;
    } else if (join.getJoinType().equals(JoinType.JOIN_RIGHT_OUTER)) {
        joinType = Join.JoinType.RIGHT_OUTER_JOIN;
    } else if (join.getJoinType().equals(JoinType.JOIN_FULL_OUTER)) {
        joinType = Join.JoinType.FULL_OUTER_JOIN;
    } else if (join.getJoinType().equals(JoinType.JOIN_CROSS)) {
        joinType = Join.JoinType.CROSS_JOIN;
    }
    return new Join(translate(join.getLeftClause()),
            translate(join.getRightClause()),
            joinType,
            translate(crit));
}
/**
 * Translates an inline view / lateral table reference. A stored-procedure
 * subquery becomes a {@link NamedProcedureCall}; any other command becomes a
 * {@link DerivedTable}.
 */
TableReference translate(SubqueryFromClause clause) {
    if (clause.getCommand() instanceof StoredProcedure) {
        NamedProcedureCall result = new NamedProcedureCall(translate((StoredProcedure)clause.getCommand()), clause.getOutputName());
        result.setLateral(clause.isLateral());
        // Mark the call as appearing in a table (FROM) position.
        result.getCall().setTableReference(true);
        return result;
    }
    DerivedTable result = new DerivedTable(translate((QueryCommand)clause.getCommand()), clause.getOutputName());
    result.setLateral(clause.isLateral());
    return result;
}
/**
 * Translates a plain table reference by delegating to the group translation.
 */
NamedTable translate(UnaryFromClause clause) {
    return translate(clause.getGroup());
}
/**
 * Dispatches translation of a criteria node to the overload for its concrete
 * type.
 *
 * @return null when {@code criteria} is null
 */
public Condition translate(Criteria criteria) {
    if (criteria == null) {
        return null;
    }
    if (criteria instanceof CompareCriteria) {
        return translate((CompareCriteria)criteria);
    } else if (criteria instanceof CompoundCriteria) {
        return translate((CompoundCriteria)criteria);
    } else if (criteria instanceof ExistsCriteria) {
        return translate((ExistsCriteria)criteria);
    } else if (criteria instanceof IsNullCriteria) {
        return translate((IsNullCriteria)criteria);
    } else if (criteria instanceof MatchCriteria) {
        return translate((MatchCriteria)criteria);
    } else if (criteria instanceof NotCriteria) {
        return translate((NotCriteria)criteria);
    } else if (criteria instanceof SetCriteria) {
        return translate((SetCriteria)criteria);
    } else if (criteria instanceof SubqueryCompareCriteria) {
        return translate((SubqueryCompareCriteria)criteria);
    } else if (criteria instanceof SubquerySetCriteria) {
        return translate((SubquerySetCriteria)criteria);
    } else if (criteria instanceof DependentSetCriteria) {
        return translate((DependentSetCriteria)criteria);
    } else if (criteria instanceof IsDistinctCriteria) {
        return translate((IsDistinctCriteria)criteria);
    }
    // An unknown criteria type indicates a programming error upstream.
    throw new AssertionError(criteria.getClass().getName() + " " + criteria); //$NON-NLS-1$
}
/**
 * Translates an IS [NOT] DISTINCT FROM predicate over two row values.
 */
org.teiid.language.IsDistinct translate(IsDistinctCriteria criteria) {
    return new IsDistinct(translate((Expression) criteria.getLeftRowValue()),
            translate((Expression)criteria.getRightRowValue()), criteria.isNegated());
}
/**
 * Translates a dependent-join set criteria into an equality
 * {@link Comparison} against {@link Parameter} placeholders, and registers
 * the buffered dependent values under the criteria's context symbol so the
 * connector can iterate them.
 */
org.teiid.language.Comparison translate(DependentSetCriteria criteria) {
    Operator operator = Operator.EQ;
    org.teiid.language.Expression arg = null;
    final TupleBuffer tb = criteria.getDependentValueSource().getTupleBuffer();
    if (criteria.getValueExpression() instanceof Array) {
        // Multi-column dependent join: build an array of parameters, widening
        // the component type to OBJECT when the columns' types differ.
        Array array = (Array)criteria.getValueExpression();
        List<org.teiid.language.Expression> params = new ArrayList<org.teiid.language.Expression>();
        Class<?> baseType = null;
        for (Expression ex : array.getExpressions()) {
            if (baseType == null) {
                baseType = ex.getType();
            } else if (!baseType.equals(ex.getType())) {
                baseType = DataTypeManager.DefaultDataClasses.OBJECT;
            }
            params.add(createParameter(criteria, tb, ex));
        }
        arg = new org.teiid.language.Array(baseType, params);
    } else {
        Expression ex = criteria.getValueExpression();
        arg = createParameter(criteria, tb, ex);
    }
    if (this.dependentSets == null) {
        this.dependentSets = new HashMap<String, List<? extends List<?>>>();
    }
    this.dependentSets.put(criteria.getContextSymbol(), new TupleBufferList(tb));
    Comparison result = new org.teiid.language.Comparison(translate(criteria.getExpression()),
            arg, operator);
    return result;
}
/**
 * Creates a {@link Parameter} placeholder bound to one column of the
 * dependent value buffer, keyed by the criteria's context symbol.
 */
private Parameter createParameter(DependentSetCriteria criteria,
        final TupleBuffer tb, Expression ex) {
    Parameter p = new Parameter();
    p.setType(ex.getType());
    // The parameter references the expression's position in the buffer schema.
    p.setValueIndex(tb.getSchema().indexOf(ex));
    p.setDependentValueId(criteria.getContextSymbol());
    return p;
}
/**
 * Translates a binary comparison, mapping the internal int operator code to
 * the connector-API {@link Operator}. An unmatched code falls through as EQ.
 */
org.teiid.language.Comparison translate(CompareCriteria criteria) {
    Operator operator = Operator.EQ;
    switch(criteria.getOperator()) {
    case CompareCriteria.EQ:
        operator = Operator.EQ;
        break;
    case CompareCriteria.NE:
        operator = Operator.NE;
        break;
    case CompareCriteria.LT:
        operator = Operator.LT;
        break;
    case CompareCriteria.LE:
        operator = Operator.LE;
        break;
    case CompareCriteria.GT:
        operator = Operator.GT;
        break;
    case CompareCriteria.GE:
        operator = Operator.GE;
        break;
    }
    return new org.teiid.language.Comparison(translate(criteria.getLeftExpression()),
            translate(criteria.getRightExpression()), operator);
}
/**
 * Translates an AND/OR criteria chain into a right-associated {@link AndOr}
 * tree, seeding with the last two members and folding backwards.
 *
 * <p>NOTE(review): assumes at least two nested criteria; fewer would throw an
 * IndexOutOfBoundsException here - confirm callers guarantee this.
 */
AndOr translate(CompoundCriteria criteria) {
    List nestedCriteria = criteria.getCriteria();
    int size = nestedCriteria.size();
    AndOr.Operator op = criteria.getOperator() == CompoundCriteria.AND?AndOr.Operator.AND:AndOr.Operator.OR;
    AndOr result = new AndOr(translate((Criteria)nestedCriteria.get(size - 2)), translate((Criteria)nestedCriteria.get(size - 1)), op);
    for (int i = nestedCriteria.size() - 3; i >= 0; i--) {
        result = new AndOr(translate((Criteria)nestedCriteria.get(i)), result, op);
    }
    return result;
}
/**
 * Translates an EXISTS predicate, wrapping the result in {@code Not}
 * when the criteria is negated.
 */
Condition translate(ExistsCriteria criteria) {
    Condition translated = new Exists(translate(criteria.getCommand()));
    if (criteria.isNegated()) {
        translated = new Not(translated);
    }
    return translated;
}
/**
 * Translates an IS [NOT] NULL predicate.
 */
IsNull translate(IsNullCriteria criteria) {
    return new IsNull(translate(criteria.getExpression()), criteria.isNegated());
}
/**
 * Translates a LIKE/MATCH predicate, carrying over the escape character
 * (when one is defined), the negation flag, and the match mode.
 */
Like translate(MatchCriteria criteria) {
    Character escapeChar = null;
    if (criteria.getEscapeChar() != MatchCriteria.NULL_ESCAPE_CHAR) {
        // Character.valueOf uses the cached boxing path; the Character(char)
        // constructor is deprecated (for removal in recent JDKs).
        escapeChar = Character.valueOf(criteria.getEscapeChar());
    }
    Like like = new Like(translate(criteria.getLeftExpression()),
            translate(criteria.getRightExpression()),
            escapeChar,
            criteria.isNegated());
    like.setMode(criteria.getMode());
    return like;
}
/**
 * Translates an IN predicate. Depending on configuration the result is:
 * a chain of comparisons (OR of =, or AND of &lt;&gt; when negated) when
 * {@code convertIn} is set; several smaller {@link In} conditions combined
 * when the value count exceeds {@code maxInCriteriaSize}; otherwise a single
 * {@link In} condition.
 */
Condition translate(SetCriteria criteria) {
    Collection expressions = criteria.getValues();
    List<org.teiid.language.Expression> translatedExpressions = translateExpressionList(expressions);
    org.teiid.language.Expression expr = translate(criteria.getExpression());
    if (convertIn) {
        // Rewrite as expr = v1 OR expr = v2 ... (AND of <> when negated).
        Condition condition = null;
        for (org.teiid.language.Expression expression : translatedExpressions) {
            if (condition == null) {
                condition = new Comparison(expr, expression, criteria.isNegated()?Operator.NE:Operator.EQ);
            } else {
                condition = new AndOr(new Comparison(expr, expression, criteria.isNegated()?Operator.NE:Operator.EQ), condition, criteria.isNegated()?AndOr.Operator.AND:AndOr.Operator.OR);
            }
        }
        return condition;
    }
    if (maxInCriteriaSize > 0 && translatedExpressions.size() > maxInCriteriaSize) {
        // Split an oversized IN list into ceil(n / maxInCriteriaSize) chunks.
        Condition condition = null;
        int count = translatedExpressions.size()/maxInCriteriaSize + ((translatedExpressions.size()%maxInCriteriaSize!=0)?1:0);
        for (int i = 0; i < count; i++) {
            List<org.teiid.language.Expression> subList = translatedExpressions.subList(maxInCriteriaSize*i, Math.min(translatedExpressions.size(), maxInCriteriaSize*(i+1)));
            List<org.teiid.language.Expression> translatedExpressionsSubList = new ArrayList<org.teiid.language.Expression>(subList);
            if (condition == null) {
                condition = new In(expr, translatedExpressionsSubList, criteria.isNegated());
            } else {
                condition = new AndOr(condition, new In(expr, translatedExpressionsSubList, criteria.isNegated()), criteria.isNegated()?AndOr.Operator.AND:AndOr.Operator.OR);
            }
        }
        return condition;
    }
    return new In(expr,
            translatedExpressions,
            criteria.isNegated());
}
/**
 * Translates a quantified subquery comparison (e.g. {@code > ALL (...)}).
 * ANY and SOME both map to the SOME quantifier; operator codes map to their
 * connector-API equivalents (unmatched codes fall through as EQ).
 */
SubqueryComparison translate(SubqueryCompareCriteria criteria) {
    Quantifier quantifier = Quantifier.ALL;
    switch(criteria.getPredicateQuantifier()) {
    case SubqueryCompareCriteria.ALL:
        quantifier = Quantifier.ALL;
        break;
    case SubqueryCompareCriteria.ANY:
        quantifier = Quantifier.SOME;
        break;
    case SubqueryCompareCriteria.SOME:
        quantifier = Quantifier.SOME;
        break;
    }
    Operator operator = Operator.EQ;
    switch(criteria.getOperator()) {
    case SubqueryCompareCriteria.EQ:
        operator = Operator.EQ;
        break;
    case SubqueryCompareCriteria.NE:
        operator = Operator.NE;
        break;
    case SubqueryCompareCriteria.LT:
        operator = Operator.LT;
        break;
    case SubqueryCompareCriteria.LE:
        operator = Operator.LE;
        break;
    case SubqueryCompareCriteria.GT:
        operator = Operator.GT;
        break;
    case SubqueryCompareCriteria.GE:
        operator = Operator.GE;
        break;
    }
    return new SubqueryComparison(translate(criteria.getLeftExpression()),
            operator,
            quantifier,
            translate(criteria.getCommand()));
}
/**
 * Translates an IN-subquery predicate.
 */
SubqueryIn translate(SubquerySetCriteria criteria) {
    return new SubqueryIn(translate(criteria.getExpression()),
            criteria.isNegated(),
            translate(criteria.getCommand()));
}
/**
 * Translates a NOT by wrapping the translated inner criteria.
 */
Not translate(NotCriteria criteria) {
    return new Not(translate(criteria.getCriteria()));
}
/**
 * Translates a GROUP BY clause, preserving the rollup flag.
 *
 * @return null when {@code groupBy} is null
 */
public org.teiid.language.GroupBy translate(GroupBy groupBy) {
    if (groupBy == null) {
        return null;
    }
    List items = groupBy.getSymbols();
    List<org.teiid.language.Expression> translatedItems = new ArrayList<org.teiid.language.Expression>();
    for (Iterator i = items.iterator(); i.hasNext();) {
        translatedItems.add(translate((Expression)i.next()));
    }
    org.teiid.language.GroupBy result = new org.teiid.language.GroupBy(translatedItems);
    result.setRollup(groupBy.isRollup());
    return result;
}
/**
 * Converts a planner ORDER BY clause into the language form.
 *
 * @param orderBy the planner clause; may be {@code null}
 * @param set true when ordering a set operation (UNION etc.), in which case
 *        items must be referenced by projected output column name rather than
 *        by the underlying expression
 * @return the translated ORDER BY, or {@code null} when there is none
 */
public org.teiid.language.OrderBy translate(OrderBy orderBy, boolean set) {
    if (orderBy == null) {
        return null;
    }
    List<OrderByItem> items = orderBy.getOrderByItems();
    List<SortSpecification> translatedItems = new ArrayList<SortSpecification>();
    for (int i = 0; i < items.size(); i++) {
        Expression symbol = items.get(i).getSymbol();
        Ordering direction = items.get(i).isAscending() ? Ordering.ASC: Ordering.DESC;
        SortSpecification orderByItem = null;
        // Unrelated items and plain element references can be translated
        // directly as expressions; otherwise sort by the projected output
        // column name.
        if(!set && (items.get(i).isUnrelated() || symbol instanceof ElementSymbol)){
            orderByItem = new SortSpecification(direction, translate(symbol));
        } else {
            orderByItem = new SortSpecification(direction, new ColumnReference(null, Symbol.getShortName(((Symbol)symbol).getOutputName()), null, symbol.getType()));
        }
        orderByItem.setNullOrdering(items.get(i).getNullOrdering());
        translatedItems.add(orderByItem);
    }
    org.teiid.language.OrderBy result = new org.teiid.language.OrderBy(translatedItems);
    // For user-specified orderings, reconcile explicit/implicit null ordering
    // with what the source supports and the engine's default null order.
    if (orderBy.isUserOrdering() && commandContext != null) {
        NullOrder teiidNullOrder = commandContext.getOptions().getDefaultNullOrder();
        if (!supportsNullOrdering
                || (sourceNullOrder != teiidNullOrder && commandContext.getOptions().isPushdownDefaultNullOrder())) {
            correctNullOrdering(result,
                    supportsNullOrdering, sourceNullOrder, commandContext.getOptions().getDefaultNullOrder());
        }
    }
    return result;
}
/* Expressions */
/**
 * Central expression dispatch: routes a planner {@link Expression} to the
 * matching translate overload based on its concrete runtime type.
 *
 * NOTE(review): the instanceof order appears significant — AggregateSymbol is
 * tested before Function, presumably because it is (or may be) a Function
 * subtype; confirm before reordering these branches.
 *
 * @param expr the planner expression; may be {@code null}
 * @return the translated expression, or {@code null} for {@code null} input
 * @throws AssertionError for an expression type with no translation rule
 */
public org.teiid.language.Expression translate(Expression expr) {
    if (expr == null) {
        return null;
    }
    if (expr instanceof Constant) {
        return translate((Constant)expr);
    } else if (expr instanceof AggregateSymbol) {
        return translate((AggregateSymbol)expr);
    } else if (expr instanceof Function) {
        return translate((Function)expr);
    } else if (expr instanceof ScalarSubquery) {
        return translate((ScalarSubquery)expr);
    } else if (expr instanceof SearchedCaseExpression) {
        return translate((SearchedCaseExpression)expr);
    } else if (expr instanceof ElementSymbol) {
        return translate((ElementSymbol)expr);
    } else if (expr instanceof ExpressionSymbol) {
        return translate((ExpressionSymbol)expr);
    } else if (expr instanceof Criteria) {
        // A criteria used in expression position (a boolean-valued value) is
        // flagged so the language object renders as an expression.
        Condition c = translate((Criteria)expr);
        c.setExpression(true);
        return c;
    } else if (expr instanceof WindowFunction) {
        return translate((WindowFunction)expr);
    } else if (expr instanceof Array) {
        return translate((Array)expr);
    }
    throw new AssertionError(expr.getClass().getName() + " " + expr); //$NON-NLS-1$
}
/** Converts an array constructor expression, translating each component. */
org.teiid.language.Array translate(Array array) {
    ArrayList<org.teiid.language.Expression> components = translateExpressionList(array.getExpressions());
    return new org.teiid.language.Array(array.getComponentType(), components);
}
/**
 * Converts a window (analytic) function along with its OVER clause:
 * ordering, optional partitioning, and optional frame.
 *
 * The order of the translate calls (function, then ORDER BY, then partition,
 * then frame bounds) matches the original, since translation may record
 * multi-valued constant state as a side effect.
 */
org.teiid.language.WindowFunction translate(WindowFunction windowFunction) {
    org.teiid.language.WindowFunction translated = new org.teiid.language.WindowFunction();
    translated.setFunction(translate(windowFunction.getFunction()));
    WindowSpecification spec = new WindowSpecification();
    spec.setOrderBy(translate(windowFunction.getWindowSpecification().getOrderBy(), false));
    List<Expression> partition = windowFunction.getWindowSpecification().getPartition();
    if (partition != null) {
        spec.setPartition(translateExpressionList(partition));
    }
    WindowFrame frame = windowFunction.getWindowSpecification().getWindowFrame();
    if (frame != null) {
        org.teiid.language.WindowFrame translatedFrame = new org.teiid.language.WindowFrame(frame.getMode());
        translatedFrame.setStart(translate(frame.getStart()));
        translatedFrame.setEnd(translate(frame.getEnd()));
        spec.setWindowFrame(translatedFrame);
    }
    translated.setWindowSpecification(spec);
    return translated;
}
/** Converts a window frame bound; returns {@code null} when none is present. */
org.teiid.language.WindowFrame.FrameBound translate(FrameBound frameBound) {
    if (frameBound != null) {
        org.teiid.language.WindowFrame.FrameBound translated =
                new org.teiid.language.WindowFrame.FrameBound(frameBound.getBoundMode());
        translated.setBound(frameBound.getBound());
        return translated;
    }
    return null;
}
/** Translates every expression in the given collection, preserving order. */
private ArrayList<org.teiid.language.Expression> translateExpressionList(
        Collection<? extends Expression> list) {
    ArrayList<org.teiid.language.Expression> translated =
            new ArrayList<org.teiid.language.Expression>(list.size());
    for (Iterator<? extends Expression> iter = list.iterator(); iter.hasNext();) {
        translated.add(translate(iter.next()));
    }
    return translated;
}
/**
 * Converts a planner constant. Three cases:
 * - a multi-valued (bulk/prepared) constant becomes a positional Parameter,
 *   with its value list recorded for later batch attachment;
 * - an array value is expanded into a language Array of per-element
 *   constants (see the inline TODO for the single-array alternative);
 * - everything else becomes a simple Literal.
 */
org.teiid.language.Expression translate(Constant constant) {
    if (constant.isMultiValued()) {
        // Record the column of values and return a placeholder Parameter;
        // setBatchValues later pivots allValues into row-oriented batches.
        Parameter result = new Parameter();
        result.setType(constant.getType());
        final List<?> values = (List<?>)constant.getValue();
        allValues.add(values);
        result.setValueIndex(valueIndex++);
        return result;
    }
    if (constant.getValue() instanceof ArrayImpl) {
        //TODO: we could check if there is a common base type (also needs to be in the dependent logic)
        // and expand binding options in the translators
        //we currently support the notion of a mixed type array, since we consider object a common base type
        //that will not work for all sources, so instead of treating this as a single array (as commented out below),
        //we just turn it into an array of parameters
        //Literal result = new Literal(av.getValues(), org.teiid.language.Array.class);
        //result.setBindEligible(constant.isBindEligible());
        //return result;
        ArrayImpl av = (ArrayImpl)constant.getValue();
        List<Constant> vals = new ArrayList<Constant>();
        Class<?> baseType = null;
        for (Object o : av.getValues()) {
            Constant c = new Constant(o);
            c.setBindEligible(constant.isBindEligible());
            vals.add(c);
            // Track a common component type; mixed types degrade to OBJECT.
            if (baseType == null) {
                baseType = c.getType();
            } else if (!baseType.equals(c.getType())) {
                baseType = DataTypeManager.DefaultDataClasses.OBJECT;
            }
        }
        return new org.teiid.language.Array(baseType, translateExpressionList(vals));
    }
    Literal result = new Literal(constant.getValue(), constant.getType());
    result.setBindEligible(constant.isBindEligible());
    return result;
}
/**
 * Converts a scalar function, applying several pushdown rewrites first:
 * CONCAT2 emulation for sources without native support, TIMESTAMPADD
 * interval narrowing (TEIID-5406), and name-in-source substitution for
 * pushdown functions.
 */
org.teiid.language.Expression translate(Function function) {
    Expression [] args = function.getArgs();
    List<org.teiid.language.Expression> params = new ArrayList<org.teiid.language.Expression>(args.length);
    for (int i = 0; i < args.length; i++) {
        params.add(translate(args[i]));
    }
    String name = function.getName();
    if (function.getFunctionDescriptor() != null) {
        name = function.getFunctionDescriptor().getName();
        // Emulate the system CONCAT2 (null-preserving concat) when the source
        // lacks it: wrap non-constant args in IFNULL(arg, '') and, unless a
        // constant arg makes it unnecessary, wrap the whole thing in a
        // searched CASE that yields null when both args are null.
        if (!supportsConcat2 && function.getFunctionDescriptor().getMethod().getParent() == null && name.equalsIgnoreCase(SourceSystemFunctions.CONCAT2)) {
            Expression[] newArgs = new Expression[args.length];
            boolean useCase = true;
            for(int i=0; i<args.length; i++) {
                if (args[i] instanceof Constant) {
                    // A non-null constant arg means the result can never be
                    // all-null, so the CASE wrapper is not needed.
                    newArgs[i] = args[i];
                    useCase = false;
                } else {
                    Function f = new Function(SourceSystemFunctions.IFNULL, new Expression[] {args[i], new Constant("")}); //$NON-NLS-1$
                    newArgs[i] = f;
                    f.setType(args[i].getType());
                    FunctionDescriptor descriptor =
                        metadataFactory.getMetadata().getFunctionLibrary().findFunction(SourceSystemFunctions.IFNULL, new Class[] { args[i].getType(), DataTypeManager.DefaultDataClasses.STRING });
                    f.setFunctionDescriptor(descriptor);
                }
            }
            Function concat = new Function(SourceSystemFunctions.CONCAT, newArgs);
            concat.setType(DataTypeManager.DefaultDataClasses.STRING);
            if (!useCase) {
                return translate(concat);
            }
            FunctionDescriptor descriptor =
                metadataFactory.getMetadata().getFunctionLibrary().findFunction(SourceSystemFunctions.CONCAT, new Class[] { DataTypeManager.DefaultDataClasses.STRING, DataTypeManager.DefaultDataClasses.STRING });
            concat.setFunctionDescriptor(descriptor);
            List<CompoundCriteria> when = Arrays.asList(new CompoundCriteria(CompoundCriteria.AND, new IsNullCriteria(args[0]), new IsNullCriteria(args[1])));
            Constant nullConstant = new Constant(null, DataTypeManager.DefaultDataClasses.STRING);
            List<Constant> then = Arrays.asList(nullConstant);
            SearchedCaseExpression caseExpr = new SearchedCaseExpression(when, then);
            caseExpr.setElseExpression(concat);
            caseExpr.setType(DataTypeManager.DefaultDataClasses.STRING);
            return translate(caseExpr);
        }
        // TIMESTAMPADD with a long interval: narrow to integer for sources
        // that only accept the integer form.
        if (function.getFunctionDescriptor().getMethod().getParent() == null && name.equalsIgnoreCase(SourceSystemFunctions.TIMESTAMPADD)
                && function.getArg(1).getType() == DataTypeManager.DefaultDataClasses.LONG) {
            //TEIID-5406 only allow integer literal pushdown for backwards compatibility
            if (params.get(1) instanceof Literal) {
                try {
                    params.set(1, new Literal(FunctionMethods.integerRangeCheck((Long)((Literal)params.get(1)).getValue()), DataTypeManager.DefaultDataClasses.INTEGER));
                } catch (FunctionExecutionException e) {
                    //corner case - for now we'll just throw an exception, but we could also prevent pushdown
                    throw new TeiidRuntimeException(QueryPlugin.Event.TEIID31275, e);
                }
            } else {
                //cast - will be supported by the check in CriteriaCapabilityValidatorVisitor
                params.set(1, new org.teiid.language.Function(SourceSystemFunctions.CONVERT,
                        Arrays.asList(params.get(1), new Literal(DataTypeManager.DefaultDataTypes.INTEGER, DataTypeManager.DefaultDataClasses.STRING)), DataTypeManager.DefaultDataClasses.INTEGER));
            }
        }
        //check for translator pushdown functions, and use the name in source if possible
        if (function.getFunctionDescriptor().getMethod().getNameInSource() != null &&
                (CoreConstants.SYSTEM_MODEL.equals(function.getFunctionDescriptor().getSchema())
                || (function.getFunctionDescriptor().getMethod().getParent() != null && function.getFunctionDescriptor().getMethod().getParent().isPhysical())) ) {
            name = function.getFunctionDescriptor().getMethod().getNameInSource();
        }
    } else {
        name = Symbol.getShortName(name);
    }
    //if there is any ambiguity in the function name it will be up to the translator logic to check the
    //metadata
    org.teiid.language.Function result = new org.teiid.language.Function(name, params, function.getType());
    if (function.getFunctionDescriptor() != null) {
        result.setMetadataObject(function.getFunctionDescriptor().getMethod());
    }
    return result;
}
/** Converts a searched CASE expression, translating each WHEN/THEN pair. */
SearchedCase translate(SearchedCaseExpression expr) {
    int whenCount = expr.getWhenCount();
    ArrayList<SearchedWhenClause> whens = new ArrayList<SearchedWhenClause>(whenCount);
    for (int index = 0; index < whenCount; index++) {
        whens.add(new SearchedWhenClause(translate(expr.getWhenCriteria(index)),
                translate(expr.getThenExpression(index))));
    }
    return new SearchedCase(whens, translate(expr.getElseExpression()), expr.getType());
}
/** Converts a scalar subquery expression by translating the contained command. */
org.teiid.language.Expression translate(ScalarSubquery ss) {
    return new org.teiid.language.ScalarSubquery(translate(ss.getCommand()));
}
/**
 * Converts an aliased symbol by translating the underlying symbol; the alias
 * itself is not carried on the returned expression.
 */
org.teiid.language.Expression translate(AliasSymbol symbol) {
    return translate(symbol.getSymbol());
}
/**
 * Converts a column reference, attaching the column's metadata object when it
 * can be resolved through the metadata factory.
 */
ColumnReference translate(ElementSymbol symbol) {
    ColumnReference element = new ColumnReference(translate(symbol.getGroupSymbol()), Symbol.getShortName(symbol.getOutputName()), null, symbol.getType());
    if (element.getTable().getMetadataObject() == null) {
        //handle procedure resultset columns
        if (symbol.getMetadataID() instanceof TempMetadataID) {
            TempMetadataID tid = (TempMetadataID)symbol.getMetadataID();
            // Only resolve back to the original column when it is not a plain
            // table column (i.e. it came from a procedure result set).
            if (tid.getOriginalMetadataID() instanceof Column && !(((Column)tid.getOriginalMetadataID()).getParent() instanceof Table)) {
                element.setMetadataObject(metadataFactory.getElement(tid.getOriginalMetadataID()));
            }
        }
        return element;
    }
    Object mid = symbol.getMetadataID();
    element.setMetadataObject(metadataFactory.getElement(mid));
    return element;
}
/**
 * Converts an aggregate function reference, resolving the source-level name
 * and carrying over DISTINCT, the filter condition, and any ORDER BY.
 */
AggregateFunction translate(AggregateSymbol symbol) {
    List<org.teiid.language.Expression> arguments =
            new ArrayList<org.teiid.language.Expression>(symbol.getArgs().length);
    for (Expression argument : symbol.getArgs()) {
        arguments.add(translate(argument));
    }
    // Prefer the resolved descriptor's (short) name; user-defined aggregates
    // without a descriptor keep their declared name; otherwise use the
    // built-in aggregate type name.
    String functionName;
    if (symbol.getFunctionDescriptor() != null) {
        functionName = Symbol.getShortName(symbol.getFunctionDescriptor().getName());
    } else if (symbol.getAggregateFunction() == AggregateSymbol.Type.USER_DEFINED) {
        functionName = symbol.getName();
    } else {
        functionName = symbol.getAggregateFunction().name();
    }
    AggregateFunction translated = new AggregateFunction(functionName,
            symbol.isDistinct(),
            arguments,
            symbol.getType());
    translated.setCondition(translate(symbol.getCondition()));
    translated.setOrderBy(translate(symbol.getOrderBy(), false));
    if (symbol.getFunctionDescriptor() != null) {
        translated.setMetadataObject(symbol.getFunctionDescriptor().getMethod());
    }
    return translated;
}
/** Converts an expression symbol by translating the wrapped expression. */
org.teiid.language.Expression translate(ExpressionSymbol symbol) {
    return translate(symbol.getExpression());
}
/* Insert */
/**
 * Converts an INSERT command. The value source takes one of three forms:
 * a query expression (INSERT ... SELECT), a tuple source (streamed batch
 * insert bound through positional Parameters), or a single literal row.
 */
org.teiid.language.Insert translate(Insert insert) {
    List<ElementSymbol> elements = insert.getVariables();
    List<ColumnReference> translatedElements = new ArrayList<ColumnReference>();
    for (ElementSymbol elementSymbol : elements) {
        translatedElements.add(translate(elementSymbol));
    }
    Iterator<List<?>> parameterValues = null;
    InsertValueSource valueSource = null;
    if (insert.getQueryExpression() != null) {
        // INSERT ... SELECT: the translated query supplies the values.
        valueSource = translate(insert.getQueryExpression());
    } else if (insert.getTupleSource() != null) {
        // Batched insert: expose one positional Parameter per column and
        // stream the rows from the tuple source.
        final TupleSource ts = insert.getTupleSource();
        parameterValues = new TupleSourceIterator(ts);
        List<org.teiid.language.Expression> translatedValues = new ArrayList<org.teiid.language.Expression>();
        for (int i = 0; i < insert.getVariables().size(); i++) {
            ElementSymbol es = insert.getVariables().get(i);
            Parameter param = new Parameter();
            param.setType(es.getType());
            param.setValueIndex(i);
            translatedValues.add(param);
        }
        valueSource = new ExpressionValueSource(translatedValues);
    } else {
        // This is for the simple one row insert.
        List values = insert.getValues();
        List<org.teiid.language.Expression> translatedValues = new ArrayList<org.teiid.language.Expression>();
        for (Iterator i = values.iterator(); i.hasNext();) {
            translatedValues.add(translate((Expression)i.next()));
        }
        valueSource = new ExpressionValueSource(translatedValues);
    }
    org.teiid.language.Insert result = new org.teiid.language.Insert(translate(insert.getGroup()),
            translatedElements,
            valueSource);
    result.setParameterValues(parameterValues);
    // Attach rows gathered from any multi-valued constants (prepared batch).
    setBatchValues(result);
    result.setUpsert(insert.isUpsert());
    return result;
}
/**
 * If any multi-valued constants were encountered while translating this
 * command, pivots the column-oriented value lists collected in
 * {@code allValues} into row-oriented batches on the command.
 *
 * @throws IllegalStateException if the command already has parameter values
 */
private void setBatchValues(BulkCommand bc) {
    if (valueIndex == 0) {
        // No multi-valued constants were translated; nothing to attach.
        return;
    }
    if (bc.getParameterValues() != null) {
        throw new IllegalStateException("Already set batch values"); //$NON-NLS-1$
    }
    // assumes every list in allValues has the same length (one entry per
    // row) — TODO confirm against the multi-valued constant producer
    int rowCount = allValues.get(0).size();
    List<List<?>> result = new ArrayList<List<?>>(rowCount);
    for (int i = 0; i < rowCount; i++) {
        List<Object> row = new ArrayList<Object>(allValues.size());
        for (List<?> vals : allValues) {
            row.add(vals.get(i));
        }
        result.add(row);
    }
    bc.setParameterValues(result.iterator());
}
/* Update */
/** Converts an UPDATE command, then attaches any batched parameter values. */
org.teiid.language.Update translate(Update update) {
    NamedTable table = translate(update.getGroup());
    List<org.teiid.language.SetClause> changes = translate(update.getChangeList());
    Condition where = translate(update.getCriteria());
    org.teiid.language.Update translated = new org.teiid.language.Update(table, changes, where);
    setBatchValues(translated);
    return translated;
}
/** Translates each SET clause of an UPDATE, preserving clause order. */
List<org.teiid.language.SetClause> translate(SetClauseList setClauseList) {
    List<org.teiid.language.SetClause> clauses = new ArrayList<org.teiid.language.SetClause>(setClauseList.getClauses().size());
    for (SetClause setClause : setClauseList.getClauses()) {
        clauses.add(translate(setClause));
    }
    return clauses;
}
/** Translates a single SET clause: target column and new value expression. */
org.teiid.language.SetClause translate(SetClause setClause) {
    return new org.teiid.language.SetClause(translate(setClause.getSymbol()), translate(setClause.getValue()));
}
/* Delete */
/** Converts a DELETE command, then attaches any batched parameter values. */
org.teiid.language.Delete translate(Delete delete) {
    NamedTable table = translate(delete.getGroup());
    Condition where = translate(delete.getCriteria());
    org.teiid.language.Delete translated = new org.teiid.language.Delete(table, where);
    setBatchValues(translated);
    return translated;
}
/* Execute */
/**
 * Converts a stored procedure execution into a Call, mapping parameter
 * directions and values. RESULT_SET parameters are skipped (covered by
 * metadata) and RETURN_VALUE only contributes the return type.
 */
Call translate(StoredProcedure sp) {
    Procedure proc = null;
    if(sp.getProcedureID() != null) {
        try {
            proc = this.metadataFactory.getProcedure(sp.getGroup().getName());
        } catch (TranslatorException e) {
            throw new TeiidRuntimeException(QueryPlugin.Event.TEIID30486, e);
        }
    }
    Class<?> returnType = null;
    List<Argument> translatedParameters = new ArrayList<Argument>();
    for (SPParameter param : sp.getParameters()) {
        Direction direction = Direction.IN;
        switch(param.getParameterType()) {
            case ParameterInfo.IN:
                direction = Direction.IN;
                break;
            case ParameterInfo.INOUT:
                direction = Direction.INOUT;
                break;
            case ParameterInfo.OUT:
                direction = Direction.OUT;
                break;
            case ParameterInfo.RESULT_SET:
                continue; //already part of the metadata
            case ParameterInfo.RETURN_VALUE:
                returnType = param.getClassType();
                continue;
        }
        // Parameters left at an OMIT-handled default are not passed to the
        // source at all.
        if (param.isUsingDefault() && BaseColumn.OMIT_DEFAULT.equalsIgnoreCase(metadataFactory.getMetadata().getExtensionProperty(param.getMetadataID(), BaseColumn.DEFAULT_HANDLING, false))) {
            continue;
        }
        ProcedureParameter metadataParam = metadataFactory.getParameter(param);
        //we can assume for now that all arguments will be literals, which may be multivalued
        org.teiid.language.Expression value = null;
        if (direction != Direction.OUT) {
            if (param.isVarArg()) {
                // Expand the vararg array into one Argument per element.
                ArrayImpl av = (ArrayImpl) ((Constant)param.getExpression()).getValue();
                if (av != null) {
                    for (Object obj : av.getValues()) {
                        Argument arg = new Argument(direction, new Literal(obj, param.getClassType().getComponentType()), param.getClassType().getComponentType(), metadataParam);
                        translatedParameters.add(arg);
                    }
                }
                // NOTE(review): this break exits the whole parameter loop —
                // presumably safe because a vararg must be the final
                // parameter; confirm before relying on it.
                break;
            }
            value = translate(param.getExpression());
        }
        Argument arg = new Argument(direction, value, param.getClassType(), metadataParam);
        translatedParameters.add(arg);
    }
    Call call = new Call(removeSchemaName(sp.getProcedureName()), translatedParameters, proc);
    call.setReturnType(returnType);
    return call;
}
/**
 * Converts a group (table) reference to a NamedTable, removing the model
 * (schema) prefix and resolving the metadata object.
 *
 * @throws TeiidRuntimeException wrapping metadata lookup failures
 */
public NamedTable translate(GroupSymbol symbol) {
    String alias = null;
    String fullGroup = symbol.getOutputName();
    if(symbol.getOutputDefinition() != null) {
        alias = symbol.getOutputName();
        fullGroup = symbol.getOutputDefinition();
        // If planning remapped this group, use the remapped name instead.
        if (remappedGroups != null) {
            GroupSymbol remappedGroup = remappedGroups.get(symbol.getMetadataID());
            if (remappedGroup != null && remappedGroup != symbol) {
                fullGroup = remappedGroup.getName();
            }
        }
    }
    fullGroup = removeSchemaName(fullGroup);
    NamedTable group = new NamedTable(fullGroup, alias, null);
    try {
        group.setMetadataObject(metadataFactory.getGroup(symbol.getMetadataID()));
    } catch (QueryMetadataException e) {
        throw new TeiidRuntimeException(QueryPlugin.Event.TEIID30487, e);
    } catch (TeiidComponentException e) {
        throw new TeiidRuntimeException(QueryPlugin.Event.TEIID30488, e);
    }
    return group;
}
/**
 * Strips the leading schema/model qualifier from a dotted name, returning
 * everything after the first separator (or the name unchanged when there is
 * no qualifier).
 */
private String removeSchemaName(String fullGroup) {
    int separatorIndex = fullGroup.indexOf(Symbol.SEPARATOR);
    if (separatorIndex <= 0) {
        return fullGroup;
    }
    return fullGroup.substring(separatorIndex + 1);
}
/* Batched Updates */
/**
 * Converts a batched update into a BatchedUpdates container holding each
 * translated sub-command, carrying over the single-result flag.
 */
BatchedUpdates translate(BatchedUpdateCommand command) {
    List<Command> updates = command.getUpdateCommands();
    List<org.teiid.language.Command> translated =
            new ArrayList<org.teiid.language.Command>(updates.size());
    for (Command update : updates) {
        translated.add(translate(update));
    }
    BatchedUpdates batchedUpdates = new BatchedUpdates(translated);
    batchedUpdates.setSingleResult(command.isSingleResult());
    return batchedUpdates;
}
/**
 * Converts a LIMIT/OFFSET clause. A missing offset means 0; a missing row
 * limit means Integer.MAX_VALUE.
 */
org.teiid.language.Limit translate(Limit limit) {
    if (limit == null) {
        return null;
    }
    int offset = 0;
    if (limit.getOffset() != null) {
        // Offsets arrive as integer literals.
        offset = ((Integer)((Literal)translate(limit.getOffset())).getValue()).intValue();
    }
    Literal rowLimitLiteral = (Literal)translate(limit.getRowLimit());
    int rowLimit = (rowLimitLiteral == null)
            ? Integer.MAX_VALUE
            : ((Integer)rowLimitLiteral.getValue()).intValue();
    return new org.teiid.language.Limit(offset, rowLimit);
}
/**
 * Sets the maximum number of values allowed in a pushed-down IN predicate
 * before it is split into multiple predicates.
 */
public void setMaxInPredicateSize(int maxInCriteriaSize) {
    this.maxInCriteriaSize = maxInCriteriaSize;
}
/** Supplies the command context used for option and null-order lookups. */
public void setCommandContext(CommandContext commandContext) {
    this.commandContext = commandContext;
}
/**
 * Adjusts the explicit/implicit null ordering on each sort item so that the
 * ordering produced by the source matches the engine's expected default.
 *
 * @param orderBy the translated ORDER BY to adjust in place
 * @param supportsNullOrdering whether the source accepts NULLS FIRST/LAST
 * @param sourceNullOrder the source's implicit null ordering
 * @param teiidNullOrder the engine's expected default null ordering
 */
public static void correctNullOrdering(org.teiid.language.OrderBy orderBy, boolean supportsNullOrdering,
        NullOrder sourceNullOrder, NullOrder teiidNullOrder) {
    for (SortSpecification item : orderBy.getSortSpecifications()) {
        if (item.getNullOrdering() != null) {
            // Explicit ordering requested but not supported: drop it.
            if (!supportsNullOrdering) {
                item.setNullOrdering(null);
            }
        } else if (supportsNullOrdering) {
            //try to match the expected default
            if (item.getOrdering() == Ordering.ASC) {
                // Ascending: FIRST/LOW defaults expect nulls first; add an
                // explicit ordering only when the source default differs.
                if (teiidNullOrder == NullOrder.FIRST || teiidNullOrder == NullOrder.LOW) {
                    if (sourceNullOrder != NullOrder.FIRST && sourceNullOrder != NullOrder.LOW) {
                        item.setNullOrdering(NullOrdering.FIRST);
                    }
                } else {
                    if (sourceNullOrder != NullOrder.LAST && sourceNullOrder != NullOrder.HIGH) {
                        item.setNullOrdering(NullOrdering.LAST);
                    }
                }
            } else {
                // Descending: LAST/LOW defaults expect nulls last, since LOW
                // values sort to the end of a descending ordering.
                if (teiidNullOrder == NullOrder.LAST || teiidNullOrder == NullOrder.LOW) {
                    if (sourceNullOrder != NullOrder.LAST && sourceNullOrder != NullOrder.LOW) {
                        item.setNullOrdering(NullOrdering.LAST);
                    }
                } else {
                    if (sourceNullOrder != NullOrder.FIRST && sourceNullOrder != NullOrder.HIGH) {
                        item.setNullOrdering(NullOrdering.FIRST);
                    }
                }
            }
        }
    }
}
/**
 * Sets the source system's implicit null ordering, used when correcting a
 * pushed-down ORDER BY.
 */
public void setSourceNullOrder(NullOrder sourceNullOrder) {
    this.sourceNullOrder = sourceNullOrder;
}
/** Indicates whether the source supports explicit NULLS FIRST/LAST syntax. */
public void setSupportsNullOrdering(boolean supportsNullOrdering) {
    this.supportsNullOrdering = supportsNullOrdering;
}
}
|
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.quarkus.runtime.cli.command;
import static org.keycloak.quarkus.runtime.cli.Picocli.NO_PARAM_LABEL;
import org.keycloak.quarkus.runtime.Environment;
import org.keycloak.quarkus.runtime.cli.ExecutionExceptionHandler;
import org.keycloak.quarkus.runtime.configuration.KeycloakConfigSourceProvider;
import picocli.CommandLine;
import picocli.CommandLine.Command;
import picocli.CommandLine.Option;
/**
 * Root picocli command for the Keycloak CLI: declares the global options
 * (system properties, help/version, verbosity, profile, config file) and
 * registers all subcommands.
 */
@Command(name = "keycloak",
        header = {
                "Keycloak - Open Source Identity and Access Management",
                "",
                "Find more information at: https://www.keycloak.org/docs/latest"
        },
        description = "%nUse this command-line tool to manage your Keycloak cluster.",
        footerHeading = "Examples:",
        footer = { "  Start the server in development mode for local development or testing:%n%n"
                + "      $ ${COMMAND-NAME} start-dev%n%n"
                + "  Building an optimized server runtime:%n%n"
                + "      $ ${COMMAND-NAME} build <OPTIONS>%n%n"
                + "  Start the server in production mode:%n%n"
                + "      $ ${COMMAND-NAME} start <OPTIONS>%n%n"
                + "  Enable auto-completion to bash/zsh:%n%n"
                + "      $ source <(${COMMAND-NAME} tools completion)%n%n"
                + "  Please, take a look at the documentation for more details before deploying in production.",
                "",
                "Use \"${COMMAND-NAME} start --help\" for the available options when starting the server.",
                "Use \"${COMMAND-NAME} <command> --help\" for more information about other commands."
        },
        version = {
                "Keycloak ${sys:kc.version}",
                "JVM: ${java.version} (${java.vendor} ${java.vm.name} ${java.vm.version})",
                "OS: ${os.name} ${os.version} ${os.arch}"
        },
        optionListHeading = "Options:",
        commandListHeading = "Commands:",
        abbreviateSynopsis = true,
        subcommands = {
                Build.class,
                Start.class,
                StartDev.class,
                Export.class,
                Import.class,
                ShowConfig.class,
                Tools.class
        })
public final class Main {

    // Injected by picocli; provides access to this command's CommandLine.
    @CommandLine.Spec
    CommandLine.Model.CommandSpec spec;

    // Declares -D<key>=<value> so it appears in help output. NOTE(review):
    // nothing in this class reads the field — presumably the launcher handles
    // -D arguments before picocli parsing; confirm.
    @Option(names = "-D<key>=<value>",
            description = "Set a Java system property",
            order = 0)
    Boolean sysProps;

    // Standard help flag; picocli prints usage and exits.
    @Option(names = { "-h", "--help" },
            description = "This help message.",
            usageHelp = true)
    boolean help;

    // Standard version flag; picocli prints the version strings above.
    @Option(names = { "-V", "--version" },
            description = "Show version information",
            versionHelp = true)
    boolean version;

    /** Enables detailed error output on the shared exception handler. */
    @Option(names = { "-v", "--verbose" },
            description = "Print out error details when running this command.",
            paramLabel = NO_PARAM_LABEL)
    public void setVerbose(boolean verbose) {
        ExecutionExceptionHandler exceptionHandler = (ExecutionExceptionHandler) spec.commandLine().getExecutionExceptionHandler();
        exceptionHandler.setVerbose(verbose);
    }

    /** Sets the runtime profile (e.g. 'dev' for development mode). */
    @Option(names = {"-pf", "--profile"},
            description = "Set the profile. Use 'dev' profile to enable development mode.")
    public void setProfile(String profile) {
        Environment.setProfile(profile);
    }

    /** Points the configuration source provider at an alternate properties file. */
    @Option(names = { "-cf", "--config-file" },
            arity = "1",
            description = "Set the path to a configuration file. By default, configuration properties are read from the \"keycloak.properties\" file in the \"conf\" directory.",
            paramLabel = "file")
    public void setConfigFile(String path) {
        System.setProperty(KeycloakConfigSourceProvider.KEYCLOAK_CONFIG_FILE_PROP, path);
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Model definition for PacketMirroringMirroredResourceInfo.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class PacketMirroringMirroredResourceInfo extends com.google.api.client.json.GenericJson {

    /**
     * A set of virtual machine instances that are being mirrored. They must live in zones contained
     * in the same region as this packetMirroring.
     *
     * Note that this config will apply only to those network interfaces of the Instances that belong
     * to the network specified in this packetMirroring.
     *
     * You may specify a maximum of 50 Instances.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.util.List<PacketMirroringMirroredResourceInfoInstanceInfo> instances;

    /**
     * A set of subnetworks for which traffic from/to all VM instances will be mirrored. They must
     * live in zones contained in the same region as this packetMirroring.
     *
     * You may specify a maximum of 5 subnetworks.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.util.List<PacketMirroringMirroredResourceInfoSubnetInfo> subnetworks;

    /**
     * A set of mirrored tags. Traffic from/to all VM instances that have one or more of these tags
     * will be mirrored.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.util.List<java.lang.String> tags;

    /**
     * A set of virtual machine instances that are being mirrored. They must live in zones contained
     * in the same region as this packetMirroring.
     *
     * Note that this config will apply only to those network interfaces of the Instances that belong
     * to the network specified in this packetMirroring.
     *
     * You may specify a maximum of 50 Instances.
     * @return value or {@code null} for none
     */
    public java.util.List<PacketMirroringMirroredResourceInfoInstanceInfo> getInstances() {
        return instances;
    }

    /**
     * A set of virtual machine instances that are being mirrored. They must live in zones contained
     * in the same region as this packetMirroring.
     *
     * Note that this config will apply only to those network interfaces of the Instances that belong
     * to the network specified in this packetMirroring.
     *
     * You may specify a maximum of 50 Instances.
     * @param instances instances or {@code null} for none
     */
    public PacketMirroringMirroredResourceInfo setInstances(java.util.List<PacketMirroringMirroredResourceInfoInstanceInfo> instances) {
        this.instances = instances;
        return this;
    }

    /**
     * A set of subnetworks for which traffic from/to all VM instances will be mirrored. They must
     * live in zones contained in the same region as this packetMirroring.
     *
     * You may specify a maximum of 5 subnetworks.
     * @return value or {@code null} for none
     */
    public java.util.List<PacketMirroringMirroredResourceInfoSubnetInfo> getSubnetworks() {
        return subnetworks;
    }

    /**
     * A set of subnetworks for which traffic from/to all VM instances will be mirrored. They must
     * live in zones contained in the same region as this packetMirroring.
     *
     * You may specify a maximum of 5 subnetworks.
     * @param subnetworks subnetworks or {@code null} for none
     */
    public PacketMirroringMirroredResourceInfo setSubnetworks(java.util.List<PacketMirroringMirroredResourceInfoSubnetInfo> subnetworks) {
        this.subnetworks = subnetworks;
        return this;
    }

    /**
     * A set of mirrored tags. Traffic from/to all VM instances that have one or more of these tags
     * will be mirrored.
     * @return value or {@code null} for none
     */
    public java.util.List<java.lang.String> getTags() {
        return tags;
    }

    /**
     * A set of mirrored tags. Traffic from/to all VM instances that have one or more of these tags
     * will be mirrored.
     * @param tags tags or {@code null} for none
     */
    public PacketMirroringMirroredResourceInfo setTags(java.util.List<java.lang.String> tags) {
        this.tags = tags;
        return this;
    }

    // Covariant override so fluent field assignment keeps this type.
    @Override
    public PacketMirroringMirroredResourceInfo set(String fieldName, Object value) {
        return (PacketMirroringMirroredResourceInfo) super.set(fieldName, value);
    }

    // Covariant override so cloning keeps this type.
    @Override
    public PacketMirroringMirroredResourceInfo clone() {
        return (PacketMirroringMirroredResourceInfo) super.clone();
    }
}
|
package com.github.wxiaoqi.security.gate.fallback;
import com.github.wxiaoqi.security.api.vo.authority.PermissionInfo;
import com.github.wxiaoqi.security.gate.feign.IUserService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.PathVariable;
import java.util.List;
/**
* @author ace
* @create 2018/3/7.
*/
@Service
@Slf4j
public class UserServiceFallback implements IUserService{
@Override
public List<PermissionInfo> getPermissionByUsername(@PathVariable("username") String username) {
log.error("调用{}异常{}","getPermissionByUsername",username);
return null;
}
@Override
public List<PermissionInfo> getAllPermissionInfo() {
log.error("调用{}异常","getPermissionByUsername");
return null;
}
}
|
package com.alaya.ens;
import com.alaya.tx.ChainId;
/**
* ENS registry contract addresses.
*/
public class Contracts {

    public static final String MAINNET = "0x314159265dd8dbb310642f98f50c066173c1259b";
    public static final String ROPSTEN = "0x112234455c3a32fd11230c42e7bccd4a84e02010";
    public static final String RINKEBY = "0xe7410170f87102df0055eb195163a03b7f2bff4a";

    /**
     * Resolves the ENS registry contract address for the given network.
     *
     * Robustness fix: previously a non-numeric (or out-of-byte-range) chain
     * id escaped as a raw {@link NumberFormatException} from
     * {@code Byte.valueOf}; it is now reported through the same
     * {@link EnsResolutionException} used for unknown networks.
     *
     * @param chainId decimal string form of the network's chain id
     * @return the ENS registry contract address for that network
     * @throws EnsResolutionException if the chain id is malformed or no
     *         registry is known for the network
     */
    public static String resolveRegistryContract(String chainId) {
        byte id;
        try {
            // parseByte has the same accepted input as the previous
            // Byte.valueOf call, without the boxing.
            id = Byte.parseByte(chainId);
        } catch (NumberFormatException e) {
            throw new EnsResolutionException(
                    "Unable to resolve ENS registry contract for network id: " + chainId);
        }
        switch (id) {
            case ChainId.MAINNET:
                return MAINNET;
            case ChainId.ROPSTEN:
                return ROPSTEN;
            case ChainId.RINKEBY:
                return RINKEBY;
            default:
                throw new EnsResolutionException(
                        "Unable to resolve ENS registry contract for network id: " + chainId);
        }
    }
}
|
package com.sun.corba.se.PortableActivationIDL;
/**
* com/sun/corba/se/PortableActivationIDL/_ORBProxyImplBase.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from c:/re/workspace/8-2-build-windows-amd64-cygwin/jdk8u144/9417/corba/src/share/classes/com/sun/corba/se/PortableActivationIDL/activation.idl
* Friday, July 21, 2017 9:58:51 PM PDT
*/
/** ORB callback interface, passed to Activator in registerORB method.
*/
public abstract class _ORBProxyImplBase extends org.omg.CORBA.portable.ObjectImpl
        implements com.sun.corba.se.PortableActivationIDL.ORBProxy, org.omg.CORBA.portable.InvokeHandler
{
    // Constructors
    public _ORBProxyImplBase ()
    {
    }

    // Maps operation names to dispatch indices for _invoke. Populated once at
    // class load; only one operation is exposed by this IDL interface.
    private static java.util.Hashtable _methods = new java.util.Hashtable ();
    static
    {
        _methods.put ("activate_adapter", new java.lang.Integer (0));
    }

    /**
     * Server-side dispatch entry point: looks up the requested operation,
     * unmarshals its arguments, invokes the implementation, and marshals the
     * reply.
     *
     * @throws org.omg.CORBA.BAD_OPERATION for an unknown operation name
     */
    public org.omg.CORBA.portable.OutputStream _invoke (String $method,
            org.omg.CORBA.portable.InputStream in,
            org.omg.CORBA.portable.ResponseHandler $rh)
    {
        org.omg.CORBA.portable.OutputStream out = null;
        java.lang.Integer __method = (java.lang.Integer)_methods.get ($method);
        if (__method == null)
            throw new org.omg.CORBA.BAD_OPERATION (0, org.omg.CORBA.CompletionStatus.COMPLETED_MAYBE);

        switch (__method.intValue ())
        {

            /** Method used to cause ORB to activate the named adapter, if possible.
             * This will cause the named POA to register itself with the activator as
             * a side effect. This should always happen before this call can complete.
             * This method returns true if adapter activation succeeded, otherwise it
             * returns false.
             */
            case 0:  // PortableActivationIDL/ORBProxy/activate_adapter
            {
                String name[] = org.omg.PortableInterceptor.AdapterNameHelper.read (in);
                boolean $result = false;
                $result = this.activate_adapter (name);
                out = $rh.createReply();
                out.write_boolean ($result);
                break;
            }

            default:
                throw new org.omg.CORBA.BAD_OPERATION (0, org.omg.CORBA.CompletionStatus.COMPLETED_MAYBE);
        }

        return out;
    } // _invoke

    // Type-specific CORBA::Object operations

    // Repository IDs implemented by this servant.
    private static String[] __ids = {
        "IDL:PortableActivationIDL/ORBProxy:1.0"};

    // Returns a defensive copy so callers cannot mutate the shared array.
    public String[] _ids ()
    {
        return (String[])__ids.clone ();
    }

} // class _ORBProxyImplBase
|
package main.java;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
/**
* Created by Rick on 07-Oct-18.
*/
/**
 * In-game screen: draws the title, the active-player banner and the checkers
 * board, and forwards board clicks to the game logic.
 */
class GameScreen extends JComponent implements MouseListener {

    /**
     * Rectangle (bounds) of the game board
     */
    private final Rectangle mBoardBounds =
            new Rectangle(Board.BOARD_PADDING, Board.BOARD_PADDING,
                    Board.BOARD_SIZE, Board.BOARD_SIZE);

    /**
     * Creates the game screen: resets the board to its starting state and wires
     * the board's callbacks so the window is redrawn on every state change and
     * swapped to the finished screen when the game ends.
     *
     * @param window the top-level window hosting this screen
     */
    GameScreen(Window window) {
        this.addMouseListener(this);

        // Set board to default value
        Board.resetBoard();

        // Set listener that fires when the game is finished
        Component component = this;
        Board.setOnGameFinishedListener(winner -> {
            // Replace this screen with the end-of-game screen
            window.remove(component);
            window.add(new FinishedScreen(window, winner), BorderLayout.CENTER);
        });

        // Add listener that fires when the game state is updated
        Board.setOnStateChangedListener(() -> {
            // Redraw the screen
            window.revalidate();
            window.repaint();
        });
    }

    /**
     * Draws the game screen.
     * <p>
     * FIX: overrides {@link JComponent#paintComponent} (with a {@code super}
     * call) instead of {@code paint}; overriding {@code paint} without calling
     * super bypasses Swing's painting pipeline (background, border, children).
     * The inherited {@code paint} still delegates here, so callers are
     * unaffected.
     */
    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);

        // Enable anti-aliasing for smoother shapes and text
        Graphics2D graphics = (Graphics2D) g;
        graphics.setRenderingHint(
                RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON
        );

        // Draw title
        graphics.setFont(new Font(null, Font.PLAIN, 40));
        Window.drawCenteredString(g, "Checkers", Window.WIDTH / 2, 50);

        // Draw active player
        graphics.setFont(new Font(null, Font.PLAIN, 20));
        String player = Board.getActivePlayer() == Board.Player.BLACK ? "Black" : "White";
        Window.drawCenteredString(g, player + " player is next", Window.WIDTH / 2, 80);

        // Draw board
        Board.draw(graphics);
    }

    @Override
    public void mouseClicked(MouseEvent e) {
        Point point = e.getPoint();
        // If clicked on the board, fire the Board.onClick function
        if (mBoardBounds.contains(point)) {
            // Adjust reference point to the board's coordinate system
            point.translate(-Board.BOARD_PADDING, -Board.BOARD_PADDING);
            Board.onClick(point);
        }
    }

    // Remaining MouseListener methods are intentionally empty; only clicks are
    // of interest, but the interface requires all five callbacks.

    @Override
    public void mousePressed(MouseEvent e) {
    }

    @Override
    public void mouseReleased(MouseEvent e) {
    }

    @Override
    public void mouseEntered(MouseEvent e) {
    }

    @Override
    public void mouseExited(MouseEvent e) {
    }
}
|
package com.live2o3.example;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {

    /** Sanity check: basic integer arithmetic evaluates as expected on the host JVM. */
    @Test
    public void addition_isCorrect() {
        final int sum = 2 + 2;
        assertEquals(4, sum);
    }
}
|
package org.jabref.gui.actions;
import org.jabref.gui.JabRefFrame;
import org.jabref.gui.shared.SharedDatabaseLoginDialogView;
/**
* Opens a shared database.
*/
public class ConnectToSharedDatabaseCommand extends SimpleCommand {

    /** Frame the login dialog is attached to. */
    private final JabRefFrame jabRefFrame;

    public ConnectToSharedDatabaseCommand(JabRefFrame jabRefFrame) {
        this.jabRefFrame = jabRefFrame;
    }

    /** Opens the shared-database login dialog and waits until it is closed. */
    @Override
    public void execute() {
        SharedDatabaseLoginDialogView dialog = new SharedDatabaseLoginDialogView(jabRefFrame);
        dialog.showAndWait();
    }
}
|
package eu.operando.activity;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.widget.ExpandableListView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import org.adblockplus.libadblockplus.android.webview.BuildConfig;
import java.util.ArrayList;
import eu.operando.AuthenticationRequiredActivity;
import eu.operando.R;
import eu.operando.adapter.IdentitiesExpandableListViewAdapter;
import eu.operando.tasks.AccordionOnGroupExpandListener;
import eu.operando.customView.OperandoProgressDialog;
import eu.operando.models.Identity;
import eu.operando.storage.Storage;
import eu.operando.swarmService.SwarmService;
import eu.operando.swarmService.models.IdentityListSwarmEntity;
import eu.operando.swarmclient.models.Swarm;
import eu.operando.swarmclient.models.SwarmCallback;
/**
 * Screen listing the user's identities: the real identity is shown in a fixed
 * header row, substitute identities in an expandable list. Identities are
 * (re)fetched from the SwarmService on every resume; all UI mutations are
 * marshalled back onto the UI thread via runOnUiThread.
 */
public class IdentitiesActivity extends AuthenticationRequiredActivity implements IdentitiesExpandableListViewAdapter.IdentityListener {

    // Expandable list of substitute identities.
    private ExpandableListView identitiesELV;
    // Header row showing the real identity; tapping it makes it the default.
    private LinearLayout defaultRealIdentity;
    private View addIdentityBtn;
    // Identities fetched from the swarm; the real one is removed before the
    // list is handed to the adapter (see setRealIdentity).
    ArrayList<Identity> identities;
    // Identity flagged isReal() in the fetched list, if any.
    private Identity realIdentity;
    // Identity flagged isDefault() in the fetched list, if any.
    private Identity defaultIdentity;

    /**
     * Launches this activity with a fade transition.
     * NOTE(review): assumes {@code context} is an Activity — the cast below
     * throws ClassCastException otherwise; confirm against callers.
     */
    public static void start(Context context) {
        Intent starter = new Intent(context, IdentitiesActivity.class);
        context.startActivity(starter);
        ((Activity) context).overridePendingTransition(R.anim.fade_in, R.anim.fade_out);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_identities);
        // Only build the full UI for logged-in users; otherwise show the
        // authentication-required view from the base activity.
        if (Storage.isUserLogged()) {
            initUI();
            setActions();
        } else {
            setViewForAuthenticationRequired();
        }
    }

    // Binds the toolbar and view references; in debug builds pre-fills the
    // real-identity label with a test address.
    private void initUI() {
        setToolbar();
        identitiesELV = (ExpandableListView) findViewById(R.id.identities_elv);
        addIdentityBtn = findViewById(R.id.addIdentityBtn);
        defaultRealIdentity = (LinearLayout) findViewById(R.id.default_real_identity);
        if (BuildConfig.DEBUG)
            ((TextView) findViewById(R.id.realIdentityTV)).setText("privacy_wizard@rms.ro");
    }

    // Wires click handlers: add-identity button, the real-identity header
    // (promote real identity to default), and accordion expand behaviour.
    private void setActions() {
        addIdentityBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                CreateIdentityActivity.start(IdentitiesActivity.this);
            }
        });
        defaultRealIdentity.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Only update when the real identity exists and is not already
                // the default; otherwise just inform the user.
                if (realIdentity != null && !realIdentity.equals(defaultIdentity)) {
                    updateIdentity(realIdentity, "updateDefaultSubstituteIdentity");
                } else {
                    Toast.makeText(IdentitiesActivity.this, R.string.default_identity_toast, Toast.LENGTH_SHORT).show();
                }
            }
        });
        identitiesELV.setOnGroupExpandListener(new AccordionOnGroupExpandListener(identitiesELV));
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Refresh the identity list every time the screen becomes visible.
        if (Storage.isUserLogged()) {
            getIdentities();
        }
    }

    // Asynchronously fetches the identity list; the callback may arrive on a
    // background thread, so UI work is posted back with runOnUiThread.
    public void getIdentities() {
        SwarmService.getInstance().getIdentitiesList(new SwarmCallback<IdentityListSwarmEntity>() {
            @Override
            public void call(final IdentityListSwarmEntity result) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        setIdentities(result);
                    }
                });
            }
        });
    }

    // Stores the fetched identities, extracts the real one, and rebinds the
    // expandable list adapter.
    private void setIdentities(IdentityListSwarmEntity result) {
        Log.d("ide", "call() called with: getResult = [" + result + "]");
        identities = result.getIdentities();
        setRealIdentity();
        identitiesELV.setAdapter(new IdentitiesExpandableListViewAdapter(IdentitiesActivity.this,
                identities));
    }

    // Scans the fetched list: removes the real identity (shown in the header
    // instead) and highlights the header depending on whether the real
    // identity is currently the default.
    private void setRealIdentity() {
        if (identities.size() > 0) {
            for (int index = 0; index < identities.size(); ++index) {
                Identity i = identities.get(index);
                if (i.isReal()) {
                    realIdentity = i;
                    // Remove from the adapter list; decrement so the element
                    // shifted into this slot is not skipped.
                    identities.remove(realIdentity);
                    --index;
                    ((TextView) findViewById(R.id.realIdentityTV)).setText(i.getEmail());
                }
                if (i.isDefault()) {
                    defaultIdentity = i;
                    if (defaultRealIdentity != null) {
                        // Inactive colour when the real identity is already the
                        // default (tapping would be a no-op), active otherwise.
                        if (defaultIdentity.equals(realIdentity)) {
                            defaultRealIdentity.setBackgroundColor(ContextCompat.getColor(this,
                                    R.color.identities_button_inactive_background));
                        } else {
                            defaultRealIdentity.setBackgroundColor(ContextCompat.getColor(this,
                                    R.color.identities_button_active_background));
                        }
                    }
                }
            }
        }
    }

    @Override
    public void onBackPressed() {
        finish();
        overridePendingTransition(R.anim.fade_in, R.anim.fade_out);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Toolbar "up" arrow behaves like the back button.
        if (item.getItemId() == android.R.id.home) {
            onBackPressed();
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Sends an identity update to the swarm service, showing a modal progress
     * dialog until the callback re-fetches the list.
     *
     * @param identity the identity to update; no-op (with a toast) if it is
     *                 already the default
     * @param method   name of the remote swarm method to invoke
     */
    public void updateIdentity(Identity identity, String method) {
        if (identity.isDefault()) {
            Toast.makeText(this, R.string.default_identity_toast, Toast.LENGTH_SHORT).show();
            return;
        }
        final ProgressDialog dialog = new OperandoProgressDialog(this);
        dialog.setCancelable(false);
        dialog.setMessage("Please wait...");
        dialog.show();
        SwarmService.getInstance().updateIdentity(new SwarmCallback<Swarm>() {
            @Override
            public void call(final Swarm result) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        // Refresh the list first, then dismiss the spinner.
                        getIdentities();
                        dialog.dismiss();
                    }
                });
            }
        }, method, identity.getEmail());
    }

    /**
     * Copies the identity's email address to the system clipboard and confirms
     * with a toast.
     */
    public void setClipboard(Identity identity) {
        ClipboardManager clipboard = (ClipboardManager)
                this.getSystemService(Context.CLIPBOARD_SERVICE);
        ClipData clip = ClipData.newPlainText("identity", identity.getEmail());
        if (clipboard != null) {
            clipboard.setPrimaryClip(clip);
            Toast.makeText(this, "Identity was copied to clipboard", Toast.LENGTH_SHORT).show();
        }
    }
}
|
// Targeted by JavaCPP version 1.5.1: DO NOT EDIT THIS FILE
package org.bytedeco.cuda.nppc;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import org.bytedeco.cuda.cudart.*;
import static org.bytedeco.cuda.global.cudart.*;
import static org.bytedeco.cuda.global.nppc.*;
// JavaCPP-generated mapping of the native NppiHaarBuffer struct (NVIDIA NPP);
// all accessors below are native field getters/setters on the mapped memory.
@Properties(inherit = org.bytedeco.cuda.presets.nppc.class)
public class NppiHaarBuffer extends Pointer {
    // Load the native library once when the class is first used.
    static { Loader.load(); }
    /** Default native constructor. */
    public NppiHaarBuffer() { super((Pointer)null); allocate(); }
    /** Native array allocator. Access with {@link Pointer#position(long)}. */
    public NppiHaarBuffer(long size) { super((Pointer)null); allocateArray(size); }
    /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
    public NppiHaarBuffer(Pointer p) { super(p); }
    private native void allocate();
    private native void allocateArray(long size);
    // Covariant override so position() can be chained on this type.
    @Override public NppiHaarBuffer position(long position) {
        return (NppiHaarBuffer)super.position(position);
    }

    /** size of the buffer */
    public native int haarBufferSize(); public native NppiHaarBuffer haarBufferSize(int setter);
    /** buffer */
    public native @Cast("Npp32s*") IntPointer haarBuffer(); public native NppiHaarBuffer haarBuffer(IntPointer setter);
}
|
package ddth.dasp.test;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import ddth.dasp.common.id.IdGenerator;
import ddth.dasp.test.utils.Benchmark;
import ddth.dasp.test.utils.BenchmarkResult;
import ddth.dasp.test.utils.Operation;
public class TestIdGenerator {

    /**
     * Benchmarks {@code IdGenerator.generateId48()} with 16 concurrent threads
     * over 1,000,000 runs, printing any duplicate ids and the benchmark
     * summary.
     */
    private static void testIdGen() {
        // Concrete ConcurrentHashMap type so putIfAbsent is available without
        // requiring the Java 8 Map default method.
        final ConcurrentHashMap<Object, Boolean> map = new ConcurrentHashMap<Object, Boolean>();
        final IdGenerator ID_GENERATOR = IdGenerator.getInstance(IdGenerator.getMacAddr());
        BenchmarkResult result = new Benchmark(new Operation() {
            @Override
            public void run(int runId) {
                long commentId = ID_GENERATOR.generateId48();
                Object id = commentId;
                // FIX: the original containsKey()-then-put() pair was a
                // check-then-act race — two threads generating the same id
                // concurrently could both pass the containsKey check and the
                // duplicate would go unreported. putIfAbsent is atomic.
                if (map.putIfAbsent(id, Boolean.TRUE) != null) {
                    System.out.println("Was generated: " + commentId);
                }
            }
        }, 1000000, 16).run();
        System.out.println(result.summarize());
    }

    /**
     * Runs the id-generation benchmark ten times.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        for (int i = 0; i < 10; i++) {
            testIdGen();
        }
    }
}
|
package com.google.android.gms.common.api.internal;
import com.google.android.gms.common.api.internal.BackgroundDetector.BackgroundStateChangeListener;
final class zabi implements BackgroundStateChangeListener {

    private final /* synthetic */ GoogleApiManager zaim;

    zabi(GoogleApiManager googleApiManager) {
        this.zaim = googleApiManager;
    }

    /**
     * Forwards a background/foreground transition to the manager's handler as
     * message {@code 1} with the boxed state as payload.
     */
    public final void onBackgroundStateChanged(boolean z) {
        GoogleApiManager manager = this.zaim;
        manager.handler.sendMessage(manager.handler.obtainMessage(1, Boolean.valueOf(z)));
    }
}
|
package geometry;
/** One cell of a Voronoi diagram: its generating seed point and its boundary polygon. */
public class VoronoiTile {

    /** Generating site of this cell. */
    public final Point2D seed;
    /** Boundary polygon of the cell; both accessors tolerate {@code null}. */
    public final Polygon2D outline;

    public VoronoiTile(Point2D seed, Polygon2D border) {
        this.seed = seed;
        this.outline = border;
    }

    /** Returns the number of vertices of the outline, or 0 when there is no outline. */
    public int countVertices() {
        return (outline == null) ? 0 : outline.countVertices();
    }

    /** Returns whether {@code pt} is a vertex of the outline; false when there is no outline. */
    public boolean hasVertex(Point2D pt) {
        return outline != null && outline.hasVertex(pt);
    }
}
|
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================*/
// This class has been generated, DO NOT EDIT!
package org.tensorflow.op.data.experimental;
import org.tensorflow.Operand;
import org.tensorflow.Operation;
import org.tensorflow.OperationBuilder;
import org.tensorflow.Output;
import org.tensorflow.op.RawOp;
import org.tensorflow.op.Scope;
import org.tensorflow.op.annotation.Endpoint;
import org.tensorflow.types.family.TType;
/**
* The StatsAggregatorHandleV2 operation
*/
/**
 * The StatsAggregatorHandleV2 operation
 */
public final class StatsAggregatorHandle extends RawOp implements Operand<TType> {
  /**
   * The name of this op, as known by TensorFlow core engine
   */
  public static final String OP_NAME = "StatsAggregatorHandleV2";

  private Output<? extends TType> handle;

  @SuppressWarnings("unchecked")
  private StatsAggregatorHandle(Operation operation) {
    super(operation);
    // Single output: the resource handle produced by the op.
    int outputIdx = 0;
    handle = operation.output(outputIdx++);
  }

  /**
   * Factory method to create a class wrapping a new StatsAggregatorHandleV2 operation.
   *
   * @param scope current scope
   * @param options carries optional attribute values
   * @return a new instance of StatsAggregatorHandle
   */
  @Endpoint(
      describeByClass = true
  )
  public static StatsAggregatorHandle create(Scope scope, Options... options) {
    OperationBuilder opBuilder = scope.env().opBuilder("StatsAggregatorHandleV2", scope.makeOpName("StatsAggregatorHandle"));
    opBuilder = scope.apply(opBuilder);
    // Only attributes explicitly set via Options are written onto the op.
    if (options != null) {
      for (Options opts : options) {
        if (opts.container != null) {
          opBuilder.setAttr("container", opts.container);
        }
        if (opts.sharedName != null) {
          opBuilder.setAttr("shared_name", opts.sharedName);
        }
      }
    }
    return new StatsAggregatorHandle(opBuilder.build());
  }

  /**
   * Sets the container option.
   *
   * @param container the container option
   * @return this Options instance.
   */
  public static Options container(String container) {
    return new Options().container(container);
  }

  /**
   * Sets the sharedName option.
   *
   * @param sharedName the sharedName option
   * @return this Options instance.
   */
  public static Options sharedName(String sharedName) {
    return new Options().sharedName(sharedName);
  }

  /**
   * Gets handle.
   *
   * @return handle.
   */
  public Output<? extends TType> handle() {
    return handle;
  }

  @Override
  @SuppressWarnings("unchecked")
  public Output<TType> asOutput() {
    return (Output<TType>) handle;
  }

  /**
   * Optional attributes for {@link org.tensorflow.op.data.experimental.StatsAggregatorHandle}
   */
  public static class Options {
    private String container;

    private String sharedName;

    private Options() {
    }

    /**
     * Sets the container option.
     *
     * @param container the container option
     * @return this Options instance.
     */
    public Options container(String container) {
      this.container = container;
      return this;
    }

    /**
     * Sets the sharedName option.
     *
     * @param sharedName the sharedName option
     * @return this Options instance.
     */
    public Options sharedName(String sharedName) {
      this.sharedName = sharedName;
      return this;
    }
  }
}
|
/*
* Copyright (c) 2008-2014 Haulmont. All rights reserved.
* Use is subject to license terms, see http://www.cuba-platform.com/license for details.
*/
package com.haulmont.cuba.desktop.gui.components;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.cuba.gui.components.Action;
import com.haulmont.cuba.gui.components.IFrame;
import com.haulmont.cuba.gui.components.PickerField;
import com.haulmont.cuba.gui.components.SearchPickerField;
import com.haulmont.cuba.gui.data.CollectionDatasource;
import com.haulmont.cuba.gui.data.Datasource;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import java.util.Collection;
/**
* @author artamonov
*/
/**
 * Desktop SearchPickerField: a search field combined with picker actions.
 * Most operations (actions, meta-class, composition) are one-line delegations
 * to the embedded {@link DesktopPickerField}; the search behaviour comes from
 * the {@link DesktopSearchField} superclass.
 *
 * @author artamonov
 */
public class DesktopSearchPickerField extends DesktopSearchField implements SearchPickerField {

    protected DesktopPickerField pickerField;

    public DesktopSearchPickerField() {
        // The inner Picker embeds this search field as the picker's editor.
        pickerField = new DesktopPickerField(new Picker());
    }

    @Override
    public JComponent getComposition() {
        return pickerField.getComposition();
    }

    @Override
    public MetaClass getMetaClass() {
        return pickerField.getMetaClass();
    }

    @Override
    public void setMetaClass(MetaClass metaClass) {
        pickerField.setMetaClass(metaClass);
    }

    // Standard picker actions: each is created, registered and returned so the
    // caller can further configure it.

    @Override
    public PickerField.LookupAction addLookupAction() {
        PickerField.LookupAction action = new PickerField.LookupAction(this);
        addAction(action);
        return action;
    }

    @Override
    public PickerField.ClearAction addClearAction() {
        PickerField.ClearAction action = new PickerField.ClearAction(this);
        addAction(action);
        return action;
    }

    @Override
    public PickerField.OpenAction addOpenAction() {
        PickerField.OpenAction action = new PickerField.OpenAction(this);
        addAction(action);
        return action;
    }

    // Not supported by the desktop implementation of this component.
    @Override
    public void addFieldListener(PickerField.FieldListener listener) {
        throw new UnsupportedOperationException();
    }

    // Not supported by the desktop implementation of this component.
    @Override
    public void setFieldEditable(boolean editable) {
        throw new UnsupportedOperationException();
    }

    // Action management is delegated wholesale to the embedded picker field.

    @Override
    public void addAction(Action action) {
        pickerField.addAction(action);
    }

    @Override
    public void addAction(Action action, int index) {
        pickerField.addAction(action, index);
    }

    @Override
    public void removeAction(@Nullable Action action) {
        pickerField.removeAction(action);
    }

    @Override
    public void removeAction(@Nullable String id) {
        pickerField.removeAction(id);
    }

    @Override
    public void removeAllActions() {
        pickerField.removeAllActions();
    }

    @Override
    public Collection<Action> getActions() {
        return pickerField.getActions();
    }

    @Override
    @Nullable
    public Action getAction(String id) {
        return pickerField.getAction(id);
    }

    // Non-null variant: fails fast when the action is missing.
    @Nonnull
    @Override
    public Action getActionNN(String id) {
        Action action = getAction(id);
        if (action == null) {
            throw new IllegalStateException("Unable to find action with id " + id);
        }
        return action;
    }

    // State that must be kept in sync on both the search field (super) and the
    // embedded picker field.

    @Override
    public void setFrame(IFrame frame) {
        super.setFrame(frame);
        pickerField.setFrame(frame);
    }

    @Override
    public void setDatasource(Datasource datasource, String property) {
        super.setDatasource(datasource, property);
        pickerField.setDatasource(datasource, property);
    }

    @Override
    public void setOptionsDatasource(CollectionDatasource datasource) {
        super.setOptionsDatasource(datasource);
        // Derive the picker's meta-class from the options datasource when it
        // has not been set explicitly.
        if (pickerField.getMetaClass() == null && datasource != null) {
            pickerField.setMetaClass(datasource.getMetaClass());
        }
    }

    @Override
    public void setEditable(boolean editable) {
        super.setEditable(editable);
        pickerField.setEditable(editable);
    }

    @Override
    public void updateEnabled() {
        super.updateEnabled();
        pickerField.setParentEnabled(isEnabledWithParent());
    }

    // Adapter embedding this search field as the picker's editor component;
    // value handling is a no-op because the value lives in the search field.
    private class Picker extends com.haulmont.cuba.desktop.sys.vcl.Picker {
        @Override
        protected void initEditor() {
            // put LookupField into PickerField composition
            editor = DesktopSearchPickerField.super.getComposition();
        }

        @Override
        public JComponent getInputField() {
            return getInputComponent();
        }

        @Override
        public Object getValue() {
            return null;
        }

        @Override
        public void setValue(Object value) {
        }
    }
}
|
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.jasperreports.engine;
import java.awt.Color;
import java.util.UUID;
import net.sf.jasperreports.engine.type.ModeEnum;
import net.sf.jasperreports.engine.type.PositionTypeEnum;
import net.sf.jasperreports.engine.type.StretchTypeEnum;
/**
*
*
* @author Lucian Chirita (lucianc@users.sourceforge.net)
* @version $Id: ElementDecorator.java 7199 2014-08-27 13:58:10Z teodord $
*/
/**
 * Read-only decorator over a {@link JRElement}: all getters delegate to the
 * wrapped element, while every mutator throws
 * {@link UnsupportedOperationException}. Subclasses add behaviour on top of an
 * existing, immutable element.
 *
 * @author Lucian Chirita (lucianc@users.sourceforge.net)
 * @version $Id: ElementDecorator.java 7199 2014-08-27 13:58:10Z teodord $
 */
public abstract class ElementDecorator implements JRElement
{

	// The decorated element; all read operations are forwarded to it.
	private final JRElement element;

	public ElementDecorator(JRElement decorated)
	{
		this.element = decorated;
	}

	public Object clone()
	{
		try
		{
			// Shallow clone: the wrapped element reference is shared.
			return super.clone();
		}
		catch (CloneNotSupportedException e)
		{
			// never
			throw new JRRuntimeException(e);
		}
	}

	// ---- Delegated read operations -------------------------------------

	public void collectExpressions(JRExpressionCollector collector)
	{
		element.collectExpressions(collector);
	}

	public JRElementGroup getElementGroup()
	{
		return element.getElementGroup();
	}

	public UUID getUUID()
	{
		return element.getUUID();
	}

	public String getKey()
	{
		return element.getKey();
	}

	public PositionTypeEnum getPositionTypeValue()
	{
		return element.getPositionTypeValue();
	}

	public JRExpression getPrintWhenExpression()
	{
		return element.getPrintWhenExpression();
	}

	public JRGroup getPrintWhenGroupChanges()
	{
		return element.getPrintWhenGroupChanges();
	}

	public JRPropertyExpression[] getPropertyExpressions()
	{
		return element.getPropertyExpressions();
	}

	public StretchTypeEnum getStretchTypeValue()
	{
		return element.getStretchTypeValue();
	}

	public int getX()
	{
		return element.getX();
	}

	public int getY()
	{
		return element.getY();
	}

	public boolean isPrintInFirstWholeBand()
	{
		return element.isPrintInFirstWholeBand();
	}

	public boolean isPrintRepeatedValues()
	{
		return element.isPrintRepeatedValues();
	}

	public boolean isPrintWhenDetailOverflows()
	{
		return element.isPrintWhenDetailOverflows();
	}

	public boolean isRemoveLineWhenBlank()
	{
		return element.isRemoveLineWhenBlank();
	}

	// ---- Mutators: unsupported by design (decorator is read-only) ------

	@Deprecated
	public void setPositionType(byte positionType)
	{
		throw new UnsupportedOperationException();
	}

	public void setPositionType(PositionTypeEnum positionType)
	{
		throw new UnsupportedOperationException();
	}

	public void setPrintInFirstWholeBand(boolean isPrintInFirstWholeBand)
	{
		throw new UnsupportedOperationException();
	}

	public void setPrintRepeatedValues(boolean isPrintRepeatedValues)
	{
		throw new UnsupportedOperationException();
	}

	public void setPrintWhenDetailOverflows(boolean isPrintWhenDetailOverflows)
	{
		throw new UnsupportedOperationException();
	}

	public void setRemoveLineWhenBlank(boolean isRemoveLineWhenBlank)
	{
		throw new UnsupportedOperationException();
	}

	@Deprecated
	public void setStretchType(byte stretchType)
	{
		throw new UnsupportedOperationException();
	}

	public void setStretchType(StretchTypeEnum stretchTypeEnum)
	{
		throw new UnsupportedOperationException();
	}

	public void setWidth(int width)
	{
		throw new UnsupportedOperationException();
	}

	public void setX(int x)
	{
		throw new UnsupportedOperationException();
	}

	public Object clone(JRElementGroup parentGroup)
	{
		throw new UnsupportedOperationException();
	}

	@Override
	public JRElement clone(JRElementGroup parentGroup, int y)
	{
		throw new UnsupportedOperationException();
	}

	public void visit(JRVisitor visitor)
	{
		element.visit(visitor);
	}

	// ---- JRCommonElement / style read operations -----------------------

	public Color getBackcolor()
	{
		return element.getBackcolor();
	}

	public Color getForecolor()
	{
		return element.getForecolor();
	}

	public int getHeight()
	{
		return element.getHeight();
	}

	public ModeEnum getModeValue()
	{
		return element.getModeValue();
	}

	public Color getOwnBackcolor()
	{
		return element.getOwnBackcolor();
	}

	public Color getOwnForecolor()
	{
		return element.getOwnForecolor();
	}

	public ModeEnum getOwnModeValue()
	{
		return element.getOwnModeValue();
	}

	public int getWidth()
	{
		return element.getWidth();
	}

	public void setBackcolor(Color backcolor)
	{
		throw new UnsupportedOperationException();
	}

	public void setForecolor(Color forecolor)
	{
		throw new UnsupportedOperationException();
	}

	@Deprecated
	public void setMode(byte mode)
	{
		throw new UnsupportedOperationException();
	}

	@Deprecated
	public void setMode(Byte mode)
	{
		throw new UnsupportedOperationException();
	}

	public void setMode(ModeEnum mode)
	{
		throw new UnsupportedOperationException();
	}

	public JRDefaultStyleProvider getDefaultStyleProvider()
	{
		return element.getDefaultStyleProvider();
	}

	public JRStyle getStyle()
	{
		return element.getStyle();
	}

	public String getStyleNameReference()
	{
		return element.getStyleNameReference();
	}

	public JRPropertiesHolder getParentProperties()
	{
		return element.getParentProperties();
	}

	public JRPropertiesMap getPropertiesMap()
	{
		return element.getPropertiesMap();
	}

	public boolean hasProperties()
	{
		return element.hasProperties();
	}
}
|
/*
* MIT License
*
* Copyright (c) 2021 MASES s.r.l.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/**************************************************************************************
* <auto-generated>
* This code was generated from a template using JCOReflector
*
* Manual changes to this file may cause unexpected behavior in your application.
* Manual changes to this file will be overwritten if the code is regenerated.
* </auto-generated>
*************************************************************************************/
package system.runtime.remoting.services;
import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;
import java.util.ArrayList;
// Import section
import system.runtime.remoting.services.ITrackingHandler;
import system.runtime.remoting.services.ITrackingHandlerImplementation;
/**
* The base .NET class managing System.Runtime.Remoting.Services.TrackingServices, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089. Extends {@link NetObject}.
* <p>
*
* See: <a href="https://docs.microsoft.com/en-us/dotnet/api/System.Runtime.Remoting.Services.TrackingServices" target="_top">https://docs.microsoft.com/en-us/dotnet/api/System.Runtime.Remoting.Services.TrackingServices</a>
*/
public class TrackingServices extends NetObject {
/**
* Fully assembly qualified name: mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
*/
public static final String assemblyFullName = "mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089";
/**
* Assembly name: mscorlib
*/
public static final String assemblyShortName = "mscorlib";
/**
* Qualified class name: System.Runtime.Remoting.Services.TrackingServices
*/
public static final String className = "System.Runtime.Remoting.Services.TrackingServices";
static JCOBridge bridge = JCOBridgeInstance.getInstance(assemblyFullName);
/**
* The type managed from JCOBridge. See {@link JCType}
*/
public static JCType classType = createType();
static JCEnum enumInstance = null;
JCObject classInstance = null;
static JCType createType() {
try {
String classToCreate = className + ", "
+ (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
if (JCOReflector.getDebug())
JCOReflector.writeLog("Creating %s", classToCreate);
JCType typeCreated = bridge.GetType(classToCreate);
if (JCOReflector.getDebug())
JCOReflector.writeLog("Created: %s",
(typeCreated != null) ? typeCreated.toString() : "Returned null value");
return typeCreated;
} catch (JCException e) {
JCOReflector.writeLog(e);
return null;
}
}
void addReference(String ref) throws Throwable {
try {
bridge.AddReference(ref);
} catch (JCNativeException jcne) {
throw translateException(jcne);
}
}
public TrackingServices(Object instance) throws Throwable {
super(instance);
if (instance instanceof JCObject) {
classInstance = (JCObject) instance;
} else
throw new Exception("Cannot manage object, it is not a JCObject");
}
public String getJCOAssemblyName() {
return assemblyFullName;
}
public String getJCOClassName() {
return className;
}
public String getJCOObjectName() {
return className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
}
public Object getJCOInstance() {
return classInstance;
}
public void setJCOInstance(JCObject instance) {
classInstance = instance;
super.setJCOInstance(classInstance);
}
public JCType getJCOType() {
return classType;
}
/**
* Try to cast the {@link IJCOBridgeReflected} instance into {@link TrackingServices}, a cast assert is made to check if types are compatible.
* @param from {@link IJCOBridgeReflected} instance to be casted
* @return {@link TrackingServices} instance
* @throws java.lang.Throwable in case of error during cast operation
*/
public static TrackingServices cast(IJCOBridgeReflected from) throws Throwable {
NetType.AssertCast(classType, from);
return new TrackingServices(from.getJCOInstance());
}
// Constructors section
public TrackingServices() throws Throwable {
try {
// add reference to assemblyName.dll file
addReference(JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName);
setJCOInstance((JCObject)classType.NewObject());
} catch (JCNativeException jcne) {
throw translateException(jcne);
}
}
// Methods section
public static void RegisterTrackingHandler(ITrackingHandler handler) throws Throwable, system.ArgumentNullException, system.ArgumentException, system.TypeLoadException, system.InvalidOperationException, system.MissingMethodException, system.reflection.TargetInvocationException, system.NotSupportedException, system.globalization.CultureNotFoundException, system.ArgumentOutOfRangeException, system.OutOfMemoryException, system.FormatException, system.runtime.remoting.RemotingException {
if (classType == null)
throw new UnsupportedOperationException("classType is null.");
try {
classType.Invoke("RegisterTrackingHandler", handler == null ? null : handler.getJCOInstance());
} catch (JCNativeException jcne) {
throw translateException(jcne);
}
}
/**
 * Unregisters a tracking handler via the static .NET method of the same name.
 *
 * @param handler the handler to unregister; {@code null} is forwarded as a null argument
 * @throws Throwable the translated exception if the native invocation fails
 */
public static void UnregisterTrackingHandler(ITrackingHandler handler) throws Throwable, system.ArgumentNullException, system.ArgumentException, system.TypeLoadException, system.InvalidOperationException, system.MissingMethodException, system.reflection.TargetInvocationException, system.NotSupportedException, system.globalization.CultureNotFoundException, system.ArgumentOutOfRangeException, system.OutOfMemoryException, system.FormatException, system.runtime.remoting.RemotingException {
    if (classType == null)
        throw new UnsupportedOperationException("classType is null.");
    Object nativeHandler = (handler == null) ? null : handler.getJCOInstance();
    try {
        classType.Invoke("UnregisterTrackingHandler", nativeHandler);
    } catch (JCNativeException jcne) {
        throw translateException(jcne);
    }
}
// Properties section
/**
 * Reads the .NET {@code RegisteredHandlers} property and wraps each native
 * handler in an {@link ITrackingHandlerImplementation} proxy.
 *
 * @return the currently registered tracking handlers
 * @throws Throwable the translated exception if the native property read fails
 */
public final static ITrackingHandler[] getRegisteredHandlers() throws Throwable, system.ArgumentException {
    if (classType == null)
        throw new UnsupportedOperationException("classType is null.");
    try {
        JCObject nativeHandlers = (JCObject)classType.Get("RegisteredHandlers");
        ArrayList<ITrackingHandler> handlers = new ArrayList<ITrackingHandler>();
        for (Object nativeHandler : nativeHandlers) {
            handlers.add(new ITrackingHandlerImplementation(nativeHandler));
        }
        return handlers.toArray(new ITrackingHandler[handlers.size()]);
    } catch (JCNativeException jcne) {
        throw translateException(jcne);
    }
}
// Instance Events section
}
|
package net.meeusen.crypto;
import java.math.BigInteger;
import net.meeusen.util.ByteString;
import org.bouncycastle.crypto.params.ECDomainParameters;
import org.bouncycastle.jce.ECNamedCurveTable;
import org.bouncycastle.jce.spec.ECNamedCurveParameterSpec;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.math.ec.ECCurve.Config;
/**
 * Convenience wrapper around a Bouncy Castle named elliptic curve over a
 * prime field, exposing the domain parameters both as {@code java.security.spec}
 * objects and as Bouncy Castle {@link ECDomainParameters}.
 *
 * NOTE(review): the accessors assume a prime-field (Fp) curve; binary-field
 * curves would need a different {@link #getField()} implementation.
 */
public class MyEccDomain {
    /*
     * Reference values for curve P-256:
     p = 115792089210356248762697446949407573530086143415290314195533631308867097853951
     r = 115792089210356248762697446949407573529996955224135760342422259061068512044369
     s = c49d3608 86e70493 6a6678e1 139d26b7 819f7e90
     c = 7efba166 2985be94 03cb055c 75d4f7e0 ce8d84a9 c5114abc af317768 0104fa0d
     b = 5ac635d8 aa3a93e7 b3ebbd55 769886bc 651d06b0 cc53b0f6 3bce3c3e 27d2604b
     Gx = 6b17d1f2 e12c4247 f8bce6e5 63a440f2 77037d81 2deb33a0 f4a13945 d898c296
     Gy = 4fe342e2 fe1a7f9b 8ee7eb4a 7c0f9e16 2bce3357 6b315ece cbb64068 37bf51f5
     * */
    private String namedCurve;
    private ECNamedCurveParameterSpec curveparams;
    // Fallback cofactor, used only if the curve spec does not provide one.
    int cofactor_h = 1;
    // Unused internally; kept for compatibility with same-package clients.
    ECCurve bccurve ;

    /**
     * Loads the named curve from the Bouncy Castle named-curve table.
     *
     * @param curvename curve name understood by BC, e.g. "P-256"
     */
    public MyEccDomain(String curvename) {
        this.namedCurve=curvename;
        this.curveparams = ECNamedCurveTable.getParameterSpec(namedCurve);
    }

    /** @return the curve name this domain was built from */
    public String getCurveName() {
        return namedCurve;
    }

    /** @return the field characteristic p (prime-field curves) */
    public BigInteger getPrime() {
        return curveparams.getCurve().getField().getCharacteristic();
    }

    /** @return curve coefficient a */
    public BigInteger getA() {
        return curveparams.getCurve().getA().toBigInteger();
    }

    /** @return curve coefficient b */
    public BigInteger getB() {
        return curveparams.getCurve().getB().toBigInteger();
    }

    /** @return x coordinate of the base point G */
    public BigInteger getGx() {
        return curveparams.getG().getXCoord().toBigInteger();
    }

    /** @return y coordinate of the base point G */
    public BigInteger getGy() {
        return curveparams.getG().getYCoord().toBigInteger();
    }

    /** @return the order n of the base point G */
    public BigInteger getOrderN() {
        return curveparams.getN();
    }

    /** @return the prime field Fp as a JCA field object */
    public java.security.spec.ECField getField() {
        return new java.security.spec.ECFieldFp(getPrime());
    }

    /** @return the curve as a JCA {@code EllipticCurve} */
    public java.security.spec.EllipticCurve getCurve() {
        return new java.security.spec.EllipticCurve(getField(), getA(), getB());
    }

    /** @return the full JCA parameter spec (curve, G, n, h) */
    public java.security.spec.ECParameterSpec getEcParamSpec() {
        return new java.security.spec.ECParameterSpec(getCurve(), getG(), getOrderN(), getCofactorH()) ; // ECParameterSpec(EllipticCurve curve, ECPoint g, BigInteger n, int h)
    }

    /**
     * Builds the equivalent Bouncy Castle domain parameters.
     *
     * @return BC {@link ECDomainParameters} for this curve
     */
    public ECDomainParameters getBcParamSpec() {
        ECCurve.Fp curve = new ECCurve.Fp(this.getPrime(), this.getA(), this.getB() );
        org.bouncycastle.math.ec.ECPoint bc_g = curve.createPoint(this.getGx(), this.getGy());
        // Pass the cofactor explicitly so the BC parameters agree with the
        // JCA spec returned by getEcParamSpec().
        return new ECDomainParameters(curve, bc_g, this.getOrderN(),
                BigInteger.valueOf(getCofactorH()));
    }

    /**
     * Cofactor h of the curve. Previously hard-coded to 1; now read from the
     * named-curve spec, falling back to {@link #cofactor_h} if absent.
     *
     * @return the curve cofactor
     */
    public int getCofactorH() {
        BigInteger h = curveparams.getH();
        return (h == null) ? cofactor_h : h.intValue();
    }

    /** @return the base point G as a JCA {@code ECPoint} */
    public java.security.spec.ECPoint getG() {
        return new java.security.spec.ECPoint(getGx(),getGy());
    }

    /**
     * Hex-encodes a BigInteger via its two's-complement byte representation.
     *
     * @param bi the value to encode
     * @return hex string of {@code bi.toByteArray()}
     */
    public static String bi2strh (BigInteger bi) {
        return new ByteString(bi.toByteArray()).toHexString() ;
    }

    /** @return a multi-line dump of the domain parameters */
    @Override
    public String toString() {
        String nl="\n";
        return this.namedCurve + nl
                + "a-dec: " + getA()+ nl
                + "a-hex: " + bi2strh(getA())+ nl
                + "b-dec: " + getB()+ nl
                + "b-hex: " + bi2strh(getB())+ nl
                + "p-dec: " + getPrime()+ nl
                + "p-hex: " + bi2strh(getPrime())+ nl
                + "gx-hex: " + bi2strh(getGx())+ nl
                + "gy-hex: " + bi2strh(getGy())+ nl
                ;
    }

    /** Smoke test: prints the P-256 domain parameters. */
    public static void main(String[] args) {
        MyEccDomain md = new MyEccDomain("P-256");
        System.out.println(md);
    }
}
|
/*******************************************************************************
* Copyright 2016 by the Department of Computer Science (University of Genova and University of Oxford)
*
* This file is part of LogMapC an extension of LogMap matcher for conservativity principle.
*
* LogMapC is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* LogMapC is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with LogMapC. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import logmap.LogMapWrapper;
import ontology.AxiomExplanation;
//import org.mindswap.pellet.PelletOptions;
import org.semanticweb.HermiT.Reasoner;
import org.semanticweb.elk.owl.exceptions.ElkException;
import org.semanticweb.elk.owlapi.ElkClassExpressionConverter;
import org.semanticweb.elk.owlapi.ElkConverter;
import org.semanticweb.elk.owlapi.ElkReasoner;
import org.semanticweb.elk.owlapi.ElkReasonerFactory;
import org.semanticweb.elk.owlapi.proofs.AxiomExpressionWrap;
import org.semanticweb.elk.owlapi.proofs.ElkToOwlProofConverter;
import org.semanticweb.elk.owlapi.proofs.Proofs;
import org.semanticweb.elk.owlapi.wrapper.OwlConverter;
import org.semanticweb.owl.explanation.api.Explanation;
import org.semanticweb.owl.explanation.api.ExplanationGenerator;
import org.semanticweb.owl.explanation.api.ExplanationGeneratorFactory;
import org.semanticweb.owl.explanation.api.ExplanationGeneratorInterruptedException;
import org.semanticweb.owl.explanation.impl.blackbox.Configuration;
import org.semanticweb.owl.explanation.impl.blackbox.EntailmentCheckerFactory;
import org.semanticweb.owl.explanation.impl.blackbox.checker.BlackBoxExplanationGeneratorFactory;
import org.semanticweb.owl.explanation.impl.blackbox.checker.SatisfiabilityEntailmentCheckerFactory;
import org.semanticweb.owl.explanation.impl.laconic.LaconicExplanationGeneratorFactory;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.io.IRIDocumentSource;
import org.semanticweb.owlapi.io.OWLOntologyDocumentSource;
import org.semanticweb.owlapi.io.OWLXMLOntologyFormat;
import org.semanticweb.owlapi.model.AxiomType;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassExpression;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLDataProperty;
import org.semanticweb.owlapi.model.OWLDatatype;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLException;
import org.semanticweb.owlapi.model.OWLNamedIndividual;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyChange;
import org.semanticweb.owlapi.model.OWLOntologyChangeListener;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyFormat;
import org.semanticweb.owlapi.model.OWLOntologyLoaderConfiguration;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyStorageException;
import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom;
import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom;
import org.semanticweb.owlapi.model.UnknownOWLOntologyException;
import org.semanticweb.owlapi.profiles.OWL2ELProfile;
import org.semanticweb.owlapi.profiles.OWLProfileReport;
import org.semanticweb.owlapi.reasoner.InferenceType;
import org.semanticweb.owlapi.reasoner.Node;
import org.semanticweb.owlapi.reasoner.NodeSet;
import org.semanticweb.owlapi.reasoner.OWLReasoner;
import org.semanticweb.owlapi.reasoner.OWLReasonerFactory;
import org.semanticweb.owlapi.reasoner.ReasonerInterruptedException;
import org.semanticweb.owlapi.reasoner.TimeOutException;
import org.semanticweb.owlapi.util.DLExpressivityChecker;
import org.semanticweb.owlapi.util.DefaultPrefixManager;
import org.semanticweb.owlapi.util.InferredAxiomGenerator;
import org.semanticweb.owlapi.util.InferredEquivalentClassAxiomGenerator;
import org.semanticweb.owlapi.util.InferredSubClassAxiomGenerator;
import org.semanticweb.owlapi.util.OWLOntologyWalker;
import org.semanticweb.owlapi.util.OWLOntologyWalkerVisitor;
import org.semanticweb.owlapi.util.SimpleIRIShortFormProvider;
import org.semanticweb.owlapi.util.Version;
import org.semanticweb.owlapitools.proofs.ExplainingOWLReasoner;
import org.semanticweb.owlapitools.proofs.OWLInference;
import org.semanticweb.owlapitools.proofs.exception.ProofGenerationException;
import org.semanticweb.owlapitools.proofs.expressions.ExpressionUtils;
import org.semanticweb.owlapitools.proofs.expressions.OWLAxiomExpression;
import org.semanticweb.owlapitools.proofs.expressions.OWLExpression;
import com.clarkparsia.owlapi.explanation.DefaultExplanationGenerator;
import reasoning.ExtDisjReasoner;
import reasoning.UnsupportedDTHermitReasonerFactory;
import scc.graphAlgo.DFSReachability;
import scc.graphAlgo.NodeReachability;
import scc.graphDataStructure.LightAdjacencyList;
import scc.graphDataStructure.LightNode;
import scc.ontology.ExplanationProgMonitor;
import thread.ConservativityExplanationThread;
import thread.EntailmentExplanationThread;
import thread.EntailmentTracingThread;
import thread.OntoClassificationThread;
import thread.SatExplanationThread;
import thread.UnsatExplanationThread;
import uk.ac.manchester.cs.owlapi.modularity.ModuleType;
import uk.ac.manchester.cs.owlapi.modularity.SyntacticLocalityModuleExtractor;
import uk.ac.manchester.syntactic_locality.OntologyModuleExtractor;
import uk.ac.manchester.syntactic_locality.OntologyModuleExtractor.TYPEMODULE;
import uk.ac.ox.krr.logmap2.indexing.JointIndexManager;
import uk.ac.ox.krr.logmap2.mappings.objects.MappingObjectStr;
import uk.ac.ox.krr.logmap2.utilities.Utilities;
import visitor.disjToConj.ClassificationOverapproximator;
import visitor.disjToConj.OWLAxiomOverapproximationVisitor;
import auxStructures.Pair;
import enumerations.OS;
import enumerations.REASONER_KIND;
public class OntoUtil {
// Prefix used when minting fresh (synthetic) class IRIs.
static final String freshClassPrefix = "#Class_";
// Counter for the next fresh class id.
static int nextFreshId = 0;
// NOTE(review): SimpleDateFormat is not thread-safe — confirm this formatter
// is only used from a single thread.
private static final DateFormat dateFormat = new SimpleDateFormat(
        "yyyyMMdd_HHmmss");
private static Calendar cal = Calendar.getInstance();
// Factory used throughout the class to create reasoners; set externally.
public static OWLReasonerFactory reasonerFactory;
private static ExplanationProgMonitor progMonitor = new ExplanationProgMonitor();
private static SimpleIRIShortFormProvider shortFormProvider = new SimpleIRIShortFormProvider();
// Registry of live reasoners, maintained by get/disposeReasoners.
private static Set<OWLReasoner> reasoners = new HashSet<>();
// private static OWLDataFactory dataFactory = new OWLDataFactoryImpl(false,
// false);
private static OWLDataFactory dataFactory = OWLManager.getOWLDataFactory();
// All managers created so far; getManager(false) returns the most recent.
private static LinkedList<OWLOntologyManager> managers = new LinkedList<>();
private static Map<REASONER_KIND, Boolean> owlLinkReasonersActive = new HashMap<>(
        REASONER_KIND.values().length);
private static Map<REASONER_KIND, String> owlLinkReasonersCmd = new HashMap<>(
        REASONER_KIND.values().length);
// External processes (e.g. Konclude) to clean up at JVM shutdown.
private static List<Process> systemProcesses = new LinkedList<>();
/*
 * Class initialization: registers the default ontology manager, configures
 * the Konclude OWLlink reasoner entry, and installs a Linux-only shutdown
 * hook that kills leftover Konclude processes.
 */
static {
    managers.add(OWLManager.createOWLOntologyManager(dataFactory));
    owlLinkReasonersActive.put(REASONER_KIND.KONCLUDE, false);
    owlLinkReasonersCmd.put(REASONER_KIND.KONCLUDE,
            "lib/Konclude-v0.6.0-408-linux64/myKonclude");
    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {
            // pgrep/xargs-based cleanup only works on Linux.
            if (!Params.os.equals(OS.LINUX)) {
                return;
            }
            System.out.println("Running Shutdown Hook");
            // Counter kept for the final message; no per-process kill is
            // performed here (the pgrep|xargs pipeline below does the work).
            int c = 0;
            ProcessBuilder pb = new ProcessBuilder(new String[]
                    { "/bin/sh", "-c", "pgrep Konclude | xargs kill" });
            StringBuilder output = new StringBuilder();
            try {
                pb.redirectErrorStream(true);
                Process proc = pb.start();
                // Close the stream even if reading fails; previously a failed
                // start() left `in` null and caused an NPE on in.available().
                try (InputStream in = proc.getInputStream()) {
                    byte[] data = new byte[in.available()];
                    in.read(data);
                    output.append(new String(data, "UTF-8"));
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            System.out.println(output);
            System.out.println(c + " process(es) terminated");
        }
    });
}
/**
 * Looks up an entity (class, object/data property, or named individual) in
 * the ontology signature by the short fragment of its IRI.
 *
 * @param o the ontology whose signature is searched
 * @param label the IRI short fragment to match
 * @return the first matching entity, or {@code null} if none matches
 */
public static OWLEntity getEntityFromName(OWLOntology o, String label){
    for (OWLEntity entity : o.getSignature()) {
        boolean relevantKind = entity.isOWLClass() || entity.isOWLDataProperty()
                || entity.isOWLObjectProperty() || entity.isOWLNamedIndividual();
        if (!relevantKind)
            continue;
        if (label.equals(getIRIShortFragment(entity.getIRI())))
            return entity;
    }
    return null;
}
/**
 * Computes the description-logic expressivity name of the given ontology.
 *
 * @param onto the ontology to inspect
 * @return the DL expressivity name
 */
static public String getDLName(OWLOntology onto) {
    DLExpressivityChecker checker =
            new DLExpressivityChecker(Collections.singleton(onto));
    return checker.getDescriptionLogicName();
}
/**
 * Returns the most recently created ontology manager, optionally creating a
 * fresh one first.
 *
 * @param createNew if {@code true}, create and register a new manager
 * @return the last manager in the internal list
 */
public static OWLOntologyManager getManager(boolean createNew) {
    if (createNew) {
        managers.add(OWLManager.createOWLOntologyManager(dataFactory));
        if (!Params.bufferingReasoner && !Params.incrementalReasoning) {
            // NOTE(review): this listener has an empty body and appears to
            // be a no-op — confirm whether it was meant to trigger reasoner
            // refreshes on ontology changes.
            managers.getLast().addOntologyChangeListener(
                    new OWLOntologyChangeListener() {
                        @Override
                        public void ontologiesChanged(
                                List<? extends OWLOntologyChange> arg0)
                                throws OWLException {
                        }
                    });
        }
    }
    return managers.getLast();
}
/**
 * Returns the shared OWL data factory used by all managers in this class.
 *
 * @return the singleton {@link OWLDataFactory}
 */
public static OWLDataFactory getDataFactory() {
    // return manager.getOWLDataFactory();
    return dataFactory;
}
/**
 * Interrupts and disposes the given reasoners and removes them from the
 * internal registry. For {@link ExtDisjReasoner} wrappers the delegate
 * reasoner is deregistered as well.
 *
 * @param r reasoners to dispose; {@code null} entries are skipped
 */
public static void disposeReasoners(OWLReasoner... r) {
    for (OWLReasoner reasoner : r) {
        if (reasoner == null)
            continue;
        reasoner.interrupt();
        reasoner.dispose();
        reasoners.remove(reasoner);
        if (reasoner instanceof ExtDisjReasoner)
            reasoners.remove(((ExtDisjReasoner) reasoner).getReasoner());
    }
}
/**
 * Converts a set of LogMap mappings into OWL axioms over the (imports-closed)
 * signatures of the two input ontologies, logging a mismatch between the
 * number of generated axioms and the expected mapping count.
 *
 * @param fstO first input ontology
 * @param sndO second input ontology
 * @param mappings the LogMap mappings to convert
 * @return the axioms encoding the alignment
 */
public static Set<OWLAxiom> convertAlignmentToAxioms(OWLOntology fstO,
        OWLOntology sndO, Set<MappingObjectStr> mappings) {
    Set<OWLEntity> sig1 = fstO.getSignature(true);
    Set<OWLEntity> sig2 = sndO.getSignature(true);
    Set<OWLAxiom> alignment = new HashSet<>();
    for (MappingObjectStr mapping : mappings)
        alignment.addAll(OntoUtil.convertMappingToAxiom(sig1, sig2, mapping));
    // Sanity check only: both the OAEI and non-OAEI paths merely log the
    // mismatch (the original code had identical branches for both cases).
    int expected = LogMapWrapper.countMappings(mappings);
    if (alignment.size() != expected)
        FileUtil.writeErrorLogAndConsole(alignment.size()
                + " mapping(s) but expected " + expected);
    return alignment;
}
/**
 * Adds or removes a set of axioms from an ontology and logs the number of
 * applied changes.
 *
 * @param o the ontology to modify
 * @param axioms the axioms to add or remove
 * @param manager the manager owning the ontology
 * @param remove {@code true} to remove, {@code false} to add
 * @param suppressOutput if {@code true}, do not log the change count
 */
private static void alterOntologyWithAxioms(OWLOntology o,
        Set<OWLAxiom> axioms, OWLOntologyManager manager, boolean remove,
        boolean suppressOutput) {
    // NOTE(review): in OWLAPI 3.x add/removeAxioms already applies the
    // changes; applyChanges re-applies the returned list (idempotent for
    // add/remove) — confirm this double application is intentional.
    List<OWLOntologyChange> changes = remove ? manager.removeAxioms(o,
            axioms) : manager.addAxioms(o, axioms);
    manager.applyChanges(changes);
    if (!suppressOutput)
        FileUtil.writeLogAndConsole(changes.size() + " axioms "
                + (remove ? "removed" : "added"));
}
/**
 * Adds the given axioms to the ontology.
 *
 * @param o the ontology to extend
 * @param manager the manager owning the ontology
 * @param axioms the axioms to add
 * @param suppressOutput if {@code true}, do not log the change count
 */
public static void addAxiomsToOntology(OWLOntology o,
        OWLOntologyManager manager, Set<OWLAxiom> axioms,
        boolean suppressOutput) {
    alterOntologyWithAxioms(o, axioms, manager, false, suppressOutput);
}
/**
 * Removes the given axioms from the ontology.
 *
 * @param o the ontology to shrink
 * @param manager the manager owning the ontology
 * @param axioms the axioms to remove
 * @param suppressOutput if {@code true}, do not log the change count
 */
public static void removeAxiomsFromOntology(OWLOntology o,
        OWLOntologyManager manager, Set<OWLAxiom> axioms,
        boolean suppressOutput) {
    alterOntologyWithAxioms(o, axioms, manager, true, suppressOutput);
}
/**
 * Computes explanation traces for the given axiom by running an
 * {@link EntailmentTracingThread} under a timeout, and flattens the
 * per-callable results into a single set.
 *
 * @param r the reasoner to trace with
 * @param ax the entailed axiom to explain
 * @param limit maximum number of explanations (forwarded to the thread)
 * @param timeout timeout in seconds
 * @param printEach progress-printing interval (forwarded to the thread)
 * @return the merged explanations, or {@code null} on interruption/execution failure
 * @throws TimeoutException if the timeout is reached (re-thrown after logging)
 * @throws ProofGenerationException declared for callers; raised inside the tracing thread
 */
public static Set<Explanation<OWLAxiom>> getTracingForAxiom(OWLReasoner r,
        OWLAxiom ax, int limit, int timeout, int printEach)
        throws ProofGenerationException, TimeoutException {
    List<Set<Explanation<OWLAxiom>>> l;
    try {
        l = Util.runExplanationCallables(TimeUnit.SECONDS, timeout,
                Collections.singleton(
                        new EntailmentTracingThread(ax, r, limit, timeout,
                                Params.suppressFullReasoningOutput, printEach)));
    } catch (InterruptedException | ExecutionException e) {
        // Treat interruption/execution failure as "no explanations".
        FileUtil.writeErrorLogAndConsole(e.getMessage());
        return null;
    } catch (TimeoutException e) {
        FileUtil.writeLogAndConsole("Timeout of " + timeout +
                "(s) reached while computing the trace");
        throw e;
    }
    // Flatten the per-callable result sets into one.
    Set<Explanation<OWLAxiom>> res = new HashSet<>();
    for (Set<Explanation<OWLAxiom>> s : l)
        res.addAll(s);
    return res;
    // ExecutorService executor = Executors.newFixedThreadPool(1);
    //
    // long time = Util.getMSec();
    //
    // EntailmentTracingThread thread = new EntailmentTracingThread(ax, r,
    // limit, timeout, Params.suppressFullReasoningOutput, printEach);
    //
    // Future<Set<Explanation<OWLAxiom>>> f = executor.submit(thread);
    //
    // executor.shutdown();
    //
    // Set<Explanation<OWLAxiom>> res = null;
    //
    // try {
    // res = f.get(timeout, TimeUnit.SECONDS);
    // } catch (InterruptedException | ExecutionException e) {
    // FileUtil.writeErrorLogAndConsole(e.getMessage());
    // return null;
    // } catch (TimeoutException e) {
    // FileUtil.writeLogAndConsole("Timeout of " + timeout
    // + "(s) reached while computing the trace");
    // f.cancel(true);
    // executor.shutdownNow();
    // try {
    // org.semanticweb.elk.reasoner.Reasoner internalReasoner =
    // ((ElkReasoner) r).getInternalReasoner();
    // if(!internalReasoner.isInterrupted()){
    // FileUtil.writeLogAndConsole("Interrupting ELK");
    // internalReasoner.interrupt();
    // // otherwise next request will fail!
    // internalReasoner.clearInterrupt();
    // }
    // throw e;
    // }
    // catch(ReasonerInterruptedException e1){
    // // expected, do nothing else than printing
    // FileUtil.writeLogAndConsole("Reasoner interrupted after timeout");
    // }
    // } finally {
    // executor.shutdownNow();
    // time = Util.getDiffmsec(time);
    // }
    //
    // return res;
}
/**
 * Creates a fresh reasoner of the same kind over the same root ontology and
 * disposes the old one.
 *
 * @param rOld the reasoner to replace
 * @return the newly created reasoner
 */
private static OWLReasoner recreateReasonerAndDisposeOld(OWLReasoner rOld) {
    OWLOntology rootOnto = rOld.getRootOntology();
    REASONER_KIND kind = REASONER_KIND.getKind(rOld);
    OWLReasoner rNew = getReasoner(rootOnto, kind, getManager(false));
    OntoUtil.disposeReasoners(rOld);
    return rNew;
}
/**
 * Computes explanations for an entailed axiom using ELK proofs, with three
 * (internally flagged) strategies: trace+justification (default), plain
 * justifications, and full proof unwinding — the latter two are disabled
 * comparison modes.
 *
 * @param r the reasoner (must be an {@link ElkReasoner} for proof access)
 * @param ax the entailed axiom to explain
 * @param limit maximum number of explanations
 * @param timeout timeout in seconds for the explanation sub-calls
 * @param suppressOutput if {@code true}, suppress detailed proof output
 * @param printEach progress-printing interval
 * @return the explanations, or {@code null} on proof failure/interruption
 */
public static Set<Explanation<OWLAxiom>> getTracingForAxiom(OWLReasoner r,
        OWLAxiom ax, int limit, int timeout, boolean suppressOutput,
        int printEach) throws ProofGenerationException,
        InterruptedException, ExecutionException, TimeoutException {
    // for comparison only
    boolean just = false;
    boolean proof = false;
    boolean traceJust = true;
    boolean fullOutput = false;
    // Get the first derivable expression which corresponds to the
    // entailment.
    // "Derivable" means that it can provide access to inferences which
    // directly derived it.
    OWLExpression derived = null;
    try {
        derived = ((ElkReasoner) r).getDerivedExpression(ax);
    }
    catch(ProofGenerationException e){
        FileUtil.writeErrorLogAndConsole(REASONER_KIND.ELKTRACE.toString()
                + " interrupted while deriving expression");
        return null;
    }
    Set<Explanation<OWLAxiom>> res = null;
    // return unwindProofs(derived, limit, suppressOutput);
    long time = Util.getMSec();
    // Bail out early if this thread was cancelled by the caller.
    if(Thread.currentThread().isInterrupted())
        return null;
    if(traceJust){
        // Flatten the proof into the set of used axioms, then compute
        // proper justifications restricted to those axioms.
        res = Collections.singleton(new Explanation<OWLAxiom>(ax,
                Proofs.getUsedAxioms((ExplainingOWLReasoner) r,ax,limit>1)));
        FileUtil.writeLogAndConsole(Util.getDiffmsec(time) +
                " (ms) Flatten" + (fullOutput ? (": " + res) : ""));
        if(Thread.currentThread().isInterrupted())
            return null;
        if(res.iterator().hasNext()){
            res = OntoUtil.getExplanationForAxiom(ax,
                    res.iterator().next().getAxioms(), limit, reasonerFactory,
                    timeout, Params.laconicJust, printEach);
            FileUtil.writeLogAndConsole(Util.getDiffmsec(time) +
                    " (ms) Trace+Just " + ax + (fullOutput ? (": " + res) : ""));
        }
    }
    if(Thread.currentThread().isInterrupted())
        return null;
    if(just){
        // Disabled comparison mode: justifications over the whole ontology.
        time = Util.getMSec();
        res = OntoUtil.getExplanationForAxiom(ax, r.getRootOntology(), limit,
                reasonerFactory, timeout, Params.laconicJust, printEach);
        FileUtil.writeLogAndConsole(Util.getDiffmsec(time) +
                " (ms) Justifications " + ax + (fullOutput ? (": " + res) : ""));
    }
    if(Thread.currentThread().isInterrupted())
        return null;
    if(proof){
        // Disabled comparison mode: full proof enumeration.
        time = Util.getMSec();
        try {
            res = getProofsAsExplanations(derived, limit, suppressOutput);
            FileUtil.writeLogAndConsole(Util.getDiffmsec(time)
                    + " (ms) Proofs " + ax + (fullOutput ? (": " + res) : ""));
            // for (Explanation<OWLAxiom> explanation : res)
            // if(!isEntailed(explanation.getAxioms(), ax))
            // FileUtil.writeErrorLogAndConsole(explanation +
            // " does not entail axiom " + ax);
        }
        catch(NoSuchElementException e){
            e.printStackTrace();
            FileUtil.writeErrorLogAndConsole(e.toString());
            System.exit(1);
        }
    }
    return res;
}
/**
 * Checks whether {@code ax} is entailed by the given premise axioms, using a
 * throw-away Pellet reasoner over a temporary ontology.
 *
 * @param axioms the premise axioms
 * @param ax the candidate entailment
 * @return {@code true} iff entailed; {@code false} also if the temporary
 *         ontology cannot be created (failure is logged, not propagated)
 */
public static boolean isEntailed(Set<OWLAxiom> axioms, OWLAxiom ax) {
    OWLOntologyManager manager = getManager(false);
    OWLOntology onto = null;
    boolean res = false;
    OWLReasoner r = null;
    try {
        onto = manager.createOntology(axioms);
        r = getReasoner(onto, REASONER_KIND.PELLET, manager);
        res = r.isEntailed(ax);
    } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
    }
    finally {
        OntoUtil.disposeReasoners(r);
        // Remove the throw-away ontology so repeated calls do not
        // accumulate ontologies inside the shared manager.
        if (onto != null)
            manager.removeOntology(onto);
    }
    return res;
}
/**
 * Enumerates proofs of {@code expression} by breadth-first expansion of its
 * inferences, forking a new partial proof whenever an expression has more
 * than one inference, and converts each completed proof into an
 * {@link Explanation} over the axioms it uses.
 *
 * Fix: when forking on an alternative inference, the remaining-work queue of
 * the CURRENT proof ({@code toExpand}) is copied. The previous code copied
 * {@code toBeExpanded} — the first proof's live queue — which gave branched
 * proofs wrong (and mutating) premises.
 *
 * @param expression the derived expression to explain
 * @param limit maximum number of branches per expression (0 = unlimited)
 * @param suppressOutput unused here; kept for signature compatibility
 * @return one explanation per completed proof
 * @throws ProofGenerationException if inference retrieval fails
 */
private static Set<Explanation<OWLAxiom>> getProofsAsExplanations(
        OWLExpression expression, int limit, boolean suppressOutput)
        throws ProofGenerationException {
    Set<LinkedList<OWLExpression>> completedProofs = new HashSet<>();
    LinkedList<Pair<LinkedList<OWLExpression>>> incompleteProofs =
            new LinkedList<>();
    LinkedList<OWLExpression> tmpProof = new LinkedList<>();
    LinkedList<OWLExpression> toBeExpanded = new LinkedList<>();
    toBeExpanded.add(expression);
    incompleteProofs.add(new Pair<>(toBeExpanded,tmpProof));
    Set<Explanation<OWLAxiom>> res = new HashSet<>();
    while (!incompleteProofs.isEmpty()) {
        Pair<LinkedList<OWLExpression>> proof = incompleteProofs.get(0);
        LinkedList<OWLExpression> toExpand = proof.getFirst();
        int branches = 0;
        if(!toExpand.isEmpty()){
            LinkedList<OWLExpression> expanded = proof.getSecond();
            OWLExpression tmpExpr = toExpand.removeFirst();
            expanded.add(tmpExpr);
            for (OWLInference inf : tmpExpr.getInferences()) {
                if(++branches > 1){
                    // Fork: copy the CURRENT proof's state (tmpExpr already
                    // moved to `expanded`, so the copy of `toExpand` is the
                    // correct remaining work for the new branch).
                    Pair<LinkedList<OWLExpression>> newProof =
                            new Pair<>(new LinkedList<>(toExpand),
                                    new LinkedList<>(expanded));
                    newProof.getFirst().addAll(inf.getPremises());
                    incompleteProofs.add(newProof);
                }
                else
                    toExpand.addAll(inf.getPremises());
                if(limit > 0 && branches >= limit)
                    break;
            }
        }
        if(toExpand.isEmpty()){
            incompleteProofs.remove(proof);
            completedProofs.add(proof.getSecond());
        }
        if(Thread.interrupted())
            break;
    }
    // Convert each completed proof into an explanation over its axioms.
    for (LinkedList<OWLExpression> proof : completedProofs) {
        Set<OWLAxiom> just = new HashSet<>();
        for (OWLExpression expr : proof)
            just.add(ExpressionUtils.getAxiom(expr));
        Explanation<OWLAxiom> expl = new Explanation<>(
                ExpressionUtils.getAxiom(expression), just);
        res.add(expl);
    }
    return res;
}
/**
 * Recursively unwinds all inferences reachable from {@code expression} and
 * collapses every visited expression's axiom into a single explanation
 * (i.e. the union of all proof premises, not per-proof justifications).
 *
 * @param expression the derived expression to unwind
 * @param limit currently unused (the limit check is commented out)
 * @param suppressOutput if {@code true}, suppress per-step logging
 * @return a singleton set with one merged explanation
 * @throws ProofGenerationException if inference retrieval fails
 */
private static Set<Explanation<OWLAxiom>> unwindProofs(
        OWLExpression expression, int limit, boolean suppressOutput)
        throws ProofGenerationException {
    // Start recursive unwinding
    LinkedList<OWLExpression> toDo = new LinkedList<OWLExpression>();
    Set<OWLExpression> done = new HashSet<OWLExpression>();
    toDo.add(expression);
    done.add(expression);
    int numInf = 0;
    Set<Explanation<OWLAxiom>> res = new HashSet<>();
    Set<OWLAxiom> just = new HashSet<>();
    while (true) {
        OWLExpression next = toDo.poll();
        if (next == null)
            break;
        if (!suppressOutput)
            FileUtil.writeLogAndConsole("Current expression: " + next);
        for (OWLInference inf : next.getInferences()) {
            if (!suppressOutput)
                FileUtil.writeLogAndConsole("\t\t" + inf);
            // Recursively unwind premise inferences
            for (OWLExpression premise : inf.getPremises()) {
                if (!suppressOutput)
                    FileUtil.writeLogAndConsole("\t\t\tPremise: " + premise);
                if (done.add(premise))
                    toDo.addFirst(premise);
            }
            if ( // (limit > 0 && ++numInf >= limit) ||
            Thread.interrupted())
                break;
        }
    }
    // Merge every visited expression's axiom into one justification.
    for (OWLExpression expr : done)
        just.add(ExpressionUtils.getAxiom(expr));
    Explanation<OWLAxiom> expl = new Explanation<>(
            ExpressionUtils.getAxiom(expression), just);
    res.add(expl);
    return res;
}
/**
 * Convenience overload: explains {@code ax} against all axioms of the
 * given ontology.
 *
 * @see #getExplanationForAxiom(OWLAxiom, Set, int, OWLReasonerFactory, int, boolean, int)
 */
public static Set<Explanation<OWLAxiom>> getExplanationForAxiom(
        OWLAxiom ax, OWLOntology alignOnto, int limit,
        OWLReasonerFactory reasonerFactory, int timeout, boolean laconic,
        int printEach) throws InterruptedException, ExecutionException, TimeoutException {
    Set<OWLAxiom> premises = alignOnto.getAxioms();
    return getExplanationForAxiom(ax, premises, limit, reasonerFactory,
            timeout, laconic, printEach);
}
/**
 * Computes up to {@code limit} explanations for {@code ax} over the given
 * premise axioms by running an {@link EntailmentExplanationThread} under a
 * timeout. Reasoner timeouts and interrupted explanation generation are
 * both surfaced as {@link TimeOutException}.
 *
 * @param ax the axiom to explain
 * @param axioms the premise axioms
 * @param limit maximum number of explanations
 * @param reasonerFactory factory for the entailment-checking reasoner
 * @param timeout per-explanation timeout in seconds
 * @param laconic if {@code true}, compute laconic justifications
 * @param printEach progress-printing interval
 * @return the explanations, or {@code null} if none were produced
 */
public static Set<Explanation<OWLAxiom>> getExplanationForAxiom(
        OWLAxiom ax, Set<OWLAxiom> axioms, int limit,
        OWLReasonerFactory reasonerFactory, int timeout, boolean laconic,
        int printEach) throws InterruptedException, ExecutionException,
        TimeoutException, TimeOutException {
    EntailmentExplanationThread explThread = new EntailmentExplanationThread(
            axioms, ax, reasonerFactory, limit,
            Params.suppressFullReasoningOutput, timeout * 1000, laconic, printEach);
    List<Set<Explanation<OWLAxiom>>> repairs = null;
    // Set<Explanation<OWLAxiom>> repair = null;
    // repair = explThread.call();
    try {
        // Overall budget scales with the number of requested explanations.
        repairs = Util.runExplanationCallables(TimeUnit.SECONDS, timeout*limit,
                Collections.singletonList(explThread));
    }
    catch(ExecutionException e){
        // Unwrap reasoner timeouts/interruptions from the wrapper exception.
        Throwable ee = e.getCause ();
        if (ee instanceof TimeOutException)
            throw (TimeOutException) ee;
        else if(ee instanceof ExplanationGeneratorInterruptedException)
            throw new TimeOutException();
        else
            throw e;
    }
    if(repairs == null || repairs.isEmpty())
        return null;
    return repairs.iterator().next();
}
/**
 * Computes up to {@code limit} explanations for the unsatisfiability of
 * class {@code c} in the aligned ontology by running an
 * {@link UnsatExplanationThread} under a timeout. Reasoner timeouts and
 * interrupted generation are both surfaced as {@link TimeOutException}.
 *
 * @param c the unsatisfiable class (owl:Nothing is skipped)
 * @param alignOnto the aligned ontology
 * @param limit maximum number of explanations
 * @param reasonerFactory factory for the entailment-checking reasoner
 * @param timeout per-explanation timeout in seconds
 * @param printEach progress-printing interval
 * @return the explanations, or {@code null} for owl:Nothing or when none produced
 */
public static Set<Explanation<OWLAxiom>> getExplanationForUnsat(OWLClass c,
        OWLOntology alignOnto, int limit,
        OWLReasonerFactory reasonerFactory, int timeout, int printEach)
        throws InterruptedException, ExecutionException,
        TimeoutException, TimeOutException {
    // owl:Nothing is unsatisfiable by definition — nothing to explain.
    if (c.isOWLNothing()) {
        FileUtil.writeLogAndConsole("Bottom class is empty by definition");
        return null;
    }
    UnsatExplanationThread explThread = new UnsatExplanationThread(
            alignOnto, c, reasonerFactory, limit,
            Params.suppressFullReasoningOutput, timeout * 1000, // sec -> msec
            printEach);
    List<Set<Explanation<OWLAxiom>>> repairs = null;
    // Set<Explanation<OWLAxiom>> repair = null;
    // repair = explThread.call();
    try {
        // Overall budget scales with the number of requested explanations.
        repairs = Util.runExplanationCallables(TimeUnit.SECONDS, timeout*limit,
                Collections.singletonList(explThread));
    }
    catch(ExecutionException e){
        // Unwrap reasoner timeouts/interruptions from the wrapper exception.
        Throwable ee = e.getCause ();
        if (ee instanceof TimeOutException)
            throw (TimeOutException) ee;
        else if(ee instanceof ExplanationGeneratorInterruptedException)
            throw new TimeOutException();
        else
            throw e;
    }
    if(repairs == null || repairs.isEmpty())
        return null;
    return repairs.iterator().next();
}
/**
 * Repairs unsatisfiable classes of the aligned ontology by computing, per
 * class, the mappings responsible for the unsatisfiability and collecting
 * them into a repair (a set of mappings to discard). Falls back to ELK when
 * the main reasoner times out or is interrupted.
 *
 * @param unsats the unsatisfiable classes (owl:Nothing is removed)
 * @param fstO first input ontology
 * @param sndO second input ontology
 * @param alignOnto the aligned ontology
 * @param mappings the candidate mappings (mapped to confidences internally)
 * @param useELK if {@code true}, the per-class explanation thread uses ELK
 * @return the mappings forming the repair
 */
public static Set<MappingObjectStr> repairUnsatisfiabilitiesFullReasoning(
        Set<OWLClass> unsats, OWLOntology fstO, OWLOntology sndO,
        OWLOntology alignOnto, Set<MappingObjectStr> mappings,
        boolean useELK) {
    Set<MappingObjectStr> repair = new HashSet<>();
    // Map each mapping axiom to its confidence, used to pick repair axioms.
    Map<OWLAxiom, Double> mappingsMap = new HashMap<>();
    Set<OWLEntity> sig1 = fstO.getSignature(true);
    Set<OWLEntity> sig2 = sndO.getSignature(true);
    for (MappingObjectStr m : mappings)
        for (OWLAxiom ax : convertMappingToAxiom(sig1, sig2, m))
            mappingsMap.put(ax, m.getConfidence());
    unsats.remove(OntoUtil.getDataFactory().getOWLNothing());
    int d = 0;
    for (OWLClass c : unsats) {
        ++d;
        SatExplanationThread explThread = new SatExplanationThread(
                alignOnto, mappingsMap, c, reasonerFactory, 1, useELK,
                Params.suppressFullReasoningOutput,
                Params.timeoutFullRepairExplanation);
        Set<OWLAxiom> locRepair = null;
        try {
            locRepair = explThread.call();
        }
        /*catch (org.semanticweb.HermiT.datatypes.UnsupportedDatatypeException e) {
            FileUtil.writeLogAndConsole("Unsupported datatype, switching to Pellet");
            explThread.changeReasonerFactoryPellet();
            reasonerFactory = new PelletReasonerFactory();
            try {
                locRepair = explThread.call();
            } catch (org.semanticweb.owlapi.reasoner.TimeOutException
                    | ExplanationGeneratorInterruptedException e1) {
                locRepair = elkExplanationUnsatRepair(alignOnto,
                        mappingsMap, c, Params.suppressFullReasoningOutput);
            }
        } */
        catch (org.semanticweb.owlapi.reasoner.TimeOutException
                | ExplanationGeneratorInterruptedException e) {
            // Main reasoner gave up: retry the class with ELK.
            locRepair = elkExplanationUnsatRepair(alignOnto, mappingsMap,
                    c, Params.suppressFullReasoningOutput);
        }
        if (locRepair == null || locRepair.isEmpty()) {
            FileUtil.writeErrorLogAndConsole("Class " + c
                    + " cannot be repaired");
            continue;
        }
        FileUtil.writeLogAndConsole(d + " repaired class " + c + ": "
                + locRepair);
        repair.addAll(OntoUtil.convertAxiomsToAlignment(locRepair));
        if (Params.singleClassFullRepairStep)
            break;
    }
    return repair;
}
/**
 * Fallback repair computation with ELK after the main reasoner timed out.
 * First tries an ELK-based {@link SatExplanationThread}; if that yields
 * nothing, retries once with the main reasoner (no timeout), raising an
 * {@link Error} if that also fails.
 *
 * @param alignOnto the aligned ontology
 * @param mappingsMap mapping axioms with their confidences
 * @param c the unsatisfiable class to repair
 * @param suppressOutput if {@code true}, suppress detailed output
 * @return the repair axioms, or {@code null} if the factory is already ELK
 */
public static Set<OWLAxiom> elkExplanationUnsatRepair(
        OWLOntology alignOnto, Map<OWLAxiom, Double> mappingsMap,
        OWLClass c, boolean suppressOutput) {
    // Already running ELK — no alternative reasoner to fall back to.
    if (reasonerFactory instanceof ElkReasonerFactory)
        return null;
    FileUtil.writeLogAndConsole("Explanation call using "
            + reasonerFactory.getReasonerName() + " timed out after "
            + (Params.timeoutFullRepairExplanation / 1000)
            + " s, switching to ELK");
    SatExplanationThread satThread = new SatExplanationThread(alignOnto,
            mappingsMap, c, reasonerFactory, 1, true, suppressOutput, 0);
    Set<OWLAxiom> locRepair = satThread.call();
    if (locRepair == null || locRepair.isEmpty()) {
        // ELK produced nothing: last attempt with the main reasoner,
        // this time without a timeout (0).
        try {
            locRepair = new SatExplanationThread(alignOnto, mappingsMap, c,
                    reasonerFactory, 1, false, suppressOutput, 0).call();
        } catch (org.semanticweb.owlapi.reasoner.TimeOutException
                | ExplanationGeneratorInterruptedException e) {
            throw new Error(reasonerFactory.getReasonerName() + " failed");
        }
    }
    return locRepair;
}
/**
 * Convenience overload using the class-level reasoner factory and the
 * configured maximum number of explanations for direct violations.
 *
 * @see #checkDirectViolation(OWLOntology, OWLOntology, Pair, OWLReasonerFactory, int, boolean, OWLReasoner, boolean)
 */
public static boolean checkDirectViolation(OWLOntology inputOnto,
        OWLOntology alignOnto, Pair<OWLClass> v, boolean useELK,
        OWLReasoner alignR, boolean suppressOutput) {
    int limit = Params.maxExplanationsForDirectViol;
    return checkDirectViolation(inputOnto, alignOnto, v, reasonerFactory,
            limit, useELK, alignR, suppressOutput);
}
public static boolean checkDirectViolation(OWLOntology inputOnto,
OWLOntology alignOnto, Pair<OWLClass> v,
OWLReasonerFactory reasonerFactory, int limit, boolean useELK,
OWLReasoner alignR, boolean suppressOutput) {
ConservativityExplanationThread explThread = new ConservativityExplanationThread(
inputOnto, alignOnto, v, reasonerFactory, limit, true, alignR,
suppressOutput);
return explThread.call();
}
public static Set<Explanation<OWLAxiom>> computeSubsumptionExplanation(
OWLOntology o, OWLOntologyManager manager, OWLSubClassOfAxiom ax,
int limit, boolean useELK) {
EntailmentCheckerFactory<OWLAxiom> ecf = new SatisfiabilityEntailmentCheckerFactory(
!useELK ? reasonerFactory : new ElkReasonerFactory());
ExplanationGeneratorFactory<OWLAxiom> explGenFactory = new LaconicExplanationGeneratorFactory<OWLAxiom>(
new BlackBoxExplanationGeneratorFactory<OWLAxiom>(
new Configuration<OWLAxiom>(ecf)));
ExplanationGenerator<OWLAxiom> exManager = explGenFactory
.createExplanationGenerator(o);
return limit == 0 ? exManager.getExplanations(ax) : exManager
.getExplanations(ax, limit);
}
public static Set<Explanation<OWLAxiom>> computeSubsumptionExplanation(
OWLOntology o, OWLOntologyManager manager, OWLSubClassOfAxiom ax,
boolean useELK) {
return computeSubsumptionExplanation(o, manager, ax, 0, useELK);
}
public static Set<AxiomExplanation> computeExplanations(
OWLOntologyManager manager, OWLClass c, OWLReasoner reasoner) {
DefaultExplanationGenerator exManager = new DefaultExplanationGenerator(
manager, OntoUtil.reasonerFactory, reasoner.getRootOntology(),
reasoner, progMonitor);
Set<Set<OWLAxiom>> explanations = exManager.getExplanations(c,
Params.explanationsNumber);
Set<AxiomExplanation> axiomExplanations = new HashSet<>();
for (Set<OWLAxiom> explanation : explanations)
axiomExplanations.add(new AxiomExplanation(getDataFactory()
.getOWLDeclarationAxiom(c), explanation));
return axiomExplanations;
}
static public Set<AxiomExplanation> computeExplanations(
OWLOntologyManager manager, OWLAxiom a, OWLReasoner reasoner) {
DefaultExplanationGenerator exManager = new DefaultExplanationGenerator(
manager, OntoUtil.reasonerFactory, reasoner.getRootOntology(),
reasoner, progMonitor);
Set<Set<OWLAxiom>> explanations = exManager.getExplanations(a,
Params.explanationsNumber);
Set<AxiomExplanation> axiomExplanations = new HashSet<>();
for (Set<OWLAxiom> explanation : explanations)
axiomExplanations.add(new AxiomExplanation(a, explanation));
return axiomExplanations;
}
// public static OWLAxiom createDisjointAxiom(OWLDataFactory dataFactory,
// OWLClass c1, OWLClass c2){
// OWLAxiom disjAx = null;
// if(!c1.isAnonymous() && c1.isClassExpressionLiteral() &&
// !c2.isAnonymous() && c2.isClassExpressionLiteral()){
// disjAx = dataFactory.getOWLDisjointClassesAxiom(
// c1,c2);
// if(disjAx.getClassesInSignature().size() != 2){
// FileUtil.writeErrorLogAndConsole(disjAx);
// return null;
// }
// }
// return disjAx;
// }
//
// public static Set<OWLAxiom> createDisjAxioms(Set<OWLClass> classes,
// OWLDataFactory dataFac){
// Set<OWLAxiom> disj = new HashSet<>();
// for (OWLClass c1 : classes) {
// for (OWLClass c2 : classes) {
// if(!c1.equals(c2)){
// disj.add(dataFac.getOWLDisjointClassesAxiom(c1, c2));
// }
// }
// }
// return disj;
// }
public static void approximateAxiom(OWLAxiom ax) {
ax.accept(new OWLAxiomOverapproximationVisitor(dataFactory, false));
}
// it takes an ontology and returns its overapproximation wrt classification
public static OWLOntology overApproximateOntologyClassification(
OWLOntologyManager manager, OWLOntology onto,
JointIndexManager index) {
ClassificationOverapproximator.computeApproximation(onto, index);
return onto;
}
/*public static boolean checkLocality(OWLAxiom ax, Set<OWLEntity> signature,
OWLOntologyManager manager, OWLReasonerFactory fac) {
LocalityEvaluator eval = (manager == null) ? new SyntacticLocalityEvaluator(
LocalityClass.TOP_BOTTOM) : new SemanticLocalityEvaluator(
manager, fac);
return eval.isLocal(ax, signature);
}*/
public static void printClassification(Node<OWLClass> parent,
OWLReasoner reasoner, int depth) {
// skip bottom
if (parent.isBottomNode()) {
return;
}
DefaultPrefixManager pm = new DefaultPrefixManager(reasoner
.getRootOntology().getOntologyID().getOntologyIRI().toString());
// Print an indent to denote parent-child relationships
printClassificationIndent(depth);
// Now print the node (containing the child classes)
printClassificationNode(pm, parent);
for (Node<OWLClass> child : reasoner.getSubClasses(
parent.getRepresentativeElement(), true)) {
printClassification(child, reasoner, depth + 1);
}
}
private static void printClassificationIndent(int depth) {
for (int i = 0; i < depth; i++)
FileUtil.writeLogAndConsole(" ");
}
private static void printClassificationNode(DefaultPrefixManager pm,
Node<OWLClass> node) {
// Print out a node as a list of class names in curly brackets
FileUtil.writeLogAndConsoleNONL("{");
for (Iterator<OWLClass> it = node.getEntities().iterator(); it
.hasNext();) {
OWLClass cls = it.next();
// User a prefix manager to provide a slightly nicer shorter name
FileUtil.writeLogAndConsoleNONL(pm.getShortForm(cls));
if (it.hasNext()) {
FileUtil.writeLogAndConsole(" ");
}
}
FileUtil.writeLogAndConsole("}");
}
public static void printClassification(OWLOntology filter,
Node<OWLClass> parent, OWLReasoner reasoner, int depth) {
// skip bottom
if (parent.isBottomNode()) {
return;
}
DefaultPrefixManager pm = new DefaultPrefixManager(reasoner
.getRootOntology().getOntologyID().getOntologyIRI().toString());
boolean toFilter = true;
for (OWLClass owlClass : parent.getEntities())
if (filter.containsClassInSignature(owlClass.getIRI()))
toFilter = false;
if (!toFilter) {
// Print an indent to denote parent-child relationships
printClassificationIndent(depth);
// Now print the node (containing the child classes)
printClassificationNode(filter, pm, parent);
}
depth = toFilter ? depth : depth + 1;
for (Node<OWLClass> child : reasoner.getSubClasses(
parent.getRepresentativeElement(), true)) {
printClassification(filter, child, reasoner, depth);
}
}
public static boolean checkClassification(List<OWLReasoner> reasoners) {
for (OWLReasoner r : reasoners) {
if (!checkClassification(r))
return false;
}
return true;
}
	/**
	 * Probes a reasoner for usability and reports whether the class
	 * hierarchy has been precomputed.
	 *
	 * @param r the reasoner to probe
	 * @return true iff the reasoner is usable and classified
	 */
	public static boolean checkClassification(OWLReasoner r) {
		// The consistency call forces the reasoner to touch its internal
		// state; failures here mean it cannot be used any further.
		try {
			r.isConsistent();
		}
		catch(ReasonerInterruptedException e){
			FileUtil.writeErrorLogAndConsole("Reasoner was interrupted, cannot proceed");
			return false;
		}
		// NOTE(review): an NPE is deliberately interpreted as a timeout
		// symptom - presumably some reasoners null their internal state when
		// they time out; confirm before narrowing this catch.
		catch(NullPointerException e){
			FileUtil.writeErrorLogAndConsole("Reasoner timed out, cannot proceed");
			return false;
		}
		return isPrecomputed(r, InferenceType.CLASS_HIERARCHY);
	}
private static void printClassificationNode(OWLOntology filter,
DefaultPrefixManager pm, Node<OWLClass> node) {
// Print out a node as a list of class names in curly brackets
FileUtil.writeLogAndConsoleNONL("{");
for (Iterator<OWLClass> it = node.getEntities().iterator(); it
.hasNext();) {
OWLClass cls = it.next();
if (!filter.containsClassInSignature(cls.getIRI()))
continue;
// User a prefix manager to provide a slightly nicer shorter name
FileUtil.writeLogAndConsoleNONL(pm.getShortForm(cls));
if (it.hasNext()) {
FileUtil.writeLogAndConsoleNONL(" ");
}
}
FileUtil.writeLogAndConsole("}");
}
public static long ontologyClassification(boolean alignedOnto,
boolean printIt, List<OWLReasoner> reasoners, boolean tryPellet,
boolean useExtended) {
long time = ontologyClassification(alignedOnto, printIt, reasoners,
Params.reasonerKind, tryPellet);
List<OWLReasoner> listCopy = new ArrayList<>(reasoners);
Iterator<OWLReasoner> itr = listCopy.iterator();
while (itr.hasNext()) {
OWLReasoner r = itr.next();
int index = reasoners.indexOf(r);
// we use an extended version providing only direct disjoint classes
// if(alignedOnto || OntoUtil.isELKReasoner(r)){
if (useExtended)
r = new ExtDisjReasoner(r);
reasoners.remove(index);
reasoners.add(index, r);
// }
}
return time;
}
public static long ontologyClassification(boolean alignedOnto,
boolean printIt, List<OWLReasoner> reasoners, boolean tryPellet) {
return ontologyClassification(alignedOnto, printIt, reasoners,
tryPellet, true);
}
public static boolean isELKReasoner(OWLReasoner r) {
if (Params.oaei)
return r.getClass().getName().toLowerCase().contains("elk");
return r.getReasonerName() == null
|| r.getReasonerName().toLowerCase().contains("elk");
}
public static boolean isClassificationPrecomputed(OWLReasoner r) {
return isPrecomputed(r, InferenceType.CLASS_HIERARCHY);
}
public static boolean isPrecomputed(OWLReasoner r, InferenceType infType) {
// if(r instanceof OWLlinkReasoner || r instanceof
// OWLlinkHTTPXMLReasoner)
// return ((OWLlinkReasoner)
// r).isPrecomputed(InferenceType.CLASS_HIERARCHY);
// try {
return r.isPrecomputed(infType);
// }
// catch(NullPointerException e){
// FileUtil.writeErrorLogAndConsole("isPrecomputed fired a NullPointerException, returning false");
// return false;
// }
}
	/**
	 * Classifies the root ontologies of all given reasoners in parallel,
	 * enforcing the configured timeout; on timeout it replaces the affected
	 * reasoners with a fallback reasoner kind and retries recursively.
	 *
	 * @param alignedOnto whether the aligned ontology is being classified
	 *        (affects which timeout applies and the thread-pool size)
	 * @param printIt if true, print the class hierarchy of each classified
	 *        reasoner on success
	 * @param reasoners reasoners to classify; entries may be REPLACED in
	 *        place when a timeout forces switching to another reasoner
	 * @param currentReasoner the reasoner kind currently in use
	 * @param tryPellet whether Pellet may be tried as the next fallback
	 * @return elapsed milliseconds, or -1 on inconsistency or failure
	 */
	public static long ontologyClassification(boolean alignedOnto,
			boolean printIt, List<OWLReasoner> reasoners,
			REASONER_KIND currentReasoner, boolean tryPellet) {
		// Never fall back to Pellet when Pellet is the kind that just ran.
		boolean tryPelletL = currentReasoner.equals(REASONER_KIND.PELLET) ? false
				: tryPellet;
		int timeout = alignedOnto ? Params.alignOntoClassificationTimeout
				: Params.inputOntoClassificationTimeout;
		// A configured timeout of 0 means "no timeout".
		if (timeout == 0)
			timeout = Integer.MAX_VALUE;
		ExecutorService executor = Executors.newFixedThreadPool(alignedOnto ? 1
				: 2);
		List<Future<Boolean>> futures = new ArrayList<>(reasoners.size());
		List<OntoClassificationThread> threads = new ArrayList<>(
				reasoners.size());
		long time = Util.getMSec();
		// Submit a classification task only for reasoners not yet classified.
		for (OWLReasoner r : reasoners) {
			if (!isClassificationPrecomputed(r)) {
				OntoClassificationThread t = new OntoClassificationThread(r);
				threads.add(t);
				futures.add(executor.submit(t));
			}
		}
		executor.shutdown();
		try {
			// Each future returns false iff its ontology is inconsistent.
			for (Future<Boolean> f : futures) {
				if (!f.get(timeout, TimeUnit.SECONDS)) {
					// inconsistent ontology!
					FileUtil.writeLogAndConsole("\nInconsistent ontology, "
							+ "skipping it");
					return -1;
				}
			}
		} catch (InterruptedException | ExecutionException e) {
			FileUtil.writeErrorLogAndConsole(e.getMessage());
			return -1;
		} catch (TimeoutException e) {
			// If the fallback kind itself timed out there is nothing left
			// to try: interrupt, dispose and give up.
			if (currentReasoner.equals(Params.reasonerAfterTimeout)) {
				FileUtil.writeLogAndConsole("Timeout of " + timeout
						+ "(s) reached with " + currentReasoner
						+ ", skipping this test");
				for (OntoClassificationThread t : threads) {
					OWLReasoner r = t.getReasoner();
					r.interrupt();
					OntoUtil.disposeReasoners(r);
				}
				return -1;
			}
			FileUtil.writeLogAndConsole("Timeout of "
					+ timeout
					+ "(s) reached with "
					+ currentReasoner
					+ ", trying with "
					+ (tryPelletL ? REASONER_KIND.PELLET
							: Params.reasonerAfterTimeout));
			// (alignedOnto ? Params.reasonerBasic
			// : Params.reasonerAfterTimeout));
			// Replace every unclassified reasoner with a fresh one of the
			// fallback kind, preserving its position in the list.
			for (OntoClassificationThread t : threads) {
				OWLReasoner r = t.getReasoner();
				if (!isClassificationPrecomputed(t.getReasoner())) {
					OWLOntology o = r.getRootOntology();
					int index = reasoners.indexOf(r);
					r.interrupt();
					OntoUtil.disposeReasoners(r);
					if (tryPelletL)
						Params.reasonerKind = REASONER_KIND.PELLET;
					r = OntoUtil.getReasoner(o,
							tryPelletL ? REASONER_KIND.PELLET
									: Params.reasonerAfterTimeout,
							getManager(false));
					// (alignedOnto ? Params.reasonerBasic
					// : Params.reasonerAfterTimeout), manager);
					reasoners.remove(index);
					reasoners.add(index, r);
				}
			}
			// hopefully at this point all the reasoners are interrupted and
			// disposed
			for (Future<Boolean> f : futures)
				f.cancel(true);
			executor.shutdownNow();
			// Recursive retry with the fallback kind; Pellet is allowed as a
			// further fallback only if it has not been tried yet.
			long newTime = ontologyClassification(alignedOnto, printIt,
					reasoners, tryPelletL ? REASONER_KIND.PELLET
							: Params.reasonerAfterTimeout,
					!currentReasoner.equals(REASONER_KIND.PELLET));
			// (alignedOnto ? Params.reasonerBasic
			// : Params.reasonerAfterTimeout));
			// for (OWLReasoner owlReasoner : reasoners)
			// if(!isClassificationPrecomputed(owlReasoner))
			// throw new Error(owlReasoner.getRootOntology().
			// getOntologyID().getOntologyIRI() +
			// " REASONER NOT CLASSIFIED");
			return newTime;
		} finally {
			// Cleanup runs on every exit path: interrupt stragglers, cancel
			// pending futures, shut the pool down, and record elapsed time.
			for (OntoClassificationThread t : threads) {
				if (!isClassificationPrecomputed(t.getReasoner())){
					t.getReasoner().interrupt();
				}
			}
			for (Future<Boolean> f : futures)
				if (!f.isCancelled() && !f.isDone())
					f.cancel(true);
			executor.shutdownNow();
			time = Util.getDiffmsec(time);
		}
		if (printIt)
			for (OWLReasoner r : reasoners)
				if (isClassificationPrecomputed(r))
					OntoUtil.printClassification(r.getTopClassNode(), r, 0);
		// for (OWLReasoner owlReasoner : reasoners)
		// if(!isClassificationPrecomputed(owlReasoner))
		// throw new Error(owlReasoner.getRootOntology().getOntologyID().
		// getOntologyIRI() + " REASONER NOT CLASSIFIED");
		return time;
	}
public static OWLOntology moduleExtractor(OWLOntology onto,
Set<OWLEntity> seedSig) {
OntologyModuleExtractor moduleExt =
// new OntologyModuleExtractor(onto);
new OntologyModuleExtractor(OWLManager.createOWLOntologyManager(),
onto, TYPEMODULE.BOTTOM_LOCALITY);
OWLOntology module;
try {
module = moduleExt.extractAsOntology(seedSig,
IRI.create("http://module.owl"));
} catch (OWLOntologyCreationException e) {
FileUtil.writeErrorLogAndConsole("Error while creating module for "
+ "signature " + seedSig + ", ontology " + onto);
return null;
} finally {
moduleExt.clearStrutures();
}
return module;
}
public static OWLOntology extractModule(OWLOntologyManager manager,
OWLReasoner r, Set<OWLEntity> sig, String IRISuffix, ModuleType type)
throws OWLOntologyCreationException, OWLOntologyStorageException {
OWLOntology onto = r.getRootOntology();
// We now add all subclasses (direct and indirect) of the chosen
// classes. Ideally, it should be done using a DL reasoner, in order to
// take inferred subclass relations into account. We are using the
// structural reasoner of the OWL API for simplicity.
Set<OWLEntity> seedSig = new HashSet<OWLEntity>(sig);
for (OWLEntity ent : sig) {
if (OWLClass.class.isAssignableFrom(ent.getClass())) {
NodeSet<OWLClass> subClasses = r.getSubClasses((OWLClass) ent,
false);
seedSig.addAll(subClasses.getFlattened());
}
}
// Output for debugging purposes
FileUtil.writeLogAndConsole("Extracting the module for this seed signature:");
for (OWLEntity ent : seedSig) {
FileUtil.writeLogAndConsole(" " + ent);
}
FileUtil.writeLogAndConsole("\nSome statistics of the original ontology:");
FileUtil.writeLogAndConsole(" " + onto.getSignature(true).size()
+ " entities");
FileUtil.writeLogAndConsole(" " + onto.getLogicalAxiomCount()
+ " logical axioms");
FileUtil.writeLogAndConsole(" "
+ (onto.getAxiomCount() - onto.getLogicalAxiomCount())
+ " other axioms\n");
// We now extract a locality-based module. For most reuse purposes, the
// module type should be STAR -- this yields the smallest possible
// locality-based module. These modules guarantee that all entailments
// of the original ontology that can be formulated using only terms from
// the seed signature or the module will also be entailments of the
// module. In easier words, the module preserves all knowledge of the
// ontology about the terms in the seed signature or the module.
SyntacticLocalityModuleExtractor sme = new SyntacticLocalityModuleExtractor(
manager, onto, type);
IRI moduleIRI = IRI.create(manager.getOntologyDocumentIRI(onto)
.toString() + IRISuffix);
OWLOntology mod = sme.extractAsOntology(seedSig, moduleIRI);
// Output for debugging purposes
FileUtil.writeLogAndConsole("Some statistics of the module:");
FileUtil.writeLogAndConsole(" " + mod.getSignature(true).size()
+ " entities");
FileUtil.writeLogAndConsole(" " + mod.getLogicalAxiomCount()
+ " logical axioms");
FileUtil.writeLogAndConsole(" "
+ (mod.getAxiomCount() - mod.getLogicalAxiomCount())
+ " other axioms\n");
// // And we save the module.
// System.out.println("Saving the module as "
// + mod.getOntologyID().getOntologyIRI());
// manager.saveOntology(mod);
return mod;
}
static public void chooseReasoner(String mappingFile, String trackName) {
if (mappingFile.endsWith("ASE-ekaw-iasted.rdf")
|| mappingFile.endsWith("MaasMatch-ekaw-iasted.rdf"))
Params.reasonerKind = REASONER_KIND.HERMIT;
else if (mappingFile.endsWith("MaasMatch-conference-iasted.rdf"))
Params.reasonerKind = REASONER_KIND.PELLET;
// else if(mappingFile.contains("iasted"))
// Params.reasonerKind = ENUM_REASONER.PELLET;
else if (trackName.equals("anatomy")
|| mappingFile.endsWith("MapSSS-edas-ekaw.rdf"))
Params.reasonerKind = REASONER_KIND.HERMIT;
else if (trackName.equals("largebio")) {
// if(mappingFile.contains("SNOMED2NCI"))
// Params.reasonerKind = ENUM_REASONER.ELK;
// else
Params.reasonerKind = REASONER_KIND.HERMIT;
} else if (trackName.equals("conference"))
Params.reasonerKind = REASONER_KIND.HERMIT;
else if (trackName.equals("library"))
Params.reasonerKind = REASONER_KIND.HERMIT;
else
Params.reasonerKind = REASONER_KIND.HERMIT;
}
static public void removeDatatypes(OWLOntology onto,
OWLOntologyManager manager) {
Set<OWLAxiom> toRemove = new HashSet<>();
for (OWLDatatype dt : onto.getDatatypesInSignature(true))
toRemove.addAll(onto.getDatatypeDefinitions(dt));
removeAxiomsFromOntology(onto, manager, toRemove, true);
}
	/**
	 * Creates (and registers in the static {@code reasoners} list) a
	 * reasoner of the requested kind for the given ontology. Also MUTATES
	 * the static {@code reasonerFactory} field as a side effect.
	 *
	 * @param onto the ontology to reason over
	 * @param reasonerKind which reasoner implementation to instantiate
	 * @param manager manager used to register a change listener for
	 *        Pellet's incremental reasoning mode
	 * @return the created reasoner, or {@code null} on failure
	 */
	static public OWLReasoner getReasoner(OWLOntology onto,
			REASONER_KIND reasonerKind, OWLOntologyManager manager) {
		OWLReasoner r = null;
		// OWLReasonerConfiguration rc = new SimpleConfiguration(
		// Params.inputOntoClassificationTimeout / 60 * 1000);
		try {
			FileUtil.writeLogAndConsoleNONL(getDLName(onto));
			switch (reasonerKind) {
			case HERMIT:
				// Arrays.toString(
				// ((URLClassLoader)OntoUtil.class.getClassLoader()).getURLs());
				reasonerFactory = getReasonerFactory(reasonerKind);
				// In OAEI mode HermiT is avoided for ontologies with
				// datatypes: fall back to ELK (r stays null, so the factory
				// below creates the actual reasoner).
				if (Params.oaei
						&& onto.getDatatypesInSignature(true).size() > 0) {
					reasonerFactory = new ElkReasonerFactory();
					// reasonerFactory = new
					// org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory();
					// r = reasonerFactory.createReasoner(onto);
					break;
				}
				// HermiT is instantiated directly so that unsupported
				// datatypes can be ignored and buffering configured.
				org.semanticweb.HermiT.Configuration c = new org.semanticweb.HermiT.Configuration();
				c.ignoreUnsupportedDatatypes = true;
				// c.individualTaskTimeout =
				// Params.inputOntoClassificationTimeout / 60 * 1000;
				c.bufferChanges = Params.bufferingReasoner;
				r = new Reasoner(c, onto);
				break;
			case ELKTRACE:
			case ELK:
				reasonerFactory = getReasonerFactory(reasonerKind);
				break;
			default:
				// Any other kind falls back to the structural reasoner.
				reasonerFactory = new org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory();
				break;
			}
			// if(!manager.contains(onto.getOntologyID().getOntologyIRI()))
			// System.out.println(onto);
			if (r == null) {
				// NOTE(review): this ternary looks inverted -
				// bufferingReasoner==true selects createNonBufferingReasoner()
				// while the HermiT branch above sets bufferChanges directly
				// from the same flag; confirm against the OWL API contract
				// before changing.
				r = Params.bufferingReasoner ? reasonerFactory
						.createNonBufferingReasoner(onto)// ,rc)
						: reasonerFactory.createReasoner(onto);// ,rc);
			}
			// else
			// System.out.println(r);
			FileUtil.writeLogAndConsole(", Reasoner: "
					+ getReasonerInfoString(r));
			// Pellet supports incremental reasoning via ontology-change
			// notifications.
			if (reasonerKind.equals(REASONER_KIND.PELLET)
					&& Params.incrementalReasoning)
				manager.addOntologyChangeListener((OWLOntologyChangeListener) r);
			reasoners.add(r);
			return r;
		} catch (UnknownOWLOntologyException e) {
			e.printStackTrace();
		}
		//catch (MalformedURLException e) {
		// e.printStackTrace();
		//}
		// Failure path: a null reasoner is still registered and returned.
		reasoners.add(r);
		return r;
	}
public static String getReasonerInfoString(OWLReasoner r) {
Version v;
String name;
if (Params.oaei) {
v = new Version(0, 0, 0, 0);
name = r.getClass().getName();
} else {
v = r.getReasonerVersion();
name = r.getReasonerName();
}
return (name != null ? name : "ELK") + " " + v.getMajor() + "."
+ v.getMinor() + "." + v.getPatch() + " (build " + v.getBuild()
+ ")";
}
public static String extractPrefix(OWLOntology s) {
// if(s==null)
// System.out.println();
return s.toString().indexOf("<") >= 0 ? s.toString().substring(
s.toString().indexOf("<") + 1, s.toString().indexOf(">")) :
s.getOntologyID().toString();
}
static public OWLOntology load(String iriString, boolean local,
OWLOntologyManager manager) throws OWLOntologyCreationException {
OWLOntologyLoaderConfiguration config = new OWLOntologyLoaderConfiguration();
config = config.setLoadAnnotationAxioms(false);
IRI iri = null;
if (!local)
iri = IRI.create(iriString);
else
iri = IRI.create(new File(iriString));
OWLOntologyDocumentSource source = new IRIDocumentSource(iri);
return manager.loadOntologyFromOntologyDocument(source, config);
}
	/**
	 * Current wall-clock time rendered with the shared static date format.
	 * NOTE(review): {@code cal} appears to be a shared Calendar - whether
	 * its time field is refreshed per call depends on code outside this
	 * view; confirm it reflects "now".
	 */
	public static String getCurrTime() {
		return dateFormat.format(cal.getTime());
	}
static public void save(OWLOntology onto, String destFile,
OWLOntologyManager manager) throws OWLOntologyStorageException,
OWLOntologyCreationException, IOException {
// File file = File.createTempFile("owlapiexamples", "saving");
File file = new File(destFile);
// manager.saveOntology(onto, IRI.create(file.toURI()));
// By default ontologies are saved in the format from which they were
// loaded. In this case the ontology was loaded from an rdf/xml file We
// can get information about the format of an ontology from its manager
OWLOntologyFormat format = manager.getOntologyFormat(onto);
// We can save the ontology in a different format Lets save the ontology
// in owl/xml format
OWLXMLOntologyFormat owlxmlFormat = new OWLXMLOntologyFormat();
// Some ontology formats support prefix names and prefix IRIs. In our
// case we loaded the pizza ontology from an rdf/xml format, which
// supports prefixes. When we save the ontology in the new format we
// will copy the prefixes over so that we have nicely abbreviated IRIs
// in the new ontology document
if (format.isPrefixOWLOntologyFormat()) {
owlxmlFormat.copyPrefixesFrom(format.asPrefixOWLOntologyFormat());
}
manager.saveOntology(onto, owlxmlFormat, IRI.create(file.toURI()));
}
public void shouldWalkOntology(OWLOntology onto, OWLOntologyManager manager)
throws OWLOntologyCreationException {
// This example shows how to use an ontology walker to walk the asserted
// structure of an ontology. Suppose we want to find the axioms that use
// a some values from (existential restriction) we can use the walker to
// do this. We'll use the pizza ontology as an example. Load the
// ontology from the web:
IRI ontoIRI = manager.getOntologyDocumentIRI(onto);
OWLOntologyManager man = OWLManager.createOWLOntologyManager();
OWLOntology ont = man.loadOntologyFromOntologyDocument(ontoIRI);
// Create the walker. Pass in the pizza ontology - we need to put it
// into a set though, so we just create a singleton set in this case.
OWLOntologyWalker walker = new OWLOntologyWalker(
Collections.singleton(ont));
// Now ask our walker to walk over the ontology. We specify a visitor
// who gets visited by the various objects as the walker encounters
// them. We need to create out visitor. This can be any ordinary
// visitor, but we will extend the OWLOntologyWalkerVisitor because it
// provides a convenience method to get the current axiom being visited
// as we go. Create an instance and override the
// visit(OWLObjectSomeValuesFrom) method, because we are interested in
// some values from restrictions.
OWLOntologyWalkerVisitor<Object> visitor = new OWLOntologyWalkerVisitor<Object>(
walker) {
@Override
public Object visit(OWLObjectSomeValuesFrom desc) {
// Print out the restriction
FileUtil.writeLogAndConsole(desc.toString());
// Print out the axiom where the restriction is used
FileUtil.writeLogAndConsole(" " + getCurrentAxiom()
+ "\n");
// We don't need to return anything here.
return null;
}
};
// Now ask the walker to walk over the ontology structure using our
// visitor instance.
walker.walkStructure(visitor);
}
static public OWLClass createFreshClass(OWLOntology onto,
OWLDataFactory dataFactory, OWLOntologyManager manager) {
OWLClass cls = dataFactory.getOWLClass(freshClassPrefix + nextFreshId,
new DefaultPrefixManager(onto.getOntologyID().getOntologyIRI()
.toString()));
++nextFreshId;
return cls;
}
static public URI stringToURI(String str) {
URI uri = null;
try {
return new URI(str);
} catch (URISyntaxException e) {
e.printStackTrace();
}
return uri;
}
static public String extractOBOOntologyIRI(OWLOntology o) {
String iriStr = o.getOntologyID().getOntologyIRI().toString();
// int oboIndex = iriStr.indexOf("/obo/");
// int fragIndex = iriStr.indexOf('/', oboIndex);
// iriStr = iriStr.substring(0,oboIndex) + iriStr.substring(fragIndex);
// System.out.println(iriStr);
// return iriStr;
return iriStr.substring(0, iriStr.lastIndexOf('/') + 1);
}
static public void unloadAllOntologies(OWLOntologyManager manager) {
Iterator<OWLOntology> itr = manager.getOntologies().iterator();
OWLOntology o = null;
while (itr.hasNext()) {
o = itr.next();
manager.removeOntology(o);
}
// if(o != null)
// Util.getUsedMemoryAndClean(1024,250);
}
public static Set<MappingObjectStr> convertAxiomsToAlignment(
Set<OWLAxiom> axioms) {
Set<MappingObjectStr> align = new HashSet<>();
Map<OWLEntity, OWLAxiom> entity2Axiom = new HashMap<>();
Set<AxiomType<?>> types = new HashSet<>();
types.add(AxiomType.EQUIVALENT_CLASSES);
types.add(AxiomType.SUBCLASS_OF);
types.add(AxiomType.EQUIVALENT_DATA_PROPERTIES);
types.add(AxiomType.SUB_DATA_PROPERTY);
types.add(AxiomType.EQUIVALENT_OBJECT_PROPERTIES);
types.add(AxiomType.SUB_OBJECT_PROPERTY);
Set<OWLAxiom> axs = new HashSet<>();
for (OWLAxiom ax : axioms) {
if (types.contains(ax.getAxiomType())) {
AxiomType<?> type = ax.getAxiomType();
if (type.equals(AxiomType.SUBCLASS_OF)) {
OWLSubClassOfAxiom scAx = (OWLSubClassOfAxiom) ax;
if (!((OWLSubClassOfAxiom) ax).getSubClass().isAnonymous()) {
entity2Axiom.put(((OWLSubClassOfAxiom) ax)
.getSubClass().asOWLClass(), ax);
} else {
FileUtil.writeErrorLogAndConsole("Cannot convert to mappings axioms involving "
+ "other than named classes");
continue;
}
} else if (type.equals(AxiomType.SUB_OBJECT_PROPERTY)) {
OWLSubObjectPropertyOfAxiom sopAx = (OWLSubObjectPropertyOfAxiom) ax;
if (!sopAx.getSubProperty().isAnonymous()) {
entity2Axiom.put(sopAx.getSubProperty()
.asOWLObjectProperty(), ax);
} else {
FileUtil.writeErrorLogAndConsole("Cannot convert to mappings axioms involving "
+ "other than named object properties");
continue;
}
} else if (type.equals(AxiomType.SUB_DATA_PROPERTY)) {
OWLSubDataPropertyOfAxiom sdpAx = (OWLSubDataPropertyOfAxiom) ax;
if (!sdpAx.getSubProperty().isAnonymous()) {
entity2Axiom.put(sdpAx.getSubProperty()
.asOWLDataProperty(), ax);
} else {
FileUtil.writeErrorLogAndConsole("Cannot convert to mappings axioms involving "
+ "other than named object properties");
continue;
}
}
axs.add(ax);
} else {
FileUtil.writeErrorLogAndConsole("Axiom type "
+ ax.getAxiomType() + " is not supported, ignoring it");
continue;
}
for (OWLEntity e : ax.getSignature()) {
AxiomType<?> type = ax.getAxiomType();
if (type.equals(AxiomType.EQUIVALENT_CLASSES)) {
if (!e.isOWLClass()) {
FileUtil.writeErrorLogAndConsole("Cannot convert to mappings "
+ "axioms involving other than named classes");
axs.remove(ax);
break;
} else {
if (ax.getAxiomType().equals(
AxiomType.EQUIVALENT_CLASSES))
entity2Axiom.put(e.asOWLClass(), ax);
// already added, nothing to do
}
} else if (type.equals(AxiomType.EQUIVALENT_OBJECT_PROPERTIES)) {
if (!e.isOWLObjectProperty()) {
FileUtil.writeErrorLogAndConsole("Cannot convert to mappings axioms involving "
+ "other than named object properties");
axs.remove(ax);
break;
} else {
if (ax.getAxiomType().equals(
AxiomType.EQUIVALENT_OBJECT_PROPERTIES))
entity2Axiom.put(e.asOWLObjectProperty(), ax);
}
} else if (type.equals(AxiomType.EQUIVALENT_DATA_PROPERTIES)) {
if (!e.isOWLDataProperty()) {
FileUtil.writeErrorLogAndConsole("Cannot convert to mappings axioms involving "
+ "other than named data properties");
axs.remove(ax);
break;
} else {
if (ax.getAxiomType().equals(
AxiomType.EQUIVALENT_DATA_PROPERTIES))
entity2Axiom.put(e.asOWLDataProperty(), ax);
}
}
}
}
for (OWLAxiom ax : axs) {
OWLEntity[] entities = ax.getSignature().toArray(new OWLEntity[0]);
if (entities.length != 2) {
FileUtil.writeErrorLogAndConsole("Method supports exactly 2 classes, not "
+ entities.length + ", skipping axiom " + ax);
continue;
}
entities = new OWLEntity[2];
if (ax.getAxiomType().equals(AxiomType.EQUIVALENT_CLASSES)) {
align.add(new MappingObjectStr(entities[0].getIRI().toString(),
entities[1].getIRI().toString(), 1, Utilities.EQ,
Utilities.CLASSES));
// align.add(new MappingObjectStr(
// classes[1].getIRI().toString(),
// classes[0].getIRI().toString(),
// 1, Utilities.EQ, Utilities.CLASSES));
} else if (ax.getAxiomType().equals(
AxiomType.EQUIVALENT_OBJECT_PROPERTIES)) {
align.add(new MappingObjectStr(entities[0].getIRI().toString(),
entities[1].getIRI().toString(), 1, Utilities.EQ,
Utilities.OBJECTPROPERTIES));
} else if (ax.getAxiomType().equals(
AxiomType.EQUIVALENT_DATA_PROPERTIES)) {
align.add(new MappingObjectStr(entities[0].getIRI().toString(),
entities[1].getIRI().toString(), 1, Utilities.EQ,
Utilities.DATAPROPERTIES));
} else if (ax.getAxiomType().equals(AxiomType.SUBCLASS_OF)) {
if (entity2Axiom.containsKey(entities[1])
&& entity2Axiom.get(entities[1]).getAxiomType()
.equals(AxiomType.SUBCLASS_OF)) {
align.add(new MappingObjectStr(entities[0].getIRI()
.toString(), entities[1].getIRI().toString(), 1,
Utilities.EQ, Utilities.CLASSES));
// align.add(new MappingObjectStr(
// classes[1].getIRI().toString(),
// classes[0].getIRI().toString(),
// 1, Utilities.EQ, Utilities.CLASSES));
} else {
entities[0] = ((OWLSubClassOfAxiom) ax).getSubClass()
.asOWLClass();
entities[1] = ((OWLSubClassOfAxiom) ax).getSuperClass()
.asOWLClass();
align.add(new MappingObjectStr(entities[0].getIRI()
.toString(), entities[1].getIRI().toString(), 1,
Utilities.L2R, Utilities.CLASSES));
}
} else if (ax.getAxiomType().equals(AxiomType.SUB_OBJECT_PROPERTY)) {
if (entity2Axiom.containsKey(entities[1])
&& entity2Axiom.get(entities[1]).getAxiomType()
.equals(AxiomType.SUB_OBJECT_PROPERTY)) {
align.add(new MappingObjectStr(entities[0].getIRI()
.toString(), entities[1].getIRI().toString(), 1,
Utilities.EQ, Utilities.OBJECTPROPERTIES));
} else {
entities[0] = ((OWLSubObjectPropertyOfAxiom) ax)
.getSubProperty().asOWLObjectProperty();
entities[1] = ((OWLSubObjectPropertyOfAxiom) ax)
.getSuperProperty().asOWLObjectProperty();
align.add(new MappingObjectStr(entities[0].getIRI()
.toString(), entities[1].getIRI().toString(), 1,
Utilities.L2R, Utilities.OBJECTPROPERTIES));
}
} else if (ax.getAxiomType().equals(AxiomType.SUB_DATA_PROPERTY)) {
if (entity2Axiom.containsKey(entities[1])
&& entity2Axiom.get(entities[1]).getAxiomType()
.equals(AxiomType.SUB_DATA_PROPERTY)) {
align.add(new MappingObjectStr(entities[0].getIRI()
.toString(), entities[1].getIRI().toString(), 1,
Utilities.EQ, Utilities.DATAPROPERTIES));
// align.add(new MappingObjectStr(
// classes[1].getIRI().toString(),
// classes[0].getIRI().toString(),
// 1, Utilities.EQ, Utilities.CLASSES));
} else {
entities[0] = ((OWLSubDataPropertyOfAxiom) ax)
.getSubProperty().asOWLDataProperty();
entities[1] = ((OWLSubDataPropertyOfAxiom) ax)
.getSuperProperty().asOWLDataProperty();
align.add(new MappingObjectStr(entities[0].getIRI()
.toString(), entities[1].getIRI().toString(), 1,
Utilities.L2R, Utilities.DATAPROPERTIES));
}
}
}
return align;
}
public static Set<OWLAxiom> convertMappingToAxiom(Set<OWLEntity> sig1,
Set<OWLEntity> sig2, MappingObjectStr mapping) {
Set<OWLAxiom> axioms = new HashSet<>();
switch (mapping.getTypeOfMapping()) {
case Utilities.CLASSES:
OWLClass c1 = dataFactory.getOWLClass(IRI.create(mapping
.getIRIStrEnt1())),
c2 = dataFactory.getOWLClass(IRI.create(mapping.getIRIStrEnt2()));
switch (mapping.getMappingDirection()) {
case Utilities.EQ:
if ((!sig1.contains(c1) && !sig1.contains(c2))
|| (!sig1.contains(c2) && !sig2.contains(c2)))
break;
axioms.add(dataFactory.getOWLSubClassOfAxiom(c1, c2));
axioms.add(dataFactory.getOWLSubClassOfAxiom(c2, c1));
break;
case Utilities.L2R:
if ((!sig1.contains(c1) || !sig2.contains(c2)))
break;
axioms.add(dataFactory.getOWLSubClassOfAxiom(c1, c2));
break;
case Utilities.R2L:
if ((!sig1.contains(c1) || !sig2.contains(c2)))
break;
axioms.add(dataFactory.getOWLSubClassOfAxiom(c2, c1));
break;
default:
break;
}
break;
case Utilities.DATAPROPERTIES:
OWLDataProperty dp1 = dataFactory.getOWLDataProperty(IRI
.create(mapping.getIRIStrEnt1())),
dp2 = dataFactory.getOWLDataProperty(IRI.create(mapping
.getIRIStrEnt2()));
switch (mapping.getMappingDirection()) {
case Utilities.EQ:
if ((!sig1.contains(dp1) && !sig1.contains(dp2))
|| (!sig1.contains(dp2) && !sig2.contains(dp2)))
break;
axioms.add(dataFactory.getOWLSubDataPropertyOfAxiom(dp1, dp2));
axioms.add(dataFactory.getOWLSubDataPropertyOfAxiom(dp2, dp1));
break;
case Utilities.L2R:
if ((!sig1.contains(dp1) || !sig2.contains(dp2)))
break;
axioms.add(dataFactory.getOWLSubDataPropertyOfAxiom(dp1, dp2));
break;
case Utilities.R2L:
if ((!sig2.contains(dp1) || !sig1.contains(dp2)))
break;
axioms.add(dataFactory.getOWLSubDataPropertyOfAxiom(dp2, dp1));
break;
default:
break;
}
break;
case Utilities.OBJECTPROPERTIES:
OWLObjectProperty op1 = dataFactory.getOWLObjectProperty(IRI
.create(mapping.getIRIStrEnt1())),
op2 = dataFactory.getOWLObjectProperty(IRI.create(mapping
.getIRIStrEnt2()));
switch (mapping.getMappingDirection()) {
case Utilities.EQ:
if ((!sig1.contains(op1) && !sig1.contains(op2))
|| (!sig1.contains(op2) && !sig2.contains(op2)))
break;
axioms.add(dataFactory.getOWLSubObjectPropertyOfAxiom(op1, op2));
axioms.add(dataFactory.getOWLSubObjectPropertyOfAxiom(op2, op1));
break;
case Utilities.L2R:
if ((!sig1.contains(op1) || !sig2.contains(op2)))
break;
axioms.add(dataFactory.getOWLSubObjectPropertyOfAxiom(op1, op2));
break;
case Utilities.R2L:
if ((!sig2.contains(op1) || !sig1.contains(op2)))
break;
axioms.add(dataFactory.getOWLSubObjectPropertyOfAxiom(op2, op1));
break;
default:
break;
}
case Utilities.INSTANCES:
OWLNamedIndividual i1 = dataFactory.getOWLNamedIndividual(IRI
.create(mapping.getIRIStrEnt1())),
i2 = dataFactory.getOWLNamedIndividual(IRI.create(mapping
.getIRIStrEnt2()));
if (!sig1.contains(i1) || !sig2.contains(i2))
break;
axioms.add(dataFactory.getOWLSameIndividualAxiom(i1, i2));
break;
case Utilities.UNKNOWN:
FileUtil.writeErrorLogAndConsole("Unknown mapping kind: " + mapping);
break;
default:
break;
}
return axioms;
}
public static Pair<List<Pair<OWLClass>>> graphDetectionDirectViolations(
OWLOntology fstOnto, OWLOntology sndOnto, OWLOntology alignOnto,
Pair<List<Pair<OWLClass>>> viols) {
long start = Util.getMSec();
LightAdjacencyList adj = new LightAdjacencyList(fstOnto, sndOnto,
alignOnto);
NodeReachability r = new DFSReachability(adj, false);
List<Pair<OWLClass>> dirViol1 = new ArrayList<>();
List<Pair<OWLClass>> dirViol2 = new ArrayList<>();
List<Pair<OWLClass>> viol1 = viols.getFirst();
List<Pair<OWLClass>> viol2 = viols.getSecond();
for (Pair<OWLClass> v : viol1)
if (r.areReachable(v.getFirst(), v.getSecond()))
dirViol1.add(v);
for (Pair<OWLClass> v : viol2)
if (r.areReachable(v.getFirst(), v.getSecond()))
dirViol2.add(v);
FileUtil.writeLogAndConsole("Graph direct violation detection: "
+ Util.getDiffmsec(start) + " (ms)");
return new Pair<>(dirViol1, dirViol2);
}
public static List<Pair<OWLClass>> explanationDetectionDirectViolations(
OWLOntology inputOnto, OWLOntology alignOnto,
List<Pair<Integer>> violations, JointIndexManager index,
int ontoId, OWLReasoner alignR, boolean suppressOutput) {
long start = Util.getMSec();
List<Pair<OWLClass>> owlViols = new ArrayList<>(violations.size());
for (Pair<Integer> p : violations)
owlViols.add(LogMapWrapper.getOWLClassFromIndexPair(p, index));
List<Pair<OWLClass>> res = explanationDetectionDirectViolations(
inputOnto, alignOnto, owlViols, ontoId, alignR, suppressOutput);
FileUtil.writeLogAndConsole("Explanation direct violation detection: "
+ Util.getDiffmsec(start) + " (ms)");
return res;
}
public static boolean compareDirectViolations(List<Pair<OWLClass>> dirR,
List<Pair<Integer>> dirI, JointIndexManager index) {
if (dirR.size() != dirI.size())
return compareDirectViolations(dirR, "explanations",
LogMapWrapper.getOWLClassFromIndexPair(dirI, index),
"index");
return false;
}
public static boolean compareDirectViolations(List<Pair<OWLClass>> dirR,
String dirRLabel, List<Pair<OWLClass>> dirIC, String dirICLabel) {
// if(dirR.size() != dirIC.size()){
Set<Pair<OWLClass>> intersection = Util
.computeIntersection(dirR, dirIC);
dirR.removeAll(intersection);
dirIC.removeAll(intersection);
FileUtil.writeLogAndConsole("Detected by " + dirICLabel
+ " but not by " + dirRLabel + ": " + dirIC.size() + "\n"
+ dirIC.toString().replace(">>, ", ">>,\n"));
FileUtil.writeLogAndConsole("Detected by " + dirRLabel + " but not by "
+ dirICLabel + ": " + dirR.size() + "\n"
+ dirR.toString().replace(">>, ", ">>,\n"));
return true;
// }
// return false;
}
public static List<Pair<OWLClass>> explanationDetectionDirectViolations(
OWLOntology inputOnto, OWLOntology alignOnto,
List<Pair<OWLClass>> violations, int ontoId, OWLReasoner alignR,
boolean suppressOutput) {
if (violations.isEmpty())
return Collections.emptyList();
List<Pair<OWLClass>> dirViols = new ArrayList<>();
// Set<OWLClass> classSig = inputOnto.getClassesInSignature();
// classSig.remove(OntoUtil.getDataFactory().getOWLThing());
// classSig.remove(OntoUtil.getDataFactory().getOWLNothing());
FileUtil.writeLogAndConsole("Direct violations for onto " + ontoId
+ ":");
int count = 0, processed = 0;
for (Pair<OWLClass> pair : violations) {
if (!suppressOutput)
FileUtil.writeLogAndConsole("\tProcessing violation "
+ ++processed + ":");
// if(OntoUtil.isDirectViolation(pair,classSig,false))
// ++count;
if (OntoUtil.checkDirectViolation(inputOnto, alignOnto, pair,
false, alignR, suppressOutput)) {
dirViols.add(pair);
++count;
} else {
if (!suppressOutput)
FileUtil.writeLogAndConsole("\tNOT DIRECT: " + pair + "\n");
}
}
FileUtil.writeLogAndConsole("Direct violations: " + count + "/"
+ violations.size());
return dirViols;
}
	// A violation "A isA B" is direct iff at least one of its explanations
	// contains a subclass-axiom path from A to B that does not traverse any
	// other element of the same input ontology's class signature.
	public static boolean isDirectViolation(OWLOntology o, Pair<OWLClass> v,
			Set<OWLClass> classSig, boolean useELK) {
		// Compute all explanations for the subsumption v.first isA v.second.
		Set<Explanation<OWLAxiom>> expls = computeSubsumptionExplanation(o,
				getManager(false), getSubClassOfAxiom(v), useELK);
		FileUtil.writeLogAndConsole("\tFound " + expls.size() + " explanations");
		int count = 0;
		// "ext" lets any inner check reject the current explanation outright
		// and move on to the next one.
		ext: for (Explanation<OWLAxiom> expl : expls) {
			FileUtil.writeLogAndConsole("\t\tProcessing explanation " + ++count);
			// Frontier endpoints of the chain being reconstructed from the
			// explanation's subclass axioms.
			OWLClass nextSrc = v.getFirst(), nextDst = v.getSecond();
			if (expl.isJustificationEntailment()) {
				// The explanation itself entails the violation: direct.
				FileUtil.writeLogAndConsole("\t" + expl.toString());
				return true;
			}
			Set<OWLAxiom> axioms = new HashSet<>(expl.getAxioms());
			// NOTE(review): if a full pass over "axioms" removes nothing and
			// neither returns nor hits "continue ext", this loop never
			// terminates -- confirm every explanation guarantees progress.
			while (true) {
				OWLAxiom ax;
				Iterator<OWLAxiom> itr = axioms.iterator();
				while (itr.hasNext()) {
					ax = itr.next();
					// Only plain named-class subsumptions can form the path;
					// anything else disqualifies the whole explanation.
					if (!ax.getAxiomType().equals(AxiomType.SUBCLASS_OF))
						continue ext;
					OWLSubClassOfAxiom subAx = (OWLSubClassOfAxiom) ax;
					OWLClassExpression subCE = subAx.getSubClass(), supCE = subAx
							.getSuperClass();
					if (subCE.isAnonymous()
							|| !subCE.isClassExpressionLiteral()
							|| subCE.isBottomEntity() || subCE.isTopEntity())
						continue ext;
					if (supCE.isAnonymous()
							|| !supCE.isClassExpressionLiteral()
							|| supCE.isBottomEntity() || supCE.isTopEntity())
						continue ext;
					OWLClass subC = subCE.asOWLClass(), supC = supCE
							.asOWLClass();
					// Reject explanations traversing another element of the
					// same input ontology's class signature.
					if (!v.getFirst().equals(subC) && classSig.contains(subC)
							|| !v.getSecond().equals(supC)
							&& classSig.contains(supC))
						continue ext;
					if (subC.equals(nextSrc)) {
						if (supC.equals(nextDst)) {
							// Chain closed: the violation is direct.
							FileUtil.writeLogAndConsole("\t" + expl.toString());
							return true;
						}
						// Extend the chain forward from the source side.
						nextSrc = supC;
						itr.remove();
					}
					if (supC.equals(nextDst)) {
						// Extend the chain backward from the target side.
						nextDst = subC;
						itr.remove();
					}
				}
			}
		}
		return false;
	}
public static OWLSubClassOfAxiom getSubClassOfAxiom(Pair<OWLClass> p) {
return getSubClassOfAxiom(p.getFirst(), p.getSecond());
}
public static OWLSubClassOfAxiom getSubClassOfAxiom(OWLClass a, OWLClass b) {
return dataFactory.getOWLSubClassOfAxiom(a, b);
}
public static OWLOntology getAlignedOntology(OWLOntologyManager manager,
Set<OWLAxiom> alignment, OWLOntology... ontos) {
OWLOntology o = null;
try {
o = manager.createOntology(alignment);
} catch (OWLOntologyCreationException e) {
e.printStackTrace();
}
for (OWLOntology onto : ontos)
alterOntologyWithAxioms(o, onto.getAxioms(), manager, false, true);
return o;
}
public static boolean saveClassificationAxioms(OWLOntology o,
OWLReasoner r, OWLOntologyManager manager) {
// try {
List<InferredAxiomGenerator<? extends OWLAxiom>> gens = new ArrayList<InferredAxiomGenerator<? extends OWLAxiom>>();
gens.add(new InferredSubClassAxiomGenerator());
gens.add(new InferredEquivalentClassAxiomGenerator());
int preAxioms = o.getLogicalAxiomCount();
Set<OWLAxiom> axioms = new HashSet<>();
for (InferredAxiomGenerator<? extends OWLAxiom> gen : gens)
axioms.addAll(gen.createAxioms(manager, r));
// classified ontology closed by inference would be too big, avoid
// materialisation
if (preAxioms > Params.minAxiomsInferenceBlock
&& preAxioms * Params.maxInferredAxiomsTimes < axioms.size()) {
FileUtil.writeLogAndConsole("Skip aligned ontology inference materialisation (> "
+ axioms.size()
/ preAxioms
+ "x the original ontology size)");
return true;
}
OntoUtil.addAxiomsToOntology(o, manager, axioms, true);
// InferredOntologyGenerator iog =
// new InferredOntologyGenerator(r,gens);
//
// iog.fillOntology(manager, o);
FileUtil.writeLogAndConsole("Logical axioms (pre->post): " + preAxioms
+ "->" + o.getLogicalAxiomCount());
// }
// catch(org.semanticweb.owlapi.reasoner.InconsistentOntologyException
// e){
// FileUtil.writeErrorLogAndConsole("Inconsistent ontology cannot be saved!");
// return false;
// }
return true;
}
public static OWLAxiom createDisjointAxiom(OWLDataFactory dataFactory,
OWLClass c1, OWLClass c2) {
OWLAxiom disjAx = null;
if (!c1.isAnonymous() && c1.isClassExpressionLiteral()
&& !c2.isAnonymous() && c2.isClassExpressionLiteral()) {
disjAx = dataFactory.getOWLDisjointClassesAxiom(c1, c2);
if (disjAx.getClassesInSignature().size() != 2) {
FileUtil.writeErrorLogAndConsole(disjAx.toString());
return null;
}
}
return disjAx;
}
public static Set<OWLAxiom> createDisjAxioms(Set<OWLClass> classes,
OWLDataFactory dataFac) {
Set<OWLAxiom> disj = new HashSet<>();
for (OWLClass c1 : classes) {
for (OWLClass c2 : classes) {
if (!c1.equals(c2)) {
disj.add(dataFac.getOWLDisjointClassesAxiom(c1, c2));
}
}
}
return disj;
}
public static void checkActiveReasoners(boolean clean) {
if (reasoners.size() > 0) {
FileUtil.writeLogAndConsole(reasoners.size()
+ " were not disposed " + "at the end of a test");
if (clean)
disposeAllReasoners();
}
}
public static void disposeAllReasoners() {
for (OWLReasoner r : reasoners) {
r.dispose();
}
reasoners.clear();
}
public static String getIRIShortFragment(String iri) {
return shortFormProvider.getShortForm(IRI.create(iri));
}
public static String getIRIShortFragment(IRI iri) {
return getIRIShortFragment(iri.toString());
}
public static void unloadAllOntologies() {
for (OWLOntologyManager manager : managers)
unloadAllOntologies(manager);
}
public static void unloadOntologies(boolean all, OWLOntology... ontos) {
if (!all)
unloadOntologies(ontos);
try {
for (OWLOntologyManager manager : managers)
for (OWLOntology o : ontos)
if (manager.contains(o.getOntologyID().getOntologyIRI()))
manager.removeOntology(o);
} catch (UnknownOWLOntologyException e) {
e.printStackTrace();
}
}
public static void unloadOntologies(OWLOntologyManager manager,
OWLOntology... ontos) {
try {
for (OWLOntology o : ontos)
if (manager.contains(o.getOntologyID().getOntologyIRI()))
manager.removeOntology(o);
} catch (UnknownOWLOntologyException e) {
e.printStackTrace();
}
}
public static void unloadOntologies(OWLOntology... ontos) {
try {
for (OWLOntology o : ontos)
if (getManager(false).contains(
o.getOntologyID().getOntologyIRI()))
getManager(false).removeOntology(o);
} catch (UnknownOWLOntologyException e) {
e.printStackTrace();
}
}
public static Pair<OWLClass> getNamedClassesFromSubClassAxiom(
OWLSubClassOfAxiom subAx, boolean allowsTopBot) {
Pair<OWLClass> p = null;
OWLClass c1, c2;
OWLClassExpression ce1, ce2;
ce1 = subAx.getSubClass();
ce2 = subAx.getSuperClass();
if (!(ce1.isAnonymous() || ce2.isAnonymous())) {
c1 = ce1.asOWLClass();
c2 = ce2.asOWLClass();
if (allowsTopBot
|| !(c1.isBottomEntity() || c1.isTopEntity()
|| c2.isBottomEntity() || c2.isTopEntity()))
p = new Pair<>(c1, c2);
}
return p;
}
public static String getGraphIRIObjectProperty(OWLObjectProperty role) {
return "EXISTS" + LightNode.iriProvider.getShortForm(role.getIRI())
+ "DotThing";
}
public static String getGraphIRIObjectProperty(OWLObjectProperty role,
String prefix) {
return prefix + "EXISTS"
+ LightNode.iriProvider.getShortForm(role.getIRI())
+ "DotThing";
}
public static String getGraphIRIObjectProperty(String nodeIRI, String prefix) {
return prefix + "EXISTS"
+ LightNode.iriProvider.getShortForm(IRI.create(nodeIRI))
+ "DotThing";
}
public static OWLClass getNamedClassesFromSubClassAxiom(
OWLClassExpression ce, boolean allowsTopBot) {
OWLClass c;
if (!ce.isAnonymous()) {
c = ce.asOWLClass();
if (allowsTopBot || !(c.isBottomEntity() || c.isTopEntity()))
return c;
}
return null;
}
public static OWLProfileReport checkELProfile(OWLOntology onto) {
OWL2ELProfile profile = new OWL2ELProfile();
return profile.checkOntology(onto);
}
public static boolean isExtendedReasoner(OWLReasoner r) {
return r.getReasonerName().toLowerCase().contains("extended");
}
public static OWLClass getOWLClassFromIRI(String iriStr) {
return dataFactory.getOWLClass(IRI.create(iriStr));
}
public static Pair<OWLClass> getOWLClassesFromIRIs(String iriStr1,
String iriStr2) {
return new Pair<OWLClass>(dataFactory.getOWLClass(IRI.create(iriStr1)),
dataFactory.getOWLClass(IRI.create(iriStr2)));
}
	/** Adds the declaration axiom for the given entity to the ontology. */
	public static void addEntityDeclarationToOntology(OWLOntology o,
			OWLOntologyManager manager, OWLEntity e) {
		alterOntologyWithEntityDeclaration(o, manager, e, true);
	}
public static void alterOntologyWithEntityDeclaration(OWLOntology o,
OWLOntologyManager manager, OWLEntity e, boolean add) {
Set<OWLAxiom> axioms = new HashSet<>();
axioms.add(getDataFactory().getOWLDeclarationAxiom(e));
if (add)
OntoUtil.addAxiomsToOntology(o, manager, axioms, true);
else
OntoUtil.removeAxiomsFromOntology(o, manager, axioms, true);
}
public static void addEntitiesDeclarationToOntology(OWLOntology o,
OWLOntologyManager manager, Collection<OWLEntity> c) {
for (OWLEntity e : c)
addEntityDeclarationToOntology(o, manager, e);
}
	/** Removes the declaration axiom for the given entity from the ontology. */
	public static void removeEntityDeclarationToOntology(OWLOntology o,
			OWLOntologyManager manager, OWLEntity e) {
		alterOntologyWithEntityDeclaration(o, manager, e, false);
	}
public static void removeEntitiesDeclarationToOntology(OWLOntology o,
OWLOntologyManager manager, Collection<OWLEntity> c) {
for (OWLEntity e : c)
removeEntityDeclarationToOntology(o, manager, e);
}
public static OWLReasonerFactory getReasonerFactory(REASONER_KIND rk) {
switch (rk) {
case HERMIT:
// return new org.semanticweb.HermiT.Reasoner.ReasonerFactory();
return new UnsupportedDTHermitReasonerFactory();
case ELK:
case ELKTRACE:
return new ElkReasonerFactory();
default:
return new org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory();
}
}
public static String extractIRIString(OWLOntology onto) {
return onto.getOntologyID().getOntologyIRI().toString();
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.aws2.ddb;
import java.util.HashMap;
import java.util.Map;
import org.apache.camel.Exchange;
import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.BatchGetItemRequest;
import software.amazon.awssdk.services.dynamodb.model.BatchGetItemResponse;
import software.amazon.awssdk.services.dynamodb.model.KeysAndAttributes;
public class BatchGetItemsCommand extends AbstractDdbCommand {

    public BatchGetItemsCommand(DynamoDbClient ddbClient, Ddb2Configuration configuration, Exchange exchange) {
        super(ddbClient, configuration, exchange);
    }

    /**
     * Issues the batch-get request and publishes both the fetched items and
     * any keys DynamoDB could not process in this round.
     */
    @Override
    public void execute() {
        BatchGetItemResponse result = ddbClient
                .batchGetItem(BatchGetItemRequest.builder().requestItems(determineBatchItems()).build());

        // FIX: the map was previously declared as a raw type.
        Map<Object, Object> tmp = new HashMap<>();
        tmp.put(Ddb2Constants.BATCH_RESPONSE, result.responses());
        tmp.put(Ddb2Constants.UNPROCESSED_KEYS, result.unprocessedKeys());
        addToResults(tmp);
    }

    // The header value is caller-supplied; the cast is unavoidable here.
    @SuppressWarnings("unchecked")
    private Map<String, KeysAndAttributes> determineBatchItems() {
        return exchange.getIn().getHeader(Ddb2Constants.BATCH_ITEMS, Map.class);
    }
}
|
package cn.alphahub.mall.ware.convertor;
import cn.alphahub.common.mq.StockDetailTo;
import cn.alphahub.mall.ware.domain.WareOrderTaskDetail;
import cn.hutool.json.JSONUtil;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
class BeanUtilTest {
@BeforeEach
void setUp() {
System.err.println("------------------");
}
@AfterEach
void tearDown() {
System.err.println("------------------");
}
@Test
void copy() {
WareOrderTaskDetail taskDetail = new WareOrderTaskDetail();
taskDetail.setId(10086L);
taskDetail.setSkuId(10010L);
taskDetail.setSkuName("这是一个示例");
taskDetail.setSkuNum(56);
taskDetail.setTaskId(10000L);
taskDetail.setWareId(1L);
taskDetail.setLockStatus(2);
StockDetailTo detailTo = BeanUtil.INSTANCE.copy(taskDetail);
System.out.println("taskDetail: " + JSONUtil.toJsonStr(taskDetail));
System.out.println("detailTo: " + JSONUtil.toJsonStr(detailTo));
}
}
|
/*
* This file is generated by jOOQ.
*/
package com.example.db.generated;
import org.jetbrains.annotations.NotNull;
import org.jooq.Schema;
import org.jooq.impl.CatalogImpl;
import javax.annotation.processing.Generated;
import java.util.Arrays;
import java.util.List;
/**
* This class is generated by jOOQ.
*/
@Generated(
value = {
"https://www.jooq.org",
"jOOQ version:3.14.11"
},
comments = "This class is generated by jOOQ"
)
@SuppressWarnings({"all", "unchecked", "rawtypes"})
public class DefaultCatalog extends CatalogImpl {
    // NOTE: this class is generated by jOOQ -- manual edits are overwritten
    // on regeneration; change the generator configuration instead.

    private static final long serialVersionUID = 1L;

    /**
     * The reference instance of <code>DEFAULT_CATALOG</code>
     */
    public static final DefaultCatalog DEFAULT_CATALOG = new DefaultCatalog();

    /**
     * The schema <code>demo</code>.
     */
    public final Demo DEMO = Demo.DEMO;

    /**
     * No further instances allowed
     */
    private DefaultCatalog() {
        // Empty name: this is the default (unnamed) catalog.
        super("");
    }

    /**
     * Returns the schemas of this catalog; only <code>demo</code> exists.
     */
    @Override
    @NotNull
    public final List<Schema> getSchemas() {
        return Arrays.<Schema>asList(
            Demo.DEMO);
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.runtime.library.output;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.BitSet;
import java.util.List;
import com.google.protobuf.ByteString;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.tez.common.TezCommonUtils;
import org.apache.tez.common.TezRuntimeFrameworkConfigs;
import org.apache.tez.common.TezUtilsInternal;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.OutputContext;
import org.apache.tez.runtime.api.events.DataMovementEvent;
import org.apache.tez.runtime.library.api.TezRuntimeConfiguration;
import org.apache.tez.runtime.library.partitioner.HashPartitioner;
import org.apache.tez.runtime.library.shuffle.impl.ShuffleUserPayloads;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
// Tests which don't require parameterization
public class TestUnorderedKVOutput2 {

  private Configuration conf;
  private FileSystem localFs;
  private Path workingDir;

  @Before
  public void setup() throws IOException {
    conf = new Configuration();
    localFs = FileSystem.getLocal(conf);
    Path baseDir = new Path(System.getProperty("test.build.data",
        System.getProperty("java.io.tmpdir", "/tmp")),
        TestUnorderedKVOutput2.class.getName());
    workingDir = baseDir.makeQualified(localFs.getUri(),
        localFs.getWorkingDirectory());

    conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_KEY_CLASS, Text.class.getName());
    conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_VALUE_CLASS, Text.class.getName());
    conf.set(TezRuntimeConfiguration.TEZ_RUNTIME_PARTITIONER_CLASS,
        HashPartitioner.class.getName());
    conf.setStrings(TezRuntimeFrameworkConfigs.LOCAL_DIRS, workingDir.toString());
  }

  @After
  public void cleanup() throws IOException {
    localFs.delete(workingDir, true);
  }

  /** A never-started output must still report every partition empty on close. */
  @Test(timeout = 5000)
  public void testNonStartedOutput() throws Exception {
    OutputContext outputContext = OutputTestHelpers.createOutputContext();
    int numPartitions = 1;
    UnorderedKVOutput output = new UnorderedKVOutput(outputContext, numPartitions);
    output.initialize();

    List<Event> events = output.close();
    assertEquals(1, events.size());
    Event firstEvent = events.get(0);
    assertTrue(firstEvent instanceof DataMovementEvent);
    DataMovementEvent dme = (DataMovementEvent) firstEvent;

    ByteBuffer payloadBuffer = dme.getUserPayload();
    ShuffleUserPayloads.DataMovementEventPayloadProto shufflePayload =
        ShuffleUserPayloads.DataMovementEventPayloadProto.parseFrom(
            ByteString.copyFrom(payloadBuffer));
    assertTrue(shufflePayload.hasEmptyPartitions());

    byte[] emptyPartitionBytes = TezCommonUtils.decompressByteStringToByteArray(
        shufflePayload.getEmptyPartitions());
    BitSet emptyPartitionBits = TezUtilsInternal.fromByteArray(emptyPartitionBytes);
    assertEquals(numPartitions, emptyPartitionBits.cardinality());
    for (int i = 0; i < numPartitions; i++) {
      assertTrue(emptyPartitionBits.get(i));
    }
  }

  /** Closing a started output must release its writer. */
  @Test(timeout = 10000)
  public void testClose() throws Exception {
    OutputContext outputContext = OutputTestHelpers.createOutputContext(conf, workingDir);
    int numPartitions = 1;
    UnorderedKVOutput output = new UnorderedKVOutput(outputContext, numPartitions);
    output.initialize();
    output.start();
    assertNotNull(output.getWriter());
    output.close();
    assertNull(output.getWriter());
  }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.io;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
/**
* ShortWritable.
*
*/
public class ShortWritable implements WritableComparable {
  // The wrapped primitive value.
  private short value;

  /** Serializes the wrapped short. */
  public void write(DataOutput out) throws IOException {
    out.writeShort(value);
  }

  /** Deserializes the wrapped short. */
  public void readFields(DataInput in) throws IOException {
    value = in.readShort();
  }

  public ShortWritable(short s) {
    value = s;
  }

  public ShortWritable() {
    value = 0;
  }

  public void set(short value) {
    this.value = value;
  }

  public short get() {
    return value;
  }

  @Override
  public boolean equals(Object o) {
    // Exact-class comparison (no subclass equality), consistent with
    // hashCode below.
    if (o == null || o.getClass() != ShortWritable.class) {
      return false;
    }
    return get() == ((ShortWritable) o).get();
  }

  @Override
  public int hashCode() {
    return value;
  }

  @Override
  public String toString() {
    return String.valueOf(get());
  }

  /** Compares by numeric value; raw signature kept for compatibility. */
  public int compareTo(Object o) {
    return Short.compare(value, ((ShortWritable) o).value);
  }

  /** A Comparator optimized for ShortWritable. */
  public static class Comparator extends WritableComparator {
    public Comparator() {
      super(ShortWritable.class);
    }

    /**
     * Compare the buffers in serialized form.
     */
    @Override
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      // Re-sign the unsigned reads so negative shorts order correctly.
      short a1 = (short) readUnsignedShort(b1, s1);
      short a2 = (short) readUnsignedShort(b2, s2);
      return Short.compare(a1, a2);
    }
  }

  static { // register this optimized comparator
    WritableComparator.define(ShortWritable.class, new Comparator());
  }
}
|
package io.confluent.developer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
public class KafkaProducerApplication {

    private final Producer<String, String> producer;
    final String outTopic;

    public KafkaProducerApplication(final Producer<String, String> producer,
                                    final String topic) {
        this.producer = producer;
        outTopic = topic;
    }

    /**
     * Produces one record. Input of the form "key-value" is split on the
     * first '-'; input without '-' is sent under the sentinel key "NO-KEY".
     *
     * @param message raw line to publish
     * @return future holding the record metadata once acknowledged
     */
    public Future<RecordMetadata> produce(final String message) {
        final String[] parts = message.split("-");
        final String key, value;
        if (parts.length > 1) {
            key = parts[0];
            value = parts[1];
        } else {
            key = "NO-KEY";
            value = parts[0];
        }
        final ProducerRecord<String, String> producerRecord = new ProducerRecord<>(outTopic, key, value);
        return producer.send(producerRecord);
    }

    /** Flushes and closes the underlying producer (idempotent). */
    public void shutdown() {
        producer.close();
    }

    /**
     * Loads a properties file.
     *
     * @throws IOException if the file cannot be read
     */
    public static Properties loadProperties(String fileName) throws IOException {
        final Properties envProps = new Properties();
        // FIX: try-with-resources -- the old version leaked the stream when
        // load() threw.
        try (FileInputStream input = new FileInputStream(fileName)) {
            envProps.load(input);
        }
        return envProps;
    }

    /**
     * Blocks on each send future and prints the committed offset/timestamp.
     * Failures are reported to stderr instead of being silently dropped.
     */
    public void printMetadata(final Collection<Future<RecordMetadata>> metadata,
                              final String fileName) {
        System.out.println("Offsets and timestamps committed in batch from " + fileName);
        metadata.forEach(m -> {
            try {
                final RecordMetadata recordMetadata = m.get();
                System.out.println("Record written to offset " + recordMetadata.offset() + " timestamp " + recordMetadata.timestamp());
            } catch (InterruptedException | ExecutionException e) {
                if (e instanceof InterruptedException) {
                    // Preserve the interrupt status for callers.
                    Thread.currentThread().interrupt();
                }
                // FIX: previously swallowed -- surface failed sends.
                System.err.println("Failed to retrieve record metadata: " + e);
            }
        });
    }

    public static void main(String[] args) throws Exception {
        if (args.length < 2) {
            throw new IllegalArgumentException(
                    "This program takes two arguments: the path to an environment configuration file and" +
                            "the path to the file with records to send");
        }

        final Properties props = KafkaProducerApplication.loadProperties(args[0]);
        final String topic = props.getProperty("output.topic.name");
        final Producer<String, String> producer = new KafkaProducer<>(props);
        final KafkaProducerApplication producerApp = new KafkaProducerApplication(producer, topic);

        // Attach shutdown handler to catch Control-C.
        Runtime.getRuntime().addShutdownHook(new Thread(producerApp::shutdown));

        String filePath = args[1];
        try {
            List<String> linesToProduce = Files.readAllLines(Paths.get(filePath));
            List<Future<RecordMetadata>> metadata = linesToProduce.stream()
                    .filter(l -> !l.trim().isEmpty())
                    .map(producerApp::produce)
                    .collect(Collectors.toList());
            producerApp.printMetadata(metadata, filePath);
        } catch (IOException e) {
            System.err.println(String.format("Error reading file %s due to %s", filePath, e));
        }
        producerApp.shutdown();
    }
}
|
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.chen.deskclock.stopwatch;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.view.View;
import com.chen.deskclock.R;
import com.chen.deskclock.ThemeUtils;
import com.chen.deskclock.Utils;
import com.chen.deskclock.data.DataModel;
import com.chen.deskclock.data.Lap;
import com.chen.deskclock.data.Stopwatch;
import java.util.List;
/**
* Custom view that draws a reference lap as a circle when one exists.
*/
public final class StopwatchCircleView extends View {
/** The size of the dot indicating the user's position within the reference lap. */
private final float mDotRadius;
/** An amount to subtract from the true radius to account for drawing thicknesses. */
private final float mRadiusOffset;
/** Used to scale the width of the marker to make it similarly visible on all screens. */
private final float mScreenDensity;
/** The color indicating the remaining portion of the current lap. */
private final int mRemainderColor;
/** The color indicating the completed portion of the lap. */
private final int mCompletedColor;
/** The size of the stroke that paints the lap circle. */
private final float mStrokeSize;
/** The size of the stroke that paints the marker for the end of the prior lap. */
private final float mMarkerStrokeSize;
private final Paint mPaint = new Paint();
private final Paint mFill = new Paint();
private final RectF mArcRect = new RectF();
    @SuppressWarnings("unused")
    public StopwatchCircleView(Context context) {
        // Programmatic construction; delegates with a null attribute set.
        this(context, null);
    }
public StopwatchCircleView(Context context, AttributeSet attrs) {
super(context, attrs);
final Resources resources = context.getResources();
final float dotDiameter = resources.getDimension(R.dimen.circletimer_dot_size);
mDotRadius = dotDiameter / 2f;
mScreenDensity = resources.getDisplayMetrics().density;
mStrokeSize = resources.getDimension(R.dimen.circletimer_circle_size);
mMarkerStrokeSize = resources.getDimension(R.dimen.circletimer_marker_size);
mRadiusOffset = Utils.calculateRadiusOffset(mStrokeSize, dotDiameter, mMarkerStrokeSize);
mRemainderColor = Color.WHITE;
mCompletedColor = ThemeUtils.resolveColor(context, R.attr.colorAccent);
mPaint.setAntiAlias(true);
mPaint.setStyle(Paint.Style.STROKE);
mFill.setAntiAlias(true);
mFill.setColor(mCompletedColor);
mFill.setStyle(Paint.Style.FILL);
}
/**
* Start the animation if it is not currently running.
*/
void update() {
postInvalidateOnAnimation();
}
@Override
public void onDraw(Canvas canvas) {
// Compute the size and location of the circle to be drawn.
final int xCenter = getWidth() / 2;
final int yCenter = getHeight() / 2;
final float radius = Math.min(xCenter, yCenter) - mRadiusOffset;
// Reset old painting state.
mPaint.setColor(mRemainderColor);
mPaint.setStrokeWidth(mStrokeSize);
final List<Lap> laps = getLaps();
// If a reference lap does not exist or should not be drawn, draw a simple white circle.
if (laps.isEmpty() || !DataModel.getDataModel().canAddMoreLaps()) {
// Draw a complete white circle; no red arc required.
canvas.drawCircle(xCenter, yCenter, radius, mPaint);
// No need to continue animating the plain white circle.
return;
}
// The first lap is the reference lap to which all future laps are compared.
final Stopwatch stopwatch = getStopwatch();
final int lapCount = laps.size();
final Lap firstLap = laps.get(lapCount - 1);
final Lap priorLap = laps.get(0);
final long firstLapTime = firstLap.getLapTime();
final long currentLapTime = stopwatch.getTotalTime() - priorLap.getAccumulatedTime();
// Draw a combination of red and white arcs to create a circle.
mArcRect.top = yCenter - radius;
mArcRect.bottom = yCenter + radius;
mArcRect.left = xCenter - radius;
mArcRect.right = xCenter + radius;
final float redPercent = (float) currentLapTime / (float) firstLapTime;
final float whitePercent = 1 - (redPercent > 1 ? 1 : redPercent);
// Draw a white arc to indicate the amount of reference lap that remains.
canvas.drawArc(mArcRect, 270 + (1 - whitePercent) * 360, whitePercent * 360, false, mPaint);
// Draw a red arc to indicate the amount of reference lap completed.
mPaint.setColor(mCompletedColor);
canvas.drawArc(mArcRect, 270, redPercent * 360 , false, mPaint);
// Starting on lap 2, a marker can be drawn indicating where the prior lap ended.
if (lapCount > 1) {
mPaint.setColor(mRemainderColor);
mPaint.setStrokeWidth(mMarkerStrokeSize);
final float markerAngle = (float) priorLap.getLapTime() / (float) firstLapTime * 360;
final float startAngle = 270 + markerAngle;
final float sweepAngle = mScreenDensity * (float) (360 / (radius * Math.PI));
canvas.drawArc(mArcRect, startAngle, sweepAngle, false, mPaint);
}
// Draw a red dot to indicate current position relative to reference lap.
final float dotAngleDegrees = 270 + redPercent * 360;
final double dotAngleRadians = Math.toRadians(dotAngleDegrees);
final float dotX = xCenter + (float) (radius * Math.cos(dotAngleRadians));
final float dotY = yCenter + (float) (radius * Math.sin(dotAngleRadians));
canvas.drawCircle(dotX, dotY, mDotRadius, mFill);
// If the stopwatch is not running it does not require continuous updates.
if (stopwatch.isRunning()) {
postInvalidateOnAnimation();
}
}
private Stopwatch getStopwatch() {
return DataModel.getDataModel().getStopwatch();
}
private List<Lap> getLaps() {
return DataModel.getDataModel().getLaps();
}
}
|
package database;
import java.math.BigDecimal;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import org.mapdb.BTreeKeySerializer;
import org.mapdb.BTreeMap;
import org.mapdb.Bind;
import org.mapdb.DB;
import org.mapdb.Fun;
import org.mapdb.Fun.Tuple2;
import org.mapdb.Fun.Tuple3;
import qora.account.Account;
import utils.ObserverMessage;
import database.DBSet;
/**
 * Persistent map of account balances keyed by (address, asset key).
 *
 * Backed by a MapDB B-tree ("balances"), with a secondary index
 * ("balances_key_asset") keyed by (assetKey, -balance, address) that supports
 * listing all balances of a given asset.
 */
public class BalanceMap extends DBMap<Tuple2<String, Long>, BigDecimal>
{
    // Asset key of the native QORA currency.
    // Fix: literal suffix changed from lowercase "0l" to "0L" — the lowercase
    // suffix is easily misread as the digit one ("01"); value is unchanged.
    public static final long QORA_KEY = 0L;

    /** Maps DBMap notification codes to the ObserverMessage types fired on changes. */
    private Map<Integer, Integer> observableData = new HashMap<Integer, Integer>();

    /** Secondary index: Tuple3(assetKey, negated balance, address) -> primary key. */
    @SuppressWarnings("rawtypes")
    private BTreeMap assetKeyMap;

    public BalanceMap(DBSet databaseSet, DB database)
    {
        super(databaseSet, database);

        this.observableData.put(DBMap.NOTIFY_ADD, ObserverMessage.ADD_BALANCE_TYPE);
        this.observableData.put(DBMap.NOTIFY_REMOVE, ObserverMessage.REMOVE_BALANCE_TYPE);
        //this.observableData.put(DBMap.NOTIFY_LIST, ObserverMessage.LIST_BALANCE_TYPE);
    }

    /** Creates an in-memory fork layered on top of {@code parent}. */
    public BalanceMap(BalanceMap parent)
    {
        super(parent);
    }

    // No extra indexes beyond the secondary key bound in getMap().
    protected void createIndexes(DB database){}

    @SuppressWarnings({ "unchecked"})
    @Override
    protected Map<Tuple2<String, Long>, BigDecimal> getMap(DB database)
    {
        //OPEN MAP
        BTreeMap<Tuple2<String, Long>, BigDecimal> map = database.createTreeMap("balances")
                .keySerializer(BTreeKeySerializer.TUPLE2)
                .counterEnable()
                .makeOrGet();

        //HAVE/WANT KEY
        this.assetKeyMap = database.createTreeMap("balances_key_asset")
                .comparator(Fun.COMPARATOR)
                .counterEnable()
                .makeOrGet();

        //BIND ASSET KEY
        // NOTE: the balance is negated in the index tuple — presumably so the
        // natural ascending sort yields balances in descending order; verify
        // against consumers of getBalancesSortableList(long).
        Bind.secondaryKey(map, this.assetKeyMap, new Fun.Function2<Tuple3<Long, BigDecimal, String>, Tuple2<String, Long>, BigDecimal>() {
            @Override
            public Tuple3<Long, BigDecimal, String> run(Tuple2<String, Long> key, BigDecimal value) {
                return new Tuple3<Long, BigDecimal, String>(key.b, value.negate(), key.a);
            }
        });

        //RETURN
        return map;
    }

    @Override
    protected Map<Tuple2<String, Long>, BigDecimal> getMemoryMap()
    {
        return new TreeMap<Tuple2<String, Long>, BigDecimal>(Fun.TUPLE2_COMPARATOR);
    }

    /** Balance reported for unknown keys: zero with the ledger's 8-decimal scale. */
    @Override
    protected BigDecimal getDefaultValue()
    {
        return BigDecimal.ZERO.setScale(8);
    }

    @Override
    protected Map<Integer, Integer> getObservableData()
    {
        return this.observableData;
    }

    /** Sets the QORA balance of {@code address}. */
    public void set(String address, BigDecimal value)
    {
        this.set(address, QORA_KEY, value);
    }

    /** Sets the balance of {@code address} for the asset identified by {@code key}. */
    public void set(String address, long key, BigDecimal value)
    {
        this.set(new Tuple2<String, Long>(address, key), value);
    }

    /** Returns the QORA balance of {@code address}. */
    public BigDecimal get(String address)
    {
        return this.get(address, QORA_KEY);
    }

    /** Returns the balance of {@code address} for the asset identified by {@code key}. */
    public BigDecimal get(String address, long key)
    {
        return this.get(new Tuple2<String, Long>(address, key));
    }

    /**
     * Lists all balances of the given asset, using the secondary index to filter
     * by asset key.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public SortableList<Tuple2<String, Long>, BigDecimal> getBalancesSortableList(long key)
    {
        //FILTER ALL KEYS
        Collection<Tuple2<String, Long>> keys = ((BTreeMap<Tuple3, Tuple2<String, Long>>) this.assetKeyMap).subMap(
                Fun.t3(key, null, null),
                Fun.t3(key, Fun.HI(), Fun.HI())).values();

        //RETURN
        return new SortableList<Tuple2<String, Long>, BigDecimal>(this, keys);
    }

    /**
     * Lists all balances (across every asset) held by the given account, using a
     * range scan over the primary map keyed by the account's address.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public SortableList<Tuple2<String, Long>, BigDecimal> getBalancesSortableList(Account account)
    {
        BTreeMap map = (BTreeMap) this.map;

        //FILTER ALL KEYS
        Collection keys = ((BTreeMap<Tuple2, BigDecimal>) map).subMap(
                Fun.t2(account.getAddress(), null),
                Fun.t2(account.getAddress(), Fun.HI())).keySet();

        //RETURN
        return new SortableList<Tuple2<String, Long>, BigDecimal>(this, keys);
    }
}
|
package Discarpet.mixins;
import Discarpet.script.events.ChatEvents;
import net.minecraft.entity.Entity;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.world.ServerWorld;
import net.minecraft.text.Text;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import java.util.Iterator;
import java.util.UUID;
@Mixin(MinecraftServer.class)
public abstract class MinecraftServer_systemMessageEventMixin {

    @Shadow
    public abstract Iterable<ServerWorld> getWorlds();

    /**
     * Fires the Discarpet SYSTEM_MESSAGE event after the vanilla system message
     * has been sent.
     *
     * The sender entity is resolved by probing every server world for an entity
     * with {@code senderUuid}; the search stops at the first world that returns a
     * match. If no world knows the UUID, the event is fired with a null entity.
     */
    @Inject(at = @At("RETURN"),method = "sendSystemMessage(Lnet/minecraft/text/Text;Ljava/util/UUID;)V")
    public void redirectChatToScarpet(Text message, UUID senderUuid ,CallbackInfo ci) {
        Entity sender = null;
        for (ServerWorld world : this.getWorlds()) {
            sender = world.getEntity(senderUuid);
            if (sender != null) {
                break; // found the sender; stop probing further worlds
            }
        }
        ChatEvents.SYSTEM_MESSAGE.onSystemMessage(message, sender);
    }
}
|
package org.fraunhofer.cese.madcap.backend.models;
import com.googlecode.objectify.annotation.Entity;
import com.googlecode.objectify.annotation.Id;
import com.googlecode.objectify.annotation.Index;
import org.json.JSONObject;
import java.util.HashMap;
/**
 * Created by MMueller on 12/9/2016.
 *
 * Datastore entity storing one Google Activity Recognition sample: the
 * probability (confidence) values for each recognized activity type, parsed
 * from a {@link ProbeEntry}'s JSON sensor payload.
 */
@Entity
public class ActivityEntry implements Comparable<ActivityEntry>, DatastoreEntry {
    @Id
    private String id;
    @Index
    private Long timestamp;
    private double onBicycle;
    private double inVehicle;
    private double onFoot;
    private double running;
    private double still;
    private double tilting;
    private double walking;
    private double unknown;
    @Index
    private String userID;

    /** No-arg constructor required by Objectify. */
    public ActivityEntry(){

    }

    /**
     * Builds an entry from a generic probe entry, parsing the probability
     * values out of its JSON sensor-data string.
     *
     * @param probeEntry source entry; its sensor data must be a JSON object
     *                   containing all eight activity keys, otherwise
     *                   org.json throws a JSONException.
     */
    public ActivityEntry(ProbeEntry probeEntry){
        id = probeEntry.getId();
        timestamp = probeEntry.getTimestamp();
        userID = probeEntry.getUserID();

        JSONObject dataJsonObject = new JSONObject(probeEntry.getSensorData());
        onBicycle = dataJsonObject.getDouble("onBicycle");
        inVehicle = dataJsonObject.getDouble("inVehicle");
        onFoot = dataJsonObject.getDouble("onFoot");
        running = dataJsonObject.getDouble("running");
        still = dataJsonObject.getDouble("still");
        tilting = dataJsonObject.getDouble("tilting");
        walking = dataJsonObject.getDouble("walking");
        unknown = dataJsonObject.getDouble("unknown");
    }

    /**
     * Gets the probability for the user being on a bike.
     * @return the probability.
     */
    public double getOnBicycle() {
        return onBicycle;
    }

    /**
     * Sets the probability for the user being on a bike.
     * @param onBicycle probability.
     */
    public void setOnBicycle(double onBicycle) {
        this.onBicycle = onBicycle;
    }

    /**
     * Gets the probability for the user being in a vehicle.
     * @return the probability.
     */
    public double getInVehicle() {
        return inVehicle;
    }

    /**
     * Sets the probability for the user being in a vehicle.
     * @param inVehicle probability.
     */
    public void setInVehicle(double inVehicle) {
        this.inVehicle = inVehicle;
    }

    /**
     * Gets the probability for the user being on foot.
     * @return the probability.
     */
    public double getOnFoot() {
        return onFoot;
    }

    /**
     * Sets the probability for the user being on foot.
     * @param onFoot probability.
     */
    public void setOnFoot(double onFoot) {
        this.onFoot = onFoot;
    }

    /**
     * Gets the probability for the user running.
     * @return the probability.
     */
    public double getRunning() {
        return running;
    }

    /**
     * Sets the probability for the user running.
     * @param running probability.
     */
    public void setRunning(double running) {
        this.running = running;
    }

    /**
     * Gets the probability for the user to be standing still.
     * @return the probability.
     */
    public double getStill() {
        return still;
    }

    /**
     * Sets the probability for the user standing still.
     * @param still probability.
     */
    public void setStill(double still) {
        this.still = still;
    }

    /**
     * Gets the probability for the user tilting his phone.
     * @return the probability.
     */
    public double getTilting() {
        return tilting;
    }

    /**
     * Sets the probability for the user tilting his device.
     * @param tilting probability.
     */
    public void setTilting(double tilting) {
        this.tilting = tilting;
    }

    /**
     * Gets the probability for the user walking.
     * @return probability.
     */
    public double getWalking() {
        return walking;
    }

    /**
     * Sets the probability for the user walking.
     * @param walking probability.
     */
    public void setWalking(double walking) {
        this.walking = walking;
    }

    /**
     * Gets the probability that it is no predefined activity.
     * @return the probability.
     */
    public double getUnknown() {
        return unknown;
    }

    /**
     * Sets the probability that it is no predefined activity.
     * @param unknown probability.
     */
    public void setUnknown(double unknown) {
        this.unknown = unknown;
    }

    /**
     * Compares this object with the specified object for order.
     *
     * NOTE(review): always returns 0, i.e. every ActivityEntry compares equal,
     * which is inconsistent with {@link #equals(Object)}. Kept as-is to preserve
     * existing sort behavior; consider ordering by timestamp if a real order is
     * ever needed.
     *
     * @param o the object to be compared.
     * @return always 0.
     */
    @Override
    public int compareTo(ActivityEntry o) {
        return 0;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public void setId(String s) {
        id = s;
    }

    @Override
    public Long getTimestamp() {
        return timestamp;
    }

    @Override
    public void setTimestamp(Long l) {
        timestamp = l;
    }

    @Override
    public String getUserID() {
        return userID;
    }

    @Override
    public void setUserID(String s) {
        userID = s;
    }

    /**
     * Hashes exactly the fields compared in {@link #equals(Object)}.
     *
     * Fix: previously delegated to {@code super.hashCode()} (identity hash)
     * while equals was value-based, violating the equals/hashCode contract —
     * equal objects could land in different hash buckets.
     *
     * @return a hash code value for this object.
     */
    @Override
    public int hashCode() {
        int result = Double.hashCode(onBicycle);
        result = 31 * result + Double.hashCode(inVehicle);
        result = 31 * result + Double.hashCode(onFoot);
        result = 31 * result + Double.hashCode(running);
        result = 31 * result + Double.hashCode(still);
        result = 31 * result + Double.hashCode(tilting);
        result = 31 * result + Double.hashCode(walking);
        result = 31 * result + Double.hashCode(unknown);
        return result;
    }

    /**
     * Two entries are equal when all eight probability values match.
     * (As before, id/timestamp/userID are intentionally not compared.)
     *
     * Fixes:
     * - {@code running} was erroneously compared against {@code that.getOnFoot()}.
     * - doubles are now compared via {@link Double#compare} instead of {@code ==},
     *   so NaN values no longer break reflexivity.
     *
     * @param o the reference object with which to compare.
     * @return {@code true} if both entries carry the same probabilities.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        ActivityEntry that = (ActivityEntry) o;

        return Double.compare(onBicycle, that.getOnBicycle()) == 0
                && Double.compare(inVehicle, that.getInVehicle()) == 0
                && Double.compare(onFoot, that.getOnFoot()) == 0
                && Double.compare(running, that.getRunning()) == 0
                && Double.compare(still, that.getStill()) == 0
                && Double.compare(tilting, that.getTilting()) == 0
                && Double.compare(walking, that.getWalking()) == 0
                && Double.compare(unknown, that.getUnknown()) == 0;
    }

    @Override
    public String toString() {
        // Fix: a ", " separator was missing between the id and onBicycle fields.
        return "ActivityEntry{"+
                "id=" + id +
                ", \"onBicycle\": " + onBicycle +
                ", \"inVehicle\": " + inVehicle +
                ", \"onFoot\": " + onFoot +
                ", \"running\": " + running +
                ", \"still\": " + still +
                ", \"tilting\": " + tilting +
                ", \"walking\": " + walking +
                ", \"unknown\": " + unknown +
                '}';
    }
}
|
// ============================================================================
//
// Copyright (C) 2006-2019 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.repository.imports;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.IWorkspaceRunnable;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.ISchedulingRule;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.URIConverter;
import org.eclipse.emf.ecore.resource.impl.ExtensibleURIConverterImpl;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceImpl;
import org.osgi.framework.FrameworkUtil;
import org.talend.commons.CommonsPlugin;
import org.talend.commons.exception.PersistenceException;
import org.talend.commons.runtime.model.emf.EmfHelper;
import org.talend.commons.runtime.model.emf.TalendXMIResource;
import org.talend.commons.runtime.model.repository.ERepositoryStatus;
import org.talend.commons.runtime.utils.io.FileCopyUtils;
import org.talend.commons.ui.runtime.exception.ExceptionHandler;
import org.talend.commons.utils.VersionUtils;
import org.talend.commons.utils.time.TimeMeasure;
import org.talend.core.CorePlugin;
import org.talend.core.GlobalServiceRegister;
import org.talend.core.PluginChecker;
import org.talend.core.context.Context;
import org.talend.core.context.RepositoryContext;
import org.talend.core.hadoop.IHadoopClusterService;
import org.talend.core.hadoop.repository.HadoopRepositoryUtil;
import org.talend.core.language.ECodeLanguage;
import org.talend.core.model.general.ILibrariesService;
import org.talend.core.model.metadata.builder.connection.Connection;
import org.talend.core.model.metadata.builder.connection.ConnectionPackage;
import org.talend.core.model.properties.BusinessProcessItem;
import org.talend.core.model.properties.ByteArray;
import org.talend.core.model.properties.ConnectionItem;
import org.talend.core.model.properties.ContextItem;
import org.talend.core.model.properties.FileItem;
import org.talend.core.model.properties.FolderItem;
import org.talend.core.model.properties.FolderType;
import org.talend.core.model.properties.Item;
import org.talend.core.model.properties.JobletProcessItem;
import org.talend.core.model.properties.LinkDocumentationItem;
import org.talend.core.model.properties.LinkType;
import org.talend.core.model.properties.MigrationTask;
import org.talend.core.model.properties.NotationHolder;
import org.talend.core.model.properties.ProcessItem;
import org.talend.core.model.properties.Project;
import org.talend.core.model.properties.PropertiesPackage;
import org.talend.core.model.properties.Property;
import org.talend.core.model.properties.ReferenceFileItem;
import org.talend.core.model.properties.RoutineItem;
import org.talend.core.model.properties.SQLPatternItem;
import org.talend.core.model.properties.SnippetItem;
import org.talend.core.model.properties.TDQItem;
import org.talend.core.model.properties.User;
import org.talend.core.model.properties.helper.ByteArrayResource;
import org.talend.core.model.relationship.RelationshipItemBuilder;
import org.talend.core.model.repository.ERepositoryObjectType;
import org.talend.core.model.repository.IRepositoryViewObject;
import org.talend.core.model.repository.RepositoryViewObject;
import org.talend.core.model.utils.MigrationUtil;
import org.talend.core.repository.model.PropertiesProjectResourceImpl;
import org.talend.core.repository.model.ProxyRepositoryFactory;
import org.talend.core.repository.ui.actions.RestoreFolderUtil;
import org.talend.core.repository.utils.ProjectDataJsonProvider;
import org.talend.core.repository.utils.XmiResourceManager;
import org.talend.core.ui.IJobletProviderService;
import org.talend.core.ui.component.ComponentsFactoryProvider;
import org.talend.designer.business.model.business.BusinessPackage;
import org.talend.designer.business.model.business.BusinessProcess;
import org.talend.designer.codegen.ICodeGeneratorService;
import org.talend.designer.codegen.ITalendSynchronizer;
import org.talend.designer.core.model.utils.emf.component.IMPORTType;
import org.talend.designer.core.model.utils.emf.talendfile.ElementParameterType;
import org.talend.designer.core.model.utils.emf.talendfile.ParametersType;
import org.talend.designer.core.model.utils.emf.talendfile.TalendFilePackage;
import org.talend.model.emf.CwmResource;
import org.talend.repository.ProjectManager;
import org.talend.repository.RepositoryWorkUnit;
import org.talend.repository.constants.FileConstants;
import org.talend.repository.i18n.Messages;
import org.talend.repository.imports.ItemRecord.State;
import org.talend.repository.imports.TreeBuilder.ProjectNode;
import org.talend.repository.model.IProxyRepositoryFactory;
import org.talend.repository.model.IRepositoryNode.ENodeType;
import org.talend.repository.model.IRepositoryNode.EProperties;
import org.talend.repository.model.RepositoryNode;
/**
 *
 * @deprecated won't use this class, and shouldn't do any modification in this class too.
 *
 * it had moved to new import system (IImportHandler) with extension point
 * "org.talend.repository.items.importexport.handler".
 */
@Deprecated
public class ImportItemUtil {

    private static Logger log = Logger.getLogger(ImportItemUtil.class);

    // Loads/saves the EMF XMI resources backing repository items.
    private final XmiResourceManager xmiResourceManager = new XmiResourceManager();

    // Set to true when any item fails during an import run; see setErrors()/hasErrors().
    private boolean hasErrors = false;

    // Shared (static) cache of existing repository objects per item type,
    // reused across checkItem() calls to avoid repeated repository queries.
    private static RepositoryObjectCache cache = new RepositoryObjectCache();

    // Builds the project/folder tree shown while selecting items to import.
    private final TreeBuilder treeBuilder = new TreeBuilder();

    // Ids of items deleted during this import run; cleared by clear().
    private final Set<String> deletedItems = new HashSet<String>();

    // Folder paths already (re)created, per repository object type, to avoid re-creating them.
    private Map<ERepositoryObjectType, Set<String>> foldersCreated = new HashMap<ERepositoryObjectType, Set<String>>();

    // Source projects whose metadata was already refreshed during this run.
    private final Set<Project> updatedProjects = new HashSet<Project>();

    // Cache of source Project objects keyed by their on-disk path.
    private final Map<IPath, Project> projects = new HashMap<IPath, Project>();

    // Routine name -> external module names it requires (for library sync).
    private final Map<String, Set<String>> routineExtModulesMap = new HashMap<String, Set<String>>();

    // Per-source-project migration tasks still to run after import.
    private Map<String, List<MigrationTask>> migrationTasksToApplyPerProject = new HashMap<String, List<MigrationTask>>();

    // Per-source-project flag recording whether its migration state was acceptable.
    private Map<String, Boolean> migrationTasksStatusPerProject = new HashMap<String, Boolean>();

    // Guards so stats&logs / implicit-context settings are reloaded at most once per run.
    private boolean statAndLogsSettingsReloaded = false;

    private boolean implicitSettingsReloaded = false;

    // True when at least one joblet was imported (components need reloading afterwards).
    private static boolean hasJoblets = false;

    // Restores deleted folders when items are imported back into them.
    private RestoreFolderUtil restoreFolder;

    private static final String ADAPT_NEW_MIGRATION_TASK_SYSTEM_ID = "org.talend.repository.model.migration.UpdateExistentMigrationTasksToAdaptNewMigrationSystemMigrationTask"; //$NON-NLS-1$
    /** Resets the per-run set of deleted item ids before a new import. */
    public void clear() {
        deletedItems.clear();
    }
    /** Records whether the current import run encountered errors. */
    public void setErrors(boolean errors) {
        hasErrors = errors;
    }
    /** @return true when any item failed during the current import run. */
    public boolean hasErrors() {
        return hasErrors;
    }
    /**
     * Builds the repository path of a node by walking up its parents.
     *
     * Recursion terminates at a stable/system folder node, whose path is its own
     * label — prefixed with "metadata" for the file/connection metadata types
     * listed below, since those all live under the metadata root in the tree.
     *
     * @param node the repository node to resolve; must ultimately have a
     *             system-folder ancestor, otherwise the recursion would NPE on
     *             getObject() — presumably guaranteed by callers.
     */
    private static IPath getPath(RepositoryNode node) {
        if (node.getType() == ENodeType.STABLE_SYSTEM_FOLDER || node.getType() == ENodeType.SYSTEM_FOLDER) {
            String prefix = ""; //$NON-NLS-1$
            ERepositoryObjectType type = (ERepositoryObjectType) node.getProperties(EProperties.CONTENT_TYPE);
            if (type == ERepositoryObjectType.METADATA_FILE_DELIMITED || type == ERepositoryObjectType.METADATA_FILE_POSITIONAL
                    || type == ERepositoryObjectType.METADATA_FILE_REGEXP || type == ERepositoryObjectType.METADATA_FILE_XML
                    || type == ERepositoryObjectType.METADATA_FILE_LDIF || type == ERepositoryObjectType.METADATA_FILE_EXCEL
                    || type == ERepositoryObjectType.METADATA_SALESFORCE_SCHEMA
                    || type == ERepositoryObjectType.METADATA_GENERIC_SCHEMA
                    || type == ERepositoryObjectType.METADATA_LDAP_SCHEMA || type == ERepositoryObjectType.METADATA_CONNECTIONS
                    || type == ERepositoryObjectType.METADATA_SAPCONNECTIONS
                    || type == ERepositoryObjectType.METADATA_HEADER_FOOTER) {
                prefix = ERepositoryObjectType.METADATA.toString();
            }
            return new Path(prefix).append(node.getLabel());
        }

        String label = node.getObject().getProperty().getLabel();
        return getPath(node.getParent()).append(label);
    }
    /**
     * Decides whether an imported item may be created (or overwrite an existing
     * item) in the local repository.
     *
     * Scans the cached repository objects of the item's type for name and id
     * collisions, then resolves the four cases (both free / id taken / name
     * taken / both taken) against the {@code overwrite} flag. Side effects on
     * {@code itemRecord}: sets its State (ID_EXISTED / NAME_EXISTED /
     * NAME_AND_ID_EXISTED), records the existing item to overwrite, and adds
     * user-visible errors when the import must be refused (system items,
     * locked items, name conflicts).
     *
     * @param itemRecord the candidate item read from the import source
     * @param overwrite whether the user allowed overwriting existing items
     * @return true when the item can be imported
     */
    private boolean checkItem(ItemRecord itemRecord, boolean overwrite) {

        boolean result = false;
        try {
            Item item = itemRecord.getItem();

            if (item instanceof TDQItem) {
                return false; // hide tdq first
            }

            ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);

            if (itemType == null) {
                itemRecord.addError(Messages.getString("ImportItemUtil.unsupportItem")); //$NON-NLS-1$
                return false; // can't import this item.
            }

            // Populate the shared cache for this type once; later iterations reuse it.
            cache.initialize(itemType);

            // These two types tolerate duplicate labels as long as the paths differ.
            boolean isAllowMultipleName = (itemType == ERepositoryObjectType.SQLPATTERNS || itemType == ERepositoryObjectType.METADATA_FILE_XML);

            String itemPath = null;
            if (item.getState() != null) {
                itemPath = item.getState().getPath();
            } else {
                // No state means the item file is malformed; refuse it.
                itemRecord.addError(Messages.getString("ImportItemUtil.unsupportItem"));
                return false;
            }

            boolean nameAvailable = true;
            IRepositoryViewObject itemWithSameId = null;
            IRepositoryViewObject itemWithSameName = null;

            // take care, in cache it's RepositoryViewObject, not RepositoryObject
            for (IRepositoryViewObject current : cache.getItemsFromRepository().get(itemType)) {
                final Property property = itemRecord.getProperty();
                if (property != null) {
                    // NOTE(review): "property.getId() != current.getId()" compares String
                    // references, not values — it presumably relies on shared/interned id
                    // instances for same-object cases; verify before depending on it.
                    if (property.getLabel() != null && property.getLabel().equalsIgnoreCase(current.getLabel())
                            && property.getId() != current.getId()) {
                        // To check SQLPattern in same path. see bug 0005038: unable to add a SQLPattern into
                        // repository.
                        if (!isAllowMultipleName || current.getPath().equals(itemPath)) {
                            nameAvailable = false;
                        }
                        // overwrite the item with same label but diff id: 15787: import items does not overwrite some
                        // elements
                        if (!nameAvailable) {
                            itemWithSameName = current;
                        }

                    }
                    if (property.getId() != null && property.getId().equalsIgnoreCase(current.getId())) {
                        itemWithSameId = current;
                    }
                }
            }
            itemRecord.setExistingItemWithSameId(itemWithSameId);
            boolean idAvailable = itemWithSameId == null;

            boolean isSystem = false;
            // we do not import built in routines
            if (item.eClass().equals(PropertiesPackage.eINSTANCE.getRoutineItem())) {
                RoutineItem routineItem = (RoutineItem) item;
                if (routineItem.isBuiltIn()) {
                    isSystem = true;
                }
            }

            // we do not import system sql patterns
            if (item.eClass().equals(PropertiesPackage.eINSTANCE.getSQLPatternItem())) {
                SQLPatternItem sqlPatternItem = (SQLPatternItem) item;
                if (sqlPatternItem.isSystem()) {
                    isSystem = true;
                }
            }

            if (isSystem) {
                itemRecord.addError(Messages.getString("RepositoryUtil.isSystem"));
                return false;
            }

            if (nameAvailable) {
                if (idAvailable) {
                    // Neither name nor id collides: plain import.
                    if (!isSystem) {
                        result = true;
                    } /*
                       * else { itemRecord.addError(Messages.getString("RepositoryUtil.isSystemRoutine")); //$NON-NLS-1$
                       * }
                       */
                } else {
                    // same id but different name,no need to care overwrite cause the item will be considered as a
                    // different one,see bug 20445
                    itemRecord.setState(State.ID_EXISTED);

                    // if (overwrite) {
                    // result = true;
                    // } else {

                    // see bug 0005222: [Import items] [Errors and Warnings]
                    // id is already in use
                    result = true;

                    // RepositoryNode nodeWithSameId = RepositoryNodeUtilities.getRepositoryNode(itemWithSameId);
                    // IPath path = getPath(nodeWithSameId);
                    // itemRecord.addError(Messages.getString(
                    // "RepositoryUtil.idUsed", itemWithSameId.getLabel(), path.toOSString())); //$NON-NLS-1$
                    // }
                }
            } else {
                if (idAvailable) {
                    // same name but different id
                    itemRecord.setState(State.NAME_EXISTED);
                    if (!isSystem && overwrite) {
                        // if anything system, don't replace the source item if same name.
                        // if not from system, can overwrite.
                        itemRecord.setExistingItemWithSameId(itemWithSameName);
                        result = true;
                    }

                    // TDI-21399,TDI-21401
                    // if item is locked, cannot overwrite
                    if (result && overwrite && itemWithSameName != null) {
                        ERepositoryStatus status = itemWithSameName.getRepositoryStatus();
                        if (status == ERepositoryStatus.LOCK_BY_OTHER || status == ERepositoryStatus.LOCK_BY_USER) {
                            itemRecord.addError(Messages.getString("RepositoryUtil.itemLocked")); //$NON-NLS-1$
                            return false;
                        }
                    }
                } else {
                    // same name and same id
                    itemRecord.setState(State.NAME_AND_ID_EXISTED);
                    if (overwrite) {
                        result = true;
                    }
                    // When the two collisions point at different existing items,
                    // prefer overwriting the one matched by name.
                    if (!isSystem && overwrite
                            && !itemWithSameName.getProperty().getLabel().equals(itemWithSameId.getProperty().getLabel())) {
                        // if anything system, don't replace the source item if same name.
                        // if not from system, can overwrite.
                        itemRecord.setExistingItemWithSameId(itemWithSameName);
                        result = true;
                    }
                }
                if (!result && !isSystem) {
                    itemRecord.addError(Messages.getString("RepositoryUtil.nameUsed")); //$NON-NLS-1$
                }
            }

            if (result && overwrite && itemRecord.getState() == State.NAME_AND_ID_EXISTED) {
                // if item is locked, cannot overwrite
                if (checkIfLocked(itemRecord)) {
                    itemRecord.addError(Messages.getString("RepositoryUtil.itemLocked")); //$NON-NLS-1$
                    result = false;
                }
            }
        } catch (Exception e) {
            // Any unexpected failure is logged and treated as "cannot import".
            log.error("Error when checking item :" + itemRecord.getPath(), e);
        }

        return result;
    }
/**
 * Tells whether any repository object matching the given record is currently locked,
 * consulting the per-import cache first and memoizing the computed answer so the
 * repository is queried at most once per record.
 *
 * @param itemRecord the record to check
 * @return true if any version of the matching object is locked (by this user or another)
 * @throws PersistenceException if the repository lookup fails
 */
private boolean checkIfLocked(ItemRecord itemRecord) throws PersistenceException {
    final Boolean cachedState = cache.getItemLockState(itemRecord);
    if (cachedState != null) {
        // answer already computed for this record during this import session
        return cachedState.booleanValue();
    }
    boolean locked = false;
    for (IRepositoryViewObject viewObject : cache.findObjectsByItem(itemRecord)) {
        ERepositoryStatus repositoryStatus = viewObject.getRepositoryStatus();
        if (repositoryStatus == ERepositoryStatus.LOCK_BY_USER || repositoryStatus == ERepositoryStatus.LOCK_BY_OTHER) {
            locked = true;
            break;
        }
    }
    if (locked) {
        itemRecord.setLocked(true);
    }
    cache.setItemLockState(itemRecord, locked);
    return locked;
}
/**
 * Imports the given item records into the current project inside a single repository
 * work unit executed in a workspace runnable. The records are first sorted (routines
 * before everything else, other items by label), records whose id already exists get
 * a freshly generated id, each valid record is then imported, external routine jars
 * are deployed, joblet components reloaded, relations and/or the project saved, and
 * finally all transient import state is cleared.
 *
 * @param manager access to the files being imported
 * @param itemRecords the records to import (assumed already populated/validated)
 * @param monitor progress reporting and cancellation
 * @param overwrite whether existing items may be overwritten
 * @param destinationPath optional destination folder applied to items whose type name equals contentType
 * @param contentType repository type name the destinationPath applies to
 * @return the same list of records, with their import state updated
 */
@SuppressWarnings("unchecked")
public List<ItemRecord> importItemRecords(final ResourcesManager manager, final List<ItemRecord> itemRecords,
        final IProgressMonitor monitor, final boolean overwrite, final IPath destinationPath, final String contentType) {
    // time measurements are only active when the studio runs in debug mode
    TimeMeasure.display = CommonsPlugin.isDebugMode();
    TimeMeasure.displaySteps = CommonsPlugin.isDebugMode();
    TimeMeasure.measureActive = CommonsPlugin.isDebugMode();
    TimeMeasure.begin("importItemRecords");
    // reset per-import state accumulated by importItemRecord()
    hasJoblets = false;
    statAndLogsSettingsReloaded = false;
    implicitSettingsReloaded = false;
    restoreFolder = new RestoreFolderUtil();
    // routines are imported first (other items may depend on them); all non-routine
    // items are sorted by label (TUP-2548)
    Collections.sort(itemRecords, new Comparator<ItemRecord>() {

        @Override
        public int compare(ItemRecord o1, ItemRecord o2) {
            if (o1.getProperty().getItem() instanceof RoutineItem && o2.getProperty().getItem() instanceof RoutineItem) {
                return 0;
            } else if (!(o1.getProperty().getItem() instanceof RoutineItem)
                    && !(o2.getProperty().getItem() instanceof RoutineItem)) {
                // TUP-2548 sort items by label
                String label = o1.getLabel();
                if (label == null) {
                    return -1;
                }
                final String label2 = o2.getLabel();
                if (label2 == null) {
                    return 1;
                }
                return label.compareTo(label2);
            } else if (o1.getProperty().getItem() instanceof RoutineItem) {
                return -1;
            } else {
                return 1;
            }
        }
    });
    monitor.beginTask(Messages.getString("ImportItemWizardPage.ImportSelectedItems"), itemRecords.size() * 2 + 1); //$NON-NLS-1$
    RepositoryWorkUnit repositoryWorkUnit = new RepositoryWorkUnit("Import Items") { //$NON-NLS-1$

        @Override
        public void run() throws PersistenceException {
            final IWorkspaceRunnable op = new IWorkspaceRunnable() {

                @Override
                public void run(IProgressMonitor monitor) throws CoreException {
                    final IProxyRepositoryFactory factory = CorePlugin.getDefault().getProxyRepositoryFactory();
                    // bug 10520
                    final Set<String> overwriteDeletedItems = new HashSet<String>();
                    final Set<String> idDeletedBeforeImport = new HashSet<String>();
                    // first pass: records flagged ID_EXISTED get a fresh UUID so they do not
                    // override the existing item; the same (label + item type) pair always
                    // maps to the same regenerated id, keeping multi-version records together
                    Map<String, String> nameToIdMap = new HashMap<String, String>();
                    for (ItemRecord itemRecord : itemRecords) {
                        if (!monitor.isCanceled()) {
                            if (itemRecord.isValid()) {
                                if (itemRecord.getState() == State.ID_EXISTED) {
                                    String id = nameToIdMap.get(itemRecord.getProperty().getLabel()
                                            + ERepositoryObjectType.getItemType(itemRecord.getProperty().getItem())
                                                    .toString());
                                    if (id == null) {
                                        /*
                                         * if id exsist then need to genrate new id for this job,in this case the
                                         * job won't override the old one
                                         */
                                        id = EcoreUtil.generateUUID();
                                        nameToIdMap.put(itemRecord.getProperty().getLabel()
                                                + ERepositoryObjectType.getItemType(itemRecord.getProperty().getItem())
                                                        .toString(), id);
                                    }
                                    itemRecord.getProperty().setId(id);
                                }
                            }
                        }
                    }
                    // second pass: import each valid record, register its relations, then
                    // unload its resources to keep memory usage low during large imports
                    for (ItemRecord itemRecord : itemRecords) {
                        if (!monitor.isCanceled()) {
                            if (itemRecord.isValid()) {
                                importItemRecord(manager, itemRecord, overwrite, destinationPath, overwriteDeletedItems,
                                        idDeletedBeforeImport, contentType, monitor);
                                IRepositoryViewObject object;
                                try {
                                    Property property = itemRecord.getProperty();
                                    if (property == null) {
                                        // the record was cleared during import; reload the
                                        // imported version from the repository
                                        object = factory.getSpecificVersion(itemRecord.getItemId(),
                                                itemRecord.getItemVersion(), true);
                                        property = object.getProperty();
                                    }
                                    RelationshipItemBuilder.getInstance().addOrUpdateItem(property.getItem(), true);
                                    itemRecord.setProperty(null);
                                    ProxyRepositoryFactory.getInstance().unloadResources(property);
                                } catch (PersistenceException e) {
                                    ExceptionHandler.process(e);
                                }
                                // force the next process item to re-check project settings
                                statAndLogsSettingsReloaded = false;
                                implicitSettingsReloaded = false;
                                monitor.worked(1);
                            }
                        }
                    }
                    // deploy routines Jar
                    if (!getRoutineExtModulesMap().isEmpty()) {
                        // flatten the per-routine module sets collected by applyMigrationTasks
                        Set<String> extRoutines = new HashSet<String>();
                        for (String id : getRoutineExtModulesMap().keySet()) {
                            Set<String> set = getRoutineExtModulesMap().get(id);
                            if (set != null) {
                                extRoutines.addAll(set);
                            }
                        }
                        if (manager instanceof ProviderManager || manager instanceof ZipFileManager) {
                            deployJarToDesForArchive(manager, extRoutines);
                        } else {
                            deployJarToDes(manager, extRoutines);
                        }
                    }
                    // reload joblet components so freshly imported joblets become usable
                    if (PluginChecker.isJobLetPluginLoaded()) {
                        IJobletProviderService service = (IJobletProviderService) GlobalServiceRegister.getDefault()
                                .getService(IJobletProviderService.class);
                        if (service != null) {
                            service.loadComponentsFromProviders();
                        }
                    }
                    // re-apply the "deleted" flag on folders recreated during the import
                    checkDeletedFolders();
                    monitor.done();
                    TimeMeasure.step("importItemRecords", "before save");
                    if (RelationshipItemBuilder.getInstance().isNeedSaveRelations()) {
                        RelationshipItemBuilder.getInstance().saveRelations();
                        TimeMeasure.step("importItemRecords", "save relations");
                    } else {
                        // only save the project here if no relation need to be saved, since project will already be
                        // saved
                        // with relations
                        try {
                            factory.saveProject(ProjectManager.getInstance().getCurrentProject());
                        } catch (PersistenceException e) {
                            throw new CoreException(new Status(IStatus.ERROR, FrameworkUtil.getBundle(this.getClass())
                                    .getSymbolicName(), "Import errors", e));
                        }
                        TimeMeasure.step("importItemRecords", "save project");
                    }
                }
            };
            IWorkspace workspace = ResourcesPlugin.getWorkspace();
            try {
                ISchedulingRule schedulingRule = workspace.getRoot();
                // the update the project files need to be done in the workspace runnable to avoid all
                // notification
                // of changes before the end of the modifications.
                workspace.run(op, schedulingRule, IWorkspace.AVOID_UPDATE, monitor);
            } catch (CoreException e) {
                // NOTE(review): CoreException is silently swallowed here, so failures raised
                // by the runnable above (including the project-save error) are lost — confirm
                // whether this is intentional before changing it.
            }
        }
    };
    repositoryWorkUnit.setAvoidUnloadResources(true);
    repositoryWorkUnit.setUnloadResourcesAfterRun(true);
    ProxyRepositoryFactory.getInstance().executeRepositoryWorkUnit(repositoryWorkUnit);
    monitor.done();
    // for (ItemRecord itemRecord : itemRecords) {
    // itemRecord.clear();
    // }
    clearAllData();
    if (hasJoblets) {
        // force the component palette to pick up the imported joblets
        ComponentsFactoryProvider.getInstance().resetSpecificComponents();
    }
    TimeMeasure.end("importItemRecords");
    TimeMeasure.display = false;
    TimeMeasure.displaySteps = false;
    TimeMeasure.measureActive = false;
    return itemRecords;
}
/**
 * Marks as deleted every folder that was recreated during the import but was flagged
 * as deleted in the source project (collected in {@code foldersCreated} by
 * importItemRecord), then forgets the bookkeeping map.
 */
private void checkDeletedFolders() {
    if (foldersCreated.isEmpty()) {
        return;
    }
    final ProxyRepositoryFactory factory = ProxyRepositoryFactory.getInstance();
    for (ERepositoryObjectType type : foldersCreated.keySet()) {
        for (String folderPath : foldersCreated.get(type)) {
            final FolderItem folder = factory.getFolderItem(ProjectManager.getInstance().getCurrentProject(), type,
                    new Path(folderPath));
            if (folder != null) {
                // restore the "deleted" flag so the folder lands in the recycle bin,
                // matching the state it had in the original project
                folder.getState().setDeleted(true);
            }
        }
    }
    foldersCreated.clear();
}
/**
 * Releases every piece of state accumulated during an import session: deleted-item
 * bookkeeping, the repository cache (unless it can be kept, see below), the tree
 * model, loaded EMF resources and the project/folder maps.
 */
public void clearAllData() {
    deletedItems.clear();
    // The repository cache is kept only for a headless commandline import that logs
    // on to the same local project; in every other situation it must be dropped.
    final boolean headless = CommonsPlugin.isHeadless();
    final boolean keepCache = headless && CommonsPlugin.isSameProjectLogonCommline()
            && ProjectManager.getInstance().getCurrentProject().isLocal();
    if (!keepCache) {
        cache.clear();
    }
    treeBuilder.clear();
    xmiResourceManager.unloadResources();
    xmiResourceManager.resetResourceSet();
    projects.clear();
    foldersCreated.clear();
}
/**
 * Imports a single validated item record into the current project: resolves the
 * record's resources, computes/creates the destination folder, deletes or restores
 * any pre-existing item with the same id/name according to the overwrite rules,
 * applies project-setting reloads to process items, creates the item in the
 * repository (copying screenshot and reference files), restores folder deletion
 * flags, unloads the record's resources and finally runs the migration tasks.
 *
 * @param manager access to the files being imported
 * @param itemRecord the record to import (skipped if it fails resolution)
 * @param overwrite whether an existing item with the same name/id may be replaced
 * @param destinationPath optional destination folder for items of type contentType
 * @param overwriteDeletedItems ids already restored from the recycle bin (bug 10520)
 * @param idDeletedBeforeImport ids whose old versions were already physically deleted
 * @param contentType repository type name the destinationPath applies to
 * @param monitor progress reporting
 */
private void importItemRecord(ResourcesManager manager, ItemRecord itemRecord, boolean overwrite, IPath destinationPath,
        final Set<String> overwriteDeletedItems, final Set<String> idDeletedBeforeImport, String contentType,
        final IProgressMonitor monitor) {
    monitor.subTask(Messages.getString("ImportItemWizardPage.Importing") + itemRecord.getItemName()); //$NON-NLS-1$
    resolveItem(manager, itemRecord);
    if (!itemRecord.isValid()) {
        return;
    }
    // sanity check: a resolved record should contain a very small number of XMI
    // resources; more than 2 indicates a corrupted source file and aborts the import
    // of this record (the threshold was raised to 2 so 4.1 metadata migration works)
    int num = 0;
    for (Object obj : itemRecord.getResourceSet().getResources()) {
        if (!(obj instanceof PropertiesProjectResourceImpl)) {
            if (obj instanceof XMIResourceImpl) {
                num++;
                if (num > 2) {// more XMI resources than a healthy record can have
                    try {
                        throw new InvocationTargetException(new PersistenceException("The source file of "
                                + itemRecord.getLabel() + " has error,Please check it!"));
                    } catch (InvocationTargetException e) {
                        ExceptionHandler.process(e);
                    }
                    return;
                }
            }
        }
    }
    final Item item = itemRecord.getItem();
    if (item != null) {
        ProxyRepositoryFactory repFactory = ProxyRepositoryFactory.getInstance();
        ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(item);
        IPath path = new Path(item.getState().getPath());
        if (destinationPath != null && itemType.name().equals(contentType)) {
            // redirect the item under the user-chosen destination folder
            path = destinationPath.append(path);
        }
        try {
            FolderItem folderItem = repFactory
                    .getFolderItem(ProjectManager.getInstance().getCurrentProject(), itemType, path);
            if (folderItem == null) {
                // if this folder does not exists (and it's parents), it will check if the folder was originally
                // deleted in source project.
                // if yes, it will set back the delete status to the folder, to keep the same as the original
                // project when import.
                // Without this code, deleted folders of items imported will not be in the recycle bin after import.
                // delete status is set finally in the function checkDeletedFolders
                IPath curPath = path;
                EList deletedFoldersFromOriginalProject = itemRecord.getItemProject().getDeletedFolders();
                while (folderItem == null && !curPath.isEmpty() && !curPath.isRoot()) {
                    if (deletedFoldersFromOriginalProject.contains(new Path(itemType.getFolder()).append(
                            curPath.toPortableString()).toPortableString())) {
                        if (!foldersCreated.containsKey(itemType)) {
                            foldersCreated.put(itemType, new HashSet<String>());
                        }
                        foldersCreated.get(itemType).add(curPath.toPortableString());
                    }
                    if (curPath.segments().length > 0) {
                        // walk up to the nearest existing ancestor folder
                        curPath = curPath.removeLastSegments(1);
                        folderItem = repFactory.getFolderItem(ProjectManager.getInstance().getCurrentProject(), itemType,
                                curPath);
                    }
                }
            }
            repFactory.createParentFoldersRecursively(ProjectManager.getInstance().getCurrentProject(), itemType, path, true);
        } catch (Exception e) {
            // fall back to the repository root if the folder cannot be created
            logError(e);
            path = new Path(""); //$NON-NLS-1$
        }
        try {
            Item tmpItem = item;
            // delete existing items before importing, this should be done
            // once for a different id
            String id = itemRecord.getProperty().getId();
            IRepositoryViewObject lastVersion = itemRecord.getExistingItemWithSameId();
            if (lastVersion != null
                    && overwrite
                    && !itemRecord.isLocked()
                    && (itemRecord.getState() == State.ID_EXISTED || itemRecord.getState() == State.NAME_EXISTED || itemRecord
                            .getState() == State.NAME_AND_ID_EXISTED) && !deletedItems.contains(id)) {
                if (!overwriteDeletedItems.contains(id)) { // bug 10520.
                    // restore a recycled item once per id before overwriting it
                    ERepositoryStatus status = repFactory.getStatus(lastVersion);
                    if (status == ERepositoryStatus.DELETED) {
                        repFactory.restoreObject(lastVersion, path); // restore first.
                    }
                    overwriteDeletedItems.add(id);
                }
                /* only delete when name exsit rather than id exist */
                if (itemRecord.getState().equals(ItemRecord.State.NAME_EXISTED)
                        || itemRecord.getState().equals(ItemRecord.State.NAME_AND_ID_EXISTED)) {
                    if (!idDeletedBeforeImport.contains(id)) {
                        // TDI-19535 (check if exists, delete all items with same id)
                        List<IRepositoryViewObject> allVersionToDelete = repFactory.getAllVersion(ProjectManager
                                .getInstance().getCurrentProject(), lastVersion.getId(), false);
                        String importingLabel = itemRecord.getProperty().getLabel();
                        String existLabel = lastVersion.getProperty().getLabel();
                        for (IRepositoryViewObject currentVersion : allVersionToDelete) {
                            repFactory.forceDeleteObjectPhysical(lastVersion, currentVersion.getVersion(),
                                    isNeedDeleteOnRemote(importingLabel, existLabel));
                        }
                        idDeletedBeforeImport.add(id);
                    }
                }
                // from here on a null lastVersion means "create as a new item"
                lastVersion = null;
                // List<IRepositoryObject> list = cache.findObjectsByItem(itemRecord);
                // if (!list.isEmpty()) {
                // // this code will delete all version of item with same
                // // id
                // repFactory.forceDeleteObjectPhysical(list.get(0));
                // deletedItems.add(id);
                // }
            }
            // map the exported author onto a local login when possible
            User author = itemRecord.getProperty().getAuthor();
            if (author != null) {
                if (!repFactory.setAuthorByLogin(tmpItem, author.getLogin())) {
                    tmpItem.getProperty().setAuthor(null); // author will be
                    // the logged
                    // user in
                    // create method
                }
            }
            if (item instanceof JobletProcessItem) {
                // remembered so importItemRecords can refresh the palette afterwards
                hasJoblets = true;
            }
            if (tmpItem instanceof ProcessItem && !statAndLogsSettingsReloaded && !implicitSettingsReloaded) {
                ProcessItem processItem = (ProcessItem) tmpItem;
                ParametersType paType = processItem.getProcess().getParameters();
                boolean statsPSettingRemoved = false;
                // for commanline import project setting
                if (itemRecord.isRemoveProjectStatslog()) {
                    if (paType != null) {
                        String paramName = "STATANDLOG_USE_PROJECT_SETTINGS";
                        EList listParamType = paType.getElementParameter();
                        for (int j = 0; j < listParamType.size(); j++) {
                            ElementParameterType pType = (ElementParameterType) listParamType.get(j);
                            if (pType != null && paramName.equals(pType.getName())) {
                                pType.setValue(Boolean.FALSE.toString());
                                statsPSettingRemoved = true;
                                break;
                            }
                        }
                    }
                }
                // 14446: item apply project setting param if use project setting
                String statslogUsePSetting = null;
                String implicitUsePSetting = null;
                if (paType != null) {
                    EList listParamType = paType.getElementParameter();
                    for (int j = 0; j < listParamType.size(); j++) {
                        ElementParameterType pType = (ElementParameterType) listParamType.get(j);
                        if (pType != null) {
                            if (!statsPSettingRemoved && "STATANDLOG_USE_PROJECT_SETTINGS".equals(pType.getName())) {
                                statslogUsePSetting = pType.getValue();
                            }
                            if ("IMPLICITCONTEXT_USE_PROJECT_SETTINGS".equals(pType.getName())) {
                                implicitUsePSetting = pType.getValue();
                            }
                            // stop once every flag we still need has been found
                            if (statsPSettingRemoved && implicitUsePSetting != null || !statsPSettingRemoved
                                    && implicitUsePSetting != null && statslogUsePSetting != null) {
                                break;
                            }
                        }
                    }
                }
                if (statslogUsePSetting != null && Boolean.parseBoolean(statslogUsePSetting) && !statAndLogsSettingsReloaded) {
                    CorePlugin.getDefault().getDesignerCoreService()
                            .reloadParamFromProjectSettings(paType, "STATANDLOG_USE_PROJECT_SETTINGS");
                    statAndLogsSettingsReloaded = true;
                }
                if (implicitUsePSetting != null && Boolean.parseBoolean(implicitUsePSetting) && !implicitSettingsReloaded) {
                    CorePlugin.getDefault().getDesignerCoreService()
                            .reloadParamFromProjectSettings(paType, "IMPLICITCONTEXT_USE_PROJECT_SETTINGS");
                    implicitSettingsReloaded = true;
                }
            }
            if (lastVersion == null || itemRecord.getState().equals(ItemRecord.State.ID_EXISTED)) {
                // import has not been developed to cope with migration in mind
                // so some model may not be able to load like the ConnectionItems
                // in that case items needs to be copied before migration
                // here we check that the loading of the item failed before calling the create method
                boolean isConnectionEmptyBeforeMigration = tmpItem instanceof ConnectionItem
                        && ((ConnectionItem) tmpItem).getConnection().eResource() == null
                        && !itemRecord.getMigrationTasksToApply().isEmpty();
                repFactory.create(tmpItem, path, true);
                if (isConnectionEmptyBeforeMigration) {// copy the file before migration, this is bad because it
                    // should not refer to Filesytem
                    // but this is a quick hack and anyway the migration task only works on files
                    // IPath itemPath = itemRecord.getPath().removeFileExtension().addFileExtension(
                    // FileConstants.ITEM_EXTENSION);
                    InputStream is = manager.getStream(itemRecord.getPath().removeFileExtension()
                            .addFileExtension(FileConstants.ITEM_EXTENSION));
                    try {
                        URI propertyResourceURI = EcoreUtil.getURI(((ConnectionItem) tmpItem).getProperty());
                        URI relativePlateformDestUri = propertyResourceURI.trimFileExtension().appendFileExtension(
                                FileConstants.ITEM_EXTENSION);
                        URL fileURL = FileLocator.toFileURL(new java.net.URL(
                                "platform:/resource" + relativePlateformDestUri.toPlatformString(true))); //$NON-NLS-1$
                        OutputStream os = new FileOutputStream(fileURL.getFile());
                        try {
                            FileCopyUtils.copyStreams(is, os);
                        } finally {
                            os.close();
                        }
                    } finally {
                        is.close();
                    }
                    repFactory.unloadResources(tmpItem.getProperty());
                } else {
                    // connections from migrations (from 4.0.x or previous version) doesn't support reference or
                    // screenshots
                    // so no need to call this code.
                    // It's needed to avoid to call the save method mainly just before or after the copy of the old
                    // connection since it will
                    copyScreenshotFile(manager, itemRecord);
                    boolean haveRef = copyReferenceFiles(manager, tmpItem, itemRecord.getPath());
                    if (haveRef) {
                        repFactory.save(tmpItem, true);
                    }
                }
                // record where/what was imported so migration tasks can find it back
                itemRecord.setImportPath(path.toPortableString());
                itemRecord.setRepositoryType(itemType);
                itemRecord.setItemId(itemRecord.getProperty().getId());
                itemRecord.setItemVersion(itemRecord.getProperty().getVersion());
                itemRecord.setImported(true);
                cache.addToCache(tmpItem);
            } else if (VersionUtils.compareTo(lastVersion.getProperty().getVersion(), tmpItem.getProperty().getVersion()) < 0) {
                // the imported version is newer than the existing one: add it as a new version
                repFactory.forceCreate(tmpItem, path);
                itemRecord.setImportPath(path.toPortableString());
                itemRecord.setItemId(itemRecord.getProperty().getId());
                itemRecord.setRepositoryType(itemType);
                itemRecord.setItemVersion(itemRecord.getProperty().getVersion());
                itemRecord.setImported(true);
                cache.addToCache(tmpItem);
            } else {
                // existing version is same or newer and overwrite did not apply: report failure
                PersistenceException e = new PersistenceException(Messages.getString(
                        "ImportItemUtil.persistenceException", tmpItem.getProperty())); //$NON-NLS-1$
                itemRecord.addError(e.getMessage());
                logError(e);
            }
            if (tmpItem != null) {
                // RelationshipItemBuilder.getInstance().addOrUpdateItem(tmpItem, true);
                if (tmpItem.getState() != null) {
                    if (itemType != null) {
                        // un-delete the destination folder chain if it had been restored
                        // for this import (stop at system folders)
                        final Set<String> folders = restoreFolder.getFolders(itemType);
                        if (folders != null) {
                            for (String folderPath : folders) {
                                if (folderPath != null && folderPath.equals(path.toString())) {
                                    FolderItem folderItem = repFactory.getFolderItem(ProjectManager.getInstance()
                                            .getCurrentProject(), itemType, path);
                                    if (folderItem != null) {
                                        folderItem.getState().setDeleted(false);
                                        while (!(folderItem.getParent() instanceof Project)) {
                                            folderItem = (FolderItem) folderItem.getParent();
                                            if (folderItem.getType() == FolderType.SYSTEM_FOLDER_LITERAL) {
                                                break;
                                            }
                                            folderItem.getState().setDeleted(false);
                                        }
                                    }
                                    break;
                                }
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            itemRecord.addError(e.getMessage());
            logError(e);
        }
    }
    String label = itemRecord.getLabel();
    // unload every resource of the record to free memory before the next item
    EList<Resource> resources = itemRecord.getResourceSet().getResources();
    Iterator<Resource> iterator = resources.iterator();
    while (iterator.hasNext()) {
        Resource res = iterator.next();
        // Due to the system of lazy loading for db repository of ByteArray,
        // it can't be unloaded just after create the item.
        if (res != null && !(res instanceof ByteArrayResource)) {
            res.unload();
            iterator.remove();
        }
    }
    TimeMeasure.step("importItemRecords", "Import item: " + label);
    applyMigrationTasks(itemRecord, monitor);
    TimeMeasure.step("importItemRecords", "applyMigrationTasks: " + label);
}
/**
 * Decides whether the remote copy of an existing item must be physically deleted
 * before import: this is the case when the imported label only differs from the
 * existing one by character case (e.g. "myJob" vs "MyJob"), which case-insensitive
 * backends treat as the same physical resource.
 *
 * @param importingLabel label of the item being imported (may be null)
 * @param existLabel label of the item already present in the repository
 * @return true when the labels match ignoring case but are not strictly equal
 */
private boolean isNeedDeleteOnRemote(String importingLabel, String existLabel) {
    // BUGFIX: the original tested importingLabel.equalsIgnoreCase(importingLabel),
    // a self-comparison that is always true for a non-null label, which made the
    // method return true for ANY different label. The intent is to detect a
    // case-only rename, so the case-insensitive comparison must target existLabel.
    return importingLabel != null && importingLabel.equalsIgnoreCase(existLabel)
            && !importingLabel.equals(existLabel);
}
// added by dlin 2011-7-25 don't like .item and .property ,just copy .screenshot file will be ok
/**
 * Copies the .screenshot companion file of a job/joblet item from the import source
 * into the workspace, next to the freshly created .properties resource. Items of any
 * other type are skipped, as are sources that ship no screenshot file (exports made
 * before the screenshot file existed).
 *
 * @param manager provides access to the files being imported
 * @param itemRecord record of the item whose screenshot should be copied
 * @throws IOException if reading the source or writing the destination fails
 */
private void copyScreenshotFile(ResourcesManager manager, ItemRecord itemRecord) throws IOException {
    int id = itemRecord.getItem().eClass().getClassifierID();
    if (id != PropertiesPackage.PROCESS_ITEM && id != PropertiesPackage.JOBLET_PROCESS_ITEM) {
        // only jobs and joblets carry screenshot files
        return;
    }
    OutputStream os = null;
    InputStream is = null;
    try {
        URI propertyResourceURI = EcoreUtil.getURI(itemRecord.getItem().getProperty());
        URI relativePlateformDestUri = propertyResourceURI.trimFileExtension().appendFileExtension(
                FileConstants.SCREENSHOT_EXTENSION);
        URL fileURL = FileLocator.toFileURL(new java.net.URL(
                "platform:/resource" + relativePlateformDestUri.toPlatformString(true))); //$NON-NLS-1$
        // for migration task, there is no .screenshot file in preceding versions - begin
        boolean hasScreenshotFile = false;
        Iterator it = manager.getPaths().iterator();
        IPath screenshotNeeded = itemRecord.getPath().removeFileExtension()
                .addFileExtension(FileConstants.SCREENSHOT_EXTENSION);
        while (it.hasNext()) {
            IPath path = (IPath) it.next();
            if (path.equals(screenshotNeeded)) {
                hasScreenshotFile = true;
                break;
            }
        }
        if (!hasScreenshotFile) {
            return;
        }
        // for migration task, there is no .screenshot file in preceding versions - end
        // (FIX: removed a dead "manager.getPaths().iterator().next();" call whose
        // result was discarded and had no effect)
        os = new FileOutputStream(fileURL.getFile());
        is = manager.getStream(screenshotNeeded);
        FileCopyUtils.copyStreams(is, os);
    } finally {
        // close both streams; nesting guarantees a failure while closing the output
        // stream cannot leak the input stream
        try {
            if (os != null) {
                os.close();
            }
        } finally {
            if (is != null) {
                is.close();
            }
        }
    }
}
/**
 * Copies every reference resource (companion files declared by the item) from the
 * import source into the workspace, next to the item's .properties resource.
 *
 * @param manager provides access to the files being imported
 * @param tmpItem the item whose reference files should be copied
 * @param pathToRead source path of the item inside the import archive/folder
 * @return true if the item declared at least one reference resource
 * @throws IOException if reading a source or writing a destination fails
 */
private boolean copyReferenceFiles(ResourcesManager manager, Item tmpItem, IPath pathToRead) throws IOException {
    OutputStream os = null;
    InputStream is = null;
    boolean haveRef = false;
    List<ReferenceFileItem> refItems = tmpItem.getReferenceResources();
    URI propertyResourceURI = EcoreUtil.getURI(tmpItem.getProperty());
    for (ReferenceFileItem refItem : refItems) {
        haveRef = true;
        URI relativePlateformDestUri = propertyResourceURI.trimFileExtension().appendFileExtension(refItem.getExtension());
        try {
            URL fileURL = FileLocator.toFileURL(new java.net.URL(
                    "platform:/resource" + relativePlateformDestUri.toPlatformString(true))); //$NON-NLS-1$
            os = new FileOutputStream(fileURL.getFile());
            is = manager.getStream(pathToRead.removeFileExtension().addFileExtension(refItem.getExtension()));
            FileCopyUtils.copyStreams(is, os);
        } finally {
            // FIX: close the two streams in nested finally blocks — the original closed
            // os first and would skip is.close() (leaking the input stream) whenever
            // os.close() threw an IOException.
            try {
                if (os != null) {
                    os.close();
                }
            } finally {
                if (is != null) {
                    is.close();
                }
            }
        }
    }
    return haveRef;
}
/**
 * Runs the source project's pending migration tasks against the freshly imported
 * item: locates the imported version in the current project, executes the migration
 * tasks recorded on the record, collects external module names for routine items
 * (so their jars can be deployed later), and re-attaches the migrated property to
 * the record. All failures are logged, never propagated.
 *
 * @param itemRecord the record of the just-imported item (provides id/path/version
 *            and the list of migration task ids to apply)
 * @param monitor progress reporting for the migration service
 */
private void applyMigrationTasks(ItemRecord itemRecord, IProgressMonitor monitor) {
    Context ctx = CorePlugin.getContext();
    RepositoryContext repositoryContext = (RepositoryContext) ctx.getProperty(Context.REPOSITORY_CONTEXT_KEY);
    org.talend.core.model.general.Project project = repositoryContext.getProject();
    ERepositoryObjectType repositoryType = itemRecord.getRepositoryType();
    Item item = null;
    try {
        // find the imported item among all versions at the import path; the loop keeps
        // the one whose version matches the record's imported version
        List<IRepositoryViewObject> allVersion = ProxyRepositoryFactory.getInstance().getAllVersion(
                ProjectManager.getInstance().getCurrentProject(), itemRecord.getItemId(), itemRecord.getImportPath(),
                repositoryType);
        for (IRepositoryViewObject repositoryObject : allVersion) {
            if (repositoryObject.getProperty().getVersion().equals(itemRecord.getItemVersion())) {
                item = repositoryObject.getProperty().getItem();
            }
        }
        if (item == null) {
            // nothing found to migrate (item may have failed to import)
            return;
        }
        CorePlugin.getDefault().getMigrationToolService()
                .executeMigrationTasksForImport(project, item, itemRecord.getMigrationTasksToApply(), monitor);
        if (item instanceof RoutineItem) {
            // remember the routine's external modules so importItemRecords can deploy
            // the corresponding jars after all items are imported
            RoutineItem rItem = (RoutineItem) item;
            Set<String> set = routineExtModulesMap.get(rItem.getProperty().getId());
            if (set == null) {
                set = new HashSet<String>();
                routineExtModulesMap.put(rItem.getProperty().getId(), set);
            }
            for (IMPORTType type : (List<IMPORTType>) rItem.getImports()) {
                set.add(type.getMODULE());
            }
        }
        // drop transient state and point the record at the (possibly migrated) property
        itemRecord.setExistingItemWithSameId(null);
        itemRecord.clear();
        itemRecord.setProperty(item.getProperty());
    } catch (Exception e) {
        logError(e);
    }
}
// private void applyMigrationTasks(ItemRecord itemRecord, IProgressMonitor monitor) {
// Context ctx = CorePlugin.getContext();
// RepositoryContext repositoryContext = (RepositoryContext) ctx.getProperty(Context.REPOSITORY_CONTEXT_KEY);
// ITalendSynchronizer routineSynchronizer = getRoutineSynchronizer();
//
// ERepositoryObjectType repositoryType = itemRecord.getRepositoryType();
//
// Item item = null;
// try {
// List<IRepositoryViewObject> allVersion = ProxyRepositoryFactory.getInstance().getAllVersion(
// ProjectManager.getInstance().getCurrentProject(), itemRecord.getItemId(), itemRecord.getImportPath(),
// repositoryType);
// for (IRepositoryViewObject repositoryObject : allVersion) {
// if (repositoryObject.getProperty().getVersion().equals(itemRecord.getItemVersion())) {
// item = repositoryObject.getProperty().getItem();
// }
// }
// } catch (Exception e) {
// logError(e);
// }
//
// if (item == null) {
// return;
// }
//
// List<IProjectMigrationTask> toExecute = new ArrayList<IProjectMigrationTask>();
// for (String taskId : itemRecord.getMigrationTasksToApply()) {
// IProjectMigrationTask task = GetTasksHelper.getInstance().getProjectTask(taskId);
// if (task == null) {
// log.warn(Messages.getString("ImportItemUtil.taskLogWarn", taskId)); //$NON-NLS-1$
// } else if (!task.isDeprecated()) {
// toExecute.add(task);
// }
//
// }
// Collections.sort(toExecute, new Comparator<IProjectMigrationTask>() {
//
// public int compare(IProjectMigrationTask o1, IProjectMigrationTask o2) {
// return o1.getOrder().compareTo(o2.getOrder());
// }
// });
//
// IProxyRepositoryFactory factory = ProxyRepositoryFactory.getInstance();
//
// for (IProjectMigrationTask task : toExecute) {
// monitor.subTask(Messages.getString("ImportItemUtil.taskMonitor", task.getName(), itemRecord.getItemName())); //$NON-NLS-1$
// try {
// // in case the resource has been modified (see MergeTosMetadataMigrationTask for example)
// if ((item.getProperty().eResource() == null || item.eResource() == null)) {
// Property updatedProperty = factory.reload(item.getProperty());
// item = updatedProperty.getItem();
// }
//
// if (item != null) {
// ExecutionResult executionResult = task.execute(repositoryContext.getProject(), item);
// if (executionResult == ExecutionResult.FAILURE) {
// log.warn(Messages.getString("ImportItemUtil.itemLogWarn", itemRecord.getItemName(), task.getName())); //$NON-NLS-1$
// // TODO smallet add a warning/error to the job using
// // model
// }
// }
// } catch (Exception e) {
// log.warn(Messages.getString("ImportItemUtil.itemLogException", itemRecord.getItemName(), task.getName()), e); //$NON-NLS-1$
// try {
// factory.deleteObjectPhysical(new RepositoryObject(item.getProperty()));
// break;// stop migrating the object it has be deleted
// } catch (PersistenceException e1) {
// log.error("Could not delete physical item(" + item.getProperty().getLabel() + "), Project may be corrupted.",
// e);
// }
// }
// }
//
// try {
// if (item != null && item instanceof RoutineItem) {
// RoutineUtils.changeRoutinesPackage(item);
// RoutineItem routineItem = (RoutineItem) item;
// routineSynchronizer.forceSyncRoutine(routineItem);
// routineSynchronizer.syncRoutine(routineItem, true);
// routineSynchronizer.getFile(routineItem);
// }
// // if (item.getProperty().eResource().isModified()) {
// // ProxyRepositoryFactory.getInstance().save(item, true);
// // item.getProperty().eResource().setModified(false);
// // }
// if (item.getProperty().eResource() != null) {
// ProxyRepositoryFactory.getInstance().unloadResources(item.getProperty());
// if (item.getParent() != null && item.getParent() instanceof FolderItem) {
// ((FolderItem) item.getParent()).getChildren().remove(item);
// item.setParent(null);
// }
// }
//
// itemRecord.setExistingItemWithSameId(null);
// itemRecord.clear();
//
// } catch (Exception e) {
// logError(e);
// }
// }
/**
 * Builds the routine synchronizer matching the code-generation language of the
 * current project (Java or Perl).
 *
 * @return a synchronizer able to regenerate routine source files
 * @throws UnsupportedOperationException for any other project language
 */
private ITalendSynchronizer getRoutineSynchronizer() {
    final ICodeGeneratorService codeGenService = (ICodeGeneratorService) GlobalServiceRegister.getDefault().getService(
            ICodeGeneratorService.class);
    final RepositoryContext repositoryContext = (RepositoryContext) CorePlugin.getContext()
            .getProperty(Context.REPOSITORY_CONTEXT_KEY);
    final ECodeLanguage language = repositoryContext.getProject().getLanguage();
    switch (language) {
    case JAVA:
        return codeGenService.createJavaRoutineSynchronizer();
    case PERL:
        return codeGenService.createPerlRoutineSynchronizer();
    default:
        throw new UnsupportedOperationException(Messages.getString("ImportItemUtil.unknowException", language)); //$NON-NLS-1$
    }
}
/**
 * Records that at least one error happened during the current import session and
 * routes the exception to the common exception handler, which takes care of logging.
 *
 * @param e the exception to report
 */
private void logError(Exception e) {
    // remember that the overall import finished with problems
    hasErrors = true;
    ExceptionHandler.process(e);
}
/**
 * Exposes the project/item tree computed while populating the import dialog.
 *
 * @return the root nodes used as input of the import tree viewer
 */
public List<ProjectNode> getTreeViewInput() {
    final List<ProjectNode> treeInput = treeBuilder.getInput();
    return treeInput;
}
/**
 * Scans the import source for .properties files, builds an {@link ItemRecord} for
 * each, validates it (name/id clashes, source project, migration compatibility,
 * hadoop subitem handling) and fills the tree viewer model.
 *
 * Need to return the items sorted by version so they can be correctly imported
 * later (older versions first).
 *
 * @param collector access to the files being imported
 * @param overwrite whether existing items may be overwritten (affects validation)
 * @param progressMonitor progress reporting and cancellation
 * @return all discovered records, sorted by ascending item version
 */
public List<ItemRecord> populateItems(ResourcesManager collector, boolean overwrite, IProgressMonitor progressMonitor) {
    // time measurements are only active in debug mode
    TimeMeasure.display = CommonsPlugin.isDebugMode();
    TimeMeasure.displaySteps = CommonsPlugin.isDebugMode();
    TimeMeasure.measureActive = CommonsPlugin.isDebugMode();
    TimeMeasure.begin("populateItems");
    treeBuilder.clear();
    // drop the repository cache unless this is a headless commandline logon to the
    // same local project (same policy as clearAllData)
    if ((!CommonsPlugin.isSameProjectLogonCommline() && CommonsPlugin.isHeadless()) || !CommonsPlugin.isHeadless()
            || !ProjectManager.getInstance().getCurrentProject().isLocal()) {
        cache.clear();
    }
    projects.clear();
    routineExtModulesMap.clear();
    List<ItemRecord> items = new ArrayList<ItemRecord>();
    // first pass just counts .properties files for accurate progress reporting
    int nbItems = 0;
    for (IPath path : collector.getPaths()) {
        if (isPropertyPath(path)) {
            nbItems++;
        }
    }
    progressMonitor.beginTask("Populate items to import", nbItems); //$NON-NLS-1$
    for (IPath path : collector.getPaths()) {
        if (!progressMonitor.isCanceled()) {
            if (isPropertyPath(path)) {
                // IPath itemPath = getItemPath(path);
                // if (collector.getPaths().contains(itemPath)) { //commet by tdq import
                ItemRecord itemRecord = computeItemRecord(collector, path);
                if (itemRecord.getProperty() != null) {
                    items.add(itemRecord);
                    if (checkItem(itemRecord, overwrite)) {
                        // the author is an EMF proxy pointing into the source project file
                        InternalEObject author = (InternalEObject) itemRecord.getProperty().getAuthor();
                        URI uri = null;
                        if (author != null) {
                            uri = author.eProxyURI();
                        }
                        IPath projectFilePath = getValidProjectFilePath(collector, path, uri);
                        if (projectFilePath != null) {
                            Project project = computeProject(collector, itemRecord, projectFilePath);
                            if (checkProject(project, itemRecord)) {
                                // hadoop subitems are attached to their cluster, not shown alone
                                if (!checkHadoopSubitem(collector, itemRecord)) {
                                    treeBuilder.addItem(project, itemRecord);
                                }
                                // set item project into record.
                                itemRecord.setItemProject(project);
                                // we can try to import item
                                // and we will try to resolve user
                                if (uri != null) {
                                    User user = (User) project.eResource().getEObject(uri.fragment());
                                    itemRecord.getProperty().setAuthor(user);
                                }
                            }
                        } else {
                            ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(itemRecord.getItem());
                            if (itemType.isDIItemType()) {
                                itemRecord.addError(Messages.getString("RepositoryUtil.ProjectNotFound")); //$NON-NLS-1$
                            }
                        }
                    }
                }
                // }
                progressMonitor.worked(1);
            }
        } else {
            break;
        }
    }
    // sort ascending by version so older versions are imported before newer ones
    Collections.sort(items, new Comparator<ItemRecord>() {

        @Override
        public int compare(ItemRecord o1, ItemRecord o2) {
            return VersionUtils.compareTo(o1.getProperty().getVersion(), o2.getProperty().getVersion());
        }
    });
    // release the repository snapshot unless it may be reused (headless local project)
    if (!CommonsPlugin.isHeadless() || !ProjectManager.getInstance().getCurrentProject().isLocal()) {
        for (List<IRepositoryViewObject> list : cache.getItemsFromRepository().values()) {
            list.clear();
        }
        cache.getItemsFromRepository().clear();
    }
    TimeMeasure.end("populateItems");
    TimeMeasure.display = false;
    TimeMeasure.displaySteps = false;
    TimeMeasure.measureActive = false;
    return items;
}
/**
 * Validates that an item's source project is usable: resolved (non-null), updated to
 * the current migration-task system, and migration-compatible with the current
 * project.
 *
 * @param project the project the item was exported from (null when not resolved)
 * @param itemRecord the record to which validation errors are attached
 * @return true when the item can be imported from that project
 */
private boolean checkProject(Project project, ItemRecord itemRecord) {
    // FIX: reject a missing project up front; the original ran the migration-system
    // update with the project reference (NPE risk) before performing the null check.
    if (project == null) {
        itemRecord.addError(Messages.getString("RepositoryUtil.ProjectNotFound")); //$NON-NLS-1$
        return false;
    }
    // update the old project which hasn't adapted to the new migration task system.
    if (!updatedProjects.contains(project)) {
        CorePlugin.getDefault().getMigrationToolService().updateMigrationSystem(project, false);
        updatedProjects.add(project);
    }
    Project currentProject = ProjectManager.getInstance().getCurrentProject().getEmfProject();
    return checkMigrationTasks(currentProject, project, itemRecord);
}
/**
 * Checks whether the given record is a subitem of a hadoop cluster item record.
 *
 * The record is resolved first (loading its item content) so the hadoop cluster
 * service can inspect it.
 *
 * @param manager resource manager providing access to the imported files
 * @param itemRecord the candidate record
 * @return true only when the item is a valid hadoop subitem
 */
public static boolean checkHadoopSubitem(ResourcesManager manager, ItemRecord itemRecord) {
    Item candidate = itemRecord.getItem();
    if (candidate == null) {
        return false;
    }
    IHadoopClusterService clusterService = HadoopRepositoryUtil.getHadoopClusterService();
    if (clusterService == null || !clusterService.isHadoopSubItem(candidate)) {
        return false;
    }
    // Resolve the record so the validity check can see the loaded item content.
    new ImportItemUtil().resolveItem(manager, itemRecord);
    return clusterService.isValidHadoopSubItem(candidate);
}
/**
 * Collects all subitem records that belong to the given hadoop cluster record.
 *
 * Improvements: {@code isEmpty()} instead of {@code size() == 0}, and the subitem id
 * list is wrapped in a {@link HashSet} so the membership test inside the loop is O(1)
 * instead of a linear scan per record.
 *
 * @param manager resource manager providing access to the imported files
 * @param totalItemRecords all records of the current import
 * @param itemRecord the candidate hadoop cluster record
 * @return the subitem records of the cluster; empty when the record is not a cluster
 */
public static Set<ItemRecord> collectHadoopSubrecords(ResourcesManager manager, List<ItemRecord> totalItemRecords,
        ItemRecord itemRecord) {
    Set<ItemRecord> subnodes = new HashSet<ItemRecord>();
    Item item = itemRecord.getItem();
    if (item == null) {
        return subnodes;
    }
    IHadoopClusterService hadoopClusterService = HadoopRepositoryUtil.getHadoopClusterService();
    if (hadoopClusterService == null || !hadoopClusterService.isHadoopClusterItem(item)) {
        return subnodes;
    }
    // Resolve the cluster record so its subitem ids can be read.
    new ImportItemUtil().resolveItem(manager, itemRecord);
    Set<String> subitemIds = new HashSet<String>(hadoopClusterService.getSubitemIdsOfHadoopCluster(item));
    if (subitemIds.isEmpty()) {
        return subnodes;
    }
    for (ItemRecord record : totalItemRecords) {
        Property property = record.getProperty();
        if (property != null && subitemIds.contains(property.getId())) {
            subnodes.add(record);
        }
    }
    return subnodes;
}
/**
 * Ids of migration tasks that are optional on import: obsolete tasks kept only so
 * projects exported by very old product versions still import cleanly.
 *
 * @return a mutable list of optional migration task ids
 */
private List<String> getOptionnalMigrationTasks() {
    final String[] optionalTaskIds = {
            "org.talend.repository.documentation.migrationtask.generatejobdocmigrationtask", //$NON-NLS-1$
            // old task, added for an old version of TOS, not used anymore.
            "org.talend.repository.migration.ReplaceOldContextScriptCodeMigrationTask", //$NON-NLS-1$
            "org.talend.designer.core.model.process.migration.SynchronizeSchemaOnlyForPerlDemo", //$NON-NLS-1$
            "org.talend.repository.model.migration.RenametFSFilterRow" }; //$NON-NLS-1$
    List<String> toReturn = new ArrayList<String>(optionalTaskIds.length);
    Collections.addAll(toReturn, optionalTaskIds);
    return toReturn;
}
/**
 * Checks whether items exported from {@code importedProject} can be migrated into
 * {@code currentProject}, and records the migration tasks that must run on import.
 *
 * The verdict and task list are cached per imported-project label in
 * {@code migrationTasksStatusPerProject} / {@code migrationTasksToApplyPerProject},
 * so the (expensive) migration-tool check runs at most once per project.
 *
 * @param currentProject the EMF project currently open in the studio
 * @param importedProject the EMF project the item was exported from
 * @param itemRecord the record being imported; receives the tasks to apply, or an error
 * @return true when the item can be imported (migration tasks are applicable)
 */
private boolean checkMigrationTasks(Project currentProject, Project importedProject, ItemRecord itemRecord) {
    String importedProjectLabel = importedProject.getTechnicalLabel();
    // Fast path: this project was already checked — reuse the cached verdict.
    if (migrationTasksStatusPerProject.containsKey(importedProjectLabel)) {
        if (migrationTasksStatusPerProject.get(importedProjectLabel)) {
            itemRecord.setMigrationTasksToApply(migrationTasksToApplyPerProject.get(importedProjectLabel));
            return true;
        } else {
            String message = Messages.getString("ImportItemUtil.cannotImportMessage", importedProjectLabel); //$NON-NLS-1$
            itemRecord.addError(message);
            log.info("'" + itemRecord.getItemName() + "' " + message);
            return false;
        }
    }
    boolean canApplyMigration = false;
    List<MigrationTask> migrationTasks = new ArrayList<MigrationTask>();
    if (CorePlugin.getDefault().getMigrationToolService().checkMigrationTasks(importedProject)) {
        // Tasks to apply = tasks known to the current project minus those the imported
        // project already ran (optional/forced tasks are filtered from the latter first).
        List<MigrationTask> currentProjectMigrationTasks = new ArrayList<MigrationTask>(currentProject.getMigrationTask());
        List<MigrationTask> importedProjectMigrationTasks = new ArrayList<MigrationTask>(importedProject.getMigrationTask());
        MigrationUtil.removeMigrationTaskByIds(importedProjectMigrationTasks, getOptionnalMigrationTasks());
        // Force this relations task to re-run on import even if it ran before.
        MigrationUtil.removeMigrationTaskById(importedProjectMigrationTasks,
                "org.talend.repository.model.migration.AutoUpdateRelationsMigrationTask"); //$NON-NLS-1$
        MigrationUtil.removeMigrationTaskByMigrationTasks(currentProjectMigrationTasks, importedProjectMigrationTasks);
        itemRecord.setMigrationTasksToApply(currentProjectMigrationTasks);
        migrationTasks = currentProjectMigrationTasks;
        canApplyMigration = true;
        migrationTasksStatusPerProject.put(importedProjectLabel, true);
    } else {
        String message = Messages.getString("ImportItemUtil.cannotImportMessage", importedProjectLabel); //$NON-NLS-1$
        itemRecord.addError(message);
        log.info("'" + itemRecord.getItemName() + "' " + message);
        migrationTasksStatusPerProject.put(importedProjectLabel, false);
    }
    migrationTasksToApplyPerProject.put(importedProjectLabel, migrationTasks);
    return canApplyMigration;
}
@SuppressWarnings("unchecked")
// private boolean checkMigrationTasks(Project project, ItemRecord itemRecord, Project currentProject) {
// List<String> itemMigrationTasks = new ArrayList<String>(project.getMigrationTasks());
// List<String> projectMigrationTasks = new ArrayList<String>(currentProject.getMigrationTasks());
//
// itemMigrationTasks.removeAll(getOptionnalMigrationTasks());
//
// // check version + revision
// // String oldProjectVersion = project.getProductVersion();
// // String currentProjectVersion = currentProject.getProductVersion();
// // boolean currentVersionIsValid = isVersionValid(currentProjectVersion);
// // boolean oldVersionIsValid = isVersionValid(oldProjectVersion);
// // if (currentVersionIsValid && oldVersionIsValid) {
// // boolean canImport = canContinueImport(oldProjectVersion, currentProjectVersion);
// // if (!canImport) {
// // String message = "The version of " + project.getLabel() + " should be lower than the current project.";
// // itemRecord.addError(message);
// // log.info(message);
// //
// // return false;
// // }
// // }
//
// // Talend Platform Big Data edition-5.0.2.r78327 / Talend Open Studio for Data Integration-5.1.0NB.r80928
//
// // the 2 are valid versions SO
//
// // 1. Check if all the migration tasks of the items are done in the
// // project:
// // if not, the item use a more recent version of TOS: impossible to
// // import (forward compatibility)
// // if no correct version and revision found in the productVersion, do same as before
// if (!projectMigrationTasks.containsAll(itemMigrationTasks)) {
// itemMigrationTasks.removeAll(projectMigrationTasks);
//
// String message = Messages.getString("ImportItemUtil.message", itemRecord.getItemName(), itemMigrationTasks); //$NON-NLS-1$
// itemRecord.addError(message);
// log.info(message);
//
// return false;
// }
// // force to redo this migration task, even if already did before.
// itemMigrationTasks.remove("org.talend.repository.model.migration.AutoUpdateRelationsMigrationTask");
//
// // 2. Get all the migration tasks to apply on this item on import
// // (backwards compatibility)
// // (those that are in the project but not in the item)
// projectMigrationTasks.removeAll(itemMigrationTasks);
// itemRecord.setMigrationTasksToApply(projectMigrationTasks);
//
// return true;
// }
/**
 * Walks up from the item's path looking for the directory that contains the
 * talend.project file of the archive being imported.
 *
 * @param collector resource manager whose paths are searched
 * @param path the path of the item's .properties file
 * @param uri unused here; kept for signature compatibility with callers
 * @return the full path of the talend.project file, or null when none is found
 */
private IPath getValidProjectFilePath(ResourcesManager collector, IPath path, URI uri) {
    Set<IPath> allPaths = collector.getPaths();
    IPath candidate = path.removeLastSegments(1);
    while (candidate.lastSegment() != null) {
        IPath projectFile = candidate.append(FileConstants.LOCAL_PROJECT_FILENAME);
        if (allPaths.contains(projectFile)) {
            return projectFile;
        }
        candidate = candidate.removeLastSegments(1);
    }
    // The loop exhausted every segment; check the root level as well.
    IPath rootProjectFile = candidate.append(FileConstants.LOCAL_PROJECT_FILENAME);
    return allPaths.contains(rootProjectFile) ? rootProjectFile : null;
}
/**
 * Builds an {@link ItemRecord} for the given property path and eagerly loads its
 * Property from the managed resources.
 *
 * @param collector resource manager used to read the property file
 * @param path path of the .properties file inside the import source
 * @return the new record (its property may be null if loading failed)
 */
private ItemRecord computeItemRecord(ResourcesManager collector, IPath path) {
    final ItemRecord record = new ItemRecord(path);
    computeProperty(collector, record);
    return record;
}
/**
 * Loads the Property object from the record's .properties file into a fresh EMF
 * resource and stores it on the record. Any failure is silently ignored — the record
 * simply keeps a null property, which callers treat as "invalid or unknown item".
 *
 * @param manager resource manager used to open the property stream
 * @param itemRecord record whose property is to be loaded
 */
private void computeProperty(ResourcesManager manager, ItemRecord itemRecord) {
    InputStream stream = null;
    try {
        stream = manager.getStream(itemRecord.getPath());
        final Resource resource = createResource(itemRecord, itemRecord.getPath(), false);
        // Temporarily swap in a converter that refuses plain "http" URIs unless the
        // package is already registered, so loading never triggers network access.
        URIConverter uriConverter = resource.getResourceSet().getURIConverter();
        resource.getResourceSet().setURIConverter(new ExtensibleURIConverterImpl() {
            /*
             * Blocks opening streams for unregistered http-scheme URIs; everything
             * else is delegated to the default converter.
             *
             * @see org.eclipse.emf.ecore.resource.impl.ExtensibleURIConverterImpl#createInputStream(org.eclipse.
             * emf.common.util.URI, java.util.Map)
             */
            @Override
            public InputStream createInputStream(URI uri, Map<?, ?> options) throws IOException {
                InputStream inputStream = null;
                EPackage ePackage = resource.getResourceSet().getPackageRegistry().getEPackage(uri.toString());
                if (ePackage != null || !"http".equals(uri.scheme())) {
                    inputStream = super.createInputStream(uri, options);
                } else {
                    inputStream = null;
                }
                return inputStream;
            }
        });
        EmfHelper.loadResource(resource, stream, null);
        // Restore the original converter before extracting the loaded Property.
        resource.getResourceSet().setURIConverter(uriConverter);
        itemRecord.setProperty((Property) EcoreUtil.getObjectByType(resource.getContents(),
                PropertiesPackage.eINSTANCE.getProperty()));
    } catch (Exception e) {
        // ignore, must be one invalid or unknown item
    } finally {
        if (stream != null) {
            try {
                stream.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
}
/**
 * Loads the item file (and any reference files) backing the given record into its
 * resource set and resolves its cross references, then marks the record resolved.
 *
 * Fixes a resource leak in the original code: the main item stream and every
 * reference-file stream except the last were opened but never closed (only the last
 * value of the reused {@code stream} variable was closed in the finally block).
 *
 * @param manager provides access to the streams of the import source
 * @param itemRecord record whose item content should be resolved; errors are added
 *        to it when the backing .item file is missing
 */
@SuppressWarnings("unchecked")
public void resolveItem(ResourcesManager manager, ItemRecord itemRecord) {
    if (itemRecord.isResolved()) {
        return;
    }
    try {
        final Item item = itemRecord.getItem();
        // FileItems (routines, etc.) are loaded through a byte-array resource.
        boolean byteArray = (item instanceof FileItem);
        IPath itemPath = getItemPath(itemRecord.getPath(), item);
        Set<IPath> paths = manager.getPaths();
        // The .item file must be present among the imported resources.
        if (!paths.contains(itemPath)) {
            itemRecord.addError(itemRecord.getItemName() + " " + Messages.getString("ImportItemUtil.MissingItemFile") + " - "
                    + itemPath);
            log.error(itemRecord.getItemName()
                    + " " + Messages.getString("ImportItemUtil.MissingItemFile") + " - " + itemPath); //$NON-NLS-1$
            return;
        }
        Resource resource;
        InputStream stream = manager.getStream(itemPath);
        try {
            resource = createResource(itemRecord, itemPath, byteArray);
            if (byteArray) {
                // TDI-24612: importing routines from .tar.gz fails when EMF reads the
                // tar stream directly, so buffer the whole entry in memory first.
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                byte[] buf = new byte[1024];
                int i = 0;
                while ((i = stream.read(buf)) != -1) {
                    baos.write(buf, 0, i);
                }
                ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
                resource.load(bais, null);
            } else {
                resource.load(stream, null);
            }
        } finally {
            stream.close();
        }
        // Load every referenced resource file that accompanies the item.
        for (ReferenceFileItem rfItem : (List<ReferenceFileItem>) item.getReferenceResources()) {
            itemPath = getReferenceItemPath(itemRecord.getPath(), rfItem.getExtension());
            InputStream refStream = manager.getStream(itemPath);
            try {
                Resource rfResource = createResource(itemRecord, itemPath, true);
                rfResource.load(refStream, null);
            } finally {
                refStream.close();
            }
        }
        // NOTE(review): when reference files exist, itemPath now points at the LAST
        // reference file (it is reassigned in the loop above), so the recovery check
        // below compares against that name rather than the .item file. Preserved
        // exactly as the original behaved — confirm whether this is intended.
        Iterator<EObject> itRef = item.eCrossReferences().iterator();
        IPath parentPath = itemRecord.getPath().removeLastSegments(1);
        while (itRef.hasNext()) {
            EObject object = itRef.next();
            String linkedFile = EcoreUtil.getURI(object).toFileString();
            IPath linkedPath = parentPath.append(linkedFile);
            if (!paths.contains(linkedPath)) {
                if (linkedFile != null && !linkedFile.equals(itemPath.lastSegment())
                        && linkedFile.endsWith(itemPath.getFileExtension())) {
                    if (object.eIsProxy()) {
                        // If the original href of the item points to a missing item file
                        // and the file name lets us recover it, do so — but warn.
                        ((EObjectImpl) object).eSetProxyURI(URI.createFileURI(itemPath.lastSegment()));
                        log.warn(itemRecord.getItemName()
                                + " " + Messages.getString("ImportItemUtil.NotHrefCurrentItemFile") + " - " + itemRecord.getPath()); //$NON-NLS-1$
                    }
                }
            }
            EcoreUtil.resolve(object, resource);
        }
    } catch (IOException e) {
        // best effort: an unreadable item is detected later via its null property
    }
    itemRecord.setResolved(true);
}
/**
 * Re-attaches the content objects loaded in {@code resource} to the item, working
 * around URI-encoded special characters in references (bug 6252): characters such as
 * "[" / "]" are encoded as "%5B" / "%5D" in hrefs, which breaks automatic resolution,
 * so the matching content object is looked up by type and set on the item manually.
 *
 * Item kinds whose names cannot contain special characters (jobs, joblets, routines)
 * are deliberately skipped — their commented-out branches are kept for reference.
 *
 * @param itemRecord record owning the item to fix up
 * @param resource the loaded resource containing the item's content objects
 */
@SuppressWarnings("unchecked")
private void resetItemReference(ItemRecord itemRecord, Resource resource) {
    Item item = itemRecord.getItem();
    EList<EObject> contents = resource.getContents();
    /*
     * ignore job. no need, because it can't be allowed input special char for name.
     */
    if (item instanceof ProcessItem) {
        // ((ProcessItem) item).setProcess((ProcessType) EcoreUtil.getObjectByType(contents,
        // TalendFilePackage.eINSTANCE
        // .getProcessType()));
    } else
    /*
     * ignore joblet. no need, because it can't be allowed input special char for name.
     */
    if (item instanceof JobletProcessItem) {
        // JobletProcessItem jobletProcessItem = (JobletProcessItem) item;
        //
        // jobletProcessItem.setJobletProcess((JobletProcess) EcoreUtil.getObjectByType(contents,
        // JobletPackage.eINSTANCE
        // .getJobletProcess()));
        // jobletProcessItem
        // .setIcon((ByteArray) EcoreUtil.getObjectByType(contents, PropertiesPackage.eINSTANCE.getByteArray()));
    } else
    // connectionItem: re-bind the Connection content object.
    if (item instanceof ConnectionItem) {
        ((ConnectionItem) item).setConnection((Connection) EcoreUtil.getObjectByType(contents,
                ConnectionPackage.eINSTANCE.getConnection()));
    } else
    // context: replace the context list with the loaded ContextType objects.
    if (item instanceof ContextItem) {
        EList contexts = ((ContextItem) item).getContext();
        contexts.clear();
        contexts.addAll(EcoreUtil.getObjectsByType(contents, TalendFilePackage.eINSTANCE.getContextType()));
    } else
    // file: re-bind the raw byte content (routines excluded, see below).
    if (item instanceof FileItem) {
        /*
         * ignore routine, no need, because it can't be allowed input special char for name.
         */
        if (item instanceof RoutineItem) {
            return;
        }
        FileItem fileItem = (FileItem) item;
        fileItem.setContent((ByteArray) EcoreUtil.getObjectByType(contents, PropertiesPackage.eINSTANCE.getByteArray()));
    } else
    // snippet: replace the variable list with the loaded SnippetVariable objects.
    if (item instanceof SnippetItem) {
        EList variables = ((SnippetItem) item).getVariables();
        variables.clear();
        variables.addAll(EcoreUtil.getObjectsByType(contents, PropertiesPackage.eINSTANCE.getSnippetVariable()));
    } else
    // link doc: re-bind the LinkType content object.
    if (item instanceof LinkDocumentationItem) {
        ((LinkDocumentationItem) item).setLink((LinkType) EcoreUtil.getObjectByType(contents,
                PropertiesPackage.eINSTANCE.getLinkType()));
    } else
    // business model: re-bind both the semantic model and its notation.
    if (item instanceof BusinessProcessItem) {
        BusinessProcessItem businessProcessItem = (BusinessProcessItem) item;
        businessProcessItem.setSemantic((BusinessProcess) EcoreUtil.getObjectByType(contents,
                BusinessPackage.eINSTANCE.getBusinessProcess()));
        businessProcessItem.setNotationHolder((NotationHolder) EcoreUtil.getObjectByType(contents,
                PropertiesPackage.eINSTANCE.getNotationHolder()));
    }
}
/**
 * Loads the talend.project file at the given path (once — results are cached in
 * {@code projects} keyed by path) and returns the EMF Project it describes.
 *
 * @param manager resource manager used to open the project stream
 * @param itemRecord record the project lookup was triggered for (used to create the resource)
 * @param path path of the talend.project file inside the import source
 * @return the loaded Project, or null when loading failed
 */
private Project computeProject(ResourcesManager manager, ItemRecord itemRecord, IPath path) {
    InputStream stream = null;
    try {
        if (!projects.containsKey(path)) {
            stream = manager.getStream(path);
            Resource resource = createResource(itemRecord, path, false);
            // NOTE(review): the resource is loaded twice here — resource.load() consumes
            // the stream, then EmfHelper.loadResource() receives the already-consumed
            // stream. One of the two calls is likely redundant (EMF's load is a no-op on
            // an already-loaded resource); confirm before removing either.
            resource.load(stream, null);
            EmfHelper.loadResource(resource, stream, null);
            Project project = (Project) EcoreUtil.getObjectByType(resource.getContents(), PropertiesPackage.eINSTANCE.getProject());
            // Project-level data (json settings) lives next to the project file.
            IPath projectRootPath = path.removeLastSegments(1);
            ProjectDataJsonProvider.loadProjectData(project, projectRootPath, manager);
            projects.put(path, project);
        }
        return projects.get(path);
    } catch (IOException | PersistenceException e) {
        // ignore — a null project is reported to the user by checkProject()
    } finally {
        if (stream != null) {
            try {
                stream.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
    return null;
}
/**
 * Creates an EMF resource for the given path and registers it in the record's
 * resource set. Byte-array resources are used for raw file content; .item files get a
 * TalendXMIResource or CwmResource depending on their folder (mirroring
 * CwmResourceFactory); anything else goes through the default factories.
 *
 * @param itemRecord record providing the target resource set (and project label)
 * @param path path used to derive the resource URI
 * @param byteArrayResource true to create a raw byte-array resource
 * @return the freshly created resource
 * @throws FileNotFoundException propagated from resource creation
 */
private Resource createResource(ItemRecord itemRecord, IPath path, boolean byteArrayResource) throws FileNotFoundException {
    ResourceSet resourceSet = itemRecord.getResourceSet();
    if (byteArrayResource) {
        Resource byteResource = new ByteArrayResource(getURI(path));
        resourceSet.getResources().add(byteResource);
        return byteResource;
    }
    if (!FileConstants.ITEM_EXTENSION.equals(path.getFileExtension())) {
        // Non-item files: let the resource set's registered factories decide.
        return resourceSet.createResource(getURI(path));
    }
    String projectName = "";
    if (itemRecord.getItemProject() != null) {
        projectName = itemRecord.getItemProject().getTechnicalLabel().toLowerCase();
    }
    // note: do similar code as the CwmResourceFactory
    String business = projectName + "/businessProcess/"; //$NON-NLS-1$
    String context = projectName + "/context/"; //$NON-NLS-1$
    String process = projectName + "/process/"; //$NON-NLS-1$
    String joblet = projectName + "/joblets/"; //$NON-NLS-1$
    String pathString = path.toPortableString().toLowerCase();
    // PTODO, maybe will bring bugs, like mr job, route, maybe jobscript
    // NOTE(review): "business" keeps a capital 'P' while pathString is lowercased, so
    // that branch can never match — preserved byte-for-byte from the original; confirm.
    Resource itemResource;
    if (pathString.contains(process) || pathString.contains(context) || pathString.contains(business)
            || pathString.contains(joblet)) {
        itemResource = new TalendXMIResource(getURI(path));
    } else {
        itemResource = new CwmResource(getURI(path));
    }
    resourceSet.getResources().add(itemResource);
    return itemResource;
}
/** Builds a lightweight URI from just the file name (last segment) of the path. */
private URI getURI(IPath path) {
    final String fileName = path.lastSegment();
    return URI.createURI(fileName);
}
/** Returns true when the path's file name denotes a Talend .properties file. */
private boolean isPropertyPath(IPath path) {
    final String fileName = path.lastSegment();
    return xmiResourceManager.isPropertyFile(fileName);
}
/**
 * Derives the path of an item's content file from its .properties path: drops the
 * properties extension, strips the trailing "_&lt;version&gt;" suffix for unversioned
 * items, and appends the item's own extension (or the default .item extension).
 *
 * Fix: the original called {@code substring(0, lastIndexOf('_'))} unconditionally and
 * threw StringIndexOutOfBoundsException for names without an underscore; the suffix is
 * now stripped only when present.
 *
 * @param path path of the item's .properties file
 * @param item the item whose content path is wanted
 * @return the derived content-file path
 */
private IPath getItemPath(IPath path, Item item) {
    IPath basePath = path.removeFileExtension();
    if (!item.isNeedVersion()) {
        String portableString = basePath.toPortableString();
        int versionSeparator = portableString.lastIndexOf('_');
        if (versionSeparator >= 0) {
            basePath = new Path(portableString.substring(0, versionSeparator));
        }
    }
    String extension = item.getFileExtension();
    if (extension != null) {
        return basePath.addFileExtension(extension);
    }
    return basePath.addFileExtension(FileConstants.ITEM_EXTENSION);
}
/** Swaps the record path's extension for the given reference-file extension. */
private IPath getReferenceItemPath(IPath path, String extension) {
    IPath withoutExtension = path.removeFileExtension();
    return withoutExtension.addFileExtension(extension);
}
/**
 * Lazily-initialized cache of repository view objects, keyed both by property id
 * (all versions of an object share one id) and by repository object type.
 *
 * Improvements: the raw {@code Collections.EMPTY_LIST} (unchecked) is replaced by a
 * typed empty list, and the duplicated "bucket by id" logic from addToCache and
 * initialize is factored into one private helper.
 */
static class RepositoryObjectCache {

    static ProxyRepositoryFactory factory = ProxyRepositoryFactory.getInstance();

    // Types whose repository content has already been loaded.
    private final Set<ERepositoryObjectType> types = new HashSet<ERepositoryObjectType>();

    // Lock state per property id, recorded while importing.
    private final Map<String, Boolean> lockState = new HashMap<String, Boolean>();

    // key is id of IRepositoryObject, value is a list of IRepositoryObject with same id
    private final Map<String, List<IRepositoryViewObject>> cache = new HashMap<String, List<IRepositoryViewObject>>();

    private final Map<ERepositoryObjectType, List<IRepositoryViewObject>> itemsFromRepository = new HashMap<ERepositoryObjectType, List<IRepositoryViewObject>>();

    /**
     * Returns all repository objects sharing the record's property id, loading the
     * record's type from the repository on first use.
     *
     * @throws PersistenceException when the repository load fails
     */
    public List<IRepositoryViewObject> findObjectsByItem(ItemRecord itemRecord) throws PersistenceException {
        Item item = itemRecord.getItem();
        ERepositoryObjectType type = ERepositoryObjectType.getItemType(item);
        initialize(type);
        List<IRepositoryViewObject> result = cache.get(itemRecord.getProperty().getId());
        if (result == null) {
            // typed empty list instead of the raw Collections.EMPTY_LIST
            result = Collections.<IRepositoryViewObject> emptyList();
        }
        return result;
    }

    /** Registers a freshly imported item in both the id cache and the per-type list. */
    public void addToCache(Item tmpItem) {
        ERepositoryObjectType itemType = ERepositoryObjectType.getItemType(tmpItem);
        IRepositoryViewObject newObject = new RepositoryViewObject(tmpItem.getProperty(), true);
        addToIdCache(newObject);
        List<IRepositoryViewObject> list = itemsFromRepository.get(itemType);
        if (list == null) {
            list = new ArrayList<IRepositoryViewObject>();
            itemsFromRepository.put(itemType, list);
        }
        list.add(newObject);
    }

    /**
     * Loads all repository objects of the given type once, converting them to
     * lightweight RepositoryViewObjects to save memory.
     *
     * @throws PersistenceException when the repository load fails
     */
    public void initialize(ERepositoryObjectType itemType) throws PersistenceException {
        if (types.contains(itemType)) {
            return;
        }
        types.add(itemType);
        // load object by type
        List<IRepositoryViewObject> list = factory.getAll(itemType, true, false);
        // change to RepositoryViewObject to save memory
        // (could be enhanced directly in repository for future versions)
        List<IRepositoryViewObject> newList = new ArrayList<IRepositoryViewObject>(list.size());
        for (IRepositoryViewObject obj : list) {
            IRepositoryViewObject newObject = new RepositoryViewObject(obj.getProperty(), true);
            addToIdCache(newObject);
            newList.add(newObject);
        }
        itemsFromRepository.put(itemType, newList);
    }

    /** Buckets the object under its property id, creating the bucket on first use. */
    private void addToIdCache(IRepositoryViewObject newObject) {
        List<IRepositoryViewObject> items = cache.get(newObject.getId());
        if (items == null) {
            items = new ArrayList<IRepositoryViewObject>();
            cache.put(newObject.getId(), items);
        }
        items.add(newObject);
    }

    /** Records whether the record's underlying object is locked. */
    public void setItemLockState(ItemRecord itemRecord, boolean state) {
        lockState.put(itemRecord.getProperty().getId(), state);
    }

    /** Returns the recorded lock state, or null when never recorded. */
    public Boolean getItemLockState(ItemRecord itemRecord) {
        return lockState.get(itemRecord.getProperty().getId());
    }

    /** Drops all cached state so the next lookup reloads from the repository. */
    public void clear() {
        types.clear();
        cache.clear();
        lockState.clear();
        itemsFromRepository.clear();
    }

    public Map<ERepositoryObjectType, List<IRepositoryViewObject>> getItemsFromRepository() {
        return itemsFromRepository;
    }
}
/**
 * Deploys external routine jars found among the managed resource paths to the
 * libraries service.
 *
 * Fix: uses {@code file.toURI().toURL()} instead of the deprecated {@code File.toURL()},
 * which does not escape illegal URL characters (e.g. spaces).
 *
 * @param manager resource manager whose paths are scanned for jar files
 * @param extRoutines file names of the external routine jars to deploy
 */
private void deployJarToDes(final ResourcesManager manager, Set<String> extRoutines) {
    if (extRoutines.isEmpty()) {
        return;
    }
    Set<URL> jarsToDeploy = new HashSet<URL>();
    for (Object element : manager.getPaths()) {
        File file = new File(element.toString());
        if (extRoutines.contains(file.getName())) {
            try {
                jarsToDeploy.add(file.toURI().toURL());
            } catch (MalformedURLException e) {
                ExceptionHandler.process(e);
            }
        }
    }
    if (!jarsToDeploy.isEmpty()) {
        ILibrariesService libService = (ILibrariesService) GlobalServiceRegister.getDefault().getService(
                ILibrariesService.class);
        try {
            libService.deployLibrarys(jarsToDeploy.toArray(new URL[0]));
        } catch (IOException e) {
            ExceptionHandler.process(e);
        }
    }
}
/**
 * Extracts external routine jars from an archive-backed resource manager into a
 * temporary directory and deploys them to the libraries service.
 *
 * Fix: both the archive input stream and the temp-file output stream leaked on any
 * exception in the original; they are now managed with try-with-resources.
 *
 * @param manager archive resource manager providing the jar streams
 * @param extRoutines file names of the external routine jars to deploy
 */
private void deployJarToDesForArchive(final ResourcesManager manager, Set<String> extRoutines) {
    if (extRoutines.isEmpty()) {
        return;
    }
    IPath tmpDir = new Path(System.getProperty("user.dir") + File.separatorChar + "tmpJar"); //$NON-NLS-1$
    Set<URL> jarsToDeploy = new HashSet<URL>();
    File dirFile = tmpDir.toFile();
    for (IPath path : manager.getPaths()) {
        String fileName = path.lastSegment();
        if (!extRoutines.contains(fileName)) {
            continue;
        }
        try {
            if (!dirFile.exists()) {
                dirFile.mkdirs();
            }
            File temFile = tmpDir.append(fileName).toFile();
            if (temFile.exists()) {
                temFile.delete();
            }
            // Copy the archive entry to a real file so it can be deployed by URL.
            try (InputStream is = manager.getStream(path);
                    BufferedOutputStream fos = new BufferedOutputStream(new FileOutputStream(temFile, true))) {
                byte[] b = new byte[1024];
                int length;
                while ((length = is.read(b)) != -1) {
                    fos.write(b, 0, length);
                }
            }
            jarsToDeploy.add(temFile.toURI().toURL());
        } catch (IOException e) {
            // MalformedURLException is an IOException; same handling as the original
            ExceptionHandler.process(e);
        }
    }
    if (!jarsToDeploy.isEmpty()) {
        ILibrariesService libService = (ILibrariesService) GlobalServiceRegister.getDefault().getService(
                ILibrariesService.class);
        try {
            libService.deployLibrarys(jarsToDeploy.toArray(new URL[0]));
        } catch (IOException e) {
            ExceptionHandler.process(e);
        }
    }
    // NOTE(review): File.delete() only removes the directory when it is empty, so the
    // extracted jars keep the temp dir alive — preserved from the original; confirm
    // whether the extracted files should be deleted first.
    dirFile.delete();
}
/**
 * Returns the live map of external module names per routine, as collected during the
 * import (callers receive the internal map, not a copy).
 */
public Map<String, Set<String>> getRoutineExtModulesMap() {
    return routineExtModulesMap;
}
/**
 * Checks whether a productVersion string contains a parsable version/revision pair of
 * the form "-x.y.z...rNNN" (e.g. "Talend Open Studio for Data Integration-5.1.0NB.r80928").
 *
 * Simplification: both capture groups are mandatory in the pattern, so a single
 * successful {@code find()} already guarantees non-null groups — the original's
 * exhaustive while-loop plus null checks is equivalent to one find() call.
 *
 * @param version the productVersion string (may be null)
 * @return true when a version and revision can be extracted
 */
private boolean isVersionValid(String version) {
    if (version == null) {
        return false;
    }
    Pattern p = Pattern.compile("\\-(\\d{1,2}\\.\\d{1,2}\\.\\d{1,2}).+r(\\d{1,6})");
    return p.matcher(version).find();
}
/**
 * Compares the product version/revision embedded in two productVersion strings and
 * refuses the import when the exported project is newer than the current one (forward
 * compatibility is not supported).
 *
 * Fix: the original threw NumberFormatException / NullPointerException when either
 * string did not match the version pattern ({@code Integer.valueOf(null)},
 * {@code null.replaceAll(...)}); unparsable versions now fall through to "allow",
 * matching the commented-out caller's intent of pre-validating with isVersionValid.
 *
 * @param oldProjectVersion productVersion of the exported project
 * @param currentProjectVersion productVersion of the current project
 * @return false when the exported project is provably newer; true otherwise
 */
private boolean canContinueImport(String oldProjectVersion, String currentProjectVersion) {
    Pattern p = Pattern.compile("\\-(\\d{1,2}\\.\\d{1,2}\\.\\d{1,2}).+r(\\d{1,6})");
    Matcher oldMatcher = p.matcher(oldProjectVersion);
    String oldProduct = null;
    String oldCommit = null;
    while (oldMatcher.find()) {
        oldProduct = oldMatcher.group(1);
        oldCommit = oldMatcher.group(2);
    }
    Matcher currentMatcher = p.matcher(currentProjectVersion);
    String currentProduct = null;
    while (currentMatcher.find()) {
        currentProduct = currentMatcher.group(1);
        String currentCommit = currentMatcher.group(2);
        // Preserve the original per-match comparison against the old revision.
        if (oldCommit != null && Integer.valueOf(oldCommit) > Integer.valueOf(currentCommit)) {
            return false;
        }
    }
    if (oldProduct == null || currentProduct == null) {
        // One of the versions is unparsable: cannot compare, do not block the import.
        return true;
    }
    String cp = currentProduct.replaceAll("\\.", "");
    String op = oldProduct.replaceAll("\\.", "");
    if (Integer.valueOf(op) > Integer.valueOf(cp)) {
        return false;
    }
    return true;
}
}
|
package uk.co.samuelpratt.sudoku.puzzlescanner;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.opencv.core.Mat;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * End-to-end tests: read a bundled sudoku photo, locate and extract the grid, then
 * parse every cell and compare against the known puzzle contents.
 */
@RunWith(AndroidJUnit4.class)
public class EndToEndTest {

    @Test
    public void endToEnd_getMatForPosition_CorrectMatReturned() throws PuzzleNotFoundException, IOException {
        Mat extractedPuzzleMat = getExtractedPuzzleMat(R.drawable.sudoku3);
        PuzzleParser sut = new PuzzleParser(extractedPuzzleMat, InstrumentationRegistry.getContext());
        for (int y = 0; y < 9; y++) {
            for (int x = 0; x < 9; x++) {
                Mat digit = sut.getMatForPosition(x, y);
                // Name the debug output after the running test method for easy inspection.
                String imageName = new Object() {
                }.getClass().getEnclosingMethod().getName();
                imageName = imageName + "y=" + y + "x=" + x;
                BitmapFixture.writePngForMat(digit, imageName);
            }
        }
    }

    @Test
    public void endToEnd_getPuzzle_allDigitsMatch() throws PuzzleNotFoundException, IOException {
        Mat extractedPuzzleMat = getExtractedPuzzleMat(R.drawable.sudoku);
        PuzzleParser sut = new PuzzleParser(extractedPuzzleMat, InstrumentationRegistry.getContext());
        Integer[][] extractedPuzzle = sut.getPuzzle();
        //Assert
        Integer[][] expectedPuzzle = new Integer[][]{
                {null, null, null, 9, null, 5, 2, null, 7},
                {null, null, 1, null, null, 4, 6, 8, null},
                {2, null, null, null, null, null, null, null, 1},
                {null, null, 3, null, 5, null, null, null, null},
                {null, null, 8, 3, null, 2, null, 4, null},
                {5, null, null, null, 7, null, null, null, null},
                {9, null, null, null, null, 3, null, 7, null},
                {null, null, 6, null, null, null, null, 5, null},
                {3, null, null, null, 8, null, null, null, null},
        };
        assertPuzzlesMatch(expectedPuzzle, extractedPuzzle);
    }

    @Test
    public void endToEnd_getPuzzle2_allDigitsMatch() throws PuzzleNotFoundException, IOException {
        Mat extractedPuzzleMat = getExtractedPuzzleMat(R.drawable.sudoku2);
        PuzzleParser sut = new PuzzleParser(extractedPuzzleMat, InstrumentationRegistry.getContext());
        Integer[][] extractedPuzzle = sut.getPuzzle();
        //Assert
        Integer[][] expectedPuzzle = new Integer[][]{
                {null, null, null, 9, null, 5, 2, null, 7},
                {null, null, 1, null, null, 4, 6, 8, null},
                {2, null, null, null, null, null, null, null, 1},
                {null, null, 3, null, 5, null, null, null, null},
                {null, null, 8, 3, null, 2, null, 4, null},
                {5, null, null, null, 7, null, null, null, null},
                {9, null, null, null, null, 3, null, 7, null},
                {null, null, 6, null, null, null, null, 5, null},
                {3, null, null, null, 8, null, null, null, null},
        };
        assertPuzzlesMatch(expectedPuzzle, extractedPuzzle);
    }

    @Test
    @Ignore
    public void endToEnd_getPuzzle3_allDigitsMatch() throws PuzzleNotFoundException, IOException {
        Mat extractedPuzzleMat = getExtractedPuzzleMat(R.drawable.sudoku3);
        PuzzleParser sut = new PuzzleParser(extractedPuzzleMat, InstrumentationRegistry.getContext());
        Integer[][] extractedPuzzle = sut.getPuzzle();
        //Assert
        Integer[][] expectedPuzzle = new Integer[][]{
                {null, null, null, null, null, null, 2, 4, null},
                {3, null, 2, null, null, null, null, null, 1},
                {9, null, null, null, null, null, null, null, null},
                {null, null, 8, null, null, 4, 5, 7, null},
                {4, null, null, null, null, 2, 3, null, null},
                {null, null, 6, 1, null, 5, null, 9, null},
                {null, null, 7, null, null, null, null, null, null},
                {null, 1, null, 2, 3, 8, null, null, null},
                {null, 5, null, null, null, null, null, null, null},
        };
        // This won't pass at the moment: it misses about 11 points due to the amount
        // of noise in the image. Need to think about whether this is worth fixing.
        assertPuzzlesMatch(expectedPuzzle, extractedPuzzle);
    }

    @Test
    public void endToEnd_getPuzzle4_allDigitsMatch() throws PuzzleNotFoundException, IOException {
        Mat extractedPuzzleMat = getExtractedPuzzleMat(R.drawable.sudoku4);
        PuzzleParser sut = new PuzzleParser(extractedPuzzleMat, InstrumentationRegistry.getContext());
        Integer[][] extractedPuzzle = sut.getPuzzle();
        //Assert
        Integer[][] expectedPuzzle = new Integer[][]{
                {8, null, null, null, null, null, null, null, null},
                {null, null, 7, 5, null, null, null, null, 9},
                {null, 3, null, null, null, null, 1, 8, null},
                {null, 6, null, null, null, 1, null, 5, null},
                {null, null, 9, null, 4, null, null, null, null},
                {null, null, null, 7, 5, null, null, null, null},
                {null, null, 2, null, 7, null, null, null, 4},
                {null, null, null, null, null, 3, 6, 1, null},
                {null, null, null, null, null, null, 8, null, null},
        };
        assertPuzzlesMatch(expectedPuzzle, extractedPuzzle);
    }

    /** Locates and perspective-extracts the sudoku grid from a drawable resource. */
    private Mat getExtractedPuzzleMat(int resource) throws PuzzleNotFoundException {
        //Read in the test Puzzle
        Mat mat = BitmapFixture.readBitMapFromResouce(resource);
        //Find the Puzzle
        PuzzleFinder finder = new PuzzleFinder(mat);
        Mat thresholdMat = finder.getThresholdMat();
        Mat largestBlobMat = finder.getLargestBlobMat();
        PuzzleOutLine puzzleOutline = finder.findOutLine();
        //Extract the Puzzle
        PuzzleExtractor extractor = new PuzzleExtractor(thresholdMat, largestBlobMat, puzzleOutline);
        return extractor.getExtractedPuzzleMat();
    }

    /**
     * Compares two 9x9 grids cell by cell and fails with a per-cell report.
     *
     * Fix: the original compared boxed Integers with {@code !=} (reference equality),
     * which only worked by accident because small Integer values are cached; the
     * comparison is now null-safe value equality. The failure message now also
     * includes the collected per-cell mismatches instead of only their count.
     *
     * NOTE(review): the grids are indexed [x][y] while the expected literals are
     * written row by row, so the X/Y labels in messages may be transposed — both
     * arrays use the same convention, so the comparison itself is consistent.
     */
    private void assertPuzzlesMatch(Integer[][] expectedPuzzle, Integer[][] extractedPuzzle) {
        List<String> failedPoints = new ArrayList<>();
        for (int y = 0; y < 9; y++) {
            for (int x = 0; x < 9; x++) {
                if (!java.util.Objects.equals(expectedPuzzle[x][y], extractedPuzzle[x][y])) {
                    failedPoints.add(String.format("Values at X=%d, Y=%d do not match. Expected %d, got %d\n", x, y,
                            expectedPuzzle[x][y], extractedPuzzle[x][y]));
                }
            }
        }
        if (!failedPoints.isEmpty()) {
            StringBuilder details = new StringBuilder();
            for (String failure : failedPoints) {
                details.append(failure);
            }
            Assert.fail(String.format("Found %d points where the grids don't match:\n", failedPoints.size()) + details);
        }
    }
}
|
/*
* Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.aggregation;
import com.hazelcast.config.Config;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.MapConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
import com.hazelcast.projection.Projections;
import com.hazelcast.test.HazelcastParallelParametersRunnerFactory;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import static com.hazelcast.spi.properties.GroupProperty.AGGREGATION_ACCUMULATION_PARALLEL_EVALUATION;
import static com.hazelcast.spi.properties.GroupProperty.PARTITION_COUNT;
import static java.util.Arrays.asList;
import static java.util.Collections.emptySet;
import static java.util.Collections.singletonList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
@RunWith(Parameterized.class)
@UseParametersRunnerFactory(HazelcastParallelParametersRunnerFactory.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class AggregatorsSpecTest extends HazelcastTestSupport {

    // Number of Person entries populated for the main tests; attribute values run 1..999,
    // so min == 1, max == 999, sum == 999*1000/2 == 499500 and avg == 500 in the assertions below.
    public static final int PERSONS_COUNT = 999;

    // Parameterized dimensions: map storage format, parallel vs. serial accumulation,
    // and the attribute-path postfix ("" for plain fields, "[any]" for multi-value fields).
    @SuppressWarnings("DefaultAnnotationParam")
    @Parameter(0)
    public InMemoryFormat inMemoryFormat;
    @Parameter(1)
    public boolean parallelAccumulation;
    @Parameter(2)
    public String postfix;

    @Parameters(name = "{0} parallelAccumulation={1}, postfix={2}")
    public static Collection<Object[]> parameters() {
        return asList(new Object[][]{
                {InMemoryFormat.BINARY, false, ""},
                {InMemoryFormat.OBJECT, false, ""},
                {InMemoryFormat.BINARY, true, ""},
                {InMemoryFormat.OBJECT, true, ""},
                {InMemoryFormat.BINARY, false, "[any]"},
                {InMemoryFormat.OBJECT, false, "[any]"},
                {InMemoryFormat.BINARY, true, "[any]"},
                {InMemoryFormat.OBJECT, true, "[any]"},
        });
    }

    /**
     * Happy-path check of every built-in aggregator family against a fully populated map.
     */
    @Test
    public void testAggregators() {
        IMap<Integer, Person> map = getMapWithNodeCount(3, parallelAccumulation);
        populateMapWithPersons(map, postfix, PERSONS_COUNT);

        assertMinAggregators(map, postfix);
        assertMaxAggregators(map, postfix);
        assertSumAggregators(map, postfix);
        assertAverageAggregators(map, postfix);
        assertCountAggregators(map, postfix);
        assertDistinctAggregators(map, postfix);
    }

    // Asserts that both the type-specific min aggregators and the generic comparableMin
    // resolve to the smallest populated value (1) for every attribute type.
    public static void assertMinAggregators(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, PERSONS_COUNT);
        assertEquals(Double.valueOf(1), map.aggregate(Aggregators.doubleMin("doubleValue" + p)));
        assertEquals(Long.valueOf(1), map.aggregate(Aggregators.longMin("longValue" + p)));
        assertEquals(Integer.valueOf(1), map.aggregate(Aggregators.integerMin("intValue" + p)));
        assertEquals(Integer.valueOf(1), map.aggregate(Aggregators.integerMin("optionalIntValue" + p)));
        assertEquals(BigDecimal.valueOf(1), map.aggregate(Aggregators.bigDecimalMin("bigDecimalValue" + p)));
        assertEquals(BigInteger.valueOf(1), map.aggregate(Aggregators.bigIntegerMin("bigIntegerValue" + p)));
        assertEquals(Double.valueOf(1), map.aggregate(Aggregators.comparableMin("doubleValue" + p)));
        assertEquals(Long.valueOf(1), map.aggregate(Aggregators.comparableMin("longValue" + p)));
        assertEquals(Integer.valueOf(1), map.aggregate(Aggregators.comparableMin("intValue" + p)));
        assertEquals(Integer.valueOf(1), map.aggregate(Aggregators.comparableMin("optionalIntValue" + p)));
        assertEquals(BigDecimal.valueOf(1), map.aggregate(Aggregators.comparableMin("bigDecimalValue" + p)));
        assertEquals(BigInteger.valueOf(1), map.aggregate(Aggregators.comparableMin("bigIntegerValue" + p)));
        assertEquals("1", map.aggregate(Aggregators.comparableMin("comparableValue" + p)));
        assertEquals("1", map.aggregate(Aggregators.comparableMin("optionalComparableValue" + p)));
    }

    // Mirror of assertMinAggregators for the max family; largest populated value is 999.
    public static void assertMaxAggregators(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, PERSONS_COUNT);
        assertEquals(Double.valueOf(999), map.aggregate(Aggregators.doubleMax("doubleValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.longMax("longValue" + p)));
        assertEquals(Integer.valueOf(999), map.aggregate(Aggregators.integerMax("intValue" + p)));
        assertEquals(Integer.valueOf(999), map.aggregate(Aggregators.integerMax("optionalIntValue" + p)));
        assertEquals(BigDecimal.valueOf(999), map.aggregate(Aggregators.bigDecimalMax("bigDecimalValue" + p)));
        assertEquals(BigInteger.valueOf(999), map.aggregate(Aggregators.bigIntegerMax("bigIntegerValue" + p)));
        assertEquals(Double.valueOf(999), map.aggregate(Aggregators.comparableMax("doubleValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.comparableMax("longValue" + p)));
        assertEquals(Integer.valueOf(999), map.aggregate(Aggregators.comparableMax("intValue" + p)));
        assertEquals(Integer.valueOf(999), map.aggregate(Aggregators.comparableMax("optionalIntValue" + p)));
        assertEquals(BigDecimal.valueOf(999), map.aggregate(Aggregators.comparableMax("bigDecimalValue" + p)));
        assertEquals(BigInteger.valueOf(999), map.aggregate(Aggregators.comparableMax("bigIntegerValue" + p)));
        assertEquals("999", map.aggregate(Aggregators.comparableMax("comparableValue" + p)));
        assertEquals("999", map.aggregate(Aggregators.comparableMax("optionalComparableValue" + p)));
    }

    // Sum of 1..999 is 499500 regardless of the numeric attribute type; fixedPointSum
    // returns Long and floatingPointSum returns Double for every source type.
    public static void assertSumAggregators(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, PERSONS_COUNT);
        assertEquals(Double.valueOf(499500.0d), map.aggregate(Aggregators.doubleSum("doubleValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.longSum("longValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.integerSum("intValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.integerSum("optionalIntValue" + p)));
        assertEquals(BigDecimal.valueOf(499500), map.aggregate(Aggregators.bigDecimalSum("bigDecimalValue" + p)));
        assertEquals(BigInteger.valueOf(499500), map.aggregate(Aggregators.bigIntegerSum("bigIntegerValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.fixedPointSum("doubleValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.fixedPointSum("longValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.fixedPointSum("intValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.fixedPointSum("optionalIntValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.fixedPointSum("bigIntegerValue" + p)));
        assertEquals(Long.valueOf(499500), map.aggregate(Aggregators.fixedPointSum("bigDecimalValue" + p)));
        assertEquals(Double.valueOf(499500), map.aggregate(Aggregators.floatingPointSum("doubleValue" + p)));
        assertEquals(Double.valueOf(499500), map.aggregate(Aggregators.floatingPointSum("longValue" + p)));
        assertEquals(Double.valueOf(499500), map.aggregate(Aggregators.floatingPointSum("intValue" + p)));
        assertEquals(Double.valueOf(499500), map.aggregate(Aggregators.floatingPointSum("optionalIntValue" + p)));
        assertEquals(Double.valueOf(499500), map.aggregate(Aggregators.floatingPointSum("bigIntegerValue" + p)));
        assertEquals(Double.valueOf(499500), map.aggregate(Aggregators.floatingPointSum("bigDecimalValue" + p)));
    }

    // Average of 1..999 is exactly 500; the BigDecimal/BigInteger averages return BigDecimal,
    // the numberAvg variants return Double for every source type.
    public static void assertAverageAggregators(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, PERSONS_COUNT);
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.doubleAvg("doubleValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.longAvg("longValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.integerAvg("intValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.integerAvg("optionalIntValue" + p)));
        assertEquals(BigDecimal.valueOf(500), map.aggregate(Aggregators.bigDecimalAvg("bigDecimalValue" + p)));
        assertEquals(BigDecimal.valueOf(500), map.aggregate(Aggregators.bigIntegerAvg("bigIntegerValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.numberAvg("doubleValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.numberAvg("longValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.numberAvg("intValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.numberAvg("optionalIntValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.numberAvg("bigDecimalValue" + p)));
        assertEquals(Double.valueOf(500.0d), map.aggregate(Aggregators.numberAvg("bigIntegerValue" + p)));
    }

    // Every attribute is non-null for all 999 persons, so each count equals PERSONS_COUNT.
    public static void assertCountAggregators(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, PERSONS_COUNT);
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("doubleValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("longValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("intValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("optionalIntValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("bigDecimalValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("bigIntegerValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("comparableValue" + p)));
        assertEquals(Long.valueOf(999), map.aggregate(Aggregators.count("optionalComparableValue" + p)));
    }

    // Cross-checks the distinct aggregator against a single-attribute projection of the same field.
    public static void assertDistinctAggregators(IMap<Integer, Person> map, String p) {
        // projections do not support [any] but we have one element only so here we go.
        assertNoDataMissing(map, PERSONS_COUNT);
        String projection = p.contains("[any]") ? "[0]" : "";
        assertCollectionEquals(map.project(Projections.singleAttribute("doubleValue" + projection)),
                map.aggregate(Aggregators.distinct("doubleValue" + p)));
        assertCollectionEquals(map.project(Projections.singleAttribute("longValue" + projection)),
                map.aggregate(Aggregators.distinct("longValue" + p)));
        assertCollectionEquals(map.project(Projections.singleAttribute("intValue" + projection)),
                map.aggregate(Aggregators.distinct("intValue" + p)));
        assertCollectionEquals(map.project(Projections.singleAttribute("optionalIntValue" + projection)),
                map.aggregate(Aggregators.distinct("optionalIntValue" + p)));
        assertCollectionEquals(map.project(Projections.singleAttribute("bigDecimalValue" + projection)),
                map.aggregate(Aggregators.distinct("bigDecimalValue" + p)));
        assertCollectionEquals(map.project(Projections.singleAttribute("bigIntegerValue" + projection)),
                map.aggregate(Aggregators.distinct("bigIntegerValue" + p)));
        assertCollectionEquals(map.project(Projections.singleAttribute("comparableValue" + projection)),
                map.aggregate(Aggregators.distinct("comparableValue" + p)));
        assertCollectionEquals(map.project(Projections.singleAttribute("optionalComparableValue" + projection)),
                map.aggregate(Aggregators.distinct("optionalComparableValue" + p)));
    }

    /**
     * Corner case: a single entry whose attributes are all null ([any] variant: null collections).
     * Min/max/avg yield null, count yields 0 or 1 depending on the variant, distinct yields
     * an empty set ([any]) or a set containing the single null value (plain fields).
     */
    @Test
    public void testAggregators_nullCornerCases() {
        IMap<Integer, Person> map = getMapWithNodeCount(3, parallelAccumulation);
        map.put(0, postfix.contains("[any]") ? PersonAny.nulls() : new Person());

        if (postfix.contains("[any]")) {
            assertMinAggregatorsAnyCornerCase(map, postfix);
            assertMaxAggregatorsAnyCornerCase(map, postfix);
            assertSumAggregatorsAnyCornerCase(map, postfix);
            assertAverageAggregatorsAnyCornerCase(map, postfix);
            assertCountAggregatorsAnyCornerCase(map, postfix, 0);
            assertDistinctAggregatorsAnyCornerCase(map, postfix, emptySet());
        } else {
            assertMinAggregatorsAnyCornerCase(map, postfix);
            assertMaxAggregatorsAnyCornerCase(map, postfix);
            // sum and avg do not accept null values, thus skipped
            assertCountAggregatorsAnyCornerCase(map, postfix, 1);
            HashSet<?> expected = new HashSet<>();
            expected.add(null);
            assertDistinctAggregatorsAnyCornerCase(map, postfix, expected);
        }
    }

    /**
     * Corner case: a single entry whose multi-value attributes are present but empty.
     * Only meaningful for the [any] variants; plain-field runs are a no-op here.
     */
    @Test
    public void testAggregators_emptyCornerCases() {
        IMap<Integer, Person> map = getMapWithNodeCount(3, parallelAccumulation);
        if (postfix.contains("[any]")) {
            map.put(0, PersonAny.empty());
            assertMinAggregatorsAnyCornerCase(map, postfix);
            assertMaxAggregatorsAnyCornerCase(map, postfix);
            assertSumAggregatorsAnyCornerCase(map, postfix);
            assertAverageAggregatorsAnyCornerCase(map, postfix);
            assertCountAggregatorsAnyCornerCase(map, postfix, 0);
            assertDistinctAggregatorsAnyCornerCase(map, postfix, emptySet());
        }
    }

    // NOTE(review): this helper is instance-scoped (private, non-static) while all its
    // siblings are public static — presumably an oversight; confirm before aligning.
    private void assertMinAggregatorsAnyCornerCase(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, 1);
        assertNull(map.aggregate(Aggregators.doubleMin("doubleValue" + p)));
        assertNull(map.aggregate(Aggregators.longMin("longValue" + p)));
        assertNull(map.aggregate(Aggregators.integerMin("intValue" + p)));
        assertNull(map.aggregate(Aggregators.integerMin("optionalIntValue" + p)));
        assertNull(map.aggregate(Aggregators.bigDecimalMin("bigDecimalValue" + p)));
        assertNull(map.aggregate(Aggregators.bigIntegerMin("bigIntegerValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("doubleValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("longValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("intValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("optionalIntValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("bigDecimalValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("bigIntegerValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("comparableValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMin("optionalComparableValue" + p)));
    }

    // With no usable values, every max aggregator yields null.
    public static void assertMaxAggregatorsAnyCornerCase(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, 1);
        assertNull(map.aggregate(Aggregators.doubleMax("doubleValue" + p)));
        assertNull(map.aggregate(Aggregators.longMax("longValue" + p)));
        assertNull(map.aggregate(Aggregators.integerMax("intValue" + p)));
        assertNull(map.aggregate(Aggregators.integerMax("optionalIntValue" + p)));
        assertNull(map.aggregate(Aggregators.bigDecimalMax("bigDecimalValue" + p)));
        assertNull(map.aggregate(Aggregators.bigIntegerMax("bigIntegerValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("doubleValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("longValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("intValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("optionalIntValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("bigDecimalValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("bigIntegerValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("comparableValue" + p)));
        assertNull(map.aggregate(Aggregators.comparableMax("optionalComparableValue" + p)));
    }

    // Unlike min/max, sums of an empty value set are the additive identity (0), not null.
    public static void assertSumAggregatorsAnyCornerCase(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, 1);
        assertEquals(Double.valueOf(0), map.aggregate(Aggregators.doubleSum("doubleValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.longSum("longValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.integerSum("intValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.integerSum("optionalIntValue" + p)));
        assertEquals(BigDecimal.valueOf(0), map.aggregate(Aggregators.bigDecimalSum("bigDecimalValue" + p)));
        assertEquals(BigInteger.valueOf(0), map.aggregate(Aggregators.bigIntegerSum("bigIntegerValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.fixedPointSum("doubleValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.fixedPointSum("longValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.fixedPointSum("intValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.fixedPointSum("optionalIntValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.fixedPointSum("bigIntegerValue" + p)));
        assertEquals(Long.valueOf(0), map.aggregate(Aggregators.fixedPointSum("bigDecimalValue" + p)));
        assertEquals(Double.valueOf(0), map.aggregate(Aggregators.floatingPointSum("doubleValue" + p)));
        assertEquals(Double.valueOf(0), map.aggregate(Aggregators.floatingPointSum("longValue" + p)));
        assertEquals(Double.valueOf(0), map.aggregate(Aggregators.floatingPointSum("intValue" + p)));
        assertEquals(Double.valueOf(0), map.aggregate(Aggregators.floatingPointSum("optionalIntValue" + p)));
        assertEquals(Double.valueOf(0), map.aggregate(Aggregators.floatingPointSum("bigIntegerValue" + p)));
        assertEquals(Double.valueOf(0), map.aggregate(Aggregators.floatingPointSum("bigDecimalValue" + p)));
    }

    // Averages over an empty value set are undefined, hence null.
    public static void assertAverageAggregatorsAnyCornerCase(IMap<Integer, Person> map, String p) {
        assertNoDataMissing(map, 1);
        assertNull(map.aggregate(Aggregators.doubleAvg("doubleValue" + p)));
        assertNull(map.aggregate(Aggregators.longAvg("longValue" + p)));
        assertNull(map.aggregate(Aggregators.integerAvg("intValue" + p)));
        assertNull(map.aggregate(Aggregators.integerAvg("optionalIntValue" + p)));
        assertNull(map.aggregate(Aggregators.bigDecimalAvg("bigDecimalValue" + p)));
        assertNull(map.aggregate(Aggregators.bigIntegerAvg("bigIntegerValue" + p)));
        assertNull(map.aggregate(Aggregators.numberAvg("doubleValue" + p)));
        assertNull(map.aggregate(Aggregators.numberAvg("longValue" + p)));
        assertNull(map.aggregate(Aggregators.numberAvg("intValue" + p)));
        assertNull(map.aggregate(Aggregators.numberAvg("optionalIntValue" + p)));
        assertNull(map.aggregate(Aggregators.numberAvg("bigDecimalValue" + p)));
        assertNull(map.aggregate(Aggregators.numberAvg("bigIntegerValue" + p)));
    }

    // value parameter: 0 for the [any] variants (no elements at all), 1 for plain null fields.
    public static void assertCountAggregatorsAnyCornerCase(IMap<Integer, Person> map, String p, long value) {
        assertNoDataMissing(map, 1);
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("doubleValue" + p)));
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("longValue" + p)));
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("intValue" + p)));
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("optionalIntValue" + p)));
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("bigDecimalValue" + p)));
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("bigIntegerValue" + p)));
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("comparableValue" + p)));
        assertEquals(Long.valueOf(value), map.aggregate(Aggregators.count("optionalComparableValue" + p)));
    }

    // NOTE(review): raw Set parameter — callers pass emptySet() or a HashSet containing null.
    public static void assertDistinctAggregatorsAnyCornerCase(IMap<Integer, Person> map, String p, Set result) {
        assertNoDataMissing(map, 1);
        assertEquals(result, map.aggregate(Aggregators.distinct("doubleValue" + p)));
        assertEquals(result, map.aggregate(Aggregators.distinct("longValue" + p)));
        assertEquals(result, map.aggregate(Aggregators.distinct("intValue" + p)));
        assertEquals(result, map.aggregate(Aggregators.distinct("optionalIntValue" + p)));
        assertEquals(result, map.aggregate(Aggregators.distinct("bigDecimalValue" + p)));
        assertEquals(result, map.aggregate(Aggregators.distinct("bigIntegerValue" + p)));
        assertEquals(result, map.aggregate(Aggregators.distinct("comparableValue" + p)));
        assertEquals(result, map.aggregate(Aggregators.distinct("optionalComparableValue" + p)));
    }

    /**
     * Boots a {@code nodeCount}-member cluster configured with the parameterized
     * in-memory format and accumulation mode, and returns the shared "aggr" map.
     * Note: PARTITION_COUNT is also set to nodeCount here.
     */
    protected <K, V> IMap<K, V> getMapWithNodeCount(int nodeCount, boolean parallelAccumulation) {
        if (nodeCount < 1) {
            throw new IllegalArgumentException("node count < 1");
        }

        MapConfig mapConfig = new MapConfig()
                .setName("aggr")
                .setInMemoryFormat(inMemoryFormat);

        Config config = getConfig()
                .setProperty(PARTITION_COUNT.getName(), String.valueOf(nodeCount))
                .setProperty(AGGREGATION_ACCUMULATION_PARALLEL_EVALUATION.getName(), String.valueOf(parallelAccumulation))
                .addMapConfig(mapConfig);

        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(nodeCount);
        HazelcastInstance instance = factory.newInstances(config)[0];
        return instance.getMap("aggr");
    }

    // Order-insensitive, duplicate-insensitive comparison via sorted sets.
    private static void assertCollectionEquals(Collection<?> a, Collection<?> b) {
        TreeSet<?> aSorted = new TreeSet<>(a);
        TreeSet<?> bSorted = new TreeSet<>(b);
        assertEquals(aSorted, bSorted);
    }

    // Guard against silent partition loss between operations.
    private static void assertNoDataMissing(IMap<Integer, Person> map, int expectedSize) {
        assertEquals("There is missing data in the map!", expectedSize, map.size());
    }

    // Fills the map with keys/values 1..count; the [any] postfix selects the
    // collection/array-backed PersonAny flavour instead of the scalar Person.
    public static void populateMapWithPersons(IMap<Integer, Person> map, String postfix, int count) {
        for (int i = 1; i <= count; i++) {
            map.put(i, postfix.contains("[any]") ? new PersonAny(i) : new Person(i));
        }
        assertNoDataMissing(map, count);
    }

    /**
     * Serializable fixture holding the same numeric value under every supported attribute type.
     * The no-arg constructor leaves all fields null (used by the null corner-case test).
     */
    @SuppressWarnings("WeakerAccess")
    public static class Person implements Serializable {

        public Integer intValue;
        public Double doubleValue;
        public Long longValue;
        public BigDecimal bigDecimalValue;
        public BigInteger bigIntegerValue;
        public String comparableValue;

        public Person() {
        }

        public Person(int numberValue) {
            this.intValue = numberValue;
            this.doubleValue = (double) numberValue;
            this.longValue = (long) numberValue;
            this.bigDecimalValue = BigDecimal.valueOf(numberValue);
            this.bigIntegerValue = BigInteger.valueOf(numberValue);
            this.comparableValue = String.valueOf(numberValue);
        }

        // Exercised reflectively via the "optionalIntValue" attribute path.
        @SuppressWarnings("unused")
        public Optional getOptionalIntValue() {
            return Optional.ofNullable(intValue);
        }

        // Exercised reflectively via the "optionalComparableValue" attribute path.
        @SuppressWarnings("unused")
        public Optional getOptionalComparableValue() {
            return Optional.ofNullable(comparableValue);
        }
    }

    /**
     * Variant of Person whose fields shadow the parent's with single-element arrays/lists,
     * enabling the "[any]" attribute-path tests. nulls() leaves every field null;
     * empty() populates each field with an empty array/list.
     */
    @SuppressWarnings("WeakerAccess")
    public static class PersonAny extends Person implements Serializable {

        // Intentionally shadow the scalar fields of Person with multi-value counterparts.
        public int[] intValue;
        public double[] doubleValue;
        public long[] longValue;
        public List<BigDecimal> bigDecimalValue;
        public List<BigInteger> bigIntegerValue;
        public List<String> comparableValue;

        public PersonAny() {
        }

        public PersonAny(int numberValue) {
            this.intValue = new int[]{numberValue};
            this.doubleValue = new double[]{numberValue};
            this.longValue = new long[]{numberValue};
            this.bigDecimalValue = singletonList(BigDecimal.valueOf(numberValue));
            this.bigIntegerValue = singletonList(BigInteger.valueOf(numberValue));
            this.comparableValue = singletonList(String.valueOf(numberValue));
        }

        @Override
        public Optional getOptionalIntValue() {
            return Optional.ofNullable(intValue);
        }

        @Override
        public Optional getOptionalComparableValue() {
            return Optional.ofNullable(comparableValue);
        }

        public static PersonAny empty() {
            PersonAny person = new PersonAny();
            person.intValue = new int[]{};
            person.doubleValue = new double[]{};
            person.longValue = new long[]{};
            person.bigDecimalValue = new ArrayList<>();
            person.bigIntegerValue = new ArrayList<>();
            person.comparableValue = new ArrayList<>();
            return person;
        }

        public static PersonAny nulls() {
            return new PersonAny();
        }
    }
}
|
package com.macro.mall.portal.controller;
import com.macro.mall.common.api.CommonResult;
import com.macro.mall.portal.service.UmsMemberReceiveAddressService;
import com.macro.mall.ums.model.UmsMemberReceiveAddress;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.List;
/**
 * REST controller exposing CRUD endpoints for a member's delivery addresses.
 * Created by macro on 2018/8/28.
 */
@Controller
@Api(tags = "UmsMemberReceiveAddressController", description = "会员收货地址管理")
@RequestMapping("/member/address")
public class UmsMemberReceiveAddressController {

    @Autowired
    private UmsMemberReceiveAddressService memberReceiveAddressService;

    /** Maps a service-layer boolean outcome onto the common success/failure envelope. */
    private static CommonResult booleanToResult(boolean succeeded) {
        if (!succeeded) {
            return CommonResult.failed();
        }
        return CommonResult.success(succeeded);
    }

    /** Creates a new delivery address for the current member. */
    @ApiOperation("添加收货地址")
    @RequestMapping(value = "/add", method = RequestMethod.POST)
    @ResponseBody
    public CommonResult add(@RequestBody UmsMemberReceiveAddress address) {
        return booleanToResult(memberReceiveAddressService.add(address));
    }

    /** Removes the delivery address with the given id. */
    @ApiOperation("删除收货地址")
    @RequestMapping(value = "/delete/{id}", method = RequestMethod.POST)
    @ResponseBody
    public CommonResult delete(@PathVariable Long id) {
        return booleanToResult(memberReceiveAddressService.delete(id));
    }

    /** Replaces the delivery address with the given id. */
    @ApiOperation("修改收货地址")
    @RequestMapping(value = "/update/{id}", method = RequestMethod.POST)
    @ResponseBody
    public CommonResult update(@PathVariable Long id, @RequestBody UmsMemberReceiveAddress address) {
        return booleanToResult(memberReceiveAddressService.update(id, address));
    }

    /** Lists every delivery address of the current member. */
    @ApiOperation("显示所有收货地址")
    @RequestMapping(value = "/list", method = RequestMethod.GET)
    @ResponseBody
    public CommonResult<List<UmsMemberReceiveAddress>> list() {
        return CommonResult.success(memberReceiveAddressService.list());
    }

    /** Fetches a single delivery address by id. */
    @ApiOperation("获取收货地址详情")
    @RequestMapping(value = "/{id}", method = RequestMethod.GET)
    @ResponseBody
    public CommonResult<UmsMemberReceiveAddress> getItem(@PathVariable Long id) {
        return CommonResult.success(memberReceiveAddressService.getItem(id));
    }
}
|
package app.util;
import com.gargoylesoftware.htmlunit.util.Cookie;
import com.gargoylesoftware.htmlunit.util.NameValuePair;
import java.util.List;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.safety.Whitelist;
import java.util.Iterator;
import java.util.Set;
public class WebTools {

    /**
     * Removes all HTML tags from texts and removes extra whitespaces at the start and the end of text.
     * This is the only method using Jsoup package.
     *
     * @param htmlString a HTML string to remove tags from
     * @return text extracted from HTML string without tags and unnecessary whitespaces
     */
    public static String cleanseTextFromHtmlTags(String htmlString) {
        // Drop literal <br> tags up front so they don't survive as blank output lines.
        htmlString = htmlString.replaceAll("<br>", "");
        Document doc = Jsoup.parse(htmlString);
        doc.outputSettings(new Document.OutputSettings().prettyPrint(false));
        // Mark paragraph starts with a placeholder, then turn it into a real newline
        // after serialization so paragraph breaks survive the tag-stripping below.
        doc.select("p").prepend("\\n");
        String s = doc.html().replaceAll("\\\\n", "\n");
        return Jsoup.clean(s, "", Whitelist.none(), new Document.OutputSettings().prettyPrint(false))
                .replaceAll("^\\s*|\\s*$", "")
                .replaceAll("&nbsp;", "");
    }

    /**
     * Converts Cookies to a single string value, so it can be used to send with web requests.
     * Requires htmlunit package.
     *
     * <p>Pairs are joined with {@code "; "} as mandated by the Cookie request-header syntax;
     * an empty cookie set yields an empty string.
     *
     * @param cookies a set of cookies to convert to string (must not be null)
     * @return a string of chained cookie name and values with semicolons separating them
     */
    public static String cookiesAsRequestHeader(Set<Cookie> cookies) {
        StringBuilder sb = new StringBuilder();
        for (Cookie c : cookies) {
            // Separator goes before every pair except the first: no trailing ";" and
            // no StringIndexOutOfBoundsException on an empty set (both bugs in the
            // previous setLength(length - 1) approach).
            if (sb.length() > 0) {
                sb.append("; ");
            }
            sb.append(c.getName()).append('=').append(c.getValue());
        }
        return sb.toString();
    }
}
|
/*
* Copyright 2000-2019 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.merge;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.util.concurrency.annotations.RequiresEdt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Base class for a merge operation handled by the diff/merge subsystem.
 * Delegates all {@link UserDataHolder} operations to an internal {@link UserDataHolderBase}.
 *
 * @see com.intellij.diff.DiffRequestFactory
 */
public abstract class MergeRequest implements UserDataHolder {

    // Backing store for the UserDataHolder contract; getUserData/putUserData delegate here.
    protected final UserDataHolderBase myUserDataHolder = new UserDataHolderBase();

    /**
     * @return title for the merge dialog, or null if none.
     */
    @NlsContexts.DialogTitle
    @Nullable
    public abstract String getTitle();

    /**
     * Called on conflict resolve end. Should be called exactly once for each request that was shown.
     * <p>
     * MergeRequest should keep the initial state of its content and restore it on {@link MergeResult#CANCEL}
     */
    @RequiresEdt
    public abstract void applyResult(@NotNull MergeResult result);

    /**
     * Called when merge request life cycle is retargeted to another one.
     * <p>
     * In this case, {@link #applyResult} will never be called for this request.
     * The caller should appropriately transfer {@link MergeCallback}.
     */
    public void resultRetargeted() {
    }

    // No-op hook; subclasses may override to react to the request being shown/hidden.
    @RequiresEdt
    public void onAssigned(boolean assigned) {
    }

    @Nullable
    @Override
    public <T> T getUserData(@NotNull Key<T> key) {
        return myUserDataHolder.getUserData(key);
    }

    @Override
    public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) {
        myUserDataHolder.putUserData(key, value);
    }
}
|
/*
* Copyright 2018 Genentech Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gene.bioinfo.ms.gp2s.repository;
import com.gene.bioinfo.ms.gp2s.domain.ModelLink;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import javax.transaction.Transactional;
import java.util.List;
@Repository
@Transactional
public interface ModelLinkRepository extends JpaRepository<ModelLink, Long> {

    /**
     * Returns every link in which the given model participates, as either
     * parent or child, ordered by link id ascending.
     * <p>
     * NOTE(review): the parameter is named {@code mapId} but is bound (via {@code ?1})
     * to model ids on both sides of the join — presumably a copy-paste from a
     * map-oriented repository; confirm before renaming.
     */
    @Query(value = "select modelLink from ModelLink modelLink where modelLink.parentModel.id = ?1 or modelLink.childModel.id = ?1 "
            + "order by modelLink.id asc")
    List<ModelLink> getAllRelationsForModel(final Long mapId);

    // Derived delete queries: remove all links where the model is the parent / the child.
    void deleteByParentModel_Id(final Long modelId);

    void deleteByChildModel_Id(final Long modelId);
}
|
package bot.dish;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Timer;
import bot.main.AnnouncerBot;
import bot.main.MessageSender;
import bot.user.UserManager;
/**
 *
 * Manages the timer tasks for all dishwasher
 *
 * @author schieljn
 */
public class DishTimer {

    // Number of dishwashers managed in parallel; indexes into both arrays below.
    private static final int WASHER_COUNT = 8;

    // Per-washer countdown timers; a slot is null until its washer is first started.
    private Timer[] timer;
    // Per-washer state: 0 = idle, 1 = running, 2 = finished (see setWasherStateReady).
    private int[] washer;
    private final MessageSender sender;
    private final UserManager users;
    // Minutes until a load is considered done; read from the bot's configuration.
    private final int WASHING_TIME;
    // Minutes between repeated "please empty me" reminders.
    private static final int REMEMBER_TIME = 60;

    public DishTimer(MessageSender sender, UserManager users) {
        washer = new int[WASHER_COUNT];
        this.sender = sender;
        this.users = users;
        this.timer = new Timer[WASHER_COUNT];
        WASHING_TIME = Integer.parseInt(AnnouncerBot.CONFIG[0]);
    }

    /**
     *
     * Sets the state of a specific washer to finished
     *
     * @param washer The index of the washer
     */
    public void setWasherStateReady(int washer) {
        this.washer[washer] = 2;
    }

    /**
     *
     * Turns the timer of a dishwasher on if the timer is stopped, else turns it off
     *
     * @param dishwasher The name of the dishwasher
     * @param number The index of the dishwasher
     * @return "Geschirrreinigungsapparat wurde geleert" if timer was stopped and
     *         "Geschirrreinigungsapparat wurde beladen" if timer was started
     */
    public String toggleTimer(String dishwasher, int number) {
        String output;
        if (washer[number] != 0) {
            // Guard against a null slot: setWasherStateReady can move a washer to a
            // non-zero state without a timer ever having been scheduled for it.
            if (timer[number] != null) {
                timer[number].cancel();
                timer[number].purge();
            }
            output = "Geschirrreinigungsapparat wurde geleert";
            washer[number] = 0;
        } else {
            output = "Geschirrreinigungsapparat wurde beladen";
            // Daemon timer so a pending task never keeps the JVM alive on shutdown.
            timer[number] = new Timer(dishwasher, true);
            // Task id is the string "2" + index (the display's "finished" state code).
            timer[number].schedule(new DishTimerTask(dishwasher, sender, users, 2 + "" + number, number, this),
                    1000 * 60 * WASHING_TIME, 1000 * 60 * REMEMBER_TIME);
            washer[number] = 1;
        }
        sendDishWaserRequest("changeWasherState=" + washer[number] + "" + number);
        return output;
    }

    /**
     *
     * Sends a state of a specific dishwasher to the dishwasher display
     *
     * @param request Specifies the state and the dishwasher.
     *                Syntax: [state][index]
     *                Replace [state] with 0 for ready, 1 for active and 2 for finished
     *                Replace [index] with the index of the dishwasher
     *
     */
    public static void sendDishWaserRequest(String request) {
        String url = "http://10.53.101.14?" + request;
        HttpURLConnection con = null;
        try {
            URL myurl = new URL(url);
            con = (HttpURLConnection) myurl.openConnection();
            con.setDoOutput(true);
            con.setRequestMethod("POST");
            con.setRequestProperty("User-Agent", "Java client");
            con.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
            StringBuilder content;
            content = new StringBuilder();
            try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
                String line;
                while ((line = in.readLine()) != null) {
                    content.append(line);
                    content.append(System.lineSeparator());
                }
            } catch (IOException e) {
                System.out.println("Connection timed out");
            }
            // System.out.println(content.toString());
        } catch (IOException e1) {
            e1.printStackTrace();
        } finally {
            // con stays null when URL construction or openConnection() fails;
            // the unconditional disconnect() here used to throw an NPE in that case.
            if (con != null) {
                con.disconnect();
            }
        }
    }
}
|
package com.github.netty.protocol.nrpc.exception;
/**
 * RpcResponseException — thrown when an RPC response carries an error status.
 *
 * Constructed with suppression disabled; stack-trace capture is optional via
 * the three-argument constructor (cheap to throw on hot paths when disabled).
 *
 * @author wangzihao
 *         2018/8/21/021
 */
public class RpcResponseException extends RpcException {
    /**
     * Error status code
     */
    // final: the status is set once at construction and never mutated.
    private final Integer status;

    /**
     * Creates an exception without a writable stack trace.
     *
     * @param status  error status code reported by the peer
     * @param message human-readable error description
     */
    public RpcResponseException(Integer status, String message) {
        super(message, null, false, false);
        this.status = status;
    }

    /**
     * Creates an exception, optionally capturing the stack trace.
     *
     * @param status             error status code reported by the peer
     * @param message            human-readable error description
     * @param writableStackTrace whether to capture and allow writing the stack trace
     */
    public RpcResponseException(Integer status, String message, boolean writableStackTrace) {
        super(message, null, false, writableStackTrace);
        this.status = status;
    }

    /**
     * @return the error status code carried by this exception
     */
    public Integer getStatus() {
        return status;
    }
}
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.mssql.model;
import org.eclipse.core.runtime.IAdaptable;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ModelPreferences;
import org.jkiss.dbeaver.ext.mssql.SQLServerConstants;
import org.jkiss.dbeaver.ext.mssql.SQLServerUtils;
import org.jkiss.dbeaver.ext.mssql.model.session.SQLServerSessionManager;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.admin.sessions.DBAServerSessionManager;
import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration;
import org.jkiss.dbeaver.model.exec.*;
import org.jkiss.dbeaver.model.exec.jdbc.*;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectCache;
import org.jkiss.dbeaver.model.meta.Association;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.utils.BeanUtils;
import org.jkiss.utils.CommonUtils;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
/**
 * SQL Server / Azure data source. Maintains caches of the server's databases
 * and system data types, and tracks the active (default) database.
 */
public class SQLServerDataSource extends JDBCDataSource implements DBSObjectSelector, DBSInstanceContainer, /*DBCQueryPlanner, */IAdaptable {

    private static final Log log = Log.getLog(SQLServerDataSource.class);

    // Delegate data type reading to the driver
    private final SystemDataTypeCache dataTypeCache = new SystemDataTypeCache();
    private final DatabaseCache databaseCache = new DatabaseCache();

    private String activeDatabaseName;
    private boolean supportsColumnProperty;
    // Raw result of "SELECT @@VERSION"; empty string when it could not be read.
    private String serverVersion;

    public SQLServerDataSource(DBRProgressMonitor monitor, DBPDataSourceContainer container)
        throws DBException
    {
        super(monitor, container, new SQLServerDialect());
    }

    public boolean supportsColumnProperty() {
        return supportsColumnProperty;
    }

    @Override
    protected DBPDataSourceInfo createDataSourceInfo(DBRProgressMonitor monitor, @NotNull JDBCDatabaseMetaData metaData)
    {
        SQLServerDataSourceInfo info = new SQLServerDataSourceInfo(this, metaData);
        if (isDataWarehouseServer(monitor)) {
            // Result set scrolling is disabled for SQL Data Warehouse servers.
            info.setSupportsResultSetScroll(false);
        }
        return info;
    }

    boolean isDataWarehouseServer(DBRProgressMonitor monitor) {
        return getServerVersion(monitor).contains(SQLServerConstants.SQL_DW_SERVER_LABEL);
    }

    public String getServerVersion() {
        return serverVersion;
    }

    /**
     * Lazily reads and caches the server version string.
     * Returns an empty string (never null) when the query fails.
     */
    String getServerVersion(DBRProgressMonitor monitor) {
        if (serverVersion == null) {
            try (JDBCSession session = DBUtils.openMetaSession(monitor, this, "Read server version")) {
                serverVersion = JDBCUtils.queryString(session, "SELECT @@VERSION");
            } catch (Exception e) {
                log.debug("Error reading SQL Server version: " + e.getMessage());
                serverVersion = "";
            }
        }
        return serverVersion;
    }

    @NotNull
    @Override
    public DBPDataSource getDataSource() {
        return this;
    }

    @Override
    protected Properties getAllConnectionProperties(@NotNull DBRProgressMonitor monitor, JDBCExecutionContext context, String purpose, DBPConnectionConfiguration connectionInfo) throws DBCException {
        Properties properties = super.getAllConnectionProperties(monitor, context, purpose, connectionInfo);
        if (!getContainer().getPreferenceStore().getBoolean(ModelPreferences.META_CLIENT_NAME_DISABLE)) {
            // App name: jTDS and the Microsoft driver use different property names.
            properties.put(
                SQLServerUtils.isDriverJtds(getContainer().getDriver()) ? SQLServerConstants.APPNAME_CLIENT_PROPERTY : SQLServerConstants.APPLICATION_NAME_CLIENT_PROPERTY,
                CommonUtils.truncateString(DBUtils.getClientApplicationName(getContainer(), context, purpose), 64));
        }
        fillConnectionProperties(connectionInfo, properties);
        // Apply authentication-scheme-specific properties (e.g. Windows auth).
        SQLServerAuthentication authSchema = SQLServerUtils.detectAuthSchema(connectionInfo);
        authSchema.getInitializer().initializeAuthentication(connectionInfo, properties);
        return properties;
    }

    @Override
    protected void initializeContextState(@NotNull DBRProgressMonitor monitor, @NotNull JDBCExecutionContext context, boolean setActiveObject) throws DBCException {
        super.initializeContextState(monitor, context, setActiveObject);
        if (setActiveObject) {
            SQLServerDatabase defaultObject = getDefaultObject();
            // Database switching is skipped on Data Warehouse servers —
            // presumably unsupported there; TODO confirm.
            if (defaultObject != null && !isDataWarehouseServer(monitor)) {
                setCurrentDatabase(monitor, context, defaultObject);
            }
        }
    }

    @Override
    public Object getDataSourceFeature(String featureId) {
        switch (featureId) {
            case DBConstants.FEATURE_LIMIT_AFFECTS_DML:
                return true;
            case DBConstants.FEATURE_MAX_STRING_LENGTH:
                return 8000;
        }
        return super.getDataSourceFeature(featureId);
    }

    @Override
    public void initialize(@NotNull DBRProgressMonitor monitor) throws DBException {
        super.initialize(monitor);
        this.dataTypeCache.getAllObjects(monitor, this);
        try (JDBCSession session = DBUtils.openMetaSession(monitor, this, "Load data source meta info")) {
            this.activeDatabaseName = SQLServerUtils.getCurrentDatabase(session);
            // Probe COLUMNPROPERTY support with a dummy call; remember the result.
            try {
                JDBCUtils.queryString(session, "SELECT COLUMNPROPERTY(0, NULL, NULL)");
                this.supportsColumnProperty = true;
            } catch (Exception e) {
                this.supportsColumnProperty = false;
            }
        } catch (Throwable e) {
            log.error("Error during connection initialization", e);
        }
    }

    //////////////////////////////////////////////////////////////////
    // Data types

    @NotNull
    @Override
    public DBPDataKind resolveDataKind(@NotNull String typeName, int valueType) {
        // BUG FIX: getLocalDataType(int) may return null for an unknown type id
        // (see below), which previously caused an NPE here. Fall back to the
        // generic resolver in that case.
        SQLServerDataType dataType = getLocalDataType(valueType);
        return dataType == null ? super.resolveDataKind(typeName, valueType) : dataType.getDataKind();
    }

    @Override
    public List<SQLServerDataType> getLocalDataTypes() {
        return dataTypeCache.getCachedObjects();
    }

    /**
     * Looks up a system data type by its object id. If the id is unknown a
     * placeholder OBJECT-kind type is synthesized and cached so subsequent
     * lookups succeed.
     */
    SQLServerDataType getSystemDataType(int systemTypeId) {
        for (SQLServerDataType dt : dataTypeCache.getCachedObjects()) {
            if (dt.getObjectId() == systemTypeId) {
                return dt;
            }
        }
        log.debug("System data type " + systemTypeId + " not found");
        SQLServerDataType sdt = new SQLServerDataType(this, String.valueOf(systemTypeId), systemTypeId, DBPDataKind.OBJECT, java.sql.Types.OTHER);
        dataTypeCache.cacheObject(sdt);
        return sdt;
    }

    @Override
    public SQLServerDataType getLocalDataType(String typeName) {
        return dataTypeCache.getCachedObject(typeName);
    }

    @Override
    public SQLServerDataType getLocalDataType(int typeID) {
        // May return null — callers must handle a missing type id.
        DBSDataType dt = super.getLocalDataType(typeID);
        if (dt == null) {
            log.debug("System data type " + typeID + " not found");
        }
        return (SQLServerDataType) dt;
    }

    @Override
    public String getDefaultDataTypeName(@NotNull DBPDataKind dataKind) {
        switch (dataKind) {
            case BOOLEAN: return "bit";
            case NUMERIC: return "int";
            case STRING: return "varchar";
            case DATETIME: return SQLServerConstants.TYPE_DATETIME;
            case BINARY: return "binary";
            case CONTENT: return "varbinary";
            case ROWID: return "uniqueidentifier";
            default:
                return super.getDefaultDataTypeName(dataKind);
        }
    }

    //////////////////////////////////////////////////////////
    // Databases

    protected boolean isShowAllSchemas() {
        return CommonUtils.toBoolean(getContainer().getConnectionConfiguration().getProviderProperty(SQLServerConstants.PROP_SHOW_ALL_SCHEMAS));
    }

    //////////////////////////////////////////////////////////
    // Windows authentication

    @Override
    protected String getConnectionUserName(@NotNull DBPConnectionConfiguration connectionInfo) {
        // With Windows (integrated) authentication credentials come from the OS.
        if (SQLServerUtils.isWindowsAuth(connectionInfo)) {
            return "";
        } else {
            return super.getConnectionUserName(connectionInfo);
        }
    }

    @Override
    protected String getConnectionUserPassword(@NotNull DBPConnectionConfiguration connectionInfo) {
        if (SQLServerUtils.isWindowsAuth(connectionInfo)) {
            return "";
        } else {
            return super.getConnectionUserPassword(connectionInfo);
        }
    }

    //////////////////////////////////////////////////////////////
    // Databases

    @Override
    public boolean supportsDefaultChange() {
        return true;
    }

    @Nullable
    @Override
    public SQLServerDatabase getDefaultObject() {
        return activeDatabaseName == null ? null : databaseCache.getCachedObject(activeDatabaseName);
    }

    /**
     * Switches the active database on every open execution context and fires
     * selection-change notifications for the old and new defaults.
     */
    @Override
    public void setDefaultObject(@NotNull DBRProgressMonitor monitor, @NotNull DBSObject object)
        throws DBException
    {
        final SQLServerDatabase oldSelectedEntity = getDefaultObject();
        if (!(object instanceof SQLServerDatabase)) {
            throw new IllegalArgumentException("Invalid object type: " + object);
        }
        for (JDBCExecutionContext context : getDefaultInstance().getAllContexts()) {
            if (!setCurrentDatabase(monitor, context, (SQLServerDatabase) object)) {
                // Leave state unchanged if any context fails to switch.
                return;
            }
        }
        activeDatabaseName = object.getName();

        // Send notifications
        if (oldSelectedEntity != null) {
            DBUtils.fireObjectSelect(oldSelectedEntity, false);
        }
        if (this.activeDatabaseName != null) {
            DBUtils.fireObjectSelect(object, true);
        }
    }

    @Override
    public boolean refreshDefaultObject(@NotNull DBCSession session) throws DBException {
        try {
            // Re-read the server-side current database and sync our notion of it.
            final String currentSchema = SQLServerUtils.getCurrentDatabase((JDBCSession) session);
            if (currentSchema != null && !CommonUtils.equalObjects(currentSchema, activeDatabaseName)) {
                final SQLServerDatabase newDatabase = databaseCache.getCachedObject(currentSchema);
                if (newDatabase != null) {
                    setDefaultObject(session.getProgressMonitor(), newDatabase);
                    return true;
                }
            }
            return false;
        } catch (SQLException e) {
            throw new DBException(e, this);
        }
    }

    /**
     * Issues the database switch on the given context.
     *
     * @return true on success, false when the object is null or the switch failed
     */
    private boolean setCurrentDatabase(DBRProgressMonitor monitor, JDBCExecutionContext executionContext, SQLServerDatabase object) throws DBCException {
        if (object == null) {
            log.debug("Null current schema");
            return false;
        }
        try (JDBCSession session = executionContext.openSession(monitor, DBCExecutionPurpose.UTIL, "Set active database")) {
            SQLServerUtils.setCurrentDatabase(session, object.getName());
            return true;
        } catch (SQLException e) {
            log.error(e);
            return false;
        }
    }

    @Association
    public Collection<SQLServerDatabase> getDatabases(DBRProgressMonitor monitor) throws DBException {
        return databaseCache.getAllObjects(monitor, this);
    }

    @Override
    public Collection<? extends DBSObject> getChildren(@NotNull DBRProgressMonitor monitor) throws DBException {
        return databaseCache.getAllObjects(monitor, this);
    }

    @Override
    public DBSObject getChild(@NotNull DBRProgressMonitor monitor, @NotNull String childName) throws DBException {
        return databaseCache.getObject(monitor, this, childName);
    }

    @Override
    public Class<? extends DBSObject> getChildType(@NotNull DBRProgressMonitor monitor) throws DBException {
        return SQLServerDatabase.class;
    }

    @Override
    public void cacheStructure(@NotNull DBRProgressMonitor monitor, int scope) throws DBException {
        databaseCache.getAllObjects(monitor, this);
    }

    @Override
    public DBSObject refreshObject(@NotNull DBRProgressMonitor monitor) throws DBException {
        databaseCache.clearCache();
        return super.refreshObject(monitor);
    }

    @Override
    public DBCQueryTransformer createQueryTransformer(DBCQueryTransformType type) {
        if (type == DBCQueryTransformType.RESULT_SET_LIMIT) {
            // SQL Server uses "SELECT TOP n" for result set limiting.
            //if (!SQLServerUtils.isDriverAzure(getContainer().getDriver())) {
                return new QueryTransformerTop();
            //}
        }
        return super.createQueryTransformer(type);
    }

    @Override
    public <T> T getAdapter(Class<T> adapter) {
        if (adapter == DBSStructureAssistant.class) {
            return adapter.cast(new SQLServerStructureAssistant(this));
        } else if (adapter == DBAServerSessionManager.class) {
            return adapter.cast(new SQLServerSessionManager(this));
        }
        return super.getAdapter(adapter);
    }

    /**
     * Extracts the failing line number from the driver's SQLServerError (via
     * reflection, so the driver class is not a compile-time dependency).
     */
    @Override
    public ErrorPosition[] getErrorPosition(DBRProgressMonitor monitor, DBCExecutionContext context, String query, Throwable error) {
        Throwable rootCause = GeneralUtils.getRootCause(error);
        if (rootCause != null && SQLServerConstants.SQL_SERVER_EXCEPTION_CLASS_NAME.equals(rootCause.getClass().getName())) {
            // Read line number from SQLServerError class
            try {
                Object serverError = rootCause.getClass().getMethod("getSQLServerError").invoke(rootCause);
                if (serverError != null) {
                    Object serverErrorLine = BeanUtils.readObjectProperty(serverError, "lineNumber");
                    if (serverErrorLine instanceof Number) {
                        ErrorPosition pos = new ErrorPosition();
                        // Driver reports 1-based lines; ErrorPosition is 0-based.
                        pos.line = ((Number) serverErrorLine).intValue() - 1;
                        return new ErrorPosition[] {pos};
                    }
                }
            } catch (Throwable e) {
                // ignore — reflection failure just means no position info
            }
        }
        return super.getErrorPosition(monitor, context, query, error);
    }

    /** Cache of the server's databases (sys.databases), name-ordered. */
    static class DatabaseCache extends JDBCObjectCache<SQLServerDataSource, SQLServerDatabase> {
        DatabaseCache() {
            setListOrderComparator(DBUtils.nameComparator());
        }

        @NotNull
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull SQLServerDataSource owner) throws SQLException {
            StringBuilder sql = new StringBuilder("SELECT db.* FROM sys.databases db");
            DBSObjectFilter databaseFilters = owner.getContainer().getObjectFilter(SQLServerDatabase.class, null, false);
            if (databaseFilters != null && databaseFilters.isEnabled()) {
                JDBCUtils.appendFilterClause(sql, databaseFilters, "name", true);
            }
            sql.append("\nORDER BY db.name");
            JDBCPreparedStatement dbStat = session.prepareStatement(sql.toString());
            if (databaseFilters != null) {
                // NOTE(review): parameters are bound even when the filter is
                // disabled, while the WHERE clause above is only appended when it
                // is enabled — verify setFilterParameters is a no-op in that case.
                JDBCUtils.setFilterParameters(dbStat, 1, databaseFilters);
            }
            return dbStat;
        }

        @Override
        protected SQLServerDatabase fetchObject(@NotNull JDBCSession session, @NotNull SQLServerDataSource owner, @NotNull JDBCResultSet resultSet) throws SQLException, DBException {
            return new SQLServerDatabase(owner, resultSet);
        }
    }

    /** Cache of built-in (non-user-defined) data types from sys.types. */
    private class SystemDataTypeCache extends JDBCObjectCache<SQLServerDataSource, SQLServerDataType> {
        @NotNull
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull SQLServerDataSource sqlServerDataSource) throws SQLException {
            return session.prepareStatement("SELECT * FROM sys.types WHERE is_user_defined = 0 order by name");
        }

        @Override
        protected SQLServerDataType fetchObject(@NotNull JDBCSession session, @NotNull SQLServerDataSource dataSource, @NotNull JDBCResultSet resultSet) throws SQLException, DBException {
            return new SQLServerDataType(dataSource, resultSet);
        }
    }
}
|
package com.xevgnov.exception;
/**
 * Thrown when the contents of a sudoku input file do not match the expected
 * file format.
 */
public class SudokuFileFormatViolationException extends SudokuValidationException {

    // Exceptions are Serializable; declare an explicit version id so
    // serialization compatibility is not tied to the compiler-generated one.
    private static final long serialVersionUID = 1L;

    /**
     * @param message description of the format violation
     */
    public SudokuFileFormatViolationException(String message) {
        super(message);
    }

    /**
     * @param message description of the format violation
     * @param cause   underlying cause (e.g. an I/O or parse error)
     */
    public SudokuFileFormatViolationException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package libcore.dalvik.system;
import junit.framework.TestCase;
/**
 * Test JNI behavior: verifies that arguments of every primitive type (and
 * object references) survive the Java-to-native round trip intact for each of
 * 16 argument positions, and that selected JNIEnv functions behave as
 * specified. The native counterparts live in libjavacoretests.
 */
public final class JniTest extends TestCase {
    static {
        // Loads the native library providing the implementations of the
        // native methods declared below.
        System.loadLibrary("javacoretests");
    }

    /** @return this argument of method */
    private native JniTest returnThis();

    /** @return class argument of method */
    private static native Class<JniTest> returnClass();

    /** @return the {@code arg_no}'th (0-based) of the 16 Object arguments */
    private native Object returnObjectArgFrom16(int arg_no,
                                                Object o1, Object o2, Object o3, Object o4, Object o5,
                                                Object o6, Object o7, Object o8, Object o9, Object o10,
                                                Object o11, Object o12, Object o13, Object o14, Object o15,
                                                Object o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 boolean arguments */
    private native boolean returnBooleanArgFrom16(int arg_no,
                                                  boolean o1, boolean o2, boolean o3, boolean o4, boolean o5,
                                                  boolean o6, boolean o7, boolean o8, boolean o9, boolean o10,
                                                  boolean o11, boolean o12, boolean o13, boolean o14, boolean o15,
                                                  boolean o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 char arguments */
    private native char returnCharArgFrom16(int arg_no,
                                            char o1, char o2, char o3, char o4, char o5,
                                            char o6, char o7, char o8, char o9, char o10,
                                            char o11, char o12, char o13, char o14, char o15,
                                            char o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 byte arguments */
    private native byte returnByteArgFrom16(int arg_no,
                                            byte o1, byte o2, byte o3, byte o4, byte o5,
                                            byte o6, byte o7, byte o8, byte o9, byte o10,
                                            byte o11, byte o12, byte o13, byte o14, byte o15,
                                            byte o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 short arguments */
    private native short returnShortArgFrom16(int arg_no,
                                              short o1, short o2, short o3, short o4, short o5,
                                              short o6, short o7, short o8, short o9, short o10,
                                              short o11, short o12, short o13, short o14, short o15,
                                              short o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 int arguments */
    private native int returnIntArgFrom16(int arg_no,
                                          int o1, int o2, int o3, int o4, int o5,
                                          int o6, int o7, int o8, int o9, int o10,
                                          int o11, int o12, int o13, int o14, int o15,
                                          int o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 long arguments */
    private native long returnLongArgFrom16(int arg_no,
                                            long o1, long o2, long o3, long o4, long o5,
                                            long o6, long o7, long o8, long o9, long o10,
                                            long o11, long o12, long o13, long o14, long o15,
                                            long o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 float arguments */
    private native float returnFloatArgFrom16(int arg_no,
                                              float o1, float o2, float o3, float o4, float o5,
                                              float o6, float o7, float o8, float o9, float o10,
                                              float o11, float o12, float o13, float o14, float o15,
                                              float o16);
    /** @return the {@code arg_no}'th (0-based) of the 16 double arguments */
    private native double returnDoubleArgFrom16(int arg_no,
                                                double o1, double o2, double o3, double o4, double o5,
                                                double o6, double o7, double o8, double o9, double o10,
                                                double o11, double o12, double o13, double o14, double o15,
                                                double o16);

    /** Test cases for implicit this argument */
    public void testPassingThis() {
        assertEquals(this, returnThis());
    }

    /** Test cases for implicit class argument */
    public void testPassingClass() {
        assertEquals(JniTest.class, returnClass());
    }

    /** Test passing object references as arguments to a native method */
    public void testPassingObjectReferences() {
        final Object[] literals = {"Bradshaw", "Isherwood", "Oldknow", "Mallet",
                                   JniTest.class, null, Integer.valueOf(0)};
        final Object[] a = new Object[16];
        // test selection from a list of object literals where the literals are all the same
        for(Object literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnObjectArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                         a[5], a[6], a[7], a[8], a[9], a[10],
                                                         a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnObjectArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                         a[5], a[6], a[7], a[8], a[9], a[10],
                                                         a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing booleans as arguments to a native method */
    public void testPassingBooleans() {
        final boolean[] literals = {true, false, false, true};
        final boolean[] a = new boolean[16];
        // test selection from a list of object literals where the literals are all the same
        for(boolean literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnBooleanArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                          a[5], a[6], a[7], a[8], a[9], a[10],
                                                          a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnBooleanArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                          a[5], a[6], a[7], a[8], a[9], a[10],
                                                          a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing characters as arguments to a native method */
    public void testPassingChars() {
        final char[] literals = {Character.MAX_VALUE, Character.MIN_VALUE,
                                 Character.MAX_HIGH_SURROGATE, Character.MAX_LOW_SURROGATE,
                                 Character.MIN_HIGH_SURROGATE, Character.MIN_LOW_SURROGATE,
                                 'a', 'z', 'A', 'Z', '0', '9'};
        final char[] a = new char[16];
        // test selection from a list of object literals where the literals are all the same
        for(char literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnCharArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                       a[5], a[6], a[7], a[8], a[9], a[10],
                                                       a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnCharArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                       a[5], a[6], a[7], a[8], a[9], a[10],
                                                       a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing bytes as arguments to a native method */
    public void testPassingBytes() {
        final byte[] literals = {Byte.MAX_VALUE, Byte.MIN_VALUE, 0, -1};
        final byte[] a = new byte[16];
        // test selection from a list of object literals where the literals are all the same
        for(byte literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnByteArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                       a[5], a[6], a[7], a[8], a[9], a[10],
                                                       a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnByteArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                       a[5], a[6], a[7], a[8], a[9], a[10],
                                                       a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing shorts as arguments to a native method */
    public void testPassingShorts() {
        final short[] literals = {Byte.MAX_VALUE, Byte.MIN_VALUE, Short.MAX_VALUE, Short.MIN_VALUE, 0, -1};
        final short[] a = new short[16];
        // test selection from a list of object literals where the literals are all the same
        for(short literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnShortArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                        a[5], a[6], a[7], a[8], a[9], a[10],
                                                        a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnShortArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                        a[5], a[6], a[7], a[8], a[9], a[10],
                                                        a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing ints as arguments to a native method */
    public void testPassingInts() {
        final int[] literals = {Byte.MAX_VALUE, Byte.MIN_VALUE, Short.MAX_VALUE, Short.MIN_VALUE,
                                Integer.MAX_VALUE, Integer.MIN_VALUE, 0, -1};
        final int[] a = new int[16];
        // test selection from a list of object literals where the literals are all the same
        for(int literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnIntArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                      a[5], a[6], a[7], a[8], a[9], a[10],
                                                      a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnIntArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                      a[5], a[6], a[7], a[8], a[9], a[10],
                                                      a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing longs as arguments to a native method */
    public void testPassingLongs() {
        final long[] literals = {Byte.MAX_VALUE, Byte.MIN_VALUE, Short.MAX_VALUE, Short.MIN_VALUE,
                                 Integer.MAX_VALUE, Integer.MIN_VALUE, Long.MAX_VALUE, Long.MIN_VALUE, 0, -1};
        final long[] a = new long[16];
        // test selection from a list of object literals where the literals are all the same
        for(long literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnLongArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                       a[5], a[6], a[7], a[8], a[9], a[10],
                                                       a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnLongArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                       a[5], a[6], a[7], a[8], a[9], a[10],
                                                       a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing floats as arguments to a native method */
    public void testPassingFloats() {
        final float[] literals = {Byte.MAX_VALUE, Byte.MIN_VALUE, Short.MAX_VALUE, Short.MIN_VALUE,
                                  Integer.MAX_VALUE, Integer.MIN_VALUE, Long.MAX_VALUE, Long.MIN_VALUE,
                                  Float.MAX_VALUE, Float.MIN_VALUE, Float.MIN_NORMAL, Float.NaN,
                                  Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, (float)Math.E, (float)Math.PI, 0, -1};
        final float[] a = new float[16];
        // test selection from a list of object literals where the literals are all the same
        for(float literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnFloatArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                        a[5], a[6], a[7], a[8], a[9], a[10],
                                                        a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnFloatArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                        a[5], a[6], a[7], a[8], a[9], a[10],
                                                        a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** Test passing doubles as arguments to a native method */
    public void testPassingDoubles() {
        final double[] literals = {Byte.MAX_VALUE, Byte.MIN_VALUE, Short.MAX_VALUE, Short.MIN_VALUE,
                                   Integer.MAX_VALUE, Integer.MIN_VALUE, Long.MAX_VALUE, Long.MIN_VALUE,
                                   Float.MAX_VALUE, Float.MIN_VALUE, Float.MIN_NORMAL, Float.NaN,
                                   Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY,
                                   Double.MAX_VALUE, Double.MIN_VALUE, Double.MIN_NORMAL, Double.NaN,
                                   Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY,
                                   Math.E, Math.PI, 0, -1};
        final double[] a = new double[16];
        // test selection from a list of object literals where the literals are all the same
        for(double literal : literals) {
            for(int i = 0; i < 16; i++) {
                a[i] = literal;
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnDoubleArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                         a[5], a[6], a[7], a[8], a[9], a[10],
                                                         a[11], a[12], a[13], a[14], a[15]));
            }
        }
        // test selection from a list of object literals where the literals are shuffled
        for(int j = 0; j < literals.length; j++) {
            for(int i = 0; i < 16; i++) {
                a[i] = literals[(i + j) % literals.length];
            }
            for(int i = 0; i < 16; i++) {
                assertEquals(a[i], returnDoubleArgFrom16(i, a[0], a[1], a[2], a[3], a[4],
                                                         a[5], a[6], a[7], a[8], a[9], a[10],
                                                         a[11], a[12], a[13], a[14], a[15]));
            }
        }
    }

    /** @return result of the JNIEnv GetSuperclass function for {@code clazz} */
    private static native Class<?> envGetSuperclass(Class<?> clazz);

    /**
     * Per the JNI spec, GetSuperclass returns null for Object, for primitive
     * classes, and for interfaces.
     */
    public void testGetSuperclass() {
        assertEquals(Object.class, envGetSuperclass(String.class));
        assertEquals(null, envGetSuperclass(Object.class));
        assertEquals(null, envGetSuperclass(int.class));
        assertEquals(null, envGetSuperclass(Runnable.class));
    }
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.oastem.frc.ascent.shooting;
import org.oastem.frc.control.DriveSystem;
/**
*
* @author STEM
*/
public class GoalDriver implements Runnable {

    // Most recent and previous vision readings (steering angle, apparent width).
    private double angle = 0.0;
    private double width = 0.0;
    private double lastAngle = 0.0;
    private double lastWidth = 0.0;

    // Current and previous motor outputs.
    private double left = 0.0;
    private double right = 0.0;
    private double lastLeft = 0.0;
    private double lastRight = 0.0;

    private long lastUpdate = System.currentTimeMillis();

    private final double DRIVE_SPEED = 0.33; // base driving speed
    private final int DRIVE_DELAY = 50; // ms between drive-loop iterations
    private final double GOAL_DISTANCE = 100; // target apparent width — units unconfirmed, TODO verify
    private final double ANGLE_DRIVE_RATIO = 0.33; // steering gain applied to the angle reading
    private final double ZONE = 0.2; // for angle
    private final double GOAL_THRESHOLD = 5; // distance threshold
    private final double SPEED_THRESHOLD = 0.15; // threshold to average out motor speeds

    // BUG FIX: these flags are written by other threads (the vision updater
    // via update()/stop()) and read by the run() loop, so they must be
    // volatile to guarantee visibility.
    private volatile boolean canShoot = false;
    private volatile boolean stop = false;

    public GoalDriver() {
    }

    public GoalDriver(double angle, double width) {
        update(angle, width);
    }

    /**
     * Records a new vision reading, keeping the previous one for averaging.
     */
    public void update(double angle, double width) {
        lastAngle = this.angle;
        lastWidth = this.width;
        this.angle = angle;
        this.width = width;
        lastUpdate = System.currentTimeMillis();
    }

    /** @return true when both motor outputs are inside the dead zone. */
    public boolean canShoot() {
        return canShoot;
    }

    /** Asks the drive loop to terminate after its current iteration. */
    public void stop() {
        stop = true;
    }

    public void run() {
        DriveSystem drive = DriveSystem.getInstance();
        while (true) {
            if (stop) {
                break;
            } else if (System.currentTimeMillis() - lastUpdate > 5000) {
                // Vision data is stale; halt rather than drive blind.
                long time = (System.currentTimeMillis() - lastUpdate)/1000;
                System.out.println("GoalDriver: No updates received for " + time + " seconds!");
                drive.tankDrive(0.0, 0.0);
            } else {
                // Smooth sensor noise by averaging the last two readings.
                double avgWidth = (lastWidth + width)/2;
                double avgAngle = (lastAngle + angle)/2;
                System.out.println("Angler PID Output: " + avgAngle);
                lastLeft = left;
                lastRight = right;
                left = 0.0;
                right = 0.0;
                double delta = avgWidth - GOAL_DISTANCE;
                System.out.println("Driver PID width: " + avgWidth);
                if (delta > GOAL_THRESHOLD) {
                    // too close, back up
                    left = -DRIVE_SPEED * delta/50;
                    right = -DRIVE_SPEED * delta/50;
                } else if (delta < -GOAL_THRESHOLD) {
                    // too far, go forward
                    // NOTE(review): delta is negative here, so these outputs are
                    // negative too — verify the motor sign convention means
                    // "forward" for negative values in this branch.
                    left = DRIVE_SPEED * delta/50;
                    right = DRIVE_SPEED * delta/50;
                }
                // Steering correction outside the angular dead zone. (The two
                // original branches for avgAngle < -ZONE and > ZONE had
                // identical bodies, so they are merged here.)
                if (Math.abs(avgAngle) > ZONE) {
                    left += avgAngle * ANGLE_DRIVE_RATIO;
                    right -= avgAngle * ANGLE_DRIVE_RATIO;
                }
                // slowly rev up the motor instead of slamming it
                double dLeft = left - lastLeft;
                double dRight = right - lastRight;
                if (Math.abs(dLeft) < SPEED_THRESHOLD) {
                    left = (lastLeft + left)/2;
                }
                if (Math.abs(dRight) < SPEED_THRESHOLD) {
                    right = (lastRight + right)/2;
                }
                canShoot = (Math.abs(left) < ZONE && Math.abs(right) < ZONE);
                drive.tankDrive(left, right);
            }
            try {
                Thread.sleep(DRIVE_DELAY);
            } catch (InterruptedException e) {
                // BUG FIX: was an empty catch-all that swallowed interrupts.
                // Restore the interrupt status and stop the drive loop.
                Thread.currentThread().interrupt();
                break;
            }
        }
    }
}
|
package tw.fondus.fews.adapter.pi.flow.longtime.nchc;
import org.junit.Test;
import tw.fondus.fews.adapter.pi.argument.PiIOArguments;
/**
* Unit test of Model pre-adapter for running NCHC long time flow model from Delft-FEWS.
*
* @author Chao
*
*/
public class LTFPreAdapterTest {

    @Test
    public void run() {
        // Command-line style arguments: base directory, input XML time series,
        // and the model input files the pre-adapter should produce.
        PiIOArguments arguments = PiIOArguments.instance();
        String[] args = {
                "-b", "src/test/resources/",
                "-i", "Rainfall.xml,WaterLevel.xml",
                "-o", "DATA_INP_RAIN.txt,DATA_INP_WL.txt,INPUT_DATE_DECADE.TXT"
        };
        new LTFPreAdapter().execute( args, arguments );
    }
}
|
package com.jkinvest.jkl.core.file.controller;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.validation.constraints.NotNull;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.jkinvest.jkl.base.base.R;
import com.jkinvest.jkl.base.base.controller.SuperController;
import com.jkinvest.jkl.base.base.request.PageParams;
import com.jkinvest.jkl.base.log.annotation.SysLog;
import com.jkinvest.jkl.base.utils.BeanPlusUtil;
import com.jkinvest.jkl.core.file.dto.FilePageReqDTO;
import com.jkinvest.jkl.core.file.dto.FileUpdateDTO;
import com.jkinvest.jkl.core.file.dto.FolderDTO;
import com.jkinvest.jkl.core.file.dto.FolderSaveDTO;
import com.jkinvest.jkl.core.file.entity.File;
import com.jkinvest.jkl.core.file.manager.FileRestManager;
import com.jkinvest.jkl.core.file.service.FileService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import lombok.extern.slf4j.Slf4j;
/**
 * REST controller for the file table: paging, folder creation, upload,
 * rename/update, delete and (zip) download of {@link File} records.
 *
 * @author zuihou
 * @since 2019-04-29
 */
@Validated
@RestController
@RequestMapping("/file")
@Slf4j
@Api(value = "文件表", tags = "文件表")
public class FileController extends SuperController<FileService, Long, File, FilePageReqDTO, FolderSaveDTO, FileUpdateDTO> {
    @Autowired
    private FileRestManager fileRestManager;

    /** Page queries are delegated to the file manager, not the generic base query. */
    @Override
    public void query(PageParams<FilePageReqDTO> params, IPage<File> page, Long defSize) {
        fileRestManager.page(page, params.getModel());
    }

    /** Saving through this controller creates a folder record, returned as a {@link File}. */
    @Override
    public R<File> handlerSave(FolderSaveDTO model) {
        FolderDTO folder = baseService.saveFolder(model);
        return success(BeanPlusUtil.toBean(folder, File.class));
    }

    /**
     * Uploads a single file into the given folder.
     *
     * @param folderId id of the target folder (must not be null)
     * @param simpleFile the multipart payload to store
     * @return the stored {@link File} record
     * @author zuihou
     * @date 2019-05-06 16:28
     */
    @ApiOperation(value = "上传文件", notes = "上传文件 ")
    @ApiResponses({
            @ApiResponse(code = 60102, message = "文件夹为空"),
    })
    @ApiImplicitParams({
            @ApiImplicitParam(name = "folderId", value = "文件夹id", dataType = "long", paramType = "query"),
            @ApiImplicitParam(name = "file", value = "附件", dataType = "MultipartFile", allowMultiple = true, required = true),
    })
    @RequestMapping(value = "/upload", method = RequestMethod.POST)
    @SysLog("上传文件")
    public R<File> upload(
            @NotNull(message = "文件夹不能为空")
            @RequestParam(value = "folderId") Long folderId,
            @RequestParam(value = "file") MultipartFile simpleFile) {
        // Store the file locally first and generate its file name.
        log.info("contentType={}, name={} , sfname={}", simpleFile.getContentType(), simpleFile.getName(), simpleFile.getOriginalFilename());
        // Only the content type is validated here; path fields are ignored.
        if (simpleFile.getContentType() == null) {
            return fail("文件为空");
        }
        File file = baseService.upload(simpleFile, folderId);
        return success(file);
    }

    /**
     * Updates a file record. If the submitted file name no longer ends with the
     * stored extension, the previous extension is re-appended.
     */
    @Override
    public R<File> handlerUpdate(FileUpdateDTO fileUpdateDTO) {
        // Check whether the new file name still carries the old extension.
        if (StringUtils.isNotEmpty(fileUpdateDTO.getSubmittedFileName())) {
            File oldFile = baseService.getById(fileUpdateDTO.getId());
            // Guard against an unknown/stale id: the previous code dereferenced
            // oldFile without a null check and could throw NullPointerException.
            if (oldFile != null && oldFile.getExt() != null
                    && !fileUpdateDTO.getSubmittedFileName().endsWith(oldFile.getExt())) {
                fileUpdateDTO.setSubmittedFileName(fileUpdateDTO.getSubmittedFileName() + "." + oldFile.getExt());
            }
        }
        File file = BeanPlusUtil.toBean(fileUpdateDTO, File.class);
        baseService.updateById(file);
        return success(file);
    }

    /** Removes the given files on behalf of the current user. */
    @Override
    public R<Boolean> handlerDelete(List<Long> ids) {
        Long userId = getUserId();
        return success(baseService.removeList(userId, ids));
    }

    /**
     * Downloads one file, or packages several files for download.
     *
     * @param ids ids of the files to download
     * @param request the current HTTP request
     * @param response the response the file bytes are streamed to
     * @throws Exception if streaming the download fails
     */
    @ApiOperation(value = "下载一个文件或多个文件打包下载", notes = "下载一个文件或多个文件打包下载")
    @GetMapping(value = "/download", produces = "application/octet-stream")
    @SysLog("下载文件")
    public void download(
            @ApiParam(name = "ids[]", value = "文件id 数组")
            @RequestParam(value = "ids[]") Long[] ids,
            HttpServletRequest request, HttpServletResponse response) throws Exception {
        fileRestManager.download(request, response, ids, null);
    }
}
|
package com.github.catstiger.common.sql.sync;
import javax.persistence.ManyToMany;
public interface ManyToManyCreator {
    /**
     * Creates the cross (join) table for a many-to-many association. The field must
     * be a Collection subtype annotated with {@link ManyToMany}, whose
     * {@link ManyToMany#targetEntity()} points at the other entity class and whose
     * {@link ManyToMany#mappedBy()} names the corresponding property on that entity.
     * The corresponding field on the other entity must <b>not</b> declare
     * {@link ManyToMany#mappedBy()}.
     *
     * @param entityClass the owning (leading) entity class
     * @param fieldName   the name of the association field
     */
    void createCrossTable(Class<?> entityClass, String fieldName);
}
|
package io.vertx.tp.ipc.service;
import static io.grpc.MethodDescriptor.generateFullMethodName;
import static io.grpc.stub.ClientCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ServerCalls.asyncBidiStreamingCall;
import static io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall;
/**
 * gRPC bindings for the {@code io.vertx.tp.ipc.service.DupliexService} service,
 * which exposes a single bidirectional-streaming method ("DupliexCall").
 * Generated code: regenerate from the proto file instead of editing by hand.
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler (version 1.6.1)",
    comments = "Source: zero.def.service.proto")
public final class DupliexServiceGrpc {
    // Static utility holder; not instantiable.
    private DupliexServiceGrpc() {
    }

    public static final String SERVICE_NAME = "io.vertx.tp.ipc.service.DupliexService";

    // Static method descriptors that strictly reflect the proto.
    @io.grpc.ExperimentalApi("https://github.com/grpc/grpc-java/issues/1901")
    public static final io.grpc.MethodDescriptor<io.vertx.tp.ipc.eon.StreamClientRequest,
        io.vertx.tp.ipc.eon.StreamServerResponse> METHOD_DUPLIEX_CALL =
        io.grpc.MethodDescriptor.<io.vertx.tp.ipc.eon.StreamClientRequest, io.vertx.tp.ipc.eon.StreamServerResponse>newBuilder()
            .setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)
            .setFullMethodName(generateFullMethodName(
                "io.vertx.tp.ipc.service.DupliexService", "DupliexCall"))
            .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                io.vertx.tp.ipc.eon.StreamClientRequest.getDefaultInstance()))
            .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(
                io.vertx.tp.ipc.eon.StreamServerResponse.getDefaultInstance()))
            .build();

    /**
     * Creates a new async stub that supports all call types for the service
     */
    public static DupliexServiceStub newStub(io.grpc.Channel channel) {
        return new DupliexServiceStub(channel);
    }

    /**
     * Creates a new blocking-style stub that supports unary and streaming output calls on the service
     */
    public static DupliexServiceBlockingStub newBlockingStub(
        io.grpc.Channel channel) {
        return new DupliexServiceBlockingStub(channel);
    }

    /**
     * Creates a new ListenableFuture-style stub that supports unary calls on the service
     */
    public static DupliexServiceFutureStub newFutureStub(
        io.grpc.Channel channel) {
        return new DupliexServiceFutureStub(channel);
    }

    /**
     * Base class for server-side implementations of DupliexService.
     */
    public static abstract class DupliexServiceImplBase implements io.grpc.BindableService {
        /**
         * <pre>
         * Full: Client -> Server -> Client
         * </pre>
         */
        public io.grpc.stub.StreamObserver<io.vertx.tp.ipc.eon.StreamClientRequest> dupliexCall(
            io.grpc.stub.StreamObserver<io.vertx.tp.ipc.eon.StreamServerResponse> responseObserver) {
            // Default: reports UNIMPLEMENTED until a subclass overrides this method.
            return asyncUnimplementedStreamingCall(METHOD_DUPLIEX_CALL, responseObserver);
        }

        @java.lang.Override
        public final io.grpc.ServerServiceDefinition bindService() {
            return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
                .addMethod(
                    METHOD_DUPLIEX_CALL,
                    asyncBidiStreamingCall(
                        new MethodHandlers<
                            io.vertx.tp.ipc.eon.StreamClientRequest,
                            io.vertx.tp.ipc.eon.StreamServerResponse>(
                            this, METHODID_DUPLIEX_CALL)))
                .build();
        }
    }

    /**
     * Async client stub for DupliexService.
     */
    public static final class DupliexServiceStub extends io.grpc.stub.AbstractStub<DupliexServiceStub> {
        private DupliexServiceStub(io.grpc.Channel channel) {
            super(channel);
        }

        private DupliexServiceStub(io.grpc.Channel channel,
            io.grpc.CallOptions callOptions) {
            super(channel, callOptions);
        }

        @java.lang.Override
        protected DupliexServiceStub build(io.grpc.Channel channel,
            io.grpc.CallOptions callOptions) {
            return new DupliexServiceStub(channel, callOptions);
        }

        /**
         * <pre>
         * Full: Client -> Server -> Client
         * </pre>
         */
        public io.grpc.stub.StreamObserver<io.vertx.tp.ipc.eon.StreamClientRequest> dupliexCall(
            io.grpc.stub.StreamObserver<io.vertx.tp.ipc.eon.StreamServerResponse> responseObserver) {
            return asyncBidiStreamingCall(
                getChannel().newCall(METHOD_DUPLIEX_CALL, getCallOptions()), responseObserver);
        }
    }

    /**
     * Blocking-style client stub for DupliexService.
     */
    public static final class DupliexServiceBlockingStub extends io.grpc.stub.AbstractStub<DupliexServiceBlockingStub> {
        private DupliexServiceBlockingStub(io.grpc.Channel channel) {
            super(channel);
        }

        private DupliexServiceBlockingStub(io.grpc.Channel channel,
            io.grpc.CallOptions callOptions) {
            super(channel, callOptions);
        }

        @java.lang.Override
        protected DupliexServiceBlockingStub build(io.grpc.Channel channel,
            io.grpc.CallOptions callOptions) {
            return new DupliexServiceBlockingStub(channel, callOptions);
        }
    }

    /**
     * ListenableFuture-style client stub for DupliexService.
     */
    public static final class DupliexServiceFutureStub extends io.grpc.stub.AbstractStub<DupliexServiceFutureStub> {
        private DupliexServiceFutureStub(io.grpc.Channel channel) {
            super(channel);
        }

        private DupliexServiceFutureStub(io.grpc.Channel channel,
            io.grpc.CallOptions callOptions) {
            super(channel, callOptions);
        }

        @java.lang.Override
        protected DupliexServiceFutureStub build(io.grpc.Channel channel,
            io.grpc.CallOptions callOptions) {
            return new DupliexServiceFutureStub(channel, callOptions);
        }
    }

    private static final int METHODID_DUPLIEX_CALL = 0;

    // Dispatches incoming server calls to the service implementation by method id.
    private static final class MethodHandlers<Req, Resp> implements
        io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
        private final DupliexServiceImplBase serviceImpl;
        private final int methodId;

        MethodHandlers(DupliexServiceImplBase serviceImpl, int methodId) {
            this.serviceImpl = serviceImpl;
            this.methodId = methodId;
        }

        @java.lang.Override
        @java.lang.SuppressWarnings("unchecked")
        public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
            // No unary or server-streaming methods exist on this service.
            switch (methodId) {
                default:
                    throw new AssertionError();
            }
        }

        @java.lang.Override
        @java.lang.SuppressWarnings("unchecked")
        public io.grpc.stub.StreamObserver<Req> invoke(
            io.grpc.stub.StreamObserver<Resp> responseObserver) {
            switch (methodId) {
                case METHODID_DUPLIEX_CALL:
                    return (io.grpc.stub.StreamObserver<Req>) serviceImpl.dupliexCall(
                        (io.grpc.stub.StreamObserver<io.vertx.tp.ipc.eon.StreamServerResponse>) responseObserver);
                default:
                    throw new AssertionError();
            }
        }
    }

    // Supplies the proto file descriptor (used for reflection/debug tooling).
    private static final class DupliexServiceDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier {
        @java.lang.Override
        public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
            return io.vertx.tp.ipc.service.UpIpcService.getDescriptor();
        }
    }

    private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

    // Lazily builds the service descriptor, using double-checked locking on the
    // volatile field above.
    public static io.grpc.ServiceDescriptor getServiceDescriptor() {
        io.grpc.ServiceDescriptor result = serviceDescriptor;
        if (result == null) {
            synchronized (DupliexServiceGrpc.class) {
                result = serviceDescriptor;
                if (result == null) {
                    serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                        .setSchemaDescriptor(new DupliexServiceDescriptorSupplier())
                        .addMethod(METHOD_DUPLIEX_CALL)
                        .build();
                }
            }
        }
        return result;
    }
}
|
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2013 Jaspersoft Corporation. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Contributors:
* Eugene D - eugenedruy@users.sourceforge.net
* Adrian Jackson - iapetus@users.sourceforge.net
* David Taylor - exodussystems@users.sourceforge.net
* Lars Kristensen - llk@users.sourceforge.net
*/
package net.sf.jasperreports.engine.export.draw;
import java.awt.Graphics2D;
import net.sf.jasperreports.engine.DefaultJasperReportsContext;
import net.sf.jasperreports.engine.JRPrintText;
import net.sf.jasperreports.engine.JasperReportsContext;
import net.sf.jasperreports.engine.export.AwtTextRenderer;
import net.sf.jasperreports.engine.type.ModeEnum;
import net.sf.jasperreports.engine.util.JRStyledText;
/**
 * Draws {@link JRPrintText} elements onto a {@link Graphics2D} surface,
 * handling rotation, opaque backgrounds and the element's line box.
 *
 * @author Teodor Danciu (teodord@users.sourceforge.net)
 * @version $Id: TextDrawer.java 5878 2013-01-07 20:23:13Z teodord $
 */
public class TextDrawer extends ElementDrawer<JRPrintText>
{
	/** Renderer that lays out and paints the styled text. */
	protected AwtTextRenderer textRenderer;

	/**
	 * @deprecated Replaced by {@link #TextDrawer(JasperReportsContext, AwtTextRenderer)}.
	 */
	public TextDrawer(
		AwtTextRenderer textRenderer
		)
	{
		this(DefaultJasperReportsContext.getInstance(), textRenderer);
	}

	/**
	 * Creates a text drawer for the given context and renderer.
	 */
	public TextDrawer(
		JasperReportsContext jasperReportsContext,
		AwtTextRenderer textRenderer
		)
	{
		super(jasperReportsContext);
		this.textRenderer = textRenderer;
	}

	/**
	 * Renders the text element at the given offsets: rotates the graphics,
	 * fills the background when opaque, paints the text, restores the
	 * rotation and finally draws the element's box.
	 */
	public void draw(Graphics2D grx, JRPrintText text, int offsetX, int offsetY)
	{
		textRenderer.initialize(grx, text, offsetX, offsetY);

		JRStyledText styledText = textRenderer.getStyledText();
		if (styledText == null)
		{
			return;
		}

		double angle = rotationAngle(text);
		grx.rotate(angle, textRenderer.getX(), textRenderer.getY());

		if (ModeEnum.OPAQUE == text.getModeValue())
		{
			grx.setColor(text.getBackcolor());
			grx.fillRect(textRenderer.getX(), textRenderer.getY(), textRenderer.getWidth(), textRenderer.getHeight());
		}

		String plainText = textRenderer.getPlainText();
		if (plainText.length() > 0)
		{
			grx.setColor(text.getForecolor());
			textRenderer.render();
		}

		// Undo the rotation before drawing the (unrotated) line box.
		grx.rotate(-angle, textRenderer.getX(), textRenderer.getY());

		drawBox(grx, text.getLineBox(), text, offsetX, offsetY);
	}

	/**
	 * Maps the element's rotation setting to an angle in radians
	 * (NONE and any other value map to zero).
	 */
	private static double rotationAngle(JRPrintText text)
	{
		switch (text.getRotationValue())
		{
			case LEFT :
				return - Math.PI / 2;
			case RIGHT :
				return Math.PI / 2;
			case UPSIDE_DOWN :
				return Math.PI;
			case NONE :
			default :
				return 0;
		}
	}
}
|
package com.xiazhenyu.registry;
import org.apache.curator.x.discovery.ServiceInstance;
/**
 * Skeleton listener that routes a service-instance refresh event to the
 * matching registry/remove/update callback.
 *
 * Date: 2021/8/17
 *
 * @author xiazhenyu
 */
public abstract class AbstractServiceInstanceListener<T> implements ServiceInstanceListener<T> {

    /**
     * Dispatches the event to exactly one callback; events outside the three
     * known kinds are ignored, as before.
     */
    public void onFresh(ServiceInstance<T> instance, ServerInfoEvent event) {
        switch (event) {
            case ON_UPDATE:
                onUpdate(instance);
                break;
            case ON_REMOVE:
                onRemove(instance);
                break;
            case ON_REGISTRY:
                onRegistry(instance);
                break;
        }
    }
}
|
package cz.skodape.taskrunner.http;
import cz.skodape.taskrunner.storage.instance.storage.WritableTaskStorage;
import cz.skodape.taskrunner.storage.template.TaskTemplateStorage;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Embedded Jetty server that serves the task REST API under {@code /api/v1/*}.
 */
public class JettyHttpServer {

    private static final Logger LOG =
            LoggerFactory.getLogger(JettyHttpServer.class);

    /** Jersey resource providing the REST endpoints. */
    private final TaskRestApi restApi;

    /** Port the HTTP listener binds to. */
    private final int port;

    private Server server;

    public JettyHttpServer(
            WritableTaskStorage taskStorage,
            TaskTemplateStorage templateStorage,
            int port) {
        this.restApi = new TaskRestApi(taskStorage, templateStorage);
        this.port = port;
    }

    /**
     * Builds the servlet context and starts Jetty.
     *
     * @throws HttpServerException when the server fails to start
     */
    public void start() throws HttpServerException {
        server = new Server(port);
        // TODO Add default 404 response
        ServletContextHandler context = new ServletContextHandler();
        context.setContextPath("/");
        context.addServlet(createRestApiServlet(), "/api/v1/*");
        server.setHandler(new ContextHandlerCollection(context));
        try {
            server.start();
        } catch (Exception ex) {
            throw new HttpServerException("Can't start server.", ex);
        }
        LOG.info("HTTP server listening at: {}", port);
    }

    /** Wraps the Jersey application (with multipart support) in a servlet holder. */
    private ServletHolder createRestApiServlet() {
        ResourceConfig config = new ResourceConfig();
        config.registerInstances(restApi);
        config.register(MultiPartFeature.class);

        ServletHolder holder = new ServletHolder();
        holder.setServlet(new ServletContainer(config));
        return holder;
    }

    /** Stops the server if it was started; failures are logged, not rethrown. */
    public void stop() {
        if (server == null) {
            return;
        }
        try {
            server.stop();
        } catch (Exception ex) {
            LOG.error("Can't stop server.", ex);
        }
    }
}
|
package p;
// NOTE(review): this looks like a refactoring-test fixture; the comments below
// appear to mark the conditions a refactoring tool must detect — confirm
// before changing anything here.
class A {
class Inner {
//conflicting name
int a;
//needs enclosing instance
{
foo();
}
}
void foo() {
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.runtime.powerassert;
import org.codehaus.groovy.runtime.InvokerHelper;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
 * Creates a string representation of an assertion and its recorded values.
 *
 * @author Peter Niederwieser
 */
public final class AssertionRenderer {
    // Single-line source text of the assertion being rendered.
    private final String text;
    // Holds the values recorded while the assertion was evaluated.
    private final ValueRecorder recorder;
    // lines.get(0) is the assertion text; later lines carry "|" markers and values.
    private final List<StringBuilder> lines = new ArrayList<StringBuilder>();
    // startColumns.get(i) is the first non-empty column of lines.get(i)
    private final List<Integer> startColumns = new ArrayList<Integer>();

    private AssertionRenderer(String text, ValueRecorder recorder) {
        if (text.contains("\n"))
            throw new IllegalArgumentException("source text may not contain line breaks");
        this.text = text;
        this.recorder = recorder;
    }

    /**
     * Creates a string representation of an assertion and its recorded values.
     *
     * @param text the assertion's source text
     * @param recorder a recorder holding the values recorded during evaluation
     * of the assertion
     * @return a string representation of the assertion and its recorded values
     */
    public static String render(String text, ValueRecorder recorder) {
        return new AssertionRenderer(text, recorder).render();
    }

    // Pipeline: lay out the text, sort values right-to-left, place them, join lines.
    private String render() {
        renderText();
        sortValues();
        renderValues();
        return linesToString();
    }

    // Seeds the output with the assertion text plus one empty spacer line.
    private void renderText() {
        lines.add(new StringBuilder(text));
        startColumns.add(0);
        lines.add(new StringBuilder()); // empty line
        startColumns.add(0);
    }

    // Sorts recorded values by descending source column (rightmost first).
    private void sortValues() {
        // it's important to use a stable sort here, otherwise
        // renderValues() will skip the wrong values
        Collections.sort(recorder.getValues(),
            new Comparator<Value>() {
                public int compare(Value v1, Value v2) {
                    return v2.getColumn() - v1.getColumn();
                }
            }
        );
    }

    // Places each recorded value under its source column, creating new lines
    // when the value does not fit on any existing line.
    private void renderValues() {
        List<Value> values = recorder.getValues();
        int valuesSize = values.size();

        nextValue:
        for (int i = 0; i < valuesSize; i++) {
            final Value value = values.get(i);
            final int startColumn = value.getColumn();
            if (startColumn < 1) continue; // skip values with unknown source position

            // if multiple values are associated with the same column, only
            // render the value which was recorded last (i.e. the value
            // corresponding to the outermost expression)
            // important for GROOVY-4344
            Value next = i + 1 < valuesSize ? values.get(i + 1) : null;
            if (next != null && next.getColumn() == startColumn) continue;

            String str = valueToString(value.getValue());
            if (str == null) continue; // null signals the value shouldn't be rendered

            String[] strs = str.split("\r\n|\r|\n");
            int endColumn = strs.length == 1 ?
                startColumn + str.length() : // exclusive
                Integer.MAX_VALUE; // multi-line strings are always placed on new lines

            // Walk existing lines top-down: place the value on the first line
            // with room; otherwise drop a "|" connector and keep descending.
            for (int j = 1; j < lines.size(); j++)
                if (endColumn < startColumns.get(j)) {
                    placeString(lines.get(j), str, startColumn);
                    startColumns.set(j, startColumn);
                    continue nextValue;
                } else {
                    placeString(lines.get(j), "|", startColumn);
                    if (j > 1) // make sure that no values are ever placed on empty line
                        startColumns.set(j, startColumn + 1); // + 1: no whitespace required between end of value and "|"
                }

            // value could not be placed on existing lines, so place it on new line(s)
            for (String s : strs) {
                StringBuilder newLine = new StringBuilder();
                lines.add(newLine);
                placeString(newLine, s, startColumn);
                startColumns.add(startColumn);
            }
        }
    }

    // Joins all rendered lines with '\n' into the final message.
    private String linesToString() {
        StringBuilder firstLine = lines.get(0);
        for (int i = 1; i < lines.size(); i++)
            firstLine.append('\n').append(lines.get(i).toString());
        return firstLine.toString();
    }

    // Pads the line with spaces up to the (1-based) column, then overwrites
    // the characters there with str.
    private static void placeString(StringBuilder line, String str, int column) {
        while (line.length() < column)
            line.append(' ');
        line.replace(column - 1, column - 1 + str.length(), str);
    }

    /**
     * Returns a string representation of the given value, or <tt>null</tt> if
     * the value should not be included (because it does not add any valuable
     * information).
     *
     * @param value a value
     * @return a string representation of the given value
     */
    private static String valueToString(Object value) {
        String toString;

        try {
            toString = InvokerHelper.format(value, true, -1, false);
        } catch (Exception e) {
            // toString() itself failed; fall back to the identity rendering.
            return String.format("%s (toString() threw %s)",
                javaLangObjectToString(value), e.getClass().getName());
        }

        if (toString == null) {
            return String.format("%s (toString() == null)", javaLangObjectToString(value));
        }

        if (toString.equals("")) {
            if (hasStringLikeType(value)) return "\"\"";
            return String.format("%s (toString() == \"\")", javaLangObjectToString(value));
        }

        return toString;
    }

    // True for String/StringBuffer/StringBuilder, which render as "" when empty.
    private static boolean hasStringLikeType(Object value) {
        Class<?> clazz = value.getClass();
        return clazz == String.class || clazz == StringBuffer.class || clazz == StringBuilder.class;
    }

    // Default Object-style rendering: class name + identity hash code.
    private static String javaLangObjectToString(Object value) {
        String hash = Integer.toHexString(System.identityHashCode(value));
        return value.getClass().getName() + "@" + hash;
    }
}
|
package module2149_public_tests_more.a;
import java.util.zip.*;
import javax.annotation.processing.*;
import javax.lang.model.*;
/**
 * Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut
 * labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum.
 * Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.
 *
 * @see java.awt.datatransfer.DataFlavor
 * @see java.beans.beancontext.BeanContext
 * @see java.io.File
 */
@SuppressWarnings("all")
public abstract class Foo3<B> extends module2149_public_tests_more.a.Foo2<B> implements module2149_public_tests_more.a.IFoo3<B> {
// NOTE(review): looks like an auto-generated module-system test fixture
// (placeholder javadoc, unused fields referencing assorted JDK modules) —
// confirm before changing anything here.
java.rmi.Remote f0 = null;
java.nio.file.FileStore f1 = null;
java.sql.Array f2 = null;
public B element;
public static Foo3 instance;
// Returns the shared instance (assigned externally; may be null).
public static Foo3 getInstance() {
return instance;
}
// Delegates to Foo2.create with the same input list.
public static <T> T create(java.util.List<T> input) {
return module2149_public_tests_more.a.Foo2.create(input);
}
public String getName() {
return module2149_public_tests_more.a.Foo2.getInstance().getName();
}
// NOTE(review): ignores the 'string' argument and forwards getName() —
// presumably intentional for this fixture; verify.
public void setName(String string) {
module2149_public_tests_more.a.Foo2.getInstance().setName(getName());
return;
}
public B get() {
return (B)module2149_public_tests_more.a.Foo2.getInstance().get();
}
// Stores the element locally, then mirrors it into the Foo2 instance.
public void set(Object element) {
this.element = (B)element;
module2149_public_tests_more.a.Foo2.getInstance().set(this.element);
}
public B call() throws Exception {
return (B)module2149_public_tests_more.a.Foo2.getInstance().call();
}
}
|
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.destination.redis;
import com.streamsets.pipeline.api.ConfigDefBean;
import com.streamsets.pipeline.api.ConfigGroups;
import com.streamsets.pipeline.api.GenerateResourceBundle;
import com.streamsets.pipeline.api.StageDef;
import com.streamsets.pipeline.api.Target;
import com.streamsets.pipeline.configurablestage.DTarget;
@StageDef(
    version = 2,
    upgrader = RedisDTargetUpgrader.class,
    label = "Redis",
    description = "Writes data to Redis",
    icon = "redis.png",
    onlineHelpRefUrl ="index.html#datacollector/UserGuide/Destinations/Redis.html#task_mzk_lw2_gw"
)
@ConfigGroups(Groups.class)
@GenerateResourceBundle
public class RedisDTarget extends DTarget {

  /** Redis connection and write configuration, surfaced in the REDIS group. */
  @ConfigDefBean(groups = {"REDIS"})
  public RedisTargetConfig conf;

  /** Builds the runtime {@link Target} backed by the current configuration. */
  @Override
  protected Target createTarget() {
    return new RedisTarget(conf);
  }
}
|
/*
* Copyright 2016 Sai Pullabhotla.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jmethods.catatumbo.mappers;
import com.google.cloud.datastore.EntityValue;
import com.google.cloud.datastore.FullEntity;
import com.google.cloud.datastore.IncompleteKey;
import com.google.cloud.datastore.NullValue;
import com.google.cloud.datastore.Value;
import com.google.cloud.datastore.ValueBuilder;
import com.google.cloud.datastore.ValueType;
import com.jmethods.catatumbo.Indexer;
import com.jmethods.catatumbo.Mapper;
import com.jmethods.catatumbo.MappingException;
import com.jmethods.catatumbo.impl.ConstructorMetadata;
import com.jmethods.catatumbo.impl.EmbeddableIntrospector;
import com.jmethods.catatumbo.impl.EmbeddableMetadata;
import com.jmethods.catatumbo.impl.PropertyMetadata;
/**
 * An implementation of {@link Mapper} interface to map embedded objects.
 *
 * @author Sai Pullabhotla
 */
public class EmbeddedObjectMapper implements Mapper {

  /** The Embeddable class. */
  private final Class<?> clazz;

  /** Metadata of the Embeddable class. */
  private final EmbeddableMetadata metadata;

  /**
   * Creates a new instance of <code>EmbeddedObjectMapper</code>.
   *
   * @param clazz the Embeddable class
   */
  public EmbeddedObjectMapper(Class<?> clazz) {
    this.clazz = clazz;
    this.metadata = EmbeddableIntrospector.introspect(clazz);
  }

  /**
   * Converts an embedded object into a Datastore entity value; null input
   * becomes a NullValue. Any failure is wrapped in a {@link MappingException}.
   */
  @Override
  public ValueBuilder<?, ?, ?> toDatastore(Object input) {
    if (input == null) {
      return NullValue.newBuilder();
    }
    try {
      FullEntity.Builder<IncompleteKey> builder = FullEntity.newBuilder();
      for (PropertyMetadata property : metadata.getPropertyMetadataCollection()) {
        Object fieldValue = property.getReadMethod().invoke(input);
        // Optional properties holding null are simply omitted.
        if (fieldValue == null && property.isOptional()) {
          continue;
        }
        ValueBuilder<?, ?, ?> valueBuilder = property.getMapper().toDatastore(fieldValue);
        // ListValues cannot have indexing turned off. Indexing is on by
        // default, so excludeFromIndexes is left untouched for ListValues.
        if (ValueType.LIST != valueBuilder.getValueType()) {
          valueBuilder.setExcludeFromIndexes(!property.isIndexed());
        }
        Value<?> value = valueBuilder.build();
        builder.set(property.getMappedName(), value);
        // Mirror the value into its secondary index property, when configured.
        Indexer indexer = property.getSecondaryIndexer();
        if (indexer != null) {
          builder.set(property.getSecondaryIndexName(), indexer.index(value));
        }
      }
      return EntityValue.newBuilder(builder.build());
    } catch (Throwable t) {
      throw new MappingException(t);
    }
  }

  /**
   * Reconstructs an embedded object from a Datastore entity value; NULL input
   * maps to {@code null}. Any failure is wrapped in a {@link MappingException}.
   */
  @Override
  public Object toModel(Value<?> input) {
    if (ValueType.NULL == input.getType()) {
      return null;
    }
    try {
      FullEntity<?> entity = ((EntityValue) input).get();
      ConstructorMetadata ctor = metadata.getConstructorMetadata();
      Object result = ctor.getConstructorMethodHandle().invoke();
      for (PropertyMetadata property : metadata.getPropertyMetadataCollection()) {
        String mappedName = property.getMappedName();
        // Properties absent from the stored entity are left at their defaults.
        if (!entity.contains(mappedName)) {
          continue;
        }
        Object fieldValue = property.getMapper().toModel(entity.getValue(mappedName));
        property.getWriteMethod().invoke(result, fieldValue);
      }
      // Builder-based classes need a final build() call to produce the object.
      if (ctor.isBuilderConstructionStrategy()) {
        result = metadata.getConstructorMetadata().getBuildMethodHandle().invoke(result);
      }
      return result;
    } catch (Throwable t) {
      throw new MappingException(t);
    }
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink.elasticsearch;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import java.io.IOException;
import java.util.Map;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.conf.ComponentConfiguration;
import org.elasticsearch.common.xcontent.XContentBuilder;
/**
 * Basic serializer that serializes the event body and header fields into
 * individual fields</p>
 *
 * A best effort will be used to determine the content-type, if it cannot be
 * determined fields will be indexed as Strings
 */
public class ElasticSearchDynamicSerializer implements
    ElasticSearchEventSerializer {

  @Override
  public void configure(Context context) {
    // NO-OP...
  }

  @Override
  public void configure(ComponentConfiguration conf) {
    // NO-OP...
  }

  /**
   * Builds a JSON document containing the event body plus one field per
   * event header.
   *
   * @param event the Flume event to serialize
   * @return the populated (opened and closed) JSON object builder
   * @throws IOException if appending a field fails
   */
  @Override
  public XContentBuilder getContentBuilder(Event event) throws IOException {
    XContentBuilder builder = jsonBuilder().startObject();
    appendBody(builder, event);
    appendHeaders(builder, event);
    builder.endObject();
    return builder;
  }

  /** Appends the raw event body under the "body" field. */
  private void appendBody(XContentBuilder builder, Event event)
      throws IOException {
    ContentBuilderUtil.appendField(builder, "body", event.getBody());
  }

  /** Appends each event header as its own field, named after the header key. */
  private void appendHeaders(XContentBuilder builder, Event event)
      throws IOException {
    // Iterate entries directly rather than keySet() + get(key): avoids a
    // second map lookup per header and cannot NPE on a value that vanished
    // between the two calls.
    for (Map.Entry<String, String> header : event.getHeaders().entrySet()) {
      ContentBuilderUtil.appendField(builder, header.getKey(),
          header.getValue().getBytes(charset));
    }
  }
}
|
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.workspaces.model;
import javax.annotation.Generated;
/**
* <p>
* The resource could not be created.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ResourceCreationFailedException extends com.amazonaws.services.workspaces.model.AmazonWorkspacesException {

    /** Serialization version marker; generated exception types pin this to 1. */
    private static final long serialVersionUID = 1L;

    /**
     * Creates a {@code ResourceCreationFailedException} carrying the given error description.
     *
     * @param message
     *        Describes the error encountered.
     */
    public ResourceCreationFailedException(String message) {
        super(message);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.managers.IgniteMBeansManager;
import org.apache.ignite.internal.pagemem.PageMemory;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheGroupContext;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheContextInfo;
import org.apache.ignite.internal.processors.cache.mvcc.MvccSnapshot;
import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow;
import org.apache.ignite.internal.processors.cache.persistence.RootPage;
import org.apache.ignite.internal.processors.cache.persistence.checkpoint.CheckpointTimeoutLock;
import org.apache.ignite.internal.processors.cache.persistence.defragmentation.LinkMap;
import org.apache.ignite.internal.processors.cache.persistence.pagemem.PageMemoryEx;
import org.apache.ignite.internal.processors.cache.persistence.tree.reuse.ReuseList;
import org.apache.ignite.internal.processors.odbc.jdbc.JdbcParameterMeta;
import org.apache.ignite.internal.processors.query.schema.SchemaIndexCacheVisitor;
import org.apache.ignite.internal.util.GridAtomicLong;
import org.apache.ignite.internal.util.GridSpinBusyLock;
import org.apache.ignite.internal.util.collection.IntMap;
import org.apache.ignite.internal.util.lang.GridCloseableIterator;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.spi.indexing.IndexingQueryFilter;
import org.apache.ignite.thread.IgniteThreadPoolExecutor;
import org.jetbrains.annotations.Nullable;
/**
* Abstraction for internal indexing implementation.
*/
public interface GridQueryIndexing {
    /**
     * Starts indexing.
     *
     * @param ctx Context.
     * @param busyLock Busy lock.
     * @throws IgniteCheckedException If failed.
     */
    public void start(GridKernalContext ctx, GridSpinBusyLock busyLock) throws IgniteCheckedException;

    /**
     * Stops indexing.
     *
     * @throws IgniteCheckedException If failed.
     */
    public void stop() throws IgniteCheckedException;

    /**
     * Performs necessary actions on disconnect of a stateful client (say, one associated with a transaction).
     *
     * @throws IgniteCheckedException If failed.
     */
    public void onClientDisconnect() throws IgniteCheckedException;

    /**
     * Generate SqlFieldsQuery from SqlQuery.
     *
     * @param cacheName Cache name.
     * @param qry Query.
     * @return Fields query.
     */
    public SqlFieldsQuery generateFieldsQuery(String cacheName, SqlQuery qry);

    /**
     * Detect whether SQL query should be executed in distributed or local manner and execute it.
     *
     * @param schemaName Schema name.
     * @param qry Query.
     * @param cliCtx Client context.
     * @param keepBinary Keep binary flag.
     * @param failOnMultipleStmts Whether an exception should be thrown for multiple statements query.
     * @param cancel Query cancel object.
     * @return Cursors; one per executed statement.
     */
    public List<FieldsQueryCursor<List<?>>> querySqlFields(
        String schemaName,
        SqlFieldsQuery qry,
        SqlClientContext cliCtx,
        boolean keepBinary,
        boolean failOnMultipleStmts,
        GridQueryCancel cancel
    );

    /**
     * Execute an INSERT statement using data streamer as receiver.
     *
     * @param schemaName Schema name.
     * @param qry Query.
     * @param params Query parameters.
     * @param streamer Data streamer to feed data to.
     * @return Update counter.
     * @throws IgniteCheckedException If failed.
     */
    public long streamUpdateQuery(String schemaName, String qry, @Nullable Object[] params,
        IgniteDataStreamer<?, ?> streamer) throws IgniteCheckedException;

    /**
     * Execute a batched INSERT statement using data streamer as receiver.
     *
     * @param schemaName Schema name.
     * @param qry Query.
     * @param params Query parameters.
     * @param cliCtx Client connection context.
     * @return Update counters.
     * @throws IgniteCheckedException If failed.
     */
    public List<Long> streamBatchedUpdateQuery(String schemaName, String qry, List<Object[]> params,
        SqlClientContext cliCtx) throws IgniteCheckedException;

    /**
     * Executes text query.
     *
     * @param schemaName Schema name.
     * @param cacheName Cache name.
     * @param qry Text query.
     * @param typeName Type name.
     * @param filter Cache name and key filter.
     * @param limit Limits response records count. If 0 or less, the limit considered to be Integer.MAX_VALUE, that is virtually no limit.
     * @return Queried rows.
     * @throws IgniteCheckedException If failed.
     */
    public <K, V> GridCloseableIterator<IgniteBiTuple<K, V>> queryLocalText(String schemaName, String cacheName,
        String qry, String typeName, IndexingQueryFilter filter, int limit) throws IgniteCheckedException;

    /**
     * Create new index locally.
     *
     * @param schemaName Schema name.
     * @param tblName Table name.
     * @param idxDesc Index descriptor.
     * @param ifNotExists Ignore operation if index exists (instead of throwing an error).
     * @param cacheVisitor Cache visitor.
     * @throws IgniteCheckedException If failed.
     */
    public void dynamicIndexCreate(String schemaName, String tblName, QueryIndexDescriptorImpl idxDesc,
        boolean ifNotExists, SchemaIndexCacheVisitor cacheVisitor) throws IgniteCheckedException;

    /**
     * Remove index from the cache.
     *
     * @param schemaName Schema name.
     * @param idxName Index name.
     * @param ifExists Ignore operation if index does not exist (instead of throwing an error).
     * @throws IgniteCheckedException If failed.
     */
    public void dynamicIndexDrop(String schemaName, String idxName, boolean ifExists) throws IgniteCheckedException;

    /**
     * Add columns to dynamic table.
     *
     * @param schemaName Schema name.
     * @param tblName Table name.
     * @param cols Columns to add.
     * @param ifTblExists Ignore operation if target table does not exist (instead of throwing an error).
     * @param ifColNotExists Ignore operation if column already exists (instead of throwing an error) - is honored only
     *     for single column case.
     * @throws IgniteCheckedException If failed.
     */
    public void dynamicAddColumn(String schemaName, String tblName, List<QueryField> cols, boolean ifTblExists,
        boolean ifColNotExists) throws IgniteCheckedException;

    /**
     * Drop columns from dynamic table.
     *
     * @param schemaName Schema name.
     * @param tblName Table name.
     * @param cols Columns to drop.
     * @param ifTblExists Ignore operation if target table does not exist (instead of throwing an error).
     * @param ifColExists Ignore operation if column does not exist (instead of throwing an error) - is honored only
     *     for single column case.
     * @throws IgniteCheckedException If failed.
     */
    public void dynamicDropColumn(String schemaName, String tblName, List<String> cols, boolean ifTblExists,
        boolean ifColExists) throws IgniteCheckedException;

    /**
     * Registers cache.
     *
     * @param cacheName Cache name.
     * @param schemaName Schema name.
     * @param cacheInfo Cache context info.
     * @throws IgniteCheckedException If failed.
     */
    public void registerCache(String cacheName, String schemaName, GridCacheContextInfo<?, ?> cacheInfo)
        throws IgniteCheckedException;

    /**
     * Unregisters cache.
     *
     * @param cacheInfo Cache context info.
     * @param rmvIdx If {@code true}, will remove index.
     * @throws IgniteCheckedException If failed to drop cache schema.
     */
    public void unregisterCache(GridCacheContextInfo cacheInfo, boolean rmvIdx) throws IgniteCheckedException;

    /**
     * Destroys a found (orphan) index which belongs to a stopped cache.
     *
     * @param page Root page.
     * @param indexName Index name.
     * @param grpId Group id which contains garbage.
     * @param pageMemory Page memory to work with.
     * @param removeId Global remove id.
     * @param reuseList Reuse list where free pages should be stored.
     * @param mvccEnabled Is mvcc enabled for group or not.
     * @throws IgniteCheckedException If failed.
     */
    public void destroyOrphanIndex(
        RootPage page,
        String indexName,
        int grpId,
        PageMemory pageMemory,
        final GridAtomicLong removeId,
        final ReuseList reuseList,
        boolean mvccEnabled) throws IgniteCheckedException;

    /**
     * Executes a DML update on a data node within a transaction.
     *
     * @param cctx Cache context.
     * @param ids Involved cache ids.
     * @param parts Partitions.
     * @param schema Schema name.
     * @param qry Query string.
     * @param params Query parameters.
     * @param flags Flags.
     * @param pageSize Fetch page size.
     * @param timeout Timeout.
     * @param topVer Topology version.
     * @param mvccSnapshot MVCC snapshot.
     * @param cancel Query cancel object.
     * @return Cursor over entries which are going to be changed.
     * @throws IgniteCheckedException If failed.
     */
    public UpdateSourceIterator<?> executeUpdateOnDataNodeTransactional(
        GridCacheContext<?, ?> cctx,
        int[] ids,
        int[] parts,
        String schema,
        String qry,
        Object[] params,
        int flags,
        int pageSize,
        int timeout,
        AffinityTopologyVersion topVer,
        MvccSnapshot mvccSnapshot,
        GridQueryCancel cancel
    ) throws IgniteCheckedException;

    /**
     * Registers type if it was not known before or updates it otherwise.
     *
     * @param cacheInfo Cache context info.
     * @param desc Type descriptor.
     * @param isSql {@code true} in case table has been created from SQL.
     * @throws IgniteCheckedException If failed.
     * @return {@code True} if type was registered, {@code false} if for some reason it was rejected.
     */
    public boolean registerType(GridCacheContextInfo cacheInfo, GridQueryTypeDescriptor desc,
        boolean isSql) throws IgniteCheckedException;

    /**
     * Jdbc parameters metadata of the specified query.
     *
     * @param schemaName the default schema name for query.
     * @param sql Sql query.
     * @return metadata describing all the parameters, even in case of multi-statement.
     * @throws IgniteSQLException if failed to get metadata.
     */
    public List<JdbcParameterMeta> parameterMetaData(String schemaName, SqlFieldsQuery sql) throws IgniteSQLException;

    /**
     * Metadata of the result set that is returned if specified query gets executed.
     *
     * @param schemaName the default schema name for query.
     * @param sql Sql query.
     * @return metadata or {@code null} if provided query is multi-statement or if it's not a SELECT statement.
     * @throws IgniteSQLException if failed to get metadata.
     */
    @Nullable public List<GridQueryFieldMetadata> resultMetaData(String schemaName, SqlFieldsQuery sql)
        throws IgniteSQLException;

    /**
     * Updates index. Note that key is unique for cache, so if cache contains multiple indexes
     * the key should be removed from indexes other than one being updated.
     *
     * @param cctx Cache context.
     * @param type Type descriptor.
     * @param row New row.
     * @param prevRow Previous row.
     * @param prevRowAvailable Whether previous row is available.
     * @throws IgniteCheckedException If failed.
     */
    public void store(GridCacheContext cctx,
        GridQueryTypeDescriptor type,
        CacheDataRow row,
        CacheDataRow prevRow,
        boolean prevRowAvailable) throws IgniteCheckedException;

    /**
     * Removes index entry by key.
     *
     * @param cctx Cache context.
     * @param type Type descriptor.
     * @param row Row.
     * @throws IgniteCheckedException If failed.
     */
    public void remove(GridCacheContext cctx, GridQueryTypeDescriptor type, CacheDataRow row)
        throws IgniteCheckedException;

    /**
     * Rebuild indexes for the given cache if necessary.
     *
     * @param cctx Cache context.
     * @return Future completed when index rebuild finished.
     */
    IgniteInternalFuture<?> rebuildIndexesFromHash(GridCacheContext cctx);

    /**
     * Mark as rebuild needed for the given cache.
     *
     * @param cctx Cache context.
     */
    void markAsRebuildNeeded(GridCacheContext cctx);

    /**
     * Returns backup filter.
     *
     * @param topVer Topology version.
     * @param parts Partitions.
     * @return Backup filter.
     */
    public IndexingQueryFilter backupFilter(AffinityTopologyVersion topVer, int[] parts);

    /**
     * Client disconnected callback.
     *
     * @param reconnectFut Reconnect future.
     */
    public void onDisconnected(IgniteFuture<?> reconnectFut);

    /**
     * Collect queries that already running more than specified duration.
     *
     * @param duration Duration to check.
     * @return Collection of long running queries.
     */
    public Collection<GridRunningQueryInfo> runningQueries(long duration);

    /**
     * Cancel specified queries.
     *
     * @param queries Queries ID's to cancel.
     */
    public void cancelQueries(Collection<Long> queries);

    /**
     * Cancels all executing queries.
     */
    public void onKernalStop();

    /**
     * Gets database schema from cache name.
     *
     * @param cacheName Cache name. {@code null} would be converted to an empty string.
     * @return Schema name. Should not be null since we should not fail for an invalid cache name.
     */
    public String schema(String cacheName);

    /**
     * Gets database schemas names.
     *
     * @return Schema names.
     */
    public Set<String> schemasNames();

    /**
     * Whether passed sql statement is single insert statement eligible for streaming.
     *
     * @param schemaName name of the schema.
     * @param sql sql statement.
     * @return {@code true} if the statement is a single INSERT eligible for streaming.
     * @throws SQLException if statement parsing failed.
     */
    public boolean isStreamableInsertStatement(String schemaName, SqlFieldsQuery sql) throws SQLException;

    /**
     * Return row cache cleaner.
     *
     * @param cacheGroupId Cache group id.
     * @return Row cache cleaner.
     */
    public GridQueryRowCacheCleaner rowCacheCleaner(int cacheGroupId);

    /**
     * Return context for registered cache info.
     *
     * @param cacheName Cache name.
     * @return Cache context for registered cache or {@code null} in case the cache has not been registered.
     */
    @Nullable public GridCacheContextInfo registeredCacheInfo(String cacheName);

    /**
     * Clear cache info and clear parser cache on call cache.close() on client node.
     *
     * @param cacheName Cache name to clear.
     */
    public void closeCacheOnClient(String cacheName);

    /**
     * Initialize table's cache context created for not started cache.
     *
     * @param ctx Cache context.
     * @throws IgniteCheckedException If failed.
     *
     * @return {@code true} If context has been initialized.
     */
    public boolean initCacheContext(GridCacheContext ctx) throws IgniteCheckedException;

    /**
     * Register SQL JMX beans.
     *
     * @param mbMgr Ignite MXBean manager.
     * @throws IgniteCheckedException On bean registration error.
     */
    void registerMxBeans(IgniteMBeansManager mbMgr) throws IgniteCheckedException;

    /**
     * Return table information filtered by given patterns.
     *
     * @param schemaNamePtrn Filter by schema name. Can be {@code null} to disable the filter.
     * @param tblNamePtrn Filter by table name. Can be {@code null} to disable the filter.
     * @param tblTypes Filter by table type. As of now supported only 'TABLES' and 'VIEWS'.
     *     Can be {@code null} or empty to disable the filter.
     *
     * @return Table information filtered by given patterns.
     */
    Collection<TableInformation> tablesInformation(String schemaNamePtrn, String tblNamePtrn, String... tblTypes);

    /**
     * Return column information filtered by given patterns.
     *
     * @param schemaNamePtrn Filter by schema name. Can be {@code null} to disable the filter.
     * @param tblNamePtrn Filter by table name. Can be {@code null} to disable the filter.
     * @param colNamePtrn Filter by column name. Can be {@code null} to disable the filter.
     *
     * @return Column information filtered by given patterns.
     */
    Collection<ColumnInformation> columnsInformation(String schemaNamePtrn, String tblNamePtrn, String colNamePtrn);

    /**
     * Return index size by schema, table and index name.
     *
     * @param schemaName Schema name.
     * @param tblName Table name.
     * @param idxName Index name.
     * @return Index size (number of elements) or {@code 0} if index not found.
     * @throws IgniteCheckedException If failed.
     */
    default long indexSize(String schemaName, String tblName, String idxName) throws IgniteCheckedException {
        return 0;
    }

    /**
     * Information about secondary indexes efficient (actual) inline size.
     *
     * @return Map with inline sizes. The key of entry is a full index name (with schema and table name), the value of
     *     entry is an inline size.
     */
    default Map<String, Integer> secondaryIndexesInlineSize() {
        return Collections.emptyMap();
    }

    /**
     * Defragment index partition.
     *
     * @param grpCtx Old group context.
     * @param newCtx New group context.
     * @param partPageMem Partition page memory.
     * @param mappingByPart Mapping page memory.
     * @param cpLock Defragmentation checkpoint read lock.
     * @param cancellationChecker Cancellation checker.
     * @param defragmentationThreadPool Thread pool for defragmentation.
     * @throws IgniteCheckedException If failed.
     */
    void defragment(
        CacheGroupContext grpCtx,
        CacheGroupContext newCtx,
        PageMemoryEx partPageMem,
        IntMap<LinkMap> mappingByPart,
        CheckpointTimeoutLock cpLock,
        Runnable cancellationChecker,
        IgniteThreadPoolExecutor defragmentationThreadPool
    ) throws IgniteCheckedException;
}
|
package xyz.rexlin600.sms.aliyun.core.request;
import cn.hutool.json.JSONObject;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Response model returned by the Aliyun SMS send API.
 *
 * @author hekunlin
 * @since 2020/9/2
 */
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class SmsResponse {

    /**
     * Delivery receipt id; can be passed to the QuerySendDetails API to look up
     * the concrete delivery status of this message.
     */
    private String bizId;

    /**
     * Request status code. "OK" means the request succeeded; other codes are
     * listed at https://help.aliyun.com/document_detail/101346.html?spm=a2c4g.11186623.2.14.3bf556e03nBWgO
     */
    private String code;

    /** Human-readable description of the status code. */
    private String message;

    /** Id of the request itself. */
    private String requestId;

    /**
     * Maps the raw JSON payload returned by Aliyun onto a {@link SmsResponse}.
     *
     * @param jsonObject raw response body
     * @return populated response object
     */
    public static SmsResponse build(JSONObject jsonObject) {
        SmsResponse response = new SmsResponse();
        response.setBizId(jsonObject.getStr("BizId"));
        response.setCode(jsonObject.getStr("Code"));
        response.setMessage(jsonObject.getStr("Message"));
        response.setRequestId(jsonObject.getStr("RequestId"));
        return response;
    }

    // -----------------------------------------------------------------------------------------------
    // EXTRA METHOD
    // -----------------------------------------------------------------------------------------------

    /**
     * Is success boolean.
     *
     * @return the boolean
     */
    public boolean isSuccess() {
        return code != null && code.equals("OK");
    }
}
|
/*
* Copyright 2011 by Graz University of Technology, Austria
* MOCCA has been developed by the E-Government Innovation Center EGIZ, a joint
* initiative of the Federal Chancellery Austria and Graz University of Technology.
*
* Licensed under the EUPL, Version 1.1 or - as soon they will be approved by
* the European Commission - subsequent versions of the EUPL (the "Licence");
* You may not use this work except in compliance with the Licence.
* You may obtain a copy of the Licence at:
* http://www.osor.eu/eupl/
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the Licence is distributed on an "AS IS" basis,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Licence for the specific language governing permissions and
* limitations under the Licence.
*
* This product combines work with different licenses. See the "NOTICE" text
* file for details on the various modules and licenses.
* The "NOTICE" text file is part of the distribution. Any derivative works
* that you distribute must include a readable copy of the "NOTICE" text file.
*/
package at.gv.egiz.smcc.cio;
/**
*
* @author clemens
*/
public abstract class CIO {

    /** CommonObjectAttributes: human-readable label of this object. */
    protected String label;

    /** CommonObjectAttributes: id of the authentication object protecting this object. */
    protected byte[] authId;

    /**
     * @return the authId
     */
    public byte[] getAuthId() {
        return authId;
    }

    /**
     * @return the label
     */
    public String getLabel() {
        return label;
    }

    /**
     * Deprecated: the javadoc tag previously existed without the annotation, so
     * callers never got a compiler warning; the annotation restores it.
     *
     * @deprecated
     * @param label the label to set
     */
    @Deprecated
    public void setLabel(String label) {
        this.label = label;
    }

    /**
     * @deprecated
     * @param authId the authId to set
     */
    @Deprecated
    public void setAuthId(byte[] authId) {
        this.authId = authId;
    }

    @Override
    public String toString() {
        return "CIO " + label;
    }
}
|
package ee.ituk.api.door;
import ee.ituk.api.door.domain.Door;
import ee.ituk.api.user.domain.User;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.util.List;
@Getter
@Setter
@NoArgsConstructor
public class UserDoorsDto {

    private Long id;
    private String firstName;
    private String lastName;
    private String cardNumber;
    private List<Door> doors;

    /**
     * Builds the DTO from the given user's identity fields plus the supplied list
     * of doors.
     */
    public UserDoorsDto(User user, List<Door> doors) {
        this.doors = doors;
        this.cardNumber = user.getCardNumber();
        this.lastName = user.getLastName();
        this.firstName = user.getFirstName();
        this.id = user.getId();
    }
}
|
package com.fsck.k9.ui.crypto;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.WorkerThread;
import com.fsck.k9.QMail;
import com.fsck.k9.autocrypt.AutocryptOperations;
import com.fsck.k9.crypto.MessageCryptoStructureDetector;
import com.fsck.k9.mail.Address;
import com.fsck.k9.mail.Body;
import com.fsck.k9.mail.BodyPart;
import com.fsck.k9.mail.Flag;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.Multipart;
import com.fsck.k9.mail.Part;
import com.fsck.k9.mail.internet.MessageExtractor;
import com.fsck.k9.mail.internet.MimeBodyPart;
import com.fsck.k9.mail.internet.MimeMultipart;
import com.fsck.k9.mail.internet.SizeAware;
import com.fsck.k9.mail.internet.TextBody;
import com.fsck.k9.mailstore.CryptoResultAnnotation;
import com.fsck.k9.mailstore.CryptoResultAnnotation.CryptoError;
import com.fsck.k9.mailstore.CryptoResultAnnotation.CryptoProviderType;
import com.fsck.k9.mailstore.MessageHelper;
import com.fsck.k9.mailstore.MimePartStreamParser;
import com.fsck.k9.mailstore.util.FileFactory;
import com.fsck.k9.provider.DecryptedFileProvider;
import org.apache.commons.io.IOUtils;
import org.openintents.openpgp.IOpenPgpService2;
import org.openintents.openpgp.OpenPgpDecryptionResult;
import org.openintents.openpgp.OpenPgpError;
import org.openintents.openpgp.OpenPgpSignatureResult;
import org.openintents.openpgp.util.OpenPgpApi;
import org.openintents.openpgp.util.OpenPgpApi.IOpenPgpCallback;
import org.openintents.openpgp.util.OpenPgpApi.IOpenPgpSinkResultCallback;
import org.openintents.openpgp.util.OpenPgpApi.OpenPgpDataSink;
import org.openintents.openpgp.util.OpenPgpApi.OpenPgpDataSource;
import org.openintents.openpgp.util.OpenPgpServiceConnection;
import org.openintents.smime.ISMimeService2;
import org.openintents.smime.SMimeDecryptionResult;
import org.openintents.smime.SMimeError;
import org.openintents.smime.SMimeSignatureResult;
import org.openintents.smime.util.SMimeApi;
import org.openintents.smime.util.SMimeApi.ISMimeSinkResultCallback;
import org.openintents.smime.util.SMimeApi.SMimeDataSink;
import org.openintents.smime.util.SMimeApi.SMimeDataSource;
import org.openintents.smime.util.SMimeServiceConnection;
import timber.log.Timber;
public class MessageCryptoHelper {
    // Sentinel used when an API response carries no result-code extra.
    private static final int INVALID_OPENPGP_RESULT_CODE = -1;
    private static final int INVALID_SMIME_RESULT_CODE = -1;
    // Passing null as the replacement part means "no replacement available".
    private static final MimeBodyPart NO_REPLACEMENT_PART = null;
    private static final int REQUEST_CODE_USER_INTERACTION = 124;

    private final Context context;
    // Provider packages captured at construction time; compared against current
    // settings by isConfiguredForOutdated*Provider().
    private final String openPgpProviderPackage;
    private final String sMimeProviderPackage;
    private final boolean hasOpenPgpProvider;
    private final boolean hasSMimeProvider;
    private final AutocryptOperations autocryptOperations;

    private final Object callbackLock = new Object();
    // Crypto parts still awaiting processing; nextStep() works these off one at a time.
    private final Deque<CryptoPart> partsToProcess = new ArrayDeque<>();

    @Nullable
    private MessageCryptoCallback callback;

    private Message currentMessage;
    private OpenPgpDecryptionResult cachedOpenPgpDecryptionResult;
    private SMimeDecryptionResult cachedSMimeDecryptionResult;
    // NOTE(review): names suggest these hold a result/intent back while no callback
    // is attached — confirm against the delivery code outside this excerpt.
    private MessageCryptoAnnotations queuedResult;
    private PendingIntent queuedPendingIntent;

    private MessageCryptoAnnotations messageAnnotations;
    private CryptoPart currentCryptoPart;
    private Intent currentCryptoResult;
    private Intent userInteractionResultIntent;
    private State state;
    private OpenPgpApi.CancelableBackgroundOperation cancelableOpenPgpBackgroundOperation;
    private SMimeApi.CancelableBackgroundOperation cancelableSMimeBackgroundOperation;
    private boolean isCancelled;
    private boolean processSignedOnly;

    private OpenPgpApi openPgpApi;
    private OpenPgpServiceConnection openPgpServiceConnection;
    private OpenPgpApiFactory openPgpApiFactory;

    private SMimeApi sMimeApi;
    private SMimeServiceConnection sMimeServiceConnection;
    private SMimeApiFactory sMimeApiFactory;
    /**
     * Creates a helper bound to the currently configured crypto providers.
     * Only the application context is retained, so no Activity can be leaked.
     */
    public MessageCryptoHelper(Context context, OpenPgpApiFactory openPgpApiFactory, SMimeApiFactory sMimeApiFactory,
            AutocryptOperations autocryptOperations) {
        this.context = context.getApplicationContext();
        this.autocryptOperations = autocryptOperations;
        this.openPgpApiFactory = openPgpApiFactory;
        this.sMimeApiFactory = sMimeApiFactory;
        // Snapshot the provider configuration; isConfiguredForOutdated*Provider()
        // later detects whether the user switched providers after construction.
        hasOpenPgpProvider = QMail.isOpenPgpProviderConfigured();
        openPgpProviderPackage = QMail.getOpenPgpProvider();
        hasSMimeProvider = QMail.isSMimeProviderConfigured();
        sMimeProviderPackage = QMail.getSMimeProvider();
    }
public boolean isConfiguredForOutdatedCryptoProvider() {
return isConfiguredForOutdatedOpenPgpProvider() || isConfiguredForOutdatedSMimeProvider();
}
public boolean isConfiguredForOutdatedOpenPgpProvider() {
return !openPgpProviderPackage.equals(QMail.getOpenPgpProvider());
}
public boolean isConfiguredForOutdatedSMimeProvider() {
return !sMimeProviderPackage.equals(QMail.getSMimeProvider());
}
    /**
     * Starts crypto processing of the given message; if this helper is already
     * working on a message, only re-attaches the callback and returns.
     *
     * @param cachedOpenPgpDecryptionResult previously obtained OpenPGP decryption
     *        result, if any; later attached to the decrypt/verify intent
     * @param processSignedOnly whether stand-alone signed (non-encrypted) parts
     *        should be processed as well
     */
    public void asyncStartOrResumeProcessingMessage(Message message, MessageCryptoCallback callback,
            OpenPgpDecryptionResult cachedOpenPgpDecryptionResult, SMimeDecryptionResult cachedSMimeDecryptionResult, boolean processSignedOnly) {
        if (this.currentMessage != null) {
            // Already processing: just hook the (possibly new) callback back up.
            reattachCallback(message, callback);
            return;
        }

        // Fresh start: reset state before kicking off the state machine.
        this.messageAnnotations = new MessageCryptoAnnotations();
        this.state = State.START;
        this.currentMessage = message;
        this.cachedOpenPgpDecryptionResult = cachedOpenPgpDecryptionResult;
        this.cachedSMimeDecryptionResult = cachedSMimeDecryptionResult;
        this.callback = callback;
        this.processSignedOnly = processSignedOnly;

        nextStep();
    }
private void findPartsForAutocryptPass() {
boolean otherCryptoPerformed = !messageAnnotations.isEmpty();
if (otherCryptoPerformed) {
return;
}
if (hasOpenPgpProvider && autocryptOperations.hasAutocryptHeader(currentMessage)) {
CryptoPart cryptoPart = new CryptoPart(CryptoProviderType.OPENPGP, CryptoPartType.PLAIN_AUTOCRYPT, currentMessage);
partsToProcess.add(cryptoPart);
}
}
private void findPartsForMultipartEncryptionPass() {
List<Part> encryptedParts = MessageCryptoStructureDetector.findMultipartEncryptedParts(currentMessage);
for (Part part : encryptedParts) {
if (!MessageHelper.isCompletePartAvailable(part)) {
addErrorAnnotation(part, CryptoError.OPENPGP_ENCRYPTED_BUT_INCOMPLETE, MessageHelper.createEmptyPart());
continue;
}
if (MessageCryptoStructureDetector.isMultipartEncryptedOpenPgpProtocol(part)) {
CryptoPart cryptoPart = new CryptoPart(CryptoProviderType.OPENPGP, CryptoPartType.PGP_ENCRYPTED, part);
partsToProcess.add(cryptoPart);
continue;
}
if (MessageCryptoStructureDetector.isMultipartEncryptedSMimeProtocol(part)) {
CryptoPart cryptoPart = new CryptoPart(CryptoProviderType.SMIME, CryptoPartType.SMIME_ENCRYPTED, part);
partsToProcess.add(cryptoPart);
continue;
}
addErrorAnnotation(part, CryptoError.ENCRYPTED_BUT_UNSUPPORTED, MessageHelper.createEmptyPart());
}
}
    /**
     * Scans the message for multipart/signed parts and queues complete, supported
     * ones for signature verification.
     *
     * When {@code processSignedOnly} is false, only "encapsulated" signatures
     * (parts for which an annotation with a replacement part already exists,
     * i.e. they came out of an earlier crypto pass) are still processed.
     */
    private void findPartsForMultipartSignaturePass() {
        List<Part> signedParts = MessageCryptoStructureDetector
                .findMultipartSignedParts(currentMessage, messageAnnotations);
        for (Part part : signedParts) {
            if (!processSignedOnly) {
                boolean isEncapsulatedSignature =
                        messageAnnotations.findKeyForAnnotationWithReplacementPart(part) != null;
                if (!isEncapsulatedSignature) {
                    continue;
                }
            }
            if (!MessageHelper.isCompletePartAvailable(part)) {
                // Part only partially downloaded: annotate the error, but keep the
                // signed content part (if present) so the text can still be shown.
                MimeBodyPart replacementPart = getMultipartSignedContentPartIfAvailable(part);
                addErrorAnnotation(part, CryptoError.OPENPGP_SIGNED_BUT_INCOMPLETE, replacementPart);
                continue;
            }
            if (MessageCryptoStructureDetector.isMultipartSignedOpenPgpProtocol(part)) {
                CryptoPart cryptoPart = new CryptoPart(CryptoProviderType.OPENPGP, CryptoPartType.PGP_SIGNED, part);
                partsToProcess.add(cryptoPart);
                continue;
            }
            if (MessageCryptoStructureDetector.isMultipartSignedSMimeProtocol(part)) {
                CryptoPart cryptoPart = new CryptoPart(CryptoProviderType.SMIME, CryptoPartType.SMIME_SIGNED, part);
                partsToProcess.add(cryptoPart);
                continue;
            }
            // Unknown signature protocol: record an error, falling back to the content part.
            MimeBodyPart replacementPart = getMultipartSignedContentPartIfAvailable(part);
            addErrorAnnotation(part, CryptoError.SIGNED_BUT_UNSUPPORTED, replacementPart);
        }
    }
    /**
     * Scans the message for inline-PGP parts and queues them for processing.
     * Inline crypto operates on the full message text, so parts of a partially
     * downloaded message are annotated as incomplete instead.
     */
    private void findPartsForPgpInlinePass() {
        List<Part> inlineParts = MessageCryptoStructureDetector.findPgpInlineParts(currentMessage);
        for (Part part : inlineParts) {
            // Skip inline-signed (non-encrypted) parts unless signed processing is enabled.
            if (!processSignedOnly && !MessageCryptoStructureDetector.isPartPgpInlineEncrypted(part)) {
                continue;
            }
            if (!currentMessage.getFlags().contains(Flag.X_DOWNLOADED_FULL)) {
                // Body is incomplete; pick the error matching the part's crypto kind.
                if (MessageCryptoStructureDetector.isPartPgpInlineEncrypted(part)) {
                    addErrorAnnotation(part, CryptoError.OPENPGP_ENCRYPTED_BUT_INCOMPLETE, NO_REPLACEMENT_PART);
                } else {
                    addErrorAnnotation(part, CryptoError.OPENPGP_SIGNED_BUT_INCOMPLETE, NO_REPLACEMENT_PART);
                }
                continue;
            }
            CryptoPart cryptoPart = new CryptoPart(CryptoProviderType.OPENPGP, CryptoPartType.PGP_INLINE, part);
            partsToProcess.add(cryptoPart);
        }
    }
private void addErrorAnnotation(Part part, CryptoError error, MimeBodyPart replacementPart) {
CryptoResultAnnotation annotation = CryptoResultAnnotation.createErrorAnnotation(error, replacementPart);
messageAnnotations.put(part, annotation);
}
    /**
     * Drives the processing state machine: advances passes until there are parts
     * to process (or processing is finished), lazily binds the provider services,
     * then processes the head of the queue.
     *
     * Note the current part is only peeked here; presumably it is removed from
     * the queue once its processing completes elsewhere — confirm outside this excerpt.
     */
    private void nextStep() {
        if (isCancelled) {
            return;
        }

        // Run passes until one yields work or the FINISHED state is reached.
        while (state != State.FINISHED && partsToProcess.isEmpty()) {
            findPartsForNextPass();
        }
        if (state == State.FINISHED) {
            callbackReturnResult();
            return;
        }

        // Bind services on demand; onBound() re-enters nextStep() when ready.
        if (hasOpenPgpProvider && !isBoundToOpenPgpProviderService()) {
            connectToOpenPgpProviderService();
            return;
        }
        if (hasSMimeProvider && !isBoundToSMimeProviderService()) {
            connectToSMimeProviderService();
            return;
        }

        currentCryptoPart = partsToProcess.peekFirst();
        if (currentCryptoPart.type == CryptoPartType.PLAIN_AUTOCRYPT) {
            processAutocryptHeaderForCurrentPart();
        } else {
            decryptOrVerifyCurrentPart();
        }
    }
private boolean isBoundToOpenPgpProviderService() {
return openPgpApi != null;
}
private boolean isBoundToSMimeProviderService() {
return sMimeApi != null;
}
    /**
     * Asynchronously binds to the configured OpenPGP provider; on success creates
     * the API wrapper and resumes processing via nextStep().
     */
    private void connectToOpenPgpProviderService() {
        openPgpServiceConnection = new OpenPgpServiceConnection(context, openPgpProviderPackage,
                new OpenPgpServiceConnection.OnBound() {
                    @Override
                    public void onBound(IOpenPgpService2 service) {
                        openPgpApi = openPgpApiFactory.createOpenPgpApi(context, service);
                        nextStep();
                    }

                    @Override
                    public void onError(Exception e) {
                        // TODO actually handle (hand to ui, offer retry?)
                        Timber.e(e, "Couldn't connect to OpenPgpService");
                    }
                });
        openPgpServiceConnection.bindToService();
    }
/**
 * Asynchronously binds to the configured S/MIME provider service. Mirrors
 * connectToOpenPgpProviderService(): success resumes processing via nextStep(),
 * failure is only logged (see TODO below).
 */
private void connectToSMimeProviderService() {
    sMimeServiceConnection = new SMimeServiceConnection(context, sMimeProviderPackage,
            new SMimeServiceConnection.OnBound() {
                @Override
                public void onBound(ISMimeService2 service) {
                    sMimeApi = sMimeApiFactory.createSMimeApi(context, service);
                    // Resume the state machine now that the provider is available.
                    nextStep();
                }
                @Override
                public void onError(Exception e) {
                    // TODO actually handle (hand to ui, offer retry?)
                    Timber.e(e, "Couldn't connect to SMimeService");
                }
            });
    sMimeServiceConnection.bindToService();
}
/**
 * Starts (or resumes) the crypto operation for the current part. If a user interaction
 * just completed, its follow-up intent is consumed here (and cleared so it is used
 * exactly once); otherwise a fresh decrypt/verify intent is built.
 */
private void decryptOrVerifyCurrentPart() {
    Intent apiIntent = userInteractionResultIntent;
    userInteractionResultIntent = null;
    if (apiIntent == null) {
        apiIntent = getDecryptVerifyIntent();
    }
    decryptVerify(apiIntent);
}
/**
 * Builds the provider-specific decrypt/verify intent for the current part.
 *
 * @return intent for the OpenPGP or S/MIME provider, depending on the part's provider type
 * @throws IllegalArgumentException if the provider type is not handled
 */
@NonNull
private Intent getDecryptVerifyIntent() {
    switch (currentCryptoPart.providerType) {
        case OPENPGP:
            return getOpenPgpDecryptVerifyIntent();
        case SMIME:
            return getSMimeDecryptVerifyIntent();
        default:
            // Include the unexpected value in the message so the failure is diagnosable
            // from a crash log (previously this threw with no message at all).
            throw new IllegalArgumentException("Unhandled crypto provider type: " + currentCryptoPart.providerType);
    }
}
/**
 * Builds the ACTION_DECRYPT_VERIFY intent for the OpenPGP provider, attaching sender
 * address, Autocrypt peer data, and any cached decryption result.
 */
@NonNull
private Intent getOpenPgpDecryptVerifyIntent() {
    Intent decryptIntent = new Intent(OpenPgpApi.ACTION_DECRYPT_VERIFY);
    Address[] from = currentMessage.getFrom();
    if (from.length > 0) {
        decryptIntent.putExtra(OpenPgpApi.EXTRA_SENDER_ADDRESS, from[0].getAddress());
        // we add this here independently of the autocrypt peer update, to allow picking up signing keys as gossip
        decryptIntent.putExtra(OpenPgpApi.EXTRA_AUTOCRYPT_PEER_ID, from[0].getAddress());
    }
    autocryptOperations.addAutocryptPeerUpdateToIntentIfPresent(currentMessage, decryptIntent);
    decryptIntent.putExtra(OpenPgpApi.EXTRA_SUPPORT_OVERRIDE_CRYPTO_WARNING, true);
    // Passing a cached result lets the provider skip redundant work on re-processing.
    decryptIntent.putExtra(OpenPgpApi.EXTRA_DECRYPTION_RESULT, cachedOpenPgpDecryptionResult);
    return decryptIntent;
}
/**
 * Builds the ACTION_DECRYPT_VERIFY intent for the S/MIME provider, attaching the sender
 * address and any cached decryption result.
 */
@NonNull
private Intent getSMimeDecryptVerifyIntent() {
    Intent decryptIntent = new Intent(SMimeApi.ACTION_DECRYPT_VERIFY);
    Address[] from = currentMessage.getFrom();
    if (from.length > 0) {
        decryptIntent.putExtra(SMimeApi.EXTRA_SENDER_ADDRESS, from[0].getAddress());
    }
    decryptIntent.putExtra(SMimeApi.EXTRA_SUPPORT_OVERRIDE_CRYPTO_WARNING, true);
    // Passing a cached result lets the provider skip redundant work on re-processing.
    decryptIntent.putExtra(SMimeApi.EXTRA_DECRYPTION_RESULT, cachedSMimeDecryptionResult);
    return decryptIntent;
}
/**
 * Dispatches the current part to the matching asynchronous provider call, based on its
 * structural type.
 */
private void decryptVerify(Intent apiIntent) {
    try {
        CryptoPartType cryptoPartType = currentCryptoPart.type;
        switch (cryptoPartType) {
            case PGP_SIGNED: {
                callAsyncOpenPgpDetachedVerify(apiIntent);
                return;
            }
            case PGP_ENCRYPTED: {
                callAsyncOpenPgpDecrypt(apiIntent);
                return;
            }
            case PGP_INLINE: {
                callAsyncOpenPgpInlineOperation(apiIntent);
                return;
            }
            case SMIME_SIGNED: {
                callAsyncSMimeDetachedVerify(apiIntent);
                return;
            }
            case SMIME_ENCRYPTED: {
                callAsyncSMimeDecrypt(apiIntent);
                return;
            }
            case PLAIN_AUTOCRYPT:
                // Autocrypt parts are routed to processAutocryptHeaderForCurrentPart()
                // by nextStep() and must never reach this method.
                throw new IllegalStateException("This part type must have been handled previously!");
        }
        throw new IllegalStateException("Unknown crypto part type: " + cryptoPartType);
    } catch (IOException e) {
        // NOTE(review): exceptions are only logged here; no annotation is added and the
        // part stays queued, so processing stalls — confirm this is intended.
        Timber.e(e, "IOException");
    } catch (MessagingException e) {
        Timber.e(e, "MessagingException");
    }
}
/**
 * Forwards Autocrypt key data found in a plain message's headers to the OpenPGP
 * provider as a peer update, then finishes this part. The provider call is
 * fire-and-forget; the part is completed immediately without waiting for the result.
 */
private void processAutocryptHeaderForCurrentPart() {
    Intent intent = new Intent(OpenPgpApi.ACTION_UPDATE_AUTOCRYPT_PEER);
    boolean hasInlineKeyData = autocryptOperations.addAutocryptPeerUpdateToIntentIfPresent(
            (Message) currentCryptoPart.part, intent);
    if (hasInlineKeyData) {
        Timber.d("Passing autocrypt data from plain mail to OpenPGP API");
        // We don't care about the result here, so we just call this fire-and-forget wait to minimize delay
        openPgpApi.executeApiAsync(intent, null, null, new IOpenPgpCallback() {
            @Override
            public void onReturn(Intent result) {
                Timber.d("Autocrypt update OK!");
            }
        });
    }
    onCryptoFinished();
}
/**
 * Starts the async decrypt/verify of an inline-PGP part. If no OpenPGP provider is
 * bound, records a "no provider" error annotation and finishes the part instead.
 * The returned operation handle is stored so cancelIfRunning() can abort it.
 */
private void callAsyncOpenPgpInlineOperation(Intent intent) throws IOException {
    if (openPgpApi == null) {
        CryptoResultAnnotation annotation =
                CryptoResultAnnotation.createErrorAnnotation(CryptoError.OPENPGP_ENCRYPTED_NO_PROVIDER, null);
        addCryptoResultAnnotationToMessage(annotation);
        onCryptoFinished();
        return;
    }
    OpenPgpDataSource dataSource = getDataSourceForOpenPgpEncryptedOrInlineData();
    OpenPgpDataSink<MimeBodyPart> dataSink = getDataSinkForOpenPgpDecryptedInlineData();
    cancelableOpenPgpBackgroundOperation = openPgpApi.executeApiAsync(intent, dataSource, dataSink,
            new IOpenPgpSinkResultCallback<MimeBodyPart>() {
                @Override
                public void onProgress(int current, int max) {
                    Timber.d("received progress status: %d / %d", current, max);
                    callbackProgress(current, max);
                }
                @Override
                public void onReturn(Intent result, MimeBodyPart bodyPart) {
                    cancelableOpenPgpBackgroundOperation = null;
                    currentCryptoResult = result;
                    onCryptoOperationReturned(bodyPart);
                }
            });
}
/**
 * Cancels processing: detaches the callback, flags cancellation (checked by nextStep()
 * and deliverResult()), and aborts any in-flight provider operation. Safe to call
 * whether or not an operation is currently running.
 */
public void cancelIfRunning() {
    detachCallback();
    isCancelled = true;
    if (cancelableOpenPgpBackgroundOperation != null) {
        cancelableOpenPgpBackgroundOperation.cancelOperation();
    }
    if (cancelableSMimeBackgroundOperation != null) {
        cancelableSMimeBackgroundOperation.cancelOperation();
    }
}
/**
 * Starts the async decryption of a PGP/MIME encrypted part. If no OpenPGP provider is
 * bound, records a "no provider" error annotation and finishes the part instead.
 * The returned operation handle is stored so cancelIfRunning() can abort it.
 */
private void callAsyncOpenPgpDecrypt(Intent intent) throws IOException {
    if (openPgpApi == null) {
        CryptoResultAnnotation annotation =
                CryptoResultAnnotation.createErrorAnnotation(CryptoError.OPENPGP_ENCRYPTED_NO_PROVIDER, null);
        addCryptoResultAnnotationToMessage(annotation);
        onCryptoFinished();
        return;
    }
    OpenPgpDataSource dataSource = getDataSourceForOpenPgpEncryptedOrInlineData();
    OpenPgpDataSink<MimeBodyPart> openPgpDataSink = getDataSinkForOpenPgpDecryptedData();
    cancelableOpenPgpBackgroundOperation = openPgpApi.executeApiAsync(intent, dataSource, openPgpDataSink,
            new IOpenPgpSinkResultCallback<MimeBodyPart>() {
                @Override
                public void onReturn(Intent result, MimeBodyPart decryptedPart) {
                    cancelableOpenPgpBackgroundOperation = null;
                    currentCryptoResult = result;
                    onCryptoOperationReturned(decryptedPart);
                }
                @Override
                public void onProgress(int current, int max) {
                    Timber.d("received progress status: %d / %d", current, max);
                    callbackProgress(current, max);
                }
            });
}
/**
 * Starts the async verification of a multipart/signed part with a detached PGP
 * signature. If no OpenPGP provider is bound, annotates the part with a "no provider"
 * error (keeping the signed content as a displayable replacement) and finishes it.
 */
private void callAsyncOpenPgpDetachedVerify(Intent intent) throws IOException, MessagingException {
    if (openPgpApi == null) {
        MimeBodyPart replacementPart = getMultipartSignedContentPartIfAvailable(currentCryptoPart.part);
        CryptoResultAnnotation annotation =
                CryptoResultAnnotation.createErrorAnnotation(CryptoError.OPENPGP_SIGNED_NO_PROVIDER, replacementPart);
        addCryptoResultAnnotationToMessage(annotation);
        onCryptoFinished();
        return;
    }
    OpenPgpDataSource dataSource = getDataSourceForOpenPgpSignedData(currentCryptoPart.part);
    byte[] signatureData = MessageCryptoStructureDetector.getSignatureData(currentCryptoPart.part);
    intent.putExtra(OpenPgpApi.EXTRA_DETACHED_SIGNATURE, signatureData);
    // NOTE(review): unlike the decrypt calls, the operation handle is not stored in
    // cancelableOpenPgpBackgroundOperation here, so cancelIfRunning() cannot abort a
    // detached verify — confirm whether this is intentional.
    openPgpApi.executeApiAsync(intent, dataSource, new IOpenPgpSinkResultCallback<Void>() {
        @Override
        public void onReturn(Intent result, Void dummy) {
            cancelableOpenPgpBackgroundOperation = null;
            currentCryptoResult = result;
            onCryptoOperationReturned(null);
        }
        @Override
        public void onProgress(int current, int max) {
            Timber.d("received progress status: %d / %d", current, max);
            callbackProgress(current, max);
        }
    });
}
/**
 * Starts the async decryption of an S/MIME encrypted part. If no S/MIME provider is
 * bound, records a "no provider" error annotation and finishes the part instead.
 * The returned operation handle is stored so cancelIfRunning() can abort it.
 */
private void callAsyncSMimeDecrypt(Intent intent) throws IOException {
    if (sMimeApi == null) {
        CryptoResultAnnotation annotation =
                CryptoResultAnnotation.createErrorAnnotation(CryptoError.SMIME_ENCRYPTED_NO_PROVIDER, null);
        addCryptoResultAnnotationToMessage(annotation);
        onCryptoFinished();
        return;
    }
    SMimeDataSource dataSource = getDataSourceForSMimeEncryptedOrInlineData();
    SMimeDataSink<MimeBodyPart> sMimeDataSink = getDataSinkForSMimeDecryptedData();
    cancelableSMimeBackgroundOperation = sMimeApi.executeApiAsync(intent, dataSource, sMimeDataSink,
            new ISMimeSinkResultCallback<MimeBodyPart>() {
                @Override
                public void onReturn(Intent result, MimeBodyPart decryptedPart) {
                    cancelableSMimeBackgroundOperation = null;
                    currentCryptoResult = result;
                    onCryptoOperationReturned(decryptedPart);
                }
                @Override
                public void onProgress(int current, int max) {
                    Timber.d("received progress status: %d / %d", current, max);
                    callbackProgress(current, max);
                }
            });
}
/**
 * Starts the async verification of a multipart/signed part with a detached S/MIME
 * signature. If no S/MIME provider is bound, annotates the part with a "no provider"
 * error (keeping the signed content as a displayable replacement) and finishes it.
 */
private void callAsyncSMimeDetachedVerify(Intent intent) throws IOException, MessagingException {
    if (sMimeApi == null) {
        MimeBodyPart replacementPart = getMultipartSignedContentPartIfAvailable(currentCryptoPart.part);
        CryptoResultAnnotation annotation =
                CryptoResultAnnotation.createErrorAnnotation(CryptoError.SMIME_SIGNED_NO_PROVIDER, replacementPart);
        addCryptoResultAnnotationToMessage(annotation);
        onCryptoFinished();
        return;
    }
    SMimeDataSource dataSource = getDataSourceForSMimeSignedData(currentCryptoPart.part);
    byte[] signatureData = MessageCryptoStructureDetector.getSignatureData(currentCryptoPart.part);
    intent.putExtra(SMimeApi.EXTRA_DETACHED_SIGNATURE, signatureData);
    // NOTE(review): the operation handle is not stored in cancelableSMimeBackgroundOperation
    // here, so cancelIfRunning() cannot abort a detached verify — confirm intent.
    sMimeApi.executeApiAsync(intent, dataSource, new ISMimeSinkResultCallback<Void>() {
        @Override
        public void onReturn(Intent result, Void dummy) {
            cancelableSMimeBackgroundOperation = null;
            currentCryptoResult = result;
            onCryptoOperationReturned(null);
        }
        @Override
        public void onProgress(int current, int max) {
            Timber.d("received progress status: %d / %d", current, max);
            callbackProgress(current, max);
        }
    });
}
/**
 * Sink that wraps the decrypted output of an inline-PGP part into a text/plain MIME
 * body part. Returns null if assembling the MIME part fails (the failure is logged).
 */
private OpenPgpDataSink<MimeBodyPart> getDataSinkForOpenPgpDecryptedInlineData() {
    return new OpenPgpDataSink<MimeBodyPart>() {
        @Override
        public MimeBodyPart processData(InputStream is) throws IOException {
            try {
                ByteArrayOutputStream decryptedByteOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(is, decryptedByteOutputStream);
                // Decode explicitly as UTF-8 instead of the platform default charset,
                // so the result does not depend on the JVM's configured encoding.
                // (UnsupportedEncodingException is an IOException and propagates.)
                TextBody body = new TextBody(decryptedByteOutputStream.toString("UTF-8"));
                return new MimeBodyPart(body, "text/plain");
            } catch (MessagingException e) {
                Timber.e(e, "MessagingException");
            }
            return null;
        }
    };
}
/**
 * Source that streams the signed content (first child of the multipart/signed part) to
 * the crypto provider for detached-signature verification. MessagingExceptions while
 * writing are logged, leaving the stream truncated.
 */
private OpenPgpDataSource getDataSourceForOpenPgpSignedData(final Part signedPart) throws IOException {
    return new OpenPgpDataSource() {
        @Override
        public void writeTo(OutputStream os) throws IOException {
            try {
                Multipart multipartSignedMultipart = (Multipart) signedPart.getBody();
                // Index 0 is the signed content; the detached signature is passed
                // separately via EXTRA_DETACHED_SIGNATURE.
                BodyPart signatureBodyPart = multipartSignedMultipart.getBodyPart(0);
                Timber.d("signed data type: %s", signatureBodyPart.getMimeType());
                signatureBodyPart.writeTo(os);
            } catch (MessagingException e) {
                Timber.e(e, "Exception while writing message to crypto provider");
            }
        }
    };
}
/**
 * Source that streams the ciphertext of the current part to the OpenPGP provider:
 * for PGP/MIME encrypted parts, the payload (second child of the multipart); for
 * inline PGP, the armored text itself. Also reports the payload size for progress
 * display when the body is size-aware.
 */
private OpenPgpDataSource getDataSourceForOpenPgpEncryptedOrInlineData() throws IOException {
    return new OpenPgpApi.OpenPgpDataSource() {
        @Override
        public Long getSizeForProgress() {
            Part part = currentCryptoPart.part;
            CryptoPartType cryptoPartType = currentCryptoPart.type;
            Body body;
            if (cryptoPartType == CryptoPartType.PGP_ENCRYPTED) {
                Multipart multipartEncryptedMultipart = (Multipart) part.getBody();
                BodyPart encryptionPayloadPart = multipartEncryptedMultipart.getBodyPart(1);
                body = encryptionPayloadPart.getBody();
            } else if (cryptoPartType == CryptoPartType.PGP_INLINE) {
                body = part.getBody();
            } else {
                throw new IllegalStateException("part to stream must be encrypted or inline!");
            }
            if (body instanceof SizeAware) {
                return ((SizeAware) body).getSize();
            }
            // Unknown size: the provider shows indeterminate progress.
            return null;
        }
        @Override
        @WorkerThread
        public void writeTo(OutputStream os) throws IOException {
            try {
                Part part = currentCryptoPart.part;
                CryptoPartType cryptoPartType = currentCryptoPart.type;
                if (cryptoPartType == CryptoPartType.PGP_ENCRYPTED) {
                    Multipart multipartEncryptedMultipart = (Multipart) part.getBody();
                    BodyPart encryptionPayloadPart = multipartEncryptedMultipart.getBodyPart(1);
                    Body encryptionPayloadBody = encryptionPayloadPart.getBody();
                    encryptionPayloadBody.writeTo(os);
                } else if (cryptoPartType == CryptoPartType.PGP_INLINE) {
                    String text = MessageExtractor.getTextFromPart(part);
                    // Encode explicitly as UTF-8 instead of the platform default
                    // charset, so behavior is consistent across configurations.
                    os.write(text.getBytes("UTF-8"));
                } else {
                    throw new IllegalStateException("part to stream must be encrypted or inline!");
                }
            } catch (MessagingException e) {
                Timber.e(e, "MessagingException while writing message to crypto provider");
            }
        }
    };
}
/**
 * Sink that parses the decrypted PGP/MIME stream into a MimeBodyPart, spooling large
 * attachments through DecryptedFileProvider's file factory. Returns null on parse
 * failure (logged only).
 */
private OpenPgpDataSink<MimeBodyPart> getDataSinkForOpenPgpDecryptedData() throws IOException {
    return new OpenPgpDataSink<MimeBodyPart>() {
        @Override
        @WorkerThread
        public MimeBodyPart processData(InputStream is) throws IOException {
            try {
                FileFactory fileFactory =
                        DecryptedFileProvider.getFileFactory(context);
                return MimePartStreamParser.parse(fileFactory, is);
            } catch (MessagingException e) {
                Timber.e(e, "Something went wrong while parsing the decrypted MIME part");
                //TODO: pass error to main thread and display error message to user
                return null;
            }
        }
    };
}
/**
 * Sink that wraps decrypted inline S/MIME output into a text/plain MIME body part.
 * Returns null if assembling the MIME part fails (the failure is logged).
 */
private SMimeDataSink<MimeBodyPart> getDataSinkForSMimeDecryptedInlineData() {
    return new SMimeDataSink<MimeBodyPart>() {
        @Override
        public MimeBodyPart processData(InputStream is) throws IOException {
            try {
                ByteArrayOutputStream decryptedByteOutputStream = new ByteArrayOutputStream();
                IOUtils.copy(is, decryptedByteOutputStream);
                // Decode explicitly as UTF-8 instead of the platform default charset,
                // so the result does not depend on the JVM's configured encoding.
                // (UnsupportedEncodingException is an IOException and propagates.)
                TextBody body = new TextBody(decryptedByteOutputStream.toString("UTF-8"));
                return new MimeBodyPart(body, "text/plain");
            } catch (MessagingException e) {
                Timber.e(e, "MessagingException");
            }
            return null;
        }
    };
}
/**
 * Source that streams the signed content (first child of the multipart/signed part) to
 * the S/MIME provider for detached-signature verification. MessagingExceptions while
 * writing are logged, leaving the stream truncated.
 */
private SMimeDataSource getDataSourceForSMimeSignedData(final Part signedPart) throws IOException {
    return new SMimeDataSource() {
        @Override
        public void writeTo(OutputStream os) throws IOException {
            try {
                Multipart multipartSignedMultipart = (Multipart) signedPart.getBody();
                // Index 0 is the signed content; the detached signature is passed
                // separately via EXTRA_DETACHED_SIGNATURE.
                BodyPart signatureBodyPart = multipartSignedMultipart.getBodyPart(0);
                Timber.d("signed data type: %s", signatureBodyPart.getMimeType());
                signatureBodyPart.writeTo(os);
            } catch (MessagingException e) {
                Timber.e(e, "Exception while writing message to crypto provider");
            }
        }
    };
}
/**
 * Source that streams the ciphertext payload (second child of the encrypted multipart)
 * of an S/MIME encrypted part to the provider, and reports its size for progress
 * display when the body is size-aware. Despite the name, only SMIME_ENCRYPTED is
 * supported here; any other type throws.
 */
private SMimeDataSource getDataSourceForSMimeEncryptedOrInlineData() throws IOException {
    return new SMimeApi.SMimeDataSource() {
        @Override
        public Long getSizeForProgress() {
            Part part = currentCryptoPart.part;
            CryptoPartType cryptoPartType = currentCryptoPart.type;
            Body body;
            if (cryptoPartType == CryptoPartType.SMIME_ENCRYPTED) {
                Multipart multipartEncryptedMultipart = (Multipart) part.getBody();
                BodyPart encryptionPayloadPart = multipartEncryptedMultipart.getBodyPart(1);
                body = encryptionPayloadPart.getBody();
            } else {
                throw new IllegalStateException("part to stream must be encrypted or inline!");
            }
            if (body instanceof SizeAware) {
                return ((SizeAware) body).getSize();
            }
            // Unknown size: the provider shows indeterminate progress.
            return null;
        }
        @Override
        @WorkerThread
        public void writeTo(OutputStream os) throws IOException {
            try {
                Part part = currentCryptoPart.part;
                CryptoPartType cryptoPartType = currentCryptoPart.type;
                if (cryptoPartType == CryptoPartType.SMIME_ENCRYPTED) {
                    Multipart multipartEncryptedMultipart = (Multipart) part.getBody();
                    BodyPart encryptionPayloadPart = multipartEncryptedMultipart.getBodyPart(1);
                    Body encryptionPayloadBody = encryptionPayloadPart.getBody();
                    encryptionPayloadBody.writeTo(os);
                } else {
                    throw new IllegalStateException("part to stream must be encrypted or inline!");
                }
            } catch (MessagingException e) {
                Timber.e(e, "MessagingException while writing message to crypto provider");
            }
        }
    };
}
/**
 * Sink that parses the decrypted S/MIME stream into a MimeBodyPart, spooling large
 * attachments through DecryptedFileProvider's file factory. Returns null on parse
 * failure (logged only).
 */
private SMimeDataSink<MimeBodyPart> getDataSinkForSMimeDecryptedData() throws IOException {
    return new SMimeDataSink<MimeBodyPart>() {
        @Override
        @WorkerThread
        public MimeBodyPart processData(InputStream is) throws IOException {
            try {
                FileFactory fileFactory =
                        DecryptedFileProvider.getFileFactory(context);
                return MimePartStreamParser.parse(fileFactory, is);
            } catch (MessagingException e) {
                Timber.e(e, "Something went wrong while parsing the decrypted MIME part");
                //TODO: pass error to main thread and display error message to user
                return null;
            }
        }
    };
}
/**
 * Entry point for completed provider operations. Requires currentCryptoResult to have
 * been set by the async callback; clears it afterwards regardless of the outcome.
 */
private void onCryptoOperationReturned(MimeBodyPart decryptedPart) {
    if (currentCryptoResult == null) {
        Timber.e("Internal error: we should have a result here!");
        return;
    }
    try {
        handleCryptoOperationResult(decryptedPart);
    } finally {
        // Always reset, even if result handling throws, so a stale result can never
        // leak into the next operation.
        currentCryptoResult = null;
    }
}
/**
 * Routes the finished operation's result to the handler for the part's provider
 * (OpenPGP or S/MIME).
 */
private void handleCryptoOperationResult(MimeBodyPart outputPart) {
    CryptoProviderType providerType = currentCryptoPart.providerType;
    if (providerType == CryptoProviderType.OPENPGP) {
        handleOpenPgpOperationResult(outputPart);
    } else if (providerType == CryptoProviderType.SMIME) {
        handleSMimeOperationResult(outputPart);
    } else {
        throw new IllegalArgumentException();
    }
}
/**
 * Branches on the OpenPGP provider's result code: success, error, or a required user
 * interaction. A missing result code is logged and otherwise ignored.
 */
private void handleOpenPgpOperationResult(MimeBodyPart outputPart) {
    int resultCode = currentCryptoResult.getIntExtra(OpenPgpApi.RESULT_CODE, INVALID_OPENPGP_RESULT_CODE);
    Timber.d("OpenPGP API decryptVerify result code: %d", resultCode);
    switch (resultCode) {
        case INVALID_OPENPGP_RESULT_CODE: {
            Timber.e("Internal error: no result code!");
            break;
        }
        case OpenPgpApi.RESULT_CODE_USER_INTERACTION_REQUIRED: {
            handleOpenPgpUserInteractionRequest();
            break;
        }
        case OpenPgpApi.RESULT_CODE_ERROR: {
            handleOpenPgpOperationError();
            break;
        }
        case OpenPgpApi.RESULT_CODE_SUCCESS: {
            handleOpenPgpOperationSuccess(outputPart);
            break;
        }
    }
}
/**
 * Forwards the provider's PendingIntent to the UI so the user can resolve the required
 * interaction. The provider contract guarantees the intent is present; its absence is
 * treated as a programming error.
 */
private void handleOpenPgpUserInteractionRequest() {
    PendingIntent interactionIntent = currentCryptoResult.getParcelableExtra(OpenPgpApi.RESULT_INTENT);
    if (interactionIntent == null) {
        throw new AssertionError("Expecting PendingIntent on USER_INTERACTION_REQUIRED!");
    }
    callbackPendingIntent(interactionIntent);
}
/**
 * Handles RESULT_CODE_ERROR from the OpenPGP provider: logs the error and records a
 * failure annotation for the current part.
 */
private void handleOpenPgpOperationError() {
    OpenPgpError error = currentCryptoResult.getParcelableExtra(OpenPgpApi.RESULT_ERROR);
    // Guard against a misbehaving provider that signals an error without attaching
    // RESULT_ERROR; previously this line threw a NullPointerException in that case.
    Timber.w("OpenPGP API error: %s", error == null ? "(no error details supplied)" : error.getMessage());
    onCryptoOperationFailed(error);
}
/**
 * Handles RESULT_CODE_SUCCESS from the OpenPGP provider: collects decryption/signature
 * results and pending intents from the result intent into an annotation for this part.
 */
private void handleOpenPgpOperationSuccess(MimeBodyPart outputPart) {
    OpenPgpDecryptionResult decryptionResult =
            currentCryptoResult.getParcelableExtra(OpenPgpApi.RESULT_DECRYPTION);
    OpenPgpSignatureResult signatureResult =
            currentCryptoResult.getParcelableExtra(OpenPgpApi.RESULT_SIGNATURE);
    PendingIntent pendingIntent = currentCryptoResult.getParcelableExtra(OpenPgpApi.RESULT_INTENT);
    PendingIntent insecureWarningPendingIntent = currentCryptoResult.getParcelableExtra(OpenPgpApi.RESULT_INSECURE_DETAIL_INTENT);
    boolean overrideCryptoWarning = currentCryptoResult.getBooleanExtra(
            OpenPgpApi.RESULT_OVERRIDE_CRYPTO_WARNING, false);
    CryptoResultAnnotation resultAnnotation = CryptoResultAnnotation.createOpenPgpResultAnnotation(decryptionResult,
            signatureResult, pendingIntent, insecureWarningPendingIntent, outputPart, overrideCryptoWarning);
    onCryptoOperationSuccess(resultAnnotation);
}
/**
 * Branches on the S/MIME provider's result code: success, error, or a required user
 * interaction. A missing result code is logged and otherwise ignored.
 */
private void handleSMimeOperationResult(MimeBodyPart outputPart) {
    // BUG FIX: the default must be the S/MIME sentinel. It previously defaulted to
    // INVALID_OPENPGP_RESULT_CODE while the switch below tests INVALID_SMIME_RESULT_CODE,
    // so a response missing RESULT_CODE would fall through all cases silently (unless
    // the two sentinels happened to be equal).
    int resultCode = currentCryptoResult.getIntExtra(SMimeApi.RESULT_CODE, INVALID_SMIME_RESULT_CODE);
    Timber.d("S/MIME API decryptVerify result code: %d", resultCode);
    switch (resultCode) {
        case INVALID_SMIME_RESULT_CODE: {
            Timber.e("Internal error: no result code!");
            break;
        }
        case SMimeApi.RESULT_CODE_USER_INTERACTION_REQUIRED: {
            handleSMimeUserInteractionRequest();
            break;
        }
        case SMimeApi.RESULT_CODE_ERROR: {
            handleSMimeOperationError();
            break;
        }
        case SMimeApi.RESULT_CODE_SUCCESS: {
            handleSMimeOperationSuccess(outputPart);
            break;
        }
    }
}
/**
 * Forwards the S/MIME provider's PendingIntent to the UI so the user can resolve the
 * required interaction. The provider contract guarantees the intent is present; its
 * absence is treated as a programming error.
 */
private void handleSMimeUserInteractionRequest() {
    PendingIntent interactionIntent = currentCryptoResult.getParcelableExtra(SMimeApi.RESULT_INTENT);
    if (interactionIntent == null) {
        throw new AssertionError("Expecting PendingIntent on USER_INTERACTION_REQUIRED!");
    }
    callbackPendingIntent(interactionIntent);
}
/**
 * Handles RESULT_CODE_ERROR from the S/MIME provider: logs the error and records a
 * failure annotation for the current part.
 */
private void handleSMimeOperationError() {
    SMimeError error = currentCryptoResult.getParcelableExtra(SMimeApi.RESULT_ERROR);
    // Guard against a misbehaving provider that signals an error without attaching
    // RESULT_ERROR; previously this line threw a NullPointerException in that case.
    Timber.w("S/MIME API error: %s", error == null ? "(no error details supplied)" : error.getMessage());
    onCryptoOperationFailed(error);
}
/**
 * Handles RESULT_CODE_SUCCESS from the S/MIME provider: collects decryption/signature
 * results and pending intents from the result intent into an annotation for this part.
 */
private void handleSMimeOperationSuccess(MimeBodyPart outputPart) {
    SMimeDecryptionResult decryptionResult =
            currentCryptoResult.getParcelableExtra(SMimeApi.RESULT_DECRYPTION);
    SMimeSignatureResult signatureResult =
            currentCryptoResult.getParcelableExtra(SMimeApi.RESULT_SIGNATURE);
    PendingIntent pendingIntent = currentCryptoResult.getParcelableExtra(SMimeApi.RESULT_INTENT);
    PendingIntent insecureWarningPendingIntent = currentCryptoResult.getParcelableExtra(SMimeApi.RESULT_INSECURE_DETAIL_INTENT);
    boolean overrideCryptoWarning = currentCryptoResult.getBooleanExtra(
            SMimeApi.RESULT_OVERRIDE_CRYPTO_WARNING, false);
    CryptoResultAnnotation resultAnnotation = CryptoResultAnnotation.createSMimeResultAnnotation(decryptionResult,
            signatureResult, pendingIntent, insecureWarningPendingIntent, outputPart, overrideCryptoWarning);
    onCryptoOperationSuccess(resultAnnotation);
}
/**
 * Receives the outcome of a user-interaction PendingIntent launched by the UI. On OK,
 * the returned intent is stashed for decryptOrVerifyCurrentPart() and the state
 * machine resumes; on any other result the current part is annotated as canceled.
 */
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (isCancelled) {
        return;
    }
    if (requestCode != REQUEST_CODE_USER_INTERACTION) {
        throw new IllegalStateException("got an activity result that wasn't meant for us. this is a bug!");
    }
    if (resultCode == Activity.RESULT_OK) {
        userInteractionResultIntent = data;
        nextStep();
    } else {
        onCryptoOperationCanceled();
    }
}
/** Records a successful result annotation for the current part and advances the queue. */
private void onCryptoOperationSuccess(CryptoResultAnnotation resultAnnotation) {
    addCryptoResultAnnotationToMessage(resultAnnotation);
    onCryptoFinished();
}
/**
 * If {@code part} is the replacement content of an encapsulating annotated part (e.g.
 * a signed part nested inside an encrypted one) and this result carries a signature,
 * attach the result to the encapsulating part's annotation as an encapsulated result.
 */
private void propagateEncapsulatedSignedPart(CryptoResultAnnotation resultAnnotation, Part part) {
    Part encapsulatingPart = messageAnnotations.findKeyForAnnotationWithReplacementPart(part);
    if (encapsulatingPart != null && resultAnnotation.hasSignatureResult()) {
        // Look up the encapsulating annotation only after confirming the key exists;
        // previously the lookup ran unconditionally, including with a null key.
        CryptoResultAnnotation encapsulatingPartAnnotation = messageAnnotations.get(encapsulatingPart);
        CryptoResultAnnotation replacementAnnotation =
                encapsulatingPartAnnotation.withEncapsulatedResult(resultAnnotation);
        messageAnnotations.put(encapsulatingPart, replacementAnnotation);
    }
}
/**
 * Records a "canceled by user" annotation for the current part (if any) and advances
 * the queue.
 */
private void onCryptoOperationCanceled() {
    // there are weird states that get us here when we're not actually processing any part. just skip in that case
    // see https://github.com/k9mail/k-9/issues/1878
    if (currentCryptoPart != null) {
        CryptoResultAnnotation errorPart;
        switch (currentCryptoPart.providerType) {
            case OPENPGP:
                errorPart = CryptoResultAnnotation.createOpenPgpCanceledAnnotation();
                addCryptoResultAnnotationToMessage(errorPart);
                break;
            case SMIME:
                errorPart = CryptoResultAnnotation.createSMimeCanceledAnnotation();
                addCryptoResultAnnotationToMessage(errorPart);
                break;
        }
    }
    onCryptoFinished();
}
/**
 * Records a failure annotation for the current OpenPGP part and advances the queue.
 * For signed parts the (unverified) signed content is kept as a displayable
 * replacement; for all other part types a plain encryption-error annotation is used.
 */
private void onCryptoOperationFailed(OpenPgpError error) {
    boolean isDetachedSignature = currentCryptoPart.type == CryptoPartType.PGP_SIGNED;
    CryptoResultAnnotation annotation;
    if (isDetachedSignature) {
        annotation = CryptoResultAnnotation.createOpenPgpSignatureErrorAnnotation(
                error, getMultipartSignedContentPartIfAvailable(currentCryptoPart.part));
    } else {
        annotation = CryptoResultAnnotation.createOpenPgpEncryptionErrorAnnotation(error);
    }
    addCryptoResultAnnotationToMessage(annotation);
    onCryptoFinished();
}
/**
 * Records a failure annotation for the current S/MIME part and advances the queue.
 * For signed parts the (unverified) signed content is kept as a displayable
 * replacement; for all other part types a plain encryption-error annotation is used.
 */
private void onCryptoOperationFailed(SMimeError error) {
    boolean isDetachedSignature = currentCryptoPart.type == CryptoPartType.SMIME_SIGNED;
    CryptoResultAnnotation annotation;
    if (isDetachedSignature) {
        annotation = CryptoResultAnnotation.createSMimeSignatureErrorAnnotation(
                error, getMultipartSignedContentPartIfAvailable(currentCryptoPart.part));
    } else {
        annotation = CryptoResultAnnotation.createSMimeEncryptionErrorAnnotation(error);
    }
    addCryptoResultAnnotationToMessage(annotation);
    onCryptoFinished();
}
/**
 * Stores the annotation for the current part and, if this part is nested inside an
 * already-annotated part, propagates it upward as an encapsulated result.
 */
private void addCryptoResultAnnotationToMessage(CryptoResultAnnotation resultAnnotation) {
    Part part = currentCryptoPart.part;
    messageAnnotations.put(part, resultAnnotation);
    propagateEncapsulatedSignedPart(resultAnnotation, part);
}
/**
 * Completes the current part: removes it from the head of the queue and resumes the
 * state machine. Sanity-checks that the part being finished is actually the one at the
 * head of the queue (peeked, not polled, by nextStep()).
 */
private void onCryptoFinished() {
    boolean currentPartIsFirstInQueue = partsToProcess.peekFirst() == currentCryptoPart;
    if (!currentPartIsFirstInQueue) {
        throw new IllegalStateException(
                "Trying to remove part from queue that is not the currently processed one!");
    }
    if (currentCryptoPart != null) {
        partsToProcess.removeFirst();
        currentCryptoPart = null;
    } else {
        // Can happen via onCryptoOperationCanceled() when no part was in processing.
        Timber.e(new Throwable(), "Got to onCryptoFinished() with no part in processing!");
    }
    nextStep();
}
/**
 * Advances the pass state machine one step and queues the parts for that pass:
 * START -> ENCRYPTION (encrypted multiparts), then SIGNATURES_AND_INLINE (detached
 * signatures and inline PGP), then AUTOCRYPT (header key data), then FINISHED.
 * Ordering matters: decryption runs first so signed parts nested inside encrypted
 * ones are discovered in the later pass.
 */
private void findPartsForNextPass() {
    switch (state) {
        case START: {
            state = State.ENCRYPTION;
            findPartsForMultipartEncryptionPass();
            return;
        }
        case ENCRYPTION: {
            state = State.SIGNATURES_AND_INLINE;
            findPartsForMultipartSignaturePass();
            findPartsForPgpInlinePass();
            return;
        }
        case SIGNATURES_AND_INLINE: {
            state = State.AUTOCRYPT;
            findPartsForAutocryptPass();
            return;
        }
        case AUTOCRYPT: {
            state = State.FINISHED;
            return;
        }
        default: {
            throw new IllegalStateException("unhandled state");
        }
    }
}
/**
 * Releases processing state and unbinds from the crypto provider services. Called once
 * when the final result is handed to the callback.
 */
private void cleanupAfterProcessingFinished() {
    partsToProcess.clear();
    openPgpApi = null;
    if (openPgpServiceConnection != null) {
        openPgpServiceConnection.unbindFromService();
    }
    openPgpServiceConnection = null;
    // FIX: also release the S/MIME side. Previously only the OpenPGP connection was
    // unbound, leaving the S/MIME service connection bound (ServiceConnection leak).
    sMimeApi = null;
    if (sMimeServiceConnection != null) {
        sMimeServiceConnection.unbindFromService();
    }
    sMimeServiceConnection = null;
}
/**
 * Detaches the UI callback, e.g. across configuration changes. Results produced while
 * detached are queued and delivered on reattachCallback().
 */
public void detachCallback() {
    synchronized (callbackLock) {
        callback = null;
    }
}
/**
 * Reattaches a UI callback for the message currently being processed and immediately
 * delivers any result or pending intent that was queued while detached.
 *
 * @throws AssertionError if called with a different message than the one in progress
 */
private void reattachCallback(Message message, MessageCryptoCallback callback) {
    if (!message.equals(currentMessage)) {
        throw new AssertionError("Callback may only be reattached for the same message!");
    }
    synchronized (callbackLock) {
        this.callback = callback;
        boolean hasCachedResult = queuedResult != null || queuedPendingIntent != null;
        if (hasCachedResult) {
            Timber.d("Returning cached result or pending intent to reattached callback");
            deliverResult();
        }
    }
}
/** Queues a user-interaction PendingIntent and attempts delivery to the callback. */
private void callbackPendingIntent(PendingIntent pendingIntent) {
    synchronized (callbackLock) {
        queuedPendingIntent = pendingIntent;
        deliverResult();
    }
}
/**
 * Queues the final annotation set for delivery and releases processing resources.
 * Ownership of messageAnnotations transfers to queuedResult (the field is nulled so
 * it cannot be mutated after being handed out).
 */
private void callbackReturnResult() {
    synchronized (callbackLock) {
        cleanupAfterProcessingFinished();
        queuedResult = messageAnnotations;
        messageAnnotations = null;
        deliverResult();
    }
}
/** Forwards provider progress to the callback, if one is currently attached. */
private void callbackProgress(int current, int max) {
    synchronized (callbackLock) {
        if (callback != null) {
            callback.onCryptoHelperProgress(current, max);
        }
    }
}
// This method must only be called inside a synchronized(callbackLock) block!
/**
 * Delivers the queued result or pending intent to the attached callback, or keeps it
 * queued if no callback is attached. A delivered pending intent is cleared (one-shot);
 * queuedResult is intentionally retained so it can be re-delivered after a callback
 * reattach. No-op once cancelled.
 */
private void deliverResult() {
    if (isCancelled) {
        return;
    }
    if (callback == null) {
        Timber.d("Keeping crypto helper result in queue for later delivery");
        return;
    }
    if (queuedResult != null) {
        callback.onCryptoOperationsFinished(queuedResult);
    } else if (queuedPendingIntent != null) {
        callback.startPendingIntentForCryptoHelper(
                queuedPendingIntent.getIntentSender(), REQUEST_CODE_USER_INTERACTION, null, 0, 0, 0);
        queuedPendingIntent = null;
    } else {
        throw new IllegalStateException("deliverResult() called with no result!");
    }
}
/** Immutable work item: a MIME part paired with its crypto provider and structure type. */
private static class CryptoPart {
    public final CryptoProviderType providerType; // which provider handles this part (OpenPGP or S/MIME)
    public final CryptoPartType type;             // structural kind, drives the provider call
    public final Part part;                       // the MIME part to process
    CryptoPart(CryptoProviderType providerType, CryptoPartType type, Part part) {
        this.providerType = providerType;
        this.type = type;
        this.part = part;
    }
}
/** Structural classification of a crypto part; decides which provider call is made. */
private enum CryptoPartType {
    PGP_INLINE,      // PGP data embedded directly in a text body
    PGP_ENCRYPTED,   // encrypted multipart; payload is the second child part
    PGP_SIGNED,      // multipart/signed with a detached PGP signature
    PLAIN_AUTOCRYPT, // plain message carrying Autocrypt header key data
    SMIME_ENCRYPTED, // S/MIME encrypted multipart; payload is the second child part
    SMIME_SIGNED     // multipart/signed with a detached S/MIME signature
}
/**
 * For a multipart/signed part, returns its first child (the signed content) so it can
 * still be displayed when signature processing fails or no provider is available.
 * Returns NO_REPLACEMENT_PART for any other body structure.
 */
@Nullable
private static MimeBodyPart getMultipartSignedContentPartIfAvailable(Part part) {
    MimeBodyPart replacementPart = NO_REPLACEMENT_PART;
    Body body = part.getBody();
    if (body instanceof MimeMultipart) {
        // Reuse the body fetched above instead of calling part.getBody() a second time.
        MimeMultipart multipart = (MimeMultipart) body;
        if (multipart.getCount() >= 1) {
            replacementPart = (MimeBodyPart) multipart.getBodyPart(0);
        }
    }
    return replacementPart;
}
/** Processing passes, advanced in declaration order by findPartsForNextPass(). */
private enum State {
    START, ENCRYPTION, SIGNATURES_AND_INLINE, AUTOCRYPT, FINISHED
}
}
|
package com.atguigu.gmall.pms.entity;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
import java.util.Date;
import lombok.Data;
/**
 * Product review entity, mapped to the {@code pms_spu_comment} table.
 *
 * @author chen
 * @email cwj@atguigu.com
 * @date 2019-12-02 18:36:14
 */
@ApiModel
@Data
@TableName("pms_spu_comment")
public class SpuCommentEntity implements Serializable {
    private static final long serialVersionUID = 1L;
    /**
     * Primary key id
     */
    @TableId
    @ApiModelProperty(name = "id",value = "id")
    private Long id;
    /**
     * Referenced SKU id
     */
    @ApiModelProperty(name = "skuId",value = "sku_id")
    private Long skuId;
    /**
     * Referenced SPU id
     */
    @ApiModelProperty(name = "spuId",value = "spu_id")
    private Long spuId;
    /**
     * Product name
     */
    @ApiModelProperty(name = "spuName",value = "商品名字")
    private String spuName;
    /**
     * Member nickname
     */
    @ApiModelProperty(name = "memberNickName",value = "会员昵称")
    private String memberNickName;
    /**
     * Star rating
     */
    @ApiModelProperty(name = "star",value = "星级")
    private Integer star;
    /**
     * Member IP address
     */
    @ApiModelProperty(name = "memberIp",value = "会员ip")
    private String memberIp;
    /**
     * Creation time
     */
    @ApiModelProperty(name = "createTime",value = "创建时间")
    private Date createTime;
    /**
     * Display status [0 - hidden, 1 - shown]
     */
    @ApiModelProperty(name = "showStatus",value = "显示状态[0-不显示,1-显示]")
    private Integer showStatus;
    /**
     * Attribute combination at the time of purchase
     */
    @ApiModelProperty(name = "spuAttributes",value = "购买时属性组合")
    private String spuAttributes;
    /**
     * Number of likes
     */
    @ApiModelProperty(name = "likesCount",value = "点赞数")
    private Integer likesCount;
    /**
     * Number of replies
     */
    @ApiModelProperty(name = "replyCount",value = "回复数")
    private Integer replyCount;
    /**
     * Review images/videos [JSON data: [{type: file type, url: resource path}]]
     */
    @ApiModelProperty(name = "resources",value = "评论图片/视频[json数据;[{type:文件类型,url:资源路径}]]")
    private String resources;
    /**
     * Review content
     */
    @ApiModelProperty(name = "content",value = "内容")
    private String content;
    /**
     * User avatar
     */
    @ApiModelProperty(name = "memberIcon",value = "用户头像")
    private String memberIcon;
    /**
     * Comment type [0 - direct comment on the product, 1 - reply to a comment]
     */
    @ApiModelProperty(name = "commentType",value = "评论类型[0 - 对商品的直接评论,1 - 对评论的回复]")
    private Integer commentType;
}
|
package com.udacity.vehicles.client.prices;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.reactive.function.client.WebClient;
/**
 * Implements a class to interface with the Pricing Client for price data.
 */
@Component
public class PriceClient {
    private static final Logger log = LoggerFactory.getLogger(PriceClient.class);
    // WebClient injected pre-configured for the pricing service; the relative
    // "prices/{id}" path below resolves against its base URL.
    private final WebClient client;
    public PriceClient(WebClient pricing) {
        this.client = pricing;
    }
    // In a real-world application we'll want to add some resilience
    // to this method with retries/CB/failover capabilities
    // We may also want to cache the results so we don't need to
    // do a request every time
    /**
     * Gets a vehicle price from the pricing client, given vehicle ID.
     * @param vehicleId ID number of the vehicle for which to get the price
     * @return Currency and price of the requested vehicle,
     *   error message that the vehicle ID is invalid, or note that the
     *   service is down.
     */
    public String getPrice(Long vehicleId) {
        try {
            String path = "prices/" + vehicleId.toString();
            Price price = client
                    .get()
                    .uri(uriBuilder -> uriBuilder
                            .path(path)
                            .build()
                    )
                    // block(): deliberately synchronous — callers expect a plain String
                    .retrieve().bodyToMono(Price.class).block();
            return String.format("%s %s", price.getCurrency(), price.getPrice());
        } catch (Exception e) {
            // Any failure (service down, unknown ID, null/empty response body) lands
            // here and falls through to the placeholder return below.
            log.error("Unexpected error retrieving price for vehicle {}", vehicleId, e);
        }
        return "(consult price)";
    }
}
|
/*
* Copyright 2020 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.jmix.core.accesscontext;
/**
 * Marker interface for access contexts; declares no members.
 * <p>
 * NOTE(review): judging by the package name ({@code accesscontext}), implementations
 * carry the data describing a particular kind of access check — confirm against the
 * framework documentation.
 */
public interface AccessContext {
}
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.lang;
import com.intellij.codeInsight.daemon.LineMarkerInfo;
import com.intellij.codeInsight.daemon.impl.DaemonCodeAnalyzerImpl;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase;
import org.jetbrains.plugins.groovy.util.TestUtils;
import java.util.List;
/**
* @author Maxim.Medvedev
*/
public class GroovyLineMarkerTest extends LightCodeInsightFixtureTestCase {
    @Override
    protected String getBasePath() {
        return TestUtils.getTestDataPath() + "lineMarker/";
    }

    public void testInterface() throws Throwable {
        checkSingleGroovyFile(3);
    }

    public void testGStringMethodName() throws Throwable {
        checkSingleGroovyFile(3);
    }

    public void testStringMethodName() throws Throwable {
        checkSingleGroovyFile(3);
    }

    public void testAllGStringMethodName() throws Throwable {
        checkSingleGroovyFile(3);
    }

    public void testJavaToGroovy() throws Throwable {
        myFixture.configureByFiles(getTestName(false) + ".groovy", getTestName(false) + ".java");
        assertLineMarkerCount(1);
    }

    public void testGroovyToJava() throws Throwable {
        myFixture.configureByFiles(getTestName(false) + ".groovy", getTestName(false) + ".java");
        assertLineMarkerCount(2);
    }

    public void testJavaToGroovy2() throws Throwable {
        myFixture.configureByFiles("JavaToGroovy.java", "JavaToGroovy.groovy");
        assertLineMarkerCount(2);
    }

    public void testGroovyToJava2() throws Throwable {
        myFixture.configureByFiles("GroovyToJava.java", "GroovyToJava.groovy");
        assertLineMarkerCount(1);
    }

    /** Configures the Groovy file named after the test and checks its line marker count. */
    private void checkSingleGroovyFile(int expectedMarkerCount) throws Throwable {
        myFixture.configureByFile(getTestName(false) + ".groovy");
        assertLineMarkerCount(expectedMarkerCount);
    }

    /** Runs highlighting and asserts the number of line markers in the current document. */
    private void assertLineMarkerCount(int expectedCount) {
        final Editor editor = myFixture.getEditor();
        final Project project = myFixture.getProject();
        myFixture.doHighlighting();
        final List<LineMarkerInfo> infoList = DaemonCodeAnalyzerImpl.getLineMarkers(editor.getDocument(), project);
        assertEquals(expectedCount, infoList.size());
    }
}
|
package com.ruoyi.system.domain;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.common.annotation.Excel;
import com.ruoyi.common.core.domain.BaseEntity;
/**
 * Student information entity mapped to table {@code student}.
 *
 * @author K7L
 * @date 2022-03-12
 */
public class Student extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** Primary key. */
    private Long id;

    /** Student name (exported as the "姓名" Excel column). */
    @Excel(name = "姓名")
    private String sname;

    /** Gender (exported as the "性别" Excel column). */
    @Excel(name = "性别")
    private String sgender;

    /** City (exported as the "城市" Excel column). */
    @Excel(name = "城市")
    private String scity;

    public Long getId()
    {
        return id;
    }

    public void setId(Long id)
    {
        this.id = id;
    }

    public String getSname()
    {
        return sname;
    }

    public void setSname(String sname)
    {
        this.sname = sname;
    }

    public String getSgender()
    {
        return sgender;
    }

    public void setSgender(String sgender)
    {
        this.sgender = sgender;
    }

    public String getScity()
    {
        return scity;
    }

    public void setScity(String scity)
    {
        this.scity = scity;
    }

    @Override
    public String toString() {
        ToStringBuilder builder = new ToStringBuilder(this, ToStringStyle.MULTI_LINE_STYLE);
        builder.append("id", getId());
        builder.append("sname", getSname());
        builder.append("sgender", getSgender());
        builder.append("scity", getScity());
        return builder.toString();
    }
}
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.apache.lucene.util.Constants;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexingPressure;
import org.elasticsearch.indices.SystemIndices;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.LongSupplier;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
/**
 * Verifies that the {@code took} time reported in a {@link BulkResponse} is derived
 * from the bulk action's relative-time source: once with a fully controlled clock
 * (exact equality) and once with {@code System#nanoTime} (lower bound).
 */
public class TransportBulkActionTookTests extends ESTestCase {

    /** Shared across all tests in the class; created in beforeClass, terminated in afterClass. */
    private static ThreadPool threadPool;
    private ClusterService clusterService;

    @BeforeClass
    public static void beforeClass() {
        threadPool = new TestThreadPool("TransportBulkActionTookTests");
    }

    @AfterClass
    public static void afterClass() {
        ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
        threadPool = null;
    }

    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Single-node cluster with a random wire-compatible version for the local node.
        DiscoveryNode discoveryNode = new DiscoveryNode("node", ESTestCase.buildNewFakeTransportAddress(), Collections.emptyMap(),
                DiscoveryNodeRole.BUILT_IN_ROLES, VersionUtils.randomCompatibleVersion(random(), Version.CURRENT));
        clusterService = createClusterService(threadPool, discoveryNode);
    }

    @After
    public void tearDown() throws Exception {
        super.tearDown();
        clusterService.close();
    }

    /**
     * Builds a {@link TransportBulkAction} wired to a capturing transport.
     *
     * @param controlled when true, the action's relative-time supplier is {@code expected::get}
     *                   and executeBulk pins {@code expected} to 1,000,000 ns, so took is exact;
     *                   when false, the supplier is {@code System::nanoTime} and executeBulk
     *                   records at least one millisecond of real elapsed time into {@code expected}.
     * @param expected   out-parameter receiving the nanosecond value the test will compare against
     */
    private TransportBulkAction createAction(boolean controlled, AtomicLong expected) {
        CapturingTransport capturingTransport = new CapturingTransport();
        TransportService transportService = capturingTransport.createTransportService(clusterService.getSettings(), threadPool,
                TransportService.NOOP_TRANSPORT_INTERCEPTOR,
                boundAddress -> clusterService.localNode(), null, Collections.emptySet());
        transportService.start();
        transportService.acceptIncomingRequests();
        IndexNameExpressionResolver resolver = new Resolver();
        ActionFilters actionFilters = new ActionFilters(new HashSet<>());
        // Stub client: every action routed through it (e.g. index auto-creation) is
        // immediately answered with a CreateIndexResponse. The cast is unchecked by design.
        NodeClient client = new NodeClient(Settings.EMPTY, threadPool) {
            @Override
            public <Request extends ActionRequest, Response extends ActionResponse>
            void doExecute(ActionType<Response> action, Request request, ActionListener<Response> listener) {
                listener.onResponse((Response)new CreateIndexResponse(false, false, null));
            }
        };
        if (controlled) {
            return new TestTransportBulkAction(
                    threadPool,
                    transportService,
                    clusterService,
                    client,
                    actionFilters,
                    resolver,
                    null,
                    expected::get) {
                @Override
                void executeBulk(
                        Task task,
                        BulkRequest bulkRequest,
                        long startTimeNanos,
                        ActionListener<BulkResponse> listener,
                        AtomicArray<BulkItemResponse> responses,
                        Map<String, IndexNotFoundException> indicesThatCannotBeCreated) {
                    // Fixed 1 ms (in nanoseconds) so the reported took is fully deterministic.
                    expected.set(1000000);
                    super.executeBulk(task, bulkRequest, startTimeNanos, listener, responses, indicesThatCannotBeCreated);
                }
            };
        } else {
            return new TestTransportBulkAction(
                    threadPool,
                    transportService,
                    clusterService,
                    client,
                    actionFilters,
                    resolver,
                    null,
                    System::nanoTime) {
                @Override
                void executeBulk(
                        Task task,
                        BulkRequest bulkRequest,
                        long startTimeNanos,
                        ActionListener<BulkResponse> listener,
                        AtomicArray<BulkItemResponse> responses,
                        Map<String, IndexNotFoundException> indicesThatCannotBeCreated) {
                    // Guarantees the real clock advances measurably before the bulk runs.
                    long elapsed = spinForAtLeastOneMillisecond();
                    expected.set(elapsed);
                    super.executeBulk(task, bulkRequest, startTimeNanos, listener, responses, indicesThatCannotBeCreated);
                }
            };
        }
    }

    // test unit conversion with a controlled clock
    public void testTookWithControlledClock() throws Exception {
        runTestTook(true);
    }

    // test took advances with System#nanoTime
    public void testTookWithRealClock() throws Exception {
        runTestTook(false);
    }

    /**
     * Loads a bulk request fixture from the classpath, executes it against the action
     * built by {@link #createAction}, and checks the response's took time against
     * {@code expected} (exact for the controlled clock, lower bound for the real one).
     */
    private void runTestTook(boolean controlled) throws Exception {
        String bulkAction = copyToStringFromClasspath("/org/elasticsearch/action/bulk/simple-bulk.json");
        // translate Windows line endings (\r\n) to standard ones (\n)
        if (Constants.WINDOWS) {
            bulkAction = Strings.replace(bulkAction, "\r\n", "\n");
        }
        BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, XContentType.JSON);
        AtomicLong expected = new AtomicLong();
        TransportBulkAction action = createAction(controlled, expected);
        action.doExecute(null, bulkRequest, new ActionListener<BulkResponse>() {
            @Override
            public void onResponse(BulkResponse bulkItemResponses) {
                if (controlled) {
                    assertThat(
                            bulkItemResponses.getTook().getMillis(),
                            equalTo(TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS)));
                } else {
                    assertThat(
                            bulkItemResponses.getTook().getMillis(),
                            greaterThanOrEqualTo(TimeUnit.MILLISECONDS.convert(expected.get(), TimeUnit.NANOSECONDS)));
                }
            }
            @Override
            public void onFailure(Exception e) {
                // NOTE(review): failures are silently swallowed here, so a failing bulk
                // execution would not fail the test — confirm this is intended.
            }
        });
    }

    /** Resolver stub that returns the request's index names verbatim, with no expression resolution. */
    static class Resolver extends IndexNameExpressionResolver {
        Resolver() {
            super(new ThreadContext(Settings.EMPTY));
        }
        @Override
        public String[] concreteIndexNames(ClusterState state, IndicesRequest request) {
            return request.indices();
        }
    }

    /**
     * Test subclass exposing a pluggable relative-time provider and randomizing
     * the index auto-creation checks on every call.
     */
    static class TestTransportBulkAction extends TransportBulkAction {
        TestTransportBulkAction(
                ThreadPool threadPool,
                TransportService transportService,
                ClusterService clusterService,
                NodeClient client,
                ActionFilters actionFilters,
                IndexNameExpressionResolver indexNameExpressionResolver,
                AutoCreateIndex autoCreateIndex,
                LongSupplier relativeTimeProvider) {
            super(
                    threadPool,
                    transportService,
                    clusterService,
                    null,
                    client,
                    actionFilters,
                    indexNameExpressionResolver,
                    autoCreateIndex,
                    new IndexingPressure(Settings.EMPTY),
                    new SystemIndices(Map.of()),
                    relativeTimeProvider);
        }
        @Override
        boolean needToCheck() {
            return randomBoolean();
        }
        @Override
        boolean shouldAutoCreate(String index, ClusterState state) {
            return randomBoolean();
        }
    }
}
|
/*
* Copyright 2015 Waterloo Mobile Studio
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wms.opensource.shopfast.shopify;
/**
 * Shared Shopify-related constants.
 */
public class Constants {

    /** Number of products to request per page when paging product listings. */
    public static final int PRODUCTS_PER_PAGE = 25;

    /** Constants holder; not meant to be instantiated. */
    private Constants() {
    }
}
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.snapshots;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
/**
 * Integration tests for the file-system ({@code fs}) blob store repository,
 * running the shared {@link ESBlobStoreRepositoryIntegTestCase} suite against it.
 */
public class FsBlobStoreRepositoryIT extends ESBlobStoreRepositoryIntegTestCase {
    /**
     * Registers an {@code fs} snapshot repository under the given name.
     * Location, compression, and chunk size are randomized (chunk size between
     * 100 and 1000 bytes) so repeated runs exercise different configurations.
     */
    @Override
    protected void createTestRepository(String name) {
        assertAcked(client().admin().cluster().preparePutRepository(name)
            .setType("fs").setSettings(Settings.builder()
                .put("location", randomRepoPath())
                .put("compress", randomBoolean())
                .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
    }
}
|
/*
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jbatch.container.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.batch.operations.JobExecutionAlreadyCompleteException;
import javax.batch.operations.JobExecutionNotMostRecentException;
import javax.batch.operations.JobRestartException;
import javax.batch.operations.JobStartException;
import javax.batch.operations.NoSuchJobExecutionException;
import com.ibm.jbatch.container.IExecutionElementController;
import com.ibm.jbatch.container.context.impl.JobContextImpl;
import com.ibm.jbatch.container.exception.BatchContainerRuntimeException;
import com.ibm.jbatch.container.jobinstance.RuntimeFlowInSplitExecution;
import com.ibm.jbatch.container.jobinstance.RuntimeJobExecution;
import com.ibm.jbatch.container.services.IBatchKernelService;
import com.ibm.jbatch.container.servicesmanager.ServicesManager;
import com.ibm.jbatch.container.servicesmanager.ServicesManagerImpl;
import com.ibm.jbatch.container.status.ExecutionStatus;
import com.ibm.jbatch.container.status.ExtendedBatchStatus;
import com.ibm.jbatch.container.status.SplitExecutionStatus;
import com.ibm.jbatch.container.util.BatchFlowInSplitWorkUnit;
import com.ibm.jbatch.container.util.BatchParallelWorkUnit;
import com.ibm.jbatch.container.util.FlowInSplitBuilderConfig;
import com.ibm.jbatch.jsl.model.Flow;
import com.ibm.jbatch.jsl.model.JSLJob;
import com.ibm.jbatch.jsl.model.Split;
/**
 * Controller for a JSL {@code <split>}: builds one sub-job per contained flow,
 * runs them in parallel through the batch kernel, waits for all of them to
 * complete, and aggregates their terminating statuses into a single
 * {@link SplitExecutionStatus} for the split.
 */
public class SplitControllerImpl implements IExecutionElementController {

    private final static String sourceClass = SplitControllerImpl.class.getName();
    private final static Logger logger = Logger.getLogger(sourceClass);

    private final RuntimeJobExecution jobExecution;

    /** Work units for the flows of this split; volatile because stop() may read it from another thread. */
    private volatile List<BatchFlowInSplitWorkUnit> parallelBatchWorkUnits;

    private final ServicesManager servicesManager;
    private final IBatchKernelService batchKernel;
    private final JobContextImpl jobContext;

    /** Each flow work unit enqueues itself here on completion; the aggregation loop drains it. */
    private final BlockingQueue<BatchFlowInSplitWorkUnit> completedWorkQueue = new LinkedBlockingQueue<BatchFlowInSplitWorkUnit>();

    private final long rootJobExecutionId;

    /** Sub-job JSL models, one per flow; also used as the lock coordinating build vs. stop(). */
    final List<JSLJob> subJobs = new ArrayList<JSLJob>();

    protected Split split;

    public SplitControllerImpl(RuntimeJobExecution jobExecution, Split split, long rootJobExecutionId) {
        this.jobExecution = jobExecution;
        this.jobContext = jobExecution.getJobContext();
        this.rootJobExecutionId = rootJobExecutionId;
        this.split = split;
        servicesManager = ServicesManagerImpl.getInstance();
        batchKernel = servicesManager.getBatchKernelService();
    }

    @Override
    public void stop() {
        // It's possible we may try to stop a split before any
        // sub steps have been started.
        synchronized (subJobs) {
            if (parallelBatchWorkUnits != null) {
                for (BatchParallelWorkUnit subJob : parallelBatchWorkUnits) {
                    try {
                        batchKernel.stopJob(subJob.getJobExecutionImpl().getExecutionId());
                    } catch (Exception e) {
                        // TODO - Is this what we want to know.
                        // Blow up if it happens to force the issue.
                        throw new IllegalStateException(e);
                    }
                }
            }
        }
    }

    /**
     * Builds the flow sub-jobs, launches them in parallel, and blocks until all
     * flows have completed, returning the aggregated split status.
     */
    @Override
    public SplitExecutionStatus execute() throws JobRestartException, JobStartException, JobExecutionAlreadyCompleteException, JobExecutionNotMostRecentException, NoSuchJobExecutionException {
        String sourceMethod = "execute";
        if (logger.isLoggable(Level.FINER)) {
            logger.entering(sourceClass, sourceMethod, "Root JobExecution Id = " + rootJobExecutionId);
        }
        // Build all sub jobs from partitioned step
        buildSubJobBatchWorkUnits();
        // kick off the threads
        executeWorkUnits();
        // Deal with the results.
        SplitExecutionStatus status = waitForCompletionAndAggregateStatus();
        if (logger.isLoggable(Level.FINER)) {
            logger.exiting(sourceClass, sourceMethod, status);
        }
        return status;
    }

    /**
     * Note we restart all flows. There is no concept of "the flow completed". It is only steps
     * within the flows that may have already completed and so may not have needed to be rerun.
     *
     */
    private void buildSubJobBatchWorkUnits() {
        List<Flow> flows = this.split.getFlows();
        parallelBatchWorkUnits = new ArrayList<BatchFlowInSplitWorkUnit>();
        // Build all sub jobs from flows in split
        synchronized (subJobs) {
            for (Flow flow : flows) {
                subJobs.add(PartitionedStepBuilder.buildFlowInSplitSubJob(jobExecution.getExecutionId(), jobContext, this.split, flow));
            }
            for (JSLJob job : subJobs) {
                // count == 0 means a fresh start for this sub-job; count == 1 means a restart.
                int count = batchKernel.getJobInstanceCount(job.getId());
                FlowInSplitBuilderConfig config = new FlowInSplitBuilderConfig(job, completedWorkQueue, rootJobExecutionId);
                if (count == 0) {
                    parallelBatchWorkUnits.add(batchKernel.buildNewFlowInSplitWorkUnit(config));
                } else if (count == 1) {
                    parallelBatchWorkUnits.add(batchKernel.buildOnRestartFlowInSplitWorkUnit(config));
                } else {
                    throw new IllegalStateException("There is an inconsistency somewhere in the internal subjob creation");
                }
            }
        }
    }

    /** Starts (or restarts) every flow work unit in parallel. */
    private void executeWorkUnits () {
        // Then start or restart all subjobs in parallel
        for (BatchParallelWorkUnit work : parallelBatchWorkUnits) {
            int count = batchKernel.getJobInstanceCount(work.getJobExecutionImpl().getJobInstance().getJobName());
            // NOTE(review): this assert contradicts the count > 1 restart branch below;
            // with assertions enabled that branch is unreachable — confirm intended.
            assert (count <= 1);
            if (count == 1) {
                batchKernel.startGeneratedJob(work);
            } else if (count > 1) {
                batchKernel.restartGeneratedJob(work);
            } else {
                throw new IllegalStateException("There is an inconsistency somewhere in the internal subjob creation");
            }
        }
    }

    /**
     * Blocks until every flow has finished, folding each flow's terminating
     * status into a single aggregate, and returns the resulting split status.
     */
    private SplitExecutionStatus waitForCompletionAndAggregateStatus() {
        SplitExecutionStatus splitStatus = new SplitExecutionStatus();
        ExtendedBatchStatus aggregateTerminatingStatus = null;
        for (int i = 0; i < subJobs.size(); i++) {
            BatchFlowInSplitWorkUnit batchWork;
            try {
                batchWork = completedWorkQueue.take(); //wait for each thread to finish and then look at it's status
            } catch (InterruptedException e) {
                // Restore the interrupt flag before surfacing the failure.
                Thread.currentThread().interrupt();
                throw new BatchContainerRuntimeException(e);
            }
            RuntimeFlowInSplitExecution flowExecution = batchWork.getJobExecutionImpl();
            ExecutionStatus flowStatus = flowExecution.getFlowStatus();
            if (logger.isLoggable(Level.FINE)) {
                logger.fine("Subjob " + flowExecution.getExecutionId() + " ended with flow-in-split status: " + flowStatus);
            }
            // FIX: capture the returned aggregate. The previous version reassigned the
            // method's parameter, which (Java being pass-by-value) never updated this
            // local — so the aggregate stayed null and the split always reported
            // NORMAL_COMPLETION even when a flow failed or stopped the job.
            aggregateTerminatingStatus = aggregateTerminatingStatusFromSingleFlow(aggregateTerminatingStatus, flowStatus, splitStatus);
        }
        // If this is still set to 'null' that means all flows completed normally without terminating the job.
        if (aggregateTerminatingStatus == null) {
            logger.fine("Setting normal split status as no contained flows ended the job.");
            aggregateTerminatingStatus = ExtendedBatchStatus.NORMAL_COMPLETION;
        }
        splitStatus.setExtendedBatchStatus(aggregateTerminatingStatus);
        logger.fine("Returning from waitForCompletionAndAggregateStatus with return value: " + splitStatus);
        return splitStatus;
    }

    //
    // A <fail> and an uncaught exception are peers. They each take precedence over a <stop>, which take precedence over an <end>.
    // Among peers the last one seen gets to set the exit stauts.
    //
    /**
     * Folds a single flow's terminating status into the running aggregate and
     * returns the (possibly updated) aggregate; also records exit status /
     * restart-on into the job context and flags the split status when more
     * than one flow could have terminated the job.
     */
    private ExtendedBatchStatus aggregateTerminatingStatusFromSingleFlow(ExtendedBatchStatus aggregateStatus, ExecutionStatus flowStatus,
            SplitExecutionStatus splitStatus) {
        String exitStatus = flowStatus.getExitStatus();
        String restartOn = flowStatus.getRestartOn();
        ExtendedBatchStatus flowBatchStatus = flowStatus.getExtendedBatchStatus();
        logger.fine("Aggregating possible terminating status for flow ending with status: " + flowStatus
                + ", restartOn = " + restartOn);
        if ( flowBatchStatus.equals(ExtendedBatchStatus.JSL_END) || flowBatchStatus.equals(ExtendedBatchStatus.JSL_STOP) ||
                flowBatchStatus.equals(ExtendedBatchStatus.JSL_FAIL) || flowBatchStatus.equals(ExtendedBatchStatus.EXCEPTION_THROWN) ) {
            if (aggregateStatus == null) {
                logger.fine("A flow detected as ended because of a terminating condition: " + flowBatchStatus.name()
                        + ". First flow detected in terminating state. Setting exitStatus if non-null.");
                setInJobContext(flowBatchStatus, exitStatus, restartOn);
                aggregateStatus = flowBatchStatus;
            } else {
                splitStatus.setCouldMoreThanOneFlowHaveTerminatedJob(true);
                if (aggregateStatus.equals(ExtendedBatchStatus.JSL_END)) {
                    logger.warning("Current flow's batch and exit status will take precedence over and override earlier one from <end> transition element. " +
                            "Overriding, setting exit status if non-null and preparing to end job.");
                    setInJobContext(flowBatchStatus, exitStatus, restartOn);
                    aggregateStatus = flowBatchStatus;
                } else if (aggregateStatus.equals(ExtendedBatchStatus.JSL_STOP)) {
                    // Everything but an <end> overrides a <stop>
                    if (!(flowBatchStatus.equals(ExtendedBatchStatus.JSL_END))) {
                        logger.warning("Current flow's batch and exit status will take precedence over and override earlier one from <stop> transition element. " +
                                "Overriding, setting exit status if non-null and preparing to end job.");
                        setInJobContext(flowBatchStatus, exitStatus, restartOn);
                        aggregateStatus = flowBatchStatus;
                    } else {
                        logger.fine("End does not override stop. The flow with <end> will effectively be ignored with respect to terminating the job.");
                    }
                } else if (aggregateStatus.equals(ExtendedBatchStatus.JSL_FAIL) || aggregateStatus.equals(ExtendedBatchStatus.EXCEPTION_THROWN)) {
                    if (flowBatchStatus.equals(ExtendedBatchStatus.JSL_FAIL) || flowBatchStatus.equals(ExtendedBatchStatus.EXCEPTION_THROWN)) {
                        logger.warning("Current flow's batch and exit status will take precedence over and override earlier one from <fail> transition element or exception thrown. " +
                                "Overriding, setting exit status if non-null and preparing to end job.");
                        setInJobContext(flowBatchStatus, exitStatus, restartOn);
                        aggregateStatus = flowBatchStatus;
                    } else {
                        logger.fine("End and stop do not override exception thrown or <fail>. The flow with <end> or <stop> will effectively be ignored with respect to terminating the job.");
                    }
                }
            }
        } else {
            logger.fine("Flow completing normally without any terminating transition or exception thrown.");
        }
        return aggregateStatus;
    }

    /** Records the flow's exit status (and restart-on, for a &lt;stop&gt;) into the job context. */
    private void setInJobContext(ExtendedBatchStatus flowBatchStatus, String exitStatus, String restartOn) {
        if (exitStatus != null) {
            jobContext.setExitStatus(exitStatus);
        }
        if (ExtendedBatchStatus.JSL_STOP.equals(flowBatchStatus)) {
            if (restartOn != null) {
                jobContext.setRestartOn(restartOn);
            }
        }
    }

    public List<BatchFlowInSplitWorkUnit> getParallelJobExecs() {
        return parallelBatchWorkUnits;
    }

    @Override
    public List<Long> getLastRunStepExecutions() {
        List<Long> stepExecIdList = new ArrayList<Long>();
        for (BatchFlowInSplitWorkUnit workUnit : parallelBatchWorkUnits) {
            List<Long> stepExecIds = workUnit.getController().getLastRunStepExecutions();
            stepExecIdList.addAll(stepExecIds);
        }
        return stepExecIdList;
    }
}
|
/*
* (C) Copyright IBM Corp. 2019
*
* SPDX-License-Identifier: Apache-2.0
*/
package com.ibm.fhir.search.uri.test;
import static org.testng.Assert.assertEquals;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.testng.annotations.Test;
import com.ibm.fhir.search.SearchConstants.Type;
import com.ibm.fhir.search.context.FHIRSearchContext;
import com.ibm.fhir.search.context.FHIRSearchContextFactory;
import com.ibm.fhir.search.parameters.QueryParameter;
import com.ibm.fhir.search.parameters.QueryParameterValue;
import com.ibm.fhir.search.util.SearchUtil;
/**
* Test class for the URI Builder
*
* @author pbastide
*
*/
public class UriTest {

    /**
     * Verifies that a self URI rebuilt from the parsed search context round-trips
     * to the original request, including the unrecognized {@code _fudge} parameter.
     */
    @Test
    public void testUriBadSecurity() throws URISyntaxException {
        String incoming =
                "https://localhost:9443/fhir-server/api/v4/_search?_count=10&_security=http://ibm.com/fhir/security&_fudge=tag&_page=1";
        // Base request URI is everything before the query string.
        String requestUriString = incoming.split("\\?")[0];

        List<QueryParameter> searchParameters = new ArrayList<>();

        QueryParameterValue securityValue = new QueryParameterValue();
        securityValue.setValueString("http://ibm.com/fhir/security");
        searchParameters.add(new QueryParameter(Type.TOKEN, "_security", null, null, Arrays.asList(securityValue)));

        QueryParameterValue fudgeValue = new QueryParameterValue();
        fudgeValue.setValueString("tag");
        searchParameters.add(new QueryParameter(Type.TOKEN, "_fudge", null, null, Arrays.asList(fudgeValue)));

        FHIRSearchContext ctx = FHIRSearchContextFactory.createSearchContext();
        ctx.setPageNumber(1);
        ctx.setPageSize(10);
        ctx.setSearchParameters(searchParameters);

        assertEquals(SearchUtil.buildSearchSelfUri(requestUriString, ctx), incoming);
    }
}
|
package com.interview.string;
import java.util.HashSet;
import java.util.Set;
/**
* References
* http://www.geeksforgeeks.org/length-of-the-longest-substring-without-repeating-characters/
* https://leetcode.com/problems/longest-substring-without-repeating-characters/
*/
public class LongestSubstringWithoutRepetingCharacter {

    /**
     * Returns the length of the longest substring of {@code s} that contains
     * no repeated character.
     * <p>
     * Classic two-pointer sliding window: {@code right} advances while each new
     * character is unique inside the window; on a duplicate, {@code left}
     * advances (evicting characters) until the duplicate can be admitted.
     * Each character is added and removed at most once, so this runs in O(n).
     */
    public int lengthOfLongestSubstring(String s) {
        Set<Character> window = new HashSet<>();
        int best = 0;
        int left = 0;
        int right = 0;
        while (right < s.length()) {
            char candidate = s.charAt(right);
            if (window.add(candidate)) {
                // Unique within the window: grow it and update the best length seen.
                right++;
                best = Math.max(best, window.size());
            } else {
                // Duplicate: shrink the window from the left until it can be admitted.
                window.remove(s.charAt(left));
                left++;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        final LongestSubstringWithoutRepetingCharacter solver = new LongestSubstringWithoutRepetingCharacter();
        System.out.println(solver.lengthOfLongestSubstring("ABCDECAMNCZB"));
    }
}
|
package brennus;
import brennus.model.CaseBlockStatement;
import brennus.model.LiteralExpression;
import brennus.model.Statement;
/**
* builds a case
*
* @author Julien Le Dem
*
* @param <T> the type of the parent of the switch to return on completion
*/
public final class CaseBuilder<T> extends StatementBuilder<CaseBuilder<T>> {

    /** Callback through which the finished case block is handed back to the owning switch builder. */
    public interface CaseStatementHandler<T> {
        SwitchBuilder<T> handleStatement(CaseBlockStatement caseStatement);
    }

    /** The literal this case matches on. */
    private final LiteralExpression literalExpression;
    /** Receives the completed case block when endCase()/breakCase() is called. */
    private final CaseStatementHandler<T> statementHandler;
    /** Statements accumulated so far; the builder is immutable, so appends produce a new builder. */
    private final ImmutableList<Statement> statements;
    /** Source line number captured when the case was opened. */
    private final int line;

    /** Opens a new, empty case; records the builder's current source line number. */
    CaseBuilder(LiteralExpression literalExpression, CaseStatementHandler<T> statementHandler, Builder builder) {
        this(
                literalExpression,
                statementHandler,
                builder,
                ImmutableList.<Statement>empty(),
                builder.getSourceLineNumber());
    }

    /** Internal copy constructor used when appending statements immutably. */
    private CaseBuilder(
            LiteralExpression literalExpression,
            CaseStatementHandler<T> statementHandler,
            Builder builder,
            ImmutableList<Statement> statements,
            int line) {
        super(builder);
        this.literalExpression = literalExpression;
        this.statementHandler = statementHandler;
        this.statements = statements;
        this.line = line;
    }

    /** Returns a new CaseBuilder with the statement appended (this instance is unchanged). */
    private CaseBuilder<T> addStatement(Statement statement) {
        return new CaseBuilder<T>(literalExpression, statementHandler, builder, statements.append(statement), line);
    }

    /**
     * Completes this case and returns to the enclosing switch builder.
     * The final {@code false} flag presumably means "no break" (fall-through),
     * mirroring breakCase() below — confirm against CaseBlockStatement.
     */
    public SwitchBuilder<T> endCase() {
        return statementHandler.handleStatement(new CaseBlockStatement(line, literalExpression, statements, false));
    }

    // NOTE(review): this appears to override an abstract hook in StatementBuilder
    // but carries no @Override annotation — confirm against the superclass.
    protected StatementHandler<CaseBuilder<T>> statementHandler() {
        return new StatementHandler<CaseBuilder<T>>() {
            public CaseBuilder<T> handleStatement(Statement statement) {
                return addStatement(statement);
            }
        };
    }

    /** Completes this case with a terminating break and returns to the enclosing switch builder. */
    public SwitchBuilder<T> breakCase() {
        return statementHandler.handleStatement(new CaseBlockStatement(line, literalExpression, statements, true));
    }

    /** Applies the given function to this builder (continuation-style convenience). */
    public <S> S map(Function<CaseBuilder<T>, S> function) {
        return function.apply(this);
    }
}
|
package widget.picture;
import android.annotation.SuppressLint;
import android.view.View;
import com.chaos.util.java.toast.ToastKit;
import com.chaos.widget.picture.pulse.PulseView;
import com.example.chaos.R;
import base.BaseActivity;
import butterknife.BindView;
import value.ChaosMagic;
/**
 * @desc: Picture pulse demo page — taps on the view alternately start and
 *        finish the pulse animation; start/finish callbacks show toasts.
 * @author: zsp
 * @date: 2021/9/26 3:13 PM
 */
public class PicturePulseActivity extends BaseActivity implements View.OnClickListener, PulseView.PulseListener {
    @SuppressLint("NonConstantResourceId")
    @BindView(R.id.picturePulseActivityPv)
    PulseView picturePulseActivityPv;
    // Click counter used to alternate between starting and finishing the pulse.
    private int counter;

    /**
     * Layout resource ID.
     *
     * @return layout resource ID
     */
    @Override
    protected int layoutResId() {
        return R.layout.activity_picture_pulse;
    }

    /**
     * Initialize views. (No extra setup needed here.)
     */
    @Override
    protected void stepUi() {
    }

    /**
     * Initial configuration. (Nothing to configure here.)
     */
    @Override
    protected void initConfiguration() {
    }

    /**
     * Set listeners: this activity handles both pulse callbacks and clicks.
     */
    @Override
    protected void setListener() {
        picturePulseActivityPv.setPulseListener(this);
        picturePulseActivityPv.setOnClickListener(this);
    }

    /**
     * Start logic. (Nothing runs automatically; pulsing is click-driven.)
     */
    @Override
    protected void startLogic() {
    }

    @Override
    public void onClick(View v) {
        // Even-numbered clicks start the pulse, odd-numbered clicks finish it
        // (ChaosMagic.INT_TWO is presumably 2 — confirm against its declaration).
        if (counter++ % ChaosMagic.INT_TWO == 0) {
            picturePulseActivityPv.startPulse();
        } else {
            picturePulseActivityPv.finishPulse();
        }
    }

    /**
     * OnStart trigger when we start draw pulse.
     */
    @Override
    public void onStartPulse() {
        ToastKit.showShort(getString(R.string.startPulse));
    }

    /**
     * OnFinish trigger when all of pulse models reached their max fraction
     */
    @Override
    public void onFinishPulse() {
        ToastKit.showShort(getString(R.string.finishPulse));
    }
}
/**
* JHOVE2 - Next-generation architecture for format-aware characterization
*
* Copyright (c) 2009 by The Regents of the University of California.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* o Neither the name of the University of California/California Digital
* Library, Ithaka Harbors/Portico, or Stanford University, nor the names of
* its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.jhove2.module.format.riff;
import java.io.EOFException;
import java.io.IOException;
import org.jhove2.annotation.ReportableProperty;
import org.jhove2.core.JHOVE2;
import org.jhove2.core.JHOVE2Exception;
import org.jhove2.core.io.Input;
import org.jhove2.core.source.Source;
import com.sleepycat.persist.model.Persistent;
/** RIFF format LIST chunk.
 *
 * @author slabrams
 */
@Persistent
public class LISTChunk
    extends GenericChunk
{
    /** Chunk factory used to instantiate child chunks by their 4-character identifier. */
    protected ChunkFactory chunkFactory;

    /** List type: the 4-character code immediately following the chunk header. */
    protected String listType;

    /** Instantiate a new <code>LISTChunk</code>. */
    public LISTChunk() {
        super();
    }

    /**
     * Parse a RIFF format LIST chunk.
     *
     * @param jhove2
     *            JHOVE2 framework
     * @param source
     *            RIFF source unit
     * @param input
     *            RIFF source input
     * @return Number of bytes consumed
     * @throws EOFException
     *             If End-of-File is reached reading the source unit
     * @throws IOException
     *             If an I/O exception is raised reading the source unit
     * @throws JHOVE2Exception
     * @see org.jhove2.module.format.Parser#parse(org.jhove2.core.JHOVE2,
     *      org.jhove2.core.source.Source, org.jhove2.core.io.Input)
     */
    @Override
    public long parse(JHOVE2 jhove2, Source source, Input input)
        throws EOFException, IOException, JHOVE2Exception
    {
        long consumed = super.parse(jhove2, source, input);

        /* List type: a 4-character code following the generic chunk header. */
        this.listType = readFourCC(input);
        consumed += 4;

        /* Child chunks: iterate until the end of this chunk's payload. */
        long pos = input.getPosition();
        long max = this.getNextChunkOffset();
        while (pos < max) {
            String identifier = readFourCC(input);
            consumed += 4;
            Chunk chunk = this.chunkFactory.getChunk(identifier);
            consumed += chunk.parse(jhove2, source, input);
            this.chunks.add(chunk);
            /* NOTE(review): if a child ever reports a non-advancing offset this
             * loop would not terminate — assumed guaranteed by
             * Chunk.getNextChunkOffset(); confirm against Chunk's contract. */
            pos = chunk.getNextChunkOffset();
            input.setPosition(pos);
        }
        return consumed;
    }

    /** Read a 4-character ASCII code (FOURCC) from the input.
     * @param input Source input, positioned at the code
     * @return The 4-character code
     * @throws EOFException
     *             If End-of-File is reached reading the source unit
     * @throws IOException
     *             If an I/O exception is raised reading the source unit
     */
    private static String readFourCC(Input input)
        throws EOFException, IOException
    {
        StringBuilder sb = new StringBuilder(4);
        for (int i = 0; i < 4; i++) {
            sb.append((char) input.readUnsignedByte());
        }
        return sb.toString();
    }

    /** Get chunk factory.
     * @return Chunk factory
     */
    public ChunkFactory getChunkFactory() {
        return this.chunkFactory;
    }

    /** Get the list type.
     * @return List type
     */
    @ReportableProperty(order=1, value="List type.")
    public String getListType() {
        return this.listType;
    }

    /** Set chunk factory.
     * @param factory Chunk factory
     */
    public void setChunkFactory(ChunkFactory factory) {
        this.chunkFactory = factory;
    }
}
|
package cz.muni.fi.pv243.mustech.service;
import cz.muni.fi.pv243.mustech.dal.OptionRepository;
import cz.muni.fi.pv243.mustech.model.Issue;
import cz.muni.fi.pv243.mustech.model.Option;
import javax.inject.Inject;
import javax.inject.Named;
import javax.transaction.Transactional;
/**
 * Option service implementing and initializing the generic service.
 *
 * @author Tomas
 */
@Named
@Transactional
public class OptionService extends AbstractGenericService<Option, OptionRepository> {

    @Inject
    private PrincipalChecker<Issue> issuePrincipalChecker;

    /**
     * Access to an option is delegated to the principal checker of the issue
     * that owns the option's poll.
     */
    @Override
    public boolean canAccess(String principalName, Option entity) {
        Issue owningIssue = entity.getPoll().getIssue();
        return issuePrincipalChecker.canAccess(principalName, owningIssue);
    }
}
|
package com.example.vivian.voicechat;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {

    /** Sanity check: integer addition behaves as expected. */
    @Test
    public void addition_isCorrect() throws Exception {
        final int sum = 2 + 2;
        assertEquals(4, sum);
    }
}
|
package com.PT.service.impl;
import com.PT.dao.DepositInfoMapper;
import com.PT.dao.DepositRechargeRecordMapper;
import com.PT.dao.StoreMapper;
import com.PT.dao.YkatCommonUtilMapper;
import com.PT.entity.DepositRechargeRecord;
import com.PT.entity.DepositRechargeRecordExample;
import com.PT.entity.Store;
import com.PT.service.DepositService;
import com.PT.service.LogService;
import com.PT.tools.QueryToMap;
import com.PT.tools.ToStrings;
import com.PT.tools.YkatCommonUtil;
import com.PT.tools.YkatConstant;
import com.github.pagehelper.PageHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Service
public class DepositServiceImpl implements DepositService {

    @Autowired
    private DepositRechargeRecordMapper depositRechargeRecordMapper;

    @Autowired
    private DepositInfoMapper depositInfoMapper;

    @Autowired
    private LogService logService;

    @Autowired
    private StoreMapper storeMapper;

    @Autowired
    private YkatCommonUtilMapper ykatCommonUtilMapper;

    /**
     * List a user's deposit top-up records, paginated and optionally filtered.
     *
     * @param userId         unique user identifier
     * @param page           1-based page number
     * @param ipp            items per page
     * @param queryCondition optional query string; may contain a "time" filter
     * @return map with "records" (current page), "maxPage" (total page count)
     *         and "currentDeposit" (the user's current deposit balance)
     * @throws Exception if the query condition cannot be parsed
     */
    public Map<String, Object> listDepositRecord(int userId, int page, int ipp, String queryCondition) throws Exception {
        // Raw type retained: QueryToMap.stringToMap's generic signature is not
        // visible here — TODO confirm and parameterize.
        Map factors = new HashMap();
        if (queryCondition != null && !"".equals(queryCondition)) { // a search condition was supplied
            factors = QueryToMap.stringToMap(queryCondition);
            if (factors.containsKey("time")) {
                // Expand the "time" shorthand into explicit from/to date bounds.
                YkatCommonUtil.putFromAndToDate(factors, (String) factors.get("time"));
            }
        }
        factors.put("userId", userId);
        PageHelper.startPage(page, ipp);
        List<Map<String, Object>> records = depositInfoMapper.selectByFactor(factors);
        // Ceiling division: total row count -> number of pages.
        int maxPage = (depositInfoMapper.countByFactor(factors) - 1) / ipp + 1;
        Map<String, Object> resultMap = new HashMap<String, Object>();
        resultMap.put("records", records);
        resultMap.put("maxPage", maxPage);
        Integer currentDeposit = ykatCommonUtilMapper.getCurrentDepositByUserId(userId);
        resultMap.put("currentDeposit", currentDeposit);
        return resultMap;
    }

    /**
     * Soft-delete a set of deposit top-up records: marks them with the deleted
     * status rather than removing the rows.
     *
     * @param userId unique user identifier (used for audit logging)
     * @param ids    deposit record primary keys
     * @throws Exception if {@code ids} is null or empty
     */
    @Transactional
    @Override
    public void deleteDepositRecord(int userId, List<Integer> ids) throws Exception {
        if (ids == null || ids.isEmpty()) {
            throw new Exception("缺少参数");
        }
        DepositRechargeRecordExample example = new DepositRechargeRecordExample();
        example.createCriteria().andIdIn(ids);
        String descp = ToStrings.integerListToStrings(ids, '&');
        DepositRechargeRecord deleteTo = new DepositRechargeRecord();
        deleteTo.setStatus(YkatConstant.DELETE_DEPOSIT_STATE);
        // NOTE(review): the update count is deliberately not checked, so unknown
        // or already-deleted ids silently succeed — confirm this is intended.
        depositRechargeRecordMapper.updateByExampleSelective(deleteTo, example);
        logService.insertLog(userId, "delete", "on table ykat_deposit_recharge_records " +
                "by ids in [" + descp + "]");
    }

    /**
     * Add a deposit top-up record and update the store's deposit balance.
     *
     * @param userId       unique user identifier
     * @param parameterMap expects "money" (Integer, 1..100000) and
     *                     "cardId" (String bank card number)
     * @throws Exception if validation fails or the bank card is unknown
     */
    @Transactional
    @Override
    public void addDepositRecord(int userId, Map<String, Object> parameterMap) throws Exception {
        String checkMessage = YkatCommonUtil.checkMapHasNull(parameterMap);
        if (!"success".equals(checkMessage)) {
            throw new Exception(checkMessage);
        }
        Integer rechargeMoney = (Integer) parameterMap.get("money");
        if (!(rechargeMoney > 0 && rechargeMoney <= 100000)) {
            throw new Exception("金额不能超过100000");
        }
        String bankcardId = (String) parameterMap.get("cardId");
        if ("".equals(bankcardId)) {
            throw new Exception("缺少银行卡号信息");
        }
        // Resolve the card number to its primary key.
        Integer bankcardPrimaryKey = ykatCommonUtilMapper.getBankcardPrimaryKeyByCardId(bankcardId);
        // BUG FIX: previously this tested bankcardId (already known non-empty)
        // instead of the lookup result, so an unknown bank card was never rejected.
        if (bankcardPrimaryKey == null) {
            throw new Exception("不存在的银行卡");
        }
        Integer currentMoney = ykatCommonUtilMapper.getCurrentDepositByUserId(userId); // store's current deposit
        Integer storeId = ykatCommonUtilMapper.getStoreIdByUserId(userId); // store primary key
        currentMoney += rechargeMoney;
        DepositRechargeRecord rechargeRecord = new DepositRechargeRecord();
        rechargeRecord.setCreatedAt(new Date());
        rechargeRecord.setRechargeMoney(rechargeMoney);
        rechargeRecord.setCurrentMoney(currentMoney);
        rechargeRecord.setRechargeTime(new Date());
        rechargeRecord.setStoreId(storeId);
        rechargeRecord.setBankcardId(bankcardPrimaryKey);
        // Status reflects whether the new balance meets the required deposit.
        Integer status = currentMoney >= YkatConstant.ENOUGH_DEPOSIT
                ? YkatConstant.ENOUGH_DEPOSIT_STATE
                : YkatConstant.SHORT_OF_DEPOSIT_STATE;
        rechargeRecord.setStatus(status);
        Store store = new Store();
        store.setId(storeId);
        store.setDeposit(currentMoney);
        if (depositRechargeRecordMapper.insertSelective(rechargeRecord) > 0
                && storeMapper.updateByPrimaryKeySelective(store) > 0) {
            logService.insertLog(userId, "insert", "recharge deposit of store " + storeId + " " + rechargeMoney + " ¥");
        } else {
            // RuntimeException triggers transaction rollback of the partial write.
            throw new RuntimeException("添加保证金失败");
        }
    }
}
|
// Copyright 2018-2021 Polyaxon, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
* Polyaxon SDKs and REST API specification.
* Polyaxon SDKs and REST API specification.
*
* The version of the OpenAPI document: 1.10.1
* Contact: contact@polyaxon.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.client.model;
import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
/**
 * V1HostPathConnection
 *
 * OpenAPI-generated model for a host-path connection: a path on the host,
 * the path it is mounted at, a read-only flag, and a connection kind.
 * Generated code — do not edit manually (see the file header).
 */
public class V1HostPathConnection {
// JSON field names used by Gson serialization.
public static final String SERIALIZED_NAME_HOST_PATH = "hostPath";
@SerializedName(SERIALIZED_NAME_HOST_PATH)
private String hostPath;
public static final String SERIALIZED_NAME_MOUNT_PATH = "mountPath";
@SerializedName(SERIALIZED_NAME_MOUNT_PATH)
private String mountPath;
public static final String SERIALIZED_NAME_READ_ONLY = "readOnly";
@SerializedName(SERIALIZED_NAME_READ_ONLY)
private Boolean readOnly;
public static final String SERIALIZED_NAME_KIND = "kind";
@SerializedName(SERIALIZED_NAME_KIND)
private Object kind;
// Fluent setter: assigns hostPath and returns this for chaining.
public V1HostPathConnection hostPath(String hostPath) {
this.hostPath = hostPath;
return this;
}
/**
 * Get hostPath
 * @return hostPath
 **/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public String getHostPath() {
return hostPath;
}
public void setHostPath(String hostPath) {
this.hostPath = hostPath;
}
// Fluent setter: assigns mountPath and returns this for chaining.
public V1HostPathConnection mountPath(String mountPath) {
this.mountPath = mountPath;
return this;
}
/**
 * Get mountPath
 * @return mountPath
 **/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public String getMountPath() {
return mountPath;
}
public void setMountPath(String mountPath) {
this.mountPath = mountPath;
}
// Fluent setter: assigns readOnly and returns this for chaining.
public V1HostPathConnection readOnly(Boolean readOnly) {
this.readOnly = readOnly;
return this;
}
/**
 * Get readOnly
 * @return readOnly
 **/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public Boolean getReadOnly() {
return readOnly;
}
public void setReadOnly(Boolean readOnly) {
this.readOnly = readOnly;
}
// Fluent setter: assigns kind and returns this for chaining.
public V1HostPathConnection kind(Object kind) {
this.kind = kind;
return this;
}
/**
 * Get kind
 * @return kind
 **/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public Object getKind() {
return kind;
}
public void setKind(Object kind) {
this.kind = kind;
}
// Field-wise equality; requires the exact same runtime class (no subclasses).
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
V1HostPathConnection v1HostPathConnection = (V1HostPathConnection) o;
return Objects.equals(this.hostPath, v1HostPathConnection.hostPath) &&
Objects.equals(this.mountPath, v1HostPathConnection.mountPath) &&
Objects.equals(this.readOnly, v1HostPathConnection.readOnly) &&
Objects.equals(this.kind, v1HostPathConnection.kind);
}
// Hash over the same fields compared in equals, keeping the contract.
@Override
public int hashCode() {
return Objects.hash(hostPath, mountPath, readOnly, kind);
}
// Multi-line debug representation; nested values are re-indented.
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class V1HostPathConnection {\n");
sb.append(" hostPath: ").append(toIndentedString(hostPath)).append("\n");
sb.append(" mountPath: ").append(toIndentedString(mountPath)).append("\n");
sb.append(" readOnly: ").append(toIndentedString(readOnly)).append("\n");
sb.append(" kind: ").append(toIndentedString(kind)).append("\n");
sb.append("}");
return sb.toString();
}
/**
 * Convert the given object to string with each line indented by 4 spaces
 * (except the first line).
 */
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
/*
* Copyright 2013-2016 Sergey Ignatov, Alexander Zolotov, Florin Patan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.goide.inspections.suppression;
import com.goide.inspections.GoMultiplePackagesInspection;
import com.goide.inspections.GoUnusedImportInspection;
import com.goide.inspections.unresolved.GoUnresolvedReferenceInspection;
import com.goide.inspections.unresolved.GoUnusedGlobalVariableInspection;
import com.goide.inspections.unresolved.GoUnusedVariableInspection;
import com.goide.quickfix.GoQuickFixTestBase;
import org.jetbrains.annotations.NotNull;
/**
 * Quick-fix tests for Go inspection-suppression comments.
 *
 * Each test invokes the suppression quick-fix named in {@code doTest(...)}
 * against a fixture file derived from the test name (see
 * {@link #getBasePath()}), then re-highlights the result via the overridden
 * {@link #doTest(String)}.
 */
public class GoSuppressionFixTest extends GoQuickFixTestBase {
@Override
protected void setUp() throws Exception {
super.setUp();
// Enable every inspection that the suppression fixtures rely on.
myFixture.enableInspections(GoUnusedImportInspection.class);
myFixture.enableInspections(GoUnresolvedReferenceInspection.class);
myFixture.enableInspections(GoUnusedGlobalVariableInspection.class);
myFixture.enableInspections(GoUnusedVariableInspection.class);
myFixture.enableInspections(GoMultiplePackagesInspection.class);
}
// --- Suppression of a single inspection at various scopes ---
public void testImportStatement() {
doTest("Suppress for import");
}
public void testFirstImportStatement() {
doTest("Suppress for import");
}
public void testFunctionSuppressionFix() {
doTest("Suppress for function");
}
public void testFunctionSuppressionFixWithExistingComment() {
doTest("Suppress for function");
}
public void testStatementSuppressionFix() {
doTest("Suppress for statement");
}
public void testStatementSuppressionFixWithExistingComment() {
doTest("Suppress for statement");
}
public void testSwitchCaseSuppressionFix() {
doTest("Suppress all inspections for case");
}
public void testSwitchCaseSuppressionFixWithExistingComment() {
doTest("Suppress all inspections for case");
}
public void testSelectCaseSuppressionFix() {
doTest("Suppress all inspections for case");
}
public void testSelectCaseSuppressionFixWithExistingComment() {
doTest("Suppress all inspections for case");
}
public void testVariableDeclarationSuppressionFix() {
doTest("Suppress for variable");
}
public void testVariableDeclarationSuppressionFixWithExistingComment() {
doTest("Suppress for variable");
}
// --- Suppression of ALL inspections at various scopes ---
public void testFunctionAllSuppressionFix() {
doTest("Suppress all inspections for function");
}
public void testFunctionAllSuppressionFixWithExistingComment() {
doTest("Suppress all inspections for function");
}
public void testStatementAllSuppressionFix() {
doTest("Suppress all inspections for statement");
}
public void testStatementAllSuppressionFixWithExistingComment() {
doTest("Suppress all inspections for statement");
}
public void testSwitchCaseAllSuppressionFix() {
doTest("Suppress all inspections for case");
}
public void testSwitchCaseAllSuppressionFixWithExistingComment() {
doTest("Suppress all inspections for case");
}
public void testSelectCaseAllSuppressionFix() {
doTest("Suppress all inspections for case");
}
public void testSelectCaseAllSuppressionFixWithExistingComment() {
doTest("Suppress all inspections for case");
}
public void testVariableDeclarationAllSuppressionFix() {
doTest("Suppress all inspections for variable");
}
public void testVariableDeclarationAllSuppressionFixWithExistingComment() {
doTest("Suppress all inspections for variable");
}
// --- Edge cases: nested declarations, comm clauses, package clause ---
public void testInnerVariableDeclarationSuppressionFix() {
// Negative case: the variable-scoped fix must NOT be offered here.
doTestNoFix("Suppress for variable");
}
public void testInnerVariableDeclarationSuppressionFix2() {
doTest("Suppress for statement");
}
public void testInnerVariableDeclarationSuppressionFix3() {
doTest("Suppress for function");
}
public void testCommClauseStatementSuppressionFix() {
doTest("Suppress all inspections for statement");
}
public void testPackageClause() {
// Fixture is created inline rather than loaded from the base path.
myFixture.configureByText("a.go", "package somePackage");
doTest("Suppress for package statement");
}
public void testPackageClauseSuppressAll() {
myFixture.configureByText("a.go", "package somePackage");
doTest("Suppress all inspections for package statement");
}
public void testSuppressedNestedSelect() {
// Highlighting-only check: suppressions in the fixture must hold.
myFixture.testHighlighting(getTestName(true) + ".go");
}
@NotNull
@Override
protected String getBasePath() {
return "inspections/suppression/fix";
}
@Override
protected void doTest(@NotNull String quickFixName) {
// After applying the fix, verify the resulting highlighting as well.
super.doTest(quickFixName);
myFixture.testHighlighting(String.format("%s-after-highlighting.go", getTestName(true)));
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.